galaxy-commits
Threads by month
- ----- 2025 -----
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions

commit/galaxy-central: dannon: Fix bitbucket issue #612 regarding administrative job lock toggling inappropriately. Split logical forms into actual forms.
by Bitbucket 21 Jul '11
by Bitbucket 21 Jul '11
21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/ebb1014b237d/
changeset: ebb1014b237d
user: dannon
date: 2011-07-21 16:43:16
summary: Fix bitbucket issue #612 regarding administrative job lock toggling inappropriately. Split logical forms into actual forms.
affected #: 2 files (459 bytes)
--- a/lib/galaxy/web/base/controller.py Thu Jul 21 10:26:41 2011 -0400
+++ b/lib/galaxy/web/base/controller.py Thu Jul 21 10:43:16 2011 -0400
@@ -21,7 +21,7 @@
# RE that tests for valid slug.
VALID_SLUG_RE = re.compile( "^[a-z0-9\-]+$" )
-
+
class BaseController( object ):
"""
Base class for Galaxy web application controllers.
@@ -51,7 +51,7 @@
else:
item_class = None
return item_class
-
+
Root = BaseController
class SharableItemSecurity:
@@ -72,7 +72,7 @@
#
# TODO: need to move UsesHistory, etc. mixins to better location - perhaps lib/galaxy/model/XXX ?
-#
+#
class UsesHistoryDatasetAssociation:
""" Mixin for controllers that use HistoryDatasetAssociation objects. """
@@ -119,15 +119,15 @@
dataset_data = open( dataset.file_name ).read(max_peek_size)
truncated = False
return truncated, dataset_data
-
+
class UsesVisualization( SharableItemSecurity ):
""" Mixin for controllers that use Visualization objects. """
len_files = None
-
+
def _get_dbkeys( self, trans ):
""" Returns all valid dbkeys that a user can use in a visualization. """
-
+
# Read len files.
if not self.len_files:
len_files = glob.glob( os.path.join(trans.app.config.len_file_path, "*.len") )
@@ -137,10 +137,10 @@
user = trans.get_user()
if 'dbkeys' in user.preferences:
user_keys = from_json_string( user.preferences['dbkeys'] )
-
+
dbkeys = [ (v, k) for k, v in trans.db_builds if k in self.len_files or k in user_keys ]
return dbkeys
-
+
def get_visualization( self, trans, id, check_ownership=True, check_accessible=False ):
""" Get a Visualization from the database by id, verifying ownership. """
# Load workflow from database
@@ -152,7 +152,7 @@
error( "Visualization not found" )
else:
return self.security_check( trans.get_user(), visualization, check_ownership, check_accessible )
-
+
def get_visualization_config( self, trans, visualization ):
""" Returns a visualization's configuration. Only works for trackster visualizations right now. """
@@ -172,16 +172,16 @@
dataset = self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
else:
dataset = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id(dataset_id) )
-
+
try:
prefs = t['prefs']
except KeyError:
prefs = {}
-
+
track_type, _ = dataset.datatype.get_track_type()
track_data_provider_class = get_data_provider( original_dataset=dataset )
track_data_provider = track_data_provider_class( original_dataset=dataset )
-
+
tracks.append( {
"track_type": track_type,
"name": t['name'],
@@ -192,15 +192,15 @@
"tool": get_tool_def( trans, dataset ),
"is_child": t.get('is_child', False)
} )
-
- config = { "title": visualization.title, "vis_id": trans.security.encode_id( visualization.id ),
+
+ config = { "title": visualization.title, "vis_id": trans.security.encode_id( visualization.id ),
"tracks": tracks, "bookmarks": bookmarks, "chrom": "", "dbkey": visualization.dbkey }
if 'viewport' in latest_revision.config:
config['viewport'] = latest_revision.config['viewport']
-
+
return config
-
+
class UsesStoredWorkflow( SharableItemSecurity ):
""" Mixin for controllers that use StoredWorkflow objects. """
def get_stored_workflow( self, trans, id, check_ownership=True, check_accessible=False ):
@@ -225,7 +225,7 @@
step.upgrade_messages = module.check_and_update_state()
# Any connected input needs to have value DummyDataset (these
# are not persisted so we need to do it every time)
- module.add_dummy_datasets( connections=step.input_connections )
+ module.add_dummy_datasets( connections=step.input_connections )
# Store state with the step
step.module = module
step.state = module.state
@@ -270,7 +270,7 @@
"""Mixin for controllers that use Galaxy form objects."""
def get_all_forms( self, trans, all_versions=False, filter=None, form_type='All' ):
"""
- Return all the latest forms from the form_definition_current table
+ Return all the latest forms from the form_definition_current table
if all_versions is set to True. Otherwise return all the versions
of all the forms from the form_definition table.
"""
@@ -684,7 +684,7 @@
trans.sa_session.flush()
info_association = sra.run
else:
- info_association = assoc.run
+ info_association = assoc.run
else:
info_association = None
if info_association:
@@ -912,7 +912,7 @@
else:
field_value = int( input_text_value )
elif field_type == CheckboxField.__name__:
- field_value = CheckboxField.is_checked( input_value )
+ field_value = CheckboxField.is_checked( input_value )
elif field_type == PasswordField.__name__:
field_value = kwd.get( field_name, '' )
else:
@@ -1043,7 +1043,7 @@
@web.require_login( "modify Galaxy items" )
def set_slug_async( self, trans, id, new_slug ):
""" Set item slug asynchronously. """
- pass
+ pass
@web.expose
@web.require_login( "share Galaxy items" )
def sharing( self, trans, id, **kwargs ):
@@ -1099,7 +1099,7 @@
item.slug = slug
return True
return False
-
+
"""
Deprecated: `BaseController` used to be available under the name `Root`
"""
@@ -1111,7 +1111,7 @@
user_list_grid = None
role_list_grid = None
group_list_grid = None
-
+
@web.expose
@web.require_admin
def index( self, trans, **kwd ):
@@ -1158,7 +1158,7 @@
toolbox=self.app.toolbox,
message=message,
status='done' )
-
+
# Galaxy Role Stuff
@web.expose
@web.require_admin
@@ -1342,7 +1342,7 @@
action='roles',
webapp=webapp,
message=util.sanitize_text( message ),
- status=status ) )
+ status=status ) )
in_users = []
out_users = []
in_groups = []
@@ -1934,7 +1934,7 @@
def purge_user( self, trans, **kwd ):
# This method should only be called for a User that has previously been deleted.
# We keep the User in the database ( marked as purged ), and stuff associated
- # with the user's private role in case we want the ability to unpurge the user
+ # with the user's private role in case we want the ability to unpurge the user
# some time in the future.
# Purging a deleted User deletes all of the following:
# - History where user_id = User.id
@@ -2158,7 +2158,7 @@
@web.expose
@web.require_admin
- def jobs( self, trans, stop = [], stop_msg = None, cutoff = 180, job_lock = None, **kwd ):
+ def jobs( self, trans, stop = [], stop_msg = None, cutoff = 180, job_lock = None, ajl_submit = None, **kwd ):
deleted = []
msg = None
status = None
@@ -2181,10 +2181,11 @@
msg += ' for deletion: '
msg += ', '.join( deleted )
status = 'done'
- if job_lock == 'lock':
- trans.app.job_manager.job_queue.job_lock = True
- elif job_lock == 'unlock':
- trans.app.job_manager.job_queue.job_lock = False
+ if ajl_submit:
+ if job_lock == 'on':
+ trans.app.job_manager.job_queue.job_lock = True
+ else:
+ trans.app.job_manager.job_queue.job_lock = False
cutoff_time = datetime.utcnow() - timedelta( seconds=int( cutoff ) )
jobs = trans.sa_session.query( trans.app.model.Job ) \
.filter( and_( trans.app.model.Job.table.c.update_time < cutoff_time,
@@ -2209,7 +2210,7 @@
job_lock = trans.app.job_manager.job_queue.job_lock )
## ---- Utility methods -------------------------------------------------------
-
+
def get_user( trans, id ):
"""Get a User from the database by id."""
# Load user from database
--- a/templates/admin/jobs.mako Thu Jul 21 10:26:41 2011 -0400
+++ b/templates/admin/jobs.mako Thu Jul 21 10:43:16 2011 -0400
@@ -21,11 +21,11 @@
report this error".
</p>
-<form name="jobs" action="${h.url_for()}" method="POST"><p/>
%if jobs:
+<form name="jobs" action="${h.url_for()}" method="POST"><table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%"><tr class="header"><td> </td>
@@ -84,10 +84,12 @@
</div></div><p/>
+</form>
%else:
<div class="infomessage">There are no unfinished jobs to show with current cutoff time.</div><p/>
%endif
+<form name="jobs" action="${h.url_for()}" method="POST"><div class="toolForm"><div class="toolFormTitle">
Update Jobs
@@ -110,30 +112,33 @@
</div></div></div>
- <p/>
+</form>
+<form name="jobs" action="${h.url_for()}" method="POST">
+ <p/><div class="toolForm"><div class="toolFormTitle">
Administrative Job Lock
</div><div class="toolFormBody">
- %if job_lock==True:
<div class="form-row">
- <p>All job execution is currently locked. Click here to unlock.</p>
- <input type='hidden' name='job_lock' value='unlock'/>
+ <input type="hidden" name="ajl_submit" value="True"/>
+ %if job_lock==True:
+ <p>Job dispatching is currently <strong>locked</strong>.</p>
+ <label>
+ <input type='checkbox' name='job_lock' checked='checked' />
+ Prevent jobs from dispatching.
+ </label>
+ %else:
+ <p>Job dispatching is currently <strong>unlocked</strong>.</p>
+ <label>
+ <input type='checkbox' name='job_lock' />
+ Prevent jobs from dispatching.
+ </label>
+ %endif
</div><div class="form-row">
- <input type="submit" class="primary-button" name="submit" value="Unlock">
+ <input type="submit" class="primary-button" name="submit" value="Update"></div>
- %else:
- <div class="form-row">
- <p>To prevent new jobs from dispatching, you can lock the job queue here.</p>
- <input type='hidden' name='job_lock' value='lock'/>
- </div>
- <div class="form-row">
- <input type="submit" class="primary-button" name="submit" value="Lock">
- </div>
- %endif
</div></div>
-
</form>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Fix filtering of deleted objects in grid framework when using sqlite. Also make history grid's label for deleted filter clearer. Fixes #596
by Bitbucket 21 Jul '11
by Bitbucket 21 Jul '11
21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/c176363cb7b2/
changeset: c176363cb7b2
user: jgoecks
date: 2011-07-21 16:04:48
summary: Fix filtering of deleted objects in grid framework when using sqlite. Also make history grid's label for deleted filter clearer. Fixes #596
affected #: 2 files (15 bytes)
--- a/lib/galaxy/web/controllers/history.py Thu Jul 21 09:43:49 2011 -0400
+++ b/lib/galaxy/web/controllers/history.py Thu Jul 21 10:04:48 2011 -0400
@@ -56,7 +56,7 @@
grids.GridColumn( "Created", key="create_time", format=time_ago ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
# Columns that are valid for filtering but are not visible.
- grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+ grids.DeletedColumn( "Status", key="deleted", visible=False, filterable="advanced" )
]
columns.append(
grids.MulticolFilterColumn(
--- a/lib/galaxy/web/framework/helpers/grids.py Thu Jul 21 09:43:49 2011 -0400
+++ b/lib/galaxy/web/framework/helpers/grids.py Thu Jul 21 10:04:48 2011 -0400
@@ -631,7 +631,7 @@
if column_filter == "All":
pass
elif column_filter in [ "True", "False" ]:
- query = query.filter( self.model_class.deleted == column_filter )
+ query = query.filter( self.model_class.deleted == ( column_filter == "True" ) )
return query
class StateColumn( GridColumn ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/b68bbdc8dd14/
changeset: b68bbdc8dd14
user: greg
date: 2011-07-21 15:43:49
summary: Several tool shed enhancements and fixes:
1) Add the ability to add an entry for a tool to the tool_data_table_conf.xml file in real time. This allows metadata to be generated for tools that use this feature.
2) Add a method to configure settings to the mercurial ui, and configure it such that all message output from mercurial is not displayed (we're in quiet mode).
3) Make sure that mercurial commits performed within exception blocks account for the user performing the commit.
affected #: 6 files (7.5 KB)
--- a/lib/galaxy/util/__init__.py Wed Jul 20 09:01:37 2011 -0400
+++ b/lib/galaxy/util/__init__.py Thu Jul 21 09:43:49 2011 -0400
@@ -127,7 +127,8 @@
'@' : '__at__',
'\n' : '__cn__',
'\r' : '__cr__',
- '\t' : '__tc__'
+ '\t' : '__tc__',
+ '#' : '__pd__'
}
def restore_text(text):
--- a/lib/galaxy/webapps/community/controllers/common.py Wed Jul 20 09:01:37 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/common.py Thu Jul 21 09:43:49 2011 -0400
@@ -81,7 +81,7 @@
status = 'done'
repository = get_repository( trans, id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
change_set = get_change_set( trans, repo, change_set_revision )
invalid_files = []
flush_needed = False
@@ -214,10 +214,6 @@
if invalid_files:
message = "Metadata cannot be defined for change set revision '%s'. Correct the following problems and reset metadata.<br/>" % str( change_set_revision )
for itc_tup in invalid_files:
- # Handle the special case where a tool depends on a missing xxx.loc file by telling
- # the user to upload xxx.loc.sample to the repository so that it can be copied to
- # ~/tool-data/xxx.loc. In this case, itc_tup[1] will be a message looking something like:
- # [Errno 2] No such file or directory: '/Users/gvk/central/tool-data/blast2go.loc'
tool_file = itc_tup[0]
exception_msg = itc_tup[1]
if exception_msg.find( 'No such file or directory' ) >= 0:
@@ -226,10 +222,28 @@
missing_file = missing_file_items[-1].rstrip( '\'' )
correction_msg = "This file refers to a missing file <b>%s</b>. " % str( missing_file )
if exception_msg.find( '.loc' ) >= 0:
+ # Handle the special case where a tool depends on a missing xxx.loc file by telling
+ # the user to upload xxx.loc.sample to the repository so that it can be copied to
+ # ~/tool-data/xxx.loc. In this case, exception_msg will look something like:
+ # [Errno 2] No such file or directory: '/Users/gvk/central/tool-data/blast2go.loc'
sample_loc_file = '%s.sample' % str( missing_file )
correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_loc_file
else:
correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % missing_file
+ elif exception_msg.find( 'Data table named' ) >= 0:
+ # Handle the special case where the tool requires an entry in the tool_data_table.conf file.
+ # In this case, exception_msg will look something like:
+ # Data table named 'tmap_indexes' is required by tool but not configured
+ exception_items = exception_msg.split()
+ name_attr = exception_items[3].lstrip( '\'' ).rstrip( '\'' )
+ message += "<b>%s</b> - This tool requires an entry in the tool_data_table_conf.xml file. " % tool_file
+ message += "Complete and <b>Save</b> the form below to resolve this issue.<br/>"
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='add_tool_data_table_entry',
+ name_attr=name_attr,
+ repository_id=id,
+ message=message,
+ status='error' ) )
else:
correction_msg = exception_msg
message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
@@ -257,12 +271,21 @@
if not ( os.path.exists( os.path.join( tool_data_path, loc_file ) ) or os.path.exists( os.path.join( tool_data_path, sample_loc_file ) ) ):
shutil.copy( os.path.abspath( filename ), os.path.join( tool_data_path, sample_loc_file ) )
shutil.copy( os.path.abspath( filename ), os.path.join( tool_data_path, loc_file ) )
+def get_configured_ui():
+ # Configure any desired ui settings.
+ _ui = ui.ui()
+ # The following will suppress all messages. This is
+ # the same as adding the following setting to the repo
+ # hgrc file' [ui] section:
+ # quiet = True
+ _ui.setconfig( 'ui', 'quiet', True )
+ return _ui
def get_user( trans, id ):
"""Get a user from the database"""
return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
def handle_email_alerts( trans, repository ):
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
smtp_server = trans.app.config.smtp_server
if smtp_server and repository.email_alerts:
# Send email alert to users that want them.
@@ -299,17 +322,19 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def update_for_browsing( repository, current_working_dir, commit_message='' ):
- # Make a copy of a repository's files for browsing.
+def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
+ # Make a copy of a repository's files for browsing, remove from disk all files that
+ # are not tracked, and commit all added, modified or removed files that have not yet
+ # been committed.
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
# The following will delete the disk copy of only the files in the repository.
#os.system( 'hg update -r null > /dev/null 2>&1' )
repo.ui.pushbuffer()
commands.status( repo.ui, repo, all=True )
status_and_file_names = repo.ui.popbuffer().strip().split( "\n" )
# status_and_file_names looks something like:
- # ['? MY_README_AGAIN', '? galaxy_tmap_tool/tmap-0.0.9.tar.gz', '? dna_filtering.py', 'C filtering.py', 'C filtering.xml']
+ # ['? README', '? tmap_tool/tmap-0.0.9.tar.gz', '? dna_filtering.py', 'C filtering.py', 'C filtering.xml']
# The codes used to show the status of files are:
# M = modified
# A = added
@@ -345,7 +370,7 @@
if not commit_message:
commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
repo.dirstate.write()
- repo.commit( text=commit_message )
+ repo.commit( user=trans.user.username, text=commit_message )
os.chdir( repo_dir )
os.system( 'hg update > /dev/null 2>&1' )
os.chdir( current_working_dir )
--- a/lib/galaxy/webapps/community/controllers/repository.py Wed Jul 20 09:01:37 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py Thu Jul 21 09:43:49 2011 -0400
@@ -312,7 +312,7 @@
if not os.path.exists( repository_path ):
os.makedirs( repository_path )
# Create the local repository
- repo = hg.repository( ui.ui(), repository_path, create=True )
+ repo = hg.repository( get_configured_ui(), repository_path, create=True )
# Add an entry in the hgweb.config file for the local repository
# This enables calls to repository.repo_path
self.__add_hgweb_config_entry( trans, repository, repository_path )
@@ -406,7 +406,7 @@
# push_ssl = False
# Since we support both http and https, we set push_ssl to False to override
# the default (which is True) in the mercurial api.
- repo = hg.repository( ui.ui(), path=repository.repo_path )
+ repo = hg.repository( get_configured_ui(), path=repository.repo_path )
fp = repo.opener( 'hgrc', 'wb' )
fp.write( '[paths]\n' )
fp.write( 'default = .\n' )
@@ -423,9 +423,10 @@
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
current_working_dir = os.getcwd()
- update_for_browsing( repository, current_working_dir, commit_message=commit_message )
+ # Update repository files for browsing.
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
repository=repository,
@@ -440,7 +441,7 @@
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
repository = get_repository( trans, id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) )
if params.get( 'select_files_to_delete_button', False ):
if selected_files_to_delete:
@@ -450,7 +451,7 @@
tip = repository.tip
for selected_file in selected_files_to_delete:
repo_file = os.path.abspath( selected_file )
- commands.remove( repo.ui, repo, repo_file )
+ commands.remove( repo.ui, repo, repo_file, force=True )
# Commit the change set.
if not commit_message:
commit_message = 'Deleted selected files'
@@ -461,12 +462,12 @@
# tool shed environment, it occasionally throws a "TypeError: array item must be char"
# exception. If this happens, we'll try the following.
repo.dirstate.write()
- repo.commit( text=commit_message )
+ repo.commit( user=trans.user.username, text=commit_message )
handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- update_for_browsing( repository, current_working_dir, commit_message=commit_message )
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
if tip != repository.tip:
message = "The selected files were deleted from the repository."
else:
@@ -495,7 +496,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
alerts = params.get( 'alerts', '' )
@@ -545,7 +546,7 @@
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
repo_name = util.restore_text( params.get( 'repo_name', repository.name ) )
description = util.restore_text( params.get( 'description', repository.description ) )
long_description = util.restore_text( params.get( 'long_description', repository.long_description ) )
@@ -673,7 +674,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
changesets = []
for changeset in repo.changelog:
ctx = repo.changectx( changeset )
@@ -701,7 +702,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
ctx = get_change_set( trans, repo, ctx_str )
if ctx is None:
message = "Repository does not include changeset revision '%s'." % str( ctx_str )
@@ -745,7 +746,7 @@
message='Select a repository to rate',
status='error' ) )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
if repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -815,6 +816,98 @@
message=message,
status=status ) )
@web.expose
+ def add_tool_data_table_entry( self, trans, name_attr, repository_id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ comment_char = util.restore_text( params.get( 'comment_char', '#' ) )
+ loc_filename = util.restore_text( params.get( 'loc_filename', '' ) )
+ repository = get_repository( trans, repository_id )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
+ column_fields = self.__get_column_fields( **kwd )
+ if params.get( 'add_field_button', False ):
+ # Add a field
+ field_index = len( column_fields ) + 1
+ field_tup = ( '%i_field_name' % field_index, '' )
+ column_fields.append( field_tup )
+ elif params.get( 'remove_button', False ):
+ # Delete a field - find the index of the field to be removed from the remove button label
+ index = int( kwd[ 'remove_button' ].split( ' ' )[2] ) - 1
+ tup_to_remove = column_fields[ index ]
+ column_fields.remove( tup_to_remove )
+ # Re-number field tups
+ new_column_fields = []
+ for field_index, old_field_tup in enumerate( column_fields ):
+ name = '%i_field_name' % ( field_index + 1 )
+ value = old_field_tup[1]
+ new_column_fields.append( ( name, value ) )
+ column_fields = new_column_fields
+ elif params.get( 'add_tool_data_table_entry_button', False ):
+ # Add an entry to the end of the tool_data_table_conf.xml file
+ tdt_config = "%s/tool_data_table_conf.xml" % trans.app.config.root
+ if os.path.exists( tdt_config ):
+ # Make a backup of the file since we're going to be changing it.
+ today = date.today()
+ backup_date = today.strftime( "%Y_%m_%d" )
+ tdt_config_copy = '%s/tool_data_table_conf.xml_%s_backup' % ( trans.app.config.root, backup_date )
+ shutil.copy( os.path.abspath( tdt_config ), os.path.abspath( tdt_config_copy ) )
+ # Generate the string of column names
+ column_names = ', '.join( [ column_tup[1] for column_tup in column_fields ] )
+ # Write each line of the tool_data_table_conf.xml file, except the last line to a temp file.
+ fh = tempfile.NamedTemporaryFile( 'wb' )
+ tmp_filename = fh.name
+ fh.close()
+ new_tdt_config = open( tmp_filename, 'wb' )
+ for i, line in enumerate( open( tdt_config, 'rb' ) ):
+ if line.startswith( '</tables>' ):
+ break
+ new_tdt_config.write( line )
+ new_tdt_config.write( ' <!-- Location of %s files -->\n' % name_attr )
+ new_tdt_config.write( ' <table name="%s" comment_char="%s">\n' % ( name_attr, comment_char ) )
+ new_tdt_config.write( ' <columns>%s</columns>\n' % column_names )
+ new_tdt_config.write( ' <file path="tool-data/%s" />\n' % loc_filename )
+ new_tdt_config.write( ' </table>\n' )
+ # Now write the last line of the file
+ new_tdt_config.write( '</tables>\n' )
+ new_tdt_config.close()
+ shutil.move( tmp_filename, os.path.abspath( tdt_config ) )
+ # Reload the tool_data_table_conf entries
+ trans.app.tool_data_tables = galaxy.tools.data.ToolDataTableManager( trans.app.config.tool_data_table_config_path )
+ message = "The new entry has been added to the tool_data_table_conf.xml file, so click the <b>Reset metadata</b> button below."
+ # TODO: what if ~/tool-data/<loc_filename> doesn't exist? We need to figure out how to
+ # force the user to upload its sample to the repository in order to generate metadata.
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=repository_id,
+ message=message,
+ status=status ) )
+ return trans.fill_template( '/webapps/community/repository/add_tool_data_table_entry.mako',
+ name_attr=name_attr,
+ repository=repository,
+ comment_char=comment_char,
+ loc_filename=loc_filename,
+ column_fields=column_fields,
+ message=message,
+ status=status )
+ def __get_column_fields( self, **kwd ):
+ '''
+ Return a dictionary of the user-entered form fields representing columns
+ in the location file.
+ '''
+ params = util.Params( kwd )
+ column_fields = []
+ index = 0
+ while True:
+ name = '%i_field_name' % ( index + 1 )
+ if kwd.has_key( name ):
+ value = util.restore_text( params.get( name, '' ) )
+ field_tup = ( name, value )
+ index += 1
+ column_fields.append( field_tup )
+ else:
+ break
+ return column_fields
+ @web.expose
def display_tool( self, trans, repository_id, tool_config, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
--- a/lib/galaxy/webapps/community/controllers/upload.py Wed Jul 20 09:01:37 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py Thu Jul 21 09:43:49 2011 -0400
@@ -27,7 +27,7 @@
repository_id = params.get( 'repository_id', '' )
repository = get_repository( trans, repository_id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) )
remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
@@ -87,7 +87,6 @@
# Move the uploaded file to the load_point within the repository hierarchy.
shutil.move( uploaded_file_name, full_path )
commands.add( repo.ui, repo, full_path )
- """
try:
commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
except Exception, e:
@@ -95,17 +94,15 @@
# tool shed environment, it occasionally throws a "TypeError: array item must be char"
# exception. If this happens, we'll try the following.
repo.dirstate.write()
- repo.commit( text=commit_message )
- """
+ repo.commit( user=trans.user.username, text=commit_message )
if full_path.endswith( '.loc.sample' ):
# Handle the special case where a xxx.loc.sample file is
# being uploaded by copying it to ~/tool-data/xxx.loc.
copy_sample_loc_file( trans, full_path )
handle_email_alerts( trans, repository )
if ok:
- # Update the repository files for browsing, a by-product of doing this
- # is eliminating unwanted files from the repository directory.
- update_for_browsing( repository, current_working_dir, commit_message=commit_message )
+ # Update the repository files for browsing.
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
if tip != repository.tip:
if ( isgzip or isbz2 ) and uncompress_file:
@@ -148,7 +145,7 @@
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message ):
# Upload a tar archive of files.
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
files_to_remove = []
ok, message = self.__check_archive( tar )
if not ok:
@@ -185,7 +182,7 @@
for repo_file in files_to_remove:
# Remove files in the repository (relative to the upload point)
# that are not in the uploaded archive.
- commands.remove( repo.ui, repo, repo_file )
+ commands.remove( repo.ui, repo, repo_file, force=True )
for filename_in_archive in filenames_in_archive:
commands.add( repo.ui, repo, filename_in_archive )
if filename_in_archive.endswith( '.loc.sample' ):
@@ -199,7 +196,7 @@
# tool shed environment, it occasionally throws a "TypeError: array item must be char"
# exception. If this happens, we'll try the following.
repo.dirstate.write()
- repo.commit( text=commit_message )
+ repo.commit( user=trans.user.username, text=commit_message )
handle_email_alerts( trans, repository )
return True, '', files_to_remove
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
--- a/templates/webapps/community/repository/create_repository.mako Wed Jul 20 09:01:37 2011 -0400
+++ b/templates/webapps/community/repository/create_repository.mako Thu Jul 21 09:43:49 2011 -0400
@@ -17,15 +17,15 @@
<div class="toolForm"><div class="toolFormTitle">Create Repository</div><div class="toolFormBody">
- <form name="create_repository_form" id="create_repository_form" action="${h.url_for( action='create_repository' )}" method="post" >
+ <form name="create_repository_form" id="create_repository_form" action="${h.url_for( controller='repository', action='create_repository' )}" method="post" ><div class="form-row"><label>Name:</label>
- <input name="name" type="textfield" value="${name}" size=40"/>
+ <input name="name" type="textfield" value="${name}" size="40"/><div style="clear: both"></div></div><div class="form-row"><label>Synopsis:</label>
- <input name="description" type="textfield" value="${description}" size=80"/>
+ <input name="description" type="textfield" value="${description}" size="80"/><div style="clear: both"></div></div><div class="form-row">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit-notification service enabled and
are an addressed recipient.
1
0
2 new changesets in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/ce8cc8feb6e0/
changeset: ce8cc8feb6e0
user: dannon
date: 2011-07-19 19:40:24
summary: Code cleanup.
affected #: 1 file (886 bytes)
--- a/lib/galaxy/web/controllers/history.py Tue Jul 19 11:27:16 2011 -0400
+++ b/lib/galaxy/web/controllers/history.py Tue Jul 19 13:40:24 2011 -0400
@@ -58,13 +58,12 @@
# Columns that are valid for filtering but are not visible.
grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
]
- columns.append(
- grids.MulticolFilterColumn(
- "search history names and tags",
- cols_to_filter=[ columns[0], columns[2] ],
+ columns.append(
+ grids.MulticolFilterColumn(
+ "search history names and tags",
+ cols_to_filter=[ columns[0], columns[2] ],
key="free-text-search", visible=False, filterable="standard" )
)
-
operations = [
grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
@@ -125,11 +124,11 @@
return trans.sa_session.query( self.model_class ).join( 'users_shared_with' )
def apply_query_filter( self, trans, query, **kwargs ):
return query.filter( model.HistoryUserShareAssociation.user == trans.user )
-
+
class HistoryAllPublishedGrid( grids.Grid ):
class NameURLColumn( grids.PublicURLColumn, NameColumn ):
pass
-
+
title = "Published Histories"
model_class = model.History
default_sort_key = "update_time"
@@ -138,15 +137,15 @@
columns = [
NameURLColumn( "Name", key="name", filterable="advanced" ),
grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.HistoryAnnotationAssociation, filterable="advanced" ),
- grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
+ grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
grids.CommunityRatingColumn( "Community Rating", key="rating" ),
grids.CommunityTagsColumn( "Community Tags", key="tags", model_tag_association_class=model.HistoryTagAssociation, filterable="advanced", grid_name="PublicHistoryListGrid" ),
grids.ReverseSortColumn( "Last Updated", key="update_time", format=time_ago )
]
- columns.append(
- grids.MulticolFilterColumn(
- "Search name, annotation, owner, and tags",
- cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
+ columns.append(
+ grids.MulticolFilterColumn(
+ "Search name, annotation, owner, and tags",
+ cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
key="free-text-search", visible=False, filterable="standard" )
)
operations = []
@@ -156,7 +155,7 @@
def apply_query_filter( self, trans, query, **kwargs ):
# A public history is published, has a slug, and is not deleted.
return query.filter( self.model_class.published == True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )
-
+
class HistoryController( BaseController, Sharable, UsesAnnotations, UsesItemRatings, UsesHistory ):
@web.expose
def index( self, trans ):
@@ -166,11 +165,11 @@
"""XML history list for functional tests"""
trans.response.set_content_type( 'text/xml' )
return trans.fill_template( "/history/list_as_xml.mako" )
-
+
stored_list_grid = HistoryListGrid()
shared_list_grid = SharedHistoryListGrid()
published_list_grid = HistoryAllPublishedGrid()
-
+
@web.expose
def list_published( self, trans, **kwargs ):
grid = self.published_list_grid( trans, **kwargs )
@@ -179,7 +178,7 @@
else:
# Render grid wrapped in panels
return trans.fill_template( "history/list_published.mako", grid=grid )
-
+
@web.expose
@web.require_login( "work with multiple histories" )
def list( self, trans, **kwargs ):
@@ -200,7 +199,7 @@
refresh_history = False
# Load the histories and ensure they all belong to the current user
histories = []
- for history_id in history_ids:
+ for history_id in history_ids:
history = self.get_history( trans, history_id )
if history:
# Ensure history is owned by current user
@@ -209,18 +208,18 @@
histories.append( history )
else:
log.warn( "Invalid history id '%r' passed to list", history_id )
- if histories:
+ if histories:
if operation == "switch":
status, message = self._list_switch( trans, histories )
- # Take action to update UI to reflect history switch. If
+ # Take action to update UI to reflect history switch. If
# grid is using panels, it is standalone and hence a redirect
# to root is needed; if grid is not using panels, it is nested
- # in the main Galaxy UI and refreshing the history frame
+ # in the main Galaxy UI and refreshing the history frame
# is sufficient.
use_panels = kwargs.get('use_panels', False) == 'True'
if use_panels:
return trans.response.send_redirect( url_for( "/" ) )
- else:
+ else:
trans.template_context['refresh_frames'] = ['history']
elif operation in ( "delete", "delete and remove datasets from disk" ):
if operation == "delete and remove datasets from disk":
@@ -338,7 +337,7 @@
trans.set_history( new_history )
# No message
return None, None
-
+
@web.expose
@web.require_login( "work with shared histories" )
def list_shared( self, trans, **kwargs ):
@@ -373,7 +372,7 @@
status = 'done'
# Render the list view
return self.shared_list_grid( trans, status=status, message=message, **kwargs )
-
+
@web.expose
def display_structured( self, trans, id=None ):
"""
@@ -444,7 +443,7 @@
items.sort( key=( lambda x: x[0].create_time ), reverse=True )
#
return trans.fill_template( "history/display_structured.mako", items=items )
-
+
@web.expose
def delete_current( self, trans ):
"""Delete just the active history -- this does not require a logged in user."""
@@ -456,25 +455,22 @@
trans.sa_session.add( history )
trans.sa_session.flush()
trans.log_event( "History id %d marked as deleted" % history.id )
- # Regardless of whether it was previously deleted, we make a new history active
+ # Regardless of whether it was previously deleted, we make a new history active
trans.new_history()
- return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
-
+ return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
+
@web.expose
@web.require_login( "rate items" )
@web.json
def rate_async( self, trans, id, rating ):
""" Rate a history asynchronously and return updated community data. """
-
history = self.get_history( trans, id, check_ownership=False, check_accessible=True )
if not history:
return trans.show_error_message( "The specified history does not exist." )
-
# Rate history.
history_rating = self.rate_item( trans.sa_session, trans.get_user(), history, rating )
-
return self.get_ave_item_rating_data( trans.sa_session, history )
-
+
@web.expose
def rename_async( self, trans, id=None, new_name=None ):
history = self.get_history( trans, id )
@@ -490,7 +486,7 @@
trans.sa_session.add( history )
trans.sa_session.flush()
return history.name
-
+
@web.expose
@web.require_login( "use Galaxy histories" )
def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
@@ -503,12 +499,11 @@
return new_annotation
@web.expose
- # TODO: Remove require_login when users are warned that, if they are not
+ # TODO: Remove require_login when users are warned that, if they are not
# logged in, this will remove their current history.
@web.require_login( "use Galaxy histories" )
def import_archive( self, trans, **kwargs ):
""" Import a history from a file archive. """
-
# Set archive source and type.
archive_file = kwargs.get( 'archive_file', None )
archive_url = kwargs.get( 'archive_url', None )
@@ -519,37 +514,34 @@
elif archive_url:
archive_source = archive_url
archive_type = 'url'
-
# If no source to create archive from, show form to upload archive or specify URL.
if not archive_source:
- return trans.show_form(
+ return trans.show_form(
web.FormBuilder( web.url_for(), "Import a History from an Archive", submit_text="Submit" ) \
.add_input( "text", "Archived History URL", "archive_url", value="", error=None )
# TODO: add support for importing via a file.
- #.add_input( "file", "Archived History File", "archive_file", value=None, error=None )
+ #.add_input( "file", "Archived History File", "archive_file", value=None, error=None )
)
-
# Run job to do import.
history_imp_tool = trans.app.toolbox.tools_by_id[ '__IMPORT_HISTORY__' ]
incoming = { '__ARCHIVE_SOURCE__' : archive_source, '__ARCHIVE_TYPE__' : archive_type }
history_imp_tool.execute( trans, incoming=incoming )
return trans.show_message( "Importing history from '%s'. \
This history will be visible when the import is complete" % archive_source )
-
- @web.expose
+
+ @web.expose
def export_archive( self, trans, id=None, gzip=True, include_hidden=False, include_deleted=False ):
""" Export a history to an archive. """
-
- #
+ #
# Convert options to booleans.
#
if isinstance( gzip, basestring ):
- gzip = ( gzip in [ 'True', 'true', 'T', 't' ] )
+ gzip = ( gzip in [ 'True', 'true', 'T', 't' ] )
if isinstance( include_hidden, basestring ):
include_hidden = ( include_hidden in [ 'True', 'true', 'T', 't' ] )
if isinstance( include_deleted, basestring ):
- include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] )
-
+ include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] )
+
#
# Get history to export.
#
@@ -559,10 +551,10 @@
# Use current history.
history = trans.history
id = trans.security.encode_id( history.id )
-
+
if not history:
return trans.show_error_message( "This history does not exist or you cannot export this history." )
-
+
#
# If history has already been exported and it has not changed since export, stream it.
#
@@ -585,40 +577,38 @@
elif jeha.job.state in [ model.Job.states.RUNNING, model.Job.states.QUEUED, model.Job.states.WAITING ]:
return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>" \
% ( { 'n' : history.name, 's' : url_for( action="export_archive", id=id, qualified=True ) } ) )
-
+
# Run job to do export.
history_exp_tool = trans.app.toolbox.tools_by_id[ '__EXPORT_HISTORY__' ]
- params = {
- 'history_to_export' : history,
- 'compress' : gzip,
- 'include_hidden' : include_hidden,
+ params = {
+ 'history_to_export' : history,
+ 'compress' : gzip,
+ 'include_hidden' : include_hidden,
'include_deleted' : include_deleted }
history_exp_tool.execute( trans, incoming = params, set_output_hid = True )
return trans.show_message( "Exporting History '%(n)s'. Use this link to download \
the archive or import it to another Galaxy server: \
<a href='%(u)s'>%(u)s</a>" \
% ( { 'n' : history.name, 'u' : url_for( action="export_archive", id=id, qualified=True ) } ) )
-
+
@web.expose
@web.json
@web.require_login( "get history name and link" )
def get_name_and_link_async( self, trans, id=None ):
""" Returns history's name and link. """
history = self.get_history( trans, id, False )
-
if self.create_item_slug( trans.sa_session, history ):
trans.sa_session.flush()
- return_dict = {
- "name" : history.name,
+ return_dict = {
+ "name" : history.name,
"link" : url_for( action="display_by_username_and_slug", username=history.user.username, slug=history.slug ) }
return return_dict
-
+
@web.expose
@web.require_login( "set history's accessible flag" )
def set_accessible_async( self, trans, id=None, accessible=False ):
""" Set history's importable attribute and slug. """
history = self.get_history( trans, id, True )
-
# Only set if importable value would change; this prevents a change in the update_time unless attribute really changed.
importable = accessible in ['True', 'true', 't', 'T'];
if history and history.importable != importable:
@@ -627,7 +617,6 @@
else:
history.importable = importable
trans.sa_session.flush()
-
return
@web.expose
@@ -638,7 +627,7 @@
history.slug = new_slug
trans.sa_session.flush()
return history.slug
-
+
@web.expose
def get_item_content_async( self, trans, id ):
""" Returns item content in HTML format. """
@@ -646,7 +635,7 @@
history = self.get_history( trans, id, False, True )
if history is None:
raise web.httpexceptions.HTTPNotFound()
-
+
# Get datasets.
datasets = self.get_history_datasets( trans, history )
# Get annotations.
@@ -654,7 +643,7 @@
for dataset in datasets:
dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
return trans.stream_template_mako( "/history/item_content.mako", item = history, item_data = datasets )
-
+
@web.expose
def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
"""Return autocomplete data for history names"""
@@ -666,7 +655,7 @@
for history in trans.sa_session.query( model.History ).filter_by( user=user ).filter( func.lower( model.History.name ) .like(q.lower() + "%") ):
ac_data = ac_data + history.name + "\n"
return ac_data
-
+
@web.expose
def imp( self, trans, id=None, confirm=False, **kwd ):
"""Import another user's history via a shared URL"""
@@ -682,7 +671,7 @@
referer_message = "<a href='%s'>return to the previous page</a>" % referer
else:
referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
-
+
# Do import.
if not id:
return trans.show_error_message( "You must specify a history you want to import.<br>You can %s." % referer_message, use_panels=True )
@@ -712,7 +701,7 @@
# Set imported history to be user's current history.
trans.set_history( new_history )
return trans.show_ok_message(
- message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
+ message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
% ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
elif not user_history or not user_history.datasets or confirm:
new_history = import_history.copy()
@@ -730,13 +719,13 @@
trans.sa_session.flush()
trans.set_history( new_history )
return trans.show_ok_message(
- message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
+ message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
% ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
return trans.show_warn_message( """
Warning! If you import this history, you will lose your current
history. <br>You can <a href="%s">continue and import this history</a> or %s.
""" % ( web.url_for( id=id, confirm=True, referer=trans.request.referer ), referer_message ), use_panels=True )
-
+
@web.expose
def view( self, trans, id=None, show_deleted=False ):
"""View a history. If a history is importable, then it is viewable by any user."""
@@ -757,11 +746,11 @@
history = history_to_view,
datasets = datasets,
show_deleted = show_deleted )
-
+
@web.expose
def display_by_username_and_slug( self, trans, username, slug ):
- """ Display history based on a username and slug. """
-
+ """ Display history based on a username and slug. """
+
# Get history.
session = trans.sa_session
user = session.query( model.User ).filter_by( username=username ).first()
@@ -770,14 +759,14 @@
raise web.httpexceptions.HTTPNotFound()
# Security check raises error if user cannot access history.
self.security_check( trans.get_user(), history, False, True)
-
+
# Get datasets.
datasets = self.get_history_datasets( trans, history )
# Get annotations.
history.annotation = self.get_item_annotation_str( trans.sa_session, history.user, history )
for dataset in datasets:
dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
-
+
# Get rating data.
user_item_rating = 0
if trans.get_user():
@@ -787,9 +776,9 @@
else:
user_item_rating = 0
ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, history )
- return trans.stream_template_mako( "history/display.mako", item = history, item_data = datasets,
+ return trans.stream_template_mako( "history/display.mako", item = history, item_data = datasets,
user_item_rating = user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )
-
+
@web.expose
@web.require_login( "share Galaxy histories" )
def sharing( self, trans, id=None, histories=[], **kwargs ):
@@ -804,7 +793,7 @@
histories = [ self.get_history( trans, history_id ) for history_id in ids ]
elif not histories:
histories = [ trans.history ]
-
+
# Do operation on histories.
for history in histories:
if 'make_accessible_via_link' in kwargs:
@@ -837,17 +826,17 @@
message = "History '%s' does not seem to be shared with user '%s'" % ( history.name, user.email )
return trans.fill_template( '/sharing_base.mako', item=history,
message=message, status='error' )
-
-
+
+
# Legacy issue: histories made accessible before recent updates may not have a slug. Create slug for any histories that need them.
for history in histories:
if history.importable and not history.slug:
self._make_item_accessible( trans.sa_session, history )
-
+
session.flush()
-
+
return trans.fill_template( "/sharing_base.mako", item=history )
-
+
@web.expose
@web.require_login( "share histories with other users" )
def share( self, trans, id=None, email="", **kwd ):
@@ -890,11 +879,11 @@
send_to_err = "The histories you are sharing do not contain any datasets that can be accessed by the users with which you are sharing."
return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
if can_change or cannot_change:
- return trans.fill_template( "/history/share.mako",
- histories=histories,
- email=email,
- send_to_err=send_to_err,
- can_change=can_change,
+ return trans.fill_template( "/history/share.mako",
+ histories=histories,
+ email=email,
+ send_to_err=send_to_err,
+ can_change=can_change,
cannot_change=cannot_change,
no_change_needed=unique_no_change_needed )
if no_change_needed:
@@ -903,11 +892,11 @@
# User seems to be sharing an empty history
send_to_err = "You cannot share an empty history. "
return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
-
+
@web.expose
@web.require_login( "share restricted histories with other users" )
def share_restricted( self, trans, id=None, email="", **kwd ):
- if 'action' in kwd:
+ if 'action' in kwd:
action = kwd[ 'action' ]
else:
err_msg = "Select an action. "
@@ -938,10 +927,10 @@
# The action here is either 'public' or 'private', so we'll continue to populate the
# histories_for_sharing dictionary from the can_change dictionary.
for send_to_user, history_dict in can_change.items():
- for history in history_dict:
+ for history in history_dict:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -954,7 +943,7 @@
# The user with which we are sharing the history does not have access permission on the current dataset
if trans.app.security_agent.can_manage_dataset( user_roles, hda.dataset ) and not hda.dataset.library_associations:
# The current user has authority to change permissions on the current dataset because
- # they have permission to manage permissions on the dataset and the dataset is not associated
+ # they have permission to manage permissions on the dataset and the dataset is not associated
# with a library.
if action == "private":
trans.app.security_agent.privately_share_dataset( hda.dataset, users=[ user, send_to_user ] )
@@ -986,7 +975,7 @@
send_to_user = trans.sa_session.query( trans.app.model.User ) \
.filter( and_( trans.app.model.User.table.c.email==email_address,
trans.app.model.User.table.c.deleted==False ) ) \
- .first()
+ .first()
if send_to_user:
send_to_users.append( send_to_user )
else:
@@ -1004,7 +993,7 @@
for history in history_dict:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -1019,7 +1008,7 @@
# The user may be attempting to share histories whose datasets cannot all be accessed by other users.
# If this is the case, the user sharing the histories can:
# 1) action=='public': choose to make the datasets public if he is permitted to do so
- # 2) action=='private': automatically create a new "sharing role" allowing protected
+ # 2) action=='private': automatically create a new "sharing role" allowing protected
# datasets to be accessed only by the desired users
# This method will populate the can_change, cannot_change and no_change_needed dictionaries, which
# are used for either displaying to the user, letting them make 1 of the choices above, or sharing
@@ -1036,7 +1025,7 @@
for send_to_user in send_to_users:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -1125,7 +1114,7 @@
if send_to_err:
msg += send_to_err
return self.sharing( trans, histories=shared_histories, msg=msg )
-
+
@web.expose
@web.require_login( "rename histories" )
def rename( self, trans, id=None, name=None, **kwd ):
@@ -1164,7 +1153,7 @@
else:
change_msg = change_msg + "<p>History: "+cur_names[i]+" does not appear to belong to you.</p>"
return trans.show_message( "<p>%s" % change_msg, refresh_frames=['history'] )
-
+
@web.expose
@web.require_login( "clone shared Galaxy history" )
def clone( self, trans, id=None, **kwd ):
@@ -1207,13 +1196,11 @@
else:
msg = '%d cloned histories are now included in your previously stored histories.' % len( histories )
return trans.show_ok_message( msg )
-
+
@web.expose
@web.require_login( "switch to a history" )
def switch_to_history( self, trans, hist_id=None ):
decoded_id = trans.security.decode_id(hist_id)
hist = trans.sa_session.query( trans.app.model.History ).get( decoded_id )
-
trans.set_history( hist )
return trans.response.send_redirect( url_for( "/" ) )
-
http://bitbucket.org/galaxy/galaxy-central/changeset/c875100ea5ed/
changeset: c875100ea5ed
user: dannon
date: 2011-07-20 15:01:37
summary: Merge
affected #: 30 files (3.4 KB)
--- a/README.txt Tue Jul 19 13:40:24 2011 -0400
+++ b/README.txt Wed Jul 20 09:01:37 2011 -0400
@@ -28,4 +28,4 @@
Not all dependencies are included for the tools provided in the sample
tool_conf.xml. A full list of external dependencies is available at:
-http://bitbucket.org/galaxy/galaxy-central/wiki/ToolDependencies
+http://wiki.g2.bx.psu.edu/Admin/Tools/Tool%20Dependencies
--- a/dist-eggs.ini Tue Jul 19 13:40:24 2011 -0400
+++ b/dist-eggs.ini Wed Jul 20 09:01:37 2011 -0400
@@ -3,7 +3,7 @@
; eggs.g2.bx.psu.edu) Probably only useful to Galaxy developers at
; Penn State. This file is used by scripts/dist-scramble.py
;
-; More information: http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Eggs
+; More information: http://wiki.g2.bx.psu.edu/Admin/Config/Eggs
;
[hosts]
--- a/eggs.ini Tue Jul 19 13:40:24 2011 -0400
+++ b/eggs.ini Wed Jul 20 09:01:37 2011 -0400
@@ -3,7 +3,7 @@
;
; This file is version controlled and should not be edited by hand!
; For more information, see:
-; http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Eggs
+; http://wiki.g2.bx.psu.edu/Admin/Config/Eggs
;
[general]
--- a/lib/galaxy/jobs/runners/pbs.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/jobs/runners/pbs.py Wed Jul 20 09:01:37 2011 -0400
@@ -17,7 +17,7 @@
configured properly. Galaxy's "scramble" system should make this installation
simple, please follow the instructions found at:
- http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster
+ http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster
Additional errors may follow:
%s
--- a/lib/galaxy/jobs/runners/sge.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/jobs/runners/sge.py Wed Jul 20 09:01:37 2011 -0400
@@ -14,7 +14,7 @@
"scramble" system should make this installation simple, please follow the
instructions found at:
- http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster
+ http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster
Additional errors may follow:
%s
--- a/lib/galaxy/web/form_builder.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/web/form_builder.py Wed Jul 20 09:01:37 2011 -0400
@@ -658,7 +658,7 @@
self.name = name
self.ldda = value
self.trans = trans
- def get_html( self, disabled=False ):
+ def get_html( self, prefix="", disabled=False ):
if not self.ldda:
ldda = ""
text = "Choose a library dataset"
@@ -666,7 +666,7 @@
ldda = self.trans.security.encode_id(self.ldda.id)
text = self.ldda.name
return '<a href="javascript:void(0);" class="add-librarydataset">%s</a> \
- <input type="hidden" name="%s" value="%s">' % ( text, self.name, escape( str(ldda), quote=True ) )
+ <input type="hidden" name="%s%s" value="%s">' % ( text, prefix, self.name, escape( str(ldda), quote=True ) )
def get_display_text(self):
if self.ldda:
--- a/lib/galaxy/webapps/community/config.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/community/config.py Wed Jul 20 09:01:37 2011 -0400
@@ -64,7 +64,7 @@
self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
--- a/lib/galaxy/webapps/community/controllers/common.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/common.py Wed Jul 20 09:01:37 2011 -0400
@@ -230,6 +230,8 @@
correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_loc_file
else:
correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % missing_file
+ else:
+ correction_msg = exception_msg
message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
status = 'error'
elif flush_needed:
@@ -297,7 +299,7 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def update_for_browsing( repository, current_working_dir ):
+def update_for_browsing( repository, current_working_dir, commit_message='' ):
# Make a copy of a repository's files for browsing.
repo_dir = repository.repo_path
repo = hg.repository( ui.ui(), repo_dir )
@@ -316,12 +318,15 @@
# ! = deleted, but still tracked
# ? = not tracked
# I = ignored
- # We'll remove all files that are not tracked or ignored.
files_to_remove_from_disk = []
+ files_to_commit = []
for status_and_file_name in status_and_file_names:
if status_and_file_name.startswith( '?' ) or status_and_file_name.startswith( 'I' ):
files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
+ files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
for full_path in files_to_remove_from_disk:
+ # We'll remove all files that are not tracked or ignored.
if os.path.isdir( full_path ):
try:
os.rmdir( full_path )
@@ -336,6 +341,11 @@
except OSError, e:
# The directory is not empty
pass
+ if files_to_commit:
+ if not commit_message:
+ commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
os.chdir( repo_dir )
os.system( 'hg update > /dev/null 2>&1' )
os.chdir( current_working_dir )
--- a/lib/galaxy/webapps/community/controllers/repository.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py Wed Jul 20 09:01:37 2011 -0400
@@ -425,7 +425,7 @@
repository = get_repository( trans, id )
repo = hg.repository( ui.ui(), repository.repo_path )
current_working_dir = os.getcwd()
- update_for_browsing( repository, current_working_dir )
+ update_for_browsing( repository, current_working_dir, commit_message=commit_message )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
repository=repository,
@@ -454,11 +454,17 @@
# Commit the change set.
if not commit_message:
commit_message = 'Deleted selected files'
- # Commit the changes.
- commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+ try:
+ commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- update_for_browsing( repository, current_working_dir )
+ update_for_browsing( repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
repo = hg.repository( ui.ui(), repo_dir )
if tip != repository.tip:
--- a/lib/galaxy/webapps/community/controllers/upload.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py Wed Jul 20 09:01:37 2011 -0400
@@ -87,7 +87,16 @@
# Move the uploaded file to the load_point within the repository hierarchy.
shutil.move( uploaded_file_name, full_path )
commands.add( repo.ui, repo, full_path )
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ """
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
+ """
if full_path.endswith( '.loc.sample' ):
# Handle the special case where a xxx.loc.sample file is
# being uploaded by copying it to ~/tool-data/xxx.loc.
@@ -96,7 +105,7 @@
if ok:
# Update the repository files for browsing, a by-product of doing this
# is eliminating unwanted files from the repository directory.
- update_for_browsing( repository, current_working_dir )
+ update_for_browsing( repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
if tip != repository.tip:
if ( isgzip or isbz2 ) and uncompress_file:
@@ -183,8 +192,14 @@
# Handle the special case where a xxx.loc.sample file is
# being uploaded by copying it to ~/tool-data/xxx.loc.
copy_sample_loc_file( trans, filename_in_archive )
- # Commit the changes.
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
handle_email_alerts( trans, repository )
return True, '', files_to_remove
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
--- a/lib/galaxy/webapps/demo_sequencer/config.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/demo_sequencer/config.py Wed Jul 20 09:01:37 2011 -0400
@@ -49,7 +49,7 @@
self.smtp_server = kwargs.get( 'smtp_server', None )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
--- a/lib/galaxy/webapps/reports/config.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/reports/config.py Wed Jul 20 09:01:37 2011 -0400
@@ -33,7 +33,7 @@
self.sendmail_path = kwargs.get('sendmail_path',"/usr/sbin/sendmail")
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
--- a/static/welcome.rst Tue Jul 19 13:40:24 2011 -0400
+++ b/static/welcome.rst Wed Jul 20 09:01:37 2011 -0400
@@ -30,7 +30,7 @@
.. __: /static/help.html
.. __: http://www.bx.psu.edu/cgi-bin/trac.cgi
-.. __: http://bitbucket.org/galaxy/galaxy-central/wiki/GalaxyTeam
+.. __: http://wiki.g2.bx.psu.edu/Galaxy%20Team
.. __: mailto:galaxy@bx.psu.edu
Version: <b>2.1</b> Revision: <b>$Rev$</b>
--- a/templates/webapps/community/base_panels.mako Tue Jul 19 13:40:24 2011 -0400
+++ b/templates/webapps/community/base_panels.mako Wed Jul 20 09:01:37 2011 -0400
@@ -34,9 +34,9 @@
<div class="submenu"><ul><li><a href="${app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" )}">Email comments, bug reports, or suggestions</a></li>
- <li><a target="_blank" href="${app.config.get( "wiki_url", "http://bitbucket.org/galaxy/galaxy-central/wiki" )}">Galaxy Wiki</a></li>
+ <li><a target="_blank" href="${app.config.get( "wiki_url", "http://wiki.g2.bx.psu.edu/" )}">Galaxy Wiki</a></li><li><a target="_blank" href="${app.config.get( "screencasts_url", "http://galaxycast.org" )}">Video tutorials (screencasts)</a></li>
- <li><a target="_blank" href="${app.config.get( "citation_url", "http://bitbucket.org/galaxy/galaxy-central/wiki/Citations" )}">How to Cite Galaxy</a></li>
+ <li><a target="_blank" href="${app.config.get( "citation_url", "http://wiki.g2.bx.psu.edu/Citing%20Galaxy" )}">How to Cite Galaxy</a></li></ul></div></td>
--- a/templates/webapps/galaxy/base_panels.mako Tue Jul 19 13:40:24 2011 -0400
+++ b/templates/webapps/galaxy/base_panels.mako Wed Jul 20 09:01:37 2011 -0400
@@ -107,9 +107,9 @@
<%
menu_options = [
['Email comments, bug reports, or suggestions', app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" ) ],
- ['Galaxy Wiki', app.config.get( "wiki_url", "http://bitbucket.org/galaxy/galaxy-central/wiki" ), "_blank" ],
+ ['Galaxy Wiki', app.config.get( "wiki_url", "http://wiki.g2.bx.psu.edu/" ), "_blank" ],
['Video tutorials (screencasts)', app.config.get( "screencasts_url", "http://galaxycast.org" ), "_blank" ],
- ['How to Cite Galaxy', app.config.get( "screencasts_url", "http://bitbucket.org/galaxy/galaxy-central/wiki/Citations" ), "_blank" ]
+ ['How to Cite Galaxy', app.config.get( "screencasts_url", "http://wiki.g2.bx.psu.edu/Citing%20Galaxy" ), "_blank" ]
]
tab( "help", "Help", None, menu_options=menu_options)
%>
--- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Tue Jul 19 13:40:24 2011 -0400
+++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Wed Jul 20 09:01:37 2011 -0400
@@ -13,8 +13,8 @@
<h1>rgManQQtest1</h1><table>
-<tr><td><a href="Allelep_manhattan.png"><img src="Allelep_manhattan.png" alt="Allelep_manhattan.png hspace="10" width="400"><br>(Click to download image Allelep_manhattan.png)</a></td></tr>
-<tr><td><a href="Allelep_qqplot.png"><img src="Allelep_qqplot.png" alt="Allelep_qqplot.png hspace="10" width="400"><br>(Click to download image Allelep_qqplot.png)</a></td></tr>
+<tr><td><a href="Allelep_manhattan.png"><img src="Allelep_manhattan.png" title="Allelep_manhattan.png hspace="10" width="400"><br>(Click to download image Allelep_manhattan.png)</a></td></tr>
+<tr><td><a href="Allelep_qqplot.png"><img src="Allelep_qqplot.png" title="Allelep_qqplot.png hspace="10" width="400"><br>(Click to download image Allelep_qqplot.png)</a></td></tr><tr><td><a href="rgManQQtest1.R">rgManQQtest1.R</a></td></tr><tr><td><a href="rgManQQtest1.R.log">rgManQQtest1.R.log</a></td></tr></table>
@@ -35,7 +35,7 @@
- round_any
+ rename, round_any
@@ -43,11 +43,11 @@
Loading required package: proto
-[1] "### 101 values read from /tmp/rgManQQtemplYC5wa read - now running plots"
+[1] "### 101 values read from /data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat read - now running plots"
[1] "## qqplot on Allelep done"
-[1] "## manhattan on Allelep starting 1 2 3"
+[1] "## manhattan on Allelep starting 2 3 8"
[1] "## manhattan plot on Allelep done"
@@ -62,7 +62,7 @@
# http://StephenTurner.us/
# http://GettingGeneticsDone.blogspot.com/
-# Last updated: Tuesday, December 22, 2009
+# Last updated: 19 July 2011 by Ross Lazarus
# R code for making manhattan plots and QQ plots from plink output files.
# With GWAS data this can take a lot of memory. Recommended for use on
# 64bit machines only, for now.
@@ -72,30 +72,30 @@
library(ggplot2)
coloursTouse = c('firebrick','darkblue','goldenrod','darkgreen')
-# not too fugly but need a colour expert please...
+# not too ugly but need a colour expert please...
-manhattan = function(chrom=NULL,offset=NULL,pvals=NULL, title=NULL, max.y="max",
- suggestiveline=0, genomewide=T, size.x.labels=9, size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
-
+DrawManhattan = function(pvals=Null,chrom=Null,offset=Null,title=NULL, max.y="max",suggestiveline=0, genomewide=T, size.x.labels=9,
+ size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!")
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- d=data.frame(CHR=chrom,BP=offset,P=pvals)
-
- #limit to only chrs 1-23?
- d=d[d$CHR %in% 1:23, ]
-
+ offset = as.integer(offset)
+ pvals = as.double(pvals)
+ chro = as.integer(chrom) # already dealt with X and friends?
+ d=data.frame(CHR=chro,BP=offset,P=pvals)
+ #limit to only chrs 1-22, x=23,y=24,Mt=25?
+ d=d[d$CHR %in% 1:25, ]
if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) {
- d=na.omit(d)
+ #d=na.omit(d)
d=d[d$P>0 & d$P<=1, ]
- d$logp = -log10(d$P)
-
+ d$logp = as.double(-log10(d$P))
d$pos=NA
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = sort(chrlist) # returns lexical ordering
nchr = length(chrlist) # may be any number?
if (nchr >= 2) {
for (x in c(1:nchr)) {
@@ -107,7 +107,11 @@
lastchr = chrlist[x-1] # previous whatever the list
lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1)
d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase
+ if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
+ cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
+ }
tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+
}
ticklim=c(min(d$pos),max(d$pos))
xlabs = chrlist
@@ -129,8 +133,6 @@
if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y
maxy = max(maxy,1.1*genomewideline)
- # if (maxy<8) maxy=8
- # only makes sense if genome wide is assumed - we could have a fine mapping region?
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ]
if (nchr >= 2) {
manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
@@ -149,9 +151,6 @@
axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
axis.ticks=theme_segment(colour=NA)
)
- #manplot = manplot + opts(panel.grid.y.minor=theme_blank(),panel.grid.y.major=theme_blank())
- #manplot = manplot + opts(panel.grid.major=theme_blank())
-
if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3))
if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red")
manplot
@@ -178,16 +177,24 @@
if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank())
qq
}
-rgqqMan = function(infile="/tmp/rgManQQtemplYC5wa",chromcolumn=1, offsetcolumn=2, pvalscolumns=c(3),
+
+rgqqMan = function(infile="/data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
title="rgManQQtest1",grey=0) {
rawd = read.table(infile,head=T,sep='\t')
dn = names(rawd)
cc = dn[chromcolumn]
oc = dn[offsetcolumn]
-nams = c(cc,oc)
+rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case
+rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh
+rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T)
+nams = c(cc,oc) # for sorting
plen = length(rawd[,1])
-doreorder=1
print(paste('###',plen,'values read from',infile,'read - now running plots',sep=' '))
+rawd = rawd[do.call(order,rawd[nams]),]
+# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
+# in case not yet ordered
if (plen > 0) {
for (pvalscolumn in pvalscolumns) {
if (pvalscolumn > 0)
@@ -199,14 +206,8 @@
ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
- if (doreorder) {
- rawd = rawd[do.call(order,rawd[nams]),]
- # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
- # in case not yet ordered
- doreorder = 0
- }
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
- mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
@@ -227,6 +228,6 @@
</pre>
-<h3><a href="http://rgenetics.org">Rgenetics</a> tool rgManQQ.py run at 07/11/2010 20:04:20</h3>
+<b><a href="http://rgenetics.org">Galaxy Rgenetics</a> tool output rgManQQ.py run at 20/07/2011 13:29:43</b><br/></div></body></html>
--- a/tools/data_source/microbial_import.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/data_source/microbial_import.xml Wed Jul 20 09:01:37 2011 -0400
@@ -109,7 +109,7 @@
**Note:** Having trouble locating your organism? Click here_ for a list of available species and their location.
-.. _here: http://bitbucket.org/galaxy/galaxy-central/wiki/Microbes
+.. _here: http://wiki.g2.bx.psu.edu/Main/Data%20Libraries/Microbes
</help></tool>
--- a/tools/new_operations/basecoverage.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/basecoverage.xml Wed Jul 20 09:01:37 2011 -0400
@@ -34,7 +34,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
</help>
--- a/tools/new_operations/cluster.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/cluster.xml Wed Jul 20 09:01:37 2011 -0400
@@ -67,7 +67,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/complement.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/complement.xml Wed Jul 20 09:01:37 2011 -0400
@@ -43,7 +43,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/concat.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/concat.xml Wed Jul 20 09:01:37 2011 -0400
@@ -41,7 +41,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/coverage.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/coverage.xml Wed Jul 20 09:01:37 2011 -0400
@@ -44,7 +44,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/intersect.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/intersect.xml Wed Jul 20 09:01:37 2011 -0400
@@ -117,7 +117,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/join.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/join.xml Wed Jul 20 09:01:37 2011 -0400
@@ -78,7 +78,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/merge.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/merge.xml Wed Jul 20 09:01:37 2011 -0400
@@ -44,7 +44,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/subtract.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/subtract.xml Wed Jul 20 09:01:37 2011 -0400
@@ -98,7 +98,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/next_gen_conversion/fastq_gen_conv.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/next_gen_conversion/fastq_gen_conv.xml Wed Jul 20 09:01:37 2011 -0400
@@ -75,7 +75,7 @@
A good description of fastq datasets can be found `here`__, while a description of Galaxy's fastq "logic" can be found `here`__. Because ranges of quality values within different types of fastq datasets overlap it very difficult to detect them automatically. This tool supports conversion of two commonly found types (Solexa/Illumina 1.0 and Illumina 1.3+) into fastq Sanger.
.. __: http://en.wikipedia.org/wiki/FASTQ_format
- .. __: http://bitbucket.org/galaxy/galaxy-central/wiki/NGS
+ .. __: http://wiki.g2.bx.psu.edu/Admin/NGS%20Local%20Setup
.. class:: warningmark
--- a/tools/rgenetics/rgManQQ.py Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/rgenetics/rgManQQ.py Wed Jul 20 09:01:37 2011 -0400
@@ -1,5 +1,9 @@
#!/usr/local/bin/python
-
+# updated july 20 to fix sort order - R unique() sorts into strict collating order
+# so need to sort after unique to revert to lexicographic order for x axis on Manhattan
+# rgmanqq updated july 19 to deal with x,y and mt
+# lots of fixes
+# ross lazarus
import sys,math,shutil,subprocess,os,time,tempfile,string
from os.path import abspath
from rgutils import timenow, RRun, galhtmlprefix, galhtmlpostfix, galhtmlattr
@@ -18,7 +22,7 @@
# http://StephenTurner.us/
# http://GettingGeneticsDone.blogspot.com/
-# Last updated: Tuesday, December 22, 2009
+# Last updated: 19 July 2011 by Ross Lazarus
# R code for making manhattan plots and QQ plots from plink output files.
# With GWAS data this can take a lot of memory. Recommended for use on
# 64bit machines only, for now.
@@ -28,30 +32,30 @@
library(ggplot2)
coloursTouse = c('firebrick','darkblue','goldenrod','darkgreen')
-# not too fugly but need a colour expert please...
+# not too ugly but need a colour expert please...
-manhattan = function(chrom=NULL,offset=NULL,pvals=NULL, title=NULL, max.y="max",
- suggestiveline=0, genomewide=T, size.x.labels=9, size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
-
+DrawManhattan = function(pvals=Null,chrom=Null,offset=Null,title=NULL, max.y="max",suggestiveline=0, genomewide=T, size.x.labels=9,
+ size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!")
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- d=data.frame(CHR=chrom,BP=offset,P=pvals)
-
- #limit to only chrs 1-23?
- d=d[d$CHR %in% 1:23, ]
-
+ offset = as.integer(offset)
+ pvals = as.double(pvals)
+ chro = as.integer(chrom) # already dealt with X and friends?
+ d=data.frame(CHR=chro,BP=offset,P=pvals)
+ #limit to only chrs 1-22, x=23,y=24,Mt=25?
+ d=d[d$CHR %in% 1:25, ]
if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) {
- d=na.omit(d)
+ #d=na.omit(d)
d=d[d$P>0 & d$P<=1, ]
- d$logp = -log10(d$P)
-
+ d$logp = as.double(-log10(d$P))
d$pos=NA
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = sort(chrlist) # returns lexical ordering
nchr = length(chrlist) # may be any number?
if (nchr >= 2) {
for (x in c(1:nchr)) {
@@ -63,7 +67,11 @@
lastchr = chrlist[x-1] # previous whatever the list
lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1)
d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase
+ if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
+ cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
+ }
tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+
}
ticklim=c(min(d$pos),max(d$pos))
xlabs = chrlist
@@ -85,8 +93,6 @@
if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y
maxy = max(maxy,1.1*genomewideline)
- # if (maxy<8) maxy=8
- # only makes sense if genome wide is assumed - we could have a fine mapping region?
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ]
if (nchr >= 2) {
manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
@@ -105,9 +111,6 @@
axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
axis.ticks=theme_segment(colour=NA)
)
- #manplot = manplot + opts(panel.grid.y.minor=theme_blank(),panel.grid.y.major=theme_blank())
- #manplot = manplot + opts(panel.grid.major=theme_blank())
-
if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3))
if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red")
manplot
@@ -134,21 +137,29 @@
if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank())
qq
}
+
"""
# we need another string to avoid confusion over string substitutions with %in%
# instantiate rcode2 string with infile,chromcol,offsetcol,pvalscols,title before saving and running
-rcode2 = """rgqqMan = function(infile="%s",chromcolumn=%s, offsetcolumn=%s, pvalscolumns=%s,
+rcode2 = """rgqqMan = function(infile="%s",chromcolumn=%d, offsetcolumn=%d, pvalscolumns=c(%s),
title="%s",grey=%d) {
rawd = read.table(infile,head=T,sep='\\t')
dn = names(rawd)
cc = dn[chromcolumn]
oc = dn[offsetcolumn]
-nams = c(cc,oc)
+rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case
+rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh
+rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T)
+nams = c(cc,oc) # for sorting
plen = length(rawd[,1])
-doreorder=1
print(paste('###',plen,'values read from',infile,'read - now running plots',sep=' '))
+rawd = rawd[do.call(order,rawd[nams]),]
+# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
+# in case not yet ordered
if (plen > 0) {
for (pvalscolumn in pvalscolumns) {
if (pvalscolumn > 0)
@@ -160,14 +171,8 @@
ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
- if (doreorder) {
- rawd = rawd[do.call(order,rawd[nams]),]
- # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
- # in case not yet ordered
- doreorder = 0
- }
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
- mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
@@ -198,50 +203,13 @@
this can be called externally, I guess...for QC eg?
"""
if debug:
- print 'doManQQ',input_fname,chrom_col,offset_col,pval_cols,title,grey,ctitle,outdir
- ffd,filtered_fname = tempfile.mkstemp(prefix='rgManQQtemp')
- f = open(filtered_fname,'w')
- inf = open(input_fname,'r')
- ohead = inf.readline().strip().split('\t') # see if we have a header
- inf.seek(0) # rewind
- newhead = ['pval%d' % (x+1) for x in pval_cols]
- newhead.insert(0,'Offset')
- newhead.insert(0,'Chrom')
- havehead = 0
- wewant = [chrom_col,offset_col]
- wewant += pval_cols
- try:
- allnums = ['%d' % x for x in ohead] # this should barf if non numerics == header row?
- f.write('\t'.join(newhead)) # for R to read
- f.write('\n')
- except:
- havehead = 1
- newhead = [ohead[chrom_col],ohead[offset_col]]
- newhead += [ohead[x] for x in pval_cols]
- f.write('\t'.join(newhead)) # use the original head
- f.write('\n')
- for i,row in enumerate(inf):
- if i == 0 and havehead:
- continue # ignore header
- sr = row.strip().split('\t')
- if len(sr) > 1:
- if sr[chrom_col].lower().find('chr') <> -1:
- sr[chrom_col] = sr[chrom_col][3:]
- newr = [sr[x] for x in wewant] # grab cols we need
- s = '\t'.join(newr)
- f.write(s)
- f.write('\n')
- f.close()
- pvc = [x+3 for x in range(len(pval_cols))] # 2 for offset and chrom, 1 for r offset start
- pvc = 'c(%s)' % (','.join(map(str,pvc)))
- rcmd = '%s%s' % (rcode,rcode2 % (filtered_fname,'1','2',pvc,title,grey))
+ print 'doManQQ',input_fname,chrom_col,offset_col,pval_cols,title,grey,ctitle,outdir
+ rcmd = '%s%s' % (rcode,rcode2 % (input_fname,chrom_col,offset_col,pval_cols,title,grey))
if debug:
- print 'running\n%s\n' % rcmd
+ print 'running\n%s\n' % rcmd
rlog,flist = RRun(rcmd=rcmd,title=ctitle,outdir=outdir)
rlog.append('## R script=')
rlog.append(rcmd)
- if beTidy:
- os.unlink(filtered_fname)
return rlog,flist
@@ -272,19 +240,20 @@
offset_col = -1
p = sys.argv[7].strip().split(',')
try:
- p = [int(x) for x in p]
+ q = [int(x) for x in p]
except:
- p = [-1]
+ p = -1
if chrom_col == -1 or offset_col == -1: # was passed as zero - do not do manhattan plots
chrom_col = -1
offset_col = -1
grey = 0
if (sys.argv[8].lower() in ['1','true']):
grey = 1
- if p == [-1]:
+ if p == -1:
print >> sys.stderr,'## Cannot run rgManQQ - missing pval column'
sys.exit(1)
- rlog,flist = doManQQ(input_fname,chrom_col,offset_col,p,title,grey,ctitle,outdir)
+ p = ['%d' % (int(x) + 1) for x in p]
+ rlog,flist = doManQQ(input_fname,chrom_col+1,offset_col+1,','.join(p),title,grey,ctitle,outdir)
flist.sort()
html = [galhtmlprefix % progname,]
html.append('<h1>%s</h1>' % title)
@@ -294,7 +263,7 @@
fname,expl = row # RRun returns pairs of filenames fiddled for the log and R script
e = os.path.splitext(fname)[-1]
if e in ['.png','.jpg']:
- s= '<tr><td><a href="%s"><img src="%s" alt="%s hspace="10" width="400"><br>(Click to download image %s)</a></td></tr>' \
+ s= '<tr><td><a href="%s"><img src="%s" title="%s hspace="10" width="400"><br>(Click to download image %s)</a></td></tr>' \
% (fname,fname,expl,expl )
html.append(s)
else:
@@ -317,3 +286,4 @@
if __name__ == "__main__":
main()
+
--- a/tools/rgenetics/rgManQQ.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/rgenetics/rgManQQ.xml Wed Jul 20 09:01:37 2011 -0400
@@ -1,4 +1,4 @@
-<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.1">
+<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.2"><code file="rgManQQ_code.py"/><description>Plots for WGA P values</description>
--- a/universe_wsgi.ini.sample Tue Jul 19 13:40:24 2011 -0400
+++ b/universe_wsgi.ini.sample Wed Jul 20 09:01:37 2011 -0400
@@ -3,7 +3,7 @@
# environment. To tune the application for a multi-user production
# environment, see the documentation at:
#
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/ProductionServer
+# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Production%20Server
#
# Throughout this sample configuration file, except where stated otherwise,
@@ -129,7 +129,7 @@
# Directory where data used by tools is located, see the samples in that
# directory and the wiki for help:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/DataIntegration
+# http://wiki.g2.bx.psu.edu/Admin/Data%20Integration
#tool_data_path = tool-data
# Directory where chrom len files are kept, currently mainly used by trackster
@@ -208,13 +208,13 @@
#logo_url = /
# The URL linked by the "Galaxy Wiki" link in the "Help" menu.
-#wiki_url = http://bitbucket.org/galaxy/galaxy-central/wiki
+#wiki_url = http://wiki.g2.bx.psu.edu/
# The URL linked by the "Email comments..." link in the "Help" menu.
#bugs_email = mailto:galaxy-bugs@bx.psu.edu
# The URL linked by the "How to Cite..." link in the "Help" menu.
-#citation_url = http://bitbucket.org/galaxy/galaxy-central/wiki/Citations
+#citation_url = http://wiki.g2.bx.psu.edu/Citing%20Galaxy
# Serve static content, which must be enabled if you're not serving it via a
# proxy server. These options should be self explanatory and so are not
@@ -314,7 +314,7 @@
# -- Data Libraries
# These library upload options are described in much more detail in the wiki:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/DataLibraries/UploadingFiles
+# http://wiki.g2.bx.psu.edu/Admin/Data%20Libraries/Uploading%20Library%20Files
# Add an option to the library upload form which allows administrators to
# upload a directory of files.
@@ -372,7 +372,7 @@
# User authentication can be delegated to an upstream proxy server (usually
# Apache). The upstream proxy should set a REMOTE_USER header in the request.
# Enabling remote user disables regular logins. For more information, see:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/ApacheProxy
+# http://wiki.g2.bx.psu.edu/Admin/Config/Apache%20Proxy
#use_remote_user = False
# If use_remote_user is enabled and your external authentication
@@ -388,7 +388,7 @@
# users (email addresses). These users will have access to the Admin section
# of the server, and will have access to create users, groups, roles,
# libraries, and more. For more information, see:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Admin/AdminInterface
+# http://wiki.g2.bx.psu.edu/Admin/Interface
#admin_users = None
# Force everyone to log in (disable anonymous access).
@@ -454,7 +454,7 @@
# If running multiple Galaxy processes, one can be designated as the job
# runner. For more information, see:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/WebApplicationScaling
+# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Web%20Application%20Scal…
#enable_job_running = True
# Should jobs be tracked through the database, rather than in memory.
@@ -505,7 +505,7 @@
# Clustering Galaxy is not a straightforward process and requires some
# pre-configuration. See the the wiki before attempting to set any of these
# options:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster
+# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster
# Comma-separated list of job runners to start. local is always started. If
# left commented, no jobs will be run on the cluster, even if a cluster URL is
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

20 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/dacf97a6c663/
changeset: dacf97a6c663
user: fubar
date: 2011-07-20 05:34:27
summary: Minor tweak to the x axis sorting of Manhattan plots to fix the fact that in R, unique() returns an alphabetically sorted list - need to use sort on the result to get correct lexicographic ordering (10 comes after 9, e.g.) on the X axis...
affected #: 2 files (181 bytes)
--- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Tue Jul 19 22:18:40 2011 -0400
+++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Wed Jul 20 13:34:27 2011 +1000
@@ -43,7 +43,7 @@
Loading required package: proto
-[1] "### 101 values read from /data/tmp/tmpTPXdE1/database/files/000/dataset_1.dat read - now running plots"
+[1] "### 101 values read from /data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat read - now running plots"
[1] "## qqplot on Allelep done"
@@ -62,7 +62,7 @@
# http://StephenTurner.us/
# http://GettingGeneticsDone.blogspot.com/
-# Last updated: Tuesday, December 22, 2009
+# Last updated: 19 July 2011 by Ross Lazarus
# R code for making manhattan plots and QQ plots from plink output files.
# With GWAS data this can take a lot of memory. Recommended for use on
# 64bit machines only, for now.
@@ -81,14 +81,9 @@
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- chro = sub('chr','',chrom, ignore.case = T) # just in case
- chro = sub(':','',chro, ignore.case = T) # ugh
- chro = sub('X',23,chro, ignore.case = T)
- chro = sub('Y',24,chro, ignore.case = T)
- chro = sub('Mt',25,chro, ignore.case = T)
offset = as.integer(offset)
pvals = as.double(pvals)
- chro = as.integer(chro)
+ chro = as.integer(chrom) # already dealt with X and friends?
d=data.frame(CHR=chro,BP=offset,P=pvals)
#limit to only chrs 1-22, x=23,y=24,Mt=25?
d=d[d$CHR %in% 1:25, ]
@@ -100,6 +95,7 @@
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = sort(chrlist) # returns lexical ordering
nchr = length(chrlist) # may be any number?
if (nchr >= 2) {
for (x in c(1:nchr)) {
@@ -182,16 +178,23 @@
qq
}
-rgqqMan = function(infile="/data/tmp/tmpTPXdE1/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
+rgqqMan = function(infile="/data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
title="rgManQQtest1",grey=0) {
rawd = read.table(infile,head=T,sep='\t')
dn = names(rawd)
cc = dn[chromcolumn]
oc = dn[offsetcolumn]
-nams = c(cc,oc)
+rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case
+rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh
+rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T)
+nams = c(cc,oc) # for sorting
plen = length(rawd[,1])
-doreorder=1
print(paste('###',plen,'values read from',infile,'read - now running plots',sep=' '))
+rawd = rawd[do.call(order,rawd[nams]),]
+# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
+# in case not yet ordered
if (plen > 0) {
for (pvalscolumn in pvalscolumns) {
if (pvalscolumn > 0)
@@ -203,12 +206,6 @@
ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
- if (doreorder) {
- rawd = rawd[do.call(order,rawd[nams]),]
- # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
- # in case not yet ordered
- doreorder = 0
- }
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
@@ -231,6 +228,6 @@
</pre>
-<b><a href="http://rgenetics.org">Galaxy Rgenetics</a> tool output rgManQQ.py run at 20/07/2011 12:08:46</b><br/>
+<b><a href="http://rgenetics.org">Galaxy Rgenetics</a> tool output rgManQQ.py run at 20/07/2011 13:29:43</b><br/></div></body></html>
--- a/tools/rgenetics/rgManQQ.py Tue Jul 19 22:18:40 2011 -0400
+++ b/tools/rgenetics/rgManQQ.py Wed Jul 20 13:34:27 2011 +1000
@@ -1,4 +1,6 @@
#!/usr/local/bin/python
+# updated july 20 to fix sort order - R unique() sorts into strict collating order
+# so need to sort after unique to revert to lexicographic order for x axis on Manhattan
# rgmanqq updated july 19 to deal with x,y and mt
# lots of fixes
# ross lazarus
@@ -39,14 +41,9 @@
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- chro = sub('chr','',chrom, ignore.case = T) # just in case
- chro = sub(':','',chro, ignore.case = T) # ugh
- chro = sub('X',23,chro, ignore.case = T)
- chro = sub('Y',24,chro, ignore.case = T)
- chro = sub('Mt',25,chro, ignore.case = T)
offset = as.integer(offset)
pvals = as.double(pvals)
- chro = as.integer(chro)
+ chro = as.integer(chrom) # already dealt with X and friends?
d=data.frame(CHR=chro,BP=offset,P=pvals)
#limit to only chrs 1-22, x=23,y=24,Mt=25?
d=d[d$CHR %in% 1:25, ]
@@ -58,6 +55,7 @@
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = sort(chrlist) # returns lexical ordering
nchr = length(chrlist) # may be any number?
if (nchr >= 2) {
for (x in c(1:nchr)) {
@@ -151,10 +149,17 @@
dn = names(rawd)
cc = dn[chromcolumn]
oc = dn[offsetcolumn]
-nams = c(cc,oc)
+rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case
+rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh
+rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T)
+nams = c(cc,oc) # for sorting
plen = length(rawd[,1])
-doreorder=1
print(paste('###',plen,'values read from',infile,'read - now running plots',sep=' '))
+rawd = rawd[do.call(order,rawd[nams]),]
+# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
+# in case not yet ordered
if (plen > 0) {
for (pvalscolumn in pvalscolumns) {
if (pvalscolumn > 0)
@@ -166,12 +171,6 @@
ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
- if (doreorder) {
- rawd = rawd[do.call(order,rawd[nams]),]
- # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
- # in case not yet ordered
- doreorder = 0
- }
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: fubar: Fixes to rgManQQ.py to handle chromosome oddities like 'chr1' and X.
by Bitbucket 20 Jul '11
by Bitbucket 20 Jul '11
20 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/2b55d3bd13d4/
changeset: 2b55d3bd13d4
user: fubar
date: 2011-07-20 04:17:43
summary: Fixes to rgManQQ.py to handle chromosome oddities like 'chr1' and X.
Also cleaned up redundant file recreation - the code passes column numbers to R so the raw data can be read as supplied by Galaxy
Thanks to Alison Harrill for exposing some more input edge cases...
affected #: 3 files (1.3 KB)
--- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Tue Jul 19 21:22:07 2011 -0400
+++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Wed Jul 20 12:17:43 2011 +1000
@@ -13,8 +13,8 @@
<h1>rgManQQtest1</h1><table>
-<tr><td><a href="Allelep_manhattan.png"><img src="Allelep_manhattan.png" alt="Allelep_manhattan.png hspace="10" width="400"><br>(Click to download image Allelep_manhattan.png)</a></td></tr>
-<tr><td><a href="Allelep_qqplot.png"><img src="Allelep_qqplot.png" alt="Allelep_qqplot.png hspace="10" width="400"><br>(Click to download image Allelep_qqplot.png)</a></td></tr>
+<tr><td><a href="Allelep_manhattan.png"><img src="Allelep_manhattan.png" title="Allelep_manhattan.png hspace="10" width="400"><br>(Click to download image Allelep_manhattan.png)</a></td></tr>
+<tr><td><a href="Allelep_qqplot.png"><img src="Allelep_qqplot.png" title="Allelep_qqplot.png hspace="10" width="400"><br>(Click to download image Allelep_qqplot.png)</a></td></tr><tr><td><a href="rgManQQtest1.R">rgManQQtest1.R</a></td></tr><tr><td><a href="rgManQQtest1.R.log">rgManQQtest1.R.log</a></td></tr></table>
@@ -35,7 +35,7 @@
- round_any
+ rename, round_any
@@ -43,11 +43,11 @@
Loading required package: proto
-[1] "### 101 values read from /tmp/rgManQQtemplYC5wa read - now running plots"
+[1] "### 101 values read from /data/tmp/tmpTPXdE1/database/files/000/dataset_1.dat read - now running plots"
[1] "## qqplot on Allelep done"
-[1] "## manhattan on Allelep starting 1 2 3"
+[1] "## manhattan on Allelep starting 2 3 8"
[1] "## manhattan plot on Allelep done"
@@ -72,26 +72,30 @@
library(ggplot2)
coloursTouse = c('firebrick','darkblue','goldenrod','darkgreen')
-# not too fugly but need a colour expert please...
+# not too ugly but need a colour expert please...
-manhattan = function(chrom=NULL,offset=NULL,pvals=NULL, title=NULL, max.y="max",
- suggestiveline=0, genomewide=T, size.x.labels=9, size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
-
+DrawManhattan = function(pvals=Null,chrom=Null,offset=Null,title=NULL, max.y="max",suggestiveline=0, genomewide=T, size.x.labels=9,
+ size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!")
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- d=data.frame(CHR=chrom,BP=offset,P=pvals)
-
- #limit to only chrs 1-23?
- d=d[d$CHR %in% 1:23, ]
-
+ chro = sub('chr','',chrom, ignore.case = T) # just in case
+ chro = sub(':','',chro, ignore.case = T) # ugh
+ chro = sub('X',23,chro, ignore.case = T)
+ chro = sub('Y',24,chro, ignore.case = T)
+ chro = sub('Mt',25,chro, ignore.case = T)
+ offset = as.integer(offset)
+ pvals = as.double(pvals)
+ chro = as.integer(chro)
+ d=data.frame(CHR=chro,BP=offset,P=pvals)
+ #limit to only chrs 1-22, x=23,y=24,Mt=25?
+ d=d[d$CHR %in% 1:25, ]
if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) {
- d=na.omit(d)
+ #d=na.omit(d)
d=d[d$P>0 & d$P<=1, ]
- d$logp = -log10(d$P)
-
+ d$logp = as.double(-log10(d$P))
d$pos=NA
ticks=NULL
lastbase=0
@@ -107,7 +111,11 @@
lastchr = chrlist[x-1] # previous whatever the list
lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1)
d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase
+ if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
+ cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
+ }
tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+
}
ticklim=c(min(d$pos),max(d$pos))
xlabs = chrlist
@@ -129,8 +137,6 @@
if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y
maxy = max(maxy,1.1*genomewideline)
- # if (maxy<8) maxy=8
- # only makes sense if genome wide is assumed - we could have a fine mapping region?
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ]
if (nchr >= 2) {
manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
@@ -149,9 +155,6 @@
axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
axis.ticks=theme_segment(colour=NA)
)
- #manplot = manplot + opts(panel.grid.y.minor=theme_blank(),panel.grid.y.major=theme_blank())
- #manplot = manplot + opts(panel.grid.major=theme_blank())
-
if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3))
if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red")
manplot
@@ -178,7 +181,8 @@
if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank())
qq
}
-rgqqMan = function(infile="/tmp/rgManQQtemplYC5wa",chromcolumn=1, offsetcolumn=2, pvalscolumns=c(3),
+
+rgqqMan = function(infile="/data/tmp/tmpTPXdE1/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
title="rgManQQtest1",grey=0) {
rawd = read.table(infile,head=T,sep='\t')
dn = names(rawd)
@@ -206,7 +210,7 @@
doreorder = 0
}
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
- mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
@@ -227,6 +231,6 @@
</pre>
-<h3><a href="http://rgenetics.org">Rgenetics</a> tool rgManQQ.py run at 07/11/2010 20:04:20</h3>
+<b><a href="http://rgenetics.org">Galaxy Rgenetics</a> tool output rgManQQ.py run at 20/07/2011 12:08:46</b><br/></div></body></html>
--- a/tools/rgenetics/rgManQQ.py Tue Jul 19 21:22:07 2011 -0400
+++ b/tools/rgenetics/rgManQQ.py Wed Jul 20 12:17:43 2011 +1000
@@ -1,5 +1,7 @@
#!/usr/local/bin/python
-
+# rgmanqq updated july 19 to deal with x,y and mt
+# lots of fixes
+# ross lazarus
import sys,math,shutil,subprocess,os,time,tempfile,string
from os.path import abspath
from rgutils import timenow, RRun, galhtmlprefix, galhtmlpostfix, galhtmlattr
@@ -18,7 +20,7 @@
# http://StephenTurner.us/
# http://GettingGeneticsDone.blogspot.com/
-# Last updated: Tuesday, December 22, 2009
+# Last updated: 19 July 2011 by Ross Lazarus
# R code for making manhattan plots and QQ plots from plink output files.
# With GWAS data this can take a lot of memory. Recommended for use on
# 64bit machines only, for now.
@@ -28,26 +30,30 @@
library(ggplot2)
coloursTouse = c('firebrick','darkblue','goldenrod','darkgreen')
-# not too fugly but need a colour expert please...
+# not too ugly but need a colour expert please...
-manhattan = function(chrom=NULL,offset=NULL,pvals=NULL, title=NULL, max.y="max",
- suggestiveline=0, genomewide=T, size.x.labels=9, size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
-
+DrawManhattan = function(pvals=Null,chrom=Null,offset=Null,title=NULL, max.y="max",suggestiveline=0, genomewide=T, size.x.labels=9,
+ size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!")
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- d=data.frame(CHR=chrom,BP=offset,P=pvals)
-
- #limit to only chrs 1-23?
- d=d[d$CHR %in% 1:23, ]
-
+ chro = sub('chr','',chrom, ignore.case = T) # just in case
+ chro = sub(':','',chro, ignore.case = T) # ugh
+ chro = sub('X',23,chro, ignore.case = T)
+ chro = sub('Y',24,chro, ignore.case = T)
+ chro = sub('Mt',25,chro, ignore.case = T)
+ offset = as.integer(offset)
+ pvals = as.double(pvals)
+ chro = as.integer(chro)
+ d=data.frame(CHR=chro,BP=offset,P=pvals)
+ #limit to only chrs 1-22, x=23,y=24,Mt=25?
+ d=d[d$CHR %in% 1:25, ]
if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) {
- d=na.omit(d)
+ #d=na.omit(d)
d=d[d$P>0 & d$P<=1, ]
- d$logp = -log10(d$P)
-
+ d$logp = as.double(-log10(d$P))
d$pos=NA
ticks=NULL
lastbase=0
@@ -63,7 +69,11 @@
lastchr = chrlist[x-1] # previous whatever the list
lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1)
d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase
+ if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
+ cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
+ }
tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+
}
ticklim=c(min(d$pos),max(d$pos))
xlabs = chrlist
@@ -85,8 +95,6 @@
if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y
maxy = max(maxy,1.1*genomewideline)
- # if (maxy<8) maxy=8
- # only makes sense if genome wide is assumed - we could have a fine mapping region?
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ]
if (nchr >= 2) {
manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
@@ -105,9 +113,6 @@
axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
axis.ticks=theme_segment(colour=NA)
)
- #manplot = manplot + opts(panel.grid.y.minor=theme_blank(),panel.grid.y.major=theme_blank())
- #manplot = manplot + opts(panel.grid.major=theme_blank())
-
if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3))
if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red")
manplot
@@ -134,12 +139,13 @@
if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank())
qq
}
+
"""
# we need another string to avoid confusion over string substitutions with %in%
# instantiate rcode2 string with infile,chromcol,offsetcol,pvalscols,title before saving and running
-rcode2 = """rgqqMan = function(infile="%s",chromcolumn=%s, offsetcolumn=%s, pvalscolumns=%s,
+rcode2 = """rgqqMan = function(infile="%s",chromcolumn=%d, offsetcolumn=%d, pvalscolumns=c(%s),
title="%s",grey=%d) {
rawd = read.table(infile,head=T,sep='\\t')
dn = names(rawd)
@@ -167,7 +173,7 @@
doreorder = 0
}
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
- mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
@@ -198,50 +204,13 @@
this can be called externally, I guess...for QC eg?
"""
if debug:
- print 'doManQQ',input_fname,chrom_col,offset_col,pval_cols,title,grey,ctitle,outdir
- ffd,filtered_fname = tempfile.mkstemp(prefix='rgManQQtemp')
- f = open(filtered_fname,'w')
- inf = open(input_fname,'r')
- ohead = inf.readline().strip().split('\t') # see if we have a header
- inf.seek(0) # rewind
- newhead = ['pval%d' % (x+1) for x in pval_cols]
- newhead.insert(0,'Offset')
- newhead.insert(0,'Chrom')
- havehead = 0
- wewant = [chrom_col,offset_col]
- wewant += pval_cols
- try:
- allnums = ['%d' % x for x in ohead] # this should barf if non numerics == header row?
- f.write('\t'.join(newhead)) # for R to read
- f.write('\n')
- except:
- havehead = 1
- newhead = [ohead[chrom_col],ohead[offset_col]]
- newhead += [ohead[x] for x in pval_cols]
- f.write('\t'.join(newhead)) # use the original head
- f.write('\n')
- for i,row in enumerate(inf):
- if i == 0 and havehead:
- continue # ignore header
- sr = row.strip().split('\t')
- if len(sr) > 1:
- if sr[chrom_col].lower().find('chr') <> -1:
- sr[chrom_col] = sr[chrom_col][3:]
- newr = [sr[x] for x in wewant] # grab cols we need
- s = '\t'.join(newr)
- f.write(s)
- f.write('\n')
- f.close()
- pvc = [x+3 for x in range(len(pval_cols))] # 2 for offset and chrom, 1 for r offset start
- pvc = 'c(%s)' % (','.join(map(str,pvc)))
- rcmd = '%s%s' % (rcode,rcode2 % (filtered_fname,'1','2',pvc,title,grey))
+ print 'doManQQ',input_fname,chrom_col,offset_col,pval_cols,title,grey,ctitle,outdir
+ rcmd = '%s%s' % (rcode,rcode2 % (input_fname,chrom_col,offset_col,pval_cols,title,grey))
if debug:
- print 'running\n%s\n' % rcmd
+ print 'running\n%s\n' % rcmd
rlog,flist = RRun(rcmd=rcmd,title=ctitle,outdir=outdir)
rlog.append('## R script=')
rlog.append(rcmd)
- if beTidy:
- os.unlink(filtered_fname)
return rlog,flist
@@ -272,19 +241,20 @@
offset_col = -1
p = sys.argv[7].strip().split(',')
try:
- p = [int(x) for x in p]
+ q = [int(x) for x in p]
except:
- p = [-1]
+ p = -1
if chrom_col == -1 or offset_col == -1: # was passed as zero - do not do manhattan plots
chrom_col = -1
offset_col = -1
grey = 0
if (sys.argv[8].lower() in ['1','true']):
grey = 1
- if p == [-1]:
+ if p == -1:
print >> sys.stderr,'## Cannot run rgManQQ - missing pval column'
sys.exit(1)
- rlog,flist = doManQQ(input_fname,chrom_col,offset_col,p,title,grey,ctitle,outdir)
+ p = ['%d' % (int(x) + 1) for x in p]
+ rlog,flist = doManQQ(input_fname,chrom_col+1,offset_col+1,','.join(p),title,grey,ctitle,outdir)
flist.sort()
html = [galhtmlprefix % progname,]
html.append('<h1>%s</h1>' % title)
@@ -294,7 +264,7 @@
fname,expl = row # RRun returns pairs of filenames fiddled for the log and R script
e = os.path.splitext(fname)[-1]
if e in ['.png','.jpg']:
- s= '<tr><td><a href="%s"><img src="%s" alt="%s hspace="10" width="400"><br>(Click to download image %s)</a></td></tr>' \
+ s= '<tr><td><a href="%s"><img src="%s" title="%s hspace="10" width="400"><br>(Click to download image %s)</a></td></tr>' \
% (fname,fname,expl,expl )
html.append(s)
else:
@@ -317,3 +287,4 @@
if __name__ == "__main__":
main()
+
--- a/tools/rgenetics/rgManQQ.xml Tue Jul 19 21:22:07 2011 -0400
+++ b/tools/rgenetics/rgManQQ.xml Wed Jul 20 12:17:43 2011 +1000
@@ -1,4 +1,4 @@
-<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.1">
+<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.2"><code file="rgManQQ_code.py"/><description>Plots for WGA P values</description>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: kanwei: Fix LibraryField grouping issue by adding prefix
by Bitbucket 20 Jul '11
by Bitbucket 20 Jul '11
20 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/f5f352f7099d/
changeset: f5f352f7099d
user: kanwei
date: 2011-07-20 03:22:07
summary: Fix LibraryField grouping issue by adding prefix
affected #: 1 file (21 bytes)
--- a/lib/galaxy/web/form_builder.py Tue Jul 19 15:10:11 2011 -0400
+++ b/lib/galaxy/web/form_builder.py Tue Jul 19 21:22:07 2011 -0400
@@ -658,7 +658,7 @@
self.name = name
self.ldda = value
self.trans = trans
- def get_html( self, disabled=False ):
+ def get_html( self, prefix="", disabled=False ):
if not self.ldda:
ldda = ""
text = "Choose a library dataset"
@@ -666,7 +666,7 @@
ldda = self.trans.security.encode_id(self.ldda.id)
text = self.ldda.name
return '<a href="javascript:void(0);" class="add-librarydataset">%s</a> \
- <input type="hidden" name="%s" value="%s">' % ( text, self.name, escape( str(ldda), quote=True ) )
+ <input type="hidden" name="%s%s" value="%s">' % ( text, prefix, self.name, escape( str(ldda), quote=True ) )
def get_display_text(self):
if self.ldda:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: greg: Fix for message display when setting tool shed repository metadata.
by Bitbucket 19 Jul '11
by Bitbucket 19 Jul '11
19 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/9d17faf3398a/
changeset: 9d17faf3398a
user: greg
date: 2011-07-19 21:10:11
summary: Fix for message display when setting tool shed repository metadata.
affected #: 1 file (64 bytes)
--- a/lib/galaxy/webapps/community/controllers/common.py Tue Jul 19 13:48:42 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/common.py Tue Jul 19 15:10:11 2011 -0400
@@ -230,6 +230,8 @@
correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_loc_file
else:
correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % missing_file
+ else:
+ correction_msg = exception_msg
message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
status = 'error'
elif flush_needed:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/f9ef2aecaecd/
changeset: f9ef2aecaecd
user: greg
date: 2011-07-19 19:48:42
summary: Add exception handling around all mercurial api commit calls. On Linux, the mercurial api's commit seems to bump into issues intermittently, throws an exception, and rolls back the transaction. Although I can reproduce it on Linux, I cannot reproduce this behavior on a Mac. The exception handler uses a different approach to committing the changes. I've also added this code to the update_for_browsing method.
affected #: 3 files (1.9 KB)
--- a/lib/galaxy/webapps/community/controllers/common.py Tue Jul 19 11:27:16 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/common.py Tue Jul 19 13:48:42 2011 -0400
@@ -297,7 +297,7 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def update_for_browsing( repository, current_working_dir ):
+def update_for_browsing( repository, current_working_dir, commit_message='' ):
# Make a copy of a repository's files for browsing.
repo_dir = repository.repo_path
repo = hg.repository( ui.ui(), repo_dir )
@@ -316,12 +316,15 @@
# ! = deleted, but still tracked
# ? = not tracked
# I = ignored
- # We'll remove all files that are not tracked or ignored.
files_to_remove_from_disk = []
+ files_to_commit = []
for status_and_file_name in status_and_file_names:
if status_and_file_name.startswith( '?' ) or status_and_file_name.startswith( 'I' ):
files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
+ files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
for full_path in files_to_remove_from_disk:
+ # We'll remove all files that are not tracked or ignored.
if os.path.isdir( full_path ):
try:
os.rmdir( full_path )
@@ -336,6 +339,11 @@
except OSError, e:
# The directory is not empty
pass
+ if files_to_commit:
+ if not commit_message:
+ commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
os.chdir( repo_dir )
os.system( 'hg update > /dev/null 2>&1' )
os.chdir( current_working_dir )
--- a/lib/galaxy/webapps/community/controllers/repository.py Tue Jul 19 11:27:16 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py Tue Jul 19 13:48:42 2011 -0400
@@ -425,7 +425,7 @@
repository = get_repository( trans, id )
repo = hg.repository( ui.ui(), repository.repo_path )
current_working_dir = os.getcwd()
- update_for_browsing( repository, current_working_dir )
+ update_for_browsing( repository, current_working_dir, commit_message=commit_message )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
repository=repository,
@@ -454,11 +454,17 @@
# Commit the change set.
if not commit_message:
commit_message = 'Deleted selected files'
- # Commit the changes.
- commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+ try:
+ commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- update_for_browsing( repository, current_working_dir )
+ update_for_browsing( repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
repo = hg.repository( ui.ui(), repo_dir )
if tip != repository.tip:
--- a/lib/galaxy/webapps/community/controllers/upload.py Tue Jul 19 11:27:16 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py Tue Jul 19 13:48:42 2011 -0400
@@ -87,7 +87,16 @@
# Move the uploaded file to the load_point within the repository hierarchy.
shutil.move( uploaded_file_name, full_path )
commands.add( repo.ui, repo, full_path )
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ """
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
+ """
if full_path.endswith( '.loc.sample' ):
# Handle the special case where a xxx.loc.sample file is
# being uploaded by copying it to ~/tool-data/xxx.loc.
@@ -96,7 +105,7 @@
if ok:
# Update the repository files for browsing, a by-product of doing this
# is eliminating unwanted files from the repository directory.
- update_for_browsing( repository, current_working_dir )
+ update_for_browsing( repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
if tip != repository.tip:
if ( isgzip or isbz2 ) and uncompress_file:
@@ -183,8 +192,14 @@
# Handle the special case where a xxx.loc.sample file is
# being uploaded by copying it to ~/tool-data/xxx.loc.
copy_sample_loc_file( trans, filename_in_archive )
- # Commit the changes.
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
handle_email_alerts( trans, repository )
return True, '', files_to_remove
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

19 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/98ef4f135243/
changeset: 98ef4f135243
user: greg
date: 2011-07-19 17:27:16
summary: Various tool shed fixes and enhancements:
1) Fix for uploading empty files to a repository
2) Better error messages when setting repository metadata fails
3) Automatically copy uploaded xxx.loc.sample files to ~/tool-data/xxx.loc
4) Fix for setting the value of the upload_point when uploading files to a repository
affected #: 4 files (3.3 KB)
--- a/lib/galaxy/webapps/community/controllers/common.py Mon Jul 18 21:56:23 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/common.py Tue Jul 19 11:27:16 2011 -0400
@@ -214,7 +214,23 @@
if invalid_files:
message = "Metadata cannot be defined for change set revision '%s'. Correct the following problems and reset metadata.<br/>" % str( change_set_revision )
for itc_tup in invalid_files:
- message += "<b>%s</b> - %s<br/>" % ( itc_tup[0], itc_tup[1] )
+ # Handle the special case where a tool depends on a missing xxx.loc file by telling
+ # the user to upload xxx.loc.sample to the repository so that it can be copied to
+ # ~/tool-data/xxx.loc. In this case, itc_tup[1] will be a message looking something like:
+ # [Errno 2] No such file or directory: '/Users/gvk/central/tool-data/blast2go.loc'
+ tool_file = itc_tup[0]
+ exception_msg = itc_tup[1]
+ if exception_msg.find( 'No such file or directory' ) >= 0:
+ exception_items = exception_msg.split()
+ missing_file_items = exception_items[7].split( '/' )
+ missing_file = missing_file_items[-1].rstrip( '\'' )
+ correction_msg = "This file refers to a missing file <b>%s</b>. " % str( missing_file )
+ if exception_msg.find( '.loc' ) >= 0:
+ sample_loc_file = '%s.sample' % str( missing_file )
+ correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_loc_file
+ else:
+ correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % missing_file
+ message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
status = 'error'
elif flush_needed:
# We only flush if there are no tool config errors, so change sets will only have metadata
@@ -231,6 +247,14 @@
if str( ctx ) == change_set_revision:
return ctx
return None
+def copy_sample_loc_file( trans, filename ):
+ """Copy xxx.loc.sample to ~/tool-data/xxx.loc"""
+ sample_loc_file = os.path.split( filename )[1]
+ loc_file = os.path.split( filename )[1].rstrip( '.sample' )
+ tool_data_path = os.path.abspath( trans.app.config.tool_data_path )
+ if not ( os.path.exists( os.path.join( tool_data_path, loc_file ) ) or os.path.exists( os.path.join( tool_data_path, sample_loc_file ) ) ):
+ shutil.copy( os.path.abspath( filename ), os.path.join( tool_data_path, sample_loc_file ) )
+ shutil.copy( os.path.abspath( filename ), os.path.join( tool_data_path, loc_file ) )
def get_user( trans, id ):
"""Get a user from the database"""
return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
--- a/lib/galaxy/webapps/community/controllers/upload.py Mon Jul 18 21:56:23 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py Tue Jul 19 11:27:16 2011 -0400
@@ -45,6 +45,7 @@
uploaded_file = file_data.file
uploaded_file_name = uploaded_file.name
uploaded_file_filename = file_data.filename
+ isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
if uploaded_file:
isgzip = False
isbz2 = False
@@ -53,17 +54,21 @@
if not isgzip:
isbz2 = is_bz2( uploaded_file_name )
ok = True
- # Determine what we have - a single file or an archive
- try:
- if ( isgzip or isbz2 ) and uncompress_file:
- # Open for reading with transparent compression.
- tar = tarfile.open( uploaded_file_name, 'r:*' )
- else:
- tar = tarfile.open( uploaded_file_name )
- istar = True
- except tarfile.ReadError, e:
+ if isempty:
tar = None
istar = False
+ else:
+ # Determine what we have - a single file or an archive
+ try:
+ if ( isgzip or isbz2 ) and uncompress_file:
+ # Open for reading with transparent compression.
+ tar = tarfile.open( uploaded_file_name, 'r:*' )
+ else:
+ tar = tarfile.open( uploaded_file_name )
+ istar = True
+ except tarfile.ReadError, e:
+ tar = None
+ istar = False
if istar:
ok, message, files_to_remove = self.upload_tar( trans,
repository,
@@ -83,6 +88,10 @@
shutil.move( uploaded_file_name, full_path )
commands.add( repo.ui, repo, full_path )
commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ if full_path.endswith( '.loc.sample' ):
+ # Handle the special case where a xxx.loc.sample file is
+ # being uploaded by copying it to ~/tool-data/xxx.loc.
+ copy_sample_loc_file( trans, full_path )
handle_email_alerts( trans, repository )
if ok:
# Update the repository files for browsing, a by-product of doing this
@@ -170,6 +179,10 @@
commands.remove( repo.ui, repo, repo_file )
for filename_in_archive in filenames_in_archive:
commands.add( repo.ui, repo, filename_in_archive )
+ if filename_in_archive.endswith( '.loc.sample' ):
+ # Handle the special case where a xxx.loc.sample file is
+ # being uploaded by copying it to ~/tool-data/xxx.loc.
+ copy_sample_loc_file( trans, filename_in_archive )
# Commit the changes.
commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
handle_email_alerts( trans, repository )
--- a/templates/webapps/community/repository/common.mako Mon Jul 18 21:56:23 2011 -0400
+++ b/templates/webapps/community/repository/common.mako Tue Jul 19 11:27:16 2011 -0400
@@ -44,7 +44,7 @@
}
// The following is used only in ~/templates/webapps/community/repository/upload.mako.
if (document.forms["upload_form"]) {
- document.upload_form.upload_point.value = selKeys[0];
+ document.upload_form.upload_point.value = selKeys.slice(-1);
}
},
onActivate: function(dtnode) {
--- a/templates/webapps/community/repository/upload.mako Mon Jul 18 21:56:23 2011 -0400
+++ b/templates/webapps/community/repository/upload.mako Tue Jul 19 11:27:16 2011 -0400
@@ -64,96 +64,96 @@
<div class="toolForm"><div class="toolFormTitle">Upload a single file or a tarball</div><div class="toolFormBody">
- ## TODO: nginx
- <form id="upload_form" name="upload_form" action="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}" enctype="multipart/form-data" method="post">
- <div class="form-row">
- <label>File:</label>
- <div class="form-row-input">
- <input type="file" name="file_data"/>
- </div>
- <div style="clear: both"></div>
- </div>
-
- <div class="form-row">
- <%
- if uncompress_file:
- yes_selected = 'selected'
- no_selected = ''
- else:
- yes_selected = ''
- no_selected = 'selected'
- %>
- <label>Uncompress files?</label>
- <div class="form-row-input">
- <select name="uncompress_file">
- <option value="true" ${yes_selected}>Yes
- <option value="false" ${no_selected}>No
- </select>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- Supported compression types are gz and bz2. If <b>Yes</b> is selected, the uploaded file will be uncompressed. However,
- if the uploaded file is an archive that contains compressed files, the contained files will not be uncompressed. For
- example, if the uploaded compressed file is some_file.tar.gz, some_file.tar will be uncompressed and extracted, but if
- some_file.tar contains some_contained_file.gz, the contained file will not be uncompressed.
- </div>
- </div>
- %if not is_new:
- <div class="form-row">
- <%
- if remove_repo_files_not_in_tar:
- yes_selected = 'selected'
- no_selected = ''
- else:
- yes_selected = ''
- no_selected = 'selected'
- %>
- <label>Remove files in the repository (relative to the root or selected upload point) that are not in the uploaded archive?</label>
- <div class="form-row-input">
- <select name="remove_repo_files_not_in_tar">
- <option value="true" ${yes_selected}>Yes
- <option value="false" ${no_selected}>No
- </select>
+ ## TODO: nginx
+ <form id="upload_form" name="upload_form" action="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}" enctype="multipart/form-data" method="post">
+ <div class="form-row">
+ <label>File:</label>
+ <div class="form-row-input">
+ <input type="file" name="file_data"/>
+ </div>
+ <div style="clear: both"></div></div>
- <div class="toolParamHelp" style="clear: both;">
- This selection pertains only to uploaded tar archives, not to single file uploads. If <b>Yes</b> is selected, files
- that exist in the repository (relative to the root or selected upload point) but that are not in the uploaded archive
- will be removed from the repository. Otherwise, all existing repository files will remain and the uploaded archive
- files will be added to the repository.
+
+ <div class="form-row">
+ <%
+ if uncompress_file:
+ yes_selected = 'selected'
+ no_selected = ''
+ else:
+ yes_selected = ''
+ no_selected = 'selected'
+ %>
+ <label>Uncompress files?</label>
+ <div class="form-row-input">
+ <select name="uncompress_file">
+ <option value="true" ${yes_selected}>Yes
+ <option value="false" ${no_selected}>No
+ </select>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Supported compression types are gz and bz2. If <b>Yes</b> is selected, the uploaded file will be uncompressed. However,
+ if the uploaded file is an archive that contains compressed files, the contained files will not be uncompressed. For
+ example, if the uploaded compressed file is some_file.tar.gz, some_file.tar will be uncompressed and extracted, but if
+ some_file.tar contains some_contained_file.gz, the contained file will not be uncompressed.
+ </div></div>
- </div>
- %endif
- <div class="form-row">
- <label>Change set commit message:</label>
- <div class="form-row-input">
- %if commit_message:
- <pre><textarea name="commit_message" rows="3" cols="35">${commit_message}</textarea></pre>
- %else:
- <textarea name="commit_message" rows="3" cols="35"></textarea>
+ %if not is_new:
+ <div class="form-row">
+ <%
+ if remove_repo_files_not_in_tar:
+ yes_selected = 'selected'
+ no_selected = ''
+ else:
+ yes_selected = ''
+ no_selected = 'selected'
+ %>
+ <label>Remove files in the repository (relative to the root or selected upload point) that are not in the uploaded archive?</label>
+ <div class="form-row-input">
+ <select name="remove_repo_files_not_in_tar">
+ <option value="true" ${yes_selected}>Yes
+ <option value="false" ${no_selected}>No
+ </select>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ This selection pertains only to uploaded tar archives, not to single file uploads. If <b>Yes</b> is selected, files
+ that exist in the repository (relative to the root or selected upload point) but that are not in the uploaded archive
+ will be removed from the repository. Otherwise, all existing repository files will remain and the uploaded archive
+ files will be added to the repository.
+ </div>
+ </div>
%endif
- </div>
- <div class="toolParamHelp" style="clear: both;">
- This is the commit message for the mercurial change set that will be created by this upload.
- </div>
- <div style="clear: both"></div>
- </div>
- %if not repository.is_new:
- <div class="form-row" >
- <label>Contents:</label>
- <div id="tree" >
- Loading...
+ <div class="form-row">
+ <label>Change set commit message:</label>
+ <div class="form-row-input">
+ %if commit_message:
+ <pre><textarea name="commit_message" rows="3" cols="35">${commit_message}</textarea></pre>
+ %else:
+ <textarea name="commit_message" rows="3" cols="35"></textarea>
+ %endif
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ This is the commit message for the mercurial change set that will be created by this upload.
+ </div>
+ <div style="clear: both"></div></div>
- <input type="hidden" id="upload_point" name="upload_point" value=""/>
- <div class="toolParamHelp" style="clear: both;">
- Select a location within the repository to upload your files by clicking a check box next to the location. The
- selected location is considered the upload point. If a location is not selected, the upload point will be the
- repository root.
+ %if not repository.is_new:
+ <div class="form-row" >
+ <label>Contents:</label>
+ <div id="tree" >
+ Loading...
+ </div>
+ <input type="hidden" id="upload_point" name="upload_point" value=""/>
+ <div class="toolParamHelp" style="clear: both;">
+ Select a location within the repository to upload your files by clicking a check box next to the location. The
+ selected location is considered the upload point. If a location is not selected, the upload point will be the
+ repository root.
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ <div class="form-row">
+ <input type="submit" class="primary-button" name="upload_button" value="Upload"></div>
- <div style="clear: both"></div>
- </div>
- %endif
- <div class="form-row">
- <input type="submit" class="primary-button" name="upload_button" value="Upload">
- </div>
- </form>
+ </form></div></div>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0