galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
August 2012
- 1 participants
- 118 discussions
commit/galaxy-central: jgoecks: Make workflows compatible with change made in 29680fa5c35e that makes tool help a template rather than static text. Bonus: images in tool help now display in workflow editor as well.
by Bitbucket 08 Aug '12
by Bitbucket 08 Aug '12
08 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4d22e26e595a/
changeset: 4d22e26e595a
user: jgoecks
date: 2012-08-08 14:39:39
summary: Make workflows compatible with change made in 29680fa5c35e that makes tool help a template rather than static text. Bonus: images in tool help now display in workflow editor as well.
affected #: 2 files
diff -r 4f17b0d16ff7b878fba9c6e8730998ee61c54c5e -r 4d22e26e595a278f96b814908b8dc24b3f77a06b lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -655,7 +655,7 @@
'name': module.get_name(),
'tool_id': module.get_tool_id(),
'tool_state': module.get_state(),
- 'tooltip': module.get_tooltip(),
+ 'tooltip': module.get_tooltip( static_path=url_for( '/static' ) ),
'data_inputs': module.get_data_inputs(),
'data_outputs': module.get_data_outputs(),
'form_html': module.get_config_form(),
@@ -732,7 +732,7 @@
'tool_id': module.get_tool_id(),
'name': module.get_name(),
'tool_state': module.get_state(),
- 'tooltip': module.get_tooltip(),
+ 'tooltip': module.get_tooltip( static_path=url_for( '/static' ) ),
'tool_errors': module.get_errors(),
'data_inputs': module.get_data_inputs(),
'data_outputs': module.get_data_outputs(),
diff -r 4f17b0d16ff7b878fba9c6e8730998ee61c54c5e -r 4d22e26e595a278f96b814908b8dc24b3f77a06b lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -49,7 +49,7 @@
return self.name
def get_tool_id( self ):
return None
- def get_tooltip( self ):
+ def get_tooltip( self, static_path='' ):
return None
## ---- Configuration time -----------------------------------------------
@@ -258,8 +258,8 @@
return self.state.encode( self.tool, self.trans.app, secure=secure )
def get_errors( self ):
return self.errors
- def get_tooltip( self ):
- return self.tool.help
+ def get_tooltip( self, static_path='' ):
+ return self.tool.help.render( static_path=static_path )
def get_data_inputs( self ):
data_inputs = []
def callback( input, value, prefixed_name, prefixed_label ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: carlfeberhard: fix to rusted display_string in TestLibraryFeatures.test_100_add_ldda_to_folder3
by Bitbucket 07 Aug '12
by Bitbucket 07 Aug '12
07 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4f17b0d16ff7/
changeset: 4f17b0d16ff7
user: carlfeberhard
date: 2012-08-07 21:57:13
summary: fix to rusted display_string in TestLibraryFeatures.test_100_add_ldda_to_folder3
affected #: 1 file
diff -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c -r 4f17b0d16ff7b878fba9c6e8730998ee61c54c5e test/functional/test_library_templates.py
--- a/test/functional/test_library_templates.py
+++ b/test/functional/test_library_templates.py
@@ -398,7 +398,7 @@
folder_id=self.security.encode_id( folder3.id ),
upload_option='import_from_history',
hda_ids=self.security.encode_id( hda.id ),
- strings_displayed=[ '<input type="hidden" name="%s" value="Option1"/>' % select_field_name ] )
+ strings_displayed=[ '<select name="%s" last_selected_value="Option1">' % select_field_name ] )
ldda = get_latest_ldda_by_name( filename )
assert ldda is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda from the database'
self.browse_library( cntrller='library_admin',
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Ensure repository metadata records are unique in the tool shed.
by Bitbucket 07 Aug '12
by Bitbucket 07 Aug '12
07 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1cb2fdf2c7cf/
changeset: 1cb2fdf2c7cf
user: greg
date: 2012-08-07 20:42:28
summary: Ensure repository metadata records are unique in the tool shed.
affected #: 1 file
diff -r ca835611e8c79a9b95c50acc28d670f7f81266e7 -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -238,10 +238,17 @@
reset_tool_data_tables( trans.app )
return can_set_metadata, invalid_files
def clean_repository_metadata( trans, id, changeset_revisions ):
- # Delete all repository_metadata reecords associated with the repository that have a changeset_revision that is not in changeset_revisions.
+ # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
+ # We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
+ # records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later..
+ changeset_revisions_checked = []
for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ):
- if repository_metadata.changeset_revision not in changeset_revisions:
+ .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
+ .order_by( trans.model.RepositoryMetadata.table.c.changeset_revision,
+ trans.model.RepositoryMetadata.table.c.update_time.desc() ):
+ changeset_revision = repository_metadata.changeset_revision
+ can_delete = changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions
+ if can_delete:
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
@@ -874,9 +881,8 @@
if 'workflows' in metadata_dict:
repository_metadata = get_latest_repository_metadata( trans, repository.id )
if repository_metadata:
- if repository_metadata.metadata:
- # The repository has metadata, so update the workflows value - no new record is needed.
- return False
+ # The repository has metadata, so update the workflows value - no new record is needed.
+ return False
else:
# There is no saved repository metadata, so we need to create a new repository_metadata table record.
return True
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes for setting metadata on repositories in the tool shed.
by Bitbucket 07 Aug '12
by Bitbucket 07 Aug '12
07 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ca835611e8c7/
changeset: ca835611e8c7
user: greg
date: 2012-08-07 19:52:50
summary: Fixes for setting metadata on repositories in the tool shed.
affected #: 11 files
diff -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 -r ca835611e8c79a9b95c50acc28d670f7f81266e7 lib/galaxy/webapps/community/app.py
--- a/lib/galaxy/webapps/community/app.py
+++ b/lib/galaxy/webapps/community/app.py
@@ -1,4 +1,5 @@
import sys, config
+from galaxy import tools
import galaxy.tools.data
import galaxy.quota
import galaxy.datatypes.registry
@@ -38,7 +39,7 @@
# Tool data tables - never pass a config file here because the tool shed should always have an empty dictionary!
self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_path )
# The tool shed has no toolbox, but this attribute is still required.
- self.toolbox = None
+ self.toolbox = tools.ToolBox( [], self.config.tool_path, self )
# Load security policy
self.security_agent = self.model.security_agent
self.quota_agent = galaxy.quota.NoQuotaAgent( self.model )
diff -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 -r ca835611e8c79a9b95c50acc28d670f7f81266e7 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -44,6 +44,7 @@
self.test_conf = resolve_path( kwargs.get( "test_conf", "" ), self.root )
self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
# Tool stuff
+ self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
self.tool_secret = kwargs.get( "tool_secret", "" )
self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "shed-tool-data" ), os.getcwd() )
self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root )
@@ -55,6 +56,7 @@
else:
self.tool_dependency_dir = None
self.use_tool_dependencies = False
+ self.update_integrated_tool_panel = False
self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
diff -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 -r ca835611e8c79a9b95c50acc28d670f7f81266e7 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -2,13 +2,14 @@
from galaxy.webapps.community import model
from galaxy.model.orm import *
from galaxy.web.framework.helpers import time_ago, iff, grids
+from galaxy.web.form_builder import SelectField
from galaxy.util import inflector
from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui
from common import *
from repository import RepositoryListGrid, CategoryListGrid
from galaxy import eggs
-eggs.require('mercurial')
+eggs.require( 'mercurial' )
from mercurial import hg
import logging
@@ -415,89 +416,6 @@
@web.expose
@web.require_admin
- def browse_repository_metadata( self, trans, **kwd ):
- if 'operation' in kwd:
- operation = kwd[ 'operation' ].lower()
- if operation == "delete":
- return self.delete_repository_metadata( trans, **kwd )
- if operation == "view_or_manage_repository_revision":
- # The received id is a RepositoryMetadata object id, so we need to get the
- # associated Repository and redirect to view_or_manage_repository with the
- # changeset_revision.
- repository_metadata = get_repository_metadata_by_id( trans, kwd[ 'id' ] )
- repository = repository_metadata.repository
- kwd[ 'id' ] = trans.security.encode_id( repository.id )
- kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
- kwd[ 'operation' ] = 'view_or_manage_repository'
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='browse_repositories',
- **kwd ) )
- return self.repository_metadata_list_grid( trans, **kwd )
- @web.expose
- @web.require_admin
- def delete_repository_metadata( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- id = kwd.get( 'id', None )
- if id:
- ids = util.listify( id )
- count = 0
- for repository_metadata_id in ids:
- repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
- trans.sa_session.delete( repository_metadata )
- trans.sa_session.flush()
- count += 1
- if count:
- message = "Deleted %d repository metadata %s" % ( count, inflector.cond_plural( len( ids ), "record" ) )
- else:
- message = "No repository metadata ids received for deleting."
- status = 'error'
- trans.response.send_redirect( web.url_for( controller='admin',
- action='browse_repository_metadata',
- message=util.sanitize_text( message ),
- status=status ) )
- @web.expose
- @web.require_admin
- def reset_metadata_on_all_repositories( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- if 'reset_metadata_on_all_repositories_button' in kwd:
- successful_count = 0
- unsuccessful_count = 0
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( trans.model.Repository.table.c.deleted == False ):
- try:
- error_message, status = reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) )
- if status not in [ 'ok' ] and error_message:
- log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, error_message ) )
- unsuccessful_count += 1
- elif status in [ 'ok' ] and error_message:
- log.debug( "Successfully reset metadata on repository %s, but encountered this problem: %s" % ( repository.name, error_message ) )
- successful_count += 1
- else:
- log.debug( "Successfully reset metadata on repository %s" % repository.name )
- successful_count += 1
- except Exception, e:
- log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) )
- unsuccessful_count += 1
- message = "Successfully reset metadata on %d %s. " % ( successful_count,
- inflector.cond_plural( successful_count, "repository" ) )
- if unsuccessful_count:
- message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count,
- inflector.cond_plural( unsuccessful_count,
- "repository" ) )
- trans.response.send_redirect( web.url_for( controller='admin',
- action='browse_repository_metadata',
- webapp='community',
- message=util.sanitize_text( message ),
- status=status ) )
- return trans.fill_template( '/webapps/community/admin/reset_metadata_on_all_repositories.mako',
- message=message,
- status=status )
- @web.expose
- @web.require_admin
def browse_repositories( self, trans, **kwd ):
# We add params to the keyword dict in this method in order to rename the param
# with an "f-" prefix, simulating filtering by clicking a search link. We have
@@ -568,101 +486,24 @@
return self.repository_list_grid( trans, **kwd )
@web.expose
@web.require_admin
- def regenerate_statistics( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- if 'regenerate_statistics_button' in kwd:
- trans.app.shed_counter.generate_statistics()
- message = "Successfully regenerated statistics"
- return trans.fill_template( '/webapps/community/admin/statistics.mako',
- message=message,
- status=status )
- @web.expose
- @web.require_admin
- def delete_repository( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- id = kwd.get( 'id', None )
- if id:
- # Deleting multiple items is currently not allowed (allow_multiple=False), so there will only be 1 id.
- ids = util.listify( id )
- count = 0
- deleted_repositories = ""
- for repository_id in ids:
- repository = get_repository( trans, repository_id )
- if not repository.deleted:
- repository.deleted = True
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- count += 1
- deleted_repositories += " %s " % repository.name
- if count:
- message = "Deleted %d %s: %s" % ( count, inflector.cond_plural( len( ids ), "repository" ), deleted_repositories )
- else:
- message = "All selected repositories were already marked deleted."
- else:
- message = "No repository ids received for deleting."
- status = 'error'
- trans.response.send_redirect( web.url_for( controller='admin',
- action='browse_repositories',
- message=util.sanitize_text( message ),
- status=status ) )
- @web.expose
- @web.require_admin
- def undelete_repository( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- id = kwd.get( 'id', None )
- if id:
- # Undeleting multiple items is currently not allowed (allow_multiple=False), so there will only be 1 id.
- ids = util.listify( id )
- count = 0
- undeleted_repositories = ""
- for repository_id in ids:
- repository = get_repository( trans, repository_id )
- if repository.deleted:
- repository.deleted = False
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- count += 1
- undeleted_repositories += " %s" % repository.name
- if count:
- message = "Undeleted %d %s: %s" % ( count, inflector.cond_plural( count, "repository" ), undeleted_repositories )
- else:
- message = "No selected repositories were marked deleted, so they could not be undeleted."
- else:
- message = "No repository ids received for undeleting."
- status = 'error'
- trans.response.send_redirect( web.url_for( controller='admin',
- action='browse_repositories',
- message=util.sanitize_text( message ),
- status='done' ) )
- @web.expose
- @web.require_admin
- def manage_categories( self, trans, **kwd ):
- if 'f-free-text-search' in kwd:
- # Trick to enable searching repository name, description from the CategoryListGrid.
- # What we've done is rendered the search box for the RepositoryListGrid on the grid.mako
- # template for the CategoryListGrid. See ~/templates/webapps/community/category/grid.mako.
- # Since we are searching repositories and not categories, redirect to browse_repositories().
- return self.browse_repositories( trans, **kwd )
+ def browse_repository_metadata( self, trans, **kwd ):
if 'operation' in kwd:
- operation = kwd['operation'].lower()
- if operation == "create":
- return self.create_category( trans, **kwd )
- elif operation == "delete":
- return self.mark_category_deleted( trans, **kwd )
- elif operation == "undelete":
- return self.undelete_category( trans, **kwd )
- elif operation == "purge":
- return self.purge_category( trans, **kwd )
- elif operation == "edit":
- return self.edit_category( trans, **kwd )
- # Render the list view
- return self.manage_category_list_grid( trans, **kwd )
+ operation = kwd[ 'operation' ].lower()
+ if operation == "delete":
+ return self.delete_repository_metadata( trans, **kwd )
+ if operation == "view_or_manage_repository_revision":
+ # The received id is a RepositoryMetadata object id, so we need to get the
+ # associated Repository and redirect to view_or_manage_repository with the
+ # changeset_revision.
+ repository_metadata = get_repository_metadata_by_id( trans, kwd[ 'id' ] )
+ repository = repository_metadata.repository
+ kwd[ 'id' ] = trans.security.encode_id( repository.id )
+ kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
+ kwd[ 'operation' ] = 'view_or_manage_repository'
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories',
+ **kwd ) )
+ return self.repository_metadata_list_grid( trans, **kwd )
@web.expose
@web.require_admin
def create_category( self, trans, **kwd ):
@@ -711,6 +552,61 @@
status=status )
@web.expose
@web.require_admin
+ def delete_repository( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = kwd.get( 'id', None )
+ if id:
+ # Deleting multiple items is currently not allowed (allow_multiple=False), so there will only be 1 id.
+ ids = util.listify( id )
+ count = 0
+ deleted_repositories = ""
+ for repository_id in ids:
+ repository = get_repository( trans, repository_id )
+ if not repository.deleted:
+ repository.deleted = True
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ count += 1
+ deleted_repositories += " %s " % repository.name
+ if count:
+ message = "Deleted %d %s: %s" % ( count, inflector.cond_plural( len( ids ), "repository" ), deleted_repositories )
+ else:
+ message = "All selected repositories were already marked deleted."
+ else:
+ message = "No repository ids received for deleting."
+ status = 'error'
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='browse_repositories',
+ message=util.sanitize_text( message ),
+ status=status ) )
+ @web.expose
+ @web.require_admin
+ def delete_repository_metadata( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = kwd.get( 'id', None )
+ if id:
+ ids = util.listify( id )
+ count = 0
+ for repository_metadata_id in ids:
+ repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
+ trans.sa_session.delete( repository_metadata )
+ trans.sa_session.flush()
+ count += 1
+ if count:
+ message = "Deleted %d repository metadata %s" % ( count, inflector.cond_plural( len( ids ), "record" ) )
+ else:
+ message = "No repository metadata ids received for deleting."
+ status = 'error'
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='browse_repository_metadata',
+ message=util.sanitize_text( message ),
+ status=status ) )
+ @web.expose
+ @web.require_admin
def edit_category( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -750,6 +646,124 @@
status=status )
@web.expose
@web.require_admin
+ def manage_categories( self, trans, **kwd ):
+ if 'f-free-text-search' in kwd:
+ # Trick to enable searching repository name, description from the CategoryListGrid.
+ # What we've done is rendered the search box for the RepositoryListGrid on the grid.mako
+ # template for the CategoryListGrid. See ~/templates/webapps/community/category/grid.mako.
+ # Since we are searching repositories and not categories, redirect to browse_repositories().
+ return self.browse_repositories( trans, **kwd )
+ if 'operation' in kwd:
+ operation = kwd['operation'].lower()
+ if operation == "create":
+ return self.create_category( trans, **kwd )
+ elif operation == "delete":
+ return self.mark_category_deleted( trans, **kwd )
+ elif operation == "undelete":
+ return self.undelete_category( trans, **kwd )
+ elif operation == "purge":
+ return self.purge_category( trans, **kwd )
+ elif operation == "edit":
+ return self.edit_category( trans, **kwd )
+ # Render the list view
+ return self.manage_category_list_grid( trans, **kwd )
+ @web.expose
+ @web.require_admin
+ def regenerate_statistics( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ if 'regenerate_statistics_button' in kwd:
+ trans.app.shed_counter.generate_statistics()
+ message = "Successfully regenerated statistics"
+ return trans.fill_template( '/webapps/community/admin/statistics.mako',
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def reset_metadata_on_selected_repositories( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ repository_names_by_owner = util.listify( kwd.get( 'repository_names_by_owner', None ) )
+ if 'reset_metadata_on_selected_repositories_button' in kwd:
+ if repository_names_by_owner:
+ successful_count = 0
+ unsuccessful_count = 0
+ for repository_name_owner_str in repository_names_by_owner:
+ repository_name_owner_list = repository_name_owner_str.split( '__ESEP__' )
+ name = repository_name_owner_list[ 0 ]
+ owner = repository_name_owner_list[ 1 ]
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ try:
+ reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) )
+ log.debug( "Successfully reset metadata on repository %s" % repository.name )
+ successful_count += 1
+ except Exception, e:
+ log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) )
+ unsuccessful_count += 1
+ message = "Successfully reset metadata on %d %s. " % ( successful_count,
+ inflector.cond_plural( successful_count, "repository" ) )
+ if unsuccessful_count:
+ message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count,
+ inflector.cond_plural( unsuccessful_count,
+ "repository" ) )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='browse_repository_metadata',
+ webapp='community',
+ message=util.sanitize_text( message ),
+ status=status ) )
+ else:
+ 'Select at least one repository to on which to reset all metadata.'
+ status = 'error'
+ repositories_select_field = SelectField( name='repository_names_by_owner',
+ multiple=True,
+ display='checkboxes' )
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.deleted == False ) \
+ .order_by( trans.model.Repository.table.c.name,
+ trans.model.Repository.table.c.user_id ):
+ owner = repository.user.username
+ option_label = '%s (%s)' % ( repository.name, owner )
+ option_value = '%s__ESEP__%s' % ( repository.name, owner )
+ repositories_select_field.add_option( option_label, option_value )
+ return trans.fill_template( '/webapps/community/admin/reset_metadata_on_selected_repositories.mako',
+ repositories_select_field=repositories_select_field,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def undelete_repository( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = kwd.get( 'id', None )
+ if id:
+ # Undeleting multiple items is currently not allowed (allow_multiple=False), so there will only be 1 id.
+ ids = util.listify( id )
+ count = 0
+ undeleted_repositories = ""
+ for repository_id in ids:
+ repository = get_repository( trans, repository_id )
+ if repository.deleted:
+ repository.deleted = False
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ count += 1
+ undeleted_repositories += " %s" % repository.name
+ if count:
+ message = "Undeleted %d %s: %s" % ( count, inflector.cond_plural( count, "repository" ), undeleted_repositories )
+ else:
+ message = "No selected repositories were marked deleted, so they could not be undeleted."
+ else:
+ message = "No repository ids received for undeleting."
+ status = 'error'
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='browse_repositories',
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
def mark_category_deleted( self, trans, **kwd ):
# TODO: We should probably eliminate the Category.deleted column since it really makes no
# sense to mark a category as deleted (category names and descriptions can be changed instead).
@@ -776,33 +790,6 @@
status='done' ) )
@web.expose
@web.require_admin
- def undelete_category( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- id = kwd.get( 'id', None )
- if id:
- ids = util.listify( id )
- count = 0
- undeleted_categories = ""
- for category_id in ids:
- category = get_category( trans, category_id )
- if category.deleted:
- category.deleted = False
- trans.sa_session.add( category )
- trans.sa_session.flush()
- count += 1
- undeleted_categories += " %s" % category.name
- message = "Undeleted %d categories: %s" % ( count, undeleted_categories )
- else:
- message = "No category ids received for undeleting."
- status = 'error'
- trans.response.send_redirect( web.url_for( controller='admin',
- action='manage_categories',
- message=util.sanitize_text( message ),
- status='done' ) )
- @web.expose
- @web.require_admin
def purge_category( self, trans, **kwd ):
# This method should only be called for a Category that has previously been deleted.
# Purging a deleted Category deletes all of the following from the database:
@@ -832,3 +819,30 @@
action='manage_categories',
message=util.sanitize_text( message ),
status='done' ) )
+ @web.expose
+ @web.require_admin
+ def undelete_category( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = kwd.get( 'id', None )
+ if id:
+ ids = util.listify( id )
+ count = 0
+ undeleted_categories = ""
+ for category_id in ids:
+ category = get_category( trans, category_id )
+ if category.deleted:
+ category.deleted = False
+ trans.sa_session.add( category )
+ trans.sa_session.flush()
+ count += 1
+ undeleted_categories += " %s" % category.name
+ message = "Undeleted %d categories: %s" % ( count, undeleted_categories )
+ else:
+ message = "No category ids received for undeleting."
+ status = 'error'
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='manage_categories',
+ message=util.sanitize_text( message ),
+ status='done' ) )
diff -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 -r ca835611e8c79a9b95c50acc28d670f7f81266e7 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -5,10 +5,11 @@
from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
-from galaxy.util.shed_util import copy_sample_file, generate_datatypes_metadata, generate_tool_dependency_metadata, generate_tool_metadata
-from galaxy.util.shed_util import generate_workflow_metadata, get_changectx_for_changeset, get_config, get_configured_ui, get_named_tmpfile_from_ctx
-from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH, make_tmp_directory, NOT_TOOL_CONFIGS, reset_tool_data_tables
-from galaxy.util.shed_util import reversed_upper_bounded_changelog, strip_path, to_html_escaped, to_html_str, update_repository
+from galaxy.util.shed_util import clone_repository, copy_sample_file, generate_datatypes_metadata, generate_tool_dependency_metadata, generate_tool_metadata
+from galaxy.util.shed_util import generate_workflow_metadata, get_changectx_for_changeset, get_config, get_config_from_disk, get_configured_ui
+from galaxy.util.shed_util import get_named_tmpfile_from_ctx, get_sample_files_from_disk, handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH
+from galaxy.util.shed_util import make_tmp_directory, NOT_TOOL_CONFIGS, reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path, to_html_escaped
+from galaxy.util.shed_util import to_html_str, update_repository
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
from galaxy.model.orm import *
@@ -187,25 +188,23 @@
if user_email in admin_users:
return True
return False
-def check_tool_input_params( trans, repo, repo_dir, ctx, xml_file_in_ctx, tool, sample_files, invalid_files, tool_data_path, dir ):
+def check_tool_input_params( trans, repo_dir, tool_config, tool, sample_files, invalid_files ):
"""
Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
- sure the files exist. This method is called only from the tool shed when generating metadata for a specified changeset revision.
+ sure the files exist.
"""
can_set_metadata = True
correction_msg = ''
- # Keep track of copied files so they can be removed after metadata generation.
- sample_files_copied = []
for input_param in tool.input_params:
- if isinstance( input_param, galaxy.tools.parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
+ if isinstance( input_param, tools.parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
# If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist.
options = input_param.dynamic_options or input_param.options
if options:
if options.tool_data_table or options.missing_tool_data_table_name:
# Make sure the repository contains a tool_data_table_conf.xml.sample file.
- sample_tool_data_table_conf = get_config( 'tool_data_table_conf.xml.sample', repo, ctx, dir )
+ sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
if sample_tool_data_table_conf:
- error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, sample_tool_data_table_conf )
+ error, correction_msg = handle_sample_tool_data_table_conf_file( trans, sample_tool_data_table_conf )
if error:
can_set_metadata = False
invalid_files.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
@@ -215,7 +214,7 @@
can_set_metadata = False
correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample "
correction_msg += "to the repository that includes the required entry to correct this error.<br/>"
- invalid_files.append( ( xml_file_in_ctx, correction_msg ) )
+ invalid_files.append( ( tool_config, correction_msg ) )
if options.index_file or options.missing_index_file:
# Make sure the repository contains the required xxx.loc.sample file.
index_file = options.index_file or options.missing_index_file
@@ -224,13 +223,6 @@
for sample_file in sample_files:
sample_file_name = strip_path( sample_file )
if sample_file_name == '%s.sample' % index_file_name:
- # If sample_file_name is on disk, copy it to dir.
- copied_sample_file = copy_file_from_disk( sample_file_name, repo_dir, dir )
- if not copied_sample_file:
- # Get sample_file_name from the repository manifest.
- copied_sample_file = copy_file_from_manifest( repo, ctx, sample_file_name, dir )
- copy_sample_file( trans.app, copied_sample_file, dest_path=tool_data_path )
- sample_files_copied.append( sample_file_name )
options.index_file = index_file_name
options.missing_index_file = None
if options.tool_data_table:
@@ -241,10 +233,10 @@
can_set_metadata = False
correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
- invalid_files.append( ( xml_file_in_ctx, correction_msg ) )
+ invalid_files.append( ( tool_config, correction_msg ) )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( trans.app )
- return sample_files_copied, can_set_metadata, invalid_files
+ return can_set_metadata, invalid_files
def clean_repository_metadata( trans, id, changeset_revisions ):
# Delete all repository_metadata reecords associated with the repository that have a changeset_revision that is not in changeset_revisions.
for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \
@@ -254,11 +246,8 @@
trans.sa_session.flush()
def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
# The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of
- # current_changeset_revision which is associated with current_metadata_dict.
- #
- # A new repository_metadata record will be created only when this method returns the string 'not equal and not subset'. However, we're
- # currently also returning the strings 'no metadata', 'equal' and 'subset', depending upon how the 2 change sets compare. We'll leave
- # things this way for the current time in case we discover a use for these additional result strings.
+ # current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only
+ # when this method returns the string 'not equal and not subset'.
ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] )
ancestor_tools = ancestor_metadata_dict.get( 'tools', [] )
ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ]
@@ -279,7 +268,7 @@
# Handle case where all metadata is the same.
if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal':
return 'equal'
- if workflow_comparison == 'subset' and datatype_comparison == 'subset':
+ if workflow_comparison in [ 'equal', 'subset' ] and datatype_comparison in [ 'equal', 'subset' ]:
is_subset = True
for guid in ancestor_guids:
if guid not in current_guids:
@@ -391,103 +380,69 @@
return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
else:
return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
-def generate_metadata_for_changeset_revision( trans, repo, id, ctx, changeset_revision, repo_dir, updating_tip=False ):
- if updating_tip:
- # If a push from the command line is occurring, update the repository files on disk before setting metadata.
- update_repository( repo, str( ctx.rev() ) )
+def generate_metadata_for_changeset_revision( trans, repository_files_dir, repository_clone_url ):
+ """
+ Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip,
+ the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
+ disk files, so the value of repository_files_dir will not always be repository.repo_path (it could be a temporary directory containing a clone).
+ """
metadata_dict = {}
invalid_files = []
invalid_tool_configs = []
- original_tool_data_path = trans.app.config.tool_data_path
- work_dir = make_tmp_directory()
- datatypes_config = get_config( 'datatypes_conf.xml', repo, ctx, work_dir )
+ tool_dependencies_config = None
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repository_files_dir )
if datatypes_config:
- metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
- sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
+ metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
+ sample_files = get_sample_files_from_disk( repository_files_dir )
if sample_files:
- trans.app.config.tool_data_path = work_dir
- all_sample_files_copied = []
- # Handle the tool_data_table_conf.xml.sample file if it is included in the repository.
- if 'tool_data_table_conf.xml.sample' in sample_files:
- tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
- error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
- for filename in ctx:
- # Find all tool configs.
- ctx_file_name = strip_path( filename )
- if ctx_file_name not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
- is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir )
- if is_tool_config and valid and tool is not None:
- sample_files_copied, can_set_metadata, invalid_files = check_tool_input_params( trans,
- repo,
- repo_dir,
- ctx,
- filename,
- tool,
- sample_files,
- invalid_files,
- original_tool_data_path,
- work_dir )
- all_sample_files_copied.extend( sample_files_copied )
- if can_set_metadata:
- # Update the list of metadata dictionaries for tools in metadata_dict.
- repository_clone_url = generate_clone_url( trans, id )
- metadata_dict = generate_tool_metadata( filename, tool, repository_clone_url, metadata_dict )
- else:
- invalid_tool_configs.append( ctx_file_name )
- elif is_tool_config:
- if not error_message:
- error_message = 'Unknown problems loading tool.'
- # We have a tool config but it is invalid or the tool does not properly load.
- invalid_files.append( ( ctx_file_name, error_message ) )
- invalid_tool_configs.append( ctx_file_name )
- # Find all exported workflows.
- elif filename.endswith( '.ga' ):
- try:
- fctx = ctx[ filename ]
- workflow_text = fctx.data()
- exported_workflow_dict = from_json_string( workflow_text )
- if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
- metadata_dict = generate_workflow_metadata( '', exported_workflow_dict, metadata_dict )
- except Exception, e:
- invalid_files.append( ( ctx_file_name, str( e ) ) )
+ metadata_dict[ 'sample_files' ] = sample_files
+ # Find all tool configs and exported workflows.
+ for root, dirs, files in os.walk( repository_files_dir ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ if '.hg' in dirs:
+ dirs.remove( '.hg' )
+ for name in files:
+ # Find all tool configs.
+ if name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
+ full_path = os.path.abspath( os.path.join( root, name ) )
+ if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
+ or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
+ try:
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( full_path )
+ element_tree_root = element_tree.getroot()
+ is_tool = element_tree_root.tag == 'tool'
+ except Exception, e:
+ print "Error parsing %s", full_path, ", exception: ", str( e )
+ is_tool = False
+ if is_tool:
+ try:
+ tool = trans.app.toolbox.load_tool( full_path )
+ tool_config = os.path.join( root, name )
+ except Exception, e:
+ tool = None
+ invalid_tool_configs.append( name )
+ if tool is not None:
+ can_set_metadata, invalid_files = check_tool_input_params( trans, repository_files_dir, tool_config, tool, sample_files, invalid_files )
+ if can_set_metadata:
+ metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
+ # Find all exported workflows
+ elif name.endswith( '.ga' ):
+ relative_path = os.path.join( root, name )
+ fp = open( relative_path, 'rb' )
+ workflow_text = fp.read()
+ fp.close()
+ exported_workflow_dict = from_json_string( workflow_text )
+ if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
+ metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
if 'tools' in metadata_dict:
- # Find tool_dependencies.xml if it exists. This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config( 'tool_dependencies.xml', repo, ctx, work_dir )
+ # This step must be done after metadata for tools has been defined.
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repository_files_dir )
if tool_dependencies_config:
metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
if invalid_tool_configs:
metadata_dict [ 'invalid_tools' ] = invalid_tool_configs
- if sample_files:
- # Don't forget to reset the value of trans.app.config.tool_data_path!
- trans.app.config.tool_data_path = original_tool_data_path
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
- # Remove all copied sample files from both the original tool data path (~/shed-tool-data) and the temporary
- # value of trans.app.config.tool_data_path, which is work_dir.
- for copied_sample_file in all_sample_files_copied:
- copied_file = copied_sample_file.replace( '.sample', '' )
- try:
- os.unlink( os.path.join( trans.app.config.tool_data_path, copied_sample_file ) )
- except:
- pass
- try:
- os.unlink( os.path.join( trans.app.config.tool_data_path, copied_file ) )
- except:
- pass
- if trans.app.config.tool_data_path == work_dir:
- try:
- os.unlink( os.path.join( original_tool_data_path, copied_sample_file ) )
- except:
- pass
- try:
- os.unlink( os.path.join( original_tool_data_path, copied_file ) )
- except:
- pass
- return metadata_dict, invalid_files, deleted_sample_files
+ return metadata_dict, invalid_files
def generate_tool_guid( trans, repository, tool ):
"""
Generate a guid for the received tool. The form of the guid is
@@ -529,10 +484,10 @@
if deleted:
return 'DELETED'
return None
-def get_latest_repository_metadata( trans, id ):
+def get_latest_repository_metadata( trans, decoded_repository_id ):
"""Get last metadata defined for a specified repository from the database"""
return trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
+ .filter( trans.model.RepositoryMetadata.table.c.repository_id == decoded_repository_id ) \
.order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \
.first()
def get_list_of_copied_sample_files( repo, ctx, dir ):
@@ -873,14 +828,13 @@
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( trans.app )
return is_tool_config, valid, tool, error_message
-def new_tool_metadata_required( trans, id, metadata_dict ):
+def new_tool_metadata_required( trans, repository, metadata_dict ):
"""
- Compare the last saved metadata for each tool in the repository with the new metadata
- in metadata_dict to determine if a new repository_metadata table record is required, or
- if the last saved metadata record can updated instead.
+ Compare the last saved metadata for each tool in the repository with the new metadata in metadata_dict to determine if a new repository_metadata
+ table record is required, or if the last saved metadata record can be updated instead.
"""
if 'tools' in metadata_dict:
- repository_metadata = get_latest_repository_metadata( trans, id )
+ repository_metadata = get_latest_repository_metadata( trans, repository.id )
if repository_metadata:
metadata = repository_metadata.metadata
if metadata and 'tools' in metadata:
@@ -908,20 +862,17 @@
# repository, so we can update the existing repository metadata.
return False
else:
- # There is no saved repository metadata, so we need to create a new repository_metadata
- # table record.
+ # There is no saved repository metadata, so we need to create a new repository_metadata table record.
return True
- # The received metadata_dict includes no metadata for tools, so a new repository_metadata table
- # record is not needed.
+ # The received metadata_dict includes no metadata for tools, so a new repository_metadata table record is not needed.
return False
-def new_workflow_metadata_required( trans, id, metadata_dict ):
+def new_workflow_metadata_required( trans, repository, metadata_dict ):
"""
- Currently everything about an exported workflow except the name is hard-coded, so there's
- no real way to differentiate versions of exported workflows. If this changes at some future
- time, this method should be enhanced accordingly.
+ Currently everything about an exported workflow except the name is hard-coded, so there's no real way to differentiate versions of
+ exported workflows. If this changes at some future time, this method should be enhanced accordingly.
"""
if 'workflows' in metadata_dict:
- repository_metadata = get_latest_repository_metadata( trans, id )
+ repository_metadata = get_latest_repository_metadata( trans, repository.id )
if repository_metadata:
if repository_metadata.metadata:
# The repository has metadata, so update the workflows value - no new record is needed.
@@ -939,167 +890,128 @@
log.debug( "Resetting all metadata on repository: %s" % repository.name )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
- missing_sample_files = []
- if len( repo ) == 1:
- error_message, status = set_repository_metadata( trans, id, repository.tip, **kwd )
- if error_message:
- return error_message, status
- else:
- add_repository_metadata_tool_versions( trans, id, [ repository.tip ] )
- else:
- # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop
- # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list.
- changeset_revisions = []
- # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict.
- metadata_changeset_revision = None
- metadata_dict = None
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- for changeset in repo.changelog:
- current_changeset_revision = str( repo.changectx( changeset ) )
- ctx = get_changectx_for_changeset( repo, current_changeset_revision )
- current_metadata_dict, invalid_files, deleted_sample_files = generate_metadata_for_changeset_revision( trans,
- repo,
- id,
- ctx,
- current_changeset_revision,
- repo_dir,
- updating_tip=current_changeset_revision==repository.tip )
- for deleted_sample_file in deleted_sample_files:
- if deleted_sample_file not in missing_sample_files:
- missing_sample_files.append( deleted_sample_file )
- if current_metadata_dict:
- if not metadata_changeset_revision and not metadata_dict:
- # We're at the first change set in the change log.
- metadata_changeset_revision = current_changeset_revision
- metadata_dict = current_metadata_dict
- if ancestor_changeset_revision:
- # Compare metadata from ancestor and current. The value of comparsion will be one of:
- # 'no metadata' - no metadata for either ancestor or current, so continue from current
- # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
- # 'subset' - ancestor metadata is a subset of current metadata, so continue from current
- # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
- comparison = compare_changeset_revisions( ancestor_changeset_revision,
- ancestor_metadata_dict,
- current_changeset_revision,
- current_metadata_dict )
- if comparison in [ 'no metadata', 'equal', 'subset' ]:
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
- elif comparison == 'not equal and not subset':
- metadata_changeset_revision = ancestor_changeset_revision
- metadata_dict = ancestor_metadata_dict
- create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
- changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
- else:
- # We're at the beginning of the change log.
+ repository_clone_url = generate_clone_url( trans, id )
+ # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop
+ # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list.
+ changeset_revisions = []
+ # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict.
+ metadata_changeset_revision = None
+ metadata_dict = None
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ home_dir = os.getcwd()
+ for changeset in repo.changelog:
+ work_dir = tempfile.mkdtemp()
+ current_changeset_revision = str( repo.changectx( changeset ) )
+ ctx = repo.changectx( changeset )
+ print "Cloning repository revision: ", str( ctx.rev() )
+ clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
+ print "Generating metadata for changset revision: ", str( ctx.rev() )
+ current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, work_dir, repository_clone_url )
+ if current_metadata_dict:
+ if not metadata_changeset_revision and not metadata_dict:
+ # We're at the first change set in the change log.
+ metadata_changeset_revision = current_changeset_revision
+ metadata_dict = current_metadata_dict
+ if ancestor_changeset_revision:
+ # Compare metadata from ancestor and current. The value of comparison will be one of:
+ # 'no metadata' - no metadata for either ancestor or current, so continue from current
+ # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
+ # 'subset' - ancestor metadata is a subset of current metadata, so continue from current
+ # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
+ comparison = compare_changeset_revisions( ancestor_changeset_revision,
+ ancestor_metadata_dict,
+ current_changeset_revision,
+ current_metadata_dict )
+ if comparison in [ 'no metadata', 'equal', 'subset' ]:
ancestor_changeset_revision = current_changeset_revision
ancestor_metadata_dict = current_metadata_dict
- if not ctx.children():
- metadata_changeset_revision = current_changeset_revision
- metadata_dict = current_metadata_dict
- # We're at the end of the change log.
+ elif comparison == 'not equal and not subset':
+ metadata_changeset_revision = ancestor_changeset_revision
+ metadata_dict = ancestor_metadata_dict
create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- elif ancestor_metadata_dict:
- # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ else:
+ # We're at the beginning of the change log.
ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ if not ctx.children():
metadata_changeset_revision = current_changeset_revision
- metadata_dict = ancestor_metadata_dict
- if not ctx.children():
- # We're at the end of the change log.
- create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
- changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
- clean_repository_metadata( trans, id, changeset_revisions )
- add_repository_metadata_tool_versions( trans, id, changeset_revisions )
- if missing_sample_files:
- message += "Metadata was successfully reset, but the following required sample files have been deleted from the repository so the version "
- message += "of each file just prior to its deletion is being used. These files should be re-added to the repository as soon as possible: "
- message += "<b>%s</b><br/>" % ', '.join( missing_sample_files )
- return message, 'ok'
- return '', 'ok'
-def set_repository_metadata( trans, id, changeset_revision, content_alert_str='', **kwd ):
+ metadata_dict = current_metadata_dict
+ # We're at the end of the change log.
+ create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ elif ancestor_metadata_dict:
+ # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
+ ancestor_changeset_revision = current_changeset_revision
+ metadata_changeset_revision = current_changeset_revision
+ metadata_dict = ancestor_metadata_dict
+ if not ctx.children():
+ # We're at the end of the change log.
+ create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ if os.path.exists( work_dir ):
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
+ clean_repository_metadata( trans, id, changeset_revisions )
+ add_repository_metadata_tool_versions( trans, id, changeset_revisions )
+def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
"""
- Set repository metadata on the repository tip, returning specific error messages (if any) to alert the repository owner that the changeset
+ Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
has problems.
"""
message = ''
status = 'done'
- repository = get_repository( trans, id )
+ repository_clone_url = generate_clone_url( trans, trans.security.encode_id( repository.id ) )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- metadata_dict = {}
- invalid_files = []
- updating_tip = changeset_revision == repository.tip
- if ctx is not None:
- metadata_dict, invalid_files, deleted_sample_files = generate_metadata_for_changeset_revision( trans,
- repo,
- id,
- ctx,
- changeset_revision,
- repo_dir,
- updating_tip=updating_tip )
- if metadata_dict:
- if updating_tip:
- if new_tool_metadata_required( trans, id, metadata_dict ) or new_workflow_metadata_required( trans, id, metadata_dict ):
- # Create a new repository_metadata table row.
- repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict )
- trans.sa_session.add( repository_metadata )
- try:
- trans.sa_session.flush()
- # If this is the first record stored for this repository, see if we need to send any email alerts.
- if len( repository.downloadable_revisions ) == 1:
- handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
- except TypeError, e:
- message = "Unable to save metadata for this repository probably due to a tool config file that doesn't conform to the Cheetah template syntax."
- status = 'error'
- else:
- repository_metadata = get_latest_repository_metadata( trans, id )
- if repository_metadata:
- # Update the last saved repository_metadata table row.
- repository_metadata.changeset_revision = changeset_revision
- repository_metadata.metadata = metadata_dict
- repository_metadata.downloadable = changeset_is_downloadable( metadata_dict )
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- else:
- # There are no tools in the repository, and we're setting metadata on the repository tip.
- repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict )
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- else:
- # We're re-generating metadata for an old repository revision.
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, repo_dir, repository_clone_url )
+ if metadata_dict:
+ if new_tool_metadata_required( trans, repository, metadata_dict ) or new_workflow_metadata_required( trans, repository, metadata_dict ):
+ # Create a new repository_metadata table row.
+ repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict )
+ trans.sa_session.add( repository_metadata )
+ try:
+ trans.sa_session.flush()
+ # If this is the first record stored for this repository, see if we need to send any email alerts.
+ if len( repository.downloadable_revisions ) == 1:
+ handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
+ except TypeError, e:
+ message = "Unable to save metadata for this repository, exception: %s" % str( e )
+ status = 'error'
+ else:
+ repository_metadata = get_latest_repository_metadata( trans, repository.id )
+ if repository_metadata:
+ # Update the last saved repository_metadata table row.
+ repository_metadata.changeset_revision = repository.tip
repository_metadata.metadata = metadata_dict
repository_metadata.downloadable = changeset_is_downloadable( metadata_dict )
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
- elif updating_tip and len( repo ) == 1 and not invalid_files:
- message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( changeset_revision )
- message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
- status = "error"
- else:
- # Here ctx is None.
- message = "This repository does not include revision '%s'." % str( changeset_revision )
- status = 'error'
- if deleted_sample_files:
- message += "Metadata was successfully reset, but the following required sample files have been deleted from the repository so the version "
- message += "of each file just prior to its deletion is being used. These files should be re-added to the repository as soon as possible: "
- message += "<b>%s</b><br/>" % ', '.join( deleted_sample_files )
+ else:
+ # There are no tools in the repository, and we're setting metadata on the repository tip.
+ repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict )
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ elif len( repo ) == 1 and not invalid_files:
+ message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip )
+ message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
+ status = "error"
if invalid_files:
if metadata_dict:
- message = "Metadata was defined for some items in revision '%s'. " % str( changeset_revision )
+ message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip )
message += "Correct the following problems if necessary and reset metadata.<br/>"
else:
- message = "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( changeset_revision )
+ message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip )
message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.<br/>"
for itc_tup in invalid_files:
tool_file, exception_msg = itc_tup
@@ -1118,18 +1030,14 @@
message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
status = 'error'
return message, status
-def set_repository_metadata_due_to_new_tip( trans, id, repository, content_alert_str=None, **kwd ):
+def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
# Set metadata on the repository tip.
- error_message, status = set_repository_metadata( trans, id, repository.tip, content_alert_str=content_alert_str, **kwd )
- if not error_message:
- # If no error occurred in setting metadata on the repository tip, reset metadata on all changeset revisions for the repository.
- # This will result in a more standardized set of valid repository revisions that can be installed.
- error_message, status = reset_all_metadata_on_repository( trans, id, **kwd )
+ error_message, status = set_repository_metadata( trans, repository, content_alert_str=content_alert_str, **kwd )
if error_message:
# If there is an error, display it.
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
- id=id,
+ id=trans.security.encode_id( repository.id ),
message=error_message,
status='error' ) )
def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
diff -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 -r ca835611e8c79a9b95c50acc28d670f7f81266e7 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1735,13 +1735,9 @@
status=status )
@web.expose
def reset_all_metadata( self, trans, id, **kwd ):
- error_message, status = reset_all_metadata_on_repository( trans, id, **kwd )
- if error_message:
- message = error_message
- status = 'error'
- else:
- message = "All repository metadata has been reset."
- status = 'done'
+ reset_all_metadata_on_repository( trans, id, **kwd )
+ message = "All repository metadata has been reset."
+ status = 'done'
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
id=id,
@@ -1890,7 +1886,7 @@
else:
message += 'The selected files were deleted from the repository. '
kwd[ 'message' ] = message
- set_repository_metadata_due_to_new_tip( trans, id, repository, **kwd )
+ set_repository_metadata_due_to_new_tip( trans, repository, **kwd )
else:
message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>."
status = "error"
@@ -1981,8 +1977,8 @@
action=caller,
**kwd ) )
@web.expose
- @web.require_login( "set repository metadata" )
- def set_metadata( self, trans, id, ctx_str, **kwd ):
+ @web.require_login( "set repository as malicious" )
+ def set_malicious( self, trans, id, ctx_str, **kwd ):
malicious = kwd.get( 'malicious', '' )
if kwd.get( 'malicious_button', False ):
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, ctx_str )
@@ -1995,11 +1991,6 @@
else:
message = "The repository tip has been defined as <b>not</b> malicious."
status = 'done'
- else:
- # The set_metadata_button was clicked
- message, status = set_repository_metadata( trans, id, ctx_str, **kwd )
- if not message:
- message = "Metadata for change set revision '%s' has been reset." % str( ctx_str )
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
id=id,
diff -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 -r ca835611e8c79a9b95c50acc28d670f7f81266e7 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -3,7 +3,7 @@
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
from common import *
-from galaxy.util.shed_util import get_configured_ui, reset_tool_data_tables, handle_sample_tool_data_table_conf_file
+from galaxy.util.shed_util import get_configured_ui, reset_tool_data_tables, handle_sample_tool_data_table_conf_file, update_repository
from galaxy import eggs
eggs.require('mercurial')
@@ -154,7 +154,7 @@
else:
message += " %d files were removed from the repository root. " % len( files_to_remove )
kwd[ 'message' ] = message
- set_repository_metadata_due_to_new_tip( trans, repository_id, repository, content_alert_str=content_alert_str, **kwd )
+ set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( trans.app )
trans.response.send_redirect( web.url_for( controller='repository',
diff -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 -r ca835611e8c79a9b95c50acc28d670f7f81266e7 templates/webapps/community/admin/index.mako
--- a/templates/webapps/community/admin/index.mako
+++ b/templates/webapps/community/admin/index.mako
@@ -55,7 +55,7 @@
<a target="galaxy_main" href="${h.url_for( controller='admin', action='browse_repositories', webapp='community' )}">Browse all repositories</a></div><div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='admin', action='reset_metadata_on_all_repositories', webapp='community' )}">Reset all metadata</a>
+ <a target="galaxy_main" href="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories', webapp='community' )}">Reset selected metadata</a></div><div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='admin', action='browse_repository_metadata', webapp='community' )}">Browse metadata</a>
diff -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 -r ca835611e8c79a9b95c50acc28d670f7f81266e7 templates/webapps/community/admin/reset_metadata_on_all_repositories.mako
--- a/templates/webapps/community/admin/reset_metadata_on_all_repositories.mako
+++ /dev/null
@@ -1,19 +0,0 @@
-<%inherit file="/base.mako"/>
-<%namespace file="/message.mako" import="render_msg" />
-
-%if message:
- ${render_msg( message, status )}
-%endif
-
-<div class="toolForm">
- <div class="toolFormTitle">Reset metadata on each change set of the repositories in this tool shed</div>
- <form name="reset_metadata_on_all_repositories" id="reset_metadata_on_all_repositories" action="${h.url_for( controller='admin', action='reset_metadata_on_all_repositories' )}" method="post" >
- <div class="form-row">
- Click the button below to reset metadata on each change set of the repositories in this tool shed.
- </div>
- <div class="form-row">
- <input type="submit" name="reset_metadata_on_all_repositories_button" value="Reset metadata on all repositories"/>
- </div>
- </form>
- </div>
-</div>
diff -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 -r ca835611e8c79a9b95c50acc28d670f7f81266e7 templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako
--- /dev/null
+++ b/templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako
@@ -0,0 +1,69 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="local_javascripts()">
+ <script type="text/javascript">
+ function checkAllFields()
+ {
+ var chkAll = document.getElementById('checkAll');
+ var checks = document.getElementsByTagName('input');
+ var boxLength = checks.length;
+ var allChecked = false;
+ var totalChecked = 0;
+ if ( chkAll.checked == true )
+ {
+ for ( i=0; i < boxLength; i++ )
+ {
+ if ( checks[i].name.indexOf( 'repository_names_by_owner' ) != -1)
+ {
+ checks[i].checked = true;
+ }
+ }
+ }
+ else
+ {
+ for ( i=0; i < boxLength; i++ )
+ {
+ if ( checks[i].name.indexOf( 'repository_names_by_owner' ) != -1)
+ {
+ checks[i].checked = false
+ }
+ }
+ }
+ }
+ </script>
+</%def>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${local_javascripts()}
+</%def>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">Reset all metadata on each selected repository</div>
+ <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories' )}" method="post" >
+ <div class="form-row">
+ Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses. Resetting metadata
+ may take a while because this process clones each change set in each selected repository's change log to a temporary location on disk.
+ Wait until this page redirects after clicking the <b>Reset metadata on selected repositories</b> button, as doing anything else will not
+ be helpful. Watch the tool shed paster log to pass the time if necessary.
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <input type="checkbox" id="checkAll" name=select_all_repositories_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_repositories_checkbox value="true"/><b>Select/unselect all repositories</b>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ ${repositories_select_field.get_html()}
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <input type="submit" name="reset_metadata_on_selected_repositories_button" value="Reset metadata on selected repositories"/>
+ </div>
+ </form>
+ </div>
+</div>
diff -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 -r ca835611e8c79a9b95c50acc28d670f7f81266e7 templates/webapps/community/repository/manage_repository.mako
--- a/templates/webapps/community/repository/manage_repository.mako
+++ b/templates/webapps/community/repository/manage_repository.mako
@@ -340,7 +340,7 @@
<div class="toolForm"><div class="toolFormTitle">Malicious repository tip</div><div class="toolFormBody">
- <form name="malicious" id="malicious" action="${h.url_for( controller='repository', action='set_metadata', id=trans.security.encode_id( repository.id ), ctx_str=changeset_revision )}" method="post">
+ <form name="malicious" id="malicious" action="${h.url_for( controller='repository', action='set_malicious', id=trans.security.encode_id( repository.id ), ctx_str=changeset_revision )}" method="post"><div class="form-row"><label>Define repository tip as malicious:</label>
${malicious_check_box.get_html()}
diff -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 -r ca835611e8c79a9b95c50acc28d670f7f81266e7 templates/webapps/community/repository/view_changelog.mako
--- a/templates/webapps/community/repository/view_changelog.mako
+++ b/templates/webapps/community/repository/view_changelog.mako
@@ -107,7 +107,7 @@
else:
ctx_parent_str = "%s:%s" % ( ctx_parent_rev, ctx_parent )
if changeset[ 'has_metadata' ]:
- has_metadata_str = '<table border="0" bgcolor="#D8D8D8"><tr><td>Repository metadata is associated with this change set.</td></tr></table>'
+ has_metadata_str = '<table border="0"><tr><td bgcolor="#D8D8D8">Repository metadata is associated with this change set.</td></tr></table>'
else:
has_metadata_str = ''
%>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
07 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f4e633e6ab4e/
changeset: f4e633e6ab4e
user: greg
date: 2012-08-07 19:43:36
summary: Fix mercurial command line handling for pushes from the command line that were broken in change set d19fbfefb676 for versions of mercurial older than 2.2.3. Pushes should now function correctly for those running mercurial older than version 2.2.3.
affected #: 2 files
diff -r 3f17b65c590758f03c517cc564d59efcc5f3998c -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 lib/galaxy/webapps/community/controllers/hg.py
--- a/lib/galaxy/webapps/community/controllers/hg.py
+++ b/lib/galaxy/webapps/community/controllers/hg.py
@@ -1,9 +1,12 @@
import os, logging
from galaxy.web.base.controller import *
from galaxy.webapps.community.controllers.common import *
+from galaxy.util.shed_util import update_repository
from galaxy import eggs
eggs.require('mercurial')
+import mercurial.__version__
+from mercurial import hg, ui, commands
from mercurial.hgweb.hgwebdir_mod import hgwebdir
from mercurial.hgweb.request import wsgiapplication
@@ -14,17 +17,32 @@
def handle_request( self, trans, **kwd ):
# The os command that results in this method being called will look something like
# hg clone http://test@127.0.0.1:9009/repos/test/convert_characters1
+ hg_version = mercurial.__version__.version
cmd = kwd.get( 'cmd', None )
wsgi_app = wsgiapplication( make_web_app )
- if cmd == 'pushkey':
- # This results from an "hg push" from the command line. When doing this, the following 6 commands, in order,
- # will be retrieved from environ: capabilities -> batch -> branchmap -> unbundle -> listkeys -> pushkey
+ # In mercurial version 2.2.3, section 15.2. Command changes includes a new feature: pushkey: add hooks for pushkey/listkeys (see
+ # http://mercurial.selenic.com/wiki/WhatsNew#Mercurial_2.2.3_.282012-07-01.29) Older versions require checking for 'listkeys'.
+ push_from_command_line = ( hg_version < '2.2.3' and cmd == 'listkeys' ) or ( hg_version >= '2.2.3' and cmd == 'pushkey' )
+ if push_from_command_line:
+ # When doing an "hg push" from the command line, the following commands, in order, will be retrieved from environ, depending
+ # upon the mercurial version being used. There is a weakness if the mercurial version < '2.2.3' because several commands include
+ # listkeys, so repository metadata will be set, but only for the files currently on disk, so doing so is not too expensive.
+ # If mercurial version < '2.2.3:
+ # capabilities -> batch -> branchmap -> unbundle -> listkeys
+ # If mercurial version >= '2.2.3':
+ # capabilities -> batch -> branchmap -> unbundle -> listkeys -> pushkey
path_info = kwd.get( 'path_info', None )
if path_info:
owner, name = path_info.split( '/' )
repository = get_repository_by_name_and_owner( trans, name, owner )
if repository:
- error_message, status = reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) )
+ if hg_version < '2.2.3':
+ # We're forced to update the repository so the disk files include the changes in the push. This is handled in the
+ # pushkey hook in mercurial version 2.2.3 and newer.
+ repo = hg.repository( ui.ui(), repository.repo_path )
+ update_repository( repo )
+ # Set metadata using the repository files on disk.
+ error_message, status = set_repository_metadata( trans, repository )
if status not in [ 'ok' ] and error_message:
log.debug( "Error resetting metadata on repository '%s': %s" % ( str( repository.name ), str( error_message ) ) )
elif status in [ 'ok' ] and error_message:
diff -r 3f17b65c590758f03c517cc564d59efcc5f3998c -r f4e633e6ab4e3f319f02a33fd21cbccfda2a80c1 lib/galaxy/webapps/community/framework/middleware/hg.py
--- a/lib/galaxy/webapps/community/framework/middleware/hg.py
+++ b/lib/galaxy/webapps/community/framework/middleware/hg.py
@@ -8,11 +8,13 @@
from galaxy.webapps.community import model
from galaxy.util.hash_util import new_secure_hash
+import mercurial.__version__
log = logging.getLogger(__name__)
class Hg( object ):
def __init__( self, app, config ):
+ print "mercurial version is:", mercurial.__version__.version
self.app = app
self.config = config
# Authenticate this mercurial request using basic authentication
@@ -56,7 +58,7 @@
connection.close()
if cmd == 'unbundle':
# This is an hg push from the command line. When doing this, the following 7 commands, in order,
- # will be retrieved from environ:
+ # will be retrieved from environ (see the docs at http://mercurial.selenic.com/wiki/WireProtocol)
# between -> capabilities -> heads -> branchmap -> unbundle -> unbundle -> listkeys
#
# The mercurial API unbundle() ( i.e., hg push ) method ultimately requires authorization.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: smcmanus: Removed offending code from last checkin - sorry for the foul-up
by Bitbucket 07 Aug '12
by Bitbucket 07 Aug '12
07 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3f17b65c5907/
changeset: 3f17b65c5907
user: smcmanus
date: 2012-08-07 19:35:39
summary: Removed offending code from last checkin - sorry for the foul-up
affected #: 2 files
diff -r fc9b4ae154efd2d8fb02c3637835a82767898be4 -r 3f17b65c590758f03c517cc564d59efcc5f3998c tools/filters/changeCase.pl
--- a/tools/filters/changeCase.pl
+++ b/tools/filters/changeCase.pl
@@ -56,10 +56,3 @@
close IN;
close OUT;
-
-sleep(10);
-if ( 0 == floor(4 * rand()) % 4 ) {
- print "Exiting randomly - no actual error\n";
- exit 2;
-}
-sleep(50);
diff -r fc9b4ae154efd2d8fb02c3637835a82767898be4 -r 3f17b65c590758f03c517cc564d59efcc5f3998c tools/filters/changeCase.xml
--- a/tools/filters/changeCase.xml
+++ b/tools/filters/changeCase.xml
@@ -1,10 +1,5 @@
<tool id="ChangeCase" name="Change Case"><description> of selected columns</description>
- <parallelism method="multi"
- split_inputs="input"
- split_mode="number_of_parts"
- split_size="8"
- merge_outputs="out_file1" /><stdio><exit_code range="1:" err_level="fatal" /></stdio>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Update track icons when decomposing a composite track into individual tracks.
by Bitbucket 07 Aug '12
by Bitbucket 07 Aug '12
07 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fc9b4ae154ef/
changeset: fc9b4ae154ef
user: jgoecks
date: 2012-08-07 15:52:14
summary: Update track icons when decomposing a composite track into individual tracks.
affected #: 1 file
diff -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc -r fc9b4ae154efd2d8fb02c3637835a82767898be4 static/scripts/viz/trackster.js
--- a/static/scripts/viz/trackster.js
+++ b/static/scripts/viz/trackster.js
@@ -4067,6 +4067,7 @@
track;
for (var i = 0; i < this.drawables.length; i++) {
track = this.drawables[i];
+ track.update_icons();
group.add_drawable(track);
track.container = group;
group.content_div.append(track.container_div);
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Use mako template in tool help so that dynamic image paths can be used. Fixes #141
by Bitbucket 07 Aug '12
by Bitbucket 07 Aug '12
07 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/29680fa5c35e/
changeset: 29680fa5c35e
user: jgoecks
date: 2012-08-07 15:40:51
summary: Use mako template in tool help so that dynamic image paths can be used. Fixes #141
affected #: 58 files
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -8,6 +8,7 @@
import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess, random, math, traceback
import simplejson
import binascii
+from mako.template import Template
from UserDict import DictMixin
from galaxy.util.odict import odict
from galaxy.util.bunch import Bunch
@@ -1068,7 +1069,8 @@
break
def parse_help( self, root ):
"""
- Parse the help text for the tool. Formatted in reStructuredText.
+ Parse the help text for the tool. Formatted in reStructuredText, but
+ stored as Mako to allow for dynamic image paths.
This implementation supports multiple pages.
"""
# TODO: Allow raw HTML or an external link.
@@ -1080,7 +1082,7 @@
help_pages = self.help.findall( "page" )
help_header = self.help.text
try:
- self.help = util.rst_to_html(self.help.text)
+ self.help = Template( util.rst_to_html(self.help.text) )
except:
log.exception( "error in help for tool %s" % self.name )
# Multiple help page case
@@ -1090,7 +1092,7 @@
help_footer = help_footer + help_page.tail
# Each page has to rendered all-together because of backreferences allowed by rst
try:
- self.help_by_page = [ util.rst_to_html( help_header + x + help_footer )
+ self.help_by_page = [ Template( util.rst_to_html( help_header + x + help_footer ) )
for x in self.help_by_page ]
except:
log.exception( "error in multi-page help for tool %s" % self.name )
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -332,6 +332,9 @@
else:
tool_help = tool.help
+ # Help is Mako template, so render using current static path.
+ tool_help = tool_help.render( static_path=h.url_for( '/static' ) )
+
# Convert to unicode to display non-ascii characters.
if type( tool_help ) is not unicode:
tool_help = unicode( tool_help, 'utf-8')
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/discreteWavelet/execute_dwt_IvC_all.xml
--- a/tools/discreteWavelet/execute_dwt_IvC_all.xml
+++ b/tools/discreteWavelet/execute_dwt_IvC_all.xml
@@ -101,11 +101,11 @@
The second output file:
-.. image:: ./static/operation_icons/dwt_IvC_1.png
-.. image:: ./static/operation_icons/dwt_IvC_2.png
-.. image:: ./static/operation_icons/dwt_IvC_3.png
-.. image:: ./static/operation_icons/dwt_IvC_4.png
-.. image:: ./static/operation_icons/dwt_IvC_5.png
+.. image:: ${static_path}/operation_icons/dwt_IvC_1.png
+.. image:: ${static_path}/operation_icons/dwt_IvC_2.png
+.. image:: ${static_path}/operation_icons/dwt_IvC_3.png
+.. image:: ${static_path}/operation_icons/dwt_IvC_4.png
+.. image:: ${static_path}/operation_icons/dwt_IvC_5.png
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/discreteWavelet/execute_dwt_cor_aVa_perClass.xml
--- a/tools/discreteWavelet/execute_dwt_cor_aVa_perClass.xml
+++ b/tools/discreteWavelet/execute_dwt_cor_aVa_perClass.xml
@@ -101,11 +101,11 @@
The second output file:
-.. image:: ./static/operation_icons/dwt_cor_aVa_1.png
-.. image:: ./static/operation_icons/dwt_cor_aVa_2.png
-.. image:: ./static/operation_icons/dwt_cor_aVa_3.png
-.. image:: ./static/operation_icons/dwt_cor_aVa_4.png
-.. image:: ./static/operation_icons/dwt_cor_aVa_5.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVa_1.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVa_2.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVa_3.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVa_4.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVa_5.png
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/discreteWavelet/execute_dwt_cor_aVb_all.xml
--- a/tools/discreteWavelet/execute_dwt_cor_aVb_all.xml
+++ b/tools/discreteWavelet/execute_dwt_cor_aVb_all.xml
@@ -106,16 +106,16 @@
The second output file:
-.. image:: ./static/operation_icons/dwt_cor_aVb_all_1.png
-.. image:: ./static/operation_icons/dwt_cor_aVb_all_2.png
-.. image:: ./static/operation_icons/dwt_cor_aVb_all_3.png
-.. image:: ./static/operation_icons/dwt_cor_aVb_all_4.png
-.. image:: ./static/operation_icons/dwt_cor_aVb_all_5.png
-.. image:: ./static/operation_icons/dwt_cor_aVb_all_6.png
-.. image:: ./static/operation_icons/dwt_cor_aVb_all_7.png
-.. image:: ./static/operation_icons/dwt_cor_aVb_all_8.png
-.. image:: ./static/operation_icons/dwt_cor_aVb_all_9.png
-.. image:: ./static/operation_icons/dwt_cor_aVb_all_10.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVb_all_1.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVb_all_2.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVb_all_3.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVb_all_4.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVb_all_5.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVb_all_6.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVb_all_7.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVb_all_8.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVb_all_9.png
+.. image:: ${static_path}/operation_icons/dwt_cor_aVb_all_10.png
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/discreteWavelet/execute_dwt_var_perClass.xml
--- a/tools/discreteWavelet/execute_dwt_var_perClass.xml
+++ b/tools/discreteWavelet/execute_dwt_var_perClass.xml
@@ -98,7 +98,7 @@
The third output file:
-.. image:: ./static/operation_icons/dwt_var_perClass.png
+.. image:: ${static_path}/operation_icons/dwt_var_perClass.png
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/evolution/add_scores.xml
--- a/tools/evolution/add_scores.xml
+++ b/tools/evolution/add_scores.xml
@@ -43,8 +43,8 @@
The input can be any interval_ format dataset. The output is also in interval format.
(`Dataset missing?`_)
-.. _interval: ./static/formatHelp.html#interval
-.. _Dataset missing?: ./static/formatHelp.html
+.. _interval: ${static_path}/formatHelp.html#interval
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/evolution/codingSnps.xml
--- a/tools/evolution/codingSnps.xml
+++ b/tools/evolution/codingSnps.xml
@@ -94,9 +94,9 @@
The gene dataset is in BED_ format with 12 columns. The output dataset is also interval.
(`Dataset missing?`_)
-.. _interval: ./static/formatHelp.html#interval
-.. _BED: ./static/formatHelp.html#bed
-.. _Dataset missing?: ./static/formatHelp.html
+.. _interval: ${static_path}/formatHelp.html#interval
+.. _BED: ${static_path}/formatHelp.html#bed
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/fastx_toolkit/fasta_clipping_histogram.xml
--- a/tools/fastx_toolkit/fasta_clipping_histogram.xml
+++ b/tools/fastx_toolkit/fasta_clipping_histogram.xml
@@ -1,6 +1,6 @@
<tool id="cshl_fasta_clipping_histogram" name="Length Distribution"><description>chart</description>
- <requirements><requirement type="package">fastx_toolkit</requirement></requirements>
+ <requirements><requirement type="package">fastx_toolkit</requirement></requirements><command>fasta_clipping_histogram.pl $input $outfile</command><inputs>
@@ -25,13 +25,13 @@
In the following library, most sequences are 24-mers to 27-mers.
This could indicate an abundance of endo-siRNAs (depending of course of what you've tried to sequence in the first place).
-.. image:: ./static/fastx_icons/fasta_clipping_histogram_1.png
+.. image:: ${static_path}/fastx_icons/fasta_clipping_histogram_1.png
In the following library, most sequences are 19,22 or 23-mers.
This could indicate an abundance of miRNAs (depending of course of what you've tried to sequence in the first place).
-.. image:: ./static/fastx_icons/fasta_clipping_histogram_2.png
+.. image:: ${static_path}/fastx_icons/fasta_clipping_histogram_2.png
-----
@@ -81,7 +81,7 @@
Each sequence is counts as one, to produce the following chart:
-.. image:: ./static/fastx_icons/fasta_clipping_histogram_3.png
+.. image:: ${static_path}/fastx_icons/fasta_clipping_histogram_3.png
Example 2 - The following FASTA file have multiplicity counts::
@@ -95,7 +95,7 @@
The first sequence counts as 2, the second as 10, the third as 3, to produce the following chart:
-.. image:: ./static/fastx_icons/fasta_clipping_histogram_4.png
+.. image:: ${static_path}/fastx_icons/fasta_clipping_histogram_4.png
Use the **FASTA Collapser** tool to create FASTA files with multiplicity counts.
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/fastx_toolkit/fastq_quality_boxplot.xml
--- a/tools/fastx_toolkit/fastq_quality_boxplot.xml
+++ b/tools/fastx_toolkit/fastq_quality_boxplot.xml
@@ -32,16 +32,16 @@
An excellent quality library (median quality is 40 for almost all 36 cycles):
-.. image:: ./static/fastx_icons/fastq_quality_boxplot_1.png
+.. image:: ${static_path}/fastx_icons/fastq_quality_boxplot_1.png
A relatively good quality library (median quality degrades towards later cycles):
-.. image:: ./static/fastx_icons/fastq_quality_boxplot_2.png
+.. image:: ${static_path}/fastx_icons/fastq_quality_boxplot_2.png
A low quality library (median drops quickly):
-.. image:: ./static/fastx_icons/fastq_quality_boxplot_3.png
+.. image:: ${static_path}/fastx_icons/fastq_quality_boxplot_3.png
------
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/fastx_toolkit/fastx_barcode_splitter.xml
--- a/tools/fastx_toolkit/fastx_barcode_splitter.xml
+++ b/tools/fastx_toolkit/fastx_barcode_splitter.xml
@@ -1,6 +1,6 @@
<tool id="cshl_fastx_barcode_splitter" name="Barcode Splitter"><description></description>
- <requirements><requirement type="package">fastx_toolkit</requirement></requirements>
+ <requirements><requirement type="package">fastx_toolkit</requirement></requirements><command interpreter="bash">fastx_barcode_splitter_galaxy_wrapper.sh $BARCODE $input "$input.name" "$output.files_path" --mismatches $mismatches --partial $partial $EOL > $output </command><inputs>
@@ -62,7 +62,7 @@
**Output Example**
-.. image:: ./static/fastx_icons/barcode_splitter_output_example.png
+.. image:: ${static_path}/fastx_icons/barcode_splitter_output_example.png
------
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/fastx_toolkit/fastx_clipper.xml
--- a/tools/fastx_toolkit/fastx_clipper.xml
+++ b/tools/fastx_toolkit/fastx_clipper.xml
@@ -1,6 +1,6 @@
<tool id="cshl_fastx_clipper" name="Clip" version="1.0.1" ><description>adapter sequences</description>
- <requirements><requirement type="package">fastx_toolkit</requirement></requirements>
+ <requirements><requirement type="package">fastx_toolkit</requirement></requirements><command>
zcat -f $input | fastx_clipper -l $minlength -a $clip_source.clip_sequence -d $keepdelta -o $output -v $KEEP_N $DISCARD_OPTIONS
#if $input.ext == "fastqsanger":
@@ -82,7 +82,7 @@
**Clipping Illustration:**
-.. image:: ./static/fastx_icons/fastx_clipper_illustration.png
+.. image:: ${static_path}/fastx_icons/fastx_clipper_illustration.png
@@ -93,7 +93,7 @@
**Clipping Example:**
-.. image:: ./static/fastx_icons/fastx_clipper_example.png
+.. image:: ${static_path}/fastx_icons/fastx_clipper_example.png
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/fastx_toolkit/fastx_nucleotides_distribution.xml
--- a/tools/fastx_toolkit/fastx_nucleotides_distribution.xml
+++ b/tools/fastx_toolkit/fastx_nucleotides_distribution.xml
@@ -1,6 +1,6 @@
<tool id="cshl_fastx_nucleotides_distribution" name="Draw nucleotides distribution chart"><description></description>
- <requirements><requirement type="package">fastx_toolkit</requirement></requirements>
+ <requirements><requirement type="package">fastx_toolkit</requirement></requirements><command>fastx_nucleotide_distribution_graph.sh -t '$input.name' -i $input -o $output</command><inputs>
@@ -26,19 +26,19 @@
The following chart clearly shows the barcode used at the 5'-end of the library: **GATCT**
-.. image:: ./static/fastx_icons/fastq_nucleotides_distribution_1.png
+.. image:: ${static_path}/fastx_icons/fastq_nucleotides_distribution_1.png
In the following chart, one can almost 'read' the most abundant sequence by looking at the dominant values: **TGATA TCGTA TTGAT GACTG AA...**
-.. image:: ./static/fastx_icons/fastq_nucleotides_distribution_2.png
+.. image:: ${static_path}/fastx_icons/fastq_nucleotides_distribution_2.png
The following chart shows a growing number of unknown (N) nucleotides towards later cycles (which might indicate a sequencing problem):
-.. image:: ./static/fastx_icons/fastq_nucleotides_distribution_3.png
+.. image:: ${static_path}/fastx_icons/fastq_nucleotides_distribution_3.png
But most of the time, the chart will look rather random:
-.. image:: ./static/fastx_icons/fastq_nucleotides_distribution_4.png
+.. image:: ${static_path}/fastx_icons/fastq_nucleotides_distribution_4.png
------
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/maf/interval2maf.xml
--- a/tools/maf/interval2maf.xml
+++ b/tools/maf/interval2maf.xml
@@ -110,7 +110,7 @@
Here a single interval is superimposed on three MAF blocks. Blocks 1 and 3 are trimmed because they extend beyond boundaries of the interval:
-.. image:: ./static/images/maf_icons/interval2maf.png
+.. image:: ${static_path}/images/maf_icons/interval2maf.png
-------
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/maf/interval2maf_pairwise.xml
--- a/tools/maf/interval2maf_pairwise.xml
+++ b/tools/maf/interval2maf_pairwise.xml
@@ -37,7 +37,7 @@
Here a single interval is superimposed on three MAF blocks. Blocks 1 and 3 are trimmed because they extend beyond boundaries of the interval:
-.. image:: ./static/images/maf_icons/interval2maf.png
+.. image:: ${static_path}/images/maf_icons/interval2maf.png
------
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/maf/interval_maf_to_merged_fasta.xml
--- a/tools/maf/interval_maf_to_merged_fasta.xml
+++ b/tools/maf/interval_maf_to_merged_fasta.xml
@@ -101,7 +101,7 @@
Here three MAF blocks overlapping a single interval are stitched together. Space between blocks 2 and 3 is filled with gaps:
-.. image:: ./static/images/maf_icons/stitchMaf.png
+.. image:: ${static_path}/images/maf_icons/stitchMaf.png
------
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/metag_tools/blat_mapping.xml
--- a/tools/metag_tools/blat_mapping.xml
+++ b/tools/metag_tools/blat_mapping.xml
@@ -35,7 +35,7 @@
Showing reads coverage on human chromosome 22 (partial result) in UCSC Genome Browser Custom Track:
- .. image:: ./static/images/blat_mapping_example.png
+ .. image:: ${static_path}/images/blat_mapping_example.png
:width: 600
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/metag_tools/convert_SOLiD_color2nuc.xml
--- a/tools/metag_tools/convert_SOLiD_color2nuc.xml
+++ b/tools/metag_tools/convert_SOLiD_color2nuc.xml
@@ -65,7 +65,7 @@
Each di-nucleotide is represented by a single digit: 0 to 3. The matrix is symmetric, thus the leading nucleotide is necessary to determine the sequence (otherwise there are four possibilities).
- .. image:: ./static/images/dualcolorcode.png
+ .. image:: ${static_path}/images/dualcolorcode.png
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/metag_tools/short_reads_figure_score.xml
--- a/tools/metag_tools/short_reads_figure_score.xml
+++ b/tools/metag_tools/short_reads_figure_score.xml
@@ -56,7 +56,7 @@
Quality scores are summarized as boxplot (Roche 454 FLX data):
-.. image:: ./static/images/short_reads_boxplot.png
+.. image:: ${static_path}/images/short_reads_boxplot.png
where the **X-axis** is coordinate along the read and the **Y-axis** is quality score adjusted to comply with the Phred score metric. Units on the X-axis depend on whether your data comes from Roche (454) or Illumina (Solexa) and ABI SOLiD machines:
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/mutation/visualize.xml
--- a/tools/mutation/visualize.xml
+++ b/tools/mutation/visualize.xml
@@ -96,7 +96,7 @@
Visualization output:
-.. image:: ./static/images/mutation_visualization_example.png
+.. image:: ${static_path}/images/mutation_visualization_example.png
:width: 150
Here the left-most column represents the position and the background color is the reference base color. Each column on its right describe each sample.
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/new_operations/basecoverage.xml
--- a/tools/new_operations/basecoverage.xml
+++ b/tools/new_operations/basecoverage.xml
@@ -38,7 +38,7 @@
**Example**
-.. image:: ./static/operation_icons/gops_baseCoverage.gif
+.. image:: ${static_path}/operation_icons/gops_baseCoverage.gif
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/new_operations/cluster.xml
--- a/tools/new_operations/cluster.xml
+++ b/tools/new_operations/cluster.xml
@@ -86,11 +86,11 @@
Find Clusters:
-.. image:: ./static/operation_icons/gops_clusterFind.gif
+.. image:: ${static_path}/operation_icons/gops_clusterFind.gif
Merge Clusters:
-.. image:: ./static/operation_icons/gops_clusterMerge.gif
+.. image:: ${static_path}/operation_icons/gops_clusterMerge.gif
</help></tool>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/new_operations/complement.xml
--- a/tools/new_operations/complement.xml
+++ b/tools/new_operations/complement.xml
@@ -55,7 +55,7 @@
**Example**
-.. image:: ./static/operation_icons/gops_complement.gif
+.. image:: ${static_path}/operation_icons/gops_complement.gif
</help></tool>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/new_operations/concat.xml
--- a/tools/new_operations/concat.xml
+++ b/tools/new_operations/concat.xml
@@ -53,7 +53,7 @@
**Example**
-.. image:: ./static/operation_icons/gops_concatenate.gif
+.. image:: ${static_path}/operation_icons/gops_concatenate.gif
</help></tool>
\ No newline at end of file
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/new_operations/get_flanks.xml
--- a/tools/new_operations/get_flanks.xml
+++ b/tools/new_operations/get_flanks.xml
@@ -58,7 +58,7 @@
chr22 500 800 NM_174568 0 +
-.. image:: ./static/operation_icons/flanks_ex1.gif
+.. image:: ${static_path}/operation_icons/flanks_ex1.gif
**Example 2**
@@ -70,7 +70,7 @@
chr22 500 800 NM_028946 0 -
-.. image:: ./static/operation_icons/flanks_ex2.gif
+.. image:: ${static_path}/operation_icons/flanks_ex2.gif
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/new_operations/intersect.xml
--- a/tools/new_operations/intersect.xml
+++ b/tools/new_operations/intersect.xml
@@ -133,11 +133,11 @@
Overlapping Intervals:
-.. image:: ./static/operation_icons/gops_intersectOverlappingIntervals.gif
+.. image:: ${static_path}/operation_icons/gops_intersectOverlappingIntervals.gif
Overlapping Pieces of Intervals:
-.. image:: ./static/operation_icons/gops_intersectOverlappingPieces.gif
+.. image:: ${static_path}/operation_icons/gops_intersectOverlappingPieces.gif
</help></tool>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/new_operations/join.xml
--- a/tools/new_operations/join.xml
+++ b/tools/new_operations/join.xml
@@ -94,23 +94,23 @@
**Examples**
-.. image:: ./static/operation_icons/gops_joinRecordsList.gif
+.. image:: ${static_path}/operation_icons/gops_joinRecordsList.gif
Only records that are joined (inner join):
-.. image:: ./static/operation_icons/gops_joinInner.gif
+.. image:: ${static_path}/operation_icons/gops_joinInner.gif
All records of first dataset:
-.. image:: ./static/operation_icons/gops_joinLeftOuter.gif
+.. image:: ${static_path}/operation_icons/gops_joinLeftOuter.gif
All records of second dataset:
-.. image:: ./static/operation_icons/gops_joinRightOuter.gif
+.. image:: ${static_path}/operation_icons/gops_joinRightOuter.gif
All records of both datasets:
-.. image:: ./static/operation_icons/gops_joinFullOuter.gif
+.. image:: ${static_path}/operation_icons/gops_joinFullOuter.gif
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/new_operations/merge.xml
--- a/tools/new_operations/merge.xml
+++ b/tools/new_operations/merge.xml
@@ -52,7 +52,7 @@
**Example**
-.. image:: ./static/operation_icons/gops_merge.gif
+.. image:: ${static_path}/operation_icons/gops_merge.gif
</help></tool>
\ No newline at end of file
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/new_operations/subtract.xml
--- a/tools/new_operations/subtract.xml
+++ b/tools/new_operations/subtract.xml
@@ -114,11 +114,11 @@
Intervals with no overlap:
-.. image:: ./static/operation_icons/gops_subtractOverlappingIntervals.gif
+.. image:: ${static_path}/operation_icons/gops_subtractOverlappingIntervals.gif
Non-overlapping pieces of intervals:
-.. image:: ./static/operation_icons/gops_subtractOverlappingPieces.gif
+.. image:: ${static_path}/operation_icons/gops_subtractOverlappingPieces.gif
</help></tool>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/ngs_simulation/ngs_simulation.xml
--- a/tools/ngs_simulation/ngs_simulation.xml
+++ b/tools/ngs_simulation/ngs_simulation.xml
@@ -188,7 +188,7 @@
Plot output (png):
-.. image:: ./static/images/ngs_simulation.png
+.. image:: ${static_path}/images/ngs_simulation.png
Summary output (txt)::
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/beam.xml
--- a/tools/phenotype_association/beam.xml
+++ b/tools/phenotype_association/beam.xml
@@ -59,9 +59,9 @@
The input dataset must be in lped_ format. The output datasets are both tabular_.
(`Dataset missing?`_)
-.. _lped: ./static/formatHelp.html#lped
-.. _tabular: ./static/formatHelp.html#tabular
-.. _Dataset missing?: ./static/formatHelp.html
+.. _lped: ${static_path}/formatHelp.html#lped
+.. _tabular: ${static_path}/formatHelp.html#tabular
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/ctd.xml
--- a/tools/phenotype_association/ctd.xml
+++ b/tools/phenotype_association/ctd.xml
@@ -246,7 +246,7 @@
Home page: http://ctdbase.org
-.. _tabular: ./static/formatHelp.html#tab
+.. _tabular: ${static_path}/formatHelp.html#tab
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/funDo.xml
--- a/tools/phenotype_association/funDo.xml
+++ b/tools/phenotype_association/funDo.xml
@@ -34,7 +34,7 @@
There is no input dataset. The output is in interval_ format.
-.. _interval: ./static/formatHelp.html#interval
+.. _interval: ${static_path}/formatHelp.html#interval
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/gpass.xml
--- a/tools/phenotype_association/gpass.xml
+++ b/tools/phenotype_association/gpass.xml
@@ -39,9 +39,9 @@
The input dataset must be in lped_ format, and the output is tabular_.
(`Dataset missing?`_)
-.. _lped: ./static/formatHelp.html#lped
-.. _tabular: ./static/formatHelp.html#tab
-.. _Dataset missing?: ./static/formatHelp.html
+.. _lped: ${static_path}/formatHelp.html#lped
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/hilbertvis.xml
--- a/tools/phenotype_association/hilbertvis.xml
+++ b/tools/phenotype_association/hilbertvis.xml
@@ -63,8 +63,8 @@
The input format is interval_, and the output is an image in PDF format.
(`Dataset missing?`_)
-.. _interval: ./static/formatHelp.html#interval
-.. _Dataset missing?: ./static/formatHelp.html
+.. _interval: ${static_path}/formatHelp.html#interval
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
@@ -75,7 +75,7 @@
visualization onto a two-dimensional square. For example, here is a diagram
showing the path of a level-2 Hilbert curve.
-.. image:: ./static/images/hilbertvisDiagram.png
+.. image:: ${static_path}/images/hilbertvisDiagram.png
The shade of each pixel represents the value for the corresponding bin of
consecutive genomic positions, calculated according to the specified
@@ -99,11 +99,11 @@
Here are some examples from the HilbertVis homepage, using ChIP-Seq data.
-.. image:: ./static/images/hilbertvis1.png
+.. image:: ${static_path}/images/hilbertvis1.png
-----
-.. image:: ./static/images/hilbertvis2.png
+.. image:: ${static_path}/images/hilbertvis2.png
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/ldtools.xml
--- a/tools/phenotype_association/ldtools.xml
+++ b/tools/phenotype_association/ldtools.xml
@@ -34,8 +34,8 @@
The input and output datasets are tabular_.
(`Dataset missing?`_)
-.. _tabular: ./static/formatHelp.html#tab
-.. _Dataset missing?: ./static/formatHelp.html
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/linkToDavid.xml
--- a/tools/phenotype_association/linkToDavid.xml
+++ b/tools/phenotype_association/linkToDavid.xml
@@ -76,9 +76,9 @@
a link to the DAVID website as described below.
(`Dataset missing?`_)
-.. _tabular: ./static/formatHelp.html#tab
-.. _html: ./static/formatHelp.html#html
-.. _Dataset missing?: ./static/formatHelp.html
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _html: ${static_path}/formatHelp.html#html
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/linkToGProfile.xml
--- a/tools/phenotype_association/linkToGProfile.xml
+++ b/tools/phenotype_association/linkToGProfile.xml
@@ -48,9 +48,9 @@
The output dataset is html_ with a link to g:Profiler.
(`Dataset missing?`_)
-.. _tabular: ./static/formatHelp.html#tab
-.. _html: ./static/formatHelp.html#html
-.. _Dataset missing?: ./static/formatHelp.html
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _html: ${static_path}/formatHelp.html#html
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/lps.xml
--- a/tools/phenotype_association/lps.xml
+++ b/tools/phenotype_association/lps.xml
@@ -180,9 +180,9 @@
There is a second output dataset (a log) that is in text_ format.
(`Dataset missing?`_)
-.. _tabular: ./static/formatHelp.html#tab
-.. _text: ./static/formatHelp.html#text
-.. _Dataset missing?: ./static/formatHelp.html
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _text: ${static_path}/formatHelp.html#text
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/pass.xml
--- a/tools/phenotype_association/pass.xml
+++ b/tools/phenotype_association/pass.xml
@@ -39,9 +39,9 @@
The input is in GFF_ format, and the output is tabular_.
(`Dataset missing?`_)
-.. _GFF: ./static/formatHelp.html#gff
-.. _tabular: ./static/formatHelp.html#tab
-.. _Dataset missing?: ./static/formatHelp.html
+.. _GFF: ${static_path}/formatHelp.html#gff
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/sift.xml
--- a/tools/phenotype_association/sift.xml
+++ b/tools/phenotype_association/sift.xml
@@ -97,8 +97,8 @@
The input and output datasets are tabular_.
(`Dataset missing?`_)
-.. _tabular: ./static/formatHelp.html#tab
-.. _Dataset missing?: ./static/formatHelp.html
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/phenotype_association/snpFreq.xml
--- a/tools/phenotype_association/snpFreq.xml
+++ b/tools/phenotype_association/snpFreq.xml
@@ -44,8 +44,8 @@
and includes all of the input data plus the additional columns described below.
(`Dataset missing?`_)
-.. _tabular: ./static/formatHelp.html#tab
-.. _Dataset missing?: ./static/formatHelp.html
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _Dataset missing?: ${static_path}/formatHelp.html
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/plotting/bar_chart.xml
--- a/tools/plotting/bar_chart.xml
+++ b/tools/plotting/bar_chart.xml
@@ -52,7 +52,7 @@
Graphing columns 2 and 3 while using column 1 for X Tick Labels will produce the following plot:
-.. image:: ./static/images/bar_chart.png
+.. image:: ${static_path}/images/bar_chart.png
:height: 324
:width: 540
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/plotting/boxplot.xml
--- a/tools/plotting/boxplot.xml
+++ b/tools/plotting/boxplot.xml
@@ -95,7 +95,7 @@
* Rectangular red boxes show the Inter-quartile Range (IQR) (top value is Q3, bottom value is Q1)
* Whiskers show outliers at max. 1.5*IQR
-.. image:: ./static/images/solid_qual.png
+.. image:: ${static_path}/images/solid_qual.png
------
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/plotting/histogram2.xml
--- a/tools/plotting/histogram2.xml
+++ b/tools/plotting/histogram2.xml
@@ -70,7 +70,7 @@
- Create a histogram on column 2 of the above dataset.
-.. image:: ./static/images/histogram2.png
+.. image:: ${static_path}/images/histogram2.png
</help></tool>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/plotting/scatterplot.xml
--- a/tools/plotting/scatterplot.xml
+++ b/tools/plotting/scatterplot.xml
@@ -65,7 +65,7 @@
- Create a simple scatterplot between the variables in column 2 and column 3 of the above dataset.
-.. image:: ./static/images/scatterplot.png
+.. image:: ${static_path}/images/scatterplot.png
</help></tool>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/plotting/xy_plot.xml
--- a/tools/plotting/xy_plot.xml
+++ b/tools/plotting/xy_plot.xml
@@ -143,6 +143,6 @@
- Series 1: Red Dashed-Line plot between columns 1 and 2
- Series 2: Blue Circular-Point plot between columns 3 and 2
-.. image:: ./static/images/xy_example.jpg
+.. image:: ${static_path}/images/xy_example.jpg
</help></tool>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/regVariation/compute_q_values.xml
--- a/tools/regVariation/compute_q_values.xml
+++ b/tools/regVariation/compute_q_values.xml
@@ -141,13 +141,13 @@
0.03115264 0.009750824 1
-.. image:: ./static/operation_icons/p_hist.png
+.. image:: ${static_path}/operation_icons/p_hist.png
-.. image:: ./static/operation_icons/q_hist.png
+.. image:: ${static_path}/operation_icons/q_hist.png
-.. image:: ./static/operation_icons/Q_plots.png
+.. image:: ${static_path}/operation_icons/Q_plots.png
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/regVariation/draw_stacked_barplots.xml
--- a/tools/regVariation/draw_stacked_barplots.xml
+++ b/tools/regVariation/draw_stacked_barplots.xml
@@ -52,7 +52,7 @@
The stacked bars plot representing the data in the input file.
-.. image:: ./static/operation_icons/stacked_bars_plot.png
+.. image:: ${static_path}/operation_icons/stacked_bars_plot.png
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/rgenetics/rgManQQ.xml
--- a/tools/rgenetics/rgManQQ.xml
+++ b/tools/rgenetics/rgManQQ.xml
@@ -86,7 +86,7 @@
improbable p values are above the red line which is drawn at the Bonferroni FWER control level (0.05/n
where n is the number of tests - this is highly conservative for correlated SNPs typical of GWA)
-.. image:: ./static/images/Armitagep_manhattan.png
+.. image:: ${static_path}/images/Armitagep_manhattan.png
A quantile-quantile (QQ) plot is a good way to see systematic departures from the null expectation of
uniform p-values from a genomic analysis. If the QQ plot shows departure from the null (ie a uniform 0-1
@@ -94,7 +94,7 @@
interesting results to look at. A log scale will help emphasise departures from the null at low p values
more clear
-.. image:: ./static/images/Armitagep_qqplot.png
+.. image:: ${static_path}/images/Armitagep_qqplot.png
-----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/rgenetics/rgWebLogo3.xml
--- a/tools/rgenetics/rgWebLogo3.xml
+++ b/tools/rgenetics/rgWebLogo3.xml
@@ -106,7 +106,7 @@
A typical output looks like this
-.. image:: ./static/images/rgWebLogo3_test.jpg
+.. image:: ${static_path}/images/rgWebLogo3_test.jpg
----
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/samtools/pileup_parser.xml
--- a/tools/samtools/pileup_parser.xml
+++ b/tools/samtools/pileup_parser.xml
@@ -320,7 +320,7 @@
To call all variants (with no restriction by coverage) with quality above phred value of 20, we will need to set the parameters as follows:
-.. image:: ./static/images/pileup_parser_help1.png
+.. image:: ${static_path}/images/pileup_parser_help1.png
Running the tool with these parameters will return::
@@ -336,7 +336,7 @@
In addition to calling variants, it is often useful to know the quality adjusted coverage. Running the tool with these parameters:
-.. image:: ./static/images/pileup_parser_help2.png
+.. image:: ${static_path}/images/pileup_parser_help2.png
will report everything from the original file::
@@ -355,7 +355,7 @@
If you set the **Print total number of differences?** to **Yes** the tool will print an additional column with the total number of reads where a devinat base is above the quality threshold. So, seetiing parametrs like this:
-.. image:: ./static/images/pileup_parser_help3.png
+.. image:: ${static_path}/images/pileup_parser_help3.png
will produce this::
@@ -371,7 +371,7 @@
Setting **Print quality and base string?** to **Yes** as shown here:
-.. image:: ./static/images/pileup_parser_help4.png
+.. image:: ${static_path}/images/pileup_parser_help4.png
will produce this::
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/solid_tools/solid_qual_boxplot.xml
--- a/tools/solid_tools/solid_qual_boxplot.xml
+++ b/tools/solid_tools/solid_qual_boxplot.xml
@@ -29,7 +29,7 @@
* Whiskers show outliers at max. 1.5*IQR
-.. image:: ./static/images/solid_qual.png
+.. image:: ${static_path}/images/solid_qual.png
------
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/stats/cor.xml
--- a/tools/stats/cor.xml
+++ b/tools/stats/cor.xml
@@ -47,19 +47,19 @@
- **Pearson's Correlation** reflects the degree of linear relationship between two variables. It ranges from +1 to -1. A correlation of +1 means that there is a perfect positive linear relationship between variables. The formula for Pearson's correlation is:
- .. image:: ./static/images/pearson.png
+ .. image:: ${static_path}/images/pearson.png
where n is the number of items
- **Kendall's rank correlation** is used to measure the degree of correspondence between two rankings and assessing the significance of this correspondence. The formula for Kendall's rank correlation is:
- .. image:: ./static/images/kendall.png
+ .. image:: ${static_path}/images/kendall.png
where n is the number of items, and P is the sum.
- **Spearman's rank correlation** assesses how well an arbitrary monotonic function could describe the relationship between two variables, without making any assumptions about the frequency distribution of the variables. The formula for Spearman's rank correlation is
- .. image:: ./static/images/spearman.png
+ .. image:: ${static_path}/images/spearman.png
where D is the difference between the ranks of corresponding values of X and Y, and N is the number of pairs of values.
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/stats/generate_matrix_for_pca_lda.xml
--- a/tools/stats/generate_matrix_for_pca_lda.xml
+++ b/tools/stats/generate_matrix_for_pca_lda.xml
@@ -35,12 +35,12 @@
- Input file (Source file First)
-.. image:: ./static/images/tools/lda/first_matrix_generator_example_file.png
+.. image:: ${static_path}/images/tools/lda/first_matrix_generator_example_file.png
- Input file (Source file Second)
-.. image:: ./static/images/tools/lda/second_matrix_generator_example_file.png
+.. image:: ${static_path}/images/tools/lda/second_matrix_generator_example_file.png
</help>
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/taxonomy/gi2taxonomy.xml
--- a/tools/taxonomy/gi2taxonomy.xml
+++ b/tools/taxonomy/gi2taxonomy.xml
@@ -50,7 +50,7 @@
and you want to obtain full taxonomic representation for GIs listed in *targetGI* column. If you set parameters as shown here:
-.. image:: ./static/images/fetchTax.png
+.. image:: ${static_path}/images/fetchTax.png
the tool will generate the following output (you may need to scroll sideways to see the entire line)::
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/taxonomy/poisson2test.xml
--- a/tools/taxonomy/poisson2test.xml
+++ b/tools/taxonomy/poisson2test.xml
@@ -48,12 +48,12 @@
Equation 1:
-.. image:: ./static/images/poisson2test_eqn1.png
+.. image:: ${static_path}/images/poisson2test_eqn1.png
Equation 2:
-.. image:: ./static/images/poisson2test_eqn2.png
+.. image:: ${static_path}/images/poisson2test_eqn2.png
X = number of reads falling in a particular taxon in location 1
diff -r 81692d707741d1933c3f7007148b59e7c9e28698 -r 29680fa5c35e6c22a6b7208015237b95d4bce2cc tools/taxonomy/t2ps_wrapper.xml
--- a/tools/taxonomy/t2ps_wrapper.xml
+++ b/tools/taxonomy/t2ps_wrapper.xml
@@ -62,14 +62,14 @@
Drawing the tree with default parameters (without changing anything in the interface) will produce this tree:
-.. image:: ./static/images/t2ps_ideal.png
+.. image:: ${static_path}/images/t2ps_ideal.png
:width: 500
(for explanation of colors and numbers on the tree scroll to the bottom of this help section)
Here *Class* rank represent terminal nodes (leaves) of the tree because it is the default setting of the "*show ranks from root to*" drop-down. Changing the drop-down to "*Subspecies*" will produce this:
-.. image:: ./static/images/t2ps_ideal_ssp.png
+.. image:: ${static_path}/images/t2ps_ideal_ssp.png
:width: 1000
--------
@@ -87,7 +87,7 @@
A full tree for this dataset will look like this:
-.. image:: ./static/images/t2ps_missing_nodes.png
+.. image:: ${static_path}/images/t2ps_missing_nodes.png
:width: 1000
Missing nodes are simply omitted from the tree (there are no gray boxes corresponding to "n") but the branch length is maintained so that taxa belonging to the same taxonomic rank are always aligned with each other
@@ -98,11 +98,11 @@
You can use the "*maximum number of leaves*" to restrict the tree to a specified number of leaves (external nodes). Using the following setting on the above dataset (note *show ranks from root to* set to *show entire tree* and *maximum number of leaves* is set *3*):
-.. image:: ./static/images/t2ps_autoscale.png
+.. image:: ${static_path}/images/t2ps_autoscale.png
will produce this tree:
-.. image:: ./static/images/t2ps_autoscale_tree.png
+.. image:: ${static_path}/images/t2ps_autoscale_tree.png
:width: 1000
Here the tree is automatically trimmed at a taxonomic rank that will only have 3 outer nodes. This is very useful for initial evaluation of very large trees where you want to only see, say, 1,000 outer nodes at once.
@@ -113,11 +113,11 @@
Branches of the tree are colored according to the heatmap below. The "bluer" the branch the lesser the number of leaves it leads to and vice versa.
-.. image:: ./static/images/t2ps_heatmap.png
+.. image:: ${static_path}/images/t2ps_heatmap.png
Each node is labeled with taxonomic name and the number of tree leaves belonging to this taxonomic group:
-.. image:: ./static/images/t2ps_node_label.png
+.. image:: ${static_path}/images/t2ps_node_label.png
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: scot...@gatech.edu: Added minor changes, including model accessors, for cancelling PBS tasks
by Bitbucket 05 Aug '12
by Bitbucket 05 Aug '12
05 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/81692d707741/
changeset: 81692d707741
user: scot...(a)gatech.edu
date: 2012-08-06 05:45:11
summary: Added minor changes, including model accessors, for cancelling PBS tasks
affected #: 6 files
diff -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 -r 81692d707741d1933c3f7007148b59e7c9e28698 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -100,7 +100,7 @@
def get_id_tag(self):
# For compatability with drmaa, which uses job_id right now, and TaskWrapper
- return str(self.job_id)
+ return self.get_job().get_id_tag()
def get_param_dict( self ):
"""
@@ -869,7 +869,7 @@
def get_id_tag(self):
# For compatibility with drmaa job runner and TaskWrapper, instead of using job_id directly
- return "%s_%s" % (self.job_id, self.task_id)
+ return self.get_task().get_id_tag()
def get_param_dict( self ):
"""
diff -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 -r 81692d707741d1933c3f7007148b59e7c9e28698 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -1,4 +1,4 @@
-import os, logging, threading, time
+import os, logging, threading, time, traceback
from datetime import timedelta
from Queue import Queue, Empty
@@ -305,7 +305,9 @@
return
# submit
- galaxy_job_id = job_wrapper.job_id
+ # The job tag includes the job and the task identifier
+ # (if a TaskWrapper was passed in):
+ galaxy_job_id = job_wrapper.get_id_tag()
log.debug("(%s) submitting file %s" % ( galaxy_job_id, job_file ) )
log.debug("(%s) command is: %s" % ( galaxy_job_id, command_line ) )
job_id = pbs.pbs_submit(c, job_attrs, job_file, pbs_queue_name, None)
@@ -375,7 +377,8 @@
( failures, statuses ) = self.check_all_jobs()
for pbs_job_state in self.watched:
job_id = pbs_job_state.job_id
- galaxy_job_id = pbs_job_state.job_wrapper.job_id
+ #galaxy_job_id = pbs_job_state.job_wrapper.job_id
+ galaxy_job_id = pbs_job_state.job_wrapper.get_id_tag()
old_state = pbs_job_state.old_state
pbs_server_name = self.determine_pbs_server( pbs_job_state.runner_url )
if pbs_server_name in failures:
@@ -562,6 +565,8 @@
"""
Separated out so we can use the worker threads for it.
"""
+ # NB: The stop_job method was modified to limit exceptions being sent up here,
+ # so the wrapper's fail method will now be called in case of error:
if pbs_job_state.stop_job:
self.stop_job( self.sa_session.query( self.app.model.Job ).get( pbs_job_state.job_wrapper.job_id ) )
pbs_job_state.job_wrapper.fail( pbs_job_state.fail_message )
@@ -606,14 +611,30 @@
def stop_job( self, job ):
"""Attempts to delete a job from the PBS queue"""
- pbs_server_name = self.determine_pbs_server( str( job.job_runner_name ) )
- c = pbs.pbs_connect( pbs_server_name )
- if c <= 0:
- log.debug("(%s/%s) Connection to PBS server for job delete failed" % ( job.id, job.job_runner_external_id ) )
- return
- pbs.pbs_deljob( c, str( job.job_runner_external_id ), '' )
- pbs.pbs_disconnect( c )
- log.debug( "(%s/%s) Removed from PBS queue before job completion" % ( job.id, job.job_runner_external_id ) )
+ job_tag = ( "(%s/%s)"
+ % ( job.get_id_tag(), job.get_job_runner_external_id() ) )
+ log.debug( "%s Stopping PBS job" % job_tag )
+
+ # Declare the connection handle c so that it can be cleaned up:
+ c = None
+
+ try:
+ pbs_server_name = self.determine_pbs_server( job.get_job_runner_name() )
+ c = pbs.pbs_connect( pbs_server_name )
+ if c <= 0:
+ log.debug("%s Connection to PBS server for job delete failed"
+ % job_tag )
+ return
+ pbs.pbs_deljob( c, job.get_job_runner_external_id(), '' )
+ log.debug( "%s Removed from PBS queue before job completion"
+ % job_tag )
+ except:
+ e = traceback.format_exc()
+ log.debug( "%s Unable to stop job: %s" % ( job_tag, e ) )
+ finally:
+ # Cleanup: disconnect from the server.
+ if ( None != c ):
+ pbs.pbs_disconnect( c )
def recover( self, job, job_wrapper ):
"""Recovers jobs stuck in the queued/running state when Galaxy started"""
@@ -622,17 +643,17 @@
pbs_job_state.efile = "%s/%s.e" % (self.app.config.cluster_files_directory, job.id)
pbs_job_state.ecfile = "%s/%s.ec" % (self.app.config.cluster_files_directory, job.id)
pbs_job_state.job_file = "%s/%s.sh" % (self.app.config.cluster_files_directory, job.id)
- pbs_job_state.job_id = str( job.job_runner_external_id )
+ pbs_job_state.job_id = str( job.get_job_runner_external_id() )
pbs_job_state.runner_url = job_wrapper.get_job_runner_url()
job_wrapper.command_line = job.command_line
pbs_job_state.job_wrapper = job_wrapper
if job.state == model.Job.states.RUNNING:
- log.debug( "(%s/%s) is still in running state, adding to the PBS queue" % ( job.id, job.job_runner_external_id ) )
+ log.debug( "(%s/%s) is still in running state, adding to the PBS queue" % ( job.id, job.get_job_runner_external_id() ) )
pbs_job_state.old_state = 'R'
pbs_job_state.running = True
self.monitor_queue.put( pbs_job_state )
elif job.state == model.Job.states.QUEUED:
- log.debug( "(%s/%s) is still in PBS queued state, adding to the PBS queue" % ( job.id, job.job_runner_external_id ) )
+ log.debug( "(%s/%s) is still in PBS queued state, adding to the PBS queue" % ( job.id, job.get_job_runner_external_id() ) )
pbs_job_state.old_state = 'Q'
pbs_job_state.running = False
self.monitor_queue.put( pbs_job_state )
diff -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 -r 81692d707741d1933c3f7007148b59e7c9e28698 lib/galaxy/jobs/runners/tasks.py
--- a/lib/galaxy/jobs/runners/tasks.py
+++ b/lib/galaxy/jobs/runners/tasks.py
@@ -197,12 +197,6 @@
% ( job.get_id(), task.get_id() ) )
job_wrapper.app.job_manager.job_handler.dispatcher.stop( task )
-# DELETEME:
-# else:
-# log.debug( "cancel_job for job %d: Task %d is in state %s and does not need to be cancelled"
-# % ( job.get_id(), task.get_id(), task_state ) )
-
-
def put( self, job_wrapper ):
"""Add a job to the queue (by job identifier)"""
# Change to queued state before handing to worker thread so the runner won't pick it up again
diff -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 -r 81692d707741d1933c3f7007148b59e7c9e28698 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -188,6 +188,12 @@
def get_tasks( self ):
# The tasks member is part of a reference in the SQL Alchemy schema:
return self.tasks
+ def get_id_tag( self ):
+ """
+ Return a tag that can be useful in identifying a Job.
+ This returns the Job's get_id
+ """
+ return "%s" % self.id;
def set_session_id( self, session_id ):
self.session_id = session_id
@@ -328,6 +334,12 @@
def get_id( self ):
# This is defined in the SQL Alchemy schema:
return self.id
+ def get_id_tag( self ):
+ """
+ Return an id tag suitable for identifying the task.
+ This combines the task's job id and the task's own id.
+ """
+ return "%s:%s" % ( self.job.get_id(), self.get_id() )
def get_command_line( self ):
return self.command_line
def get_parameters( self ):
diff -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 -r 81692d707741d1933c3f7007148b59e7c9e28698 tools/filters/changeCase.pl
--- a/tools/filters/changeCase.pl
+++ b/tools/filters/changeCase.pl
@@ -56,3 +56,10 @@
close IN;
close OUT;
+
+sleep(10);
+if ( 0 == floor(4 * rand()) % 4 ) {
+ print "Exiting randomly - no actual error\n";
+ exit 2;
+}
+sleep(50);
diff -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 -r 81692d707741d1933c3f7007148b59e7c9e28698 tools/filters/changeCase.xml
--- a/tools/filters/changeCase.xml
+++ b/tools/filters/changeCase.xml
@@ -1,5 +1,13 @@
<tool id="ChangeCase" name="Change Case"><description> of selected columns</description>
+ <parallelism method="multi"
+ split_inputs="input"
+ split_mode="number_of_parts"
+ split_size="8"
+ merge_outputs="out_file1" />
+ <stdio>
+ <exit_code range="1:" err_level="fatal" />
+ </stdio><command interpreter="perl">changeCase.pl $input "$cols" $delimiter $casing $out_file1</command><inputs><param name="input" format="txt" type="data" label="From"/>
@@ -71,4 +79,4 @@
WINDOWS is BAD
</help>
-</tool>
\ No newline at end of file
+</tool>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
03 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ba56d4746f7a/
changeset: ba56d4746f7a
user: carlfeberhard
date: 2012-08-03 19:31:48
summary: minor docstring adds in /scripts
affected #: 7 files
diff -r 53aacf94c5fc2e837975d15cc1bcc22d146db659 -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 scripts/check_eggs.py
--- a/scripts/check_eggs.py
+++ b/scripts/check_eggs.py
@@ -1,7 +1,11 @@
#!/usr/bin/env python
"""
-usage: check_eggs.py
+Compares local dependency eggs to those in eggs.ini, displaying a warning if
+any are out of date.
+
+usage: check_eggs.py [options]
"""
+
import os, sys, logging
from optparse import OptionParser
diff -r 53aacf94c5fc2e837975d15cc1bcc22d146db659 -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 scripts/check_python.py
--- a/scripts/check_python.py
+++ b/scripts/check_python.py
@@ -1,3 +1,8 @@
+"""
+If the currently installed Python version is not 2.5 to 2.7, prints an error
+message to stderr and returns 1
+"""
+
import os, sys
msg = """ERROR: Your Python version is: %s
diff -r 53aacf94c5fc2e837975d15cc1bcc22d146db659 -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 scripts/create_db.py
--- a/scripts/create_db.py
+++ b/scripts/create_db.py
@@ -1,3 +1,21 @@
+"""
+Creates the initial galaxy database schema using the settings defined in
+universe_wsgi.ini.
+
+This script is also wrapped by create_db.sh.
+
+.. note: pass '-c /location/to/your_config.ini' for non-standard ini file
+locations.
+
+.. note: if no database_connection is set in universe_wsgi.ini, the default,
+sqlite database will be constructed.
+ Using the database_file setting in universe_wsgi.ini will create the file
+ at the settings location (??)
+
+.. seealso: universe_wsgi.ini, specifically the settings: database_connection
+and database_file
+"""
+
import sys, os.path, logging
new_path = [ os.path.join( os.getcwd(), "lib" ) ]
diff -r 53aacf94c5fc2e837975d15cc1bcc22d146db659 -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 scripts/dist-scramble.py
--- a/scripts/dist-scramble.py
+++ b/scripts/dist-scramble.py
@@ -1,3 +1,36 @@
+"""
+Scrambles eggs for distribution on multiple platforms.
+
+(from http://wiki.g2.bx.psu.edu/Admin/Config/Eggs)
+This is mostly designed for use by Galaxy Developers at Penn State who are
+building eggs for distribution via the Galaxy Eggs distribution site.
+dist-scramble.py uses the dist-eggs.ini config file to determine what platforms
+to build for, and which hosts to build on.
+
+dist-scramble.py works the same way as scramble.py: ::
+
+% python scripts/dist-scramble.py galaxy_egg
+
+Called with only the egg argument, dist-scramble.py will build for all the
+platforms under the all group in its config file (for platform-specific eggs)
+or the noplatform group (for platform-inspecific eggs). The [hosts] section
+contains information about which hosts will be used for building on each desired
+platform. If you don't want to build for all the platforms listed under the all
+group, you can add a platform argument (any lvalue in the [hosts] or [groups]
+section is valid): ::
+
+% python scripts/dist-scramble.py galaxy_egg linux
+
+The platform argument is ignored for platform-inspecific eggs. An assumption is
+made that your Galaxy distribution is located at the same place on all of the
+hosts on which you're building (i.e. via a network filesystem).
+
+Once dist-scramble.py finishes, it will output a list of platforms on which it
+failed to scramble the egg. Successful eggs will be put in a new dist-eggs
+subdirectory of your Galaxy distribution. These eggs can then be copied to your
+distribution site.
+"""
+
import os, sys, logging
from optparse import OptionParser
diff -r 53aacf94c5fc2e837975d15cc1bcc22d146db659 -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 scripts/drmaa_external_killer.py
--- a/scripts/drmaa_external_killer.py
+++ b/scripts/drmaa_external_killer.py
@@ -1,4 +1,9 @@
#!/usr/bin/env python
+
+"""
+Terminates a DRMAA job if given a job id and (appropriate) user id.
+"""
+
import os
import sys
import errno
diff -r 53aacf94c5fc2e837975d15cc1bcc22d146db659 -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 scripts/drmaa_external_runner.py
--- a/scripts/drmaa_external_runner.py
+++ b/scripts/drmaa_external_runner.py
@@ -1,4 +1,11 @@
#!/usr/bin/env python
+
+"""
+Submit a DRMAA job given a user id and a job template file (in JSON format)
+defining any or all of the following: args, remoteCommand, outputPath,
+errorPath, nativeSpecification, name, email, project
+"""
+
import os
import sys
import errno
diff -r 53aacf94c5fc2e837975d15cc1bcc22d146db659 -r ba56d4746f7ade81c3cd7caf8d0d66a437ae7087 scripts/fetch_eggs.py
--- a/scripts/fetch_eggs.py
+++ b/scripts/fetch_eggs.py
@@ -1,3 +1,10 @@
+"""
+Connects to the Galaxy Eggs distribution site and downloads any eggs needed.
+
+If eggs for your platform are unavailable, fetch_eggs.py will direct you to run
+scramble.py.
+"""
+
import os, sys, logging
from optparse import OptionParser
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0