galaxy-commits
Threads by month
- ----- 2026 -----
- April
- March
- February
- January
- ----- 2025 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions
commit/galaxy-central: greg: Eliminate the use of util.Params in tool shed controller methods.
by commits-noreply@bitbucket.org 04 Jun '13
by commits-noreply@bitbucket.org 04 Jun '13
04 Jun '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/df488dbd5bdb/
Changeset: df488dbd5bdb
User: greg
Date: 2013-06-04 19:36:14
Summary: Eliminate the use of util.Params in tool shed controller methods.
Affected #: 3 files
diff -r f3cfd3936759aa70f3a3b772ecf469e1a059d7cb -r df488dbd5bdb22dc78549118834788992e5cabcb lib/galaxy/webapps/tool_shed/controllers/admin.py
--- a/lib/galaxy/webapps/tool_shed/controllers/admin.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/admin.py
@@ -31,7 +31,7 @@
# with an "f-" prefix, simulating filtering by clicking a search link. We have
# to take this approach because the "-" character is illegal in HTTP requests.
if 'operation' in kwd:
- operation = kwd['operation'].lower()
+ operation = kwd[ 'operation' ].lower()
if operation == "view_or_manage_repository":
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -119,12 +119,11 @@
@web.expose
@web.require_admin
def create_category( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- name = util.restore_text( params.get( 'name', '' ) ).strip()
- description = util.restore_text( params.get( 'description', '' ) ).strip()
- if params.get( 'create_category_button', False ):
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ name = kwd.get( 'name', '' ).strip()
+ description = kwd.get( 'description', '' ).strip()
+ if kwd.get( 'create_category_button', False ):
if not name or not description:
message = 'Enter a valid name and a description'
status = 'error'
@@ -151,9 +150,8 @@
@web.expose
@web.require_admin
def delete_repository( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
id = kwd.get( 'id', None )
if id:
# Deleting multiple items is currently not allowed (allow_multiple=False), so there will only be 1 id.
@@ -188,9 +186,8 @@
@web.expose
@web.require_admin
def delete_repository_metadata( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
id = kwd.get( 'id', None )
if id:
ids = util.listify( id )
@@ -213,10 +210,9 @@
@web.expose
@web.require_admin
def edit_category( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- id = params.get( 'id', None )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ id = kwd.get( 'id', None )
if not id:
message = "No category ids received for editing"
trans.response.send_redirect( web.url_for( controller='admin',
@@ -224,9 +220,9 @@
message=message,
status='error' ) )
category = suc.get_category( trans, id )
- if params.get( 'edit_category_button', False ):
- new_name = util.restore_text( params.get( 'name', '' ) ).strip()
- new_description = util.restore_text( params.get( 'description', '' ) ).strip()
+ if kwd.get( 'edit_category_button', False ):
+ new_name = kwd.get( 'name', '' ).strip()
+ new_description = kwd.get( 'description', '' ).strip()
if category.name != new_name or category.description != new_description:
if not new_name:
message = 'Enter a valid name'
@@ -288,9 +284,8 @@
@web.expose
@web.require_admin
def regenerate_statistics( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
if 'regenerate_statistics_button' in kwd:
trans.app.shed_counter.generate_statistics()
message = "Successfully regenerated statistics"
@@ -315,9 +310,8 @@
@web.expose
@web.require_admin
def undelete_repository( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
id = kwd.get( 'id', None )
if id:
# Undeleting multiple items is currently not allowed (allow_multiple=False), so there will only be 1 id.
@@ -358,9 +352,8 @@
# TODO: We should probably eliminate the Category.deleted column since it really makes no
# sense to mark a category as deleted (category names and descriptions can be changed instead).
# If we do this, then the following 2 methods can be eliminated.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
id = kwd.get( 'id', None )
if id:
ids = util.listify( id )
@@ -385,9 +378,8 @@
# This method should only be called for a Category that has previously been deleted.
# Purging a deleted Category deletes all of the following from the database:
# - RepositoryCategoryAssociations where category_id == Category.id
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
id = kwd.get( 'id', None )
if id:
ids = util.listify( id )
@@ -414,9 +406,8 @@
@web.expose
@web.require_admin
def undelete_category( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
id = kwd.get( 'id', None )
if id:
ids = util.listify( id )
diff -r f3cfd3936759aa70f3a3b772ecf469e1a059d7cb -r df488dbd5bdb22dc78549118834788992e5cabcb lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -657,10 +657,9 @@
@web.expose
def browse_repository( self, trans, id, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ commit_message = kwd.get( 'commit_message', 'Deleted selected files' )
repository = suc.get_repository_in_tool_shed( trans, id )
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
# Update repository files for browsing.
@@ -833,14 +832,13 @@
@web.expose
def check_for_updates( self, trans, **kwd ):
"""Handle a request from a local Galaxy instance."""
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
# If the request originated with the UpdateManager, it will not include a galaxy_url.
galaxy_url = suc.handle_galaxy_url( trans, **kwd )
- name = params.get( 'name', None )
- owner = params.get( 'owner', None )
- changeset_revision = params.get( 'changeset_revision', None )
+ name = kwd.get( 'name', None )
+ owner = kwd.get( 'owner', None )
+ changeset_revision = kwd.get( 'changeset_revision', None )
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
@@ -913,9 +911,8 @@
@web.expose
def contact_owner( self, trans, id, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
repository = suc.get_repository_in_tool_shed( trans, id )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
if trans.user and trans.user.email:
@@ -948,9 +945,8 @@
@web.expose
def create_repository( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
categories = suc.get_categories( trans )
if not categories:
message = 'No categories have been configured in this instance of the Galaxy Tool Shed. ' + \
@@ -960,12 +956,12 @@
action='browse_repositories',
message=message,
status=status ) )
- name = util.restore_text( params.get( 'name', '' ) )
- description = util.restore_text( params.get( 'description', '' ) )
- long_description = util.restore_text( params.get( 'long_description', '' ) )
- category_ids = util.listify( params.get( 'category_id', '' ) )
+ name = kwd.get( 'name', '' )
+ description = kwd.get( 'description', '' )
+ long_description = kwd.get( 'long_description', '' )
+ category_ids = util.listify( kwd.get( 'category_id', '' ) )
selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
- if params.get( 'create_repository_button', False ):
+ if kwd.get( 'create_repository_button', False ):
error = False
message = self.__validate_repository_name( name, trans.user )
if message:
@@ -1031,12 +1027,11 @@
"""Mark a repository in the tool shed as deprecated or not deprecated."""
# Marking a repository in the tool shed as deprecated has no effect on any downloadable changeset revisions that may be associated with the
# repository. Revisions are not marked as not downloadable because those that have installed the repository must be allowed to get updates.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- repository_id = params.get( 'id', None )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ repository_id = kwd.get( 'id', None )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- mark_deprecated = util.string_as_bool( params.get( 'mark_deprecated', False ) )
+ mark_deprecated = util.string_as_bool( kwd.get( 'mark_deprecated', False ) )
repository.deprecated = mark_deprecated
trans.sa_session.add( repository )
trans.sa_session.flush()
@@ -1052,9 +1047,8 @@
@web.expose
def display_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
repository, tool, message = tool_util.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
if message:
status = 'error'
@@ -1105,7 +1099,6 @@
@web.expose
def download( self, trans, repository_id, changeset_revision, file_type, **kwd ):
# Download an archive of the repository files compressed as zip, gz or bz2.
- params = util.Params( kwd )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
# Allow hgweb to handle the download. This requires the tool shed
# server account's .hgrc file to include the following setting:
@@ -1125,9 +1118,8 @@
@web.expose
def find_tools( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
galaxy_url = suc.handle_galaxy_url( trans, **kwd )
if 'operation' in kwd:
item_id = kwd.get( 'id', '' )
@@ -1169,7 +1161,7 @@
tool_ids = [ item.lower() for item in util.listify( kwd.get( 'tool_id', '' ) ) ]
tool_names = [ item.lower() for item in util.listify( kwd.get( 'tool_name', '' ) ) ]
tool_versions = [ item.lower() for item in util.listify( kwd.get( 'tool_version', '' ) ) ]
- exact_matches = params.get( 'exact_matches', '' )
+ exact_matches = kwd.get( 'exact_matches', '' )
exact_matches_checked = CheckboxField.is_checked( exact_matches )
match_tuples = []
ok = True
@@ -1210,9 +1202,8 @@
@web.expose
def find_workflows( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
galaxy_url = suc.handle_galaxy_url( trans, **kwd )
if 'operation' in kwd:
item_id = kwd.get( 'id', '' )
@@ -1253,7 +1244,7 @@
return trans.show_error_message( "No items were selected." )
if 'find_workflows_button' in kwd:
workflow_names = [ item.lower() for item in util.listify( kwd.get( 'workflow_name', '' ) ) ]
- exact_matches = params.get( 'exact_matches', '' )
+ exact_matches = kwd.get( 'exact_matches', '' )
exact_matches_checked = CheckboxField.is_checked( exact_matches )
match_tuples = []
ok = True
@@ -1327,12 +1318,11 @@
if 'workflows' in metadata:
includes_workflows = True
return includes_data_managers, includes_datatypes, includes_tools, includes_tools_for_display_in_tool_panel, includes_tool_dependencies, has_repository_dependencies, includes_workflows
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- name = params.get( 'name', None )
- owner = params.get( 'owner', None )
- changeset_revision = params.get( 'changeset_revision', None )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ name = kwd.get( 'name', None )
+ owner = kwd.get( 'owner', None )
+ changeset_revision = kwd.get( 'changeset_revision', None )
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
trans.security.encode_id( repository.id ),
@@ -1532,10 +1522,9 @@
@web.json
def get_repository_dependencies( self, trans, **kwd ):
"""Return an encoded dictionary of all repositories upon which the contents of the received repository depends."""
- params = util.Params( kwd )
- name = params.get( 'name', None )
- owner = params.get( 'owner', None )
- changeset_revision = params.get( 'changeset_revision', None )
+ name = kwd.get( 'name', None )
+ owner = kwd.get( 'owner', None )
+ changeset_revision = kwd.get( 'changeset_revision', None )
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
repository_id = trans.security.encode_id( repository.id )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
@@ -1629,10 +1618,9 @@
@web.expose
def get_tool_dependencies( self, trans, **kwd ):
"""Handle a request from a Galaxy instance to get the tool_dependencies entry from the metadata for a specified changeset revision."""
- params = util.Params( kwd )
- name = params.get( 'name', None )
- owner = params.get( 'owner', None )
- changeset_revision = params.get( 'changeset_revision', None )
+ name = kwd.get( 'name', None )
+ owner = kwd.get( 'owner', None )
+ changeset_revision = kwd.get( 'changeset_revision', None )
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
for downloadable_revision in repository.downloadable_revisions:
if downloadable_revision.changeset_revision == changeset_revision:
@@ -1646,10 +1634,9 @@
@web.expose
def get_tool_dependencies_config_contents( self, trans, **kwd ):
"""Handle a request from a Galaxy instance to get the tool_dependencies.xml file contents for a specified changeset revision."""
- params = util.Params( kwd )
- name = params.get( 'name', None )
- owner = params.get( 'owner', None )
- changeset_revision = params.get( 'changeset_revision', None )
+ name = kwd.get( 'name', None )
+ owner = kwd.get( 'owner', None )
+ changeset_revision = kwd.get( 'changeset_revision', None )
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
# TODO: We're currently returning the tool_dependencies.xml file that is available on disk. We need to enhance this process
# to retrieve older versions of the tool-dependencies.xml file from the repository manifest.
@@ -1786,9 +1773,8 @@
@web.expose
def help( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
return trans.fill_template( '/webapps/tool_shed/repository/help.mako', message=message, status=status, **kwd )
def __in_tool_dict( self, tool_dict, exact_matches_checked, tool_id=None, tool_name=None, tool_version=None ):
@@ -1840,9 +1826,8 @@
@web.expose
def index( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
# See if there are any RepositoryMetadata records since menu items require them.
repository_metadata = trans.sa_session.query( model.RepositoryMetadata ).first()
current_user = trans.user
@@ -1861,9 +1846,9 @@
break
# Route in may have been from a sharable URL, in which case we'll have a user_id and possibly a name
# The received user_id will be the id of the repository owner.
- user_id = params.get( 'user_id', None )
- repository_id = params.get( 'repository_id', None )
- changeset_revision = params.get( 'changeset_revision', None )
+ user_id = kwd.get( 'user_id', None )
+ repository_id = kwd.get( 'repository_id', None )
+ changeset_revision = kwd.get( 'changeset_revision', None )
return trans.fill_template( '/webapps/tool_shed/index.mako',
repository_metadata=repository_metadata,
has_reviewed_repositories=has_reviewed_repositories,
@@ -1904,9 +1889,8 @@
@web.expose
def load_invalid_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'error' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'error' )
repository, tool, error_message = tool_util.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
tool_state = self.__new_state( trans )
invalid_file_tups = []
@@ -1958,13 +1942,12 @@
@web.expose
@web.require_login( "manage email alerts" )
def manage_email_alerts( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- new_repo_alert = params.get( 'new_repo_alert', '' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ new_repo_alert = kwd.get( 'new_repo_alert', '' )
new_repo_alert_checked = CheckboxField.is_checked( new_repo_alert )
user = trans.user
- if params.get( 'new_repo_alert_button', False ):
+ if kwd.get( 'new_repo_alert_button', False ):
user.new_repo_alert = new_repo_alert_checked
trans.sa_session.add( user )
trans.sa_session.flush()
@@ -1990,15 +1973,15 @@
@web.expose
@web.require_login( "manage repository" )
def manage_repository( self, trans, id, **kwd ):
- message = util.restore_text( kwd.get( 'message', '' ) )
+ message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository = suc.get_repository_in_tool_shed( trans, id )
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
- repo_name = util.restore_text( kwd.get( 'repo_name', repository.name ) )
- changeset_revision = util.restore_text( kwd.get( 'changeset_revision', repository.tip( trans.app ) ) )
- description = util.restore_text( kwd.get( 'description', repository.description ) )
- long_description = util.restore_text( kwd.get( 'long_description', repository.long_description ) )
+ repo_name = kwd.get( 'repo_name', repository.name )
+ changeset_revision = kwd.get( 'changeset_revision', repository.tip( trans.app ) )
+ description = kwd.get( 'description', repository.description )
+ long_description = kwd.get( 'long_description', repository.long_description )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
display_reviews = util.string_as_bool( kwd.get( 'display_reviews', False ) )
alerts = kwd.get( 'alerts', '' )
@@ -2223,9 +2206,8 @@
@web.expose
@web.require_login( "multi select email alerts" )
def multi_select_email_alerts( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
if 'operation' in kwd:
operation = kwd[ 'operation' ].lower()
if operation == "receive email alerts":
@@ -2263,13 +2245,12 @@
@web.expose
def preview_tools_in_changeset( self, trans, repository_id, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
- changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) )
+ changeset_revision = kwd.get( 'changeset_revision', repository.tip( trans.app ) )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
repository_metadata_id = trans.security.encode_id( repository_metadata.id ),
@@ -2316,12 +2297,11 @@
older changeset_revision, but later the repository was updated in the tool shed and the Galaxy admin is trying to install the latest
changeset revision of the same repository instead of updating the one that was previously installed.
"""
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- name = params.get( 'name', None )
- owner = params.get( 'owner', None )
- changeset_revision = params.get( 'changeset_revision', None )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ name = kwd.get( 'name', None )
+ owner = kwd.get( 'owner', None )
+ changeset_revision = kwd.get( 'changeset_revision', None )
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
@@ -2340,10 +2320,9 @@
@web.require_login( "rate repositories" )
def rate_repository( self, trans, **kwd ):
""" Rate a repository and return updated rating data. """
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- id = params.get( 'id', None )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ id = kwd.get( 'id', None )
if not id:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -2356,12 +2335,12 @@
action='browse_repositories',
message="You are not allowed to rate your own repository",
status='error' ) )
- if params.get( 'rate_button', False ):
- rating = int( params.get( 'rating', '0' ) )
- comment = util.restore_text( params.get( 'comment', '' ) )
+ if kwd.get( 'rate_button', False ):
+ rating = int( kwd.get( 'rating', '0' ) )
+ comment = kwd.get( 'comment', '' )
rating = self.rate_item( trans, trans.user, repository, rating, comment )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
- display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
+ display_reviews = util.string_as_bool( kwd.get( 'display_reviews', False ) )
rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/tool_shed/repository/rate_repository.mako',
@@ -2397,7 +2376,7 @@
if 'reset_metadata_on_selected_repositories_button' in kwd:
message, status = metadata_util.reset_metadata_on_selected_repositories( trans, **kwd )
else:
- message = util.restore_text( kwd.get( 'message', '' ) )
+ message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repositories_select_field = suc.build_repository_ids_select_field( trans, my_writable=True )
return trans.fill_template( '/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako',
@@ -2507,15 +2486,14 @@
@web.expose
def select_files_to_delete( self, trans, id, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ commit_message = kwd.get( 'commit_message', 'Deleted selected files' )
repository = suc.get_repository_in_tool_shed( trans, id )
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
- selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) )
- if params.get( 'select_files_to_delete_button', False ):
+ selected_files_to_delete = kwd.get( 'selected_files_to_delete', '' )
+ if kwd.get( 'select_files_to_delete_button', False ):
if selected_files_to_delete:
selected_files_to_delete = selected_files_to_delete.split( ',' )
# Get the current repository tip.
@@ -2771,10 +2749,9 @@
revision for the required repository, making it impossible to discover the repository without knowledge of revisions to which it could have been
updated.
"""
- params = util.Params( kwd )
- name = params.get( 'name', None )
- owner = params.get( 'owner', None )
- changeset_revision = params.get( 'changeset_revision', None )
+ name = kwd.get( 'name', None )
+ owner = kwd.get( 'owner', None )
+ changeset_revision = kwd.get( 'changeset_revision', None )
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
@@ -2811,9 +2788,8 @@
@web.expose
def view_changelog( self, trans, id, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
repository = suc.get_repository_in_tool_shed( trans, id )
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
changesets = []
@@ -2847,9 +2823,8 @@
@web.expose
def view_changeset( self, trans, id, ctx_str, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
repository = suc.get_repository_in_tool_shed( trans, id )
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
ctx = suc.get_changectx_for_changeset( repo, ctx_str )
@@ -2925,15 +2900,14 @@
@web.expose
def view_repository( self, trans, id, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
repository = suc.get_repository_in_tool_shed( trans, id )
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
- changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) )
- display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
- alerts = params.get( 'alerts', '' )
+ changeset_revision = kwd.get( 'changeset_revision', repository.tip( trans.app ) )
+ display_reviews = kwd.get( 'display_reviews', False )
+ alerts = kwd.get( 'alerts', '' )
alerts_checked = CheckboxField.is_checked( alerts )
if repository.email_alerts:
email_alerts = json.from_json_string( repository.email_alerts )
@@ -2941,7 +2915,7 @@
email_alerts = []
repository_dependencies = None
user = trans.user
- if user and params.get( 'receive_email_alerts_button', False ):
+ if user and kwd.get( 'receive_email_alerts_button', False ):
flush_needed = False
if alerts_checked:
if user.email not in email_alerts:
@@ -3009,9 +2983,8 @@
@web.expose
def view_tool_metadata( self, trans, repository_id, changeset_revision, tool_id, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_files_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
@@ -3081,9 +3054,8 @@
@web.expose
def view_workflow( self, trans, workflow_name, repository_metadata_id, **kwd ):
"""Retrieve necessary information about a workflow from the database so that it can be displayed in an svg image."""
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
if workflow_name:
workflow_name = encoding_util.tool_shed_decode( workflow_name )
repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
diff -r f3cfd3936759aa70f3a3b772ecf469e1a059d7cb -r df488dbd5bdb22dc78549118834788992e5cabcb lib/galaxy/webapps/tool_shed/controllers/repository_review.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository_review.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository_review.py
@@ -34,9 +34,8 @@
@web.require_login( "approve repository review" )
def approve_repository_review( self, trans, **kwd ):
# The value of the received id is the encoded review id.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
encoded_review_id = kwd[ 'id' ]
review = review_util.get_review( trans, encoded_review_id )
if kwd.get( 'approve_repository_review_button', False ):
@@ -69,9 +68,8 @@
@web.expose
@web.require_login( "browse review" )
def browse_review( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
review = review_util.get_review( trans, kwd[ 'id' ] )
repository = review.repository
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
@@ -101,12 +99,11 @@
@web.expose
@web.require_login( "create component" )
def create_component( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- name = util.restore_text( params.get( 'name', '' ) )
- description = util.restore_text( params.get( 'description', '' ) )
- if params.get( 'create_component_button', False ):
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ name = kwd.get( 'name', '' )
+ description = kwd.get( 'description', '' )
+ if kwd.get( 'create_component_button', False ):
if not name or not description:
message = 'Enter a valid name and a description'
status = 'error'
@@ -133,9 +130,8 @@
@web.require_login( "create review" )
def create_review( self, trans, **kwd ):
# The value of the received id is the encoded repository id.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
repository_id = kwd.get( 'id', None )
changeset_revision = kwd.get( 'changeset_revision', None )
previous_review_id = kwd.get( 'previous_review_id', None )
@@ -197,10 +193,9 @@
@web.expose
@web.require_login( "edit component" )
def edit_component( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- id = params.get( 'id', None )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ id = kwd.get( 'id', None )
if not id:
message = "No component ids received for editing"
trans.response.send_redirect( web.url_for( controller='admin',
@@ -208,8 +203,8 @@
message=message,
status='error' ) )
component = review_util.get_component( trans, id )
- if params.get( 'edit_component_button', False ):
- new_description = util.restore_text( params.get( 'description', '' ) ).strip()
+ if kwd.get( 'edit_component_button', False ):
+ new_description = kwd.get( 'description', '' ).strip()
if component.description != new_description:
component.description = new_description
trans.sa_session.add( component )
@@ -229,9 +224,8 @@
@web.require_login( "edit review" )
def edit_review( self, trans, **kwd ):
# The value of the received id is the encoded review id.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
review_id = kwd.get( 'id', None )
review = review_util.get_review( trans, review_id )
components_dict = odict()
@@ -403,9 +397,8 @@
@web.require_login( "manage repositories reviewed by me" )
def manage_repositories_reviewed_by_me( self, trans, **kwd ):
# The value of the received id is the encoded repository id.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
if 'operation' in kwd:
kwd[ 'mine' ] = True
return trans.response.send_redirect( web.url_for( controller='repository_review',
@@ -471,9 +464,8 @@
@web.require_login( "manage repository reviews" )
def manage_repository_reviews( self, trans, mine=False, **kwd ):
# The value of the received id is the encoded repository id.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
repository_id = kwd.get( 'id', None )
if repository_id:
repository = suc.get_repository_in_tool_shed( trans, repository_id )
@@ -519,9 +511,8 @@
@web.require_login( "manage repository reviews of revision" )
def manage_repository_reviews_of_revision( self, trans, **kwd ):
# The value of the received id is the encoded repository id.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
repository_id = kwd.get( 'id', None )
changeset_revision = kwd.get( 'changeset_revision', None )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
@@ -542,9 +533,8 @@
@web.expose
@web.require_login( "repository reviews by user" )
def repository_reviews_by_user( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
if 'operation' in kwd:
operation = kwd['operation'].lower()
@@ -569,9 +559,8 @@
@web.expose
@web.require_login( "reviewed repositories i own" )
def reviewed_repositories_i_own( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
# The value of the received id is the encoded repository id.
if 'operation' in kwd:
operation = kwd['operation'].lower()
@@ -589,9 +578,8 @@
@web.require_login( "select previous review" )
def select_previous_review( self, trans, **kwd ):
# The value of the received id is the encoded repository id.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
changeset_revision = kwd.get( 'changeset_revision', None )
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2dd18c7198ad/
Changeset: 2dd18c7198ad
Branch: stable
User: Dave Bouvier
Date: 2013-06-04 18:02:55
Summary: Fix missing import.
Affected #: 1 file
diff -r 2a116647d7d75d5088c43fe3ac1d10bd9e66a08c -r 2dd18c7198ad7ff1c354dbefa0715b2344360487 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -8,6 +8,7 @@
from time import strftime
from galaxy import util
from galaxy.util import json
+from galaxy.util import unicodify
from galaxy.web import url_for
from galaxy.web.form_builder import SelectField
from galaxy.datatypes import checkers
https://bitbucket.org/galaxy/galaxy-central/commits/f3cfd3936759/
Changeset: f3cfd3936759
User: Dave Bouvier
Date: 2013-06-04 18:02:55
Summary: Fix missing import.
Affected #: 1 file
diff -r ef3816ca5e0cc88616c34144af00b4fcce5bf0d0 -r f3cfd3936759aa70f3a3b772ecf469e1a059d7cb lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -8,6 +8,7 @@
from time import strftime
from galaxy import util
from galaxy.util import json
+from galaxy.util import unicodify
from galaxy.web import url_for
from galaxy.web.form_builder import SelectField
from galaxy.datatypes import checkers
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ef3816ca5e0c/
Changeset: ef3816ca5e0c
User: Dave Bouvier
Date: 2013-06-04 17:05:17
Summary: Fix server error when displaying a tool dependency installation status when the tool dependency contains certain unicode characters in the displayed error message.
Affected #: 1 file
diff -r 6ae2d6a466b8f721aa39d3e183b36594d7b4e235 -r ef3816ca5e0cc88616c34144af00b4fcce5bf0d0 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1240,7 +1240,7 @@
if text:
if to_html:
try:
- escaped_text = text.decode( 'utf-8' )
+ escaped_text = unicodify( text )
escaped_text = escaped_text.encode( 'ascii', 'ignore' )
escaped_text = str( markupsafe.escape( escaped_text ) )
except UnicodeDecodeError, e:
https://bitbucket.org/galaxy/galaxy-central/commits/2a116647d7d7/
Changeset: 2a116647d7d7
Branch: stable
User: Dave Bouvier
Date: 2013-06-04 17:05:17
Summary: Fix server error when displaying a tool dependency installation status when the tool dependency contains certain unicode characters in the displayed error message.
Affected #: 1 file
diff -r cea3ddf6cddaac2f8703598307449ffc13240efc -r 2a116647d7d75d5088c43fe3ac1d10bd9e66a08c lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1214,7 +1214,7 @@
if text:
if to_html:
try:
- escaped_text = text.decode( 'utf-8' )
+ escaped_text = unicodify( text )
escaped_text = escaped_text.encode( 'ascii', 'ignore' )
escaped_text = str( markupsafe.escape( escaped_text ) )
except UnicodeDecodeError, e:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Refactor installation of sample files from the Tool Shed into Galaxy. Data Tables and location files are now namespaced when installed from a ToolShed. Data Managers will write only to the location files that were installed along with them. Modify behavior of 'from_shed_config' in Data Tables so that it only applies within the toolshed application.
by commits-noreply@bitbucket.org 03 Jun '13
by commits-noreply@bitbucket.org 03 Jun '13
03 Jun '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6ae2d6a466b8/
Changeset: 6ae2d6a466b8
User: dan
Date: 2013-06-03 22:31:11
Summary: Refactor installation of sample files from the Tool Shed into Galaxy. Data Tables and location files are now namespaced when installed from a ToolShed. Data Managers will write only to the location files that were installed along with them. Modify behavior of 'from_shed_config' in Data Tables so that it only applies within the toolshed application.
TODO: directory structure inside of namespacing is still flat (matching previous behavior), we should allow keeping and utilizing directory hierarchy, especially when considering two same named files which exist under different directories in the repository.
Affected #: 9 files
diff -r 1c7ca04c86393c3f45df48514ac153cb64b4695e -r 6ae2d6a466b8f721aa39d3e183b36594d7b4e235 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -92,7 +92,7 @@
# Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
tool_data_path=self.tool_data_tables.tool_data_path,
- from_shed_config=True )
+ from_shed_config=False )
# Initialize the job management configuration
self.job_config = jobs.JobConfiguration(self)
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
diff -r 1c7ca04c86393c3f45df48514ac153cb64b4695e -r 6ae2d6a466b8f721aa39d3e183b36594d7b4e235 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -65,6 +65,11 @@
else:
tcf = 'tool_conf.xml'
self.tool_configs = [ resolve_path( p, self.root ) for p in listify( tcf ) ]
+ self.shed_tool_data_path = kwargs.get( "shed_tool_data_path", None )
+ if self.shed_tool_data_path:
+ self.shed_tool_data_path = resolve_path( self.shed_tool_data_path, self.root )
+ else:
+ self.shed_tool_data_path = self.tool_data_path
self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root )
self.shed_tool_data_table_config = resolve_path( kwargs.get( 'shed_tool_data_table_config', 'shed_tool_data_table_conf.xml' ), self.root )
self.enable_tool_shed_check = string_as_bool( kwargs.get( 'enable_tool_shed_check', False ) )
diff -r 1c7ca04c86393c3f45df48514ac153cb64b4695e -r 6ae2d6a466b8f721aa39d3e183b36594d7b4e235 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -13,9 +13,11 @@
import tempfile
from galaxy import util
+from galaxy.util.odict import odict
log = logging.getLogger( __name__ )
+DEFAULT_TABLE_TYPE = 'tabular'
class ToolDataTableManager( object ):
"""Manages a collection of tool data tables"""
@@ -26,9 +28,6 @@
# at server startup. If tool shed repositories are installed that contain a valid file named tool_data_table_conf.xml.sample, entries
# from that file are inserted into this dict at the time of installation.
self.data_tables = {}
- # Store config elements for on-the-fly persistence to the defined shed_tool_data_table_config file name.
- self.shed_data_table_elems = []
- self.data_table_elem_names = []
if config_filename:
self.load_from_config_file( config_filename, self.tool_data_path, from_shed_config=False )
@@ -58,23 +57,15 @@
root = tree.getroot()
table_elems = []
for table_elem in root.findall( 'table' ):
- type = table_elem.get( 'type', 'tabular' )
- assert type in tool_data_table_types, "Unknown data table type '%s'" % type
+ table = ToolDataTable.from_elem( table_elem, tool_data_path, from_shed_config )
table_elems.append( table_elem )
- table_elem_name = table_elem.get( 'name', None )
- if table_elem_name and table_elem_name not in self.data_table_elem_names:
- self.data_table_elem_names.append( table_elem_name )
- if from_shed_config:
- self.shed_data_table_elems.append( table_elem )
- table = tool_data_table_types[ type ]( table_elem, tool_data_path, from_shed_config)
if table.name not in self.data_tables:
self.data_tables[ table.name ] = table
log.debug( "Loaded tool data table '%s'", table.name )
else:
- for table_row in table.data:
- # FIXME: This does not account for an entry with the same unique build ID, but a different path.
- if table_row not in self.data_tables[ table.name ].data:
- self.data_tables[ table.name ].data.append( table_row )
+ log.debug( "Loading another instance of data table '%s', attempting to merge content.", table.name )
+ self.data_tables[ table.name ].merge_tool_data_table( table, allow_duplicates=False ) #only merge content, do not persist to disk, do not allow duplicate rows when merging
+ # FIXME: This does not account for an entry with the same unique build ID, but a different path.
return table_elems
def add_new_entries_from_config_file( self, config_filename, tool_data_path, shed_tool_data_table_config, persist=False ):
@@ -100,84 +91,98 @@
"""
error_message = ''
- table_elems = []
try:
- tree = util.parse_xml( config_filename )
- root = tree.getroot()
+ table_elems = self.load_from_config_file( config_filename=config_filename,
+ tool_data_path=tool_data_path,
+ from_shed_config=True )
except Exception, e:
error_message = 'Error attempting to parse file %s: %s' % ( str( os.path.split( config_filename )[ 1 ] ), str( e ) )
log.debug( error_message )
- return table_elems, error_message
- # Make a copy of the current list of data_table_elem_names so we can persist later if changes to the config file are necessary.
- original_data_table_elem_names = [ name for name in self.data_table_elem_names ]
- if root.tag == 'tables':
- table_elems = self.load_from_config_file( config_filename=config_filename,
- tool_data_path=tool_data_path,
- from_shed_config=True )
- else:
- type = root.get( 'type', 'tabular' )
- assert type in tool_data_table_types, "Unknown data table type '%s'" % type
- table_elems.append( root )
- table_elem_name = root.get( 'name', None )
- if table_elem_name and table_elem_name not in self.data_table_elem_names:
- self.data_table_elem_names.append( table_elem_name )
- self.shed_data_table_elems.append( root )
- table = tool_data_table_types[ type ]( root, tool_data_path )
- if table.name not in self.data_tables:
- self.data_tables[ table.name ] = table
- log.debug( "Added new tool data table '%s'", table.name )
- if persist and self.data_table_elem_names != original_data_table_elem_names:
+ table_elems = []
+ if persist:
# Persist Galaxy's version of the changed tool_data_table_conf.xml file.
- self.to_xml_file( shed_tool_data_table_config )
+ self.to_xml_file( shed_tool_data_table_config, table_elems )
return table_elems, error_message
- def to_xml_file( self, shed_tool_data_table_config ):
- """Write the current in-memory version of the shed_tool_data_table_conf.xml file to disk."""
+ def to_xml_file( self, shed_tool_data_table_config, new_elems=None, remove_elems=None ):
+ """
+ Write the current in-memory version of the shed_tool_data_table_conf.xml file to disk.
+ remove_elems are removed before new_elems are added.
+ """
+ if not ( new_elems or remove_elems ):
+ log.debug( 'ToolDataTableManager.to_xml_file called without any elements to add or remove.' )
+ return #no changes provided, no need to persist any changes
+ if not new_elems:
+ new_elems = []
+ if not remove_elems:
+ remove_elems = []
full_path = os.path.abspath( shed_tool_data_table_config )
- fd, filename = tempfile.mkstemp()
- os.write( fd, '<?xml version="1.0"?>\n' )
- os.write( fd, '<tables>\n' )
- for elem in self.shed_data_table_elems:
- os.write( fd, '%s' % util.xml_to_string( elem ) )
- os.write( fd, '</tables>\n' )
- os.close( fd )
- shutil.move( filename, full_path )
+ #FIXME: we should lock changing this file by other threads / head nodes
+ try:
+ tree = util.parse_xml( full_path )
+ root = tree.getroot()
+ out_elems = [ elem for elem in root ]
+ except Exception, e:
+ out_elems = []
+ log.debug( 'Could not parse existing tool data table config, assume no existing elements: %s', e )
+ for elem in remove_elems:
+ #handle multiple occurrences of remove elem in existing elems
+ while elem in out_elems:
+ remove_elems.remove( elem )
+ #add new elems
+ out_elems.extend( new_elems )
+ with open( full_path, 'wb' ) as out:
+ out.write( '<?xml version="1.0"?>\n<tables>\n' )
+ for elem in out_elems:
+ out.write( util.xml_to_string( elem ) )
+ out.write( '</tables>\n' )
os.chmod( full_path, 0644 )
+class ToolDataTable( object ):
-class ToolDataTable( object ):
+ @classmethod
+ def from_elem( cls, table_elem, tool_data_path, from_shed_config ):
+ table_type = table_elem.get( 'type', 'tabular' )
+ assert table_type in tool_data_table_types, "Unknown data table type '%s'" % type
+ return tool_data_table_types[ table_type ]( table_elem, tool_data_path, from_shed_config=from_shed_config )
def __init__( self, config_element, tool_data_path, from_shed_config = False):
self.name = config_element.get( 'name' )
self.comment_char = config_element.get( 'comment_char' )
self.empty_field_value = config_element.get( 'empty_field_value', '' )
self.empty_field_values = {}
- for file_elem in config_element.findall( 'file' ):
- # There should only be one file_elem.
- if 'path' in file_elem.attrib:
- tool_data_file_path = file_elem.get( 'path' )
- self.tool_data_file = os.path.split( tool_data_file_path )[1]
- else:
- self.tool_data_file = None
+ self.filenames = odict()
self.tool_data_path = tool_data_path
self.missing_index_file = None
# increment this variable any time a new entry is added, or when the table is totally reloaded
# This value has no external meaning, and does not represent an abstract version of the underlying data
self._loaded_content_version = 1
-
+
+ def _update_version( self ):
+ self._loaded_content_version += 1
+ return self._loaded_content_version
+
def get_empty_field_by_name( self, name ):
return self.empty_field_values.get( name, self.empty_field_value )
- def _add_entry( self, entry, persist=False, persist_on_error=False, **kwd ):
+ def _add_entry( self, entry, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
raise NotImplementedError( "Abstract method" )
- def add_entry( self, entry, persist=False, persist_on_error=False, **kwd ):
- self._add_entry( entry, persist=persist, persist_on_error=persist_on_error, **kwd )
- self._loaded_content_version += 1
+ def add_entry( self, entry, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
+ self._add_entry( entry, allow_duplicates=allow_duplicates, persist=persist, persist_on_error=persist_on_error, entry_source=entry_source, **kwd )
+ return self._update_version()
+
+ def add_entries( self, entries, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
+ if entries:
+ for entry in entries:
+ self.add_entry( entry, allow_duplicates=allow_duplicates, persist=persist, persist_on_error=persist_on_error, entry_source=entry_source, **kwd )
return self._loaded_content_version
def is_current_version( self, other_version ):
return self._loaded_content_version == other_version
+
+ def merge_tool_data_table( self, other_table, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
+ raise NotImplementedError( "Abstract method" )
class TabularToolDataTable( ToolDataTable ):
"""
@@ -196,6 +201,7 @@
def __init__( self, config_element, tool_data_path, from_shed_config = False):
super( TabularToolDataTable, self ).__init__( config_element, tool_data_path, from_shed_config)
+ self.data = []
self.configure_and_load( config_element, tool_data_path, from_shed_config)
def configure_and_load( self, config_element, tool_data_path, from_shed_config = False):
@@ -206,24 +212,37 @@
self.comment_char = config_element.get( 'comment_char', '#' )
# Configure columns
self.parse_column_spec( config_element )
+
+ #store repo info if available:
+ repo_elem = config_element.find( 'tool_shed_repository' )
+ if repo_elem is not None:
+ repo_info = dict( tool_shed=repo_elem.find( 'tool_shed' ).text, name=repo_elem.find( 'repository_name' ).text,
+ owner=repo_elem.find( 'repository_owner' ).text, installed_changeset_revision=repo_elem.find( 'installed_changeset_revision' ).text )
+ else:
+ repo_info = None
# Read every file
- all_rows = []
for file_element in config_element.findall( 'file' ):
+ filename = file_path = file_element.get( 'path', None )
found = False
+ if file_path is None:
+ log.debug( "Encountered a file element (%s) that does not contain a path value when loading tool data table '%s'.", util.xml_to_string( file_element ), self.name )
+ continue
+
+ #FIXME: splitting on and merging paths from a configuration file when loading is wonky
+ # Data should exist on disk in the state needed, i.e. the xml configuration should
+ # point directly to the desired file to load. Munging of the tool_data_tables_conf.xml.sample
+ # can be done during installing / testing / metadata resetting with the creation of a proper
+ # tool_data_tables_conf.xml file, containing correct <file path=> attributes. Allowing a
+ # path.join with a different root should be allowed, but splitting should not be necessary.
if tool_data_path and from_shed_config:
# Must identify with from_shed_config as well, because the
# regular galaxy app has and uses tool_data_path.
# We're loading a tool in the tool shed, so we cannot use the Galaxy tool-data
# directory which is hard-coded into the tool_data_table_conf.xml entries.
- filepath = file_element.get( 'path' )
- filename = os.path.split( filepath )[ 1 ]
+ filename = os.path.split( file_path )[ 1 ]
filename = os.path.join( tool_data_path, filename )
- else:
- filename = file_element.get( 'path' )
if os.path.exists( filename ):
found = True
- all_rows.extend( self.parse_file_fields( open( filename ) ) )
- self.filename = filename
else:
# Since the path attribute can include a hard-coded path to a specific directory
# (e.g., <file path="tool-data/cg_crr_files.loc" />) which may not be the same value
@@ -233,14 +252,32 @@
if file_path and file_path != self.tool_data_path:
corrected_filename = os.path.join( self.tool_data_path, file_name )
if os.path.exists( corrected_filename ):
+ filename = corrected_filename
found = True
- all_rows.extend( self.parse_file_fields( open( corrected_filename ) ) )
- self.filename = corrected_filename
- if not found:
+
+ if found:
+ self.data.extend( self.parse_file_fields( open( filename ) ) )
+ self._update_version()
+ else:
self.missing_index_file = filename
log.warn( "Cannot find index file '%s' for tool data table '%s'" % ( filename, self.name ) )
- self.data = all_rows
-
+
+ if filename not in self.filenames or not self.filenames[ filename ][ 'found' ]:
+ self.filenames[ filename ] = dict( found=found, filename=filename, from_shed_config=from_shed_config, tool_data_path=tool_data_path,
+ config_element=config_element, tool_shed_repository=repo_info )
+ else:
+ log.debug( "Filename '%s' already exists in filenames (%s), not adding", filename, self.filenames.keys() )
+
+
+ def merge_tool_data_table( self, other_table, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
+ assert self.columns == other_table.columns, "Merging tabular data tables with non matching columns is not allowed: %s:%s != %s:%s" % ( self.name, self.columns, other_table.name, other_table.columns )
+ #merge filename info
+ for filename, info in other_table.filenames.iteritems():
+ if filename not in self.filenames:
+ self.filenames[ filename ] = info
+ #add data entries and return current data table version
+ return self.add_entries( other_table.data, allow_duplicates=allow_duplicates, persist=persist, persist_on_error=persist_on_error, entry_source=entry_source, **kwd )
+
def handle_found_index_file( self, filename ):
self.missing_index_file = None
self.data.extend( self.parse_file_fields( open( filename ) ) )
@@ -341,7 +378,7 @@
break
return rval
- def _add_entry( self, entry, persist=False, persist_on_error=False, **kwd ):
+ def _add_entry( self, entry, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
#accepts dict or list of columns
if isinstance( entry, dict ):
fields = []
@@ -354,28 +391,53 @@
fields.append( field_value )
else:
fields = entry
+ is_error = False
if self.largest_index < len( fields ):
fields = self._replace_field_separators( fields )
- self.data.append( fields )
- field_len_error = False
+ if fields not in self.data or allow_duplicates:
+ self.data.append( fields )
+ else:
+ log.error( "Attempted to add fields (%s) to data table '%s', but this entry already exists and allow_duplicates is False.", fields, self.name )
+ is_error = True
else:
log.error( "Attempted to add fields (%s) to data table '%s', but there were not enough fields specified ( %i < %i ).", fields, self.name, len( fields ), self.largest_index + 1 )
- field_len_error = True
- if persist and ( not field_len_error or persist_on_error ):
- #FIXME: Need to lock these files for editing
- try:
- data_table_fh = open( self.filename, 'r+b' )
- except IOError, e:
- log.warning( 'Error opening data table file (%s) with r+b, assuming file does not exist and will open as wb: %s', self.filename, e )
- data_table_fh = open( self.filename, 'wb' )
- if os.stat( self.filename )[6] != 0:
- # ensure last existing line ends with new line
- data_table_fh.seek( -1, 2 ) #last char in file
- last_char = data_table_fh.read( 1 )
- if last_char not in [ '\n', '\r' ]:
- data_table_fh.write( '\n' )
- data_table_fh.write( "%s\n" % ( self.separator.join( fields ) ) )
- return not field_len_error
+ is_error = True
+ filename = None
+
+ if persist and ( not is_error or persist_on_error ):
+ if entry_source:
+ #if dict, assume is compatible info dict, otherwise call method
+ if isinstance( entry_source, dict ):
+ source_repo_info = entry_source
+ else:
+ source_repo_info = entry_source.get_tool_shed_repository_info_dict()
+ else:
+ source_repo_info = None
+ for name, value in self.filenames.iteritems():
+ repo_info = value.get( 'tool_shed_repository', None )
+ if ( not source_repo_info and not repo_info ) or ( source_repo_info and repo_info and source_repo_info == repo_info ):
+ filename = name
+ break
+ if filename is None:
+ #should we default to using any filename here instead?
+ log.error( "Unable to determine filename for persisting data table '%s' values: '%s'.", self.name, fields )
+ is_error = True
+ else:
+ #FIXME: Need to lock these files for editing
+ log.debug( "Persisting changes to file: %s", filename )
+ try:
+ data_table_fh = open( filename, 'r+b' )
+ except IOError, e:
+ log.warning( 'Error opening data table file (%s) with r+b, assuming file does not exist and will open as wb: %s', self.filename, e )
+ data_table_fh = open( filename, 'wb' )
+ if os.stat( filename )[6] != 0:
+ # ensure last existing line ends with new line
+ data_table_fh.seek( -1, 2 ) #last char in file
+ last_char = data_table_fh.read( 1 )
+ if last_char not in [ '\n', '\r' ]:
+ data_table_fh.write( '\n' )
+ data_table_fh.write( "%s\n" % ( self.separator.join( fields ) ) )
+ return not is_error
def _replace_field_separators( self, fields, separator=None, replace=None, comment_char=None ):
#make sure none of the fields contain separator
diff -r 1c7ca04c86393c3f45df48514ac153cb64b4695e -r 6ae2d6a466b8f721aa39d3e183b36594d7b4e235 lib/galaxy/tools/data_manager/manager.py
--- a/lib/galaxy/tools/data_manager/manager.py
+++ b/lib/galaxy/tools/data_manager/manager.py
@@ -106,6 +106,7 @@
self.output_ref_by_data_table = {}
self.move_by_data_table_column = {}
self.value_translation_by_data_table_column = {}
+ self.tool_shed_repository_info_dict = None
if elem is not None:
self.load_from_element( elem, tool_path or self.data_managers.tool_path )
def load_from_element( self, elem, tool_path ):
@@ -126,6 +127,9 @@
repository_name = tool_elem.find( 'repository_name' ).text
repository_owner = tool_elem.find( 'repository_owner' ).text
installed_changeset_revision = tool_elem.find( 'installed_changeset_revision' ).text
+ #save repository info here
+ self.tool_shed_repository_info_dict = dict( tool_shed=tool_shed, name=repository_name, owner=repository_owner, installed_changeset_revision=installed_changeset_revision )
+ #get tool_shed repo id
tool_shed_repository = suc.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.data_managers.app, tool_shed, repository_name, repository_owner, installed_changeset_revision )
tool_shed_repository_id = self.data_managers.app.security.encode_id( tool_shed_repository.id )
#use shed_conf_file to determine tool_path
@@ -241,7 +245,7 @@
if name in output_ref_values:
moved = self.process_move( data_table_name, name, output_ref_values[ name ].extra_files_path, **data_table_value )
data_table_value[ name ] = self.process_value_translation( data_table_name, name, **data_table_value )
- data_table.add_entry( data_table_value, persist=True )
+ data_table.add_entry( data_table_value, persist=True, entry_source=self )
for data_table_name, data_table_values in data_tables_dict.iteritems():
#tool returned extra data table entries, but data table was not declared in data manager
@@ -289,3 +293,6 @@
value_translation = self.value_translation_by_data_table_column[ data_table_name ][ column_name ]
value = fill_template( value_translation, GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path, **kwd )
return value
+
+ def get_tool_shed_repository_info_dict( self ):
+ return self.tool_shed_repository_info_dict
diff -r 1c7ca04c86393c3f45df48514ac153cb64b4695e -r 6ae2d6a466b8f721aa39d3e183b36594d7b4e235 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -137,8 +137,60 @@
def xml_to_string( elem, pretty=False ):
"""Returns a string from an xml tree"""
if pretty:
- return ElementTree.tostring( pretty_print_xml( elem ) )
- return ElementTree.tostring( elem )
+ elem = pretty_print_xml( elem )
+ try:
+ return ElementTree.tostring( elem )
+ except TypeError, e:
+ #assume this is a comment
+ if hasattr( elem, 'text' ):
+ return "<!-- %s -->\n" % ( elem.text )
+ else:
+ raise e
+
+def xml_element_compare( elem1, elem2 ):
+ if not isinstance( elem1, dict ):
+ elem1 = xml_element_to_dict( elem1 )
+ if not isinstance( elem2, dict ):
+ elem2 = xml_element_to_dict( elem2 )
+ return elem1 == elem2
+
+def xml_element_list_compare( elem_list1, elem_list2 ):
+ return [ xml_element_to_dict( elem ) for elem in elem_list1 ] == [ xml_element_to_dict( elem ) for elem in elem_list2 ]
+
+def xml_element_to_dict( elem ):
+ rval = {}
+ if elem.attrib:
+ rval[ elem.tag ] = {}
+ else:
+ rval[ elem.tag ] = None
+
+ sub_elems = list( elem )
+ if sub_elems:
+ sub_elem_dict = dict()
+ for sub_sub_elem_dict in map( xml_element_to_dict, sub_elems ):
+ for key, value in sub_sub_elem_dict.iteritems():
+ if key not in sub_elem_dict:
+ sub_elem_dict[ key ] = []
+ sub_elem_dict[ key ].append( value )
+ for key, value in sub_elem_dict.iteritems():
+ if len( value ) == 1:
+ rval[ elem.tag ][ k ] = value[0]
+ else:
+ rval[ elem.tag ][ k ] = value
+ if elem.attrib:
+ for key, value in elem.attrib.iteritems():
+ rval[ elem.tag ][ "@%s" % key ] = value
+
+ if elem.text:
+ text = elem.text.strip()
+ if text and sub_elems or elem.attrib:
+ rval[ elem.tag ][ '#text' ] = text
+ else:
+ rval[ elem.tag ] = text
+
+ return rval
+
+
def pretty_print_xml( elem, level=0 ):
pad = ' '
@@ -287,7 +339,6 @@
elif isinstance( value, list ):
return map(sanitize_text, value)
else:
- print value
raise Exception, 'Unknown parameter type (%s)' % ( type( value ) )
valid_filename_chars = set( string.ascii_letters + string.digits + '_.' )
diff -r 1c7ca04c86393c3f45df48514ac153cb64b4695e -r 6ae2d6a466b8f721aa39d3e183b36594d7b4e235 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -283,6 +283,12 @@
trans.sa_session.flush()
if 'tool_dependencies' in metadata_dict and not reinstalling:
tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
+ if 'sample_files' in metadata_dict:
+ sample_files = metadata_dict.get( 'sample_files', [] )
+ tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
+ tool_data_table_conf_filename, tool_data_table_elems = tool_util.install_tool_data_tables( trans.app, tool_shed_repository, tool_index_sample_files )
+ if tool_data_table_elems:
+ trans.app.tool_data_tables.add_new_entries_from_config_file( tool_data_table_conf_filename, None, trans.app.config.shed_tool_data_table_config, persist=True )
if 'tools' in metadata_dict:
tool_panel_dict = tool_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
sample_files = metadata_dict.get( 'sample_files', [] )
@@ -483,7 +489,7 @@
message += "from the installed repository's <b>Repository Actions</b> menu. "
status = 'error'
if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
- work_dir = tempfile.mkdtemp()
+ work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itsr" )
# Install tool dependencies.
suc.update_tool_shed_repository_status( trans.app,
tool_shed_repository,
diff -r 1c7ca04c86393c3f45df48514ac153cb64b4695e -r 6ae2d6a466b8f721aa39d3e183b36594d7b4e235 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -571,12 +571,12 @@
app.config.tool_data_table_config_path = repository_files_dir
else:
# Use a temporary working directory to copy all sample files.
- work_dir = tempfile.mkdtemp()
+ work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-gmfcr" )
# All other files are on disk in the repository's repo_path, which is the value of relative_install_dir.
files_dir = relative_install_dir
if shed_config_dict.get( 'tool_path' ):
files_dir = os.path.join( shed_config_dict[ 'tool_path' ], files_dir )
- app.config.tool_data_path = work_dir
+ app.config.tool_data_path = work_dir #FIXME: Thread safe?
app.config.tool_data_table_config_path = work_dir
# Handle proprietary datatypes, if any.
datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir )
@@ -598,7 +598,7 @@
new_table_elems, error_message = app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
tool_data_path=app.config.tool_data_path,
shed_tool_data_table_config=app.config.shed_tool_data_table_config,
- persist=persist )
+ persist=False )
if error_message:
invalid_file_tups.append( ( filename, error_message ) )
for root, dirs, files in os.walk( files_dir ):
@@ -1584,7 +1584,7 @@
invalid_file_tups = []
home_dir = os.getcwd()
for changeset in repo.changelog:
- work_dir = tempfile.mkdtemp()
+ work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-ramorits" )
current_changeset_revision = str( repo.changectx( changeset ) )
ctx = repo.changectx( changeset )
log.debug( "Cloning repository changeset revision: %s", str( ctx.rev() ) )
diff -r 1c7ca04c86393c3f45df48514ac153cb64b4695e -r 6ae2d6a466b8f721aa39d3e183b36594d7b4e235 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -189,7 +189,7 @@
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
"""Persist the current in-memory list of config_elems to a file named by the value of config_filename."""
- fd, filename = tempfile.mkstemp()
+ fd, filename = tempfile.mkstemp( prefix="tmp-toolshed-cetxf" )
os.write( fd, '<?xml version="1.0"?>\n' )
os.write( fd, '<toolbox tool_path="%s">\n' % str( tool_path ) )
for elem in config_elems:
@@ -294,6 +294,32 @@
# Don't include the changeset_revision in clone urls.
return url_join( toolshed, 'repos', owner, name )
+def generate_repository_info_elem( tool_shed, repository_name, changeset_revision, owner, parent_elem=None, **kwd ):
+ """Create and return an ElementTree repository info Element."""
+ if parent_elem is None:
+ elem = XmlET.Element( 'tool_shed_repository' )
+ else:
+ elem = XmlET.SubElement( parent_elem, 'tool_shed_repository' )
+
+ tool_shed_elem = XmlET.SubElement( elem, 'tool_shed' )
+ tool_shed_elem.text = tool_shed
+ repository_name_elem = XmlET.SubElement( elem, 'repository_name' )
+ repository_name_elem.text = repository_name
+ repository_owner_elem = XmlET.SubElement( elem, 'repository_owner' )
+ repository_owner_elem.text = owner
+ changeset_revision_elem = XmlET.SubElement( elem, 'installed_changeset_revision' )
+ changeset_revision_elem.text = changeset_revision
+ #add additional values
+ #TODO: enhance additional values to allow e.g. use of dict values that will recurse
+ for key, value in kwd.iteritems():
+ new_elem = XmlET.SubElement( elem, key )
+ new_elem.text = value
+ return elem
+
+def generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=None, **kwd ):
+ return generate_repository_info_elem( tool_shed_repository.tool_shed, tool_shed_repository.name, tool_shed_repository.installed_changeset_revision, tool_shed_repository.owner, parent_elem=parent_elem, **kwd )
+
+
def generate_sharable_link_for_repository_in_tool_shed( trans, repository, changeset_revision=None ):
"""Generate the URL for sharing a repository that is in the tool shed."""
base_url = url_for( '/', qualified=True ).rstrip( '/' )
@@ -546,7 +572,7 @@
fctx = None
continue
if fctx:
- fh = tempfile.NamedTemporaryFile( 'wb', dir=dir )
+ fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-gntfc", dir=dir )
tmp_filename = fh.name
fh.close()
fh = open( tmp_filename, 'wb' )
diff -r 1c7ca04c86393c3f45df48514ac153cb64b4695e -r 6ae2d6a466b8f721aa39d3e183b36594d7b4e235 lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -416,7 +416,7 @@
# The ctx_file may have been moved in the change set. For example, 'ncbi_blastp_wrapper.xml' was moved to
# 'tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml', so keep looking for the file until we find the new location.
continue
- fh = tempfile.NamedTemporaryFile( 'wb' )
+ fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-gltcrfrm" )
tmp_filename = fh.name
fh.close()
fh = open( tmp_filename, 'wb' )
@@ -538,8 +538,7 @@
# The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository.
sample_tool_data_table_conf = suc.get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir )
if sample_tool_data_table_conf:
- # Add entries to the ToolDataTableManager's in-memory data_tables dictionary as well as the list of data_table_elems and the list of
- # data_table_elem_names.
+ # Add entries to the ToolDataTableManager's in-memory data_tables dictionary.
error, message = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True )
if error:
# TODO: Do more here than logging an exception.
@@ -706,6 +705,60 @@
sa_session.add( tool_version_association )
sa_session.flush()
+def install_tool_data_tables( app, tool_shed_repository, tool_index_sample_files ):
+ """Only ever called from the Galaxy side, when installing a repository."""
+ TOOL_DATA_TABLE_FILE_NAME = 'tool_data_table_conf.xml'
+ TOOL_DATA_TABLE_FILE_SAMPLE_NAME = '%s.sample' % ( TOOL_DATA_TABLE_FILE_NAME )
+ SAMPLE_SUFFIX = '.sample'
+ SAMPLE_SUFFIX_OFFSET = -len( SAMPLE_SUFFIX )
+ tool_path, relative_target_dir = tool_shed_repository.get_tool_relative_path( app )
+ target_dir = os.path.join( app.config.shed_tool_data_path, relative_target_dir ) #this is where index files will reside on a per repo/installed version
+ if not os.path.exists( target_dir ):
+ os.makedirs( target_dir )
+ for sample_file in tool_index_sample_files:
+ path, filename = os.path.split ( sample_file )
+ target_filename = filename
+ if target_filename.endswith( SAMPLE_SUFFIX ):
+ target_filename = target_filename[ : SAMPLE_SUFFIX_OFFSET ]
+ source_file = os.path.join( tool_path, sample_file )
+ #we're not currently uninstalling index files, do not overwrite existing files
+ target_path_filename = os.path.join( target_dir, target_filename )
+ if not os.path.exists( target_path_filename ) or target_filename == TOOL_DATA_TABLE_FILE_NAME:
+ shutil.copy2( source_file, target_path_filename )
+ else:
+ log.debug( "Did not copy sample file '%s' to install directory '%s' because file already exists.", filename, target_dir )
+ #for provenance and to simplify introspection, let's keep the original data table sample file around
+ if filename == TOOL_DATA_TABLE_FILE_SAMPLE_NAME:
+ shutil.copy2( source_file, os.path.join( target_dir, filename ) )
+ tool_data_table_conf_filename = os.path.join( target_dir, TOOL_DATA_TABLE_FILE_NAME )
+ elems = []
+ if os.path.exists( tool_data_table_conf_filename ):
+ tree, error_message = xml_util.parse_xml( tool_data_table_conf_filename )
+ if tree:
+ for elem in tree.getroot():
+ #append individual table elems or other elems, but not tables elems
+ if elem.tag == 'tables':
+ for table_elem in elems:
+ elems.append( elem )
+ else:
+ elems.append( elem )
+ else:
+ log.debug( "The '%s' data table file was not found, but was expected to be copied from '%s' during repository installation.", tool_data_table_conf_filename, TOOL_DATA_TABLE_FILE_SAMPLE_NAME )
+ for elem in elems:
+ if elem.tag == 'table':
+ for file_elem in elem.findall( 'file' ):
+ path = file_elem.get( 'path', None )
+ if path:
+ file_elem.set( 'path', os.path.normpath( os.path.join( target_dir, os.path.split( path )[1] ) ) )
+ #store repository info in the table tagset for traceability
+ repo_elem = suc.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=elem )
+ if elems:
+ os.unlink( tool_data_table_conf_filename ) #remove old data_table
+ app.tool_data_tables.to_xml_file( tool_data_table_conf_filename, elems ) #persist new data_table content
+
+ return tool_data_table_conf_filename, elems
+
+
def is_column_based( fname, sep='\t', skip=0, is_multi_byte=False ):
"""See if the file is column based with respect to a separator."""
headers = get_headers( fname, sep, is_multi_byte=is_multi_byte )
@@ -763,7 +816,7 @@
tool = None
can_use_disk_file = False
tool_config_filepath = suc.get_absolute_path_to_file_in_repository( repo_files_dir, tool_config_filename )
- work_dir = tempfile.mkdtemp()
+ work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-ltfcr" )
can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, tool_config_filepath, changeset_revision )
if can_use_disk_file:
trans.app.config.tool_data_path = work_dir
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Add traceable prefixes to various tempfile creation.
by commits-noreply@bitbucket.org 03 Jun '13
by commits-noreply@bitbucket.org 03 Jun '13
03 Jun '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1c7ca04c8639/
Changeset: 1c7ca04c8639
User: dan
Date: 2013-06-03 22:30:51
Summary: Add traceable prefixes to various tempfile creation.
Affected #: 6 files
diff -r 8cbb23ed305e6803da733eb7be74bab779ee9ea4 -r 1c7ca04c86393c3f45df48514ac153cb64b4695e lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -268,7 +268,7 @@
tool_shed_repository.includes_datatypes = True
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
- work_dir = tempfile.mkdtemp()
+ work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-hrc" )
datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
# after this installation completes.
diff -r 8cbb23ed305e6803da733eb7be74bab779ee9ea4 -r 1c7ca04c86393c3f45df48514ac153cb64b4695e lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -345,7 +345,7 @@
@contextmanager
def make_tmp_dir():
- work_dir = tempfile.mkdtemp()
+ work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-mtd" )
yield work_dir
if os.path.exists( work_dir ):
local( 'rm -rf %s' % work_dir )
diff -r 8cbb23ed305e6803da733eb7be74bab779ee9ea4 -r 1c7ca04c86393c3f45df48514ac153cb64b4695e lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -27,7 +27,7 @@
text = cu.tool_shed_get( app, tool_shed_url, url )
if text:
# Write the contents to a temporary file on disk so it can be reloaded and parsed.
- fh = tempfile.NamedTemporaryFile( 'wb' )
+ fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-cttdc" )
tmp_filename = fh.name
fh.close()
fh = open( tmp_filename, 'wb' )
@@ -595,7 +595,7 @@
else:
env[ 'PYTHONPATH' ] = os.path.abspath( os.path.join( app.config.root, 'lib' ) )
message = ''
- tmp_name = tempfile.NamedTemporaryFile().name
+ tmp_name = tempfile.NamedTemporaryFile( prefix="tmp-toolshed-rs" ).name
tmp_stderr = open( tmp_name, 'wb' )
proc = subprocess.Popen( cmd, shell=True, env=env, stderr=tmp_stderr.fileno() )
returncode = proc.wait()
diff -r 8cbb23ed305e6803da733eb7be74bab779ee9ea4 -r 1c7ca04c86393c3f45df48514ac153cb64b4695e lib/tool_shed/scripts/check_repositories_for_functional_tests.py
--- a/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
+++ b/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
@@ -248,7 +248,7 @@
# Clone the repository up to the changeset revision we're checking.
repo_dir = metadata_record.repository.repo_path( app )
repo = hg.repository( get_configured_ui(), repo_dir )
- work_dir = tempfile.mkdtemp()
+ work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-cafr" )
cloned_ok, error_message = clone_repository( repo_dir, work_dir, changeset_revision )
if cloned_ok:
# Iterate through all the directories in the cloned changeset revision and determine whether there's a
diff -r 8cbb23ed305e6803da733eb7be74bab779ee9ea4 -r 1c7ca04c86393c3f45df48514ac153cb64b4695e lib/tool_shed/util/datatype_util.py
--- a/lib/tool_shed/util/datatype_util.py
+++ b/lib/tool_shed/util/datatype_util.py
@@ -70,7 +70,7 @@
elem.attrib[ 'proprietary_path' ] = os.path.abspath( datatype_file_name_path )
elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module
# Temporarily persist the proprietary datatypes configuration file so it can be loaded into the datatypes registry.
- fd, proprietary_datatypes_config = tempfile.mkstemp()
+ fd, proprietary_datatypes_config = tempfile.mkstemp( prefix="tmp-toolshed-acalpd" )
os.write( fd, '<?xml version="1.0"?>\n' )
os.write( fd, '<datatypes>\n' )
os.write( fd, '%s' % xml_util.xml_to_string( registration ) )
diff -r 8cbb23ed305e6803da733eb7be74bab779ee9ea4 -r 1c7ca04c86393c3f45df48514ac153cb64b4695e lib/tool_shed/util/xml_util.py
--- a/lib/tool_shed/util/xml_util.py
+++ b/lib/tool_shed/util/xml_util.py
@@ -32,7 +32,7 @@
def create_and_write_tmp_file( elem ):
tmp_str = xml_to_string( elem )
- fh = tempfile.NamedTemporaryFile( 'wb' )
+ fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-cawrf" )
tmp_filename = fh.name
fh.close()
fh = open( tmp_filename, 'wb' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: natefoo: Update tag for stable_2013.06.03
by commits-noreply@bitbucket.org 03 Jun '13
by commits-noreply@bitbucket.org 03 Jun '13
03 Jun '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/cea3ddf6cdda/
Changeset: cea3ddf6cdda
Branch: stable
User: natefoo
Date: 2013-06-03 22:17:31
Summary: Update tag for stable_2013.06.03
Affected #: 1 file
diff -r 524f246ca85395082719ae7a6ff72260d7ad5612 -r cea3ddf6cddaac2f8703598307449ffc13240efc .hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -2,4 +2,4 @@
1c717491139269651bb59687563da9410b84c65d release_2013.02.08
75f09617abaadbc8cc732bb8ee519decaeb56ea7 release_2013.04.01
2cc8d10988e03257dc7b97f8bb332c7df745d1dd security_2013.04.08
-58811a78af8a09b77405dc343a5b2795f4cc6e88 release_2013.06.03
+524f246ca85395082719ae7a6ff72260d7ad5612 release_2013.06.03
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Cleanup after merge conflict.
by commits-noreply@bitbucket.org 03 Jun '13
by commits-noreply@bitbucket.org 03 Jun '13
03 Jun '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/524f246ca853/
Changeset: 524f246ca853
Branch: stable
User: Dave Bouvier
Date: 2013-06-03 22:12:24
Summary: Cleanup after merge conflict.
Affected #: 1 file
diff -r 776ac26512144a65593ab999dafa2217bd2c4153 -r 524f246ca85395082719ae7a6ff72260d7ad5612 templates/webapps/tool_shed/index.mako
--- a/templates/webapps/tool_shed/index.mako
+++ b/templates/webapps/tool_shed/index.mako
@@ -92,14 +92,11 @@
<a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_categories' )}">Browse by category</a></div>
%if trans.user:
- <div class="toolSectionPad"></div>
- <div class="toolSectionTitle">
- Repositories I Can Change
- </div>
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_i_own' )}">Repositories I own</a>
- </div>
- %if has_reviewed_repositories:
+ %if trans.user.active_repositories:
+ <div class="toolSectionPad"></div>
+ <div class="toolSectionTitle">
+ Repositories I Can Change
+ </div><div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_i_own' )}">Repositories I own</a></div>
@@ -138,32 +135,6 @@
<a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_my_writable_repositories_with_invalid_tools' )}">Latest revision: invalid tools</a></div>
%endif
- %if has_deprecated_repositories:
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_deprecated_repositories_i_own' )}">Deprecated repositories I own</a>
- </div>
- %endif
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_my_writable_repositories' )}">My writable repositories</a>
- </div>
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_my_writable_repositories_missing_tool_test_components' )}">Latest revision: missing tool tests</a>
- </div>
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_my_writable_repositories_with_install_errors' )}">Latest revision: installation errors</a>
- </div>
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_my_writable_repositories_with_failing_tool_tests' )}">Latest revision: failing tool tests</a>
- </div>
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_my_writable_repositories_with_skip_tool_test_checked' )}">Latest revision: skip tool tests</a>
- </div>
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_my_writable_repositories_with_no_failing_tool_tests' )}">Latest revision: all tool tests pass</a>
- </div>
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_my_writable_repositories_with_invalid_tools' )}">Latest revision: invalid tools</a>
- </div><div class="toolSectionPad"></div><div class="toolSectionTitle">
Available Actions
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7740b1dc41fa/
Changeset: 7740b1dc41fa
Branch: next-stable
User: natefoo
Date: 2013-06-03 21:43:31
Summary: Close the next-stable branch.
Affected #: 0 files
https://bitbucket.org/galaxy/galaxy-central/commits/58811a78af8a/
Changeset: 58811a78af8a
Branch: stable
User: natefoo
Date: 2013-06-03 21:44:31
Summary: Merge changes from next-stable to stable.
Affected #: 626 files
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -20,6 +20,7 @@
database/pbs
database/tmp
database/*.sqlite
+database/openid_consumer_cache
# Python bytecode
*.pyc
@@ -35,6 +36,11 @@
tool_shed_webapp.pid
hgweb.config*
+# Reports Runtime Files
+reports_webapp.lock
+reports_webapp.log
+reports_webapp.pid
+
# Config files
universe_wsgi.ini
reports_wsgi.ini
@@ -54,7 +60,7 @@
job_conf.xml
data_manager_conf.xml
shed_data_manager_conf.xml
-
+visualizations_conf.xml
static/welcome.html.*
static/welcome.html
@@ -75,6 +81,7 @@
# Test output
run_functional_tests.html
+test/tool_shed/tmp/*
# Project files
*.kpf
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 buildbot_setup.sh
--- a/buildbot_setup.sh
+++ b/buildbot_setup.sh
@@ -93,26 +93,49 @@
JARS="/galaxy/software/jars"
-for link in $LINKS; do
- echo "Linking $link"
- rm -f tool-data/`basename $link`
- ln -sf $link tool-data
-done
-
-if [ -d "$HYPHY" ]; then
- echo "Linking $HYPHY"
- rm -f tool-data/HYPHY
- ln -sf $HYPHY tool-data/HYPHY
+if [ ! $1 ]; then
+ type="standard"
+elif [ $1 == "-ec2" ]; then
+ type="external-ec2"
+else
+ type="unknown"
fi
-if [ -d "$JARS" ]; then
- echo "Linking $JARS"
- rm -f tool-data/shared/jars
- ln -sf $JARS tool-data/shared/jars
-fi
+case $type in
+ external*)
+ echo "Running standalone buildbot setup..."
+ for sample in tool-data/*.sample; do
+ basename=${sample%.sample}
+ if [ ! -f $basename ]; then
+ echo "Copying $sample to $basename"
+ cp "$sample" "$basename"
+ fi
+ done
+ ;;
+ *)
+ echo "Running standard buildbot setup..."
+ for link in $LINKS; do
+ echo "Linking $link"
+ rm -f tool-data/`basename $link`
+ ln -sf $link tool-data
+ done
+
+ if [ -d "$HYPHY" ]; then
+ echo "Linking $HYPHY"
+ rm -f tool-data/HYPHY
+ ln -sf $HYPHY tool-data/HYPHY
+ fi
+
+ if [ -d "$JARS" ]; then
+ echo "Linking $JARS"
+ rm -f tool-data/shared/jars
+ ln -sf $JARS tool-data/shared/jars
+ fi
+ ;;
+esac
for sample in $SAMPLES; do
- file=`echo $sample | sed -e 's/\.sample$//'`
+ file=${sample%.sample}
echo "Copying $sample to $file"
cp $sample $file
done
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -3,16 +3,13 @@
<registration converters_path="lib/galaxy/datatypes/converters" display_path="display_applications"><datatype extension="ab1" type="galaxy.datatypes.binary:Ab1" mimetype="application/octet-stream" display_in_upload="true"/><datatype extension="afg" type="galaxy.datatypes.assembly:Amos" display_in_upload="false"/>
+ <datatype extension="asn1" type="galaxy.datatypes.data:GenericAsn1" mimetype="text/plain" display_in_upload="true" />
+ <datatype extension="asn1-binary" type="galaxy.datatypes.binary:GenericAsn1Binary" mimetype="application/octet-stream" display_in_upload="true" /><datatype extension="axt" type="galaxy.datatypes.sequence:Axt" display_in_upload="true"/><datatype extension="fli" type="galaxy.datatypes.tabular:FeatureLocationIndex" display_in_upload="false"/><datatype extension="bam" type="galaxy.datatypes.binary:Bam" mimetype="application/octet-stream" display_in_upload="true"><converter file="bam_to_bai.xml" target_datatype="bai"/>
- <converter file="bam_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
- <!--
- Caution: (a) this converter requires bedtools to be installed and (b) it is very memory intensive and
- is not recommended for most laptops/desktops.
- <converter file="bam_to_bigwig_converter.xml" target_datatype="bigwig"/>
- -->
+ <converter file="bam_to_bigwig_converter.xml" target_datatype="bigwig"/><display file="ucsc/bam.xml" /><display file="ensembl/ensembl_bam.xml" /><display file="igv/bam.xml" />
@@ -20,10 +17,9 @@
</datatype><datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true"><converter file="bed_to_gff_converter.xml" target_datatype="gff"/>
- <converter file="interval_to_coverage.xml" target_datatype="coverage"/><converter file="bed_to_bgzip_converter.xml" target_datatype="bgzip"/><converter file="bed_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
- <converter file="bed_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/><converter file="bed_to_fli_converter.xml" target_datatype="fli"/><!-- <display file="ucsc/interval_as_bed.xml" /> --><display file="igb/bed.xml" />
@@ -49,7 +45,7 @@
<datatype extension="chrint" type="galaxy.datatypes.interval:ChromatinInteractions" display_in_upload="True"><converter file="interval_to_bgzip_converter.xml" target_datatype="bgzip"/><converter file="interval_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
- <converter file="interval_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/></datatype><!-- MSI added Datatypes --><datatype extension="csv" type="galaxy.datatypes.tabular:Tabular" subclass="True" display_in_upload="true" /><!-- FIXME: csv is 'tabular'ized data, but not 'tab-delimited'; the class used here is intended for 'tab-delimited' -->
@@ -91,7 +87,7 @@
<datatype extension="gff" type="galaxy.datatypes.interval:Gff" display_in_upload="true"><converter file="gff_to_bed_converter.xml" target_datatype="bed"/><converter file="gff_to_interval_index_converter.xml" target_datatype="interval_index"/>
- <converter file="gff_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/><converter file="gff_to_fli_converter.xml" target_datatype="fli"/><display file="ensembl/ensembl_gff.xml" inherit="True"/><!-- <display file="gbrowse/gbrowse_gff.xml" inherit="True" /> -->
@@ -101,7 +97,7 @@
<datatype extension="gmaj.zip" type="galaxy.datatypes.images:Gmaj" mimetype="application/zip"/><datatype extension="gtf" type="galaxy.datatypes.interval:Gtf" display_in_upload="true"><converter file="gff_to_interval_index_converter.xml" target_datatype="interval_index"/>
- <converter file="gff_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/></datatype><datatype extension="toolshed.gz" type="galaxy.datatypes.binary:Binary" mimetype="multipart/x-gzip" subclass="True" /><datatype extension="h5" type="galaxy.datatypes.binary:Binary" mimetype="application/octet-stream" subclass="True" />
@@ -113,7 +109,7 @@
<converter file="interval_to_bed12_converter.xml" target_datatype="bed12"/><converter file="interval_to_bgzip_converter.xml" target_datatype="bgzip"/><converter file="interval_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
- <converter file="interval_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ <converter file="interval_to_bigwig_converter.xml" target_datatype="bigwig"/><!-- <display file="ucsc/interval_as_bed.xml" inherit="True" /> --><display file="ensembl/ensembl_interval_as_bed.xml" inherit="True"/><display file="gbrowse/gbrowse_interval_as_bed.xml" inherit="True"/>
@@ -154,10 +150,13 @@
<datatype extension="encodepeak" type="galaxy.datatypes.interval:ENCODEPeak" display_in_upload="True"><converter file="encodepeak_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/><converter file="encodepeak_to_bgzip_converter.xml" target_datatype="bgzip"/>
- <converter file="encodepeak_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/></datatype><datatype extension="pdf" type="galaxy.datatypes.images:Pdf" mimetype="application/pdf"/>
- <datatype extension="pileup" type="galaxy.datatypes.tabular:Pileup" display_in_upload="true" />
+ <datatype extension="pileup" type="galaxy.datatypes.tabular:Pileup" display_in_upload="true">
+ <converter file="interval_to_bgzip_converter.xml" target_datatype="bgzip"/>
+ <converter file="interval_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
+ </datatype><datatype extension="png" type="galaxy.datatypes.images:Png" mimetype="image/png"/><datatype extension="qual" type="galaxy.datatypes.qualityscore:QualityScore" /><datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/>
@@ -167,7 +166,7 @@
<datatype extension="Roadmaps" type="galaxy.datatypes.assembly:Roadmaps" display_in_upload="false"/><datatype extension="sam" type="galaxy.datatypes.tabular:Sam" display_in_upload="true"><converter file="sam_to_bam.xml" target_datatype="bam"/>
- <converter file="sam_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ <converter file="sam_to_bigwig_converter.xml" target_datatype="bigwig"/></datatype><datatype extension="scf" type="galaxy.datatypes.binary:Scf" mimetype="application/octet-stream" display_in_upload="true"/><datatype extension="Sequences" type="galaxy.datatypes.assembly:Sequences" display_in_upload="false"/>
@@ -185,7 +184,7 @@
<converter file="vcf_to_bgzip_converter.xml" target_datatype="bgzip"/><converter file="vcf_to_vcf_bgzip_converter.xml" target_datatype="vcf_bgzip"/><converter file="vcf_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
- <converter file="vcf_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/><display file="ucsc/vcf.xml" /><display file="igv/vcf.xml" /><display file="rviewer/vcf.xml" inherit="True"/>
@@ -198,7 +197,6 @@
<!-- <display file="gbrowse/gbrowse_wig.xml" /> --><display file="igb/wig.xml" /></datatype>
- <datatype extension="summary_tree" type="galaxy.datatypes.binary:Binary" subclass="True" /><datatype extension="interval_index" type="galaxy.datatypes.binary:Binary" subclass="True" /><datatype extension="tabix" type="galaxy.datatypes.binary:Binary" subclass="True" /><datatype extension="bgzip" type="galaxy.datatypes.binary:Binary" subclass="True" />
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/galaxy.webapps.tool_shed.api.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.api.rst
@@ -0,0 +1,19 @@
+api Package
+===========
+
+:mod:`repositories` Module
+--------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.api.repositories
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`repository_revisions` Module
+----------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.api.repository_revisions
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/galaxy.webapps.tool_shed.controllers.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.controllers.rst
@@ -0,0 +1,59 @@
+controllers Package
+===================
+
+:mod:`controllers` Package
+--------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`admin` Module
+-------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.admin
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`hg` Module
+----------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.hg
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`repository` Module
+------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.repository
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`repository_review` Module
+-------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.repository_review
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`upload` Module
+--------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.upload
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`user` Module
+------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.user
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/galaxy.webapps.tool_shed.framework.middleware.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.framework.middleware.rst
@@ -0,0 +1,27 @@
+middleware Package
+==================
+
+:mod:`middleware` Package
+-------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.framework.middleware
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`hg` Module
+----------------
+
+.. automodule:: galaxy.webapps.tool_shed.framework.middleware.hg
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`remoteuser` Module
+------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.framework.middleware.remoteuser
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/galaxy.webapps.tool_shed.framework.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.framework.rst
@@ -0,0 +1,18 @@
+framework Package
+=================
+
+:mod:`framework` Package
+------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.framework
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.webapps.tool_shed.framework.middleware
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/galaxy.webapps.tool_shed.model.migrate.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.model.migrate.rst
@@ -0,0 +1,11 @@
+migrate Package
+===============
+
+:mod:`check` Module
+-------------------
+
+.. automodule:: galaxy.webapps.tool_shed.model.migrate.check
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/galaxy.webapps.tool_shed.model.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.model.rst
@@ -0,0 +1,26 @@
+model Package
+=============
+
+:mod:`model` Package
+--------------------
+
+.. automodule:: galaxy.webapps.tool_shed.model
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`mapping` Module
+---------------------
+
+.. automodule:: galaxy.webapps.tool_shed.model.mapping
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.webapps.tool_shed.model.migrate
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/galaxy.webapps.tool_shed.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.rst
@@ -0,0 +1,47 @@
+tool_shed Package
+=================
+
+:mod:`tool_shed` Package
+------------------------
+
+.. automodule:: galaxy.webapps.tool_shed
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`app` Module
+-----------------
+
+.. automodule:: galaxy.webapps.tool_shed.app
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`buildapp` Module
+----------------------
+
+.. automodule:: galaxy.webapps.tool_shed.buildapp
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`config` Module
+--------------------
+
+.. automodule:: galaxy.webapps.tool_shed.config
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.webapps.tool_shed.api
+ galaxy.webapps.tool_shed.controllers
+ galaxy.webapps.tool_shed.framework
+ galaxy.webapps.tool_shed.model
+ galaxy.webapps.tool_shed.security
+ galaxy.webapps.tool_shed.util
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/galaxy.webapps.tool_shed.security.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.security.rst
@@ -0,0 +1,11 @@
+security Package
+================
+
+:mod:`security` Package
+-----------------------
+
+.. automodule:: galaxy.webapps.tool_shed.security
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/galaxy.webapps.tool_shed.util.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.util.rst
@@ -0,0 +1,35 @@
+util Package
+============
+
+:mod:`common_util` Module
+-------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.util.common_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`container_util` Module
+----------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.util.container_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`hgweb_config` Module
+--------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.util.hgweb_config
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`shed_statistics` Module
+-----------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.util.shed_statistics
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/tool_shed.galaxy_install.grids.rst
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.grids.rst
@@ -0,0 +1,11 @@
+grids Package
+=============
+
+:mod:`admin_toolshed_grids` Module
+----------------------------------
+
+.. automodule:: tool_shed.galaxy_install.grids.admin_toolshed_grids
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/tool_shed.galaxy_install.migrate.rst
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.migrate.rst
@@ -0,0 +1,19 @@
+migrate Package
+===============
+
+:mod:`check` Module
+-------------------
+
+.. automodule:: tool_shed.galaxy_install.migrate.check
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`common` Module
+--------------------
+
+.. automodule:: tool_shed.galaxy_install.migrate.common
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/tool_shed.galaxy_install.rst
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.rst
@@ -0,0 +1,44 @@
+galaxy_install Package
+======================
+
+:mod:`galaxy_install` Package
+-----------------------------
+
+.. automodule:: tool_shed.galaxy_install
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`install_manager` Module
+-----------------------------
+
+.. automodule:: tool_shed.galaxy_install.install_manager
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`repository_util` Module
+-----------------------------
+
+.. automodule:: tool_shed.galaxy_install.repository_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`update_manager` Module
+----------------------------
+
+.. automodule:: tool_shed.galaxy_install.update_manager
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ tool_shed.galaxy_install.grids
+ tool_shed.galaxy_install.migrate
+ tool_shed.galaxy_install.tool_dependencies
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/tool_shed.galaxy_install.tool_dependencies.rst
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.tool_dependencies.rst
@@ -0,0 +1,27 @@
+tool_dependencies Package
+=========================
+
+:mod:`common_util` Module
+-------------------------
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies.common_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`fabric_util` Module
+-------------------------
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies.fabric_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`install_util` Module
+--------------------------
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies.install_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/tool_shed.grids.rst
--- /dev/null
+++ b/doc/source/lib/tool_shed.grids.rst
@@ -0,0 +1,35 @@
+grids Package
+=============
+
+:mod:`admin_grids` Module
+-------------------------
+
+.. automodule:: tool_shed.grids.admin_grids
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`repository_grids` Module
+------------------------------
+
+.. automodule:: tool_shed.grids.repository_grids
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`repository_review_grids` Module
+-------------------------------------
+
+.. automodule:: tool_shed.grids.repository_review_grids
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`util` Module
+------------------
+
+.. automodule:: tool_shed.grids.util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/tool_shed.rst
--- /dev/null
+++ b/doc/source/lib/tool_shed.rst
@@ -0,0 +1,20 @@
+tool_shed Package
+=================
+
+:mod:`tool_shed_registry` Module
+--------------------------------
+
+.. automodule:: tool_shed.tool_shed_registry
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ tool_shed.galaxy_install
+ tool_shed.grids
+ tool_shed.util
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 doc/source/lib/tool_shed.util.rst
--- /dev/null
+++ b/doc/source/lib/tool_shed.util.rst
@@ -0,0 +1,107 @@
+util Package
+============
+
+:mod:`common_install_util` Module
+---------------------------------
+
+.. automodule:: tool_shed.util.common_install_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`common_util` Module
+-------------------------
+
+.. automodule:: tool_shed.util.common_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`data_manager_util` Module
+-------------------------------
+
+.. automodule:: tool_shed.util.data_manager_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`datatype_util` Module
+---------------------------
+
+.. automodule:: tool_shed.util.datatype_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`encoding_util` Module
+---------------------------
+
+.. automodule:: tool_shed.util.encoding_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`metadata_util` Module
+---------------------------
+
+.. automodule:: tool_shed.util.metadata_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`readme_util` Module
+-------------------------
+
+.. automodule:: tool_shed.util.readme_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`repository_dependency_util` Module
+----------------------------------------
+
+.. automodule:: tool_shed.util.repository_dependency_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`review_util` Module
+-------------------------
+
+.. automodule:: tool_shed.util.review_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`shed_util_common` Module
+------------------------------
+
+.. automodule:: tool_shed.util.shed_util_common
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tool_dependency_util` Module
+----------------------------------
+
+.. automodule:: tool_shed.util.tool_dependency_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tool_util` Module
+-----------------------
+
+.. automodule:: tool_shed.util.tool_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`workflow_util` Module
+---------------------------
+
+.. automodule:: tool_shed.util.workflow_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -14,7 +14,6 @@
[eggs:platform]
bx_python = 0.7.1
Cheetah = 2.2.2
-ctypes = 1.0.2
DRMAA_python = 0.2
MarkupSafe = 0.12
mercurial = 2.2.3
@@ -29,6 +28,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
+SQLAlchemy = 0.7.9
; msgpack_python = 0.2.4
[eggs:noplatform]
@@ -46,17 +46,17 @@
nose = 0.11.1
NoseHTML = 0.4.1
NoseTestDiff = 0.1
+Parsley = 1.1
Paste = 1.7.5.1
PasteDeploy = 1.5.0
pexpect = 2.4
python_openid = 2.2.5
python_daemon = 1.5.5
Routes = 1.12.3
-SQLAlchemy = 0.5.6
-sqlalchemy_migrate = 0.5.4
+sqlalchemy_migrate = 0.7.2
ssh = 1.7.14
SVGFig = 1.1.6
-Tempita = 0.1
+Tempita = 0.5.1
twill = 0.9
WebError = 0.8a
WebHelpers = 0.2
@@ -75,7 +75,6 @@
MySQL_python = _5.1.41_static
bx_python = _7b95ff194725
GeneTrack = _dev_48da9e998f0caf01c5be731e926f4b0481f658f0
-SQLAlchemy = _dev_r6498
pysam = _kanwei_b10f6e722e9a
; dependency source urls, necessary for scrambling. for an explanation, see
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 job_conf.xml.sample_advanced
--- a/job_conf.xml.sample_advanced
+++ b/job_conf.xml.sample_advanced
@@ -7,7 +7,7 @@
<plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner"/><plugin id="pbs" type="runner" load="galaxy.jobs.runners.pbs:PBSJobRunner" workers="2"/><plugin id="drmaa" type="runner" load="galaxy.jobs.runners.drmaa:DRMAAJobRunner"/>
- <plugin id="lwr" type="runner" load="galaxy.jobs.runners.lwr.LwrJobRunner" /><!-- https://lwr.readthedocs.org -->
+ <plugin id="lwr" type="runner" load="galaxy.jobs.runners.lwr:LwrJobRunner" /><!-- https://lwr.readthedocs.org --><plugin id="cli" type="runner" load="galaxy.jobs.runners.cli:ShellJobRunner" /><plugin id="condor" type="runner" load="galaxy.jobs.runners.condor:CondorJobRunner" /></plugins>
@@ -40,7 +40,6 @@
</destination><destination id="dynamic" runner="dynamic"><!-- A destination that represents a method in the dynamic runner. -->
- <param id="type">python</param><param id="function">foo</param></destination><destination id="secure_lwr" runner="lwr">
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/fpconst.py
--- a/lib/fpconst.py
+++ /dev/null
@@ -1,163 +0,0 @@
-"""Utilities for handling IEEE 754 floating point special values
-
-This python module implements constants and functions for working with
-IEEE754 double-precision special values. It provides constants for
-Not-a-Number (NaN), Positive Infinity (PosInf), and Negative Infinity
-(NegInf), as well as functions to test for these values.
-
-The code is implemented in pure python by taking advantage of the
-'struct' standard module. Care has been taken to generate proper
-results on both big-endian and little-endian machines. Some efficiency
-could be gained by translating the core routines into C.
-
-See <http://babbage.cs.qc.edu/courses/cs341/IEEE-754references.html>
-for reference material on the IEEE 754 floating point standard.
-
-Further information on this package is available at
-<http://www.analytics.washington.edu/statcomp/projects/rzope/fpconst/>.
-
-Author: Gregory R. Warnes <gregory_r_warnes(a)groton.pfizer.com>
-Date:: 2003-04-08
-Copyright: (c) 2003, Pfizer, Inc.
-"""
-
-__version__ = "0.7.0"
-ident = "$Id: fpconst.py,v 1.12 2004/05/22 04:38:17 warnes Exp $"
-
-import struct, operator
-
-# check endianess
-_big_endian = struct.pack('i',1)[0] != '\x01'
-
-# and define appropriate constants
-if(_big_endian):
- NaN = struct.unpack('d', '\x7F\xF8\x00\x00\x00\x00\x00\x00')[0]
- PosInf = struct.unpack('d', '\x7F\xF0\x00\x00\x00\x00\x00\x00')[0]
- NegInf = -PosInf
-else:
- NaN = struct.unpack('d', '\x00\x00\x00\x00\x00\x00\xf8\xff')[0]
- PosInf = struct.unpack('d', '\x00\x00\x00\x00\x00\x00\xf0\x7f')[0]
- NegInf = -PosInf
-
-def _double_as_bytes(dval):
- "Use struct.unpack to decode a double precision float into eight bytes"
- tmp = list(struct.unpack('8B',struct.pack('d', dval)))
- if not _big_endian:
- tmp.reverse()
- return tmp
-
-##
-## Functions to extract components of the IEEE 754 floating point format
-##
-
-def _sign(dval):
- "Extract the sign bit from a double-precision floating point value"
- bb = _double_as_bytes(dval)
- return bb[0] >> 7 & 0x01
-
-def _exponent(dval):
- """Extract the exponentent bits from a double-precision floating
- point value.
-
- Note that for normalized values, the exponent bits have an offset
- of 1023. As a consequence, the actual exponentent is obtained
- by subtracting 1023 from the value returned by this function
- """
- bb = _double_as_bytes(dval)
- return (bb[0] << 4 | bb[1] >> 4) & 0x7ff
-
-def _mantissa(dval):
- """Extract the _mantissa bits from a double-precision floating
- point value."""
-
- bb = _double_as_bytes(dval)
- mantissa = bb[1] & 0x0f << 48
- mantissa += bb[2] << 40
- mantissa += bb[3] << 32
- mantissa += bb[4]
- return mantissa
-
-def _zero_mantissa(dval):
- """Determine whether the mantissa bits of the given double are all
- zero."""
- bb = _double_as_bytes(dval)
- return ((bb[1] & 0x0f) | reduce(operator.or_, bb[2:])) == 0
-
-##
-## Functions to test for IEEE 754 special values
-##
-
-def isNaN(value):
- "Determine if the argument is a IEEE 754 NaN (Not a Number) value."
- return (_exponent(value)==0x7ff and not _zero_mantissa(value))
-
-def isInf(value):
- """Determine if the argument is an infinite IEEE 754 value (positive
- or negative inifinity)"""
- return (_exponent(value)==0x7ff and _zero_mantissa(value))
-
-def isFinite(value):
- """Determine if the argument is an finite IEEE 754 value (i.e., is
- not NaN, positive or negative inifinity)"""
- return (_exponent(value)!=0x7ff)
-
-def isPosInf(value):
- "Determine if the argument is a IEEE 754 positive infinity value"
- return (_sign(value)==0 and _exponent(value)==0x7ff and \
- _zero_mantissa(value))
-
-def isNegInf(value):
- "Determine if the argument is a IEEE 754 negative infinity value"
- return (_sign(value)==1 and _exponent(value)==0x7ff and \
- _zero_mantissa(value))
-
-##
-## Functions to test public functions.
-##
-
-def test_isNaN():
- assert( not isNaN(PosInf) )
- assert( not isNaN(NegInf) )
- assert( isNaN(NaN ) )
- assert( not isNaN( 1.0) )
- assert( not isNaN( -1.0) )
-
-def test_isInf():
- assert( isInf(PosInf) )
- assert( isInf(NegInf) )
- assert( not isInf(NaN ) )
- assert( not isInf( 1.0) )
- assert( not isInf( -1.0) )
-
-def test_isFinite():
- assert( not isFinite(PosInf) )
- assert( not isFinite(NegInf) )
- assert( not isFinite(NaN ) )
- assert( isFinite( 1.0) )
- assert( isFinite( -1.0) )
-
-def test_isPosInf():
- assert( isPosInf(PosInf) )
- assert( not isPosInf(NegInf) )
- assert( not isPosInf(NaN ) )
- assert( not isPosInf( 1.0) )
- assert( not isPosInf( -1.0) )
-
-def test_isNegInf():
- assert( not isNegInf(PosInf) )
- assert( isNegInf(NegInf) )
- assert( not isNegInf(NaN ) )
- assert( not isNegInf( 1.0) )
- assert( not isNegInf( -1.0) )
-
-# overall test
-def test():
- test_isNaN()
- test_isInf()
- test_isFinite()
- test_isPosInf()
- test_isNegInf()
-
-if __name__ == "__main__":
- test()
-
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/__init__.py
--- a/lib/galaxy/__init__.py
+++ b/lib/galaxy/__init__.py
@@ -95,10 +95,15 @@
pkg_resources.Distribution._insert_on = pkg_resources.Distribution.insert_on
pkg_resources.Distribution.insert_on = _insert_on
-# patch to add the NullHandler class to logging
-if sys.version_info[:2] < ( 2, 7 ):
- import logging
+# compat: BadZipFile introduced in Python 2.7
+import zipfile
+if not hasattr( zipfile, 'BadZipFile' ):
+ zipfile.BadZipFile = zipfile.error
+
+# compat: patch to add the NullHandler class to logging
+import logging
+if not hasattr( logging, 'NullHandler' ):
class NullHandler( logging.Handler ):
def emit( self, record ):
pass
- logging.NullHandler = NullHandler
+ logging.NullHandler = NullHandler
\ No newline at end of file
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -15,6 +15,7 @@
from galaxy.tags.tag_handler import GalaxyTagHandler
from galaxy.visualization.genomes import Genomes
from galaxy.visualization.data_providers.registry import DataProviderRegistry
+from galaxy.visualization.registry import VisualizationsRegistry
from galaxy.tools.imp_exp import load_history_imp_exp_tools
from galaxy.tools.genome_index import load_genome_index_tools
from galaxy.sample_tracking import external_service_types
@@ -61,7 +62,8 @@
self.config.database_engine_options,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
object_store = self.object_store,
- trace_logger=self.trace_logger )
+ trace_logger=self.trace_logger,
+ use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ) )
# Manage installed tool shed repositories.
self.installed_repository_manager = tool_shed.galaxy_install.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
@@ -120,6 +122,9 @@
load_history_imp_exp_tools( self.toolbox )
# Load genome indexer tool.
load_genome_index_tools( self.toolbox )
+ # visualizations registry: associates resources with visualizations, controls how to render
+ self.visualizations_registry = ( VisualizationsRegistry( self.config.root, self.config.visualizations_conf_path )
+ if self.config.visualizations_conf_path else None )
# Load security policy.
self.security_agent = self.model.security_agent
self.host_security_agent = galaxy.security.HostAgent( model=self.security_agent.model, permitted_actions=self.security_agent.permitted_actions )
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -86,7 +86,6 @@
self.galaxy_data_manager_data_path = kwargs.get( 'galaxy_data_manager_data_path', self.tool_data_path )
self.tool_secret = kwargs.get( "tool_secret", "" )
self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
- self.set_metadata_externally = string_as_bool( kwargs.get( "set_metadata_externally", "False" ) )
self.retry_metadata_internally = string_as_bool( kwargs.get( "retry_metadata_internally", "True" ) )
self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
@@ -155,6 +154,10 @@
self.ucsc_display_sites = kwargs.get( 'ucsc_display_sites', "main,test,archaea,ucla" ).lower().split(",")
self.gbrowse_display_sites = kwargs.get( 'gbrowse_display_sites', "modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225" ).lower().split(",")
self.brand = kwargs.get( 'brand', None )
+ # Configuration for the message box directly below the masthead.
+ self.message_box_visible = kwargs.get( 'message_box_visible', False )
+ self.message_box_content = kwargs.get( 'message_box_content', None )
+ self.message_box_class = kwargs.get( 'message_box_class', 'info' )
self.support_url = kwargs.get( 'support_url', 'http://wiki.g2.bx.psu.edu/Support' )
self.wiki_url = kwargs.get( 'wiki_url', 'http://g2.trac.bx.psu.edu/' )
self.blog_url = kwargs.get( 'blog_url', None )
@@ -166,6 +169,7 @@
self.enable_whoosh_library_search = string_as_bool( kwargs.get( 'enable_whoosh_library_search', False ) )
self.whoosh_index_dir = resolve_path( kwargs.get( "whoosh_index_dir", "database/whoosh_indexes" ), self.root )
self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
+ self.ftp_upload_dir_identifier = kwargs.get( 'ftp_upload_dir_identifier', 'email' ) # attribute on user - email, username, id, etc...
self.ftp_upload_site = kwargs.get( 'ftp_upload_site', None )
self.allow_library_path_paste = kwargs.get( 'allow_library_path_paste', False )
self.disable_library_comptypes = kwargs.get( 'disable_library_comptypes', '' ).lower().split( ',' )
@@ -271,6 +275,8 @@
self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
+ # visualizations registry config path
+ self.visualizations_conf_path = kwargs.get( 'visualizations_conf_path', None )
@property
def sentry_dsn_public( self ):
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/assembly.py
--- a/lib/galaxy/datatypes/assembly.py
+++ b/lib/galaxy/datatypes/assembly.py
@@ -5,14 +5,14 @@
"""
import data
+import logging
+import os
+import re
+import sys
from galaxy.datatypes import sequence
-import logging, os, sys, time, tempfile, shutil, string, glob, re
-import galaxy.model
-from galaxy.datatypes import metadata
+from galaxy.datatypes.images import Html
from galaxy.datatypes.metadata import MetadataElement
-from galaxy import util
-from galaxy.datatypes.images import Html
-from sniff import *
+
log = logging.getLogger(__name__)
@@ -174,7 +174,6 @@
gen_msg = ''
try:
efp = dataset.extra_files_path
- flist = os.listdir(efp)
log_path = os.path.join(efp,'Log')
f = open(log_path,'r')
log_content = f.read(1000)
@@ -223,5 +222,5 @@
self.regenerate_primary_file(dataset)
if __name__ == '__main__':
- import doctest, sys
+ import doctest
doctest.testmod(sys.modules[__name__])
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py
+++ b/lib/galaxy/datatypes/binary.py
@@ -2,18 +2,26 @@
Binary classes
"""
-import data, logging, binascii
+import binascii
+import data
+import gzip
+import logging
+import os
+import shutil
+import struct
+import subprocess
+import tempfile
+import zipfile
+
+from urllib import urlencode, quote_plus
+from galaxy import eggs
+eggs.require( "bx-python" )
+
+from bx.seq.twobit import TWOBIT_MAGIC_NUMBER, TWOBIT_MAGIC_NUMBER_SWAP, TWOBIT_MAGIC_SIZE
+
from galaxy.datatypes.metadata import MetadataElement
from galaxy.datatypes import metadata
from galaxy.datatypes.sniff import *
-from galaxy import eggs
-import pkg_resources
-pkg_resources.require( "bx-python" )
-from bx.seq.twobit import TWOBIT_MAGIC_NUMBER, TWOBIT_MAGIC_NUMBER_SWAP, TWOBIT_MAGIC_SIZE
-from urllib import urlencode, quote_plus
-import zipfile, gzip
-import os, subprocess, tempfile
-import struct
log = logging.getLogger(__name__)
@@ -85,9 +93,18 @@
Binary.register_unsniffable_binary_ext("ab1")
+class GenericAsn1Binary( Binary ):
+ """Class for generic ASN.1 binary format"""
+ file_ext = "asn1-binary"
+
+Binary.register_unsniffable_binary_ext("asn1-binary")
+
class Bam( Binary ):
"""Class describing a BAM binary file"""
file_ext = "bam"
+ track_type = "ReadTrack"
+ data_sources = { "data": "bai", "index": "bigwig" }
+
MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, file_ext="bai", readonly=True, no_value=None, visible=False, optional=True )
def _get_samtools_version( self ):
@@ -238,9 +255,7 @@
return dataset.peek
except:
return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
- def get_track_type( self ):
- return "ReadTrack", { "data": "bai", "index": [ "bigwig", "summary_tree" ] }
-
+
Binary.register_sniffable_binary_format("bam", "bam", Bam)
class H5( Binary ):
@@ -318,6 +333,9 @@
The supplemental info in the paper has the binary details:
http://bioinformatics.oxfordjournals.org/cgi/content/abstract/btq351v1
"""
+ track_type = "LineTrack"
+ data_sources = { "data_standalone": "bigwig" }
+
def __init__( self, **kwd ):
Binary.__init__( self, **kwd )
self._magic = 0x888FFC26
@@ -342,19 +360,18 @@
return dataset.peek
except:
return "Binary UCSC %s file (%s)" % ( self._name, data.nice_size( dataset.get_size() ) )
- def get_track_type( self ):
- return "LineTrack", {"data_standalone": "bigwig"}
-
+
Binary.register_sniffable_binary_format("bigwig", "bigwig", BigWig)
class BigBed(BigWig):
"""BigBed support from UCSC."""
+
+ data_sources = { "data_standalone": "bigbed" }
+
def __init__( self, **kwd ):
Binary.__init__( self, **kwd )
self._magic = 0x8789F2EB
self._name = "BigBed"
- def get_track_type( self ):
- return "LineTrack", {"data_standalone": "bigbed"}
Binary.register_sniffable_binary_format("bigbed", "bigbed", BigBed)
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/chrominfo.py
--- a/lib/galaxy/datatypes/chrominfo.py
+++ b/lib/galaxy/datatypes/chrominfo.py
@@ -1,7 +1,3 @@
-import data
-from galaxy import util
-from galaxy.datatypes.sniff import *
-from galaxy.web import url_for
from tabular import Tabular
from galaxy.datatypes import metadata
from galaxy.datatypes.metadata import MetadataElement
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/bam_to_bigwig_converter.xml
--- a/lib/galaxy/datatypes/converters/bam_to_bigwig_converter.xml
+++ b/lib/galaxy/datatypes/converters/bam_to_bigwig_converter.xml
@@ -1,7 +1,14 @@
<tool id="CONVERTER_bam_to_bigwig_0" name="Convert BAM to BigWig" version="1.0.0" hidden="true"><!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> --><command>
- bedtools genomecov -bg -split -ibam $input -g $chromInfo | wigToBigWig stdin $chromInfo $output
+ bedtools genomecov -bg -split -ibam $input -g $chromInfo
+
+ ## Streaming the bedgraph file to wigToBigWig is fast but very memory intensive; hence, this
+ ## should only be used on systems with large RAM.
+ ## | wigToBigWig stdin $chromInfo $output
+
+ ## This can be used anywhere.
+ > temp.bg ; bedGraphToBigWig temp.bg $chromInfo $output
</command><inputs><param format="bam" name="input" type="data" label="Choose BAM file"/>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/bam_to_summary_tree_converter.xml
--- a/lib/galaxy/datatypes/converters/bam_to_summary_tree_converter.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<tool id="CONVERTER_bam_to_summary_tree_0" name="Convert BAM to Summary Tree" version="1.0.0" hidden="true">
- <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
- <command interpreter="python">
- sam_or_bam_to_summary_tree_converter.py --bam $input1 $input1.metadata.bam_index $output1
- </command>
- <inputs>
- <param format="bam" name="input1" type="data" label="Choose BAM file"/>
- </inputs>
- <outputs>
- <data format="summary_tree" name="output1"/>
- </outputs>
- <help>
- </help>
-</tool>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/bed_gff_or_vcf_to_bigwig_converter.xml
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bed_gff_or_vcf_to_bigwig_converter.xml
@@ -0,0 +1,25 @@
+<tool id="CONVERTER_bed_gff_or_vcf_to_bigwig_0" name="Convert BED, GFF, or VCF to BigWig" version="1.0.0" hidden="true">
+ <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+ <command>
+ ## Remove comments and sort by chromosome.
+ grep -v '^#' $input | sort -k1,1 |
+
+ ## Generate coverage bedgraph.
+ bedtools genomecov -bg -split -i stdin -g $chromInfo
+
+ ## Streaming the bedgraph file to wigToBigWig is fast but very memory intensive; hence, this
+ ## should only be used on systems with large RAM.
+ ## | wigToBigWig stdin $chromInfo $output
+
+ ## This can be used anywhere.
+ > temp.bg ; bedGraphToBigWig temp.bg $chromInfo $output
+ </command>
+ <inputs>
+ <param format="bed,gff,vcf" name="input" type="data" label="Choose input file"/>
+ </inputs>
+ <outputs>
+ <data format="bigwig" name="output"/>
+ </outputs>
+ <help>
+ </help>
+</tool>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/bed_to_summary_tree_converter.xml
--- a/lib/galaxy/datatypes/converters/bed_to_summary_tree_converter.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<tool id="CONVERTER_bed_to_summary_tree_0" name="Convert BED to Summary Tree" version="1.0.0" hidden="true">
-<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
- <command interpreter="python">interval_to_summary_tree_converter.py $input1 $output1</command>
- <inputs>
- <page>
- <param format="bed" name="input1" type="data" label="Choose BED file"/>
- </page>
- </inputs>
- <outputs>
- <data format="summary_tree" name="output1"/>
- </outputs>
- <help>
- </help>
-</tool>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/encodepeak_to_summary_tree_converter.xml
--- a/lib/galaxy/datatypes/converters/encodepeak_to_summary_tree_converter.xml
+++ /dev/null
@@ -1,20 +0,0 @@
-<tool id="CONVERTER_encodepeak_to_summary_tree_0" name="Convert ENCODEPeak to Summary Tree" version="1.0.0" hidden="true">
-<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
- <command interpreter="python">interval_to_summary_tree_converter.py
- -c ${input1.metadata.chromCol}
- -s ${input1.metadata.startCol}
- -e ${input1.metadata.endCol}
- $input1 $output1
- </command>
-
- <inputs>
- <page>
- <param format="ENCODEPeak" name="input1" type="data" label="Choose ENCODEPeak file"/>
- </page>
- </inputs>
- <outputs>
- <data format="summary_tree" name="output1"/>
- </outputs>
- <help>
- </help>
-</tool>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/gff_to_summary_tree_converter.xml
--- a/lib/galaxy/datatypes/converters/gff_to_summary_tree_converter.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<tool id="CONVERTER_gff_to_summary_tree_0" name="Convert GFF to Summary Tree" version="1.0.0" hidden="true">
-<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
- <command interpreter="python">interval_to_summary_tree_converter.py $input1 $output1 --gff</command>
- <inputs>
- <page>
- <param format="gff" name="input1" type="data" label="Choose GFF file"/>
- </page>
- </inputs>
- <outputs>
- <data format="summary_tree" name="output1"/>
- </outputs>
- <help>
- </help>
-</tool>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/interval_to_bigwig_converter.xml
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_bigwig_converter.xml
@@ -0,0 +1,33 @@
+<tool id="CONVERTER_interval_to_bigwig_0" name="Convert Genomic Intervals To Coverage">
+ <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+ <!-- Used on the metadata edit page. -->
+ <command>
+
+ ## Remove comments and sort by chromosome.
+ grep -v '^#' $input1 | sort -k${input1.metadata.chromCol},${input1.metadata.chromCol} |
+
+ ## Create simple BED by cutting chrom, start, and end columns.
+ awk -v OFS=' ' '{print $${input1.metadata.chromCol},$${input1.metadata.startCol},$${input1.metadata.endCol} }' |
+
+ ## Generate coverage bedgraph.
+ bedtools genomecov -bg -split -i stdin -g $chromInfo
+
+ ## Streaming the bedgraph file to wigToBigWig is fast but very memory intensive; hence, this
+ ## should only be used on systems with large RAM.
+ ## | wigToBigWig stdin $chromInfo $output
+
+ ## This can be used anywhere.
+ > temp.bg ; bedGraphToBigWig temp.bg $chromInfo $output
+
+ </command>
+ <inputs>
+ <page>
+ <param format="interval" name="input1" type="data" label="Choose intervals"/>
+ </page>
+ </inputs>
+ <outputs>
+ <data format="bigwig" name="output"/>
+ </outputs>
+ <help>
+ </help>
+</tool>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/interval_to_summary_tree_converter.py
--- a/lib/galaxy/datatypes/converters/interval_to_summary_tree_converter.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Convert from interval file to summary tree file. Default input file format is BED (0-based, half-open intervals).
-
-usage: %prog <options> in_file out_file
- -c, --chr-col: chromosome column, default=1
- -s, --start-col: start column, default=2
- -e, --end-col: end column, default=3
- -t, --strand-col: strand column, default=6
- -G, --gff: input is GFF format, meaning start and end coordinates are 1-based, closed interval
-"""
-from __future__ import division
-
-import sys, fileinput, optparse
-from galaxy import eggs
-import pkg_resources; pkg_resources.require( "bx-python" )
-from galaxy.visualization.tracks.summary import *
-from bx.intervals.io import *
-from galaxy.datatypes.util.gff_util import *
-
-def main():
- # Read options, args.
- parser = optparse.OptionParser()
- parser.add_option( '-c', '--chr-col', type='int', dest='chrom_col', default=1 )
- parser.add_option( '-s', '--start-col', type='int', dest='start_col', default=2 )
- parser.add_option( '-e', '--end-col', type='int', dest='end_col', default=3 )
- parser.add_option( '-t', '--strand-col', type='int', dest='strand_col', default=6 )
- parser.add_option( '-G', '--gff', dest="gff_format", action="store_true" )
- (options, args) = parser.parse_args()
- input_fname, output_fname = args
-
- # Convert column indices to 0-based.
- options.chrom_col -= 1
- options.start_col -= 1
- options.end_col -= 1
- options.strand_col -= 1
-
- # Do conversion.
- if options.gff_format:
- reader_wrapper_class = GFFReaderWrapper
- chr_col, start_col, end_col, strand_col = ( 0, 3, 4, 6 )
- else:
- reader_wrapper_class = NiceReaderWrapper
- chr_col, start_col, end_col, strand_col = ( options.chrom_col, options.start_col, options.end_col, options.strand_col )
- reader_wrapper = reader_wrapper_class( fileinput.FileInput( input_fname ),
- chrom_col=chr_col,
- start_col=start_col,
- end_col=end_col,
- strand_col=strand_col,
- fix_strand=True )
- st = SummaryTree()
- for feature in list( reader_wrapper ):
- if isinstance( feature, GenomicInterval ):
- # Tree expects BED coordinates.
- if type( feature ) is GFFFeature:
- convert_gff_coords_to_bed( feature )
- st.insert_range( feature.chrom, long( feature.start ), long( feature.end ) )
-
- st.write( output_fname )
-
-if __name__ == "__main__":
- main()
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/interval_to_summary_tree_converter.xml
--- a/lib/galaxy/datatypes/converters/interval_to_summary_tree_converter.xml
+++ /dev/null
@@ -1,20 +0,0 @@
-<tool id="CONVERTER_interval_to_summary_tree_0" name="Convert Interval to Summary Tree" version="1.0.0" hidden="true">
-<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
- <command interpreter="python">interval_to_summary_tree_converter.py
- -c ${input1.metadata.chromCol}
- -s ${input1.metadata.startCol}
- -e ${input1.metadata.endCol}
- $input1 $output1
- </command>
-
- <inputs>
- <page>
- <param format="interval" name="input1" type="data" label="Choose Interval file"/>
- </page>
- </inputs>
- <outputs>
- <data format="summary_tree" name="output1"/>
- </outputs>
- <help>
- </help>
-</tool>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.py
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+"""
+Convert from pileup file to interval index file.
+
+usage: %prog <options> in_file out_file
+"""
+
+from __future__ import division
+
+import sys, fileinput, optparse
+from galaxy import eggs
+import pkg_resources; pkg_resources.require( "bx-python" )
+from galaxy.visualization.tracks.summary import *
+from galaxy.datatypes.util.gff_util import convert_gff_coords_to_bed
+from bx.interval_index_file import Indexes
+
+def main():
+
+ # Read options, args.
+ parser = optparse.OptionParser()
+ (options, args) = parser.parse_args()
+ input_fname, output_fname = args
+
+ # Do conversion.
+ index = Indexes()
+ offset = 0
+ for line in open( input_fname, "r" ):
+ chrom, start = line.split()[ 0:2 ]
+ # Pileup format is 1-based.
+ start = int( start ) - 1
+ index.add( chrom, start, start + 1, offset )
+ offset += len( line )
+
+ index.write( open(output_fname, "w") )
+
+if __name__ == "__main__":
+ main()
+
\ No newline at end of file
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.xml
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_pileup_to_interval_index_0" name="Convert Pileup to Interval Index" version="1.0.0" hidden="true">
+<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+ <command interpreter="python">pileup_to_interval_index_converter.py $input $output
+ </command>
+ <inputs>
+ <page>
+ <param format="pileup" name="input" type="data" label="Choose Pileup file"/>
+ </page>
+ </inputs>
+ <outputs>
+ <data format="interval_index" name="output"/>
+ </outputs>
+ <help>
+ </help>
+</tool>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/sam_or_bam_to_summary_tree_converter.py
--- a/lib/galaxy/datatypes/converters/sam_or_bam_to_summary_tree_converter.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-
-from __future__ import division
-
-import sys, os, optparse
-sys.stderr = open(os.devnull, 'w') # suppress stderr as cython produces warning on some systems:
- # csamtools.so:6: RuntimeWarning: __builtin__.file size changed
-
-from galaxy import eggs
-import pkg_resources
-
-if sys.version_info[:2] == (2, 4):
- pkg_resources.require( "ctypes" )
-pkg_resources.require( "pysam" )
-
-from pysam import csamtools
-from galaxy.visualization.tracks.summary import *
-
-def main():
- parser = optparse.OptionParser()
- parser.add_option( '-S', '--sam', action="store_true", dest="is_sam" )
- parser.add_option( '-B', '--bam', action="store_true", dest="is_bam" )
- options, args = parser.parse_args()
-
- if options.is_bam:
- input_fname = args[0]
- index_fname = args[1]
- out_fname = args[2]
- samfile = csamtools.Samfile( filename=input_fname, mode='rb', index_filename=index_fname )
- elif options.is_sam:
- input_fname = args[0]
- out_fname = args[1]
- samfile = csamtools.Samfile( filename=input_fname, mode='r' )
-
- st = SummaryTree()
- for read in samfile.fetch():
- st.insert_range( samfile.getrname( read.rname ), read.pos, read.pos + read.rlen )
-
- st.write(out_fname)
-
-if __name__ == "__main__":
- main()
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/sam_to_bigwig_converter.xml
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/sam_to_bigwig_converter.xml
@@ -0,0 +1,20 @@
+<tool id="CONVERTER_sam_to_bigwig_0" name="Convert SAM to BigWig" version="1.0.0" hidden="true">
+ <command>
+ samtools view -bh $input | bedtools genomecov -bg -split -ibam stdin -g $chromInfo
+
+ ## Streaming the bedgraph file to wigToBigWig is fast but very memory intensive; hence, this
+ ## should only be used on systems with large RAM.
+ ## | wigToBigWig stdin $chromInfo $output
+
+ ## This can be used anywhere.
+ > temp.bg ; bedGraphToBigWig temp.bg $chromInfo $output
+ </command>
+ <inputs>
+ <param format="bam" name="input" type="data" label="Choose BAM file"/>
+ </inputs>
+ <outputs>
+ <data format="bigwig" name="output"/>
+ </outputs>
+ <help>
+ </help>
+</tool>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/sam_to_summary_tree_converter.xml
--- a/lib/galaxy/datatypes/converters/sam_to_summary_tree_converter.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<tool id="CONVERTER_sam_to_summary_tree_0" name="Convert SAM to Summary Tree" version="1.0.0" hidden="true">
-<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
- <command interpreter="python">sam_or_bam_to_summary_tree_converter.py --sam $input1 $output1</command>
- <inputs>
- <page>
- <param format="sam" name="input1" type="data" label="Choose sam file"/>
- </page>
- </inputs>
- <outputs>
- <data format="summary_tree" name="output1"/>
- </outputs>
- <help>
- </help>
-</tool>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/vcf_to_summary_tree_converter.py
--- a/lib/galaxy/datatypes/converters/vcf_to_summary_tree_converter.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Convert from VCF file to summary tree file.
-
-usage: %prog in_file out_file
-"""
-from __future__ import division
-
-import optparse
-import galaxy_utils.sequence.vcf
-from galaxy.visualization.tracks.summary import SummaryTree
-
-def main():
- # Read options, args.
- parser = optparse.OptionParser()
- (options, args) = parser.parse_args()
- in_file, out_file = args
-
- # Do conversion.
- st = SummaryTree()
- for line in list( galaxy_utils.sequence.vcf.Reader( open( in_file ) ) ):
- # VCF format provides a chrom and 1-based position for each variant.
- # SummaryTree expects 0-based coordinates.
- st.insert_range( line.chrom, long( line.pos-1 ), long( line.pos ) )
-
- st.write(out_file)
-
-if __name__ == "__main__":
- main()
\ No newline at end of file
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/converters/vcf_to_summary_tree_converter.xml
--- a/lib/galaxy/datatypes/converters/vcf_to_summary_tree_converter.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<tool id="CONVERTER_vcf_to_summary_tree_0" name="Convert VCF to Summary Tree" version="1.0.0" hidden="true">
- <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
- <command interpreter="python">vcf_to_summary_tree_converter.py $input1 $output1</command>
- <inputs>
- <page>
- <param format="vcf" name="input1" type="data" label="Choose VCF file"/>
- </page>
- </inputs>
- <outputs>
- <data format="summary_tree" name="output1"/>
- </outputs>
- <help>
- </help>
-</tool>
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/coverage.py
--- a/lib/galaxy/datatypes/coverage.py
+++ b/lib/galaxy/datatypes/coverage.py
@@ -2,21 +2,14 @@
Coverage datatypes
"""
-import pkg_resources
-pkg_resources.require( "bx-python" )
-import logging, os, sys, time, tempfile, shutil
-import data
-from galaxy import util
-from galaxy.datatypes.sniff import *
-from galaxy.web import url_for
-from cgi import escape
-import urllib
-from bx.intervals.io import *
+import logging
+import math
+
+from galaxy import eggs
from galaxy.datatypes import metadata
from galaxy.datatypes.metadata import MetadataElement
from galaxy.datatypes.tabular import Tabular
-import math
log = logging.getLogger(__name__)
@@ -34,7 +27,7 @@
Assumes we have a numpy file.
"""
# Maybe if we import here people will still be able to use Galaxy when numpy kills it
- pkg_resources.require("numpy>=1.2.1")
+ eggs.require("numpy>=1.2.1")
#from numpy.lib import format
import numpy
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -2,6 +2,7 @@
import metadata
import mimetypes
import os
+import shutil
import sys
import tempfile
import zipfile
@@ -17,12 +18,6 @@
eggs.require( "Paste" )
import paste
-
-if sys.version_info[:2] < ( 2, 6 ):
- zipfile.BadZipFile = zipfile.error
-if sys.version_info[:2] < ( 2, 5 ):
- zipfile.LargeZipFile = zipfile.error
-
log = logging.getLogger(__name__)
tmpd = tempfile.mkdtemp()
@@ -103,6 +98,12 @@
#A per datatype setting (inherited): max file size (in bytes) for setting optional metadata
_max_optional_metadata_filesize = None
+ # Trackster track type.
+ track_type = None
+
+ # Data sources.
+ data_sources = {}
+
def __init__(self, **kwd):
"""Initialize the datatype"""
object.__init__(self, **kwd)
@@ -545,21 +546,21 @@
return False
-
def merge( split_files, output_file):
"""
- TODO: Do we need to merge gzip files using gzjoin? cat seems to work,
- but might be brittle. Need to revisit this.
+ Merge files with copy.copyfileobj() will not hit the
+ max argument limitation of cat. gz and bz2 files are also working.
"""
if not split_files:
raise ValueError('Asked to merge zero files as %s' % output_file)
elif len(split_files) == 1:
- cmd = 'mv -f %s %s' % ( split_files[0], output_file )
+ shutil.copyfileobj(open(split_files[0], 'rb'), open(output_file, 'wb'))
else:
- cmd = 'cat %s > %s' % ( ' '.join(split_files), output_file )
- result = os.system(cmd)
- if result != 0:
- raise Exception('Result %s from %s' % (result, cmd))
+ fdst = open(output_file, 'wb')
+ for fsrc in split_files:
+ shutil.copyfileobj(open(fsrc, 'rb'), fdst)
+ fdst.close()
+
merge = staticmethod(merge)
def get_visualizations( self, dataset ):
@@ -567,7 +568,7 @@
Returns a list of visualizations for datatype.
"""
- if hasattr( self, 'get_track_type' ):
+ if self.track_type:
return [ 'trackster', 'circster' ]
return []
@@ -740,6 +741,10 @@
f.close()
split = classmethod(split)
+class GenericAsn1( Text ):
+ """Class for generic ASN.1 text format"""
+ file_ext = 'asn1'
+
class LineCount( Text ):
"""
Dataset contains a single line with a single integer that denotes the
diff -r 03ebba1d1de6f14ccb00e08795ef2f1896135bae -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 lib/galaxy/datatypes/display_applications/application.py
--- a/lib/galaxy/datatypes/display_applications/application.py
+++ b/lib/galaxy/datatypes/display_applications/application.py
@@ -12,7 +12,7 @@
log = logging.getLogger( __name__ )
#Any basic functions that we want to provide as a basic part of parameter dict should be added to this dict
-BASE_PARAMS = { 'qp': quote_plus, 'url_for':url_for } #url_for has route memory...
+BASE_PARAMS = { 'qp': quote_plus, 'url_for':url_for }
class DisplayApplicationLink( object ):
@classmethod
@@ -40,7 +40,7 @@
self.name = None
def get_display_url( self, data, trans ):
dataset_hash, user_hash = encode_dataset_user( trans, data, None )
- return url_for( controller='/dataset',
+ return url_for( controller='dataset',
action="display_application",
dataset_id=dataset_hash,
user_id=user_hash,
This diff is so big that we needed to truncate the remainder.
https://bitbucket.org/galaxy/galaxy-central/commits/776ac2651214/
Changeset: 776ac2651214
Branch: stable
User: natefoo
Date: 2013-06-03 21:45:31
Summary: Added tag release_2013.06.03 for changeset 58811a78af8a
Affected #: 1 file
diff -r 58811a78af8a09b77405dc343a5b2795f4cc6e88 -r 776ac26512144a65593ab999dafa2217bd2c4153 .hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -2,3 +2,4 @@
1c717491139269651bb59687563da9410b84c65d release_2013.02.08
75f09617abaadbc8cc732bb8ee519decaeb56ea7 release_2013.04.01
2cc8d10988e03257dc7b97f8bb332c7df745d1dd security_2013.04.08
+58811a78af8a09b77405dc343a5b2795f4cc6e88 release_2013.06.03
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a243d2dbca9d/
Changeset: a243d2dbca9d
Branch: next-stable
User: greg
Date: 2013-06-03 21:40:37
Summary: Fix for rendering xml elements that were not loaded using normal parsing methods.
Affected #: 2 files
diff -r 09d45fa9a84c1a91e7b14b94186bd8128883ddc4 -r a243d2dbca9d88724d03848a4136d9ae88d0b174 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -193,7 +193,7 @@
os.write( fd, '<?xml version="1.0"?>\n' )
os.write( fd, '<toolbox tool_path="%s">\n' % str( tool_path ) )
for elem in config_elems:
- os.write( fd, '%s' % xml_util.xml_to_string( elem ) )
+ os.write( fd, '%s' % xml_util.xml_to_string( elem, use_indent=True ) )
os.write( fd, '</toolbox>\n' )
os.close( fd )
shutil.move( filename, os.path.abspath( config_filename ) )
diff -r 09d45fa9a84c1a91e7b14b94186bd8128883ddc4 -r a243d2dbca9d88724d03848a4136d9ae88d0b174 lib/tool_shed/util/xml_util.py
--- a/lib/tool_shed/util/xml_util.py
+++ b/lib/tool_shed/util/xml_util.py
@@ -41,6 +41,28 @@
fh.close()
return tmp_filename
+def indent( elem, level=0 ):
+ """
+ Prints an XML tree with each node indented according to its depth. This method is used to print the shed tool config (e.g., shed_tool_conf.xml
+ from the in-memory list of config_elems because each config_elem in the list may be a hierarchical structure that was not created using the
+ parse_xml() method below, and so will not be properly written with xml.etree.ElementTree.tostring() without manually indenting the tree first.
+ """
+ i = "\n" + level * " "
+ if len( elem ):
+ if not elem.text or not elem.text.strip():
+ elem.text = i + " "
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ for child in elem:
+ indent( child, level+1 )
+ if not child.tail or not child.tail.strip():
+ child.tail = i
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ else:
+ if level and ( not elem.tail or not elem.tail.strip() ):
+ elem.tail = i
+
def parse_xml( file_name ):
"""Returns a parsed xml tree with comments intact."""
error_message = ''
@@ -64,9 +86,15 @@
fobj.close()
return tree, error_message
-def xml_to_string( elem, encoding='utf-8' ):
- if using_python_27:
- xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
+def xml_to_string( elem, encoding='utf-8', use_indent=False ):
+ if elem:
+ if use_indent:
+ # We were called from suc.config_elems_to_xml_file(), so set the level to 1 since level 0 is the <toolbox> tag set.
+ indent( elem, level=1 )
+ if using_python_27:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
+ else:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
else:
- xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
+ xml_str = ''
return xml_str
https://bitbucket.org/galaxy/galaxy-central/commits/8cbb23ed305e/
Changeset: 8cbb23ed305e
User: greg
Date: 2013-06-03 21:40:58
Summary: Merged from next-stable
Affected #: 2 files
diff -r 75bea1afc2e3e3a49c95a3540a27be165766aaac -r 8cbb23ed305e6803da733eb7be74bab779ee9ea4 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -193,7 +193,7 @@
os.write( fd, '<?xml version="1.0"?>\n' )
os.write( fd, '<toolbox tool_path="%s">\n' % str( tool_path ) )
for elem in config_elems:
- os.write( fd, '%s' % xml_util.xml_to_string( elem ) )
+ os.write( fd, '%s' % xml_util.xml_to_string( elem, use_indent=True ) )
os.write( fd, '</toolbox>\n' )
os.close( fd )
shutil.move( filename, os.path.abspath( config_filename ) )
diff -r 75bea1afc2e3e3a49c95a3540a27be165766aaac -r 8cbb23ed305e6803da733eb7be74bab779ee9ea4 lib/tool_shed/util/xml_util.py
--- a/lib/tool_shed/util/xml_util.py
+++ b/lib/tool_shed/util/xml_util.py
@@ -41,6 +41,28 @@
fh.close()
return tmp_filename
+def indent( elem, level=0 ):
+ """
+ Prints an XML tree with each node indented according to its depth. This method is used to print the shed tool config (e.g., shed_tool_conf.xml
+ from the in-memory list of config_elems because each config_elem in the list may be a hierarchical structure that was not created using the
+ parse_xml() method below, and so will not be properly written with xml.etree.ElementTree.tostring() without manually indenting the tree first.
+ """
+ i = "\n" + level * " "
+ if len( elem ):
+ if not elem.text or not elem.text.strip():
+ elem.text = i + " "
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ for child in elem:
+ indent( child, level+1 )
+ if not child.tail or not child.tail.strip():
+ child.tail = i
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ else:
+ if level and ( not elem.tail or not elem.tail.strip() ):
+ elem.tail = i
+
def parse_xml( file_name ):
"""Returns a parsed xml tree with comments intact."""
error_message = ''
@@ -64,9 +86,15 @@
fobj.close()
return tree, error_message
-def xml_to_string( elem, encoding='utf-8' ):
- if using_python_27:
- xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
+def xml_to_string( elem, encoding='utf-8', use_indent=False ):
+ if elem:
+ if use_indent:
+ # We were called from suc.config_elems_to_xml_file(), so set the level to 1 since level 0 is the <toolbox> tag set.
+ indent( elem, level=1 )
+ if using_python_27:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
+ else:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
else:
- xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
+ xml_str = ''
return xml_str
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/09d45fa9a84c/
Changeset: 09d45fa9a84c
Branch: next-stable
User: Dave Bouvier
Date: 2013-06-03 18:33:02
Summary: Fix for installing tool dependencies when set_environment actions are distributed between multiple action tag groups.
Affected #: 1 file
diff -r 6a8584218f61c5cf7aa129677a7548c610cd0cca -r 09d45fa9a84c1a91e7b14b94186bd8128883ddc4 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -130,10 +130,10 @@
if package_install_version == '1.0':
# Since the required tool dependency is installed for a repository dependency, we first need to inspect the <actions> tag set to find
# the <action type="set_environment"> tag.
+ env_var_dicts = []
for actions_elem in package_elem:
for action_elem in actions_elem:
action_type = action_elem.get( 'type', 'shell_command' )
- env_var_dicts = []
if action_type == 'set_environment':
# <action type="set_environment">
# <environment_variable name="PYTHONPATH" action="append_to">$INSTALL_DIR/lib/python</environment_variable>
@@ -143,7 +143,8 @@
if env_elem.tag == 'environment_variable':
env_var_dict = common_util.create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir )
if env_var_dict:
- env_var_dicts.append( env_var_dict )
+ if env_var_dict not in env_var_dicts:
+ env_var_dicts.append( env_var_dict )
elif action_type == 'setup_virtualenv':
# Add the virtualenv's site-packages to PYTHONPATH and bin to PATH. This is a bit hackish.
site_packages_command = "%s -c 'import os, sys; print os.path.join(sys.prefix, \"lib\", \"python\" + sys.version[:3], \"site-packages\")'" % os.path.join( install_dir, "venv", "bin", "python" )
@@ -155,40 +156,40 @@
else:
env_var_dicts.append( dict( name="PYTHONPATH", action="prepend_to", value=output.stdout ) )
env_var_dicts.append( dict( name="PATH", action="prepend_to", value=os.path.join( install_dir, 'venv', 'bin' ) ) )
- if env_var_dicts:
- if required_repository.status in [ app.model.ToolShedRepository.installation_status.INSTALLED,
- app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
- # Handle the case where we have an installed required repository due to the prior_installation_required = True
- # setting in the received tool_shed_repository's tool_dependencies.xml file and the required repository's
- # tool_dependencies.xml file may include the use of the $ENV[] variable inheritance feature. To handle this,
- # we will replace the current "value" entries in each env_var_dict with the actual path taken from the env.sh
- # file generated for the installed required repository. Each env_var_dict currently looks something like this:
- # {'action': 'append_to', 'name': 'LD_LIBRARY_PATH', 'value': '$BOOST_ROOT_DIR/lib/'}
- # We'll read the contents of the received required_repository's env.sh file and replace the 'value' entry of each env_var_dict
- # with the associated value in the env.sh file.
- new_env_var_dicts = []
- env_sh_file_dir = get_tool_dependency_install_dir( app=app,
- repository_name=required_repository.name,
- repository_owner=required_repository.owner,
- repository_changeset_revision=required_repository.installed_changeset_revision,
- tool_dependency_type='package',
- tool_dependency_name=package_name,
- tool_dependency_version=package_version )
- env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
- for i, line in enumerate( open( env_sh_file_path, 'r' ) ):
- env_var_dict = env_var_dicts[ i ]
- action = env_var_dict.get( 'action', None )
- name = env_var_dict.get( 'name', None )
- value = env_var_dict.get( 'value', None )
- if action and name and value:
- new_value = parse_env_shell_entry( action, name, value, line )
- env_var_dict[ 'value' ] = new_value
- new_env_var_dicts.append( env_var_dict )
- action_dict[ 'environment_variable' ] = new_env_var_dicts
- else:
- action_dict[ 'environment_variable' ] = env_var_dicts
- actions.append( ( 'set_environment', action_dict ) )
- return tool_dependency, actions
+ if env_var_dicts:
+ if required_repository.status in [ app.model.ToolShedRepository.installation_status.INSTALLED,
+ app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ # Handle the case where we have an installed required repository due to the prior_installation_required = True
+ # setting in the received tool_shed_repository's tool_dependencies.xml file and the required repository's
+ # tool_dependencies.xml file may include the use of the $ENV[] variable inheritance feature. To handle this,
+ # we will replace the current "value" entries in each env_var_dict with the actual path taken from the env.sh
+ # file generated for the installed required repository. Each env_var_dict currently looks something like this:
+ # {'action': 'append_to', 'name': 'LD_LIBRARY_PATH', 'value': '$BOOST_ROOT_DIR/lib/'}
+ # We'll read the contents of the received required_repository's env.sh file and replace the 'value' entry of each env_var_dict
+ # with the associated value in the env.sh file.
+ new_env_var_dicts = []
+ env_sh_file_dir = get_tool_dependency_install_dir( app=app,
+ repository_name=required_repository.name,
+ repository_owner=required_repository.owner,
+ repository_changeset_revision=required_repository.installed_changeset_revision,
+ tool_dependency_type='package',
+ tool_dependency_name=package_name,
+ tool_dependency_version=package_version )
+ env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
+ for i, line in enumerate( open( env_sh_file_path, 'r' ) ):
+ env_var_dict = env_var_dicts[ i ]
+ action = env_var_dict.get( 'action', None )
+ name = env_var_dict.get( 'name', None )
+ value = env_var_dict.get( 'value', None )
+ if action and name and value:
+ new_value = parse_env_shell_entry( action, name, value, line )
+ env_var_dict[ 'value' ] = new_value
+ new_env_var_dicts.append( env_var_dict )
+ action_dict[ 'environment_variable' ] = new_env_var_dicts
+ else:
+ action_dict[ 'environment_variable' ] = env_var_dicts
+ actions.append( ( 'set_environment', action_dict ) )
+ return tool_dependency, actions
else:
raise NotImplementedError( 'Only install version 1.0 is currently supported (i.e., change your tag to be <install version="1.0">).' )
return None, actions
@@ -464,7 +465,12 @@
else:
log.debug( "Unsupported action type '%s'. Not proceeding." % str( action_type ) )
raise Exception( "Unsupported action type '%s' in tool dependency definition." % str( action_type ) )
- actions.append( ( action_type, action_dict ) )
+ action_tuple = ( action_type, action_dict )
+ if action_type == 'set_environment':
+ if action_tuple not in actions:
+ actions.append( action_tuple )
+ else:
+ actions.append( action_tuple )
if actions:
actions_dict[ 'actions' ] = actions
if proprietary_fabfile_path:
https://bitbucket.org/galaxy/galaxy-central/commits/75bea1afc2e3/
Changeset: 75bea1afc2e3
User: Dave Bouvier
Date: 2013-06-03 18:35:52
Summary: Merge in next-stable.
Affected #: 1 file
diff -r 83213accd759e752538c025275557d3dfc5d1433 -r 75bea1afc2e3e3a49c95a3540a27be165766aaac lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -130,10 +130,10 @@
if package_install_version == '1.0':
# Since the required tool dependency is installed for a repository dependency, we first need to inspect the <actions> tag set to find
# the <action type="set_environment"> tag.
+ env_var_dicts = []
for actions_elem in package_elem:
for action_elem in actions_elem:
action_type = action_elem.get( 'type', 'shell_command' )
- env_var_dicts = []
if action_type == 'set_environment':
# <action type="set_environment">
# <environment_variable name="PYTHONPATH" action="append_to">$INSTALL_DIR/lib/python</environment_variable>
@@ -143,7 +143,8 @@
if env_elem.tag == 'environment_variable':
env_var_dict = common_util.create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir )
if env_var_dict:
- env_var_dicts.append( env_var_dict )
+ if env_var_dict not in env_var_dicts:
+ env_var_dicts.append( env_var_dict )
elif action_type == 'setup_virtualenv':
# Add the virtualenv's site-packages to PYTHONPATH and bin to PATH. This is a bit hackish.
site_packages_command = "%s -c 'import os, sys; print os.path.join(sys.prefix, \"lib\", \"python\" + sys.version[:3], \"site-packages\")'" % os.path.join( install_dir, "venv", "bin", "python" )
@@ -155,40 +156,40 @@
else:
env_var_dicts.append( dict( name="PYTHONPATH", action="prepend_to", value=output.stdout ) )
env_var_dicts.append( dict( name="PATH", action="prepend_to", value=os.path.join( install_dir, 'venv', 'bin' ) ) )
- if env_var_dicts:
- if required_repository.status in [ app.model.ToolShedRepository.installation_status.INSTALLED,
- app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
- # Handle the case where we have an installed required repository due to the prior_installation_required = True
- # setting in the received tool_shed_repository's tool_dependencies.xml file and the required repository's
- # tool_dependencies.xml file may include the use of the $ENV[] variable inheritance feature. To handle this,
- # we will replace the current "value" entries in each env_var_dict with the actual path taken from the env.sh
- # file generated for the installed required repository. Each env_var_dict currently looks something like this:
- # {'action': 'append_to', 'name': 'LD_LIBRARY_PATH', 'value': '$BOOST_ROOT_DIR/lib/'}
- # We'll read the contents of the received required_repository's env.sh file and replace the 'value' entry of each env_var_dict
- # with the associated value in the env.sh file.
- new_env_var_dicts = []
- env_sh_file_dir = get_tool_dependency_install_dir( app=app,
- repository_name=required_repository.name,
- repository_owner=required_repository.owner,
- repository_changeset_revision=required_repository.installed_changeset_revision,
- tool_dependency_type='package',
- tool_dependency_name=package_name,
- tool_dependency_version=package_version )
- env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
- for i, line in enumerate( open( env_sh_file_path, 'r' ) ):
- env_var_dict = env_var_dicts[ i ]
- action = env_var_dict.get( 'action', None )
- name = env_var_dict.get( 'name', None )
- value = env_var_dict.get( 'value', None )
- if action and name and value:
- new_value = parse_env_shell_entry( action, name, value, line )
- env_var_dict[ 'value' ] = new_value
- new_env_var_dicts.append( env_var_dict )
- action_dict[ 'environment_variable' ] = new_env_var_dicts
- else:
- action_dict[ 'environment_variable' ] = env_var_dicts
- actions.append( ( 'set_environment', action_dict ) )
- return tool_dependency, actions
+ if env_var_dicts:
+ if required_repository.status in [ app.model.ToolShedRepository.installation_status.INSTALLED,
+ app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ # Handle the case where we have an installed required repository due to the prior_installation_required = True
+ # setting in the received tool_shed_repository's tool_dependencies.xml file and the required repository's
+ # tool_dependencies.xml file may include the use of the $ENV[] variable inheritance feature. To handle this,
+ # we will replace the current "value" entries in each env_var_dict with the actual path taken from the env.sh
+ # file generated for the installed required repository. Each env_var_dict currently looks something like this:
+ # {'action': 'append_to', 'name': 'LD_LIBRARY_PATH', 'value': '$BOOST_ROOT_DIR/lib/'}
+ # We'll read the contents of the received required_repository's env.sh file and replace the 'value' entry of each env_var_dict
+ # with the associated value in the env.sh file.
+ new_env_var_dicts = []
+ env_sh_file_dir = get_tool_dependency_install_dir( app=app,
+ repository_name=required_repository.name,
+ repository_owner=required_repository.owner,
+ repository_changeset_revision=required_repository.installed_changeset_revision,
+ tool_dependency_type='package',
+ tool_dependency_name=package_name,
+ tool_dependency_version=package_version )
+ env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
+ for i, line in enumerate( open( env_sh_file_path, 'r' ) ):
+ env_var_dict = env_var_dicts[ i ]
+ action = env_var_dict.get( 'action', None )
+ name = env_var_dict.get( 'name', None )
+ value = env_var_dict.get( 'value', None )
+ if action and name and value:
+ new_value = parse_env_shell_entry( action, name, value, line )
+ env_var_dict[ 'value' ] = new_value
+ new_env_var_dicts.append( env_var_dict )
+ action_dict[ 'environment_variable' ] = new_env_var_dicts
+ else:
+ action_dict[ 'environment_variable' ] = env_var_dicts
+ actions.append( ( 'set_environment', action_dict ) )
+ return tool_dependency, actions
else:
raise NotImplementedError( 'Only install version 1.0 is currently supported (i.e., change your tag to be <install version="1.0">).' )
return None, actions
@@ -464,7 +465,12 @@
else:
log.debug( "Unsupported action type '%s'. Not proceeding." % str( action_type ) )
raise Exception( "Unsupported action type '%s' in tool dependency definition." % str( action_type ) )
- actions.append( ( action_type, action_dict ) )
+ action_tuple = ( action_type, action_dict )
+ if action_type == 'set_environment':
+ if action_tuple not in actions:
+ actions.append( action_tuple )
+ else:
+ actions.append( action_tuple )
if actions:
actions_dict[ 'actions' ] = actions
if proprietary_fabfile_path:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0