commit/galaxy-central: greg: The basis of this change set is to provide some fixes to certain Tool Shed functions for supporting changing HTTP protocols over time (i.e., https <-> http). In doing this, I've refactored several functions and moved most of the Tool Shed <-> Galaxy URL munging functions to the Tool Shed's common_util.py module. I've renamed the munging functions to clarify what each of them does. Several duplicate functions have been eliminated from the code as well.
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/4aee689e69da/ Changeset: 4aee689e69da User: greg Date: 2014-04-09 16:11:48 Summary: The basis of this change set is to provide some fixes to certain Tool Shed functions for supporting changing HTTP protocols over time (ie.e, https <-> http). In doing this, I've refactored several functions and moved most of the Tool Shed <-> Galaxy URL munging functions to the Tool Shed's common_util.py module. Ive renamed the munging functions to clarify what each of them does. Several duplicate functions have been eliminated from the code as well. Affected #: 28 files diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/galaxy/model/tool_shed_install/__init__.py --- a/lib/galaxy/model/tool_shed_install/__init__.py +++ b/lib/galaxy/model/tool_shed_install/__init__.py @@ -123,12 +123,8 @@ self.shed_config_filename = name return shed_tool_conf_dict if self.includes_datatypes: - #we need to search by filepaths here, which is less desirable - tool_shed_url = self.tool_shed - if tool_shed_url.find( ':' ) > 0: - # Eliminate the port, if any, since it will result in an invalid directory name. - tool_shed_url = tool_shed_url.split( ':' )[ 0 ] - tool_shed = tool_shed_url.rstrip( '/' ) + # We need to search by file paths here, which is less desirable. + tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( self.tool_shed ) for shed_tool_conf_dict in app.toolbox.shed_tool_confs: tool_path = shed_tool_conf_dict[ 'tool_path' ] relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision ) @@ -271,11 +267,7 @@ return None def repo_path( self, app ): - tool_shed_url = self.tool_shed - if tool_shed_url.find( ':' ) > 0: - # Eliminate the port, if any, since it will result in an invalid directory name. 
- tool_shed_url = tool_shed_url.split( ':' )[ 0 ] - tool_shed = tool_shed_url.rstrip( '/' ) + tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( self.tool_shed ) for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ): tool_path = shed_tool_conf_dict[ 'tool_path' ] relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision ) @@ -339,7 +331,7 @@ be installed in order for this repository to function correctly. However, those repository dependencies that are defined for this repository with prior_installation_required set to True place them in a special category in that the required repositories must be installed before this repository is installed. Among other things, this enables these "special" repository dependencies to include - information that enables the successful intallation of this repository. This method is not used during the initial installation of + information that enables the successful installation of this repository. This method is not used during the initial installation of this repository, but only after it has been installed (metadata must be set for this repository in order for this method to be useful). 
""" required_rd_tups_that_must_be_installed = [] @@ -347,12 +339,14 @@ rd_tups = self.metadata[ 'repository_dependencies' ][ 'repository_dependencies' ] for rd_tup in rd_tups: if len( rd_tup ) == 5: - tool_shed, name, owner, changeset_revision, prior_installation_required = rd_tup + tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ + common_util.parse_repository_dependency_tuple( rd_tup, contains_error=False ) if asbool( prior_installation_required ): required_rd_tups_that_must_be_installed.append( ( tool_shed, name, owner, changeset_revision, 'True', 'False' ) ) elif len( rd_tup ) == 6: - tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup - # The repository dependency will only be required to be previsously installed if it does not fall into the category of + tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ + common_util.parse_repository_dependency_tuple( rd_tup, contains_error=False ) + # The repository dependency will only be required to be previously installed if it does not fall into the category of # a repository that must be installed only so that it's contained tool dependency can be used for compiling the tool # dependency of the dependent repository. 
if not asbool( only_if_compiling_contained_td ): diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/galaxy/tools/__init__.py --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -63,6 +63,7 @@ from galaxy.web.form_builder import SelectField from galaxy.model.item_attrs import Dictifiable from galaxy.model import Workflow +from tool_shed.util import common_util from tool_shed.util import shed_util_common as suc from .loader import load_tool, template_macro_params from .wrappers import ( @@ -467,6 +468,8 @@ .first() def __get_tool_shed_repository( self, tool_shed, name, owner, installed_changeset_revision ): + # We store only the port, if one exists, in the database. + tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed ) return self.app.install_model.context.query( self.app.install_model.ToolShedRepository ) \ .filter( and_( self.app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed, self.app.install_model.ToolShedRepository.table.c.name == name, @@ -532,7 +535,10 @@ # Backward compatibility issue - the tag used to be named 'changeset_revision'. installed_changeset_revision_elem = elem.find( "changeset_revision" ) installed_changeset_revision = installed_changeset_revision_elem.text - tool_shed_repository = self.__get_tool_shed_repository( tool_shed, repository_name, repository_owner, installed_changeset_revision ) + tool_shed_repository = self.__get_tool_shed_repository( tool_shed, + repository_name, + repository_owner, + installed_changeset_revision ) if tool_shed_repository: # Only load tools if the repository is not deactivated or uninstalled. 
can_load_into_panel_dict = not tool_shed_repository.deleted diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/galaxy/tools/data_manager/manager.py --- a/lib/galaxy/tools/data_manager/manager.py +++ b/lib/galaxy/tools/data_manager/manager.py @@ -6,6 +6,7 @@ from galaxy.util.odict import odict from galaxy.util.template import fill_template from galaxy.tools.data import TabularToolDataTable +from tool_shed.util import common_util import tool_shed.util.shed_util_common as suc #set up logger @@ -16,6 +17,7 @@ VALUE_TRANSLATION_FUNCTIONS = dict( abspath=os.path.abspath ) DEFAULT_VALUE_TRANSLATION_TYPE = 'template' + class DataManagers( object ): def __init__( self, app, xml_filename=None ): self.app = app @@ -26,6 +28,7 @@ self.load_from_xml( self.filename ) if self.app.config.shed_data_manager_config_file: self.load_from_xml( self.app.config.shed_data_manager_config_file, store_tool_path=False, replace_existing=True ) + def load_from_xml( self, xml_filename, store_tool_path=True, replace_existing=False ): try: tree = util.parse_xml( xml_filename ) @@ -45,6 +48,7 @@ self.tool_path = tool_path for data_manager_elem in root.findall( 'data_manager' ): self.load_manager_from_elem( data_manager_elem, replace_existing=replace_existing ) + def load_manager_from_elem( self, data_manager_elem, tool_path=None, add_manager=True, replace_existing=False ): try: data_manager = DataManager( self, data_manager_elem, tool_path=tool_path ) @@ -55,6 +59,7 @@ self.add_manager( data_manager, replace_existing=replace_existing ) log.debug( 'Loaded Data Manager: %s' % ( data_manager.id ) ) return data_manager + def add_manager( self, data_manager, replace_existing=False ): if not replace_existing: assert data_manager.id not in self.data_managers, "A data manager has been defined twice: %s" % ( data_manager.id ) @@ -67,8 +72,10 @@ if data_table_name not in self.managed_data_tables: self.managed_data_tables[ data_table_name ] = [] 
self.managed_data_tables[ data_table_name ].append( data_manager ) + def get_manager( self, *args, **kwds ): return self.data_managers.get( *args, **kwds ) + def remove_manager( self, manager_ids ): if not isinstance( manager_ids, list ): manager_ids = [ manager_ids ] @@ -89,6 +96,7 @@ if remove_data_table_tracking and data_table_name in self.managed_data_tables: del self.managed_data_tables[ data_table_name ] + class DataManager( object ): GUID_TYPE = 'data_manager' DEFAULT_VERSION = "0.0.1" @@ -108,6 +116,7 @@ self.tool_shed_repository_info_dict = None if elem is not None: self.load_from_element( elem, tool_path or self.data_managers.tool_path ) + def load_from_element( self, elem, tool_path ): assert elem.tag == 'data_manager', 'A data manager configuration must have a "data_manager" tag as the root. "%s" is present' % ( elem.tag ) self.declared_id = elem.get( 'id', None ) @@ -122,14 +131,24 @@ path = tool_elem.get( "file", None ) tool_guid = tool_elem.get( "guid", None ) #need to determine repository info so that dependencies will work correctly - tool_shed = tool_elem.find( 'tool_shed' ).text + tool_shed_url = tool_elem.find( 'tool_shed' ).text + # Handle protocol changes. 
+ tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.data_managers.app, tool_shed_url ) repository_name = tool_elem.find( 'repository_name' ).text repository_owner = tool_elem.find( 'repository_owner' ).text installed_changeset_revision = tool_elem.find( 'installed_changeset_revision' ).text - #save repository info here - self.tool_shed_repository_info_dict = dict( tool_shed=tool_shed, name=repository_name, owner=repository_owner, installed_changeset_revision=installed_changeset_revision ) - #get tool_shed repo id - tool_shed_repository = suc.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.data_managers.app, tool_shed, repository_name, repository_owner, installed_changeset_revision ) + self.tool_shed_repository_info_dict = dict( tool_shed_url=tool_shed_url, + name=repository_name, + owner=repository_owner, + installed_changeset_revision=installed_changeset_revision ) + # The protocol is not stored in the database. + tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed_url ) + tool_shed_repository = \ + suc.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.data_managers.app, + tool_shed, + repository_name, + repository_owner, + installed_changeset_revision ) if tool_shed_repository is None: log.warning( 'Could not determine tool shed repository from database. This should only ever happen when running tests.' 
) #we'll set tool_path manually here from shed_conf_file @@ -147,7 +166,10 @@ if shed_conf: tool_path = shed_conf.get( "tool_path", tool_path ) assert path is not None, "A tool file path could not be determined:\n%s" % ( util.xml_to_string( elem ) ) - self.load_tool( os.path.join( tool_path, path ), guid=tool_guid, data_manager_id=self.id, tool_shed_repository_id=tool_shed_repository_id ) + self.load_tool( os.path.join( tool_path, path ), + guid=tool_guid, + data_manager_id=self.id, + tool_shed_repository_id=tool_shed_repository_id ) self.name = elem.get( 'name', self.tool.name ) self.description = elem.get( 'description', self.tool.description ) @@ -206,12 +228,23 @@ target_value = target_elem.text if data_table_name not in self.move_by_data_table_column: self.move_by_data_table_column[ data_table_name ] = {} - self.move_by_data_table_column[ data_table_name ][ data_table_coumn_name ] = dict( type=move_type, source_base=source_base, source_value=source_value, target_base=target_base, target_value=target_value, relativize_symlinks=relativize_symlinks ) + self.move_by_data_table_column[ data_table_name ][ data_table_coumn_name ] = \ + dict( type=move_type, + source_base=source_base, + source_value=source_value, + target_base=target_base, + target_value=target_value, + relativize_symlinks=relativize_symlinks ) + @property def id( self ): return self.guid or self.declared_id #if we have a guid, we will use that as the data_manager id + def load_tool( self, tool_filename, guid=None, data_manager_id=None, tool_shed_repository_id=None ): - tool = self.data_managers.app.toolbox.load_tool( tool_filename, guid=guid, data_manager_id=data_manager_id, repository_id=tool_shed_repository_id ) + tool = self.data_managers.app.toolbox.load_tool( tool_filename, + guid=guid, + data_manager_id=data_manager_id, + repository_id=tool_shed_repository_id ) self.data_managers.app.toolbox.data_manager_tools[ tool.id ] = tool self.data_managers.app.toolbox.tools_by_id[ tool.id ] = tool 
self.tool = tool @@ -294,7 +327,6 @@ except OSError, e: if e.errno != errno.EEXIST: raise e - #log.debug( 'Error creating directory "%s": %s' % ( dirs, e ) ) #moving a directory and the target already exists, we move the contents instead util.move_merge( source, target ) diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py --- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py +++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py @@ -207,9 +207,9 @@ # Keep track of all repositories that are installed - there may be more than one if repository dependencies are installed. installed_tool_shed_repositories = [] # Get all of the information necessary for installing the repository from the specified tool shed. - url = suc.url_join( tool_shed_url, - 'api/repositories/get_repository_revision_install_info?name=%s&owner=%s&changeset_revision=%s' % \ - ( name, owner, changeset_revision ) ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) + url = common_util.url_join( tool_shed_url, + 'api/repositories/get_repository_revision_install_info%s' % params ) try: raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url ) except Exception, e: diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/galaxy/webapps/galaxy/controllers/admin.py --- a/lib/galaxy/webapps/galaxy/controllers/admin.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin.py @@ -20,46 +20,74 @@ log = logging.getLogger( __name__ ) + class UserListGrid( grids.Grid ): + + class EmailColumn( grids.TextColumn ): + def get_value( self, trans, grid, user ): return user.email + + class UserNameColumn( grids.TextColumn ): + def get_value( self, trans, grid, user ): if user.username: return user.username return 'not set' + + class StatusColumn( grids.GridColumn ): + def get_value( self, trans, grid, user ): if user.purged: 
return "purged" elif user.deleted: return "deleted" return "" + + class GroupsColumn( grids.GridColumn ): + def get_value( self, trans, grid, user ): if user.groups: return len( user.groups ) return 0 + + class RolesColumn( grids.GridColumn ): + def get_value( self, trans, grid, user ): if user.roles: return len( user.roles ) return 0 + + class ExternalColumn( grids.GridColumn ): + def get_value( self, trans, grid, user ): if user.external: return 'yes' return 'no' + + class LastLoginColumn( grids.GridColumn ): + def get_value( self, trans, grid, user ): if user.galaxy_sessions: return self.format( user.galaxy_sessions[ 0 ].update_time ) return 'never' + + class TimeCreatedColumn( grids.GridColumn ): + def get_value( self, trans, grid, user ): return user.create_time.strftime('%x') + + class ActivatedColumn( grids.GridColumn ): + def get_value( self, trans, grid, user ): if user.active: return 'Y' @@ -121,32 +149,53 @@ num_rows_per_page = 50 preserve_state = False use_paging = True + def get_current_item( self, trans, **kwargs ): return trans.user + class RoleListGrid( grids.Grid ): + + class NameColumn( grids.TextColumn ): + def get_value( self, trans, grid, role ): return role.name + + class DescriptionColumn( grids.TextColumn ): + def get_value( self, trans, grid, role ): if role.description: return role.description return '' + + class TypeColumn( grids.TextColumn ): + def get_value( self, trans, grid, role ): return role.type + + class StatusColumn( grids.GridColumn ): + def get_value( self, trans, grid, role ): if role.deleted: return "deleted" return "" + + class GroupsColumn( grids.GridColumn ): + def get_value( self, trans, grid, role ): if role.groups: return len( role.groups ) return 0 + + class UsersColumn( grids.GridColumn ): + + def get_value( self, trans, grid, role ): if role.users: return len( role.users ) @@ -212,24 +261,38 @@ num_rows_per_page = 50 preserve_state = False use_paging = True + def apply_query_filter( self, trans, query, **kwargs ): 
return query.filter( model.Role.type != model.Role.types.PRIVATE ) + class GroupListGrid( grids.Grid ): + + class NameColumn( grids.TextColumn ): + def get_value( self, trans, grid, group ): return group.name + + class StatusColumn( grids.GridColumn ): + def get_value( self, trans, grid, group ): if group.deleted: return "deleted" return "" + + class RolesColumn( grids.GridColumn ): + def get_value( self, trans, grid, group ): if group.roles: return len( group.roles ) return 0 + + class UsersColumn( grids.GridColumn ): + def get_value( self, trans, grid, group ): if group.members: return len( group.members ) @@ -287,30 +350,48 @@ use_paging = True class QuotaListGrid( grids.Grid ): + + class NameColumn( grids.TextColumn ): + def get_value( self, trans, grid, quota ): return quota.name + + class DescriptionColumn( grids.TextColumn ): + def get_value( self, trans, grid, quota ): if quota.description: return quota.description return '' + + class AmountColumn( grids.TextColumn ): + def get_value( self, trans, grid, quota ): return quota.operation + quota.display_amount + + class StatusColumn( grids.GridColumn ): + def get_value( self, trans, grid, quota ): if quota.deleted: return "deleted" elif quota.default: return "<strong>default for %s users</strong>" % quota.default[0].type return "" + + class UsersColumn( grids.GridColumn ): + def get_value( self, trans, grid, quota ): if quota.users: return len( quota.users ) return 0 + + class GroupsColumn( grids.GridColumn ): + def get_value( self, trans, grid, quota ): if quota.groups: return len( quota.groups ) @@ -397,15 +478,22 @@ preserve_state = False use_paging = True + class ToolVersionListGrid( grids.Grid ): + + class ToolIdColumn( grids.TextColumn ): + def get_value( self, trans, grid, tool_version ): if tool_version.tool_id in trans.app.toolbox.tools_by_id: link = url_for( controller='tool_runner', tool_id=tool_version.tool_id ) link_str = '<a href="%s">' % link return '<div class="count-box 
state-color-ok">%s%s</a></div>' % ( link_str, tool_version.tool_id ) return tool_version.tool_id + + class ToolVersionsColumn( grids.TextColumn ): + def get_value( self, trans, grid, tool_version ): tool_ids_str = '' for tool_id in tool_version.get_version_ids( trans.app ): @@ -443,6 +531,7 @@ def build_initial_query( self, trans, **kwd ): return trans.install_model.context.query( self.model_class ) + class AdminGalaxy( BaseUIController, Admin, AdminActions, UsesQuotaMixin, QuotaParamParser ): user_list_grid = UserListGrid() @@ -479,6 +568,7 @@ return self.edit_quota( trans, **kwargs ) # Render the list view return self.quota_list_grid( trans, **kwargs ) + @web.expose @web.require_admin def create_quota( self, trans, **kwd ): @@ -525,6 +615,7 @@ out_groups=params.out_groups, message=params.message, status=params.status ) + @web.expose @web.require_admin def rename_quota( self, trans, **kwd ): @@ -538,6 +629,7 @@ webapp=params.webapp, message=params.message, status=params.status ) + @web.expose @web.require_admin def manage_users_and_groups_for_quota( self, trans, **kwd ): @@ -572,6 +664,7 @@ webapp=params.webapp, message=params.message, status=params.status ) + @web.expose @web.require_admin def edit_quota( self, trans, **kwd ): @@ -585,6 +678,7 @@ webapp=params.webapp, message=params.message, status=params.status ) + @web.expose @web.require_admin def set_quota_default( self, trans, **kwd ): @@ -603,6 +697,7 @@ webapp=params.webapp, message=params.message, status=params.status ) + @web.expose @web.require_admin def unset_quota_default( self, trans, **kwd ): @@ -614,6 +709,7 @@ webapp=params.webapp, message=sanitize_text( params.message ), status='error' ) ) + @web.expose @web.require_admin def mark_quota_deleted( self, trans, **kwd ): @@ -625,6 +721,7 @@ webapp=params.webapp, message=sanitize_text( params.message ), status='error' ) ) + @web.expose @web.require_admin def undelete_quota( self, trans, **kwd ): @@ -636,6 +733,7 @@ webapp=params.webapp, 
message=sanitize_text( params.message ), status='error' ) ) + @web.expose @web.require_admin def purge_quota( self, trans, **kwd ): @@ -647,6 +745,7 @@ webapp=params.webapp, message=sanitize_text( params.message ), status='error' ) ) + def _quota_op( self, trans, do_op, op_method, kwd, listify=False ): params = self.get_quota_params( kwd ) if listify: @@ -684,6 +783,7 @@ params.message = e.err_msg params.status = e.type return quota, params + @web.expose @web.require_admin def impersonate( self, trans, email=None, **kwd ): @@ -705,21 +805,14 @@ if emails is None: emails = [ u.email for u in trans.sa_session.query( trans.app.model.User ).enable_eagerloads( False ).all() ] return trans.fill_template( 'admin/impersonate.mako', emails=emails, message=message, status=status ) - def get_tool_shed_url_from_tools_xml_file_path( self, trans, tool_shed ): - search_str = '://%s' % tool_shed - for shed_name, shed_url in trans.app.tool_shed_registry.tool_sheds.items(): - if shed_url.find( search_str ) >= 0: - if shed_url.endswith( '/' ): - shed_url = shed_url.rstrip( '/' ) - return shed_url - return None + def check_for_tool_dependencies( self, trans, migration_stage ): # Get the 000x_tools.xml file associated with migration_stage. 
tools_xml_file_path = os.path.abspath( os.path.join( trans.app.config.root, 'scripts', 'migrate_tools', '%04d_tools.xml' % migration_stage ) ) tree = galaxy.util.parse_xml( tools_xml_file_path ) root = tree.getroot() tool_shed = root.get( 'name' ) - tool_shed_url = self.get_tool_shed_url_from_tools_xml_file_path( trans, tool_shed ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed ) repo_name_dependency_tups = [] if tool_shed_url: for elem in root: @@ -741,6 +834,7 @@ tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) ) repo_name_dependency_tups.append( ( repository_name, tool_dependencies ) ) return repo_name_dependency_tups + @web.expose @web.require_admin def review_tool_migration_stages( self, trans, **kwd ): @@ -772,12 +866,14 @@ migration_stages_dict=migration_stages_dict, message=message, status=status ) + @web.expose @web.require_admin def view_datatypes_registry( self, trans, **kwd ): message = galaxy.util.restore_text( kwd.get( 'message', '' ) ) status = galaxy.util.restore_text( kwd.get( 'status', 'done' ) ) return trans.fill_template( 'admin/view_datatypes_registry.mako', message=message, status=status ) + @web.expose @web.require_admin def view_tool_data_tables( self, trans, **kwd ): diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -180,9 +180,10 @@ @web.expose @web.require_admin def browse_tool_shed( self, trans, **kwd ): - tool_shed_url = kwd[ 'tool_shed_url' ] + tool_shed_url = kwd.get( 'tool_shed_url', '' ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url ) galaxy_url = web.url_for( '/', qualified=True ) - url = suc.url_join( tool_shed_url, 
'repository/browse_valid_categories?galaxy_url=%s' % ( galaxy_url ) ) + url = common_util.url_join( tool_shed_url, 'repository/browse_valid_categories?galaxy_url=%s' % ( galaxy_url ) ) return trans.response.send_redirect( url ) @web.expose @@ -200,13 +201,13 @@ """Send a request to the relevant tool shed to see if there are any updates.""" repository_id = kwd.get( 'id', None ) repository = suc.get_installed_tool_shed_repository( trans, repository_id ) - tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) ) params = '?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \ ( web.url_for( '/', qualified=True ), str( repository.name ), str( repository.owner ), str( repository.changeset_revision ) ) - url = suc.url_join( tool_shed_url, + url = common_util.url_join( tool_shed_url, 'repository/check_for_updates%s' % params ) return trans.response.send_redirect( url ) @@ -340,17 +341,19 @@ @web.expose @web.require_admin def find_tools_in_tool_shed( self, trans, **kwd ): - tool_shed_url = kwd[ 'tool_shed_url' ] + tool_shed_url = kwd.get( 'tool_shed_url', '' ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url ) galaxy_url = web.url_for( '/', qualified=True ) - url = suc.url_join( tool_shed_url, 'repository/find_tools?galaxy_url=%s' % galaxy_url ) + url = common_util.url_join( tool_shed_url, 'repository/find_tools?galaxy_url=%s' % galaxy_url ) return trans.response.send_redirect( url ) @web.expose @web.require_admin def find_workflows_in_tool_shed( self, trans, **kwd ): - tool_shed_url = kwd[ 'tool_shed_url' ] + tool_shed_url = kwd.get( 'tool_shed_url', '' ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url ) galaxy_url = web.url_for( '/', qualified=True ) - url = suc.url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s' % 
galaxy_url ) + url = common_util.url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s' % galaxy_url ) return trans.response.send_redirect( url ) @web.expose @@ -377,10 +380,10 @@ it was installed. """ repository = suc.get_installed_tool_shed_repository( trans, repository_id ) - tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed ) - url = suc.url_join( tool_shed_url, - 'repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s' % \ - ( repository_name, repository_owner, changeset_revision ) ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( repository_name, repository_owner, changeset_revision ) + url = common_util.url_join( tool_shed_url, + 'repository/get_tool_dependencies%s' % params ) raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url ) if len( raw_text ) > 2: encoded_text = json.from_json_string( raw_text ) @@ -397,10 +400,12 @@ an updated revision of an uninstalled tool shed repository. 
""" repository = suc.get_installed_tool_shed_repository( trans, repository_id ) - tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed ) - url = suc.url_join( tool_shed_url, - 'repository/get_updated_repository_information?name=%s&owner=%s&changeset_revision=%s' % \ - ( repository_name, repository_owner, changeset_revision ) ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( repository_name ), + str( repository_owner ), + changeset_revision ) + url = common_util.url_join( tool_shed_url, + 'repository/get_updated_repository_information%s' % params ) raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url ) repo_information_dict = json.from_json_string( raw_text ) return repo_information_dict @@ -479,12 +484,12 @@ if repository_id is not None: repository = suc.get_installed_tool_shed_repository( trans, repository_id ) if repository is not None: - tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) ) name = str( repository.name ) owner = str( repository.owner ) - url = suc.url_join( tool_shed_url, - 'repository/get_latest_downloadable_changeset_revision?galaxy_url=%s&name=%s&owner=%s' % \ - ( web.url_for( '/', qualified=True ), name, owner ) ) + params = '?galaxy_url=%s&name=%s&owner=%s' % ( web.url_for( '/', qualified=True ), name, owner ) + url = common_util.url_join( tool_shed_url, + 'repository/get_latest_downloadable_changeset_revision%s' % params ) raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url ) latest_downloadable_revision = json.from_json_string( raw_text ) if latest_downloadable_revision == suc.INITIAL_CHANGELOG_HASH: @@ -496,7 +501,7 @@ # appropriate repository revision if one exists. 
We need to create a temporary repo_info_tuple # with the following entries to handle this. # ( description, clone_url, changeset_revision, ctx_rev, owner, repository_dependencies, tool_dependencies ) - tmp_clone_url = suc.url_join( tool_shed_url, 'repos', owner, name ) + tmp_clone_url = common_util.url_join( tool_shed_url, 'repos', owner, name ) tmp_repo_info_tuple = ( None, tmp_clone_url, latest_downloadable_revision, None, owner, None, None ) installed_repository, installed_changeset_revision = \ suc.repository_was_previously_installed( trans, tool_shed_url, name, tmp_repo_info_tuple ) @@ -509,10 +514,12 @@ status = 'error' else: # Install the latest downloadable revision of the repository. - params = '?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \ - ( name, owner, str( latest_downloadable_revision ), web.url_for( '/', qualified=True ) ) - url = suc.url_join( tool_shed_url, - 'repository/install_repositories_by_revision%s' % params ) + params = '?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % ( name, + owner, + str( latest_downloadable_revision ), + web.url_for( '/', qualified=True ) ) + url = common_util.url_join( tool_shed_url, + 'repository/install_repositories_by_revision%s' % params ) return trans.response.send_redirect( url ) else: message = 'Cannot locate installed tool shed repository with encoded id <b>%s</b>.' % str( repository_id ) @@ -713,7 +720,7 @@ repository = suc.get_installed_tool_shed_repository( trans, repository_id ) if repository is None: return trans.show_error_message( 'Invalid repository specified.' 
) - tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) ) name = str( repository.name ) owner = str( repository.owner ) installed_changeset_revision = str( repository.installed_changeset_revision ) @@ -724,9 +731,12 @@ tool_shed_repository_ids=tool_shed_repository_ids ) ) if repository.can_install and operation == 'install': # Send a request to the tool shed to install the repository. - url = suc.url_join( tool_shed_url, - 'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \ - ( name, owner, installed_changeset_revision, ( web.url_for( '/', qualified=True ) ) ) ) + params = '?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % ( name, + owner, + installed_changeset_revision, + web.url_for( '/', qualified=True ) ) + url = common_util.url_join( tool_shed_url, + 'repository/install_repositories_by_revision%s' % params ) return trans.response.send_redirect( url ) description = kwd.get( 'description', repository.description ) shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository ) @@ -938,7 +948,8 @@ message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) shed_tool_conf = kwd.get( 'shed_tool_conf', None ) - tool_shed_url = kwd.get( 'tool_shed_url', None ) + tool_shed_url = kwd.get( 'tool_shed_url', '' ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url ) # Handle repository dependencies, which do not include those that are required only for compiling a dependent # repository's tool dependencies. 
has_repository_dependencies = util.string_as_bool( kwd.get( 'has_repository_dependencies', False ) ) @@ -978,9 +989,9 @@ repository = suc.get_tool_shed_repository_by_id( trans, updating_repository_id ) # For backward compatibility to the 12/20/12 Galaxy release. try: - url = suc.url_join( tool_shed_url, - 'repository/get_repository_id?name=%s&owner=%s' % \ - ( str( repository.name ), str( repository.owner ) ) ) + params = '?name=%s&owner=%s' % ( str( repository.name ), str( repository.owner ) ) + url = common_util.url_join( tool_shed_url, + 'repository/get_repository_id%s' % params ) repository_ids = common_util.tool_shed_get( trans.app, tool_shed_url, url ) except Exception, e: # The Tool Shed cannot handle the get_repository_id request, so the code must be older than the @@ -999,9 +1010,9 @@ else: changeset_revisions = kwd.get( 'changeset_revisions', None ) # Get the information necessary to install each repository. - url = suc.url_join( tool_shed_url, - 'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s' % \ - ( str( repository_ids ), str( changeset_revisions ) ) ) + params = '?repository_ids=%s&changeset_revisions=%s' % ( str( repository_ids ), str( changeset_revisions ) ) + url = common_util.url_join( tool_shed_url, + 'repository/get_repository_information%s' % params ) raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url ) repo_information_dict = json.from_json_string( raw_text ) for encoded_repo_info_dict in repo_information_dict.get( 'repo_info_dicts', [] ): @@ -1245,12 +1256,12 @@ install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) ) shed_tool_conf, tool_path, relative_install_dir = \ suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository ) - repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository ) + repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, 
tool_shed_repository ) clone_dir = os.path.join( tool_path, suc.generate_tool_shed_repository_install_dir( repository_clone_url, tool_shed_repository.installed_changeset_revision ) ) relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name ) - tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_repository.tool_shed ) tool_section = None tool_panel_section_id = kwd.get( 'tool_panel_section_id', '' ) new_tool_panel_section_label = kwd.get( 'new_tool_panel_section_label', '' ) @@ -1328,8 +1339,8 @@ install_repository_dependencies=install_repository_dependencies, no_changes_checked=no_changes_checked, tool_panel_section_id=tool_panel_section_id ) - # Default the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location - # selected for the repository selected for reinstallation. + # Default the selected tool panel location for loading tools included in each newly installed required + # tool shed repository to the location selected for the repository selected for re-installation. 
for index, tps_key in enumerate( tool_panel_section_keys ): if tps_key is None: tool_panel_section_keys[ index ] = tool_panel_section_key @@ -1463,9 +1474,9 @@ latest_changeset_revision = kwd.get( 'latest_changeset_revision', None ) latest_ctx_rev = kwd.get( 'latest_ctx_rev', None ) tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id ) - repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository ) + repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository ) metadata = tool_shed_repository.metadata - tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( tool_shed_repository.tool_shed ) ) tool_path, relative_install_dir = tool_shed_repository.get_tool_relative_path( trans.app ) if latest_changeset_revision and latest_ctx_rev: # There are updates available in the tool shed for the repository, so use the received dependency information which was retrieved from @@ -1510,9 +1521,11 @@ if 'workflows' in metadata: includes_workflows = True # Since we're reinstalling, we need to send a request to the tool shed to get the README files. 
- url = suc.url_join( tool_shed_url, - 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \ - ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( tool_shed_repository.name ), + str( tool_shed_repository.owner ), + str( tool_shed_repository.installed_changeset_revision ) ) + url = common_util.url_join( tool_shed_url, + 'repository/get_readme_files%s' % params ) raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url ) readme_files_dict = json.from_json_string( raw_text ) tool_dependencies = metadata.get( 'tool_dependencies', None ) @@ -1647,8 +1660,7 @@ def reset_repository_metadata( self, trans, id ): """Reset all metadata on a single installed tool shed repository.""" repository = suc.get_installed_tool_shed_repository( trans, id ) - tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed ) - repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, repository ) + repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, repository ) tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app ) if relative_install_dir: original_metadata_dict = repository.metadata @@ -1713,10 +1725,12 @@ repository and update the metadata for the repository's revision in the Galaxy database. 
""" repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] ) - tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed ) - url = suc.url_join( tool_shed_url, - 'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \ - ( repository.name, repository.owner, repository.changeset_revision ) ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( repository.name ), + str( repository.owner ), + str( repository.changeset_revision ) ) + url = common_util.url_join( tool_shed_url, + 'repository/get_tool_versions%s' % params ) text = common_util.tool_shed_get( trans.app, tool_shed_url, url ) if text: tool_version_dicts = json.from_json_string( text ) @@ -1814,7 +1828,9 @@ """Update a cloned repository to the latest revision possible.""" message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) - tool_shed_url = kwd.get( 'tool_shed_url', None ) + tool_shed_url = kwd.get( 'tool_shed_url', '' ) + # Handle protocol changes over time. 
+ tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url ) name = kwd.get( 'name', None ) owner = kwd.get( 'owner', None ) changeset_revision = kwd.get( 'changeset_revision', None ) @@ -1839,7 +1855,6 @@ repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name ) repository_util.pull_repository( repo, repository_clone_url, latest_ctx_rev ) suc.update_repository( repo, latest_ctx_rev ) - tool_shed = suc.clean_tool_shed_url( tool_shed_url ) # Remove old Data Manager entries if repository.includes_data_managers: data_manager_util.remove_from_data_manager( trans.app, repository ) diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/galaxy/webapps/tool_shed/controllers/repository.py --- a/lib/galaxy/webapps/tool_shed/controllers/repository.py +++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py @@ -793,7 +793,7 @@ def browse_valid_categories( self, trans, **kwd ): """Filter repositories per category by those that are valid for installing into Galaxy.""" # The request came from Galaxy, so restrict category links to display only valid repository changeset revisions. - galaxy_url = suc.handle_galaxy_url( trans, **kwd ) + galaxy_url = common_util.handle_galaxy_url( trans, **kwd ) if galaxy_url: kwd[ 'galaxy_url' ] = galaxy_url if 'f-free-text-search' in kwd: @@ -831,7 +831,7 @@ @web.expose def browse_valid_repositories( self, trans, **kwd ): """Filter repositories to those that are installable into Galaxy.""" - galaxy_url = suc.handle_galaxy_url( trans, **kwd ) + galaxy_url = common_util.handle_galaxy_url( trans, **kwd ) if galaxy_url: kwd[ 'galaxy_url' ] = galaxy_url repository_id = kwd.get( 'id', None ) @@ -885,7 +885,7 @@ message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) # If the request originated with the UpdateManager, it will not include a galaxy_url. 
- galaxy_url = suc.handle_galaxy_url( trans, **kwd ) + galaxy_url = common_util.handle_galaxy_url( trans, **kwd ) name = kwd.get( 'name', None ) owner = kwd.get( 'owner', None ) changeset_revision = kwd.get( 'changeset_revision', None ) @@ -902,9 +902,12 @@ elif galaxy_url: # Start building up the url to redirect back to the calling Galaxy instance. params = '?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \ - ( web.url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) - url = suc.url_join( galaxy_url, - 'admin_toolshed/update_to_changeset_revision%s' % params ) + ( web.url_for( '/', qualified=True ), + str( repository.name ), + str( repository.user.username ), + changeset_revision ) + url = common_util.url_join( galaxy_url, + 'admin_toolshed/update_to_changeset_revision%s' % params ) else: message = 'Unable to check for updates due to an invalid Galaxy URL: <b>%s</b>. ' % galaxy_url message += 'You may need to enable third-party cookies in your browser. 
' @@ -1145,7 +1148,12 @@ repository.times_downloaded += 1 trans.sa_session.add( repository ) trans.sa_session.flush() - download_url = suc.url_join( '/', 'repos', repository.user.username, repository.name, 'archive', file_type_str ) + download_url = common_util.url_join( '/', + 'repos', + str( repository.user.username ), + str( repository.name ), + 'archive', + file_type_str ) return trans.response.send_redirect( download_url ) @web.expose @@ -1233,7 +1241,7 @@ def find_tools( self, trans, **kwd ): message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) - galaxy_url = suc.handle_galaxy_url( trans, **kwd ) + galaxy_url = common_util.handle_galaxy_url( trans, **kwd ) if 'operation' in kwd: item_id = kwd.get( 'id', '' ) if item_id: @@ -1321,7 +1329,7 @@ def find_workflows( self, trans, **kwd ): message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) - galaxy_url = suc.handle_galaxy_url( trans, **kwd ) + galaxy_url = common_util.handle_galaxy_url( trans, **kwd ) if 'operation' in kwd: item_id = kwd.get( 'id', '' ) if item_id: @@ -1615,7 +1623,11 @@ # section 5.1, e.g. Sat, 07 Sep 2002 00:00:01 UT time_tested = repository_metadata.time_last_tested.strftime( '%a, %d %b %Y %H:%M:%S UT' ) # Generate a citable URL for this repository with owner and changeset revision. 
- repository_citable_url = suc.url_join( tool_shed_url, 'view', user.username, repository.name, repository_metadata.changeset_revision ) + repository_citable_url = common_util.url_join( tool_shed_url, + 'view', + str( user.username ), + str( repository.name ), + str( repository_metadata.changeset_revision ) ) passed_tests = len( tool_test_results.get( 'passed_tests', [] ) ) failed_tests = len( tool_test_results.get( 'failed_tests', [] ) ) missing_test_components = len( tool_test_results.get( 'missing_test_components', [] ) ) @@ -1866,7 +1878,7 @@ """ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) repository_id = trans.security.encode_id( repository.id ) - repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository ) + repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( trans, repository ) repo_dir = repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) @@ -2051,15 +2063,15 @@ if not repository_ids: repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) repository_ids = trans.security.encode_id( repository.id ) - galaxy_url = suc.handle_galaxy_url( trans, **kwd ) + galaxy_url = common_util.handle_galaxy_url( trans, **kwd ) if galaxy_url: # Redirect back to local Galaxy to perform install. 
params = '?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \ ( web.url_for( '/', qualified=True ), ','.join( util.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions ) ) ) - url = suc.url_join( galaxy_url, - 'admin_toolshed/prepare_for_install%s' % params ) + url = common_util.url_join( galaxy_url, + 'admin_toolshed/prepare_for_install%s' % params ) return trans.response.send_redirect( url ) else: message = 'Repository installation is not possible due to an invalid Galaxy URL: <b>%s</b>. ' % galaxy_url @@ -2560,8 +2572,8 @@ if invalid: message += 'The repository dependency definitions for this repository are invalid and will be ignored. ' message += 'The complete dependency hierarchy could not be determined. The cause of repository dependency ' - message += 'definition errors like this can usually be seen when viewing the repository directly from the' - message += 'Tool Shed. The exact cause cannot be determined when visiting the Tool Shed from Galaxy to' + message += 'definition errors like this can usually be seen when viewing the repository directly from the ' + message += 'Tool Shed. The exact cause cannot be determined when visiting the Tool Shed from Galaxy to ' message += 'install the repository.' 
status = 'error' else: diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/galaxy_install/install_manager.py --- a/lib/tool_shed/galaxy_install/install_manager.py +++ b/lib/tool_shed/galaxy_install/install_manager.py @@ -58,8 +58,8 @@ else: root = tree.getroot() defined_tool_shed_url = root.get( 'name' ) - self.tool_shed_url = suc.get_url_from_tool_shed( self.app, defined_tool_shed_url ) - self.tool_shed = suc.clean_tool_shed_url( defined_tool_shed_url ) + self.tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, defined_tool_shed_url ) + self.tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( self.tool_shed_url ) self.repository_owner = common_util.REPOSITORY_OWNER index, self.shed_config_dict = suc.get_shed_tool_conf_dict( app, self.migrated_tools_config ) # Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in @@ -101,7 +101,11 @@ name = repository_elem.get( 'name' ) changeset_revision = repository_elem.get( 'changeset_revision' ) tool_shed_accessible, repository_dependencies_dict = \ - common_util.get_repository_dependencies( app, self.tool_shed_url, name, self.repository_owner, changeset_revision ) + common_util.get_repository_dependencies( app, + self.tool_shed_url, + name, + self.repository_owner, + changeset_revision ) # Make sure all repository dependency records exist (as tool_shed_repository table rows) in the Galaxy database. created_tool_shed_repositories = self.create_or_update_tool_shed_repository_records( name, changeset_revision, @@ -155,12 +159,15 @@ def create_or_update_tool_shed_repository_records( self, name, changeset_revision, repository_dependencies_dict ): """ - Make sure the repository defined by name and changeset_revision and all of it's repository dependencies have associated tool_shed_repository - table rows in the Galaxy database. 
+ Make sure the repository defined by name and changeset_revision and all of its repository dependencies have + associated tool_shed_repository table rows in the Galaxy database. """ created_tool_shed_repositories = [] description = repository_dependencies_dict.get( 'description', None ) - tool_shed_repository = self.create_or_update_tool_shed_repository_record( name, self.repository_owner, changeset_revision, description=description ) + tool_shed_repository = self.create_or_update_tool_shed_repository_record( name, + self.repository_owner, + changeset_revision, + description=description ) if tool_shed_repository: created_tool_shed_repositories.append( tool_shed_repository ) for rd_key, rd_tups in repository_dependencies_dict.items(): @@ -170,7 +177,10 @@ parsed_rd_tup = common_util.parse_repository_dependency_tuple( rd_tup ) rd_tool_shed, rd_name, rd_owner, rd_changeset_revision = parsed_rd_tup[ 0:4 ] # TODO: Make sure the repository description is applied to the new repository record during installation. - tool_shed_repository = self.create_or_update_tool_shed_repository_record( rd_name, rd_owner, rd_changeset_revision, description=None ) + tool_shed_repository = self.create_or_update_tool_shed_repository_record( rd_name, + rd_owner, + rd_changeset_revision, + description=None ) if tool_shed_repository: created_tool_shed_repositories.append( tool_shed_repository ) return created_tool_shed_repositories @@ -451,7 +461,7 @@ converter_path, display_path = datatype_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir if converter_path or display_path: # Create a dictionary of tool shed repository related information. 
- repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed, + repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed_url, name=tool_shed_repository.name, owner=self.repository_owner, installed_changeset_revision=tool_shed_repository.installed_changeset_revision, diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/galaxy_install/installed_repository_manager.py --- a/lib/tool_shed/galaxy_install/installed_repository_manager.py +++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py @@ -3,7 +3,7 @@ """ import logging import os -import tool_shed.util.shed_util_common as suc +from tool_shed.util import common_util from tool_shed.util import datatype_util from tool_shed.util import repository_dependency_util from tool_shed.util import tool_dependency_util @@ -175,7 +175,7 @@ root = tree.getroot() tool_path = root.get( 'tool_path', None ) if tool_path: - ts = suc.clean_tool_shed_url( tool_shed_repository.tool_shed ) + ts = common_util.remove_port_from_tool_shed_url( str( tool_shed_repository.tool_shed ) ) relative_path = os.path.join( tool_path, ts, 'repos', diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/galaxy_install/repository_util.py --- a/lib/tool_shed/galaxy_install/repository_util.py +++ b/lib/tool_shed/galaxy_install/repository_util.py @@ -64,13 +64,13 @@ if repository_metadata: metadata = repository_metadata.metadata if metadata: - toolshed_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' ) + tool_shed_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' ) # Get a dictionary of all repositories upon which the contents of the received repository depends. 
repository_dependencies = \ repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, repository=repository, repository_metadata=repository_metadata, - toolshed_base_url=toolshed_url, + toolshed_base_url=tool_shed_url, key_rd_dicts_to_be_processed=None, all_repository_dependencies=None, handled_key_rd_dicts=None, @@ -187,7 +187,7 @@ repository = suc.get_repository_in_tool_shed( trans, repository_id ) repo_dir = repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) - repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository ) + repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( trans, repository ) repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) @@ -241,20 +241,23 @@ def get_repo_info_dict_for_repair( trans, repository ): tool_panel_section_key = None - repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, repository ) - repository_dependencies = repository_dependency_util.get_repository_dependencies_for_installed_tool_shed_repository( trans, repository ) + repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, repository ) + repository_dependencies = \ + repository_dependency_util.get_repository_dependencies_for_installed_tool_shed_repository( trans, repository ) metadata = repository.metadata if metadata: tool_dependencies = metadata.get( 'tool_dependencies', None ) tool_panel_section_dict = metadata.get( 'tool_panel_section', None ) if tool_panel_section_dict: # The repository must be in the uninstalled state. 
The structure of tool_panel_section_dict is: - # {<tool guid> : [{ 'id':<section id>, 'name':<section name>, 'version':<section version>, 'tool_config':<tool config file name> }]} + # {<tool guid> : + # [{ 'id':<section id>, 'name':<section name>, 'version':<section version>, 'tool_config':<tool config file name> }]} # Here is an example: # {"localhost:9009/repos/test/filter/Filter1/1.1.0": - # [{"id": "filter_and_sort", "name": "Filter and Sort", "tool_config": "filtering.xml", "version": ""}]} - # Currently all tools contained within an installed tool shed repository must be loaded into the same section in the tool panel, so we can - # get the section id of the first guid in the tool_panel_section_dict. In the future, we'll have to handle different sections per guid. + # [{"id": "filter_and_sort", "name": "Filter and Sort", "tool_config": "filtering.xml", "version": ""}]} + # Currently all tools contained within an installed tool shed repository must be loaded into the same + # section in the tool panel, so we can get the section id of the first guid in the tool_panel_section_dict. + # In the future, we'll have to handle different sections per guid. 
guid = tool_panel_section_dict.keys()[ 0 ] section_dicts = tool_panel_section_dict[ guid ] section_dict = section_dicts[ 0 ] @@ -315,9 +318,11 @@ def get_update_to_changeset_revision_and_ctx_rev( trans, repository ): """Return the changeset revision hash to which the repository can be updated.""" changeset_revision_dict = {} - tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed ) - url = suc.url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \ - ( repository.name, repository.owner, repository.installed_changeset_revision ) ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( repository.name ), + str( repository.owner ), + str( repository.installed_changeset_revision ) ) + url = common_util.url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev%s' % params ) try: encoded_update_dict = common_util.tool_shed_get( trans.app, tool_shed_url, url ) if encoded_update_dict: @@ -588,10 +593,12 @@ suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS ) - tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed ) - url = suc.url_join( tool_shed_url, - '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % - ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( tool_shed_repository.tool_shed ) ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( tool_shed_repository.name ), + str( tool_shed_repository.owner ), + str( tool_shed_repository.changeset_revision ) ) + url = common_util.url_join( tool_shed_url, + '/repository/get_tool_versions%s' % params ) text = 
common_util.tool_shed_get( trans.app, tool_shed_url, url ) if text: tool_version_dicts = json.from_json_string( text ) diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py @@ -22,8 +22,10 @@ def create_temporary_tool_dependencies_config( app, tool_shed_url, name, owner, changeset_revision ): """Make a call to the tool shed to get the required repository's tool_dependencies.xml file.""" - url = url_join( tool_shed_url, - 'repository/get_tool_dependencies_config_contents?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) + url = common_util.url_join( tool_shed_url, + 'repository/get_tool_dependencies_config_contents%s' % params ) text = common_util.tool_shed_get( app, tool_shed_url, url ) if text: # Write the contents to a temporary file on disk so it can be reloaded and parsed. @@ -132,7 +134,8 @@ def get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app, tool_shed_url, name, owner, changeset_revision ): sa_session = app.install_model.context - tool_shed = td_common_util.clean_tool_shed_url( tool_shed_url ) + # The protocol is not stored, but the port is if it exists. 
+ tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed_url ) tool_shed_repository = sa_session.query( app.install_model.ToolShedRepository ) \ .filter( and_( app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed, app.install_model.ToolShedRepository.table.c.name == name, @@ -165,8 +168,10 @@ Get all appropriate newer changeset revisions for the repository defined by the received tool_shed_url / name / owner combination. """ - url = suc.url_join( tool_shed_url, - 'repository/updated_changeset_revisions?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) + url = common_util.url_join( tool_shed_url, + 'repository/updated_changeset_revisions%s' % params ) text = common_util.tool_shed_get( app, tool_shed_url, url ) return text @@ -180,6 +185,8 @@ """ handled_tool_dependencies = [] tool_shed = elem.attrib[ 'toolshed' ] + # The protocol is not stored, but the port is if it exists. 
+ tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed ) required_repository_name = elem.attrib[ 'name' ] required_repository_owner = elem.attrib[ 'owner' ] default_required_repository_changeset_revision = elem.attrib[ 'changeset_revision' ] @@ -950,9 +957,3 @@ except: file_name = fpath return file_name - -def url_join( *args ): - parts = [] - for arg in args: - parts.append( arg.strip( '/' ) ) - return '/'.join( parts ) diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py @@ -9,6 +9,7 @@ import urllib2 import zipfile from string import Template +from tool_shed.util import common_util import tool_shed.util.shed_util_common as suc from galaxy.datatypes import checkers @@ -108,19 +109,6 @@ def open_zip( self, filepath, mode ): return zipfile.ZipFile( filepath, mode ) -def clean_tool_shed_url( base_url ): - if base_url: - if base_url.find( '://' ) > -1: - try: - protocol, base = base_url.split( '://' ) - except ValueError, e: - # The received base_url must be an invalid url. - log.debug( "Returning unchanged invalid base_url from td_common_util.clean_tool_shed_url: %s" % str( base_url ) ) - return base_url - return base.rstrip( '/' ) - return base_url.rstrip( '/' ) - return base_url - def create_env_var_dict( elem, tool_dependency_install_dir=None, tool_shed_repository_install_dir=None ): env_var_name = elem.get( 'name', 'PATH' ) env_var_action = elem.get( 'action', 'prepend_to' ) @@ -192,7 +180,8 @@ repository_owner = elem.get( 'owner', None ) changeset_revision = elem.get( 'changeset_revision', None ) if toolshed and repository_name and repository_owner and changeset_revision: - toolshed = clean_tool_shed_url( toolshed ) + # The protocol is not stored, but the port is if it exists. 
+ toolshed = common_util.remove_protocol_from_tool_shed_url( toolshed ) repository = suc.get_repository_for_dependency_relationship( app, toolshed, repository_name, repository_owner, changeset_revision ) if repository: for sub_elem in elem: @@ -203,7 +192,9 @@ # Get the tool_dependency so we can get it's installation directory. tool_dependency = None for tool_dependency in repository.tool_dependencies: - if tool_dependency.type == tool_dependency_type and tool_dependency.name == tool_dependency_name and tool_dependency.version == tool_dependency_version: + if tool_dependency.type == tool_dependency_type and \ + tool_dependency.name == tool_dependency_name and \ + tool_dependency.version == tool_dependency_version: break if tool_dependency: tool_dependency_key = '%s/%s' % ( tool_dependency_name, tool_dependency_version ) diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/scripts/api/common.py --- a/lib/tool_shed/scripts/api/common.py +++ b/lib/tool_shed/scripts/api/common.py @@ -9,6 +9,7 @@ sys.path = new_path import tool_shed.util.shed_util_common as suc +from tool_shed.util import common_util from galaxy import eggs import pkg_resources @@ -90,7 +91,7 @@ parts.insert( 0, 'api' ) elif 'api' not in parts: parts.insert( 0, 'api' ) - url = suc.url_join( base, *parts ) + url = common_util.url_join( base, *parts ) if params is not None: try: query_string = urllib.urlencode( params ) diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/scripts/deprecate_repositories_without_metadata.py --- a/lib/tool_shed/scripts/deprecate_repositories_without_metadata.py +++ b/lib/tool_shed/scripts/deprecate_repositories_without_metadata.py @@ -20,7 +20,7 @@ from optparse import OptionParser from galaxy.tools import parameters -from tool_shed.util.shed_util_common import url_join +from tool_shed.util.common_util import url_join import galaxy.webapps.tool_shed.config as 
tool_shed_config import galaxy.webapps.tool_shed.model.mapping import sqlalchemy as sa diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/util/common_install_util.py --- a/lib/tool_shed/util/common_install_util.py +++ b/lib/tool_shed/util/common_install_util.py @@ -24,7 +24,7 @@ def activate_repository( trans, repository ): """Activate an installed tool shed repository that has been marked as deactivated.""" - repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, repository ) + repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, repository ) shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository ) repository.deleted = False repository.status = trans.install_model.ToolShedRepository.installation_status.INSTALLED @@ -90,7 +90,7 @@ # Inspect the tool_dependencies dictionary to separate the installed and missing tool dependencies. # We don't add to installed_td and missing_td here because at this point they are empty. installed_td, missing_td = \ - get_installed_and_missing_tool_dependencies_for_repository( trans, tool_shed_url, tool_dependencies ) + get_installed_and_missing_tool_dependencies_for_repository( trans, tool_dependencies ) # In cases where a repository dependency is required only for compiling a dependent repository's # tool dependency, the value of repository_dependencies will be an empty dictionary here. if repository_dependencies: @@ -130,9 +130,7 @@ if required_tool_dependencies: # Discover and categorize all tool dependencies defined for this repository's repository dependencies. 
required_installed_td, required_missing_td = \ - get_installed_and_missing_tool_dependencies_for_repository( trans, - tool_shed_url, - required_tool_dependencies ) + get_installed_and_missing_tool_dependencies_for_repository( trans, required_tool_dependencies ) if required_installed_td: if not includes_tool_dependencies: includes_tool_dependencies = True @@ -189,7 +187,7 @@ installed_rd_tups = [] missing_rd_tups = [] for tsr in repository.repository_dependencies: - prior_installation_required = suc.set_prior_installation_required( repository, tsr ) + prior_installation_required = suc.set_prior_installation_required( trans.app, repository, tsr ) only_if_compiling_contained_td = suc.set_only_if_compiling_contained_td( repository, tsr ) rd_tup = [ tsr.tool_shed, tsr.name, @@ -261,7 +259,7 @@ # that includes the correct repository owner which we get from the current rd_tup. The current # tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, # repository_dependencies, installed_td ) - tmp_clone_url = suc.generate_clone_url_from_repo_info_tup( rd_tup ) + tmp_clone_url = common_util.generate_clone_url_from_repo_info_tup( trans, rd_tup ) tmp_repo_info_tuple = ( None, tmp_clone_url, changeset_revision, None, owner, None, None ) repository, installed_changeset_revision = suc.repository_was_previously_installed( trans, tool_shed, @@ -312,7 +310,7 @@ missing_repository_dependencies[ 'description' ] = description return installed_repository_dependencies, missing_repository_dependencies -def get_installed_and_missing_tool_dependencies_for_repository( trans, tool_shed_url, tool_dependencies_dict ): +def get_installed_and_missing_tool_dependencies_for_repository( trans, tool_dependencies_dict ): """ Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories being installed into Galaxy. 
@@ -439,7 +437,10 @@ encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) ) encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups ) encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str ) - url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' ) + if trans.webapp.name == 'galaxy': + # Handle secure / insecure Tool Shed URL protocol changes and port changes. + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url ) + url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' ) request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) ) response = urllib2.urlopen( request ).read() if response: @@ -569,7 +570,10 @@ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup # TODO: we may discover that we need to check more than just installed_changeset_revision and changeset_revision here, in which # case we'll need to contact the tool shed to get the list of all possible changeset_revisions. 
- if repository_dependency.tool_shed == tool_shed and \ + cleaned_tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( tool_shed ) + cleaned_repository_dependency_tool_shed = \ + common_util.remove_protocol_and_port_from_tool_shed_url( str( repository_dependency.tool_shed ) ) + if cleaned_repository_dependency_tool_shed == cleaned_tool_shed and \ repository_dependency.name == name and \ repository_dependency.owner == owner and \ ( repository_dependency.installed_changeset_revision == changeset_revision or \ diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/util/common_util.py --- a/lib/tool_shed/util/common_util.py +++ b/lib/tool_shed/util/common_util.py @@ -3,6 +3,7 @@ import urllib2 from galaxy.util import json from galaxy.util.odict import odict +from galaxy.web import url_for from tool_shed.util import encoding_util from tool_shed.util import xml_util @@ -28,7 +29,7 @@ return False, odict() root = tree.getroot() tool_shed = root.get( 'name' ) - tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ) + tool_shed_url = get_tool_shed_url_from_tool_shed_registry( app, tool_shed ) # The default behavior is that the tool shed is down. 
tool_shed_accessible = False missing_tool_configs_dict = odict() @@ -52,7 +53,7 @@ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ parse_repository_dependency_tuple( rd_tup ) tool_shed_accessible, tool_dependencies = get_tool_dependencies( app, - tool_shed, + tool_shed_url, name, owner, changeset_revision ) @@ -96,6 +97,30 @@ missing_tool_configs_dict[ name ] = migrated_tool_configs_dict[ migrated_tool_config ] return missing_tool_configs_dict +def generate_clone_url_for_installed_repository( app, repository ): + """Generate the URL for cloning a repository that has been installed into a Galaxy instance.""" + tool_shed_url = get_tool_shed_url_from_tool_shed_registry( app, str( repository.tool_shed ) ) + return url_join( tool_shed_url, 'repos', repository.owner, repository.name ) + +def generate_clone_url_for_repository_in_tool_shed( trans, repository ): + """Generate the URL for cloning a repository that is in the tool shed.""" + base_url = url_for( '/', qualified=True ).rstrip( '/' ) + if trans and trans.user: + protocol, base = base_url.split( '://' ) + username = '%s@' % trans.user.username + return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name ) + else: + return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name ) + +def generate_clone_url_from_repo_info_tup( trans, repo_info_tup ): + """Generate the URL for cloning a repository given a tuple of toolshed, name, owner, changeset_revision.""" + # Example tuple: ['http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab', False] + toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ + parse_repository_dependency_tuple( repo_info_tup ) + tool_shed_url = get_tool_shed_url_from_tool_shed_registry( trans.app, toolshed ) + # Don't include the changeset_revision in clone urls. 
+ return url_join( tool_shed_url, 'repos', owner, name ) + def get_non_shed_tool_panel_configs( app ): """Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml.""" config_filenames = [] @@ -149,15 +174,44 @@ tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type ) ) return tool_shed_accessible, tool_dependencies -def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ): - search_str = '://%s' % tool_shed +def get_tool_shed_url_from_tool_shed_registry( app, tool_shed ): + """ + The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is + something like: http://toolshed.g2.bx.psu.edu/ + """ + cleaned_tool_shed = remove_protocol_from_tool_shed_url( tool_shed ) for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items(): - if shed_url.find( search_str ) >= 0: + if shed_url.find( cleaned_tool_shed ) >= 0: if shed_url.endswith( '/' ): shed_url = shed_url.rstrip( '/' ) return shed_url + # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml. return None +def handle_galaxy_url( trans, **kwd ): + galaxy_url = kwd.get( 'galaxy_url', None ) + if galaxy_url: + trans.set_cookie( galaxy_url, name='toolshedgalaxyurl' ) + else: + galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' ) + return galaxy_url + +def handle_tool_shed_url_protocol( app, shed_url ): + """Handle secure and insecure HTTP protocol since they may change over time.""" + try: + if app.name == 'galaxy': + url = remove_protocol_from_tool_shed_url( shed_url ) + tool_shed_url = get_tool_shed_url_from_tool_shed_registry( app, url ) + else: + tool_shed_url = str( url_for( '/', qualified=True ) ).rstrip( '/' ) + return tool_shed_url + except Exception, e: + # We receive a lot of calls here where the tool_shed_url is None. The container_util uses + # that value when creating a header row. 
If the tool_shed_url is not None, we have a problem. + if shed_url is not None: + log.exception( "Handled exception removing protocol from URL %s:\n%s" % ( str( shed_url ), str( e ) ) ) + return shed_url + def parse_repository_dependency_tuple( repository_dependency_tuple, contains_error=False ): # Default both prior_installation_required and only_if_compiling_contained_td to False in cases where metadata should be reset on the # repository containing the repository_dependency definition. @@ -167,10 +221,11 @@ if len( repository_dependency_tuple ) == 5: tool_shed, name, owner, changeset_revision, error = repository_dependency_tuple elif len( repository_dependency_tuple ) == 6: - toolshed, name, owner, changeset_revision, prior_installation_required, error = repository_dependency_tuple + tool_shed, name, owner, changeset_revision, prior_installation_required, error = repository_dependency_tuple elif len( repository_dependency_tuple ) == 7: - toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, error = repository_dependency_tuple - return toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, error + tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, error = \ + repository_dependency_tuple + return tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, error else: if len( repository_dependency_tuple ) == 4: tool_shed, name, owner, changeset_revision = repository_dependency_tuple @@ -180,6 +235,59 @@ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = repository_dependency_tuple return tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td +def remove_port_from_tool_shed_url( tool_shed_url ): + """Return a partial Tool Shed URL, eliminating the port if it exists.""" 
+ try: + if tool_shed_url.find( ':' ) > 0: + # Eliminate the port, if any, since it will result in an invalid directory name. + new_tool_shed_url = tool_shed_url.split( ':' )[ 0 ] + else: + new_tool_shed_url = tool_shed_url + return new_tool_shed_url.rstrip( '/' ) + except Exception, e: + # We receive a lot of calls here where the tool_shed_url is None. The container_util uses + # that value when creating a header row. If the tool_shed_url is not None, we have a problem. + if tool_shed_url is not None: + log.exception( "Handled exception removing the port from Tool Shed URL %s:\n%s" % ( str( tool_shed_url ), str( e ) ) ) + return tool_shed_url + +def remove_protocol_and_port_from_tool_shed_url( tool_shed_url ): + """Return a partial Tool Shed URL, eliminating the protocol and/or port if either exists.""" + tool_shed = remove_protocol_from_tool_shed_url( tool_shed_url ) + tool_shed = remove_port_from_tool_shed_url( tool_shed ) + return tool_shed + +def remove_protocol_and_user_from_clone_url( repository_clone_url ): + """Return a URL that can be used to clone a repository, eliminating the protocol and user if either exists.""" + if repository_clone_url.find( '@' ) > 0: + # We have an url that includes an authenticated user, something like: + # http://test@bx.psu.edu:9009/repos/some_username/column + items = repository_clone_url.split( '@' ) + tmp_url = items[ 1 ] + elif repository_clone_url.find( '//' ) > 0: + # We have an url that includes only a protocol, something like: + # http://bx.psu.edu:9009/repos/some_username/column + items = repository_clone_url.split( '//' ) + tmp_url = items[ 1 ] + else: + tmp_url = repository_clone_url + return tmp_url.rstrip( '/' ) + +def remove_protocol_from_tool_shed_url( tool_shed_url ): + """Return a partial Tool Shed URL, eliminating the protocol if it exists.""" + try: + if tool_shed_url.find( '://' ) > 0: + new_tool_shed_url = tool_shed_url.split( '://' )[1] + else: + new_tool_shed_url = tool_shed_url + return 
new_tool_shed_url.rstrip( '/' ) + except Exception, e: + # We receive a lot of calls here where the tool_shed_url is None. The container_util uses + # that value when creating a header row. If the tool_shed_url is not None, we have a problem. + if tool_shed_url is not None: + log.exception( "Handled exception removing the protocol from Tool Shed URL %s:\n%s" % ( str( tool_shed_url ), str( e ) ) ) + return tool_shed_url + def tool_shed_get( app, tool_shed_url, uri ): """Make contact with the tool shed via the uri provided.""" registry = app.tool_shed_registry @@ -196,3 +304,10 @@ content = response.read() response.close() return content + +def url_join( *args ): + """Return a valid URL produced by appending a base URL and a set of request parameters.""" + parts = [] + for arg in args: + parts.append( arg.strip( '/' ) ) + return '/'.join( parts ) diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/util/container_util.py --- a/lib/tool_shed/util/container_util.py +++ b/lib/tool_shed/util/container_util.py @@ -65,17 +65,6 @@ if contained_repository_dependency.listify == listified_repository_dependency: self.repository_dependencies.remove( contained_repository_dependency ) - def to_repository_dependency( self, repository_dependency_id ): - toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ - common_util.parse_repository_dependency_tuple( self.key.split( STRSEP ) ) - return RepositoryDependency( id=repository_dependency_id, - toolshed=toolshed, - repository_name=name, - repository_owner=owner, - changeset_revision=changeset_revision, - prior_installation_required=galaxy.util.asbool( prior_installation_required ), - only_if_compiling_contained_td=galaxy.util.asbool( only_if_compiling_contained_td ) ) - class DataManager( object ): """Data Manager object""" @@ -1525,22 +1514,6 @@ return True return False -def cast_empty_repository_dependency_folders( folder, 
repository_dependency_id ): - """ - Change any empty folders contained within the repository dependencies container into a repository dependency - since it has no repository dependencies of it's own. This method is not used (and may not be needed), but here - it is just in case. - """ - if not folder.folders and not folder.repository_dependencies: - repository_dependency_id += 1 - repository_dependency = folder.to_repository_dependency( repository_dependency_id ) - if not folder.parent.contains_repository_dependency( repository_dependency ): - folder.parent.repository_dependencies.append( repository_dependency ) - folder.parent.folders.remove( folder ) - for sub_folder in folder.folders: - return cast_empty_repository_dependency_folders( sub_folder, repository_dependency_id ) - return folder, repository_dependency_id - def generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, key ): """Return a repository dependency label based on the repository dependency key.""" @@ -1562,17 +1535,21 @@ def generate_repository_dependencies_key_for_repository( toolshed_base_url, repository_name, repository_owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ): """Assumes tool shed is current tool shed since repository dependencies across tool sheds is not yet supported.""" - return '%s%s%s%s%s%s%s%s%s%s%s' % ( str( toolshed_base_url ).rstrip( '/' ), - STRSEP, - str( repository_name ), - STRSEP, - str( repository_owner ), - STRSEP, - str( changeset_revision ), - STRSEP, - str( prior_installation_required ), - STRSEP, - str( only_if_compiling_contained_td ) ) + # The tool_shed portion of the key must be the value that is stored in the tool_shed_repository.tool_shed column + # of the Galaxy database for an installed repository. This value does not include the protocol, but does include + # the port if there is one. 
+ tool_shed = common_util.remove_protocol_from_tool_shed_url( toolshed_base_url ) + return '%s%s%s%s%s%s%s%s%s%s%s' % ( tool_shed, + STRSEP, + str( repository_name ), + STRSEP, + str( repository_owner ), + STRSEP, + str( changeset_revision ), + STRSEP, + str( prior_installation_required ), + STRSEP, + str( only_if_compiling_contained_td ) ) def generate_tool_dependencies_key( name, version, type ): return '%s%s%s%s%s' % ( str( name ), STRSEP, str( version ), STRSEP, str( type ) ) diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/util/export_util.py --- a/lib/tool_shed/util/export_util.py +++ b/lib/tool_shed/util/export_util.py @@ -12,6 +12,7 @@ from galaxy.util.odict import odict from tool_shed.util import commit_util from tool_shed.util import common_install_util +from tool_shed.util import common_util from tool_shed.util import encoding_util from tool_shed.util import repository_dependency_util from tool_shed.util import xml_util @@ -49,12 +50,6 @@ log.exception( error_message ) return return_code, error_message -def clean_tool_shed_url( base_url ): - protocol, base = base_url.split( '://' ) - base = base.replace( ':', '_colon_' ) - base = base.rstrip( '/' ) - return base - def export_repository( trans, tool_shed_url, repository_id, repository_name, changeset_revision, file_type, export_repository_dependencies, api=False ): repository = suc.get_repository_in_tool_shed( trans, repository_id ) @@ -124,8 +119,9 @@ repositories_archive.close() if api: encoded_repositories_archive_name = encoding_util.tool_shed_encode( repositories_archive_filename ) - download_url = suc.url_join( web.url_for( '/', qualified=True ), - 'repository/export_via_api?encoded_repositories_archive_name=%s' % encoded_repositories_archive_name ) + params = '?encoded_repositories_archive_name=%s' % encoded_repositories_archive_name + download_url = common_util.url_join( web.url_for( '/', qualified=True ), + 'repository/export_via_api%s' % 
params ) return dict( download_url=download_url, error_messages=error_messages ) return repositories_archive, error_messages @@ -171,7 +167,7 @@ return repository_archive, error_message def generate_repository_archive_filename( tool_shed_url, name, owner, changeset_revision, file_type, export_repository_dependencies=False, use_tmp_archive_dir=False ): - tool_shed = clean_tool_shed_url( tool_shed_url ) + tool_shed = remove_protocol_from_tool_shed_url( tool_shed_url ) file_type_str = suc.get_file_type_str( changeset_revision, file_type ) if export_repository_dependencies: repositories_archive_filename = '%s_%s_%s_%s_%s' % ( CAPSULE_WITH_DEPENDENCIES_FILENAME, tool_shed, name, owner, file_type_str ) @@ -242,7 +238,7 @@ repo_info_dict = {} # Cast unicode to string. repo_info_dict[ str( repository.name ) ] = ( str( repository.description ), - suc.generate_clone_url_for_repository_in_tool_shed( trans, repository ), + common_util.generate_clone_url_for_repository_in_tool_shed( trans, repository ), str( changeset_revision ), str( ctx.rev() ), str( repository.user.username ), @@ -343,3 +339,9 @@ ordered_repositories.append( repository ) ordered_changeset_revisions.append( changeset_revision ) return ordered_repository_ids, ordered_repositories, ordered_changeset_revisions + +def remove_protocol_from_tool_shed_url( base_url ): + protocol, base = base_url.split( '://' ) + base = base.replace( ':', '_colon_' ) + base = base.rstrip( '/' ) + return base diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/util/import_util.py --- a/lib/tool_shed/util/import_util.py +++ b/lib/tool_shed/util/import_util.py @@ -343,7 +343,6 @@ return_dict[ 'error_message' ] = error_message return_dict[ 'status' ] = 'error' uploaded_file.close() - tar_archive.close() return return_dict return_dict[ 'tar_archive' ] = tar_archive return_dict[ 'capsule_file_name' ] = uploaded_file_filename diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 
4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/util/metadata_util.py --- a/lib/tool_shed/util/metadata_util.py +++ b/lib/tool_shed/util/metadata_util.py @@ -206,11 +206,13 @@ for ancestor_tup in ancestor_repository_dependencies: a_tool_shed, a_repo_name, a_repo_owner, a_changeset_revision, a_prior_installation_required, a_only_if_compiling_contained_td = \ ancestor_tup + cleaned_a_tool_shed = common_util.remove_protocol_from_tool_shed_url( a_tool_shed ) found_in_current = False for current_tup in current_repository_dependencies: c_tool_shed, c_repo_name, c_repo_owner, c_changeset_revision, c_prior_installation_required, c_only_if_compiling_contained_td = \ current_tup - if c_tool_shed == a_tool_shed and \ + cleaned_c_tool_shed = common_util.remove_protocol_from_tool_shed_url( c_tool_shed ) + if cleaned_c_tool_shed == cleaned_a_tool_shed and \ c_repo_name == a_repo_name and \ c_repo_owner == a_repo_owner and \ c_changeset_revision == a_changeset_revision and \ @@ -369,15 +371,18 @@ def different_revision_defines_tip_only_repository_dependency( trans, rd_tup, repository_dependencies ): """ - Determine if the only difference between rd_tup and a dependency definition in the list of repository_dependencies is the changeset_revision value. + Determine if the only difference between rd_tup and a dependency definition in the list of + repository_dependencies is the changeset_revision value. 
""" new_metadata_required = False rd_tool_shed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required, rd_only_if_compiling_contained_td = \ common_util.parse_repository_dependency_tuple( rd_tup ) + cleaned_rd_tool_shed = common_util.remove_protocol_from_tool_shed_url( rd_tool_shed ) for repository_dependency in repository_dependencies: tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ common_util.parse_repository_dependency_tuple( repository_dependency ) - if rd_tool_shed == tool_shed and rd_name == name and rd_owner == owner: + cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed ) + if cleaned_rd_tool_shed == cleaned_tool_shed and rd_name == name and rd_owner == owner: # Determine if the repository represented by the dependency tuple is an instance of the repository type TipOnly. required_repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) repository_type_class = trans.app.repository_types_registry.get_class_by_label( required_repository.type ) @@ -393,11 +398,11 @@ # Galaxy Side. repo_files_directory = repository.repo_files_directory( app ) repo_dir = repo_files_directory - repository_clone_url = suc.generate_clone_url_for_installed_repository( app, repository ) + repository_clone_url = common_util.generate_clone_url_for_installed_repository( app, repository ) except AttributeError: # Tool Shed side. 
repo_files_directory = repo_path - repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( None, repository ) + repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( None, repository ) relative_data_manager_dir = util.relpath( os.path.split( data_manager_config_filename )[0], repo_dir ) rel_data_manager_config_filename = os.path.join( relative_data_manager_dir, os.path.split( data_manager_config_filename )[1] ) data_managers = {} @@ -572,7 +577,7 @@ return valid_tool_dependencies_dict def generate_guid_for_object( repository_clone_url, guid_type, obj_id, version ): - tmp_url = suc.clean_repository_clone_url( repository_clone_url ) + tmp_url = common_util.remove_protocol_and_user_from_clone_url( repository_clone_url ) return '%s/%s/%s/%s' % ( tmp_url, guid_type, obj_id, version ) def generate_metadata_for_changeset_revision( app, repository, changeset_revision, repository_clone_url, @@ -1217,16 +1222,16 @@ changeset_revision, prior_installation_required, str( only_if_compiling_contained_td ) ] - cleaned_toolshed = td_common_util.clean_tool_shed_url( toolshed ) user = None repository = None + toolshed = common_util.remove_protocol_from_tool_shed_url( toolshed ) if app.name == 'galaxy': # We're in Galaxy. We reach here when we're generating the metadata for a tool dependencies package defined # for a repository or when we're generating metadata for an installed repository. See if we can locate the # installed repository via the changeset_revision defined in the repository_elem (it may be outdated). If we're # successful in locating an installed repository with the attributes defined in the repository_elem, we know it # is valid. 
- repository = suc.get_repository_for_dependency_relationship( app, cleaned_toolshed, name, owner, changeset_revision ) + repository = suc.get_repository_for_dependency_relationship( app, toolshed, name, owner, changeset_revision ) if repository: return repository_dependency_tup, is_valid, error_message else: @@ -1237,7 +1242,7 @@ updated_changeset_revisions = util.listify( text ) for updated_changeset_revision in updated_changeset_revisions: repository = suc.get_repository_for_dependency_relationship( app, - cleaned_toolshed, + toolshed, name, owner, updated_changeset_revision ) @@ -1648,9 +1653,12 @@ if reinstalling or repository.status not in [ trans.install_model.ToolShedRepository.installation_status.DEACTIVATED, trans.install_model.ToolShedRepository.installation_status.INSTALLED ]: # Since we're reinstalling, we need to send a request to the tool shed to get the README files. - url = suc.url_join( tool_shed_url, - 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \ - ( repository.name, repository.owner, repository.installed_changeset_revision ) ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( repository.name ), + str( repository.owner ), + str( repository.installed_changeset_revision ) ) + url = common_util.url_join( tool_shed_url, + 'repository/get_readme_files%s' % params ) raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url ) readme_files_dict = json.from_json_string( raw_text ) else: @@ -1722,8 +1730,7 @@ def reset_all_metadata_on_installed_repository( trans, id ): """Reset all metadata on a single tool shed repository installed into a Galaxy instance.""" repository = suc.get_installed_tool_shed_repository( trans, id ) - tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed ) - repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, repository ) + 
repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, repository ) tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app ) if relative_install_dir: original_metadata_dict = repository.metadata @@ -1791,7 +1798,7 @@ log.debug( "Resetting all metadata on repository: %s" % repository.name ) repo_dir = repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) - repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository ) + repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( trans, repository ) # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list. changeset_revisions = [] @@ -1969,7 +1976,7 @@ message = '' status = 'done' encoded_id = trans.security.encode_id( repository.id ) - repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository ) + repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( trans, repository ) repo_dir = repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, diff -r 47b3011ebe653f7218c70e0b458252783ef6f255 -r 4aee689e69dac57676de7f9ceb0a31f227bc0dd2 lib/tool_shed/util/readme_util.py --- a/lib/tool_shed/util/readme_util.py +++ b/lib/tool_shed/util/readme_util.py @@ -95,9 +95,11 @@ repo_info_tuple = repo_info_dict[ name ] description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \ suc.get_repo_info_tuple_contents( repo_info_tuple ) - # Handle README files. 
- url = suc.url_join( tool_shed_url, - 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % ( name, repository_owner, changeset_revision ) ) + # Handle changing HTTP protocols over time. + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( name, repository_owner, changeset_revision ) + url = common_util.url_join( tool_shed_url, + 'repository/get_readme_files%s' % params ) raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url ) readme_files_dict = json.from_json_string( raw_text ) return readme_files_dict This diff is so big that we needed to truncate the remainder. Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
participants (1)
-
commits-noreply@bitbucket.org