commit/galaxy-central: greg: Eliminate the use of the common_install_util module for Galaxy installs from the Tool Shed.
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/33fdaf7a0dcb/ Changeset: 33fdaf7a0dcb User: greg Date: 2014-06-19 04:08:00 Summary: Eliminate the use of the common_install_util module for Galaxy installs from the Tool Shed. Affected #: 10 files diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -16,7 +16,6 @@ import tool_shed.repository_types.util as rt_util from tool_shed.util import common_util -from tool_shed.util import common_install_util from tool_shed.util import data_manager_util from tool_shed.util import datatype_util from tool_shed.util import encoding_util @@ -49,7 +48,7 @@ repository_id = kwd[ 'id' ] repository = suc.get_installed_tool_shed_repository( trans.app, repository_id ) try: - common_install_util.activate_repository( trans.app, repository ) + trans.app.installed_repository_manager.activate_repository( repository ) except Exception, e: error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) ) log.exception( error_message ) @@ -1051,7 +1050,7 @@ includes_tool_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_tool_dependencies', False ) ) encoded_repo_info_dicts = util.listify( repo_information_dict.get( 'repo_info_dicts', [] ) ) repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ] - irm = install_manager.InstallRepositoryManager( trans.app ) + install_repository_manager = install_manager.InstallRepositoryManager( trans.app ) if ( not includes_tools_for_display_in_tool_panel and kwd.get( 'select_shed_tool_panel_config_button', False ) ) or \ ( includes_tools_for_display_in_tool_panel and kwd.get( 'select_tool_panel_section_button', False ) ): if updating: @@ 
-1080,7 +1079,7 @@ tool_path=tool_path, tool_shed_url=tool_shed_url ) created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts = \ - irm.handle_tool_shed_repositories( installation_dict, using_api=False ) + install_repository_manager.handle_tool_shed_repositories( installation_dict, using_api=False ) if created_or_updated_tool_shed_repositories: installation_dict = dict( created_or_updated_tool_shed_repositories=created_or_updated_tool_shed_repositories, filtered_repo_info_dicts=filtered_repo_info_dicts, @@ -1099,7 +1098,7 @@ tool_path=tool_path, tool_shed_url=tool_shed_url ) encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \ - irm.initiate_repository_installation( installation_dict ) + install_repository_manager.initiate_repository_installation( installation_dict ) return trans.fill_template( 'admin/tool_shed_repository/initiate_repository_installation.mako', encoded_kwd=encoded_kwd, query=query, @@ -1119,11 +1118,11 @@ # If we're installing or updating a single repository, see if it contains a readme or # dependencies that we can display. repo_info_dict = repo_info_dicts[ 0 ] - dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans, - tool_shed_url, - repo_info_dict, - includes_tool_dependencies, - updating=updating ) + dependencies_for_repository_dict = \ + trans.app.installed_repository_manager.get_dependencies_for_repository( tool_shed_url, + repo_info_dict, + includes_tool_dependencies, + updating=updating ) changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None ) if not has_repository_dependencies: has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False ) @@ -1151,23 +1150,24 @@ # defined repository (and possibly tool) dependencies. In this case, merging will result in newly defined # dependencies to be lost. 
We pass the updating parameter to make sure merging occurs only when appropriate. containers_dict = \ - irm.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, - tool_path=tool_path, - readme_files_dict=readme_files_dict, - installed_repository_dependencies=installed_repository_dependencies, - missing_repository_dependencies=missing_repository_dependencies, - installed_tool_dependencies=installed_tool_dependencies, - missing_tool_dependencies=missing_tool_dependencies, - updating=updating ) + install_repository_manager.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, + tool_path=tool_path, + readme_files_dict=readme_files_dict, + installed_repository_dependencies=installed_repository_dependencies, + missing_repository_dependencies=missing_repository_dependencies, + installed_tool_dependencies=installed_tool_dependencies, + missing_tool_dependencies=missing_tool_dependencies, + updating=updating ) else: # We're installing a list of repositories, each of which may have tool dependencies or repository dependencies. 
containers_dicts = [] + installed_repository_manager = trans.app.installed_repository_manager for repo_info_dict in repo_info_dicts: - dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans, - tool_shed_url, - repo_info_dict, - includes_tool_dependencies, - updating=updating ) + dependencies_for_repository_dict = \ + trans.app.installed_repository_manager.get_dependencies_for_repository( tool_shed_url, + repo_info_dict, + includes_tool_dependencies, + updating=updating ) changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None ) if not has_repository_dependencies: has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False ) @@ -1185,17 +1185,17 @@ name = dependencies_for_repository_dict.get( 'name', None ) repository_owner = dependencies_for_repository_dict.get( 'repository_owner', None ) containers_dict = \ - irm.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, - tool_path=tool_path, - readme_files_dict=None, - installed_repository_dependencies=installed_repository_dependencies, - missing_repository_dependencies=missing_repository_dependencies, - installed_tool_dependencies=installed_tool_dependencies, - missing_tool_dependencies=missing_tool_dependencies, - updating=updating ) + install_repository_manager.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, + tool_path=tool_path, + readme_files_dict=None, + installed_repository_dependencies=installed_repository_dependencies, + missing_repository_dependencies=missing_repository_dependencies, + installed_tool_dependencies=installed_tool_dependencies, + missing_tool_dependencies=missing_tool_dependencies, + updating=updating ) containers_dicts.append( containers_dict ) # Merge all containers into a single container. 
- containers_dict = irm.merge_containers_dicts_for_new_install( containers_dicts ) + containers_dict = install_repository_manager.merge_containers_dicts_for_new_install( containers_dicts ) # Handle tool dependencies check box. if trans.app.config.tool_dependency_dir is None: if includes_tool_dependencies: @@ -1592,11 +1592,11 @@ repository_metadata=None, tool_dependencies=tool_dependencies, repository_dependencies=repository_dependencies ) - dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans, - tool_shed_url, - repo_info_dict, - includes_tool_dependencies, - updating=False ) + irm = trans.app.installed_repository_manager + dependencies_for_repository_dict = irm.get_dependencies_for_repository( tool_shed_url, + repo_info_dict, + includes_tool_dependencies, + updating=False ) changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None ) has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False ) includes_tool_dependencies = dependencies_for_repository_dict.get( 'includes_tool_dependencies', False ) diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/galaxy_install/installed_repository_manager.py --- a/lib/tool_shed/galaxy_install/installed_repository_manager.py +++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py @@ -1,12 +1,18 @@ """ -Class encapsulating the management of repositories installed from Galaxy tool sheds. +Class encapsulating the management of repositories installed into Galaxy from the Tool Shed. 
""" +import copy import logging import os +from galaxy import util from tool_shed.util import common_util +from tool_shed.util import container_util +from tool_shed.util import data_manager_util from tool_shed.util import datatype_util from tool_shed.util import repository_dependency_util +from tool_shed.util import shed_util_common as suc from tool_shed.util import tool_dependency_util +from tool_shed.util import tool_util from tool_shed.util import xml_util from galaxy.model.orm import and_ @@ -66,18 +72,68 @@ # Load defined dependency relationships for installed tool shed repositories and their contents. self.load_dependency_relationships() + def activate_repository( self, repository ): + """Activate an installed tool shed repository that has been marked as deactivated.""" + repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository ) + shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( self.app, repository ) + repository.deleted = False + repository.status = self.install_model.ToolShedRepository.installation_status.INSTALLED + if repository.includes_tools_for_display_in_tool_panel: + metadata = repository.metadata + repository_tools_tups = suc.get_repository_tools_tups( self.app, metadata ) + # Reload tools into the appropriate tool panel section. 
+ tool_panel_dict = repository.metadata[ 'tool_panel_section' ] + tool_util.add_to_tool_panel( self.app, + repository.name, + repository_clone_url, + repository.installed_changeset_revision, + repository_tools_tups, + repository.owner, + shed_tool_conf, + tool_panel_dict, + new_install=False ) + if repository.includes_data_managers: + tp, data_manager_relative_install_dir = repository.get_tool_relative_path( self.app ) + # Hack to add repository.name here, which is actually the root of the installed repository + data_manager_relative_install_dir = os.path.join( data_manager_relative_install_dir, repository.name ) + new_data_managers = data_manager_util.install_data_managers( self.app, + self.app.config.shed_data_manager_config_file, + metadata, + repository.get_shed_config_dict( self.app ), + data_manager_relative_install_dir, + repository, + repository_tools_tups ) + self.install_model.context.add( repository ) + self.install_model.context.flush() + if repository.includes_datatypes: + if tool_path: + repository_install_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir ) ) + else: + repository_install_dir = os.path.abspath( relative_install_dir ) + # Activate proprietary datatypes. + installed_repository_dict = datatype_util.load_installed_datatypes( self.app, + repository, + repository_install_dir, + deactivate=False ) + if installed_repository_dict: + converter_path = installed_repository_dict.get( 'converter_path' ) + if converter_path is not None: + datatype_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=False ) + display_path = installed_repository_dict.get( 'display_path' ) + if display_path is not None: + datatype_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=False ) + def add_entry_to_installed_repository_dependencies_of_installed_repositories( self, repository ): """ Add an entry to self.installed_repository_dependencies_of_installed_repositories. 
A side-effect of this method is the population of self.installed_dependent_repositories_of_installed_repositories. Since this method discovers all repositories required by the received repository, it can use the list to add entries to the reverse dictionary. """ - repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository ) + repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository ) tool_shed, name, owner, installed_changeset_revision = repository_tup # Get the list of repository dependencies for this repository. status = self.install_model.ToolShedRepository.installation_status.INSTALLED - repository_dependency_tups = \ - repository_dependency_util.get_repository_dependency_tups_for_installed_repository( self.app, repository, status=status ) + repository_dependency_tups = self.get_repository_dependency_tups_for_installed_repository( repository, status=status ) # Add an entry to self.installed_repository_dependencies_of_installed_repositories. 
if repository_tup not in self.installed_repository_dependencies_of_installed_repositories: debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner ) @@ -97,21 +153,21 @@ def add_entry_to_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ): """Add an entry to self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies.""" - tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) + tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) if tool_dependency_tup not in self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies: tool_shed_repository_id, name, version, type = tool_dependency_tup debug_msg = "Adding an entry for version %s of %s %s " % ( version, type, name ) debug_msg += "to installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies." 
log.debug( debug_msg ) status = self.install_model.ToolDependency.installation_status.INSTALLED - installed_runtime_dependent_tool_dependency_tups = \ - tool_dependency_util.get_runtime_dependent_tool_dependency_tuples( self.app, tool_dependency, status=status ) + installed_runtime_dependent_tool_dependency_tups = self.get_runtime_dependent_tool_dependency_tuples( tool_dependency, + status=status ) self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies[ tool_dependency_tup ] = \ installed_runtime_dependent_tool_dependency_tups def add_entry_to_installed_tool_dependencies_of_installed_repositories( self, repository ): """Add an entry to self.installed_tool_dependencies_of_installed_repositories.""" - repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository ) + repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository ) if repository_tup not in self.installed_tool_dependencies_of_installed_repositories: tool_shed, name, owner, installed_changeset_revision = repository_tup debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner ) @@ -120,38 +176,37 @@ installed_tool_dependency_tups = [] for tool_dependency in repository.tool_dependencies: if tool_dependency.status == self.app.install_model.ToolDependency.installation_status.INSTALLED: - tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) + tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) installed_tool_dependency_tups.append( tool_dependency_tup ) self.installed_tool_dependencies_of_installed_repositories[ repository_tup ] = installed_tool_dependency_tups def add_entry_to_repository_dependencies_of_installed_repositories( self, repository ): """Add an entry to self.repository_dependencies_of_installed_repositories.""" - 
repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository ) + repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository ) if repository_tup not in self.repository_dependencies_of_installed_repositories: tool_shed, name, owner, installed_changeset_revision = repository_tup debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner ) debug_msg += "to repository_dependencies_of_installed_repositories." log.debug( debug_msg ) - repository_dependency_tups = \ - repository_dependency_util.get_repository_dependency_tups_for_installed_repository( self.app, repository, status=None ) + repository_dependency_tups = self.get_repository_dependency_tups_for_installed_repository( repository, status=None ) self.repository_dependencies_of_installed_repositories[ repository_tup ] = repository_dependency_tups def add_entry_to_runtime_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ): """Add an entry to self.runtime_tool_dependencies_of_installed_tool_dependencies.""" - tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) + tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) if tool_dependency_tup not in self.runtime_tool_dependencies_of_installed_tool_dependencies: tool_shed_repository_id, name, version, type = tool_dependency_tup debug_msg = "Adding an entry for version %s of %s %s " % ( version, type, name ) debug_msg += "to runtime_tool_dependencies_of_installed_tool_dependencies." 
log.debug( debug_msg ) - runtime_dependent_tool_dependency_tups = \ - tool_dependency_util.get_runtime_dependent_tool_dependency_tuples( self.app, tool_dependency, status=None ) + runtime_dependent_tool_dependency_tups = self.get_runtime_dependent_tool_dependency_tuples( tool_dependency, + status=None ) self.runtime_tool_dependencies_of_installed_tool_dependencies[ tool_dependency_tup ] = \ runtime_dependent_tool_dependency_tups def add_entry_to_tool_dependencies_of_installed_repositories( self, repository ): """Add an entry to self.tool_dependencies_of_installed_repositories.""" - repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository ) + repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository ) if repository_tup not in self.tool_dependencies_of_installed_repositories: tool_shed, name, owner, installed_changeset_revision = repository_tup debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner ) @@ -159,14 +214,357 @@ log.debug( debug_msg ) tool_dependency_tups = [] for tool_dependency in repository.tool_dependencies: - tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) + tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) tool_dependency_tups.append( tool_dependency_tup ) self.tool_dependencies_of_installed_repositories[ repository_tup ] = tool_dependency_tups def get_containing_repository_for_tool_dependency( self, tool_dependency_tup ): tool_shed_repository_id, name, version, type = tool_dependency_tup return self.app.install_model.context.query( self.app.install_model.ToolShedRepository ).get( tool_shed_repository_id ) - + + def get_dependencies_for_repository( self, tool_shed_url, repo_info_dict, includes_tool_dependencies, updating=False ): + """ + Return dictionaries containing the sets 
of installed and missing tool dependencies and repository + dependencies associated with the repository defined by the received repo_info_dict. + """ + repository = None + installed_rd = {} + installed_td = {} + missing_rd = {} + missing_td = {} + name = repo_info_dict.keys()[ 0 ] + repo_info_tuple = repo_info_dict[ name ] + description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ + suc.get_repo_info_tuple_contents( repo_info_tuple ) + if tool_dependencies: + if not includes_tool_dependencies: + includes_tool_dependencies = True + # Inspect the tool_dependencies dictionary to separate the installed and missing tool dependencies. + # We don't add to installed_td and missing_td here because at this point they are empty. + installed_td, missing_td = self.get_installed_and_missing_tool_dependencies_for_repository( tool_dependencies ) + # In cases where a repository dependency is required only for compiling a dependent repository's + # tool dependency, the value of repository_dependencies will be an empty dictionary here. + if repository_dependencies: + # We have a repository with one or more defined repository dependencies. + if not repository: + repository = suc.get_repository_for_dependency_relationship( self.app, + tool_shed_url, + name, + repository_owner, + changeset_revision ) + if not updating and repository and repository.metadata: + installed_rd, missing_rd = self.get_installed_and_missing_repository_dependencies( repository ) + else: + installed_rd, missing_rd = \ + self.get_installed_and_missing_repository_dependencies_for_new_or_updated_install( repo_info_tuple ) + # Discover all repository dependencies and retrieve information for installing them. 
+ all_repo_info_dict = repository_dependency_util.get_required_repo_info_dicts( self.app, + tool_shed_url, + util.listify( repo_info_dict ) ) + has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False ) + has_repository_dependencies_only_if_compiling_contained_td = \ + all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False ) + includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) + includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False ) + includes_tools = all_repo_info_dict.get( 'includes_tools', False ) + required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] ) + # Display tool dependencies defined for each of the repository dependencies. + if required_repo_info_dicts: + required_tool_dependencies = {} + for rid in required_repo_info_dicts: + for name, repo_info_tuple in rid.items(): + description, repository_clone_url, changeset_revision, ctx_rev, \ + repository_owner, rid_repository_dependencies, rid_tool_dependencies = \ + suc.get_repo_info_tuple_contents( repo_info_tuple ) + if rid_tool_dependencies: + for td_key, td_dict in rid_tool_dependencies.items(): + if td_key not in required_tool_dependencies: + required_tool_dependencies[ td_key ] = td_dict + if required_tool_dependencies: + # Discover and categorize all tool dependencies defined for this repository's repository dependencies. 
+ required_installed_td, required_missing_td = \ + self.get_installed_and_missing_tool_dependencies_for_repository( required_tool_dependencies ) + if required_installed_td: + if not includes_tool_dependencies: + includes_tool_dependencies = True + for td_key, td_dict in required_installed_td.items(): + if td_key not in installed_td: + installed_td[ td_key ] = td_dict + if required_missing_td: + if not includes_tool_dependencies: + includes_tool_dependencies = True + for td_key, td_dict in required_missing_td.items(): + if td_key not in missing_td: + missing_td[ td_key ] = td_dict + else: + # We have a single repository with (possibly) no defined repository dependencies. + all_repo_info_dict = repository_dependency_util.get_required_repo_info_dicts( self.app, + tool_shed_url, + util.listify( repo_info_dict ) ) + has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False ) + has_repository_dependencies_only_if_compiling_contained_td = \ + all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False ) + includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) + includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False ) + includes_tools = all_repo_info_dict.get( 'includes_tools', False ) + required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] ) + dependencies_for_repository_dict = \ + dict( changeset_revision=changeset_revision, + has_repository_dependencies=has_repository_dependencies, + has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td, + includes_tool_dependencies=includes_tool_dependencies, + includes_tools=includes_tools, + includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel, + installed_repository_dependencies=installed_rd, + installed_tool_dependencies=installed_td, + 
missing_repository_dependencies=missing_rd, + missing_tool_dependencies=missing_td, + name=name, + repository_owner=repository_owner ) + return dependencies_for_repository_dict + + def get_installed_and_missing_repository_dependencies( self, repository ): + """ + Return the installed and missing repository dependencies for a tool shed repository that has a record + in the Galaxy database, but may or may not be installed. In this case, the repository dependencies are + associated with the repository in the database. Do not include a repository dependency if it is required + only to compile a tool dependency defined for the dependent repository since these special kinds of repository + dependencies are really a dependency of the dependent repository's contained tool dependency, and only + if that tool dependency requires compilation. + """ + missing_repository_dependencies = {} + installed_repository_dependencies = {} + has_repository_dependencies = repository.has_repository_dependencies + if has_repository_dependencies: + # The repository dependencies container will include only the immediate repository + # dependencies of this repository, so the container will be only a single level in depth. + metadata = repository.metadata + installed_rd_tups = [] + missing_rd_tups = [] + for tsr in repository.repository_dependencies: + prior_installation_required = suc.set_prior_installation_required( self.app, repository, tsr ) + only_if_compiling_contained_td = suc.set_only_if_compiling_contained_td( repository, tsr ) + rd_tup = [ tsr.tool_shed, + tsr.name, + tsr.owner, + tsr.changeset_revision, + prior_installation_required, + only_if_compiling_contained_td, + tsr.id, + tsr.status ] + if tsr.status == self.app.install_model.ToolShedRepository.installation_status.INSTALLED: + installed_rd_tups.append( rd_tup ) + else: + # We'll only add the rd_tup to the missing_rd_tups list if the received repository + # has tool dependencies that are not correctly installed. 
This may prove to be a + # weak check since the repository in question may not have anything to do with + # compiling the missing tool dependencies. If we discover that this is a problem, + # more granular checking will be necessary here. + if repository.missing_tool_dependencies: + if not self.repository_dependency_needed_only_for_compiling_tool_dependency( repository, tsr ): + missing_rd_tups.append( rd_tup ) + else: + missing_rd_tups.append( rd_tup ) + if installed_rd_tups or missing_rd_tups: + # Get the description from the metadata in case it has a value. + repository_dependencies = metadata.get( 'repository_dependencies', {} ) + description = repository_dependencies.get( 'description', None ) + # We need to add a root_key entry to one or both of installed_repository_dependencies dictionary and the + # missing_repository_dependencies dictionaries for proper display parsing. + root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed, + repository.name, + repository.owner, + repository.installed_changeset_revision, + prior_installation_required, + only_if_compiling_contained_td ) + if installed_rd_tups: + installed_repository_dependencies[ 'root_key' ] = root_key + installed_repository_dependencies[ root_key ] = installed_rd_tups + installed_repository_dependencies[ 'description' ] = description + if missing_rd_tups: + missing_repository_dependencies[ 'root_key' ] = root_key + missing_repository_dependencies[ root_key ] = missing_rd_tups + missing_repository_dependencies[ 'description' ] = description + return installed_repository_dependencies, missing_repository_dependencies + + def get_installed_and_missing_repository_dependencies_for_new_or_updated_install( self, repo_info_tuple ): + """ + Parse the received repository_dependencies dictionary that is associated with a repository being + installed into Galaxy for the first time and attempt to determine repository dependencies that are + already installed and those 
that are not. + """ + missing_repository_dependencies = {} + installed_repository_dependencies = {} + missing_rd_tups = [] + installed_rd_tups = [] + description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ + suc.get_repo_info_tuple_contents( repo_info_tuple ) + if repository_dependencies: + description = repository_dependencies[ 'description' ] + root_key = repository_dependencies[ 'root_key' ] + # The repository dependencies container will include only the immediate repository dependencies of + # this repository, so the container will be only a single level in depth. + for key, rd_tups in repository_dependencies.items(): + if key in [ 'description', 'root_key' ]: + continue + for rd_tup in rd_tups: + tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ + common_util.parse_repository_dependency_tuple( rd_tup ) + # Updates to installed repository revisions may have occurred, so make sure to locate the + # appropriate repository revision if one exists. We need to create a temporary repo_info_tuple + # that includes the correct repository owner which we get from the current rd_tup. 
The current + # tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, + # repository_dependencies, installed_td ) + tmp_clone_url = common_util.generate_clone_url_from_repo_info_tup( self.app, rd_tup ) + tmp_repo_info_tuple = ( None, tmp_clone_url, changeset_revision, None, owner, None, None ) + repository, installed_changeset_revision = suc.repository_was_previously_installed( self.app, + tool_shed, + name, + tmp_repo_info_tuple, + from_tip=False ) + if repository: + new_rd_tup = [ tool_shed, + name, + owner, + changeset_revision, + prior_installation_required, + only_if_compiling_contained_td, + repository.id, + repository.status ] + if repository.status == self.install_model.ToolShedRepository.installation_status.INSTALLED: + if new_rd_tup not in installed_rd_tups: + installed_rd_tups.append( new_rd_tup ) + else: + # A repository dependency that is not installed will not be considered missing if its value + # for only_if_compiling_contained_td is True This is because this type of repository dependency + # will only be considered at the time that the specified tool dependency is being installed, and + # even then only if the compiled binary of the tool dependency could not be installed due to the + # unsupported installation environment. + if not util.asbool( only_if_compiling_contained_td ): + if new_rd_tup not in missing_rd_tups: + missing_rd_tups.append( new_rd_tup ) + else: + new_rd_tup = [ tool_shed, + name, + owner, + changeset_revision, + prior_installation_required, + only_if_compiling_contained_td, + None, + 'Never installed' ] + if not util.asbool( only_if_compiling_contained_td ): + # A repository dependency that is not installed will not be considered missing if its value for + # only_if_compiling_contained_td is True - see above... 
+ if new_rd_tup not in missing_rd_tups: + missing_rd_tups.append( new_rd_tup ) + if installed_rd_tups: + installed_repository_dependencies[ 'root_key' ] = root_key + installed_repository_dependencies[ root_key ] = installed_rd_tups + installed_repository_dependencies[ 'description' ] = description + if missing_rd_tups: + missing_repository_dependencies[ 'root_key' ] = root_key + missing_repository_dependencies[ root_key ] = missing_rd_tups + missing_repository_dependencies[ 'description' ] = description + return installed_repository_dependencies, missing_repository_dependencies + + def get_installed_and_missing_tool_dependencies_for_repository( self, tool_dependencies_dict ): + """ + Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories + being installed into Galaxy. + """ + # FIXME: This implementation breaks when updates to a repository contain dependencies that result in + # multiple entries for a specific tool dependency. A scenario where this can happen is where 2 repositories + # define the same dependency internally (not using the complex repository dependency definition to a separate + # package repository approach). If 2 repositories contain the same tool_dependencies.xml file, one dependency + # will be lost since the values in these returned dictionaries are not lists. All tool dependency dictionaries + # should have lists as values. These scenarios are probably extreme corner cases, but still should be handled. + installed_tool_dependencies = {} + missing_tool_dependencies = {} + if tool_dependencies_dict: + # Make sure not to change anything in the received tool_dependencies_dict as that would be a bad side-effect! + tmp_tool_dependencies_dict = copy.deepcopy( tool_dependencies_dict ) + for td_key, val in tmp_tool_dependencies_dict.items(): + # Default the status to NEVER_INSTALLED. 
+ tool_dependency_status = self.install_model.ToolDependency.installation_status.NEVER_INSTALLED + # Set environment tool dependencies are a list. + if td_key == 'set_environment': + new_val = [] + for requirement_dict in val: + # {'repository_name': 'xx', + # 'name': 'bwa', + # 'version': '0.5.9', + # 'repository_owner': 'yy', + # 'changeset_revision': 'zz', + # 'type': 'package'} + tool_dependency = \ + tool_dependency_util.get_tool_dependency_by_name_version_type( self.app, + requirement_dict.get( 'name', None ), + requirement_dict.get( 'version', None ), + requirement_dict.get( 'type', 'package' ) ) + if tool_dependency: + tool_dependency_status = tool_dependency.status + requirement_dict[ 'status' ] = tool_dependency_status + new_val.append( requirement_dict ) + if tool_dependency_status in [ self.install_model.ToolDependency.installation_status.INSTALLED ]: + if td_key in installed_tool_dependencies: + installed_tool_dependencies[ td_key ].extend( new_val ) + else: + installed_tool_dependencies[ td_key ] = new_val + else: + if td_key in missing_tool_dependencies: + missing_tool_dependencies[ td_key ].extend( new_val ) + else: + missing_tool_dependencies[ td_key ] = new_val + else: + # The val dictionary looks something like this: + # {'repository_name': 'xx', + # 'name': 'bwa', + # 'version': '0.5.9', + # 'repository_owner': 'yy', + # 'changeset_revision': 'zz', + # 'type': 'package'} + tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type( self.app, + val.get( 'name', None ), + val.get( 'version', None ), + val.get( 'type', 'package' ) ) + if tool_dependency: + tool_dependency_status = tool_dependency.status + val[ 'status' ] = tool_dependency_status + if tool_dependency_status in [ self.install_model.ToolDependency.installation_status.INSTALLED ]: + installed_tool_dependencies[ td_key ] = val + else: + missing_tool_dependencies[ td_key ] = val + return installed_tool_dependencies, missing_tool_dependencies + + def 
get_repository_dependency_tups_for_installed_repository( self, repository, dependency_tups=None, status=None ): + """ + Return a list of tuples defining tool_shed_repository objects (whose status can be anything) required by the + received repository. The returned list defines the entire repository dependency tree. This method is called + only from Galaxy. + """ + if dependency_tups is None: + dependency_tups = [] + repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository ) + for rrda in repository.required_repositories: + repository_dependency = rrda.repository_dependency + required_repository = repository_dependency.repository + if status is None or required_repository.status == status: + required_repository_tup = self.get_repository_tuple_for_installed_repository_manager( required_repository ) + if required_repository_tup == repository_tup: + # We have a circular repository dependency relationship, skip this entry. + continue + if required_repository_tup not in dependency_tups: + dependency_tups.append( required_repository_tup ) + return get_repository_dependency_tups_for_installed_repository( required_repository, + dependency_tups=dependency_tups ) + return dependency_tups + + def get_repository_tuple_for_installed_repository_manager( self, repository ): + return ( str( repository.tool_shed ), + str( repository.name ), + str( repository.owner ), + str( repository.installed_changeset_revision ) ) + def get_repository_install_dir( self, tool_shed_repository ): for tool_config in self.tool_configs: tree, error_message = xml_util.parse_xml( tool_config ) @@ -186,6 +584,38 @@ return relative_path return None + def get_runtime_dependent_tool_dependency_tuples( self, tool_dependency, status=None ): + """ + Return the list of tool dependency objects that require the received tool dependency at run time. The returned + list will be filtered by the received status if it is not None. This method is called only from Galaxy.
+ """ + runtime_dependent_tool_dependency_tups = [] + required_env_shell_file_path = tool_dependency.get_env_shell_file_path( self.app ) + if required_env_shell_file_path: + required_env_shell_file_path = os.path.abspath( required_env_shell_file_path ) + if required_env_shell_file_path is not None: + for td in self.app.install_model.context.query( self.app.install_model.ToolDependency ): + if status is None or td.status == status: + env_shell_file_path = td.get_env_shell_file_path( self.app ) + if env_shell_file_path is not None: + try: + contents = open( env_shell_file_path, 'r' ).read() + except Exception, e: + contents = None + log.debug( 'Error reading file %s, so cannot determine if package %s requires package %s at run time: %s' % \ + ( str( env_shell_file_path ), str( td.name ), str( tool_dependency.name ), str( e ) ) ) + if contents is not None and contents.find( required_env_shell_file_path ) >= 0: + td_tuple = get_tool_dependency_tuple_for_installed_repository_manager( td ) + runtime_dependent_tool_dependency_tups.append( td_tuple ) + return runtime_dependent_tool_dependency_tups + + def get_tool_dependency_tuple_for_installed_repository_manager( self, tool_dependency ): + if tool_dependency.type is None: + type = None + else: + type = str( tool_dependency.type ) + return ( tool_dependency.tool_shed_repository_id, str( tool_dependency.name ), str( tool_dependency.version ), type ) + def handle_repository_install( self, repository ): """Load the dependency relationships for a repository that was just installed or reinstalled.""" # Populate self.repository_dependencies_of_installed_repositories. 
@@ -205,7 +635,7 @@ def handle_repository_uninstall( self, repository ): """Remove the dependency relationships for a repository that was just uninstalled.""" for tool_dependency in repository.tool_dependencies: - tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) + tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) # Remove this tool_dependency from all values in # self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies altered_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies = {} @@ -241,8 +671,8 @@ # Populate self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies. self.add_entry_to_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( tool_dependency ) # Populate self.installed_tool_dependencies_of_installed_repositories. - repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository ) - tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) + repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository ) + tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) if repository_tup in self.installed_tool_dependencies_of_installed_repositories: self.installed_tool_dependencies_of_installed_repositories[ repository_tup ].append( tool_dependency_tup ) else: @@ -406,7 +836,7 @@ is removal of appropriate value items from self.installed_dependent_repositories_of_installed_repositories. """ # Remove tuples defining this repository from value lists in self.installed_dependent_repositories_of_installed_repositories. 
- repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository ) + repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository ) tool_shed, name, owner, installed_changeset_revision = repository_tup altered_installed_dependent_repositories_of_installed_repositories = {} for r_tup, v_tups in self.installed_dependent_repositories_of_installed_repositories.items(): @@ -431,7 +861,7 @@ def remove_entry_from_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ): """Remove an entry from self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies.""" - tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) + tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) if tool_dependency_tup in self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies: tool_shed_repository_id, name, version, type = tool_dependency_tup debug_msg = "Removing entry for version %s of %s %s " % ( version, type, name ) @@ -441,7 +871,7 @@ def remove_entry_from_installed_tool_dependencies_of_installed_repositories( self, repository ): """Remove an entry from self.installed_tool_dependencies_of_installed_repositories.""" - repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository ) + repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository ) if repository_tup in self.installed_tool_dependencies_of_installed_repositories: tool_shed, name, owner, installed_changeset_revision = repository_tup debug_msg = "Removing entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner ) @@ -451,7 +881,7 @@ def remove_entry_from_repository_dependencies_of_installed_repositories( self, repository ): """Remove an entry 
from self.repository_dependencies_of_installed_repositories.""" - repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository ) + repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository ) if repository_tup in self.repository_dependencies_of_installed_repositories: tool_shed, name, owner, installed_changeset_revision = repository_tup debug_msg = "Removing entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner ) @@ -461,7 +891,7 @@ def remove_entry_from_runtime_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ): """Remove an entry from self.runtime_tool_dependencies_of_installed_tool_dependencies.""" - tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) + tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ) if tool_dependency_tup in self.runtime_tool_dependencies_of_installed_tool_dependencies: tool_shed_repository_id, name, version, type = tool_dependency_tup debug_msg = "Removing entry for version %s of %s %s from runtime_tool_dependencies_of_installed_tool_dependencies." % \ @@ -471,10 +901,26 @@ def remove_entry_from_tool_dependencies_of_installed_repositories( self, repository ): """Remove an entry from self.tool_dependencies_of_installed_repositories.""" - repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository ) + repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository ) if repository_tup in self.tool_dependencies_of_installed_repositories: tool_shed, name, owner, installed_changeset_revision = repository_tup debug_msg = "Removing entry for revision %s of repository %s owned by %s from tool_dependencies_of_installed_repositories." 
% \ ( installed_changeset_revision, name, owner ) log.debug( debug_msg ) del self.tool_dependencies_of_installed_repositories[ repository_tup ] + + def repository_dependency_needed_only_for_compiling_tool_dependency( self, repository, repository_dependency ): + for rd_tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td: + tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup + # TODO: we may discover that we need to check more than just installed_changeset_revision and changeset_revision here, in which + # case we'll need to contact the tool shed to get the list of all possible changeset_revisions. + cleaned_tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( tool_shed ) + cleaned_repository_dependency_tool_shed = \ + common_util.remove_protocol_and_port_from_tool_shed_url( str( repository_dependency.tool_shed ) ) + if cleaned_repository_dependency_tool_shed == cleaned_tool_shed and \ + repository_dependency.name == name and \ + repository_dependency.owner == owner and \ + ( repository_dependency.installed_changeset_revision == changeset_revision or \ + repository_dependency.changeset_revision == changeset_revision ): + return True + return False diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/galaxy_install/repair_repository_manager.py --- a/lib/tool_shed/galaxy_install/repair_repository_manager.py +++ b/lib/tool_shed/galaxy_install/repair_repository_manager.py @@ -5,7 +5,6 @@ from tool_shed.galaxy_install import install_manager -from tool_shed.util import common_install_util from tool_shed.util import common_util from tool_shed.util import container_util from tool_shed.util import shed_util_common as suc @@ -148,7 +147,7 @@ repair_dict = {} if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]: try: - common_install_util.activate_repository( self.app, 
repository ) + self.app.installed_repository_manager.activate_repository( repository ) except Exception, e: error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) ) log.debug( error_message ) diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/common_install_util.py --- a/lib/tool_shed/util/common_install_util.py +++ /dev/null @@ -1,499 +0,0 @@ -import copy -import json -import logging -import os -import urllib -import urllib2 -from galaxy import util -from galaxy import web -import tool_shed.util.shed_util_common as suc -from tool_shed.util import common_util -from tool_shed.util import container_util -from tool_shed.util import encoding_util -from tool_shed.util import data_manager_util -from tool_shed.util import datatype_util -from tool_shed.util import tool_dependency_util -from tool_shed.util import tool_util - -log = logging.getLogger( __name__ ) - -def activate_repository( app, repository ): - """Activate an installed tool shed repository that has been marked as deactivated.""" - install_model = app.install_model - repository_clone_url = common_util.generate_clone_url_for_installed_repository( app, repository ) - shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( app, repository ) - repository.deleted = False - repository.status = install_model.ToolShedRepository.installation_status.INSTALLED - if repository.includes_tools_for_display_in_tool_panel: - metadata = repository.metadata - repository_tools_tups = suc.get_repository_tools_tups( app, metadata ) - # Reload tools into the appropriate tool panel section. 
- tool_panel_dict = repository.metadata[ 'tool_panel_section' ] - tool_util.add_to_tool_panel( app, - repository.name, - repository_clone_url, - repository.installed_changeset_revision, - repository_tools_tups, - repository.owner, - shed_tool_conf, - tool_panel_dict, - new_install=False ) - if repository.includes_data_managers: - tp, data_manager_relative_install_dir = repository.get_tool_relative_path( app ) - # Hack to add repository.name here, which is actually the root of the installed repository - data_manager_relative_install_dir = os.path.join( data_manager_relative_install_dir, repository.name ) - new_data_managers = data_manager_util.install_data_managers( app, - app.config.shed_data_manager_config_file, - metadata, - repository.get_shed_config_dict( app ), - data_manager_relative_install_dir, - repository, - repository_tools_tups ) - install_model.context.add( repository ) - install_model.context.flush() - if repository.includes_datatypes: - if tool_path: - repository_install_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir ) ) - else: - repository_install_dir = os.path.abspath( relative_install_dir ) - # Activate proprietary datatypes. 
- installed_repository_dict = datatype_util.load_installed_datatypes( app, repository, repository_install_dir, deactivate=False ) - if installed_repository_dict: - converter_path = installed_repository_dict.get( 'converter_path' ) - if converter_path is not None: - datatype_util.load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ) - display_path = installed_repository_dict.get( 'display_path' ) - if display_path is not None: - datatype_util.load_installed_display_applications( app, installed_repository_dict, deactivate=False ) - -def get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies, updating=False ): - """ - Return dictionaries containing the sets of installed and missing tool dependencies and repository - dependencies associated with the repository defined by the received repo_info_dict. - """ - repository = None - installed_rd = {} - installed_td = {} - missing_rd = {} - missing_td = {} - name = repo_info_dict.keys()[ 0 ] - repo_info_tuple = repo_info_dict[ name ] - description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ - suc.get_repo_info_tuple_contents( repo_info_tuple ) - if tool_dependencies: - if not includes_tool_dependencies: - includes_tool_dependencies = True - # Inspect the tool_dependencies dictionary to separate the installed and missing tool dependencies. - # We don't add to installed_td and missing_td here because at this point they are empty. - installed_td, missing_td = \ - get_installed_and_missing_tool_dependencies_for_repository( trans, tool_dependencies ) - # In cases where a repository dependency is required only for compiling a dependent repository's - # tool dependency, the value of repository_dependencies will be an empty dictionary here. - if repository_dependencies: - # We have a repository with one or more defined repository dependencies. 
- if not repository: - repository = suc.get_repository_for_dependency_relationship( trans.app, - tool_shed_url, - name, - repository_owner, - changeset_revision ) - if not updating and repository and repository.metadata: - installed_rd, missing_rd = get_installed_and_missing_repository_dependencies( trans, repository ) - else: - installed_rd, missing_rd = \ - get_installed_and_missing_repository_dependencies_for_new_or_updated_install( trans, repo_info_tuple ) - # Discover all repository dependencies and retrieve information for installing them. - all_repo_info_dict = get_required_repo_info_dicts( trans.app, tool_shed_url, util.listify( repo_info_dict ) ) - has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False ) - has_repository_dependencies_only_if_compiling_contained_td = \ - all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False ) - includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) - includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False ) - includes_tools = all_repo_info_dict.get( 'includes_tools', False ) - required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] ) - # Display tool dependencies defined for each of the repository dependencies. 
- if required_repo_info_dicts: - required_tool_dependencies = {} - for rid in required_repo_info_dicts: - for name, repo_info_tuple in rid.items(): - description, repository_clone_url, changeset_revision, ctx_rev, \ - repository_owner, rid_repository_dependencies, rid_tool_dependencies = \ - suc.get_repo_info_tuple_contents( repo_info_tuple ) - if rid_tool_dependencies: - for td_key, td_dict in rid_tool_dependencies.items(): - if td_key not in required_tool_dependencies: - required_tool_dependencies[ td_key ] = td_dict - if required_tool_dependencies: - # Discover and categorize all tool dependencies defined for this repository's repository dependencies. - required_installed_td, required_missing_td = \ - get_installed_and_missing_tool_dependencies_for_repository( trans, required_tool_dependencies ) - if required_installed_td: - if not includes_tool_dependencies: - includes_tool_dependencies = True - for td_key, td_dict in required_installed_td.items(): - if td_key not in installed_td: - installed_td[ td_key ] = td_dict - if required_missing_td: - if not includes_tool_dependencies: - includes_tool_dependencies = True - for td_key, td_dict in required_missing_td.items(): - if td_key not in missing_td: - missing_td[ td_key ] = td_dict - else: - # We have a single repository with (possibly) no defined repository dependencies. 
- all_repo_info_dict = get_required_repo_info_dicts( trans.app, tool_shed_url, util.listify( repo_info_dict ) ) - has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False ) - has_repository_dependencies_only_if_compiling_contained_td = \ - all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False ) - includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) - includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False ) - includes_tools = all_repo_info_dict.get( 'includes_tools', False ) - required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] ) - dependencies_for_repository_dict = \ - dict( changeset_revision=changeset_revision, - has_repository_dependencies=has_repository_dependencies, - has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td, - includes_tool_dependencies=includes_tool_dependencies, - includes_tools=includes_tools, - includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel, - installed_repository_dependencies=installed_rd, - installed_tool_dependencies=installed_td, - missing_repository_dependencies=missing_rd, - missing_tool_dependencies=missing_td, - name=name, - repository_owner=repository_owner ) - return dependencies_for_repository_dict - -def get_installed_and_missing_repository_dependencies( trans, repository ): - """ - Return the installed and missing repository dependencies for a tool shed repository that has a record - in the Galaxy database, but may or may not be installed. In this case, the repository dependencies are - associated with the repository in the database. 
Do not include a repository dependency if it is required - only to compile a tool dependency defined for the dependent repository since these special kinds of repository - dependencies are really a dependency of the dependent repository's contained tool dependency, and only - if that tool dependency requires compilation. - """ - missing_repository_dependencies = {} - installed_repository_dependencies = {} - has_repository_dependencies = repository.has_repository_dependencies - if has_repository_dependencies: - # The repository dependencies container will include only the immediate repository dependencies of this repository, so the container - # will be only a single level in depth. - metadata = repository.metadata - installed_rd_tups = [] - missing_rd_tups = [] - for tsr in repository.repository_dependencies: - prior_installation_required = suc.set_prior_installation_required( trans.app, repository, tsr ) - only_if_compiling_contained_td = suc.set_only_if_compiling_contained_td( repository, tsr ) - rd_tup = [ tsr.tool_shed, - tsr.name, - tsr.owner, - tsr.changeset_revision, - prior_installation_required, - only_if_compiling_contained_td, - tsr.id, - tsr.status ] - if tsr.status == trans.install_model.ToolShedRepository.installation_status.INSTALLED: - installed_rd_tups.append( rd_tup ) - else: - # We'll only add the rd_tup to the missing_rd_tups list if the received repository has tool dependencies that are not - # correctly installed. This may prove to be a weak check since the repository in question may not have anything to do - # with compiling the missing tool dependencies. If we discover that this is a problem, more granular checking will be - # necessary here. 
- if repository.missing_tool_dependencies: - if not repository_dependency_needed_only_for_compiling_tool_dependency( repository, tsr ): - missing_rd_tups.append( rd_tup ) - else: - missing_rd_tups.append( rd_tup ) - if installed_rd_tups or missing_rd_tups: - # Get the description from the metadata in case it has a value. - repository_dependencies = metadata.get( 'repository_dependencies', {} ) - description = repository_dependencies.get( 'description', None ) - # We need to add a root_key entry to one or both of installed_repository_dependencies dictionary and the - # missing_repository_dependencies dictionaries for proper display parsing. - root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed, - repository.name, - repository.owner, - repository.installed_changeset_revision, - prior_installation_required, - only_if_compiling_contained_td ) - if installed_rd_tups: - installed_repository_dependencies[ 'root_key' ] = root_key - installed_repository_dependencies[ root_key ] = installed_rd_tups - installed_repository_dependencies[ 'description' ] = description - if missing_rd_tups: - missing_repository_dependencies[ 'root_key' ] = root_key - missing_repository_dependencies[ root_key ] = missing_rd_tups - missing_repository_dependencies[ 'description' ] = description - return installed_repository_dependencies, missing_repository_dependencies - -def get_installed_and_missing_repository_dependencies_for_new_or_updated_install( trans, repo_info_tuple ): - """ - Parse the received repository_dependencies dictionary that is associated with a repository being - installed into Galaxy for the first time and attempt to determine repository dependencies that are - already installed and those that are not. 
- """ - missing_repository_dependencies = {} - installed_repository_dependencies = {} - missing_rd_tups = [] - installed_rd_tups = [] - description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ - suc.get_repo_info_tuple_contents( repo_info_tuple ) - if repository_dependencies: - description = repository_dependencies[ 'description' ] - root_key = repository_dependencies[ 'root_key' ] - # The repository dependencies container will include only the immediate repository dependencies of - # this repository, so the container will be only a single level in depth. - for key, rd_tups in repository_dependencies.items(): - if key in [ 'description', 'root_key' ]: - continue - for rd_tup in rd_tups: - tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ - common_util.parse_repository_dependency_tuple( rd_tup ) - # Updates to installed repository revisions may have occurred, so make sure to locate the - # appropriate repository revision if one exists. We need to create a temporary repo_info_tuple - # that includes the correct repository owner which we get from the current rd_tup. 
The current - # tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, - # repository_dependencies, installed_td ) - tmp_clone_url = common_util.generate_clone_url_from_repo_info_tup( trans, rd_tup ) - tmp_repo_info_tuple = ( None, tmp_clone_url, changeset_revision, None, owner, None, None ) - repository, installed_changeset_revision = suc.repository_was_previously_installed( trans.app, - tool_shed, - name, - tmp_repo_info_tuple, - from_tip=False ) - if repository: - new_rd_tup = [ tool_shed, - name, - owner, - changeset_revision, - prior_installation_required, - only_if_compiling_contained_td, - repository.id, - repository.status ] - if repository.status == trans.install_model.ToolShedRepository.installation_status.INSTALLED: - if new_rd_tup not in installed_rd_tups: - installed_rd_tups.append( new_rd_tup ) - else: - # A repository dependency that is not installed will not be considered missing if its value - # for only_if_compiling_contained_td is True This is because this type of repository dependency - # will only be considered at the time that the specified tool dependency is being installed, and - # even then only if the compiled binary of the tool dependency could not be installed due to the - # unsupported installation environment. - if not util.asbool( only_if_compiling_contained_td ): - if new_rd_tup not in missing_rd_tups: - missing_rd_tups.append( new_rd_tup ) - else: - new_rd_tup = [ tool_shed, - name, - owner, - changeset_revision, - prior_installation_required, - only_if_compiling_contained_td, - None, - 'Never installed' ] - if not util.asbool( only_if_compiling_contained_td ): - # A repository dependency that is not installed will not be considered missing if its value for - # only_if_compiling_contained_td is True - see above... 
- if new_rd_tup not in missing_rd_tups: - missing_rd_tups.append( new_rd_tup ) - if installed_rd_tups: - installed_repository_dependencies[ 'root_key' ] = root_key - installed_repository_dependencies[ root_key ] = installed_rd_tups - installed_repository_dependencies[ 'description' ] = description - if missing_rd_tups: - missing_repository_dependencies[ 'root_key' ] = root_key - missing_repository_dependencies[ root_key ] = missing_rd_tups - missing_repository_dependencies[ 'description' ] = description - return installed_repository_dependencies, missing_repository_dependencies - -def get_installed_and_missing_tool_dependencies_for_repository( trans, tool_dependencies_dict ): - """ - Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories - being installed into Galaxy. - """ - # FIXME: This implementation breaks when updates to a repository contain dependencies that result in - # multiple entries for a specific tool dependency. A scenario where this can happen is where 2 repositories - # define the same dependency internally (not using the complex repository dependency definition to a separate - # package repository approach). If 2 repositories contain the same tool_dependencies.xml file, one dependency - # will be lost since the values in these returned dictionaries are not lists. All tool dependency dictionaries - # should have lists as values. These scenarios are probably extreme corner cases, but still should be handled. - installed_tool_dependencies = {} - missing_tool_dependencies = {} - if tool_dependencies_dict: - # Make sure not to change anything in the received tool_dependencies_dict as that would be a bad side-effect! - tmp_tool_dependencies_dict = copy.deepcopy( tool_dependencies_dict ) - for td_key, val in tmp_tool_dependencies_dict.items(): - # Default the status to NEVER_INSTALLED. 
- tool_dependency_status = trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED - # Set environment tool dependencies are a list. - if td_key == 'set_environment': - new_val = [] - for requirement_dict in val: - # {'repository_name': 'xx', - # 'name': 'bwa', - # 'version': '0.5.9', - # 'repository_owner': 'yy', - # 'changeset_revision': 'zz', - # 'type': 'package'} - tool_dependency = \ - tool_dependency_util.get_tool_dependency_by_name_version_type( trans.app, - requirement_dict.get( 'name', None ), - requirement_dict.get( 'version', None ), - requirement_dict.get( 'type', 'package' ) ) - if tool_dependency: - tool_dependency_status = tool_dependency.status - requirement_dict[ 'status' ] = tool_dependency_status - new_val.append( requirement_dict ) - if tool_dependency_status in [ trans.install_model.ToolDependency.installation_status.INSTALLED ]: - if td_key in installed_tool_dependencies: - installed_tool_dependencies[ td_key ].extend( new_val ) - else: - installed_tool_dependencies[ td_key ] = new_val - else: - if td_key in missing_tool_dependencies: - missing_tool_dependencies[ td_key ].extend( new_val ) - else: - missing_tool_dependencies[ td_key ] = new_val - else: - # The val dictionary looks something like this: - # {'repository_name': 'xx', - # 'name': 'bwa', - # 'version': '0.5.9', - # 'repository_owner': 'yy', - # 'changeset_revision': 'zz', - # 'type': 'package'} - tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type( trans.app, - val.get( 'name', None ), - val.get( 'version', None ), - val.get( 'type', 'package' ) ) - if tool_dependency: - tool_dependency_status = tool_dependency.status - val[ 'status' ] = tool_dependency_status - if tool_dependency_status in [ trans.install_model.ToolDependency.installation_status.INSTALLED ]: - installed_tool_dependencies[ td_key ] = val - else: - missing_tool_dependencies[ td_key ] = val - return installed_tool_dependencies, missing_tool_dependencies - -def 
get_required_repo_info_dicts( app, tool_shed_url, repo_info_dicts ): - """ - Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of - them to the list. All repository_dependencies entries in each of the received repo_info_dicts includes - all required repositories, so only one pass through this method is required to retrieve all repository - dependencies. - """ - all_required_repo_info_dict = {} - all_repo_info_dicts = [] - if repo_info_dicts: - # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool - # shed to discover repository ids. - required_repository_tups = [] - for repo_info_dict in repo_info_dicts: - if repo_info_dict not in all_repo_info_dicts: - all_repo_info_dicts.append( repo_info_dict ) - for repository_name, repo_info_tup in repo_info_dict.items(): - description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ - suc.get_repo_info_tuple_contents( repo_info_tup ) - if repository_dependencies: - for key, val in repository_dependencies.items(): - if key in [ 'root_key', 'description' ]: - continue - repository_components_tuple = container_util.get_components_from_key( key ) - components_list = suc.extract_components_from_tuple( repository_components_tuple ) - # Skip listing a repository dependency if it is required only to compile a tool dependency - # defined for the dependent repository since in this case, the repository dependency is really - # a dependency of the dependent repository's contained tool dependency, and only if that - # tool dependency requires compilation. - # For backward compatibility to the 12/20/12 Galaxy release. 
- prior_installation_required = 'False' - only_if_compiling_contained_td = 'False' - if len( components_list ) == 4: - prior_installation_required = 'False' - only_if_compiling_contained_td = 'False' - elif len( components_list ) == 5: - prior_installation_required = components_list[ 4 ] - only_if_compiling_contained_td = 'False' - if not util.asbool( only_if_compiling_contained_td ): - if components_list not in required_repository_tups: - required_repository_tups.append( components_list ) - for components_list in val: - try: - only_if_compiling_contained_td = components_list[ 5 ] - except: - only_if_compiling_contained_td = 'False' - # Skip listing a repository dependency if it is required only to compile a tool dependency - # defined for the dependent repository (see above comment). - if not util.asbool( only_if_compiling_contained_td ): - if components_list not in required_repository_tups: - required_repository_tups.append( components_list ) - else: - # We have a single repository with no dependencies. - components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ] - required_repository_tups.append( components_list ) - if required_repository_tups: - # The value of required_repository_tups is a list of tuples, so we need to encode it. - encoded_required_repository_tups = [] - for required_repository_tup in required_repository_tups: - # Convert every item in required_repository_tup to a string. - required_repository_tup = [ str( item ) for item in required_repository_tup ] - encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) ) - encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups ) - encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str ) - if suc.is_tool_shed_client( app ): - # Handle secure / insecure Tool Shed URL protocol changes and port changes. 
- tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url ) - url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' ) - # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided - url = urllib2.urlopen( urllib2.Request( url ) ).geturl() - request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) ) - response = urllib2.urlopen( request ).read() - if response: - try: - required_repo_info_dict = json.loads( response ) - except Exception, e: - log.exception( e ) - return all_repo_info_dicts - required_repo_info_dicts = [] - for k, v in required_repo_info_dict.items(): - if k == 'repo_info_dicts': - encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ] - for encoded_dict_str in encoded_dict_strings: - decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str ) - required_repo_info_dicts.append( decoded_dict ) - else: - if k not in all_required_repo_info_dict: - all_required_repo_info_dict[ k ] = v - else: - if v and not all_required_repo_info_dict[ k ]: - all_required_repo_info_dict[ k ] = v - if required_repo_info_dicts: - for required_repo_info_dict in required_repo_info_dicts: - # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list - # of dictionaries, each of which has a single entry. We'll check keys here rather than - # the entire dictionary because a dictionary entry in all_repo_info_dicts will include - # lists of discovered repository dependencies, but these lists will be empty in the - # required_repo_info_dict since dependency discovery has not yet been performed for these - # dictionaries. 
- required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ] - all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ] - if required_repo_info_dict_key not in all_repo_info_dicts_keys: - all_repo_info_dicts.append( required_repo_info_dict ) - all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts - return all_required_repo_info_dict - -def repository_dependency_needed_only_for_compiling_tool_dependency( repository, repository_dependency ): - for rd_tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td: - tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup - # TODO: we may discover that we need to check more than just installed_changeset_revision and changeset_revision here, in which - # case we'll need to contact the tool shed to get the list of all possible changeset_revisions. - cleaned_tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( tool_shed ) - cleaned_repository_dependency_tool_shed = \ - common_util.remove_protocol_and_port_from_tool_shed_url( str( repository_dependency.tool_shed ) ) - if cleaned_repository_dependency_tool_shed == cleaned_tool_shed and \ - repository_dependency.name == name and \ - repository_dependency.owner == owner and \ - ( repository_dependency.installed_changeset_revision == changeset_revision or \ - repository_dependency.changeset_revision == changeset_revision ): - return True - return False diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/common_util.py --- a/lib/tool_shed/util/common_util.py +++ b/lib/tool_shed/util/common_util.py @@ -120,12 +120,12 @@ else: return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name ) -def generate_clone_url_from_repo_info_tup( trans, repo_info_tup ): +def generate_clone_url_from_repo_info_tup( app, repo_info_tup ): """Generate the URL for cloning a repository given a 
tuple of toolshed, name, owner, changeset_revision.""" # Example tuple: ['http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab', False] toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ parse_repository_dependency_tuple( repo_info_tup ) - tool_shed_url = get_tool_shed_url_from_tool_shed_registry( trans.app, toolshed ) + tool_shed_url = get_tool_shed_url_from_tool_shed_registry( app, toolshed ) # Don't include the changeset_revision in clone urls. return url_join( tool_shed_url, 'repos', owner, name ) diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/export_util.py --- a/lib/tool_shed/util/export_util.py +++ b/lib/tool_shed/util/export_util.py @@ -13,7 +13,6 @@ from galaxy.util.odict import odict from tool_shed.util import basic_util from tool_shed.util import commit_util -from tool_shed.util import common_install_util from tool_shed.util import common_util from tool_shed.util import encoding_util from tool_shed.util import hg_util @@ -262,7 +261,9 @@ str( repository.user.username ), repository_dependencies, None ) - all_required_repo_info_dict = common_install_util.get_required_repo_info_dicts( trans.app, tool_shed_url, [ repo_info_dict ] ) + all_required_repo_info_dict = repository_dependency_util.get_required_repo_info_dicts( trans.app, + tool_shed_url, + [ repo_info_dict ] ) all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] ) return all_repo_info_dicts diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/metadata_util.py --- a/lib/tool_shed/util/metadata_util.py +++ b/lib/tool_shed/util/metadata_util.py @@ -15,7 +15,6 @@ from tool_shed.repository_types.metadata import TipOnly from tool_shed.util import basic_util from tool_shed.util import common_util -from tool_shed.util import common_install_util from tool_shed.util import container_util from 
tool_shed.util import hg_util from tool_shed.util import readme_util @@ -1679,7 +1678,7 @@ readme_files_dict = None # Handle repository dependencies. installed_repository_dependencies, missing_repository_dependencies = \ - common_install_util.get_installed_and_missing_repository_dependencies( trans, repository ) + trans.app.installed_repository_manager.get_installed_and_missing_repository_dependencies( repository ) # Handle the current repository's tool dependencies. repository_tool_dependencies = metadata.get( 'tool_dependencies', None ) # Make sure to display missing tool dependencies as well. diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/repository_dependency_util.py --- a/lib/tool_shed/util/repository_dependency_util.py +++ b/lib/tool_shed/util/repository_dependency_util.py @@ -1,13 +1,14 @@ +import json import logging import os +import urllib +import urllib2 from galaxy.util import asbool -from galaxy.util import json from galaxy.util import listify import tool_shed.util.shed_util_common as suc from tool_shed.util import common_util -from tool_shed.util import common_install_util from tool_shed.util import container_util from tool_shed.util import encoding_util from tool_shed.util import hg_util @@ -127,7 +128,7 @@ # Discover all repository dependencies and retrieve information for installing them. Even if the user elected # to not install repository dependencies we have to make sure all repository dependency objects exist so that # the appropriate repository dependency relationships can be built. 
- all_required_repo_info_dict = common_install_util.get_required_repo_info_dicts( app, tool_shed_url, repo_info_dicts ) + all_required_repo_info_dict = get_required_repo_info_dicts( app, tool_shed_url, repo_info_dicts ) all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] ) if not all_repo_info_dicts: # No repository dependencies were discovered so process the received repositories. @@ -171,7 +172,7 @@ elif repository_db_record.status in [ install_model.ToolShedRepository.installation_status.DEACTIVATED ]: # The current tool shed repository is deactivated, so updating its database record is not necessary - just activate it. log.debug( "Reactivating deactivated tool_shed_repository '%s'." % str( repository_db_record.name ) ) - common_install_util.activate_repository( app, repository_db_record ) + app.installed_repository_manager.activate_repository( repository_db_record ) # No additional updates to the database record are necessary. can_update_db_record = False elif repository_db_record.status not in [ install_model.ToolShedRepository.installation_status.NEW ]: @@ -368,7 +369,7 @@ print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) ) return '' if len( raw_text ) > 2: - encoded_text = json.from_json_string( raw_text ) + encoded_text = json.loads( raw_text ) text = encoding_util.tool_shed_decode( encoded_text ) else: text = '' @@ -486,36 +487,118 @@ ( name, owner ) ) return dependency_tups -def get_repository_dependency_tups_for_installed_repository( app, repository, dependency_tups=None, status=None ): +def get_required_repo_info_dicts( app, tool_shed_url, repo_info_dicts ): """ - Return a list of of tuples defining tool_shed_repository objects (whose status can be anything) required by the - received repository. The returned list defines the entire repository dependency tree. This method is called - only from Galaxy. 
+ Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of + them to the list. All repository_dependency entries in each of the received repo_info_dicts includes + all required repositories, so only one pass through this method is required to retrieve all repository + dependencies. """ - if dependency_tups is None: - dependency_tups = [] - repository_tup = get_repository_tuple_for_installed_repository_manager( repository ) - for rrda in repository.required_repositories: - repository_dependency = rrda.repository_dependency - required_repository = repository_dependency.repository - if status is None or required_repository.status == status: - required_repository_tup = get_repository_tuple_for_installed_repository_manager( required_repository ) - if required_repository_tup == repository_tup: - # We have a circular repository dependency relationship, skip this entry. - continue - if required_repository_tup not in dependency_tups: - dependency_tups.append( required_repository_tup ) - return get_repository_dependency_tups_for_installed_repository( app, - required_repository, - dependency_tups=dependency_tups ) - return dependency_tups - -def get_repository_tuple_for_installed_repository_manager( repository ): - return ( str( repository.tool_shed ), - str( repository.name ), - str( repository.owner ), - str( repository.installed_changeset_revision ) ) - + all_required_repo_info_dict = {} + all_repo_info_dicts = [] + if repo_info_dicts: + # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool + # shed to discover repository ids. 
+ required_repository_tups = [] + for repo_info_dict in repo_info_dicts: + if repo_info_dict not in all_repo_info_dicts: + all_repo_info_dicts.append( repo_info_dict ) + for repository_name, repo_info_tup in repo_info_dict.items(): + description, \ + repository_clone_url, \ + changeset_revision, \ + ctx_rev, \ + repository_owner, \ + repository_dependencies, \ + tool_dependencies = \ + suc.get_repo_info_tuple_contents( repo_info_tup ) + if repository_dependencies: + for key, val in repository_dependencies.items(): + if key in [ 'root_key', 'description' ]: + continue + repository_components_tuple = container_util.get_components_from_key( key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) + # Skip listing a repository dependency if it is required only to compile a tool dependency + # defined for the dependent repository since in this case, the repository dependency is really + # a dependency of the dependent repository's contained tool dependency, and only if that + # tool dependency requires compilation. + # For backward compatibility to the 12/20/12 Galaxy release. + prior_installation_required = 'False' + only_if_compiling_contained_td = 'False' + if len( components_list ) == 4: + prior_installation_required = 'False' + only_if_compiling_contained_td = 'False' + elif len( components_list ) == 5: + prior_installation_required = components_list[ 4 ] + only_if_compiling_contained_td = 'False' + if not asbool( only_if_compiling_contained_td ): + if components_list not in required_repository_tups: + required_repository_tups.append( components_list ) + for components_list in val: + try: + only_if_compiling_contained_td = components_list[ 5 ] + except: + only_if_compiling_contained_td = 'False' + # Skip listing a repository dependency if it is required only to compile a tool dependency + # defined for the dependent repository (see above comment). 
+ if not asbool( only_if_compiling_contained_td ): + if components_list not in required_repository_tups: + required_repository_tups.append( components_list ) + else: + # We have a single repository with no dependencies. + components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ] + required_repository_tups.append( components_list ) + if required_repository_tups: + # The value of required_repository_tups is a list of tuples, so we need to encode it. + encoded_required_repository_tups = [] + for required_repository_tup in required_repository_tups: + # Convert every item in required_repository_tup to a string. + required_repository_tup = [ str( item ) for item in required_repository_tup ] + encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) ) + encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups ) + encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str ) + if suc.is_tool_shed_client( app ): + # Handle secure / insecure Tool Shed URL protocol changes and port changes. 
+ tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url ) + url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' ) + # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided + url = urllib2.urlopen( urllib2.Request( url ) ).geturl() + request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) ) + response = urllib2.urlopen( request ).read() + if response: + try: + required_repo_info_dict = json.loads( response ) + except Exception, e: + log.exception( e ) + return all_repo_info_dicts + required_repo_info_dicts = [] + for k, v in required_repo_info_dict.items(): + if k == 'repo_info_dicts': + encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ] + for encoded_dict_str in encoded_dict_strings: + decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str ) + required_repo_info_dicts.append( decoded_dict ) + else: + if k not in all_required_repo_info_dict: + all_required_repo_info_dict[ k ] = v + else: + if v and not all_required_repo_info_dict[ k ]: + all_required_repo_info_dict[ k ] = v + if required_repo_info_dicts: + for required_repo_info_dict in required_repo_info_dicts: + # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list + # of dictionaries, each of which has a single entry. We'll check keys here rather than + # the entire dictionary because a dictionary entry in all_repo_info_dicts will include + # lists of discovered repository dependencies, but these lists will be empty in the + # required_repo_info_dict since dependency discovery has not yet been performed for these + # dictionaries. 
+ required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ] + all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ] + if required_repo_info_dict_key not in all_repo_info_dicts_keys: + all_repo_info_dicts.append( required_repo_info_dict ) + all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts + return all_required_repo_info_dict + def get_updated_changeset_revisions_for_repository_dependencies( app, key_rd_dicts ): updated_key_rd_dicts = [] for key_rd_dict in key_rd_dicts: diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/tool_dependency_util.py --- a/lib/tool_shed/util/tool_dependency_util.py +++ b/lib/tool_shed/util/tool_dependency_util.py @@ -300,31 +300,6 @@ env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' ) return env_sh_file_path -def get_runtime_dependent_tool_dependency_tuples( app, tool_dependency, status=None ): - """ - Return the list of tool dependency objects that require the received tool dependency at run time. The returned - list will be filtered by the received status if it is not None. This method is called only from Galaxy. 
- """ - runtime_dependent_tool_dependency_tups = [] - required_env_shell_file_path = tool_dependency.get_env_shell_file_path( app ) - if required_env_shell_file_path: - required_env_shell_file_path = os.path.abspath( required_env_shell_file_path ) - if required_env_shell_file_path is not None: - for td in app.install_model.context.query( app.install_model.ToolDependency ): - if status is None or td.status == status: - env_shell_file_path = td.get_env_shell_file_path( app ) - if env_shell_file_path is not None: - try: - contents = open( env_shell_file_path, 'r' ).read() - except Exception, e: - contents = None - log.debug( 'Error reading file %s, so cannot determine if package %s requires package %s at run time: %s' % \ - ( str( env_shell_file_path ), str( td.name ), str( tool_dependency.name ), str( e ) ) ) - if contents is not None and contents.find( required_env_shell_file_path ) >= 0: - td_tuple = get_tool_dependency_tuple_for_installed_repository_manager( td ) - runtime_dependent_tool_dependency_tups.append( td_tuple ) - return runtime_dependent_tool_dependency_tups - def get_tool_dependency( trans, id ): """Get a tool_dependency from the database via id""" return trans.install_model.context.query( trans.install_model.ToolDependency ).get( trans.security.decode_id( id ) ) @@ -389,13 +364,6 @@ repository_name, repository_changeset_revision ) ) -def get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ): - if tool_dependency.type is None: - type = None - else: - type = str( tool_dependency.type ) - return ( tool_dependency.tool_shed_repository_id, str( tool_dependency.name ), str( tool_dependency.version ), type ) - def handle_tool_dependency_installation_error( app, tool_dependency, error_message, remove_installation_path=False ): # Since there was an installation error, remove the installation directory because the install_package method uses # this: "if os.path.exists( install_dir ):". 
Setting remove_installation_path to True should rarely occur. It is diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako --- a/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako +++ b/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako @@ -108,12 +108,9 @@ </div><div style="clear: both"></div><br/> - <% - from tool_shed.util.repository_dependency_util import get_repository_tuple_for_installed_repository_manager - from tool_shed.util.tool_dependency_util import get_tool_dependency_tuple_for_installed_repository_manager - + <% irm = trans.app.installed_repository_manager - repository_tup = get_repository_tuple_for_installed_repository_manager( repository ) + repository_tup = irm.get_repository_tuple_for_installed_repository_manager( repository ) # Get installed repositories that this repository requires. installed_dependent_repositories = [] Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
participants (1)
-
commits-noreply@bitbucket.org