1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/0a428afbc235/

Changeset: 0a428afbc235
User: greg
Date: 2014-06-16 22:57:24
Summary: Rename install_manager.InstallManager to install_manager.InstallToolDependencyManager, add a new InstallRepositoryManager class to the same module, and move the appropriate utility functions into it. Add a new repair_repository_manager.RepairRepositoryManager class and move the appropriate utility functions into it. Change a number of utility functions that took trans to now take app.
Affected #: 18 files

diff -r 7f506e778275715639d8f5d37041712be9662b39 -r 0a428afbc23538b6f8d0a8d9dcc29b9a576c1ddc lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py --- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py +++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py @@ -10,7 +10,8 @@ from galaxy.util import json from galaxy.web.base.controller import BaseAPIController -from tool_shed.galaxy_install import repository_util +from tool_shed.galaxy_install.install_manager import InstallRepositoryManager +from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager from tool_shed.util import common_util from tool_shed.util import encoding_util from tool_shed.util import hg_util @@ -235,7 +236,13 @@ # Get the information about the repository to be installed from the payload. tool_shed_url, name, owner, changeset_revision = self.__parse_repository_from_payload( payload, include_changeset=True ) self.__ensure_can_install_repos( trans ) - installed_tool_shed_repositories = repository_util.install( trans.app, tool_shed_url, name, owner, changeset_revision, payload ) + install_repository_manager = InstallRepositoryManager( trans.app ) + installed_tool_shed_repositories = install_repository_manager.install( trans.app, + tool_shed_url, + name, + owner, + changeset_revision, + payload ) def to_dict( tool_shed_repository ): tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans, tool_shed_repository ) ) @@ -341,8 +348,13 @@ # Get the information about the repository to be installed from the payload. tool_shed_url, name, owner, changeset_revision = self.__parse_repository_from_payload( payload, include_changeset=True ) tool_shed_repositories = [] - tool_shed_repository = suc.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision ) - repair_dict = repository_util.get_repair_dict( trans, tool_shed_repository ) + tool_shed_repository = suc.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, + tool_shed_url, + name, + owner, + changeset_revision ) + rrm = RepairRepositoryManager( trans.app ) + repair_dict = rrm.get_repair_dict( tool_shed_repository ) ordered_tsr_ids = repair_dict.get( 'ordered_tsr_ids', [] ) ordered_repo_info_dicts = repair_dict.get( 'ordered_repo_info_dicts', [] ) if ordered_tsr_ids and ordered_repo_info_dicts: @@ -350,9 +362,8 @@ repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) ) repo_info_dict = ordered_repo_info_dicts[ index ] # TODO: handle errors in repair_dict. 
- repair_dict = repository_util.repair_tool_shed_repository( trans, - repository, - encoding_util.tool_shed_encode( repo_info_dict ) ) + repair_dict = rrm.repair_tool_shed_repository( repository, + encoding_util.tool_shed_encode( repo_info_dict ) ) repository_dict = repository.to_dict( value_mapper=self.__get_value_mapper( trans, repository ) ) repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', action='show', diff -r 7f506e778275715639d8f5d37041712be9662b39 -r 0a428afbc23538b6f8d0a8d9dcc29b9a576c1ddc lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -29,6 +29,8 @@ from tool_shed.util import workflow_util from tool_shed.util import xml_util from tool_shed.galaxy_install import repository_util +from tool_shed.galaxy_install import install_manager +from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids log = logging.getLogger( __name__ ) @@ -473,12 +475,11 @@ # Get the tool_dependencies.xml file from the repository. tool_dependencies_config = hg_util.get_config_from_disk( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, tool_shed_repository.repo_path( trans.app ) ) - installed_tool_dependencies = \ - common_install_util.install_specified_tool_dependencies( app=trans.app, - tool_shed_repository=tool_shed_repository, - tool_dependencies_config=tool_dependencies_config, - tool_dependencies=tool_dependencies, - from_tool_migration_manager=False ) + itdm = install_manager.InstallToolDependencyManager( trans.app ) + installed_tool_dependencies = itdm.install_specified_tool_dependencies( tool_shed_repository=tool_shed_repository, + tool_dependencies_config=tool_dependencies_config, + tool_dependencies=tool_dependencies, + from_tool_migration_manager=False ) for installed_tool_dependency in installed_tool_dependencies: if installed_tool_dependency.status == trans.app.install_model.ToolDependency.installation_status.ERROR: text = util.unicodify( installed_tool_dependency.error_message ) @@ -587,7 +588,7 @@ updated_metadata = encoding_util.tool_shed_decode( encoded_updated_metadata ) else: updated_metadata = None - repository = repository_util.update_repository_record( trans, + repository = repository_util.update_repository_record( trans.app, repository=repository, updated_metadata_dict=updated_metadata, updated_changeset_revision=updating_to_changeset_revision, @@ -635,17 +636,17 @@ # There must be a one-to-one mapping between items in the 3 lists: tool_shed_repositories, tool_panel_section_keys, repo_info_dicts. 
tool_panel_section_keys = util.listify( kwd[ 'tool_panel_section_keys' ] ) repo_info_dicts = util.listify( kwd[ 'repo_info_dicts' ] ) + irm = install_manager.InstallRepositoryManager( trans.app ) for index, tool_shed_repository in enumerate( tool_shed_repositories ): repo_info_dict = repo_info_dicts[ index ] tool_panel_section_key = tool_panel_section_keys[ index ] - repository_util.install_tool_shed_repository( trans.app, - tool_shed_repository, - repo_info_dict, - tool_panel_section_key, - shed_tool_conf, - tool_path, - install_tool_dependencies, - reinstalling=reinstalling ) + irm.install_tool_shed_repository( tool_shed_repository, + repo_info_dict, + tool_panel_section_key, + shed_tool_conf, + tool_path, + install_tool_dependencies, + reinstalling=reinstalling ) tsr_ids_for_monitoring = [ trans.security.encode_id( tsr.id ) for tsr in tool_shed_repositories ] return trans.response.send_redirect( web.url_for( controller='admin_toolshed', action='monitor_repository_installation', @@ -688,6 +689,7 @@ kwd[ 'message' ] = 'All selected tool shed repositories are already uninstalled.' kwd[ 'status' ] = 'error' elif operation == "install": + irm = install_manager.InstallRepositoryManager( trans.app ) reinstalling = util.string_as_bool( kwd.get( 'reinstalling', False ) ) encoded_kwd = kwd[ 'encoded_kwd' ] decoded_kwd = encoding_util.tool_shed_decode( encoded_kwd ) @@ -701,10 +703,9 @@ # dependent repository, so we'll order the list of tsr_ids to ensure all repositories install in the # required order. ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys = \ - repository_util.order_components_for_installation( trans.app, - tsr_ids, - repo_info_dicts, - tool_panel_section_keys=tool_panel_section_keys ) + irm.order_components_for_installation( tsr_ids, + repo_info_dicts, + tool_panel_section_keys=tool_panel_section_keys ) for tsr_id in ordered_tsr_ids: repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ) \ .get( trans.security.decode_id( tsr_id ) ) @@ -712,10 +713,10 @@ trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]: repositories_for_installation.append( repository ) repo_info_dict, tool_panel_section_key = \ - repository_util.get_repository_components_for_installation( tsr_id, - ordered_tsr_ids, - ordered_repo_info_dicts, - ordered_tool_panel_section_keys ) + irm.get_repository_components_for_installation( tsr_id, + ordered_tsr_ids, + ordered_repo_info_dicts, + ordered_tool_panel_section_keys ) filtered_repo_info_dicts.append( repo_info_dict ) filtered_tool_panel_section_keys.append( tool_panel_section_key ) if repositories_for_installation: @@ -1050,6 +1051,7 @@ includes_tool_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_tool_dependencies', False ) ) encoded_repo_info_dicts = util.listify( repo_information_dict.get( 'repo_info_dicts', [] ) ) repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ] + irm = install_manager.InstallRepositoryManager( trans.app ) if ( not includes_tools_for_display_in_tool_panel and kwd.get( 'select_shed_tool_panel_config_button', False ) ) or \ ( includes_tools_for_display_in_tool_panel and kwd.get( 'select_tool_panel_section_button', False ) ): if updating: @@ -1060,7 +1062,7 @@ decoded_updated_metadata = encoding_util.tool_shed_decode( encoded_updated_metadata ) # Now that the user has decided whether they will handle dependencies, we can update # the repository to the latest 
revision. - repository = repository_util.update_repository_record( trans, + repository = repository_util.update_repository_record( trans.app, repository=repository, updated_metadata_dict=decoded_updated_metadata, updated_changeset_revision=updating_to_changeset_revision, @@ -1098,7 +1100,7 @@ tool_path=tool_path, tool_shed_url=tool_shed_url ) encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \ - repository_util.initiate_repository_installation( trans.app, installation_dict ) + irm.initiate_repository_installation( installation_dict ) return trans.fill_template( 'admin/tool_shed_repository/initiate_repository_installation.mako', encoded_kwd=encoded_kwd, query=query, @@ -1139,7 +1141,7 @@ missing_tool_dependencies = dependencies_for_repository_dict.get( 'missing_tool_dependencies', None ) name = dependencies_for_repository_dict.get( 'name', None ) repository_owner = dependencies_for_repository_dict.get( 'repository_owner', None ) - readme_files_dict = readme_util.get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict ) + readme_files_dict = readme_util.get_readme_files_dict_for_display( trans.app, tool_shed_url, repo_info_dict ) # We're handling 1 of 3 scenarios here: (1) we're installing a tool shed repository for the first time, so we've # retrieved the list of installed and missing repository dependencies from the database (2) we're handling the # scenario where an error occurred during the installation process, so we have a tool_shed_repository record in @@ -1150,15 +1152,14 @@ # defined repository (and possibly tool) dependencies. In this case, merging will result in newly defined # dependencies to be lost. We pass the updating parameter to make sure merging occurs only when appropriate. containers_dict = \ - repository_util.populate_containers_dict_for_new_install( trans=trans, - tool_shed_url=tool_shed_url, - tool_path=tool_path, - readme_files_dict=readme_files_dict, - installed_repository_dependencies=installed_repository_dependencies, - missing_repository_dependencies=missing_repository_dependencies, - installed_tool_dependencies=installed_tool_dependencies, - missing_tool_dependencies=missing_tool_dependencies, - updating=updating ) + irm.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, + tool_path=tool_path, + readme_files_dict=readme_files_dict, + installed_repository_dependencies=installed_repository_dependencies, + missing_repository_dependencies=missing_repository_dependencies, + installed_tool_dependencies=installed_tool_dependencies, + missing_tool_dependencies=missing_tool_dependencies, + updating=updating ) else: # We're installing a list of repositories, each of which may have tool dependencies or repository dependencies. 
containers_dicts = [] @@ -1185,18 +1186,17 @@ name = dependencies_for_repository_dict.get( 'name', None ) repository_owner = dependencies_for_repository_dict.get( 'repository_owner', None ) containers_dict = \ - repository_util.populate_containers_dict_for_new_install( trans=trans, - tool_shed_url=tool_shed_url, - tool_path=tool_path, - readme_files_dict=None, - installed_repository_dependencies=installed_repository_dependencies, - missing_repository_dependencies=missing_repository_dependencies, - installed_tool_dependencies=installed_tool_dependencies, - missing_tool_dependencies=missing_tool_dependencies, - updating=updating ) + irm.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, + tool_path=tool_path, + readme_files_dict=None, + installed_repository_dependencies=installed_repository_dependencies, + missing_repository_dependencies=missing_repository_dependencies, + installed_tool_dependencies=installed_tool_dependencies, + missing_tool_dependencies=missing_tool_dependencies, + updating=updating ) containers_dicts.append( containers_dict ) # Merge all containers into a single container. - containers_dict = repository_util.merge_containers_dicts_for_new_install( containers_dicts ) + containers_dict = irm.merge_containers_dicts_for_new_install( containers_dicts ) # Handle tool dependencies check box. if trans.app.config.tool_dependency_dir is None: if includes_tool_dependencies: @@ -1271,7 +1271,8 @@ repository = suc.get_installed_tool_shed_repository( trans.app, repository_id ) if repository: if kwd.get( 'purge_repository', False ): - purge_status, purge_message = repository_util.purge_repository( trans.app, repository ) + irm = install_manager.InstallRepositoryManager( trans.app ) + purge_status, purge_message = irm.purge_repository( trans.app, repository ) if purge_status == 'ok': new_kwd[ 'status' ] = "done" else: @@ -1358,7 +1359,7 @@ # Entering this else block occurs only if the tool_shed_repository does not include any valid tools. 
if install_repository_dependencies: repository_dependencies = \ - repository_dependency_util.get_repository_dependencies_for_installed_tool_shed_repository( trans, + repository_dependency_util.get_repository_dependencies_for_installed_tool_shed_repository( trans.app, tool_shed_repository ) else: repository_dependencies = None @@ -1366,7 +1367,7 @@ tool_dependencies = metadata.get( 'tool_dependencies', None ) else: tool_dependencies = None - repo_info_dict = repository_util.create_repo_info_dict( trans=trans, + repo_info_dict = repository_util.create_repo_info_dict( app=trans.app, repository_clone_url=repository_clone_url, changeset_revision=tool_shed_repository.changeset_revision, ctx_rev=ctx_rev, @@ -1443,6 +1444,7 @@ message=message, status=status ) ) tool_shed_repository = suc.get_installed_tool_shed_repository( trans.app, repository_id ) + rrm = RepairRepositoryManager( trans.app ) if kwd.get( 'repair_repository_button', False ): encoded_repair_dict = kwd.get( 'repair_dict', None ) if encoded_repair_dict: @@ -1450,7 +1452,7 @@ else: repair_dict = None if not repair_dict: - repair_dict = repository_util.get_repair_dict( trans, tool_shed_repository ) + repair_dict = rrm.get_repair_dict( tool_shed_repository ) ordered_tsr_ids = repair_dict.get( 'ordered_tsr_ids', [] ) ordered_repo_info_dicts = repair_dict.get( 'ordered_repo_info_dicts', [] ) if ordered_tsr_ids and ordered_repo_info_dicts: @@ -1458,9 +1460,9 @@ for tsr_id in ordered_tsr_ids: repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) ) repositories_for_repair.append( repository ) - return self.repair_tool_shed_repositories( trans, repositories_for_repair, ordered_repo_info_dicts ) + return self.repair_tool_shed_repositories( trans, rrm, repositories_for_repair, ordered_repo_info_dicts ) tool_shed_repository = suc.get_installed_tool_shed_repository( trans.app, repository_id ) - repair_dict = repository_util.get_repair_dict( trans, tool_shed_repository ) + repair_dict = rrm.get_repair_dict( tool_shed_repository ) encoded_repair_dict = encoding_util.tool_shed_encode( repair_dict ) ordered_tsr_ids = repair_dict.get( 'ordered_tsr_ids', [] ) ordered_repo_info_dicts = repair_dict.get( 'ordered_repo_info_dicts', [] ) @@ -1473,14 +1475,13 @@ @web.expose @web.require_admin - def repair_tool_shed_repositories( self, trans, tool_shed_repositories, repo_info_dicts, **kwd ): + def repair_tool_shed_repositories( self, trans, repair_repository_manager, tool_shed_repositories, repo_info_dicts, **kwd ): """Repair specified tool shed repositories.""" # The received lists of tool_shed_repositories and repo_info_dicts are ordered. 
for index, tool_shed_repository in enumerate( tool_shed_repositories ): repo_info_dict = repo_info_dicts[ index ] - repair_dict = repository_util.repair_tool_shed_repository( trans, - tool_shed_repository, - encoding_util.tool_shed_encode( repo_info_dict ) ) + repair_dict = repair_repository_manager.repair_tool_shed_repository( tool_shed_repository, + encoding_util.tool_shed_encode( repo_info_dict ) ) tsr_ids_for_monitoring = [ trans.security.encode_id( tsr.id ) for tsr in tool_shed_repositories ] return trans.response.send_redirect( web.url_for( controller='admin_toolshed', action='monitor_repository_installation', @@ -1578,9 +1579,9 @@ readme_files_dict = json.from_json_string( raw_text ) tool_dependencies = metadata.get( 'tool_dependencies', None ) repository_dependencies = \ - repository_dependency_util.get_repository_dependencies_for_installed_tool_shed_repository( trans, + repository_dependency_util.get_repository_dependencies_for_installed_tool_shed_repository( trans.app, tool_shed_repository ) - repo_info_dict = repository_util.create_repo_info_dict( trans=trans, + repo_info_dict = repository_util.create_repo_info_dict( app=trans.app, repository_clone_url=repository_clone_url, changeset_revision=tool_shed_repository.installed_changeset_revision, ctx_rev=tool_shed_repository.ctx_rev, @@ -1643,19 +1644,19 @@ original_section_name = '' tool_panel_section_select_field = None shed_tool_conf_select_field = tool_util.build_shed_tool_conf_select_field( trans ) + irm = install_manager.InstallRepositoryManager( trans.app ) containers_dict = \ - repository_util.populate_containers_dict_for_new_install( trans=trans, - tool_shed_url=tool_shed_url, - tool_path=tool_path, - readme_files_dict=readme_files_dict, - installed_repository_dependencies=installed_repository_dependencies, - missing_repository_dependencies=missing_repository_dependencies, - installed_tool_dependencies=installed_tool_dependencies, - missing_tool_dependencies=missing_tool_dependencies, - updating=False ) + irm.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, + tool_path=tool_path, + readme_files_dict=readme_files_dict, + installed_repository_dependencies=installed_repository_dependencies, + missing_repository_dependencies=missing_repository_dependencies, + installed_tool_dependencies=installed_tool_dependencies, + missing_tool_dependencies=missing_tool_dependencies, + updating=False ) # Since we're reinstalling we'll merge the list of missing repository dependencies into the list of installed repository dependencies since each displayed # repository dependency will display a status, whether installed or missing. - containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict ) + containers_dict = irm.merge_missing_repository_dependencies_to_installed_container( containers_dict ) # Handle repository dependencies check box. install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True ) # Handle tool dependencies check box. 
@@ -1748,13 +1749,13 @@ """An error occurred while cloning the repository, so reset everything necessary to enable another attempt.""" repository = suc.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] ) if kwd.get( 'reset_repository', False ): - repository_util.set_repository_attributes( trans.app, - repository, - status=trans.install_model.ToolShedRepository.installation_status.NEW, - error_message=None, - deleted=False, - uninstalled=False, - remove_from_disk=True ) + suc.set_repository_attributes( trans.app, + repository, + status=trans.install_model.ToolShedRepository.installation_status.NEW, + error_message=None, + deleted=False, + uninstalled=False, + remove_from_disk=True ) new_kwd = {} new_kwd[ 'message' ] = "You can now attempt to install the repository named <b>%s</b> again." % str( repository.name ) new_kwd[ 'status' ] = "done" @@ -1908,7 +1909,7 @@ repo_path=repo_files_dir, create=False ) repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name ) - repository_util.pull_repository( repo, repository_clone_url, latest_ctx_rev ) + hg_util.pull_repository( repo, repository_clone_url, latest_ctx_rev ) hg_util.update_repository( repo, latest_ctx_rev ) # Remove old Data Manager entries if repository.includes_data_managers: @@ -2023,7 +2024,7 @@ return self.install_tool_dependencies_with_update( trans, **new_kwd ) # Updates received did not include any newly defined repository dependencies or newly defined # tool dependencies that need to be installed. - repository = repository_util.update_repository_record( trans, + repository = repository_util.update_repository_record( trans.app, repository=repository, updated_metadata_dict=metadata_dict, updated_changeset_revision=latest_changeset_revision, diff -r 7f506e778275715639d8f5d37041712be9662b39 -r 0a428afbc23538b6f8d0a8d9dcc29b9a576c1ddc lib/galaxy/webapps/tool_shed/controllers/repository.py --- a/lib/galaxy/webapps/tool_shed/controllers/repository.py +++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py @@ -723,7 +723,10 @@ # Update repository files for browsing. 
hg_util.update_repository( repo ) changeset_revision = repository.tip( trans.app ) - metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, id, changeset_revision, metadata_only=True ) + metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app, + id, + changeset_revision, + metadata_only=True ) repository_type_select_field = rt_util.build_repository_type_select_field( trans, repository=repository ) return trans.fill_template( '/webapps/tool_shed/repository/browse_repository.mako', repository=repository, @@ -970,7 +973,7 @@ message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) repository = suc.get_repository_in_tool_shed( trans.app, id ) - metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, + metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app, id, repository.tip( trans.app ), metadata_only=True ) @@ -1106,7 +1109,7 @@ if message: status = 'error' tool_state = tool_util.new_state( trans, tool, invalid=False ) - metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, + metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app, repository_id, changeset_revision, metadata_only=True ) @@ -1195,7 +1198,7 @@ metadata = repository_metadata.metadata # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. repository_dependencies = \ - repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=trans.app, repository=repository, repository_metadata=repository_metadata, toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), @@ -1206,7 +1209,7 @@ # Only display repository dependencies if they exist. 
exclude = [ 'datatypes', 'invalid_repository_dependencies', 'invalid_tool_dependencies', 'invalid_tools', 'readme_files', 'tool_dependencies', 'tools', 'tool_test_results', 'workflows', 'data_manager' ] - containers_dict = container_util.build_repository_containers_for_tool_shed( trans, + containers_dict = container_util.build_repository_containers_for_tool_shed( trans.app, repository, changeset_revision, repository_dependencies, @@ -1691,7 +1694,7 @@ if repository_metadata: metadata = repository_metadata.metadata if metadata: - return readme_util.build_readme_files_dict( trans, + return readme_util.build_readme_files_dict( trans.app, repository, changeset_revision, repository_metadata.metadata ) @@ -1720,7 +1723,7 @@ metadata = repository_metadata.metadata if metadata: repository_dependencies = \ - repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=trans.app, repository=repository, repository_metadata=repository_metadata, toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), @@ -1889,7 +1892,7 @@ after_changeset_revision=changeset_revision ) repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision ) ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision ) - repo_info_dict = repository_util.create_repo_info_dict( trans=trans, + repo_info_dict = repository_util.create_repo_info_dict( app=trans.app, repository_clone_url=repository_clone_url, changeset_revision=changeset_revision, ctx_rev=str( ctx.rev() ), @@ -1921,7 +1924,7 @@ break if 'workflows' in metadata: includes_workflows = True - readme_files_dict = readme_util.build_readme_files_dict( trans, repository, changeset_revision, metadata ) + readme_files_dict = readme_util.build_readme_files_dict( trans.app, repository, changeset_revision, metadata ) # See if the repo_info_dict was populated with repository_dependencies or tool_dependencies. has_repository_dependencies = False has_repository_dependencies_only_if_compiling_contained_td = False @@ -2349,7 +2352,7 @@ metadata = repository_metadata.metadata # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. repository_dependencies = \ - repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=trans.app, repository=repository, repository_metadata=repository_metadata, toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), @@ -2389,7 +2392,7 @@ skip_tool_tests_check_box = CheckboxField( 'skip_tool_tests', checked=skip_tool_tests_checked ) categories = suc.get_categories( trans.app ) selected_categories = [ rca.category_id for rca in repository.categories ] - containers_dict = container_util.build_repository_containers_for_tool_shed( trans, + containers_dict = container_util.build_repository_containers_for_tool_shed( trans.app, repository, changeset_revision, repository_dependencies, @@ -2547,7 +2550,7 @@ # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. 
toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' ) repository_dependencies = \ - repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=trans.app, repository=repository, repository_metadata=repository_metadata, toolshed_base_url=toolshed_base_url, @@ -2581,7 +2584,11 @@ selected_value=changeset_revision, add_id_to_name=False, downloadable=False ) - containers_dict = container_util.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata ) + containers_dict = container_util.build_repository_containers_for_tool_shed( trans.app, + repository, + changeset_revision, + repository_dependencies, + repository_metadata ) return trans.fill_template( '/webapps/tool_shed/repository/preview_tools_in_changeset.mako', repository=repository, containers_dict=containers_dict, @@ -2661,7 +2668,7 @@ avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model ) display_reviews = util.string_as_bool( kwd.get( 'display_reviews', False ) ) rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model ) - metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, + metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app, id, changeset_revision, metadata_only=True ) @@ -2770,7 +2777,7 @@ status = "error" repository_type_select_field = rt_util.build_repository_type_select_field( trans, repository=repository ) changeset_revision = repository.tip( trans.app ) - metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, + metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app, id, changeset_revision, metadata_only=True ) @@ -2941,7 +2948,7 @@ repository = None if repository: repository_id = trans.security.encode_id( repository.id ) - repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, + repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app, repository_id, changeset_revision ) if not repository_metadata: @@ -2950,7 +2957,7 @@ upper_bound_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision ) if upper_bound_changeset_revision: changeset_revision = upper_bound_changeset_revision - repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, + repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app, repository_id, changeset_revision ) if repository_metadata: @@ -3098,7 +3105,7 @@ 'has_metadata' : has_metadata } # Make sure we'll view latest changeset first. 
changesets.insert( 0, change_dict ) - metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, + metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app, id, repository.tip( trans.app ), metadata_only=True ) @@ -3142,7 +3149,10 @@ diffs.append( basic_util.to_html_string( diff ) ) modified, added, removed, deleted, unknown, ignored, clean = repo.status( node1=ctx_parent.node(), node2=ctx.node() ) anchors = modified + added + removed + deleted + unknown + ignored + clean - metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, id, ctx_str, metadata_only=True ) + metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app, + id, + ctx_str, + metadata_only=True ) # For rendering the prev button. if ctx_parent: ctx_parent_date = hg_util.get_readable_ctx_date( ctx_parent ) @@ -3243,7 +3253,7 @@ metadata = repository_metadata.metadata # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. repository_dependencies = \ - repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=trans.app, repository=repository, repository_metadata=repository_metadata, toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), @@ -3265,7 +3275,7 @@ else: message += malicious_error status = 'error' - containers_dict = container_util.build_repository_containers_for_tool_shed( trans, + containers_dict = container_util.build_repository_containers_for_tool_shed( trans.app, repository, changeset_revision, repository_dependencies, diff -r 7f506e778275715639d8f5d37041712be9662b39 -r 0a428afbc23538b6f8d0a8d9dcc29b9a576c1ddc lib/tool_shed/galaxy_install/install_manager.py --- a/lib/tool_shed/galaxy_install/install_manager.py +++ b/lib/tool_shed/galaxy_install/install_manager.py @@ -1,8 +1,12 @@ +import json import logging import os import sys +import tempfile +import threading import traceback +from galaxy import exceptions from galaxy import eggs eggs.require( 'paramiko' ) @@ -11,8 +15,19 @@ from fabric.api import lcd +from galaxy.model.orm import or_ + +from tool_shed.util import basic_util +from tool_shed.util import common_util +from tool_shed.util import container_util +from tool_shed.util import encoding_util +from tool_shed.util import hg_util +from tool_shed.util import shed_util_common as suc from tool_shed.util import tool_dependency_util +from tool_shed.util import tool_util +from tool_shed.util import xml_util +from tool_shed.galaxy_install import repository_util from tool_shed.galaxy_install.tool_dependencies.recipe.env_file_builder import EnvFileBuilder from tool_shed.galaxy_install.tool_dependencies.recipe.install_environment import InstallEnvironment from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import StepManager @@ -20,22 +35,24 @@ log = logging.getLogger( __name__ ) -INSTALL_ACTIONS = [ 'download_binary', 'download_by_url', 'download_file', 'setup_perl_environmnet', - 'setup_r_environmnet', 'setup_ruby_environmnet', 'shell_command' ] - -class InstallManager( object ): +class InstallToolDependencyManager( object ): + + def __init__( self, app ): + self.app = app + self.INSTALL_ACTIONS = [ 'download_binary', 'download_by_url', 'download_file', 'setup_perl_environmnet', + 'setup_r_environmnet', 'setup_ruby_environmnet', 'shell_command' ] def 
format_traceback( self ): ex_type, ex, tb = sys.exc_info() return ''.join( traceback.format_tb( tb ) ) - def get_tool_shed_repository_install_dir( self, app, tool_shed_repository ): - return os.path.abspath( tool_shed_repository.repo_files_directory( app ) ) + def get_tool_shed_repository_install_dir( self, tool_shed_repository ): + return os.path.abspath( tool_shed_repository.repo_files_directory( self.app ) ) - def install_and_build_package( self, app, tool_shed_repository, tool_dependency, actions_dict ): + def install_and_build_package( self, tool_shed_repository, tool_dependency, actions_dict ): """Install a Galaxy tool dependency package either via a url or a mercurial or git clone command.""" - tool_shed_repository_install_dir = self.get_tool_shed_repository_install_dir( app, tool_shed_repository ) + tool_shed_repository_install_dir = self.get_tool_shed_repository_install_dir( tool_shed_repository ) install_dir = actions_dict[ 'install_dir' ] package_name = actions_dict[ 'package_name' ] actions = actions_dict.get( 'actions', None ) @@ -53,13 +70,13 @@ # the filtered actions being used in the next stage below. The installation directory (i.e., dir) # is also defined in this stage and is used in the next stage below when defining current_dir. action_type, action_dict = actions[ 0 ] - if action_type in INSTALL_ACTIONS: + if action_type in self.INSTALL_ACTIONS: # Some of the parameters passed here are needed only by a subset of the step handler classes, # but to allow for a standard method signature we'll pass them along. We don't check the # tool_dependency status in this stage because it should not have been changed based on a # download. tool_dependency, filtered_actions, dir = \ - step_manager.execute_step( app=app, + step_manager.execute_step( app=self.app, tool_dependency=tool_dependency, package_name=package_name, actions=actions, @@ -85,7 +102,7 @@ with lcd( current_dir ): action_type, action_dict = action_tup tool_dependency, tmp_filtered_actions, tmp_dir = \ - step_manager.execute_step( app=app, + step_manager.execute_step( app=self.app, tool_dependency=tool_dependency, package_name=package_name, actions=actions, @@ -97,7 +114,7 @@ work_dir=work_dir, current_dir=current_dir, initial_download=False ) - if tool_dependency.status in [ app.install_model.ToolDependency.installation_status.ERROR ]: + if tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR ]: # If the tool_dependency status is in an error state, return it with no additional # processing. return tool_dependency @@ -108,31 +125,115 @@ dir = tmp_dir return tool_dependency - def install_and_build_package_via_fabric( self, app, tool_shed_repository, tool_dependency, actions_dict ): - sa_session = app.install_model.context + def install_and_build_package_via_fabric( self, tool_shed_repository, tool_dependency, actions_dict ): + sa_session = self.app.install_model.context try: # There is currently only one fabric method. - tool_dependency = self.install_and_build_package( app, tool_shed_repository, tool_dependency, actions_dict ) + tool_dependency = self.install_and_build_package( tool_shed_repository, tool_dependency, actions_dict ) except Exception, e: log.exception( 'Error installing tool dependency %s version %s.', str( tool_dependency.name ), str( tool_dependency.version ) ) # Since there was an installation error, update the tool dependency status to Error. The remove_installation_path option must # be left False here. 
error_message = '%s\n%s' % ( self.format_traceback(), str( e ) ) - tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app, + tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( self.app, tool_dependency, error_message, remove_installation_path=False ) - tool_dependency = tool_dependency_util.mark_tool_dependency_installed( app, tool_dependency ) + tool_dependency = self.mark_tool_dependency_installed( tool_dependency ) return tool_dependency - def install_via_fabric( self, app, tool_shed_repository, tool_dependency, install_dir, package_name=None, custom_fabfile_path=None, + def install_specified_tool_dependencies( self, tool_shed_repository, tool_dependencies_config, tool_dependencies, + from_tool_migration_manager=False ): + """ + Follow the recipe in the received tool_dependencies_config to install specified packages for + repository tools. The received list of tool_dependencies are the database records for those + dependencies defined in the tool_dependencies_config that are to be installed. This list may + be a subset of the set of dependencies defined in the tool_dependencies_config. This allows + for filtering out dependencies that have not been checked for installation on the 'Manage tool + dependencies' page for an installed Tool Shed repository. + """ + attr_tups_of_dependencies_for_install = [ ( td.name, td.version, td.type ) for td in tool_dependencies ] + installed_packages = [] + tag_manager = TagManager() + # Parse the tool_dependencies.xml config. + tree, error_message = xml_util.parse_xml( tool_dependencies_config ) + if tree is None: + log.debug( "The received tool_dependencies.xml file is likely invalid: %s" % str( error_message ) ) + return installed_packages + root = tree.getroot() + elems = [] + for elem in root: + if elem.tag == 'set_environment': + version = elem.get( 'version', '1.0' ) + if version != '1.0': + raise Exception( 'The <set_environment> tag must have a version attribute with value 1.0' ) + for sub_elem in elem: + elems.append( sub_elem ) + else: + elems.append( elem ) + for elem in elems: + name = elem.get( 'name', None ) + version = elem.get( 'version', None ) + type = elem.get( 'type', None ) + if type is None: + if elem.tag in [ 'environment_variable', 'set_environment' ]: + type = 'set_environment' + else: + type = 'package' + if ( name and type == 'set_environment' ) or ( name and version ): + # elem is a package set_environment tag set. + attr_tup = ( name, version, type ) + try: + index = attr_tups_of_dependencies_for_install.index( attr_tup ) + except Exception, e: + index = None + if index is not None: + tool_dependency = tool_dependencies[ index ] + # If the tool_dependency.type is 'set_environment', then the call to process_tag_set() will + # handle everything - no additional installation is necessary. 
+ tool_dependency, proceed_with_install, action_elem_tuples = \ + tag_manager.process_tag_set( self.app, + tool_shed_repository, + tool_dependency, + elem, + name, + version, + from_tool_migration_manager=from_tool_migration_manager, + tool_dependency_db_records=tool_dependencies ) + if ( tool_dependency.type == 'package' and proceed_with_install ): + try: + tool_dependency = self.install_package( elem, + tool_shed_repository, + tool_dependencies=tool_dependencies, + from_tool_migration_manager=from_tool_migration_manager ) + except Exception, e: + error_message = "Error installing tool dependency %s version %s: %s" % \ + ( str( name ), str( version ), str( e ) ) + log.exception( error_message ) + if tool_dependency: + # Since there was an installation error, update the tool dependency status to Error. The + # remove_installation_path option must be left False here. + tool_dependency = \ + tool_dependency_util.handle_tool_dependency_installation_error( self.app, + tool_dependency, + error_message, + remove_installation_path=False ) + if tool_dependency and tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.INSTALLED, + self.app.install_model.ToolDependency.installation_status.ERROR ]: + installed_packages.append( tool_dependency ) + if self.app.config.manage_dependency_relationships: + # Add the tool_dependency to the in-memory dictionaries in the installed_repository_manager. + self.app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository, tool_dependency ) + return installed_packages + + def install_via_fabric( self, tool_shed_repository, tool_dependency, install_dir, package_name=None, custom_fabfile_path=None, actions_elem=None, action_elem=None, **kwd ): """ Parse a tool_dependency.xml file's <actions> tag set to gather information for installation using self.install_and_build_package(). The use of fabric is being eliminated, so some of these functions may need to be renamed at some point. """ - sa_session = app.install_model.context + sa_session = self.app.install_model.context if not os.path.exists( install_dir ): os.makedirs( install_dir ) actions_dict = dict( install_dir=install_dir ) @@ -150,7 +251,7 @@ else: elems = [] step_manager = StepManager() - tool_shed_repository_install_dir = self.get_tool_shed_repository_install_dir( app, tool_shed_repository ) + tool_shed_repository_install_dir = self.get_tool_shed_repository_install_dir( tool_shed_repository ) install_environment = InstallEnvironment( tool_shed_repository_install_dir, install_dir ) for action_elem in elems: # Make sure to skip all comments, since they are now included in the XML tree. @@ -159,7 +260,7 @@ action_dict = {} action_type = action_elem.get( 'type', None ) if action_type is not None: - action_dict = step_manager.prepare_step( app=app, + action_dict = step_manager.prepare_step( app=self.app, tool_dependency=tool_dependency, action_type=action_type, action_elem=action_elem, @@ -178,10 +279,10 @@ # TODO: this is not yet supported or functional, but when it is handle it using the fabric api. raise Exception( 'Tool dependency installation using proprietary fabric scripts is not yet supported.' 
) else: - tool_dependency = self.install_and_build_package_via_fabric( app, tool_shed_repository, tool_dependency, actions_dict ) + tool_dependency = self.install_and_build_package_via_fabric( tool_shed_repository, tool_dependency, actions_dict ) return tool_dependency - def install_package( self, app, elem, tool_shed_repository, tool_dependencies=None, from_tool_migration_manager=False ): + def install_package( self, elem, tool_shed_repository, tool_dependencies=None, from_tool_migration_manager=False ): """ Install a tool dependency package defined by the XML element elem. The value of tool_dependencies is a partial or full list of ToolDependency records associated with the tool_shed_repository. @@ -199,7 +300,7 @@ if tool_dependency is not None: for package_elem in elem: tool_dependency, proceed_with_install, actions_elem_tuples = \ - tag_manager.process_tag_set( app, + tag_manager.process_tag_set( self.app, tool_shed_repository, tool_dependency, package_elem, @@ -211,7 +312,7 @@ # Get the installation directory for tool dependencies that will be installed for the received # tool_shed_repository. install_dir = \ - tool_dependency_util.get_tool_dependency_install_dir( app=app, + tool_dependency_util.get_tool_dependency_install_dir( app=self.app, repository_name=tool_shed_repository.name, repository_owner=tool_shed_repository.owner, repository_changeset_revision=tool_shed_repository.installed_changeset_revision, @@ -244,14 +345,13 @@ if binary_installed: continue # No platform-specific <actions> recipe has yet resulted in a successful installation. - tool_dependency = self.install_via_fabric( app, - tool_shed_repository, + tool_dependency = self.install_via_fabric( tool_shed_repository, tool_dependency, install_dir, package_name=package_name, actions_elem=actions_elem, action_elem=None ) - if tool_dependency.status == app.install_model.ToolDependency.installation_status.INSTALLED: + if tool_dependency.status == self.app.install_model.ToolDependency.installation_status.INSTALLED: # If an <actions> tag was found that matches the current platform, and # self.install_via_fabric() did not result in an error state, set binary_installed # to True in order to skip any remaining platform-specific <actions> tags. @@ -266,17 +366,18 @@ # We've reached an <actions> tag that defines the recipe for installing and compiling from # source. If binary installation failed, we proceed with the recipe. if not binary_installed: - installation_directory = tool_dependency.installation_directory( app ) + installation_directory = tool_dependency.installation_directory( self.app ) if os.path.exists( installation_directory ): # Delete contents of installation directory if attempt at binary installation failed. installation_directory_contents = os.listdir( installation_directory ) if installation_directory_contents: - removed, error_message = tool_dependency_util.remove_tool_dependency( app, tool_dependency ) + removed, error_message = tool_dependency_util.remove_tool_dependency( self.app, + tool_dependency ) if removed: can_install_from_source = True else: log.debug( 'Error removing old files from installation directory %s: %s' % \ - ( str( tool_dependency.installation_directory( app ), str( error_message ) ) ) ) + ( str( installation_directory, str( error_message ) ) ) ) else: can_install_from_source = True else: @@ -286,20 +387,18 @@ # tag set that defines the recipe to install and compile from source. log.debug( 'Proceeding with install and compile recipe for tool dependency %s.' 
% \ str( tool_dependency.name ) ) - tool_dependency = self.install_via_fabric( app, - tool_shed_repository, + tool_dependency = self.install_via_fabric( tool_shed_repository, tool_dependency, install_dir, package_name=package_name, actions_elem=actions_elem, action_elem=None ) if actions_elem.tag == 'action' and \ - tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR: + tool_dependency.status != self.app.install_model.ToolDependency.installation_status.ERROR: # If the tool dependency is not in an error state, perform any final actions that have been # defined within the actions_group tag set, but outside of an <actions> tag, which defines # the recipe for installing and compiling from source. - tool_dependency = self.install_via_fabric( app, - tool_shed_repository, + tool_dependency = self.install_via_fabric( tool_shed_repository, tool_dependency, install_dir, package_name=package_name, @@ -309,14 +408,603 @@ # Checks for "os" and "architecture" attributes are not made for any <actions> tag sets outside of # an <actions_group> tag set. If the attributes are defined, they will be ignored. All <actions> tags # outside of an <actions_group> tag set will always be processed. - tool_dependency = self.install_via_fabric( app, - tool_shed_repository, + tool_dependency = self.install_via_fabric( tool_shed_repository, tool_dependency, install_dir, package_name=package_name, actions_elem=actions_elems, action_elem=None ) - if tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR: + if tool_dependency.status != self.app.install_model.ToolDependency.installation_status.ERROR: log.debug( 'Tool dependency %s version %s has been installed in %s.' % \ ( str( package_name ), str( package_version ), str( install_dir ) ) ) return tool_dependency + + def mark_tool_dependency_installed( self, tool_dependency ): + if tool_dependency.status not in [ self.app.install_model.ToolDependency.installation_status.ERROR, + self.app.install_model.ToolDependency.installation_status.INSTALLED ]: + log.debug( 'Changing status for tool dependency %s from %s to %s.' % \ + ( str( tool_dependency.name ), + str( tool_dependency.status ), + str( self.app.install_model.ToolDependency.installation_status.INSTALLED ) ) ) + status = self.app.install_model.ToolDependency.installation_status.INSTALLED + tool_dependency = tool_dependency_util.set_tool_dependency_attributes( self.app, + tool_dependency=tool_dependency, + status=status, + error_message=None, + remove_from_disk=False ) + return tool_dependency + + +class InstallRepositoryManager( object ): + + def __init__( self, app ): + self.app = app + + def get_repository_components_for_installation( self, encoded_tsr_id, encoded_tsr_ids, repo_info_dicts, + tool_panel_section_keys ): + """ + The received encoded_tsr_ids, repo_info_dicts, and tool_panel_section_keys are 3 lists that + contain associated elements at each location in the list. This method will return the elements + from repo_info_dicts and tool_panel_section_keys associated with the received encoded_tsr_id + by determining its location in the received encoded_tsr_ids list. 
+ """ + for index, tsr_id in enumerate( encoded_tsr_ids ): + if tsr_id == encoded_tsr_id: + repo_info_dict = repo_info_dicts[ index ] + tool_panel_section_key = tool_panel_section_keys[ index ] + return repo_info_dict, tool_panel_section_key + return None, None + + def __get_install_info_from_tool_shed( self, tool_shed_url, name, owner, changeset_revision ): + params = '?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) + url = common_util.url_join( tool_shed_url, + 'api/repositories/get_repository_revision_install_info%s' % params ) + try: + raw_text = common_util.tool_shed_get( self.app, tool_shed_url, url ) + except Exception, e: + message = "Error attempting to retrieve installation information from tool shed " + message += "%s for revision %s of repository %s owned by %s: %s" % \ + ( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) ) + log.warn( message ) + raise exceptions.InternalServerError( message ) + if raw_text: + # If successful, the response from get_repository_revision_install_info will be 3 + # dictionaries, a dictionary defining the Repository, a dictionary defining the + # Repository revision (RepositoryMetadata), and a dictionary including the additional + # information required to install the repository. + items = json.loads( raw_text ) + repository_revision_dict = items[ 1 ] + repo_info_dict = items[ 2 ] + else: + message = "Unable to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \ + ( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) ) + log.warn( message ) + raise exceptions.InternalServerError( message ) + # Make sure the tool shed returned everything we need for installing the repository. + if not repository_revision_dict or not repo_info_dict: + invalid_parameter_message = "No information is available for the requested repository revision.\n" + invalid_parameter_message += "One or more of the following parameter values is likely invalid:\n" + invalid_parameter_message += "tool_shed_url: %s\n" % str( tool_shed_url ) + invalid_parameter_message += "name: %s\n" % str( name ) + invalid_parameter_message += "owner: %s\n" % str( owner ) + invalid_parameter_message += "changeset_revision: %s\n" % str( changeset_revision ) + raise exceptions.RequestParameterInvalidException( invalid_parameter_message ) + repo_info_dicts = [ repo_info_dict ] + return repository_revision_dict, repo_info_dicts + + def initiate_repository_installation( self, installation_dict ): + install_model = self.app.install_model + # The following installation_dict entries are all required. 
+ created_or_updated_tool_shed_repositories = installation_dict[ 'created_or_updated_tool_shed_repositories' ] + filtered_repo_info_dicts = installation_dict[ 'filtered_repo_info_dicts' ] + has_repository_dependencies = installation_dict[ 'has_repository_dependencies' ] + includes_tool_dependencies = installation_dict[ 'includes_tool_dependencies' ] + includes_tools = installation_dict[ 'includes_tools' ] + includes_tools_for_display_in_tool_panel = installation_dict[ 'includes_tools_for_display_in_tool_panel' ] + install_repository_dependencies = installation_dict[ 'install_repository_dependencies' ] + install_tool_dependencies = installation_dict[ 'install_tool_dependencies' ] + message = installation_dict[ 'message' ] + new_tool_panel_section_label = installation_dict[ 'new_tool_panel_section_label' ] + shed_tool_conf = installation_dict[ 'shed_tool_conf' ] + status = installation_dict[ 'status' ] + tool_panel_section_id = installation_dict[ 'tool_panel_section_id' ] + tool_panel_section_keys = installation_dict[ 'tool_panel_section_keys' ] + tool_path = installation_dict[ 'tool_path' ] + tool_shed_url = installation_dict[ 'tool_shed_url' ] + # Handle contained tools. + if includes_tools_for_display_in_tool_panel and ( new_tool_panel_section_label or tool_panel_section_id ): + tool_panel_section_key, tool_section = \ + tool_util.handle_tool_panel_section( self.app.toolbox, + tool_panel_section_id=tool_panel_section_id, + new_tool_panel_section_label=new_tool_panel_section_label ) + else: + tool_panel_section_key = None + tool_section = None + encoded_repository_ids = [ self.app.security.encode_id( tsr.id ) for tsr in created_or_updated_tool_shed_repositories ] + new_kwd = dict( includes_tools=includes_tools, + includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel, + has_repository_dependencies=has_repository_dependencies, + install_repository_dependencies=install_repository_dependencies, + includes_tool_dependencies=includes_tool_dependencies, + install_tool_dependencies=install_tool_dependencies, + message=message, + repo_info_dicts=filtered_repo_info_dicts, + shed_tool_conf=shed_tool_conf, + status=status, + tool_path=tool_path, + tool_panel_section_keys=tool_panel_section_keys, + tool_shed_repository_ids=encoded_repository_ids, + tool_shed_url=tool_shed_url ) + encoded_kwd = encoding_util.tool_shed_encode( new_kwd ) + tsr_ids = [ r.id for r in created_or_updated_tool_shed_repositories ] + tool_shed_repositories = [] + for tsr_id in tsr_ids: + tsr = install_model.context.query( install_model.ToolShedRepository ).get( tsr_id ) + tool_shed_repositories.append( tsr ) + clause_list = [] + for tsr_id in tsr_ids: + clause_list.append( install_model.ToolShedRepository.table.c.id == tsr_id ) + query = install_model.context.query( install_model.ToolShedRepository ).filter( or_( *clause_list ) ) + return encoded_kwd, query, tool_shed_repositories, encoded_repository_ids + + def install( self, tool_shed_url, name, owner, changeset_revision, install_options ): + # Get all of the information necessary for installing the repository from the specified tool shed. 
+ repository_revision_dict, repo_info_dicts = self.__get_install_info_from_tool_shed( tool_shed_url, + name, + owner, + changeset_revision ) + installed_tool_shed_repositories = self.__install_repositories( repository_revision_dict, + repo_info_dicts, + install_options ) + return installed_tool_shed_repositories + + def __install_repositories( self, tool_shed_url, repository_revision_dict, repo_info_dicts, install_options ): + # Keep track of all repositories that are installed - there may be more than one if repository dependencies are installed. + installed_tool_shed_repositories = [] + try: + has_repository_dependencies = repository_revision_dict[ 'has_repository_dependencies' ] + except: + raise exceptions.InternalServerError( "Tool shed response missing required parameter 'has_repository_dependencies'." ) + try: + includes_tools = repository_revision_dict[ 'includes_tools' ] + except: + raise exceptions.InternalServerError( "Tool shed response missing required parameter 'includes_tools'." ) + try: + includes_tool_dependencies = repository_revision_dict[ 'includes_tool_dependencies' ] + except: + raise exceptions.InternalServerError( "Tool shed response missing required parameter 'includes_tool_dependencies'." ) + try: + includes_tools_for_display_in_tool_panel = repository_revision_dict[ 'includes_tools_for_display_in_tool_panel' ] + except: + raise exceptions.InternalServerError( "Tool shed response missing required parameter 'includes_tools_for_display_in_tool_panel'." ) + # Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain the repository information. + install_repository_dependencies = install_options.get( 'install_repository_dependencies', False ) + install_tool_dependencies = install_options.get( 'install_tool_dependencies', False ) + if install_tool_dependencies: + if self.app.config.tool_dependency_dir is None: + no_tool_dependency_dir_message = "Tool dependencies can be automatically installed only if you set " + no_tool_dependency_dir_message += "the value of your 'tool_dependency_dir' setting in your Galaxy " + no_tool_dependency_dir_message += "configuration file (universe_wsgi.ini) and restart your Galaxy server. " + raise exceptions.ConfigDoesNotAllowException( no_tool_dependency_dir_message ) + new_tool_panel_section_label = install_options.get( 'new_tool_panel_section_label', '' ) + shed_tool_conf = install_options.get( 'shed_tool_conf', None ) + if shed_tool_conf: + # Get the tool_path setting. + index, shed_conf_dict = suc.get_shed_tool_conf_dict( self.app, shed_tool_conf ) + tool_path = shed_conf_dict[ 'tool_path' ] + else: + # Pick a semi-random shed-related tool panel configuration file and get the tool_path setting. + for shed_config_dict in self.app.toolbox.shed_tool_confs: + # Don't use migrated_tools_conf.xml. + if shed_config_dict[ 'config_filename' ] != self.app.config.migrated_tools_config: + break + shed_tool_conf = shed_config_dict[ 'config_filename' ] + tool_path = shed_config_dict[ 'tool_path' ] + if not shed_tool_conf: + raise exceptions.RequestParameterMissingException( "Missing required parameter 'shed_tool_conf'." 
) + tool_panel_section_id = install_options.get( 'tool_panel_section_id', '' ) + if tool_panel_section_id not in [ None, '' ]: + if tool_panel_section_id not in self.app.toolbox.tool_panel: + fixed_tool_panel_section_id = 'section_%s' % tool_panel_section_id + if fixed_tool_panel_section_id in self.app.toolbox.tool_panel: + tool_panel_section_id = fixed_tool_panel_section_id + else: + tool_panel_section_id = '' + else: + tool_panel_section_id = '' + # Build the dictionary of information necessary for creating tool_shed_repository database records + # for each repository being installed. + installation_dict = dict( install_repository_dependencies=install_repository_dependencies, + new_tool_panel_section_label=new_tool_panel_section_label, + no_changes_checked=False, + repo_info_dicts=repo_info_dicts, + tool_panel_section_id=tool_panel_section_id, + tool_path=tool_path, + tool_shed_url=tool_shed_url ) + # Create the tool_shed_repository database records and gather additional information for repository installation. + created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts = \ + self.handle_tool_shed_repositories( installation_dict, using_api=True ) + if created_or_updated_tool_shed_repositories: + # Build the dictionary of information necessary for installing the repositories. + installation_dict = dict( created_or_updated_tool_shed_repositories=created_or_updated_tool_shed_repositories, + filtered_repo_info_dicts=filtered_repo_info_dicts, + has_repository_dependencies=has_repository_dependencies, + includes_tool_dependencies=includes_tool_dependencies, + includes_tools=includes_tools, + includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel, + install_repository_dependencies=install_repository_dependencies, + install_tool_dependencies=install_tool_dependencies, + message='', + new_tool_panel_section_label=new_tool_panel_section_label, + shed_tool_conf=shed_tool_conf, + status='done', + tool_panel_section_id=tool_panel_section_id, + tool_panel_section_keys=tool_panel_section_keys, + tool_path=tool_path, + tool_shed_url=tool_shed_url ) + # Prepare the repositories for installation. Even though this method receives a single combination + # of tool_shed_url, name, owner and changeset_revision, there may be multiple repositories for installation + # at this point because repository dependencies may have added additional repositories for installation + # along with the single specified repository. + encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \ + initiate_repository_installation( self.app, installation_dict ) + # Some repositories may have repository dependencies that are required to be installed before the + # dependent repository, so we'll order the list of tsr_ids to ensure all repositories install in + # the required order. + tsr_ids = [ self.app.security.encode_id( tool_shed_repository.id ) for tool_shed_repository in tool_shed_repositories ] + ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys = \ + self.order_components_for_installation( tsr_ids, repo_info_dicts, tool_panel_section_keys=tool_panel_section_keys ) + # Install the repositories, keeping track of each one for later display. 
+ for index, tsr_id in enumerate( ordered_tsr_ids ): + install_model = self.app.install_model + tool_shed_repository = install_model.context.query( install_model.ToolShedRepository ) \ + .get( self.app.security.decode_id( tsr_id ) ) + if tool_shed_repository.status in [ install_model.ToolShedRepository.installation_status.NEW, + install_model.ToolShedRepository.installation_status.UNINSTALLED ]: + repo_info_dict = ordered_repo_info_dicts[ index ] + tool_panel_section_key = ordered_tool_panel_section_keys[ index ] + self.install_tool_shed_repository( tool_shed_repository, + repo_info_dict, + tool_panel_section_key, + shed_tool_conf, + tool_path, + install_tool_dependencies, + reinstalling=False ) + installed_tool_shed_repositories.append( tool_shed_repository ) + else: + # We're attempting to install more than 1 repository, and all of them have already been installed. + raise exceptions.RequestParameterInvalidException( 'All repositories that you are attempting to install have been previously installed.' ) + return installed_tool_shed_repositories + + def install_tool_shed_repository( self, tool_shed_repository, repo_info_dict, tool_panel_section_key, shed_tool_conf, tool_path, + install_tool_dependencies, reinstalling=False ): + install_model = self.app.install_model + if tool_panel_section_key: + try: + tool_section = self.app.toolbox.tool_panel[ tool_panel_section_key ] + except KeyError: + log.debug( 'Invalid tool_panel_section_key "%s" specified. Tools will be loaded outside of sections in the tool panel.', + str( tool_panel_section_key ) ) + tool_section = None + else: + tool_section = None + if isinstance( repo_info_dict, basestring ): + repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict ) + # Clone each repository to the configured location. + suc.update_tool_shed_repository_status( self.app, + tool_shed_repository, + install_model.ToolShedRepository.installation_status.CLONING ) + repo_info_tuple = repo_info_dict[ tool_shed_repository.name ] + description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple + relative_clone_dir = suc.generate_tool_shed_repository_install_dir( repository_clone_url, + tool_shed_repository.installed_changeset_revision ) + relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name ) + install_dir = os.path.join( tool_path, relative_install_dir ) + cloned_ok, error_message = hg_util.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev ) + if cloned_ok: + if reinstalling: + # Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated. 
+ changeset_revision_dict = repository_util.get_update_to_changeset_revision_and_ctx_rev( self.app, + tool_shed_repository ) + current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None ) + current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None ) + if current_ctx_rev != ctx_rev: + repo = hg_util.get_repo_for_repository( self.app, + repository=None, + repo_path=os.path.abspath( install_dir ), + create=False ) + hg_util.pull_repository( repo, repository_clone_url, current_changeset_revision ) + hg_util.update_repository( repo, ctx_rev=current_ctx_rev ) + repository_util.handle_repository_contents( self.app, + tool_shed_repository=tool_shed_repository, + tool_path=tool_path, + repository_clone_url=repository_clone_url, + relative_install_dir=relative_install_dir, + tool_shed=tool_shed_repository.tool_shed, + tool_section=tool_section, + shed_tool_conf=shed_tool_conf, + reinstalling=reinstalling ) + install_model.context.refresh( tool_shed_repository ) + metadata = tool_shed_repository.metadata + if 'tools' in metadata: + # Get the tool_versions from the tool shed for each tool in the installed change set. + suc.update_tool_shed_repository_status( self.app, + tool_shed_repository, + install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS ) + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, str( tool_shed_repository.tool_shed ) ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( tool_shed_repository.name ), + str( tool_shed_repository.owner ), + str( tool_shed_repository.changeset_revision ) ) + url = common_util.url_join( tool_shed_url, + '/repository/get_tool_versions%s' % params ) + text = common_util.tool_shed_get( self.app, tool_shed_url, url ) + if text: + tool_version_dicts = json.loads( text ) + tool_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository ) + else: + if not error_message: + error_message = "" + error_message += "Version information for the tools included in the <b>%s</b> repository is missing. " % tool_shed_repository.name + error_message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions " + error_message += "from the installed repository's <b>Repository Actions</b> menu. " + if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata: + work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itsr" ) + # Install tool dependencies. + suc.update_tool_shed_repository_status( self.app, + tool_shed_repository, + install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) + # Get the tool_dependencies.xml file from the repository. + tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', install_dir ) + itdm = InstallToolDependencyManager( self.app ) + installed_tool_dependencies = itdm.install_specified_tool_dependencies( tool_shed_repository=tool_shed_repository, + tool_dependencies_config=tool_dependencies_config, + tool_dependencies=tool_shed_repository.tool_dependencies, + from_tool_migration_manager=False ) + basic_util.remove_dir( work_dir ) + suc.update_tool_shed_repository_status( self.app, + tool_shed_repository, + install_model.ToolShedRepository.installation_status.INSTALLED ) + if self.app.config.manage_dependency_relationships: + # Add the installed repository and any tool dependencies to the in-memory dictionaries in the installed_repository_manager. 
+ self.app.installed_repository_manager.handle_repository_install( tool_shed_repository ) + else: + # An error occurred while cloning the repository, so reset everything necessary to enable another attempt. + suc.set_repository_attributes( self.app, + tool_shed_repository, + status=install_model.ToolShedRepository.installation_status.ERROR, + error_message=error_message, + deleted=False, + uninstalled=False, + remove_from_disk=True ) + + def merge_containers_dicts_for_new_install( self, containers_dicts ): + """ + When installing one or more tool shed repositories for the first time, the received list of + containers_dicts contains a containers_dict for each repository being installed. Since the + repositories are being installed for the first time, all entries are None except the repository + dependencies and tool dependencies. The entries for missing dependencies are all None since + they have previously been merged into the installed dependencies. This method will merge the + dependencies entries into a single container and return it for display. + """ + new_containers_dict = dict( readme_files=None, + datatypes=None, + missing_repository_dependencies=None, + repository_dependencies=None, + missing_tool_dependencies=None, + tool_dependencies=None, + invalid_tools=None, + valid_tools=None, + workflows=None ) + if containers_dicts: + lock = threading.Lock() + lock.acquire( True ) + try: + repository_dependencies_root_folder = None + tool_dependencies_root_folder = None + # Use a unique folder id (hopefully the following value is unique). + folder_id = 867 + for old_container_dict in containers_dicts: + # Merge repository_dependencies. + old_container_repository_dependencies_root = old_container_dict[ 'repository_dependencies' ] + if old_container_repository_dependencies_root: + if repository_dependencies_root_folder is None: + repository_dependencies_root_folder = container_util.Folder( id=folder_id, + key='root', + label='root', + parent=None ) + folder_id += 1 + repository_dependencies_folder = container_util.Folder( id=folder_id, + key='merged', + label='Repository dependencies', + parent=repository_dependencies_root_folder ) + folder_id += 1 + # The old_container_repository_dependencies_root will be a root folder containing a single sub_folder. + old_container_repository_dependencies_folder = old_container_repository_dependencies_root.folders[ 0 ] + # Change the folder id so it won't conflict with others being merged. + old_container_repository_dependencies_folder.id = folder_id + folder_id += 1 + repository_components_tuple = container_util.get_components_from_key( old_container_repository_dependencies_folder.key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) + name = components_list[ 1 ] + # Generate the label by retrieving the repository name. + old_container_repository_dependencies_folder.label = str( name ) + repository_dependencies_folder.folders.append( old_container_repository_dependencies_folder ) + # Merge tool_dependencies.
+ old_container_tool_dependencies_root = old_container_dict[ 'tool_dependencies' ] + if old_container_tool_dependencies_root: + if tool_dependencies_root_folder is None: + tool_dependencies_root_folder = container_util.Folder( id=folder_id, + key='root', + label='root', + parent=None ) + folder_id += 1 + tool_dependencies_folder = container_util.Folder( id=folder_id, + key='merged', + label='Tool dependencies', + parent=tool_dependencies_root_folder ) + folder_id += 1 + else: + td_list = [ td.listify for td in tool_dependencies_folder.tool_dependencies ] + # The old_container_tool_dependencies_root will be a root folder containing a single sub_folder. + old_container_tool_dependencies_folder = old_container_tool_dependencies_root.folders[ 0 ] + for td in old_container_tool_dependencies_folder.tool_dependencies: + if td.listify not in td_list: + tool_dependencies_folder.tool_dependencies.append( td ) + if repository_dependencies_root_folder: + repository_dependencies_root_folder.folders.append( repository_dependencies_folder ) + new_containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder + if tool_dependencies_root_folder: + tool_dependencies_root_folder.folders.append( tool_dependencies_folder ) + new_containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder + except Exception, e: + log.debug( "Exception in merge_containers_dicts_for_new_install: %s" % str( e ) ) + finally: + lock.release() + return new_containers_dict + + def merge_missing_repository_dependencies_to_installed_container( self, containers_dict ): + """Merge the list of missing repository dependencies into the list of installed repository dependencies.""" + missing_rd_container_root = containers_dict.get( 'missing_repository_dependencies', None ) + if missing_rd_container_root: + # The missing_rd_container_root will be a root folder containing a single sub_folder. + missing_rd_container = missing_rd_container_root.folders[ 0 ] + installed_rd_container_root = containers_dict.get( 'repository_dependencies', None ) + # The installed_rd_container_root will be a root folder containing a single sub_folder. + if installed_rd_container_root: + installed_rd_container = installed_rd_container_root.folders[ 0 ] + installed_rd_container.label = 'Repository dependencies' + for index, rd in enumerate( missing_rd_container.repository_dependencies ): + # Skip the header row. + if index == 0: + continue + installed_rd_container.repository_dependencies.append( rd ) + installed_rd_container_root.folders = [ installed_rd_container ] + containers_dict[ 'repository_dependencies' ] = installed_rd_container_root + else: + # Change the folder label from 'Missing repository dependencies' to be 'Repository dependencies' for display. + root_container = containers_dict[ 'missing_repository_dependencies' ] + for sub_container in root_container.folders: + # There should only be 1 sub-folder. + sub_container.label = 'Repository dependencies' + containers_dict[ 'repository_dependencies' ] = root_container + containers_dict[ 'missing_repository_dependencies' ] = None + return containers_dict + + def merge_missing_tool_dependencies_to_installed_container( self, containers_dict ): + """ Merge the list of missing tool dependencies into the list of installed tool dependencies.""" + missing_td_container_root = containers_dict.get( 'missing_tool_dependencies', None ) + if missing_td_container_root: + # The missing_td_container_root will be a root folder containing a single sub_folder. 
+ missing_td_container = missing_td_container_root.folders[ 0 ] + installed_td_container_root = containers_dict.get( 'tool_dependencies', None ) + # The installed_td_container_root will be a root folder containing a single sub_folder. + if installed_td_container_root: + installed_td_container = installed_td_container_root.folders[ 0 ] + installed_td_container.label = 'Tool dependencies' + for index, td in enumerate( missing_td_container.tool_dependencies ): + # Skip the header row. + if index == 0: + continue + installed_td_container.tool_dependencies.append( td ) + installed_td_container_root.folders = [ installed_td_container ] + containers_dict[ 'tool_dependencies' ] = installed_td_container_root + else: + # Change the folder label from 'Missing tool dependencies' to be 'Tool dependencies' for display. + root_container = containers_dict[ 'missing_tool_dependencies' ] + for sub_container in root_container.folders: + # There should only be 1 subfolder. + sub_container.label = 'Tool dependencies' + containers_dict[ 'tool_dependencies' ] = root_container + containers_dict[ 'missing_tool_dependencies' ] = None + return containers_dict + + def order_components_for_installation( self, tsr_ids, repo_info_dicts, tool_panel_section_keys ): + """ + Some repositories may have repository dependencies that are required to be installed + before the dependent repository. This method will inspect the list of repositories + about to be installed and make sure to order them appropriately. For each repository + about to be installed, if required repositories are not contained in the list of repositories + about to be installed, then they are not considered. Repository dependency definitions + that contain circular dependencies should not result in an infinite loop, but obviously + prior installation will not be handled for one or more of the repositories that require + prior installation. + """ + ordered_tsr_ids = [] + ordered_repo_info_dicts = [] + ordered_tool_panel_section_keys = [] + # Create a dictionary whose keys are the received tsr_ids and whose values are a list of + # tsr_ids, each of which is contained in the received list of tsr_ids and whose associated + # repository must be installed prior to the repository associated with the tsr_id key. + prior_install_required_dict = suc.get_prior_import_or_install_required_dict( self.app, + tsr_ids, + repo_info_dicts ) + processed_tsr_ids = [] + while len( processed_tsr_ids ) != len( prior_install_required_dict.keys() ): + tsr_id = suc.get_next_prior_import_or_install_required_dict_entry( prior_install_required_dict, + processed_tsr_ids ) + processed_tsr_ids.append( tsr_id ) + # Create the ordered_tsr_ids, the ordered_repo_info_dicts and the ordered_tool_panel_section_keys lists. + if tsr_id not in ordered_tsr_ids: + prior_install_required_ids = prior_install_required_dict[ tsr_id ] + for prior_install_required_id in prior_install_required_ids: + if prior_install_required_id not in ordered_tsr_ids: + # Install the associated repository dependency first. 
+ prior_repo_info_dict, prior_tool_panel_section_key = \ + self.get_repository_components_for_installation( prior_install_required_id, + tsr_ids, + repo_info_dicts, + tool_panel_section_keys=tool_panel_section_keys ) + ordered_tsr_ids.append( prior_install_required_id ) + ordered_repo_info_dicts.append( prior_repo_info_dict ) + ordered_tool_panel_section_keys.append( prior_tool_panel_section_key ) + repo_info_dict, tool_panel_section_key = \ + self.get_repository_components_for_installation( tsr_id, + tsr_ids, + repo_info_dicts, + tool_panel_section_keys=tool_panel_section_keys ) + ordered_tsr_ids.append( tsr_id ) + ordered_repo_info_dicts.append( repo_info_dict ) + ordered_tool_panel_section_keys.append( tool_panel_section_key ) + return ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys + + def populate_containers_dict_for_new_install( self, tool_shed_url, tool_path, readme_files_dict, installed_repository_dependencies, + missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies, + updating=False ): + """ + Return the populated containers for a repository being installed for the first time or for an installed repository + that is being updated and the updates include newly defined repository (and possibly tool) dependencies. + """ + installed_tool_dependencies, missing_tool_dependencies = \ + tool_dependency_util.populate_tool_dependencies_dicts( app=self.app, + tool_shed_url=tool_shed_url, + tool_path=tool_path, + repository_installed_tool_dependencies=installed_tool_dependencies, + repository_missing_tool_dependencies=missing_tool_dependencies, + required_repo_info_dicts=None ) + # Most of the repository contents are set to None since we don't yet know what they are. + containers_dict = \ + container_util.build_repository_containers_for_galaxy( app=self.app, + repository=None, + datatypes=None, + invalid_tools=None, + missing_repository_dependencies=missing_repository_dependencies, + missing_tool_dependencies=missing_tool_dependencies, + readme_files_dict=readme_files_dict, + repository_dependencies=installed_repository_dependencies, + tool_dependencies=installed_tool_dependencies, + valid_tools=None, + workflows=None, + valid_data_managers=None, + invalid_data_managers=None, + data_managers_errors=None, + new_install=True, + reinstalling=False ) + if not updating: + # If we are installing a new repository and not updating an installed repository, we can merge + # the missing_repository_dependencies container contents to the installed_repository_dependencies + # container. When updating an installed repository, merging will result in losing newly defined + # dependencies included in the updates. + containers_dict = self.merge_missing_repository_dependencies_to_installed_container( containers_dict ) + # Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container.
+ containers_dict = self.merge_missing_tool_dependencies_to_installed_container( containers_dict ) + return containers_dict diff -r 7f506e778275715639d8f5d37041712be9662b39 -r 0a428afbc23538b6f8d0a8d9dcc29b9a576c1ddc lib/tool_shed/galaxy_install/installed_repository_manager.py --- a/lib/tool_shed/galaxy_install/installed_repository_manager.py +++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py @@ -289,6 +289,117 @@ if installed_repository_dict[ 'display_path' ]: datatype_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=deactivate ) + def purge_repository( self, app, repository ): + """Purge a repository with status New (a white ghost) from the database.""" + sa_session = self.app.model.context.current + status = 'ok' + message = '' + purged_tool_versions = 0 + purged_tool_dependencies = 0 + purged_required_repositories = 0 + purged_orphan_repository_repository_dependency_association_records = 0 + purged_orphan_repository_dependency_records = 0 + if repository.is_new: + # Purge this repository's associated tool versions. + if repository.tool_versions: + for tool_version in repository.tool_versions: + try: + tool_version_association = tool_version.parent_tool_association + sa_session.delete( tool_version_association ) + sa_session.flush() + except Exception, e: + status = 'error' + message = 'Error attempting to purge tool_versions for the repository named %s with status %s: %s.' % \ + ( str( repository.name ), str( repository.status ), str( e ) ) + return status, message + try: + tool_version_association = tool_version.child_tool_association + sa_session.delete( tool_version_association ) + sa_session.flush() + except Exception, e: + status = 'error' + message = 'Error attempting to purge tool_versions for the repository named %s with status %s: %s.' % \ + ( str( repository.name ), str( repository.status ), str( e ) ) + return status, message + try: + sa_session.delete( tool_version ) + sa_session.flush() + purged_tool_versions += 1 + except Exception, e: + status = 'error' + message = 'Error attempting to purge tool_versions for the repository named %s with status %s: %s.' % \ + ( str( repository.name ), str( repository.status ), str( e ) ) + return status, message + # Purge this repository's associated tool dependencies. + if repository.tool_dependencies: + for tool_dependency in repository.tool_dependencies: + try: + sa_session.delete( tool_dependency ) + sa_session.flush() + purged_tool_dependencies += 1 + except Exception, e: + status = 'error' + message = 'Error attempting to purge tool_dependencies for the repository named %s with status %s: %s.' % \ + ( str( repository.name ), str( repository.status ), str( e ) ) + return status, message + # Purge this repository's associated required repositories. + if repository.required_repositories: + for rrda in repository.required_repositories: + try: + sa_session.delete( rrda ) + sa_session.flush() + purged_required_repositories += 1 + except Exception, e: + status = 'error' + message = 'Error attempting to purge required_repositories for the repository named %s with status %s: %s.' % \ + ( str( repository.name ), str( repository.status ), str( e ) ) + return status, message + # Purge any "orphan" repository_dependency records associated with the repository, but not with any + # repository_repository_dependency_association records. 
+ for orphan_repository_dependency in \ + sa_session.query( self.app.install_model.RepositoryDependency ) \ + .filter( self.app.install_model.RepositoryDependency.table.c.tool_shed_repository_id == repository.id ): + # Purge any repository_repository_dependency_association records whose repository_dependency_id is + # the id of the orphan repository_dependency record. + for orphan_rrda in \ + sa_session.query( self.app.install_model.RepositoryRepositoryDependencyAssociation ) \ + .filter( self.app.install_model.RepositoryRepositoryDependencyAssociation.table.c.repository_dependency_id == orphan_repository_dependency.id ): + try: + sa_session.delete( orphan_rrda ) + sa_session.flush() + purged_orphan_repository_repository_dependency_association_records += 1 + except Exception, e: + status = 'error' + message = 'Error attempting to purge repository_repository_dependency_association records associated with ' + message += 'an orphan repository_dependency record for the repository named %s with status %s: %s.' % \ + ( str( repository.name ), str( repository.status ), str( e ) ) + return status, message + try: + sa_session.delete( orphan_repository_dependency ) + sa_session.flush() + purged_orphan_repository_dependency_records += 1 + except Exception, e: + status = 'error' + message = 'Error attempting to purge orphan repository_dependency records for the repository named %s with status %s: %s.' % \ + ( str( repository.name ), str( repository.status ), str( e ) ) + return status, message + # Purge the repository. + sa_session.delete( repository ) + sa_session.flush() + message = 'The repository named <b>%s</b> with status <b>%s</b> has been purged.<br/>' % \ + ( str( repository.name ), str( repository.status ) ) + message += 'Total associated tool_version records purged: %d<br/>' % purged_tool_versions + message += 'Total associated tool_dependency records purged: %d<br/>' % purged_tool_dependencies + message += 'Total associated repository_repository_dependency_association records purged: %d<br/>' % purged_required_repositories + message += 'Total associated orphan repository_repository_dependency_association records purged: %d<br/>' % \ + purged_orphan_repository_repository_dependency_association_records + message += 'Total associated orphan repository_dependency records purged: %d<br/>' % purged_orphan_repository_dependency_records + else: + status = 'error' + message = 'A repository must have the status <b>New</b> in order to be purged. This repository has ' + message += ' the status %s.' % str( repository.status ) + return status, message + def remove_entry_from_installed_repository_dependencies_of_installed_repositories( self, repository ): """ Remove an entry from self.installed_repository_dependencies_of_installed_repositories. 
A side-effect of this method diff -r 7f506e778275715639d8f5d37041712be9662b39 -r 0a428afbc23538b6f8d0a8d9dcc29b9a576c1ddc lib/tool_shed/galaxy_install/repair_repository_manager.py --- /dev/null +++ b/lib/tool_shed/galaxy_install/repair_repository_manager.py @@ -0,0 +1,219 @@ +import logging +import os +import tempfile + +log = logging.getLogger( __name__ ) + +from tool_shed.galaxy_install import install_manager +from tool_shed.galaxy_install import repository_util + +from tool_shed.util import basic_util +from tool_shed.util import common_install_util +from tool_shed.util import common_util +from tool_shed.util import container_util +from tool_shed.util import hg_util +from tool_shed.util import shed_util_common as suc +from tool_shed.util import repository_dependency_util +from tool_shed.util import tool_dependency_util +from tool_shed.util import tool_util + +class RepairRepositoryManager(): + + def __init__( self, app ): + self.app = app + + def get_installed_repositories_from_repository_dependencies( self, repository_dependencies_dict ): + installed_repositories = [] + if repository_dependencies_dict and isinstance( repository_dependencies_dict, dict ): + for rd_key, rd_vals in repository_dependencies_dict.items(): + if rd_key in [ 'root_key', 'description' ]: + continue + # rd_key is something like: 'http://localhost:9009__ESEP__package_rdkit_2012_12__ESEP__test__ESEP__d635ff...' + # rd_val is something like: [['http://localhost:9009', 'package_numpy_1_7', 'test', 'cddd64ecd985', 'True']] + repository_components_tuple = container_util.get_components_from_key( rd_key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) + tool_shed, name, owner, changeset_revision = components_list[ 0:4 ] + installed_repository = suc.get_tool_shed_repository_by_shed_name_owner_changeset_revision( self.app, + tool_shed, + name, + owner, + changeset_revision ) + if installed_repository not in installed_repositories: + installed_repositories.append( installed_repository ) + for rd_val in rd_vals: + tool_shed, name, owner, changeset_revision = rd_val[ 0:4 ] + installed_repository = suc.get_tool_shed_repository_by_shed_name_owner_changeset_revision( self.app, + tool_shed, + name, + owner, + changeset_revision ) + if installed_repository not in installed_repositories: + installed_repositories.append( installed_repository ) + return installed_repositories + + def get_repair_dict( self, repository ): + """ + Inspect the installed repository dependency hierarchy for a specified repository + and attempt to make sure that all of its repository dependencies, as well as each + repository's tool dependencies, are properly installed. This method is called only + from Galaxy when attempting to correct issues with an installed repository that has + installation problems somewhere in its dependency hierarchy. + """ + tsr_ids = [] + repo_info_dicts = [] + tool_panel_section_keys = [] + repair_dict = {} + irm = install_manager.InstallRepositoryManager( self.app ) + # Get a dictionary of all repositories upon which the contents of the current repository_metadata + # record depend. + repository_dependencies_dict = \ + repository_dependency_util.get_repository_dependencies_for_installed_tool_shed_repository( self.app, repository ) + if repository_dependencies_dict: + # Generate the list of installed repositories from the information contained in the + # repository_dependencies dictionary.
+ installed_repositories = self.get_installed_repositories_from_repository_dependencies( repository_dependencies_dict ) + # Some repositories may have repository dependencies that are required to be installed before + # the dependent repository, so we'll order the list of tsr_ids to ensure all repositories are + # repaired in the required order. + for installed_repository in installed_repositories: + tsr_ids.append( self.app.security.encode_id( installed_repository.id ) ) + repo_info_dict, tool_panel_section_key = self.get_repo_info_dict_for_repair( installed_repository ) + tool_panel_section_keys.append( tool_panel_section_key ) + repo_info_dicts.append( repo_info_dict ) + else: + # The received repository has no repository dependencies. + tsr_ids.append( self.app.security.encode_id( repository.id ) ) + repo_info_dict, tool_panel_section_key = self.get_repo_info_dict_for_repair( repository ) + tool_panel_section_keys.append( tool_panel_section_key ) + repo_info_dicts.append( repo_info_dict ) + ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys = \ + irm.order_components_for_installation( tsr_ids, + repo_info_dicts, + tool_panel_section_keys=tool_panel_section_keys ) + repair_dict[ 'ordered_tsr_ids' ] = ordered_tsr_ids + repair_dict[ 'ordered_repo_info_dicts' ] = ordered_repo_info_dicts + repair_dict[ 'ordered_tool_panel_section_keys' ] = ordered_tool_panel_section_keys + return repair_dict + + def get_repo_info_dict_for_repair( self, repository ): + tool_panel_section_key = None + repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository ) + repository_dependencies = \ + repository_dependency_util.get_repository_dependencies_for_installed_tool_shed_repository( self.app, repository ) + metadata = repository.metadata + if metadata: + tool_dependencies = metadata.get( 'tool_dependencies', None ) + tool_panel_section_dict = metadata.get( 'tool_panel_section', None ) + if tool_panel_section_dict: + # The repository must be in the uninstalled state. The structure of tool_panel_section_dict is: + # {<tool guid> : + # [{ 'id':<section id>, 'name':<section name>, 'version':<section version>, 'tool_config':<tool config file name> }]} + # Here is an example: + # {"localhost:9009/repos/test/filter/Filter1/1.1.0": + # [{"id": "filter_and_sort", "name": "Filter and Sort", "tool_config": "filtering.xml", "version": ""}]} + # Currently all tools contained within an installed tool shed repository must be loaded into the same + # section in the tool panel, so we can get the section id of the first guid in the tool_panel_section_dict. + # In the future, we'll have to handle different sections per guid. 
+ guid = tool_panel_section_dict.keys()[ 0 ] + section_dicts = tool_panel_section_dict[ guid ] + section_dict = section_dicts[ 0 ] + tool_panel_section_id = section_dict[ 'id' ] + tool_panel_section_name = section_dict[ 'name' ] + if tool_panel_section_id: + tool_panel_section_key, tool_panel_section = \ + tool_util.get_or_create_tool_section( self.app.toolbox, + tool_panel_section_id=tool_panel_section_id, + new_tool_panel_section_label=tool_panel_section_name ) + else: + tool_dependencies = None + repo_info_dict = repository_util.create_repo_info_dict( app=self.app, + repository_clone_url=repository_clone_url, + changeset_revision=repository.changeset_revision, + ctx_rev=repository.ctx_rev, + repository_owner=repository.owner, + repository_name=repository.name, + repository=None, + repository_metadata=None, + tool_dependencies=tool_dependencies, + repository_dependencies=repository_dependencies ) + return repo_info_dict, tool_panel_section_key + + def repair_tool_shed_repository( self, repository, repo_info_dict ): + + def add_repair_dict_entry( repository_name, error_message ): + if repository_name in repair_dict: + repair_dict[ repository_name ].append( error_message ) + else: + repair_dict[ repository_name ] = [ error_message ] + return repair_dict + + metadata = repository.metadata + repair_dict = {} + if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]: + try: + common_install_util.activate_repository( self.app, repository ) + except Exception, e: + error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) ) + log.debug( error_message ) + repair_dict = add_repair_dict_entry( repository.name, error_message ) + elif repository.status not in [ self.app.install_model.ToolShedRepository.installation_status.INSTALLED ]: + shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( self.app, repository ) + # Reset the repository attributes to the New state for installation. + if metadata: + tool_section, tool_panel_section_key = \ + tool_util.handle_tool_panel_selection( self.app.toolbox, + metadata, + no_changes_checked=True, + tool_panel_section_id=None, + new_tool_panel_section_label=None ) + else: + # The tools will be loaded outside of any sections in the tool panel. + tool_panel_section_key = None + suc.set_repository_attributes( self.app, + repository, + status=self.app.install_model.ToolShedRepository.installation_status.NEW, + error_message=None, + deleted=False, + uninstalled=False, + remove_from_disk=True ) + irm = install_manager.InstallRepositoryManager( self.app ) + irm.install_tool_shed_repository( repository, + repo_info_dict, + tool_panel_section_key, + shed_tool_conf, + tool_path, + install_tool_dependencies=True, + reinstalling=True ) + if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.ERROR ]: + repair_dict = add_repair_dict_entry( repository.name, repository.error_message ) + else: + # We have an installed tool shed repository, so handle tool dependencies if necessary. + if repository.missing_tool_dependencies and metadata and 'tool_dependencies' in metadata: + work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itdep" ) + # Reset missing tool dependencies.
+ for tool_dependency in repository.missing_tool_dependencies: + if tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR, + self.app.install_model.ToolDependency.installation_status.INSTALLING ]: + tool_dependency = \ + tool_dependency_util.set_tool_dependency_attributes( self.app, + tool_dependency=tool_dependency, + status=self.app.install_model.ToolDependency.installation_status.UNINSTALLED, + error_message=None, + remove_from_disk=True ) + # Install tool dependencies. + suc.update_tool_shed_repository_status( self.app, + repository, + self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) + # Get the tool_dependencies.xml file from the repository. + tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', repository.repo_path( self.app ) ) + itdm = install_manager.InstallToolDependencyManager( self.app ) + installed_tool_dependencies = itdm.install_specified_tool_dependencies( tool_shed_repository=repository, + tool_dependencies_config=tool_dependencies_config, + tool_dependencies=repository.tool_dependencies, + from_tool_migration_manager=False ) + for installed_tool_dependency in installed_tool_dependencies: + if installed_tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR ]: + repair_dict = add_repair_dict_entry( repository.name, installed_tool_dependency.error_message ) + basic_util.remove_dir( work_dir ) + suc.update_tool_shed_repository_status( self.app, repository, self.app.install_model.ToolShedRepository.installation_status.INSTALLED ) + return repair_dict \ No newline at end of file This diff is so big that we needed to truncate the remainder. Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
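For anyone trying out this changeset, the following is a rough usage sketch; it is not part of the commit itself and is based only on the class and method signatures visible in the diff above. The app argument stands in for a Galaxy application object, and the tool shed URL, repository name, owner, and changeset revision are placeholder values borrowed from the example comments in repair_repository_manager.py.

from tool_shed.galaxy_install.install_manager import InstallRepositoryManager
from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager

def install_and_repair( app ):
    # Placeholder repository coordinates; substitute a real tool shed repository.
    tool_shed_url = 'http://localhost:9009'
    name, owner, changeset_revision = 'package_numpy_1_7', 'test', 'cddd64ecd985'
    install_options = dict( install_repository_dependencies=True,
                            install_tool_dependencies=False )
    # Install the repository (and any repository dependencies) from the tool shed.
    irm = InstallRepositoryManager( app )
    installed_repositories = irm.install( tool_shed_url, name, owner, changeset_revision, install_options )
    # Build a repair plan for each installed repository's dependency hierarchy.
    rrm = RepairRepositoryManager( app )
    for repository in installed_repositories:
        repair_dict = rrm.get_repair_dict( repository )
        print 'Repair plan for %s: %s' % ( repository.name, repair_dict )
    return installed_repositories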