1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/5af80141674f/ Changeset: 5af80141674f User: greg Date: 2014-07-16 20:30:36 Summary: Add a DependencyDisplayer class for Galaxy installs from the Tool Shed. Affected #: 9 files diff -r 3232e68101a46276f07c1f545ccf37c55552f66a -r 5af80141674f5ac3166b6782385c408258606d0a lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -3,16 +3,12 @@ import shutil from admin import AdminGalaxy -from galaxy import eggs from galaxy import web from galaxy import util from galaxy.web.form_builder import CheckboxField -from galaxy.web.framework.helpers import grids -from galaxy.web.framework.helpers import iff from galaxy.util import json from galaxy.model.orm import or_ -import tool_shed.util.shed_util_common as suc import tool_shed.repository_types.util as rt_util from tool_shed.util import common_util @@ -23,11 +19,14 @@ from tool_shed.util import metadata_util from tool_shed.util import readme_util from tool_shed.util import repository_maintenance_util +from tool_shed.util import shed_util_common as suc from tool_shed.util import tool_dependency_util from tool_shed.util import tool_util from tool_shed.util import workflow_util -from tool_shed.util import xml_util + +from tool_shed.galaxy_install import dependency_display from tool_shed.galaxy_install import install_manager + from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager @@ -221,17 +220,20 @@ @web.require_admin def deactivate_or_uninstall_repository( self, trans, **kwd ): """ - Handle all changes when a tool shed repository is being deactivated or uninstalled. 
Notice that if the repository contents include - a file named tool_data_table_conf.xml.sample, its entries are not removed from the defined config.shed_tool_data_table_config. This - is because it becomes a bit complex to determine if other installed repositories include tools that require the same entry. For now - we'll never delete entries from config.shed_tool_data_table_config, but we may choose to do so in the future if it becomes necessary. + Handle all changes when a tool shed repository is being deactivated or uninstalled. Notice + that if the repository contents include a file named tool_data_table_conf.xml.sample, its + entries are not removed from the defined config.shed_tool_data_table_config. This is because + it becomes a bit complex to determine if other installed repositories include tools that + require the same entry. For now we'll never delete entries from config.shed_tool_data_table_config, + but we may choose to do so in the future if it becomes necessary. """ message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) remove_from_disk = kwd.get( 'remove_from_disk', '' ) remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk ) tool_shed_repository = suc.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] ) - shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository ) + shed_tool_conf, tool_path, relative_install_dir = \ + suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository ) if relative_install_dir: if tool_path: relative_install_dir = os.path.join( tool_path, relative_install_dir ) @@ -247,7 +249,10 @@ data_manager_util.remove_from_data_manager( trans.app, tool_shed_repository ) if tool_shed_repository.includes_datatypes: # Deactivate proprietary datatypes. 
- installed_repository_dict = datatype_util.load_installed_datatypes( trans.app, tool_shed_repository, repository_install_dir, deactivate=True ) + installed_repository_dict = datatype_util.load_installed_datatypes( trans.app, + tool_shed_repository, + repository_install_dir, + deactivate=True ) if installed_repository_dict: converter_path = installed_repository_dict.get( 'converter_path' ) if converter_path is not None: @@ -779,12 +784,12 @@ trans.install_model.context.add( repository ) trans.install_model.context.flush() message = "The repository information has been updated." - containers_dict = metadata_util.populate_containers_dict_from_repository_metadata( app=trans.app, - tool_shed_url=tool_shed_url, - tool_path=tool_path, - repository=repository, - reinstalling=False, - required_repo_info_dicts=None ) + dd = dependency_display.DependencyDisplayer( trans.app ) + containers_dict = dd.populate_containers_dict_from_repository_metadata( tool_shed_url=tool_shed_url, + tool_path=tool_path, + repository=repository, + reinstalling=False, + required_repo_info_dicts=None ) return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako', repository=repository, description=description, @@ -1049,6 +1054,7 @@ includes_tool_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_tool_dependencies', False ) ) encoded_repo_info_dicts = util.listify( repo_information_dict.get( 'repo_info_dicts', [] ) ) repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ] + dd = dependency_display.DependencyDisplayer( trans.app ) install_repository_manager = install_manager.InstallRepositoryManager( trans.app ) if ( not includes_tools_for_display_in_tool_panel and kwd.get( 'select_shed_tool_panel_config_button', False ) ) or \ ( includes_tools_for_display_in_tool_panel and kwd.get( 'select_tool_panel_section_button', False ) ): @@ -1149,14 +1155,14 @@ # defined repository (and 
possibly tool) dependencies. In this case, merging will result in newly defined # dependencies to be lost. We pass the updating parameter to make sure merging occurs only when appropriate. containers_dict = \ - install_repository_manager.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, - tool_path=tool_path, - readme_files_dict=readme_files_dict, - installed_repository_dependencies=installed_repository_dependencies, - missing_repository_dependencies=missing_repository_dependencies, - installed_tool_dependencies=installed_tool_dependencies, - missing_tool_dependencies=missing_tool_dependencies, - updating=updating ) + dd.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, + tool_path=tool_path, + readme_files_dict=readme_files_dict, + installed_repository_dependencies=installed_repository_dependencies, + missing_repository_dependencies=missing_repository_dependencies, + installed_tool_dependencies=installed_tool_dependencies, + missing_tool_dependencies=missing_tool_dependencies, + updating=updating ) else: # We're installing a list of repositories, each of which may have tool dependencies or repository dependencies. 
containers_dicts = [] @@ -1184,17 +1190,17 @@ name = dependencies_for_repository_dict.get( 'name', None ) repository_owner = dependencies_for_repository_dict.get( 'repository_owner', None ) containers_dict = \ - install_repository_manager.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, - tool_path=tool_path, - readme_files_dict=None, - installed_repository_dependencies=installed_repository_dependencies, - missing_repository_dependencies=missing_repository_dependencies, - installed_tool_dependencies=installed_tool_dependencies, - missing_tool_dependencies=missing_tool_dependencies, - updating=updating ) + dd.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, + tool_path=tool_path, + readme_files_dict=None, + installed_repository_dependencies=installed_repository_dependencies, + missing_repository_dependencies=missing_repository_dependencies, + installed_tool_dependencies=installed_tool_dependencies, + missing_tool_dependencies=missing_tool_dependencies, + updating=updating ) containers_dicts.append( containers_dict ) # Merge all containers into a single container. - containers_dict = install_repository_manager.merge_containers_dicts_for_new_install( containers_dicts ) + containers_dict = dd.merge_containers_dicts_for_new_install( containers_dicts ) # Handle tool dependencies check box. 
if trans.app.config.tool_dependency_dir is None: if includes_tool_dependencies: @@ -1647,19 +1653,20 @@ original_section_name = '' tool_panel_section_select_field = None shed_tool_conf_select_field = tool_util.build_shed_tool_conf_select_field( trans ) - irm = install_manager.InstallRepositoryManager( trans.app ) + dd = dependency_display.DependencyDisplayer( trans.app ) containers_dict = \ - irm.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, - tool_path=tool_path, - readme_files_dict=readme_files_dict, - installed_repository_dependencies=installed_repository_dependencies, - missing_repository_dependencies=missing_repository_dependencies, - installed_tool_dependencies=installed_tool_dependencies, - missing_tool_dependencies=missing_tool_dependencies, - updating=False ) - # Since we're reinstalling we'll merge the list of missing repository dependencies into the list of installed repository dependencies since each displayed - # repository dependency will display a status, whether installed or missing. - containers_dict = irm.merge_missing_repository_dependencies_to_installed_container( containers_dict ) + dd.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url, + tool_path=tool_path, + readme_files_dict=readme_files_dict, + installed_repository_dependencies=installed_repository_dependencies, + missing_repository_dependencies=missing_repository_dependencies, + installed_tool_dependencies=installed_tool_dependencies, + missing_tool_dependencies=missing_tool_dependencies, + updating=False ) + # Since we're reinstalling we'll merge the list of missing repository dependencies into the list of + # installed repository dependencies since each displayed repository dependency will display a status, + # whether installed or missing. + containers_dict = dd.merge_missing_repository_dependencies_to_installed_container( containers_dict ) # Handle repository dependencies check box. 
install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True ) # Handle tool dependencies check box. @@ -1796,12 +1803,12 @@ status = 'error' shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository ) repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) ) - containers_dict = metadata_util.populate_containers_dict_from_repository_metadata( app=trans.app, - tool_shed_url=tool_shed_url, - tool_path=tool_path, - repository=repository, - reinstalling=False, - required_repo_info_dicts=None ) + dd = dependency_display.DependencyDisplayer( trans.app ) + containers_dict = dd.populate_containers_dict_from_repository_metadata( tool_shed_url=tool_shed_url, + tool_path=tool_path, + repository=repository, + reinstalling=False, + required_repo_info_dicts=None ) return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako', repository=repository, description=repository.description, @@ -1860,7 +1867,8 @@ message = "Error attempting to uninstall tool dependencies: %s" % message status = 'error' else: - message = "These tool dependencies have been uninstalled: %s" % ','.join( td.name for td in tool_dependencies_for_uninstallation ) + message = "These tool dependencies have been uninstalled: %s" % \ + ','.join( td.name for td in tool_dependencies_for_uninstallation ) td_ids = [ trans.security.encode_id( td.id ) for td in tool_shed_repository.tool_dependencies ] return trans.response.send_redirect( web.url_for( controller='admin_toolshed', action='manage_repository_tool_dependencies', diff -r 3232e68101a46276f07c1f545ccf37c55552f66a -r 5af80141674f5ac3166b6782385c408258606d0a lib/galaxy/webapps/tool_shed/controllers/repository.py --- a/lib/galaxy/webapps/tool_shed/controllers/repository.py +++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py @@ -6,16 +6,20 @@ from time import strftime from datetime import date from 
datetime import datetime + from galaxy import util from galaxy import web -from galaxy.util.odict import odict from galaxy.web.base.controller import BaseUIController from galaxy.web.form_builder import CheckboxField from galaxy.web.framework.helpers import grids from galaxy.util import json from galaxy.model.orm import and_ + from tool_shed.capsule import capsule_manager from tool_shed.dependencies.repository import relation_builder + +from tool_shed.galaxy_install import dependency_display + from tool_shed.util import basic_util from tool_shed.util import common_util from tool_shed.util import container_util @@ -24,17 +28,16 @@ from tool_shed.util import metadata_util from tool_shed.util import readme_util from tool_shed.util import repository_maintenance_util -from tool_shed.util import review_util from tool_shed.util import search_util from tool_shed.util import shed_util_common as suc -from tool_shed.util import tool_dependency_util from tool_shed.util import tool_util from tool_shed.util import workflow_util -from tool_shed.galaxy_install import install_manager + from galaxy.webapps.tool_shed.util import ratings_util -import galaxy.tools + import tool_shed.grids.repository_grids as repository_grids import tool_shed.grids.util as grids_util + import tool_shed.repository_types.util as rt_util from galaxy import eggs @@ -2421,9 +2424,8 @@ status = 'warning' else: # Handle messaging for orphan tool dependency definitions. 
- orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( trans, - repository, - metadata ) + dd = dependency_display.DependencyDisplayer( trans.app ) + orphan_message = dd.generate_message_for_orphan_tool_dependencies( repository, metadata ) if orphan_message: message += orphan_message status = 'warning' @@ -3313,7 +3315,8 @@ repository_dependencies = rb.get_repository_dependencies_for_changeset_revision() if str( repository.type ) != rt_util.TOOL_DEPENDENCY_DEFINITION: # Handle messaging for orphan tool dependency definitions. - orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( trans, repository, metadata ) + dd = dependency_display.DependencyDisplayer( trans.app ) + orphan_message = dd.generate_message_for_orphan_tool_dependencies( repository, metadata ) if orphan_message: message += orphan_message status = 'warning' diff -r 3232e68101a46276f07c1f545ccf37c55552f66a -r 5af80141674f5ac3166b6782385c408258606d0a lib/galaxy/webapps/tool_shed/controllers/upload.py --- a/lib/galaxy/webapps/tool_shed/controllers/upload.py +++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py @@ -4,18 +4,21 @@ import tarfile import tempfile import urllib -from galaxy.web.base.controller import BaseUIController + from galaxy import util from galaxy import web from galaxy.datatypes import checkers +from galaxy.web.base.controller import BaseUIController + from tool_shed.dependencies import attribute_handlers +from tool_shed.galaxy_install import dependency_display import tool_shed.repository_types.util as rt_util + from tool_shed.util import basic_util from tool_shed.util import commit_util from tool_shed.util import hg_util from tool_shed.util import metadata_util from tool_shed.util import shed_util_common as suc -from tool_shed.util import tool_dependency_util from tool_shed.util import tool_util from tool_shed.util import xml_util @@ -274,6 +277,7 @@ metadata_dict = repository.metadata_revisions[ 0 ].metadata else: 
metadata_dict = {} + dd = dependency_display.DependencyDisplayer( trans.app ) if str( repository.type ) not in [ rt_util.REPOSITORY_SUITE_DEFINITION, rt_util.TOOL_DEPENDENCY_DEFINITION ]: change_repository_type_message = rt_util.generate_message_for_repository_type_change( trans.app, @@ -288,15 +292,12 @@ # repository), so warning messages are important because orphans are always valid. The repository # owner must be warned in case they did not intend to define an orphan dependency, but simply # provided incorrect information (tool shed, name owner, changeset_revision) for the definition. - orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( trans, - repository, - metadata_dict ) + orphan_message = dd.generate_message_for_orphan_tool_dependencies( repository, metadata_dict ) if orphan_message: message += orphan_message status = 'warning' # Handle messaging for invalid tool dependencies. - invalid_tool_dependencies_message = \ - tool_dependency_util.generate_message_for_invalid_tool_dependencies( metadata_dict ) + invalid_tool_dependencies_message = dd.generate_message_for_invalid_tool_dependencies( metadata_dict ) if invalid_tool_dependencies_message: message += invalid_tool_dependencies_message status = 'error' diff -r 3232e68101a46276f07c1f545ccf37c55552f66a -r 5af80141674f5ac3166b6782385c408258606d0a lib/tool_shed/galaxy_install/dependency_display.py --- /dev/null +++ b/lib/tool_shed/galaxy_install/dependency_display.py @@ -0,0 +1,575 @@ +import logging +import os +import threading + +from tool_shed.util import common_util +from tool_shed.util import container_util +from tool_shed.util import readme_util +from tool_shed.util import shed_util_common as suc +from tool_shed.util import tool_dependency_util + +log = logging.getLogger( __name__ ) + + +class DependencyDisplayer( object ): + + def __init__( self, app ): + self.app = app + + def add_installation_directories_to_tool_dependencies( self, tool_dependencies ): + """ + 
Determine the path to the installation directory for each of the received + tool dependencies. This path will be displayed within the tool dependencies + container on the select_tool_panel_section or reselect_tool_panel_section + pages when installing or reinstalling repositories that contain tools with + the defined tool dependencies. The list of tool dependencies may be associated + with more than a single repository. + """ + for dependency_key, requirements_dict in tool_dependencies.items(): + if dependency_key in [ 'set_environment' ]: + continue + repository_name = requirements_dict.get( 'repository_name', 'unknown' ) + repository_owner = requirements_dict.get( 'repository_owner', 'unknown' ) + changeset_revision = requirements_dict.get( 'changeset_revision', 'unknown' ) + dependency_name = requirements_dict[ 'name' ] + version = requirements_dict[ 'version' ] + type = requirements_dict[ 'type' ] + if self.app.config.tool_dependency_dir: + root_dir = self.app.config.tool_dependency_dir + else: + root_dir = '<set your tool_dependency_dir in your Galaxy configuration file>' + install_dir = os.path.join( root_dir, + dependency_name, + version, + repository_owner, + repository_name, + changeset_revision ) + requirements_dict[ 'install_dir' ] = install_dir + tool_dependencies[ dependency_key ] = requirements_dict + return tool_dependencies + + def generate_message_for_invalid_tool_dependencies( self, metadata_dict ): + """ + Tool dependency definitions can only be invalid if they include a definition for a complex + repository dependency and the repository dependency definition is invalid. This method + retrieves the error message associated with the invalid tool dependency for display in the + caller. 
+ """ + message = '' + if metadata_dict: + invalid_tool_dependencies = metadata_dict.get( 'invalid_tool_dependencies', None ) + if invalid_tool_dependencies: + for td_key, requirement_dict in invalid_tool_dependencies.items(): + error = requirement_dict.get( 'error', None ) + if error: + message = '%s ' % str( error ) + return message + + def generate_message_for_orphan_tool_dependencies( self, repository, metadata_dict ): + """ + The designation of a ToolDependency into the "orphan" category has evolved over time, + and is significantly restricted since the introduction of the TOOL_DEPENDENCY_DEFINITION + repository type. This designation is still critical, however, in that it handles the + case where a repository contains both tools and a tool_dependencies.xml file, but the + definition in the tool_dependencies.xml file is in no way related to anything defined + by any of the contained tool's requirements tag sets. This is important in that it is + often a result of a typo (e.g., dependency name or version) that differs between the tool + dependency definition within the tool_dependencies.xml file and what is defined in the + tool config's <requirements> tag sets. In these cases, the user should be presented with + a warning message, and this warning message is is in fact displayed if the following + is_orphan attribute is True. This is tricky because in some cases it may be intentional, + and tool dependencies that are categorized as "orphan" are in fact valid. + """ + has_orphan_package_dependencies = False + has_orphan_set_environment_dependencies = False + message = '' + package_orphans_str = '' + set_environment_orphans_str = '' + # Tool dependencies are categorized as orphan only if the repository contains tools. 
+ if metadata_dict: + tools = metadata_dict.get( 'tools', [] ) + invalid_tools = metadata_dict.get( 'invalid_tools', [] ) + tool_dependencies = metadata_dict.get( 'tool_dependencies', {} ) + # The use of the orphan_tool_dependencies category in metadata has been deprecated, + # but we still need to check in case the metadata is out of date. + orphan_tool_dependencies = metadata_dict.get( 'orphan_tool_dependencies', {} ) + # Updating should cause no problems here since a tool dependency cannot be included + # in both dictionaries. + tool_dependencies.update( orphan_tool_dependencies ) + if tool_dependencies and ( tools or invalid_tools ): + for td_key, requirements_dict in tool_dependencies.items(): + if td_key == 'set_environment': + # "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}] + for env_requirements_dict in requirements_dict: + name = env_requirements_dict[ 'name' ] + type = env_requirements_dict[ 'type' ] + if self.tool_dependency_is_orphan( type, name, None, tools ): + if not has_orphan_set_environment_dependencies: + has_orphan_set_environment_dependencies = True + set_environment_orphans_str += "<b>* name:</b> %s, <b>type:</b> %s<br/>" % \ + ( str( name ), str( type ) ) + else: + # "R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"} + name = requirements_dict[ 'name' ] + type = requirements_dict[ 'type' ] + version = requirements_dict[ 'version' ] + if self.tool_dependency_is_orphan( type, name, version, tools ): + if not has_orphan_package_dependencies: + has_orphan_package_dependencies = True + package_orphans_str += "<b>* name:</b> %s, <b>type:</b> %s, <b>version:</b> %s<br/>" % \ + ( str( name ), str( type ), str( version ) ) + if has_orphan_package_dependencies: + message += "The settings for <b>name</b>, <b>version</b> and <b>type</b> from a " + message += "contained tool configuration file's <b>requirement</b> tag does not match " + message += "the information for the following tool 
dependency definitions in the " + message += "<b>tool_dependencies.xml</b> file, so these tool dependencies have no " + message += "relationship with any tools within this repository.<br/>" + message += package_orphans_str + if has_orphan_set_environment_dependencies: + message += "The settings for <b>name</b> and <b>type</b> from a contained tool " + message += "configuration file's <b>requirement</b> tag does not match the information " + message += "for the following tool dependency definitions in the <b>tool_dependencies.xml</b> " + message += "file, so these tool dependencies have no relationship with any tools within " + message += "this repository.<br/>" + message += set_environment_orphans_str + return message + + def get_installed_and_missing_tool_dependencies_for_installed_repository( self, repository, all_tool_dependencies ): + """ + Return the lists of installed tool dependencies and missing tool dependencies for a Tool Shed + repository that has been installed into Galaxy. + """ + if all_tool_dependencies: + tool_dependencies = {} + missing_tool_dependencies = {} + for td_key, val in all_tool_dependencies.items(): + if td_key in [ 'set_environment' ]: + for index, td_info_dict in enumerate( val ): + name = td_info_dict[ 'name' ] + version = None + type = td_info_dict[ 'type' ] + tool_dependency = tool_dependency_util.get_tool_dependency_by_name_type_repository( self.app, + repository, + name, + type ) + if tool_dependency: + td_info_dict[ 'repository_id' ] = repository.id + td_info_dict[ 'tool_dependency_id' ] = tool_dependency.id + if tool_dependency.status: + tool_dependency_status = str( tool_dependency.status ) + else: + tool_dependency_status = 'Never installed' + td_info_dict[ 'status' ] = tool_dependency_status + val[ index ] = td_info_dict + if tool_dependency.status == self.app.install_model.ToolDependency.installation_status.INSTALLED: + tool_dependencies[ td_key ] = val + else: + missing_tool_dependencies[ td_key ] = val + else: + name = 
val[ 'name' ] + version = val[ 'version' ] + type = val[ 'type' ] + tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type_repository( self.app, + repository, + name, + version, + type ) + if tool_dependency: + val[ 'repository_id' ] = repository.id + val[ 'tool_dependency_id' ] = tool_dependency.id + if tool_dependency.status: + tool_dependency_status = str( tool_dependency.status ) + else: + tool_dependency_status = 'Never installed' + val[ 'status' ] = tool_dependency_status + if tool_dependency.status == self.app.install_model.ToolDependency.installation_status.INSTALLED: + tool_dependencies[ td_key ] = val + else: + missing_tool_dependencies[ td_key ] = val + else: + tool_dependencies = None + missing_tool_dependencies = None + return tool_dependencies, missing_tool_dependencies + + def merge_containers_dicts_for_new_install( self, containers_dicts ): + """ + When installing one or more tool shed repositories for the first time, the received list of + containers_dicts contains a containers_dict for each repository being installed. Since the + repositories are being installed for the first time, all entries are None except the repository + dependencies and tool dependencies. The entries for missing dependencies are all None since + they have previously been merged into the installed dependencies. This method will merge the + dependencies entries into a single container and return it for display. + """ + new_containers_dict = dict( readme_files=None, + datatypes=None, + missing_repository_dependencies=None, + repository_dependencies=None, + missing_tool_dependencies=None, + tool_dependencies=None, + invalid_tools=None, + valid_tools=None, + workflows=None ) + if containers_dicts: + lock = threading.Lock() + lock.acquire( True ) + try: + repository_dependencies_root_folder = None + tool_dependencies_root_folder = None + # Use a unique folder id (hopefully the following is). 
+ folder_id = 867 + for old_container_dict in containers_dicts: + # Merge repository_dependencies. + old_container_repository_dependencies_root = old_container_dict[ 'repository_dependencies' ] + if old_container_repository_dependencies_root: + if repository_dependencies_root_folder is None: + repository_dependencies_root_folder = container_util.Folder( id=folder_id, + key='root', + label='root', + parent=None ) + folder_id += 1 + repository_dependencies_folder = container_util.Folder( id=folder_id, + key='merged', + label='Repository dependencies', + parent=repository_dependencies_root_folder ) + folder_id += 1 + # The old_container_repository_dependencies_root will be a root folder containing a single sub_folder. + old_container_repository_dependencies_folder = old_container_repository_dependencies_root.folders[ 0 ] + # Change the folder id so it won't confict with others being merged. + old_container_repository_dependencies_folder.id = folder_id + folder_id += 1 + repository_components_tuple = container_util.get_components_from_key( old_container_repository_dependencies_folder.key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) + name = components_list[ 1 ] + # Generate the label by retrieving the repository name. + old_container_repository_dependencies_folder.label = str( name ) + repository_dependencies_folder.folders.append( old_container_repository_dependencies_folder ) + # Merge tool_dependencies. 
+ old_container_tool_dependencies_root = old_container_dict[ 'tool_dependencies' ] + if old_container_tool_dependencies_root: + if tool_dependencies_root_folder is None: + tool_dependencies_root_folder = container_util.Folder( id=folder_id, + key='root', + label='root', + parent=None ) + folder_id += 1 + tool_dependencies_folder = container_util.Folder( id=folder_id, + key='merged', + label='Tool dependencies', + parent=tool_dependencies_root_folder ) + folder_id += 1 + else: + td_list = [ td.listify for td in tool_dependencies_folder.tool_dependencies ] + # The old_container_tool_dependencies_root will be a root folder containing a single sub_folder. + old_container_tool_dependencies_folder = old_container_tool_dependencies_root.folders[ 0 ] + for td in old_container_tool_dependencies_folder.tool_dependencies: + if td.listify not in td_list: + tool_dependencies_folder.tool_dependencies.append( td ) + if repository_dependencies_root_folder: + repository_dependencies_root_folder.folders.append( repository_dependencies_folder ) + new_containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder + if tool_dependencies_root_folder: + tool_dependencies_root_folder.folders.append( tool_dependencies_folder ) + new_containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder + except Exception, e: + log.debug( "Exception in merge_containers_dicts_for_new_install: %s" % str( e ) ) + finally: + lock.release() + return new_containers_dict + + def merge_missing_repository_dependencies_to_installed_container( self, containers_dict ): + """ + Merge the list of missing repository dependencies into the list of installed + repository dependencies. + """ + missing_rd_container_root = containers_dict.get( 'missing_repository_dependencies', None ) + if missing_rd_container_root: + # The missing_rd_container_root will be a root folder containing a single sub_folder. 
+ missing_rd_container = missing_rd_container_root.folders[ 0 ] + installed_rd_container_root = containers_dict.get( 'repository_dependencies', None ) + # The installed_rd_container_root will be a root folder containing a single sub_folder. + if installed_rd_container_root: + installed_rd_container = installed_rd_container_root.folders[ 0 ] + installed_rd_container.label = 'Repository dependencies' + for index, rd in enumerate( missing_rd_container.repository_dependencies ): + # Skip the header row. + if index == 0: + continue + installed_rd_container.repository_dependencies.append( rd ) + installed_rd_container_root.folders = [ installed_rd_container ] + containers_dict[ 'repository_dependencies' ] = installed_rd_container_root + else: + # Change the folder label from 'Missing repository dependencies' to be + # 'Repository dependencies' for display. + root_container = containers_dict[ 'missing_repository_dependencies' ] + for sub_container in root_container.folders: + # There should only be 1 sub-folder. + sub_container.label = 'Repository dependencies' + containers_dict[ 'repository_dependencies' ] = root_container + containers_dict[ 'missing_repository_dependencies' ] = None + return containers_dict + + def merge_missing_tool_dependencies_to_installed_container( self, containers_dict ): + """ + Merge the list of missing tool dependencies into the list of installed tool + dependencies. + """ + missing_td_container_root = containers_dict.get( 'missing_tool_dependencies', None ) + if missing_td_container_root: + # The missing_td_container_root will be a root folder containing a single sub_folder. + missing_td_container = missing_td_container_root.folders[ 0 ] + installed_td_container_root = containers_dict.get( 'tool_dependencies', None ) + # The installed_td_container_root will be a root folder containing a single sub_folder. 
+ if installed_td_container_root: + installed_td_container = installed_td_container_root.folders[ 0 ] + installed_td_container.label = 'Tool dependencies' + for index, td in enumerate( missing_td_container.tool_dependencies ): + # Skip the header row. + if index == 0: + continue + installed_td_container.tool_dependencies.append( td ) + installed_td_container_root.folders = [ installed_td_container ] + containers_dict[ 'tool_dependencies' ] = installed_td_container_root + else: + # Change the folder label from 'Missing tool dependencies' to be + # 'Tool dependencies' for display. + root_container = containers_dict[ 'missing_tool_dependencies' ] + for sub_container in root_container.folders: + # There should only be 1 subfolder. + sub_container.label = 'Tool dependencies' + containers_dict[ 'tool_dependencies' ] = root_container + containers_dict[ 'missing_tool_dependencies' ] = None + return containers_dict + + def populate_containers_dict_for_new_install( self, tool_shed_url, tool_path, readme_files_dict, + installed_repository_dependencies, missing_repository_dependencies, + installed_tool_dependencies, missing_tool_dependencies, + updating=False ): + """ + Return the populated containers for a repository being installed for the first time + or for an installed repository that is being updated and the updates include newly + defined repository (and possibly tool) dependencies. + """ + installed_tool_dependencies, missing_tool_dependencies = \ + self.populate_tool_dependencies_dicts( tool_shed_url=tool_shed_url, + tool_path=tool_path, + repository_installed_tool_dependencies=installed_tool_dependencies, + repository_missing_tool_dependencies=missing_tool_dependencies, + required_repo_info_dicts=None ) + # Most of the repository contents are set to None since we don't yet know what they are. 
+ containers_dict = \
+ container_util.build_repository_containers_for_galaxy( app=self.app,
+ repository=None,
+ datatypes=None,
+ invalid_tools=None,
+ missing_repository_dependencies=missing_repository_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=installed_repository_dependencies,
+ tool_dependencies=installed_tool_dependencies,
+ valid_tools=None,
+ workflows=None,
+ valid_data_managers=None,
+ invalid_data_managers=None,
+ data_managers_errors=None,
+ new_install=True,
+ reinstalling=False )
+ if not updating:
+ # If we are installing a new repository and not updating an installed repository, we can merge
+ # the missing_repository_dependencies container contents to the installed_repository_dependencies
+ # container. When updating an installed repository, merging will result in losing newly defined
+ # dependencies included in the updates.
+ containers_dict = self.merge_missing_repository_dependencies_to_installed_container( containers_dict )
+ # Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container.
+ containers_dict = self.merge_missing_tool_dependencies_to_installed_container( containers_dict )
+ return containers_dict
+
+ def populate_containers_dict_from_repository_metadata( self, tool_shed_url, tool_path, repository, reinstalling=False,
+ required_repo_info_dicts=None ):
+ """
+ Retrieve necessary information from the received repository's metadata to populate the
+ containers_dict for display. This method is called only from Galaxy (not the tool shed)
+ when displaying repository dependencies for installed repositories and when displaying
+ them for uninstalled repositories that are being reinstalled.
+ """
+ metadata = repository.metadata
+ if metadata:
+ # Handle proprietary datatypes.
+ datatypes = metadata.get( 'datatypes', None )
+ # Handle invalid tools.
+ invalid_tools = metadata.get( 'invalid_tools', None ) + # Handle README files. + if repository.has_readme_files: + if reinstalling or repository.status not in \ + [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED, + self.app.install_model.ToolShedRepository.installation_status.INSTALLED ]: + # Since we're reinstalling, we need to send a request to the tool shed to get the README files. + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url ) + params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( repository.name ), + str( repository.owner ), + str( repository.installed_changeset_revision ) ) + url = common_util.url_join( tool_shed_url, + 'repository/get_readme_files%s' % params ) + raw_text = common_util.tool_shed_get( self.app, tool_shed_url, url ) + readme_files_dict = json.from_json_string( raw_text ) + else: + readme_files_dict = readme_util.build_readme_files_dict( self.app, + repository, + repository.changeset_revision, + repository.metadata, tool_path ) + else: + readme_files_dict = None + # Handle repository dependencies. + installed_repository_dependencies, missing_repository_dependencies = \ + self.app.installed_repository_manager.get_installed_and_missing_repository_dependencies( repository ) + # Handle the current repository's tool dependencies. + repository_tool_dependencies = metadata.get( 'tool_dependencies', None ) + # Make sure to display missing tool dependencies as well. 
+ repository_invalid_tool_dependencies = metadata.get( 'invalid_tool_dependencies', None ) + if repository_invalid_tool_dependencies is not None: + if repository_tool_dependencies is None: + repository_tool_dependencies = {} + repository_tool_dependencies.update( repository_invalid_tool_dependencies ) + repository_installed_tool_dependencies, repository_missing_tool_dependencies = \ + self.get_installed_and_missing_tool_dependencies_for_installed_repository( repository, + repository_tool_dependencies ) + if reinstalling: + installed_tool_dependencies, missing_tool_dependencies = \ + self.populate_tool_dependencies_dicts( tool_shed_url, + tool_path, + repository_installed_tool_dependencies, + repository_missing_tool_dependencies, + required_repo_info_dicts ) + else: + installed_tool_dependencies = repository_installed_tool_dependencies + missing_tool_dependencies = repository_missing_tool_dependencies + # Handle valid tools. + valid_tools = metadata.get( 'tools', None ) + # Handle workflows. 
+ workflows = metadata.get( 'workflows', None ) + # Handle Data Managers + valid_data_managers = None + invalid_data_managers = None + data_managers_errors = None + if 'data_manager' in metadata: + valid_data_managers = metadata['data_manager'].get( 'data_managers', None ) + invalid_data_managers = metadata['data_manager'].get( 'invalid_data_managers', None ) + data_managers_errors = metadata['data_manager'].get( 'messages', None ) + containers_dict = \ + container_util.build_repository_containers_for_galaxy( app=self.app, + repository=repository, + datatypes=datatypes, + invalid_tools=invalid_tools, + missing_repository_dependencies=missing_repository_dependencies, + missing_tool_dependencies=missing_tool_dependencies, + readme_files_dict=readme_files_dict, + repository_dependencies=installed_repository_dependencies, + tool_dependencies=installed_tool_dependencies, + valid_tools=valid_tools, + workflows=workflows, + valid_data_managers=valid_data_managers, + invalid_data_managers=invalid_data_managers, + data_managers_errors=data_managers_errors, + new_install=False, + reinstalling=reinstalling ) + else: + containers_dict = dict( datatypes=None, + invalid_tools=None, + readme_files_dict=None, + repository_dependencies=None, + tool_dependencies=None, + valid_tools=None, + workflows=None ) + return containers_dict + + def populate_tool_dependencies_dicts( self, tool_shed_url, tool_path, repository_installed_tool_dependencies, + repository_missing_tool_dependencies, required_repo_info_dicts ): + """ + Return the populated installed_tool_dependencies and missing_tool_dependencies dictionaries + for all repositories defined by entries in the received required_repo_info_dicts. + """ + installed_tool_dependencies = None + missing_tool_dependencies = None + if repository_installed_tool_dependencies is None: + repository_installed_tool_dependencies = {} + else: + # Add the install_dir attribute to the tool_dependencies. 
+ repository_installed_tool_dependencies = \ + self.add_installation_directories_to_tool_dependencies( repository_installed_tool_dependencies ) + if repository_missing_tool_dependencies is None: + repository_missing_tool_dependencies = {} + else: + # Add the install_dir attribute to the tool_dependencies. + repository_missing_tool_dependencies = \ + self.add_installation_directories_to_tool_dependencies( repository_missing_tool_dependencies ) + if required_repo_info_dicts: + # Handle the tool dependencies defined for each of the repository's repository dependencies. + for rid in required_repo_info_dicts: + for name, repo_info_tuple in rid.items(): + description, \ + repository_clone_url, \ + changeset_revision, \ + ctx_rev, \ + repository_owner, \ + repository_dependencies, \ + tool_dependencies = \ + suc.get_repo_info_tuple_contents( repo_info_tuple ) + if tool_dependencies: + # Add the install_dir attribute to the tool_dependencies. + tool_dependencies = self.add_installation_directories_to_tool_dependencies( tool_dependencies ) + # The required_repository may have been installed with a different changeset revision. + required_repository, installed_changeset_revision = \ + suc.repository_was_previously_installed( self.app, + tool_shed_url, + name, + repo_info_tuple, + from_tip=False ) + if required_repository: + required_repository_installed_tool_dependencies, required_repository_missing_tool_dependencies = \ + self.get_installed_and_missing_tool_dependencies_for_installed_repository( required_repository, + tool_dependencies ) + if required_repository_installed_tool_dependencies: + # Add the install_dir attribute to the tool_dependencies. 
+ required_repository_installed_tool_dependencies = \ + self.add_installation_directories_to_tool_dependencies( required_repository_installed_tool_dependencies ) + for td_key, td_dict in required_repository_installed_tool_dependencies.items(): + if td_key not in repository_installed_tool_dependencies: + repository_installed_tool_dependencies[ td_key ] = td_dict + if required_repository_missing_tool_dependencies: + # Add the install_dir attribute to the tool_dependencies. + required_repository_missing_tool_dependencies = \ + self.add_installation_directories_to_tool_dependencies( required_repository_missing_tool_dependencies ) + for td_key, td_dict in required_repository_missing_tool_dependencies.items(): + if td_key not in repository_missing_tool_dependencies: + repository_missing_tool_dependencies[ td_key ] = td_dict + if repository_installed_tool_dependencies: + installed_tool_dependencies = repository_installed_tool_dependencies + if repository_missing_tool_dependencies: + missing_tool_dependencies = repository_missing_tool_dependencies + return installed_tool_dependencies, missing_tool_dependencies + + def tool_dependency_is_orphan( self, type, name, version, tools ): + """ + Determine if the combination of the received type, name and version is defined in the <requirement> + tag for at least one tool in the received list of tools. If not, the tool dependency defined by the + combination is considered an orphan in its repository in the tool shed. 
+ """ + if type == 'package': + if name and version: + for tool_dict in tools: + requirements = tool_dict.get( 'requirements', [] ) + for requirement_dict in requirements: + req_name = requirement_dict.get( 'name', None ) + req_version = requirement_dict.get( 'version', None ) + req_type = requirement_dict.get( 'type', None ) + if req_name == name and req_version == version and req_type == type: + return False + elif type == 'set_environment': + if name: + for tool_dict in tools: + requirements = tool_dict.get( 'requirements', [] ) + for requirement_dict in requirements: + req_name = requirement_dict.get( 'name', None ) + req_type = requirement_dict.get( 'type', None ) + if req_name == name and req_type == type: + return False + return True diff -r 3232e68101a46276f07c1f545ccf37c55552f66a -r 5af80141674f5ac3166b6782385c408258606d0a lib/tool_shed/galaxy_install/install_manager.py --- a/lib/tool_shed/galaxy_install/install_manager.py +++ b/lib/tool_shed/galaxy_install/install_manager.py @@ -3,7 +3,6 @@ import os import sys import tempfile -import threading import traceback from galaxy import exceptions @@ -19,7 +18,6 @@ from tool_shed.util import basic_util from tool_shed.util import common_util -from tool_shed.util import container_util from tool_shed.util import data_manager_util from tool_shed.util import datatype_util from tool_shed.util import encoding_util @@ -225,7 +223,8 @@ installed_packages.append( tool_dependency ) if self.app.config.manage_dependency_relationships: # Add the tool_dependency to the in-memory dictionaries in the installed_repository_manager. 
- self.app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository, tool_dependency ) + self.app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository, + tool_dependency ) return installed_packages def install_via_fabric( self, tool_shed_repository, tool_dependency, install_dir, package_name=None, custom_fabfile_path=None, @@ -371,8 +370,8 @@ # Delete contents of installation directory if attempt at binary installation failed. installation_directory_contents = os.listdir( installation_directory ) if installation_directory_contents: - removed, error_message = tool_dependency_util.remove_tool_dependency( self.app, - tool_dependency ) + removed, error_message = \ + tool_dependency_util.remove_tool_dependency( self.app, tool_dependency ) if removed: can_install_from_source = True else: @@ -894,7 +893,8 @@ tool_shed_repository, install_model.ToolShedRepository.installation_status.INSTALLED ) if self.app.config.manage_dependency_relationships: - # Add the installed repository and any tool dependencies to the in-memory dictionaries in the installed_repository_manager. + # Add the installed repository and any tool dependencies to the in-memory dictionaries + # in the installed_repository_manager. self.app.installed_repository_manager.handle_repository_install( tool_shed_repository ) else: # An error occurred while cloning the repository, so reset everything necessary to enable another attempt. @@ -906,147 +906,6 @@ uninstalled=False, remove_from_disk=True ) - def merge_containers_dicts_for_new_install( self, containers_dicts ): - """ - When installing one or more tool shed repositories for the first time, the received list of - containers_dicts contains a containers_dict for each repository being installed. Since the - repositories are being installed for the first time, all entries are None except the repository - dependencies and tool dependencies. 
The entries for missing dependencies are all None since - they have previously been merged into the installed dependencies. This method will merge the - dependencies entries into a single container and return it for display. - """ - new_containers_dict = dict( readme_files=None, - datatypes=None, - missing_repository_dependencies=None, - repository_dependencies=None, - missing_tool_dependencies=None, - tool_dependencies=None, - invalid_tools=None, - valid_tools=None, - workflows=None ) - if containers_dicts: - lock = threading.Lock() - lock.acquire( True ) - try: - repository_dependencies_root_folder = None - tool_dependencies_root_folder = None - # Use a unique folder id (hopefully the following is). - folder_id = 867 - for old_container_dict in containers_dicts: - # Merge repository_dependencies. - old_container_repository_dependencies_root = old_container_dict[ 'repository_dependencies' ] - if old_container_repository_dependencies_root: - if repository_dependencies_root_folder is None: - repository_dependencies_root_folder = container_util.Folder( id=folder_id, - key='root', - label='root', - parent=None ) - folder_id += 1 - repository_dependencies_folder = container_util.Folder( id=folder_id, - key='merged', - label='Repository dependencies', - parent=repository_dependencies_root_folder ) - folder_id += 1 - # The old_container_repository_dependencies_root will be a root folder containing a single sub_folder. - old_container_repository_dependencies_folder = old_container_repository_dependencies_root.folders[ 0 ] - # Change the folder id so it won't confict with others being merged. - old_container_repository_dependencies_folder.id = folder_id - folder_id += 1 - repository_components_tuple = container_util.get_components_from_key( old_container_repository_dependencies_folder.key ) - components_list = suc.extract_components_from_tuple( repository_components_tuple ) - name = components_list[ 1 ] - # Generate the label by retrieving the repository name. 
- old_container_repository_dependencies_folder.label = str( name ) - repository_dependencies_folder.folders.append( old_container_repository_dependencies_folder ) - # Merge tool_dependencies. - old_container_tool_dependencies_root = old_container_dict[ 'tool_dependencies' ] - if old_container_tool_dependencies_root: - if tool_dependencies_root_folder is None: - tool_dependencies_root_folder = container_util.Folder( id=folder_id, - key='root', - label='root', - parent=None ) - folder_id += 1 - tool_dependencies_folder = container_util.Folder( id=folder_id, - key='merged', - label='Tool dependencies', - parent=tool_dependencies_root_folder ) - folder_id += 1 - else: - td_list = [ td.listify for td in tool_dependencies_folder.tool_dependencies ] - # The old_container_tool_dependencies_root will be a root folder containing a single sub_folder. - old_container_tool_dependencies_folder = old_container_tool_dependencies_root.folders[ 0 ] - for td in old_container_tool_dependencies_folder.tool_dependencies: - if td.listify not in td_list: - tool_dependencies_folder.tool_dependencies.append( td ) - if repository_dependencies_root_folder: - repository_dependencies_root_folder.folders.append( repository_dependencies_folder ) - new_containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder - if tool_dependencies_root_folder: - tool_dependencies_root_folder.folders.append( tool_dependencies_folder ) - new_containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder - except Exception, e: - log.debug( "Exception in merge_containers_dicts_for_new_install: %s" % str( e ) ) - finally: - lock.release() - return new_containers_dict - - def merge_missing_repository_dependencies_to_installed_container( self, containers_dict ): - """Merge the list of missing repository dependencies into the list of installed repository dependencies.""" - missing_rd_container_root = containers_dict.get( 'missing_repository_dependencies', None ) - if 
missing_rd_container_root: - # The missing_rd_container_root will be a root folder containing a single sub_folder. - missing_rd_container = missing_rd_container_root.folders[ 0 ] - installed_rd_container_root = containers_dict.get( 'repository_dependencies', None ) - # The installed_rd_container_root will be a root folder containing a single sub_folder. - if installed_rd_container_root: - installed_rd_container = installed_rd_container_root.folders[ 0 ] - installed_rd_container.label = 'Repository dependencies' - for index, rd in enumerate( missing_rd_container.repository_dependencies ): - # Skip the header row. - if index == 0: - continue - installed_rd_container.repository_dependencies.append( rd ) - installed_rd_container_root.folders = [ installed_rd_container ] - containers_dict[ 'repository_dependencies' ] = installed_rd_container_root - else: - # Change the folder label from 'Missing repository dependencies' to be 'Repository dependencies' for display. - root_container = containers_dict[ 'missing_repository_dependencies' ] - for sub_container in root_container.folders: - # There should only be 1 sub-folder. - sub_container.label = 'Repository dependencies' - containers_dict[ 'repository_dependencies' ] = root_container - containers_dict[ 'missing_repository_dependencies' ] = None - return containers_dict - - def merge_missing_tool_dependencies_to_installed_container( self, containers_dict ): - """ Merge the list of missing tool dependencies into the list of installed tool dependencies.""" - missing_td_container_root = containers_dict.get( 'missing_tool_dependencies', None ) - if missing_td_container_root: - # The missing_td_container_root will be a root folder containing a single sub_folder. - missing_td_container = missing_td_container_root.folders[ 0 ] - installed_td_container_root = containers_dict.get( 'tool_dependencies', None ) - # The installed_td_container_root will be a root folder containing a single sub_folder. 
- if installed_td_container_root: - installed_td_container = installed_td_container_root.folders[ 0 ] - installed_td_container.label = 'Tool dependencies' - for index, td in enumerate( missing_td_container.tool_dependencies ): - # Skip the header row. - if index == 0: - continue - installed_td_container.tool_dependencies.append( td ) - installed_td_container_root.folders = [ installed_td_container ] - containers_dict[ 'tool_dependencies' ] = installed_td_container_root - else: - # Change the folder label from 'Missing tool dependencies' to be 'Tool dependencies' for display. - root_container = containers_dict[ 'missing_tool_dependencies' ] - for sub_container in root_container.folders: - # There should only be 1 subfolder. - sub_container.label = 'Tool dependencies' - containers_dict[ 'tool_dependencies' ] = root_container - containers_dict[ 'missing_tool_dependencies' ] = None - return containers_dict - def order_components_for_installation( self, tsr_ids, repo_info_dicts, tool_panel_section_keys ): """ Some repositories may have repository dependencies that are required to be installed @@ -1096,45 +955,3 @@ ordered_repo_info_dicts.append( repo_info_dict ) ordered_tool_panel_section_keys.append( tool_panel_section_key ) return ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys - - def populate_containers_dict_for_new_install( self, tool_shed_url, tool_path, readme_files_dict, installed_repository_dependencies, - missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies, - updating=False ): - """ - Return the populated containers for a repository being installed for the first time or for an installed repository - that is being updated and the updates include newly defined repository (and possibly tool) dependencies. 
- """ - installed_tool_dependencies, missing_tool_dependencies = \ - tool_dependency_util.populate_tool_dependencies_dicts( app=self.app, - tool_shed_url=tool_shed_url, - tool_path=tool_path, - repository_installed_tool_dependencies=installed_tool_dependencies, - repository_missing_tool_dependencies=missing_tool_dependencies, - required_repo_info_dicts=None ) - # Most of the repository contents are set to None since we don't yet know what they are. - containers_dict = \ - container_util.build_repository_containers_for_galaxy( app=self.app, - repository=None, - datatypes=None, - invalid_tools=None, - missing_repository_dependencies=missing_repository_dependencies, - missing_tool_dependencies=missing_tool_dependencies, - readme_files_dict=readme_files_dict, - repository_dependencies=installed_repository_dependencies, - tool_dependencies=installed_tool_dependencies, - valid_tools=None, - workflows=None, - valid_data_managers=None, - invalid_data_managers=None, - data_managers_errors=None, - new_install=True, - reinstalling=False ) - if not updating: - # If we installing a new repository and not updaing an installed repository, we can merge - # the missing_repository_dependencies container contents to the installed_repository_dependencies - # container. When updating an installed repository, merging will result in losing newly defined - # dependencies included in the updates. - containers_dict = self.merge_missing_repository_dependencies_to_installed_container( containers_dict ) - # Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container. 
- containers_dict = self.merge_missing_tool_dependencies_to_installed_container( containers_dict ) - return containers_dict diff -r 3232e68101a46276f07c1f545ccf37c55552f66a -r 5af80141674f5ac3166b6782385c408258606d0a lib/tool_shed/galaxy_install/installed_repository_manager.py --- a/lib/tool_shed/galaxy_install/installed_repository_manager.py +++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py @@ -395,7 +395,13 @@ installed_repository_dependencies = {} missing_rd_tups = [] installed_rd_tups = [] - description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ + description, \ + repository_clone_url, \ + changeset_revision, \ + ctx_rev, \ + repository_owner, \ + repository_dependencies, \ + tool_dependencies = \ suc.get_repo_info_tuple_contents( repo_info_tuple ) if repository_dependencies: description = repository_dependencies[ 'description' ] @@ -614,6 +620,30 @@ type = str( tool_dependency.type ) return ( tool_dependency.tool_shed_repository_id, str( tool_dependency.name ), str( tool_dependency.version ), type ) + def handle_existing_tool_dependencies_that_changed_in_update( self, repository, original_dependency_dict, + new_dependency_dict ): + """ + This method is called when a Galaxy admin is getting updates for an installed tool shed + repository in order to cover the case where an existing tool dependency was changed (e.g., + the version of the dependency was changed) but the tool version for which it is a dependency + was not changed. In this case, we only want to determine if any of the dependency information + defined in original_dependency_dict was changed in new_dependency_dict. We don't care if new + dependencies were added in new_dependency_dict since they will just be treated as missing + dependencies for the tool. 
+ """ + updated_tool_dependency_names = [] + deleted_tool_dependency_names = [] + for original_dependency_key, original_dependency_val_dict in original_dependency_dict.items(): + if original_dependency_key not in new_dependency_dict: + updated_tool_dependency = self.update_existing_tool_dependency( repository, + original_dependency_val_dict, + new_dependency_dict ) + if updated_tool_dependency: + updated_tool_dependency_names.append( updated_tool_dependency.name ) + else: + deleted_tool_dependency_names.append( original_dependency_val_dict[ 'name' ] ) + return updated_tool_dependency_names, deleted_tool_dependency_names + def handle_repository_install( self, repository ): """Load the dependency relationships for a repository that was just installed or reinstalled.""" # Populate self.repository_dependencies_of_installed_repositories. @@ -924,3 +954,68 @@ repository_dependency.changeset_revision == changeset_revision ): return True return False + + def update_existing_tool_dependency( self, repository, original_dependency_dict, new_dependencies_dict ): + """ + Update an exsiting tool dependency whose definition was updated in a change set + pulled by a Galaxy administrator when getting updates to an installed tool shed + repository. The original_dependency_dict is a single tool dependency definition, + an example of which is:: + + {"name": "bwa", + "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ", + "type": "package", + "version": "0.6.2"} + + The new_dependencies_dict is the dictionary generated by the metadata_util.generate_tool_dependency_metadata method. + """ + new_tool_dependency = None + original_name = original_dependency_dict[ 'name' ] + original_type = original_dependency_dict[ 'type' ] + original_version = original_dependency_dict[ 'version' ] + # Locate the appropriate tool_dependency associated with the repository. 
+ tool_dependency = None + for tool_dependency in repository.tool_dependencies: + if tool_dependency.name == original_name and \ + tool_dependency.type == original_type and \ + tool_dependency.version == original_version: + break + if tool_dependency and tool_dependency.can_update: + dependency_install_dir = tool_dependency.installation_directory( self.app ) + removed_from_disk, error_message = \ + tool_dependency_util.remove_tool_dependency_installation_directory( dependency_install_dir ) + if removed_from_disk: + context = self.app.install_model.context + new_dependency_name = None + new_dependency_type = None + new_dependency_version = None + for new_dependency_key, new_dependency_val_dict in new_dependencies_dict.items(): + # Match on name only, hopefully this will be enough! + if original_name == new_dependency_val_dict[ 'name' ]: + new_dependency_name = new_dependency_val_dict[ 'name' ] + new_dependency_type = new_dependency_val_dict[ 'type' ] + new_dependency_version = new_dependency_val_dict[ 'version' ] + break + if new_dependency_name and new_dependency_type and new_dependency_version: + # Update all attributes of the tool_dependency record in the database. + log.debug( "Updating version %s of tool dependency %s %s to have new version %s and type %s." % \ + ( str( tool_dependency.version ), + str( tool_dependency.type ), + str( tool_dependency.name ), + str( new_dependency_version ), + str( new_dependency_type ) ) ) + tool_dependency.type = new_dependency_type + tool_dependency.version = new_dependency_version + tool_dependency.status = self.app.install_model.ToolDependency.installation_status.UNINSTALLED + tool_dependency.error_message = None + context.add( tool_dependency ) + context.flush() + new_tool_dependency = tool_dependency + else: + # We have no new tool dependency definition based on a matching dependency name, so remove + # the existing tool dependency record from the database. 
+ log.debug( "Deleting version %s of tool dependency %s %s from the database since it is no longer defined." % \ + ( str( tool_dependency.version ), str( tool_dependency.type ), str( tool_dependency.name ) ) ) + context.delete( tool_dependency ) + context.flush() + return new_tool_dependency diff -r 3232e68101a46276f07c1f545ccf37c55552f66a -r 5af80141674f5ac3166b6782385c408258606d0a lib/tool_shed/galaxy_install/tool_dependencies/recipe/tag_handler.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/tag_handler.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/tag_handler.py @@ -25,7 +25,89 @@ raise "Unimplemented Method" -class Install( RecipeTag ): +class SyncDatabase( object ): + + def sync_database_with_file_system( self, app, tool_shed_repository, tool_dependency_name, tool_dependency_version, + tool_dependency_install_dir, tool_dependency_type='package' ): + """ + The installation directory defined by the received tool_dependency_install_dir exists, so check for + the presence of INSTALLATION_LOG. If the files exists, we'll assume the tool dependency is installed, + but not necessarily successfully (it could be in an error state on disk. However, we can justifiably + assume here that no matter the state, an associated database record will exist. + """ + # This method should be reached very rarely. It implies that either the Galaxy environment + # became corrupted (i.e., the database records for installed tool dependencies is not synchronized + # with tool dependencies on disk) or the Tool Shed's install and test framework is running. The Tool + # Shed's install and test framework installs repositories in 2 stages, those of type tool_dependency_definition + # followed by those containing valid tools and tool functional test components. + log.debug( "Synchronizing the database with the file system..." 
) + try: + log.debug( "The value of app.config.running_functional_tests is: %s" % \ + str( app.config.running_functional_tests ) ) + except: + pass + sa_session = app.install_model.context + can_install_tool_dependency = False + tool_dependency = get_tool_dependency_by_name_version_type_repository( app, + tool_shed_repository, + tool_dependency_name, + tool_dependency_version, + tool_dependency_type ) + if tool_dependency.status == app.install_model.ToolDependency.installation_status.INSTALLING: + # The tool dependency is in an Installing state, so we don't want to do anything to it. If the tool + # dependency is being installed by someone else, we don't want to interfere with that. This assumes + # the installation by "someone else" is not hung in an Installing state, which is a weakness if that + # "someone else" never repaired it. + log.debug( 'Skipping installation of tool dependency %s version %s because it has a status of %s' % \ + ( str( tool_dependency.name ), str( tool_dependency.version ), str( tool_dependency.status ) ) ) + else: + # We have a pre-existing installation directory on the file system, but our associated database record is + # in a state that allowed us to arrive here. At this point, we'll inspect the installation directory to + # see if we have a "proper installation" and if so, synchronize the database record rather than reinstalling + # the dependency if we're "running_functional_tests". If we're not "running_functional_tests, we'll set + # the tool dependency's installation status to ERROR. + tool_dependency_installation_directory_contents = os.listdir( tool_dependency_install_dir ) + if basic_util.INSTALLATION_LOG in tool_dependency_installation_directory_contents: + # Since this tool dependency's installation directory contains an installation log, we consider it to be + # installed. In some cases the record may be missing from the database due to some activity outside of + # the control of the Tool Shed. 
Since a new record was created for it and we don't know the state of the + # files on disk, we will set it to an error state (unless we are running Tool Shed functional tests - see + # below). + log.debug( 'Skipping installation of tool dependency %s version %s because it is installed in %s' % \ + ( str( tool_dependency.name ), str( tool_dependency.version ), str( tool_dependency_install_dir ) ) ) + if app.config.running_functional_tests: + # If we are running functional tests, the state will be set to Installed because previously compiled + # tool dependencies are not deleted by default, from the "install and test" framework.. + tool_dependency.status = app.install_model.ToolDependency.installation_status.INSTALLED + else: + error_message = 'The installation directory for this tool dependency had contents but the database had no record. ' + error_message += 'The installation log may show this tool dependency to be correctly installed, but due to the ' + error_message += 'missing database record it is now being set to Error.' + tool_dependency.status = app.install_model.ToolDependency.installation_status.ERROR + tool_dependency.error_message = error_message + else: + error_message = '\nInstallation path %s for tool dependency %s version %s exists, but the expected file %s' % \ + ( str( tool_dependency_install_dir ), + str( tool_dependency_name ), + str( tool_dependency_version ), + str( basic_util.INSTALLATION_LOG ) ) + error_message += ' is missing. This indicates an installation error so the tool dependency is being' + error_message += ' prepared for re-installation.' + print error_message + tool_dependency.status = app.install_model.ToolDependency.installation_status.NEVER_INSTALLED + basic_util.remove_dir( tool_dependency_install_dir ) + can_install_tool_dependency = True + sa_session.add( tool_dependency ) + sa_session.flush() + try: + log.debug( "Returning from sync_database_with_file_system with tool_dependency %s, can_install_tool_dependency %s." 
% \ + ( str( tool_dependency.name ), str( can_install_tool_dependency ) ) ) + except Exception, e: + log.debug( str( e ) ) + return tool_dependency, can_install_tool_dependency + + +class Install( RecipeTag, SyncDatabase ): def __init__( self, app ): self.app = app @@ -57,13 +139,12 @@ proceed_with_install = True else: # Notice that we'll throw away the following tool_dependency if it can be installed. - tool_dependency, proceed_with_install = \ - tool_dependency_util.sync_database_with_file_system( self.app, - tool_shed_repository, - package_name, - package_version, - install_dir, - tool_dependency_type='package' ) + tool_dependency, proceed_with_install = self.sync_database_with_file_system( self.app, + tool_shed_repository, + package_name, + package_version, + install_dir, + tool_dependency_type='package' ) if not proceed_with_install: log.debug( "Tool dependency %s version %s cannot be installed (it was probably previously installed), so returning it." % \ ( str( tool_dependency.name ), str( tool_dependency.version ) ) ) @@ -156,7 +237,7 @@ return tool_dependency, proceed_with_install, action_elem_tuples -class Repository( RecipeTag ): +class Repository( RecipeTag, SyncDatabase ): def __init__( self, app ): self.app = app @@ -190,7 +271,8 @@ return None def create_tool_dependency_with_initialized_env_sh_file( self, dependent_install_dir, tool_shed_repository, - required_repository, package_name, package_version, tool_dependencies_config ): + required_repository, package_name, package_version, + tool_dependencies_config ): """ Create or get a tool_dependency record that is defined by the received package_name and package_version. An env.sh file will be created for the tool_dependency in the received dependent_install_dir. @@ -238,10 +320,9 @@ # the path defined by required_tool_dependency_env_file_path. It doesn't matter if the required env.sh # file currently exists.. 
required_tool_dependency_env_file_path = \ - tool_dependency_util.get_required_repository_package_env_sh_path( self.app, - package_name, - package_version, - required_repository ) + self.get_required_repository_package_env_sh_path( package_name, + package_version, + required_repository ) env_file_builder = EnvFileBuilder( tool_dependency.installation_directory( self.app ) ) env_file_builder.append_line( action="source", value=required_tool_dependency_env_file_path ) return_code = env_file_builder.return_code @@ -269,6 +350,19 @@ tool_dependencies.append( tool_dependency ) return tool_dependencies + def get_required_repository_package_env_sh_path( self, package_name, package_version, required_repository ): + """Return path to env.sh file in required repository if the required repository has been installed.""" + env_sh_file_dir = \ + tool_dependency_util.get_tool_dependency_install_dir( app=self.app, + repository_name=required_repository.name, + repository_owner=required_repository.owner, + repository_changeset_revision=required_repository.installed_changeset_revision, + tool_dependency_type='package', + tool_dependency_name=package_name, + tool_dependency_version=package_version ) + env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' ) + return env_sh_file_path + def get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( self, tool_shed_url, name, owner, changeset_revision ): sa_session = self.app.install_model.context # The protocol is not stored, but the port is if it exists. @@ -352,13 +446,12 @@ can_install_tool_dependency = True else: # Notice that we'll throw away the following tool_dependency if it can be installed. 
- tool_dependency, can_install_tool_dependency = \ - tool_dependency_util.sync_database_with_file_system( self.app, - tool_shed_repository, - package_name, - package_version, - dependent_install_dir, - tool_dependency_type='package' ) + tool_dependency, can_install_tool_dependency = self.sync_database_with_file_system( self.app, + tool_shed_repository, + package_name, + package_version, + dependent_install_dir, + tool_dependency_type='package' ) if not can_install_tool_dependency: log.debug( "Tool dependency %s version %s cannot be installed (it was probably previously installed), " % \ ( str( tool_dependency.name, str( tool_dependency.version ) ) ) ) diff -r 3232e68101a46276f07c1f545ccf37c55552f66a -r 5af80141674f5ac3166b6782385c408258606d0a lib/tool_shed/util/metadata_util.py --- a/lib/tool_shed/util/metadata_util.py +++ b/lib/tool_shed/util/metadata_util.py @@ -3,7 +3,6 @@ import tempfile from galaxy import util -from galaxy import web from galaxy.datatypes import checkers from galaxy.model.orm import and_ from galaxy.tools.data_manager.manager import DataManager @@ -11,17 +10,17 @@ from galaxy.util import json from galaxy.web import url_for -import tool_shed.util.shed_util_common as suc from tool_shed.repository_types.metadata import TipOnly +import tool_shed.repository_types.util as rt_util + from tool_shed.util import basic_util from tool_shed.util import common_util -from tool_shed.util import container_util from tool_shed.util import hg_util from tool_shed.util import readme_util +from tool_shed.util import shed_util_common as suc from tool_shed.util import tool_dependency_util from tool_shed.util import tool_util from tool_shed.util import xml_util -import tool_shed.repository_types.util as rt_util log = logging.getLogger( __name__ ) @@ -1013,7 +1012,11 @@ description = root.get( 'description' ) for elem in root: if elem.tag == 'package': - valid_tool_dependencies_dict, invalid_tool_dependencies_dict, repository_dependency_tup, 
repository_dependency_is_valid, message = \ + valid_tool_dependencies_dict, \ + invalid_tool_dependencies_dict, \ + repository_dependency_tup, \ + repository_dependency_is_valid, \ + message = \ generate_package_dependency_metadata( app, elem, valid_tool_dependencies_dict, invalid_tool_dependencies_dict ) if repository_dependency_is_valid: if repository_dependency_tup and repository_dependency_tup not in valid_repository_dependency_tups: @@ -1024,7 +1027,13 @@ # We have an invalid complex repository dependency, so mark the tool dependency as invalid. tool_dependency_is_valid = False # Append the error message to the invalid repository dependency tuple. - toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = repository_dependency_tup + toolshed, \ + name, \ + owner, \ + changeset_revision, \ + prior_installation_required, \ + only_if_compiling_contained_td \ + = repository_dependency_tup repository_dependency_tup = \ ( toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, message ) invalid_repository_dependency_tups.append( repository_dependency_tup ) @@ -1033,9 +1042,13 @@ valid_tool_dependencies_dict = generate_environment_dependency_metadata( elem, valid_tool_dependencies_dict ) if valid_tool_dependencies_dict: if original_valid_tool_dependencies_dict: - # We're generating metadata on an update pulled to a tool shed repository installed into a Galaxy instance, so handle changes to - # tool dependencies appropriately. - handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_valid_tool_dependencies_dict, valid_tool_dependencies_dict ) + # We're generating metadata on an update pulled to a tool shed repository installed + # into a Galaxy instance, so handle changes to tool dependencies appropriately. 
+ irm = app.installed_repository_manager + updated_tool_dependency_names, deleted_tool_dependency_names = \ + irm.handle_existing_tool_dependencies_that_changed_in_update( repository, + original_valid_tool_dependencies_dict, + valid_tool_dependencies_dict ) metadata_dict[ 'tool_dependencies' ] = valid_tool_dependencies_dict if invalid_tool_dependencies_dict: metadata_dict[ 'invalid_tool_dependencies' ] = invalid_tool_dependencies_dict @@ -1296,25 +1309,6 @@ sample_file_metadata_paths.append( relative_path_to_sample_file ) return sample_file_metadata_paths, sample_file_copy_paths -def handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_dependency_dict, new_dependency_dict ): - """ - This method is called when a Galaxy admin is getting updates for an installed tool shed repository in order to cover the case where an - existing tool dependency was changed (e.g., the version of the dependency was changed) but the tool version for which it is a dependency - was not changed. In this case, we only want to determine if any of the dependency information defined in original_dependency_dict was - changed in new_dependency_dict. We don't care if new dependencies were added in new_dependency_dict since they will just be treated as - missing dependencies for the tool. 
- """ - updated_tool_dependency_names = [] - deleted_tool_dependency_names = [] - for original_dependency_key, original_dependency_val_dict in original_dependency_dict.items(): - if original_dependency_key not in new_dependency_dict: - updated_tool_dependency = update_existing_tool_dependency( app, repository, original_dependency_val_dict, new_dependency_dict ) - if updated_tool_dependency: - updated_tool_dependency_names.append( updated_tool_dependency.name ) - else: - deleted_tool_dependency_names.append( original_dependency_val_dict[ 'name' ] ) - return updated_tool_dependency_names, deleted_tool_dependency_names - def handle_repository_elem( app, repository_elem, only_if_compiling_contained_td=False, updating_installed_repository=False ): """ Process the received repository_elem which is a <repository> tag either from a repository_dependencies.xml @@ -1737,104 +1731,6 @@ # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed. return False -def populate_containers_dict_from_repository_metadata( app, tool_shed_url, tool_path, repository, - reinstalling=False, required_repo_info_dicts=None ): - """ - Retrieve necessary information from the received repository's metadata to populate the - containers_dict for display. This method is called only from Galaxy (not the tool shed) - when displaying repository dependencies for installed repositories and when displaying - them for uninstalled repositories that are being reinstalled. - """ - metadata = repository.metadata - if metadata: - # Handle proprietary datatypes. - datatypes = metadata.get( 'datatypes', None ) - # Handle invalid tools. - invalid_tools = metadata.get( 'invalid_tools', None ) - # Handle README files. 
- if repository.has_readme_files: - if reinstalling or repository.status not in [ app.install_model.ToolShedRepository.installation_status.DEACTIVATED, - app.install_model.ToolShedRepository.installation_status.INSTALLED ]: - # Since we're reinstalling, we need to send a request to the tool shed to get the README files. - tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url ) - params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( repository.name ), - str( repository.owner ), - str( repository.installed_changeset_revision ) ) - url = common_util.url_join( tool_shed_url, - 'repository/get_readme_files%s' % params ) - raw_text = common_util.tool_shed_get( app, tool_shed_url, url ) - readme_files_dict = json.from_json_string( raw_text ) - else: - readme_files_dict = readme_util.build_readme_files_dict( app, - repository, - repository.changeset_revision, - repository.metadata, tool_path ) - else: - readme_files_dict = None - # Handle repository dependencies. - installed_repository_dependencies, missing_repository_dependencies = \ - app.installed_repository_manager.get_installed_and_missing_repository_dependencies( repository ) - # Handle the current repository's tool dependencies. - repository_tool_dependencies = metadata.get( 'tool_dependencies', None ) - # Make sure to display missing tool dependencies as well. 
- repository_invalid_tool_dependencies = metadata.get( 'invalid_tool_dependencies', None ) - if repository_invalid_tool_dependencies is not None: - if repository_tool_dependencies is None: - repository_tool_dependencies = {} - repository_tool_dependencies.update( repository_invalid_tool_dependencies ) - repository_installed_tool_dependencies, repository_missing_tool_dependencies = \ - tool_dependency_util.get_installed_and_missing_tool_dependencies_for_installed_repository( app, - repository, - repository_tool_dependencies ) - if reinstalling: - installed_tool_dependencies, missing_tool_dependencies = \ - tool_dependency_util.populate_tool_dependencies_dicts( app, - tool_shed_url, - tool_path, - repository_installed_tool_dependencies, - repository_missing_tool_dependencies, - required_repo_info_dicts ) - else: - installed_tool_dependencies = repository_installed_tool_dependencies - missing_tool_dependencies = repository_missing_tool_dependencies - # Handle valid tools. - valid_tools = metadata.get( 'tools', None ) - # Handle workflows. 
- workflows = metadata.get( 'workflows', None ) - # Handle Data Managers - valid_data_managers = None - invalid_data_managers = None - data_managers_errors = None - if 'data_manager' in metadata: - valid_data_managers = metadata['data_manager'].get( 'data_managers', None ) - invalid_data_managers = metadata['data_manager'].get( 'invalid_data_managers', None ) - data_managers_errors = metadata['data_manager'].get( 'messages', None ) - containers_dict = container_util.build_repository_containers_for_galaxy( app=app, - repository=repository, - datatypes=datatypes, - invalid_tools=invalid_tools, - missing_repository_dependencies=missing_repository_dependencies, - missing_tool_dependencies=missing_tool_dependencies, - readme_files_dict=readme_files_dict, - repository_dependencies=installed_repository_dependencies, - tool_dependencies=installed_tool_dependencies, - valid_tools=valid_tools, - workflows=workflows, - valid_data_managers=valid_data_managers, - invalid_data_managers=invalid_data_managers, - data_managers_errors=data_managers_errors, - new_install=False, - reinstalling=reinstalling ) - else: - containers_dict = dict( datatypes=None, - invalid_tools=None, - readme_files_dict=None, - repository_dependencies=None, - tool_dependencies=None, - valid_tools=None, - workflows=None ) - return containers_dict - def reset_all_metadata_on_installed_repository( app, id ): """Reset all metadata on a single tool shed repository installed into a Galaxy instance.""" invalid_file_tups = [] @@ -2201,68 +2097,6 @@ error_message, status = set_repository_metadata( app, host, user, repository, content_alert_str=content_alert_str, **kwd ) return status, error_message -def update_existing_tool_dependency( app, repository, original_dependency_dict, new_dependencies_dict ): - """ - Update an exsiting tool dependency whose definition was updated in a change set - pulled by a Galaxy administrator when getting updates to an installed tool shed - repository. 
The original_dependency_dict is a single tool dependency definition, - an example of which is:: - - {"name": "bwa", - "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ", - "type": "package", - "version": "0.6.2"} - - The new_dependencies_dict is the dictionary generated by the metadata_util.generate_tool_dependency_metadata method. - """ - new_tool_dependency = None - original_name = original_dependency_dict[ 'name' ] - original_type = original_dependency_dict[ 'type' ] - original_version = original_dependency_dict[ 'version' ] - # Locate the appropriate tool_dependency associated with the repository. - tool_dependency = None - for tool_dependency in repository.tool_dependencies: - if tool_dependency.name == original_name and tool_dependency.type == original_type and tool_dependency.version == original_version: - break - if tool_dependency and tool_dependency.can_update: - dependency_install_dir = tool_dependency.installation_directory( app ) - removed_from_disk, error_message = tool_dependency_util.remove_tool_dependency_installation_directory( dependency_install_dir ) - if removed_from_disk: - context = app.install_model.context - new_dependency_name = None - new_dependency_type = None - new_dependency_version = None - for new_dependency_key, new_dependency_val_dict in new_dependencies_dict.items(): - # Match on name only, hopefully this will be enough! - if original_name == new_dependency_val_dict[ 'name' ]: - new_dependency_name = new_dependency_val_dict[ 'name' ] - new_dependency_type = new_dependency_val_dict[ 'type' ] - new_dependency_version = new_dependency_val_dict[ 'version' ] - break - if new_dependency_name and new_dependency_type and new_dependency_version: - # Update all attributes of the tool_dependency record in the database. - log.debug( "Updating version %s of tool dependency %s %s to have new version %s and type %s." 
% \ - ( str( tool_dependency.version ), - str( tool_dependency.type ), - str( tool_dependency.name ), - str( new_dependency_version ), - str( new_dependency_type ) ) ) - tool_dependency.type = new_dependency_type - tool_dependency.version = new_dependency_version - tool_dependency.status = app.install_model.ToolDependency.installation_status.UNINSTALLED - tool_dependency.error_message = None - context.add( tool_dependency ) - context.flush() - new_tool_dependency = tool_dependency - else: - # We have no new tool dependency definition based on a matching dependency name, so remove - # the existing tool dependency record from the database. - log.debug( "Deleting version %s of tool dependency %s %s from the database since it is no longer defined." % \ - ( str( tool_dependency.version ), str( tool_dependency.type ), str( tool_dependency.name ) ) ) - context.delete( tool_dependency ) - context.flush() - return new_tool_dependency - def update_repository_dependencies_metadata( metadata, repository_dependency_tups, is_valid, description ): if is_valid: repository_dependencies_dict = metadata.get( 'repository_dependencies', None ) This diff is so big that we needed to truncate the remainder. Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.