1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/69bec229bdf8/
Changeset:   69bec229bdf8
User:        greg
Date:        2013-09-10 17:59:22
Summary:     Framework enhancements for handling installation of repositories into Galaxy that have repository dependencies that are needed only for compiling a tool dependency.
Affected #:  7 files

diff -r 82833b9b6f4fbdf1408864bbf7781a02200ca633 -r 69bec229bdf85900f7d581e735aa69ee2cc2aeb9 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -2280,8 +2280,20 @@
                                                                                  handled_key_rd_dicts=None )
             if metadata:
                 if 'repository_dependencies' in metadata and not repository_dependencies:
-                    message += 'The repository dependency definitions for this repository are invalid and will be ignored.'
-                    status = 'error'
+                    # See if we have an invalid repository dependency definition or if the repository dependency is required only for compiling the
+                    # repository's tool dependency.
+                    invalid = False
+                    repository_dependencies_dict = metadata[ 'repository_dependencies' ]
+                    rd_tups = repository_dependencies_dict.get( 'repository_dependencies', [] )
+                    for rd_tup in rd_tups:
+                        rdtool_shed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required, rd_only_if_compiling_contained_td = \
+                            common_util.parse_repository_dependency_tuple( rd_tup )
+                        if not util.asbool( rd_only_if_compiling_contained_td ):
+                            invalid = True
+                            break
+                    if invalid:
+                        message += 'The repository dependency definitions for this repository are invalid and will be ignored.'
+                        status = 'error'
         else:
             repository_metadata_id = None
             metadata = None
@@ -2718,20 +2730,8 @@
         name = kwd.get( 'name', None )
         owner = kwd.get( 'owner', None )
         changeset_revision = kwd.get( 'changeset_revision', None )
-        repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
-        repo_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_dir )
-        # Get the upper bound changeset revision.
-        upper_bound_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
-        # Build the list of changeset revision hashes defining each available update up to, but excluding, upper_bound_changeset_revision.
-        changeset_hashes = []
-        for changeset in suc.reversed_lower_upper_bounded_changelog( repo, changeset_revision, upper_bound_changeset_revision ):
-            # Make sure to exclude upper_bound_changeset_revision.
-            if changeset != upper_bound_changeset_revision:
-                changeset_hashes.append( str( repo.changectx( changeset ) ) )
-        if changeset_hashes:
-            changeset_hashes_str = ','.join( changeset_hashes )
-            return changeset_hashes_str
+        if name and owner and changeset_revision:
+            return suc.get_updated_changeset_revisions( trans, name, owner, changeset_revision )
         return ''

     @web.expose
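Repository dependency tuples now carry a sixth component, only_if_compiling_contained_td, alongside prior_installation_required. The standalone sketch below is only an illustration of the validity check introduced above: the tuples are made up, and the plain unpacking and string test stand in for common_util.parse_repository_dependency_tuple and util.asbool.

    def definitions_are_invalid( rd_tups ):
        # A definition is reported as invalid only if at least one dependency is a
        # genuine repository dependency, i.e. not marked only_if_compiling_contained_td.
        for rd_tup in rd_tups:
            if len( rd_tup ) == 6:
                tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup
            else:
                # Older five-component tuples default the new flag to 'False'.
                tool_shed, name, owner, changeset_revision, prior_installation_required = rd_tup
                only_if_compiling_contained_td = 'False'
            if str( only_if_compiling_contained_td ).lower() not in [ 'true', 'yes', 'on', '1' ]:
                return True
        return False

    # Example: a dependency needed only to compile a contained tool dependency is ignored.
    rd_tups = [ [ 'http://localhost:9009', 'package_zlib_1_2', 'some_owner', '0123456789ab', 'False', 'True' ] ]
    assert not definitions_are_invalid( rd_tups )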
diff -r 82833b9b6f4fbdf1408864bbf7781a02200ca633 -r 69bec229bdf85900f7d581e735aa69ee2cc2aeb9 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -270,7 +270,8 @@
                     continue
                 for rd_tup in rd_tups:
                     prior_install_ids = []
-                    tool_shed, name, owner, changeset_revision, prior_installation_required = common_util.parse_repository_dependency_tuple( rd_tup )
+                    tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                        common_util.parse_repository_dependency_tuple( rd_tup )
                     if util.asbool( prior_installation_required ):
                         for tsr in tool_shed_repositories:
                             if tsr.name == name and tsr.owner == owner and tsr.changeset_revision == changeset_revision:
diff -r 82833b9b6f4fbdf1408864bbf7781a02200ca633 -r 69bec229bdf85900f7d581e735aa69ee2cc2aeb9 lib/tool_shed/util/common_util.py
--- a/lib/tool_shed/util/common_util.py
+++ b/lib/tool_shed/util/common_util.py
@@ -46,8 +46,8 @@
         if rd_key in [ 'root_key', 'description' ]:
             continue
         for rd_tup in rd_tups:
-            tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td \
-                = parse_repository_dependency_tuple( rd_tup )
+            tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                parse_repository_dependency_tuple( rd_tup )
             tool_shed_accessible, tool_dependencies = get_tool_dependencies( app,
                                                                              tool_shed,
                                                                              name,
diff -r 82833b9b6f4fbdf1408864bbf7781a02200ca633 -r 69bec229bdf85900f7d581e735aa69ee2cc2aeb9 lib/tool_shed/util/container_util.py
--- a/lib/tool_shed/util/container_util.py
+++ b/lib/tool_shed/util/container_util.py
@@ -984,7 +984,7 @@
                                           tool_dependency_id=None,
                                           is_orphan='Orphan' )
         folder.tool_dependencies.append( tool_dependency )
-        is_orphan_description = "these dependencies may not be required by tools in this repository"
+        not_used_by_local_tools_description = "these dependencies may not be required by tools in this repository"
         for dependency_key, requirements_dict in tool_dependencies.items():
             tool_dependency_id += 1
             if dependency_key in [ 'set_environment' ]:
@@ -995,7 +995,7 @@
                     # TODO: handle this is Galaxy
                     is_orphan = False
                 if is_orphan:
-                    folder.description = is_orphan_description
+                    folder.description = not_used_by_local_tools_description
                 name = set_environment_dict.get( 'name', None )
                 type = set_environment_dict[ 'type' ]
                 repository_id = set_environment_dict.get( 'repository_id', None )
@@ -1018,10 +1018,9 @@
                 if trans.webapp.name == 'tool_shed':
                     is_orphan = requirements_dict.get( 'is_orphan', False )
                 else:
-                    # TODO: handle this is Galaxy
                     is_orphan = False
                 if is_orphan:
-                    folder.description = is_orphan_description
+                    folder.description = not_used_by_local_tools_description
                 name = requirements_dict[ 'name' ]
                 version = requirements_dict[ 'version' ]
                 type = requirements_dict[ 'type' ]
diff -r 82833b9b6f4fbdf1408864bbf7781a02200ca633 -r 69bec229bdf85900f7d581e735aa69ee2cc2aeb9 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -19,7 +19,9 @@
 from tool_shed.util import tool_dependency_util
 from tool_shed.util import tool_util
 from tool_shed.util import xml_util
+from tool_shed.galaxy_install.tool_dependencies import install_util
 from tool_shed.galaxy_install.tool_dependencies import td_common_util
+import tool_shed.repository_types.util as rt_util

 import pkg_resources
@@ -904,10 +906,18 @@
                                                                      repository_dependency_tups=invalid_repository_dependency_tups,
                                                                      is_valid=False,
                                                                      description=description )
-    # Determine and store orphan tool dependencies.
-    orphan_tool_dependencies = get_orphan_tool_dependencies( metadata_dict )
-    if orphan_tool_dependencies:
-        metadata_dict[ 'orphan_tool_dependencies' ] = orphan_tool_dependencies
+    # We need to continue to restrict the behavior of orphan tool dependencies, possibly eliminating them altogether at some point.
+    check_for_orphan_tool_dependencies = False
+    if app.name == 'tool_shed':
+        if repository.type == rt_util.UNRESTRICTED and 'tools' not in metadata_dict:
+            check_for_orphan_tool_dependencies = True
+    elif 'tools' in metadata_dict:
+        check_for_orphan_tool_dependencies = True
+    if check_for_orphan_tool_dependencies:
+        # Determine and store orphan tool dependencies.
+        orphan_tool_dependencies = get_orphan_tool_dependencies( metadata_dict )
+        if orphan_tool_dependencies:
+            metadata_dict[ 'orphan_tool_dependencies' ] = orphan_tool_dependencies
     return metadata_dict, error_message

 def generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ):
@@ -1128,16 +1138,6 @@
         sample_file_metadata_paths.append( relative_path_to_sample_file )
     return sample_file_metadata_paths, sample_file_copy_paths

-def get_updated_changeset_revisions_from_tool_shed( app, tool_shed_url, name, owner, changeset_revision ):
-    """
-    Get all appropriate newer changeset revisions for the repository defined by the received tool_shed_url / name / owner combination.
-    """
-    url = suc.url_join( tool_shed_url,
-                        'repository/updated_changeset_revisions?name=%s&owner=%s&changeset_revision=%s' %
-                        ( name, owner, changeset_revision ) )
-    text = common_util.tool_shed_get( app, tool_shed_url, url )
-    return text
-
 def handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_dependency_dict, new_dependency_dict ):
     """
     This method is called when a Galaxy admin is getting updates for an installed tool shed repository in order to cover the case where an
@@ -1188,7 +1188,7 @@
         return repository_dependency_tup, is_valid, error_message
     else:
         # Send a request to the tool shed to retrieve appropriate additional changeset revisions with which the repository may have been installed.
-        text = get_updated_changeset_revisions_from_tool_shed( app, toolshed, name, owner, changeset_revision )
+        text = install_util.get_updated_changeset_revisions_from_tool_shed( app, toolshed, name, owner, changeset_revision )
         if text:
             updated_changeset_revisions = util.listify( text )
             for updated_changeset_revision in updated_changeset_revisions:
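The updated_changeset_revisions round trip is easy to miss in the hunks above: the tool shed answers with a comma-separated string of changeset hashes, and callers turn it into a list with galaxy.util.listify. A small sketch, with an invented response and a plain str.split standing in for listify:

    def split_updated_changeset_revisions( text ):
        # galaxy.util.listify does roughly this for a comma-separated string.
        return [ hash_str.strip() for hash_str in text.split( ',' ) if hash_str.strip() ]

    # Hypothetical response from the tool shed's updated_changeset_revisions controller method.
    text = 'a1b2c3d4e5f6,0f9e8d7c6b5a'
    updated_changeset_revisions = split_updated_changeset_revisions( text ) if text else []
    # -> [ 'a1b2c3d4e5f6', '0f9e8d7c6b5a' ]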
diff -r 82833b9b6f4fbdf1408864bbf7781a02200ca633 -r 69bec229bdf85900f7d581e735aa69ee2cc2aeb9 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -1,7 +1,9 @@
 import logging
 import os
 from galaxy import eggs
+from galaxy.util import asbool
 from galaxy.util import json
+from galaxy.util import listify
 import tool_shed.util.shed_util_common as suc
 from tool_shed.util import common_util
 from tool_shed.util import common_install_util
@@ -263,9 +265,9 @@
         message = '%s ' % str( error )
     return message

-def get_key_for_repository_changeset_revision( toolshed_base_url, repository, repository_metadata, all_repository_dependencies ):
+def get_key_for_repository_changeset_revision( trans, toolshed_base_url, repository, repository_metadata, all_repository_dependencies ):
     prior_installation_required, only_if_compiling_contained_td = \
-        get_prior_installation_required_and_only_if_compiling_contained_td( toolshed_base_url, repository, repository_metadata, all_repository_dependencies )
+        get_prior_installation_required_and_only_if_compiling_contained_td( trans, toolshed_base_url, repository, repository_metadata, all_repository_dependencies )
     # Create a key with the value of prior_installation_required defaulted to False.
     key = container_util.generate_repository_dependencies_key_for_repository( toolshed_base_url=toolshed_base_url,
                                                                               repository_name=repository.name,
@@ -275,21 +277,47 @@
                                                                               only_if_compiling_contained_td=only_if_compiling_contained_td )
     return key

-def get_prior_installation_required_and_only_if_compiling_contained_td( toolshed_base_url, repository, repository_metadata, all_repository_dependencies ):
+def get_prior_installation_required_and_only_if_compiling_contained_td( trans, toolshed_base_url, repository, repository_metadata, all_repository_dependencies ):
     """
-    If all_repository_dependencies contains a repository dependency tuple that is associated with the received repository, return the
-    value of the tuple's prior_installation_required component.
+    This method is called from the tool shed and never from Galaxy.  If all_repository_dependencies contains a repository dependency tuple that is associated with
+    the received repository, return the value of the tuple's prior_installation_required component.
""" - for rd_key, rd_tups in all_repository_dependencies.items(): - if rd_key in [ 'root_key', 'description' ]: - continue + if all_repository_dependencies: + for rd_key, rd_tups in all_repository_dependencies.items(): + if rd_key in [ 'root_key', 'description' ]: + continue + for rd_tup in rd_tups: + rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required, rd_only_if_compiling_contained_td = \ + common_util.parse_repository_dependency_tuple( rd_tup ) + if rd_toolshed == toolshed_base_url and \ + rd_name == repository.name and \ + rd_owner == repository.user.username and \ + rd_changeset_revision == repository_metadata.changeset_revision: + return rd_prior_installation_required, rd_only_if_compiling_contained_td + elif repository_metadata: + # Get the list of changeset revisions from the tool shed to which the repository may be updated. + metadata = repository_metadata.metadata + current_changeset_revision = str( repository_metadata.changeset_revision ) + # Get the changeset revision to which the current value of required_repository_changeset_revision should be updated if it's not current. + text = suc.get_updated_changeset_revisions( trans, + name=str( repository.name ), + owner=str( repository.user.username ), + changeset_revision=current_changeset_revision ) + if text: + valid_changeset_revisions = listify( text ) + if current_changeset_revision not in valid_changeset_revisions: + valid_changeset_revisions.append( current_changeset_revision ) + else: + valid_changeset_revisions = [ current_changeset_revision ] + repository_dependencies_dict = metadata[ 'repository_dependencies' ] + rd_tups = repository_dependencies_dict.get( 'repository_dependencies', [] ) for rd_tup in rd_tups: rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required, rd_only_if_compiling_contained_td = \ common_util.parse_repository_dependency_tuple( rd_tup ) if rd_toolshed == toolshed_base_url and \ rd_name == repository.name and \ rd_owner == repository.user.username and \ - rd_changeset_revision == repository_metadata.changeset_revision: + rd_changeset_revision in valid_changeset_revisions: return rd_prior_installation_required, rd_only_if_compiling_contained_td # Default both prior_installation_required and only_if_compiling_contained_td to False. return 'False', 'False' @@ -333,7 +361,11 @@ metadata = repository_metadata.metadata if metadata: if 'repository_dependencies' in metadata: - current_repository_key = get_key_for_repository_changeset_revision( toolshed_base_url, repository, repository_metadata, all_repository_dependencies ) + current_repository_key = get_key_for_repository_changeset_revision( trans, + toolshed_base_url, + repository, + repository_metadata, + all_repository_dependencies ) repository_dependencies_dict = metadata[ 'repository_dependencies' ] if not all_repository_dependencies: all_repository_dependencies = initialize_all_repository_dependencies( current_repository_key, @@ -583,6 +615,22 @@ return True return False +def filter_only_if_compiling_contained_td( key_rd_dict ): + """ + Return a copy of the received key_rd_dict with repository dependencies that are needed only_if_compiling_contained_td filtered out + of the list of repository dependencies for each rd_key. 
+ """ + filtered_key_rd_dict = {} + for rd_key, required_rd_tup in key_rd_dict.items(): + tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ + common_util.parse_repository_dependency_tuple( required_rd_tup ) + if not asbool( only_if_compiling_contained_td ): + if rd_key in filtered_key_rd_dict: + filtered_key_rd_dict[ rd_key ].append( required_rd_tup ) + else: + filtered_key_rd_dict[ rd_key ] = [ required_rd_tup ] + return filtered_key_rd_dict + def merge_missing_repository_dependencies_to_installed_container( containers_dict ): """Merge the list of missing repository dependencies into the list of installed repository dependencies.""" missing_rd_container_root = containers_dict.get( 'missing_repository_dependencies', None ) @@ -629,32 +677,35 @@ current_repository_key_rd_dicts = remove_ropository_dependency_reference_to_self( current_repository_key_rd_dicts ) current_repository_key_rd_dicts = get_updated_changeset_revisions_for_repository_dependencies( trans, current_repository_key_rd_dicts ) for key_rd_dict in current_repository_key_rd_dicts: - is_circular = False - if not in_key_rd_dicts( key_rd_dict, handled_key_rd_dicts ) and not in_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ): - filtered_current_repository_key_rd_dicts.append( key_rd_dict ) - repository_dependency = key_rd_dict[ current_repository_key ] - if current_repository_key in all_repository_dependencies: - # Add all repository dependencies for the current repository into it's entry in all_repository_dependencies. - all_repository_dependencies_val = all_repository_dependencies[ current_repository_key ] - if repository_dependency not in all_repository_dependencies_val: - all_repository_dependencies_val.append( repository_dependency ) - all_repository_dependencies[ current_repository_key ] = all_repository_dependencies_val - elif not in_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ): - # Handle circular repository dependencies. - if is_circular_repository_dependency( current_repository_key, repository_dependency, all_repository_dependencies ): - is_circular = True - circular_repository_dependencies, handled_key_rd_dicts, all_repository_dependencies = \ - handle_circular_repository_dependency( current_repository_key, - repository_dependency, - circular_repository_dependencies, - handled_key_rd_dicts, - all_repository_dependencies ) - else: - all_repository_dependencies[ current_repository_key ] = [ repository_dependency ] - if not is_circular and can_add_to_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ): - new_key_rd_dict = {} - new_key_rd_dict[ current_repository_key ] = repository_dependency - key_rd_dicts_to_be_processed.append( new_key_rd_dict ) + # Filter out repository dependencies that are required only if compiling the dependent repository's tool dependency. + all_repository_dependencieskey_rd_dict = filter_only_if_compiling_contained_td( key_rd_dict ) + if key_rd_dict: + is_circular = False + if not in_key_rd_dicts( key_rd_dict, handled_key_rd_dicts ) and not in_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ): + filtered_current_repository_key_rd_dicts.append( key_rd_dict ) + repository_dependency = key_rd_dict[ current_repository_key ] + if current_repository_key in all_repository_dependencies: + # Add all repository dependencies for the current repository into it's entry in all_repository_dependencies. 
diff -r 82833b9b6f4fbdf1408864bbf7781a02200ca633 -r 69bec229bdf85900f7d581e735aa69ee2cc2aeb9 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1190,6 +1190,27 @@
         return {}
     return tool_shed_status_dict

+def get_updated_changeset_revisions( trans, name, owner, changeset_revision ):
+    """
+    Return a string of comma-separated changeset revision hashes for all available updates to the received changeset revision for the repository
+    defined by the received name and owner.
+    """
+    repository = get_repository_by_name_and_owner( trans.app, name, owner )
+    repo_dir = repository.repo_path( trans.app )
+    repo = hg.repository( get_configured_ui(), repo_dir )
+    # Get the upper bound changeset revision.
+    upper_bound_changeset_revision = get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+    # Build the list of changeset revision hashes defining each available update up to, but excluding, upper_bound_changeset_revision.
+    changeset_hashes = []
+    for changeset in reversed_lower_upper_bounded_changelog( repo, changeset_revision, upper_bound_changeset_revision ):
+        # Make sure to exclude upper_bound_changeset_revision.
+        if changeset != upper_bound_changeset_revision:
+            changeset_hashes.append( str( repo.changectx( changeset ) ) )
+    if changeset_hashes:
+        changeset_hashes_str = ','.join( changeset_hashes )
+        return changeset_hashes_str
+    return ''
+
 def get_url_from_tool_shed( app, tool_shed ):
     """
     The value of tool_shed is something like: toolshed.g2.bx.psu.edu.  We need the URL to this tool shed, which is something like:

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.