commit/galaxy-central: greg: When exporting a repository and dependency hierarchy from the tool shed, always make sure the primary repository entry is ordered last in the manifest.
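The ordering rule described in this commit (required repositories first, the exported primary repository last in the manifest) can be pictured with the following standalone sketch. It is an illustration only, not Galaxy code; the names order_for_export and deps_of are hypothetical, while the actual implementation is order_components_for_import() in lib/tool_shed/util/export_util.py, shown in the diff below.

# Minimal, self-contained sketch (not Galaxy code) of the ordering rule in this commit:
# required repositories are emitted before the repositories that depend on them, and the
# primary (exported) repository is always emitted last.

def order_for_export( primary_id, prior_required ):
    """Return repository ids ordered for import, with primary_id always last.

    prior_required maps each repository id to the ids that must be imported before it.
    Circular definitions cannot loop forever because every pass processes at least one id.
    """
    ordered = []
    processed = set()
    while len( processed ) < len( prior_required ):
        # Prefer an id whose prerequisites have all been processed, falling back to any
        # unprocessed id so that circular dependency definitions still terminate.
        candidates = [ rid for rid in prior_required if rid not in processed ]
        ready = [ rid for rid in candidates if all( req in processed for req in prior_required[ rid ] ) ]
        rid = ( ready or candidates )[ 0 ]
        processed.add( rid )
        if rid != primary_id:
            # Defer the primary repository; it must be the last entry in the manifest.
            ordered.append( rid )
    ordered.append( primary_id )
    return ordered

if __name__ == '__main__':
    # package_deseq requires package_r; the exported repository requires both.
    deps_of = { 'exported_repo': [ 'package_r', 'package_deseq' ],
                'package_deseq': [ 'package_r' ],
                'package_r': [] }
    print( order_for_export( 'exported_repo', deps_of ) )
    # -> ['package_r', 'package_deseq', 'exported_repo']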
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/64c6c98b9976/ Changeset: 64c6c98b9976 User: greg Date: 2014-01-07 22:55:59 Summary: When exporting a repository and dependency hierarchy from the tool shed, always make sure the primary repository entry is ordered last in the manifest. Affected #: 4 files diff -r 98f5685a42d2b32481db28b80dedca1b9b6791cf -r 64c6c98b9976c3dadb77426b6a6805922aa7810c lib/tool_shed/galaxy_install/install_manager.py --- a/lib/tool_shed/galaxy_install/install_manager.py +++ b/lib/tool_shed/galaxy_install/install_manager.py @@ -567,14 +567,17 @@ def order_repositories_for_installation( self, tool_shed_repositories, repository_dependencies_dict ): """ - Some repositories may have repository dependencies that are required to be installed before the dependent repository. This method will - inspect the list of repositories about to be installed and make sure to order them appropriately. For each repository about to be installed, - if required repositories are not contained in the list of repositories about to be installed, then they are not considered. Repository - dependency definitions that contain circular dependencies should not result in an infinite loop, but obviously prior installation will not be - handled for one or more of the repositories that require prior installation. This process is similar to the process used when installing tool - shed repositories (i.e., the order_components_for_installation() method in ~/lib/tool_shed/galaxy_install/repository_util), but does not handle - managing tool panel sections and other components since repository dependency definitions contained in tool shed repositories with migrated - tools must never define a relationship to a repository dependency that contains a tool. + Some repositories may have repository dependencies that are required to be installed before the dependent + repository. This method will inspect the list of repositories about to be installed and make sure to order + them appropriately. For each repository about to be installed, if required repositories are not contained + in the list of repositories about to be installed, then they are not considered. Repository dependency + definitions that contain circular dependencies should not result in an infinite loop, but obviously prior + installation will not be handled for one or more of the repositories that require prior installation. This + process is similar to the process used when installing tool shed repositories (i.e., the + order_components_for_installation() method in ~/lib/tool_shed/galaxy_install/repository_util), but does not + handle managing tool panel sections and other components since repository dependency definitions contained + in tool shed repositories with migrated tools must never define a relationship to a repository dependency + that contains a tool. """ ordered_tool_shed_repositories = [] ordered_tsr_ids = [] diff -r 98f5685a42d2b32481db28b80dedca1b9b6791cf -r 64c6c98b9976c3dadb77426b6a6805922aa7810c lib/tool_shed/util/commit_util.py --- a/lib/tool_shed/util/commit_util.py +++ b/lib/tool_shed/util/commit_util.py @@ -313,6 +313,9 @@ if lastest_installable_changeset_revision != suc.INITIAL_CHANGELOG_HASH: elem.attrib[ 'changeset_revision' ] = lastest_installable_changeset_revision revised = True + else: + error_message = 'Invalid latest installable changeset_revision %s ' % str( lastest_installable_changeset_revision ) + error_message += 'retrieved for repository %s owned by %s. 
' % ( str( name ), str( owner ) ) else: error_message = 'Unable to locate repository with name %s and owner %s. ' % ( str( name ), str( owner ) ) return revised, elem, error_message diff -r 98f5685a42d2b32481db28b80dedca1b9b6791cf -r 64c6c98b9976c3dadb77426b6a6805922aa7810c lib/tool_shed/util/export_util.py --- a/lib/tool_shed/util/export_util.py +++ b/lib/tool_shed/util/export_util.py @@ -55,7 +55,8 @@ base = base.rstrip( '/' ) return base -def export_repository( trans, tool_shed_url, repository_id, repository_name, changeset_revision, file_type, export_repository_dependencies, api=False ): +def export_repository( trans, tool_shed_url, repository_id, repository_name, changeset_revision, file_type, + export_repository_dependencies, api=False ): repository = suc.get_repository_in_tool_shed( trans, repository_id ) repositories_archive_filename = generate_repository_archive_filename( tool_shed_url, str( repository.name ), @@ -67,7 +68,8 @@ if export_repository_dependencies: repo_info_dicts = get_repo_info_dicts( trans, tool_shed_url, repository_id, changeset_revision ) repository_ids = get_repository_ids( trans, repo_info_dicts ) - ordered_repository_ids, ordered_repositories, ordered_changeset_revisions = order_components_for_import( trans, repository_ids, repo_info_dicts ) + ordered_repository_ids, ordered_repositories, ordered_changeset_revisions = \ + order_components_for_import( trans, repository_id, repository_ids, repo_info_dicts ) else: ordered_repository_ids = [] ordered_repositories = [] @@ -193,8 +195,8 @@ def get_components_from_repo_info_dict( trans, repo_info_dict ): """ - Return the repository and the associated latest installable changeset_revision (including updates) for the repository defined by the received - repo_info_dict. + Return the repository and the associated latest installable changeset_revision (including updates) for the + repository defined by the received repo_info_dict. """ for repository_name, repo_info_tup in repo_info_dict.items(): # There should only be one entry in the received repo_info_dict. @@ -208,8 +210,9 @@ def get_repo_info_dict_for_import( encoded_repository_id, encoded_repository_ids, repo_info_dicts ): """ - The received encoded_repository_ids and repo_info_dicts are lists that contain associated elements at each location in the list. This method will return the element - from repo_info_dicts associated with the received encoded_repository_id by determining it's location in the received encoded_repository_ids list. + The received encoded_repository_ids and repo_info_dicts are lists that contain associated elements at each + location in the list. This method will return the element from repo_info_dicts associated with the received + encoded_repository_id by determining it's location in the received encoded_repository_ids list. """ for index, repository_id in enumerate( encoded_repository_ids ): if repository_id == encoded_repository_id: @@ -218,6 +221,10 @@ return None def get_repo_info_dicts( trans, tool_shed_url, repository_id, changeset_revision ): + """ + Return a list of dictionaries defining repositories that are required by the repository associated with the + received repository_id. + """ repository = suc.get_repository_in_tool_shed( trans, repository_id ) repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. 
@@ -246,8 +253,9 @@ def get_repository_attributes_and_sub_elements( repository, archive_name ): """ - Get the information about a repository to create and populate an XML tag set. The generated attributes will be contained within the <repository> - tag, while the sub_elements will be tag sets contained within the <repository> tag set. + Get the information about a repository to create and populate an XML tag set. The generated attributes will + be contained within the <repository> tag, while the sub_elements will be tag sets contained within the <repository> + tag set. """ attributes = odict() sub_elements = odict() @@ -268,6 +276,7 @@ return attributes, sub_elements def get_repository_ids( trans, repo_info_dicts ): + """Return a list of repository ids associated with each dictionary in the received repo_info_dicts.""" repository_ids = [] for repo_info_dict in repo_info_dicts: for repository_name, repo_info_tup in repo_info_dict.items(): @@ -277,23 +286,36 @@ repository_ids.append( trans.security.encode_id( repository.id ) ) return repository_ids -def order_components_for_import( trans, repository_ids, repo_info_dicts ): +def order_components_for_import( trans, primary_repository_id, repository_ids, repo_info_dicts ): """ - Some repositories may have repository dependencies that must be imported and have metadata set on them before the dependent repository is imported. This method - will inspect the list of repositories about to be exported and make sure to order them appropriately for proper import. For each repository about to be exported, - if required repositories are not contained in the list of repositories about to be exported, then they are not considered. Repository dependency definitions that - contain circular dependencies should not result in an infinite loop, but obviously ordering the list will not be handled for one or more of the repositories that - require prior import. + Some repositories may have repository dependencies that must be imported and have metadata set on + them before the dependent repository is imported. This method will inspect the list of repositories + about to be exported and make sure to order them appropriately for proper import. For each repository + about to be exported, if required repositories are not contained in the list of repositories about to + be exported, then they are not considered. Repository dependency definitions that contain circular + dependencies should not result in an infinite loop, but obviously ordering the list will not be handled + for one or more of the repositories that require prior import. """ + # The received primary_repository_id is the id of the repository being exported, with the received list + # of repository_ids being only the ids of all of its repository dependencies. The primary repository will + # always be last in the returned lists. ordered_repository_ids = [] ordered_repositories = [] ordered_changeset_revisions = [] - # Create a dictionary whose keys are the received repository_ids and whose values are a list of repository_ids, each of which is contained in the received list of - # repository_ids and whose associated repository must be imported prior to the repository associated with the repository_id key. + # Create a dictionary whose keys are the received repository_ids and whose values are a list of + # repository_ids, each of which is contained in the received list of repository_ids and whose associated + # repository must be imported prior to the repository associated with the repository_id key. 
prior_import_required_dict = suc.get_prior_import_or_install_required_dict( trans, repository_ids, repo_info_dicts ) processed_repository_ids = [] + # Process the list of repository dependencies defined for the repository associated with the received + # primary_repository_id. while len( processed_repository_ids ) != len( prior_import_required_dict.keys() ): repository_id = suc.get_next_prior_import_or_install_required_dict_entry( prior_import_required_dict, processed_repository_ids ) + if repository_id == primary_repository_id: + # Append the primary_repository_id without processing it since it must be returned last in the order. + # It will be processed below after all dependencies are processed. + processed_repository_ids.append( primary_repository_id ) + continue processed_repository_ids.append( repository_id ) if repository_id not in ordered_repository_ids: prior_import_required_ids = prior_import_required_dict[ repository_id ] @@ -312,4 +334,11 @@ ordered_repository_ids.append( repository_id ) ordered_repositories.append( repository ) ordered_changeset_revisions.append( changeset_revision ) + # Process the repository associated with the received primary_repository_id last. + repo_info_dict = get_repo_info_dict_for_import( primary_repository_id, repository_ids, repo_info_dicts ) + repository, changeset_revision = get_components_from_repo_info_dict( trans, repo_info_dict ) + if repository and changeset_revision: + ordered_repository_ids.append( primary_repository_id ) + ordered_repositories.append( repository ) + ordered_changeset_revisions.append( changeset_revision ) return ordered_repository_ids, ordered_repositories, ordered_changeset_revisions diff -r 98f5685a42d2b32481db28b80dedca1b9b6791cf -r 64c6c98b9976c3dadb77426b6a6805922aa7810c lib/tool_shed/util/shed_util_common.py --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -745,10 +745,11 @@ def get_next_prior_import_or_install_required_dict_entry( prior_required_dict, processed_tsr_ids ): """ - This method is used in the Tool Shed when exporting a repository and it's dependencies, and in Galaxy when a repository and it's dependencies - are being installed. The order in which the prior_required_dict is processed is critical in order to ensure that the ultimate repository import - or installation order is correctly defined. This method determines the next key / value pair from the received prior_required_dict that should - be processed. + This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy + when a repository and its dependencies are being installed. The order in which the prior_required_dict + is processed is critical in order to ensure that the ultimate repository import or installation order is + correctly defined. This method determines the next key / value pair from the received prior_required_dict + that should be processed. """ # Return the first key / value pair that is not yet processed and whose value is an empty list. for key, value in prior_required_dict.items(): @@ -756,7 +757,8 @@ continue if not value: return key - # Return the first key / value pair that is not yet processed and whose ids in value are all included in processed_tsr_ids. + # Return the first key / value pair that is not yet processed and whose ids in value are all included + # in processed_tsr_ids.
for key, value in prior_required_dict.items(): if key in processed_tsr_ids: continue @@ -767,7 +769,8 @@ break if all_contained: return key - # Return the first key / value pair that is not yet processed. Hopefully this is all that is necessary at this point. + # Return the first key / value pair that is not yet processed. Hopefully this is all that is necessary + # at this point. for key, value in prior_required_dict.items(): if key in processed_tsr_ids: continue Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
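For reference, the following is a hedged, standalone approximation of the three-tier selection rule documented in get_next_prior_import_or_install_required_dict_entry() above: entries with no prior requirements are chosen first, then entries whose requirements have all been processed, then any remaining unprocessed entry so that circular dependency definitions cannot stall the loop. The function name next_entry and the example ids are assumptions for illustration, not part of the Galaxy code base.

# Standalone approximation (not the Galaxy implementation) of the selection heuristic
# described in the shed_util_common.py docstring and comments above.

def next_entry( prior_required_dict, processed_ids ):
    """Pick the next repository id to process from prior_required_dict."""
    unprocessed = [ ( key, value ) for key, value in prior_required_dict.items() if key not in processed_ids ]
    # 1. Prefer an entry that requires nothing at all.
    for key, value in unprocessed:
        if not value:
            return key
    # 2. Otherwise prefer an entry whose required ids have all been processed already.
    for key, value in unprocessed:
        if all( rid in processed_ids for rid in value ):
            return key
    # 3. Fall back to any unprocessed entry (e.g. circular dependency definitions).
    for key, value in unprocessed:
        return key
    return None

# Example: 'b' requires 'a', and 'c' requires both, so 'a' is selected first.
print( next_entry( { 'a': [], 'b': [ 'a' ], 'c': [ 'a', 'b' ] }, processed_ids=[] ) )  # -> 'a'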