commit/galaxy-central: greg: Move some functions out of the Tool Shed's shed_util_common module.
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/0c62ccbc8624/

Changeset: 0c62ccbc8624
User: greg
Date: 2014-07-22 22:46:30
Summary: Move some functions out of the Tool Shed's shed_util_common module.
Affected #: 7 files

diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/capsule/capsule_manager.py --- a/lib/tool_shed/capsule/capsule_manager.py +++ b/lib/tool_shed/capsule/capsule_manager.py @@ -9,6 +9,7 @@ from time import strftime from galaxy import web +from galaxy.model.orm import and_ from galaxy.util import asbool from galaxy.util import CHUNK_SIZE from galaxy.util.odict import odict @@ -408,7 +409,7 @@ flush = True # Do not allow dependent repository revisions to be automatically installed if population # resulted in errors. - dependent_downloadable_revisions = suc.get_dependent_downloadable_revisions( self.app, repository_metadata ) + dependent_downloadable_revisions = self.get_dependent_downloadable_revisions( repository_metadata ) for dependent_downloadable_revision in dependent_downloadable_revisions: if dependent_downloadable_revision.downloadable: dependent_downloadable_revision.downloadable = False @@ -541,6 +542,66 @@ archives.append( archive_file_name ) return archives, error_message + def get_dependent_downloadable_revisions( self, repository_metadata ): + """ + Return all repository_metadata records that are downloadable and that depend upon the received + repository_metadata record. + """ + # This method is called only from the tool shed. + sa_session = self.app.model.context.current + rm_changeset_revision = repository_metadata.changeset_revision + rm_repository = repository_metadata.repository + rm_repository_name = str( rm_repository.name ) + rm_repository_owner = str( rm_repository.user.username ) + dependent_downloadable_revisions = [] + for repository in sa_session.query( self.app.model.Repository ) \ + .filter( and_( self.app.model.Repository.table.c.id != rm_repository.id, + self.app.model.Repository.table.c.deleted == False, + self.app.model.Repository.table.c.deprecated == False ) ): + downloadable_revisions = repository.downloadable_revisions + if downloadable_revisions: + for downloadable_revision in downloadable_revisions: + if downloadable_revision.has_repository_dependencies: + metadata = downloadable_revision.metadata + if metadata: + repository_dependencies_dict = metadata.get( 'repository_dependencies', {} ) + repository_dependencies_tups = repository_dependencies_dict.get( 'repository_dependencies', [] ) + for repository_dependencies_tup in repository_dependencies_tups: + tool_shed, \ + name, \ + owner, \ + changeset_revision, \ + prior_installation_required, \ + only_if_compiling_contained_td = \ + common_util.parse_repository_dependency_tuple( repository_dependencies_tup ) + if name == rm_repository_name and owner == rm_repository_owner: + # We've discovered a repository revision that depends upon the repository associated + # with the received repository_metadata record, but we need to make sure it depends + # upon the revision. + if changeset_revision == rm_changeset_revision: + dependent_downloadable_revisions.append( downloadable_revision ) + else: + # Make sure the defined changeset_revision is current.
+ defined_repository_metadata = \ + sa_session.query( self.app.model.RepositoryMetadata ) \ + .filter( self.app.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) \ + .first() + if defined_repository_metadata is None: + # The defined changeset_revision is not associated with a repository_metadata + # record, so updates must be necessary. + defined_repository = suc.get_repository_by_name_and_owner( self.app, name, owner ) + defined_repo = hg_util.get_repo_for_repository( self.app, + repository=defined_repository, + repo_path=None, + create=False ) + updated_changeset_revision = \ + suc.get_next_downloadable_changeset_revision( defined_repository, + defined_repo, + changeset_revision ) + if updated_changeset_revision == rm_changeset_revision: + dependent_downloadable_revisions.append( downloadable_revision ) + return dependent_downloadable_revisions + def get_export_info_dict( self, export_info_file_path ): """ Parse the export_info.xml file contained within the capsule and return a dictionary diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/galaxy_install/installed_repository_manager.py --- a/lib/tool_shed/galaxy_install/installed_repository_manager.py +++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py @@ -342,8 +342,8 @@ installed_rd_tups = [] missing_rd_tups = [] for tsr in repository.repository_dependencies: - prior_installation_required = suc.set_prior_installation_required( self.app, repository, tsr ) - only_if_compiling_contained_td = suc.set_only_if_compiling_contained_td( repository, tsr ) + prior_installation_required = self.set_prior_installation_required( repository, tsr ) + only_if_compiling_contained_td = self.set_only_if_compiling_contained_td( repository, tsr ) rd_tup = [ tsr.tool_shed, tsr.name, tsr.owner, @@ -957,6 +957,47 @@ return True return False + def set_only_if_compiling_contained_td( self, repository, required_repository ): + """ + Return True if the received required_repository is only needed to compile a tool + dependency defined for the received repository. + """ + # This method is called only from Galaxy when rendering repository dependencies + # for an installed tool shed repository. + # TODO: Do we need to check more than changeset_revision here? + required_repository_tup = [ required_repository.tool_shed, \ + required_repository.name, \ + required_repository.owner, \ + required_repository.changeset_revision ] + for tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td: + partial_tup = tup[ 0:4 ] + if partial_tup == required_repository_tup: + return 'True' + return 'False' + + def set_prior_installation_required( self, repository, required_repository ): + """ + Return True if the received required_repository must be installed before the + received repository. + """ + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, + str( required_repository.tool_shed ) ) + required_repository_tup = [ tool_shed_url, + str( required_repository.name ), + str( required_repository.owner ), + str( required_repository.changeset_revision ) ] + # Get the list of repository dependency tuples associated with the received repository + # where prior_installation_required is True. 
+ required_rd_tups_that_must_be_installed = repository.requires_prior_installation_of + for required_rd_tup in required_rd_tups_that_must_be_installed: + # Repository dependency tuples in metadata include a prior_installation_required value, + # so strip it for comparision. + partial_required_rd_tup = required_rd_tup[ 0:4 ] + if partial_required_rd_tup == required_repository_tup: + # Return the string value of prior_installation_required, which defaults to 'False'. + return str( required_rd_tup[ 4 ] ) + return 'False' + def update_existing_tool_dependency( self, repository, original_dependency_dict, new_dependencies_dict ): """ Update an exsiting tool dependency whose definition was updated in a change set diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py --- a/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py +++ b/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py @@ -2,9 +2,9 @@ Class encapsulating the management of repository dependencies installed or being installed into Galaxy from the Tool Shed. """ - import json import logging +import os import urllib import urllib2 @@ -63,11 +63,10 @@ break if d_repository is None: # The dependent repository is not in the received list so look in the database. - d_repository = suc.get_or_create_tool_shed_repository( self.app, - d_toolshed, - d_name, - d_owner, - d_changeset_revision ) + d_repository = self.get_or_create_tool_shed_repository( d_toolshed, + d_name, + d_owner, + d_changeset_revision ) # Process each repository_dependency defined for the current dependent repository. for repository_dependency_components_list in val: required_repository = None @@ -87,11 +86,10 @@ break if required_repository is None: # The required repository is not in the received list so look in the database. - required_repository = suc.get_or_create_tool_shed_repository( self.app, - rd_toolshed, - rd_name, - rd_owner, - rd_changeset_revision ) + required_repository = self.get_or_create_tool_shed_repository( rd_toolshed, + rd_name, + rd_owner, + rd_changeset_revision ) # Ensure there is a repository_dependency relationship between d_repository and required_repository. rrda = None for rd in d_repository.repository_dependencies: @@ -257,6 +255,35 @@ self.build_repository_dependency_relationships( all_repo_info_dicts, all_created_or_updated_tool_shed_repositories ) return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts + def get_or_create_tool_shed_repository( self, tool_shed, name, owner, changeset_revision ): + """ + Return a tool shed repository database record defined by the combination of + tool shed, repository name, repository owner and changeset_revision or + installed_changeset_revision. A new tool shed repository record will be + created if one is not located. + """ + install_model = self.app.install_model + # We store the port in the database. + tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed ) + # This method is used only in Galaxy, not the tool shed. 
+ repository = suc.get_repository_for_dependency_relationship( self.app, tool_shed, name, owner, changeset_revision ) + if not repository: + tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed ) + repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name ) + ctx_rev = suc.get_ctx_rev( self.app, tool_shed_url, name, owner, changeset_revision ) + repository = suc.create_or_update_tool_shed_repository( app=self.app, + name=name, + description=None, + installed_changeset_revision=changeset_revision, + ctx_rev=ctx_rev, + repository_clone_url=repository_clone_url, + metadata_dict={}, + status=install_model.ToolShedRepository.installation_status.NEW, + current_changeset_revision=None, + owner=owner, + dist_to_shed=False ) + return repository + def get_repository_dependencies_for_installed_tool_shed_repository( self, app, repository ): """ Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/metadata/repository_metadata_manager.py --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -410,7 +410,7 @@ repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False ) for changeset in repo.changelog: changeset_hash = str( repo.changectx( changeset ) ) - skip_tool_test = suc.get_skip_tool_test_by_changeset_revision( self.app, changeset_hash ) + skip_tool_test = self.get_skip_tool_test_by_changeset_revision( changeset_hash ) if skip_tool_test: # We found a skip_tool_test record associated with the changeset_revision, # so see if it has a valid repository_revision. @@ -521,6 +521,16 @@ return self.sa_session.query( self.app.model.Repository ) \ .filter( self.app.model.Repository.table.c.deleted == False ) + def get_skip_tool_test_by_changeset_revision( self, changeset_revision ): + """ + Return a skip_tool_test record whose initial_changeset_revision is the received + changeset_revision. + """ + # There should only be one, but we'll use first() so callers won't have to handle exceptions. 
+ return self.sa_session.query( self.app.model.SkipToolTest ) \ + .filter( self.app.model.SkipToolTest.table.c.initial_changeset_revision == changeset_revision ) \ + .first() + def new_datatypes_metadata_required( self, repository_metadata, metadata_dict ): """ Compare the last saved metadata for each datatype in the repository with the new metadata diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/tools/data_table_manager.py --- a/lib/tool_shed/tools/data_table_manager.py +++ b/lib/tool_shed/tools/data_table_manager.py @@ -2,8 +2,9 @@ import os import shutil +from xml.etree import ElementTree as XmlET + from tool_shed.util import hg_util -from tool_shed.util import shed_util_common as suc from tool_shed.util import xml_util log = logging.getLogger( __name__ ) @@ -14,6 +15,36 @@ def __init__( self, app ): self.app = app + def generate_repository_info_elem( self, tool_shed, repository_name, changeset_revision, owner, + parent_elem=None, **kwd ): + """Create and return an ElementTree repository info Element.""" + if parent_elem is None: + elem = XmlET.Element( 'tool_shed_repository' ) + else: + elem = XmlET.SubElement( parent_elem, 'tool_shed_repository' ) + tool_shed_elem = XmlET.SubElement( elem, 'tool_shed' ) + tool_shed_elem.text = tool_shed + repository_name_elem = XmlET.SubElement( elem, 'repository_name' ) + repository_name_elem.text = repository_name + repository_owner_elem = XmlET.SubElement( elem, 'repository_owner' ) + repository_owner_elem.text = owner + changeset_revision_elem = XmlET.SubElement( elem, 'installed_changeset_revision' ) + changeset_revision_elem.text = changeset_revision + #add additional values + #TODO: enhance additional values to allow e.g. use of dict values that will recurse + for key, value in kwd.iteritems(): + new_elem = XmlET.SubElement( elem, key ) + new_elem.text = value + return elem + + def generate_repository_info_elem_from_repository( self, tool_shed_repository, parent_elem=None, **kwd ): + return self.generate_repository_info_elem( tool_shed_repository.tool_shed, + tool_shed_repository.name, + tool_shed_repository.installed_changeset_revision, + tool_shed_repository.owner, + parent_elem=parent_elem, + **kwd ) + def get_tool_index_sample_files( self, sample_files ): """ Try to return the list of all appropriate tool data sample files included @@ -129,7 +160,7 @@ if path: file_elem.set( 'path', os.path.normpath( os.path.join( target_dir, os.path.split( path )[1] ) ) ) # Store repository info in the table tag set for trace-ability. 
- repo_elem = suc.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=elem ) + repo_elem = self.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=elem ) if elems: # Remove old data_table os.unlink( tool_data_table_conf_filename ) diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/util/metadata_util.py --- a/lib/tool_shed/util/metadata_util.py +++ b/lib/tool_shed/util/metadata_util.py @@ -7,6 +7,18 @@ log = logging.getLogger( __name__ ) +def get_latest_changeset_revision( app, repository, repo ): + repository_tip = repository.tip( app ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( app, + app.security.encode_id( repository.id ), + repository_tip ) + if repository_metadata and repository_metadata.downloadable: + return repository_tip + changeset_revisions = suc.get_ordered_metadata_changeset_revisions( repository, repo, downloadable=False ) + if changeset_revisions: + return changeset_revisions[ -1 ] + return hg_util.INITIAL_CHANGELOG_HASH + def get_latest_repository_metadata( app, decoded_repository_id, downloadable=False ): """Get last metadata defined for a specified repository from the database.""" sa_session = app.model.context.current @@ -15,7 +27,7 @@ if downloadable: changeset_revision = suc.get_latest_downloadable_changeset_revision( app, repository, repo ) else: - changeset_revision = suc.get_latest_changeset_revision( app, repository, repo ) + changeset_revision = get_latest_changeset_revision( app, repository, repo ) return suc.get_repository_metadata_by_changeset_revision( app, app.security.encode_id( repository.id ), changeset_revision ) diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/util/shed_util_common.py --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -17,7 +17,6 @@ from tool_shed.util import encoding_util from tool_shed.util import hg_util -from xml.etree import ElementTree as XmlET from urllib2 import HTTPError log = logging.getLogger( __name__ ) @@ -165,35 +164,6 @@ components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ] return components_list -def generate_repository_info_elem( tool_shed, repository_name, changeset_revision, owner, parent_elem=None, **kwd ): - """Create and return an ElementTree repository info Element.""" - if parent_elem is None: - elem = XmlET.Element( 'tool_shed_repository' ) - else: - elem = XmlET.SubElement( parent_elem, 'tool_shed_repository' ) - tool_shed_elem = XmlET.SubElement( elem, 'tool_shed' ) - tool_shed_elem.text = tool_shed - repository_name_elem = XmlET.SubElement( elem, 'repository_name' ) - repository_name_elem.text = repository_name - repository_owner_elem = XmlET.SubElement( elem, 'repository_owner' ) - repository_owner_elem.text = owner - changeset_revision_elem = XmlET.SubElement( elem, 'installed_changeset_revision' ) - changeset_revision_elem.text = changeset_revision - #add additional values - #TODO: enhance additional values to allow e.g. 
use of dict values that will recurse - for key, value in kwd.iteritems(): - new_elem = XmlET.SubElement( elem, key ) - new_elem.text = value - return elem - -def generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=None, **kwd ): - return generate_repository_info_elem( tool_shed_repository.tool_shed, - tool_shed_repository.name, - tool_shed_repository.installed_changeset_revision, - tool_shed_repository.owner, - parent_elem=parent_elem, - **kwd ) - def generate_sharable_link_for_repository_in_tool_shed( repository, changeset_revision=None ): """Generate the URL for sharing a repository that is in the tool shed.""" base_url = url_for( '/', qualified=True ).rstrip( '/' ) @@ -291,61 +261,6 @@ return repository_metadata return None -def get_dependent_downloadable_revisions( app, repository_metadata ): - """ - Return all repository_metadata records that are downloadable and that depend upon the received - repository_metadata record. - """ - # This method is called only from the tool shed. - sa_session = app.model.context.current - rm_changeset_revision = repository_metadata.changeset_revision - rm_repository = repository_metadata.repository - rm_repository_name = str( rm_repository.name ) - rm_repository_owner = str( rm_repository.user.username ) - dependent_downloadable_revisions = [] - for repository in sa_session.query( app.model.Repository ) \ - .filter( and_( app.model.Repository.table.c.id != rm_repository.id, - app.model.Repository.table.c.deleted == False, - app.model.Repository.table.c.deprecated == False ) ): - downloadable_revisions = repository.downloadable_revisions - if downloadable_revisions: - for downloadable_revision in downloadable_revisions: - if downloadable_revision.has_repository_dependencies: - metadata = downloadable_revision.metadata - if metadata: - repository_dependencies_dict = metadata.get( 'repository_dependencies', {} ) - repository_dependencies_tups = repository_dependencies_dict.get( 'repository_dependencies', [] ) - for repository_dependencies_tup in repository_dependencies_tups: - tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ - common_util.parse_repository_dependency_tuple( repository_dependencies_tup ) - if name == rm_repository_name and owner == rm_repository_owner: - # We've discovered a repository revision that depends upon the repository associated - # with the received repository_metadata record, but we need to make sure it depends - # upon the revision. - if changeset_revision == rm_changeset_revision: - dependent_downloadable_revisions.append( downloadable_revision ) - else: - # Make sure the defined changeset_revision is current. - defined_repository_metadata = \ - sa_session.query( app.model.RepositoryMetadata ) \ - .filter( app.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) \ - .first() - if defined_repository_metadata is None: - # The defined changeset_revision is not associated with a repository_metadata - # record, so updates must be necessary. 
- defined_repository = get_repository_by_name_and_owner( app, name, owner ) - defined_repo = hg_util.get_repo_for_repository( app, - repository=defined_repository, - repo_path=None, - create=False ) - updated_changeset_revision = \ - get_next_downloadable_changeset_revision( defined_repository, - defined_repo, - changeset_revision ) - if updated_changeset_revision == rm_changeset_revision: - dependent_downloadable_revisions.append( downloadable_revision ) - return dependent_downloadable_revisions - def get_ids_of_tool_shed_repositories_being_installed( app, as_string=False ): installing_repository_ids = [] new_status = app.install_model.ToolShedRepository.installation_status.NEW @@ -365,18 +280,6 @@ return ','.join( installing_repository_ids ) return installing_repository_ids -def get_latest_changeset_revision( app, repository, repo ): - repository_tip = repository.tip( app ) - repository_metadata = get_repository_metadata_by_changeset_revision( app, - app.security.encode_id( repository.id ), - repository_tip ) - if repository_metadata and repository_metadata.downloadable: - return repository_tip - changeset_revisions = get_ordered_metadata_changeset_revisions( repository, repo, downloadable=False ) - if changeset_revisions: - return changeset_revisions[ -1 ] - return hg_util.INITIAL_CHANGELOG_HASH - def get_latest_downloadable_changeset_revision( app, repository, repo ): repository_tip = repository.tip( app ) repository_metadata = get_repository_metadata_by_changeset_revision( app, app.security.encode_id( repository.id ), repository_tip ) @@ -441,37 +344,11 @@ continue return key -def get_or_create_tool_shed_repository( app, tool_shed, name, owner, changeset_revision ): +def get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True ): """ - Return a tool shed repository database record defined by the combination of - tool shed, repository name, repository owner and changeset_revision or - installed_changeset_revision. A new tool shed repository record will be - created if one is not located. + Return an ordered list of changeset_revisions that are associated with metadata + where order is defined by the repository changelog. """ - install_model = app.install_model - # We store the port in the database. - tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed ) - # This method is used only in Galaxy, not the tool shed. 
- repository = get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ) - if not repository: - tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed ) - repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name ) - ctx_rev = get_ctx_rev( app, tool_shed_url, name, owner, changeset_revision ) - repository = create_or_update_tool_shed_repository( app=app, - name=name, - description=None, - installed_changeset_revision=changeset_revision, - ctx_rev=ctx_rev, - repository_clone_url=repository_clone_url, - metadata_dict={}, - status=install_model.ToolShedRepository.installation_status.NEW, - current_changeset_revision=None, - owner=owner, - dist_to_shed=False ) - return repository - -def get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True ): - """Return an ordered list of changeset_revisions that are associated with metadata where order is defined by the repository changelog.""" if downloadable: metadata_revisions = repository.downloadable_revisions else: @@ -491,9 +368,11 @@ def get_prior_import_or_install_required_dict( app, tsr_ids, repo_info_dicts ): """ - This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies - are being installed. Return a dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids, each of which is contained - in the received list of tsr_ids and whose associated repository must be imported or installed prior to the repository associated with the tsr_id key. + This method is used in the Tool Shed when exporting a repository and its dependencies, + and in Galaxy when a repository and its dependencies are being installed. Return a + dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids, + each of which is contained in the received list of tsr_ids and whose associated repository + must be imported or installed prior to the repository associated with the tsr_id key. """ # Initialize the dictionary. prior_import_or_install_required_dict = {} @@ -698,11 +577,14 @@ def get_repository_ids_requiring_prior_import_or_install( app, tsr_ids, repository_dependencies ): """ - This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies - are being installed. Inspect the received repository_dependencies and determine if the encoded id of each required repository is in the received - tsr_ids. If so, then determine whether that required repository should be imported / installed prior to its dependent repository. Return a list - of encoded repository ids, each of which is contained in the received list of tsr_ids, and whose associated repositories must be imported / installed - prior to the dependent repository associated with the received repository_dependencies. + This method is used in the Tool Shed when exporting a repository and its dependencies, + and in Galaxy when a repository and its dependencies are being installed. Inspect the + received repository_dependencies and determine if the encoded id of each required + repository is in the received tsr_ids. If so, then determine whether that required + repository should be imported / installed prior to its dependent repository. 
Return a + list of encoded repository ids, each of which is contained in the received list of tsr_ids, + and whose associated repositories must be imported / installed prior to the dependent + repository associated with the received repository_dependencies. """ prior_tsr_ids = [] if repository_dependencies: @@ -710,20 +592,32 @@ if key in [ 'description', 'root_key' ]: continue for rd_tup in rd_tups: - tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ + tool_shed, \ + name, \ + owner, \ + changeset_revision, \ + prior_installation_required, \ + only_if_compiling_contained_td = \ common_util.parse_repository_dependency_tuple( rd_tup ) - # If only_if_compiling_contained_td is False, then the repository dependency is not required to be installed prior to the dependent - # repository even if prior_installation_required is True. This is because the only meaningful content of the repository dependency - # is its contained tool dependency, which is required in order to compile the dependent repository's tool dependency. In the scenario - # where the repository dependency is not installed prior to the dependent repository's tool dependency compilation process, the tool - # dependency compilation framework will install the repository dependency prior to compilation of the dependent repository's tool - # dependency. + # If only_if_compiling_contained_td is False, then the repository dependency + # is not required to be installed prior to the dependent repository even if + # prior_installation_required is True. This is because the only meaningful + # content of the repository dependency is its contained tool dependency, which + # is required in order to compile the dependent repository's tool dependency. + # In the scenario where the repository dependency is not installed prior to the + # dependent repository's tool dependency compilation process, the tool dependency + # compilation framework will install the repository dependency prior to compilation + # of the dependent repository's tool dependency. if not util.asbool( only_if_compiling_contained_td ): if util.asbool( prior_installation_required ): if is_tool_shed_client( app ): # We store the port, if one exists, in the database. tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed ) - repository = get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ) + repository = get_repository_for_dependency_relationship( app, + tool_shed, + name, + owner, + changeset_revision ) else: repository = get_repository_by_name_and_owner( app, name, owner ) if repository: @@ -779,14 +673,6 @@ query = app.model.context.query( app.model.Repository ) return query -def get_skip_tool_test_by_changeset_revision( app, changeset_revision ): - """Return a skip_tool_test record whose initial_changeset_revision is the received changeset_revision.""" - # There should only be one, but we'll use first() so callers won't have to handle exceptions. - sa_session = app.model.context.current - return sa_session.query( app.model.SkipToolTest ) \ - .filter( app.model.SkipToolTest.table.c.initial_changeset_revision == changeset_revision ) \ - .first() - def get_tool_panel_config_tool_path_install_dir( app, repository ): """ Return shed-related tool panel config, the tool_path configured in it, and the relative path to @@ -1220,42 +1106,6 @@ text = re.sub( r'\.\. image:: (?!https?://)/?(.+)', r'.. 
image:: %s/\1' % route_to_images, text ) return text -def set_only_if_compiling_contained_td( repository, required_repository ): - """ - Return True if the received required_repository is only needed to compile a tool - dependency defined for the received repository. - """ - # This method is called only from Galaxy when rendering repository dependencies - # for an installed tool shed repository. - # TODO: Do we need to check more than changeset_revision here? - required_repository_tup = [ required_repository.tool_shed, \ - required_repository.name, \ - required_repository.owner, \ - required_repository.changeset_revision ] - for tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td: - partial_tup = tup[ 0:4 ] - if partial_tup == required_repository_tup: - return 'True' - return 'False' - -def set_prior_installation_required( app, repository, required_repository ): - """Return True if the received required_repository must be installed before the received repository.""" - # This method is called only from Galaxy when rendering repository dependencies for an installed Tool Shed repository. - tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, str( required_repository.tool_shed ) ) - required_repository_tup = [ tool_shed_url, - str( required_repository.name ), - str( required_repository.owner ), - str( required_repository.changeset_revision ) ] - # Get the list of repository dependency tuples associated with the received repository where prior_installation_required is True. - required_rd_tups_that_must_be_installed = repository.requires_prior_installation_of - for required_rd_tup in required_rd_tups_that_must_be_installed: - # Repository dependency tuples in metadata include a prior_installation_required value, so strip it for comparision. - partial_required_rd_tup = required_rd_tup[ 0:4 ] - if partial_required_rd_tup == required_repository_tup: - # Return the string value of prior_installation_required, which defaults to 'False'. - return str( required_rd_tup[ 4 ] ) - return 'False' - -def set_repository_attributes( app, repository, status, error_message, deleted, uninstalled, remove_from_disk=False ): if remove_from_disk: relative_install_dir = repository.repo_path( app )

Repository URL: https://bitbucket.org/galaxy/galaxy-central/
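The refactoring pattern is the same across most of the affected files: a module-level helper in shed_util_common.py that took app as its first argument becomes a method on the manager class that already holds self.app, so call sites such as suc.get_dependent_downloadable_revisions( self.app, repository_metadata ) become self.get_dependent_downloadable_revisions( repository_metadata ). (The one exception is get_latest_changeset_revision, which moves to metadata_util.py but remains a module-level function.) The sketch below is illustrative only, with hypothetical names rather than Galaxy code; it shows the general shape of moving an app-threading helper onto a manager class.

    # Illustrative sketch only -- hypothetical names, not Galaxy code.
    class FakeApp( object ):
        """Minimal stand-in for the Galaxy application object."""
        def __init__( self, name ):
            self.name = name

    # Before: a free function in a shared utility module; every caller passes `app` explicitly.
    def get_label( app, suffix ):
        return '%s-%s' % ( app.name, suffix )

    # After: the same logic as a method on a manager that already holds the app reference,
    # so call sites drop both the utility-module prefix and the `app` argument.
    class ExampleManager( object ):
        def __init__( self, app ):
            self.app = app

        def get_label( self, suffix ):
            return '%s-%s' % ( self.app.name, suffix )

    app = FakeApp( 'galaxy' )
    print( get_label( app, 'x' ) )                    # old-style call site
    print( ExampleManager( app ).get_label( 'x' ) )   # new-style call site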