commit/galaxy-central: greg: Add support for installing and administering simple repository dependencies for tool shed repositories installed into a Galaxy instance.
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/changeset/759d96f950b8/ changeset: 759d96f950b8 user: greg date: 2012-12-19 21:57:09 summary: Add support for installing and administering simple repository dependencies for tool shed repositories installed into a Galaxy instance. affected #: 27 files diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/app.py --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -23,6 +23,7 @@ """Encapsulates the state of a Universe application""" def __init__( self, **kwargs ): print >> sys.stderr, "python path is: " + ", ".join( sys.path ) + self.name = 'galaxy' self.new_installation = False # Read config file and check for errors self.config = config.Configuration( **kwargs ) diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3148,6 +3148,9 @@ def can_reinstall_or_activate( self ): return self.deleted @property + def has_repository_dependencies( self ): + return self.metadata and 'repository_dependencies' in self.metadata + @property def includes_tools( self ): return self.metadata and 'tools' in self.metadata @property @@ -3211,6 +3214,15 @@ uninstalled_tool_dependencies.append( tool_dependency ) return uninstalled_tool_dependencies +class RepositoryRepositoryDependencyAssociation( object ): + def __init__( self, tool_shed_repository_id=None, repository_dependency_id=None ): + self.tool_shed_repository_id = tool_shed_repository_id + self.repository_dependency_id = repository_dependency_id + +class RepositoryDependency( object ): + def __init__( self, tool_shed_repository_id=None ): + self.tool_shed_repository_id = tool_shed_repository_id + class ToolDependency( object ): installation_status = Bunch( NEVER_INSTALLED='Never installed', INSTALLING='Installing', diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/model/mapping.py --- a/lib/galaxy/model/mapping.py +++ b/lib/galaxy/model/mapping.py @@ -409,6 +409,19 @@ Column( "status", TrimmedString( 255 ) ), Column( "error_message", TEXT ) ) +RepositoryRepositoryDependencyAssociation.table = Table( 'repository_repository_dependency_association', metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ), + Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) ) + +RepositoryDependency.table = Table( "repository_dependency", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) ) + ToolDependency.table = Table( "tool_dependency", metadata, Column( "id", Integer, primary_key=True ), Column( "create_time", DateTime, default=now ), @@ -1744,7 +1757,19 @@ tool_dependencies=relation( ToolDependency, primaryjoin=( ToolShedRepository.table.c.id == ToolDependency.table.c.tool_shed_repository_id ), order_by=ToolDependency.table.c.name, - backref='tool_shed_repository' ) ) ) + backref='tool_shed_repository' ), + repository_dependencies=relation( 
RepositoryRepositoryDependencyAssociation, + primaryjoin=( ToolShedRepository.table.c.id == RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id ) ) ) ) + +assign_mapper( context, RepositoryRepositoryDependencyAssociation, RepositoryRepositoryDependencyAssociation.table, + properties=dict( repository=relation( ToolShedRepository, + primaryjoin=( RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id == ToolShedRepository.table.c.id ) ), + repository_dependency=relation( RepositoryDependency, + primaryjoin=( RepositoryRepositoryDependencyAssociation.table.c.repository_dependency_id == RepositoryDependency.table.c.id ) ) ) ) + +assign_mapper( context, RepositoryDependency, RepositoryDependency.table, + properties=dict( repository=relation( ToolShedRepository, + primaryjoin=( RepositoryDependency.table.c.tool_shed_repository_id == ToolShedRepository.table.c.id ) ) ) ) assign_mapper( context, ToolDependency, ToolDependency.table ) diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py --- /dev/null +++ b/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py @@ -0,0 +1,58 @@ +""" +Migration script to add the repository_dependency and repository_repository_dependency_association tables. +""" +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * +import sys, logging +from galaxy.model.custom_types import * +from sqlalchemy.exc import * +import datetime +now = datetime.datetime.utcnow + +log = logging.getLogger( __name__ ) +log.setLevel( logging.DEBUG ) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData( migrate_engine ) + +RepositoryDependency_table = Table( "repository_dependency", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) ) + +RepositoryRepositoryDependencyAssociation_table = Table( "repository_repository_dependency_association", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ), + Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) ) + +def upgrade(): + print __doc__ + metadata.reflect() + try: + RepositoryDependency_table.create() + except Exception, e: + log.debug( "Creating repository_dependency table failed: %s" % str( e ) ) + try: + RepositoryRepositoryDependencyAssociation_table.create() + except Exception, e: + log.debug( "Creating repository_repository_dependency_association table failed: %s" % str( e ) ) + +def downgrade(): + metadata.reflect() + try: + RepositoryRepositoryDependencyAssociation_table.drop() + except Exception, e: + log.debug( "Dropping repository_repository_dependency_association table failed: %s" % str( e ) ) + try: + RepositoryDependency_table.drop() + except Exception, e: + log.debug( "Dropping repository_dependency table failed: %s" % str( e ) ) diff -r 
075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/tool_shed/install_manager.py --- a/lib/galaxy/tool_shed/install_manager.py +++ b/lib/galaxy/tool_shed/install_manager.py @@ -283,19 +283,19 @@ repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name ) relative_install_dir = os.path.join( relative_clone_dir, name ) install_dir = os.path.join( clone_dir, name ) - ctx_rev = shed_util.get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision ) + ctx_rev = suc.get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision ) print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name - tool_shed_repository = shed_util.create_or_update_tool_shed_repository( app=self.app, - name=name, - description=description, - installed_changeset_revision=installed_changeset_revision, - ctx_rev=ctx_rev, - repository_clone_url=repository_clone_url, - metadata_dict={}, - status=self.app.model.ToolShedRepository.installation_status.NEW, - current_changeset_revision=None, - owner=self.repository_owner, - dist_to_shed=True ) + tool_shed_repository = suc.create_or_update_tool_shed_repository( app=self.app, + name=name, + description=description, + installed_changeset_revision=installed_changeset_revision, + ctx_rev=ctx_rev, + repository_clone_url=repository_clone_url, + metadata_dict={}, + status=self.app.model.ToolShedRepository.installation_status.NEW, + current_changeset_revision=None, + owner=self.repository_owner, + dist_to_shed=True ) shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING ) cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev ) if cloned_ok: diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/tools/__init__.py --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -32,7 +32,7 @@ from cgi import FieldStorage from galaxy.util.hash_util import * from galaxy.util import listify -import galaxy.util.shed_util +import galaxy.util.shed_util_common from galaxy.web import url_for from galaxy.visualization.genome.visual_analytics import TracksterConfig @@ -890,11 +890,11 @@ def tool_shed_repository( self ): # If this tool is included in an installed tool shed repository, return it. if self.tool_shed: - return galaxy.util.shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app, - self.tool_shed, - self.repository_name, - self.repository_owner, - self.installed_changeset_revision ) + return galaxy.util.shed_util_common.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app, + self.tool_shed, + self.repository_name, + self.repository_owner, + self.installed_changeset_revision ) return None def __get_job_run_config( self, run_configs, key, job_params=None ): # Look through runners/handlers to find one with matching parameters. 
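For orientation, the mapping and migration changes above wire each installed ToolShedRepository to the repositories it requires through the new repository_repository_dependency_association and repository_dependency tables. The following is only a minimal sketch of walking that relationship; the attribute names follow the mappings in this changeset, but obtaining a SQLAlchemy session, the model module, and a repository object is application-specific and assumed here.

    def list_required_repositories( sa_session, model, repository ):
        """Print the installed repositories that the given repository depends on."""
        # repository.repository_dependencies is the new relation to
        # RepositoryRepositoryDependencyAssociation rows defined in mapping.py above.
        for rrda in repository.repository_dependencies:
            # Each association references a RepositoryDependency row, which in turn
            # points at the required tool_shed_repository record by id.
            required = sa_session.query( model.ToolShedRepository ) \
                                 .get( rrda.repository_dependency.tool_shed_repository_id )
            print "%s requires %s (owner %s, revision %s)" % \
                ( repository.name, required.name, required.owner, required.changeset_revision )

This is the same traversal that build_repository_dependency_relationships (added to shed_util_common.py below) performs when it checks whether a dependency association already exists before creating a new one.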
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/util/shed_util.py --- a/lib/galaxy/util/shed_util.py +++ b/lib/galaxy/util/shed_util.py @@ -1,6 +1,9 @@ import os, tempfile, shutil, logging, urllib2 from galaxy.datatypes import checkers +from galaxy.web import url_for from galaxy import util +from galaxy.util.json import from_json_string, to_json_string +from galaxy.webapps.community.util import container_util import shed_util_common as suc from galaxy.tools.search import ToolBoxSearch from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package, set_environment @@ -171,6 +174,79 @@ # Attempt to ensure we're copying an appropriate file. if is_data_index_sample_file( filename ): suc.copy_sample_file( app, filename, dest_path=dest_path ) +def create_repository_dependency_objects( trans, tool_path, tool_shed_url, repo_info_dicts, reinstalling=False ): + """ + Discover all repository dependencies and make sure all tool_shed_repository and associated repository_dependency records exist as well as + the dependency relationships between installed repositories. This method is called when new repositories are being installed into a Galaxy + instance and when uninstalled repositories are being reinstalled. + """ + message = '' + created_or_updated_tool_shed_repositories = [] + # Repositories will be filtered (e.g., if already installed, etc), so filter the associated repo_info_dicts accordingly. + filtered_repo_info_dicts = [] + # Discover all repository dependencies and retrieve information for installing them. + all_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, repo_info_dicts ) + for repo_info_dict in all_repo_info_dicts: + for name, repo_info_tuple in repo_info_dict.items(): + description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ + suc.get_repo_info_tuple_contents( repo_info_tuple ) + clone_dir = os.path.join( tool_path, generate_tool_path( repository_clone_url, changeset_revision ) ) + relative_install_dir = os.path.join( clone_dir, name ) + # Make sure the repository was not already installed. + installed_tool_shed_repository, installed_changeset_revision = \ + repository_was_previously_installed( trans, tool_shed_url, name, repo_info_tuple, clone_dir ) + if installed_tool_shed_repository: + if reinstalling: + if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR, + trans.model.ToolShedRepository.installation_status.UNINSTALLED ]: + can_update = True + name = installed_tool_shed_repository.name + description = installed_tool_shed_repository.description + installed_changeset_revision = installed_tool_shed_repository.installed_changeset_revision + metadata_dict = installed_tool_shed_repository.metadata + dist_to_shed = installed_tool_shed_repository.dist_to_shed + else: + # There is a repository already installed which is a dependency of the repository being reinstalled. + can_update = False + else: + # An attempt is being made to install a tool shed repository into a Galaxy instance when the same repository was previously installed. + message += "Revision <b>%s</b> of tool shed repository <b>%s</b> owned by <b>%s</b> " % ( changeset_revision, name, repository_owner ) + if installed_changeset_revision != changeset_revision: + message += "was previously installed using changeset revision <b>%s</b>. 
" % installed_changeset_revision + else: + message += "was previously installed. " + if installed_tool_shed_repository.uninstalled: + message += "The repository has been uninstalled, however, so reinstall the original repository instead of installing it again. " + elif installed_tool_shed_repository.deleted: + message += "The repository has been deactivated, however, so activate the original repository instead of installing it again. " + if installed_changeset_revision != changeset_revision: + message += "You can get the latest updates for the repository using the <b>Get updates</b> option from the repository's " + message += "<b>Repository Actions</b> pop-up menu. " + if len( repo_info_dicts ) == 1: + return created_or_updated_tool_shed_repositories, all_repo_info_dicts, filtered_repo_info_dicts, message + else: + # A tool shed repository is being installed into a Galaxy instance for the first time. We may have the case where a repository + # is being reinstalled where because the repository being newly installed here may be a dependency of the repository being reinstalled. + can_update = True + installed_changeset_revision = changeset_revision + metadata_dict={} + dist_to_shed = False + if can_update: + log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name ) + tool_shed_repository = suc.create_or_update_tool_shed_repository( app=trans.app, + name=name, + description=description, + installed_changeset_revision=changeset_revision, + ctx_rev=ctx_rev, + repository_clone_url=repository_clone_url, + metadata_dict={}, + status=trans.model.ToolShedRepository.installation_status.NEW, + current_changeset_revision=changeset_revision, + owner=repository_owner, + dist_to_shed=False ) + created_or_updated_tool_shed_repositories.append( tool_shed_repository ) + filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) ) + return created_or_updated_tool_shed_repositories, all_repo_info_dicts, filtered_repo_info_dicts, message def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ): return dict( tool_shed=tool_shed, repository_name=name, @@ -179,62 +255,8 @@ tool_dicts=tool_dicts, converter_path=converter_path, display_path=display_path ) -def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict, - status, current_changeset_revision=None, owner='', dist_to_shed=False ): - # The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used - # to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed. - if current_changeset_revision is None: - # The current_changeset_revision is not passed if a repository is being installed for the first time. If a previously installed repository - # was later uninstalled, this value should be received as the value of that change set to which the repository had been updated just prior - # to it being uninstalled. 
- current_changeset_revision = installed_changeset_revision - sa_session = app.model.context.current - tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url ) - if not owner: - owner = get_repository_owner_from_clone_url( repository_clone_url ) - includes_datatypes = 'datatypes' in metadata_dict - if status in [ app.model.ToolShedRepository.installation_status.DEACTIVATED ]: - deleted = True - uninstalled = False - elif status in [ app.model.ToolShedRepository.installation_status.UNINSTALLED ]: - deleted = True - uninstalled = True - else: - deleted = False - uninstalled = False - tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, - tool_shed, - name, - owner, - installed_changeset_revision ) - if tool_shed_repository: - tool_shed_repository.description = description - tool_shed_repository.changeset_revision = current_changeset_revision - tool_shed_repository.ctx_rev = ctx_rev - tool_shed_repository.metadata = metadata_dict - tool_shed_repository.includes_datatypes = includes_datatypes - tool_shed_repository.deleted = deleted - tool_shed_repository.uninstalled = uninstalled - tool_shed_repository.status = status - else: - tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed, - name=name, - description=description, - owner=owner, - installed_changeset_revision=installed_changeset_revision, - changeset_revision=current_changeset_revision, - ctx_rev=ctx_rev, - metadata=metadata_dict, - includes_datatypes=includes_datatypes, - dist_to_shed=dist_to_shed, - deleted=deleted, - uninstalled=uninstalled, - status=status ) - sa_session.add( tool_shed_repository ) - sa_session.flush() - return tool_shed_repository def create_tool_dependency_objects( app, tool_shed_repository, relative_install_dir, set_status=True ): - # Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository. + """Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository.""" tool_dependency_objects = [] shed_config_dict = tool_shed_repository.get_shed_config_dict( app ) if shed_config_dict.get( 'tool_path' ): @@ -355,6 +377,20 @@ tool_section_dicts = generate_tool_section_dicts( tool_config=file_name, tool_sections=tool_sections ) tool_panel_dict[ guid ] = tool_section_dicts return tool_panel_dict +def generate_tool_path( repository_clone_url, changeset_revision ): + """ + Generate a tool path that guarantees repositories with the same name will always be installed + in different directories. 
The tool path will be of the form: + <tool shed url>/repos/<repository owner>/<repository name>/<installed changeset revision> + http://test@bx.psu.edu:9009/repos/test/filter + """ + tmp_url = suc.clean_repository_clone_url( repository_clone_url ) + # Now tmp_url is something like: bx.psu.edu:9009/repos/some_username/column + items = tmp_url.split( 'repos' ) + tool_shed_url = items[ 0 ] + repo_path = items[ 1 ] + tool_shed_url = suc.clean_tool_shed_url( tool_shed_url ) + return suc.url_join( tool_shed_url, 'repos', repo_path, changeset_revision ) def generate_tool_section_dicts( tool_config=None, tool_sections=None ): tool_section_dicts = [] if tool_config is None: @@ -438,12 +474,6 @@ if converter_path and display_path: break return converter_path, display_path -def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ): - url = suc.url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) ) - response = urllib2.urlopen( url ) - ctx_rev = response.read() - response.close() - return ctx_rev def get_headers( fname, sep, count=60, is_multi_byte=False ): """Returns a list with the first 'count' lines split by 'sep'.""" headers = [] @@ -456,6 +486,23 @@ if idx == count: break return headers +def get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies ): + if all_tool_dependencies: + tool_dependencies = {} + missing_tool_dependencies = {} + for td_key, td_info_dict in all_tool_dependencies.items(): + name = td_info_dict[ 'name' ] + version = td_info_dict[ 'version' ] + type = td_info_dict[ 'type' ] + tool_dependency = get_tool_dependency_by_name_version_type_repository( trans, repository, name, version, type ) + if tool_dependency.status == trans.model.ToolDependency.installation_status.INSTALLED: + tool_dependencies[ td_key ] = td_info_dict + else: + missing_tool_dependencies[ td_key ] = td_info_dict + else: + tool_dependencies = None + missing_tool_dependencies = None + return tool_dependencies, missing_tool_dependencies def get_repository_owner( cleaned_repository_url ): items = cleaned_repository_url.split( 'repos' ) repo_path = items[ 1 ] @@ -466,6 +513,54 @@ tmp_url = suc.clean_repository_clone_url( repository_clone_url ) tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) return get_repository_owner( tmp_url ) +def get_required_repo_info_dicts( tool_shed_url, repo_info_dicts ): + """ + Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All + repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through + this methid is required to retrieve all repository dependencies. + """ + if repo_info_dicts: + all_repo_info_dicts = [ rid for rid in repo_info_dicts ] + # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids. 
+ required_repository_tups = [] + for repo_info_dict in repo_info_dicts: + for repository_name, repo_info_tup in repo_info_dict.items(): + description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ + suc.get_repo_info_tuple_contents( repo_info_tup ) + if repository_dependencies: + for key, val in repository_dependencies.items(): + if key in [ 'root_key', 'description' ]: + continue + toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key ) + components_list = [ toolshed, name, owner, changeset_revision ] + if components_list not in required_repository_tups: + required_repository_tups.append( components_list ) + for components_list in val: + if components_list not in required_repository_tups: + required_repository_tups.append( components_list ) + if required_repository_tups: + # The value of required_repository_tups is a list of tuples, so we need to encode it. + encoded_required_repository_tups = [] + for required_repository_tup in required_repository_tups: + encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) ) + encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups ) + encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str ) + url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str ) + response = urllib2.urlopen( url ) + text = response.read() + response.close() + if text: + required_repo_info_dict = from_json_string( text ) + required_repo_info_dicts = [] + encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ] + for encoded_dict_str in encoded_dict_strings: + decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str ) + required_repo_info_dicts.append( decoded_dict ) + if required_repo_info_dicts: + for required_repo_info_dict in required_repo_info_dicts: + if required_repo_info_dict not in all_repo_info_dicts: + all_repo_info_dicts.append( required_repo_info_dict ) + return all_repo_info_dicts def get_tool_index_sample_files( sample_files ): """Try to return the list of all appropriate tool data sample files included in the repository.""" tool_index_sample_files = [] @@ -477,6 +572,19 @@ def get_tool_dependency( trans, id ): """Get a tool_dependency from the database via id""" return trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) ) +def get_tool_dependency_by_name_type_repository( trans, repository, name, type ): + return trans.sa_session.query( trans.model.ToolDependency ) \ + .filter( and_( trans.model.ToolDependency.table.c.tool_shed_repository_id == repository.id, + trans.model.ToolDependency.table.c.name == name, + trans.model.ToolDependency.table.c.type == type ) ) \ + .first() +def get_tool_dependency_by_name_version_type_repository( trans, repository, name, version, type ): + return trans.sa_session.query( trans.model.ToolDependency ) \ + .filter( and_( trans.model.ToolDependency.table.c.tool_shed_repository_id == repository.id, + trans.model.ToolDependency.table.c.name == name, + trans.model.ToolDependency.table.c.version == version, + trans.model.ToolDependency.table.c.type == type ) ) \ + .first() def get_tool_dependency_ids( as_string=False, **kwd ): tool_dependency_id = kwd.get( 'tool_dependency_id', None ) tool_dependency_ids = util.listify( kwd.get( 'tool_dependency_ids', None ) ) @@ -502,30 +610,6 @@ 
relative_install_dir = os.path.join( tool_path, partial_install_dir ) return tool_path, relative_install_dir return None, None -def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ): - # This method is used only in Galaxy, not the tool shed. - sa_session = app.model.context.current - if tool_shed.find( '//' ) > 0: - tool_shed = tool_shed.split( '//' )[1] - tool_shed = tool_shed.rstrip( '/' ) - return sa_session.query( app.model.ToolShedRepository ) \ - .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed, - app.model.ToolShedRepository.table.c.name == name, - app.model.ToolShedRepository.table.c.owner == owner, - app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ - .first() -def get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision ): - # This method is used only in Galaxy, not the tool shed. - sa_session = app.model.context.current - if tool_shed.find( '//' ) > 0: - tool_shed = tool_shed.split( '//' )[1] - tool_shed = tool_shed.rstrip( '/' ) - return sa_session.query( app.model.ToolShedRepository ) \ - .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed, - app.model.ToolShedRepository.table.c.name == name, - app.model.ToolShedRepository.table.c.owner == owner, - app.model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \ - .first() def get_tool_version( app, tool_id ): sa_session = app.model.context.current return sa_session.query( app.model.ToolVersion ) \ @@ -908,6 +992,35 @@ trans.sa_session.add( tool_dependency ) trans.sa_session.flush() return removed, error_message +def repository_was_previously_installed( trans, tool_shed_url, repository_name, repo_info_tuple, clone_dir ): + """ + Handle the case where the repository was previously installed using an older changeset_revsion, but later the repository was updated + in the tool shed and now we're trying to install the latest changeset revision of the same repository instead of updating the one + that was previously installed. We'll look in the database instead of on disk since the repository may be uninstalled. + """ + description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ + suc.get_repo_info_tuple_contents( repo_info_tuple ) + tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url ) + # Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset + # revision to see if it was previously installed using one of them. 
+ url = suc.url_join( tool_shed_url, + 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \ + ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) ) + response = urllib2.urlopen( url ) + text = response.read() + response.close() + if text: + #clone_path, clone_directory = os.path.split( clone_dir ) + changeset_revisions = util.listify( text ) + for previous_changeset_revision in changeset_revisions: + tool_shed_repository = suc.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( trans.app, + tool_shed, + repository_name, + repository_owner, + previous_changeset_revision ) + if tool_shed_repository and tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]: + return tool_shed_repository, previous_changeset_revision + return None, None def update_tool_shed_repository_status( app, tool_shed_repository, status ): sa_session = app.model.context.current tool_shed_repository.status = status diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/util/shed_util_common.py --- a/lib/galaxy/util/shed_util_common.py +++ b/lib/galaxy/util/shed_util_common.py @@ -57,26 +57,24 @@ requirements_dict[ 'install_dir' ] = install_dir tool_dependencies[ dependency_key ] = requirements_dict return tool_dependencies -def build_readme_files_dict( repository_metadata ): +def build_readme_files_dict( metadata ): """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received repository_metadata.""" readme_files_dict = {} - if repository_metadata: - metadata = repository_metadata.metadata - if metadata and 'readme_files' in metadata: - for relative_path_to_readme_file in metadata[ 'readme_files' ]: - readme_file_name = os.path.split( relative_path_to_readme_file )[ 1 ] - full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file ) - try: - f = open( full_path_to_readme_file, 'r' ) - text = f.read() - f.close() - readme_files_dict[ readme_file_name ] = translate_string( text, to_html=False ) - except Exception, e: - log.debug( "Error reading README file '%s' defined in metadata for repository '%s', revision '%s': %s" % \ - ( str( relative_path_to_readme_file ), str( repository_name ), str( changeset_revision ), str( e ) ) ) + if metadata and 'readme_files' in metadata: + for relative_path_to_readme_file in metadata[ 'readme_files' ]: + readme_file_name = os.path.split( relative_path_to_readme_file )[ 1 ] + full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file ) + try: + f = open( full_path_to_readme_file, 'r' ) + text = f.read() + f.close() + readme_files_dict[ readme_file_name ] = translate_string( text, to_html=False ) + except Exception, e: + log.debug( "Error reading README file '%s' defined in metadata: %s" % ( str( relative_path_to_readme_file ), str( e ) ) ) return readme_files_dict -def build_repository_containers_for_galaxy( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, - readme_files_dict, repository_dependencies, tool_dependencies ): +def build_repository_containers_for_galaxy( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, repository, datatypes, + invalid_tools, missing_tool_dependencies, readme_files_dict, repository_dependencies, tool_dependencies, + valid_tools, workflows ): """Return a dictionary of containers for the received repository's dependencies 
and readme files for display during installation to Galaxy.""" containers_dict = dict( readme_files=None, repository_dependencies=None, tool_dependencies=None ) if readme_files_dict or repository_dependencies or tool_dependencies: @@ -91,9 +89,23 @@ tool_dependencies ) try: folder_id = 0 + # Datatypes container. + if datatypes: + folder_id, datatypes_root_folder = container_util.build_datatypes_folder( folder_id, datatypes ) + containers_dict[ 'datatypes' ] = datatypes_root_folder + # Invalid tools container. + if invalid_tools: + folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( folder_id, + invalid_tools, + changeset_revision, + repository=repository, + label='Invalid tools' ) + containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder + # Readme files container. if readme_files_dict: folder_id, readme_files_root_folder = build_readme_files_folder( folder_id, readme_files_dict ) containers_dict[ 'readme_files' ] = readme_files_root_folder + # Repository dependencies container. if repository_dependencies: folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url, repository_name=repository_name, @@ -102,9 +114,28 @@ folder_id=folder_id, repository_dependencies=repository_dependencies ) containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder + # Tool dependencies container. if tool_dependencies: folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=True ) containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder + # Missing tool dependencies container. + if missing_tool_dependencies: + folder_id, missing_tool_dependencies_root_folder = \ + container_util.build_tool_dependencies_folder( folder_id, missing_tool_dependencies, label='Missing tool dependencies', for_galaxy=True ) + containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder + # Valid tools container. + if valid_tools: + folder_id, valid_tools_root_folder = container_util.build_tools_folder( folder_id, + valid_tools, + repository, + changeset_revision, + label='Valid tools', + description='click the name to inspect the tool metadata' ) + containers_dict[ 'valid_tools' ] = valid_tools_root_folder + # Workflows container. + if workflows: + folder_id, workflows_root_folder = container_util.build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows' ) + containers_dict[ 'workflows' ] = workflows_root_folder except Exception, e: log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) ) finally: @@ -140,9 +171,10 @@ label='Invalid tools' ) containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder # Readme files container. - readme_files_dict = build_readme_files_dict( repository_metadata ) - folder_id, readme_files_root_folder = container_util.build_readme_files_folder( folder_id, readme_files_dict ) - containers_dict[ 'readme_files' ] = readme_files_root_folder + if metadata and 'readme_files' in metadata: + readme_files_dict = build_readme_files_dict( metadata ) + folder_id, readme_files_root_folder = container_util.build_readme_files_folder( folder_id, readme_files_dict ) + containers_dict[ 'readme_files' ] = readme_files_root_folder # Repository dependencies container. 
toolshed_base_url = str( url_for( '/', qualified=True ) ).rstrip( '/' ) folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url, @@ -177,6 +209,64 @@ finally: lock.release() return containers_dict +def build_repository_dependency_relationships( trans, repo_info_dicts, tool_shed_repositories ): + """ + Build relationships between installed tool shed repositories and other installed tool shed repositories upon which they depend. These + relationships are defined in the repository_dependencies entry for each dictionary in the received list of repo_info_dicts. Each of + these dictionaries is associated with a repository in the received tool_shed_repositories list. + """ + for repo_info_dict in repo_info_dicts: + for name, repo_info_tuple in repo_info_dict.items(): + description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ + get_repo_info_tuple_contents( repo_info_tuple ) + if repository_dependencies: + for key, val in repository_dependencies.items(): + if key in [ 'root_key', 'description' ]: + continue + dependent_repository = None + dependent_toolshed, dependent_name, dependent_owner, dependent_changeset_revision = container_util.get_components_from_key( key ) + for tsr in tool_shed_repositories: + # Get the the tool_shed_repository defined by name, owner and changeset_revision. This is the repository that will be + # dependent upon each of the tool shed repositories contained in val. + # TODO: Check tool_shed_repository.tool_shed as well when repository dependencies across tool sheds is supported. + if tsr.name == dependent_name and tsr.owner == dependent_owner and tsr.changeset_revision == dependent_changeset_revision: + dependent_repository = tsr + break + if dependent_repository is None: + # The dependent repository is not in the received list so look in the database. + dependent_repository = get_or_create_tool_shed_repository( trans, dependent_toolshed, dependent_name, dependent_owner, dependent_changeset_revision ) + # Process each repository_dependency defined for the current dependent repository. + for repository_dependency_components_list in val: + required_repository = None + rd_toolshed, rd_name, rd_owner, rd_changeset_revision = repository_dependency_components_list + # Get the the tool_shed_repository defined by rd_name, rd_owner and rd_changeset_revision. This is the repository that will be + # required by the current dependent_repository. + # TODO: Check tool_shed_repository.tool_shed as well when repository dependencies across tool sheds is supported. + for tsr in tool_shed_repositories: + if tsr.name == rd_name and tsr.owner == rd_owner and tsr.changeset_revision == rd_changeset_revision: + required_repository = tsr + break + if required_repository is None: + # The required repository is not in the received list so look in the database. + required_repository = get_or_create_tool_shed_repository( trans, rd_toolshed, rd_name, rd_owner, rd_changeset_revision ) + # Ensure there is a repository_dependency relationship between dependent_repository and required_repository. + rrda = None + for rd in dependent_repository.repository_dependencies: + if rd.repository_dependency.tool_shed_repository_id == required_repository.id: + rrda = rd + break + if not rrda: + # Make sure required_repository is in the repository_dependency table. 
+ repository_dependency = get_repository_dependency_by_repository_id( trans, required_repository.id ) + if not repository_dependency: + repository_dependency = trans.model.RepositoryDependency( tool_shed_repository_id=required_repository.id ) + trans.sa_session.add( repository_dependency ) + trans.sa_session.flush() + # Build the relationship between the dependent_repository and the required_repository. + rrda = trans.model.RepositoryRepositoryDependencyAssociation( tool_shed_repository_id=dependent_repository.id, + repository_dependency_id=repository_dependency.id ) + trans.sa_session.add( rrda ) + trans.sa_session.flush() def build_repository_ids_select_field( trans, cntrller, name='repository_ids', multiple=True, display='checkboxes' ): """Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata.""" repositories_select_field = SelectField( name=name, multiple=multiple, display=display ) @@ -546,6 +636,60 @@ trans.sa_session.add( repository_metadata ) trans.sa_session.flush() return repository_metadata +def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict, + status, current_changeset_revision=None, owner='', dist_to_shed=False ): + # The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used + # to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed. + if current_changeset_revision is None: + # The current_changeset_revision is not passed if a repository is being installed for the first time. If a previously installed repository + # was later uninstalled, this value should be received as the value of that change set to which the repository had been updated just prior + # to it being uninstalled. 
+ current_changeset_revision = installed_changeset_revision + sa_session = app.model.context.current + tool_shed = get_tool_shed_from_clone_url( repository_clone_url ) + if not owner: + owner = get_repository_owner_from_clone_url( repository_clone_url ) + includes_datatypes = 'datatypes' in metadata_dict + if status in [ app.model.ToolShedRepository.installation_status.DEACTIVATED ]: + deleted = True + uninstalled = False + elif status in [ app.model.ToolShedRepository.installation_status.UNINSTALLED ]: + deleted = True + uninstalled = True + else: + deleted = False + uninstalled = False + tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, + tool_shed, + name, + owner, + installed_changeset_revision ) + if tool_shed_repository: + tool_shed_repository.description = description + tool_shed_repository.changeset_revision = current_changeset_revision + tool_shed_repository.ctx_rev = ctx_rev + tool_shed_repository.metadata = metadata_dict + tool_shed_repository.includes_datatypes = includes_datatypes + tool_shed_repository.deleted = deleted + tool_shed_repository.uninstalled = uninstalled + tool_shed_repository.status = status + else: + tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed, + name=name, + description=description, + owner=owner, + installed_changeset_revision=installed_changeset_revision, + changeset_revision=current_changeset_revision, + ctx_rev=ctx_rev, + metadata=metadata_dict, + includes_datatypes=includes_datatypes, + dist_to_shed=dist_to_shed, + deleted=deleted, + uninstalled=uninstalled, + status=status ) + sa_session.add( tool_shed_repository ) + sa_session.flush() + return tool_shed_repository def create_repo_info_dict( trans, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None, repository=None, repository_metadata=None, metadata=None, repository_dependencies=None ): """ @@ -585,6 +729,20 @@ repository_dependencies, metadata.get( 'tool_dependencies', None ) ) return repo_info_dict +def ensure_required_repositories_exist_for_reinstall( trans, repository_dependencies ): + """ + Inspect the received repository_dependencies dictionary and make sure tool_shed_repository objects exist in the database for each entry. These + tool_shed_repositories do not necessarily have to exist on disk, and if they do not, their status will be marked appropriately. They must exist + in the database in order for repository dependency relationships to be properly built. + """ + for key, val in repository_dependencies.items(): + if key in [ 'root_key', 'description' ]: + continue + tool_shed, name, owner, changeset_revision = container_util.get_components_from_key( key ) + repository = get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision ) + for repository_components_list in val: + tool_shed, name, owner, changeset_revision = repository_components_list + repository = get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision ) def generate_clone_url_for_installed_repository( app, repository ): """Generate the URL for cloning a repository that has been installed into a Galaxy instance.""" tool_shed_url = get_url_from_repository_tool_shed( app, repository ) @@ -765,9 +923,18 @@ # See if we have a repository dependencies defined. 
if name == 'repository_dependencies.xml': path_to_repository_dependencies_config = os.path.join( root, name ) - metadata_dict, error_message = generate_repository_dependency_metadata( app, path_to_repository_dependencies_config, metadata_dict ) - if error_message: - invalid_file_tups.append( ( name, error_message ) ) + if app.name == 'community': + metadata_dict, error_message = generate_repository_dependency_metadata_for_tool_shed( app, + path_to_repository_dependencies_config, + metadata_dict ) + if error_message: + invalid_file_tups.append( ( name, error_message ) ) + elif app.name == 'galaxy': + metadata_dict, error_message = generate_repository_dependency_metadata_for_installed_repository( app, + path_to_repository_dependencies_config, + metadata_dict ) + if error_message: + invalid_file_tups.append( ( name, error_message ) ) # See if we have one or more READ_ME files. elif name.lower() in readme_file_names: relative_path_to_readme = get_relative_path_to_repository_file( root, @@ -860,8 +1027,42 @@ if requirements_dict: tool_dependencies_dict[ dependency_key ] = requirements_dict return tool_dependencies_dict -def generate_repository_dependency_metadata( app, repository_dependencies_config, metadata_dict ): - """Generate a repository dependencies dictionary based on valid information defined in the received repository_dependencies_config.""" +def generate_repository_dependency_metadata_for_installed_repository( app, repository_dependencies_config, metadata_dict ): + """ + Generate a repository dependencies dictionary based on valid information defined in the received repository_dependencies_config. This method + is called only from Galaxy. + """ + repository_dependencies_tups = [] + error_message = '' + try: + # Make sure we're looking at a valid repository_dependencies.xml file. + tree = util.parse_xml( repository_dependencies_config ) + root = tree.getroot() + is_valid = root.tag == 'repositories' + except Exception, e: + error_message = "Error parsing %s, exception: %s" % ( repository_dependencies_config, str( e ) ) + log.debug( error_message ) + is_valid = False + if is_valid: + sa_session = app.model.context.current + for repository_elem in root.findall( 'repository' ): + toolshed = repository_elem.attrib[ 'toolshed' ] + name = repository_elem.attrib[ 'name' ] + owner = repository_elem.attrib[ 'owner'] + changeset_revision = repository_elem.attrib[ 'changeset_revision' ] + repository_dependencies_tup = ( toolshed, name, owner, changeset_revision ) + if repository_dependencies_tup not in repository_dependencies_tups: + repository_dependencies_tups.append( repository_dependencies_tup ) + if repository_dependencies_tups: + repository_dependencies_dict = dict( description=root.get( 'description' ), + repository_dependencies=repository_dependencies_tups ) + metadata_dict[ 'repository_dependencies' ] = repository_dependencies_dict + return metadata_dict, error_message +def generate_repository_dependency_metadata_for_tool_shed( app, repository_dependencies_config, metadata_dict ): + """ + Generate a repository dependencies dictionary based on valid information defined in the received repository_dependencies_config. This method + is called only from the tool shed. 
+ """ repository_dependencies_tups = [] error_message = '' try: @@ -1111,6 +1312,12 @@ # quiet = True _ui.setconfig( 'ui', 'quiet', True ) return _ui +def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ): + url = url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) ) + response = urllib2.urlopen( url ) + ctx_rev = response.read() + response.close() + return ctx_rev def get_ctx_file_path_from_manifest( filename, repo, changeset_revision ): """Get the ctx file path for the latest revision of filename from the repository manifest up to the value of changeset_revision.""" stripped_filename = strip_path( filename ) @@ -1223,6 +1430,25 @@ # We've found the changeset in the changelog for which we need to get the next downloadable changset. found_after_changeset_revision = True return None +def get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision ): + repository = get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision ) + if not repository: + tool_shed_url = get_url_from_tool_shed( trans.app, tool_shed ) + repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name ) + ctx_rev = get_ctx_rev( tool_shed_url, name, owner, installed_changeset_revision ) + print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name + repository = create_or_update_tool_shed_repository( app=self.app, + name=name, + description=None, + installed_changeset_revision=changeset_revision, + ctx_rev=ctx_rev, + repository_clone_url=repository_clone_url, + metadata_dict={}, + status=self.app.model.ToolShedRepository.installation_status.NEW, + current_changeset_revision=None, + owner=sowner, + dist_to_shed=False ) + return repository def get_ordered_downloadable_changeset_revisions( repository, repo ): """Return an ordered list of changeset_revisions defined by a repository changelog.""" changeset_tups = [] @@ -1366,6 +1592,23 @@ repository_dependency[ 1 ], repository_dependency[ 2 ], repository_dependency[ 3] ) +def get_repository_dependency_by_repository_id( trans, decoded_repository_id ): + return trans.sa_session.query( trans.model.RepositoryDependency ) \ + .filter( trans.model.RepositoryDependency.table.c.tool_shed_repository_id == decoded_repository_id ) \ + .first() +def get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ): + repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app=app, + tool_shed=tool_shed, + name=name, + owner=owner, + installed_changeset_revision=changeset_revision ) + if not repository: + repository = get_tool_shed_repository_by_shed_name_owner_changeset_revision( app=app, + tool_shed=tool_shed, + name=name, + owner=owner, + changeset_revision=changeset_revision ) + return repository def get_repository_file_contents( file_path ): if checkers.is_gzip( file_path ): safe_str = to_safe_string( '\ngzip compressed file\n' ) @@ -1512,6 +1755,30 @@ tool_path = shed_config_dict[ 'tool_path' ] relative_install_dir = partial_install_dir return shed_tool_conf, tool_path, relative_install_dir +def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ): + # This method is used only in Galaxy, not the tool shed. 
+ sa_session = app.model.context.current + if tool_shed.find( '//' ) > 0: + tool_shed = tool_shed.split( '//' )[1] + tool_shed = tool_shed.rstrip( '/' ) + return sa_session.query( app.model.ToolShedRepository ) \ + .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed, + app.model.ToolShedRepository.table.c.name == name, + app.model.ToolShedRepository.table.c.owner == owner, + app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ + .first() +def get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision ): + # This method is used only in Galaxy, not the tool shed. + sa_session = app.model.context.current + if tool_shed.find( '//' ) > 0: + tool_shed = tool_shed.split( '//' )[1] + tool_shed = tool_shed.rstrip( '/' ) + return sa_session.query( app.model.ToolShedRepository ) \ + .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed, + app.model.ToolShedRepository.table.c.name == name, + app.model.ToolShedRepository.table.c.owner == owner, + app.model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \ + .first() def get_tool_shed_from_clone_url( repository_clone_url ): tmp_url = clean_repository_clone_url( repository_clone_url ) return tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) @@ -1557,6 +1824,16 @@ return shed_url # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml. return None +def get_url_from_tool_shed( app, tool_shed ): + # The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like: + # http://toolshed.g2.bx.psu.edu/ + for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items(): + if shed_url.find( tool_shed ) >= 0: + if shed_url.endswith( '/' ): + shed_url = shed_url.rstrip( '/' ) + return shed_url + # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml. 
+ return None def get_user_by_username( trans, username ): """Get a user from the database by username""" return trans.sa_session.query( trans.model.User ) \ diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/community/app.py --- a/lib/galaxy/webapps/community/app.py +++ b/lib/galaxy/webapps/community/app.py @@ -12,6 +12,7 @@ """Encapsulates the state of a Universe application""" def __init__( self, **kwargs ): print >> sys.stderr, "python path is: " + ", ".join( sys.path ) + self.name = "community" # Read config file and check for errors self.config = config.Configuration( **kwargs ) self.config.check() diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/community/controllers/repository.py --- a/lib/galaxy/webapps/community/controllers/repository.py +++ b/lib/galaxy/webapps/community/controllers/repository.py @@ -1338,7 +1338,7 @@ changeset_revision = kwd[ 'changeset_revision' ] repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner ) repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision ) - return suc.build_readme_files_dict( repository_metadata ) + return suc.build_readme_files_dict( repository_metadata.metadata ) @web.json def get_repository_dependencies( self, trans, **kwd ): """Return an encoded dictionary of all repositories upon which the contents of the received repository depends.""" diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/community/util/container_util.py --- a/lib/galaxy/webapps/community/util/container_util.py +++ b/lib/galaxy/webapps/community/util/container_util.py @@ -218,7 +218,7 @@ else: repository_dependencies_root_folder = None return folder_id, repository_dependencies_root_folder -def build_tools_folder( folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools' ): +def build_tools_folder( folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools', description=None ): """Return a folder hierarchy containing valid tools.""" if tool_dicts: tool_id = 0 @@ -226,6 +226,8 @@ tools_root_folder = Folder( id=folder_id, key='root', label='root', parent=None ) folder_id += 1 folder = Folder( id=folder_id, key='tools', label=label, parent=tools_root_folder ) + if description: + folder.description = description tools_root_folder.folders.append( folder ) # Insert a header row. 
tool_id += 1 @@ -239,6 +241,10 @@ repository_id='', changeset_revision='' ) folder.valid_tools.append( tool ) + if repository: + repository_id = repository.id + else: + repository_id = '' for tool_dict in tool_dicts: tool_id += 1 if 'requirements' in tool_dict: @@ -256,7 +262,7 @@ description=tool_dict[ 'description' ], version=tool_dict[ 'version' ], requirements=requirements_str, - repository_id=repository.id, + repository_id=repository_id, changeset_revision=changeset_revision ) folder.valid_tools.append( tool ) else: diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/demo_sequencer/app.py --- a/lib/galaxy/webapps/demo_sequencer/app.py +++ b/lib/galaxy/webapps/demo_sequencer/app.py @@ -6,6 +6,7 @@ """Encapsulates the state of a Universe application""" def __init__( self, **kwargs ): print >> sys.stderr, "python path is: " + ", ".join( sys.path ) + self.name = "demo_sequencer" # Read config file and check for errors self.config = config.Configuration( **kwargs ) self.config.check() diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/demo_sequencer/buildapp.py --- a/lib/galaxy/webapps/demo_sequencer/buildapp.py +++ b/lib/galaxy/webapps/demo_sequencer/buildapp.py @@ -55,7 +55,7 @@ sys.exit( 1 ) atexit.register( app.shutdown ) # Create the universe WSGI application - webapp = galaxy.webapps.demo_sequencer.framework.WebApplication( app, session_cookie='galaxydemo_sequencersession' ) + webapp = galaxy.webapps.demo_sequencer.framework.WebApplication( app, session_cookie='galaxydemo_sequencersession', name="demo_sequencer" ) add_ui_controllers( webapp, app ) # These two routes handle our simple needs at the moment webapp.add_route( '/:controller/:action', action='index' ) diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -547,20 +547,6 @@ galaxy_url = url_for( '/', qualified=True ) url = suc.url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s' % galaxy_url ) return trans.response.send_redirect( url ) - def generate_tool_path( self, repository_clone_url, changeset_revision ): - """ - Generate a tool path that guarantees repositories with the same name will always be installed - in different directories. 
The tool path will be of the form: - <tool shed url>/repos/<repository owner>/<repository name>/<installed changeset revision> - http://test@bx.psu.edu:9009/repos/test/filter - """ - tmp_url = suc.clean_repository_clone_url( repository_clone_url ) - # Now tmp_url is something like: bx.psu.edu:9009/repos/some_username/column - items = tmp_url.split( 'repos' ) - tool_shed_url = items[ 0 ] - repo_path = items[ 1 ] - tool_shed_url = suc.clean_tool_shed_url( tool_shed_url ) - return suc.url_join( tool_shed_url, 'repos', repo_path, changeset_revision ) @web.json @web.require_admin def get_file_contents( self, trans, file_path ): @@ -585,59 +571,11 @@ raw_text = response.read() response.close() if len( raw_text ) > 2: - text = json.from_json_string( encoding_util.tool_shed_decode( raw_text ) ) - log.debug( text ) + encoded_text = from_json_string( raw_text ) + text = encoding_util.tool_shed_decode( encoded_text ) else: text = '' return text - def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ): - """ - Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All - repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through - this methid is required to retrieve all repository dependencies. - """ - if repo_info_dicts: - all_repo_info_dicts = [ rid for rid in repo_info_dicts ] - # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids. - required_repository_tups = [] - for repo_info_dict in repo_info_dicts: - for repository_name, repo_info_tup in repo_info_dict.items(): - description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ - suc.get_repo_info_tuple_contents( repo_info_tup ) - if repository_dependencies: - for key, val in repository_dependencies.items(): - if key in [ 'root_key', 'description' ]: - continue - toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key ) - components_list = [ toolshed, name, owner, changeset_revision ] - if components_list not in required_repository_tups: - required_repository_tups.append( components_list ) - for components_list in val: - if components_list not in required_repository_tups: - required_repository_tups.append( components_list ) - if required_repository_tups: - # The value of required_repository_tups is a list of tuples, so we need to encode it. 
- encoded_required_repository_tups = [] - for required_repository_tup in required_repository_tups: - encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) ) - encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups ) - encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str ) - url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str ) - response = urllib2.urlopen( url ) - text = response.read() - response.close() - if text: - required_repo_info_dict = from_json_string( text ) - required_repo_info_dicts = [] - encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ] - for encoded_dict_str in encoded_dict_strings: - decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str ) - required_repo_info_dicts.append( decoded_dict ) - if required_repo_info_dicts: - for required_repo_info_dict in required_repo_info_dicts: - if required_repo_info_dict not in all_repo_info_dicts: - all_repo_info_dicts.append( required_repo_info_dict ) - return all_repo_info_dicts def get_versions_of_tool( self, app, guid ): tool_version = shed_util.get_tool_version( app, guid ) return tool_version.get_version_ids( app, reverse=True ) @@ -748,7 +686,7 @@ shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING ) repo_info_tuple = repo_info_dict[ tool_shed_repository.name ] description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple - relative_clone_dir = self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) + relative_clone_dir = shed_util.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) clone_dir = os.path.join( tool_path, relative_clone_dir ) relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name ) install_dir = os.path.join( tool_path, relative_install_dir ) @@ -908,13 +846,13 @@ repository = suc.get_installed_tool_shed_repository( trans, repository_id ) if not repository: return trans.show_error_message( 'Invalid repository specified.' ) + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository ) if repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING ]: return trans.response.send_redirect( web.url_for( controller='admin_toolshed', action='monitor_repository_installation', **kwd ) ) if repository.can_install and operation == 'install': # Send a request to the tool shed to install the repository. - tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository ) url = suc.url_join( tool_shed_url, 'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \ ( repository.name, repository.owner, repository.installed_changeset_revision, ( url_for( '/', qualified=True ) ) ) ) @@ -939,10 +877,50 @@ trans.sa_session.add( repository ) trans.sa_session.flush() message = "The repository information has been updated." 
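The removed get_required_repo_info_dicts code above serializes the required repository tuples before asking the tool shed for their repo_info_dicts. The following is a minimal illustrative sketch of that join/split step only; the separator values and the helper names used here (ENCODING_SEP, ENCODING_SEP2) are stand-ins for encoding_util.encoding_sep and encoding_util.encoding_sep2, whose actual values are not shown in this diff, and the final pass through encoding_util.tool_shed_encode is deliberately left out.

    ENCODING_SEP = '__esep__'      # stand-in; actual encoding_util.encoding_sep value assumed
    ENCODING_SEP2 = '__esepii__'   # stand-in; actual encoding_util.encoding_sep2 value assumed

    def join_required_repository_tups(required_repository_tups):
        # Each entry is [tool_shed, name, owner, changeset_revision]; fields are joined
        # with one separator and tuples with another, then the whole string would be
        # passed through encoding_util.tool_shed_encode before being put on the URL.
        encoded_tups = [ENCODING_SEP.join(tup) for tup in required_repository_tups]
        return ENCODING_SEP2.join(encoded_tups)

    def split_required_repository_tups(joined_str):
        # Inverse operation, as the tool shed side would perform after tool_shed_decode.
        return [chunk.split(ENCODING_SEP) for chunk in joined_str.split(ENCODING_SEP2)]

    tups = [['http://example.org/toolshed', 'some_repository', 'some_owner', '0123456789ab']]
    assert split_required_repository_tups(join_required_repository_tups(tups)) == tups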
+ metadata = repository.metadata + datatypes = metadata.get( 'datatypes', None ) + invalid_tools = metadata.get( 'invalid_tools', None ) + if repository.has_readme_files: + readme_files_dict = suc.build_readme_files_dict( repository.metadata ) + else: + readme_files_dict = None + repository_dependencies = metadata.get( 'repository_dependencies', None ) + repository_dependencies_dict_for_display = {} + if repository_dependencies: + # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool + # shed repository metadata. + root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed, + repository.name, + repository.owner, + repository.installed_changeset_revision ) + rd_tups_for_display = [] + rd_tups = repository_dependencies[ 'repository_dependencies' ] + repository_dependencies_dict_for_display[ 'root_key' ] = root_key + repository_dependencies_dict_for_display[ root_key ] = rd_tups + repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ] + all_tool_dependencies = metadata.get( 'tool_dependencies', None ) + tool_dependencies, missing_tool_dependencies = shed_util.get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies ) + valid_tools = metadata.get( 'tools', None ) + workflows = metadata.get( 'workflows', None ) + containers_dict = suc.build_repository_containers_for_galaxy( trans=trans, + toolshed_base_url=tool_shed_url, + repository_name=repository.name, + repository_owner=repository.owner, + changeset_revision=repository.installed_changeset_revision, + repository=repository, + datatypes=datatypes, + invalid_tools=invalid_tools, + missing_tool_dependencies=missing_tool_dependencies, + readme_files_dict=readme_files_dict, + repository_dependencies=repository_dependencies_dict_for_display, + tool_dependencies=tool_dependencies, + valid_tools=valid_tools, + workflows=workflows ) return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako', repository=repository, description=description, repo_files_dir=repo_files_dir, + containers_dict=containers_dict, message=message, status=status ) @web.expose @@ -1153,8 +1131,8 @@ shed_tool_conf = kwd[ 'shed_tool_conf' ] else: install_tool_dependencies = False - # If installing a repository that includes no tools, get the relative tool_path from the file to which the - # migrated_tools_config setting points. + # If installing a repository that includes no tools, get the relative tool_path from the file to which the migrated_tools_config + # setting points. shed_tool_conf = trans.app.config.migrated_tools_config # Get the tool path by searching the list of shed_tool_confs for the dictionary that contains the information about shed_tool_conf. for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs: @@ -1167,61 +1145,20 @@ if file_name == shed_tool_conf: tool_path = shed_tool_conf_dict[ 'tool_path' ] break - # Make sure all tool_shed_repository records exist. - created_or_updated_tool_shed_repositories = [] - # Repositories will be filtered (e.g., if already installed, etc), so filter the associated repo_info_dicts accordingly. - filtered_repo_info_dicts = [] - # Disciver all repository dependencies and retrieve information for installing them. 
- repo_info_dicts = self.get_required_repo_info_dicts( tool_shed_url, repo_info_dicts ) - for repo_info_dict in repo_info_dicts: - for name, repo_info_tuple in repo_info_dict.items(): - description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ - suc.get_repo_info_tuple_contents( repo_info_tuple ) - clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, changeset_revision ) ) - relative_install_dir = os.path.join( clone_dir, name ) - # Make sure the repository was not already installed. - installed_tool_shed_repository, installed_changeset_revision = self.repository_was_previously_installed( trans, - tool_shed_url, - name, - repo_info_tuple, - clone_dir ) - if installed_tool_shed_repository: - message += "Revision <b>%s</b> of tool shed repository <b>%s</b> owned by <b>%s</b> " % ( changeset_revision, name, repository_owner ) - if installed_changeset_revision != changeset_revision: - message += "was previously installed using changeset revision <b>%s</b>. " % installed_changeset_revision - else: - message += "was previously installed. " - if installed_tool_shed_repository.uninstalled: - message += "The repository has been uninstalled, however, so reinstall the original repository instead of installing it again. " - elif installed_tool_shed_repository.deleted: - message += "The repository has been deactivated, however, so activate the original repository instead of installing it again. " - if installed_changeset_revision != changeset_revision: - message += "You can get the latest updates for the repository using the <b>Get updates</b> option from the repository's " - message += "<b>Repository Actions</b> pop-up menu. " - message+= 'Click <a href="%s">here</a> to manage the repository. ' % \ - ( web.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( installed_tool_shed_repository.id ) ) ) - status = 'error' - if len( repo_info_dicts ) == 1: - new_kwd = dict( message=message, status=status ) - return trans.response.send_redirect( web.url_for( controller='admin_toolshed', - action='browse_repositories', - **new_kwd ) ) - else: - log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name ) - tool_shed_repository = shed_util.create_or_update_tool_shed_repository( app=trans.app, - name=name, - description=description, - installed_changeset_revision=changeset_revision, - ctx_rev=ctx_rev, - repository_clone_url=repository_clone_url, - metadata_dict={}, - status=trans.model.ToolShedRepository.installation_status.NEW, - current_changeset_revision=changeset_revision, - owner=repository_owner, - dist_to_shed=False ) - created_or_updated_tool_shed_repositories.append( tool_shed_repository ) - filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) ) + created_or_updated_tool_shed_repositories, repo_info_dicts, filtered_repo_info_dicts, message = \ + shed_util.create_repository_dependency_objects( trans, tool_path, tool_shed_url, repo_info_dicts, reinstalling=False ) + if message and len( repo_info_dicts ) == 1: + message+= 'Click <a href="%s">here</a> to manage the repository. 
' % \ + ( web.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( installed_tool_shed_repository.id ) ) ) + return trans.response.send_redirect( web.url_for( controller='admin_toolshed', + action='browse_repositories', + message=message, + status='error' ) ) if created_or_updated_tool_shed_repositories: + if install_repository_dependencies: + # Build repository dependency relationships. + suc.build_repository_dependency_relationships( trans, repo_info_dicts, created_or_updated_tool_shed_repositories ) + # Handle contained tools. if includes_tools and ( new_tool_panel_section or tool_panel_section ): if new_tool_panel_section: section_id = new_tool_panel_section.lower().replace( ' ', '_' ) @@ -1286,11 +1223,8 @@ repo_info_dict = repo_info_dicts[ 0 ] name = repo_info_dict.keys()[ 0 ] repo_info_tuple = repo_info_dict[ name ] - if len( repo_info_tuple ) == 6: - description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple - repository_dependencies = None - elif len( repo_info_tuple ) == 7: - description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple + description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ + suc.get_repo_info_tuple_contents( repo_info_tuple ) url = suc.url_join( tool_shed_url, 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \ ( name, repository_owner, changeset_revision ) ) @@ -1303,9 +1237,15 @@ repository_name=name, repository_owner=repository_owner, changeset_revision=changeset_revision, + repository=None, + datatypes=None, + invalid_tools=None, + missing_tool_dependencies=None, readme_files_dict=readme_files_dict, repository_dependencies=repository_dependencies, - tool_dependencies=tool_dependencies ) + tool_dependencies=tool_dependencies, + valid_tools=None, + workflows=None ) else: containers_dict = dict( readme_files_dict=None, repository_dependencies=None, tool_dependencies=None ) # Handle tool dependencies chack box. @@ -1338,18 +1278,23 @@ @web.expose @web.require_admin def reinstall_repository( self, trans, **kwd ): + """ + Reinstall a tool shed repository that has been previously uninstalled, making sure to handle all repository and tool dependencies of the + repository. 
+ """ message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) repository_id = kwd[ 'id' ] tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id ) no_changes = kwd.get( 'no_changes', '' ) no_changes_checked = CheckboxField.is_checked( no_changes ) + install_repository_dependencies = CheckboxField.is_checked( kwd.get( 'install_repository_dependencies', '' ) ) install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) ) new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' ) tool_panel_section = kwd.get( 'tool_panel_section', '' ) shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository ) repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository ) - clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) ) + clone_dir = os.path.join( tool_path, shed_util.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) ) relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name ) tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository ) tool_section = None @@ -1406,28 +1351,34 @@ else: tool_section = None # The repository's status must be updated from 'Uninstall' to 'New' when initiating reinstall so the repository_installation_updater will function. - tool_shed_repository = shed_util.create_or_update_tool_shed_repository( trans.app, - tool_shed_repository.name, - tool_shed_repository.description, - tool_shed_repository.installed_changeset_revision, - tool_shed_repository.ctx_rev, - repository_clone_url, - tool_shed_repository.metadata, - trans.model.ToolShedRepository.installation_status.NEW, - tool_shed_repository.installed_changeset_revision, - tool_shed_repository.owner, - tool_shed_repository.dist_to_shed ) - ctx_rev = shed_util.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) + tool_shed_repository = suc.create_or_update_tool_shed_repository( trans.app, + tool_shed_repository.name, + tool_shed_repository.description, + tool_shed_repository.installed_changeset_revision, + tool_shed_repository.ctx_rev, + repository_clone_url, + tool_shed_repository.metadata, + trans.model.ToolShedRepository.installation_status.NEW, + tool_shed_repository.installed_changeset_revision, + tool_shed_repository.owner, + tool_shed_repository.dist_to_shed ) + ctx_rev = suc.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) + repo_info_dicts = [] repo_info_dict = kwd.get( 'repo_info_dict', None ) - # The repo_info_dict should be encoded. - if not repo_info_dict: + if repo_info_dict: + # The repo_info_dict should be encoded. + repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict ) + else: # Entering this if block used to happen only if the tool_shed_repository does not include any valid tools. After repository dependencies - # were introduced, it may never happen, but will keep the block just in case. 
- repository_dependencies = self.get_repository_dependencies( trans=trans, - repository_id=repository_id, - repository_name=tool_shed_repository.name, - repository_owner=tool_shed_repository.owner, - changeset_revision=tool_shed_repository.installed_changeset_revision ) + # were introduced, it may never happen, but we'll keep the block just in case. + if install_repository_dependencies: + repository_dependencies = self.get_repository_dependencies( trans=trans, + repository_id=repository_id, + repository_name=tool_shed_repository.name, + repository_owner=tool_shed_repository.owner, + changeset_revision=tool_shed_repository.installed_changeset_revision ) + else: + repository_dependencies = None repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) ) repo_info_dict = suc.create_repo_info_dict( trans=trans, repository_clone_url=repository_clone_url, @@ -1440,6 +1391,16 @@ metadata=metadata, repository_dependencies=repository_dependencies ) repo_info_dict = encoding_util.tool_shed_encode( repo_info_dict ) + repo_info_dicts.append( repo_info_dict ) + # Make sure all tool_shed_repository records exist. + created_or_updated_tool_shed_repositories = [ tool_shed_repository ] + if install_repository_dependencies: + created_or_updated_tool_shed_repositories, repo_info_dicts, filtered_repo_info_dicts = \ + shed_util.create_repository_dependency_objects( trans, tool_path, tool_shed_url, repo_info_dicts, reinstalling=True ) + if len( created_or_updated_tool_shed_repositories ) > 1: + # Build repository dependency relationships. + suc.build_repository_dependency_relationships( trans, filtered_repo_info_dicts, created_or_updated_tool_shed_repositories ) + encoded_repository_ids = [ trans.security.encode_id( r.id ) for r in created_or_updated_tool_shed_repositories ] new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies, includes_tools=tool_shed_repository.includes_tools, install_tool_dependencies=install_tool_dependencies, @@ -1451,12 +1412,12 @@ tool_panel_section=tool_panel_section, tool_path=tool_path, tool_panel_section_key=tool_panel_section_key, - tool_shed_repository_ids=[ repository_id ], + tool_shed_repository_ids=encoded_repository_ids, tool_shed_url=tool_shed_url ) encoded_kwd = encoding_util.tool_shed_encode( new_kwd ) return trans.response.send_redirect( web.url_for( controller='admin_toolshed', action='initiate_repository_installation', - shed_repository_ids=repository_id, + shed_repository_ids=encoded_repository_ids, encoded_kwd=encoded_kwd, reinstalling=True ) ) @web.json @@ -1481,48 +1442,23 @@ return rval @web.expose @web.require_admin - def repository_was_previously_installed( self, trans, tool_shed_url, repository_name, repo_info_tuple, clone_dir ): - # Handle case where the repository was previously installed using an older changeset_revsion, but later the repository was updated - # in the tool shed and now we're trying to install the latest changeset revision of the same repository instead of updating the one - # that was previously installed. We'll look in the database instead of on disk since the repository may be uninstalled. 
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ - suc.get_repo_info_tuple_contents( repo_info_tuple ) - tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url ) - # Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset - # revision to see if it was previously installed using one of them. - url = suc.url_join( tool_shed_url, - 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \ - ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) ) - response = urllib2.urlopen( url ) - text = response.read() - response.close() - if text: - #clone_path, clone_directory = os.path.split( clone_dir ) - changeset_revisions = util.listify( text ) - for previous_changeset_revision in changeset_revisions: - tool_shed_repository = shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( trans.app, - tool_shed, - repository_name, - repository_owner, - previous_changeset_revision ) - if tool_shed_repository and tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]: - return tool_shed_repository, previous_changeset_revision - return None, None - @web.expose - @web.require_admin def reselect_tool_panel_section( self, trans, **kwd ): message = '' repository_id = kwd[ 'id' ] tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id ) metadata = tool_shed_repository.metadata tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository ) - ctx_rev = shed_util.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) + ctx_rev = suc.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository ) repository_dependencies = self.get_repository_dependencies( trans=trans, repository_id=repository_id, repository_name=tool_shed_repository.name, repository_owner=tool_shed_repository.owner, changeset_revision=tool_shed_repository.installed_changeset_revision ) + if repository_dependencies: + includes_repository_dependencies = True + else: + includes_repository_dependencies = False repo_info_dict = suc.create_repo_info_dict( trans=trans, repository_clone_url=repository_clone_url, changeset_revision=tool_shed_repository.installed_changeset_revision, @@ -1561,24 +1497,51 @@ message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. 
" status = 'warning' - if metadata and 'readme_files' in metadata: - url = suc.url_join( tool_shed_url, - 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \ - ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) ) - response = urllib2.urlopen( url ) - raw_text = response.read() - response.close() - readme_files_dict = from_json_string( raw_text ) - tool_dependencies = metadata.get( 'tool_dependencies', None ) + if metadata: + datatypes = metadata.get( 'datatypes', None ) + invalid_tools = metadata.get( 'invalid_tools', None ) + if tool_shed_repository.has_readme_files: + url = suc.url_join( tool_shed_url, + 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \ + ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) ) + response = urllib2.urlopen( url ) + raw_text = response.read() + response.close() + readme_files_dict = from_json_string( raw_text ) + else: + readme_files_dict = None repository_dependencies = metadata.get( 'repository_dependencies', None ) + repository_dependencies_dict_for_display = {} + if repository_dependencies: + # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool + # shed repository metadata. + root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed, + repository.name, + repository.owner, + repository.installed_changeset_revision ) + rd_tups_for_display = [] + rd_tups = repository_dependencies[ 'repository_dependencies' ] + repository_dependencies_dict_for_display[ 'root_key' ] = root_key + repository_dependencies_dict_for_display[ root_key ] = rd_tups + repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ] + all_tool_dependencies = metadata.get( 'tool_dependencies', None ) + tool_dependencies, missing_tool_dependencies = shed_util.get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies ) + valid_tools = metadata.get( 'tools', None ) + workflows = metadata.get( 'workflows', None ) containers_dict = suc.build_repository_containers_for_galaxy( trans=trans, toolshed_base_url=tool_shed_url, repository_name=tool_shed_repository.name, repository_owner=tool_shed_repository.owner, changeset_revision=tool_shed_repository.installed_changeset_revision, + repository=tool_shed_repository, + datatypes=datatypes, + invalid_tools=invalid_tools, + missing_tool_dependencies=missing_tool_dependencies, readme_files_dict=readme_files_dict, repository_dependencies=repository_dependencies, - tool_dependencies=tool_dependencies ) + tool_dependencies=tool_dependencies, + valid_tools=valid_tools, + workflows=workflows ) else: containers_dict = dict( readme_files_dict=None, repository_dependencies=None, tool_dependencies=None ) # Handle repository dependencies check box. 
@@ -1597,6 +1560,8 @@ repository=tool_shed_repository, no_changes_check_box=no_changes_check_box, original_section_name=original_section_name, + includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies, + includes_repository_dependencies=includes_repository_dependencies, install_repository_dependencies_check_box=install_repository_dependencies_check_box, install_tool_dependencies_check_box=install_tool_dependencies_check_box, containers_dict=containers_dict, @@ -1797,7 +1762,7 @@ changeset_revision = params.get( 'changeset_revision', None ) latest_changeset_revision = params.get( 'latest_changeset_revision', None ) latest_ctx_rev = params.get( 'latest_ctx_rev', None ) - repository = shed_util.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision ) + repository = suc.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision ) if changeset_revision and latest_changeset_revision and latest_ctx_rev: if changeset_revision == latest_changeset_revision: message = "The installed repository named '%s' is current, there are no updates available. " % name diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/reports/app.py --- a/lib/galaxy/webapps/reports/app.py +++ b/lib/galaxy/webapps/reports/app.py @@ -6,6 +6,7 @@ """Encapsulates the state of a Universe application""" def __init__( self, **kwargs ): print >> sys.stderr, "python path is: " + ", ".join( sys.path ) + self.name = "reports" # Read config file and check for errors self.config = config.Configuration( **kwargs ) self.config.check() diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/admin/tool_shed_repository/common.mako --- a/templates/admin/tool_shed_repository/common.mako +++ b/templates/admin/tool_shed_repository/common.mako @@ -103,14 +103,16 @@ </div></div> %if repository_dependencies_root_folder: - <div class="form-row"> - <label>Handle repository dependencies?</label> - ${install_repository_dependencies_check_box.get_html()} - <div class="toolParamHelp" style="clear: both;"> - Un-check to skip automatic installation of these additional repositories required by this repository. + %if install_repository_dependencies_check_box is not None: + <div class="form-row"> + <label>Handle repository dependencies?</label> + ${install_repository_dependencies_check_box.get_html()} + <div class="toolParamHelp" style="clear: both;"> + Un-check to skip automatic installation of these additional repositories required by this repository. + </div></div> - </div> - <div style="clear: both"></div> + <div style="clear: both"></div> + %endif <div class="form-row"> %if repository_dependencies_root_folder: <p/> @@ -123,19 +125,21 @@ </div> %endif %if tool_dependencies_root_folder: - <div class="form-row"> - <label>Handle tool dependencies?</label> - <% disabled = trans.app.config.tool_dependency_dir is None %> - ${install_tool_dependencies_check_box.get_html( disabled=disabled )} - <div class="toolParamHelp" style="clear: both;"> - %if disabled: - Set the tool_dependency_dir configuration value in your Galaxy config to automatically handle tool dependencies. - %else: - Un-check to skip automatic handling of these tool dependencies. 
- %endif + %if install_tool_dependencies_check_box is not None: + <div class="form-row"> + <label>Handle tool dependencies?</label> + <% disabled = trans.app.config.tool_dependency_dir is None %> + ${install_tool_dependencies_check_box.get_html( disabled=disabled )} + <div class="toolParamHelp" style="clear: both;"> + %if disabled: + Set the tool_dependency_dir configuration value in your Galaxy config to automatically handle tool dependencies. + %else: + Un-check to skip automatic handling of these tool dependencies. + %endif + </div></div> - </div> - <div style="clear: both"></div> + <div style="clear: both"></div> + %endif <div class="form-row"> %if tool_dependencies_root_folder: <p/> diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/admin/tool_shed_repository/manage_repository.mako --- a/templates/admin/tool_shed_repository/manage_repository.mako +++ b/templates/admin/tool_shed_repository/manage_repository.mako @@ -1,7 +1,19 @@ <%inherit file="/base.mako"/><%namespace file="/message.mako" import="render_msg" /> +<%namespace file="/webapps/community/repository/common.mako" import="*" /><%namespace file="/admin/tool_shed_repository/common.mako" import="*" /> +<%def name="stylesheets()"> + ${parent.stylesheets()} + ${h.css( "library" )} +</%def> + +<%def name="javascripts()"> + ${parent.javascripts()} + ${h.js("libs/jquery/jquery.rating", "libs/jquery/jstorage" )} + ${container_javascripts()} +</%def> + <br/><br/><ul class="manage-table-actions"><li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li> @@ -91,218 +103,5 @@ </div><p/> %if not in_error_state: - <div class="toolForm"> - <div class="toolFormTitle">${repository.name}</div> - <div class="toolFormBody"> - <% - metadata = repository.metadata or {} - missing_tool_dependencies = repository.missing_tool_dependencies - installed_tool_dependencies = repository.installed_tool_dependencies - %> - %if missing_tool_dependencies: - <div class="form-row"> - <table width="100%"> - <tr bgcolor="#D8D8D8" width="100%"> - <td><b>Missing tool dependencies</i></td> - </tr> - </table> - </div> - <div style="clear: both"></div> - <div class="form-row"> - <table class="grid"> - <tr> - <td><b>name</b></td> - <td><b>version</b></td> - <td><b>type</b></td> - <td><b>status</b></td> - </tr> - %for tool_dependency in missing_tool_dependencies: - <tr> - <td> - <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( tool_dependency.id ) )}"> - ${tool_dependency.name} - </a> - </td> - <td>${tool_dependency.version}</td> - <td>${tool_dependency.type}</td> - <td>${tool_dependency.status}</td> - </tr> - %endfor - </table> - </div> - <div style="clear: both"></div> - %endif - %if installed_tool_dependencies: - <div class="form-row"> - <table width="100%"> - <tr bgcolor="#D8D8D8" width="100%"> - <td><b>Installed tool dependencies<i> - click the name to browse the dependency installation directory</i></td> - </tr> - </table> - </div> - <div style="clear: both"></div> - <div class="form-row"> - <table class="grid"> - <tr> - <td><b>name</b></td> - <td><b>version</b></td> - <td><b>type</b></td> - </tr> - %for installed_tool_dependency in installed_tool_dependencies: - <tr> - <td> - <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( 
repository.id ) )}"> - ${installed_tool_dependency.name} - </a> - </td> - <td>${installed_tool_dependency.version}</td> - <td>${installed_tool_dependency.type}</td> - </tr> - %endfor - </table> - </div> - <div style="clear: both"></div> - %endif - %if 'tools' in metadata: - <div class="form-row"> - <table width="100%"> - <tr bgcolor="#D8D8D8" width="100%"> - <td><b>Tools</b><i> - click the name to view information about the tool</i></td> - </tr> - </table> - </div> - <div class="form-row"> - <% tool_dicts = metadata[ 'tools' ] %> - <table class="grid"> - <tr> - <td><b>name</b></td> - <td><b>description</b></td> - <td><b>version</b></td> - <td><b>requirements</b></td> - </tr> - %for tool_dict in tool_dicts: - <tr> - <td> - <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), tool_id=tool_dict[ 'id' ] )}"> - ${tool_dict[ 'name' ]} - </a> - </td> - <td>${tool_dict[ 'description' ]}</td> - <td>${tool_dict[ 'version' ]}</td> - <td> - <% - if 'requirements' in tool_dict: - requirements = tool_dict[ 'requirements' ] - else: - requirements = None - %> - %if requirements: - <% - requirements_str = '' - for requirement_dict in tool_dict[ 'requirements' ]: - requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] ) - requirements_str = requirements_str.rstrip( ', ' ) - %> - ${requirements_str} - %else: - none - %endif - </td> - </tr> - %endfor - </table> - </div> - <div style="clear: both"></div> - %endif - %if 'workflows' in metadata: - ## metadata[ 'workflows' ] is a list of tuples where each contained tuple is - ## [ <relative path to the .ga file in the repository>, <exported workflow dict> ] - <div class="form-row"> - <table width="100%"> - <tr bgcolor="#D8D8D8" width="100%"> - <td><b>Workflows</b><i> - click the name to import</i></td> - </tr> - </table> - </div> - <div style="clear: both"></div> - <div class="form-row"> - <% workflow_tups = metadata[ 'workflows' ] %> - <table class="grid"> - <tr> - <td><b>name</b></td> - <td><b>steps</b></td> - <td><b>format-version</b></td> - <td><b>annotation</b></td> - </tr> - <% index = 0 %> - %for workflow_tup in workflow_tups: - <% - import os.path - relative_path = workflow_tup[ 0 ] - full_path = os.path.abspath( relative_path ) - workflow_dict = workflow_tup[ 1 ] - workflow_name = workflow_dict[ 'name' ] - ## Initially steps were not stored in the metadata record. 
- steps = workflow_dict.get( 'steps', [] ) - format_version = workflow_dict[ 'format-version' ] - annotation = workflow_dict[ 'annotation' ] - %> - <tr> - <td> - <div class="menubutton" style="float: left;" id="workflow-${index}-popup"> - ${workflow_name} - <div popupmenu="workflow-${index}-popup"> - <a class="action-button" href="${h.url_for( controller='workflow', action='import_workflow', installed_repository_file=full_path, repository_id=trans.security.encode_id( repository.id ) )}">Import to Galaxy</a> - </div> - </div> - </td> - <td> - %if steps: - ${len( steps )} - %else: - unknown - %endif - </td> - <td>${format_version}</td> - <td>${annotation}</td> - </tr> - <% index += 1 %> - %endfor - </table> - </div> - <div style="clear: both"></div> - %endif - %if 'datatypes' in metadata: - <div class="form-row"> - <table width="100%"> - <tr bgcolor="#D8D8D8" width="100%"> - <td><b>Data types</b></td> - </tr> - </table> - </div> - <div style="clear: both"></div> - <div class="form-row"> - <% datatypes_dicts = metadata[ 'datatypes' ] %> - <table class="grid"> - <tr> - <td><b>extension</b></td> - <td><b>type</b></td> - <td><b>mimetype</b></td> - <td><b>subclass</b></td> - </tr> - %for datatypes_dict in datatypes_dicts: - <tr> - <td>${datatypes_dict.get( 'extension', ' ' )}</td> - <td>${datatypes_dict.get( 'dtype', ' ' )}</td> - <td>${datatypes_dict.get( 'mimetype', ' ' )}</td> - <td>${datatypes_dict.get( 'subclass', ' ' )}</td> - </tr> - %endfor - </table> - </div> - <div style="clear: both"></div> - %endif - </div> - </div> - <p/> + ${render_repository_items( repository.metadata, containers_dict, can_set_metadata=False )} %endif diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/admin/tool_shed_repository/reselect_tool_panel_section.mako --- a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako +++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako @@ -2,6 +2,18 @@ <%namespace file="/message.mako" import="render_msg" /><%namespace file="/admin/tool_shed_repository/common.mako" import="render_dependencies_section" /><%namespace file="/admin/tool_shed_repository/common.mako" import="render_readme_section" /> +<%namespace file="/webapps/community/repository/common.mako" import="*" /> + +<%def name="stylesheets()"> + ${parent.stylesheets()} + ${h.css( "library" )} +</%def> + +<%def name="javascripts()"> + ${parent.javascripts()} + ${h.js("libs/jquery/jquery.rating", "libs/jquery/jstorage" )} + ${container_javascripts()} +</%def> %if message: ${render_msg( message, status )} diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/webapps/community/repository/common.mako --- a/templates/webapps/community/repository/common.mako +++ b/templates/webapps/community/repository/common.mako @@ -208,14 +208,17 @@ folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description ) else: folder_label = "%s<i> - this repository requires installation of these additional repositories</i>" % folder_label + elif folder.label == 'Valid tools': + col_span_str = 'colspan="3"' + if folder.description: + folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description ) + else: + folder_label = "%s<i> - click the name to preview the tool and use the pop-up menu to inspect all metadata</i>" % folder_label elif folder.invalid_tools: folder_label = "%s<i> - click the tool config file name to see why the tool is invalid</i>" % folder_label elif folder.tool_dependencies: 
folder_label = "%s<i> - this repository's tools require handling of these dependencies</i>" % folder_label col_span_str = 'colspan="3"' - elif folder.valid_tools: - folder_label = "%s<i> - click the name to preview the tool and use the pop-up menu to inspect all metadata</i>" % folder_label - col_span_str = 'colspan="3"' elif folder.workflows: col_span_str = 'colspan="4"' %> @@ -302,7 +305,7 @@ %endif id="libraryItem-${encoded_id}"><td style="padding-left: ${pad+20}px;"> - %if invalid_tool.repository_id and invalid_tool.tool_config and invalid_tool.changeset_revision: + %if trans.webapp.name == 'community' and invalid_tool.repository_id and invalid_tool.tool_config and invalid_tool.changeset_revision: <a class="view-info" href="${h.url_for( controller='repository', action='load_invalid_tool', repository_id=trans.security.encode_id( invalid_tool.repository_id ), tool_config=invalid_tool.tool_config, changeset_revision=invalid_tool.changeset_revision )}"> ${invalid_tool.tool_config | h} </a> @@ -376,12 +379,20 @@ <th style="padding-left: ${pad+20}px;">${tool.name | h}</th> %else: <td style="padding-left: ${pad+20}px;"> - <div style="float:left;" class="menubutton split popup" id="tool-${encoded_id}-popup"> - <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( tool.repository_id ), tool_config=tool.tool_config, changeset_revision=tool.changeset_revision )}">${tool.name | h}</a> - </div> - <div popupmenu="tool-${encoded_id}-popup"> - <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">View tool metadata</a> - </div> + %if tool.repository_id: + %if trans.webapp.name == 'community': + <div style="float:left;" class="menubutton split popup" id="tool-${encoded_id}-popup"> + <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( tool.repository_id ), tool_config=tool.tool_config, changeset_revision=tool.changeset_revision )}">${tool.name | h}</a> + </div> + <div popupmenu="tool-${encoded_id}-popup"> + <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">View tool metadata</a> + </div> + %else: + <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">View tool metadata</a> + %endif + %else: + ${tool.name | h} + %endif </td> %endif <${cell_type}>${tool.description | h}</${cell_type}> @@ -459,7 +470,7 @@ %></%def> -<%def name="render_repository_items( repository_metadata_id, changeset_revision, metadata, containers_dict, can_set_metadata=False )"> +<%def name="render_repository_items( metadata, containers_dict, can_set_metadata=False )"><% from galaxy.tool_shed.encoding_util import tool_shed_encode @@ -472,6 +483,7 @@ readme_files_root_folder = containers_dict.get( 'readme_files', None ) repository_dependencies_root_folder = containers_dict.get( 'repository_dependencies', None ) tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None ) + missing_tool_dependencies_root_folder = containers_dict.get( 
'missing_tool_dependencies', None ) valid_tools_root_folder = containers_dict.get( 'valid_tools', none ) workflows_root_folder = containers_dict.get( 'workflows', None ) @@ -515,6 +527,13 @@ ${render_folder( tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )} </table> %endif + %if missing_tool_dependencies_root_folder: + <p/> + <% row_counter = RowCounter() %> + <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="missing_tool_dependencies"> + ${render_folder( missing_tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )} + </table> + %endif </div></div> %endif diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/webapps/community/repository/manage_repository.mako --- a/templates/webapps/community/repository/manage_repository.mako +++ b/templates/webapps/community/repository/manage_repository.mako @@ -210,7 +210,7 @@ </form></div></div> -${render_repository_items( repository_metadata_id, changeset_revision, metadata, containers_dict, can_set_metadata=True )} +${render_repository_items( metadata, containers_dict, can_set_metadata=True )} <p/><div class="toolForm"><div class="toolFormTitle">Manage categories</div> diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/webapps/community/repository/preview_tools_in_changeset.mako --- a/templates/webapps/community/repository/preview_tools_in_changeset.mako +++ b/templates/webapps/community/repository/preview_tools_in_changeset.mako @@ -81,4 +81,4 @@ </div></div><p/> -${render_repository_items( repository_metadata_id, changeset_revision, metadata, containers_dict, can_set_metadata=False )} +${render_repository_items( metadata, containers_dict, can_set_metadata=False )} diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/webapps/community/repository/view_repository.mako --- a/templates/webapps/community/repository/view_repository.mako +++ b/templates/webapps/community/repository/view_repository.mako @@ -188,7 +188,7 @@ %endif </div></div> -${render_repository_items( repository_metadata_id, changeset_revision, metadata, containers_dict, can_set_metadata=False )} +${render_repository_items( metadata, containers_dict, can_set_metadata=False )} %if repository.categories: <p/><div class="toolForm"> diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 test/tool_shed/base/twilltestcase.py --- a/test/tool_shed/base/twilltestcase.py +++ b/test/tool_shed/base/twilltestcase.py @@ -67,10 +67,13 @@ for dependency in installed_repository.metadata[ 'tool_dependencies' ]: tool_dependency = installed_repository.metadata[ 'tool_dependencies' ][ dependency ] strings_displayed.extend( [ tool_dependency[ 'name' ], tool_dependency[ 'version' ], tool_dependency[ 'type' ] ] ) + """ + TODO: Uncomment these when Greg enhances the tool dependencies and missing tool dependencies containers to display the status. 
if dependencies_installed: strings_displayed.append( 'Installed' ) else: strings_displayed.append( 'Never installed' ) + """ url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id ) self.visit_galaxy_url( url ) self.check_for_strings( strings_displayed, strings_not_displayed ) diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 test/tool_shed/functional/test_1000_install_basic_repository.py --- a/test/tool_shed/functional/test_1000_install_basic_repository.py +++ b/test/tool_shed/functional/test_1000_install_basic_repository.py @@ -55,7 +55,7 @@ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name ) self.verify_installed_repository_on_browse_page( installed_repository ) self.display_installed_repository_manage_page( installed_repository, - strings_displayed=[ 'Installed tool shed repository', 'Tools', 'Filter1' ] ) + strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] ) self.verify_tool_metadata_for_installed_repository( installed_repository ) def test_0030_verify_installed_repository_metadata( self ): '''Verify that resetting the metadata on an installed repository does not change the metadata.''' diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py --- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py +++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py @@ -74,7 +74,7 @@ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name ) self.verify_installed_repository_on_browse_page( installed_repository ) self.display_installed_repository_manage_page( installed_repository, - strings_displayed=[ 'Installed tool shed repository', 'Tools', 'FreeBayes' ] ) + strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] ) self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False ) self.verify_tool_metadata_for_installed_repository( installed_repository ) def test_0020_verify_installed_repository_metadata( self ): diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py --- a/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py +++ b/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py @@ -69,7 +69,7 @@ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'emboss_0020', common.test_user_1_name ) self.verify_installed_repository_on_browse_page( installed_repository ) self.display_installed_repository_manage_page( installed_repository, - strings_displayed=[ 'Installed tool shed repository', 'Tools', 'antigenic' ] ) + strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] ) self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False ) self.verify_tool_metadata_for_installed_repository( installed_repository ) def test_0020_verify_installed_repository_metadata( self ): diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py 
--- a/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py +++ b/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py @@ -111,7 +111,7 @@ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'emboss_0030', common.test_user_1_name ) self.verify_installed_repository_on_browse_page( installed_repository ) self.display_installed_repository_manage_page( installed_repository, - strings_displayed=[ 'Installed tool shed repository', 'Tools', 'antigenic' ] ) + strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] ) self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False ) self.verify_tool_metadata_for_installed_repository( installed_repository ) self.update_installed_repository( installed_repository, strings_displayed=[ "there are no updates available" ] ) Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
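As a closing note on the controller changes above: several call sites that previously branched on len(repo_info_tuple) (6 elements for repositories without dependency information, 7 with it) now delegate to suc.get_repo_info_tuple_contents. The sketch below is reconstructed only from the removed branches visible in this diff and is a hypothetical rendering of that normalization; the real helper in the shared utility module may differ.

    def get_repo_info_tuple_contents(repo_info_tuple):
        # Older 6-element tuples predate repository dependencies; newer 7-element tuples
        # carry repository_dependencies just before tool_dependencies.
        if len(repo_info_tuple) == 6:
            description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
            repository_dependencies = None
        else:
            description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
        return description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies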