commit/galaxy-central: greg: Re-engineer the process for defining valid tool shed repository changeset revisions and add support for automatically installing and building tool dependencies for tools included in tool shed repositories being installed into a local Galaxy instance. Tool dependencies will be uninstalled if the repository is uninstalled.
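For orientation before the diff: the new installer consumes a tool_dependencies.xml file shipped inside the repository. The sketch below is reconstructed from the tags parsed in install_util.py and shed_util.py in this commit, so treat it as illustrative only; the root tag and the <build_command> element names are assumptions (the parser requires <package> elements whose name and version match a <requirement type="package"> tag in at least one tool config, reads the <param> children of a <fabfile><method>, and substitutes $INSTALL_DIR into build commands). The method name follows the install_and_build example shown in the diff; the Galaxy-provided fabfile path currently runs the single install_and_build_package() method regardless of that name.

    <tool_dependencies>
        <package name="emboss" version="5.0.0">
            <fabfile>
                <method name="install_and_build">
                    <param name="download_url">ftp://emboss.open-bio.org/pub/EMBOSS/old/5.0.0/EMBOSS-5.0.0.tar.gz</param>
                    <param name="build_commands">
                        <build_command>./configure --prefix=$INSTALL_DIR</build_command>
                        <build_command>make</build_command>
                        <build_command>make install</build_command>
                    </param>
                </method>
            </fabfile>
            <readme>Compiles EMBOSS 5.0.0 from source.</readme>
        </package>
    </tool_dependencies>

Per the handle_tool_dependencies docstring in the diff, each such package is installed under ~/<tool_dependency_dir>/<package_name>/<package_version>/<repository_owner>/<repository_name>/<installed_changeset_revision>, and the new tool_dependency table records each installation so it can be removed when the repository is uninstalled.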
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/changeset/72c4f0007f4e/ changeset: 72c4f0007f4e user: greg date: 2012-05-23 20:23:28 summary: Re-engineer the process for defining valid tool shed repository changeset revisions and add support for automatically installing and building tool dependencies for tools included in tool shed repositories being installed into a local Galaxy instance. Tool dependencies will be uninstalled if the repository is uninstalled. affected #: 37 files diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/app.py --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -3,6 +3,7 @@ from galaxy import config, jobs, util, tools, web import galaxy.tools.search import galaxy.tools.data +import galaxy.tool_shed import galaxy.tool_shed.tool_shed_registry from galaxy.web import security import galaxy.model diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2648,7 +2648,7 @@ class ToolShedRepository( object ): def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None, installed_changeset_revision=None, changeset_revision=None, ctx_rev=None, metadata=None, includes_datatypes=False, update_available=False, deleted=False, - uninstalled=False, dist_to_shed=False ): + uninstalled=False, dist_to_shed=False ): self.id = id self.create_time = create_time self.tool_shed = tool_shed @@ -2664,13 +2664,37 @@ self.deleted = deleted self.uninstalled = uninstalled self.dist_to_shed = dist_to_shed + def repo_path( self, app ): + tool_shed_url = self.tool_shed + if tool_shed_url.find( ':' ) > 0: + # Eliminate the port, if any, since it will result in an invalid directory name. 
+ tool_shed_url = tool_shed_url.split( ':' )[ 0 ] + tool_shed = tool_shed_url.rstrip( '/' ) + for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ): + tool_path = shed_tool_conf_dict[ 'tool_path' ] + relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision ) + if os.path.exists( relative_path ): + return relative_path + return None @property def includes_tools( self ): return self.metadata and 'tools' in self.metadata @property + def includes_tool_dependencies( self ): + return self.metadata and 'tool_dependencies' in self.metadata + @property def includes_workflows( self ): return self.metadata and 'workflows' in self.metadata +class ToolDependency( object ): + def __init__( self, tool_shed_repository_id=None, installed_changeset_revision=None, name=None, version=None, type=None, uninstalled=False ): + self.tool_shed_repository_id = tool_shed_repository_id + self.installed_changeset_revision = installed_changeset_revision + self.name = name + self.version = version + self.type = type + self.uninstalled = uninstalled + class ToolVersion( object ): def __init__( self, id=None, create_time=None, tool_id=None, tool_shed_repository=None ): self.id = id diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/model/mapping.py --- a/lib/galaxy/model/mapping.py +++ b/lib/galaxy/model/mapping.py @@ -390,6 +390,17 @@ Column( "uninstalled", Boolean, default=False ), Column( "dist_to_shed", Boolean, default=False ) ) +ToolDependency.table = Table( "tool_dependency", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ), + Column( "installed_changeset_revision", TrimmedString( 255 ) ), + Column( "name", TrimmedString( 255 ) ), + Column( "version", TrimmedString( 40 ) ), + Column( "type", TrimmedString( 40 ) ), + Column( "uninstalled", Boolean, default=False ) ) + ToolVersion.table = Table( "tool_version", metadata, Column( "id", Integer, primary_key=True ), Column( "create_time", DateTime, default=now ), @@ -1667,7 +1678,12 @@ assign_mapper( context, ToolShedRepository, ToolShedRepository.table, properties=dict( tool_versions=relation( ToolVersion, primaryjoin=( ToolShedRepository.table.c.id == ToolVersion.table.c.tool_shed_repository_id ), - backref='tool_shed_repository' ) ) ) + backref='tool_shed_repository' ), + tool_dependencies=relation( ToolDependency, + primaryjoin=( ToolShedRepository.table.c.id == ToolDependency.table.c.tool_shed_repository_id ), + backref='tool_shed_repository' ) ) ) + +assign_mapper( context, ToolDependency, ToolDependency.table ) assign_mapper( context, ToolVersion, ToolVersion.table ) diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py --- /dev/null +++ b/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py @@ -0,0 +1,49 @@ +""" +Migration script to add the tool_dependency table. 
+""" +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * +import sys, logging +from galaxy.model.custom_types import * +from sqlalchemy.exc import * +import datetime +now = datetime.datetime.utcnow + +log = logging.getLogger( __name__ ) +log.setLevel( logging.DEBUG ) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData( migrate_engine ) + +# New table to store information about cloned tool shed repositories. +ToolDependency_table = Table( "tool_dependency", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ), + Column( "installed_changeset_revision", TrimmedString( 255 ) ), + Column( "name", TrimmedString( 255 ) ), + Column( "version", TrimmedString( 40 ) ), + Column( "type", TrimmedString( 40 ) ), + Column( "uninstalled", Boolean, default=False ) ) + +def upgrade(): + print __doc__ + metadata.reflect() + try: + ToolDependency_table.create() + except Exception, e: + log.debug( "Creating tool_dependency table failed: %s" % str( e ) ) + +def downgrade(): + metadata.reflect() + try: + ToolDependency_table.drop() + except Exception, e: + log.debug( "Dropping tool_dependency table failed: %s" % str( e ) ) diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/tool_shed/__init__.py --- a/lib/galaxy/tool_shed/__init__.py +++ b/lib/galaxy/tool_shed/__init__.py @@ -1,27 +1,47 @@ """ Classes encapsulating the management of repositories installed from Galaxy tool sheds. 
""" -import os, logging -from galaxy.util.shed_util import * +import os +import galaxy.util.shed_util from galaxy.model.orm import * -log = logging.getLogger(__name__) +from galaxy import eggs +import pkg_resources + +pkg_resources.require( 'elementtree' ) +from elementtree import ElementTree, ElementInclude class InstalledRepositoryManager( object ): def __init__( self, app ): self.app = app self.model = self.app.model self.sa_session = self.model.context.current + self.tool_configs = self.app.config.tool_configs + if self.app.config.migrated_tools_config not in self.tool_configs: + self.tool_configs.append( self.app.config.migrated_tools_config ) + def get_repository_install_dir( self, tool_shed_repository ): + for tool_config in self.tool_configs: + tree = ElementTree.parse( tool_config ) + root = tree.getroot() + ElementInclude.include( root ) + tool_path = root.get( 'tool_path', None ) + if tool_path: + tool_shed = galaxy.util.shed_util.clean_tool_shed_url( tool_shed_repository.tool_shed ) + relative_path = os.path.join( tool_path, + tool_shed, + 'repos', + tool_shed_repository.owner, + tool_shed_repository.name, + tool_shed_repository.installed_changeset_revision ) + if os.path.exists( relative_path ): + return relative_path + return None def load_proprietary_datatypes( self ): for tool_shed_repository in self.sa_session.query( self.model.ToolShedRepository ) \ .filter( and_( self.model.ToolShedRepository.table.c.includes_datatypes==True, self.model.ToolShedRepository.table.c.deleted==False ) ) \ .order_by( self.model.ToolShedRepository.table.c.id ): - metadata = tool_shed_repository.metadata - datatypes_config = metadata[ 'datatypes_config' ] - # We need the repository installation directory, which we can derive from the path to the datatypes config. 
- path_items = datatypes_config.split( 'repos' ) - relative_install_dir = '%srepos/%s/%s/%s' % \ - ( path_items[0], tool_shed_repository.owner, tool_shed_repository.name, tool_shed_repository.installed_changeset_revision ) - load_datatype_items( self.app, tool_shed_repository, relative_install_dir ) + relative_install_dir = self.get_repository_install_dir( tool_shed_repository ) + if relative_install_dir: + galaxy.util.shed_util.load_datatype_items( self.app, tool_shed_repository, relative_install_dir ) \ No newline at end of file diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/tool_shed/encoding_util.py --- /dev/null +++ b/lib/galaxy/tool_shed/encoding_util.py @@ -0,0 +1,41 @@ +import binascii +from galaxy.util.hash_util import * +from galaxy.util.json import json_fix + +from galaxy import eggs +import pkg_resources + +pkg_resources.require( "simplejson" ) +import simplejson + +encoding_sep = '__esep__' + +def tool_shed_decode( value ): + # Extract and verify hash + a, b = value.split( ":" ) + value = binascii.unhexlify( b ) + test = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ) + assert a == test + # Restore from string + values = None + try: + values = simplejson.loads( value ) + except Exception, e: + log.debug( "Decoding json value from tool shed threw exception: %s" % str( e ) ) + if values is not None: + try: + return json_fix( values ) + except Exception, e: + log.debug( "Fixing decoded json value from tool shed threw exception: %s" % str( e ) ) + fixed_values = values + if values is None: + values = value + return values +def tool_shed_encode( val ): + if isinstance( val, dict ): + value = simplejson.dumps( val ) + else: + value = val + a = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ) + b = binascii.hexlify( value ) + return "%s:%s" % ( a, b ) \ No newline at end of file diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/tool_shed/install_manager.py --- a/lib/galaxy/tool_shed/install_manager.py +++ b/lib/galaxy/tool_shed/install_manager.py @@ -2,14 +2,12 @@ Manage automatic installation of tools configured in the xxx.xml files in ~/scripts/migrate_tools (e.g., 0002_tools.xml). All of the tools were at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool shed. """ -import logging, urllib2 +import urllib2 from galaxy.tools import ToolSection from galaxy.util.json import from_json_string, to_json_string from galaxy.util.shed_util import * from galaxy.util.odict import odict -log = logging.getLogger( __name__ ) - class InstallManager( object ): def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config ): """ @@ -73,7 +71,7 @@ # Tools outside of sections. file_path = elem.get( 'file', None ) if file_path: - path, name = os.path.split( file_path ) + name = strip_path( file_path ) if name in migrated_tool_configs: if elem not in tool_panel_elems: tool_panel_elems.append( elem ) @@ -83,7 +81,7 @@ if section_elem.tag == 'tool': file_path = section_elem.get( 'file', None ) if file_path: - path, name = os.path.split( file_path ) + name = strip_path( file_path ) if name in migrated_tool_configs: # Append the section, not the tool. if elem not in tool_panel_elems: @@ -100,7 +98,7 @@ if proprietary_tool_panel_elem.tag == 'tool': # The proprietary_tool_panel_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />. 
proprietary_tool_config = proprietary_tool_panel_elem.get( 'file' ) - proprietary_path, proprietary_name = os.path.split( proprietary_tool_config ) + proprietary_name = strip_path( proprietary_tool_config ) if tool_config == proprietary_name: # The tool is loaded outside of any sections. tool_sections.append( None ) @@ -112,15 +110,14 @@ if section_elem.tag == 'tool': # The section_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />. proprietary_tool_config = section_elem.get( 'file' ) - proprietary_path, proprietary_name = os.path.split( proprietary_tool_config ) + proprietary_name = strip_path( proprietary_tool_config ) if tool_config == proprietary_name: # The tool is loaded inside of the section_elem. tool_sections.append( ToolSection( proprietary_tool_panel_elem ) ) if not is_displayed: is_displayed = True return is_displayed, tool_sections - def handle_repository_contents( self, current_working_dir, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, - changeset_revision, ctx_rev ): + def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, changeset_revision, ctx_rev ): # Generate the metadata for the installed tool shed repository, among other things. It is critical that the installed repository is # updated to the desired changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk. # The values for the keys in each of the following dictionaries will be a list to allow for the same tool to be displayed in multiple places @@ -136,7 +133,7 @@ tool_panel_dict_for_tool_config = generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections ) for k, v in tool_panel_dict_for_tool_config.items(): tool_panel_dict_for_display[ k ] = v - metadata_dict = generate_metadata( self.toolbox, relative_install_dir, repository_clone_url ) + metadata_dict = generate_metadata_using_disk_files( self.toolbox, relative_install_dir, repository_clone_url ) # Add a new record to the tool_shed_repository table if one doesn't already exist. If one exists but is marked # deleted, undelete it. It is critical that this happens before the call to add_to_tool_panel() below because # tools will not be properly loaded if the repository is marked deleted. @@ -150,6 +147,7 @@ metadata_dict, dist_to_shed=True ) if 'tools' in metadata_dict: + work_dir = make_tmp_directory() repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict ) if repository_tools_tups: sample_files = metadata_dict.get( 'sample_files', [] ) @@ -159,11 +157,20 @@ repository_tools_tups, sample_files_copied = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups ) # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance. copy_sample_files( self.app, sample_files, sample_files_copied=sample_files_copied ) - if 'tool_dependencies_config' in metadata_dict: + if 'tool_dependencies' in metadata_dict: + # Get the tool_dependencies.xml file from the repository. + tool_dependencies_config = get_config_from_repository( self.app, + 'tool_dependencies.xml', + tool_shed_repository, + changeset_revision, + work_dir ) # Install tool dependencies. 
- status, message = handle_tool_dependencies( self.app, repository_clone_url, metadata_dict[ 'tool_dependencies_config' ] ) + status, message = handle_tool_dependencies( app=self.app, + tool_shed_repository=tool_shed_repository, + installed_changeset_revision=changeset_revision, + tool_dependencies_config=tool_dependencies_config ) if status != 'ok' and message: - print 'The following error occurred while installing tool dependencies:' + print 'The following error occurred from the InstallManager while installing tool dependencies:' print message add_to_tool_panel( self.app, repository_name, @@ -174,8 +181,17 @@ self.migrated_tools_config, tool_panel_dict=tool_panel_dict_for_display, new_install=True ) - if 'datatypes_config' in metadata_dict: - datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] ) + try: + shutil.rmtree( work_dir ) + except: + pass + if 'datatypes' in metadata_dict: + work_dir = make_tmp_directory() + datatypes_config = get_config_from_repository( self.app, + 'datatypes_conf.xml', + tool_shed_repository, + changeset_revision, + work_dir ) # Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started # after this installation completes. converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, relative_install_dir, override=False ) @@ -194,6 +210,10 @@ if display_path: # Load proprietary datatype display applications self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict ) + try: + shutil.rmtree( work_dir ) + except: + pass return tool_shed_repository, metadata_dict def install_repository( self, repository_elem ): # Install a single repository, loading contained tools into the tool panel. @@ -205,14 +225,12 @@ if self.__isinstalled( clone_dir ): print "Skipping automatic install of repository '", name, "' because it has already been installed in location ", clone_dir else: - current_working_dir = os.getcwd() tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed ) repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name ) relative_install_dir = os.path.join( clone_dir, name ) ctx_rev = get_ctx_rev( tool_shed_url, name, self.repository_owner, changeset_revision ) clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev ) - tool_shed_repository, metadata_dict = self.handle_repository_contents( current_working_dir, - repository_clone_url, + tool_shed_repository, metadata_dict = self.handle_repository_contents( repository_clone_url, relative_install_dir, repository_elem, name, @@ -280,8 +298,7 @@ if shed_url.endswith( '/' ): shed_url = shed_url.rstrip( '/' ) return shed_url - # The tool shed from which the repository was originally - # installed must no longer be configured in tool_sheds_conf.xml. + # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml. 
return None def __isinstalled( self, clone_dir ): full_path = os.path.abspath( clone_dir ) diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/tool_shed/tool_dependencies/common_util.py --- /dev/null +++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py @@ -0,0 +1,43 @@ +import os, tarfile, urllib2 +from galaxy.datatypes.checkers import * + +def extract_tar( file_name, file_path ): + if isgzip( file_name ) or isbz2( file_name ): + # Open for reading with transparent compression. + tar = tarfile.open( file_name, 'r:*' ) + else: + tar = tarfile.open( file_name ) + tar.extractall( path=file_path ) + tar.close() +def isbz2( file_path ): + return is_bz2( file_path ) +def isgzip( file_path ): + return is_gzip( file_path ) +def istar( file_path ): + return tarfile.is_tarfile( file_path ) +def iszip( file_path ): + return check_zip( file_path ) +def tar_extraction_directory( file_path, file_name ): + extensions = [ '.tar.gz', '.tgz', '.tar.bz2', '.zip' ] + for extension in extensions: + if file_name.endswith( extension ): + dir_name = file_name[ :-len( extension ) ] + full_path = os.path.abspath( os.path.join( file_path, dir_name ) ) + if os.path.exists( full_path ): + return dir_name + raise ValueError( 'Could not find directory %s' % full_path ) +def url_download( install_dir, downloaded_file_name, download_url ): + file_path = os.path.join( install_dir, downloaded_file_name ) + src = None + dst = None + try: + src = urllib2.urlopen( download_url ) + data = src.read() + dst = open( file_path,'wb' ) + dst.write( data ) + except: + if src: + src.close() + if dst: + dst.close() + return os.path.abspath( file_path ) diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/tool_shed/tool_dependencies/fabric_util.py --- /dev/null +++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py @@ -0,0 +1,108 @@ +# For Python 2.5 +from __future__ import with_statement + +import os +from contextlib import contextmanager +import common_util + +from galaxy import eggs +import pkg_resources + +pkg_resources.require( 'fabric' ) + +from fabric.api import env, lcd, local + +def check_fabric_version(): + version = env.version + if int( version.split( "." )[ 0 ] ) < 1: + raise NotImplementedError( "Install Fabric version 1.0 or later." 
) +def set_galaxy_environment( galaxy_user, tool_dependency_dir, host='localhost', shell='/bin/bash -l -c' ): + """General Galaxy environment configuration""" + env.user = galaxy_user + env.install_dir = tool_dependency_dir + env.host_string = host + env.shell = shell + env.use_sudo = False + env.safe_cmd = local + return env +@contextmanager +def make_tmp_dir(): + tmp_dir = local( 'echo $TMPDIR' ).strip() + if not tmp_dir: + home_dir = local( 'echo $HOME' ) + tmp_dir = os.path.join( home_dir, 'tmp' ) + work_dir = os.path.join( tmp_dir, 'deploy_tmp' ) + if not os.path.exists( work_dir ): + local( 'mkdir -p %s' % work_dir ) + yield work_dir + if os.path.exists( work_dir ): + local( 'rm -rf %s' % work_dir ) +def handle_post_build_processing( tool_dependency_dir, install_dir, package_name=None ): + cmd = "echo 'PATH=%s/bin:$PATH' > %s/env.sh;chmod +x %s/env.sh" % ( install_dir, install_dir, install_dir ) + message = '' + output = local( cmd, capture=True ) + log_results( cmd, output, os.path.join( install_dir, 'env_sh.log' ) ) + if output.return_code: + message = '%s %s' % ( message, str( output.stderr ) ) + """ + Since automatic dependency installation requires a version attribute in the tool's <requirement> tag, we don't have to + create a default symlink, but we'll keep this code around for a bit just in case we need it later. + if package_name: + package_dir = os.path.join( tool_dependency_dir, package_name ) + package_default = os.path.join( package_dir, 'default' ) + if not os.path.islink( package_default ): + cmd = 'ln -s %s %s' % ( install_dir, package_default ) + output = local( cmd, capture=True ) + if output.return_code: + message = '%s\n%s' % ( message, str( output.stderr ) ) + """ + return message +def install_and_build_package( params_dict ): + """Install a Galaxy tool dependency package either via a url or a mercurial or git clone command.""" + install_dir = params_dict[ 'install_dir' ] + download_url = params_dict.get( 'download_url', None ) + clone_cmd = params_dict.get( 'clone_cmd', None ) + build_commands = params_dict.get( 'build_commands', None ) + package_name = params_dict.get( 'package_name', None ) + with make_tmp_dir() as work_dir: + with lcd( work_dir ): + if download_url: + downloaded_filename = os.path.split( download_url )[ -1 ] + downloaded_file_path = common_util.url_download( work_dir, downloaded_filename, download_url ) + if common_util.istar( downloaded_file_path ): + common_util.extract_tar( downloaded_file_path, work_dir ) + dir = common_util.tar_extraction_directory( work_dir, downloaded_filename ) + else: + dir = work_dir + elif clone_cmd: + output = local( clone_cmd, capture=True ) + log_results( clone_cmd, output, os.path.join( install_dir, 'clone_repository.log' ) ) + if output.return_code: + return '%s. ' % str( output.stderr ) + dir = package_name + if build_commands: + with lcd( dir ): + for build_command in build_commands: + output = local( build_command, capture=True ) + log_results( build_command, output, os.path.join( install_dir, 'build_commands.log' ) ) + if output.return_code: + return '%s. ' % str( output.stderr ) + return '' +def log_results( command, fabric_AttributeString, file_path ): + """ + Write attributes of fabric.operations._AttributeString (which is the output of executing command using fabric's local() method) + to a specified log file. 
+ """ + if os.path.exists( file_path ): + logfile = open( file_path, 'ab' ) + else: + logfile = open( file_path, 'wb' ) + logfile.write( "#############################################" ) + logfile.write( '\n%s\nSTDOUT\n' % command ) + logfile.write( "#############################################" ) + logfile.write( str( fabric_AttributeString.stdout ) ) + logfile.write( "#############################################" ) + logfile.write( '\n%s\nSTDERR\n' % command ) + logfile.write( "#############################################" ) + logfile.write( str( fabric_AttributeString.stderr ) ) + logfile.close() diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/tool_shed/tool_dependencies/install_util.py --- /dev/null +++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py @@ -0,0 +1,183 @@ +import sys, os, subprocess, tempfile +from fabric_util import * +from galaxy.tool_shed.encoding_util import * +from galaxy.model.orm import * + +from galaxy import eggs +import pkg_resources + +pkg_resources.require( 'elementtree' ) +from elementtree import ElementTree, ElementInclude +from elementtree.ElementTree import Element, SubElement + +def create_or_update_tool_dependency( app, tool_shed_repository, changeset_revision, name, version, type ): + sa_session = app.model.context.current + tool_dependency = get_tool_dependency_by_shed_changeset_revision( app, tool_shed_repository, name, version, type, changeset_revision ) + if tool_dependency: + tool_dependency.uninstalled = False + else: + tool_dependency = app.model.ToolDependency( tool_shed_repository_id=tool_shed_repository.id, + installed_changeset_revision=changeset_revision, + name=name, + version=version, + type=type ) + sa_session.add( tool_dependency ) + sa_session.flush() + return tool_dependency +def get_install_dir( app, repository, installed_changeset_revision, package_name, package_version ): + return os.path.abspath( os.path.join( app.config.tool_dependency_dir, + package_name, + package_version, + repository.owner, + repository.name, + installed_changeset_revision ) ) +def get_tool_dependency_by_shed_changeset_revision( app, repository, dependency_name, dependency_version, dependency_type, changeset_revision ): + sa_session = app.model.context.current + return sa_session.query( app.model.ToolDependency ) \ + .filter( and_( app.model.ToolDependency.table.c.tool_shed_repository_id == repository.id, + app.model.ToolDependency.table.c.name == dependency_name, + app.model.ToolDependency.table.c.version == dependency_version, + app.model.ToolDependency.table.c.type == dependency_type, + app.model.ToolDependency.table.c.installed_changeset_revision == changeset_revision ) ) \ + .first() +def install_package( app, elem, tool_shed_repository, installed_changeset_revision ): + # The value of package_name should match the value of the "package" type in the tool config's <requirements> tag set, but it's not required. + message = '' + package_name = elem.get( 'name', None ) + package_version = elem.get( 'version', None ) + if package_name and package_version: + install_dir = get_install_dir( app, tool_shed_repository, installed_changeset_revision, package_name, package_version ) + if not_installed( install_dir ): + for package_elem in elem: + if package_elem.tag == 'proprietary_fabfile': + # TODO: This is not yet working... + # Handle tool dependency installation where the repository includes one or more proprietary fabric scripts. 
+ if not fabric_version_checked: + check_fabric_version() + fabric_version_checked = True + fabfile_name = package_elem.get( 'name', None ) + fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) ) + print 'Installing tool dependencies via fabric script ', fabfile_path + elif package_elem.tag == 'fabfile': + # Handle tool dependency installation using a fabric script provided by Galaxy. Example tag set definition: + fabfile_path = None + for method_elem in package_elem.findall( 'method' ): + error_message = run_fabric_method( app, + method_elem, + fabfile_path, + app.config.tool_dependency_dir, + install_dir, + package_name=package_name ) + if error_message: + message += '%s' % error_message + else: + tool_dependency = create_or_update_tool_dependency( app, + tool_shed_repository, + installed_changeset_revision, + name=package_name, + version=package_version, + type='package' ) + print package_name, 'version', package_version, 'installed in', install_dir + else: + print '\nSkipping installation of tool dependency', package_name, 'version', package_version, 'since it is installed in', install_dir, '\n' + return message +def not_installed( install_dir ): + # TODO: try executing a binary or something in addition to just seeing if the install_dir exists. + return not os.path.exists( install_dir ) +def run_fabric_method( app, elem, fabfile_path, tool_dependency_dir, install_dir, package_name=None, **kwd ): + """Parse a tool_dependencies.xml file's fabfile <method> tag set to build the method parameters and execute the method.""" + if not os.path.exists( install_dir ): + os.makedirs( install_dir ) + method_name = elem.get( 'name', None ) + params_dict = dict( install_dir=install_dir ) + build_commands = [] + for param_elem in elem: + param_name = param_elem.get( 'name' ) + if param_name: + if param_name == 'build_commands': + for build_command_elem in param_elem: + build_commands.append( build_command_elem.text.replace( '$INSTALL_DIR', install_dir ) ) + if build_commands: + params_dict[ 'build_commands' ] = build_commands + else: + if param_elem.text: + params_dict[ param_name ] = param_elem.text + if package_name: + params_dict[ 'package_name' ] = package_name + if fabfile_path: + # TODO: Handle this using the fabric api. + # run_proprietary_fabric_method( app, elem, fabfile_path, tool_dependency_dir, install_dir, package_name=package_name ) + return 'Tool dependency installation using proprietary fabric scripts is not yet supported. ' + else: + # There is currently only 1 fabric method, install_and_build_package(). + try: + message = install_and_build_package( params_dict ) + if message: + return message + except Exception, e: + return '%s. ' % str( e ) + try: + message = handle_post_build_processing( tool_dependency_dir, install_dir, package_name=package_name ) + if message: + return message + except Exception, e: + return '%s. ' % str( e ) + print package_name, 'installed to', install_dir + return '' +def run_proprietary_fabric_method( app, elem, fabfile_path, tool_dependency_dir, install_dir, package_name=None, **kwd ): + """ + TODO: Handle this using the fabric api. + Parse a tool_dependencies.xml file's fabfile <method> tag set to build the method parameters and execute the method. 
+ """ + if not os.path.exists( install_dir ): + os.makedirs( install_dir ) + method_name = elem.get( 'name', None ) + params_str = '' + build_commands = [] + for param_elem in elem: + param_name = param_elem.get( 'name' ) + if param_name: + if param_name == 'build_commands': + for build_command_elem in param_elem: + build_commands.append( build_command_elem.text.replace( '$INSTALL_DIR', install_dir ) ) + if build_commands: + params_str += 'build_commands=%s,' % tool_shed_encode( encoding_sep.join( build_commands ) ) + else: + if param_elem.text: + param_value = tool_shed_encode( param_elem.text ) + params_str += '%s=%s,' % ( param_name, param_value ) + if package_name: + params_str += 'package_name=%s' % package_name + else: + params_str = params_str.rstrip( ',' ) + try: + cmd = 'fab -f %s %s:%s' % ( fabfile_path, method_name, params_str ) + returncode, message = run_subprocess( app, cmd ) + except Exception, e: + return "Exception executing fabric script %s: %s. " % ( str( fabfile_path ), str( e ) ) + if returncode: + return message + message = handle_post_build_processing( tool_dependency_dir, install_dir, package_name=package_name ) + if message: + return message + else: + print package_name, 'installed to', install_dir + return '' +def run_subprocess( app, cmd ): + env = os.environ + PYTHONPATH = env.get( 'PYTHONPATH', '' ) + if PYTHONPATH: + env[ 'PYTHONPATH' ] = '%s:%s' % ( os.path.abspath( os.path.join( app.config.root, 'lib' ) ), PYTHONPATH ) + else: + env[ 'PYTHONPATH' ] = os.path.abspath( os.path.join( app.config.root, 'lib' ) ) + message = '' + tmp_name = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_name, 'wb' ) + proc = subprocess.Popen( cmd, shell=True, env=env, stderr=tmp_stderr.fileno() ) + returncode = proc.wait() + tmp_stderr.close() + if returncode: + tmp_stderr = open( tmp_name, 'rb' ) + message = '%s\n' % str( tmp_stderr.read() ) + tmp_stderr.close() + return returncode, message diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/tool_shed/tool_shed_registry.py --- a/lib/galaxy/tool_shed/tool_shed_registry.py +++ b/lib/galaxy/tool_shed/tool_shed_registry.py @@ -15,10 +15,9 @@ def __init__( self, root_dir=None, config=None ): self.tool_sheds = odict() if root_dir and config: - # Parse datatypes_conf.xml + # Parse tool_sheds_conf.xml tree = parse_xml( config ) root = tree.getroot() - # Load datatypes and converters from config log.debug( 'Loading references to tool sheds from %s' % config ) for elem in root.findall( 'tool_shed' ): try: diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/tools/__init__.py --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -830,10 +830,8 @@ if not self.name: raise Exception, "Missing tool 'name'" # Get the UNIQUE id for the tool - # TODO: can this be generated automatically? 
if guid is None: self.id = root.get( "id" ) - self.version = root.get( "version" ) else: self.id = guid if not self.id: @@ -850,8 +848,7 @@ self.force_history_refresh = util.string_as_bool( root.get( 'force_history_refresh', 'False' ) ) self.display_interface = util.string_as_bool( root.get( 'display_interface', str( self.display_interface ) ) ) self.require_login = util.string_as_bool( root.get( 'require_login', str( self.require_login ) ) ) - # Load input translator, used by datasource tools to change - # names/values of incoming parameters + # Load input translator, used by datasource tools to change names/values of incoming parameters self.input_translator = root.find( "request_param_translation" ) if self.input_translator: self.input_translator = ToolInputTranslator.from_element( self.input_translator ) diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/util/__init__.py --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -16,6 +16,7 @@ except ImportError: from md5 import new as md5 +from galaxy import eggs import pkg_resources pkg_resources.require( 'docutils' ) diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/util/shed_util.py --- a/lib/galaxy/util/shed_util.py +++ b/lib/galaxy/util/shed_util.py @@ -1,17 +1,18 @@ -import sys, os, tempfile, shutil, subprocess, logging, string, urllib2 +import sys, os, tempfile, shutil, logging, string, urllib2 from datetime import date, datetime, timedelta from time import strftime, gmtime from galaxy import util from galaxy.datatypes.checkers import * from galaxy.util.json import * from galaxy.tools.search import ToolBoxSearch +from galaxy.tool_shed.tool_dependencies.install_util import install_package from galaxy.model.orm import * from galaxy import eggs import pkg_resources pkg_resources.require( 'mercurial' ) -from mercurial import ui, commands +from mercurial import hg, ui, commands pkg_resources.require( 'elementtree' ) from elementtree import ElementTree, ElementInclude @@ -26,6 +27,7 @@ '&' : '&', '\'' : ''' } VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" ) +NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'tool_dependencies.xml' ] class ShedCounter( object ): def __init__( self, model ): @@ -203,7 +205,7 @@ if display_path: registration.attrib[ 'proprietary_display_path' ] = display_path for relative_path_to_datatype_file_name in datatype_class_modules: - relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name ) + datatype_file_name_path, datatype_file_name = os.path.split( relative_path_to_datatype_file_name ) for elem in registration.findall( 'datatype' ): # Handle 'type' attribute which should be something like one of the following: # type="gmap:GmapDB" @@ -216,7 +218,7 @@ # Handle the case where datatype_module is "galaxy.datatypes.gmap". proprietary_datatype_module = proprietary_datatype_module.split( '.' )[ -1 ] # The value of proprietary_path must be an absolute path due to job_working_directory. 
- elem.attrib[ 'proprietary_path' ] = os.path.abspath( relative_head ) + elem.attrib[ 'proprietary_path' ] = os.path.abspath( datatype_file_name_path ) elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module sniffers = datatypes_config_root.find( 'sniffers' ) else: @@ -271,9 +273,7 @@ return tool_shed_url.split( ':' )[ 0 ] return tool_shed_url.rstrip( '/' ) def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ): - """ - Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository. - """ + """Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository.""" commands.clone( get_configured_ui(), repository_clone_url, dest=repository_file_dir, @@ -281,12 +281,10 @@ noupdate=False, rev=[ ctx_rev ] ) def copy_sample_file( app, filename, dest_path=None ): - """ - Copy xxx.loc.sample to dest_path/xxx.loc.sample and dest_path/xxx.loc. The default value for dest_path is ~/tool-data. - """ + """Copy xxx.loc.sample to dest_path/xxx.loc.sample and dest_path/xxx.loc. The default value for dest_path is ~/tool-data.""" if dest_path is None: dest_path = os.path.abspath( app.config.tool_data_path ) - sample_file_path, sample_file_name = os.path.split( filename ) + sample_file_name = strip_path( filename ) copied_file = sample_file_name.replace( '.sample', '' ) # It's ok to overwrite the .sample version of the file. shutil.copy( os.path.abspath( filename ), os.path.join( dest_path, sample_file_name ) ) @@ -319,7 +317,7 @@ tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) if not owner: owner = get_repository_owner( tmp_url ) - includes_datatypes = 'datatypes_config' in metadata_dict + includes_datatypes = 'datatypes' in metadata_dict tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ) if tool_shed_repository: tool_shed_repository.description = description @@ -387,31 +385,111 @@ if datatypes: metadata_dict[ 'datatypes' ] = datatypes return metadata_dict -def generate_metadata( toolbox, relative_install_dir, repository_clone_url ): +def generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict ): """ - Browse the repository files on disk to generate metadata. Since we are using disk files, it is imperative that the - repository is updated to the desired change set revision before metadata is generated. + If the combination of name, version and type of each element is defined in the <requirement> tag for at least one tool in the repository, + then update the received metadata_dict with information from the parsed tool_dependencies_config. """ + tree = ElementTree.parse( tool_dependencies_config ) + root = tree.getroot() + ElementInclude.include( root ) + tool_dependencies_dict = {} + if can_generate_tool_dependency_metadata( root, metadata_dict ): + for elem in root: + if elem.tag == 'package': + tool_dependencies_dict = generate_package_dependency_metadata( elem, tool_dependencies_dict ) + # Handle tool dependency installation via other means here (future). 
+ if tool_dependencies_dict: + metadata_dict[ 'tool_dependencies' ] = tool_dependencies_dict + return metadata_dict +def can_generate_tool_dependency_metadata( root, metadata_dict ): + """ + Make sure the combination of name, version and type (the type will be the value of elem.tag) of each root element tag in the tool_dependencies.xml + file is defined in the <requirement> tag for at least one tool in the repository. + """ + can_generate_dependency_metadata = False + for elem in root: + can_generate_dependency_metadata = False + tool_dependency_name = elem.get( 'name', None ) + tool_dependency_version = elem.get( 'version', None ) + tool_dependency_type = elem.tag + if tool_dependency_name and tool_dependency_version and tool_dependency_type: + for tool_dict in metadata_dict[ 'tools' ]: + requirements = tool_dict.get( 'requirements', [] ) + for requirement_dict in requirements: + requirement_name = requirement_dict.get( 'name', None ) + requirement_version = requirement_dict.get( 'version', None ) + requirement_type = requirement_dict.get( 'type', None ) + if requirement_name == tool_dependency_name and requirement_version == tool_dependency_version and requirement_type == tool_dependency_type: + can_generate_dependency_metadata = True + break + if not can_generate_dependency_metadata: + # We've discovered at least 1 combination of name, version and type that is not defined in the <requirement> + # tag for any tool in the repository. + break + if not can_generate_dependency_metadata: + break + return can_generate_dependency_metadata +def generate_package_dependency_metadata( elem, tool_dependencies_dict ): + """The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set.""" + requirements_dict = {} + package_name = elem.get( 'name', None ) + package_version = elem.get( 'version', None ) + if package_name and package_version: + requirements_dict [ 'name' ] = package_name + requirements_dict [ 'type' ] = 'package' + requirements_dict [ 'version' ] = package_version + dependency_key = '%s/%s' % ( package_name, package_version ) + fabfiles_dict = {} + for sub_elem in elem: + if sub_elem.tag == 'proprietary_fabfile': + requirements_dict = generate_fabfile_metadata( sub_elem, requirements_dict, proprietary=True ) + elif sub_elem.tag == 'fabfile': + requirements_dict = generate_fabfile_metadata( sub_elem, requirements_dict, proprietary=False ) + elif sub_elem.tag == 'readme': + requirements_dict[ 'readme' ] = sub_elem.text + if requirements_dict: + tool_dependencies_dict[ dependency_key ] = requirements_dict + return tool_dependencies_dict +def generate_fabfile_metadata( elem, requirements_dict, proprietary=False ): + """ + <proprietary_fabfile name="fabfile.py"> + <method name="install_and_build"> + <param name="download_url">ftp://emboss.open-bio.org/pub/EMBOSS/old/5.0.0/EMBOSS-5.0.0.tar.gz</param> + <param name="download_url">ftp://emboss.open-bio.org/pub/EMBOSS/old/5.0.0/PHYLIP-3.6b.tar.gz</param> + </method> + </proprietary_fabfile> + """ + fabfiles_dict = {} + fabfile_name = elem.get( 'name', None ) + if fabfile_name: + for method_elem in elem.findall( 'method' ): + method_name = method_elem.get( 'name', None ) + if method_name: + params_str = '' + for param_elem in method_elem.findall( 'param' ): + param_name = param_elem.get( 'name', None ) + param_value = param_elem.text + if param_name and param_value: + params_str += '%s=%s,' % ( param_name, param_value ) + fabfiles_dict[ 'fabfile' ] = fabfile_name + fabfiles_dict[ 'method' ] = 
method_name + fabfiles_dict[ 'params' ] = params_str.rstrip( ',' ) + if fabfiles_dict: + if proprietary: + key = 'proprietary_fabfiles' + else: + key = 'fabfiles' + requirements_dict[ key ] = fabfiles_dict + return requirements_dict +def generate_metadata_using_disk_files( toolbox, relative_install_dir, repository_clone_url ): + """generate metadata using only the repository files on disk - files are not retrieved from the repository manifest.""" metadata_dict = {} - sample_files = [] - datatypes_config = None - # Find datatypes_conf.xml if it exists. - for root, dirs, files in os.walk( relative_install_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name == 'datatypes_conf.xml': - relative_path = os.path.join( root, name ) - datatypes_config = os.path.abspath( relative_path ) - break + tool_dependencies_config = None + datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir ) if datatypes_config: - metadata_dict[ 'datatypes_config' ] = relative_path metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict ) - # Find all special .sample files. - for root, dirs, files in os.walk( relative_install_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name.endswith( '.sample' ): - sample_files.append( os.path.join( root, name ) ) + sample_files = get_sample_files_from_disk( relative_install_dir ) if sample_files: metadata_dict[ 'sample_files' ] = sample_files # Find all tool configs and exported workflows. @@ -421,7 +499,7 @@ dirs.remove( '.hg' ) for name in files: # Find all tool configs. - if name != 'datatypes_conf.xml' and name.endswith( '.xml' ): + if name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ): full_path = os.path.abspath( os.path.join( root, name ) ) if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ] or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ): @@ -450,6 +528,10 @@ exported_workflow_dict = from_json_string( workflow_text ) if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ) + # This step must be done after metadata for tools has been defined. + tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir ) + if tool_dependencies_config: + metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict ) return metadata_dict def generate_tool_guid( repository_clone_url, tool ): """ @@ -485,7 +567,7 @@ outputs = [] for output in ttb.outputs: name, file_name, extra = output - outputs.append( ( name, os.path.split( file_name )[ 1 ] ) ) + outputs.append( ( name, strip_path( file_name ) ) ) test_dict = dict( name=ttb.name, required_files=required_files, inputs=inputs, @@ -597,7 +679,7 @@ for tool_dict in metadata[ 'tools' ]: guid = tool_dict[ 'guid' ] tool_config = tool_dict[ 'tool_config' ] - file_path, file_name = os.path.split( tool_config ) + file_name = strip_path( tool_config ) guids_and_configs[ guid ] = file_name # Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict. 
tree = util.parse_xml( shed_tool_conf ) @@ -638,7 +720,7 @@ {<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}]} """ tool_panel_dict = {} - file_path, file_name = os.path.split( tool_config ) + file_name = strip_path( tool_config ) tool_section_dicts = generate_tool_section_dicts( tool_config=file_name, tool_sections=tool_sections ) tool_panel_dict[ guid ] = tool_section_dicts return tool_panel_dict @@ -680,6 +762,52 @@ else: metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ] return metadata_dict +def get_changectx_for_changeset( repo, changeset_revision, **kwd ): + """Retrieve a specified changectx from a repository""" + for changeset in repo.changelog: + ctx = repo.changectx( changeset ) + if str( ctx ) == changeset_revision: + return ctx + return None +def get_config( config_file, repo, repo_dir, ctx, dir ): + """Return config_filename if it exists in some changeset of the repository.""" + # First look on disk. + for root, dirs, files in os.walk( repo_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name == config_file: + dest_file_name = os.path.join( dir, name ) + shutil.copy( os.path.abspath( os.path.join( root, name ) ), dest_file_name ) + return os.path.abspath( dest_file_name ) + # Next look in the current change set. + for filename in ctx: + ctx_file_name = strip_path( filename ) + if ctx_file_name == config_file: + return get_named_tmpfile_from_ctx( ctx, filename, dir=dir ) + # Finally look in the repository manifest. + for changeset in repo.changelog: + prev_ctx = repo.changectx( changeset ) + for ctx_file in prev_ctx.files(): + ctx_file_name = strip_path( ctx_file ) + if ctx_file_name == config_file: + return get_named_tmpfile_from_ctx( prev_ctx, ctx_file, dir=dir ) + return None +def get_config_from_disk( config_file, relative_install_dir ): + for root, dirs, files in os.walk( relative_install_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name == config_file: + return os.path.abspath( os.path.join( root, name ) ) + return None +def get_config_from_repository( app, config_file, repository, changeset_revision, dir, install_dir=None ): + """Return config_filename if it exists in some changeset of the repository using only the repository and changeset_revision.""" + if install_dir is None: + install_dir = repository.repo_path( app ) + repo_files_dir = os.path.join( install_dir, repository.name ) + repo = hg.repository( get_configured_ui(), repo_files_dir ) + ctx = get_changectx_for_changeset( repo, changeset_revision ) + config = get_config( config_file, repo, repo_files_dir, ctx, dir ) + return config def get_configured_ui(): # Configure any desired ui settings. _ui = ui.ui() @@ -701,11 +829,11 @@ for converter in elem.findall( 'converter' ): converter_config = converter.get( 'file', None ) if converter_config: - relative_head, relative_tail = os.path.split( converter_config ) + converter_config_file_name = strip_path( converter_config ) for root, dirs, files in os.walk( relative_install_dir ): if root.find( '.hg' ) < 0: for name in files: - if name == relative_tail: + if name == converter_config_file_name: # The value of converter_path must be absolute due to job_working_directory. 
converter_path = os.path.abspath( root ) break @@ -718,11 +846,11 @@ for display_app in elem.findall( 'display' ): display_config = display_app.get( 'file', None ) if display_config: - relative_head, relative_tail = os.path.split( display_config ) + display_config_file_name = strip_path( display_config ) for root, dirs, files in os.walk( relative_install_dir ): if root.find( '.hg' ) < 0: for name in files: - if name == relative_tail: + if name == display_config_file_name: # The value of display_path must be absolute due to job_working_directory. display_path = os.path.abspath( root ) break @@ -737,18 +865,6 @@ ctx_rev = response.read() response.close() return ctx_rev -def get_shed_tool_conf_dict( app, shed_tool_conf ): - """ - Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry - in the shed_tool_conf_dict associated with the file. - """ - for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ): - if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]: - return index, shed_tool_conf_dict - else: - file_path, file_name = os.path.split( shed_tool_conf_dict[ 'config_filename' ] ) - if shed_tool_conf == file_name: - return index, shed_tool_conf_dict def get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ): sa_session = app.model.context.current if tool_shed.find( '//' ) > 0: @@ -779,6 +895,26 @@ if tool: repository_tools_tups.append( ( relative_path, guid, tool ) ) return repository_tools_tups +def get_sample_files_from_disk( relative_install_dir ): + sample_files = [] + for root, dirs, files in os.walk( relative_install_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name.endswith( '.sample' ): + sample_files.append( os.path.join( root, name ) ) + return sample_files +def get_shed_tool_conf_dict( app, shed_tool_conf ): + """ + Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry + in the shed_tool_conf_dict associated with the file. + """ + for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ): + if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]: + return index, shed_tool_conf_dict + else: + file_name = strip_path( shed_tool_conf_dict[ 'config_filename' ] ) + if shed_tool_conf == file_name: + return index, shed_tool_conf_dict def get_tool_panel_config_tool_path_install_dir( app, repository ): # Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the # repository is installed. This method assumes all repository tools are defined in a single shed-related tool panel config. @@ -868,7 +1004,7 @@ def handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups ): """ Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the - tool_data_table_conf.xml file. + tool_data_table_conf.xml file. This method is called only from Galaxy (not the tool shed) when a repository is being installed. """ missing_data_table_entry = False for index, repository_tools_tup in enumerate( repository_tools_tups ): @@ -880,8 +1016,8 @@ sample_file = None # The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository. 
for sample_file in sample_files: - head, tail = os.path.split( sample_file ) - if tail == 'tool_data_table_conf.xml.sample': + sample_file_name = strip_path( sample_file ) + if sample_file_name == 'tool_data_table_conf.xml.sample': break error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_file ) if error: @@ -892,18 +1028,21 @@ repository_tools_tups[ index ] = ( tup_path, guid, repository_tool ) return repository_tools_tups def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups ): - """Inspect each tool to see if it has any input parameters that are dynamically generated select lists that depend on a .loc file.""" + """ + Inspect each tool to see if it has any input parameters that are dynamically generated select lists that depend on a .loc file. + This method is not called from the tool shed, but from Galaxy when a repository is being installed. + """ sample_files_copied = [] for index, repository_tools_tup in enumerate( repository_tools_tups ): tup_path, guid, repository_tool = repository_tools_tup params_with_missing_index_file = repository_tool.params_with_missing_index_file for param in params_with_missing_index_file: options = param.options - missing_file_path, missing_file_name = os.path.split( options.missing_index_file ) + missing_file_name = strip_path( options.missing_index_file ) if missing_file_name not in sample_files_copied: # The repository must contain the required xxx.loc.sample file. for sample_file in sample_files: - sample_file_path, sample_file_name = os.path.split( sample_file ) + sample_file_name = strip_path( sample_file ) if sample_file_name == '%s.sample' % missing_file_name: copy_sample_file( app, sample_file ) if options.tool_data_table and options.tool_data_table.missing_index_file: @@ -916,10 +1055,11 @@ return repository_tools_tups, sample_files_copied def handle_sample_tool_data_table_conf_file( app, filename ): """ - Parse the incoming filename and add new entries to the in-memory - app.tool_data_tables dictionary as well as appending them to the + Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary as well as appending them to the shed's tool_data_table_conf.xml file on disk. """ + # TODO: Load an in-memory version of the tool_data_table_conf.xml file, and write it to disk + # from the in-memory version only when changes are made. error = False message = '' try: @@ -927,6 +1067,8 @@ except Exception, e: message = str( e ) error = True + """ + # TODO: eliminate this - the shed should not need to write this to disk... if not error: # Add an entry to the end of the tool_data_table_conf.xml file. tdt_config = "%s/tool_data_table_conf.xml" % app.config.root @@ -953,42 +1095,32 @@ else: message = "The required file named tool_data_table_conf.xml does not exist in the Galaxy install directory." error = True + """ return error, message -def handle_tool_dependencies( current_working_dir, repo_files_dir, repository_tools_tups ): +def handle_tool_dependencies( app, tool_shed_repository, installed_changeset_revision, tool_dependencies_config ): """ - Inspect each tool to see if it includes a "requirement" that refers to a fabric - script. For those that do, execute the fabric script to install tool dependencies. + Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can refer to installation + methods in Galaxy's tool_dependencies module or to proprietary fabric scripts contained in the repository. 
Future enhancements + to handling tool dependencies may provide installation processes in addition to fabric-based processes. + + The dependencies will be installed in: + ~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repository_owner>/<repository_name>/<installed_changeset_revision> """ - for index, repository_tools_tup in enumerate( repository_tools_tups ): - tup_path, guid, repository_tool = repository_tools_tup - for requirement in repository_tool.requirements: - if requirement.type == 'fabfile': - log.debug( 'Executing fabric script to install dependencies for tool "%s"...' % repository_tool.name ) - fabfile = requirement.fabfile - method = requirement.method - # Find the relative path to the fabfile. - relative_fabfile_path = None - for root, dirs, files in os.walk( repo_files_dir ): - for name in files: - if name == fabfile: - relative_fabfile_path = os.path.join( root, name ) - break - if relative_fabfile_path: - # cmd will look something like: fab -f fabfile.py install_bowtie - cmd = 'fab -f %s %s' % ( relative_fabfile_path, method ) - tmp_name = tempfile.NamedTemporaryFile().name - tmp_stderr = open( tmp_name, 'wb' ) - os.chdir( repo_files_dir ) - proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() ) - returncode = proc.wait() - os.chdir( current_working_dir ) - tmp_stderr.close() - if returncode != 0: - # TODO: do something more here than logging the problem. - tmp_stderr = open( tmp_name, 'rb' ) - error = tmp_stderr.read() - tmp_stderr.close() - log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) ) + status = 'ok' + message = '' + # Parse the tool_dependencies.xml config. + tree = ElementTree.parse( tool_dependencies_config ) + root = tree.getroot() + ElementInclude.include( root ) + fabric_version_checked = False + for elem in root: + if elem.tag == 'package': + error_message = install_package( app, elem, tool_shed_repository, installed_changeset_revision ) + if error_message: + message += ' %s' % error_message + if message: + status = 'error' + return status, message def handle_tool_versions( app, tool_version_dicts, tool_shed_repository ): """ Using the list of tool_version_dicts retrieved from the tool shed (one per changeset revision up to the currently installed changeset revision), @@ -1019,7 +1151,13 @@ def load_datatype_items( app, repository, relative_install_dir, deactivate=False ): # Load proprietary datatypes.
metadata = repository.metadata - datatypes_config = metadata.get( 'datatypes_config', None ) + work_dir = make_tmp_directory() + datatypes_config = get_config_from_repository( app, + 'datatypes_conf.xml', + repository, + repository.changeset_revision, + work_dir, + install_dir=relative_install_dir ) if datatypes_config: converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=deactivate ) if converter_path or display_path: @@ -1037,14 +1175,18 @@ if display_path: # Load or deactivate proprietary datatype display applications app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict, deactivate=deactivate ) + try: + shutil.rmtree( work_dir ) + except: + pass def load_repository_contents( trans, repository_name, description, owner, changeset_revision, ctx_rev, tool_path, repository_clone_url, - relative_install_dir, current_working_dir, tool_shed=None, tool_section=None, shed_tool_conf=None, - install_tool_dependencies=False ): - """Generate the metadata for the installed tool shed repository, among other things.""" - # It is critical that the installed repository is updated to the desired changeset_revision before metadata is set because the - # process for setting metadata uses the repository files on disk. This method is called when an admin is installing a new repository - # or reinstalling an uninstalled repository. - metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url ) + relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None, install_tool_dependencies=False ): + """ + Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed) + when an admin is installing a new repository or reinstalling an uninstalled repository. + """ + message = '' + metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url ) # Add a new record to the tool_shed_repository table if one doesn't already exist. If one exists but is marked deleted, undelete it. This # must happen before the call to add_to_tool_panel() below because tools will not be properly loaded if the repository is marked deleted. log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % repository_name ) @@ -1060,6 +1202,7 @@ tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section ) repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict ) if repository_tools_tups: + work_dir = make_tmp_directory() sample_files = metadata_dict.get( 'sample_files', [] ) # Handle missing data table entries for tool parameters that are dynamically generated select lists. repository_tools_tups = handle_missing_data_table_entry( trans.app, tool_path, sample_files, repository_tools_tups ) @@ -1067,7 +1210,21 @@ repository_tools_tups, sample_files_copied = handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups ) # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance. 
copy_sample_files( trans.app, sample_files, sample_files_copied=sample_files_copied ) - handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups ) + if install_tool_dependencies and 'tool_dependencies' in metadata_dict: + # Get the tool_dependencies.xml file from the repository. + tool_dependencies_config = get_config_from_repository( trans.app, + 'tool_dependencies.xml', + tool_shed_repository, + changeset_revision, + work_dir ) + # Install dependencies for repository tools. + status, message = handle_tool_dependencies( app=trans.app, + tool_shed_repository=tool_shed_repository, + installed_changeset_revision=changeset_revision, + tool_dependencies_config=tool_dependencies_config ) + if status != 'ok' and message: + print 'The following error occurred from load_repository_contents while installing tool dependencies:' + print message add_to_tool_panel( app=trans.app, repository_name=repository_name, repository_clone_url=repository_clone_url, @@ -1077,8 +1234,17 @@ shed_tool_conf=shed_tool_conf, tool_panel_dict=tool_panel_dict, new_install=True ) - if 'datatypes_config' in metadata_dict: - datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] ) + try: + shutil.rmtree( work_dir ) + except: + pass + if 'datatypes' in metadata_dict: + work_dir = make_tmp_directory() + datatypes_config = get_config_from_repository( trans.app, + 'datatypes_conf.xml', + tool_shed_repository, + changeset_revision, + work_dir ) # Load data types required by tools. converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, relative_install_dir, override=False ) if converter_path or display_path: @@ -1096,7 +1262,22 @@ if display_path: # Load proprietary datatype display applications trans.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict ) - return tool_shed_repository, metadata_dict + try: + shutil.rmtree( work_dir ) + except: + pass + return tool_shed_repository, metadata_dict, message +def make_tmp_directory(): + tmp_dir = os.getenv( 'TMPDIR', '' ) + if tmp_dir: + tmp_dir = tmp_dir.strip() + else: + home_dir = os.getenv( 'HOME' ) + tmp_dir = os.path.join( home_dir, 'tmp' ) + work_dir = os.path.join( tmp_dir, 'work_tmp' ) + if not os.path.exists( work_dir ): + os.makedirs( work_dir ) + return work_dir def panel_entry_per_tool( tool_section_dict ): # Return True if tool_section_dict looks like this. # {<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}]} @@ -1157,6 +1338,10 @@ trans.sa_session.flush() # Create a list of guids for all tools that will be removed from the in-memory tool panel and config file on disk. guids_to_remove = [ k for k in tool_panel_dict.keys() ] + # Remove the tools from the toolbox's tools_by_id dictionary. + for guid_to_remove in guids_to_remove: + if guid_to_remove in trans.app.toolbox.tools_by_id: + del trans.app.toolbox.tools_by_id[ guid_to_remove ] index, shed_tool_conf_dict = get_shed_tool_conf_dict( trans.app, shed_tool_conf ) if uninstall: # Remove from the shed_tool_conf file on disk.
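To make the dependency installation layout concrete, a minimal sketch (the helper name and the example values are hypothetical) of the directory that handle_tool_dependencies() populates for each package, and that the uninstall handler in admin_toolshed later removes:

import os

def example_tool_dependency_install_dir( app, package_name, package_version, repository, installed_changeset_revision ):
    # Mirrors <app.config.tool_dependency_dir>/<package_name>/<package_version>/<owner>/<name>/<installed_changeset_revision>
    return os.path.join( app.config.tool_dependency_dir,
                         package_name,
                         package_version,
                         repository.owner,
                         repository.name,
                         installed_changeset_revision )

# Hypothetical result: tool_dependencies/bowtie/0.12.7/some_owner/example_repo/abc123def456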
@@ -1194,7 +1379,7 @@ config_elems_to_remove.append( config_elem ) elif config_elem.tag == 'tool': if config_elem.get( 'guid' ) in guids_to_remove: - tool_key = key = 'tool_%s' % str( config_elem.get( 'guid' ) ) + tool_key = 'tool_%s' % str( config_elem.get( 'guid' ) ) if tool_key in trans.app.toolbox.tool_panel: del trans.app.toolbox.tool_panel[ tool_key ] if uninstall: @@ -1213,6 +1398,9 @@ if uninstall: # Write the current in-memory version of the integrated_tool_panel.xml file to disk. trans.app.toolbox.write_integrated_tool_panel_config_file() +def strip_path( fpath ): + file_path, file_name = os.path.split( fpath ) + return file_name def to_html_escaped( text ): """Translates the characters in text to html values""" translated = [] diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -1,13 +1,12 @@ """ Contains functionality needed in every web interface """ -import os, time, logging, re, string, sys, glob, shutil, tempfile, subprocess, binascii +import os, time, logging, re, string, sys, glob, shutil, tempfile, subprocess from datetime import date, datetime, timedelta from time import strftime from galaxy import config, tools, web, util from galaxy.util import inflector from galaxy.util.hash_util import * -from galaxy.util.json import json_fix from galaxy.web import error, form, url_for from galaxy.model.orm import * from galaxy.workflow.modules import * @@ -2463,32 +2462,3 @@ id = trans.security.decode_id( id ) quota = trans.sa_session.query( trans.model.Quota ).get( id ) return quota -def tool_shed_encode( val ): - if isinstance( val, dict ): - value = simplejson.dumps( val ) - else: - value = val - a = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ) - b = binascii.hexlify( value ) - return "%s:%s" % ( a, b ) -def tool_shed_decode( value ): - # Extract and verify hash - a, b = value.split( ":" ) - value = binascii.unhexlify( b ) - test = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ) - assert a == test - # Restore from string - values = None - try: - values = simplejson.loads( value ) - except Exception, e: - log.debug( "Decoding json value from tool shed threw exception: %s" % str( e ) ) - if values is not None: - try: - return json_fix( values ) - except Exception, e: - log.debug( "Fixing decoded json value from tool shed threw exception: %s" % str( e ) ) - fixed_values = values - if values is None: - values = value - return values diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/web/controllers/admin_toolshed.py --- a/lib/galaxy/web/controllers/admin_toolshed.py +++ b/lib/galaxy/web/controllers/admin_toolshed.py @@ -2,6 +2,8 @@ from galaxy.web.controllers.admin import * from galaxy.util.json import from_json_string, to_json_string from galaxy.util.shed_util import * +from galaxy.tool_shed.tool_dependencies.install_util import get_install_dir, not_installed +from galaxy.tool_shed.encoding_util import * from galaxy import eggs, tools eggs.require( 'mercurial' ) @@ -180,21 +182,43 @@ remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk ) repository = get_repository( trans, kwd[ 'id' ] ) shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository ) + repository_install_dir = os.path.abspath ( relative_install_dir ) if params.get( 'deactivate_or_uninstall_repository_button', False 
): if repository.includes_tools: # Handle tool panel alterations. remove_from_tool_panel( trans, repository, shed_tool_conf, uninstall=remove_from_disk_checked ) if repository.includes_datatypes: # Deactivate proprietary datatypes. - load_datatype_items( trans.app, repository, relative_install_dir, deactivate=True ) + load_datatype_items( trans.app, repository, repository_install_dir, deactivate=True ) if remove_from_disk_checked: - # Remove the repository from disk. try: - shutil.rmtree( relative_install_dir ) - log.debug( "Removed repository installation directory: %s" % str( relative_install_dir ) ) + # Remove the repository from disk. + shutil.rmtree( repository_install_dir ) + log.debug( "Removed repository installation directory: %s" % str( repository_install_dir ) ) + removed = True except Exception, e: - log.debug( "Error removing repository installation directory %s: %s" % ( str( relative_install_dir ), str( e ) ) ) - repository.uninstalled = True + log.debug( "Error removing repository installation directory %s: %s" % ( str( repository_install_dir ), str( e ) ) ) + removed = False + if removed: + repository.uninstalled = True + # Remove all installed tool dependencies - this is required when uninstalling the repository. + for tool_dependency in repository.tool_dependencies: + dependency_install_dir = os.path.abspath( os.path.join( trans.app.config.tool_dependency_dir, + tool_dependency.name, + tool_dependency.version, + repository.owner, + repository.name, + tool_dependency.installed_changeset_revision ) ) + try: + shutil.rmtree( dependency_install_dir ) + log.debug( "Removed tool dependency installation directory: %s" % str( dependency_install_dir ) ) + removed = True + except Exception, e: + log.debug( "Error removing tool dependency installation directory %s: %s" % ( str( dependency_install_dir ), str( e ) ) ) + removed = False + if removed: + tool_dependency.uninstalled = True + trans.sa_session.add( tool_dependency ) repository.deleted = True trans.sa_session.add( repository ) trans.sa_session.flush() @@ -229,6 +253,104 @@ return trans.response.send_redirect( url ) @web.expose @web.require_admin + def install_tool_dependencies( self, trans, **kwd ): + """Install dependencies for tools included in the repository when the repository is being installed.""" + message = kwd.get( 'message', '' ) + status = kwd.get( 'status', 'done' ) + tool_shed_url = kwd[ 'tool_shed_url' ] + repo_info_dict = kwd[ 'repo_info_dict' ] + includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) ) + # Decode the encoded repo_info_dict param value. + dict_with_tool_dependencies = tool_shed_decode( repo_info_dict ) + # The repo_info_dict includes tool dependencies which we need to display so the user knows what will be installed. + new_repo_info_dict = {} + for name, repo_info_tuple in dict_with_tool_dependencies.items(): + description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple + # Create a new repo_info_dict by eliminating tool-dependencies from the repo_info_tuple. 
+ new_repo_info_dict[ name ] = ( description, repository_clone_url, changeset_revision, ctx_rev ) + repo_info_dict = tool_shed_encode( new_repo_info_dict ) + install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True ) + return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies.mako', + tool_shed_url=tool_shed_url, + repo_info_dict=repo_info_dict, + dict_with_tool_dependencies=dict_with_tool_dependencies, + includes_tools=includes_tools, + install_tool_dependencies_check_box=install_tool_dependencies_check_box, + message=message, + status=status ) + @web.expose + @web.require_admin + def install_missing_tool_dependencies( self, trans, **kwd ): + """ + Install dependencies for tools included in the repository that were not installed when the repository was installed or that are + being reinstalled after the repository was uninstalled. + """ + reinstalling = util.string_as_bool( kwd.get( 'reinstalling', False ) ) + message = kwd.get( 'message', '' ) + status = kwd.get( 'status', 'done' ) + repository = get_repository( trans, kwd[ 'id' ] ) + install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) ) + if not reinstalling and install_tool_dependencies and kwd.get( 'install_missing_tool_dependencies_button', False ): + shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository ) + repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) ) + # Get the tool_dependencies.xml file from the repository. + work_dir = make_tmp_directory() + tool_dependencies_config = get_config_from_repository( trans.app, + 'tool_dependencies.xml', + repository, + repository.changeset_revision, + work_dir, + install_dir=relative_install_dir ) + status, message = handle_tool_dependencies( app=trans.app, + tool_shed_repository=repository, + installed_changeset_revision=repository.installed_changeset_revision, + tool_dependencies_config=tool_dependencies_config ) + try: + shutil.rmtree( work_dir ) + except: + pass + tool_dependencies_missing = status == 'error' + return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako', + repository=repository, + description=repository.description, + repo_files_dir=repo_files_dir, + tool_dependencies_missing=tool_dependencies_missing, + message=message, + status=status ) + if reinstalling and kwd.get( 'install_missing_tool_dependencies_button', False ): + # The user has been presented the option to install tool dependencies, so redirect to reinstall the repository, sending + # along the user's choice. + return trans.response.send_redirect( web.url_for( controller='admin_toolshed', + action='reinstall_repository', + **kwd ) ) + tool_dependencies = repository.metadata[ 'tool_dependencies' ] + install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True ) + if not reinstalling: + # Filter the tool_dependencies dictionary to eliminate successfully installed dependencies. 
+ filtered_tool_dependencies = {} + for dependency_key, requirements_dict in tool_dependencies.items(): + name = requirements_dict[ 'name' ] + version = requirements_dict[ 'version' ] + install_dir = get_install_dir( trans.app, repository, repository.changeset_revision, name, version ) + if not_installed( install_dir ): + filtered_tool_dependencies[ dependency_key ] = requirements_dict + tool_dependencies = filtered_tool_dependencies + no_changes = kwd.get( 'no_changes', '' ) + no_changes_checked = CheckboxField.is_checked( no_changes ) + new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' ) + tool_panel_section = kwd.get( 'tool_panel_section', '' ) + return trans.fill_template( '/admin/tool_shed_repository/install_missing_tool_dependencies.mako', + repository=repository, + reinstalling=reinstalling, + tool_dependencies=tool_dependencies, + no_changes_checked=no_changes_checked, + new_tool_panel_section=new_tool_panel_section, + tool_panel_section=tool_panel_section, + install_tool_dependencies_check_box=install_tool_dependencies_check_box, + message=message, + status=status ) + @web.expose + @web.require_admin def install_repository( self, trans, **kwd ): if not trans.app.toolbox.shed_tool_confs: message = 'The <b>tool_config_file</b> setting in <b>universe_wsgi.ini</b> must include at least one shed tool configuration file name with a ' @@ -246,10 +368,13 @@ new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' ) tool_panel_section = kwd.get( 'tool_panel_section', '' ) includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) ) + install_tool_dependencies = kwd.get( 'install_tool_dependencies', '' ) if not includes_tools or ( includes_tools and kwd.get( 'select_tool_panel_section_button', False ) ): if includes_tools: + install_tool_dependencies = CheckboxField.is_checked( install_tool_dependencies ) shed_tool_conf = kwd[ 'shed_tool_conf' ] else: + install_tool_dependencies = False # If installing a repository that includes no tools, get the relative tool_path from the file # to which the migrated_tools_config setting points. shed_tool_conf = trans.app.config.migrated_tools_config @@ -261,8 +386,8 @@ tool_path = shed_tool_conf_dict[ 'tool_path' ] break else: - head, tail = os.path.split( config_filename ) - if tail == shed_tool_conf: + file_name = strip_path( config_filename ) + if file_name == shed_tool_conf: tool_path = shed_tool_conf_dict[ 'tool_path' ] break if includes_tools and ( new_tool_panel_section or tool_panel_section ): @@ -290,7 +415,6 @@ # Decode the encoded repo_info_dict param value. repo_info_dict = tool_shed_decode( repo_info_dict ) # Clone the repository to the configured location. 
- current_working_dir = os.getcwd() installed_repository_names = [] for name, repo_info_tuple in repo_info_dict.items(): description, repository_clone_url, changeset_revision, ctx_rev = repo_info_tuple @@ -303,19 +427,22 @@ clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev ) owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) ) tool_shed = clean_tool_shed_url( tool_shed_url ) - tool_shed_repository, metadata_dict = load_repository_contents( trans, - repository_name=name, - description=description, - owner=owner, - changeset_revision=changeset_revision, - ctx_rev=ctx_rev, - tool_path=tool_path, - repository_clone_url=repository_clone_url, - relative_install_dir=relative_install_dir, - current_working_dir=current_working_dir, - tool_shed=tool_shed, - tool_section=tool_section, - shed_tool_conf=shed_tool_conf ) + tool_shed_repository, metadata_dict, error_message = load_repository_contents( trans, + repository_name=name, + description=description, + owner=owner, + changeset_revision=changeset_revision, + ctx_rev=ctx_rev, + tool_path=tool_path, + repository_clone_url=repository_clone_url, + relative_install_dir=relative_install_dir, + tool_shed=tool_shed, + tool_section=tool_section, + shed_tool_conf=shed_tool_conf, + install_tool_dependencies=install_tool_dependencies ) + if error_message: + message += error_message + status = 'error' if 'tools' in metadata_dict: # Get the tool_versions from the tool shed for each tool in the installed change set. url = '%srepository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy&no_reset=true' % \ @@ -335,12 +462,21 @@ if installed_repository_names: installed_repository_names.sort() num_repositories_installed = len( installed_repository_names ) + if install_tool_dependencies: + dependency_str = ' along with tool dependencies' + else: + dependency_str = '' if tool_section: - message += 'Installed %d %s and all tools were loaded into tool panel section <b>%s</b>:<br/>Installed repositories: ' % \ - ( num_repositories_installed, inflector.cond_plural( num_repositories_installed, 'repository' ), tool_section.name ) + message += 'Installed %d %s%s and all tools were loaded into tool panel section <b>%s</b>:<br/>Installed repositories: ' % \ + ( num_repositories_installed, + inflector.cond_plural( num_repositories_installed, 'repository' ), + dependency_str, + tool_section.name ) else: - message += 'Installed %d %s and all tools were loaded into the tool panel outside of any sections.<br/>Installed repositories: ' % \ - ( num_repositories_installed, inflector.cond_plural( num_repositories_installed, 'repository' ) ) + message += 'Installed %d %s%s and all tools were loaded into the tool panel outside of any sections.<br/>Installed repositories: ' % \ - ( num_repositories_installed, + ( num_repositories_installed, + inflector.cond_plural( num_repositories_installed, 'repository' ), + dependency_str ) for i, repo_name in enumerate( installed_repository_names ): if i == len( installed_repository_names ) -1: message += '%s.<br/>' % repo_name @@ -387,6 +523,7 @@ repo_info_dict=repo_info_dict, shed_tool_conf=shed_tool_conf, includes_tools=includes_tools, + install_tool_dependencies=install_tool_dependencies, shed_tool_conf_select_field=shed_tool_conf_select_field, tool_panel_section_select_field=tool_panel_section_select_field, new_tool_panel_section=new_tool_panel_section, @@ -411,26 +548,57 @@ message = "The repository information has been updated."
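Since repo_info_dict travels between the tool shed and Galaxy as a signed string, here is a sketch of the round trip through the tool_shed_encode()/tool_shed_decode() pair now imported from galaxy.tool_shed.encoding_util (the payload values are hypothetical; note that the json round trip restores tuples as lists):

repo_info_dict = { 'example_repo': [ 'a description',
                                     'http://toolshed.example.org/repos/some_owner/example_repo',
                                     'abc123def456',
                                     '7' ] }
encoded = tool_shed_encode( repo_info_dict )   # '<hmac digest>:<hexlified json payload>'
decoded = tool_shed_decode( encoded )          # verifies the digest before restoring the value
assert decoded == repo_info_dict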
elif params.get( 'set_metadata_button', False ): repository_clone_url = generate_clone_url( trans, repository ) - metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url ) + metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url ) if metadata_dict: repository.metadata = metadata_dict trans.sa_session.add( repository ) trans.sa_session.flush() message = "Repository metadata has been reset." + tool_dependencies_missing = False + if repository.includes_tool_dependencies: + # See if any tool dependencies need to be installed, get the tool_dependencies.xml file from the repository. + work_dir = make_tmp_directory() + tool_dependencies_config = get_config_from_repository( trans.app, + 'tool_dependencies.xml', + repository, + repository.changeset_revision, + work_dir, + install_dir=relative_install_dir ) + # Parse the tool_dependencies.xml config. + tree = ElementTree.parse( tool_dependencies_config ) + root = tree.getroot() + ElementInclude.include( root ) + fabric_version_checked = False + for elem in root: + if elem.tag == 'package': + package_name = elem.get( 'name', None ) + package_version = elem.get( 'version', None ) + if package_name and package_version: + install_dir = get_install_dir( trans.app, repository, repository.installed_changeset_revision, package_name, package_version ) + if not_installed( install_dir ): + tool_dependencies_missing = True + break + try: + shutil.rmtree( work_dir ) + except: + pass return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako', repository=repository, description=description, repo_files_dir=repo_files_dir, + tool_dependencies_missing=tool_dependencies_missing, message=message, status=status ) @web.expose @web.require_admin def reinstall_repository( self, trans, **kwd ): + message = kwd.get( 'message', '' ) + status = kwd.get( 'status', 'done' ) repository = get_repository( trans, kwd[ 'id' ] ) no_changes = kwd.get( 'no_changes', '' ) no_changes_checked = CheckboxField.is_checked( no_changes ) + install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) ) shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository ) - current_working_dir = os.getcwd() repository_clone_url = generate_clone_url( trans, repository ) clone_dir = os.path.join( tool_path, self.__generate_tool_path( repository_clone_url, repository.installed_changeset_revision ) ) relative_install_dir = os.path.join( clone_dir, repository.name ) @@ -496,25 +664,31 @@ tool_section = trans.app.toolbox.tool_panel[ section_key ] else: tool_section = None - tool_shed_repository, metadata_dict = load_repository_contents( trans, - repository_name=repository.name, - description=repository.description, - owner=repository.owner, - changeset_revision=repository.installed_changeset_revision, - ctx_rev=ctx_rev, - tool_path=tool_path, - repository_clone_url=repository_clone_url, - relative_install_dir=relative_install_dir, - current_working_dir=current_working_dir, - tool_shed=repository.tool_shed, - tool_section=tool_section, - shed_tool_conf=shed_tool_conf ) + tool_shed_repository, metadata_dict, error_message = load_repository_contents( trans, + repository_name=repository.name, + description=repository.description, + owner=repository.owner, + changeset_revision=repository.installed_changeset_revision, + ctx_rev=ctx_rev, + tool_path=tool_path, + repository_clone_url=repository_clone_url, + 
relative_install_dir=relative_install_dir, + tool_shed=repository.tool_shed, + tool_section=tool_section, + shed_tool_conf=shed_tool_conf, + install_tool_dependencies=install_tool_dependencies ) + if error_message: + message += error_message + status = 'error' repository.uninstalled = False repository.deleted = False trans.sa_session.add( repository ) trans.sa_session.flush() - message = 'The <b>%s</b> repository has been reinstalled.' % repository.name - status = 'done' + if install_tool_dependencies: + dependency_str = ' along with tool dependencies' + else: + dependency_str = ' without tool dependencies' + message += 'The <b>%s</b> repository has been reinstalled%s. ' % ( repository.name, dependency_str ) return trans.response.send_redirect( web.url_for( controller='admin_toolshed', action='browse_repositories', message=message, @@ -605,7 +779,6 @@ if changeset_revision == latest_changeset_revision: message = "The cloned tool shed repository named '%s' is current (there are no updates available)." % name else: - current_working_dir = os.getcwd() shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository ) if relative_install_dir: repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) ) @@ -615,7 +788,7 @@ update_repository( repo, latest_ctx_rev ) # Update the repository metadata. tool_shed = clean_tool_shed_url( tool_shed_url ) - metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url ) + metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url ) repository.metadata = metadata_dict # Update the repository changeset_revision in the database. repository.changeset_revision = latest_changeset_revision diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/web/controllers/workflow.py --- a/lib/galaxy/web/controllers/workflow.py +++ b/lib/galaxy/web/controllers/workflow.py @@ -14,6 +14,7 @@ from galaxy.util.odict import odict from galaxy.util.sanitize_html import sanitize_html from galaxy.util.topsort import topsort, topsort_levels, CycleError +from galaxy.tool_shed.encoding_util import * from galaxy.workflow.modules import * from galaxy import model from galaxy import util diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/webapps/community/config.py --- a/lib/galaxy/webapps/community/config.py +++ b/lib/galaxy/webapps/community/config.py @@ -45,7 +45,7 @@ self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" ) # Tool stuff self.tool_secret = kwargs.get( "tool_secret", "" ) - self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() ) + self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "shed-tool-data" ), os.getcwd() ) self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root ) self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None ) # Location for dependencies @@ -107,10 +107,26 @@ else: return default def check( self ): - # Check that required directories exist - for path in self.root, self.file_path, self.template_path: - if not os.path.isdir( path ): - raise ConfigurationError("Directory does not exist: %s" % path ) + # Check that required directories exist. 
+ paths_to_check = [ self.root, self.file_path, self.tool_data_path, self.template_path ] + for path in paths_to_check: + if path not in [ None, False ] and not os.path.isdir( path ): + try: + os.makedirs( path ) + except Exception, e: + raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) ) + # Create the directories that it makes sense to create. + for path in self.file_path, \ + self.template_cache, \ + os.path.join( self.tool_data_path, 'shared', 'jars' ): + if path not in [ None, False ] and not os.path.isdir( path ): + try: + os.makedirs( path ) + except Exception, e: + raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) ) + # Check that required files exist. + if not os.path.isfile( self.datatypes_config ): + raise ConfigurationError( "File not found: %s" % self.datatypes_config ) def is_admin_user( self, user ): """ Determine if the provided user is listed in `admin_users`. diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/webapps/community/controllers/admin.py --- a/lib/galaxy/webapps/community/controllers/admin.py +++ b/lib/galaxy/webapps/community/controllers/admin.py @@ -3,7 +3,7 @@ from galaxy.model.orm import * from galaxy.web.framework.helpers import time_ago, iff, grids from galaxy.util import inflector -from galaxy.util.shed_util import get_configured_ui +from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui from common import * from repository import RepositoryListGrid, CategoryListGrid @@ -462,27 +462,38 @@ status=status ) ) @web.expose @web.require_admin - def reset_all_repository_metadata( self, trans, **kwd ): + def reset_metadata_on_all_repositories( self, trans, **kwd ): params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - if 'reset_all_repository_metadata_button' in kwd: - count = 0 + if 'reset_metadata_on_all_repositories_button' in kwd: + successful_count = 0 + unsuccessful_count = 0 for repository in trans.sa_session.query( trans.model.Repository ) \ .filter( trans.model.Repository.table.c.deleted == False ): try: - reset_all_repository_metadata( trans, trans.security.encode_id( repository.id ) ) - log.debug( "Reset metadata on repository %s" % repository.name ) - count += 1 + error_message, status = reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) ) + if error_message: + log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, error_message ) ) + unsuccessful_count += 1 + else: + log.debug( "Successfully reset metadata on repository %s" % repository.name ) + successful_count += 1 except Exception, e: - log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) ) - message = "Reset metadata on %d repositories" % count + log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) ) + unsuccessful_count += 1 + message = "Successfully reset metadata on %d %s. " % ( successful_count, + inflector.cond_plural( successful_count, "repository" ) ) + if unsuccessful_count: + message += "Error resetting metadata on %d %s - see the paster log for details. 
" % ( unsuccessful_count, + inflector.cond_plural( unsuccessful_count, + "repository" ) ) trans.response.send_redirect( web.url_for( controller='admin', action='browse_repository_metadata', webapp='community', message=util.sanitize_text( message ), status=status ) ) - return trans.fill_template( '/webapps/community/admin/reset_all_repository_metadata.mako', + return trans.fill_template( '/webapps/community/admin/reset_metadata_on_all_repositories.mako', message=message, status=status ) @web.expose diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/webapps/community/controllers/common.py --- a/lib/galaxy/webapps/community/controllers/common.py +++ b/lib/galaxy/webapps/community/controllers/common.py @@ -5,8 +5,9 @@ from galaxy.tools import * from galaxy.util.json import from_json_string, to_json_string from galaxy.util.hash_util import * -from galaxy.util.shed_util import copy_sample_file, get_configured_ui, generate_datatypes_metadata, generate_tool_metadata, generate_workflow_metadata -from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file, to_html_escaped, to_html_str, update_repository +from galaxy.util.shed_util import copy_sample_file, generate_datatypes_metadata, generate_tool_dependency_metadata, generate_tool_metadata +from galaxy.util.shed_util import generate_workflow_metadata, get_changectx_for_changeset, get_config, get_configured_ui, handle_sample_tool_data_table_conf_file +from galaxy.util.shed_util import make_tmp_directory, NOT_TOOL_CONFIGS, strip_path, to_html_escaped, to_html_str, update_repository from galaxy.web.base.controller import * from galaxy.webapps.community import model from galaxy.model.orm import * @@ -106,17 +107,468 @@ ## ---- Utility methods ------------------------------------------------------- +def add_repository_metadata_tool_versions( trans, id, changeset_revisions ): + # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } + # pairs for each tool in each changeset revision. + for index, changeset_revision in enumerate( changeset_revisions ): + tool_versions_dict = {} + repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + if repository_metadata: + metadata = repository_metadata.metadata + if metadata: + tool_dicts = metadata.get( 'tools', [] ) + if index == 0: + # The first changeset_revision is a special case because it will have no ancestor + # changeset_revisions in which to match tools. The parent tool id for tools in + # the first changeset_revision will be the "old_id" in the tool config. + for tool_dict in tool_dicts: + tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ] + else: + for tool_dict in tool_dicts: + # We have at least 2 changeset revisions to compare tool guids and tool ids. + parent_id = get_parent_id( trans, id, tool_dict[ 'id' ], tool_dict[ 'version' ], tool_dict[ 'guid' ], changeset_revisions[ 0:index ] ) + tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id + if tool_versions_dict: + repository_metadata.tool_versions = tool_versions_dict + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() +def build_changeset_revision_select_field( trans, repository, selected_value=None, add_id_to_name=True ): + """ + Build a SelectField whose options are the changeset_revision + strings of all downloadable_revisions of the received repository.
+ """ + repo = hg.repository( get_configured_ui(), repository.repo_path ) + options = [] + changeset_tups = [] + refresh_on_change_values = [] + for repository_metadata in repository.downloadable_revisions: + changeset_revision = repository_metadata.changeset_revision + ctx = get_changectx_for_changeset( repo, changeset_revision ) + if ctx: + rev = '%04d' % ctx.rev() + label = "%s:%s" % ( str( ctx.rev() ), changeset_revision ) + else: + rev = '-1' + label = "-1:%s" % changeset_revision + changeset_tups.append( ( rev, label, changeset_revision ) ) + refresh_on_change_values.append( changeset_revision ) + # Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time, + # the changeset revisions may not be sorted correctly because setting metadata over time will reset update_time. + for changeset_tup in sorted( changeset_tups ): + # Display the latest revision first. + options.insert( 0, ( changeset_tup[1], changeset_tup[2] ) ) + if add_id_to_name: + name = 'changeset_revision_%d' % repository.id + else: + name = 'changeset_revision' + select_field = SelectField( name=name, + refresh_on_change=True, + refresh_on_change_values=refresh_on_change_values ) + for option_tup in options: + selected = selected_value and option_tup[1] == selected_value + select_field.add_option( option_tup[0], option_tup[1], selected=selected ) + return select_field +def changeset_is_malicious( trans, id, changeset_revision, **kwd ): + """Check the malicious flag in repository metadata for a specified change set""" + repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + if repository_metadata: + return repository_metadata.malicious + return False +def check_file_contents( trans ): + # See if any admin users have chosen to receive email alerts when a repository is updated. + # If so, the file contents of the update must be checked for inappropriate content. + admin_users = trans.app.config.get( "admin_users", "" ).split( "," ) + for repository in trans.sa_session.query( trans.model.Repository ) \ + .filter( trans.model.Repository.table.c.email_alerts != None ): + email_alerts = from_json_string( repository.email_alerts ) + for user_email in email_alerts: + if user_email in admin_users: + return True + return False +def check_tool_input_params( trans, repo, repo_dir, ctx, xml_file_in_ctx, tool, sample_files, invalid_files, tool_data_path, dir ): + """ + Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make + sure the files exist. This method is called only from the tool shed when generating metadata for a specified changeset revision. + """ + can_set_metadata = True + correction_msg = '' + # Keep track of copied files so they can be removed after metadata generation. + sample_files_copied = [] + for input_param in tool.input_params: + if isinstance( input_param, galaxy.tools.parameters.basic.SelectToolParameter ) and input_param.is_dynamic: + # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist. + options = input_param.dynamic_options or input_param.options + if options: + if options.tool_data_table or options.missing_tool_data_table_name: + # Make sure the repository contains a tool_data_table_conf.xml.sample file. 
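A note on the zero-padded revision numbers in build_changeset_revision_select_field() above: formatting each revision with '%04d' makes the plain lexicographic sort of changeset_tups agree with numeric revision order (the values below are hypothetical):

changeset_tups = [ ( '0010', '10:bbb222', 'bbb222' ),
                   ( '0003', '3:aaa111', 'aaa111' ),
                   ( '-1', '-1:ccc333', 'ccc333' ) ]
for rev, label, changeset_revision in sorted( changeset_tups ):
    print label
# Prints -1:ccc333, then 3:aaa111, then 10:bbb222; without the padding, '10' would sort before '3'.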
+ sample_found = False + for sample_file in sample_files: + sample_file_name = strip_path( sample_file ) + if sample_file_name == 'tool_data_table_conf.xml.sample': + sample_found = True + error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, sample_file ) + if error: + can_set_metadata = False + invalid_files.append( ( sample_file_name, correction_msg ) ) + else: + options.missing_tool_data_table_name = None + break + if not sample_found: + can_set_metadata = False + correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. " + correction_msg += "Upload a file named tool_data_table_conf.xml.sample to the repository " + correction_msg += "that includes the required entry to correct this error.<br/>" + invalid_files.append( ( xml_file_in_ctx, correction_msg ) ) + if options.index_file or options.missing_index_file: + # Make sure the repository contains the required xxx.loc.sample file. + index_file = options.index_file or options.missing_index_file + index_file_name = strip_path( index_file ) + sample_found = False + for sample_file in sample_files: + sample_file_name = strip_path( sample_file ) + if sample_file_name == '%s.sample' % index_file_name: + # If sample_file_name is on disk, copy it to dir. + copied_sample_file = copy_file_from_disk( sample_file_name, repo_dir, dir ) + if not copied_sample_file: + # Get sample_file_name from the repository manifest. + copied_sample_file = copy_file_from_manifest( repo, ctx, sample_file_name, dir ) + copy_sample_file( trans.app, copied_sample_file, dest_path=tool_data_path ) + sample_files_copied.append( sample_file_name ) + options.index_file = index_file_name + options.missing_index_file = None + if options.tool_data_table: + options.tool_data_table.missing_index_file = None + sample_found = True + break + if not sample_found: + can_set_metadata = False + correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file ) + correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name ) + invalid_files.append( ( xml_file_in_ctx, correction_msg ) ) + return sample_files_copied, can_set_metadata, invalid_files +def clean_repository_metadata( trans, id, changeset_revisions ): + # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions. + for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \ + .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ): + if repository_metadata.changeset_revision not in changeset_revisions: + trans.sa_session.delete( repository_metadata ) + trans.sa_session.flush() +def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ): + # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of + # current_changeset_revision which is associated with current_metadata_dict. + # + # TODO: a new repository_metadata record will be created only when this method returns the string 'not equal and not subset'. However, + # we're currently also returning the strings 'no metadata', 'equal' and 'subset', depending upon how the 2 change sets compare. We'll + # leave things this way for the current time in case we discover a use for these additional result strings.
+ ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] ) + ancestor_tools = ancestor_metadata_dict.get( 'tools', [] ) + ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ] + ancestor_guids.sort() + ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', [] ) + ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] ) + current_datatypes = current_metadata_dict.get( 'datatypes', [] ) + current_tools = current_metadata_dict.get( 'tools', [] ) + current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ] + current_guids.sort() + current_tool_dependencies = current_metadata_dict.get( 'tool_dependencies', [] ) + current_workflows = current_metadata_dict.get( 'workflows', [] ) + # Handle case where no metadata exists for either changeset. + if not ancestor_guids and not current_guids and not ancestor_workflows and not current_workflows and not ancestor_datatypes and not current_datatypes: + return 'no metadata' + workflow_comparison = compare_workflows( ancestor_workflows, current_workflows ) + datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes ) + # Handle case where all metadata is the same. + if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal': + return 'equal' + if workflow_comparison == 'subset' and datatype_comparison == 'subset': + is_subset = True + for guid in ancestor_guids: + if guid not in current_guids: + is_subset = False + break + if is_subset: + return 'subset' + return 'not equal and not subset' +def compare_datatypes( ancestor_datatypes, current_datatypes ): + # Determine if ancestor_datatypes is the same as current_datatypes + # or if ancestor_datatypes is a subset of current_datatypes. Each + # datatype dict looks something like: + # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"} + if len( ancestor_datatypes ) <= len( current_datatypes ): + for ancestor_datatype in ancestor_datatypes: + # Currently the only way to differentiate datatypes is by name. + ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ] + ancestor_datatype_extension = ancestor_datatype[ 'extension' ] + ancestor_datatype_mimetype = ancestor_datatype.get( 'mimetype', None ) + found_in_current = False + for current_datatype in current_datatypes: + if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \ + current_datatype[ 'extension' ] == ancestor_datatype_extension and \ + current_datatype.get( 'mimetype', None ) == ancestor_datatype_mimetype: + found_in_current = True + break + if not found_in_current: + return 'not equal and not subset' + if len( ancestor_datatypes ) == len( current_datatypes ): + return 'equal' + else: + return 'subset' + return 'not equal and not subset' +def compare_workflows( ancestor_workflows, current_workflows ): + # Determine if ancestor_workflows is the same as current_workflows + # or if ancestor_workflows is a subset of current_workflows. + if len( ancestor_workflows ) <= len( current_workflows ): + for ancestor_workflow_tup in ancestor_workflows: + # ancestor_workflows is a list of tuples where each contained tuple is + # [ <relative path to the .ga file in the repository>, <exported workflow dict> ] + ancestor_workflow_dict = ancestor_workflow_tup[1] + # Currently the only way to differentiate workflows is by name. 
+ ancestor_workflow_name = ancestor_workflow_dict[ 'name' ] + num_ancestor_workflow_steps = len( ancestor_workflow_dict[ 'steps' ] ) + found_in_current = False + for current_workflow_tup in current_workflows: + current_workflow_dict = current_workflow_tup[1] + # Assume that if the name and number of steps are equal, + # then the workflows are the same. Of course, this may + # not be true... + if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps: + found_in_current = True + break + if not found_in_current: + return 'not equal and not subset' + if len( ancestor_workflows ) == len( current_workflows ): + return 'equal' + else: + return 'subset' + return 'not equal and not subset' +def copy_file_from_disk( filename, repo_dir, dir ): + file_path = None + found = False + for root, dirs, files in os.walk( repo_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name == filename: + file_path = os.path.abspath( os.path.join( root, name ) ) + found = True + break + if found: + break + if file_path: + tmp_filename = os.path.join( dir, filename ) + shutil.copy( file_path, tmp_filename ) + else: + tmp_filename = None + return tmp_filename +def copy_file_from_manifest( repo, ctx, filename, dir ): + """Copy a file named filename from somewhere in the repository manifest to the directory to which dir refers.""" + filename = strip_path( filename ) + fctx = None + # First see if the file is in ctx. + for ctx_file in ctx.files(): + ctx_file_name = strip_path( ctx_file ) + if filename == ctx_file_name: + fctx = ctx[ ctx_file ] + else: + # Find the file in the repository manifest. + for changeset in repo.changelog: + prev_ctx = repo.changectx( changeset ) + for ctx_file in prev_ctx.files(): + ctx_file_name = strip_path( ctx_file ) + if filename == ctx_file_name: + fctx = prev_ctx[ ctx_file ] + break + if fctx: + file_path = os.path.join( dir, filename ) + fh = open( file_path, 'wb' ) + fh.write( fctx.data() ) + fh.close() + return file_path + return None +def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ): + repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + if repository_metadata: + # Update RepositoryMetadata.metadata. + repository_metadata.metadata = metadata_dict + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() + else: + # Create a new repository_metadata table row.
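To make the three result strings of the compare_* helpers concrete, a small hypothetical example for compare_datatypes() (compare_workflows() behaves analogously, keyed on workflow name and step count):

ancestor = [ { 'dtype': 'galaxy.datatypes.images:Image', 'extension': 'pdf', 'mimetype': 'application/pdf' } ]
current = ancestor + [ { 'dtype': 'galaxy.datatypes.tabular:Tabular', 'extension': 'bed', 'mimetype': None } ]
print compare_datatypes( ancestor, ancestor )   # 'equal'
print compare_datatypes( ancestor, current )    # 'subset'
print compare_datatypes( current, ancestor )    # 'not equal and not subset'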
+ repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict ) + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() +def decode( value ): + # Extract and verify hash + a, b = value.split( ":" ) + value = binascii.unhexlify( b ) + test = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ) + assert a == test + # Restore from string + try: + values = json_fix( simplejson.loads( value ) ) + except Exception, e: + # We do not have a json string + values = value + return values +def encode( val ): + if isinstance( val, dict ): + value = simplejson.dumps( val ) + else: + value = val + a = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ) + b = binascii.hexlify( value ) + return "%s:%s" % ( a, b ) +def generate_clone_url( trans, repository_id ): + """Generate the URL for cloning a repository.""" + repository = get_repository( trans, repository_id ) + protocol, base = trans.request.base.split( '://' ) + if trans.user: + username = '%s@' % trans.user.username + else: + username = '' + return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name ) +def generate_metadata_for_changeset_revision( trans, repo, id, ctx, changeset_revision, repo_dir, updating_tip=False ): + if updating_tip: + # If a push from the command line is occurring, update the repository files on disk before setting metadata. + update_repository( repo, str( ctx.rev() ) ) + metadata_dict = {} + invalid_files = [] + invalid_tool_configs = [] + original_tool_data_path = trans.app.config.tool_data_path + work_dir = make_tmp_directory() + datatypes_config = get_config( 'datatypes_conf.xml', repo, repo_dir, ctx, work_dir ) + if datatypes_config: + metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict ) + sample_files = get_sample_files( repo, repo_dir, dir=work_dir ) + if sample_files: + trans.app.config.tool_data_path = work_dir + for filename in ctx: + # Find all tool configs. + ctx_file_name = strip_path( filename ) + if ctx_file_name not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ): + valid, tool = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir ) + if valid and tool is not None: + sample_files_copied, can_set_metadata, invalid_files = check_tool_input_params( trans, + repo, + repo_dir, + ctx, + filename, + tool, + sample_files, + invalid_files, + original_tool_data_path, + work_dir ) + if can_set_metadata: + # Update the list of metadata dictionaries for tools in metadata_dict. + repository_clone_url = generate_clone_url( trans, id ) + metadata_dict = generate_tool_metadata( filename, tool, repository_clone_url, metadata_dict ) + else: + invalid_tool_configs.append( ctx_file_name ) + # Remove all copied sample files from both the original tool data path (~/shed-tool-data) and the temporary + # value of trans.app.config.tool_data_path, which is work_dir. 
+ for copied_sample_file in sample_files_copied: + copied_file = copied_sample_file.replace( '.sample', '' ) + try: + os.unlink( os.path.join( trans.app.config.tool_data_path, copied_sample_file ) ) + except: + pass + try: + os.unlink( os.path.join( trans.app.config.tool_data_path, copied_file ) ) + except: + pass + if trans.app.config.tool_data_path == work_dir: + try: + os.unlink( os.path.join( original_tool_data_path, copied_sample_file ) ) + except: + pass + try: + os.unlink( os.path.join( original_tool_data_path, copied_file ) ) + except: + pass + elif tool is not None: + # We have a tool config but it is invalid. + invalid_files.append( ( ctx_file_name, 'Problems loading tool.' ) ) + invalid_tool_configs.append( ctx_file_name ) + # Find all exported workflows. + elif filename.endswith( '.ga' ): + try: + fctx = ctx[ filename ] + workflow_text = fctx.data() + exported_workflow_dict = from_json_string( workflow_text ) + if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': + metadata_dict = generate_workflow_metadata( '', exported_workflow_dict, metadata_dict ) + except Exception, e: + invalid_files.append( ( ctx_file_name, str( e ) ) ) + # Find tool_dependencies.xml if it exists. This step must be done after metadata for tools has been defined. + tool_dependencies_config = get_config( 'tool_dependencies.xml', repo, repo_dir, ctx, work_dir ) + if tool_dependencies_config: + metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict ) + if invalid_tool_configs: + metadata_dict [ 'invalid_tools' ] = invalid_tool_configs + if sample_files: + # Don't forget to reset the value of trans.app.config.tool_data_path! + trans.app.config.tool_data_path = original_tool_data_path + try: + shutil.rmtree( work_dir ) + except: + pass + return metadata_dict, invalid_files +def generate_tool_guid( trans, repository, tool ): + """ + Generate a guid for the received tool. 
The form of the guid is + <tool shed host>/repos/<tool shed username>/<tool shed repo name>/<tool id>/<tool version> + """ + return '%s/repos/%s/%s/%s/%s' % ( trans.request.host, + repository.user.username, + repository.name, + tool.id, + tool.version ) +def get_category( trans, id ): + """Get a category from the database""" + return trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( id ) ) def get_categories( trans ): """Get all categories from the database""" return trans.sa_session.query( trans.model.Category ) \ .filter( trans.model.Category.table.c.deleted==False ) \ .order_by( trans.model.Category.table.c.name ).all() -def get_category( trans, id ): - """Get a category from the database""" - return trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( id ) ) +def get_latest_repository_metadata( trans, id ): + """Get last metadata defined for a specified repository from the database""" + return trans.sa_session.query( trans.model.RepositoryMetadata ) \ + .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \ + .order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \ + .first() +def get_named_tmpfile_from_ctx( ctx, filename, dir=None ): + fctx = ctx[ filename ] + fh = tempfile.NamedTemporaryFile( 'wb', dir=dir ) + tmp_filename = fh.name + fh.close() + fh = open( tmp_filename, 'wb' ) + fh.write( fctx.data() ) + fh.close() + return tmp_filename +def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ): + parent_id = None + # Compare from most recent to oldest. + changeset_revisions.reverse() + for changeset_revision in changeset_revisions: + repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + metadata = repository_metadata.metadata + tools_dicts = metadata.get( 'tools', [] ) + for tool_dict in tools_dicts: + if tool_dict[ 'guid' ] == guid: + # The tool has not changed between the compared changeset revisions. + continue + if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version: + # The tool version is different, so we've found the parent. + return tool_dict[ 'guid' ] + if parent_id is None: + # The tool did not change through all of the changeset revisions. 
+ return old_id def get_repository( trans, id ): """Get a repository from the database via id""" return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) ) +def get_repository_by_name( trans, name ): + """Get a repository from the database via name""" + return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one() def get_repository_by_name_and_owner( trans, name, owner ): """Get a repository from the database via name and owner""" user = get_user_by_username( trans, owner ) @@ -148,648 +600,29 @@ return "%s:%s" % ( str( ctx.rev() ), changeset_revision ) else: return "-1:%s" % changeset_revision -def get_latest_repository_metadata( trans, id ): - """Get last metadata defined for a specified repository from the database""" - return trans.sa_session.query( trans.model.RepositoryMetadata ) \ - .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \ - .order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \ - .first() -def generate_clone_url( trans, repository_id ): - """Generate the URL for cloning a repository.""" - repository = get_repository( trans, repository_id ) - protocol, base = trans.request.base.split( '://' ) - if trans.user: - username = '%s@' % trans.user.username - else: - username = '' - return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name ) -def generate_tool_guid( trans, repository, tool ): - """ - Generate a guid for the received tool. The form of the guid is - <tool shed host>/repos/<tool shed username>/<tool shed repo name>/<tool id>/<tool version> - """ - return '%s/repos/%s/%s/%s/%s' % ( trans.request.host, - repository.user.username, - repository.name, - tool.id, - tool.version ) -def check_tool_input_params( trans, name, tool, sample_files, invalid_files ): - """ - Check all of the tool's input parameters, looking for any that are dynamically generated - using external data files to make sure the files exist. - """ - can_set_metadata = True - correction_msg = '' - for input_param in tool.input_params: - if isinstance( input_param, galaxy.tools.parameters.basic.SelectToolParameter ) and input_param.is_dynamic: - # If the tool refers to .loc files or requires an entry in the - # tool_data_table_conf.xml, make sure all requirements exist. - options = input_param.dynamic_options or input_param.options - if options: - if options.tool_data_table or options.missing_tool_data_table_name: - # Make sure the repository contains a tool_data_table_conf.xml.sample file. - sample_found = False - for sample_file in sample_files: - head, tail = os.path.split( sample_file ) - if tail == 'tool_data_table_conf.xml.sample': - sample_found = True - error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, sample_file ) - if error: - can_set_metadata = False - invalid_files.append( ( tail, correction_msg ) ) - else: - options.missing_tool_data_table_name = None - break - if not sample_found: - can_set_metadata = False - correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. " - correction_msg += "Upload a file named tool_data_table_conf.xml.sample to the repository " - correction_msg += "that includes the required entry to correct this error.<br/>" - invalid_files.append( ( name, correction_msg ) ) - if options.index_file or options.missing_index_file: - # Make sure the repository contains the required xxx.loc.sample file. 
- index_file = options.index_file or options.missing_index_file - index_file_path, index_file_name = os.path.split( index_file ) - sample_found = False - for sample_file in sample_files: - sample_file_path, sample_file_name = os.path.split( sample_file ) - if sample_file_name == '%s.sample' % index_file_name: - copy_sample_file( trans.app, sample_file ) - options.index_file = index_file_name - options.missing_index_file = None - if options.tool_data_table: - options.tool_data_table.missing_index_file = None - sample_found = True - break - if not sample_found: - can_set_metadata = False - correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file ) - correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name ) - invalid_files.append( ( name, correction_msg ) ) - return can_set_metadata, invalid_files -def new_tool_metadata_required( trans, id, metadata_dict ): - """ - Compare the last saved metadata for each tool in the repository with the new metadata - in metadata_dict to determine if a new repository_metadata table record is required, or - if the last saved metadata record can be updated instead. - """ - if 'tools' in metadata_dict: - repository_metadata = get_latest_repository_metadata( trans, id ) - if repository_metadata: - metadata = repository_metadata.metadata - if metadata and 'tools' in metadata: - saved_tool_ids = [] - # The metadata for one or more tools was successfully generated in the past - # for this repository, so we first compare the version string for each tool id - # in metadata_dict with what was previously saved to see if we need to create - # a new table record or if we can simply update the existing record. - for new_tool_metadata_dict in metadata_dict[ 'tools' ]: - for saved_tool_metadata_dict in metadata[ 'tools' ]: - if saved_tool_metadata_dict[ 'id' ] not in saved_tool_ids: - saved_tool_ids.append( saved_tool_metadata_dict[ 'id' ] ) - if new_tool_metadata_dict[ 'id' ] == saved_tool_metadata_dict[ 'id' ]: - if new_tool_metadata_dict[ 'version' ] != saved_tool_metadata_dict[ 'version' ]: - return True - # So far, a new metadata record is not required, but we still have to check to see if - # any new tool ids exist in metadata_dict that are not in the saved metadata. We do - # this because if a new tarball was uploaded to a repository that included tools, it - # may have removed existing tool files if they were not included in the uploaded tarball. - for new_tool_metadata_dict in metadata_dict[ 'tools' ]: - if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids: - return True - else: - # We have repository metadata that does not include metadata for any tools in the - # repository, so we can update the existing repository metadata. - return False - else: - # There is no saved repository metadata, so we need to create a new repository_metadata - # table record. - return True - # The received metadata_dict includes no metadata for tools, so a new repository_metadata table - # record is not needed. - return False -def new_workflow_metadata_required( trans, id, metadata_dict ): - """ - Currently everything about an exported workflow except the name is hard-coded, so there's - no real way to differentiate versions of exported workflows. If this changes at some future - time, this method should be enhanced accordingly.
- """ - if 'workflows' in metadata_dict: - repository_metadata = get_latest_repository_metadata( trans, id ) - if repository_metadata: - if repository_metadata.metadata: - # The repository has metadata, so update the workflows value - no new record is needed. - return False - else: - # There is no saved repository metadata, so we need to create a new repository_metadata table record. - return True - # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed. - return False -def generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo, repo_dir ): - """ - Browse the repository tip files on disk to generate metadata. This is faster than the - generate_metadata_for_changeset_revision() method below because fctx.data() does not have - to be written to disk to load tools. We also handle things like .loc.sample files and - invalid_tool_configs here, while they are ignored in older revisions. - """ - # If a push from the command line is occurring, update the repository files on disk before setting metadata. - update_repository( repo, str( ctx.rev() ) ) - metadata_dict = {} - invalid_files = [] - invalid_tool_configs = [] +def get_sample_files( repo, repo_dir, dir ): + """Return a list of all files in the repository with the special .sample extension""" sample_files = [] - datatypes_config = None - # Find datatypes_conf.xml if it exists. + # Copy all discovered sample files to dir, and the caller will set the value of app.config.tool_data_path to dir + # in order to load the tools and generate metadata for them. First look on disk. for root, dirs, files in os.walk( repo_dir ): if root.find( '.hg' ) < 0: for name in files: - if name == 'datatypes_conf.xml': - datatypes_config = os.path.abspath( os.path.join( root, name ) ) - break - if datatypes_config: - metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict ) - # Find all special .sample files. - for root, dirs, files in os.walk( repo_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name.endswith( '.sample' ): - sample_files.append( os.path.abspath( os.path.join( root, name ) ) ) - # Find all tool configs and exported workflows. - for root, dirs, files in os.walk( repo_dir ): - if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0: - if '.hg' in dirs: - dirs.remove( '.hg' ) - for name in files: - # Find all tool configs. - if name != 'datatypes_conf.xml' and name.endswith( '.xml' ): - full_path = os.path.abspath( os.path.join( root, name ) ) - if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ] - or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ): - try: - # Make sure we're looking at a tool config and not a display application config or something else. - element_tree = util.parse_xml( full_path ) - element_tree_root = element_tree.getroot() - is_tool = element_tree_root.tag == 'tool' - except Exception, e: - log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) ) - is_tool = False - if is_tool: - try: - tool = load_tool( trans, full_path ) - valid = True - except Exception, e: - valid = False - invalid_files.append( ( name, str( e ) ) ) - invalid_tool_configs.append( name ) - if valid and tool is not None: - can_set_metadata, invalid_files = check_tool_input_params( trans, name, tool, sample_files, invalid_files ) - if can_set_metadata: - # Update the list of metadata dictionaries for tools in metadata_dict. 
- tool_config = os.path.join( root, name ) - repository_clone_url = generate_clone_url( trans, id ) - metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ) - else: - invalid_tool_configs.append( name ) - # Find all exported workflows - elif name.endswith( '.ga' ): - try: - relative_path = os.path.join( root, name ) - # Convert workflow data from json - fp = open( relative_path, 'rb' ) - workflow_text = fp.read() - fp.close() - exported_workflow_dict = from_json_string( workflow_text ) - if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': - metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ) - except Exception, e: - invalid_files.append( ( name, str( e ) ) ) - if invalid_tool_configs: - metadata_dict[ 'invalid_tools' ] = invalid_tool_configs - return metadata_dict, invalid_files -def generate_metadata_for_changeset_revision( trans, id, ctx, changeset_revision, repo_dir ): - # Browse repository files within a change set to generate metadata. - metadata_dict = {} - invalid_files = [] - sample_files = [] - tmp_datatypes_config = None - # Find datatypes_conf.xml if it exists. - for filename in ctx: - if filename == 'datatypes_conf.xml': - fctx = ctx[ filename ] - # Write the contents of datatypes_config.xml to a temporary file. - fh = tempfile.NamedTemporaryFile( 'w' ) - tmp_datatypes_config = fh.name - fh.close() - fh = open( tmp_datatypes_config, 'w' ) - fh.write( fctx.data() ) - fh.close() - break - if tmp_datatypes_config: - metadata_dict = generate_datatypes_metadata( tmp_datatypes_config, metadata_dict ) - try: - os.unlink( tmp_datatypes_config ) - except: - pass - # Get all tool config file names from the hgweb url, something like: - # /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml - for filename in ctx: - # Find all tool configs. - if filename != 'datatypes_conf.xml' and filename.endswith( '.xml' ): - fctx = ctx[ filename ] - # Write the contents of the old tool config to a temporary file. - # TODO: figure out how to enhance the load_tool method so that a - # temporary disk file is not necessary in order to pass the tool - # config. - fh = tempfile.NamedTemporaryFile( 'w' ) - tmp_filename = fh.name - fh.close() - fh = open( tmp_filename, 'w' ) - fh.write( fctx.data() ) - fh.close() - if not ( check_binary( tmp_filename ) or check_image( tmp_filename ) or check_gzip( tmp_filename )[ 0 ] - or check_bz2( tmp_filename )[ 0 ] or check_zip( tmp_filename ) ): - try: - # Make sure we're looking at a tool config and not a display application config or something else. - element_tree = util.parse_xml( tmp_filename ) - element_tree_root = element_tree.getroot() - is_tool = element_tree_root.tag == 'tool' - except: - is_tool = False - if is_tool: - try: - tool = load_tool( trans, tmp_filename ) - valid = True - except Exception, e: - invalid_files.append( ( filename, str( e ) ) ) - valid = False - if valid and tool is not None: - # Update the list of metadata dictionaries for tools in metadata_dict. Note that filename - # here is the relative path to the config file within the change set context, something - # like filtering.xml, but when the change set was the repository tip, the value was - # something like database/community_files/000/repo_1/filtering.xml. This shouldn't break - # anything, but may result in a bit of confusion when maintaining the code / data over time. 
- # IMPORTANT NOTE: Here we are assuming that since the current change set is not the repository - # tip, we do not have to handle any .loc.sample files since they would have been handled previously. - repository_clone_url = generate_clone_url( trans, id ) - metadata_dict = generate_tool_metadata( filename, tool, repository_clone_url, metadata_dict ) - try: - os.unlink( tmp_filename ) - except: - pass - # Find all exported workflows. - elif filename.endswith( '.ga' ): - try: - fctx = ctx[ filename ] - workflow_text = fctx.data() - exported_workflow_dict = from_json_string( workflow_text ) - if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': - metadata_dict = generate_workflow_metadata( '', exported_workflow_dict, metadata_dict ) - except Exception, e: - invalid_files.append( ( name, str( e ) ) ) - return metadata_dict, invalid_files -def set_repository_metadata( trans, id, changeset_revision, content_alert_str='', **kwd ): - """Set repository metadata""" - message = '' - status = 'done' - repository = get_repository( trans, id ) - repo_dir = repository.repo_path - repo = hg.repository( get_configured_ui(), repo_dir ) - ctx = get_changectx_for_changeset( repo, changeset_revision ) - metadata_dict = {} - invalid_files = [] - if ctx is not None: - if changeset_revision == repository.tip: - metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo, repo_dir ) - else: - metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, id, ctx, changeset_revision, repo_dir ) - if metadata_dict: - if changeset_revision == repository.tip: - if new_tool_metadata_required( trans, id, metadata_dict ) or new_workflow_metadata_required( trans, id, metadata_dict ): - # Create a new repository_metadata table row. - repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict ) - trans.sa_session.add( repository_metadata ) - try: - trans.sa_session.flush() - # If this is the first record stored for this repository, see if we need to send any email alerts. - if len( repository.downloadable_revisions ) == 1: - handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False ) - except TypeError, e: - message = "Unable to save metadata for this repository probably due to a tool config file that doesn't conform to the Cheetah template syntax." - status = 'error' - else: - repository_metadata = get_latest_repository_metadata( trans, id ) - if repository_metadata: - # Update the last saved repository_metadata table row. - repository_metadata.changeset_revision = changeset_revision - repository_metadata.metadata = metadata_dict - trans.sa_session.add( repository_metadata ) - try: - trans.sa_session.flush() - except TypeError, e: - message = "Unable to save metadata for this repository probably due to a tool config file that doesn't conform to the Cheetah template syntax." - status = 'error' - else: - # There are no tools in the repository, and we're setting metadata on the repository tip. - repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict ) - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() - else: - # We're re-generating metadata for an old repository revision. 
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) - repository_metadata.metadata = metadata_dict - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() - else: - message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( changeset_revision ) - message += "be defined so this revision cannot be automatically installed into a local Galaxy instance." - status = "error" - else: - # change_set is None - message = "This repository does not include revision '%s'." % str( changeset_revision ) - status = 'error' - if invalid_files: - if metadata_dict: - message = "Metadata was defined for some items in revision '%s'. " % str( changeset_revision ) - message += "Correct the following problems if necessary and reset metadata.<br/>" - else: - message = "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( changeset_revision ) - message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.<br/>" - for itc_tup in invalid_files: - tool_file, exception_msg = itc_tup - if exception_msg.find( 'No such file or directory' ) >= 0: - exception_items = exception_msg.split() - missing_file_items = exception_items[7].split( '/' ) - missing_file = missing_file_items[-1].rstrip( '\'' ) - if missing_file.endswith( '.loc' ): - sample_ext = '%s.sample' % missing_file - else: - sample_ext = missing_file - correction_msg = "This file refers to a missing file <b>%s</b>. " % str( missing_file ) - correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_ext - else: - correction_msg = exception_msg - message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg ) - status = 'error' - return message, status -def reset_all_repository_metadata( trans, id, **kwd ): - params = util.Params( kwd ) - message = util.restore_text( params.get( 'message', '' ) ) - status = params.get( 'status', 'done' ) - repository = get_repository( trans, id ) - log.debug( "Resetting all metadata on repository: %s" % repository.name ) - repo_dir = repository.repo_path - repo = hg.repository( get_configured_ui(), repo_dir ) - if len( repo ) == 1: - message, status = set_repository_metadata( trans, id, repository.tip, **kwd ) - add_repository_metadata_tool_versions( trans, id, [ repository.tip ] ) - else: - # The list of changeset_revisions refers to repository_metadata records that have been - # created or updated. When the following loop completes, we'll delete all repository_metadata - # records for this repository that do not have a changeset_revision value in this list. - changeset_revisions = [] - ancestor_changeset_revision = None - ancestor_metadata_dict = None - for changeset in repo.changelog: - current_changeset_revision = str( repo.changectx( changeset ) ) - ctx = get_changectx_for_changeset( repo, current_changeset_revision ) - if current_changeset_revision == repository.tip: - current_metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, current_changeset_revision, repo, repo_dir ) - else: - current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, id, ctx, current_changeset_revision, repo_dir ) - if current_metadata_dict: - if ancestor_changeset_revision: - # Compare metadata from ancestor and current. 
The value of comparison will be one of: - # 'no metadata' - no metadata for either ancestor or current, so continue from current - # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current - # 'subset' - ancestor metadata is a subset of current metadata, so continue from current - # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current - # metadata, so persist ancestor metadata. - comparison = compare_changeset_revisions( ancestor_changeset_revision, - ancestor_metadata_dict, - current_changeset_revision, - current_metadata_dict ) - if comparison in [ 'no metadata', 'equal', 'subset' ]: - ancestor_changeset_revision = current_changeset_revision - ancestor_metadata_dict = current_metadata_dict - elif comparison == 'not equal and not subset': - create_or_update_repository_metadata( trans, id, repository, ancestor_changeset_revision, ancestor_metadata_dict ) - # Keep track of the changeset_revisions that we've persisted. - changeset_revisions.append( ancestor_changeset_revision ) - ancestor_changeset_revision = current_changeset_revision - ancestor_metadata_dict = current_metadata_dict - else: - # We're either at the first change set in the change log or we have just created or updated - # a repository_metadata record. At this point we set the ancestor changeset to the current - # changeset for comparison in the next iteration. - ancestor_changeset_revision = current_changeset_revision - ancestor_metadata_dict = current_metadata_dict - if not ctx.children(): - # We're at the end of the change log. - create_or_update_repository_metadata( trans, id, repository, current_changeset_revision, current_metadata_dict ) - changeset_revisions.append( current_changeset_revision ) - ancestor_changeset_revision = None - ancestor_metadata_dict = None - elif ancestor_metadata_dict: - # Our current change set has no metadata, but our ancestor change set has metadata, so save it. - create_or_update_repository_metadata( trans, id, repository, ancestor_changeset_revision, ancestor_metadata_dict ) - # Keep track of the changeset_revisions that we've persisted. - changeset_revisions.append( ancestor_changeset_revision ) - ancestor_changeset_revision = None - ancestor_metadata_dict = None - clean_repository_metadata( trans, id, changeset_revisions ) - add_repository_metadata_tool_versions( trans, id, changeset_revisions ) -def clean_repository_metadata( trans, id, changeset_revisions ): - # Delete all repository_metadata records associated with the repository - # that have a changeset_revision that is not in changeset_revisions. - for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \ - .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ): - if repository_metadata.changeset_revision not in changeset_revisions: - trans.sa_session.delete( repository_metadata ) - trans.sa_session.flush() -def add_repository_metadata_tool_versions( trans, id, changeset_revisions ): - # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } - # pairs for each tool in each changeset revision.
- for index, changeset_revision in enumerate( changeset_revisions ): - tool_versions_dict = {} - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - tool_dicts = metadata.get( 'tools', [] ) - if index == 0: - # The first changeset_revision is a special case because it will have no ancestor - # changeset_revisions in which to match tools. The parent tool id for tools in - # the first changeset_revision will be the "old_id" in the tool config. - for tool_dict in tool_dicts: - tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ] - else: - for tool_dict in tool_dicts: - # We have at least 2 changeset revisions to compare tool guids and tool ids. - parent_id = get_parent_id( trans, id, tool_dict[ 'id' ], tool_dict[ 'version' ], tool_dict[ 'guid' ], changeset_revisions[ 0:index ] ) - tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id - if tool_versions_dict: - repository_metadata.tool_versions = tool_versions_dict - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() -def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ): - parent_id = None - # Compare from most recent to oldest. - changeset_revisions.reverse() - for changeset_revision in changeset_revisions: - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) - metadata = repository_metadata.metadata - tools_dicts = metadata.get( 'tools', [] ) - for tool_dict in tools_dicts: - if tool_dict[ 'guid' ] == guid: - # The tool has not changed between the compared changeset revisions. - continue - if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version: - # The tool version is different, so we've found the parent. - return tool_dict[ 'guid' ] - if parent_id is None: - # The tool did not change through all of the changeset revisions. - return old_id -def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ): - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) - if repository_metadata: - # Update RepositoryMetadata.metadata. - repository_metadata.metadata = metadata_dict - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() - else: - # Create a new repository_metadata table row. - repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict ) - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() -def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ): - # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision - # is an ancestor of current_changeset_revision which is associated with current_metadata_dict. - # - # TODO: a new repository_metadata record will be created only when this method returns the string - # 'not equal and not subset'. However, we're currently also returning the strings 'no metadata', - # 'equal' and 'subset', depending upon how the 2 change sets compare. We'll leave things this way - # for the current time in case we discover a use for these additional result strings. - # - # Get information about tools.
- if 'tools' in ancestor_metadata_dict: - ancestor_tools = ancestor_metadata_dict[ 'tools' ] - else: - ancestor_tools = [] - if 'tools' in current_metadata_dict: - current_tools = current_metadata_dict[ 'tools' ] - else: - current_tools = [] - ancestor_guids = [] - for tool_dict in ancestor_tools: - ancestor_guids.append( tool_dict[ 'guid' ] ) - ancestor_guids.sort() - current_guids = [] - for tool_dict in current_tools: - current_guids.append( tool_dict[ 'guid' ] ) - current_guids.sort() - # Get information about workflows. - if 'workflows' in ancestor_metadata_dict: - ancestor_workflows = ancestor_metadata_dict[ 'workflows' ] - else: - ancestor_workflows = [] - if 'workflows' in current_metadata_dict: - current_workflows = current_metadata_dict[ 'workflows' ] - else: - current_workflows = [] - # Get information about datatypes. - if 'datatypes' in ancestor_metadata_dict: - ancestor_datatypes = ancestor_metadata_dict[ 'datatypes' ] - else: - ancestor_datatypes = [] - if 'datatypes' in current_metadata_dict: - current_datatypes = current_metadata_dict[ 'datatypes' ] - else: - current_datatypes = [] - # Handle case where no metadata exists for either changeset. - if not ancestor_guids and not current_guids and not ancestor_workflows and not current_workflows and not ancestor_datatypes and not current_datatypes: - return 'no metadata' - workflow_comparison = compare_workflows( ancestor_workflows, current_workflows ) - datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes ) - # Handle case where all metadata is the same. - if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal': - return 'equal' - if workflow_comparison == 'subset' and datatype_comparison == 'subset': - is_subset = True - for guid in ancestor_guids: - if guid not in current_guids: - is_subset = False - break - if is_subset: - return 'subset' - return 'not equal and not subset' -def compare_workflows( ancestor_workflows, current_workflows ): - # Determine if ancestor_workflows is the same as current_workflows - # or if ancestor_workflows is a subset of current_workflows. - if len( ancestor_workflows ) <= len( current_workflows ): - for ancestor_workflow_tup in ancestor_workflows: - # ancestor_workflows is a list of tuples where each contained tuple is - # [ <relative path to the .ga file in the repository>, <exported workflow dict> ] - ancestor_workflow_dict = ancestor_workflow_tup[1] - # Currently the only way to differentiate workflows is by name. - ancestor_workflow_name = ancestor_workflow_dict[ 'name' ] - num_ancestor_workflow_steps = len( ancestor_workflow_dict[ 'steps' ] ) - found_in_current = False - for current_workflow_tup in current_workflows: - current_workflow_dict = current_workflow_tup[1] - # Assume that if the name and number of steps are equal, - # then the workflows are the same. Of course, this may - # not be true... - if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps: - found_in_current = True - break - if not found_in_current: - return 'not equal and not subset' - if len( ancestor_workflows ) == len( current_workflows ): - return 'equal' - else: - return 'subset' - return 'not equal and not subset' -def compare_datatypes( ancestor_datatypes, current_datatypes ): - # Determine if ancestor_datatypes is the same as current_datatypes - # or if ancestor_datatypes is a subset of current_datatypes.
Each - # datatype dict looks something like: - # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"} - if len( ancestor_datatypes ) <= len( current_datatypes ): - for ancestor_datatype in ancestor_datatypes: - # Currently the only way to differentiate datatypes is by name. - ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ] - ancestor_datatype_extension = ancestor_datatype[ 'extension' ] - ancestor_datatype_mimetype = ancestor_datatype.get( 'mimetype', None ) - found_in_current = False - for current_datatype in current_datatypes: - if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \ - current_datatype[ 'extension' ] == ancestor_datatype_extension and \ - current_datatype.get( 'mimetype', None ) == ancestor_datatype_mimetype: - found_in_current = True - break - if not found_in_current: - return 'not equal and not subset' - if len( ancestor_datatypes ) == len( current_datatypes ): - return 'equal' - else: - return 'subset' - return 'not equal and not subset' -def get_repository_by_name( trans, name ): - """Get a repository from the database via name""" - return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one() -def get_changectx_for_changeset( repo, changeset_revision, **kwd ): - """Retrieve a specified changectx from a repository""" + if name.endswith( '.sample' ) and name not in sample_files: + new_name = name.replace( '.sample', '' ) + file_path = os.path.join( dir, new_name ) + shutil.copy( os.path.abspath( os.path.join( root, name ) ), file_path ) + sample_files.append( name ) + # Next look in the repository manifest. for changeset in repo.changelog: ctx = repo.changectx( changeset ) - if str( ctx ) == changeset_revision: - return ctx - return None -def change_set_is_malicious( trans, id, changeset_revision, **kwd ): - """Check the malicious flag in repository metadata for a specified change set""" - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) - if repository_metadata: - return repository_metadata.malicious - return False + for ctx_file in ctx.files(): + ctx_file_name = strip_path( ctx_file ) + if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files: + new_ctx_file_name = ctx_file_name.replace( '.sample', '' ) + copy_file_from_manifest( repo, ctx, ctx_file, dir ) + sample_files.append( ctx_file_name ) + return sample_files def get_user( trans, id ): """Get a user from the database by id""" return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) ) @@ -880,17 +713,349 @@ util.send_mail( frm, to, subject, body, trans.app.config ) except Exception, e: log.exception( "An error occurred sending a tool shed repository update alert by email." ) -def check_file_contents( trans ): - # See if any admin users have chosen to receive email alerts when a repository is updated. - # If so, the file contents of the update must be checked for inappropriate content. 
- admin_users = trans.app.config.get( "admin_users", "" ).split( "," ) - for repository in trans.sa_session.query( trans.model.Repository ) \ - .filter( trans.model.Repository.table.c.email_alerts != None ): - email_alerts = from_json_string( repository.email_alerts ) - for user_email in email_alerts: - if user_email in admin_users: - return True +def load_tool( trans, config_file ): + """Load a single tool from the file named by `config_file` and return an instance of `Tool`.""" + # Parse XML configuration file and get the root element + tree = util.parse_xml( config_file ) + root = tree.getroot() + if root.tag == 'tool': + # Allow specifying a different tool subclass to instantiate + if root.find( "type" ) is not None: + type_elem = root.find( "type" ) + module = type_elem.get( 'module', 'galaxy.tools' ) + cls = type_elem.get( 'class' ) + mod = __import__( module, globals(), locals(), [cls] ) + ToolClass = getattr( mod, cls ) + elif root.get( 'tool_type', None ) is not None: + ToolClass = tool_types.get( root.get( 'tool_type' ) ) + else: + ToolClass = Tool + return ToolClass( config_file, root, trans.app ) + return None +def load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config ): + tool_config = strip_path( tool_config ) + repository = get_repository( trans, repository_id ) + repo_files_dir = repository.repo_path + repo = hg.repository( get_configured_ui(), repo_files_dir ) + tool = None + message = '' + work_dir = make_tmp_directory() + if changeset_revision == repository.tip: + try: + copied_tool_config = copy_file_from_disk( tool_config, repo_files_dir, work_dir ) + tool = load_tool( trans, copied_tool_config ) + except Exception, e: + tool = None + message = "Error loading tool: %s." % str( e ) + else: + # Get the tool config file name from the hgweb url, something like: /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml + old_tool_config_file_name = tool_config.split( '/' )[ -1 ] + ctx = get_changectx_for_changeset( repo, changeset_revision ) + in_ctx = False + for ctx_file in ctx.files(): + ctx_file_name = strip_path( ctx_file ) + if ctx_file_name == old_tool_config_file_name: + in_ctx = True + break + if in_ctx: + tmp_tool_config = get_named_tmpfile_from_ctx( ctx, ctx_file, dir=work_dir ) + element_tree = util.parse_xml( tmp_tool_config ) + element_tree_root = element_tree.getroot() + # Look for code files required by the tool config. + tmp_code_files = [] + for code_elem in element_tree_root.findall( 'code' ): + code_file_name = code_elem.get( 'file' ) + tmp_code_file_name = copy_file_from_manifest( repo, ctx, code_file_name, work_dir ) + if tmp_code_file_name: + tmp_code_files.append( tmp_code_file_name ) + try: + tool = load_tool( trans, tmp_tool_config ) + except Exception, e: + tool = None + message = "Error loading tool: %s. Clicking <b>Reset metadata</b> may correct this error."
% str( e ) + for tmp_code_file in tmp_code_files: + try: + os.unlink( tmp_code_file ) + except: + pass + try: + os.unlink( tmp_tool_config ) + except: + pass + else: + tool = None + try: + shutil.rmtree( work_dir ) + except: + pass + return tool, message +def load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, dir ): + tool = None + valid = False + tmp_config = get_named_tmpfile_from_ctx( ctx, filename, dir=dir ) + if not ( check_binary( tmp_config ) or check_image( tmp_config ) or check_gzip( tmp_config )[ 0 ] + or check_bz2( tmp_config )[ 0 ] or check_zip( tmp_config ) ): + try: + # Make sure we're looking at a tool config and not a display application config or something else. + element_tree = util.parse_xml( tmp_config ) + element_tree_root = element_tree.getroot() + is_tool = element_tree_root.tag == 'tool' + except Exception, e: + log.debug( "Error parsing %s, exception: %s" % ( tmp_config, str( e ) ) ) + is_tool = False + if is_tool: + # Look for code files required by the tool config. The directory to which dir refers should be removed by the caller. + for code_elem in element_tree_root.findall( 'code' ): + code_file_name = code_elem.get( 'file' ) + if not os.path.exists( os.path.join( dir, code_file_name ) ): + tmp_code_file_name = copy_file_from_disk( code_file_name, repo_dir, dir ) + if tmp_code_file_name is None: + tmp_code_file_name = copy_file_from_manifest( repo, ctx, code_file_name, dir ) + try: + tool = load_tool( trans, tmp_config ) + valid = True + except: + valid = False + return valid, tool +def new_tool_metadata_required( trans, id, metadata_dict ): + """ + Compare the last saved metadata for each tool in the repository with the new metadata + in metadata_dict to determine if a new repository_metadata table record is required, or + if the last saved metadata record can be updated instead. + """ + if 'tools' in metadata_dict: + repository_metadata = get_latest_repository_metadata( trans, id ) + if repository_metadata: + metadata = repository_metadata.metadata + if metadata and 'tools' in metadata: + saved_tool_ids = [] + # The metadata for one or more tools was successfully generated in the past + # for this repository, so we first compare the version string for each tool id + # in metadata_dict with what was previously saved to see if we need to create + # a new table record or if we can simply update the existing record. + for new_tool_metadata_dict in metadata_dict[ 'tools' ]: + for saved_tool_metadata_dict in metadata[ 'tools' ]: + if saved_tool_metadata_dict[ 'id' ] not in saved_tool_ids: + saved_tool_ids.append( saved_tool_metadata_dict[ 'id' ] ) + if new_tool_metadata_dict[ 'id' ] == saved_tool_metadata_dict[ 'id' ]: + if new_tool_metadata_dict[ 'version' ] != saved_tool_metadata_dict[ 'version' ]: + return True + # So far, a new metadata record is not required, but we still have to check to see if + # any new tool ids exist in metadata_dict that are not in the saved metadata. We do + # this because if a new tarball was uploaded to a repository that included tools, it + # may have removed existing tool files if they were not included in the uploaded tarball. + for new_tool_metadata_dict in metadata_dict[ 'tools' ]: + if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids: + return True + else: + # We have repository metadata that does not include metadata for any tools in the + # repository, so we can update the existing repository metadata.
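+ # For example, a hypothetical repository whose saved metadata describes only datatypes or workflows falls through
+ # to here, since there is no saved tool metadata to compare against.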
+ return False + else: + # There is no saved repository metadata, so we need to create a new repository_metadata + # table record. + return True + # The received metadata_dict includes no metadata for tools, so a new repository_metadata table + # record is not needed. return False +def new_workflow_metadata_required( trans, id, metadata_dict ): + """ + Currently everything about an exported workflow except the name is hard-coded, so there's + no real way to differentiate versions of exported workflows. If this changes at some future + time, this method should be enhanced accordingly. + """ + if 'workflows' in metadata_dict: + repository_metadata = get_latest_repository_metadata( trans, id ) + if repository_metadata: + if repository_metadata.metadata: + # The repository has metadata, so update the workflows value - no new record is needed. + return False + else: + # There is no saved repository metadata, so we need to create a new repository_metadata table record. + return True + # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed. + return False +def reset_all_metadata_on_repository( trans, id, **kwd ): + params = util.Params( kwd ) + message = util.restore_text( params.get( 'message', '' ) ) + status = params.get( 'status', 'done' ) + repository = get_repository( trans, id ) + log.debug( "Resetting all metadata on repository: %s" % repository.name ) + repo_dir = repository.repo_path + repo = hg.repository( get_configured_ui(), repo_dir ) + if len( repo ) == 1: + error_message, status = set_repository_metadata( trans, id, repository.tip, **kwd ) + if error_message: + return error_message, status + else: + add_repository_metadata_tool_versions( trans, id, [ repository.tip ] ) + else: + # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop + # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list. + changeset_revisions = [] + ancestor_changeset_revision = None + ancestor_metadata_dict = None + for changeset in repo.changelog: + current_changeset_revision = str( repo.changectx( changeset ) ) + ctx = get_changectx_for_changeset( repo, current_changeset_revision ) + current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, + repo, + id, + ctx, + current_changeset_revision, + repo_dir, + updating_tip=current_changeset_revision==repository.tip ) + if current_metadata_dict: + if ancestor_changeset_revision: + # Compare metadata from ancestor and current. The value of comparison will be one of: + # 'no metadata' - no metadata for either ancestor or current, so continue from current + # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current + # 'subset' - ancestor metadata is a subset of current metadata, so continue from current + # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current + # metadata, so persist ancestor metadata.
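+ # Hypothetical example: if the ancestor's tool guids, workflows and datatypes all reappear unchanged in the
+ # current revision, the comparison below returns 'equal' and we continue walking forward from current.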
+ comparison = compare_changeset_revisions( ancestor_changeset_revision, + ancestor_metadata_dict, + current_changeset_revision, + current_metadata_dict ) + if comparison in [ 'no metadata', 'equal', 'subset' ]: + ancestor_changeset_revision = current_changeset_revision + ancestor_metadata_dict = current_metadata_dict + elif comparison == 'not equal and not subset': + create_or_update_repository_metadata( trans, id, repository, ancestor_changeset_revision, ancestor_metadata_dict ) + # Keep track of the changeset_revisions that we've persisted. + changeset_revisions.append( ancestor_changeset_revision ) + ancestor_changeset_revision = current_changeset_revision + ancestor_metadata_dict = current_metadata_dict + else: + # We're either at the first change set in the change log or we have just created or updated + # a repository_metadata record. At this point we set the ancestor changeset to the current + # changeset for comparison in the next iteration. + ancestor_changeset_revision = current_changeset_revision + ancestor_metadata_dict = current_metadata_dict + if not ctx.children(): + # We're at the end of the change log. + create_or_update_repository_metadata( trans, id, repository, current_changeset_revision, current_metadata_dict ) + changeset_revisions.append( current_changeset_revision ) + ancestor_changeset_revision = None + ancestor_metadata_dict = None + elif ancestor_metadata_dict: + # Our current change set has no metadata, but our ancestor change set has metadata, so save it. + create_or_update_repository_metadata( trans, id, repository, ancestor_changeset_revision, ancestor_metadata_dict ) + # Keep track of the changeset_revisions that we've persisted. + changeset_revisions.append( ancestor_changeset_revision ) + ancestor_changeset_revision = None + ancestor_metadata_dict = None + clean_repository_metadata( trans, id, changeset_revisions ) + add_repository_metadata_tool_versions( trans, id, changeset_revisions ) + return '', 'ok' +def set_repository_metadata( trans, id, changeset_revision, content_alert_str='', **kwd ): + """ + Set repository metadata on the repository tip, returning specific error messages (if any) to alert the repository owner that the changeset + has problems. + """ + message = '' + status = 'done' + repository = get_repository( trans, id ) + repo_dir = repository.repo_path + repo = hg.repository( get_configured_ui(), repo_dir ) + ctx = get_changectx_for_changeset( repo, changeset_revision ) + metadata_dict = {} + invalid_files = [] + updating_tip = changeset_revision == repository.tip + if ctx is not None: + metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, + repo, + id, + ctx, + changeset_revision, + repo_dir, + updating_tip=updating_tip ) + if metadata_dict: + if updating_tip: + if new_tool_metadata_required( trans, id, metadata_dict ) or new_workflow_metadata_required( trans, id, metadata_dict ): + # Create a new repository_metadata table row. + repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict ) + trans.sa_session.add( repository_metadata ) + try: + trans.sa_session.flush() + # If this is the first record stored for this repository, see if we need to send any email alerts. 
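+ # A brand new repository should have exactly one downloadable revision immediately after this first flush,
+ # which is what the length check below relies on.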
+ if len( repository.downloadable_revisions ) == 1: + handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False ) + except TypeError, e: + message = "Unable to save metadata for this repository probably due to a tool config file that doesn't conform to the Cheetah template syntax." + status = 'error' + else: + repository_metadata = get_latest_repository_metadata( trans, id ) + if repository_metadata: + # Update the last saved repository_metadata table row. + repository_metadata.changeset_revision = changeset_revision + repository_metadata.metadata = metadata_dict + trans.sa_session.add( repository_metadata ) + try: + trans.sa_session.flush() + except TypeError, e: + message = "Unable to save metadata for this repository probably due to a tool config file that doesn't conform to the Cheetah template syntax." + status = 'error' + else: + # There are no tools in the repository, and we're setting metadata on the repository tip. + repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict ) + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() + else: + # We're re-generating metadata for an old repository revision. + repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + repository_metadata.metadata = metadata_dict + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() + elif not invalid_files: + message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( changeset_revision ) + message += "be defined so this revision cannot be automatically installed into a local Galaxy instance." + status = "error" + else: + # Here ctx is None. + message = "This repository does not include revision '%s'." % str( changeset_revision ) + status = 'error' + if invalid_files: + if metadata_dict: + message = "Metadata was defined for some items in revision '%s'. " % str( changeset_revision ) + message += "Correct the following problems if necessary and reset metadata.<br/>" + else: + message = "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( changeset_revision ) + message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.<br/>" + for itc_tup in invalid_files: + tool_file, exception_msg = itc_tup + if exception_msg.find( 'No such file or directory' ) >= 0: + exception_items = exception_msg.split() + missing_file_items = exception_items[ 7 ].split( '/' ) + missing_file = missing_file_items[ -1 ].rstrip( '\'' ) + if missing_file.endswith( '.loc' ): + sample_ext = '%s.sample' % missing_file + else: + sample_ext = missing_file + correction_msg = "This file refers to a missing file <b>%s</b>. " % str( missing_file ) + correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_ext + else: + correction_msg = exception_msg + message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg ) + status = 'error' + return message, status +def set_repository_metadata_due_to_new_tip( trans, id, repository, content_alert_str=None, **kwd ): + message = util.restore_text( kwd.get( 'message', '' ) ) + # Set metadata on the repository tip. 
+ error_message, status = set_repository_metadata( trans, id, repository.tip, content_alert_str=content_alert_str, **kwd ) + if not error_message: + # If no error occurred in setting metadata on the repository tip, reset metadata on all changeset revisions for the repository. + # This will result in a more standardized set of valid repository revisions that can be installed. + error_message, status = reset_all_metadata_on_repository( trans, id, **kwd ) + if error_message: + # If there is an error, display it. + message = '%s<br/>%s' % ( message, error_message ) + status = 'error' + return trans.response.send_redirect( web.url_for( controller='repository', + action='manage_repository', + id=id, + message=message, + status=status ) ) def update_for_browsing( trans, repository, current_working_dir, commit_message='' ): # This method is deprecated, but we'll keep it around for a while in case we need it. The problem is that hg purge # is not supported by the mercurial API.
% str( e ) - try: - os.unlink( tmp_filename ) - except: - pass - else: - tool = None - return tool, message -def build_changeset_revision_select_field( trans, repository, selected_value=None, add_id_to_name=True ): - """ - Build a SelectField whose options are the changeset_revision - strings of all downloadable_revisions of the received repository. - """ - repo = hg.repository( get_configured_ui(), repository.repo_path ) - options = [] - changeset_tups = [] - refresh_on_change_values = [] - for repository_metadata in repository.downloadable_revisions: - changeset_revision = repository_metadata.changeset_revision - ctx = get_changectx_for_changeset( repo, changeset_revision ) - if ctx: - rev = '%04d' % ctx.rev() - label = "%s:%s" % ( str( ctx.rev() ), changeset_revision ) - else: - rev = '-1' - label = "-1:%s" % changeset_revision - changeset_tups.append( ( rev, label, changeset_revision ) ) - refresh_on_change_values.append( changeset_revision ) - # Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time, - # the changeset revisions may not be sorted correctly because setting metadata over time will reset update_time. - for changeset_tup in sorted( changeset_tups ): - # Display the latest revision first. - options.insert( 0, ( changeset_tup[1], changeset_tup[2] ) ) - if add_id_to_name: - name = 'changeset_revision_%d' % repository.id - else: - name = 'changeset_revision' - select_field = SelectField( name=name, - refresh_on_change=True, - refresh_on_change_values=refresh_on_change_values ) - for option_tup in options: - selected = selected_value and option_tup[1] == selected_value - select_field.add_option( option_tup[0], option_tup[1], selected=selected ) - return select_field -def encode( val ): - if isinstance( val, dict ): - value = simplejson.dumps( val ) - else: - value = val - a = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ) - b = binascii.hexlify( value ) - return "%s:%s" % ( a, b ) -def decode( value ): - # Extract and verify hash - a, b = value.split( ":" ) - value = binascii.unhexlify( b ) - test = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ) - assert a == test - # Restore from string - try: - values = json_fix( simplejson.loads( value ) ) - except Exception, e: - # We do not have a json string - values = value - return values diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/webapps/community/controllers/hg.py --- a/lib/galaxy/webapps/community/controllers/hg.py +++ b/lib/galaxy/webapps/community/controllers/hg.py @@ -26,7 +26,9 @@ owner, name = path_info.split( '/' ) repository = get_repository_by_name_and_owner( trans, name, owner ) if repository: - reset_all_repository_metadata( trans, trans.security.encode_id( repository.id ) ) + error_message, status = reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) ) + if error_message: + log.debug( "Error resetting all metadata on repository '%s': %s" % ( str( repository.name ), str( error_message ) ) ) return wsgi_app def make_web_app(): diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/webapps/community/controllers/repository.py --- a/lib/galaxy/webapps/community/controllers/repository.py +++ b/lib/galaxy/webapps/community/controllers/repository.py @@ -10,7 +10,7 @@ from galaxy.web.framework.helpers import time_ago, iff, grids from galaxy.util.json import from_json_string, to_json_string from galaxy.model.orm import * -from 
galaxy.util.shed_util import get_configured_ui +from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, make_tmp_directory, NOT_TOOL_CONFIGS, strip_path from common import * from galaxy import eggs @@ -21,6 +21,7 @@ MAX_CONTENT_SIZE = 32768 VALID_REPOSITORYNAME_RE = re.compile( "^[a-z0-9\_]+$" ) +README_FILES = [ 'readme', 'read_me', 'install' ] class CategoryListGrid( grids.Grid ): class NameColumn( grids.TextColumn ): @@ -805,19 +806,36 @@ repository = get_repository( trans, repository_id ) changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip ) ) repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) - # Tell the caller if the repository includes Galaxy tools so the page - # enabling selection of the tool panel section can be displayed. - includes_tools = 'tools' in repository_metadata.metadata + metadata = repository_metadata.metadata + # Tell the caller if the repository includes Galaxy tools so the page enabling selection of the tool panel section can be displayed. + includes_tools = 'tools' in metadata + includes_tool_dependencies = 'tool_dependencies' in metadata # Get the changelog rev for this changeset_revision. repo_dir = repository.repo_path repo = hg.repository( get_configured_ui(), repo_dir ) ctx = get_changectx_for_changeset( repo, changeset_revision ) repo_info_dict = {} - repo_info_dict[ repository.name ] = ( repository.description, repository_clone_url, changeset_revision, str( ctx.rev() ) ) + if includes_tool_dependencies: + repo_info_dict[ repository.name ] = ( repository.description, + repository_clone_url, + changeset_revision, + str( ctx.rev() ), + repository.user.username, + metadata[ 'tool_dependencies' ] ) + else: + repo_info_dict[ repository.name ] = ( repository.description, + repository_clone_url, + changeset_revision, + str( ctx.rev() ) ) encoded_repo_info_dict = encode( repo_info_dict ) - # Redirect back to local Galaxy to perform install. - url = '%sadmin_toolshed/install_repository?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \ - ( galaxy_url, url_for( '/', qualified=True ), encoded_repo_info_dict, str( includes_tools ) ) + if includes_tool_dependencies: + # Redirect back to local Galaxy to present the option to install tool dependencies. + url = '%sadmin_toolshed/install_tool_dependencies?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \ + ( galaxy_url, url_for( '/', qualified=True ), encoded_repo_info_dict, str( includes_tools ) ) + else: + # Redirect back to local Galaxy to perform install. 
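+ # The result is something like this hypothetical example:
+ # http://localhost:8080/admin_toolshed/install_repository?tool_shed_url=http://toolshed.example.org/&repo_info_dict=<encoded>&includes_tools=True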
+ url = '%sadmin_toolshed/install_repository?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \ + ( galaxy_url, url_for( '/', qualified=True ), encoded_repo_info_dict, str( includes_tools ) ) return trans.response.send_redirect( url ) @web.expose def get_ctx_rev( self, trans, **kwd ): @@ -838,11 +856,15 @@ repository_name = kwd[ 'name' ] repository_owner = kwd[ 'owner' ] changeset_revision = kwd[ 'changeset_revision' ] + valid_filenames = [ r for r in README_FILES ] + for r in README_FILES: + valid_filenames.append( '%s.txt' % r ) + valid_filenames.append( '%s.txt' % repository_name ) repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner ) repo_dir = repository.repo_path for root, dirs, files in os.walk( repo_dir ): for name in files: - if name.lower() in [ 'readme', 'readme.txt', 'read_me', 'read_me.txt', '%s.txt' % repository_name ]: + if name.lower() in valid_filenames: f = open( os.path.join( root, name ), 'r' ) text = f.read() f.close() @@ -851,9 +873,8 @@ @web.expose def get_tool_versions( self, trans, **kwd ): """ - For each valid /downloadable change set (up to the received changeset_revision) in the - repository's change log, append the change set's tool_versions dictionary to the list - that will be returned. + For each valid /downloadable change set (up to the received changeset_revision) in the repository's change log, append the change + set's tool_versions dictionary to the list that will be returned. """ name = kwd[ 'name' ] owner = kwd[ 'owner' ] @@ -909,59 +930,33 @@ trans.security.encode_id( repository.id ), changeset_revision ) if repository_metadata: - # If changeset_revision is in the repository_metadata table for this - # repository, then we know there are no additional updates for the tools. + # If changeset_revision is in the repository_metadata table for this repository, then we know there are no additional updates + # for the tools. if from_update_manager: return no_update else: # Return the same value for changeset_revision and latest_changeset_revision. url += changeset_revision else: - # The changeset_revision column in the repository_metadata table has been - # updated with a new changeset_revision value since the repository was cloned. - # Load each tool in the repository's changeset_revision to generate a list of - # tool guids, since guids differentiate tools by id and version. + # TODO: Re-engineer this to define the change set for update to be the one just before the next change set in the repository_metadata + # table for this repository. + # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the + # repository was cloned. Load each tool in the repository's changeset_revision to generate a list of tool guids, since guids + # differentiate tools by id and version. ctx = get_changectx_for_changeset( repo, changeset_revision ) - if ctx is not None: + if ctx is not None: + work_dir = make_tmp_directory() tool_guids = [] for filename in ctx: # Find all tool configs in this repository changeset_revision. - if filename != 'datatypes_conf.xml' and filename.endswith( '.xml' ): - fctx = ctx[ filename ] - # Write the contents of the old tool config to a temporary file. 
- fh = tempfile.NamedTemporaryFile( 'w' ) - tmp_filename = fh.name - fh.close() - fh = open( tmp_filename, 'w' ) - fh.write( fctx.data() ) - fh.close() - if not ( check_binary( tmp_filename ) or check_image( tmp_filename ) or check_gzip( tmp_filename )[ 0 ] - or check_bz2( tmp_filename )[ 0 ] or check_zip( tmp_filename ) ): - try: - # Make sure we're looking at a tool config and not a display application config or something else. - element_tree = util.parse_xml( tmp_filename ) - element_tree_root = element_tree.getroot() - is_tool = element_tree_root.tag == 'tool' - except Exception, e: - log.debug( "Error parsing %s, exception: %s" % ( tmp_filename, str( e ) ) ) - is_tool = False - if is_tool: - try: - tool = load_tool( trans, tmp_filename ) - valid = True - except: - valid = False - if valid and tool is not None: - tool_guids.append( generate_tool_guid( trans, repository, tool ) ) - try: - os.unlink( tmp_filename ) - except: - pass + if filename not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ): + valid, tool = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir ) + if valid and tool is not None: + tool_guids.append( generate_tool_guid( trans, repository, tool ) ) tool_guids.sort() if tool_guids: - # Compare our list of tool guids against those in each repository_metadata record - # for the repository to find the repository_metadata record with the changeset_revision - # value we want to pass back to the caller. + # Compare our list of tool guids against those in each repository_metadata record for the repository to find the + # repository_metadata record with the changeset_revision value we want to pass back to the caller. found = False for repository_metadata in get_repository_metadata_by_repository_id( trans, trans.security.encode_id( repository.id ) ): metadata = repository_metadata.metadata @@ -989,6 +984,10 @@ if from_update_manager: return no_update url += changeset_revision + try: + shutil.rmtree( work_dir ) + except: + pass url += '&latest_ctx_rev=%s' % str( latest_ctx.rev() ) return trans.response.send_redirect( url ) @web.expose @@ -1241,7 +1240,7 @@ current_working_dir = os.getcwd() # Update repository files for browsing. update_repository( repo ) - is_malicious = change_set_is_malicious( trans, id, repository.tip ) + is_malicious = changeset_is_malicious( trans, id, repository.tip ) return trans.fill_template( '/webapps/community/repository/browse_repository.mako', repo=repo, repository=repository, @@ -1322,9 +1321,7 @@ try: commands.remove( repo.ui, repo, selected_file, force=True ) except Exception, e: - # I never have a problem with commands.remove on a Mac, but in the test/production - # tool shed environment, it throws an exception whenever I delete all files from a - # repository. If this happens, we'll try the following. + log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e )) relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' ) repo.dirstate.remove( relative_selected_file ) repo.dirstate.write() @@ -1346,42 +1343,24 @@ # Commit the change set. if not commit_message: commit_message = 'Deleted selected files' - try: - commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message ) - except Exception, e: - # I never have a problem with commands.commit on a Mac, but in the test/production - # tool shed environment, it occasionally throws a "TypeError: array item must be char" - # exception. 
If this happens, we'll try the following. - repo.dirstate.write() - repo.commit( user=trans.user.username, text=commit_message ) + commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message ) handle_email_alerts( trans, repository ) # Update the repository files for browsing. update_repository( repo ) # Get the new repository tip. repo = hg.repository( get_configured_ui(), repo_dir ) - if tip != repository.tip: - message = "The selected files were deleted from the repository." + if tip == repository.tip: + message += 'No changes to repository. ' + kwd[ 'message' ] = message + else: - message = 'No changes to repository.' - # Set metadata on the repository tip. - error_message, status = set_repository_metadata( trans, id, repository.tip, **kwd ) - if error_message: - # If there is an error, display it. - message = '%s<br/>%s' % ( message, error_message ) - return trans.response.send_redirect( web.url_for( controller='repository', - action='manage_repository', - id=id, - message=message, - status=status ) ) - else: - # If no error occurred in setting metadata on the repository tip, reset metadata on all - # changeset revisions for the repository. This will result in a more standardized set of - # valid repository revisions that can be installed. - reset_all_repository_metadata( trans, id, **kwd ) + message += 'The selected files were deleted from the repository. ' + kwd[ 'message' ] = message + set_repository_metadata_due_to_new_tip( trans, id, repository, **kwd ) else: message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>." status = "error" - is_malicious = change_set_is_malicious( trans, id, repository.tip ) + is_malicious = changeset_is_malicious( trans, id, repository.tip ) return trans.fill_template( '/webapps/community/repository/browse_repository.mako', repo=repo, repository=repository, @@ -1436,7 +1415,7 @@ else: repository_metadata_id = None metadata = None - is_malicious = change_set_is_malicious( trans, id, repository.tip ) + is_malicious = changeset_is_malicious( trans, id, repository.tip ) if is_malicious: if trans.app.security_agent.can_push( trans.user, repository ): message += malicious_error_can_push @@ -1640,7 +1619,7 @@ 'has_metadata' : has_metadata } # Make sure we'll view latest changeset first. 
changesets.insert( 0, change_dict ) - is_malicious = change_set_is_malicious( trans, id, repository.tip ) + is_malicious = changeset_is_malicious( trans, id, repository.tip ) return trans.fill_template( '/webapps/community/repository/view_changelog.mako', repository=repository, changesets=changesets, @@ -1669,7 +1648,7 @@ diffs = [] for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node() ): diffs.append( to_html_escaped( diff ) ) - is_malicious = change_set_is_malicious( trans, id, repository.tip ) + is_malicious = changeset_is_malicious( trans, id, repository.tip ) return trans.fill_template( '/webapps/community/repository/view_changeset.mako', repository=repository, ctx=ctx, @@ -1712,7 +1691,7 @@ avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model ) display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) ) rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model ) - is_malicious = change_set_is_malicious( trans, id, repository.tip ) + is_malicious = changeset_is_malicious( trans, id, repository.tip ) return trans.fill_template( '/webapps/community/repository/rate_repository.mako', repository=repository, avg_rating=avg_rating, @@ -1845,9 +1824,12 @@ status=status ) ) @web.expose def reset_all_metadata( self, trans, id, **kwd ): - reset_all_repository_metadata( trans, id, **kwd ) - message = "All repository metadata has been reset." - status = 'done' + error_message, status = reset_all_metadata_on_repository( trans, id, **kwd ) + if error_message: + message = error_message + else: + message = "All repository metadata has been reset." + status = 'done' return trans.response.send_redirect( web.url_for( controller='repository', action='manage_repository', id=id, @@ -1862,7 +1844,7 @@ repository = get_repository( trans, repository_id ) tool, message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config ) tool_state = self.__new_state( trans ) - is_malicious = change_set_is_malicious( trans, repository_id, repository.tip ) + is_malicious = changeset_is_malicious( trans, repository_id, repository.tip ) try: return trans.fill_template( "/webapps/community/repository/tool_form.mako", repository=repository, @@ -1900,43 +1882,34 @@ repo = hg.repository( get_configured_ui(), repo_dir ) ctx = get_changectx_for_changeset( repo, changeset_revision ) invalid_message = '' - if changeset_revision == repository.tip: - for root, dirs, files in os.walk( repo_dir ): - found = False - for name in files: - if name == tool_config: - tool_config_path = os.path.join( root, name ) - found = True - break - if found: - break - metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, repository_id, ctx, changeset_revision, repo, repo_dir ) - else: - for filename in ctx: - if filename == tool_config: - fctx = ctx[ filename ] - # Write the contents of datatypes_config.xml to a temporary file. 
- fh = tempfile.NamedTemporaryFile( 'w' ) - tool_config_path = fh.name - fh.close() - fh = open( tool_config_path, 'w' ) - fh.write( fctx.data() ) - fh.close() - break - metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, repository_id, ctx, changeset_revision, repo_dir ) + work_dir = make_tmp_directory() + for filename in ctx: + ctx_file_name = strip_path( filename ) + if ctx_file_name == tool_config: + tool_config_path = get_named_tmpfile_from_ctx( ctx, filename, dir=work_dir ) + break + metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, + repo, + repository_id, + ctx, + changeset_revision, + repo_dir, + updating_tip=changeset_revision==repository.tip ) for invalid_file_tup in invalid_files: invalid_tool_config, invalid_msg = invalid_file_tup - if tool_config == invalid_tool_config: + invalid_tool_config_name = strip_path( invalid_tool_config ) + if tool_config == invalid_tool_config_name: invalid_message = invalid_msg break - tool, message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config_path ) + tool, error_message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config ) + if error_message: + message += error_message tool_state = self.__new_state( trans ) - is_malicious = change_set_is_malicious( trans, repository_id, repository.tip ) - if changeset_revision != repository.tip: - try: - os.unlink( tool_config_path ) - except: - pass + is_malicious = changeset_is_malicious( trans, repository_id, repository.tip ) + try: + shutil.rmtree( work_dir ) + except: + pass try: if invalid_message: message = invalid_message @@ -1997,7 +1970,7 @@ except: tool = None break - is_malicious = change_set_is_malicious( trans, repository_id, repository.tip ) + is_malicious = changeset_is_malicious( trans, repository_id, repository.tip ) changeset_revision_select_field = build_changeset_revision_select_field( trans, repository, selected_value=changeset_revision, diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 lib/galaxy/webapps/community/controllers/upload.py --- a/lib/galaxy/webapps/community/controllers/upload.py +++ b/lib/galaxy/webapps/community/controllers/upload.py @@ -126,16 +126,14 @@ full_path = full_path.encode( 'ascii', 'replace' ) commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message ) if full_path.endswith( 'tool_data_table_conf.xml.sample' ): - # Handle the special case where a tool_data_table_conf.xml.sample - # file is being uploaded by parsing the file and adding new entries - # to the in-memory trans.app.tool_data_tables dictionary as well as - # appending them to the shed's tool_data_table_conf.xml file on disk. + # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries + # to the in-memory trans.app.tool_data_tables dictionary. error, error_message = handle_sample_tool_data_table_conf_file( trans.app, full_path ) if error: message = '%s<br/>%s' % ( message, error_message ) - if full_path.endswith( '.loc.sample' ): - # Handle the special case where a xxx.loc.sample file is being uploaded by copying it to ~/tool-data/xxx.loc. - copy_sample_file( trans.app, full_path ) + #if full_path.endswith( '.loc.sample' ): + # # Handle the special case where a xxx.loc.sample file is being uploaded by copying it to ~/tool-data/xxx.loc. 
+ # copy_sample_file( trans.app, full_path ) # See if the content of the change set was valid. admin_only = len( repository.downloadable_revisions ) != 1 handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only ) @@ -143,37 +141,24 @@ # Update the repository files for browsing. update_repository( repo ) # Get the new repository tip. - if tip != repository.tip: + if tip == repository.tip: + message = 'No changes to repository. ' + else: if ( isgzip or isbz2 ) and uncompress_file: uncompress_str = ' uncompressed and ' else: uncompress_str = ' ' - message = "The file '%s' has been successfully%suploaded to the repository." % ( uploaded_file_filename, uncompress_str ) + message = "The file '%s' has been successfully%suploaded to the repository. " % ( uploaded_file_filename, uncompress_str ) if istar and ( undesirable_dirs_removed or undesirable_files_removed ): items_removed = undesirable_dirs_removed + undesirable_files_removed - message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) were removed from the archive." % items_removed + message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) were removed from the archive. " % items_removed if istar and remove_repo_files_not_in_tar and files_to_remove: if upload_point is not None: - message += " %d files were removed from the repository relative to the selected upload point '%s'." % ( len( files_to_remove ), upload_point ) + message += " %d files were removed from the repository relative to the selected upload point '%s'. " % ( len( files_to_remove ), upload_point ) else: - message += " %d files were removed from the repository root." % len( files_to_remove ) - else: - message = 'No changes to repository.' - # Set metadata on the repository tip. - error_message, status = set_repository_metadata( trans, repository_id, repository.tip, content_alert_str=content_alert_str, **kwd ) - if error_message: - # If there is an error, display it. - message = '%s<br/>%s' % ( message, error_message ) - return trans.response.send_redirect( web.url_for( controller='repository', - action='manage_repository', - id=repository_id, - message=message, - status=status ) ) - else: - # If no error occurred in setting metadata on the repository tip, reset metadata on all - # changeset revisions for the repository. This will result in a more standardized set of - # valid repository revisions that can be installed. - reset_all_repository_metadata( trans, repository_id, **kwd ) + message += " %d files were removed from the repository root. " % len( files_to_remove ) + kwd[ 'message' ] = message + set_repository_metadata_due_to_new_tip( trans, repository_id, repository, content_alert_str=content_alert_str, **kwd ) trans.response.send_redirect( web.url_for( controller='repository', action='browse_repository', id=repository_id, @@ -278,16 +263,14 @@ content_alert_str += self.__check_file_content( filename_in_archive ) commands.add( repo.ui, repo, filename_in_archive ) if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ): - # Handle the special case where a tool_data_table_conf.xml.sample - # file is being uploaded by parsing the file and adding new entries - # to the in-memory trans.app.tool_data_tables dictionary as well as - # appending them to the shed's tool_data_table_conf.xml file on disk. 
+ # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries + # to the in-memory trans.app.tool_data_tables dictionary. error, message = handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive ) if error: return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed - if filename_in_archive.endswith( '.loc.sample' ): - # Handle the special case where a xxx.loc.sample file is being uploaded by copying it to ~/tool-data/xxx.loc. - copy_sample_file( trans.app, filename_in_archive ) + #if filename_in_archive.endswith( '.loc.sample' ): + # # Handle the special case where a xxx.loc.sample file is being uploaded by copying it to ~/tool-data/xxx.loc. + # copy_sample_file( trans.app, filename_in_archive ) commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message ) # See if the content of the change set was valid. admin_only = len( repository.downloadable_revisions ) != 1 diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/admin/tool_shed_repository/browse_repository.mako --- a/templates/admin/tool_shed_repository/browse_repository.mako +++ b/templates/admin/tool_shed_repository/browse_repository.mako @@ -2,8 +2,6 @@ <%namespace file="/message.mako" import="render_msg" /><%namespace file="/admin/tool_shed_repository/common.mako" import="*" /> -<% from galaxy.web.base.controller import tool_shed_encode, tool_shed_decode %> - <br/><br/><ul class="manage-table-actions"><li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li> diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako --- a/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako +++ b/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako @@ -55,6 +55,11 @@ * The repository's tools will not be loaded into the tool panel. </div> %endif + %if repository.includes_tool_dependencies: + <div class="toolParamHelp" style="clear: both;"> + * The repository's installed tool dependencies will remain on disk. + </div> + %endif %if repository.includes_datatypes: <div class="toolParamHelp" style="clear: both;"> * The repository's datatypes, datatype converters and display applications will be eliminated from the datatypes registry. @@ -73,6 +78,14 @@ * The repository's tool tag sets will be removed from the tool config file in which they are defined. </div> %endif + %if repository.includes_tool_dependencies: + <div class="toolParamHelp" style="clear: both;"> + * The repository's installed tool dependencies will be removed from disk. + </div> + <div class="toolParamHelp" style="clear: both;"> + * Each associated tool dependency record's uninstalled column in the tool_dependency database table will be set to True. + </div> + %endif %if repository.includes_datatypes: <div class="toolParamHelp" style="clear: both;"> * The repository's datatypes, datatype converters and display applications will be eliminated from the datatypes registry. 
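For reference, the tool dependency installation layout that the new templates below render, and the uninstall behavior described in the template hunk above, can be sketched in Python roughly as follows. This is a minimal sketch, not code from this changeset: both helper names are hypothetical, and app.model.context.current is assumed to be the usual Galaxy session accessor; only the path layout itself is taken from the changeset.

    import os, shutil

    def tool_dependency_install_dir( app, name, version, repository_owner, repository_name, changeset_revision ):
        # Per-dependency installation directory rendered by the install templates:
        #   <tool_dependency_dir>/<name>/<version>/<owner>/<repository>/<changeset revision>
        return os.path.join( app.config.tool_dependency_dir, name, version, repository_owner, repository_name, changeset_revision )

    def mark_tool_dependency_uninstalled( app, tool_dependency, install_dir ):
        # Mirror the uninstall behavior described above: remove the installed
        # files from disk and set the record's uninstalled column to True.
        # (Hypothetical helper; the real controller code is not shown here.)
        if os.path.exists( install_dir ):
            shutil.rmtree( install_dir )
        tool_dependency.uninstalled = True
        sa_session = app.model.context.current  # assumed session accessor
        sa_session.add( tool_dependency )
        sa_session.flush()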
diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako --- /dev/null +++ b/templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako @@ -0,0 +1,90 @@ +<%inherit file="/base.mako"/> +<%namespace file="/message.mako" import="render_msg" /> + +<% import os %> + +%if message: + ${render_msg( message, status )} +%endif + +<div class="warningmessage"> + <p> + Galaxy will attempt to install the missing tool dependencies listed below. Each of these dependencies may require its own build + requirements (e.g., CMake, g++, etc.). Galaxy will not attempt to install these build requirements, so if any are missing from your + environment, tool dependency installation may partially fail. If this happens, you can install the missing build requirements and + have Galaxy attempt to install the tool dependencies again. + </p> +</div> +<br/> +<div class="warningmessage"> + <p> + Installation may take a while. <b>Always wait until a message is displayed in your browser after clicking the <b>Go</b> button below.</b> + If you get bored, watching your Galaxy server's paster log will help pass the time. + </p> + <p> + Information about the tool dependency installation process will be saved in various files named with a ".log" extension in the directory: + ${trans.app.config.tool_dependency_dir}/<i>package name</i>/<i>package version</i>/${repository.owner}/${repository.name}/${repository.changeset_revision} + </p> +</div> +<br/> + +<div class="toolForm"> + <div class="toolFormBody"> + <form name="install_missing_tool_dependencies" id="install_missing_tool_dependencies" action="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ), tool_panel_section=tool_panel_section, new_tool_panel_section=new_tool_panel_section, reinstalling=reinstalling )}" method="post" > + <div style="clear: both"></div> + <div class="form-row"> + <label>Install tool dependencies?</label> + ${install_tool_dependencies_check_box.get_html()} + <div class="toolParamHelp" style="clear: both;"> + Un-check to skip installation of these tool dependencies. + </div> + ## Fake the no_changes_check_box value. 
+ %if no_changes_checked: + <input type="hidden" id="no_changes" name="no_changes" value="true" checked="checked"><input type="hidden" name="no_changes" value="true"> + %else: + <input type="hidden" name="no_changes" value="true"> + %endif + </div> + <div style="clear: both"></div> + <div class="form-row"> + <table class="grid"> + <tr><td colspan="4" bgcolor="#D8D8D8"><b>Tool dependencies</b></td></tr> + <tr> + <th>Name</th> + <th>Version</th> + <th>Type</th> + <th>Install directory</th> + </tr> + %for dependency_key, requirements_dict in tool_dependencies.items(): + <% + name = requirements_dict[ 'name' ] + version = requirements_dict[ 'version' ] + type = requirements_dict[ 'type' ] + install_dir = os.path.join( trans.app.config.tool_dependency_dir, + name, + version, + repository.owner, + repository.name, + repository.changeset_revision ) + readme_text = requirements_dict.get( 'readme', None ) + %> + <tr> + <td>${name}</td> + <td>${version}</td> + <td>${type}</td> + <td>${install_dir}</td> + </tr> + %if readme_text: + <tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr> + <tr><td colspan="4"><pre>${readme_text}</pre></td></tr> + %endif + %endfor + </table> + <div style="clear: both"></div> + </div> + <div class="form-row"> + <input type="submit" name="install_missing_tool_dependencies_button" value="Go"/> + </div> + </form> + </div> +</div> diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/admin/tool_shed_repository/install_tool_dependencies.mako --- /dev/null +++ b/templates/admin/tool_shed_repository/install_tool_dependencies.mako @@ -0,0 +1,84 @@ +<%inherit file="/base.mako"/> +<%namespace file="/message.mako" import="render_msg" /> + +<% import os %> + +%if message: + ${render_msg( message, status )} +%endif + +<div class="warningmessage"> + <p> + The tool dependencies listed below can be automatically installed with the repository. Installing them provides significant + benefits and Galaxy includes various features to manage them. + </p> + <p> + Each of these dependencies may require its own build requirements (e.g., CMake, g++, etc.). Galaxy will not attempt to install + these build requirements, so if any are missing from your environment, tool dependency installation may partially fail. The + repository and all of its contents will be installed in any case. + </p> + <p> + If tool dependency installation fails in any way, you can install the missing build requirements and have Galaxy attempt to install + the tool dependencies again using the <b>Install tool dependencies</b> pop-up menu option on the <b>Manage repository</b> page. + </p> +</div> + +<div class="toolForm"> + <div class="toolFormBody"> + <form name="install_tool_dependencies" id="install_tool_dependencies" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict, includes_tools=includes_tools )}" method="post" > + <div style="clear: both"></div> + <div class="form-row"> + <label>Install tool dependencies?</label> + ${install_tool_dependencies_check_box.get_html()} + <div class="toolParamHelp" style="clear: both;"> + Un-check to skip automatic installation of these tool dependencies. 
+ </div> + </div> + <div style="clear: both"></div> + <div class="form-row"> + <table class="grid"> + <tr><td colspan="4" bgcolor="#D8D8D8"><b>Tool dependencies</b></td></tr> + <tr> + <th>Name</th> + <th>Version</th> + <th>Type</th> + <th>Install directory</th> + </tr> + %for repository_name, repo_info_tuple in dict_with_tool_dependencies.items(): + <% + description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple + %> + %for dependency_key, requirements_dict in tool_dependencies.items(): + <% + name = requirements_dict[ 'name' ] + version = requirements_dict[ 'version' ] + type = requirements_dict[ 'type' ] + install_dir = os.path.join( trans.app.config.tool_dependency_dir, + name, + version, + repository_owner, + repository_name, + changeset_revision ) + readme_text = requirements_dict.get( 'readme', None ) + %> + <tr> + <td>${name}</td> + <td>${version}</td> + <td>${type}</td> + <td>${install_dir}</td> + </tr> + %if readme_text: + <tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr> + <tr><td colspan="4"><pre>${readme_text}</pre></td></tr> + %endif + %endfor + %endfor + </table> + <div style="clear: both"></div> + </div> + <div class="form-row"> + <input type="submit" name="install_tool_dependencies_button" value="Continue"/> + </div> + </form> + </div> +</div> diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/admin/tool_shed_repository/manage_repository.mako --- a/templates/admin/tool_shed_repository/manage_repository.mako +++ b/templates/admin/tool_shed_repository/manage_repository.mako @@ -11,6 +11,9 @@ %if repository.includes_tools: <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a> %endif + %if tool_dependencies_missing: + <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install tool dependencies</a> + %endif <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a></div></ul> diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/admin/tool_shed_repository/reselect_tool_panel_section.mako --- a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako +++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako @@ -8,7 +8,11 @@ <div class="toolForm"><div class="toolFormTitle">Choose the tool panel section to contain the installed tools (optional)</div><div class="toolFormBody"> - <form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='reinstall_repository', id=trans.security.encode_id( repository.id ) )}" method="post" > + %if repository.includes_tool_dependencies: + <form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ), reinstalling=True )}" method="post" > + %else: + <form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='reinstall_repository', 
id=trans.security.encode_id( repository.id ) )}" method="post" > + %endif <div class="form-row"> ${no_changes_check_box.get_html()} <label style="display: inline;">No changes</label> diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/admin/tool_shed_repository/select_tool_panel_section.mako --- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako +++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako @@ -6,26 +6,38 @@ %endif <div class="warningmessage"> - The core Galaxy development team does not maintain the contents of many Galaxy tool shed repositories. Some repository tools - may include code that produces malicious behavior, so be aware of what you are installing. - <p/> - If you discover a repository that causes problems after installation, contact <a href="http://wiki.g2.bx.psu.edu/Support" target="_blank">Galaxy support</a>, - sending all necessary information, and appropriate action will be taken. - <p/> - <a href="http://wiki.g2.bx.psu.edu/Tool%20Shed#Contacting_the_owner_of_a_repository" target="_blank">Contact the repository owner</a> for general questions - or concerns. + <p> + The core Galaxy development team does not maintain the contents of many Galaxy tool shed repositories. Some repository tools + may include code that produces malicious behavior, so be aware of what you are installing. + </p> + <p> + If you discover a repository that causes problems after installation, contact <a href="http://wiki.g2.bx.psu.edu/Support" target="_blank">Galaxy support</a>, + sending all necessary information, and appropriate action will be taken. + </p> + <p> + <a href="http://wiki.g2.bx.psu.edu/Tool%20Shed#Contacting_the_owner_of_a_repository" target="_blank">Contact the repository owner</a> for + general questions or concerns. + </p></div><br/><div class="warningmessage"> - Installation may take a while, depending upon the size of the repository contents. Wait until a message is displayed in your - browser after clicking the <b>Install</b> button below. + <p> + Installation may take a while with large repositories or if you elect to install tool dependencies. <b>Always wait until a message is + displayed in your browser after clicking the <b>Install</b> button below.</b> If you get bored, watching your Galaxy server's paster log + will help pass the time. 
+ </p> + <p> + If installing tool dependencies, information about the installation process will be saved in various files named with a ".log" + extension in the directory: + ${trans.app.config.tool_dependency_dir}/<i>package name</i>/<i>package version</i>/<i>repository owner</i>/<i>repository name</i>/<i>repository changeset revision</i> + </p></div><br/><div class="toolForm"><div class="toolFormTitle">Choose the tool panel section to contain the installed tools (optional)</div><div class="toolFormBody"> - <form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict, includes_tools=includes_tools )}" method="post" > + <form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict, includes_tools=includes_tools, install_tool_dependencies=install_tool_dependencies )}" method="post" > %if shed_tool_conf_select_field: <div class="form-row"><label>Shed tool configuration file:</label> diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/webapps/community/admin/index.mako --- a/templates/webapps/community/admin/index.mako +++ b/templates/webapps/community/admin/index.mako @@ -55,7 +55,7 @@ <a target="galaxy_main" href="${h.url_for( controller='admin', action='browse_repositories', webapp='community' )}">Browse all repositories</a></div><div class="toolTitle"> - <a target="galaxy_main" href="${h.url_for( controller='admin', action='reset_all_repository_metadata', webapp='community' )}">Reset all metadata</a> + <a target="galaxy_main" href="${h.url_for( controller='admin', action='reset_metadata_on_all_repositories', webapp='community' )}">Reset all metadata</a></div><div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='admin', action='browse_repository_metadata', webapp='community' )}">Browse metadata</a> diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/webapps/community/admin/reset_all_repository_metadata.mako --- a/templates/webapps/community/admin/reset_all_repository_metadata.mako +++ /dev/null @@ -1,19 +0,0 @@ -<%inherit file="/base.mako"/> -<%namespace file="/message.mako" import="render_msg" /> - -%if message: - ${render_msg( message, status )} -%endif - -<div class="toolForm"> - <div class="toolFormTitle">Reset metadata on each change set of the repositories in this tool shed</div> - <form name="reset_all_repository_metadata" id="reset_all_repository_metadata" action="${h.url_for( controller='admin', action='reset_all_repository_metadata' )}" method="post" > - <div class="form-row"> - Click the button below to reset metadata on each change set of the repositories in this tool shed. 
- </div> - <div class="form-row"> - <input type="submit" name="reset_all_repository_metadata_button" value="Reset all repository metadata"/> - </div> - </form> - </div> -</div> diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/webapps/community/admin/reset_metadata_on_all_repositories.mako --- /dev/null +++ b/templates/webapps/community/admin/reset_metadata_on_all_repositories.mako @@ -0,0 +1,19 @@ +<%inherit file="/base.mako"/> +<%namespace file="/message.mako" import="render_msg" /> + +%if message: + ${render_msg( message, status )} +%endif + +<div class="toolForm"> + <div class="toolFormTitle">Reset metadata on each change set of the repositories in this tool shed</div> + <form name="reset_metadata_on_all_repositories" id="reset_metadata_on_all_repositories" action="${h.url_for( controller='admin', action='reset_metadata_on_all_repositories' )}" method="post" > + <div class="form-row"> + Click the button below to reset metadata on each change set of the repositories in this tool shed. + </div> + <div class="form-row"> + <input type="submit" name="reset_metadata_on_all_repositories_button" value="Reset metadata on all repositories"/> + </div> + </form> + </div> +</div> diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/webapps/community/repository/common.mako --- a/templates/webapps/community/repository/common.mako +++ b/templates/webapps/community/repository/common.mako @@ -91,6 +91,39 @@ <div class="toolFormTitle">Preview tools and inspect metadata by tool version</div><div class="toolFormBody"> %if metadata: + %if 'tool_dependencies' in metadata: + <div class="form-row"> + <table width="100%"> + <tr bgcolor="#D8D8D8" width="100%"> + <td><b>The following tool dependencies can optionally be automatically installed</b></td> + </tr> + </table> + </div> + <div style="clear: both"></div> + <div class="form-row"> + <% tool_dependencies = metadata[ 'tool_dependencies' ] %> + <table class="grid"> + <tr> + <td><b>name</b></td> + <td><b>type</b></td> + <td><b>version</b></td> + </tr> + %for dependency_key, requirements_dict in tool_dependencies.items(): + <% + name = requirements_dict[ 'name' ] + type = requirements_dict[ 'type' ] + version = requirements_dict[ 'version' ] + %> + <tr> + <td>${name}</td> + <td>${type}</td> + <td>${version}</td> + </tr> + %endfor + </table> + </div> + <div style="clear: both"></div> + %endif %if 'tools' in metadata: <div class="form-row"><table width="100%"> @@ -256,18 +289,6 @@ <div style="clear: both"></div> %endif %endif - %if can_set_metadata: - <form name="set_metadata" action="${h.url_for( controller='repository', action='set_metadata', id=trans.security.encode_id( repository.id ), ctx_str=changeset_revision )}" method="post"> - <div class="form-row"> - <div style="float: left; width: 250px; margin-right: 10px;"> - <input type="submit" name="set_metadata_button" value="Reset metadata"/> - </div> - <div class="toolParamHelp" style="clear: both;"> - Inspect the repository and reset the above attributes for the repository tip. 
- </div> - </div> - </form> - %endif </div></div> %endif diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/webapps/community/repository/view_tool_metadata.mako --- a/templates/webapps/community/repository/view_tool_metadata.mako +++ b/templates/webapps/community/repository/view_tool_metadata.mako @@ -165,6 +165,37 @@ <div style="clear: both"></div></div> %endif + <% + if 'requirements' in metadata: + requirements = metadata[ 'requirements' ] + else: + requirements = None + %> + %if requirements: + <div class="form-row"> + <label>Requirements:</label> + <table class="grid"> + <tr> + <td><b>name</b></td> + <td><b>version</b></td> + <td><b>type</b></td> + </tr> + %for requirement_dict in requirements: + <% + requirement_name = requirement_dict[ 'name' ] or 'not provided' + requirement_version = requirement_dict[ 'version' ] or 'not provided' + requirement_type = requirement_dict[ 'type' ] or 'not provided' + %> + <tr> + <td>${requirement_name}</td> + <td>${requirement_version}</td> + <td>${requirement_type}</td> + </tr> + %endfor + </table> + <div style="clear: both"></div> + </div> + %endif %if 'version_string_cmd' in metadata: <div class="form-row"><label>Version command string:</label> @@ -200,25 +231,6 @@ </div> %endif <% - if 'requirements' in metadata: - requirements = metadata[ 'requirements' ] - else: - requirements = None - %> - %if requirements: - <% - requirements_str = '' - for requirement_dict in metadata[ 'requirements' ]: - requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] ) - requirements_str = requirements_str.rstrip( ', ' ) - %> - <div class="form-row"> - <label>Requirements:</label> - ${requirements_str} - <div style="clear: both"></div> - </div> - %endif - <% if 'tests' in metadata: tests = metadata[ 'tests' ] else: diff -r 688b3b621cd48cefefaf73081505bbe4749a217d -r 72c4f0007f4ed1fba919786bc8a2e017ecf19516 templates/webapps/community/repository/view_workflow.mako --- a/templates/webapps/community/repository/view_workflow.mako +++ b/templates/webapps/community/repository/view_workflow.mako @@ -16,7 +16,6 @@ can_upload = can_push can_download = in_tool_shed and not is_new and ( not is_malicious or can_push ) can_browse_contents = in_tool_shed and not is_new - can_set_metadata = in_tool_shed and not is_new can_rate = in_tool_shed and not is_new and trans.user and repository.user != trans.user can_view_change_log = in_tool_shed and not is_new if can_push: Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
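To make the repo_info_dict handling in this changeset concrete, here is a rough sketch of how the tool shed builds and signs the payload it hands back to Galaxy. The helper name and import paths are assumptions; the two tuple shapes and the hmac/hexlify scheme are taken from the diff hunks above.

    import binascii
    import simplejson
    from galaxy.util.hash_util import hmac_new  # assumed import path

    def build_and_encode_repo_info_dict( repository, repository_clone_url, changeset_revision, ctx_rev, metadata ):
        if 'tool_dependencies' in metadata:
            # Six-element tuple when the revision defines tool dependencies; the
            # extra owner and tool_dependencies entries drive the install templates.
            repo_info_dict = { repository.name: ( repository.description,
                                                  repository_clone_url,
                                                  changeset_revision,
                                                  ctx_rev,
                                                  repository.user.username,
                                                  metadata[ 'tool_dependencies' ] ) }
        else:
            # Four-element tuple otherwise.
            repo_info_dict = { repository.name: ( repository.description,
                                                  repository_clone_url,
                                                  changeset_revision,
                                                  ctx_rev ) }
        # Sign the JSON with the shared key and hexlify it, matching encode().
        value = simplejson.dumps( repo_info_dict )
        return "%s:%s" % ( hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ), binascii.hexlify( value ) )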