commit/galaxy-central: greg: Add a new installation process for managing tools that we will remove from the distribution over time and host in the main Galaxy tool shed. This process will execute when the Galaxy server is started if enabled in the configuration (which should not happen until users running local Galaxy instances are notified).
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/05b62675898d/
changeset: 05b62675898d
user: greg
date: 2011-12-08 22:26:34
summary: Add a new installation process for managing tools that we will remove from the distribution over time and host in the main Galaxy tool shed. This process will execute when the Galaxy server is started if enabled in the configuration (which should not happen until users running local Galaxy instances are notified). A new tool_id_guid_map table will be populated for each installed tool. This will enable backward compatibility for workflows that were built using tools that are currently in the Galaxy distribution but that will be installed from a tool shed in the future.
affected #: 13 files
diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 lib/galaxy/app.py --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -54,6 +54,11 @@ self.toolbox = tools.ToolBox( self.config.tool_configs, self.config.tool_path, self ) # Search support for tools self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox ) + # If enabled, check for tools missing from the distribution because they + # have been moved to the tool shed and install all such discovered tools. + if self.config.get_bool( 'enable_tool_shed_install', False ): + from tools import install_manager + self.install_manager = install_manager.InstallManager( self, self.config.tool_shed_install_config, self.config.install_tool_config ) # Load datatype converters self.datatypes_registry.load_datatype_converters( self.toolbox ) # Load history import/export tools diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 lib/galaxy/config.py --- a/lib/galaxy/config.py +++ b/lib/galaxy/config.py @@ -47,11 +47,14 @@ self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) ) self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', False ) ) self.tool_sheds_config = kwargs.get( 'tool_sheds_config_file', 'tool_sheds_conf.xml' ) - self.enable_unique_workflow_defaults = string_as_bool ( kwargs.get ('enable_unique_workflow_defaults', False ) ) + self.enable_unique_workflow_defaults = string_as_bool( kwargs.get ( 'enable_unique_workflow_defaults', False ) ) self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root ) self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() ) self.len_file_path = kwargs.get( "len_file_path", resolve_path(os.path.join(self.tool_data_path, 'shared','ucsc','chrom'), self.root) ) self.test_conf = resolve_path( kwargs.get( "test_conf", "" ), self.root ) + self.enable_tool_shed_install = string_as_bool( kwargs.get ( 'enable_tool_shed_install', False ) ) + self.tool_shed_install_config = resolve_path( kwargs.get( "tool_shed_install_config_file", "tool_shed_install.xml" ), self.root ) + self.install_tool_config = resolve_path( kwargs.get( "install_tool_config_file", "shed_tool_conf.xml" ), self.root ) if 'tool_config_file' in kwargs: tcf = kwargs[ 'tool_config_file' ] elif 'tool_config_files' in kwargs: diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2678,6 +2678,17 @@ self.update_available = update_available self.deleted = deleted +class ToolIdGuidMap( object ): + def __init__( self, id=None, create_time=None, tool_id=None, tool_version=None, tool_shed=None,
repository_owner=None, repository_name=None, guid=None ): + self.id = id + self.create_time = create_time + self.tool_id = tool_id + self.tool_version = tool_version + self.tool_shed = tool_shed + self.repository_owner = repository_owner + self.repository_name = repository_name + self.guid = guid + ## ---- Utility methods ------------------------------------------------------- def directory_hash_id( id ): diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 lib/galaxy/model/mapping.py --- a/lib/galaxy/model/mapping.py +++ b/lib/galaxy/model/mapping.py @@ -378,6 +378,17 @@ Column( "update_available", Boolean, default=False ), Column( "deleted", Boolean, index=True, default=False ) ) +ToolIdGuidMap.table = Table( "tool_id_guid_map", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_id", String( 255 ) ), + Column( "tool_version", TEXT ), + Column( "tool_shed", TrimmedString( 255 ) ), + Column( "repository_owner", TrimmedString( 255 ) ), + Column( "repository_name", TrimmedString( 255 ) ), + Column( "guid", TEXT, index=True, unique=True ) ) + Job.table = Table( "job", metadata, Column( "id", Integer, primary_key=True ), Column( "create_time", DateTime, default=now ), @@ -1604,6 +1615,8 @@ assign_mapper( context, ToolShedRepository, ToolShedRepository.table ) +assign_mapper( context, ToolIdGuidMap, ToolIdGuidMap.table ) + # Set up proxy so that # Page.users_shared_with # returns a list of users that page is shared with. diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py --- /dev/null +++ b/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py @@ -0,0 +1,51 @@ +""" +Migration script to create the tool_id_guid_map table. 
+""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData( migrate_engine ) +db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) ) + +ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_id", String( 255 ) ), + Column( "tool_version", TEXT ), + Column( "tool_shed", TrimmedString( 255 ) ), + Column( "repository_owner", TrimmedString( 255 ) ), + Column( "repository_name", TrimmedString( 255 ) ), + Column( "guid", TEXT, index=True, unique=True ) ) + +def upgrade(): + print __doc__ + metadata.reflect() + try: + ToolIdGuidMap_table.create() + except Exception, e: + log.debug( "Creating tool_id_guid_map table failed: %s" % str( e ) ) + +def downgrade(): + metadata.reflect() + try: + ToolIdGuidMap_table.drop() + except Exception, e: + log.debug( "Dropping tool_id_guid_map table failed: %s" % str( e ) ) diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 lib/galaxy/tools/install_manager.py --- /dev/null +++ b/lib/galaxy/tools/install_manager.py @@ -0,0 +1,239 @@ +""" +Manage automatic installation of tools configured in tool_shed_install.xml, all of which were +at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool +shed. Tools included in tool_shed_install.xml that have already been installed will not be +re-installed. +""" +from galaxy import util +from galaxy.tools import ToolSection +from galaxy.tools.search import ToolBoxSearch +from galaxy import model +from galaxy.web.controllers.admin_toolshed import generate_metadata, generate_tool_panel_section, add_shed_tool_conf_entry, create_or_undelete_tool_shed_repository +from galaxy.web.controllers.admin_toolshed import handle_missing_data_table_entry, handle_missing_index_file, handle_tool_dependencies +from galaxy.model.orm import * +import os, subprocess, tempfile, logging + +pkg_resources.require( 'elementtree' ) +from elementtree import ElementTree, ElementInclude +from elementtree.ElementTree import Element + +log = logging.getLogger( __name__ ) + +class InstallManager( object ): + def __init__( self, app, tool_shed_install_config, install_tool_config ): + """ + Check tool settings in tool_shed_install_config and install all tools that are + not already installed. The tool panel configuration file is the received + shed_tool_config, which defaults to shed_tool_conf.xml. + """ + self.app = app + self.sa_session = self.app.model.context.current + self.install_tool_config = install_tool_config + # Parse shed_tool_config to get the install location (tool_path). + tree = util.parse_xml( install_tool_config ) + root = tree.getroot() + self.tool_path = root.get( 'tool_path' ) + self.app.toolbox.shed_tool_confs[ install_tool_config ] = self.tool_path + # Parse tool_shed_install_config to check each of the tools. 
+ log.debug( "Parsing tool shed install configuration %s" % tool_shed_install_config ) + self.tool_shed_install_config = tool_shed_install_config + tree = util.parse_xml( tool_shed_install_config ) + root = tree.getroot() + self.tool_shed = root.get( 'name' ) + log.debug( "Repositories will be installed from tool shed '%s' into configured tool_path location '%s'" % ( str( self.tool_shed ), str( self.tool_path ) ) ) + self.repository_owner = 'devteam' + for elem in root: + if elem.tag == 'tool': + self.check_tool( elem ) + elif elem.tag == 'section': + self.check_section( elem ) + def check_tool( self, elem ): + # TODO: write this method. + pass + def check_section( self, elem ): + section_name = elem.get( 'name' ) + section_id = elem.get( 'id' ) + for repository_elem in elem: + name = repository_elem.get( 'name' ) + description = repository_elem.get( 'description' ) + changeset_revision = repository_elem.get( 'changeset_revision' ) + installed = False + for tool_elem in repository_elem: + tool_config = tool_elem.get( 'file' ) + tool_id = tool_elem.get( 'id' ) + tool_version = tool_elem.get( 'version' ) + tigm = self.__get_tool_id_guid_map_by_id_version( tool_id, tool_version ) + if tigm: + # A record exists in the tool_id_guid_map + # table, so see if the tool is still installed. + install_path = self.__generate_install_path( tigm ) + if os.path.exists( install_path ): + message = "Skipping automatic install of repository '%s' because it has already been installed in location '%s'" % \ + ( name, install_path ) + log.debug( message ) + installed = True + break + if not installed: + log.debug( "Installing repository '%s' from tool shed '%s'" % ( name, self.tool_shed ) ) + current_working_dir = os.getcwd() + tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed ) + repository_clone_url = '%s/repos/devteam/%s' % ( tool_shed_url, name ) + # Install path is of the form: <tool path><tool shed>/repos/<repository owner>/<repository name>/<changeset revision> + clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos/devteam', name, changeset_revision ) + if not os.path.isdir( clone_dir ): + os.makedirs( clone_dir ) + log.debug( 'Cloning %s...' % repository_clone_url ) + cmd = 'hg clone %s' % repository_clone_url + tmp_name = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_name, 'wb' ) + os.chdir( clone_dir ) + proc = subprocess.Popen( args=cmd, shell=True, stderr=tmp_stderr.fileno() ) + returncode = proc.wait() + os.chdir( current_working_dir ) + tmp_stderr.close() + if returncode == 0: + # Update the cloned repository to changeset_revision. It is imperative that the + # installed repository is updated to the desired changeset_revision before metadata + # is set because the process for setting metadata uses the repository files on disk. + relative_install_dir = os.path.join( clone_dir, name ) + log.debug( 'Updating cloned repository to revision "%s"' % changeset_revision ) + cmd = 'hg update -r %s' % changeset_revision + tmp_name = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_name, 'wb' ) + os.chdir( relative_install_dir ) + proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() ) + returncode = proc.wait() + os.chdir( current_working_dir ) + tmp_stderr.close() + if returncode == 0: + # Generate the metadata for the installed tool shed repository. It is imperative that + # the installed repository is updated to the desired changeset_revision before metadata + # is set because the process for setting metadata uses the repository files on disk. 
+ metadata_dict = generate_metadata( self.app.toolbox, relative_install_dir, repository_clone_url ) + if 'datatypes_config' in metadata_dict: + datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] ) + # Load data types required by tools. + self.__load_datatypes( trans, datatypes_config, relative_install_dir ) + if 'tools' in metadata_dict: + repository_tools_tups = [] + for tool_dict in metadata_dict[ 'tools' ]: + relative_path = tool_dict[ 'tool_config' ] + guid = tool_dict[ 'guid' ] + tool = self.app.toolbox.load_tool( os.path.abspath( relative_path ) ) + repository_tools_tups.append( ( relative_path, guid, tool ) ) + if repository_tools_tups: + sample_files = metadata_dict.get( 'sample_files', [] ) + # Handle missing data table entries for tool parameters that are dynamically generated select lists. + repository_tools_tups = handle_missing_data_table_entry( self.app, self.tool_path, sample_files, repository_tools_tups ) + # Handle missing index files for tool parameters that are dynamically generated select lists. + repository_tools_tups = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups ) + # Handle tools that use fabric scripts to install dependencies. + handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups ) + section_key = 'section_%s' % str( section_id ) + if section_key in self.app.toolbox.tool_panel: + # Appending a tool to an existing section in self.app.toolbox.tool_panel + log.debug( "Appending to tool panel section: %s" % section_name ) + tool_section = self.app.toolbox.tool_panel[ section_key ] + else: + # Appending a new section to self.app.toolbox.tool_panel + log.debug( "Loading new tool panel section: %s" % section_name ) + elem = Element( 'section' ) + elem.attrib[ 'name' ] = section_name + elem.attrib[ 'id' ] = section_id + tool_section = ToolSection( elem ) + self.app.toolbox.tool_panel[ section_key ] = tool_section + # Generate an in-memory tool conf section that includes the new tools. + new_tool_section = generate_tool_panel_section( name, + repository_clone_url, + changeset_revision, + tool_section, + repository_tools_tups, + owner=self.repository_owner ) + # Create a temporary file to persist the in-memory tool section + # TODO: Figure out how to do this in-memory using xml.etree. + tmp_name = tempfile.NamedTemporaryFile().name + persisted_new_tool_section = open( tmp_name, 'wb' ) + persisted_new_tool_section.write( new_tool_section ) + persisted_new_tool_section.close() + # Parse the persisted tool panel section + tree = util.parse_xml( tmp_name ) + root = tree.getroot() + # Load the tools in the section into the tool panel. + self.app.toolbox.load_section_tag_set( root, self.app.toolbox.tool_panel, self.tool_path ) + # Remove the temporary file + try: + os.unlink( tmp_name ) + except: + pass + # Append the new section to the shed_tool_config file. + add_shed_tool_conf_entry( self.app, self.install_tool_config, new_tool_section ) + if self.app.toolbox_search.enabled: + # If search support for tools is enabled, index the new installed tools. + self.app.toolbox_search = ToolBoxSearch( self.app.toolbox ) + # Add a new record to the tool_shed_repository table if one doesn't + # already exist. If one exists but is marked deleted, undelete it. 
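The temporary-file round trip above exists only to turn the generated section string back into an element for load_section_tag_set(); the TODO notes it could be done in memory. A minimal sketch of that idea with the standard library, assuming new_tool_section remains a well-formed XML string:

    from xml.etree import ElementTree

    # Parse the in-memory section string directly; the resulting Element plays
    # the role of the root returned by util.parse_xml( tmp_name ) above.
    root = ElementTree.fromstring( new_tool_section )

Whether load_section_tag_set() accepts this Element without further changes is untested here; the sketch only shows that the temp file is avoidable.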
+ log.debug( "Adding new row to tool_shed_repository table for repository '%s'" % name ) + create_or_undelete_tool_shed_repository( self.app, + name, + description, + changeset_revision, + repository_clone_url, + metadata_dict, + owner=self.repository_owner ) + # Add a new record to the tool_id_guid_map table for each + # tool in the repository if one doesn't already exist. + if 'tools' in metadata_dict: + tools_mapped = 0 + for tool_dict in metadata_dict[ 'tools' ]: + tool_id = tool_dict[ 'id' ] + tool_version = tool_dict[ 'version' ] + guid = tool_dict[ 'guid' ] + tool_id_guid_map = model.ToolIdGuidMap( tool_id=tool_id, + tool_version=tool_version, + tool_shed=self.tool_shed, + repository_owner=self.repository_owner, + repository_name=name, + guid=guid ) + self.sa_session.add( tool_id_guid_map ) + self.sa_session.flush() + tools_mapped += 1 + log.debug( "Mapped tool ids to guids for %d tools included in repository '%s'." % ( tools_mapped, name ) ) + def __generate_install_path( self, tool_id_guid_map ): + """ + Generate a tool path in which a tool is or will be installed. The tool path will be of the form: + <tool shed>/repos/<repository owner>/<repository name>/<changeset revision> + """ + tool_shed = tool_id_guid_map.tool_shed + repository_name = tool_id_guid_map.repository_name + tool_shed_repository = self.__get_repository_by_tool_shed_name_owner( tool_shed, repository_name, self.repository_owner ) + changeset_revision = tool_shed_repository.changeset_revision + return '%s/repos%s/%s/%s/%s' % ( tool_shed, self.repository_owner, repository_name, changeset_revision ) + def __get_repository_by_tool_shed_name_owner( tool_shed, name, owner ): + """Get a repository from the database via tool_shed, name and owner.""" + # CRITICAL: this assumes that a single changeset_revision exists for each repository + # in the tool shed. In other words, if a repository has multiple changset_revisions + # there will be problems. We're probably safe here because only a single changeset_revision + # for each tool shed repository will be installed using this installation process. + return self.sa_session.query( self.app.model.ToolShedRepository ) \ + .filter( and_( self.app.model.ToolShedRepository.table.c.tool_shed == tool_shed, + self.app.model.ToolShedRepository.table.c.name == name, + self.app.model.ToolShedRepository.table.c.owner == owner ) ) \ + .first() + def __get_tool_id_guid_map_by_id_version( self, tool_id, tool_version ): + """Get a tool_id_guid_map from the database via tool_id and tool_version.""" + return self.sa_session.query( self.app.model.ToolIdGuidMap ) \ + .filter( and_( self.app.model.ToolIdGuidMap.table.c.tool_id == tool_id, + self.app.model.ToolIdGuidMap.table.c.tool_version == tool_version ) ) \ + .first() + def __get_url_from_tool_shed( self, tool_shed ): + # The value of tool_shed is something like: toolshed.g2.bx.psu.edu + # We need the URL to this tool shed, which is something like: + # http://toolshed.g2.bx.psu.edu/ + for shed_name, shed_url in self.app.tool_shed_registry.tool_sheds.items(): + if shed_url.find( tool_shed ) >= 0: + if shed_url.endswith( '/' ): + shed_url = shed_url.rstrip( '/' ) + return shed_url + # The tool shed from which the repository was originally + # installed must no longer be configured in tool_sheds_conf.xml. 
+ return None diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -12,7 +12,8 @@ from galaxy.model.orm import * from galaxy.workflow.modules import * from galaxy.web.framework import simplejson -from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, TextArea, TextField, WorkflowField, WorkflowMappingField, HistoryField, PasswordField, build_select_field +from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, TextArea, TextField +from galaxy.web.form_builder import WorkflowField, WorkflowMappingField, HistoryField, PasswordField, build_select_field from galaxy.visualization.tracks.data_providers import get_data_provider from galaxy.visualization.tracks.visual_analytics import get_tool_def from galaxy.security.validate_user_input import validate_username @@ -2425,11 +2426,11 @@ ## ---- Utility methods ------------------------------------------------------- -def copy_sample_loc_file( trans, filename ): +def copy_sample_loc_file( app, filename ): """Copy xxx.loc.sample to ~/tool-data/xxx.loc.sample and ~/tool-data/xxx.loc""" head, sample_loc_file = os.path.split( filename ) loc_file = sample_loc_file.replace( '.sample', '' ) - tool_data_path = os.path.abspath( trans.app.config.tool_data_path ) + tool_data_path = os.path.abspath( app.config.tool_data_path ) # It's ok to overwrite the .sample version of the file. shutil.copy( os.path.abspath( filename ), os.path.join( tool_data_path, sample_loc_file ) ) # Only create the .loc file if it does not yet exist. We don't @@ -2470,29 +2471,27 @@ id = trans.security.decode_id( id ) quota = trans.sa_session.query( trans.model.Quota ).get( id ) return quota -def handle_sample_tool_data_table_conf_file( trans, filename ): +def handle_sample_tool_data_table_conf_file( app, filename ): """ Parse the incoming filename and add new entries to the in-memory - trans.app.tool_data_tables dictionary as well as appending them - to the shed's tool_data_table_conf.xml file on disk. + app.tool_data_tables dictionary as well as appending them to the + shed's tool_data_table_conf.xml file on disk. """ - # Parse the incoming file and add new entries to the in-memory - # trans.app.tool_data_tables dictionary. error = False message = '' try: - new_table_elems = trans.app.tool_data_tables.add_new_entries_from_config_file( filename ) + new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( filename ) except Exception, e: message = str( e ) error = True if not error: # Add an entry to the end of the tool_data_table_conf.xml file. - tdt_config = "%s/tool_data_table_conf.xml" % trans.app.config.root + tdt_config = "%s/tool_data_table_conf.xml" % app.config.root if os.path.exists( tdt_config ): # Make a backup of the file since we're going to be changing it. today = date.today() backup_date = today.strftime( "%Y_%m_%d" ) - tdt_config_copy = '%s/tool_data_table_conf.xml_%s_backup' % ( trans.app.config.root, backup_date ) + tdt_config_copy = '%s/tool_data_table_conf.xml_%s_backup' % ( app.config.root, backup_date ) shutil.copy( os.path.abspath( tdt_config ), os.path.abspath( tdt_config_copy ) ) # Write each line of the tool_data_table_conf.xml file, except the last line to a temp file. 
fh = tempfile.NamedTemporaryFile( 'wb' ) diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 lib/galaxy/web/controllers/admin_toolshed.py --- a/lib/galaxy/web/controllers/admin_toolshed.py +++ b/lib/galaxy/web/controllers/admin_toolshed.py @@ -201,7 +201,7 @@ # Generate the metadata for the installed tool shed repository. It is imperative that # the installed repository is updated to the desired changeset_revision before metadata # is set because the process for setting metadata uses the repository files on disk. - metadata_dict = self.__generate_metadata( trans, relative_install_dir, repository_clone_url ) + metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url ) if 'datatypes_config' in metadata_dict: datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] ) # Load data types required by tools. @@ -210,22 +210,23 @@ repository_tools_tups = [] for tool_dict in metadata_dict[ 'tools' ]: relative_path = tool_dict[ 'tool_config' ] + guid = tool_dict[ 'guid' ] tool = trans.app.toolbox.load_tool( os.path.abspath( relative_path ) ) - repository_tools_tups.append( ( relative_path, tool ) ) + repository_tools_tups.append( ( relative_path, guid, tool ) ) if repository_tools_tups: sample_files = metadata_dict.get( 'sample_files', [] ) # Handle missing data table entries for tool parameters that are dynamically generated select lists. - repository_tools_tups = self.__handle_missing_data_table_entry( trans, tool_path, sample_files, repository_tools_tups ) + repository_tools_tups = handle_missing_data_table_entry( trans.app, tool_path, sample_files, repository_tools_tups ) # Handle missing index files for tool parameters that are dynamically generated select lists. - repository_tools_tups = self.__handle_missing_index_file( trans, tool_path, sample_files, repository_tools_tups ) + repository_tools_tups = handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups ) # Handle tools that use fabric scripts to install dependencies. - self.__handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups ) + handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups ) # Generate an in-memory tool conf section that includes the new tools. - new_tool_section = self.__generate_tool_panel_section( name, - repository_clone_url, - changeset_revision, - tool_section, - repository_tools_tups ) + new_tool_section = generate_tool_panel_section( name, + repository_clone_url, + changeset_revision, + tool_section, + repository_tools_tups ) # Create a temporary file to persist the in-memory tool section # TODO: Figure out how to do this in-memory using xml.etree. tmp_name = tempfile.NamedTemporaryFile().name @@ -243,18 +244,18 @@ except: pass # Append the new section to the shed_tool_config file. - self.__add_shed_tool_conf_entry( trans, shed_tool_conf, new_tool_section ) + add_shed_tool_conf_entry( trans.app, shed_tool_conf, new_tool_section ) if trans.app.toolbox_search.enabled: # If search support for tools is enabled, index the new installed tools. trans.app.toolbox_search = ToolBoxSearch( trans.app.toolbox ) # Add a new record to the tool_shed_repository table if one doesn't # already exist. If one exists but is marked deleted, undelete it. 
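The recurring trans -> app substitution in this hunk is the enabling refactor for the whole changeset: web controllers reach the application as trans.app, but the new InstallManager runs at server startup with only an app reference and no request context, so shared helpers must depend on app alone. Schematically (helper name hypothetical):

    import os

    def shed_helper( app, filename ):
        # Needs only the application object, so it can be invoked as
        # shed_helper( trans.app, f ) from a web controller and as
        # shed_helper( self.app, f ) from the InstallManager.
        return os.path.join( app.config.tool_data_path, os.path.basename( filename ) )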
- self.__create_or_undelete_tool_shed_repository( trans, - name, - description, - changeset_revision, - repository_clone_url, - metadata_dict ) + create_or_undelete_tool_shed_repository( trans.app, + name, + description, + changeset_revision, + repository_clone_url, + metadata_dict ) installed_repository_names.append( name ) else: tmp_stderr = open( tmp_name, 'rb' ) @@ -317,7 +318,7 @@ message = "The repository information has been updated." elif params.get( 'set_metadata_button', False ): repository_clone_url = self.__generate_clone_url( trans, repository ) - metadata_dict = self.__generate_metadata( trans, relative_install_dir, repository_clone_url ) + metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url ) if metadata_dict: repository.metadata = metadata_dict trans.sa_session.add( repository ) @@ -350,7 +351,7 @@ owner = params.get( 'owner', None ) changeset_revision = params.get( 'changeset_revision', None ) latest_changeset_revision = params.get( 'latest_changeset_revision', None ) - repository = get_repository_by_shed_name_owner_changeset_revision( trans, tool_shed_url, name, owner, changeset_revision ) + repository = get_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision ) if changeset_revision and latest_changeset_revision: if changeset_revision == latest_changeset_revision: message = "The cloned tool shed repository named '%s' is current (there are no updates available)." % name @@ -429,147 +430,6 @@ metadata=metadata, message=message, status=status ) - def __generate_metadata( self, trans, relative_install_dir, repository_clone_url ): - """ - Browse the repository files on disk to generate metadata. Since we are using disk files, it - is imperative that the repository is updated to the desired change set revision before metadata - is generated. - """ - metadata_dict = {} - sample_files = [] - datatypes_config = None - # Find datatypes_conf.xml if it exists. - for root, dirs, files in os.walk( relative_install_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name == 'datatypes_conf.xml': - relative_path = os.path.join( root, name ) - datatypes_config = os.path.abspath( relative_path ) - break - if datatypes_config: - metadata_dict[ 'datatypes_config' ] = relative_path - metadata_dict = self.__generate_datatypes_metadata( trans, datatypes_config, metadata_dict ) - # Find all special .sample files. - for root, dirs, files in os.walk( relative_install_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name.endswith( '.sample' ): - sample_files.append( os.path.join( root, name ) ) - if sample_files: - metadata_dict[ 'sample_files' ] = sample_files - # Find all tool configs and exported workflows. - for root, dirs, files in os.walk( relative_install_dir ): - if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0: - if '.hg' in dirs: - dirs.remove( '.hg' ) - for name in files: - # Find all tool configs. 
- if name != 'datatypes_conf.xml' and name.endswith( '.xml' ): - full_path = os.path.abspath( os.path.join( root, name ) ) - try: - tool = trans.app.toolbox.load_tool( full_path ) - except Exception, e: - tool = None - if tool is not None: - tool_config = os.path.join( root, name ) - metadata_dict = self.__generate_tool_metadata( trans, tool_config, tool, repository_clone_url, metadata_dict ) - # Find all exported workflows - elif name.endswith( '.ga' ): - relative_path = os.path.join( root, name ) - fp = open( relative_path, 'rb' ) - workflow_text = fp.read() - fp.close() - exported_workflow_dict = from_json_string( workflow_text ) - if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': - metadata_dict = self.__generate_workflow_metadata( trans, relative_path, exported_workflow_dict, metadata_dict ) - return metadata_dict - def __generate_datatypes_metadata( self, trans, datatypes_config, metadata_dict ): - """ - Update the received metadata_dict with changes that have been applied - to the received datatypes_config. - """ - # Parse datatypes_config. - tree = ElementTree.parse( datatypes_config ) - root = tree.getroot() - ElementInclude.include( root ) - repository_datatype_code_files = [] - datatype_files = root.find( 'datatype_files' ) - if datatype_files: - for elem in datatype_files.findall( 'datatype_file' ): - name = elem.get( 'name', None ) - repository_datatype_code_files.append( name ) - metadata_dict[ 'datatype_files' ] = repository_datatype_code_files - datatypes = [] - registration = root.find( 'registration' ) - if registration: - for elem in registration.findall( 'datatype' ): - extension = elem.get( 'extension', None ) - dtype = elem.get( 'type', None ) - mimetype = elem.get( 'mimetype', None ) - datatypes.append( dict( extension=extension, - dtype=dtype, - mimetype=mimetype ) ) - metadata_dict[ 'datatypes' ] = datatypes - return metadata_dict - def __generate_tool_metadata( self, trans, tool_config, tool, repository_clone_url, metadata_dict ): - """ - Update the received metadata_dict with changes that have been - applied to the received tool. - """ - # Generate the guid - guid = self.__generate_tool_guid( repository_clone_url, tool ) - # Handle tool.requirements. - tool_requirements = [] - for tr in tool.requirements: - name=tr.name - type=tr.type - if type == 'fabfile': - version = None - fabfile = tr.fabfile - method = tr.method - else: - version = tr.version - fabfile = None - method = None - requirement_dict = dict( name=name, - type=type, - version=version, - fabfile=fabfile, - method=method ) - tool_requirements.append( requirement_dict ) - # Handle tool.tests. - tool_tests = [] - if tool.tests: - for ttb in tool.tests: - test_dict = dict( name=ttb.name, - required_files=ttb.required_files, - inputs=ttb.inputs, - outputs=ttb.outputs ) - tool_tests.append( test_dict ) - tool_dict = dict( id=tool.id, - guid=guid, - name=tool.name, - version=tool.version, - description=tool.description, - version_string_cmd = tool.version_string_cmd, - tool_config=tool_config, - requirements=tool_requirements, - tests=tool_tests ) - if 'tools' in metadata_dict: - metadata_dict[ 'tools' ].append( tool_dict ) - else: - metadata_dict[ 'tools' ] = [ tool_dict ] - return metadata_dict - def __generate_workflow_metadata( self, trans, relative_path, exported_workflow_dict, metadata_dict ): - """ - Update the received metadata_dict with changes that have been applied - to the received exported_workflow_dict. 
Store everything in the database. - """ - if 'workflows' in metadata_dict: - metadata_dict[ 'workflows' ].append( ( relative_path, exported_workflow_dict ) ) - else: - metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ] - return metadata_dict def __get_relative_install_dir( self, trans, repository ): # Get the directory where the repository is install. tool_shed = self.__clean_tool_shed_url( repository.tool_shed ) @@ -583,87 +443,6 @@ if os.path.isdir( relative_install_dir ): break return relative_install_dir - def __handle_missing_data_table_entry( self, trans, tool_path, sample_files, repository_tools_tups ): - # Inspect each tool to see if any have input parameters that are dynamically - # generated select lists that require entries in the tool_data_table_conf.xml file. - missing_data_table_entry = False - for index, repository_tools_tup in enumerate( repository_tools_tups ): - tup_path, repository_tool = repository_tools_tup - if repository_tool.params_with_missing_data_table_entry: - missing_data_table_entry = True - break - if missing_data_table_entry: - # The repository must contain a tool_data_table_conf.xml.sample file that includes - # all required entries for all tools in the repository. - for sample_file in sample_files: - head, tail = os.path.split( sample_file ) - if tail == 'tool_data_table_conf.xml.sample': - break - error, correction_msg = handle_sample_tool_data_table_conf_file( trans, sample_file ) - if error: - # TODO: Do more here than logging an exception. - log.debug( exception_msg ) - # Reload the tool into the local list of repository_tools_tups. - repository_tool = trans.app.toolbox.load_tool( os.path.join( tool_path, tup_path ) ) - repository_tools_tups[ index ] = ( tup_path, repository_tool ) - return repository_tools_tups - def __handle_missing_index_file( self, trans, tool_path, sample_files, repository_tools_tups ): - # Inspect each tool to see if it has any input parameters that - # are dynamically generated select lists that depend on a .loc file. - missing_files_handled = [] - for index, repository_tools_tup in enumerate( repository_tools_tups ): - tup_path, repository_tool = repository_tools_tup - params_with_missing_index_file = repository_tool.params_with_missing_index_file - for param in params_with_missing_index_file: - options = param.options - missing_head, missing_tail = os.path.split( options.missing_index_file ) - if missing_tail not in missing_files_handled: - # The repository must contain the required xxx.loc.sample file. - for sample_file in sample_files: - sample_head, sample_tail = os.path.split( sample_file ) - if sample_tail == '%s.sample' % missing_tail: - copy_sample_loc_file( trans, sample_file ) - if options.tool_data_table and options.tool_data_table.missing_index_file: - options.tool_data_table.handle_found_index_file( options.missing_index_file ) - missing_files_handled.append( missing_tail ) - break - # Reload the tool into the local list of repository_tools_tups. - repository_tool = trans.app.toolbox.load_tool( os.path.join( tool_path, tup_path ) ) - repository_tools_tups[ index ] = ( tup_path, repository_tool ) - return repository_tools_tups - def __handle_tool_dependencies( self, current_working_dir, repo_files_dir, repository_tools_tups ): - # Inspect each tool to see if it includes a "requirement" that refers to a fabric - # script. For those that do, execute the fabric script to install tool dependencies. 
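The 'fabfile' requirement type handled here (and by the module-level handle_tool_dependencies() later in this diff) points at a fabric script shipped inside the repository, invoked as fab -f <fabfile> <method>. A hypothetical fabfile of the expected shape -- the install_bowtie name comes from the code comment below, the commands are illustrative:

    # fabfile.py, run as: fab -f fabfile.py install_bowtie
    from fabric.api import local

    def install_bowtie():
        # Download and build the binary dependency required by the tool.
        local( 'wget http://example.org/downloads/bowtie.tar.gz' )
        local( 'tar xzf bowtie.tar.gz' )
        local( 'cd bowtie && make' )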
- for index, repository_tools_tup in enumerate( repository_tools_tups ): - tup_path, repository_tool = repository_tools_tup - for requirement in repository_tool.requirements: - if requirement.type == 'fabfile': - log.debug( 'Executing fabric script to install dependencies for tool "%s"...' % repository_tool.name ) - fabfile = requirement.fabfile - method = requirement.method - # Find the relative path to the fabfile. - relative_fabfile_path = None - for root, dirs, files in os.walk( repo_files_dir ): - for name in files: - if name == fabfile: - relative_fabfile_path = os.path.join( root, name ) - break - if relative_fabfile_path: - # cmd will look something like: fab -f fabfile.py install_bowtie - cmd = 'fab -f %s %s' % ( relative_fabfile_path, method ) - tmp_name = tempfile.NamedTemporaryFile().name - tmp_stderr = open( tmp_name, 'wb' ) - os.chdir( repo_files_dir ) - proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() ) - returncode = proc.wait() - os.chdir( current_working_dir ) - tmp_stderr.close() - if returncode != 0: - # TODO: do something more here than logging the problem. - tmp_stderr = open( tmp_name, 'rb' ) - error = tmp_stderr.read() - tmp_stderr.close() - log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) ) def __load_datatypes( self, trans, datatypes_config, relative_intall_dir ): imported_module = None # Parse datatypes_config. @@ -708,84 +487,11 @@ except Exception, e: log.debug( "Exception importing datatypes code file included in installed repository: %s" % str( e ) ) trans.app.datatypes_registry.load_datatypes( root_dir=trans.app.config.root, config=datatypes_config, imported_module=imported_module ) - def __create_or_undelete_tool_shed_repository( self, trans, name, description, changeset_revision, repository_clone_url, metadata_dict ): - tmp_url = self.__clean_repository_clone_url( repository_clone_url ) - tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) - owner = self.__get_repository_owner( tmp_url ) - includes_datatypes = 'datatypes_config' in metadata_dict - flush_needed = False - tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( trans, tool_shed, name, owner, changeset_revision ) - if tool_shed_repository: - if tool_shed_repository.deleted: - tool_shed_repository.deleted = False - # Reset includes_datatypes in case metadata changed since last installed. - tool_shed_repository.includes_datatypes = includes_datatypes - flush_needed = True - else: - tool_shed_repository = trans.model.ToolShedRepository( tool_shed=tool_shed, - name=name, - description=description, - owner=owner, - changeset_revision=changeset_revision, - metadata=metadata_dict, - includes_datatypes=includes_datatypes ) - flush_needed = True - if flush_needed: - trans.sa_session.add( tool_shed_repository ) - trans.sa_session.flush() - def __add_shed_tool_conf_entry( self, trans, shed_tool_conf, new_tool_section ): - # Add an entry in the shed_tool_conf file. An entry looks something like: - # <section name="Filter and Sort" id="filter"> - # <tool file="filter/filtering.xml" guid="toolshed.g2.bx.psu.edu/repos/test/filter/1.0.2"/> - # </section> - # Make a backup of the hgweb.config file since we're going to be changing it. 
- if not os.path.exists( shed_tool_conf ): - output = open( shed_tool_conf, 'w' ) - output.write( '<?xml version="1.0"?>\n' ) - output.write( '<toolbox tool_path="%s">\n' % tool_path ) - output.write( '</toolbox>\n' ) - output.close() - self.__make_shed_tool_conf_copy( trans, shed_tool_conf ) - tmp_fd, tmp_fname = tempfile.mkstemp() - new_shed_tool_conf = open( tmp_fname, 'wb' ) - for i, line in enumerate( open( shed_tool_conf ) ): - if line.startswith( '</toolbox>' ): - # We're at the end of the original config file, so add our entry. - new_shed_tool_conf.write( new_tool_section ) - new_shed_tool_conf.write( line ) - else: - new_shed_tool_conf.write( line ) - new_shed_tool_conf.close() - shutil.move( tmp_fname, os.path.abspath( shed_tool_conf ) ) - def __make_shed_tool_conf_copy( self, trans, shed_tool_conf ): - # Make a backup of the shed_tool_conf file. - today = date.today() - backup_date = today.strftime( "%Y_%m_%d" ) - shed_tool_conf_copy = '%s/%s_%s_backup' % ( trans.app.config.root, shed_tool_conf, backup_date ) - shutil.copy( os.path.abspath( shed_tool_conf ), os.path.abspath( shed_tool_conf_copy ) ) def __clean_tool_shed_url( self, tool_shed_url ): if tool_shed_url.find( ':' ) > 0: # Eliminate the port, if any, since it will result in an invalid directory name. return tool_shed_url.split( ':' )[ 0 ] return tool_shed_url.rstrip( '/' ) - def __clean_repository_clone_url( self, repository_clone_url ): - if repository_clone_url.find( '@' ) > 0: - # We have an url that includes an authenticated user, something like: - # http://test@bx.psu.edu:9009/repos/some_username/column - items = repository_clone_url.split( '@' ) - tmp_url = items[ 1 ] - elif repository_clone_url.find( '//' ) > 0: - # We have an url that includes only a protocol, something like: - # http://bx.psu.edu:9009/repos/some_username/column - items = repository_clone_url.split( '//' ) - tmp_url = items[ 1 ] - else: - tmp_url = repository_clone_url - return tmp_url - def __get_repository_owner( self, cleaned_repository_url ): - items = cleaned_repository_url.split( 'repos' ) - repo_path = items[ 1 ] - return repo_path.lstrip( '/' ).split( '/' )[ 0 ] def __generate_tool_path( self, repository_clone_url, changeset_revision ): """ Generate a tool path that guarantees repositories with the same name will always be installed @@ -793,7 +499,7 @@ <tool shed url>/repos/<repository owner>/<repository name>/<changeset revision> http://test@bx.psu.edu:9009/repos/test/filter """ - tmp_url = self.__clean_repository_clone_url( repository_clone_url ) + tmp_url = clean_repository_clone_url( repository_clone_url ) # Now tmp_url is something like: bx.psu.edu:9009/repos/some_username/column items = tmp_url.split( 'repos' ) tool_shed_url = items[ 0 ] @@ -804,36 +510,6 @@ """Generate the URL for cloning a repository.""" tool_shed_url = get_url_from_repository_tool_shed( trans, repository ) return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repository.name ) - def __generate_tool_guid( self, repository_clone_url, tool ): - """ - Generate a guid for the installed tool. It is critical that this guid matches the guid for - the tool in the Galaxy tool shed from which it is being installed. 
The form of the guid is - <tool shed host>/repos/<repository owner>/<repository name>/<tool id>/<tool version> - """ - tmp_url = self.__clean_repository_clone_url( repository_clone_url ) - return '%s/%s/%s' % ( tmp_url, tool.id, tool.version ) - def __generate_tool_panel_section( self, repository_name, repository_clone_url, changeset_revision, tool_section, repository_tools_tups ): - """ - Write an in-memory tool panel section so we can load it into the tool panel and then - append it to the appropriate shed tool config. - TODO: re-write using ElementTree. - """ - tmp_url = self.__clean_repository_clone_url( repository_clone_url ) - section_str = '' - section_str += ' <section name="%s" id="%s">\n' % ( tool_section.name, tool_section.id ) - for repository_tool_tup in repository_tools_tups: - tool_file_path, tool = repository_tool_tup - guid = self.__generate_tool_guid( repository_clone_url, tool ) - section_str += ' <tool file="%s" guid="%s">\n' % ( tool_file_path, guid ) - section_str += ' <tool_shed>%s</tool_shed>\n' % tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) - section_str += ' <repository_name>%s</repository_name>\n' % repository_name - section_str += ' <repository_owner>%s</repository_owner>\n' % self.__get_repository_owner( tmp_url ) - section_str += ' <changeset_revision>%s</changeset_revision>\n' % changeset_revision - section_str += ' <id>%s</id>\n' % tool.id - section_str += ' <version>%s</version>\n' % tool.version - section_str += ' </tool>\n' - section_str += ' </section>\n' - return section_str ## ---- Utility methods ------------------------------------------------------- @@ -841,7 +517,11 @@ """Build a SelectField whose options are the keys in trans.app.toolbox.shed_tool_confs.""" options = [] for shed_tool_conf_filename, tool_path in trans.app.toolbox.shed_tool_confs.items(): - options.append( ( shed_tool_conf_filename.lstrip( './' ), shed_tool_conf_filename ) ) + if shed_tool_conf_filename.startswith( './' ): + option_label = shed_tool_conf_filename.replace( './', '', 1 ) + else: + option_label = shed_tool_conf_filename + options.append( ( option_label, shed_tool_conf_filename ) ) select_field = SelectField( name='shed_tool_conf' ) for option_tup in options: select_field.add_option( option_tup[0], option_tup[1] ) @@ -865,15 +545,17 @@ trans.model.ToolShedRepository.table.c.owner == owner, trans.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ .first() -def get_repository_by_shed_name_owner_changeset_revision( trans, tool_shed, name, owner, changeset_revision ): +def get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ): + # This method is used by the InstallManager, which does not have access to trans. 
+ sa_session = app.model.context.current if tool_shed.find( '//' ) > 0: tool_shed = tool_shed.split( '//' )[1] - return trans.sa_session.query( trans.model.ToolShedRepository ) \ - .filter( and_( trans.model.ToolShedRepository.table.c.tool_shed == tool_shed, - trans.model.ToolShedRepository.table.c.name == name, - trans.model.ToolShedRepository.table.c.owner == owner, - trans.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ - .first() + return sa_session.query( app.model.ToolShedRepository ) \ + .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed, + app.model.ToolShedRepository.table.c.name == name, + app.model.ToolShedRepository.table.c.owner == owner, + app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ + .first() def get_url_from_repository_tool_shed( trans, repository ): # The stored value of repository.tool_shed is something like: # toolshed.g2.bx.psu.edu @@ -887,3 +569,347 @@ # The tool shed from which the repository was originally # installed must no longer be configured in tool_sheds_conf.xml. return None +def generate_tool_panel_section( repository_name, repository_clone_url, changeset_revision, tool_section, repository_tools_tups, owner='' ): + """ + Write an in-memory tool panel section so we can load it into the tool panel and then + append it to the appropriate shed tool config. + TODO: re-write using ElementTree. + """ + tmp_url = clean_repository_clone_url( repository_clone_url ) + if not owner: + owner = get_repository_owner( tmp_url ) + section_str = '' + section_str += ' <section name="%s" id="%s">\n' % ( tool_section.name, tool_section.id ) + for repository_tool_tup in repository_tools_tups: + tool_file_path, guid, tool = repository_tool_tup + section_str += ' <tool file="%s" guid="%s">\n' % ( tool_file_path, guid ) + section_str += ' <tool_shed>%s</tool_shed>\n' % tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) + section_str += ' <repository_name>%s</repository_name>\n' % repository_name + section_str += ' <repository_owner>%s</repository_owner>\n' % owner + section_str += ' <changeset_revision>%s</changeset_revision>\n' % changeset_revision + section_str += ' <id>%s</id>\n' % tool.id + section_str += ' <version>%s</version>\n' % tool.version + section_str += ' </tool>\n' + section_str += ' </section>\n' + return section_str +def get_repository_owner( cleaned_repository_url ): + items = cleaned_repository_url.split( 'repos' ) + repo_path = items[ 1 ] + if repo_path.startswith( '/' ): + repo_path = repo_path.replace( '/', '', 1 ) + return repo_path.lstrip( '/' ).split( '/' )[ 0 ] +def generate_tool_guid( repository_clone_url, tool ): + """ + Generate a guid for the installed tool. It is critical that this guid matches the guid for + the tool in the Galaxy tool shed from which it is being installed. 
The form of the guid is + <tool shed host>/repos/<repository owner>/<repository name>/<tool id>/<tool version> + """ + tmp_url = clean_repository_clone_url( repository_clone_url ) + return '%s/%s/%s' % ( tmp_url, tool.id, tool.version ) +def clean_repository_clone_url( repository_clone_url ): + if repository_clone_url.find( '@' ) > 0: + # We have an url that includes an authenticated user, something like: + # http://test@bx.psu.edu:9009/repos/some_username/column + items = repository_clone_url.split( '@' ) + tmp_url = items[ 1 ] + elif repository_clone_url.find( '//' ) > 0: + # We have an url that includes only a protocol, something like: + # http://bx.psu.edu:9009/repos/some_username/column + items = repository_clone_url.split( '//' ) + tmp_url = items[ 1 ] + else: + tmp_url = repository_clone_url + return tmp_url +def generate_metadata( toolbox, relative_install_dir, repository_clone_url ): + """ + Browse the repository files on disk to generate metadata. Since we are using disk files, it + is imperative that the repository is updated to the desired change set revision before metadata + is generated. This method is used by the InstallManager, which does not have access to trans. + """ + metadata_dict = {} + sample_files = [] + datatypes_config = None + # Find datatypes_conf.xml if it exists. + for root, dirs, files in os.walk( relative_install_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name == 'datatypes_conf.xml': + relative_path = os.path.join( root, name ) + datatypes_config = os.path.abspath( relative_path ) + break + if datatypes_config: + metadata_dict[ 'datatypes_config' ] = relative_path + metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict ) + # Find all special .sample files. + for root, dirs, files in os.walk( relative_install_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name.endswith( '.sample' ): + sample_files.append( os.path.join( root, name ) ) + if sample_files: + metadata_dict[ 'sample_files' ] = sample_files + # Find all tool configs and exported workflows. + for root, dirs, files in os.walk( relative_install_dir ): + if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0: + if '.hg' in dirs: + dirs.remove( '.hg' ) + for name in files: + # Find all tool configs. + if name != 'datatypes_conf.xml' and name.endswith( '.xml' ): + full_path = os.path.abspath( os.path.join( root, name ) ) + try: + tool = toolbox.load_tool( full_path ) + except Exception, e: + tool = None + if tool is not None: + tool_config = os.path.join( root, name ) + metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ) + # Find all exported workflows + elif name.endswith( '.ga' ): + relative_path = os.path.join( root, name ) + fp = open( relative_path, 'rb' ) + workflow_text = fp.read() + fp.close() + exported_workflow_dict = from_json_string( workflow_text ) + if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': + metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ) + return metadata_dict +def generate_datatypes_metadata( datatypes_config, metadata_dict ): + """ + Update the received metadata_dict with changes that have been applied + to the received datatypes_config. This method is used by the InstallManager, + which does not have access to trans. + """ + # Parse datatypes_config. 
+ tree = ElementTree.parse( datatypes_config ) + root = tree.getroot() + ElementInclude.include( root ) + repository_datatype_code_files = [] + datatype_files = root.find( 'datatype_files' ) + if datatype_files: + for elem in datatype_files.findall( 'datatype_file' ): + name = elem.get( 'name', None ) + repository_datatype_code_files.append( name ) + metadata_dict[ 'datatype_files' ] = repository_datatype_code_files + datatypes = [] + registration = root.find( 'registration' ) + if registration: + for elem in registration.findall( 'datatype' ): + extension = elem.get( 'extension', None ) + dtype = elem.get( 'type', None ) + mimetype = elem.get( 'mimetype', None ) + datatypes.append( dict( extension=extension, + dtype=dtype, + mimetype=mimetype ) ) + metadata_dict[ 'datatypes' ] = datatypes + return metadata_dict +def generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ): + """ + Update the received metadata_dict with changes that have been + applied to the received tool. This method is used by the InstallManager, + which does not have access to trans. + """ + # Generate the guid + guid = generate_tool_guid( repository_clone_url, tool ) + # Handle tool.requirements. + tool_requirements = [] + for tr in tool.requirements: + name=tr.name + type=tr.type + if type == 'fabfile': + version = None + fabfile = tr.fabfile + method = tr.method + else: + version = tr.version + fabfile = None + method = None + requirement_dict = dict( name=name, + type=type, + version=version, + fabfile=fabfile, + method=method ) + tool_requirements.append( requirement_dict ) + # Handle tool.tests. + tool_tests = [] + if tool.tests: + for ttb in tool.tests: + test_dict = dict( name=ttb.name, + required_files=ttb.required_files, + inputs=ttb.inputs, + outputs=ttb.outputs ) + tool_tests.append( test_dict ) + tool_dict = dict( id=tool.id, + guid=guid, + name=tool.name, + version=tool.version, + description=tool.description, + version_string_cmd = tool.version_string_cmd, + tool_config=tool_config, + requirements=tool_requirements, + tests=tool_tests ) + if 'tools' in metadata_dict: + metadata_dict[ 'tools' ].append( tool_dict ) + else: + metadata_dict[ 'tools' ] = [ tool_dict ] + return metadata_dict +def generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ): + """ + Update the received metadata_dict with changes that have been applied + to the received exported_workflow_dict. Store everything in the database. + This method is used by the InstallManager, which does not have access to trans. + """ + if 'workflows' in metadata_dict: + metadata_dict[ 'workflows' ].append( ( relative_path, exported_workflow_dict ) ) + else: + metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ] + return metadata_dict +def handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups ): + """ + Inspect each tool to see if any have input parameters that are dynamically + generated select lists that require entries in the tool_data_table_conf.xml file. + This method is used by the InstallManager, which does not have access to trans. 
+ """ + missing_data_table_entry = False + for index, repository_tools_tup in enumerate( repository_tools_tups ): + tup_path, guid, repository_tool = repository_tools_tup + if repository_tool.params_with_missing_data_table_entry: + missing_data_table_entry = True + break + if missing_data_table_entry: + # The repository must contain a tool_data_table_conf.xml.sample file that includes + # all required entries for all tools in the repository. + for sample_file in sample_files: + head, tail = os.path.split( sample_file ) + if tail == 'tool_data_table_conf.xml.sample': + break + error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_file ) + if error: + # TODO: Do more here than logging an exception. + log.debug( exception_msg ) + # Reload the tool into the local list of repository_tools_tups. + repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ) ) + repository_tools_tups[ index ] = ( tup_path, repository_tool ) + return repository_tools_tups +def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups ): + """ + Inspect each tool to see if it has any input parameters that + are dynamically generated select lists that depend on a .loc file. + This method is used by the InstallManager, which does not have access to trans. + """ + missing_files_handled = [] + for index, repository_tools_tup in enumerate( repository_tools_tups ): + tup_path, guid, repository_tool = repository_tools_tup + params_with_missing_index_file = repository_tool.params_with_missing_index_file + for param in params_with_missing_index_file: + options = param.options + missing_head, missing_tail = os.path.split( options.missing_index_file ) + if missing_tail not in missing_files_handled: + # The repository must contain the required xxx.loc.sample file. + for sample_file in sample_files: + sample_head, sample_tail = os.path.split( sample_file ) + if sample_tail == '%s.sample' % missing_tail: + copy_sample_loc_file( app, sample_file ) + if options.tool_data_table and options.tool_data_table.missing_index_file: + options.tool_data_table.handle_found_index_file( options.missing_index_file ) + missing_files_handled.append( missing_tail ) + break + # Reload the tool into the local list of repository_tools_tups. + repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ) ) + repository_tools_tups[ index ] = ( tup_path, guid, repository_tool ) + return repository_tools_tups +def handle_tool_dependencies( current_working_dir, repo_files_dir, repository_tools_tups ): + """ + Inspect each tool to see if it includes a "requirement" that refers to a fabric + script. For those that do, execute the fabric script to install tool dependencies. + This method is used by the InstallManager, which does not have access to trans. + """ + for index, repository_tools_tup in enumerate( repository_tools_tups ): + tup_path, guid, repository_tool = repository_tools_tup + for requirement in repository_tool.requirements: + if requirement.type == 'fabfile': + log.debug( 'Executing fabric script to install dependencies for tool "%s"...' % repository_tool.name ) + fabfile = requirement.fabfile + method = requirement.method + # Find the relative path to the fabfile. 
+def handle_tool_dependencies( current_working_dir, repo_files_dir, repository_tools_tups ):
+    """
+    Inspect each tool to see if it includes a "requirement" that refers to a fabric
+    script.  For those that do, execute the fabric script to install tool dependencies.
+    This method is used by the InstallManager, which does not have access to trans.
+    """
+    for index, repository_tools_tup in enumerate( repository_tools_tups ):
+        tup_path, guid, repository_tool = repository_tools_tup
+        for requirement in repository_tool.requirements:
+            if requirement.type == 'fabfile':
+                log.debug( 'Executing fabric script to install dependencies for tool "%s"...' % repository_tool.name )
+                fabfile = requirement.fabfile
+                method = requirement.method
+                # Find the relative path to the fabfile.
+                relative_fabfile_path = None
+                for root, dirs, files in os.walk( repo_files_dir ):
+                    for name in files:
+                        if name == fabfile:
+                            relative_fabfile_path = os.path.join( root, name )
+                            break
+                if relative_fabfile_path:
+                    # cmd will look something like: fab -f fabfile.py install_bowtie
+                    cmd = 'fab -f %s %s' % ( relative_fabfile_path, method )
+                    tmp_name = tempfile.NamedTemporaryFile().name
+                    tmp_stderr = open( tmp_name, 'wb' )
+                    os.chdir( repo_files_dir )
+                    proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
+                    returncode = proc.wait()
+                    os.chdir( current_working_dir )
+                    tmp_stderr.close()
+                    if returncode != 0:
+                        # TODO: do something more here than logging the problem.
+                        tmp_stderr = open( tmp_name, 'rb' )
+                        error = tmp_stderr.read()
+                        tmp_stderr.close()
+                        log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) )
+def add_shed_tool_conf_entry( app, shed_tool_conf, new_tool_section ):
+    """
+    Add an entry in the shed_tool_conf file.  An entry looks something like:
+    <section name="Filter and Sort" id="filter">
+        <tool file="filter/filtering.xml" guid="toolshed.g2.bx.psu.edu/repos/test/filter/1.0.2"/>
+    </section>
+    This method is used by the InstallManager, which does not have access to trans.
+    """
+    # Create the shed_tool_conf file with an empty toolbox if it does not yet exist.
+    if not os.path.exists( shed_tool_conf ):
+        output = open( shed_tool_conf, 'w' )
+        output.write( '<?xml version="1.0"?>\n' )
+        output.write( '<toolbox tool_path="%s">\n' % tool_path )
+        output.write( '</toolbox>\n' )
+        output.close()
+    # Make a backup of the shed_tool_conf file since we're going to be changing it.
+    today = date.today()
+    backup_date = today.strftime( "%Y_%m_%d" )
+    shed_tool_conf_copy = '%s/%s_%s_backup' % ( app.config.root, shed_tool_conf, backup_date )
+    shutil.copy( os.path.abspath( shed_tool_conf ), os.path.abspath( shed_tool_conf_copy ) )
+    tmp_fd, tmp_fname = tempfile.mkstemp()
+    new_shed_tool_conf = open( tmp_fname, 'wb' )
+    for i, line in enumerate( open( shed_tool_conf ) ):
+        if line.startswith( '</toolbox>' ):
+            # We're at the end of the original config file, so add our entry.
+            new_shed_tool_conf.write( new_tool_section )
+            new_shed_tool_conf.write( line )
+        else:
+            new_shed_tool_conf.write( line )
+    new_shed_tool_conf.close()
+    shutil.move( tmp_fname, os.path.abspath( shed_tool_conf ) )
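The new_tool_section string is built by the caller (the InstallManager, not shown in this excerpt). A hypothetical helper that produces an entry shaped like the docstring example above:

def build_tool_section( section_name, section_id, tool_file, guid ):
    # Hypothetical sketch; the InstallManager assembles this string itself.
    return '    <section name="%s" id="%s">\n' \
           '        <tool file="%s" guid="%s"/>\n' \
           '    </section>\n' % ( section_name, section_id, tool_file, guid )

# Example:
# build_tool_section( 'Filter and Sort', 'filter', 'filter/filtering.xml',
#                     'toolshed.g2.bx.psu.edu/repos/test/filter/1.0.2' )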
+def create_or_undelete_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='' ):
+    # This method is used by the InstallManager, which does not have access to trans.
+    sa_session = app.model.context.current
+    tmp_url = clean_repository_clone_url( repository_clone_url )
+    tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
+    if not owner:
+        owner = get_repository_owner( tmp_url )
+    includes_datatypes = 'datatypes_config' in metadata_dict
+    flush_needed = False
+    tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision )
+    if tool_shed_repository:
+        if tool_shed_repository.deleted:
+            tool_shed_repository.deleted = False
+            # Reset includes_datatypes in case metadata changed since last installed.
+            tool_shed_repository.includes_datatypes = includes_datatypes
+            flush_needed = True
+    else:
+        tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed,
+                                                             name=name,
+                                                             description=description,
+                                                             owner=owner,
+                                                             changeset_revision=changeset_revision,
+                                                             metadata=metadata_dict,
+                                                             includes_datatypes=includes_datatypes )
+        flush_needed = True
+    if flush_needed:
+        sa_session.add( tool_shed_repository )
+        sa_session.flush()
diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -186,7 +186,7 @@
             head, tail = os.path.split( sample_file )
             if tail == 'tool_data_table_conf.xml.sample':
                 sample_found = True
-                error, correction_msg = handle_sample_tool_data_table_conf_file( trans, sample_file )
+                error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, sample_file )
                 if error:
                     can_set_metadata = False
                     invalid_files.append( ( tail, correction_msg ) )
@@ -207,7 +207,7 @@
                 for sample_file in sample_files:
                     sample_head, sample_tail = os.path.split( sample_file )
                     if sample_tail == '%s.sample' % index_tail:
-                        copy_sample_loc_file( trans, sample_file )
+                        copy_sample_loc_file( trans.app, sample_file )
                         options.index_file = index_tail
                         options.missing_index_file = None
                         if options.tool_data_table:
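The changes in this file and in upload.py below all follow one pattern: shared helpers that used to take the request context (trans) now take the application object, so the InstallManager, which has an app but no trans, can call them too. Schematically, with the body reduced to the essentials:

def handle_sample_tool_data_table_conf_file( app, filename ):  # formerly ( trans, filename )
    # Only application-level state is touched, so no request context is needed.
    tool_data_tables = app.tool_data_tables
    # ...parse filename and add its entries to tool_data_tables...
    return False, ''  # ( error, correction_msg )

# Callers:
#   web controller:  handle_sample_tool_data_table_conf_file( trans.app, full_path )
#   InstallManager:  handle_sample_tool_data_table_conf_file( self.app, full_path )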
diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -129,13 +129,13 @@
                     # file is being uploaded by parsing the file and adding new entries
                     # to the in-memory trans.app.tool_data_tables dictionary as well as
                     # appending them to the shed's tool_data_table_conf.xml file on disk.
-                    error, error_message = handle_sample_tool_data_table_conf_file( trans, full_path )
+                    error, error_message = handle_sample_tool_data_table_conf_file( trans.app, full_path )
                     if error:
                         message = '%s<br/>%s' % ( message, error_message )
                 if full_path.endswith( '.loc.sample' ):
                     # Handle the special case where a xxx.loc.sample file is
                     # being uploaded by copying it to ~/tool-data/xxx.loc.
-                    copy_sample_loc_file( trans, full_path )
+                    copy_sample_loc_file( trans.app, full_path )
                 # See if the content of the change set was valid.
                 admin_only = len( repository.downloadable_revisions ) != 1
                 handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
@@ -267,7 +267,7 @@
                 if filename_in_archive.endswith( '.loc.sample' ):
                     # Handle the special case where a xxx.loc.sample file is
                     # being uploaded by copying it to ~/tool-data/xxx.loc.
-                    copy_sample_loc_file( trans, filename_in_archive )
+                    copy_sample_loc_file( trans.app, filename_in_archive )
                 try:
                     commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
                 except Exception, e:
diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 templates/user/info.mako
--- a/templates/user/info.mako
+++ b/templates/user/info.mako
@@ -22,12 +22,14 @@
     <div class="form-row">
         <label>Public name:</label>
         %if webapp == 'community':
-            ${username}
             %if user.active_repositories:
+                <input type="hidden" name="username" value="${username}"/>
+                ${username}
                 <div class="toolParamHelp" style="clear: both;">
                     You cannot change your public name after you have created a repository in this tool shed.
                 </div>
             %else:
+                <input type="text" name="username" size="40" value="${username}"/>
                 <div class="toolParamHelp" style="clear: both;">
                     Your public name provides a means of identifying you publicly within this tool shed. Public
                     names must be at least four characters in length and contain only lower-case letters, numbers,
file="emboss_5/emboss_dotmatcher.xml" id="EMBOSS: dotmatcher24" version="5.0.0" /> + <tool file="emboss_5/emboss_dotpath.xml" id="EMBOSS: dotpath25" version="5.0.0" /> + <tool file="emboss_5/emboss_dottup.xml" id="EMBOSS: dottup26" version="5.0.0" /> + <tool file="emboss_5/emboss_dreg.xml" id="EMBOSS: dreg27" version="5.0.0" /> + <tool file="emboss_5/emboss_einverted.xml" id="EMBOSS: einverted28" version="5.0.0" /> + <tool file="emboss_5/emboss_epestfind.xml" id="EMBOSS: epestfind29" version="5.0.0" /> + <tool file="emboss_5/emboss_equicktandem.xml" id="EMBOSS: equicktandem31" version="5.0.0" /> + <tool file="emboss_5/emboss_est2genome.xml" id="EMBOSS: est2genome32" version="5.0.0" /> + <tool file="emboss_5/emboss_etandem.xml" id="EMBOSS: etandem33" version="5.0.0" /> + <tool file="emboss_5/emboss_extractfeat.xml" id="EMBOSS: extractfeat34" version="5.0.0" /> + <tool file="emboss_5/emboss_extractseq.xml" id="EMBOSS: extractseq35" version="5.0.0" /> + <tool file="emboss_5/emboss_freak.xml" id="EMBOSS: freak36" version="5.0.0" /> + <tool file="emboss_5/emboss_fuzznuc.xml" id="EMBOSS: fuzznuc37" version="5.0.0" /> + <tool file="emboss_5/emboss_fuzzpro.xml" id="EMBOSS: fuzzpro38" version="5.0.0" /> + <tool file="emboss_5/emboss_fuzztran.xml" id="EMBOSS: fuzztran39" version="5.0.0" /> + <tool file="emboss_5/emboss_garnier.xml" id="EMBOSS: garnier40" version="5.0.0" /> + <tool file="emboss_5/emboss_geecee.xml" id="EMBOSS: geecee41" version="5.0.0" /> + <tool file="emboss_5/emboss_getorf.xml" id="EMBOSS: getorf42" version="5.0.0" /> + <tool file="emboss_5/emboss_helixturnhelix.xml" id="EMBOSS: helixturnhelix43" version="5.0.0" /> + <tool file="emboss_5/emboss_hmoment.xml" id="EMBOSS: hmoment44" version="5.0.0" /> + <tool file="emboss_5/emboss_iep.xml" id="EMBOSS: iep45" version="5.0.0" /> + <tool file="emboss_5/emboss_infoseq.xml" id="EMBOSS: infoseq46" version="5.0.0" /> + <tool file="emboss_5/emboss_isochore.xml" id="EMBOSS: isochore47" version="5.0.0" /> + <tool file="emboss_5/emboss_lindna.xml" id="EMBOSS: lindna48" version="5.0.0" /> + <tool file="emboss_5/emboss_marscan.xml" id="EMBOSS: marscan49" version="5.0.0" /> + <tool file="emboss_5/emboss_maskfeat.xml" id="EMBOSS: maskfeat50" version="5.0.0" /> + <tool file="emboss_5/emboss_maskseq.xml" id="EMBOSS: maskseq51" version="5.0.0" /> + <tool file="emboss_5/emboss_matcher.xml" id="EMBOSS: matcher52" version="5.0.0" /> + <tool file="emboss_5/emboss_megamerger.xml" id="EMBOSS: megamerger53" version="5.0.0" /> + <tool file="emboss_5/emboss_merger.xml" id="EMBOSS: merger54" version="5.0.0" /> + <tool file="emboss_5/emboss_msbar.xml" id="EMBOSS: msbar55" version="5.0.0" /> + <tool file="emboss_5/emboss_needle.xml" id="EMBOSS: needle56" version="5.0.0" /> + <tool file="emboss_5/emboss_newcpgreport.xml" id="EMBOSS: newcpgreport57" version="5.0.0" /> + <tool file="emboss_5/emboss_newcpgseek.xml" id="EMBOSS: newcpgseek58" version="5.0.0" /> + <tool file="emboss_5/emboss_newseq.xml" id="EMBOSS: newseq59" version="5.0.0" /> + <tool file="emboss_5/emboss_noreturn.xml" id="EMBOSS: noreturn60" version="5.0.0" /> + <tool file="emboss_5/emboss_notseq.xml" id="EMBOSS: notseq61" version="5.0.0" /> + <tool file="emboss_5/emboss_nthseq.xml" id="EMBOSS: nthseq62" version="5.0.0" /> + <tool file="emboss_5/emboss_octanol.xml" id="EMBOSS: octanol63" version="5.0.0" /> + <tool file="emboss_5/emboss_oddcomp.xml" id="EMBOSS: oddcomp64" version="5.0.0" /> + <tool file="emboss_5/emboss_palindrome.xml" id="EMBOSS: palindrome65" version="5.0.0" /> + <tool 
file="emboss_5/emboss_pasteseq.xml" id="EMBOSS: pasteseq66" version="5.0.0" /> + <tool file="emboss_5/emboss_patmatdb.xml" id="EMBOSS: patmatdb67" version="5.0.0" /> + <tool file="emboss_5/emboss_pepcoil.xml" id="EMBOSS: pepcoil68" version="5.0.0" /> + <tool file="emboss_5/emboss_pepinfo.xml" id="EMBOSS: pepinfo69" version="5.0.0" /> + <tool file="emboss_5/emboss_pepnet.xml" id="EMBOSS: pepnet70" version="5.0.0" /> + <tool file="emboss_5/emboss_pepstats.xml" id="EMBOSS: pepstats71" version="5.0.0" /> + <tool file="emboss_5/emboss_pepwheel.xml" id="EMBOSS: pepwheel72" version="5.0.0" /> + <tool file="emboss_5/emboss_pepwindow.xml" id="EMBOSS: pepwindow73" version="5.0.0" /> + <tool file="emboss_5/emboss_pepwindowall.xml" id="EMBOSS: pepwindowall74" version="5.0.0" /> + <tool file="emboss_5/emboss_plotcon.xml" id="EMBOSS: plotcon75" version="5.0.0" /> + <tool file="emboss_5/emboss_plotorf.xml" id="EMBOSS: plotorf76" version="5.0.0" /> + <tool file="emboss_5/emboss_polydot.xml" id="EMBOSS: polydot77" version="5.0.0" /> + <tool file="emboss_5/emboss_preg.xml" id="EMBOSS: preg78" version="5.0.0" /> + <tool file="emboss_5/emboss_prettyplot.xml" id="EMBOSS: prettyplot79" version="5.0.0" /> + <tool file="emboss_5/emboss_prettyseq.xml" id="EMBOSS: prettyseq80" version="5.0.0" /> + <tool file="emboss_5/emboss_primersearch.xml" id="EMBOSS: primersearch81" version="5.0.0" /> + <tool file="emboss_5/emboss_revseq.xml" id="EMBOSS: revseq82" version="5.0.0" /> + <tool file="emboss_5/emboss_seqmatchall.xml" id="EMBOSS: seqmatchall83" version="5.0.0" /> + <tool file="emboss_5/emboss_seqret.xml" id="EMBOSS: seqret84" version="5.0.0" /> + <tool file="emboss_5/emboss_showfeat.xml" id="EMBOSS: showfeat85" version="5.0.0" /> + <tool file="emboss_5/emboss_shuffleseq.xml" id="EMBOSS: shuffleseq87" version="5.0.0" /> + <tool file="emboss_5/emboss_sigcleave.xml" id="EMBOSS: sigcleave88" version="5.0.0" /> + <tool file="emboss_5/emboss_sirna.xml" id="EMBOSS: sirna89" version="5.0.0" /> + <tool file="emboss_5/emboss_sixpack.xml" id="EMBOSS: sixpack90" version="5.0.0" /> + <tool file="emboss_5/emboss_skipseq.xml" id="EMBOSS: skipseq91" version="5.0.0" /> + <tool file="emboss_5/emboss_splitter.xml" id="EMBOSS: splitter92" version="5.0.0" /> + <tool file="emboss_5/emboss_supermatcher.xml" id="EMBOSS: supermatcher95" version="5.0.0" /> + <tool file="emboss_5/emboss_syco.xml" id="EMBOSS: syco96" version="5.0.0" /> + <tool file="emboss_5/emboss_tcode.xml" id="EMBOSS: tcode97" version="5.0.0" /> + <tool file="emboss_5/emboss_textsearch.xml" id="EMBOSS: textsearch98" version="5.0.0" /> + <tool file="emboss_5/emboss_tmap.xml" id="EMBOSS: tmap99" version="5.0.0" /> + <tool file="emboss_5/emboss_tranalign.xml" id="EMBOSS: tranalign100" version="5.0.0" /> + <tool file="emboss_5/emboss_transeq.xml" id="EMBOSS: transeq101" version="5.0.0" /> + <tool file="emboss_5/emboss_trimest.xml" id="EMBOSS: trimest102" version="5.0.0" /> + <tool file="emboss_5/emboss_trimseq.xml" id="EMBOSS: trimseq103" version="5.0.0" /> + <tool file="emboss_5/emboss_twofeat.xml" id="EMBOSS: twofeat104" version="5.0.0" /> + <tool file="emboss_5/emboss_union.xml" id="EMBOSS: union105" version="5.0.0" /> + <tool file="emboss_5/emboss_vectorstrip.xml" id="EMBOSS: vectorstrip106" version="5.0.0" /> + <tool file="emboss_5/emboss_water.xml" id="EMBOSS: water107" version="5.0.0" /> + <tool file="emboss_5/emboss_wobble.xml" id="EMBOSS: wobble108" version="5.0.0" /> + <tool file="emboss_5/emboss_wordcount.xml" id="EMBOSS: wordcount109" version="5.0.0" /> + <tool 
file="emboss_5/emboss_wordmatch.xml" id="EMBOSS: wordmatch110" version="5.0.0" /> + </repository> + </section> +</toolshed> diff -r af24105e47a9966566ad92833f0990805da69ac3 -r 05b62675898dcca81358692352dfcc9cee75a7a2 universe_wsgi.ini.sample --- a/universe_wsgi.ini.sample +++ b/universe_wsgi.ini.sample @@ -131,6 +131,22 @@ # Other tool config files must include the tool_path as an attribute in the <toolbox> tag. #tool_path = tools +# Enable automatic installation of tools that used to be in the Galaxy +# distribution but are now in the main Galaxy tool shed. The tools +# that will be installed are configured in the config file named +# tool_shed_install.xml, which is located in the Galaxy install directory. +# Tools already installed will not be re-installed even if they are +# referenced in the tool_shed_install.xml file. +#enable_tool_shed_install = False +#tool_shed_install_config_file = tool_shed_install.xml + +# CRITICAL NOTE: the location in which the tools will be installed is the +# location pointed to by the "tool_path" attribute in the following file. +# The default location setting in shed_tool_conf.xml ("../shed_tools") may +# be problematic for some cluster environments, so make sure to change it +# if appropriate or use a different file name for the setting. +#install_tool_config_file = shed_tool_conf.xml + # Directory where data used by tools is located, see the samples in that # directory and the wiki for help: # http://wiki.g2.bx.psu.edu/Admin/Data%20Integration Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.