commit/galaxy-central: greg: Move ~/lib/galaxy/tool_shed to ~/lib/tool_shed, and move the ~/lib/galaxy/util/shed_util components to ~/lib/tool_shed/util.
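The relocations carried by this changeset, summarized (taken from the commit message and the import rewrites in the diff below):

    lib/galaxy/tool_shed/                    ->  lib/tool_shed/galaxy_install/
    lib/galaxy/tool_shed/tool_shed_registry  ->  lib/tool_shed/tool_shed_registry
    lib/galaxy/tool_shed/migrate/            ->  lib/tool_shed/galaxy_install/migrate/
    lib/galaxy/util/shed_util components     ->  lib/tool_shed/util/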
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/6f0050c4e06e/ changeset: 6f0050c4e06e user: greg date: 2013-02-26 21:52:51 summary: Move ~/lib/galaxy/tool_shed to ~/lib/tool_shed, and move the ~/lib/galaxy/util/shed_util components to ~/lib/tool_shed/util. affected #: 77 files diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/app.py --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -3,8 +3,8 @@ from galaxy import config, jobs, util, tools, web import galaxy.tools.search import galaxy.tools.data -import galaxy.tool_shed -import galaxy.tool_shed.tool_shed_registry +import tool_shed.galaxy_install +import tool_shed.tool_shed_registry from galaxy.web import security import galaxy.model import galaxy.datatypes.registry @@ -41,7 +41,7 @@ db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database # Set up the tool sheds registry if os.path.isfile( self.config.tool_sheds_config ): - self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config ) + self.tool_shed_registry = tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config ) else: self.tool_shed_registry = None log.debug( 'self.config.tool_sheds_config: %s, self.tool_shed_registry: %s', @@ -51,7 +51,7 @@ from galaxy.model.migrate.check import create_or_verify_database create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options, app=self ) # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed. - from galaxy.tool_shed.migrate.check import verify_tools + from tool_shed.galaxy_install.migrate.check import verify_tools verify_tools( self, db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options ) # Object store manager self.object_store = build_object_store_from_config(self.config) @@ -64,7 +64,7 @@ object_store = self.object_store, trace_logger=self.trace_logger ) # Manage installed tool shed repositories. - self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self ) + self.installed_repository_manager = tool_shed.galaxy_install.InstalledRepositoryManager( self ) # Create an empty datatypes registry. self.datatypes_registry = galaxy.datatypes.registry.Registry() # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories. We @@ -105,7 +105,7 @@ self.data_managers = DataManagers( self ) # If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories. if self.config.get_bool( 'enable_tool_shed_check', False ): - from tool_shed.galaxy_install import update_manager self.update_manager = update_manager.UpdateManager( self ) # Load proprietary datatype converters and display applications.
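The app.py hunks above rewire every galaxy.tool_shed import to the new top-level tool_shed package. As a minimal sketch (not part of this commit), deployment code that still imports the old paths could bridge both layouts with a guarded import; the before/after module names are exactly the pairs from the hunks above:

    # Hypothetical compatibility shim: prefer the new lib/tool_shed layout,
    # fall back to the pre-move modules on older Galaxy checkouts.
    try:
        import tool_shed.galaxy_install as galaxy_install
        import tool_shed.tool_shed_registry as tool_shed_registry
    except ImportError:
        import galaxy.tool_shed as galaxy_install
        import galaxy.tool_shed.tool_shed_registry as tool_shed_registry

    # Either way, the registry is constructed the same way app.py does it:
    # tool_shed_registry.Registry( config_root, tool_sheds_config )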
self.installed_repository_manager.load_proprietary_converters_and_display_applications() diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3031,6 +3031,10 @@ pass class ToolShedRepository( object ): + api_collection_visible_keys = ( 'id', 'name', 'tool_shed', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes', + 'update_available', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' ) + api_element_visible_keys = ( 'id', 'name', 'tool_shed', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes', + 'update_available', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' ) installation_status = Bunch( NEW='New', CLONING='Cloning', SETTING_TOOL_VERSIONS='Setting tool versions', @@ -3066,6 +3070,10 @@ self.dist_to_shed = dist_to_shed self.status = status self.error_message = error_message + def as_dict( self, trans ): + tsr_dict = self.get_api_value( view='element' ) + tsr_dict[ 'id' ] = trans.security.encode_id( self.id ) + return tsr_dict def repo_files_directory( self, app ): repo_path = self.repo_path( app ) if repo_path: @@ -3153,6 +3161,22 @@ if self.shed_config_filename == shed_tool_conf_dict[ 'config_filename' ]: return shed_tool_conf_dict return default + def get_api_value( self, view='collection', value_mapper=None ): + if value_mapper is None: + value_mapper = {} + rval = {} + try: + visible_keys = self.__getattribute__( 'api_' + view + '_visible_keys' ) + except AttributeError: + raise Exception( 'Unknown API view: %s' % view ) + for key in visible_keys: + try: + rval[ key ] = self.__getattribute__( key ) + if key in value_mapper: + rval[ key ] = value_mapper.get( key )( rval[ key ] ) + except AttributeError: + rval[ key ] = None + return rval @property def can_install( self ): return self.status == self.installation_status.NEW diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py --- /dev/null +++ b/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py @@ -0,0 +1,44 @@ +""" +Migration script to update the migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate. +""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData( migrate_engine ) +db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) ) + +def upgrade(): + print __doc__ + + metadata.reflect() + # Create the table. 
+ try: + cmd = "UPDATE migrate_tools set repository_path='lib/tool_shed/galaxy_install/migrate';" + db_session.execute( cmd ) + except Exception, e: + log.debug( "Updating migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate failed: %s" % str( e ) ) + +def downgrade(): + metadata.reflect() + try: + cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';" + db_session.execute( cmd ) + except Exception, e: + log.debug( "Updating migrate_tools.repository_path column to point to the old location lib/galaxy/tool_shed/migrate failed: %s" % str( e ) ) diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/__init__.py --- a/lib/galaxy/tool_shed/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -""" -Classes encapsulating the management of repositories installed from Galaxy tool sheds. -""" -import os -import galaxy.util.shed_util -import galaxy.util.shed_util_common -from galaxy.model.orm import and_ - -from galaxy import eggs -import pkg_resources - -pkg_resources.require( 'elementtree' ) -from elementtree import ElementTree, ElementInclude - -class InstalledRepositoryManager( object ): - def __init__( self, app ): - self.app = app - self.model = self.app.model - self.sa_session = self.model.context.current - self.tool_configs = self.app.config.tool_configs - if self.app.config.migrated_tools_config not in self.tool_configs: - self.tool_configs.append( self.app.config.migrated_tools_config ) - self.installed_repository_dicts = [] - def get_repository_install_dir( self, tool_shed_repository ): - for tool_config in self.tool_configs: - tree = ElementTree.parse( tool_config ) - root = tree.getroot() - ElementInclude.include( root ) - tool_path = root.get( 'tool_path', None ) - if tool_path: - tool_shed = galaxy.util.shed_util_common.clean_tool_shed_url( tool_shed_repository.tool_shed ) - relative_path = os.path.join( tool_path, - tool_shed, - 'repos', - tool_shed_repository.owner, - tool_shed_repository.name, - tool_shed_repository.installed_changeset_revision ) - if os.path.exists( relative_path ): - return relative_path - return None - def load_proprietary_datatypes( self ): - for tool_shed_repository in self.sa_session.query( self.model.ToolShedRepository ) \ - .filter( and_( self.model.ToolShedRepository.table.c.includes_datatypes==True, - self.model.ToolShedRepository.table.c.deleted==False ) ) \ - .order_by( self.model.ToolShedRepository.table.c.id ): - relative_install_dir = self.get_repository_install_dir( tool_shed_repository ) - if relative_install_dir: - installed_repository_dict = galaxy.util.shed_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir ) - if installed_repository_dict: - self.installed_repository_dicts.append( installed_repository_dict ) - def load_proprietary_converters_and_display_applications( self, deactivate=False ): - for installed_repository_dict in self.installed_repository_dicts: - if installed_repository_dict[ 'converter_path' ]: - galaxy.util.shed_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=deactivate ) - if installed_repository_dict[ 'display_path' ]: - galaxy.util.shed_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=deactivate ) - diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/common_util.py --- a/lib/galaxy/tool_shed/common_util.py +++ /dev/null @@ -1,93 +0,0 @@
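Returning to the lib/galaxy/model change above: the new get_api_value() exposes only the attributes whitelisted in api_<view>_visible_keys, and as_dict() layers the encoded id on top. A self-contained sketch of the same pattern (illustrative only, not Galaxy code):

    # Each view name selects a tuple of attribute names to expose.
    class Item( object ):
        api_collection_visible_keys = ( 'id', 'name' )
        api_element_visible_keys = ( 'id', 'name', 'status' )
        def __init__( self, id, name, status ):
            self.id, self.name, self.status = id, name, status
        def get_api_value( self, view='collection', value_mapper=None ):
            value_mapper = value_mapper or {}
            rval = {}
            for key in getattr( self, 'api_%s_visible_keys' % view ):
                value = getattr( self, key, None )       # missing attributes become None
                if key in value_mapper:
                    value = value_mapper[ key ]( value )  # e.g. encode a database id
                rval[ key ] = value
            return rval

    print Item( 7, 'emboss_5', 'Installed' ).get_api_value( view='element' )
    # -> {'id': 7, 'name': 'emboss_5', 'status': 'Installed'} (key order may vary)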
-import os, urllib2 -from galaxy import util -from galaxy.util.odict import odict -from galaxy.tool_shed import encoding_util - -REPOSITORY_OWNER = 'devteam' - -def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ): - # Get the 000x_tools.xml file associated with the current migrate_tools version number. - tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) ) - # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config. - migrated_tool_configs_dict = odict() - tree = util.parse_xml( tools_xml_file_path ) - root = tree.getroot() - tool_shed = root.get( 'name' ) - tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ) - # The default behavior is that the tool shed is down. - tool_shed_accessible = False - if tool_shed_url: - for elem in root: - if elem.tag == 'repository': - tool_dependencies = [] - tool_dependencies_dict = {} - repository_name = elem.get( 'name' ) - changeset_revision = elem.get( 'changeset_revision' ) - url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \ - ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision ) - try: - response = urllib2.urlopen( url ) - text = response.read() - response.close() - tool_shed_accessible = True - except Exception, e: - # Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping. - tool_shed_accessible = False - print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) ) - if tool_shed_accessible: - if text: - tool_dependencies_dict = encoding_util.tool_shed_decode( text ) - for dependency_key, requirements_dict in tool_dependencies_dict.items(): - tool_dependency_name = requirements_dict[ 'name' ] - tool_dependency_version = requirements_dict[ 'version' ] - tool_dependency_type = requirements_dict[ 'type' ] - tool_dependency_readme = requirements_dict.get( 'readme', '' ) - tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) ) - for tool_elem in elem.findall( 'tool' ): - migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies - if tool_shed_accessible: - # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names. - missing_tool_configs_dict = odict() - for tool_panel_config in tool_panel_configs: - tree = util.parse_xml( tool_panel_config ) - root = tree.getroot() - for elem in root: - if elem.tag == 'tool': - missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ) - elif elem.tag == 'section': - for section_elem in elem: - if section_elem.tag == 'tool': - missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict ) - else: - exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file. 
' % ( tool_shed, app.config.tool_sheds_config ) - exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n' - raise Exception( exception_msg ) - return tool_shed_accessible, missing_tool_configs_dict -def check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ): - file_path = elem.get( 'file', None ) - if file_path: - path, name = os.path.split( file_path ) - if name in migrated_tool_configs_dict: - tool_dependencies = migrated_tool_configs_dict[ name ] - missing_tool_configs_dict[ name ] = tool_dependencies - return missing_tool_configs_dict -def get_non_shed_tool_panel_configs( app ): - # Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml. - config_filenames = [] - for config_filename in app.config.tool_configs: - # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related. - # <toolbox tool_path="../shed_tools"> - tree = util.parse_xml( config_filename ) - root = tree.getroot() - tool_path = root.get( 'tool_path', None ) - if tool_path is None: - config_filenames.append( config_filename ) - return config_filenames -def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ): - search_str = '://%s' % tool_shed - for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items(): - if shed_url.find( search_str ) >= 0: - if shed_url.endswith( '/' ): - shed_url = shed_url.rstrip( '/' ) - return shed_url - return None diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/encoding_util.py --- a/lib/galaxy/tool_shed/encoding_util.py +++ /dev/null @@ -1,44 +0,0 @@ -import binascii, logging -from galaxy.util.hash_util import hmac_new -from galaxy.util.json import json_fix - -from galaxy import eggs -import pkg_resources - -pkg_resources.require( "simplejson" ) -import simplejson - -log = logging.getLogger( __name__ ) - -encoding_sep = '__esep__' -encoding_sep2 = '__esepii__' - -def tool_shed_decode( value ): - # Extract and verify hash - a, b = value.split( ":" ) - value = binascii.unhexlify( b ) - test = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ) - assert a == test - # Restore from string - values = None - try: - values = simplejson.loads( value ) - except Exception, e: - log.debug( "Decoding json value from tool shed for value '%s' threw exception: %s" % ( str( value ), str( e ) ) ) - if values is not None: - try: - return json_fix( values ) - except Exception, e: - log.debug( "Fixing decoded json values '%s' from tool shed threw exception: %s" % ( str( values ), str( e ) ) ) - fixed_values = values - if values is None: - values = value - return values -def tool_shed_encode( val ): - if isinstance( val, dict ): - value = simplejson.dumps( val ) - else: - value = val - a = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ) - b = binascii.hexlify( value ) - return "%s:%s" % ( a, b ) \ No newline at end of file diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/install_manager.py --- a/lib/galaxy/tool_shed/install_manager.py +++ /dev/null @@ -1,372 +0,0 @@ -""" -Manage automatic installation of tools configured in the xxx.xml files in ~/scripts/migrate_tools (e.g., 0002_tools.xml). -All of the tools were at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool shed. 
-""" -import os, urllib2, tempfile -from galaxy import util -from galaxy.tools import ToolSection -from galaxy.util.json import from_json_string, to_json_string -import galaxy.util.shed_util as shed_util -import galaxy.util.shed_util_common as suc -from galaxy.util.odict import odict -from galaxy.tool_shed import common_util - -class InstallManager( object ): - def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ): - """ - Check tool settings in tool_shed_install_config and install all repositories that are not already installed. The tool - panel configuration file is the received migrated_tools_config, which is the reserved file named migrated_tools_conf.xml. - """ - self.app = app - self.toolbox = self.app.toolbox - self.migrated_tools_config = migrated_tools_config - # If install_dependencies is True but tool_dependency_dir is not set, do not attempt to install but print informative error message. - if install_dependencies and app.config.tool_dependency_dir is None: - message = 'You are attempting to install tool dependencies but do not have a value for "tool_dependency_dir" set in your universe_wsgi.ini ' - message += 'file. Set this location value to the path where you want tool dependencies installed and rerun the migration script.' - raise Exception( message ) - # Get the local non-shed related tool panel configs (there can be more than one, and the default name is tool_conf.xml). - self.proprietary_tool_confs = self.non_shed_tool_panel_configs - self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number ) - # Set the location where the repositories will be installed by retrieving the tool_path setting from migrated_tools_config. - tree = util.parse_xml( migrated_tools_config ) - root = tree.getroot() - self.tool_path = root.get( 'tool_path' ) - print "Repositories will be installed into configured tool_path location ", str( self.tool_path ) - # Parse tool_shed_install_config to check each of the tools. - self.tool_shed_install_config = tool_shed_install_config - tree = util.parse_xml( tool_shed_install_config ) - root = tree.getroot() - self.tool_shed = suc.clean_tool_shed_url( root.get( 'name' ) ) - self.repository_owner = common_util.REPOSITORY_OWNER - index, self.shed_config_dict = suc.get_shed_tool_conf_dict( app, self.migrated_tools_config ) - # Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in - # tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk. - # The default behavior is that the tool shed is down. - tool_shed_accessible = False - tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app ) - if tool_panel_configs: - # The missing_tool_configs_dict contents are something like: - # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]} - tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number ) - else: - # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but - # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in - # the database. 
- tool_shed_accessible = True - missing_tool_configs_dict = odict() - if tool_shed_accessible: - if len( self.proprietary_tool_confs ) == 1: - plural = '' - file_names = self.proprietary_tool_confs[ 0 ] - else: - plural = 's' - file_names = ', '.join( self.proprietary_tool_confs ) - if missing_tool_configs_dict: - for repository_elem in root: - self.install_repository( repository_elem, install_dependencies ) - else: - message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number ) - message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names ) - print message - else: - message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % str( latest_migration_script_number ) - message += "Try again later.\n" - print message - def get_guid( self, repository_clone_url, relative_install_dir, tool_config ): - if self.shed_config_dict.get( 'tool_path' ): - relative_install_dir = os.path.join( self.shed_config_dict['tool_path'], relative_install_dir ) - found = False - for root, dirs, files in os.walk( relative_install_dir ): - if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0: - if '.hg' in dirs: - dirs.remove( '.hg' ) - for name in files: - if name == tool_config: - found = True - break - if found: - break - full_path = str( os.path.abspath( os.path.join( root, name ) ) ) - tool = self.toolbox.load_tool( full_path ) - return suc.generate_tool_guid( repository_clone_url, tool ) - def get_proprietary_tool_panel_elems( self, latest_tool_migration_script_number ): - # Parse each config in self.proprietary_tool_confs (the default is tool_conf.xml) and generate a list of Elements that are - # either ToolSection elements or Tool elements. These will be used to generate new entries in the migrated_tools_conf.xml - # file for the installed tools. - tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) ) - # Parse the XML and load the file attributes for later checking against the integrated elements from self.proprietary_tool_confs. - migrated_tool_configs = [] - tree = util.parse_xml( tools_xml_file_path ) - root = tree.getroot() - for elem in root: - if elem.tag == 'repository': - for tool_elem in elem: - migrated_tool_configs.append( tool_elem.get( 'file' ) ) - # Parse each file in self.proprietary_tool_confs and generate the integrated list of tool panel Elements that contain them. - tool_panel_elems = [] - for proprietary_tool_conf in self.proprietary_tool_confs: - tree = util.parse_xml( proprietary_tool_conf ) - root = tree.getroot() - for elem in root: - if elem.tag == 'tool': - # Tools outside of sections. - file_path = elem.get( 'file', None ) - if file_path: - name = suc.strip_path( file_path ) - if name in migrated_tool_configs: - if elem not in tool_panel_elems: - tool_panel_elems.append( elem ) - elif elem.tag == 'section': - # Tools contained in a section. - for section_elem in elem: - if section_elem.tag == 'tool': - file_path = section_elem.get( 'file', None ) - if file_path: - name = suc.strip_path( file_path ) - if name in migrated_tool_configs: - # Append the section, not the tool. 
- if elem not in tool_panel_elems: - tool_panel_elems.append( elem ) - return tool_panel_elems - def get_containing_tool_sections( self, tool_config ): - """ - If tool_config is defined somewhere in self.proprietary_tool_panel_elems, return True and a list of ToolSections in which the - tool is displayed. If the tool is displayed outside of any sections, None is appended to the list. - """ - tool_sections = [] - is_displayed = False - for proprietary_tool_panel_elem in self.proprietary_tool_panel_elems: - if proprietary_tool_panel_elem.tag == 'tool': - # The proprietary_tool_panel_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />. - proprietary_tool_config = proprietary_tool_panel_elem.get( 'file' ) - proprietary_name = suc.strip_path( proprietary_tool_config ) - if tool_config == proprietary_name: - # The tool is loaded outside of any sections. - tool_sections.append( None ) - if not is_displayed: - is_displayed = True - if proprietary_tool_panel_elem.tag == 'section': - # The proprietary_tool_panel_elem looks something like <section name="EMBOSS" id="EMBOSSLite">. - for section_elem in proprietary_tool_panel_elem: - if section_elem.tag == 'tool': - # The section_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />. - proprietary_tool_config = section_elem.get( 'file' ) - proprietary_name = suc.strip_path( proprietary_tool_config ) - if tool_config == proprietary_name: - # The tool is loaded inside of the section_elem. - tool_sections.append( ToolSection( proprietary_tool_panel_elem ) ) - if not is_displayed: - is_displayed = True - return is_displayed, tool_sections - def handle_repository_contents( self, tool_shed_repository, repository_clone_url, relative_install_dir, repository_elem, install_dependencies ): - """Generate the metadata for the installed tool shed repository, among other things.""" - tool_panel_dict_for_display = odict() - if self.tool_path: - repo_install_dir = os.path.join( self.tool_path, relative_install_dir ) - else: - repo_install_dir = relative_install_dir - for tool_elem in repository_elem: - # The tool_elem looks something like this: <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" /> - tool_config = tool_elem.get( 'file' ) - guid = self.get_guid( repository_clone_url, relative_install_dir, tool_config ) - # See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems. - is_displayed, tool_sections = self.get_containing_tool_sections( tool_config ) - if is_displayed: - tool_panel_dict_for_tool_config = shed_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections ) - for k, v in tool_panel_dict_for_tool_config.items(): - tool_panel_dict_for_display[ k ] = v - else: - print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' 
\ - % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) ) - metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=self.app, - repository=tool_shed_repository, - changeset_revision=tool_shed_repository.changeset_revision, - repository_clone_url=repository_clone_url, - shed_config_dict = self.shed_config_dict, - relative_install_dir=relative_install_dir, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=True ) - tool_shed_repository.metadata = metadata_dict - self.app.sa_session.add( tool_shed_repository ) - self.app.sa_session.flush() - if 'tool_dependencies' in metadata_dict: - # All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed. - tool_dependencies = shed_util.create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True ) - else: - tool_dependencies = None - if 'tools' in metadata_dict: - sample_files = metadata_dict.get( 'sample_files', [] ) - sample_files = [ str( s ) for s in sample_files ] - tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files ) - shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path ) - sample_files_copied = [ s for s in tool_index_sample_files ] - repository_tools_tups = suc.get_repository_tools_tups( self.app, metadata_dict ) - if repository_tools_tups: - # Handle missing data table entries for tool parameters that are dynamically generated select lists. - repository_tools_tups = shed_util.handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups ) - # Handle missing index files for tool parameters that are dynamically generated select lists. - repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( self.app, - self.tool_path, - sample_files, - repository_tools_tups, - sample_files_copied ) - # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance. - shed_util.copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied ) - if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict: - # Install tool dependencies. - shed_util.update_tool_shed_repository_status( self.app, - tool_shed_repository, - self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) - # Get the tool_dependencies.xml file from disk. 
- tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir ) - installed_tool_dependencies = shed_util.handle_tool_dependencies( app=self.app, - tool_shed_repository=tool_shed_repository, - tool_dependencies_config=tool_dependencies_config, - tool_dependencies=tool_dependencies ) - for installed_tool_dependency in installed_tool_dependencies: - if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR: - print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':' - print installed_tool_dependency.error_message, '\n\n' - shed_util.add_to_tool_panel( self.app, - tool_shed_repository.name, - repository_clone_url, - tool_shed_repository.installed_changeset_revision, - repository_tools_tups, - self.repository_owner, - self.migrated_tools_config, - tool_panel_dict=tool_panel_dict_for_display, - new_install=True ) - if 'datatypes' in metadata_dict: - tool_shed_repository.status = self.app.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES - if not tool_shed_repository.includes_datatypes: - tool_shed_repository.includes_datatypes = True - self.app.sa_session.add( tool_shed_repository ) - self.app.sa_session.flush() - work_dir = tempfile.mkdtemp() - datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir ) - # Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started - # after this installation completes. - converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir - if converter_path or display_path: - # Create a dictionary of tool shed repository related information. - repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed, - name=tool_shed_repository.name, - owner=self.repository_owner, - installed_changeset_revision=tool_shed_repository.installed_changeset_revision, - tool_dicts=metadata_dict.get( 'tools', [] ), - converter_path=converter_path, - display_path=display_path ) - if converter_path: - # Load proprietary datatype converters - self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict ) - if display_path: - # Load proprietary datatype display applications - self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict ) - try: - shutil.rmtree( work_dir ) - except: - pass - def install_repository( self, repository_elem, install_dependencies ): - # Install a single repository, loading contained tools into the tool panel. 
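install_repository() below assembles its clone target from the pattern noted in its first comment. A worked sketch of that layout (the shed and owner come from this file's migration defaults; the changeset hash is a made-up example):

    import os
    # <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
    tool_path = '../shed_tools'    # tool_path from migrated_tools_conf.xml
    clone_dir = os.path.join( tool_path, 'toolshed.g2.bx.psu.edu', 'repos',
                              'devteam', 'emboss_5', '0123456789ab' )
    print clone_dir
    # -> ../shed_tools/toolshed.g2.bx.psu.edu/repos/devteam/emboss_5/0123456789ab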
- name = repository_elem.get( 'name' ) - description = repository_elem.get( 'description' ) - installed_changeset_revision = repository_elem.get( 'changeset_revision' ) - # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision> - relative_clone_dir = os.path.join( self.tool_shed, 'repos', self.repository_owner, name, installed_changeset_revision ) - clone_dir = os.path.join( self.tool_path, relative_clone_dir ) - if self.__isinstalled( clone_dir ): - print "Skipping automatic install of repository '", name, "' because it has already been installed in location ", clone_dir - else: - tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed ) - repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name ) - relative_install_dir = os.path.join( relative_clone_dir, name ) - install_dir = os.path.join( clone_dir, name ) - ctx_rev = suc.get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision ) - tool_shed_repository = suc.create_or_update_tool_shed_repository( app=self.app, - name=name, - description=description, - installed_changeset_revision=installed_changeset_revision, - ctx_rev=ctx_rev, - repository_clone_url=repository_clone_url, - metadata_dict={}, - status=self.app.model.ToolShedRepository.installation_status.NEW, - current_changeset_revision=None, - owner=self.repository_owner, - dist_to_shed=True ) - shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING ) - cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev ) - if cloned_ok: - self.handle_repository_contents( tool_shed_repository=tool_shed_repository, - repository_clone_url=repository_clone_url, - relative_install_dir=relative_install_dir, - repository_elem=repository_elem, - install_dependencies=install_dependencies ) - self.app.sa_session.refresh( tool_shed_repository ) - metadata_dict = tool_shed_repository.metadata - if 'tools' in metadata_dict: - shed_util.update_tool_shed_repository_status( self.app, - tool_shed_repository, - self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS ) - # Get the tool_versions from the tool shed for each tool in the installed change set. - url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \ - ( tool_shed_url, tool_shed_repository.name, self.repository_owner, installed_changeset_revision ) - response = urllib2.urlopen( url ) - text = response.read() - response.close() - if text: - tool_version_dicts = from_json_string( text ) - shed_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository ) - else: - # Set the tool versions since they seem to be missing for this repository in the tool shed. - # CRITICAL NOTE: These default settings may not properly handle all parent/child associations. 
- for tool_dict in metadata_dict[ 'tools' ]: - flush_needed = False - tool_id = tool_dict[ 'guid' ] - old_tool_id = tool_dict[ 'id' ] - tool_version = tool_dict[ 'version' ] - tool_version_using_old_id = shed_util.get_tool_version( self.app, old_tool_id ) - tool_version_using_guid = shed_util.get_tool_version( self.app, tool_id ) - if not tool_version_using_old_id: - tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id, - tool_shed_repository=tool_shed_repository ) - self.app.sa_session.add( tool_version_using_old_id ) - self.app.sa_session.flush() - if not tool_version_using_guid: - tool_version_using_guid = self.app.model.ToolVersion( tool_id=tool_id, - tool_shed_repository=tool_shed_repository ) - self.app.sa_session.add( tool_version_using_guid ) - self.app.sa_session.flush() - # Associate the two versions as parent / child. - tool_version_association = shed_util.get_tool_version_association( self.app, - tool_version_using_old_id, - tool_version_using_guid ) - if not tool_version_association: - tool_version_association = self.app.model.ToolVersionAssociation( tool_id=tool_version_using_guid.id, - parent_id=tool_version_using_old_id.id ) - self.app.sa_session.add( tool_version_association ) - self.app.sa_session.flush() - shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED ) - @property - def non_shed_tool_panel_configs( self ): - return common_util.get_non_shed_tool_panel_configs( self.app ) - def __get_url_from_tool_shed( self, tool_shed ): - # The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like: - # http://toolshed.g2.bx.psu.edu/ - for shed_name, shed_url in self.app.tool_shed_registry.tool_sheds.items(): - if shed_url.find( tool_shed ) >= 0: - if shed_url.endswith( '/' ): - shed_url = shed_url.rstrip( '/' ) - return shed_url - # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml. - return None - def __isinstalled( self, clone_dir ): - full_path = os.path.abspath( clone_dir ) - if os.path.exists( full_path ): - for root, dirs, files in os.walk( full_path ): - if '.hg' in dirs: - # Assume that the repository has been installed if we find a .hg directory. - return True - return False diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/check.py --- a/lib/galaxy/tool_shed/migrate/check.py +++ /dev/null @@ -1,171 +0,0 @@ -import sys, os, logging, subprocess -from galaxy import eggs -import pkg_resources -pkg_resources.require( "sqlalchemy-migrate" ) - -from migrate.versioning import repository, schema -from sqlalchemy import * -from galaxy.util.odict import odict -from galaxy.tool_shed import common_util - -log = logging.getLogger( __name__ ) - -# Path relative to galaxy -migrate_repository_directory = os.path.dirname( __file__ ).replace( os.getcwd() + os.path.sep, '', 1 ) -migrate_repository = repository.Repository( migrate_repository_directory ) -dialect_to_egg = { - "sqlite" : "pysqlite>=2", - "postgres" : "psycopg2", - "mysql" : "MySQL_python" -} - -def verify_tools( app, url, galaxy_config_file, engine_options={} ): - # Check the value in the migrate_tools.version database table column to verify that the number is in - # sync with the number of version scripts in ~/lib/galaxy/tools/migrate/versions. 
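In miniature, the version comparison verify_tools() performs looks like the following (a hedged sketch assuming sqlalchemy-migrate and an example SQLite database URL; Repository and ControlledSchema are the same classes used in this file):

    from sqlalchemy import create_engine
    from migrate.versioning import repository, schema

    migrate_repo = repository.Repository( 'lib/tool_shed/galaxy_install/migrate' )
    engine = create_engine( 'sqlite:///database/universe.sqlite' )  # example URL
    db_schema = schema.ControlledSchema( engine, migrate_repo )
    if migrate_repo.versions.latest != db_schema.version:
        # Out of sync: admins are pointed at sh ./scripts/migrate_tools/000x_tools.sh
        print 'Tool migrations pending: code at %s, database at %s' % (
            migrate_repo.versions.latest, db_schema.version )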
- dialect = ( url.split( ':', 1 ) )[0] - try: - egg = dialect_to_egg[ dialect ] - try: - pkg_resources.require( egg ) - log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) ) - except: - # If the module is in the path elsewhere (i.e. non-egg), it'll still load. - log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) ) - except KeyError: - # Let this go, it could possibly work with db's we don't support - log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect ) - # Create engine and metadata - engine = create_engine( url, **engine_options ) - meta = MetaData( bind=engine ) - # The migrate_tools table was created in database version script 0092_add_migrate_tools_table.py. - version_table = Table( "migrate_tools", meta, autoload=True ) - # Verify that the code and the database are in sync. - db_schema = schema.ControlledSchema( engine, migrate_repository ) - latest_tool_migration_script_number = migrate_repository.versions.latest - if latest_tool_migration_script_number != db_schema.version: - # The default behavior is that the tool shed is down. - tool_shed_accessible = False - if app.new_installation: - # New installations will not be missing tools, so we don't need to worry about them. - missing_tool_configs_dict = odict() - else: - tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app ) - if tool_panel_configs: - # The missing_tool_configs_dict contents are something like: - # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]} - tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ) - else: - # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but - # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in - # the database. - tool_shed_accessible = True - missing_tool_configs_dict = odict() - have_tool_dependencies = False - for k, v in missing_tool_configs_dict.items(): - if v: - have_tool_dependencies = True - break - config_arg = '' - if os.path.abspath( os.path.join( os.getcwd(), 'universe_wsgi.ini' ) ) != galaxy_config_file: - config_arg = ' -c %s' % galaxy_config_file.replace( os.path.abspath( os.getcwd() ), '.' ) - if not app.config.running_functional_tests: - if tool_shed_accessible: - # Automatically update the value of the migrate_tools.version database table column. - cmd = 'sh manage_tools.sh%s upgrade' % config_arg - proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - return_code = proc.wait() - output = proc.stdout.read( 32768 ) - if return_code != 0: - raise Exception( "Error attempting to update the value of migrate_tools.version: %s" % output ) - elif missing_tool_configs_dict: - if len( tool_panel_configs ) == 1: - plural = '' - tool_panel_config_file_names = tool_panel_configs[ 0 ] - else: - plural = 's' - tool_panel_config_file_names = ', '.join( tool_panel_configs ) - msg = "\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>" - msg += "\n\nThe list of files at the end of this message refers to tools that are configured to load into the tool panel for\n" - msg += "this Galaxy instance, but have been removed from the Galaxy distribution. 
These tools and their dependencies can be\n" - msg += "automatically installed from the Galaxy tool shed at http://toolshed.g2.bx.psu.edu.\n\n" - msg += "To skip this process, attempt to start your Galaxy server again (e.g., sh run.sh or whatever you use). If you do this,\n" - msg += "be aware that these tools will no longer be available in your Galaxy tool panel, and entries for each of them should\n" - msg += "be removed from your file%s named %s.\n\n" % ( plural, tool_panel_config_file_names ) - msg += "CRITICAL NOTE IF YOU PLAN TO INSTALL\n" - msg += "The location in which the tool repositories will be installed is the value of the 'tool_path' attribute in the <toolbox>\n" - msg += 'tag of the file named ./migrated_tools_conf.xml (i.e., <toolbox tool_path="../shed_tools">). The default location\n' - msg += "setting is '../shed_tools', which may be problematic for some cluster environments, so make sure to change it before\n" - msg += "you execute the installation process if appropriate. The configured location must be outside of the Galaxy installation\n" - msg += "directory or it must be in a sub-directory protected by a properly configured .hgignore file if the directory is within\n" - msg += "the Galaxy installation directory hierarchy. This is because tool shed repositories will be installed using mercurial's\n" - msg += "clone feature, which creates .hg directories and associated mercurial repository files. Not having .hgignore properly\n" - msg += "configured could result in undesired behavior when modifying or updating your local Galaxy instance or the tool shed\n" - msg += "repositories if they are in directories that pose conflicts. See mercurial's .hgignore documentation at the following\n" - msg += "URL for details.\n\nhttp://mercurial.selenic.com/wiki/.hgignore\n\n" - if have_tool_dependencies: - msg += "The following tool dependencies can also optionally be installed (see the option flag in the command below).
If you\n" - msg += "choose to install them (recommended), they will be installed within the location specified by the 'tool_dependency_dir'\n" - msg += "setting in your main Galaxy configuration file (e.g., universe_wsgi.ini).\n" - processed_tool_dependencies = [] - for missing_tool_config, tool_dependencies in missing_tool_configs_dict.items(): - for tool_dependencies_tup in tool_dependencies: - if tool_dependencies_tup not in processed_tool_dependencies: - msg += "------------------------------------\n" - msg += "Tool Dependency\n" - msg += "------------------------------------\n" - msg += "Name: %s, Version: %s, Type: %s\n" % ( tool_dependencies_tup[ 0 ], - tool_dependencies_tup[ 1 ], - tool_dependencies_tup[ 2 ] ) - if tool_dependencies_tup[ 3 ]: - msg += "Requirements and installation information:\n" - msg += "%s\n" % tool_dependencies_tup[ 3 ] - else: - msg += "\n" - msg += "------------------------------------\n" - processed_tool_dependencies.append( tool_dependencies_tup ) - msg += "\n" - msg += "%s" % output.replace( 'done', '' ) - msg += "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv\n" - msg += "sh ./scripts/migrate_tools/%04d_tools.sh\n" % latest_tool_migration_script_number - msg += "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n" - if have_tool_dependencies: - msg += "The tool dependencies listed above will be installed along with the repositories if you add the 'install_dependencies'\n" - msg += "option to the above command like this:\n\n" - msg += "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv\n" - msg += "sh ./scripts/migrate_tools/%04d_tools.sh install_dependencies\n" % latest_tool_migration_script_number - msg += "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n" - msg += "Tool dependencies can be installed after the repositories have been installed as well.\n\n" - msg += "After the installation process finishes, you can start your Galaxy server. As part of this installation process,\n" - msg += "entries for each of the following tool config files will be added to the file named ./migrated_tools_conf.xml, so these\n" - msg += "tools will continue to be loaded into your tool panel. Because of this, existing entries for these files should be\n" - msg += "removed from your file%s named %s, but only after the installation process finishes.\n\n" % ( plural, tool_panel_config_file_names ) - for missing_tool_config, tool_dependencies in missing_tool_configs_dict.items(): - msg += "%s\n" % missing_tool_config - msg += "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n" - raise Exception( msg ) - else: - log.debug( "The main Galaxy tool shed is not currently available, so skipped tool migration %s until next server startup" % db_schema.version ) - else: - log.info( "At migrate_tools version %d" % db_schema.version ) - -def migrate_to_current_version( engine, schema ): - # Changes to get to current version. - changeset = schema.changeset( None ) - for ver, change in changeset: - nextver = ver + changeset.step - log.info( 'Installing tools from version %s -> %s...
' % ( ver, nextver ) ) - old_stdout = sys.stdout - class FakeStdout( object ): - def __init__( self ): - self.buffer = [] - def write( self, s ): - self.buffer.append( s ) - def flush( self ): - pass - sys.stdout = FakeStdout() - try: - schema.runchange( ver, change, changeset.step ) - finally: - for message in "".join( sys.stdout.buffer ).split( "\n" ): - log.info( message ) - sys.stdout = old_stdout diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/common.py --- a/lib/galaxy/tool_shed/migrate/common.py +++ /dev/null @@ -1,81 +0,0 @@ -import sys, os, ConfigParser -import galaxy.config -import galaxy.datatypes.registry -from galaxy import tools -from galaxy.tools.data import * -import galaxy.model.mapping -import galaxy.tools.search -from galaxy.objectstore import build_object_store_from_config -from galaxy.tool_shed.common_util import * -import galaxy.tool_shed.tool_shed_registry -from galaxy.tool_shed import install_manager - -class MigrateToolsApplication( object ): - """Encapsulates the state of a basic Galaxy Universe application in order to initiate the Install Manager""" - def __init__( self, tools_migration_config ): - install_dependencies = 'install_dependencies' in sys.argv - galaxy_config_file = 'universe_wsgi.ini' - if '-c' in sys.argv: - pos = sys.argv.index( '-c' ) - sys.argv.pop( pos ) - galaxy_config_file = sys.argv.pop( pos ) - if not os.path.exists( galaxy_config_file ): - print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % galaxy_config_file - sys.exit( 1 ) - config_parser = ConfigParser.ConfigParser( { 'here':os.getcwd() } ) - config_parser.read( galaxy_config_file ) - galaxy_config_dict = {} - for key, value in config_parser.items( "app:main" ): - galaxy_config_dict[ key ] = value - self.config = galaxy.config.Configuration( **galaxy_config_dict ) - if not self.config.database_connection: - self.config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database - self.config.update_integrated_tool_panel = True - self.object_store = build_object_store_from_config( self.config ) - # Setup the database engine and ORM - self.model = galaxy.model.mapping.init( self.config.file_path, - self.config.database_connection, - engine_options={}, - create_tables=False, - object_store=self.object_store ) - # Create an empty datatypes registry. - self.datatypes_registry = galaxy.datatypes.registry.Registry() - # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config. - self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config ) - # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path. - self.tool_data_tables = ToolDataTableManager( tool_data_path=self.config.tool_data_path, - config_filename=self.config.tool_data_table_config_path ) - # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables. - self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config, - tool_data_path=self.tool_data_tables.tool_data_path, - from_shed_config=True ) - # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file. 
- tool_configs = self.config.tool_configs - if self.config.migrated_tools_config not in tool_configs: - tool_configs.append( self.config.migrated_tools_config ) - self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self ) - # Search support for tools - self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox ) - # Set up the tool sheds registry. - if os.path.isfile( self.config.tool_sheds_config ): - self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config ) - else: - self.tool_shed_registry = None - # Get the latest tool migration script number to send to the Install manager. - latest_migration_script_number = int( tools_migration_config.split( '_' )[ 0 ] ) - # The value of migrated_tools_config is migrated_tools_conf.xml, and is reserved for containing only those tools that have been - # eliminated from the distribution and moved to the tool shed. A side-effect of instantiating the InstallManager is the automatic - # installation of all appropriate tool shed repositories. - self.install_manager = install_manager.InstallManager( app=self, - latest_migration_script_number=latest_migration_script_number, - tool_shed_install_config=os.path.join( self.config.root, - 'scripts', - 'migrate_tools', - tools_migration_config ), - migrated_tools_config=self.config.migrated_tools_config, - install_dependencies=install_dependencies ) - @property - def sa_session( self ): - return self.model.context.current - def shutdown( self ): - self.object_store.shutdown() diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/migrate.cfg --- a/lib/galaxy/tool_shed/migrate/migrate.cfg +++ /dev/null @@ -1,20 +0,0 @@ -[db_settings] -# Used to identify which repository this database is versioned under. -# You can use the name of your project. -repository_id=GalaxyTools - -# The name of the database table used to track the schema version. -# This name shouldn't already be used by your project. -# If this is changed once a database is under version control, you'll need to -# change the table name in each database too. -version_table=migrate_tools - -# When committing a change script, Migrate will attempt to generate the -# sql for all supported databases; normally, if one of them fails - probably -# because you don't have that database installed - it is ignored and the -# commit continues, perhaps ending successfully. -# Databases in this list MUST compile successfully during a commit, or the -# entire commit will fail. List the databases your application will actually -# be using to ensure your updates to that database work properly. -# This must be a list; example: ['postgres','sqlite'] -required_dbs=[] \ No newline at end of file diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0001_tools.py --- a/lib/galaxy/tool_shed/migrate/versions/0001_tools.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -Initialize the version column of the migrate_tools database table to 1. No tool migrations are handled in this version. 
-""" -import sys - -def upgrade(): - print __doc__ -def downgrade(): - pass diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0002_tools.py --- a/lib/galaxy/tool_shed/migrate/versions/0002_tools.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -The Emboss 5.0.0 tools have been eliminated from the distribution and the Emboss datatypes have been removed from -datatypes_conf.xml.sample. You should remove the Emboss datatypes from your version of datatypes_conf.xml. The -repositories named emboss_5 and emboss_datatypes from the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu -will be installed into your local Galaxy instance at the location discussed above by running the following command. -""" - -import sys - -def upgrade(): - print __doc__ -def downgrade(): - pass diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0003_tools.py --- a/lib/galaxy/tool_shed/migrate/versions/0003_tools.py +++ /dev/null @@ -1,12 +0,0 @@ -""" -The freebayes tool has been eliminated from the distribution . The repository named freebayes from the main -Galaxy tool shed at http://toolshed.g2.bx.psu.edu will be installed into your local Galaxy instance at the -location discussed above by running the following command. -""" - -import sys - -def upgrade(): - print __doc__ -def downgrade(): - pass diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0004_tools.py --- a/lib/galaxy/tool_shed/migrate/versions/0004_tools.py +++ /dev/null @@ -1,14 +0,0 @@ -""" -The NCBI BLAST+ tools have been eliminated from the distribution. The tools and -datatypes are now available in repositories named ncbi_blast_plus and -blast_datatypes, in the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu. -These repositories will be installed into your local Galaxy instance at the -location discussed above by running the following command. -""" - -import sys - -def upgrade(): - print __doc__ -def downgrade(): - pass diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0005_tools.py --- a/lib/galaxy/tool_shed/migrate/versions/0005_tools.py +++ /dev/null @@ -1,15 +0,0 @@ -""" -The tools "Map with BWA for Illumina" and "Map with BWA for SOLiD" have -been eliminated from the distribution. The tools are now available -in the repository named bwa_wrappers from the main Galaxy tool shed at -http://toolshed.g2.bx.psu.edu, and will be installed into your local -Galaxy instance at the location discussed above by running the following -command. -""" - -import sys - -def upgrade(): - print __doc__ -def downgrade(): - pass diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0006_tools.py --- a/lib/galaxy/tool_shed/migrate/versions/0006_tools.py +++ /dev/null @@ -1,20 +0,0 @@ -""" -The following tools have been eliminated from the distribution: -FASTQ to BAM, SAM to FASTQ, BAM Index Statistics, Estimate Library -Complexity, Insertion size metrics for PAIRED data, SAM/BAM Hybrid -Selection Metrics, bam/sam Cleaning, Add or Replace Groups, Replace -SAM/BAM Header, Paired Read Mate Fixer, Mark Duplicate reads, -SAM/BAM Alignment Summary Metrics, SAM/BAM GC Bias Metrics, and -Reorder SAM/BAM. 
The tools are now available in the repository -named picard from the main Galaxy tool shed at -http://toolshed.g2.bx.psu.edu, and will be installed into your -local Galaxy instance at the location discussed above by running -the following command. -""" - -import sys - -def upgrade(): - print __doc__ -def downgrade(): - pass diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0007_tools.py --- a/lib/galaxy/tool_shed/migrate/versions/0007_tools.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -The following tools have been eliminated from the distribution: -Map with Bowtie for Illumina, Map with Bowtie for SOLiD, Lastz, -and Lastz paired reads. The tools are now available in the -repositories named bowtie_wrappers, bowtie_color_wrappers, lastz, -and lastz_paired_reads from the main Galaxy tool shed at -http://toolshed.g2.bx.psu.edu, and will be installed into your -local Galaxy instance at the location discussed above by running -the following command. -""" - -import sys - -def upgrade(): - print __doc__ -def downgrade(): - pass diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/tool_dependencies/common_util.py --- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py +++ /dev/null @@ -1,149 +0,0 @@ -import os, shutil, tarfile, urllib2, zipfile -from galaxy.datatypes import checkers - -def create_env_var_dict( elem, tool_dependency_install_dir=None, tool_shed_repository_install_dir=None ): - env_var_name = elem.get( 'name', 'PATH' ) - env_var_action = elem.get( 'action', 'prepend_to' ) - env_var_text = None - if elem.text and elem.text.find( 'REPOSITORY_INSTALL_DIR' ) >= 0: - if tool_shed_repository_install_dir and elem.text.find( '$REPOSITORY_INSTALL_DIR' ) != -1: - env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_shed_repository_install_dir ) - return dict( name=env_var_name, action=env_var_action, value=env_var_text ) - else: - env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_dependency_install_dir ) - return dict( name=env_var_name, action=env_var_action, value=env_var_text ) - if elem.text and elem.text.find( 'INSTALL_DIR' ) >= 0: - if tool_dependency_install_dir: - env_var_text = elem.text.replace( '$INSTALL_DIR', tool_dependency_install_dir ) - return dict( name=env_var_name, action=env_var_action, value=env_var_text ) - else: - env_var_text = elem.text.replace( '$INSTALL_DIR', tool_shed_repository_install_dir ) - return dict( name=env_var_name, action=env_var_action, value=env_var_text ) - if elem.text: - # Allow for environment variables that contain neither REPOSITORY_INSTALL_DIR nor INSTALL_DIR since there may be command line - # parameters that are tuned for a Galaxy instance. Allowing them to be set in one location rather than being hard coded into - # each tool config is the best approach. 
-def create_or_update_env_shell_file( install_dir, env_var_dict ):
-    env_var_name = env_var_dict[ 'name' ]
-    env_var_action = env_var_dict[ 'action' ]
-    env_var_value = env_var_dict[ 'value' ]
-    if env_var_action == 'prepend_to':
-        changed_value = '%s:$%s' % ( env_var_value, env_var_name )
-    elif env_var_action == 'set_to':
-        changed_value = '%s' % env_var_value
-    elif env_var_action == 'append_to':
-        changed_value = '$%s:%s' % ( env_var_name, env_var_value )
-    env_shell_file_path = '%s/env.sh' % install_dir
-    if os.path.exists( env_shell_file_path ):
-        write_action = '>>'
-    else:
-        write_action = '>'
-    cmd = "echo '%s=%s; export %s' %s %s;chmod +x %s" % ( env_var_name,
-                                                          changed_value,
-                                                          env_var_name,
-                                                          write_action,
-                                                          env_shell_file_path,
-                                                          env_shell_file_path )
-    return cmd
-def extract_tar( file_name, file_path ):
-    if isgzip( file_name ) or isbz2( file_name ):
-        # Open for reading with transparent compression.
-        tar = tarfile.open( file_name, 'r:*' )
-    else:
-        tar = tarfile.open( file_name )
-    tar.extractall( path=file_path )
-    tar.close()
-def extract_zip( archive_path, extraction_path ):
-    # TODO: change this method to use zipfile.ZipFile.extractall() when we stop supporting Python 2.5.
-    if not zipfile_ok( archive_path ):
-        return False
-    zip_archive = zipfile.ZipFile( archive_path, 'r' )
-    for name in zip_archive.namelist():
-        uncompressed_path = os.path.join( extraction_path, name )
-        if uncompressed_path.endswith( '/' ):
-            if not os.path.isdir( uncompressed_path ):
-                os.makedirs( uncompressed_path )
-        else:
-            file( uncompressed_path, 'wb' ).write( zip_archive.read( name ) )
-    zip_archive.close()
-    return True
-def isbz2( file_path ):
-    return checkers.is_bz2( file_path )
-def isgzip( file_path ):
-    return checkers.is_gzip( file_path )
-def istar( file_path ):
-    return tarfile.is_tarfile( file_path )
-def iszip( file_path ):
-    return checkers.check_zip( file_path )
-def make_directory( full_path ):
-    if not os.path.exists( full_path ):
-        os.makedirs( full_path )
-def move_directory_files( current_dir, source_dir, destination_dir ):
-    source_directory = os.path.abspath( os.path.join( current_dir, source_dir ) )
-    destination_directory = os.path.join( destination_dir )
-    if not os.path.isdir( destination_directory ):
-        os.makedirs( destination_directory )
-    for file_name in os.listdir( source_directory ):
-        source_file = os.path.join( source_directory, file_name )
-        destination_file = os.path.join( destination_directory, file_name )
-        shutil.move( source_file, destination_file )
-def move_file( current_dir, source, destination_dir ):
-    source_file = os.path.abspath( os.path.join( current_dir, source ) )
-    destination_directory = os.path.join( destination_dir )
-    if not os.path.isdir( destination_directory ):
-        os.makedirs( destination_directory )
-    shutil.move( source_file, destination_directory )
-def tar_extraction_directory( file_path, file_name ):
-    """Try to return the correct extraction directory."""
-    file_name = file_name.strip()
-    extensions = [ '.tar.gz', '.tgz', '.tar.bz2', '.tar', '.zip' ]
-    for extension in extensions:
-        if file_name.find( extension ) > 0:
-            dir_name = file_name[ :-len( extension ) ]
-            if os.path.exists( os.path.abspath( os.path.join( file_path, dir_name ) ) ):
-                return dir_name
-    if os.path.exists( os.path.abspath( os.path.join( file_path, file_name ) ) ):
-        return os.path.abspath( file_path )
-    raise ValueError( 'Could not find path to file %s' % os.path.abspath( os.path.join( file_path, file_name ) ) )
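[A companion sketch for create_or_update_env_shell_file above: given the dict from the previous example, it returns a shell fragment that writes the dependency's env.sh file (using >> instead of > once env.sh exists). Paths are again hypothetical.]

    env_var_dict = dict( name='PATH', action='prepend_to', value='/deps/samtools/0.1.18/bin' )
    cmd = create_or_update_env_shell_file( '/deps/samtools/0.1.18', env_var_dict )
    # -> echo 'PATH=/deps/samtools/0.1.18/bin:$PATH; export PATH' > /deps/samtools/0.1.18/env.sh;chmod +x /deps/samtools/0.1.18/env.sh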
-def url_download( install_dir, downloaded_file_name, download_url ):
-    file_path = os.path.join( install_dir, downloaded_file_name )
-    src = None
-    dst = None
-    try:
-        src = urllib2.urlopen( download_url )
-        data = src.read()
-        dst = open( file_path, 'wb' )
-        dst.write( data )
-    except:
-        if src:
-            src.close()
-        if dst:
-            dst.close()
-    return os.path.abspath( file_path )
-def zip_extraction_directory( file_path, file_name ):
-    """Try to return the correct extraction directory."""
-    files = [ filename for filename in os.listdir( file_path ) if not filename.endswith( '.zip' ) ]
-    if len( files ) > 1:
-        return os.path.abspath( file_path )
-    elif len( files ) == 1:
-        # If there is only one file it should be a directory.
-        if os.path.isdir( os.path.join( file_path, files[ 0 ] ) ):
-            return os.path.abspath( os.path.join( file_path, files[ 0 ] ) )
-    raise ValueError( 'Could not find directory for the extracted file %s' % os.path.abspath( os.path.join( file_path, file_name ) ) )
-def zipfile_ok( path_to_archive ):
-    """
-    This function is a bit pedantic and not functionally necessary. It checks whether there is no file pointing outside of the extraction,
-    because ZipFile.extractall() has some potential security holes. See python zipfile documentation for more details.
-    """
-    basename = os.path.realpath( os.path.dirname( path_to_archive ) )
-    zip_archive = zipfile.ZipFile( path_to_archive )
-    for member in zip_archive.namelist():
-        member_path = os.path.realpath( os.path.join( basename, member ) )
-        if not member_path.startswith( basename ):
-            return False
-    return True
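[The zipfile_ok guard above is essentially a zip-slip check: each archive member is resolved with realpath and must remain inside the directory being extracted into. A standalone sketch of the same test, with hypothetical paths:]

    import os

    basename = os.path.realpath( '/tmp/downloads' )
    # A hostile member such as '../../etc/passwd' resolves outside the extraction root...
    member_path = os.path.realpath( os.path.join( basename, '../../etc/passwd' ) )
    assert not member_path.startswith( basename )
    # ...so zipfile_ok returns False and extract_zip refuses to unpack the archive.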
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# For Python 2.5
-from __future__ import with_statement
-
-import os, shutil, tempfile
-from contextlib import contextmanager
-import common_util
-
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( 'ssh' )
-pkg_resources.require( 'Fabric' )
-
-from fabric.api import env, lcd, local, settings
-
-INSTALLATION_LOG = 'INSTALLATION.log'
-
-def check_fabric_version():
-    version = env.version
-    if int( version.split( "." )[ 0 ] ) < 1:
-        raise NotImplementedError( "Install Fabric version 1.0 or later." )
-def set_galaxy_environment( galaxy_user, tool_dependency_dir, host='localhost', shell='/bin/bash -l -c' ):
-    """General Galaxy environment configuration"""
-    env.user = galaxy_user
-    env.install_dir = tool_dependency_dir
-    env.host_string = host
-    env.shell = shell
-    env.use_sudo = False
-    env.safe_cmd = local
-    return env
-@contextmanager
-def make_tmp_dir():
-    work_dir = tempfile.mkdtemp()
-    yield work_dir
-    if os.path.exists( work_dir ):
-        local( 'rm -rf %s' % work_dir )
-def handle_command( app, tool_dependency, install_dir, cmd ):
-    sa_session = app.model.context.current
-    output = local( cmd, capture=True )
-    log_results( cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
-    if output.return_code:
-        tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
-        tool_dependency.error_message = str( output.stderr )
-        sa_session.add( tool_dependency )
-        sa_session.flush()
-    return output.return_code
-def install_and_build_package( app, tool_dependency, actions_dict ):
-    """Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
-    sa_session = app.model.context.current
-    install_dir = actions_dict[ 'install_dir' ]
-    package_name = actions_dict[ 'package_name' ]
-    actions = actions_dict.get( 'actions', None )
-    filtered_actions = []
-    if actions:
-        with make_tmp_dir() as work_dir:
-            with lcd( work_dir ):
-                # The first action in the list of actions will be the one that defines the installation process. There
-                # are currently only two supported processes; download_by_url and clone via a "shell_command" action type.
-                action_type, action_dict = actions[ 0 ]
-                if action_type == 'download_by_url':
-                    # Eliminate the download_by_url action so remaining actions can be processed correctly.
-                    filtered_actions = actions[ 1: ]
-                    url = action_dict[ 'url' ]
-                    if 'target_filename' in action_dict:
-                        downloaded_filename = action_dict[ 'target_filename' ]
-                    else:
-                        downloaded_filename = os.path.split( url )[ -1 ]
-                    downloaded_file_path = common_util.url_download( work_dir, downloaded_filename, url )
-                    if common_util.istar( downloaded_file_path ):
-                        # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1.18.tar.bz2</action>
-                        common_util.extract_tar( downloaded_file_path, work_dir )
-                        dir = common_util.tar_extraction_directory( work_dir, downloaded_filename )
-                    elif common_util.iszip( downloaded_file_path ):
-                        # <action type="download_by_url">http://downloads.sourceforge.net/project/picard/picard-tools/1.56/picard-tools-1.56.zip</action>
-                        zip_archive_extracted = common_util.extract_zip( downloaded_file_path, work_dir )
-                        dir = common_util.zip_extraction_directory( work_dir, downloaded_filename )
-                    else:
-                        dir = work_dir
-                elif action_type == 'shell_command':
-                    # <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action>
-                    # Eliminate the shell_command clone action so remaining actions can be processed correctly.
-                    filtered_actions = actions[ 1: ]
-                    return_code = handle_command( app, tool_dependency, install_dir, action_dict[ 'command' ] )
-                    if return_code:
-                        return
-                    dir = package_name
-                else:
-                    # We're handling a complex repository dependency where we only have a set_environment tag set.
-                    # <action type="set_environment">
-                    #     <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
-                    # </action>
-                    filtered_actions = [ a for a in actions ]
-                    dir = install_dir
-                if not os.path.exists( dir ):
-                    os.makedirs( dir )
-                # The package has been downloaded, so we can now perform all of the actions defined for building it.
-                with lcd( dir ):
-                    for action_tup in filtered_actions:
-                        action_type, action_dict = action_tup
-                        current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
-                        if action_type == 'make_directory':
-                            common_util.make_directory( full_path=action_dict[ 'full_path' ] )
-                        elif action_type == 'move_directory_files':
-                            common_util.move_directory_files( current_dir=current_dir,
                                                              source_dir=os.path.join( action_dict[ 'source_directory' ] ),
                                                              destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
-                        elif action_type == 'move_file':
-                            # TODO: Remove this hack that resets current_dir so that the pre-compiled bwa binary can be found.
-                            # current_dir = '/Users/gvk/workspaces_2008/bwa/bwa-0.5.9'
-                            common_util.move_file( current_dir=current_dir,
                                                   source=os.path.join( action_dict[ 'source' ] ),
                                                   destination_dir=os.path.join( action_dict[ 'destination' ] ) )
-                        elif action_type == 'set_environment':
-                            # Currently the only action supported in this category is "environment_variable".
-                            env_var_dicts = action_dict[ 'environment_variable' ]
-                            for env_var_dict in env_var_dicts:
-                                cmd = common_util.create_or_update_env_shell_file( install_dir, env_var_dict )
-                                return_code = handle_command( app, tool_dependency, install_dir, cmd )
-                                if return_code:
-                                    return
-                        elif action_type == 'shell_command':
-                            with settings( warn_only=True ):
-                                return_code = handle_command( app, tool_dependency, install_dir, action_dict[ 'command' ] )
-                                if return_code:
-                                    return
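[To show the shape of the recipe install_and_build_package consumes, here is a hypothetical actions_dict for the samtools download cited in the comments above; app and tool_dependency are assumed to come from the Galaxy runtime, and all paths are placeholders.]

    actions_dict = dict( install_dir='/deps/samtools/0.1.18',
                         package_name='samtools-0.1.18',
                         actions=[ ( 'download_by_url', dict( url='http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1.18.tar.bz2' ) ),
                                   ( 'shell_command', dict( command='make' ) ),
                                   ( 'move_file', dict( source='samtools', destination='/deps/samtools/0.1.18/bin' ) ) ] )
    # The first action selects the installation process (download_by_url here);
    # the remaining actions run inside the unpacked samtools-0.1.18 directory.
    install_and_build_package( app, tool_dependency, actions_dict )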
- # <action type="set_environment"> - # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable> - # </action> - filtered_actions = [ a for a in actions ] - dir = install_dir - if not os.path.exists( dir ): - os.makedirs( dir ) - # The package has been down-loaded, so we can now perform all of the actions defined for building it. - with lcd( dir ): - for action_tup in filtered_actions: - action_type, action_dict = action_tup - current_dir = os.path.abspath( os.path.join( work_dir, dir ) ) - if action_type == 'make_directory': - common_util.make_directory( full_path=action_dict[ 'full_path' ] ) - elif action_type == 'move_directory_files': - common_util.move_directory_files( current_dir=current_dir, - source_dir=os.path.join( action_dict[ 'source_directory' ] ), - destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) ) - elif action_type == 'move_file': - # TODO: Remove this hack that resets current_dir so that the pre-compiled bwa binary can be found. - # current_dir = '/Users/gvk/workspaces_2008/bwa/bwa-0.5.9' - common_util.move_file( current_dir=current_dir, - source=os.path.join( action_dict[ 'source' ] ), - destination_dir=os.path.join( action_dict[ 'destination' ] ) ) - elif action_type == 'set_environment': - # Currently the only action supported in this category is "environment_variable". - env_var_dicts = action_dict[ 'environment_variable' ] - for env_var_dict in env_var_dicts: - cmd = common_util.create_or_update_env_shell_file( install_dir, env_var_dict ) - return_code = handle_command( app, tool_dependency, install_dir, cmd ) - if return_code: - return - elif action_type == 'shell_command': - with settings( warn_only=True ): - return_code = handle_command( app, tool_dependency, install_dir, action_dict[ 'command' ] ) - if return_code: - return -def log_results( command, fabric_AttributeString, file_path ): - """ - Write attributes of fabric.operations._AttributeString (which is the output of executing command using fabric's local() method) - to a specified log file. - """ - if os.path.exists( file_path ): - logfile = open( file_path, 'ab' ) - else: - logfile = open( file_path, 'wb' ) - logfile.write( "\n#############################################\n" ) - logfile.write( '%s\nSTDOUT\n' % command ) - logfile.write( str( fabric_AttributeString.stdout ) ) - logfile.write( "\n#############################################\n" ) - logfile.write( "\n#############################################\n" ) - logfile.write( '%s\nSTDERR\n' % command ) - logfile.write( str( fabric_AttributeString.stderr ) ) - logfile.write( "\n#############################################\n" ) - logfile.close() This diff is so big that we needed to truncate the remainder. Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.