commit/galaxy-central: greg: Enhance the tool shed repository installation process so that repository tools can be loaded into the tool panel outside of any sections. Enhance the install manager to use this enhancement. Create a new shed_util module that contains common methods used between the install manager and the admin_toolshed controller, and modify these components to import these common methods. Significant code cleanup and miscellaneous bug fixes included as well.
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/changeset/7dd3a0891011/ changeset: 7dd3a0891011 user: greg date: 2011-12-13 15:44:50 summary: Enhance the tool shed repository installation process so that repository tools can be loaded into the tool panel outside of any sections. Enhance the install manager to use this enhancement. Create a new shed_util module that contains common mentods used between the install manager and the admin_toolshed controller, and modify these components to import these common methods. Significant code cleanup and miscellaneous bug fixes included as well. affected #: 5 files diff -r 9c46a216e24c529a496a50afec13ebcb78106b96 -r 7dd3a089101138a4796eb73a5f1391d2f436723e lib/galaxy/tools/install_manager.py --- a/lib/galaxy/tools/install_manager.py +++ b/lib/galaxy/tools/install_manager.py @@ -4,18 +4,7 @@ shed. Tools included in tool_shed_install.xml that have already been installed will not be re-installed. """ -from galaxy import util -from galaxy.tools import ToolSection -from galaxy.tools.search import ToolBoxSearch -from galaxy import model -from galaxy.web.controllers.admin_toolshed import generate_metadata, generate_tool_panel_section, add_shed_tool_conf_entry, create_or_undelete_tool_shed_repository -from galaxy.web.controllers.admin_toolshed import handle_missing_data_table_entry, handle_missing_index_file, handle_tool_dependencies -from galaxy.model.orm import * -import os, subprocess, tempfile, logging - -pkg_resources.require( 'elementtree' ) -from elementtree import ElementTree, ElementInclude -from elementtree.ElementTree import Element +from galaxy.util.shed_util import * log = logging.getLogger( __name__ ) @@ -39,192 +28,110 @@ self.tool_shed_install_config = tool_shed_install_config tree = util.parse_xml( tool_shed_install_config ) root = tree.getroot() - self.tool_shed = root.get( 'name' ) + self.tool_shed = clean_tool_shed_url( root.get( 'name' ) ) log.debug( "Repositories will be installed from 
tool shed '%s' into configured tool_path location '%s'" % ( str( self.tool_shed ), str( self.tool_path ) ) ) self.repository_owner = 'devteam' for elem in root: - if elem.tag == 'tool': - self.check_tool( elem ) + if elem.tag == 'repository': + self.install_repository( elem ) elif elem.tag == 'section': - self.check_section( elem ) - def check_tool( self, elem ): - # TODO: write this method. - pass - def check_section( self, elem ): + self.install_section( elem ) + def install_repository( self, elem, section_name='', section_id='' ): + # Install a single repository into the tool config. If outside of any sections, the entry looks something like: + # <repository name="cut_wrapper" description="Galaxy wrapper for the Cut tool" changeset_revision="f3ed6cfe6402"> + # <tool id="Cut1" version="1.0.1" /> + # </repository> + name = elem.get( 'name' ) + description = elem.get( 'description' ) + changeset_revision = elem.get( 'changeset_revision' ) + # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<changeset revision> + clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, changeset_revision ) + if self.__isinstalled( elem, clone_dir ): + log.debug( "Skipping automatic install of repository '%s' because it has already been installed in location '%s'" % ( name, clone_dir ) ) + else: + if section_name and section_id: + section_key = 'section_%s' % str( section_id ) + if section_key in self.app.toolbox.tool_panel: + # Appending a tool to an existing section in self.app.toolbox.tool_panel + log.debug( "Appending to tool panel section: %s" % section_name ) + tool_section = self.app.toolbox.tool_panel[ section_key ] + else: + # Appending a new section to self.app.toolbox.tool_panel + log.debug( "Loading new tool panel section: %s" % section_name ) + new_section_elem = Element( 'section' ) + new_section_elem.attrib[ 'name' ] = section_name + new_section_elem.attrib[ 'id' ] = section_id + 
tool_section = ToolSection( new_section_elem ) + self.app.toolbox.tool_panel[ section_key ] = tool_section + else: + tool_section = None + current_working_dir = os.getcwd() + tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed ) + repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name ) + relative_install_dir = os.path.join( clone_dir, name ) + returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url ) + if returncode == 0: + returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision ) + if returncode == 0: + metadata_dict = load_repository_contents( self.app, + name, + description, + self.repository_owner, + changeset_revision, + repository_clone_url, + self.install_tool_config, + self.tool_path, + tool_section, + relative_install_dir, + current_working_dir, + tmp_name ) + # Add a new record to the tool_id_guid_map table for each + # tool in the repository if one doesn't already exist. + if 'tools' in metadata_dict: + tools_mapped = 0 + for tool_dict in metadata_dict[ 'tools' ]: + flush_needed = False + tool_id = tool_dict[ 'id' ] + tool_version = tool_dict[ 'version' ] + guid = tool_dict[ 'guid' ] + tool_id_guid_map = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name ) + if tool_id_guid_map: + if tool_id_guid_map.guid != guid: + tool_id_guid_map.guid = guid + flush_needed = True + else: + tool_id_guid_map = self.app.model.ToolIdGuidMap( tool_id=tool_id, + tool_version=tool_version, + tool_shed=self.tool_shed, + repository_owner=self.repository_owner, + repository_name=name, + guid=guid ) + flush_needed = True + if flush_needed: + self.sa_session.add( tool_id_guid_map ) + self.sa_session.flush() + tools_mapped += 1 + log.debug( "Mapped tool ids to guids for %d tools included in repository '%s'." 
% ( tools_mapped, name ) ) + else: + tmp_stderr = open( tmp_name, 'rb' ) + log.debug( "Error updating repository '%s': %s" % ( name, tmp_stderr.read() ) ) + tmp_stderr.close() + else: + tmp_stderr = open( tmp_name, 'rb' ) + log.debug( "Error cloning repository '%s': %s" % ( name, tmp_stderr.read() ) ) + tmp_stderr.close() + def install_section( self, elem ): + # Install 1 or more repositories into a section in the tool config. An entry looks something like: + # <section name="EMBOSS" id="EMBOSSLite"> + # <repository name="emboss_5" description="Galaxy wrappers for EMBOSS version 5 tools" changeset_revision="bdd88ae5d0ac"> + # <tool file="emboss_5/emboss_antigenic.xml" id="EMBOSS: antigenic1" version="5.0.0" /> + # ... + # </repository> + # </section> section_name = elem.get( 'name' ) section_id = elem.get( 'id' ) for repository_elem in elem: - name = repository_elem.get( 'name' ) - description = repository_elem.get( 'description' ) - changeset_revision = repository_elem.get( 'changeset_revision' ) - installed = False - for tool_elem in repository_elem: - tool_config = tool_elem.get( 'file' ) - tool_id = tool_elem.get( 'id' ) - tool_version = tool_elem.get( 'version' ) - tigm = self.__get_tool_id_guid_map_by_id_version( tool_id, tool_version ) - if tigm: - # A record exists in the tool_id_guid_map - # table, so see if the tool is still installed. 
- install_path = self.__generate_install_path( tigm ) - if os.path.exists( install_path ): - message = "Skipping automatic install of repository '%s' because it has already been installed in location '%s'" % \ - ( name, install_path ) - log.debug( message ) - installed = True - break - if not installed: - log.debug( "Installing repository '%s' from tool shed '%s'" % ( name, self.tool_shed ) ) - current_working_dir = os.getcwd() - tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed ) - repository_clone_url = '%s/repos/devteam/%s' % ( tool_shed_url, name ) - # Install path is of the form: <tool path><tool shed>/repos/<repository owner>/<repository name>/<changeset revision> - clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos/devteam', name, changeset_revision ) - if not os.path.isdir( clone_dir ): - os.makedirs( clone_dir ) - log.debug( 'Cloning %s...' % repository_clone_url ) - cmd = 'hg clone %s' % repository_clone_url - tmp_name = tempfile.NamedTemporaryFile().name - tmp_stderr = open( tmp_name, 'wb' ) - os.chdir( clone_dir ) - proc = subprocess.Popen( args=cmd, shell=True, stderr=tmp_stderr.fileno() ) - returncode = proc.wait() - os.chdir( current_working_dir ) - tmp_stderr.close() - if returncode == 0: - # Update the cloned repository to changeset_revision. It is imperative that the - # installed repository is updated to the desired changeset_revision before metadata - # is set because the process for setting metadata uses the repository files on disk. 
- relative_install_dir = os.path.join( clone_dir, name ) - log.debug( 'Updating cloned repository to revision "%s"' % changeset_revision ) - cmd = 'hg update -r %s' % changeset_revision - tmp_name = tempfile.NamedTemporaryFile().name - tmp_stderr = open( tmp_name, 'wb' ) - os.chdir( relative_install_dir ) - proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() ) - returncode = proc.wait() - os.chdir( current_working_dir ) - tmp_stderr.close() - if returncode == 0: - # Generate the metadata for the installed tool shed repository. It is imperative that - # the installed repository is updated to the desired changeset_revision before metadata - # is set because the process for setting metadata uses the repository files on disk. - metadata_dict = generate_metadata( self.app.toolbox, relative_install_dir, repository_clone_url ) - if 'datatypes_config' in metadata_dict: - datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] ) - # Load data types required by tools. - self.__load_datatypes( trans, datatypes_config, relative_install_dir ) - if 'tools' in metadata_dict: - repository_tools_tups = [] - for tool_dict in metadata_dict[ 'tools' ]: - relative_path = tool_dict[ 'tool_config' ] - guid = tool_dict[ 'guid' ] - tool = self.app.toolbox.load_tool( os.path.abspath( relative_path ) ) - repository_tools_tups.append( ( relative_path, guid, tool ) ) - if repository_tools_tups: - sample_files = metadata_dict.get( 'sample_files', [] ) - # Handle missing data table entries for tool parameters that are dynamically generated select lists. - repository_tools_tups = handle_missing_data_table_entry( self.app, self.tool_path, sample_files, repository_tools_tups ) - # Handle missing index files for tool parameters that are dynamically generated select lists. - repository_tools_tups = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups ) - # Handle tools that use fabric scripts to install dependencies. 
- handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups ) - section_key = 'section_%s' % str( section_id ) - if section_key in self.app.toolbox.tool_panel: - # Appending a tool to an existing section in self.app.toolbox.tool_panel - log.debug( "Appending to tool panel section: %s" % section_name ) - tool_section = self.app.toolbox.tool_panel[ section_key ] - else: - # Appending a new section to self.app.toolbox.tool_panel - log.debug( "Loading new tool panel section: %s" % section_name ) - elem = Element( 'section' ) - elem.attrib[ 'name' ] = section_name - elem.attrib[ 'id' ] = section_id - tool_section = ToolSection( elem ) - self.app.toolbox.tool_panel[ section_key ] = tool_section - # Generate an in-memory tool conf section that includes the new tools. - new_tool_section = generate_tool_panel_section( name, - repository_clone_url, - changeset_revision, - tool_section, - repository_tools_tups, - owner=self.repository_owner ) - # Create a temporary file to persist the in-memory tool section - # TODO: Figure out how to do this in-memory using xml.etree. - tmp_name = tempfile.NamedTemporaryFile().name - persisted_new_tool_section = open( tmp_name, 'wb' ) - persisted_new_tool_section.write( new_tool_section ) - persisted_new_tool_section.close() - # Parse the persisted tool panel section - tree = util.parse_xml( tmp_name ) - root = tree.getroot() - # Load the tools in the section into the tool panel. - self.app.toolbox.load_section_tag_set( root, self.app.toolbox.tool_panel, self.tool_path ) - # Remove the temporary file - try: - os.unlink( tmp_name ) - except: - pass - # Append the new section to the shed_tool_config file. - add_shed_tool_conf_entry( self.app, self.install_tool_config, new_tool_section ) - if self.app.toolbox_search.enabled: - # If search support for tools is enabled, index the new installed tools. 
- self.app.toolbox_search = ToolBoxSearch( self.app.toolbox ) - # Add a new record to the tool_shed_repository table if one doesn't - # already exist. If one exists but is marked deleted, undelete it. - log.debug( "Adding new row to tool_shed_repository table for repository '%s'" % name ) - create_or_undelete_tool_shed_repository( self.app, - name, - description, - changeset_revision, - repository_clone_url, - metadata_dict, - owner=self.repository_owner ) - # Add a new record to the tool_id_guid_map table for each - # tool in the repository if one doesn't already exist. - if 'tools' in metadata_dict: - tools_mapped = 0 - for tool_dict in metadata_dict[ 'tools' ]: - tool_id = tool_dict[ 'id' ] - tool_version = tool_dict[ 'version' ] - guid = tool_dict[ 'guid' ] - tool_id_guid_map = model.ToolIdGuidMap( tool_id=tool_id, - tool_version=tool_version, - tool_shed=self.tool_shed, - repository_owner=self.repository_owner, - repository_name=name, - guid=guid ) - self.sa_session.add( tool_id_guid_map ) - self.sa_session.flush() - tools_mapped += 1 - log.debug( "Mapped tool ids to guids for %d tools included in repository '%s'." % ( tools_mapped, name ) ) - def __generate_install_path( self, tool_id_guid_map ): - """ - Generate a tool path in which a tool is or will be installed. 
The tool path will be of the form: - <tool shed>/repos/<repository owner>/<repository name>/<changeset revision> - """ - tool_shed = tool_id_guid_map.tool_shed - repository_name = tool_id_guid_map.repository_name - tool_shed_repository = self.__get_repository_by_tool_shed_name_owner( tool_shed, repository_name, self.repository_owner ) - changeset_revision = tool_shed_repository.changeset_revision - return '%s/repos%s/%s/%s/%s' % ( tool_shed, self.repository_owner, repository_name, changeset_revision ) - def __get_repository_by_tool_shed_name_owner( tool_shed, name, owner ): - """Get a repository from the database via tool_shed, name and owner.""" - # CRITICAL: this assumes that a single changeset_revision exists for each repository - # in the tool shed. In other words, if a repository has multiple changset_revisions - # there will be problems. We're probably safe here because only a single changeset_revision - # for each tool shed repository will be installed using this installation process. 
- return self.sa_session.query( self.app.model.ToolShedRepository ) \ - .filter( and_( self.app.model.ToolShedRepository.table.c.tool_shed == tool_shed, - self.app.model.ToolShedRepository.table.c.name == name, - self.app.model.ToolShedRepository.table.c.owner == owner ) ) \ - .first() - def __get_tool_id_guid_map_by_id_version( self, tool_id, tool_version ): - """Get a tool_id_guid_map from the database via tool_id and tool_version.""" - return self.sa_session.query( self.app.model.ToolIdGuidMap ) \ - .filter( and_( self.app.model.ToolIdGuidMap.table.c.tool_id == tool_id, - self.app.model.ToolIdGuidMap.table.c.tool_version == tool_version ) ) \ - .first() + self.install_repository( repository_elem, section_name=section_name, section_id=section_id ) def __get_url_from_tool_shed( self, tool_shed ): # The value of tool_shed is something like: toolshed.g2.bx.psu.edu # We need the URL to this tool shed, which is something like: @@ -237,3 +144,17 @@ # The tool shed from which the repository was originally # installed must no longer be configured in tool_sheds_conf.xml. return None + def __isinstalled( self, repository_elem, clone_dir ): + name = repository_elem.get( 'name' ) + installed = False + for tool_elem in repository_elem: + tool_config = tool_elem.get( 'file' ) + tool_id = tool_elem.get( 'id' ) + tool_version = tool_elem.get( 'version' ) + tigm = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name ) + if tigm: + # A record exists in the tool_id_guid_map table, so see if the repository is installed. 
+ if os.path.exists( clone_dir ): + installed = True + break + return installed diff -r 9c46a216e24c529a496a50afec13ebcb78106b96 -r 7dd3a089101138a4796eb73a5f1391d2f436723e lib/galaxy/util/shed_util.py --- /dev/null +++ b/lib/galaxy/util/shed_util.py @@ -0,0 +1,548 @@ +import os, tempfile, shutil, subprocess, logging +from datetime import date, datetime, timedelta +from time import strftime +from galaxy import util +from galaxy.tools import ToolSection +from galaxy.tools.search import ToolBoxSearch +from galaxy.model.orm import * + +pkg_resources.require( 'elementtree' ) +from elementtree import ElementTree, ElementInclude +from elementtree.ElementTree import Element, SubElement, tostring + +log = logging.getLogger( __name__ ) + +def add_shed_tool_conf_entry( app, shed_tool_conf, tool_panel_entry ): + """ + Add an entry in the shed_tool_conf file. An entry looks something like: + <section name="Filter and Sort" id="filter"> + <tool file="filter/filtering.xml" guid="toolshed.g2.bx.psu.edu/repos/test/filter/1.0.2"/> + </section> + This method is used by the InstallManager, which does not have access to trans. + """ + # Make a backup of the hgweb.config file since we're going to be changing it. + if not os.path.exists( shed_tool_conf ): + output = open( shed_tool_conf, 'w' ) + output.write( '<?xml version="1.0"?>\n' ) + output.write( '<toolbox tool_path="%s">\n' % tool_path ) + output.write( '</toolbox>\n' ) + output.close() + # Make a backup of the shed_tool_conf file. + today = date.today() + backup_date = today.strftime( "%Y_%m_%d" ) + shed_tool_conf_copy = '%s/%s_%s_backup' % ( app.config.root, shed_tool_conf, backup_date ) + shutil.copy( os.path.abspath( shed_tool_conf ), os.path.abspath( shed_tool_conf_copy ) ) + tmp_fd, tmp_fname = tempfile.mkstemp() + new_shed_tool_conf = open( tmp_fname, 'wb' ) + for i, line in enumerate( open( shed_tool_conf ) ): + if line.startswith( '</toolbox>' ): + # We're at the end of the original config file, so add our entry. 
+ new_shed_tool_conf.write( ' ' ) + new_shed_tool_conf.write( tostring( pretty_print_xml( tool_panel_entry ) ) ) + new_shed_tool_conf.write( line ) + else: + new_shed_tool_conf.write( line ) + new_shed_tool_conf.close() + shutil.move( tmp_fname, os.path.abspath( shed_tool_conf ) ) +def clean_repository_clone_url( repository_clone_url ): + if repository_clone_url.find( '@' ) > 0: + # We have an url that includes an authenticated user, something like: + # http://test@bx.psu.edu:9009/repos/some_username/column + items = repository_clone_url.split( '@' ) + tmp_url = items[ 1 ] + elif repository_clone_url.find( '//' ) > 0: + # We have an url that includes only a protocol, something like: + # http://bx.psu.edu:9009/repos/some_username/column + items = repository_clone_url.split( '//' ) + tmp_url = items[ 1 ] + else: + tmp_url = repository_clone_url + return tmp_url +def clean_tool_shed_url( tool_shed_url ): + if tool_shed_url.find( ':' ) > 0: + # Eliminate the port, if any, since it will result in an invalid directory name. + return tool_shed_url.split( ':' )[ 0 ] + return tool_shed_url.rstrip( '/' ) +def clone_repository( name, clone_dir, current_working_dir, repository_clone_url ): + log.debug( "Installing repository '%s'" % name ) + os.makedirs( clone_dir ) + log.debug( 'Cloning %s' % repository_clone_url ) + cmd = 'hg clone %s' % repository_clone_url + tmp_name = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_name, 'wb' ) + os.chdir( clone_dir ) + proc = subprocess.Popen( args=cmd, shell=True, stderr=tmp_stderr.fileno() ) + returncode = proc.wait() + os.chdir( current_working_dir ) + tmp_stderr.close() + return returncode, tmp_name +def create_or_undelete_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='' ): + # This method is used by the InstallManager, which does not have access to trans. 
+ sa_session = app.model.context.current + tmp_url = clean_repository_clone_url( repository_clone_url ) + tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) + if not owner: + owner = get_repository_owner( tmp_url ) + includes_datatypes = 'datatypes_config' in metadata_dict + flush_needed = False + tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ) + if tool_shed_repository: + if tool_shed_repository.deleted: + tool_shed_repository.deleted = False + # Reset includes_datatypes in case metadata changed since last installed. + tool_shed_repository.includes_datatypes = includes_datatypes + flush_needed = True + else: + tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed, + name=name, + description=description, + owner=owner, + changeset_revision=changeset_revision, + metadata=metadata_dict, + includes_datatypes=includes_datatypes ) + flush_needed = True + if flush_needed: + sa_session.add( tool_shed_repository ) + sa_session.flush() +def generate_datatypes_metadata( datatypes_config, metadata_dict ): + """ + Update the received metadata_dict with changes that have been applied + to the received datatypes_config. This method is used by the InstallManager, + which does not have access to trans. + """ + # Parse datatypes_config. 
+ tree = ElementTree.parse( datatypes_config ) + root = tree.getroot() + ElementInclude.include( root ) + repository_datatype_code_files = [] + datatype_files = root.find( 'datatype_files' ) + if datatype_files: + for elem in datatype_files.findall( 'datatype_file' ): + name = elem.get( 'name', None ) + repository_datatype_code_files.append( name ) + metadata_dict[ 'datatype_files' ] = repository_datatype_code_files + datatypes = [] + registration = root.find( 'registration' ) + if registration: + for elem in registration.findall( 'datatype' ): + extension = elem.get( 'extension', None ) + dtype = elem.get( 'type', None ) + mimetype = elem.get( 'mimetype', None ) + datatypes.append( dict( extension=extension, + dtype=dtype, + mimetype=mimetype ) ) + metadata_dict[ 'datatypes' ] = datatypes + return metadata_dict +def generate_metadata( toolbox, relative_install_dir, repository_clone_url ): + """ + Browse the repository files on disk to generate metadata. Since we are using disk files, it + is imperative that the repository is updated to the desired change set revision before metadata + is generated. This method is used by the InstallManager, which does not have access to trans. + """ + metadata_dict = {} + sample_files = [] + datatypes_config = None + # Find datatypes_conf.xml if it exists. + for root, dirs, files in os.walk( relative_install_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name == 'datatypes_conf.xml': + relative_path = os.path.join( root, name ) + datatypes_config = os.path.abspath( relative_path ) + break + if datatypes_config: + metadata_dict[ 'datatypes_config' ] = relative_path + metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict ) + # Find all special .sample files. 
+ for root, dirs, files in os.walk( relative_install_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name.endswith( '.sample' ): + sample_files.append( os.path.join( root, name ) ) + if sample_files: + metadata_dict[ 'sample_files' ] = sample_files + # Find all tool configs and exported workflows. + for root, dirs, files in os.walk( relative_install_dir ): + if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0: + if '.hg' in dirs: + dirs.remove( '.hg' ) + for name in files: + # Find all tool configs. + if name != 'datatypes_conf.xml' and name.endswith( '.xml' ): + full_path = os.path.abspath( os.path.join( root, name ) ) + try: + tool = toolbox.load_tool( full_path ) + except Exception, e: + tool = None + if tool is not None: + tool_config = os.path.join( root, name ) + metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ) + # Find all exported workflows + elif name.endswith( '.ga' ): + relative_path = os.path.join( root, name ) + fp = open( relative_path, 'rb' ) + workflow_text = fp.read() + fp.close() + exported_workflow_dict = from_json_string( workflow_text ) + if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': + metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ) + return metadata_dict +def generate_tool_guid( repository_clone_url, tool ): + """ + Generate a guid for the installed tool. It is critical that this guid matches the guid for + the tool in the Galaxy tool shed from which it is being installed. 
The form of the guid is + <tool shed host>/repos/<repository owner>/<repository name>/<tool id>/<tool version> + """ + tmp_url = clean_repository_clone_url( repository_clone_url ) + return '%s/%s/%s' % ( tmp_url, tool.id, tool.version ) +def generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ): + """ + Update the received metadata_dict with changes that have been + applied to the received tool. This method is used by the InstallManager, + which does not have access to trans. + """ + # Generate the guid + guid = generate_tool_guid( repository_clone_url, tool ) + # Handle tool.requirements. + tool_requirements = [] + for tr in tool.requirements: + name=tr.name + type=tr.type + if type == 'fabfile': + version = None + fabfile = tr.fabfile + method = tr.method + else: + version = tr.version + fabfile = None + method = None + requirement_dict = dict( name=name, + type=type, + version=version, + fabfile=fabfile, + method=method ) + tool_requirements.append( requirement_dict ) + # Handle tool.tests. 
+ tool_tests = [] + if tool.tests: + for ttb in tool.tests: + test_dict = dict( name=ttb.name, + required_files=ttb.required_files, + inputs=ttb.inputs, + outputs=ttb.outputs ) + tool_tests.append( test_dict ) + tool_dict = dict( id=tool.id, + guid=guid, + name=tool.name, + version=tool.version, + description=tool.description, + version_string_cmd = tool.version_string_cmd, + tool_config=tool_config, + requirements=tool_requirements, + tests=tool_tests ) + if 'tools' in metadata_dict: + metadata_dict[ 'tools' ].append( tool_dict ) + else: + metadata_dict[ 'tools' ] = [ tool_dict ] + return metadata_dict +def generate_tool_panel_elem_list( repository_name, repository_clone_url, changeset_revision, repository_tools_tups, tool_section=None, owner='' ): + """Generate a list of ElementTree Element objects for each section or list of tools.""" + elem_list = [] + tmp_url = clean_repository_clone_url( repository_clone_url ) + if not owner: + owner = get_repository_owner( tmp_url ) + if tool_section: + root_elem = Element( 'section' ) + root_elem.attrib[ 'name' ] = tool_section.name + root_elem.attrib[ 'id' ] = tool_section.id + for repository_tool_tup in repository_tools_tups: + tool_file_path, guid, tool = repository_tool_tup + if tool_section: + tool_elem = SubElement( root_elem, 'tool' ) + else: + tool_elem = Element( 'tool' ) + tool_elem.attrib[ 'file' ] = tool_file_path + tool_elem.attrib[ 'guid' ] = guid + tool_shed_elem = SubElement( tool_elem, 'tool_shed' ) + tool_shed_elem.text = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) + repository_name_elem = SubElement( tool_elem, 'repository_name' ) + repository_name_elem.text = repository_name + repository_owner_elem = SubElement( tool_elem, 'repository_owner' ) + repository_owner_elem.text = owner + changeset_revision_elem = SubElement( tool_elem, 'changeset_revision' ) + changeset_revision_elem.text = changeset_revision + id_elem = SubElement( tool_elem, 'id' ) + id_elem.text = tool.id + version_elem = SubElement( 
tool_elem, 'version' ) + version_elem.text = tool.version + if tool_section: + elem_list.append( root_elem ) + else: + elem_list.append( tool_elem ) + return elem_list +def generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ): + """ + Update the received metadata_dict with changes that have been applied + to the received exported_workflow_dict. Store everything in the database. + This method is used by the InstallManager, which does not have access to trans. + """ + if 'workflows' in metadata_dict: + metadata_dict[ 'workflows' ].append( ( relative_path, exported_workflow_dict ) ) + else: + metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ] + return metadata_dict +def get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ): + # This method is used by the InstallManager, which does not have access to trans. + sa_session = app.model.context.current + if tool_shed.find( '//' ) > 0: + tool_shed = tool_shed.split( '//' )[1] + return sa_session.query( app.model.ToolShedRepository ) \ + .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed, + app.model.ToolShedRepository.table.c.name == name, + app.model.ToolShedRepository.table.c.owner == owner, + app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ + .first() +def get_repository_owner( cleaned_repository_url ): + items = cleaned_repository_url.split( 'repos' ) + repo_path = items[ 1 ] + if repo_path.startswith( '/' ): + repo_path = repo_path.replace( '/', '', 1 ) + return repo_path.lstrip( '/' ).split( '/' )[ 0 ] +def get_tool_id_guid_map( app, tool_id, version, tool_shed, repository_owner, repository_name ): + # This method is used by the InstallManager, which does not have access to trans. 
+ sa_session = app.model.context.current + return sa_session.query( app.model.ToolIdGuidMap ) \ + .filter( and_( app.model.ToolIdGuidMap.table.c.tool_id == tool_id, + app.model.ToolIdGuidMap.table.c.tool_version == version, + app.model.ToolIdGuidMap.table.c.tool_shed == tool_shed, + app.model.ToolIdGuidMap.table.c.repository_owner == repository_owner, + app.model.ToolIdGuidMap.table.c.repository_name == repository_name ) ) \ + .first() +def handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups ): + """ + Inspect each tool to see if any have input parameters that are dynamically + generated select lists that require entries in the tool_data_table_conf.xml file. + This method is used by the InstallManager, which does not have access to trans. + """ + missing_data_table_entry = False + for index, repository_tools_tup in enumerate( repository_tools_tups ): + tup_path, guid, repository_tool = repository_tools_tup + if repository_tool.params_with_missing_data_table_entry: + missing_data_table_entry = True + break + if missing_data_table_entry: + # The repository must contain a tool_data_table_conf.xml.sample file that includes + # all required entries for all tools in the repository. + for sample_file in sample_files: + head, tail = os.path.split( sample_file ) + if tail == 'tool_data_table_conf.xml.sample': + break + error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_file ) + if error: + # TODO: Do more here than logging an exception. + log.debug( exception_msg ) + # Reload the tool into the local list of repository_tools_tups. 
+ repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ) ) + repository_tools_tups[ index ] = ( tup_path, repository_tool ) + return repository_tools_tups +def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups ): + """ + Inspect each tool to see if it has any input parameters that + are dynamically generated select lists that depend on a .loc file. + This method is used by the InstallManager, which does not have access to trans. + """ + missing_files_handled = [] + for index, repository_tools_tup in enumerate( repository_tools_tups ): + tup_path, guid, repository_tool = repository_tools_tup + params_with_missing_index_file = repository_tool.params_with_missing_index_file + for param in params_with_missing_index_file: + options = param.options + missing_head, missing_tail = os.path.split( options.missing_index_file ) + if missing_tail not in missing_files_handled: + # The repository must contain the required xxx.loc.sample file. + for sample_file in sample_files: + sample_head, sample_tail = os.path.split( sample_file ) + if sample_tail == '%s.sample' % missing_tail: + copy_sample_loc_file( app, sample_file ) + if options.tool_data_table and options.tool_data_table.missing_index_file: + options.tool_data_table.handle_found_index_file( options.missing_index_file ) + missing_files_handled.append( missing_tail ) + break + # Reload the tool into the local list of repository_tools_tups. + repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ) ) + repository_tools_tups[ index ] = ( tup_path, guid, repository_tool ) + return repository_tools_tups +def handle_tool_dependencies( current_working_dir, repo_files_dir, repository_tools_tups ): + """ + Inspect each tool to see if it includes a "requirement" that refers to a fabric + script. For those that do, execute the fabric script to install tool dependencies. + This method is used by the InstallManager, which does not have access to trans. 
+ """ + for index, repository_tools_tup in enumerate( repository_tools_tups ): + tup_path, guid, repository_tool = repository_tools_tup + for requirement in repository_tool.requirements: + if requirement.type == 'fabfile': + log.debug( 'Executing fabric script to install dependencies for tool "%s"...' % repository_tool.name ) + fabfile = requirement.fabfile + method = requirement.method + # Find the relative path to the fabfile. + relative_fabfile_path = None + for root, dirs, files in os.walk( repo_files_dir ): + for name in files: + if name == fabfile: + relative_fabfile_path = os.path.join( root, name ) + break + if relative_fabfile_path: + # cmd will look something like: fab -f fabfile.py install_bowtie + cmd = 'fab -f %s %s' % ( relative_fabfile_path, method ) + tmp_name = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_name, 'wb' ) + os.chdir( repo_files_dir ) + proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() ) + returncode = proc.wait() + os.chdir( current_working_dir ) + tmp_stderr.close() + if returncode != 0: + # TODO: do something more here than logging the problem. + tmp_stderr = open( tmp_name, 'rb' ) + error = tmp_stderr.read() + tmp_stderr.close() + log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) ) +def load_datatypes( app, datatypes_config, relative_intall_dir ): + # This method is used by the InstallManager, which does not have access to trans. + imported_module = None + # Parse datatypes_config. + tree = parse_xml( datatypes_config ) + datatypes_config_root = tree.getroot() + relative_path_to_datatype_file_name = None + datatype_files = datatypes_config_root.find( 'datatype_files' ) + # Currently only a single datatype_file is supported. 
For example: + # <datatype_files> + # <datatype_file name="gmap.py"/> + # </datatype_files> + for elem in datatype_files.findall( 'datatype_file' ): + datatype_file_name = elem.get( 'name', None ) + if datatype_file_name: + # Find the file in the installed repository. + for root, dirs, files in os.walk( relative_intall_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name == datatype_file_name: + relative_path_to_datatype_file_name = os.path.join( root, name ) + break + break + if relative_path_to_datatype_file_name: + relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name ) + registration = datatypes_config_root.find( 'registration' ) + # Get the module by parsing the <datatype> tag. + for elem in registration.findall( 'datatype' ): + # A 'type' attribute is currently required. The attribute + # should be something like: type="gmap:GmapDB". + dtype = elem.get( 'type', None ) + if dtype: + fields = dtype.split( ':' ) + datatype_module = fields[0] + datatype_class_name = fields[1] + # Since we currently support only a single datatype_file, + # we have what we need. + break + try: + sys.path.insert( 0, relative_head ) + imported_module = __import__( datatype_module ) + sys.path.pop( 0 ) + except Exception, e: + log.debug( "Exception importing datatypes code file included in installed repository: %s" % str( e ) ) + app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=datatypes_config, imported_module=imported_module ) +def load_repository_contents( app, name, description, owner, changeset_revision, repository_clone_url, shed_tool_conf, + tool_path, tool_section, relative_install_dir, current_working_dir, tmp_name ): + # This method is used by the InstallManager, which does not have access to trans. + # Generate the metadata for the installed tool shed repository. 
It is imperative that + # the installed repository is updated to the desired changeset_revision before metadata + # is set because the process for setting metadata uses the repository files on disk. + metadata_dict = generate_metadata( app.toolbox, relative_install_dir, repository_clone_url ) + if 'datatypes_config' in metadata_dict: + datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] ) + # Load data types required by tools. + load_datatypes( app, datatypes_config, relative_install_dir ) + if 'tools' in metadata_dict: + repository_tools_tups = [] + for tool_dict in metadata_dict[ 'tools' ]: + relative_path = tool_dict[ 'tool_config' ] + guid = tool_dict[ 'guid' ] + tool = app.toolbox.load_tool( os.path.abspath( relative_path ) ) + repository_tools_tups.append( ( relative_path, guid, tool ) ) + if repository_tools_tups: + sample_files = metadata_dict.get( 'sample_files', [] ) + # Handle missing data table entries for tool parameters that are dynamically generated select lists. + repository_tools_tups = handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups ) + # Handle missing index files for tool parameters that are dynamically generated select lists. + repository_tools_tups = handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups ) + # Handle tools that use fabric scripts to install dependencies. + handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups ) + # Generate a new entry for the tool config. + elem_list = generate_tool_panel_elem_list( name, + repository_clone_url, + changeset_revision, + repository_tools_tups, + tool_section=tool_section, + owner=owner ) + if tool_section: + for section_elem in elem_list: + # Load the section into the tool panel. + app.toolbox.load_section_tag_set( section_elem, app.toolbox.tool_panel, tool_path ) + else: + # Load the tools into the tool panel outside of any sections. 
+ for tool_elem in elem_list: + guid = tool_elem.get( 'guid' ) + app.toolbox.load_tool_tag_set( tool_elem, app.toolbox.tool_panel, tool_path=tool_path, guid=guid ) + # Remove the temporary file + try: + os.unlink( tmp_name ) + except: + pass + for elem_entry in elem_list: + # Append the new entry (either section or list of tools) to the shed_tool_config file. + add_shed_tool_conf_entry( app, shed_tool_conf, elem_entry ) + if app.toolbox_search.enabled: + # If search support for tools is enabled, index the new installed tools. + app.toolbox_search = ToolBoxSearch( app.toolbox ) + # Add a new record to the tool_shed_repository table if one doesn't + # already exist. If one exists but is marked deleted, undelete it. + log.debug( "Adding new row to tool_shed_repository table for repository '%s'" % name ) + create_or_undelete_tool_shed_repository( app, + name, + description, + changeset_revision, + repository_clone_url, + metadata_dict ) + return metadata_dict +def pretty_print_xml( elem, level=0 ): + pad = ' ' + i = "\n" + level * pad + if len( elem ): + if not elem.text or not elem.text.strip(): + elem.text = i + pad + pad + if not elem.tail or not elem.tail.strip(): + elem.tail = i + for e in elem: + pretty_print_xml( e, level + 1 ) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and ( not elem.tail or not elem.tail.strip() ): + elem.tail = i + pad + return elem +def update_repository( current_working_dir, relative_install_dir, changeset_revision ): + # Update the cloned repository to changeset_revision. It is imperative that the + # installed repository is updated to the desired changeset_revision before metadata + # is set because the process for setting metadata uses the repository files on disk. 
+ log.debug( 'Updating cloned repository to revision "%s"' % changeset_revision ) + cmd = 'hg update -r %s' % changeset_revision + tmp_name = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_name, 'wb' ) + os.chdir( relative_install_dir ) + proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() ) + returncode = proc.wait() + os.chdir( current_working_dir ) + tmp_stderr.close() + return returncode, tmp_name diff -r 9c46a216e24c529a496a50afec13ebcb78106b96 -r 7dd3a089101138a4796eb73a5f1391d2f436723e lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -24,7 +24,7 @@ pkg_resources.require( 'elementtree' ) from elementtree import ElementTree, ElementInclude -from elementtree.ElementTree import Element +from elementtree.ElementTree import Element, SubElement, tostring log = logging.getLogger( __name__ ) diff -r 9c46a216e24c529a496a50afec13ebcb78106b96 -r 7dd3a089101138a4796eb73a5f1391d2f436723e lib/galaxy/web/controllers/admin_toolshed.py --- a/lib/galaxy/web/controllers/admin_toolshed.py +++ b/lib/galaxy/web/controllers/admin_toolshed.py @@ -1,5 +1,5 @@ from galaxy.web.controllers.admin import * -import logging +from galaxy.util.shed_util import * log = logging.getLogger( __name__ ) @@ -159,131 +159,68 @@ else: section_key = 'section_%s' % tool_panel_section tool_section = trans.app.toolbox.tool_panel[ section_key ] - # Decode the encoded repo_info_dict param value. - repo_info_dict = tool_shed_decode( repo_info_dict ) - # Clone the repository to the configured location. 
- current_working_dir = os.getcwd() - installed_repository_names = [] - for name, repo_info_tuple in repo_info_dict.items(): - metadata_dict = None - description, repository_clone_url, changeset_revision = repo_info_tuple - clone_dir = os.path.join( tool_path, self.__generate_tool_path( repository_clone_url, changeset_revision ) ) - if os.path.exists( clone_dir ): - # Repository and revision has already been cloned. - # TODO: implement the ability to re-install or revert an existing repository. - message += 'Revision <b>%s</b> of repository <b>%s</b> was previously installed.<br/>' % ( changeset_revision, name ) - else: - os.makedirs( clone_dir ) - log.debug( 'Cloning %s...' % repository_clone_url ) - cmd = 'hg clone %s' % repository_clone_url - tmp_name = tempfile.NamedTemporaryFile().name - tmp_stderr = open( tmp_name, 'wb' ) - os.chdir( clone_dir ) - proc = subprocess.Popen( args=cmd, shell=True, stderr=tmp_stderr.fileno() ) - returncode = proc.wait() - os.chdir( current_working_dir ) - tmp_stderr.close() + else: + tool_section = None + # Decode the encoded repo_info_dict param value. + repo_info_dict = tool_shed_decode( repo_info_dict ) + # Clone the repository to the configured location. + current_working_dir = os.getcwd() + installed_repository_names = [] + for name, repo_info_tuple in repo_info_dict.items(): + description, repository_clone_url, changeset_revision = repo_info_tuple + clone_dir = os.path.join( tool_path, self.__generate_tool_path( repository_clone_url, changeset_revision ) ) + relative_install_dir = os.path.join( clone_dir, name ) + if os.path.exists( clone_dir ): + # Repository and revision has already been cloned. + # TODO: implement the ability to re-install or revert an existing repository. 
+ message += 'Revision <b>%s</b> of repository <b>%s</b> was previously installed.<br/>' % ( changeset_revision, name ) + else: + returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url ) + if returncode == 0: + returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision ) if returncode == 0: - # Update the cloned repository to changeset_revision. It is imperative that the - # installed repository is updated to the desired changeset_revision before metadata - # is set because the process for setting metadata uses the repository files on disk. - relative_install_dir = os.path.join( clone_dir, name ) - log.debug( 'Updating cloned repository to revision "%s"...' % changeset_revision ) - cmd = 'hg update -r %s' % changeset_revision - tmp_name = tempfile.NamedTemporaryFile().name - tmp_stderr = open( tmp_name, 'wb' ) - os.chdir( relative_install_dir ) - proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() ) - returncode = proc.wait() - os.chdir( current_working_dir ) - tmp_stderr.close() - if returncode == 0: - # Generate the metadata for the installed tool shed repository. It is imperative that - # the installed repository is updated to the desired changeset_revision before metadata - # is set because the process for setting metadata uses the repository files on disk. - metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url ) - if 'datatypes_config' in metadata_dict: - datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] ) - # Load data types required by tools. 
- self.__load_datatypes( trans, datatypes_config, relative_install_dir ) - if 'tools' in metadata_dict: - repository_tools_tups = [] - for tool_dict in metadata_dict[ 'tools' ]: - relative_path = tool_dict[ 'tool_config' ] - guid = tool_dict[ 'guid' ] - tool = trans.app.toolbox.load_tool( os.path.abspath( relative_path ) ) - repository_tools_tups.append( ( relative_path, guid, tool ) ) - if repository_tools_tups: - sample_files = metadata_dict.get( 'sample_files', [] ) - # Handle missing data table entries for tool parameters that are dynamically generated select lists. - repository_tools_tups = handle_missing_data_table_entry( trans.app, tool_path, sample_files, repository_tools_tups ) - # Handle missing index files for tool parameters that are dynamically generated select lists. - repository_tools_tups = handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups ) - # Handle tools that use fabric scripts to install dependencies. - handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups ) - # Generate an in-memory tool conf section that includes the new tools. - new_tool_section = generate_tool_panel_section( name, - repository_clone_url, - changeset_revision, - tool_section, - repository_tools_tups ) - # Create a temporary file to persist the in-memory tool section - # TODO: Figure out how to do this in-memory using xml.etree. - tmp_name = tempfile.NamedTemporaryFile().name - persisted_new_tool_section = open( tmp_name, 'wb' ) - persisted_new_tool_section.write( new_tool_section ) - persisted_new_tool_section.close() - # Parse the persisted tool panel section - tree = parse_xml( tmp_name ) - root = tree.getroot() - # Load the tools in the section into the tool panel. - trans.app.toolbox.load_section_tag_set( root, trans.app.toolbox.tool_panel, tool_path ) - # Remove the temporary file - try: - os.unlink( tmp_name ) - except: - pass - # Append the new section to the shed_tool_config file. 
- add_shed_tool_conf_entry( trans.app, shed_tool_conf, new_tool_section ) - if trans.app.toolbox_search.enabled: - # If search support for tools is enabled, index the new installed tools. - trans.app.toolbox_search = ToolBoxSearch( trans.app.toolbox ) - # Add a new record to the tool_shed_repository table if one doesn't - # already exist. If one exists but is marked deleted, undelete it. - create_or_undelete_tool_shed_repository( trans.app, - name, - description, - changeset_revision, - repository_clone_url, - metadata_dict ) - installed_repository_names.append( name ) - else: - tmp_stderr = open( tmp_name, 'rb' ) - message += '%s<br/>' % tmp_stderr.read() - tmp_stderr.close() - status = 'error' + owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) ) + metadata_dict = load_repository_contents( trans.app, + name, + description, + owner, + changeset_revision, + repository_clone_url, + shed_tool_conf, + tool_path, + tool_section, + relative_install_dir, + current_working_dir, + tmp_name ) + installed_repository_names.append( name ) else: tmp_stderr = open( tmp_name, 'rb' ) message += '%s<br/>' % tmp_stderr.read() tmp_stderr.close() status = 'error' - if installed_repository_names: - installed_repository_names.sort() - num_repositories_installed = len( installed_repository_names ) + else: + tmp_stderr = open( tmp_name, 'rb' ) + message += '%s<br/>' % tmp_stderr.read() + tmp_stderr.close() + status = 'error' + if installed_repository_names: + installed_repository_names.sort() + num_repositories_installed = len( installed_repository_names ) + if tool_section: message += 'Installed %d %s and all tools were loaded into tool panel section <b>%s</b>:<br/>Installed repositories: ' % \ ( num_repositories_installed, inflector.cond_plural( num_repositories_installed, 'repository' ), tool_section.name ) - for i, repo_name in enumerate( installed_repository_names ): - if i == len( installed_repository_names ) -1: - message += '%s.<br/>' % repo_name - 
else: - message += '%s, ' % repo_name - return trans.response.send_redirect( web.url_for( controller='admin_toolshed', - action='browse_repositories', - message=message, - status=status ) ) - else: - message = 'Choose the section in your tool panel to contain the installed tools.' - status = 'error' + else: + message += 'Installed %d %s and all tools were loaded into the tool panel outside of any sections.<br/>Installed repositories: ' % \ + ( num_repositories_installed, inflector.cond_plural( num_repositories_installed, 'repository' ) ) + for i, repo_name in enumerate( installed_repository_names ): + if i == len( installed_repository_names ) -1: + message += '%s.<br/>' % repo_name + else: + message += '%s, ' % repo_name + return trans.response.send_redirect( web.url_for( controller='admin_toolshed', + action='browse_repositories', + message=message, + status=status ) ) if len( trans.app.toolbox.shed_tool_confs.keys() ) > 1: shed_tool_conf_select_field = build_shed_tool_conf_select_field( trans ) shed_tool_conf = None @@ -432,7 +369,7 @@ status=status ) def __get_relative_install_dir( self, trans, repository ): # Get the directory where the repository is install. - tool_shed = self.__clean_tool_shed_url( repository.tool_shed ) + tool_shed = clean_tool_shed_url( repository.tool_shed ) partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.changeset_revision ) # Get the relative tool installation paths from each of the shed tool configs. shed_tool_confs = trans.app.toolbox.shed_tool_confs @@ -443,55 +380,6 @@ if os.path.isdir( relative_install_dir ): break return relative_install_dir - def __load_datatypes( self, trans, datatypes_config, relative_intall_dir ): - imported_module = None - # Parse datatypes_config. 
- tree = parse_xml( datatypes_config ) - datatypes_config_root = tree.getroot() - relative_path_to_datatype_file_name = None - datatype_files = datatypes_config_root.find( 'datatype_files' ) - # Currently only a single datatype_file is supported. For example: - # <datatype_files> - # <datatype_file name="gmap.py"/> - # </datatype_files> - for elem in datatype_files.findall( 'datatype_file' ): - datatype_file_name = elem.get( 'name', None ) - if datatype_file_name: - # Find the file in the installed repository. - for root, dirs, files in os.walk( relative_intall_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name == datatype_file_name: - relative_path_to_datatype_file_name = os.path.join( root, name ) - break - break - if relative_path_to_datatype_file_name: - relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name ) - registration = datatypes_config_root.find( 'registration' ) - # Get the module by parsing the <datatype> tag. - for elem in registration.findall( 'datatype' ): - # A 'type' attribute is currently required. The attribute - # should be something like: type="gmap:GmapDB". - dtype = elem.get( 'type', None ) - if dtype: - fields = dtype.split( ':' ) - datatype_module = fields[0] - datatype_class_name = fields[1] - # Since we currently support only a single datatype_file, - # we have what we need. - break - try: - sys.path.insert( 0, relative_head ) - imported_module = __import__( datatype_module ) - sys.path.pop( 0 ) - except Exception, e: - log.debug( "Exception importing datatypes code file included in installed repository: %s" % str( e ) ) - trans.app.datatypes_registry.load_datatypes( root_dir=trans.app.config.root, config=datatypes_config, imported_module=imported_module ) - def __clean_tool_shed_url( self, tool_shed_url ): - if tool_shed_url.find( ':' ) > 0: - # Eliminate the port, if any, since it will result in an invalid directory name. 
- return tool_shed_url.split( ':' )[ 0 ] - return tool_shed_url.rstrip( '/' ) def __generate_tool_path( self, repository_clone_url, changeset_revision ): """ Generate a tool path that guarantees repositories with the same name will always be installed @@ -504,7 +392,7 @@ items = tmp_url.split( 'repos' ) tool_shed_url = items[ 0 ] repo_path = items[ 1 ] - tool_shed_url = self.__clean_tool_shed_url( tool_shed_url ) + tool_shed_url = clean_tool_shed_url( tool_shed_url ) return '%s/repos%s/%s' % ( tool_shed_url, repo_path, changeset_revision ) def __generate_clone_url( self, trans, repository ): """Generate the URL for cloning a repository.""" @@ -545,17 +433,6 @@ trans.model.ToolShedRepository.table.c.owner == owner, trans.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ .first() -def get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ): - # This method is used by the InstallManager, which does not have access to trans. - sa_session = app.model.context.current - if tool_shed.find( '//' ) > 0: - tool_shed = tool_shed.split( '//' )[1] - return sa_session.query( app.model.ToolShedRepository ) \ - .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed, - app.model.ToolShedRepository.table.c.name == name, - app.model.ToolShedRepository.table.c.owner == owner, - app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ - .first() def get_url_from_repository_tool_shed( trans, repository ): # The stored value of repository.tool_shed is something like: # toolshed.g2.bx.psu.edu @@ -569,347 +446,3 @@ # The tool shed from which the repository was originally # installed must no longer be configured in tool_sheds_conf.xml. 
return None -def generate_tool_panel_section( repository_name, repository_clone_url, changeset_revision, tool_section, repository_tools_tups, owner='' ): - """ - Write an in-memory tool panel section so we can load it into the tool panel and then - append it to the appropriate shed tool config. - TODO: re-write using ElementTree. - """ - tmp_url = clean_repository_clone_url( repository_clone_url ) - if not owner: - owner = get_repository_owner( tmp_url ) - section_str = '' - section_str += ' <section name="%s" id="%s">\n' % ( tool_section.name, tool_section.id ) - for repository_tool_tup in repository_tools_tups: - tool_file_path, guid, tool = repository_tool_tup - section_str += ' <tool file="%s" guid="%s">\n' % ( tool_file_path, guid ) - section_str += ' <tool_shed>%s</tool_shed>\n' % tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) - section_str += ' <repository_name>%s</repository_name>\n' % repository_name - section_str += ' <repository_owner>%s</repository_owner>\n' % owner - section_str += ' <changeset_revision>%s</changeset_revision>\n' % changeset_revision - section_str += ' <id>%s</id>\n' % tool.id - section_str += ' <version>%s</version>\n' % tool.version - section_str += ' </tool>\n' - section_str += ' </section>\n' - return section_str -def get_repository_owner( cleaned_repository_url ): - items = cleaned_repository_url.split( 'repos' ) - repo_path = items[ 1 ] - if repo_path.startswith( '/' ): - repo_path = repo_path.replace( '/', '', 1 ) - return repo_path.lstrip( '/' ).split( '/' )[ 0 ] -def generate_tool_guid( repository_clone_url, tool ): - """ - Generate a guid for the installed tool. It is critical that this guid matches the guid for - the tool in the Galaxy tool shed from which it is being installed. 
The form of the guid is - <tool shed host>/repos/<repository owner>/<repository name>/<tool id>/<tool version> - """ - tmp_url = clean_repository_clone_url( repository_clone_url ) - return '%s/%s/%s' % ( tmp_url, tool.id, tool.version ) -def clean_repository_clone_url( repository_clone_url ): - if repository_clone_url.find( '@' ) > 0: - # We have an url that includes an authenticated user, something like: - # http://test@bx.psu.edu:9009/repos/some_username/column - items = repository_clone_url.split( '@' ) - tmp_url = items[ 1 ] - elif repository_clone_url.find( '//' ) > 0: - # We have an url that includes only a protocol, something like: - # http://bx.psu.edu:9009/repos/some_username/column - items = repository_clone_url.split( '//' ) - tmp_url = items[ 1 ] - else: - tmp_url = repository_clone_url - return tmp_url -def generate_metadata( toolbox, relative_install_dir, repository_clone_url ): - """ - Browse the repository files on disk to generate metadata. Since we are using disk files, it - is imperative that the repository is updated to the desired change set revision before metadata - is generated. This method is used by the InstallManager, which does not have access to trans. - """ - metadata_dict = {} - sample_files = [] - datatypes_config = None - # Find datatypes_conf.xml if it exists. - for root, dirs, files in os.walk( relative_install_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name == 'datatypes_conf.xml': - relative_path = os.path.join( root, name ) - datatypes_config = os.path.abspath( relative_path ) - break - if datatypes_config: - metadata_dict[ 'datatypes_config' ] = relative_path - metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict ) - # Find all special .sample files. 
- for root, dirs, files in os.walk( relative_install_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name.endswith( '.sample' ): - sample_files.append( os.path.join( root, name ) ) - if sample_files: - metadata_dict[ 'sample_files' ] = sample_files - # Find all tool configs and exported workflows. - for root, dirs, files in os.walk( relative_install_dir ): - if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0: - if '.hg' in dirs: - dirs.remove( '.hg' ) - for name in files: - # Find all tool configs. - if name != 'datatypes_conf.xml' and name.endswith( '.xml' ): - full_path = os.path.abspath( os.path.join( root, name ) ) - try: - tool = toolbox.load_tool( full_path ) - except Exception, e: - tool = None - if tool is not None: - tool_config = os.path.join( root, name ) - metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ) - # Find all exported workflows - elif name.endswith( '.ga' ): - relative_path = os.path.join( root, name ) - fp = open( relative_path, 'rb' ) - workflow_text = fp.read() - fp.close() - exported_workflow_dict = from_json_string( workflow_text ) - if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': - metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ) - return metadata_dict -def generate_datatypes_metadata( datatypes_config, metadata_dict ): - """ - Update the received metadata_dict with changes that have been applied - to the received datatypes_config. This method is used by the InstallManager, - which does not have access to trans. - """ - # Parse datatypes_config. 
- tree = ElementTree.parse( datatypes_config ) - root = tree.getroot() - ElementInclude.include( root ) - repository_datatype_code_files = [] - datatype_files = root.find( 'datatype_files' ) - if datatype_files: - for elem in datatype_files.findall( 'datatype_file' ): - name = elem.get( 'name', None ) - repository_datatype_code_files.append( name ) - metadata_dict[ 'datatype_files' ] = repository_datatype_code_files - datatypes = [] - registration = root.find( 'registration' ) - if registration: - for elem in registration.findall( 'datatype' ): - extension = elem.get( 'extension', None ) - dtype = elem.get( 'type', None ) - mimetype = elem.get( 'mimetype', None ) - datatypes.append( dict( extension=extension, - dtype=dtype, - mimetype=mimetype ) ) - metadata_dict[ 'datatypes' ] = datatypes - return metadata_dict -def generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ): - """ - Update the received metadata_dict with changes that have been - applied to the received tool. This method is used by the InstallManager, - which does not have access to trans. - """ - # Generate the guid - guid = generate_tool_guid( repository_clone_url, tool ) - # Handle tool.requirements. - tool_requirements = [] - for tr in tool.requirements: - name=tr.name - type=tr.type - if type == 'fabfile': - version = None - fabfile = tr.fabfile - method = tr.method - else: - version = tr.version - fabfile = None - method = None - requirement_dict = dict( name=name, - type=type, - version=version, - fabfile=fabfile, - method=method ) - tool_requirements.append( requirement_dict ) - # Handle tool.tests. 
- tool_tests = [] - if tool.tests: - for ttb in tool.tests: - test_dict = dict( name=ttb.name, - required_files=ttb.required_files, - inputs=ttb.inputs, - outputs=ttb.outputs ) - tool_tests.append( test_dict ) - tool_dict = dict( id=tool.id, - guid=guid, - name=tool.name, - version=tool.version, - description=tool.description, - version_string_cmd = tool.version_string_cmd, - tool_config=tool_config, - requirements=tool_requirements, - tests=tool_tests ) - if 'tools' in metadata_dict: - metadata_dict[ 'tools' ].append( tool_dict ) - else: - metadata_dict[ 'tools' ] = [ tool_dict ] - return metadata_dict -def generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ): - """ - Update the received metadata_dict with changes that have been applied - to the received exported_workflow_dict. Store everything in the database. - This method is used by the InstallManager, which does not have access to trans. - """ - if 'workflows' in metadata_dict: - metadata_dict[ 'workflows' ].append( ( relative_path, exported_workflow_dict ) ) - else: - metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ] - return metadata_dict -def handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups ): - """ - Inspect each tool to see if any have input parameters that are dynamically - generated select lists that require entries in the tool_data_table_conf.xml file. - This method is used by the InstallManager, which does not have access to trans. - """ - missing_data_table_entry = False - for index, repository_tools_tup in enumerate( repository_tools_tups ): - tup_path, guid, repository_tool = repository_tools_tup - if repository_tool.params_with_missing_data_table_entry: - missing_data_table_entry = True - break - if missing_data_table_entry: - # The repository must contain a tool_data_table_conf.xml.sample file that includes - # all required entries for all tools in the repository. 
- for sample_file in sample_files: - head, tail = os.path.split( sample_file ) - if tail == 'tool_data_table_conf.xml.sample': - break - error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_file ) - if error: - # TODO: Do more here than logging an exception. - log.debug( exception_msg ) - # Reload the tool into the local list of repository_tools_tups. - repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ) ) - repository_tools_tups[ index ] = ( tup_path, repository_tool ) - return repository_tools_tups -def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups ): - """ - Inspect each tool to see if it has any input parameters that - are dynamically generated select lists that depend on a .loc file. - This method is used by the InstallManager, which does not have access to trans. - """ - missing_files_handled = [] - for index, repository_tools_tup in enumerate( repository_tools_tups ): - tup_path, guid, repository_tool = repository_tools_tup - params_with_missing_index_file = repository_tool.params_with_missing_index_file - for param in params_with_missing_index_file: - options = param.options - missing_head, missing_tail = os.path.split( options.missing_index_file ) - if missing_tail not in missing_files_handled: - # The repository must contain the required xxx.loc.sample file. - for sample_file in sample_files: - sample_head, sample_tail = os.path.split( sample_file ) - if sample_tail == '%s.sample' % missing_tail: - copy_sample_loc_file( app, sample_file ) - if options.tool_data_table and options.tool_data_table.missing_index_file: - options.tool_data_table.handle_found_index_file( options.missing_index_file ) - missing_files_handled.append( missing_tail ) - break - # Reload the tool into the local list of repository_tools_tups. 
- repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ) ) - repository_tools_tups[ index ] = ( tup_path, guid, repository_tool ) - return repository_tools_tups -def handle_tool_dependencies( current_working_dir, repo_files_dir, repository_tools_tups ): - """ - Inspect each tool to see if it includes a "requirement" that refers to a fabric - script. For those that do, execute the fabric script to install tool dependencies. - This method is used by the InstallManager, which does not have access to trans. - """ - for index, repository_tools_tup in enumerate( repository_tools_tups ): - tup_path, guid, repository_tool = repository_tools_tup - for requirement in repository_tool.requirements: - if requirement.type == 'fabfile': - log.debug( 'Executing fabric script to install dependencies for tool "%s"...' % repository_tool.name ) - fabfile = requirement.fabfile - method = requirement.method - # Find the relative path to the fabfile. - relative_fabfile_path = None - for root, dirs, files in os.walk( repo_files_dir ): - for name in files: - if name == fabfile: - relative_fabfile_path = os.path.join( root, name ) - break - if relative_fabfile_path: - # cmd will look something like: fab -f fabfile.py install_bowtie - cmd = 'fab -f %s %s' % ( relative_fabfile_path, method ) - tmp_name = tempfile.NamedTemporaryFile().name - tmp_stderr = open( tmp_name, 'wb' ) - os.chdir( repo_files_dir ) - proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() ) - returncode = proc.wait() - os.chdir( current_working_dir ) - tmp_stderr.close() - if returncode != 0: - # TODO: do something more here than logging the problem. - tmp_stderr = open( tmp_name, 'rb' ) - error = tmp_stderr.read() - tmp_stderr.close() - log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) ) -def add_shed_tool_conf_entry( app, shed_tool_conf, new_tool_section ): - """ - Add an entry in the shed_tool_conf file. 
An entry looks something like: - <section name="Filter and Sort" id="filter"> - <tool file="filter/filtering.xml" guid="toolshed.g2.bx.psu.edu/repos/test/filter/1.0.2"/> - </section> - This method is used by the InstallManager, which does not have access to trans. - """ - # Make a backup of the hgweb.config file since we're going to be changing it. - if not os.path.exists( shed_tool_conf ): - output = open( shed_tool_conf, 'w' ) - output.write( '<?xml version="1.0"?>\n' ) - output.write( '<toolbox tool_path="%s">\n' % tool_path ) - output.write( '</toolbox>\n' ) - output.close() - # Make a backup of the shed_tool_conf file. - today = date.today() - backup_date = today.strftime( "%Y_%m_%d" ) - shed_tool_conf_copy = '%s/%s_%s_backup' % ( app.config.root, shed_tool_conf, backup_date ) - shutil.copy( os.path.abspath( shed_tool_conf ), os.path.abspath( shed_tool_conf_copy ) ) - tmp_fd, tmp_fname = tempfile.mkstemp() - new_shed_tool_conf = open( tmp_fname, 'wb' ) - for i, line in enumerate( open( shed_tool_conf ) ): - if line.startswith( '</toolbox>' ): - # We're at the end of the original config file, so add our entry. - new_shed_tool_conf.write( new_tool_section ) - new_shed_tool_conf.write( line ) - else: - new_shed_tool_conf.write( line ) - new_shed_tool_conf.close() - shutil.move( tmp_fname, os.path.abspath( shed_tool_conf ) ) -def create_or_undelete_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='' ): - # This method is used by the InstallManager, which does not have access to trans. 
- sa_session = app.model.context.current - tmp_url = clean_repository_clone_url( repository_clone_url ) - tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) - if not owner: - owner = get_repository_owner( tmp_url ) - includes_datatypes = 'datatypes_config' in metadata_dict - flush_needed = False - tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ) - if tool_shed_repository: - if tool_shed_repository.deleted: - tool_shed_repository.deleted = False - # Reset includes_datatypes in case metadata changed since last installed. - tool_shed_repository.includes_datatypes = includes_datatypes - flush_needed = True - else: - tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed, - name=name, - description=description, - owner=owner, - changeset_revision=changeset_revision, - metadata=metadata_dict, - includes_datatypes=includes_datatypes ) - flush_needed = True - if flush_needed: - sa_session.add( tool_shed_repository ) - sa_session.flush() diff -r 9c46a216e24c529a496a50afec13ebcb78106b96 -r 7dd3a089101138a4796eb73a5f1391d2f436723e templates/admin/tool_shed_repository/select_tool_panel_section.mako --- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako +++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako @@ -23,39 +23,42 @@ <br/><div class="toolForm"> - <div class="toolFormTitle">Choose section to load tools into tool panel</div> + <div class="toolFormTitle">Choose tool panel section to contain installed tools (optional)</div><div class="toolFormBody"> - <form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict )}" method="post" > - %if shed_tool_conf_select_field: + <form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', 
action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict )}" method="post" > + %if shed_tool_conf_select_field: + <div class="form-row"> + <label>Shed tool configuration file:</label> + ${shed_tool_conf_select_field.get_html()} + <div class="toolParamHelp" style="clear: both;"> + Your Galaxy instance is configured with ${len( shed_tool_conf_select_field.options )} shed tool configuration files, + so choose one in which to configure the installed tools. + </div> + </div> + <div style="clear: both"></div> + %else: + <input type="hidden" name="shed_tool_conf" value="${shed_tool_conf}"/> + %endif <div class="form-row"> - <label>Shed tool configuration file:</label> - ${shed_tool_conf_select_field.get_html()} + <label>Add new tool panel section:</label> + <input name="new_tool_panel_section" type="textfield" value="${new_tool_panel_section}" size="40"/><div class="toolParamHelp" style="clear: both;"> - Your Galaxy instance is configured with ${len( shed_tool_conf_select_field.options )} shed tool configuration files, - so choose one in which to configure the installed tools. + Add a new tool panel section contain the installed tools (optional). </div></div> - <div style="clear: both"></div> - %else: - <input type="hidden" name="shed_tool_conf" value="${shed_tool_conf}"/> - %endif - <div class="form-row"> - <label>Add new tool panel section:</label> - <input name="new_tool_panel_section" type="textfield" value="${new_tool_panel_section}" size="40"/> - <div class="toolParamHelp" style="clear: both;"> - Add a new tool panel section or choose an existing section in your tool panel below to contain the installed tools. + <div class="form-row"> + <label>Select existing tool panel section:</label> + ${tool_panel_section_select_field.get_html()} + <div class="toolParamHelp" style="clear: both;"> + Choose an existing section in your tool panel to contain the installed tools (optional). 
+ </div></div> - </div> - <div class="form-row"> - <label>Select existing tool panel section:</label> - ${tool_panel_section_select_field.get_html()} - <div class="toolParamHelp" style="clear: both;"> - Choose an existing section in your tool panel to contain the installed tools. + <div class="form-row"> + <input type="submit" name="select_tool_panel_section_button" value="Install"/> + <div class="toolParamHelp" style="clear: both;"> + Clicking <b>Install</b> without selecting a tool panel section will load the installed tools into the tool panel outside of any sections. + </div></div> - </div> - <div class="form-row"> - <input type="submit" name="select_tool_panel_section_button" value="Install"/> - </div> - </form> + </form></div></div> Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
Participants (1): Bitbucket