commit/galaxy-central: greg: Check potential tool config files to ensure they are text before attempting to load them in tool shed repositories, and fix handling for tools contained in a repository that don't properly load into Galaxy because they are missing a required entry in the tool_data_table_conf.xml file.
1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/changeset/b199460443af/
changeset: b199460443af
user: greg
date: 2012-03-12 18:51:07
summary: Check potential tool config files to ensure they are text before attempting to load them in tool shed repositories, and fix handling for tools contained in a repository that don't properly load into Galaxy because they are missing a required entry in the tool_data_table_conf.xml file.
affected #: 6 files

diff -r 02f55e34ccfeef865d5d0811664a1f95635ec609 -r b199460443afeb3023f1c160f23b2215ba42add4 lib/galaxy/tool_shed/migrate/check.py
--- a/lib/galaxy/tool_shed/migrate/check.py
+++ b/lib/galaxy/tool_shed/migrate/check.py
@@ -63,7 +63,7 @@
         raise Exception( "Error attempting to update the value of migrate_tools.version: %s" % output )
     elif missing_tool_configs:
         msg = "\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
-        msg += "\n\nThe list of files at the end of this message refer to tools that are configured to load into the tool panel for\n"
+        msg += "\n\nThe list of files at the end of this message refers to tools that are configured to load into the tool panel for\n"
         msg += "this Galaxy instance, but have been removed from the Galaxy distribution. These tools can be automatically installed\n"
         msg += "from the Galaxy tool shed at http://toolshed.g2.bx.psu.edu.\n\n"
         msg += "To skip this process, attempt to start your Galaxy server again (e.g., sh run.sh or whatever you use). If you do this,\n"

diff -r 02f55e34ccfeef865d5d0811664a1f95635ec609 -r b199460443afeb3023f1c160f23b2215ba42add4 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -2,6 +2,7 @@
 from datetime import date, datetime, timedelta
 from time import strftime
 from galaxy import util
+from galaxy.datatypes.checkers import *
 from galaxy.util.json import *
 from galaxy.tools.search import ToolBoxSearch
 from galaxy.model.orm import *
@@ -11,6 +12,7 @@
 from elementtree.ElementTree import Element, SubElement

 log = logging.getLogger( __name__ )
+
 def add_to_shed_tool_config( app, shed_tool_conf_dict, elem_list ):
     # A tool shed repository is being installed so change the shed_tool_conf file. Parse the config file to generate the entire list
     # of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -320,14 +322,15 @@
             # Find all tool configs.
             if name != 'datatypes_conf.xml' and name.endswith( '.xml' ):
                 full_path = os.path.abspath( os.path.join( root, name ) )
-                try:
-                    tool = toolbox.load_tool( full_path )
-                except Exception, e:
-                    tool = None
-                if tool is not None:
-                    tool_config = os.path.join( root, name )
-                    guid = generate_tool_guid( repository_clone_url, tool )
-                    metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
+                if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
+                         or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
+                    try:
+                        tool = toolbox.load_tool( full_path )
+                    except Exception, e:
+                        tool = None
+                    if tool is not None:
+                        tool_config = os.path.join( root, name )
+                        metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
             # Find all exported workflows
             elif name.endswith( '.ga' ):
                 relative_path = os.path.join( root, name )
@@ -642,10 +645,14 @@
     repository_tools_tups = []
     if 'tools' in metadata_dict:
         for tool_dict in metadata_dict[ 'tools' ]:
-            relative_path = tool_dict[ 'tool_config' ]
-            guid = tool_dict[ 'guid' ]
-            tool = app.toolbox.load_tool( os.path.abspath( relative_path ), guid=guid )
-            repository_tools_tups.append( ( relative_path, guid, tool ) )
+            relative_path = tool_dict.get( 'tool_config', None )
+            guid = tool_dict.get( 'guid', None )
+            if relative_path and guid:
+                tool = app.toolbox.load_tool( os.path.abspath( relative_path ), guid=guid )
+            else:
+                tool = None
+            if tool:
+                repository_tools_tups.append( ( relative_path, guid, tool ) )
     return repository_tools_tups
 def get_tool_panel_config_tool_path_install_dir( app, repository ):
     # Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the
@@ -745,6 +752,7 @@
                     missing_data_table_entry = True
                     break
             if missing_data_table_entry:
+                sample_file = None
                 # The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository.
                 for sample_file in sample_files:
                     head, tail = os.path.split( sample_file )
@@ -753,10 +761,10 @@
                         error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_file )
                         if error:
                             # TODO: Do more here than logging an exception.
-                            log.debug( exception_msg )
+                            log.debug( correction_msg )
                 # Reload the tool into the local list of repository_tools_tups.
                 repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
-                repository_tools_tups[ index ] = ( tup_path, repository_tool )
+                repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
     return repository_tools_tups
 def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups ):
     """Inspect each tool to see if it has any input parameters that are dynamically generated select lists that depend on a .loc file."""
@@ -781,6 +789,46 @@
                 repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
                 repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
     return repository_tools_tups
+def handle_sample_tool_data_table_conf_file( app, filename ):
+    """
+    Parse the incoming filename and add new entries to the in-memory
+    app.tool_data_tables dictionary as well as appending them to the
+    shed's tool_data_table_conf.xml file on disk.
+    """
+    error = False
+    message = ''
+    try:
+        new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( filename )
+    except Exception, e:
+        message = str( e )
+        error = True
+    if not error:
+        # Add an entry to the end of the tool_data_table_conf.xml file.
+        tdt_config = "%s/tool_data_table_conf.xml" % app.config.root
+        if os.path.exists( tdt_config ):
+            # Make a backup of the file since we're going to be changing it.
+            today = date.today()
+            backup_date = today.strftime( "%Y_%m_%d" )
+            tdt_config_copy = '%s/tool_data_table_conf.xml_%s_backup' % ( app.config.root, backup_date )
+            shutil.copy( os.path.abspath( tdt_config ), os.path.abspath( tdt_config_copy ) )
+            # Write each line of the tool_data_table_conf.xml file, except the last line to a temp file.
+            fh = tempfile.NamedTemporaryFile( 'wb' )
+            tmp_filename = fh.name
+            fh.close()
+            new_tdt_config = open( tmp_filename, 'wb' )
+            for i, line in enumerate( open( tdt_config, 'rb' ) ):
+                if line.find( '</tables>' ) >= 0:
+                    for new_table_elem in new_table_elems:
+                        new_tdt_config.write( '    %s\n' % util.xml_to_string( new_table_elem ).rstrip( '\n' ) )
+                    new_tdt_config.write( '</tables>\n' )
+                else:
+                    new_tdt_config.write( line )
+            new_tdt_config.close()
+            shutil.move( tmp_filename, os.path.abspath( tdt_config ) )
+        else:
+            message = "The required file named tool_data_table_conf.xml does not exist in the Galaxy install directory."
+            error = True
+    return error, message
 def handle_tool_dependencies( current_working_dir, repo_files_dir, repository_tools_tups ):
     """
     Inspect each tool to see if it includes a "requirement" that refers to a fabric

diff -r 02f55e34ccfeef865d5d0811664a1f95635ec609 -r b199460443afeb3023f1c160f23b2215ba42add4 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -2464,46 +2464,6 @@
     id = trans.security.decode_id( id )
     quota = trans.sa_session.query( trans.model.Quota ).get( id )
     return quota
-def handle_sample_tool_data_table_conf_file( app, filename ):
-    """
-    Parse the incoming filename and add new entries to the in-memory
-    app.tool_data_tables dictionary as well as appending them to the
-    shed's tool_data_table_conf.xml file on disk.
-    """
-    error = False
-    message = ''
-    try:
-        new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( filename )
-    except Exception, e:
-        message = str( e )
-        error = True
-    if not error:
-        # Add an entry to the end of the tool_data_table_conf.xml file.
-        tdt_config = "%s/tool_data_table_conf.xml" % app.config.root
-        if os.path.exists( tdt_config ):
-            # Make a backup of the file since we're going to be changing it.
-            today = date.today()
-            backup_date = today.strftime( "%Y_%m_%d" )
-            tdt_config_copy = '%s/tool_data_table_conf.xml_%s_backup' % ( app.config.root, backup_date )
-            shutil.copy( os.path.abspath( tdt_config ), os.path.abspath( tdt_config_copy ) )
-            # Write each line of the tool_data_table_conf.xml file, except the last line to a temp file.
-            fh = tempfile.NamedTemporaryFile( 'wb' )
-            tmp_filename = fh.name
-            fh.close()
-            new_tdt_config = open( tmp_filename, 'wb' )
-            for i, line in enumerate( open( tdt_config, 'rb' ) ):
-                if line.find( '</tables>' ) >= 0:
-                    for new_table_elem in new_table_elems:
-                        new_tdt_config.write( '    %s\n' % util.xml_to_string( new_table_elem ).rstrip( '\n' ) )
-                    new_tdt_config.write( '</tables>\n' )
-                else:
-                    new_tdt_config.write( line )
-            new_tdt_config.close()
-            shutil.move( tmp_filename, os.path.abspath( tdt_config ) )
-        else:
-            message = "The required file named tool_data_table_conf.xml does not exist in the Galaxy install directory."
-            error = True
-    return error, message
 def tool_shed_encode( val ):
     if isinstance( val, dict ):
         value = simplejson.dumps( val )

diff -r 02f55e34ccfeef865d5d0811664a1f95635ec609 -r b199460443afeb3023f1c160f23b2215ba42add4 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -1,10 +1,12 @@
 import os, string, socket, logging, simplejson, binascii
 from time import strftime
 from datetime import *
+from galaxy.datatypes.checkers import *
 from galaxy.tools import *
 from galaxy.util.json import from_json_string, to_json_string
 from galaxy.util.hash_util import *
 from galaxy.util.shed_util import copy_sample_loc_file, generate_datatypes_metadata, generate_tool_metadata, generate_workflow_metadata
+from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file
 from galaxy.web.base.controller import *
 from galaxy.webapps.community import model
 from galaxy.model.orm import *
@@ -314,19 +316,21 @@
             # Find all tool configs.
             if name != 'datatypes_conf.xml' and name.endswith( '.xml' ):
                 full_path = os.path.abspath( os.path.join( root, name ) )
-                try:
-                    tool = load_tool( trans, full_path )
-                    valid = True
-                except Exception, e:
-                    valid = False
-                    invalid_files.append( ( name, str( e ) ) )
-                if valid and tool is not None:
-                    can_set_metadata, invalid_files = check_tool_input_params( trans, name, tool, sample_files, invalid_files )
-                    if can_set_metadata:
-                        # Update the list of metadata dictionaries for tools in metadata_dict.
-                        tool_config = os.path.join( root, name )
-                        repository_clone_url = generate_clone_url( trans, id )
-                        metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
+                if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
+                         or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
+                    try:
+                        tool = load_tool( trans, full_path )
+                        valid = True
+                    except Exception, e:
+                        valid = False
+                        invalid_files.append( ( name, str( e ) ) )
+                    if valid and tool is not None:
+                        can_set_metadata, invalid_files = check_tool_input_params( trans, name, tool, sample_files, invalid_files )
+                        if can_set_metadata:
+                            # Update the list of metadata dictionaries for tools in metadata_dict.
+                            tool_config = os.path.join( root, name )
+                            repository_clone_url = generate_clone_url( trans, id )
+                            metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
             # Find all exported workflows
             elif name.endswith( '.ga' ):
                 try:
@@ -381,22 +385,24 @@
             fh = open( tmp_filename, 'w' )
             fh.write( fctx.data() )
             fh.close()
-            try:
-                tool = load_tool( trans, tmp_filename )
-                valid = True
-            except Exception, e:
-                invalid_files.append( ( filename, str( e ) ) )
-                valid = False
-            if valid and tool is not None:
-                # Update the list of metadata dictionaries for tools in metadata_dict. Note that filename
-                # here is the relative path to the config file within the change set context, something
-                # like filtering.xml, but when the change set was the repository tip, the value was
-                # something like database/community_files/000/repo_1/filtering.xml. This shouldn't break
-                # anything, but may result in a bit of confusion when maintaining the code / data over time.
-                # IMPORTANT NOTE: Here we are assuming that since the current change set is not the repository
-                # tip, we do not have to handle any .loc.sample files since they would have been handled previously.
-                repository_clone_url = generate_clone_url( trans, id )
-                metadata_dict = generate_tool_metadata( filename, tool, repository_clone_url, metadata_dict )
+            if not ( check_binary( tmp_filename ) or check_image( tmp_filename ) or check_gzip( tmp_filename )[ 0 ]
+                     or check_bz2( tmp_filename )[ 0 ] or check_zip( tmp_filename ) ):
+                try:
+                    tool = load_tool( trans, tmp_filename )
+                    valid = True
+                except Exception, e:
+                    invalid_files.append( ( filename, str( e ) ) )
+                    valid = False
+                if valid and tool is not None:
+                    # Update the list of metadata dictionaries for tools in metadata_dict. Note that filename
+                    # here is the relative path to the config file within the change set context, something
+                    # like filtering.xml, but when the change set was the repository tip, the value was
+                    # something like database/community_files/000/repo_1/filtering.xml. This shouldn't break
+                    # anything, but may result in a bit of confusion when maintaining the code / data over time.
+                    # IMPORTANT NOTE: Here we are assuming that since the current change set is not the repository
+                    # tip, we do not have to handle any .loc.sample files since they would have been handled previously.
+                    repository_clone_url = generate_clone_url( trans, id )
+                    metadata_dict = generate_tool_metadata( filename, tool, repository_clone_url, metadata_dict )
             try:
                 os.unlink( tmp_filename )
             except:

diff -r 02f55e34ccfeef865d5d0811664a1f95635ec609 -r b199460443afeb3023f1c160f23b2215ba42add4 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -856,13 +856,15 @@
             fh = open( tmp_filename, 'w' )
             fh.write( fctx.data() )
             fh.close()
-            try:
-                tool = load_tool( trans, tmp_filename )
-                valid = True
-            except:
-                valid = False
-            if valid and tool is not None:
-                tool_guids.append( generate_tool_guid( trans, repository, tool ) )
+            if not ( check_binary( tmp_filename ) or check_image( tmp_filename ) or check_gzip( tmp_filename )[ 0 ]
+                     or check_bz2( tmp_filename )[ 0 ] or check_zip( tmp_filename ) ):
+                try:
+                    tool = load_tool( trans, tmp_filename )
+                    valid = True
+                except:
+                    valid = False
+                if valid and tool is not None:
+                    tool_guids.append( generate_tool_guid( trans, repository, tool ) )
             try:
                 os.unlink( tmp_filename )
             except:

diff -r 02f55e34ccfeef865d5d0811664a1f95635ec609 -r b199460443afeb3023f1c160f23b2215ba42add4 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -3,6 +3,7 @@
 from galaxy.model.orm import *
 from galaxy.datatypes.checkers import *
 from common import *
+from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file
 from mercurial import hg, ui, commands

 log = logging.getLogger( __name__ )

Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled and are the recipient of this email.