commit/galaxy-central: 5 new changesets
5 new commits in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/changeset/1b7f0f272c1a/ changeset: 1b7f0f272c1a user: dannon date: 2012-11-07 16:51:50 summary: Security: Sanitize grid filters. affected #: 1 file diff -r 6971bbc8bd8418baea51345b891e1aa647d14a88 -r 1b7f0f272c1a3f6b8d55969fbf73122884d23070 lib/galaxy/web/framework/helpers/grids.py --- a/lib/galaxy/web/framework/helpers/grids.py +++ b/lib/galaxy/web/framework/helpers/grids.py @@ -2,6 +2,7 @@ from galaxy.web.base.controller import * from galaxy.web.framework.helpers import iff from galaxy.web import url_for +from galaxy.util import sanitize_text from galaxy.util.json import from_json_string, to_json_string from galaxy.util.odict import odict from galaxy.web.framework.helpers import to_unicode @@ -136,7 +137,8 @@ # Update query. query = column.filter( trans, trans.user, query, column_filter ) # Upate current filter dict. - cur_filter_dict[ column.key ] = column_filter + #Column filters are rendered in various places, sanitize them all here. + cur_filter_dict[ column.key ] = sanitize_text(column_filter) # Carry filter along to newly generated urls; make sure filter is a string so # that we can encode to UTF-8 and thus handle user input to filters. if isinstance( column_filter, list ): https://bitbucket.org/galaxy/galaxy-central/changeset/643931af6baf/ changeset: 643931af6baf user: dannon date: 2012-11-07 16:54:57 summary: Whitespace cleanup. affected #: 1 file diff -r 1b7f0f272c1a3f6b8d55969fbf73122884d23070 -r 643931af6baf5cfdd772dab1342f47149a23620c lib/galaxy/web/framework/helpers/grids.py --- a/lib/galaxy/web/framework/helpers/grids.py +++ b/lib/galaxy/web/framework/helpers/grids.py @@ -36,7 +36,7 @@ num_page_links = 10 # Set preference names. cur_filter_pref_name = ".filter" - cur_sort_key_pref_name = ".sort_key" + cur_sort_key_pref_name = ".sort_key" pass_through_operations = {} def __init__( self ): # Determine if any multiple row operations are defined @@ -45,13 +45,13 @@ if operation.allow_multiple: self.has_multiple_item_operations = True break - - # If a column does not have a model class, set the column's model class + + # If a column does not have a model class, set the column's model class # to be the grid's model class. for column in self.columns: if not column.model_class: column.model_class = self.model_class - + def __call__( self, trans, **kwargs ): # Get basics. # FIXME: pretty sure this is only here to pass along, can likely be eliminated @@ -88,7 +88,7 @@ cur_filter_dict = {} for column in self.columns: if column.key: - # Get the filter criterion for the column. Precedence is (a) if using default filter, only look there; otherwise, + # Get the filter criterion for the column. Precedence is (a) if using default filter, only look there; otherwise, # (b) look in kwargs; and (c) look in base filter. column_filter = None if use_default_filter: @@ -98,7 +98,7 @@ # Queries that include table joins cannot guarantee unique column names. This problem is # handled by setting the column_filter value to <TableName>.<ColumnName>. column_filter = kwargs.get( "f-" + column.model_class.__name__ + ".%s" % column.key ) - elif "f-" + column.key in kwargs: + elif "f-" + column.key in kwargs: column_filter = kwargs.get( "f-" + column.key ) elif column.key in base_filter: column_filter = base_filter.get( column.key ) @@ -108,7 +108,7 @@ if isinstance( item, basestring): try: # Not clear what we're decoding, so recurse to ensure that we catch everything. 
- decoded_item = from_json_string( item ) + decoded_item = from_json_string( item ) if isinstance( decoded_item, list): decoded_list = from_json_string_recurse( decoded_item ) else: @@ -146,7 +146,7 @@ for filter in column_filter: if not isinstance( filter, basestring ): filter = unicode( filter ).encode("utf-8") - extra_url_args[ "f-" + column.key ] = to_json_string( column_filter ) + extra_url_args[ "f-" + column.key ] = to_json_string( column_filter ) else: # Process singleton filter. if not isinstance( column_filter, basestring ): @@ -190,19 +190,19 @@ if self.use_paging: if 'page' in kwargs: if kwargs['page'] == 'all': - page_num = 0 + page_num = 0 else: page_num = int( kwargs['page'] ) else: page_num = 1 - + if page_num == 0: # Show all rows in page. total_num_rows = query.count() page_num = 1 num_pages = 1 else: - # Show a limited number of rows. Before modifying query, get the total number of rows that query + # Show a limited number of rows. Before modifying query, get the total number of rows that query # returns so that the total number of pages can be computed. total_num_rows = query.count() query = query.limit( self.num_rows_per_page ).offset( ( page_num-1 ) * self.num_rows_per_page ) @@ -211,11 +211,11 @@ # Defaults. page_num = 1 num_pages = 1 - - # There are some places in grid templates where it's useful for a grid + + # There are some places in grid templates where it's useful for a grid # to have its current filter. self.cur_filter_dict = cur_filter_dict - + # Preserve grid state: save current filter and sort key. if self.preserve_state: pref_name = unicode( self.__class__.__name__ + self.cur_filter_pref_name ) @@ -253,10 +253,10 @@ return url_for( **new_kwargs ) self.use_panels = ( kwargs.get( 'use_panels', False ) in [ True, 'True', 'true' ] ) async_request = ( ( self.use_async ) and ( kwargs.get( 'async', False ) in [ True, 'True', 'true'] ) ) - # Currently, filling the template returns a str object; this requires decoding the string into a - # unicode object within mako templates. What probably should be done is to return the template as + # Currently, filling the template returns a str object; this requires decoding the string into a + # unicode object within mako templates. What probably should be done is to return the template as # utf-8 unicode; however, this would require encoding the object as utf-8 before returning the grid - # results via a controller method, which is require substantial changes. Hence, for now, return grid + # results via a controller method, which is require substantial changes. Hence, for now, return grid # as str. return trans.fill_template( iff( async_request, self.async_template, self.template ), grid=self, @@ -303,7 +303,7 @@ # (gvk) Is this method necessary? Why not simply build the entire query, # including applying filters in the build_initial_query() method? return query - + class GridColumn( object ): def __init__( self, label, key=None, model_class=None, method=None, format=None, \ link=None, attach_popup=False, visible=True, ncells=1, nowrap=False, \ @@ -360,16 +360,16 @@ if column_name is None: column_name = self.key if ascending: - query = query.order_by( self.model_class.table.c.get( column_name ).asc() ) + query = query.order_by( self.model_class.table.c.get( column_name ).asc() ) else: query = query.order_by( self.model_class.table.c.get( column_name ).desc() ) return query - + class ReverseSortColumn( GridColumn ): """ Column that reverses sorting; this is useful when the natural sort is descending. 
""" def sort( self, trans, query, ascending, column_name=None ): return GridColumn.sort( self, trans, query, (not ascending), column_name=column_name ) - + class TextColumn( GridColumn ): """ Generic column that employs freetext and, hence, supports freetext, case-independent filtering. """ def filter( self, trans, user, query, column_filter ): @@ -380,7 +380,7 @@ query = query.filter( self.get_filter( trans, user, column_filter ) ) return query def get_filter( self, trans, user, column_filter ): - """ Returns a SQLAlchemy criterion derived from column_filter. """ + """ Returns a SQLAlchemy criterion derived from column_filter. """ if isinstance( column_filter, basestring ): return self.get_single_filter( user, column_filter ) elif isinstance( column_filter, list ): @@ -406,7 +406,7 @@ if column_name is None: column_name = self.key if ascending: - query = query.order_by( func.lower( self.model_class.table.c.get( column_name ) ).asc() ) + query = query.order_by( func.lower( self.model_class.table.c.get( column_name ) ).asc() ) else: query = query.order_by( func.lower( self.model_class.table.c.get( column_name ) ).desc() ) return query @@ -430,9 +430,9 @@ class IntegerColumn( TextColumn ): """ - Integer column that employs freetext, but checks that the text is an integer, + Integer column that employs freetext, but checks that the text is an integer, so support filtering on integer values. - + IMPORTANT NOTE: grids that use this column type should not include the column in the cols_to_filter list of MulticolFilterColumn ( i.e., searching on this column type should not be performed in the grid's standard search - it won't @@ -440,8 +440,8 @@ that search on this column should use 'filterable="advanced"' so that searching is only performed in the advanced search component, restricting the search to the specific column. - - This is useful for searching on object ids or other integer columns. See the + + This is useful for searching on object ids or other integer columns. See the JobIdColumn column in the SpecifiedDateListGrid class in the jobs controller of the reports webapp for an example. """ @@ -452,14 +452,14 @@ def sort( self, trans, query, ascending, column_name=None ): """Sort query using this column.""" return GridColumn.sort( self, trans, query, ascending, column_name=column_name ) - + class CommunityRatingColumn( GridColumn, UsesItemRatings ): """ Column that displays community ratings for an item. """ def get_value( self, trans, grid, item ): ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, item, webapp_model=trans.model ) - return trans.fill_template( "community_rating.mako", - ave_item_rating=ave_item_rating, - num_ratings=num_ratings, + return trans.fill_template( "community_rating.mako", + ave_item_rating=ave_item_rating, + num_ratings=num_ratings, item_id=trans.security.encode_id( item.id ) ) def sort( self, trans, query, ascending, column_name=None ): def get_foreign_key( source_class, target_class ): @@ -511,12 +511,12 @@ return ann_snippet def get_single_filter( self, user, a_filter ): """ Filter by annotation and annotation owner. 
""" - return self.model_class.annotations.any( - and_( func.lower( self.model_annotation_association_class.annotation ).like( "%" + a_filter.lower() + "%" ), - # TODO: not sure why, to filter by owner's annotations, we have to do this rather than + return self.model_class.annotations.any( + and_( func.lower( self.model_annotation_association_class.annotation ).like( "%" + a_filter.lower() + "%" ), + # TODO: not sure why, to filter by owner's annotations, we have to do this rather than # 'self.model_class.user==self.model_annotation_association_class.user' - self.model_annotation_association_class.table.c.user_id==self.model_class.table.c.user_id ) ) - + self.model_annotation_association_class.table.c.user_id==self.model_class.table.c.user_id ) ) + class CommunityTagsColumn( TextColumn ): """ Column that supports community tags. """ def __init__( self, col_name, key, model_class=None, model_tag_association_class=None, filterable=None, grid_name=None ): @@ -549,7 +549,7 @@ # Filter by all values. clause_list.append( self.model_class.tags.any( func.lower( self.model_tag_association_class.user_value ).like( "%" + value.lower() + "%" ) ) ) return and_( *clause_list ) - + class IndividualTagsColumn( CommunityTagsColumn ): """ Column that supports individual tags. """ def get_value( self, trans, grid, item ): @@ -577,7 +577,7 @@ # Filter by individual's tag values. clause_list.append( self.model_class.tags.any( and_( func.lower( self.model_tag_association_class.user_value ).like( "%" + value.lower() + "%" ), self.model_tag_association_class.user == user ) ) ) return and_( *clause_list ) - + class MulticolFilterColumn( TextColumn ): """ Column that performs multicolumn filtering. """ def __init__( self, col_name, cols_to_filter, key, visible, filterable="default" ): @@ -601,7 +601,7 @@ clause_list.append( column.get_filter( trans, user, column_filter ) ) complete_filter = or_( *clause_list ) return query.filter( complete_filter ) - + class OwnerColumn( TextColumn ): """ Column that lists item's owner. """ def get_value( self, trans, grid, item ): @@ -609,7 +609,7 @@ def sort( self, trans, query, ascending, column_name=None ): """ Sort column using case-insensitive alphabetical sorting on item's username. """ if ascending: - query = query.order_by( func.lower ( self.model_class.username ).asc() ) + query = query.order_by( func.lower ( self.model_class.username ).asc() ) else: query = query.order_by( func.lower( self.model_class.username ).desc() ) return query @@ -751,17 +751,17 @@ return self.condition( item ) else: return True - + class DisplayByUsernameAndSlugGridOperation( GridOperation ): """ Operation to display an item by username and slug. """ def get_url_args( self, item ): return { 'action' : 'display_by_username_and_slug', 'username' : item.user.username, 'slug' : item.slug } - + class GridAction( object ): def __init__( self, label=None, url_args=None ): self.label = label self.url_args = url_args - + class GridColumnFilter( object ): def __init__( self, label, args=None ): self.label = label https://bitbucket.org/galaxy/galaxy-central/changeset/36ecf36af10d/ changeset: 36ecf36af10d user: dannon date: 2012-11-07 16:56:26 summary: Fix incorrect new_args vs new_kwargs. Remove dead code. 
affected #: 1 file diff -r 643931af6baf5cfdd772dab1342f47149a23620c -r 36ecf36af10d88e6cc35666352a69f35c9313439 lib/galaxy/web/framework/helpers/grids.py --- a/lib/galaxy/web/framework/helpers/grids.py +++ b/lib/galaxy/web/framework/helpers/grids.py @@ -116,7 +116,6 @@ except ValueError: decoded_list = [ unicode ( item ) ] elif isinstance( item, list): - return_val = [] for element in item: a_list = from_json_string_recurse( element ) decoded_list = decoded_list + a_list @@ -247,7 +246,7 @@ if 'id' in new_kwargs: id = new_kwargs[ 'id' ] if isinstance( id, list ): - new_args[ 'id' ] = [ trans.security.encode_id( i ) for i in id ] + new_kwargs[ 'id' ] = [ trans.security.encode_id( i ) for i in id ] else: new_kwargs[ 'id' ] = trans.security.encode_id( id ) return url_for( **new_kwargs ) https://bitbucket.org/galaxy/galaxy-central/changeset/4379576e8199/ changeset: 4379576e8199 user: dannon date: 2012-11-07 16:57:54 summary: Sphinx: Add doc/build to .hgignore. affected #: 1 file diff -r 36ecf36af10d88e6cc35666352a69f35c9313439 -r 4379576e8199c18b11ec448f090e8300bc0f372d .hgignore --- a/.hgignore +++ b/.hgignore @@ -41,6 +41,8 @@ static/welcome.html.* static/welcome.html +doc/build + # Tool data. tool-data/annotation_profiler_options.xml tool-data/annotation_profiler_valid_builds.txt https://bitbucket.org/galaxy/galaxy-central/changeset/0a20563f9d46/ changeset: 0a20563f9d46 user: dannon date: 2012-11-07 17:00:23 summary: Merge. affected #: 4 files diff -r 4379576e8199c18b11ec448f090e8300bc0f372d -r 0a20563f9d46749d041c079907492bf9d4866628 lib/galaxy/tool_shed/common_util.py --- /dev/null +++ b/lib/galaxy/tool_shed/common_util.py @@ -0,0 +1,93 @@ +import os, urllib2 +from galaxy import util +from galaxy.util.odict import odict +from galaxy.tool_shed.encoding_util import * + +REPOSITORY_OWNER = 'devteam' + +def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ): + # Get the 000x_tools.xml file associated with the current migrate_tools version number. + tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) ) + # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config. + migrated_tool_configs_dict = odict() + tree = util.parse_xml( tools_xml_file_path ) + root = tree.getroot() + tool_shed = root.get( 'name' ) + tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ) + # The default behavior is that the tool shed is down. + tool_shed_accessible = False + if tool_shed_url: + for elem in root: + if elem.tag == 'repository': + tool_dependencies = [] + tool_dependencies_dict = {} + repository_name = elem.get( 'name' ) + changeset_revision = elem.get( 'changeset_revision' ) + url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \ + ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision ) + try: + response = urllib2.urlopen( url ) + text = response.read() + response.close() + tool_shed_accessible = True + except Exception, e: + # Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping. 
+ tool_shed_accessible = False + print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) ) + if tool_shed_accessible: + if text: + tool_dependencies_dict = tool_shed_decode( text ) + for dependency_key, requirements_dict in tool_dependencies_dict.items(): + tool_dependency_name = requirements_dict[ 'name' ] + tool_dependency_version = requirements_dict[ 'version' ] + tool_dependency_type = requirements_dict[ 'type' ] + tool_dependency_readme = requirements_dict.get( 'readme', '' ) + tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) ) + for tool_elem in elem.findall( 'tool' ): + migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies + if tool_shed_accessible: + # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names. + missing_tool_configs_dict = odict() + for tool_panel_config in tool_panel_configs: + tree = util.parse_xml( tool_panel_config ) + root = tree.getroot() + for elem in root: + if elem.tag == 'tool': + missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ) + elif elem.tag == 'section': + for section_elem in elem: + if section_elem.tag == 'tool': + missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict ) + else: + exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file. ' % ( tool_shed, app.config.tool_sheds_config ) + exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n' + raise Exception( exception_msg ) + return tool_shed_accessible, missing_tool_configs_dict +def check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ): + file_path = elem.get( 'file', None ) + if file_path: + path, name = os.path.split( file_path ) + if name in migrated_tool_configs_dict: + tool_dependencies = migrated_tool_configs_dict[ name ] + missing_tool_configs_dict[ name ] = tool_dependencies + return missing_tool_configs_dict +def get_non_shed_tool_panel_configs( app ): + # Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml. + config_filenames = [] + for config_filename in app.config.tool_configs: + # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related. 
+ # <toolbox tool_path="../shed_tools"> + tree = util.parse_xml( config_filename ) + root = tree.getroot() + tool_path = root.get( 'tool_path', None ) + if tool_path is None: + config_filenames.append( config_filename ) + return config_filenames +def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ): + search_str = '://%s' % tool_shed + for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items(): + if shed_url.find( search_str ) >= 0: + if shed_url.endswith( '/' ): + shed_url = shed_url.rstrip( '/' ) + return shed_url + return None diff -r 4379576e8199c18b11ec448f090e8300bc0f372d -r 0a20563f9d46749d041c079907492bf9d4866628 lib/galaxy/tool_shed/install_manager.py --- a/lib/galaxy/tool_shed/install_manager.py +++ b/lib/galaxy/tool_shed/install_manager.py @@ -7,8 +7,7 @@ from galaxy.util.json import from_json_string, to_json_string from galaxy.util.shed_util import * from galaxy.util.odict import odict - -REPOSITORY_OWNER = 'devteam' +from galaxy.tool_shed.common_util import * class InstallManager( object ): def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ): @@ -19,10 +18,11 @@ self.app = app self.toolbox = self.app.toolbox self.migrated_tools_config = migrated_tools_config - # If install_dependencies is True, but tool_dependency_dir is not set, - # do not attempt to install, but print informative error message + # If install_dependencies is True but tool_dependency_dir is not set, do not attempt to install but print informative error message. if install_dependencies and app.config.tool_dependency_dir is None: - raise Exception( 'You are attempting to install tool dependencies, but do not have a value for "tool_dependency_dir" set in your ini file. Please set this to the path where you would like to install dependencies and rerun the migration script.' ) + message = 'You are attempting to install tool dependencies but do not have a value for "tool_dependency_dir" set in your universe_wsgi.ini ' + message += 'file. Set this location value to the path where you want tool dependencies installed and rerun the migration script.' + raise Exception( message ) # Get the local non-shed related tool panel configs (there can be more than one, and the default name is tool_conf.xml). self.proprietary_tool_confs = self.non_shed_tool_panel_configs self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number ) @@ -38,8 +38,39 @@ self.tool_shed = clean_tool_shed_url( root.get( 'name' ) ) self.repository_owner = REPOSITORY_OWNER index, self.shed_config_dict = get_shed_tool_conf_dict( app, self.migrated_tools_config ) - for repository_elem in root: - self.install_repository( repository_elem, install_dependencies ) + # Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in + # tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk. + # The default behavior is that the tool shed is down. 
+ tool_shed_accessible = False + tool_panel_configs = get_non_shed_tool_panel_configs( app ) + if tool_panel_configs: + # The missing_tool_configs_dict contents are something like: + # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]} + tool_shed_accessible, missing_tool_configs_dict = check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number ) + else: + # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but + # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in + # the database. + tool_shed_accessible = True + missing_tool_configs_dict = odict() + if tool_shed_accessible: + if len( self.proprietary_tool_confs ) == 1: + plural = '' + file_names = self.proprietary_tool_confs[ 0 ] + else: + plural = 's' + file_names = ', '.join( self.proprietary_tool_confs ) + if missing_tool_configs_dict: + for repository_elem in root: + self.install_repository( repository_elem, install_dependencies ) + else: + message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number ) + message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names ) + print message + else: + message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % str( latest_migration_script_number ) + message += "Try again later.\n" + print message def get_guid( self, repository_clone_url, relative_install_dir, tool_config ): if self.shed_config_dict.get( 'tool_path' ): relative_install_dir = os.path.join( self.shed_config_dict['tool_path'], relative_install_dir ) @@ -144,7 +175,8 @@ for k, v in tool_panel_dict_for_tool_config.items(): tool_panel_dict_for_display[ k ] = v else: - print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) ) + print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' \ + % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) ) metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=self.app, repository=tool_shed_repository, repository_clone_url=repository_clone_url, @@ -315,20 +347,9 @@ update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED ) @property def non_shed_tool_panel_configs( self ): - # Get the non-shed related tool panel config file names from the Galaxy config - the default is tool_conf.xml. - config_filenames = [] - for config_filename in self.app.config.tool_configs: - # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related. - # <toolbox tool_path="../shed_tools"> - tree = util.parse_xml( config_filename ) - root = tree.getroot() - tool_path = root.get( 'tool_path', None ) - if tool_path is None: - config_filenames.append( config_filename ) - return config_filenames + return get_non_shed_tool_panel_configs( self.app ) def __get_url_from_tool_shed( self, tool_shed ): - # The value of tool_shed is something like: toolshed.g2.bx.psu.edu - # We need the URL to this tool shed, which is something like: + # The value of tool_shed is something like: toolshed.g2.bx.psu.edu. 
We need the URL to this tool shed, which is something like: # http://toolshed.g2.bx.psu.edu/ for shed_name, shed_url in self.app.tool_shed_registry.tool_sheds.items(): if shed_url.find( tool_shed ) >= 0: diff -r 4379576e8199c18b11ec448f090e8300bc0f372d -r 0a20563f9d46749d041c079907492bf9d4866628 lib/galaxy/tool_shed/migrate/common.py --- a/lib/galaxy/tool_shed/migrate/common.py +++ b/lib/galaxy/tool_shed/migrate/common.py @@ -1,103 +1,13 @@ -import sys, os, ConfigParser, urllib2 +import sys, os, ConfigParser import galaxy.config import galaxy.datatypes.registry -from galaxy import util, tools +from galaxy import tools import galaxy.model.mapping import galaxy.tools.search from galaxy.objectstore import build_object_store_from_config +from galaxy.tool_shed.common_util import * import galaxy.tool_shed.tool_shed_registry from galaxy.tool_shed import install_manager -from galaxy.tool_shed.encoding_util import * -from galaxy.util.odict import odict - -REPOSITORY_OWNER = 'devteam' - -def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ): - # Get the 000x_tools.xml file associated with the current migrate_tools version number. - tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) ) - # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config. - migrated_tool_configs_dict = odict() - tree = util.parse_xml( tools_xml_file_path ) - root = tree.getroot() - tool_shed = root.get( 'name' ) - tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ) - # The default behavior is that the tool shed is down. - tool_shed_accessible = False - if tool_shed_url: - for elem in root: - if elem.tag == 'repository': - tool_dependencies = [] - tool_dependencies_dict = {} - repository_name = elem.get( 'name' ) - changeset_revision = elem.get( 'changeset_revision' ) - url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \ - ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision ) - try: - response = urllib2.urlopen( url ) - text = response.read() - response.close() - tool_shed_accessible = True - except Exception, e: - # Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping. - tool_shed_accessible = False - print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) ) - if tool_shed_accessible: - if text: - tool_dependencies_dict = tool_shed_decode( text ) - for dependency_key, requirements_dict in tool_dependencies_dict.items(): - tool_dependency_name = requirements_dict[ 'name' ] - tool_dependency_version = requirements_dict[ 'version' ] - tool_dependency_type = requirements_dict[ 'type' ] - tool_dependency_readme = requirements_dict.get( 'readme', '' ) - tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) ) - for tool_elem in elem.findall( 'tool' ): - migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies - if tool_shed_accessible: - # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names. 
- missing_tool_configs_dict = odict() - for tool_panel_config in tool_panel_configs: - tree = util.parse_xml( tool_panel_config ) - root = tree.getroot() - for elem in root: - if elem.tag == 'tool': - missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ) - elif elem.tag == 'section': - for section_elem in elem: - if section_elem.tag == 'tool': - missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict ) - else: - exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file. ' % ( tool_shed, app.config.tool_sheds_config ) - exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n' - raise Exception( exception_msg ) - return tool_shed_accessible, missing_tool_configs_dict -def check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ): - file_path = elem.get( 'file', None ) - if file_path: - path, name = os.path.split( file_path ) - if name in migrated_tool_configs_dict: - tool_dependencies = migrated_tool_configs_dict[ name ] - missing_tool_configs_dict[ name ] = tool_dependencies - return missing_tool_configs_dict -def get_non_shed_tool_panel_configs( app ): - # Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml. - config_filenames = [] - for config_filename in app.config.tool_configs: - # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related. - # <toolbox tool_path="../shed_tools"> - tree = util.parse_xml( config_filename ) - root = tree.getroot() - tool_path = root.get( 'tool_path', None ) - if tool_path is None: - config_filenames.append( config_filename ) - return config_filenames -def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ): - search_str = '://%s' % tool_shed - for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items(): - if shed_url.find( search_str ) >= 0: - if shed_url.endswith( '/' ): - shed_url = shed_url.rstrip( '/' ) - return shed_url - return None class MigrateToolsApplication( object ): """Encapsulates the state of a basic Galaxy Universe application in order to initiate the Install Manager""" diff -r 4379576e8199c18b11ec448f090e8300bc0f372d -r 0a20563f9d46749d041c079907492bf9d4866628 scripts/migrate_tools/migrate_tools.py --- a/scripts/migrate_tools/migrate_tools.py +++ b/scripts/migrate_tools/migrate_tools.py @@ -26,8 +26,8 @@ else: plural = 's' file_names = ', '.join( non_shed_tool_confs ) -msg = "\nThe installation process is finished. You should now remove entries for the installed tools from your file%s named\n" % plural -msg += "%s and start your Galaxy server.\n" % file_names +msg = "\nThe installation process is finished. If any tools associated with this migration were defined in your file%s named\n" % plural +msg += "%s, then you should remove entries for them and start your Galaxy server.\n" % file_names print msg app.shutdown() sys.exit( 0 ) Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
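
Editor's note on the first changeset (1b7f0f272c1a): grid filter values arrive as raw request parameters and are later echoed back into grid templates, so the change sanitizes them once, centrally, as cur_filter_dict is built. Below is a minimal sketch of that pattern; sanitize_text_stub is a hypothetical stand-in for galaxy.util.sanitize_text, not the real helper, and the column handling is reduced to plain strings.

    # Illustration only, not Galaxy code; sanitize_text_stub is a hypothetical
    # stand-in for galaxy.util.sanitize_text.
    import re

    def sanitize_text_stub(text):
        # Keep a conservative whitelist of characters; replace anything else
        # (angle brackets, quotes, parentheses, ...) with 'X'.
        return re.sub(r'[^a-zA-Z0-9\s\-_\.]', 'X', text)

    def build_cur_filter_dict(column_keys, kwargs):
        # Mirrors the idea in grids.py: whatever filter text the user supplied
        # is sanitized before it is stored and later rendered back into the page.
        cur_filter_dict = {}
        for key in column_keys:
            column_filter = kwargs.get('f-' + key)
            if column_filter is not None:
                cur_filter_dict[key] = sanitize_text_stub(column_filter)
        return cur_filter_dict

    print build_cur_filter_dict(['name'], {'f-name': '<script>alert(1)</script>'})

Sanitizing at the point where the filter dictionary is assembled means every template that renders the current filters gets the cleaned value without further changes.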
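
The new_args vs new_kwargs fix in 36ecf36af10d addresses a name mix-up: the list branch wrote encoded ids into a dict named new_args, so they never reached the new_kwargs dict that is actually passed to url_for. A compressed sketch of the corrected shape follows; encode_id and url_for are toy stubs introduced only for illustration.

    # Illustration only; encode_id and url_for are toy stubs, not Galaxy's.
    def encode_id(item_id):
        return 'enc-%s' % item_id

    def url_for(**kwargs):
        return '/grid?' + '&'.join('%s=%s' % (k, v) for k, v in sorted(kwargs.items()))

    def url_with_encoded_ids(new_kwargs):
        if 'id' in new_kwargs:
            item_id = new_kwargs['id']
            if isinstance(item_id, list):
                # The pre-fix code assigned to a different dict here (new_args),
                # so encoded ids were silently dropped for the list case.
                new_kwargs['id'] = [encode_id(i) for i in item_id]
            else:
                new_kwargs['id'] = encode_id(item_id)
        return url_for(**new_kwargs)

    print url_with_encoded_ids({'id': [3, 7], 'sort': 'name'})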
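
The merged common_util.py centers on a defensive probe of the tool shed: assume it is down, flip tool_shed_accessible only after a request succeeds, and keep looping so one unreachable repository entry does not abort the rest. A reduced sketch of that pattern is below; the URL is a placeholder, not a real tool shed endpoint.

    # Illustration only; the URL below is a placeholder, not a real endpoint.
    import urllib2

    def probe_urls(urls):
        # Default behavior is that the tool shed is down; only a successful
        # request flips the flag, and a failure resets it since we keep looping.
        tool_shed_accessible = False
        responses = {}
        for url in urls:
            try:
                response = urllib2.urlopen(url)
                responses[url] = response.read()
                response.close()
                tool_shed_accessible = True
            except Exception, e:
                tool_shed_accessible = False
                print "The URL\n%s\nraised the exception:\n%s\n" % (url, str(e))
        return tool_shed_accessible, responses

    accessible, payloads = probe_urls(['http://example.org/repository/get_tool_dependencies'])
    print accessible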