1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/3faf01e5111e/
Changeset:   3faf01e5111e
User:        dan
Date:        2014-05-19 21:08:26
Summary:     First pass at allowing dbkeys / genome builds to be loaded from Tool Data Tables.
Affected #:  12 files

diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -57,13 +57,17 @@
         self.tag_handler = GalaxyTagHandler()
         # Dataset Collection Plugins
         self.dataset_collections_service = dataset_collections.DatasetCollectionsService(self)
+
+        # Tool Data Tables
+        self._configure_tool_data_tables( from_shed_config=False )
+        # Load dbkey / genome build manager
+        self._configure_genome_builds( data_table_name="__dbkeys__", load_old_style=True )
+
         # Genomes
         self.genomes = Genomes( self )
         # Data providers registry.
         self.data_provider_registry = DataProviderRegistry()
-        self._configure_tool_data_tables( from_shed_config=False )
-
         # Initialize job metrics manager, needs to be in place before
         # config so per-destination modifications can be made.
         self.job_metrics = job_metrics.JobMetrics( self.config.job_metrics_config_file, app=self )

diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -16,6 +16,7 @@
 from galaxy.util import string_as_bool
 from galaxy.util import listify
 from galaxy.util import parse_xml
+from galaxy.util.dbkeys import GenomeBuilds
 from galaxy import eggs
 import pkg_resources
@@ -554,6 +555,9 @@
     """
     Shared code for configuring Galaxy-like app objects.
     """
+    def _configure_genome_builds( self, data_table_name="__dbkeys__", load_old_style=True ):
+        self.genome_builds = GenomeBuilds( self, data_table_name=data_table_name, load_old_style=load_old_style )
+
     def _configure_toolbox( self ):
         # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
         tool_configs = self.config.tool_configs

diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -372,7 +372,7 @@
         context = context or {}
         other_values = other_values or {}
         try:
-            values = kwd['trans'].db_builds
+            values = kwd['trans'].app.genome_builds.get_genome_build_names( kwd['trans'] )
         except KeyError:
             pass
         return super(DBKeyParameter, self).get_html_field( value, context, other_values, values, **kwd)
@@ -381,7 +381,7 @@
         context = context or {}
         other_values = other_values or {}
         try:
-            values = kwd['trans'].db_builds
+            values = kwd['trans'].app.genome_builds.get_genome_build_names( kwd['trans'] )
         except KeyError:
             pass
         return super(DBKeyParameter, self).get_html( value, context, other_values, values, **kwd)

diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -193,36 +193,10 @@

         # Collect chromInfo dataset and add as parameters to incoming
         db_datasets = {}
-        db_dataset = trans.db_dataset_for( input_dbkey )
+        ( chrom_info, db_dataset ) = trans.app.genome_builds.get_chrom_info( input_dbkey, trans=trans )
         if db_dataset:
-            db_datasets[ "chromInfo" ] = db_dataset
-            incoming[ "chromInfo" ] = db_dataset.file_name
-        else:
-            # -- Get chrom_info (len file) from either a custom or built-in build. --
-
-            chrom_info = None
-            if trans.user and ( 'dbkeys' in trans.user.preferences ) and ( input_dbkey in from_json_string( trans.user.preferences[ 'dbkeys' ] ) ):
-                # Custom build.
-                custom_build_dict = from_json_string( trans.user.preferences[ 'dbkeys' ] )[ input_dbkey ]
-                # HACK: the attempt to get chrom_info below will trigger the
-                # fasta-to-len converter if the dataset is not available or,
-                # which will in turn create a recursive loop when
-                # running the fasta-to-len tool. So, use a hack in the second
-                # condition below to avoid getting chrom_info when running the
-                # fasta-to-len converter.
-                if 'fasta' in custom_build_dict and tool.id != 'CONVERTER_fasta_to_len':
-                    # Build is defined by fasta; get len file, which is obtained from converting fasta.
-                    build_fasta_dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( custom_build_dict[ 'fasta' ] )
-                    chrom_info = build_fasta_dataset.get_converted_dataset( trans, 'len' ).file_name
-                elif 'len' in custom_build_dict:
-                    # Build is defined by len file, so use it.
-                    chrom_info = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( custom_build_dict[ 'len' ] ).file_name
-
-            if not chrom_info:
-                # Default to built-in build.
-                chrom_info = os.path.join( trans.app.config.len_file_path, "%s.len" % input_dbkey )
-            incoming[ "chromInfo" ] = os.path.abspath( chrom_info )
-        inp_data.update( db_datasets )
+            inp_data.update( { "chromInfo": db_dataset } )
+        incoming[ "chromInfo" ] = chrom_info

         # Determine output dataset permission/roles list
         existing_datasets = [ inp for inp in inp_data.values() if inp ]

diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -157,7 +157,7 @@
             folder = matches[0]
         else:
             new_folder = trans.app.model.LibraryFolder( name=name, description='Automatically created by upload tool' )
-            new_folder.genome_build = util.dbnames.default_value
+            new_folder.genome_build = trans.app.genome_builds.default_value
             folder.add_folder( new_folder )
             trans.sa_session.add( new_folder )
             trans.sa_session.flush()

diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -321,6 +321,19 @@
     def get_fields( self ):
         return self.data

+    def get_named_fields_list( self ):
+        rval = []
+        named_columns = self.get_column_name_list()
+        for fields in self.get_fields():
+            field_dict = {}
+            for i, field in enumerate( fields ):
+                field_name = named_columns[i]
+                if field_name is None:
+                    field_name = i  #check that this is supposed to be 0 based.
+                field_dict[ field_name ] = field
+            rval.append( field_dict )
+        return rval
+
     def get_version_fields( self ):
         return ( self._loaded_content_version, self.data )

diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1033,17 +1033,18 @@
     """
     def __init__( self, *args, **kwds ):
         super( GenomeBuildParameter, self ).__init__( *args, **kwds )
-        self.static_options = [ ( value, key, False ) for key, value in util.dbnames ]
+        if self.tool:
+            self.static_options = [ ( value, key, False ) for key, value in self._get_dbkey_names() ]

     def get_options( self, trans, other_values ):
         last_used_build = object()
         if trans.history:
             last_used_build = trans.history.genome_build
-        for dbkey, build_name in trans.db_builds:
+        for dbkey, build_name in self._get_dbkey_names( trans=trans ):
             yield build_name, dbkey, ( dbkey == last_used_build )

     def get_legal_values( self, trans, other_values ):
-        return set( dbkey for dbkey, _ in trans.db_builds )
+        return set( dbkey for dbkey, _ in self._get_dbkey_names( trans=trans ) )

     def to_dict( self, trans, view='collection', value_mapper=None ):
         # skip SelectToolParameter (the immediate parent) bc we need to get options in a different way here
@@ -1062,6 +1063,12 @@
             'value': value
         })
         return d
+
+    def _get_dbkey_names( self, trans=None ):
+        if not self.tool:
+            # Hack for unit tests, since we have no tool
+            return util.dbnames
+        return self.tool.app.genome_builds.get_genome_build_names( trans=trans )


 class ColumnListParameter( SelectToolParameter ):

diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be lib/galaxy/util/dbkeys.py
--- /dev/null
+++ b/lib/galaxy/util/dbkeys.py
@@ -0,0 +1,82 @@
+"""
+Functionality for dealing with dbkeys.
+"""
+#dbkeys read from disk using builds.txt
+from galaxy.util import dbnames, galaxy_directory
+from galaxy.util.json import from_json_string
+from galaxy.util.odict import odict
+import os.path
+
+
+class GenomeBuilds( object ):
+    default_value = "?"
+    default_name = "unspecified (?)"
+
+    def __init__( self, app, data_table_name="__dbkeys__", load_old_style=True ):
+        self._app = app
+        self._data_table_name = data_table_name
+        self._static_chrom_info_path = app.config.len_file_path
+        self._static_dbkeys = odict()  #need odict to keep ? at top of list
+        if load_old_style:
+            for key, value in dbnames:
+                self._static_dbkeys[ key ] = value
+
+    def get_genome_build_names( self, trans=None ):
+        #FIXME: how to deal with key duplicates?
+        #Load old builds.txt static keys
+        rval = ( self._static_dbkeys.items() )
+        #load dbkeys from dbkey data table
+        dbkey_table = self._app.tool_data_tables.get( self._data_table_name, None )
+        if dbkey_table is not None:
+            for field_dict in dbkey_table.get_named_fields_list():
+                rval.append( ( field_dict[ 'value' ], field_dict[ 'name' ] ) )
+        #load user custom genome builds
+        if trans is not None:
+            if trans.history:
+                datasets = trans.sa_session.query( self._app.model.HistoryDatasetAssociation ) \
+                                           .filter_by( deleted=False, history_id=trans.history.id, extension="len" )
+                for dataset in datasets:
+                    rval.append( (dataset.dbkey, dataset.name) )
+            user = trans.get_user()
+            if user and 'dbkeys' in user.preferences:
+                user_keys = from_json_string( user.preferences['dbkeys'] )
+                for key, chrom_dict in user_keys.iteritems():
+                    rval.append( ( key, "%s (%s) [Custom]" % ( chrom_dict['name'], key ) ) )
+        return rval
+
+    def get_chrom_info( self, dbkey, trans=None ):
+        chrom_info = None
+        db_dataset = None
+        # Collect chromInfo from custom builds
+        if trans:
+            db_dataset = trans.db_dataset_for( dbkey )
+            if db_dataset:
+                #incoming[ "chromInfo" ] = db_dataset.file_name
+                chrom_info = db_dataset.file_name
+            else:
+                # -- Get chrom_info (len file) from either a custom or built-in build. --
+                if trans.user and ( 'dbkeys' in trans.user.preferences ) and ( dbkey in from_json_string( trans.user.preferences[ 'dbkeys' ] ) ):
+                    # Custom build.
+                    custom_build_dict = from_json_string( trans.user.preferences[ 'dbkeys' ] )[ dbkey ]
+                    # HACK: the attempt to get chrom_info below will trigger the
+                    # fasta-to-len converter if the dataset is not available or,
+                    # which will in turn create a recursive loop when
+                    # running the fasta-to-len tool. So, use a hack in the second
+                    # condition below to avoid getting chrom_info when running the
+                    # fasta-to-len converter.
+                    if 'fasta' in custom_build_dict and tool.id != 'CONVERTER_fasta_to_len':
+                        # Build is defined by fasta; get len file, which is obtained from converting fasta.
+                        build_fasta_dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( custom_build_dict[ 'fasta' ] )
+                        chrom_info = build_fasta_dataset.get_converted_dataset( trans, 'len' ).file_name
+                    elif 'len' in custom_build_dict:
+                        # Build is defined by len file, so use it.
+                        chrom_info = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( custom_build_dict[ 'len' ] ).file_name
+        if not chrom_info:
+            dbkey_table = self._app.tool_data_tables.get( self._data_table_name, None )
+            if dbkey_table is not None:
+                chrom_info = dbkey_table.get_entry( 'value', dbkey, 'len_path', default=None )
+        if not chrom_info:
+            # Default to built-in build.
+            chrom_info = os.path.join( self._static_chrom_info_path, "%s.len" % dbkey )
+        chrom_info = os.path.abspath( chrom_info )
+        return ( chrom_info, db_dataset )

diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be lib/galaxy/visualization/genomes.py
--- a/lib/galaxy/visualization/genomes.py
+++ b/lib/galaxy/visualization/genomes.py
@@ -178,9 +178,9 @@
     """

     def __init__( self, app ):
-        # Create list of genomes from util.dbnames
+        # Create list of genomes from app.genome_builds
        self.genomes = {}
-        for key, description in util.dbnames:
+        for key, description in app.genome_builds.get_genome_build_names():
             self.genomes[ key ] = Genome( key, description )

         # Add len files to genomes.
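
For a rough sense of how the new manager is used (not part of the changeset): GenomeBuilds only needs an app-like object exposing config.len_file_path and tool_data_tables. The StubConfig/StubApp classes and the "mm9" key below are made up for illustration; in Galaxy itself the wiring happens in ConfiguresGalaxyMixin._configure_genome_builds() above.

    # Illustrative sketch only -- a minimal stand-in for the Galaxy app object.
    from galaxy.util.dbkeys import GenomeBuilds

    class StubConfig( object ):
        len_file_path = "tool-data/shared/ucsc/chrom"  # hypothetical location of *.len files

    class StubApp( object ):
        config = StubConfig()
        tool_data_tables = {}  # no "__dbkeys__" table registered in this sketch

    app = StubApp()
    app.genome_builds = GenomeBuilds( app, data_table_name="__dbkeys__", load_old_style=True )

    # Static builds.txt entries plus, when present, rows from the __dbkeys__ data table.
    for dbkey, build_name in app.genome_builds.get_genome_build_names():
        print dbkey, build_name

    # With no trans and no data table entry this falls back to the static .len path.
    chrom_info, db_dataset = app.genome_builds.get_chrom_info( "mm9" )
    print chrom_info  # .../tool-data/shared/ucsc/chrom/mm9.len
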
diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -1075,7 +1075,7 @@
         if self.galaxy_session.user:
             history.user = self.galaxy_session.user
         # Track genome_build with history
-        history.genome_build = util.dbnames.default_value
+        history.genome_build = self.app.genome_builds.default_value
         # Set the user's default history permissions
         self.app.security_agent.history_set_default_permissions( history )
         # Save
@@ -1238,19 +1238,8 @@
         Returns the builds defined by galaxy and the builds defined by
         the user (chromInfo in history).
         """
-        dbnames = list()
-        if self.history:
-            datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
-                                      .filter_by( deleted=False, history_id=self.history.id, extension="len" )
-            for dataset in datasets:
-                dbnames.append( (dataset.dbkey, dataset.name) )
-        user = self.get_user()
-        if user and 'dbkeys' in user.preferences:
-            user_keys = from_json_string( user.preferences['dbkeys'] )
-            for key, chrom_dict in user_keys.iteritems():
-                dbnames.append((key, "%s (%s) [Custom]" % (chrom_dict['name'], key) ))
-        dbnames.extend( util.dbnames )
-        return dbnames
+        #FIXME: This method should be removed
+        return self.app.genome_builds.get_genome_build_names( trans=self )

     @property
     def ucsc_builds( self ):

diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be lib/galaxy/webapps/galaxy/controllers/library_common.py
--- a/lib/galaxy/webapps/galaxy/controllers/library_common.py
+++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py
@@ -292,9 +292,9 @@
             new_folder = trans.app.model.LibraryFolder( name=util.restore_text( params.name ),
                                                         description=util.restore_text( params.description ) )
             # We are associating the last used genome build with folders, so we will always
-            # initialize a new folder with the first dbkey in util.dbnames which is currently
+            # initialize a new folder with the first dbkey in genome builds list which is currently
             # ? unspecified (?)
-            new_folder.genome_build = util.dbnames.default_value
+            new_folder.genome_build = trans.app.genome_builds.default_value
             parent_folder.add_folder( new_folder )
             trans.sa_session.add( new_folder )
             trans.sa_session.flush()
@@ -1433,7 +1433,7 @@
         file_formats = trans.app.datatypes_registry.upload_file_formats
         # Send list of genome builds to the form so the "dbkey" select list can be populated dynamically
         def get_dbkey_options( last_used_build ):
-            for dbkey, build_name in util.dbnames:
+            for dbkey, build_name in trans.app.genome_builds.get_genome_build_names( trans=trans ):
                 yield build_name, dbkey, ( dbkey==last_used_build )
         dbkeys = get_dbkey_options( last_used_build )
         # Send the current history to the form to enable importing datasets from history to library

diff -r 6e4db406d614c7826b80a825be351df674994409 -r 3faf01e5111ecf454f06b22546b530131b0550be test/unit/tools_support.py
--- a/test/unit/tools_support.py
+++ b/test/unit/tools_support.py
@@ -14,6 +14,7 @@
 from galaxy.model import mapping
 from galaxy.tools import Tool
 from galaxy.util import parse_xml
+from galaxy.util.dbkeys import GenomeBuilds
 from galaxy.jobs import NoopQueue

@@ -96,6 +97,7 @@
             tool_data_path=os.path.join(test_directory, "tools"),
             root=os.path.join(test_directory, "galaxy"),
             admin_users="mary@example.com",
+            len_file_path=os.path.join( 'tool-data', 'shared', 'ucsc', 'chrom' ),
         )
         # Setup some attributes for downstream extension by specific tests.
@@ -115,6 +117,7 @@
                 self.model[ module_member_name ] = module_member
         else:
             self.model = in_memomry_model
+        self.genome_builds = GenomeBuilds( self )
         self.toolbox = None
         self.object_store = None
         self.security = SecurityHelper(id_secret="testing")

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this
because you have the service enabled, addressing the recipient of this email.
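
As a closing illustration (not part of the commit): the dbkey lookups above assume a "__dbkeys__" tool data table whose columns include at least value, name and len_path. get_genome_build_names() reads ( field_dict[ 'value' ], field_dict[ 'name' ] ) and get_chrom_info() looks up 'len_path' for a matching 'value'. The rows and column names in this standalone sketch of the get_named_fields_list() mapping are invented; real entries come from the table's .loc file and tool_data_table_conf.xml.

    # Standalone sketch of the column-name mapping added to lib/galaxy/tools/data/__init__.py.
    # The sample rows and paths below are made up.
    column_names = [ 'value', 'name', 'len_path' ]
    rows = [
        [ 'mm9', 'Mouse July 2007 (NCBI37/mm9)', '/path/to/len/mm9.len' ],
        [ 'hg19', 'Human Feb. 2009 (GRCh37/hg19)', '/path/to/len/hg19.len' ],
    ]

    def named_fields_list( rows, column_names ):
        rval = []
        for fields in rows:
            field_dict = {}
            for i, field in enumerate( fields ):
                field_name = column_names[i] if i < len( column_names ) else None
                if field_name is None:
                    # Fall back to the positional index when a column has no name.
                    field_name = i
                field_dict[ field_name ] = field
            rval.append( field_dict )
        return rval

    for field_dict in named_fields_list( rows, column_names ):
        print field_dict[ 'value' ], field_dict[ 'name' ], field_dict[ 'len_path' ]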