1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/changeset/4527ed1eb175/
changeset: 4527ed1eb175
user: jgoecks
date: 2012-04-06 16:29:08
summary: Track and reuse HDA subsets in Trackster.
affected #: 4 files

diff -r ee0459fb77f2e14cf89eb162eb63f0ca1e539419 -r 4527ed1eb1751a8699143236057c9df9318f48ea lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -1186,6 +1186,12 @@
         self.history_dataset_association = hda
         self.user = user
         self.site = site
+
+class HistoryDatasetAssociationSubset( object ):
+    def __init__(self, hda, subset, location):
+        self.hda = hda
+        self.subset = subset
+        self.location = location
 
 class Library( object, APIItem ):
     permitted_actions = get_permitted_actions( filter='LIBRARY' )

diff -r ee0459fb77f2e14cf89eb162eb63f0ca1e539419 -r 4527ed1eb1751a8699143236057c9df9318f48ea lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -143,6 +143,12 @@
     Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
     Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
     Column( "site", TrimmedString( 255 ) ) )
+
+HistoryDatasetAssociationSubset.table = Table( "history_dataset_association_subset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "history_dataset_association_subset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "location", Unicode(255), index=True) )
 
 ImplicitlyConvertedDatasetAssociation.table = Table( "implicitly_converted_dataset_association", metadata,
     Column( "id", Integer, primary_key=True ),
@@ -1232,6 +1238,13 @@
 assign_mapper( context, HistoryDatasetAssociationDisplayAtAuthorization, HistoryDatasetAssociationDisplayAtAuthorization.table,
     properties=dict( history_dataset_association = relation( HistoryDatasetAssociation ),
                      user = relation( User ) ) )
+
+assign_mapper( context, HistoryDatasetAssociationSubset, HistoryDatasetAssociationSubset.table,
+    properties=dict( hda = relation( HistoryDatasetAssociation,
+                         primaryjoin=( HistoryDatasetAssociationSubset.table.c.history_dataset_association_id == HistoryDatasetAssociation.table.c.id ) ),
+                     subset = relation( HistoryDatasetAssociation,
+                         primaryjoin=( HistoryDatasetAssociationSubset.table.c.history_dataset_association_subset_id == HistoryDatasetAssociation.table.c.id ) )
+                   ) )
 
 assign_mapper( context, ImplicitlyConvertedDatasetAssociation, ImplicitlyConvertedDatasetAssociation.table,
     properties=dict( parent_hda=relation(

diff -r ee0459fb77f2e14cf89eb162eb63f0ca1e539419 -r 4527ed1eb1751a8699143236057c9df9318f48ea lib/galaxy/model/migrate/versions/0095_hda_subsets.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0095_hda_subsets.py
@@ -0,0 +1,44 @@
+"""
+Migration script to create table for tracking history_dataset_association subsets.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+# Table to add.
+
+HistoryDatasetAssociationSubset_table = Table( "history_dataset_association_subset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "history_dataset_association_subset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "location", Unicode(255), index=True)
+)
+
+def upgrade():
+    print __doc__
+    metadata.reflect()
+
+    # Create history_dataset_association_subset.
+    try:
+        HistoryDatasetAssociationSubset_table.create()
+    except Exception, e:
+        print str(e)
+        log.debug( "Creating history_dataset_association_subset table failed: %s" % str( e ) )
+
+def downgrade():
+    metadata.reflect()
+
+    # Drop history_dataset_association_subset table.
+    try:
+        HistoryDatasetAssociationSubset_table.drop()
+    except Exception, e:
+        print str(e)
+        log.debug( "Dropping history_dataset_association_subset table failed: %s" % str( e ) )
\ No newline at end of file

diff -r ee0459fb77f2e14cf89eb162eb63f0ca1e539419 -r 4527ed1eb1751a8699143236057c9df9318f48ea lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -932,6 +932,7 @@
         # Set input datasets for tool. If running on region, extract and use subset
         # when possible.
         #
+        location = "%s:%i-%i" % ( chrom, low, high )
         for jida in original_job.input_datasets:
             # If param set previously by config actions, do nothing.
             if jida.name in params_set:
@@ -943,51 +944,67 @@
             elif run_on_region and hasattr( input_dataset.datatype, 'get_track_type' ):
                 # Dataset is indexed and hence a subset can be extracted and used
                 # as input.
-                track_type, data_sources = input_dataset.datatype.get_track_type()
-                data_source = data_sources[ 'data' ]
-                converted_dataset = input_dataset.get_converted_dataset( trans, data_source )
-                deps = input_dataset.get_converted_dataset_deps( trans, data_source )
+
+                # Look for subset.
+                subset_dataset_association = trans.sa_session.query( trans.app.model.HistoryDatasetAssociationSubset ) \
+                                                             .filter_by( hda=input_dataset, location=location ) \
+                                                             .first()
+                if subset_dataset_association:
+                    # Data subset exists.
+                    subset_dataset = subset_dataset_association.subset
+                else:
+                    # Need to create subset.
+                    track_type, data_sources = input_dataset.datatype.get_track_type()
+                    data_source = data_sources[ 'data' ]
+                    converted_dataset = input_dataset.get_converted_dataset( trans, data_source )
+                    deps = input_dataset.get_converted_dataset_deps( trans, data_source )
 
-                # Create new HDA for input dataset's subset.
-                new_dataset = trans.app.model.HistoryDatasetAssociation( extension=input_dataset.ext, \
-                                                                         dbkey=input_dataset.dbkey, \
-                                                                         create_dataset=True, \
-                                                                         sa_session=trans.sa_session,
-                                                                         name="Subset [%s:%i-%i] of data %i" % \
-                                                                             ( chrom, low, high, input_dataset.hid ),
-                                                                         visible=False )
-                target_history.add_dataset( new_dataset )
-                trans.sa_session.add( new_dataset )
-                trans.app.security_agent.set_all_dataset_permissions( new_dataset.dataset, hda_permissions )
+                    # Create new HDA for input dataset's subset.
+                    new_dataset = trans.app.model.HistoryDatasetAssociation( extension=input_dataset.ext, \
+                                                                             dbkey=input_dataset.dbkey, \
+                                                                             create_dataset=True, \
+                                                                             sa_session=trans.sa_session,
+                                                                             name="Subset [%s] of data %i" % \
+                                                                                 ( location, input_dataset.hid ),
+                                                                             visible=False )
+                    target_history.add_dataset( new_dataset )
+                    trans.sa_session.add( new_dataset )
+                    trans.app.security_agent.set_all_dataset_permissions( new_dataset.dataset, hda_permissions )
 
-                # Write subset of data to new dataset
-                data_provider_class = get_data_provider( original_dataset=input_dataset )
-                data_provider = data_provider_class( original_dataset=input_dataset,
-                                                     converted_dataset=converted_dataset,
-                                                     dependencies=deps )
-                trans.app.object_store.create( new_dataset.dataset )
-                data_provider.write_data_to_file( chrom, low, high, new_dataset.file_name )
+                    # Write subset of data to new dataset
+                    data_provider_class = get_data_provider( original_dataset=input_dataset )
+                    data_provider = data_provider_class( original_dataset=input_dataset,
+                                                         converted_dataset=converted_dataset,
+                                                         dependencies=deps )
+                    trans.app.object_store.create( new_dataset.dataset )
+                    data_provider.write_data_to_file( chrom, low, high, new_dataset.file_name )
 
-                # TODO: (a) size not working; (b) need to set peek.
-                new_dataset.set_size()
-                new_dataset.info = "Data subset for trackster"
-                new_dataset.set_dataset_state( trans.app.model.Dataset.states.OK )
+                    # TODO: (a) size not working; (b) need to set peek.
+                    new_dataset.set_size()
+                    new_dataset.info = "Data subset for trackster"
+                    new_dataset.set_dataset_state( trans.app.model.Dataset.states.OK )
 
-                # Set metadata.
-                # TODO: set meta internally if dataset is small enough?
-                if trans.app.config.set_metadata_externally:
-                    trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool,
-                                                                                                 trans, incoming = { 'input1':new_dataset },
-                                                                                                 overwrite=False, job_params={ "source" : "trackster" } )
-                else:
-                    message = 'Attributes updated'
-                    new_dataset.set_meta()
-                    new_dataset.datatype.after_setting_metadata( new_dataset )
+                    # Set metadata.
+                    # TODO: set meta internally if dataset is small enough?
+                    if trans.app.config.set_metadata_externally:
+                        trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool,
+                                                                                                     trans, incoming = { 'input1':new_dataset },
+                                                                                                     overwrite=False, job_params={ "source" : "trackster" } )
+                    else:
+                        message = 'Attributes updated'
+                        new_dataset.set_meta()
+                        new_dataset.datatype.after_setting_metadata( new_dataset )
+
+                    # Add HDA subset association.
+                    subset_association = trans.app.model.HistoryDatasetAssociationSubset( hda=input_dataset, subset=new_dataset, location=location )
+                    trans.sa_session.add( subset_association )
+
+                    subset_dataset = new_dataset
 
                 trans.sa_session.flush()
 
             # Add dataset to tool's parameters.
-            if not set_param_value( tool_params, jida.name, new_dataset ):
+            if not set_param_value( tool_params, jida.name, subset_dataset ):
                 return to_json_string( { "error" : True, "message" : "error setting parameter %s" % jida.name } )

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this
because you have the service enabled, addressing the recipient of this email.
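The tracks.py change above amounts to a reuse-or-create lookup keyed on the (hda, location) pair stored in the new history_dataset_association_subset table. The sketch below shows that pattern in isolation; it is not part of the commit, and `sa_session`, `model`, and `build_subset` are assumed stand-ins for the Galaxy SQLAlchemy session, the mapped galaxy.model module, and the data-provider extraction code, respectively.

    # Illustrative sketch only: reuse or create an HDA subset for a genomic
    # region, mirroring the logic added to tracks.py in this commit.
    def get_or_create_region_subset( sa_session, model, input_dataset, chrom, low, high, build_subset ):
        # Region key; matches the `location` column created by migration 0095.
        location = "%s:%i-%i" % ( chrom, low, high )
        # Reuse: look for a previously recorded subset of this HDA for this region.
        assoc = sa_session.query( model.HistoryDatasetAssociationSubset ) \
                          .filter_by( hda=input_dataset, location=location ) \
                          .first()
        if assoc:
            return assoc.subset
        # Create: extract the region into a new HDA (build_subset is a hypothetical
        # helper standing in for the data-provider code), then record the
        # association so later region-limited jobs can reuse the subset.
        new_dataset = build_subset( input_dataset, chrom, low, high )
        sa_session.add( model.HistoryDatasetAssociationSubset( hda=input_dataset,
                                                               subset=new_dataset,
                                                               location=location ) )
        sa_session.flush()
        return new_dataset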