1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a749bcb13792/ changeset: a749bcb13792 user: jgoecks date: 2012-12-14 16:31:19 summary: Decouple set metadata tool from a web transaction and require only an app. affected #: 1 file
diff -r 2d7a64c143d1136fdfa6ed2203c22527b04b5ff1 -r a749bcb137928ede156899ca5ef4d2eef6e9f45c lib/galaxy/tools/actions/metadata.py --- a/lib/galaxy/tools/actions/metadata.py +++ b/lib/galaxy/tools/actions/metadata.py @@ -8,29 +8,50 @@
class SetMetadataToolAction( ToolAction ): """Tool action used for setting external metadata on an existing dataset""" + + def execute( self, tool, trans, incoming={}, set_output_hid=False, overwrite=True, history=None, job_params=None ): + """ + Execute using a web transaction. + """ + user_id = None + if trans.user: + user_id = trans.user.id + job, odict = self.execute_via_app( tool, trans.app, trans.get_galaxy_session().id, + trans.history.id, user_id, incoming, set_output_hid, + overwrite, history, job_params ) + # FIXME: can remove this when logging in execute_via_app method. + trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id ) + return job, odict
- def execute( self, tool, trans, incoming = {}, set_output_hid = False, overwrite = True, history=None, job_params=None ): + def execute_via_app( self, tool, app, session_id, history_id, user_id = None, + incoming = {}, set_output_hid = False, overwrite = True, + history=None, job_params=None ): + """ + Execute using application. + """ for name, value in incoming.iteritems(): - if isinstance( value, trans.app.model.HistoryDatasetAssociation ): + if isinstance( value, app.model.HistoryDatasetAssociation ): dataset = value dataset_name = name type = 'hda' break - elif isinstance( value, trans.app.model.LibraryDatasetDatasetAssociation ): + elif isinstance( value, app.model.LibraryDatasetDatasetAssociation ): dataset = value dataset_name = name type = 'ldda' break else: raise Exception( 'The dataset to set metadata on could not be determined.' ) + + sa_session = app.model.context
# Create the job object - job = trans.app.model.Job() - job.session_id = trans.get_galaxy_session().id - job.history_id = trans.history.id + job = app.model.Job() + job.session_id = session_id + job.history_id = history_id job.tool_id = tool.id - if trans.user: - job.user_id = trans.user.id + if user_id: + job.user_id = user_id if job_params: job.params = to_json_string( job_params ) start_job_state = job.state #should be job.states.NEW @@ -40,26 +61,26 @@ except: job.tool_version = "1.0.1" job.state = job.states.WAITING #we need to set job state to something other than NEW, or else when tracking jobs in db it will be picked up before we have added input / output parameters - trans.sa_session.add( job ) - trans.sa_session.flush() #ensure job.id is available + sa_session.add( job ) + sa_session.flush() #ensure job.id is available
#add parameters to job_parameter table # Store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)? incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state external_metadata_wrapper = JobExternalOutputMetadataWrapper( job ) cmd_line = external_metadata_wrapper.setup_external_metadata( dataset, - trans.sa_session, + sa_session, exec_dir = None, - tmp_dir = trans.app.config.new_file_path, - dataset_files_path = trans.app.model.Dataset.file_path, + tmp_dir = app.config.new_file_path, + dataset_files_path = app.model.Dataset.file_path, output_fnames = None, - config_root = trans.app.config.root, - config_file = trans.app.config.config_file, - datatypes_config = trans.app.datatypes_registry.integrated_datatypes_configs, + config_root = app.config.root, + config_file = app.config.config_file, + datatypes_config = app.datatypes_registry.integrated_datatypes_configs, job_metadata = None, kwds = { 'overwrite' : overwrite } ) incoming[ '__SET_EXTERNAL_METADATA_COMMAND_LINE__' ] = cmd_line - for name, value in tool.params_to_strings( incoming, trans.app ).iteritems(): + for name, value in tool.params_to_strings( incoming, app ).iteritems(): job.add_parameter( name, value ) #add the dataset to job_to_input_dataset table if type == 'hda': @@ -70,11 +91,12 @@ # i.e. if state was set to 'running' the set metadata job would never run, as it would wait for input (the dataset to set metadata on) to be in a ready state dataset._state = dataset.states.SETTING_METADATA job.state = start_job_state #job inputs have been configured, restore initial job state - trans.sa_session.flush() + sa_session.flush()
# Queue the job for execution - trans.app.job_queue.put( job.id, tool ) - trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id ) + app.job_queue.put( job.id, tool ) + # FIXME: need to add event logging to app and log events there rather than trans. + #trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
#clear e.g. converted files dataset.datatype.before_setting_metadata( dataset )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving this email because you have the commit-notification service enabled for this repository.
galaxy-commits@lists.galaxyproject.org