5 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/bb5771403508/
Changeset:   bb5771403508
User:        dannon
Date:        2013-05-05 17:54:28
Summary:     Workflow controller: remove unused variables.
Affected #:  1 file

diff -r 02822f28dc93348264a18f9f30abfedb53b80728 -r bb5771403508fbd75829960f089e6f35267716cf lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -134,7 +134,7 @@
     @web.require_login( "use Galaxy workflows" )
     def list_grid( self, trans, **kwargs ):
         """ List user's stored workflows. """
-        status = message = None
+        # status = message = None
         if 'operation' in kwargs:
             operation = kwargs['operation'].lower()
             if operation == "rename":
@@ -462,7 +462,7 @@
             return trans.show_error_message( "The specified workflow does not exist." )

         # Rate workflow.
-        stored_rating = self.rate_item( trans.sa_session, trans.get_user(), stored, rating )
+        self.rate_item( trans.sa_session, trans.get_user(), stored, rating )

         return self.get_ave_item_rating_data( trans.sa_session, stored )
@@ -507,8 +507,6 @@
     @web.require_login( "use Galaxy workflows" )
     def gen_image( self, trans, id ):
         stored = self.get_stored_workflow( trans, id, check_ownership=True )
-        session = trans.sa_session
-
         trans.response.set_content_type("image/svg+xml")
         return self._workflow_to_svg_canvas( trans, stored ).standalone_xml()
@@ -942,7 +940,6 @@
         """
         # Load encoded workflow from database
-        user = trans.get_user()
         id = trans.security.decode_id( id )
         trans.workflow_building_mode = True
         stored = trans.sa_session.query( model.StoredWorkflow ).get( id )
@@ -1034,6 +1031,7 @@
         trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy-Workflow-%s.ga"' % ( sname )
         trans.response.set_content_type( 'application/galaxy-archive' )
         return stored_dict
+
     @web.expose
     def import_workflow( self, trans, cntrller='workflow', **kwd ):
         """
@@ -1097,7 +1095,7 @@
         else:
             uploaded_file = file_data.file
             uploaded_file_name = uploaded_file.name
-            uploaded_file_filename = file_data.filename
+            # uploaded_file_filename = file_data.filename
         if os.path.getsize( os.path.abspath( uploaded_file_name ) ) > 0:
             # We're reading the file as text so we can re-use the existing code below.
             # This may not be ideal...
@@ -1198,6 +1196,7 @@
                                    status=status,
                                    use_panels=True,
                                    myexperiment_target_url=myexperiment_target_url )
+
     @web.json
     def get_datatypes( self, trans ):
         ext_to_class_name = dict()
@@ -1335,7 +1334,8 @@
         # Build the state for each step
         errors = {}
         has_upgrade_messages = False
-        has_errors = False
+        # has_errors is never used
+        # has_errors = False
         saved_history = None
         if history_id is not None:
             saved_history = trans.get_history();
@@ -1498,7 +1498,8 @@
                 step.state = step.module.state
                 # Error dict
                 if step.tool_errors:
-                    has_errors = True
+                    # has_errors is never used.
+                    # has_errors = True
                     errors[step.id] = step.tool_errors
             else:
                 ## Non-tool specific stuff?
@@ -1553,7 +1554,8 @@
         # Build the state for each step
         errors = {}
         has_upgrade_messages = False
-        has_errors = False
+        # has_errors is never used
+        # has_errors = False
         if kwargs:
             # If kwargs were provided, the states for each step should have
             # been POSTed
@@ -1591,7 +1593,8 @@
                 step.state = step.module.state
                 # Error dict
                 if step.tool_errors:
-                    has_errors = True
+                    # has_errors is never used
+                    # has_errors = True
                     errors[step.id] = step.tool_errors
             else:
                 ## Non-tool specific stuff?
@@ -1825,7 +1828,7 @@
             # Unpack and add post-job actions.
             post_job_actions = step_dict.get( 'post_job_actions', {} )
             for name, pja_dict in post_job_actions.items():
-                pja = model.PostJobAction( pja_dict[ 'action_type' ],
+                model.PostJobAction( pja_dict[ 'action_type' ],
                                            step, pja_dict[ 'output_name' ],
                                            pja_dict[ 'action_arguments' ] )
         # Second pass to deal with connections between steps
@@ -2064,7 +2067,6 @@
         but track the associations.
         """
         associations = []
-        names_to_clean = []
         # dbkey is pushed in by the framework
         if 'dbkey' in values:
             del values['dbkey']
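A note on the cleanups above: assigned-but-never-read locals like status, stored_rating, and names_to_clean are exactly what a static checker reports. A minimal sketch using pyflakes (an assumption for illustration; the changeset does not say how the dead code was found):

    # Report pyflakes warnings for the workflow controller, e.g.
    # "local variable 'status' is assigned to but never used".
    # Requires pyflakes (pip install pyflakes); not part of Galaxy itself.
    import sys
    from pyflakes.api import checkPath
    from pyflakes.reporter import Reporter

    reporter = Reporter(sys.stdout, sys.stderr)
    warning_count = checkPath('lib/galaxy/webapps/galaxy/controllers/workflow.py', reporter)
    sys.exit(1 if warning_count else 0)
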
https://bitbucket.org/galaxy/galaxy-central/commits/e3b8df9f9ecd/
Changeset:   e3b8df9f9ecd
User:        dannon
Date:        2013-05-05 18:12:19
Summary:     Workflow controller - trim trailing whitespace, add spacing between classes/methods.
Affected #:  1 file

diff -r bb5771403508fbd75829960f089e6f35267716cf -r e3b8df9f9ecd003a9b2c0441987ce1b79b6179a4 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -37,8 +37,8 @@
 from galaxy.workflow.modules import module_factory, ToolModule


+class StoredWorkflowListGrid( grids.Grid ):

-class StoredWorkflowListGrid( grids.Grid ):
     class StepsColumn( grids.GridColumn ):
         def get_value(self, trans, grid, workflow):
             return len( workflow.latest_workflow.steps )
@@ -73,6 +73,7 @@
     def apply_query_filter( self, trans, query, **kwargs ):
         return query.filter_by( user=trans.user, deleted=False )

+
 class StoredWorkflowAllPublishedGrid( grids.Grid ):
     title = "Published Workflows"
     model_class = model.StoredWorkflow
@@ -94,13 +95,16 @@
                      key="free-text-search", visible=False, filterable="standard" )
     )
     operations = []
+
     def build_initial_query( self, trans, **kwargs ):
         # Join so that searching stored_workflow.user makes sense.
         return trans.sa_session.query( self.model_class ).join( model.User.table )
+
     def apply_query_filter( self, trans, query, **kwargs ):
         # A public workflow is published, has a slug, and is not deleted.
         return query.filter( self.model_class.published==True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )

+
 # Simple SGML parser to get all content in a single tag.
 class SingleTagContentsParser( sgmllib.SGMLParser ):
@@ -119,6 +123,7 @@
         if self.cur_tag == self.target_tag:
             self.tag_content += text

+
 class WorkflowController( BaseUIController, SharableMixin, UsesStoredWorkflowMixin, UsesAnnotations, UsesItemRatings ):
     stored_list_grid = StoredWorkflowListGrid()
     published_list_grid = StoredWorkflowAllPublishedGrid()
@@ -847,7 +852,7 @@
             for input_name, conns in step.temp_input_connections.iteritems():
                 if conns:
                     conn_dicts = conns if isinstance(conns,list) else [conns]
-                    for conn_dict in conn_dicts: 
+                    for conn_dict in conn_dicts:
                         conn = model.WorkflowStepConnection()
                         conn.input_step = step
                         conn.input_name = input_name
@@ -883,6 +888,7 @@
         """
         stored = self.get_stored_workflow( trans, id, check_ownership=False, check_accessible=True )
         return trans.fill_template( "/workflow/export.mako", item=stored, use_panels=True )
+
     @web.expose
     @web.require_login( "use workflows" )
     def import_from_myexp( self, trans, myexp_id, **kwd ):
@@ -932,6 +938,7 @@
             return trans.show_warn_message( "Imported, but this workflow contains cycles. %s" % workflow_list_str )
         else:
             return trans.show_message( "Workflow '%s' imported. %s" % (workflow.name, workflow_list_str) )
+
     @web.expose
     @web.require_login( "use workflows" )
     def export_to_myexp( self, trans, id, myexp_username, myexp_password ):
@@ -1767,7 +1774,7 @@
             # tools. This should be removed at some point. Mirrored
             # hack in _workflow_from_dict should never be removed so
             # existing workflow exports continue to function.
-            for input_name, input_conn in dict(input_conn_dict).iteritems(): 
+            for input_name, input_conn in dict(input_conn_dict).iteritems():
                 if len(input_conn) == 1:
                     input_conn_dict[input_name] = input_conn[0]
             step_dict['input_connections'] = input_conn_dict
@@ -1776,6 +1783,7 @@
         # Add to return value
         data['steps'][step.order_index] = step_dict
         return data
+
     def _workflow_from_dict( self, trans, data, source=None, add_to_menu=False ):
         """
         Creates a workflow from a dict. Created workflow is stored in the database and returned.
@@ -1869,16 +1877,13 @@
         return stored, missing_tool_tups

     def _workflow_to_svg_canvas( self, trans, stored ):
-
         workflow = stored.latest_workflow
         data = []
-
         canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
         text = svgfig.SVG("g")
         connectors = svgfig.SVG("g")
         boxes = svgfig.SVG("g")
         svgfig.Text.defaults["font-size"] = "10px"
-
         in_pos = {}
         out_pos = {}
         margin = 5
@@ -2021,6 +2026,7 @@
         except CycleError:
             return None

+
 class FakeJob( object ):
     """
     Fake job object for datasets that have no creating_job_associations,
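Trailing whitespace of the kind trimmed above is easy to find mechanically. A small self-contained sketch (illustrative only; the commit does not say what tooling, if any, was used):

    # Print file:line for every line that ends in spaces or tabs.
    import sys

    def find_trailing_whitespace(path):
        with open(path) as handle:
            for lineno, line in enumerate(handle, start=1):
                body = line.rstrip('\n')
                if body != body.rstrip():
                    print('%s:%d: trailing whitespace' % (path, lineno))

    if __name__ == '__main__':
        for filename in sys.argv[1:]:
            find_trailing_whitespace(filename)
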
https://bitbucket.org/galaxy/galaxy-central/commits/16fbd8c387a0/
Changeset:   16fbd8c387a0
User:        dannon
Date:        2013-05-05 19:57:21
Summary:     Add docstring comment for workflow controller 'imp'
Affected #:  1 file

diff -r e3b8df9f9ecd003a9b2c0441987ce1b79b6179a4 -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -356,6 +356,7 @@
     @web.expose
     @web.require_login( "to import a workflow", use_panels=True )
     def imp( self, trans, id, **kwargs ):
+        """Imports a workflow shared by other users."""
         # Set referer message.
         referer = trans.request.referer
         if referer is not "":

https://bitbucket.org/galaxy/galaxy-central/commits/c01b7cf7257a/
Changeset:   c01b7cf7257a
User:        dannon
Date:        2013-05-06 18:50:06
Summary:     Metadata setting: Always set externally.
Affected #:  16 files

diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -86,7 +86,6 @@
         self.galaxy_data_manager_data_path = kwargs.get( 'galaxy_data_manager_data_path', self.tool_data_path )
         self.tool_secret = kwargs.get( "tool_secret", "" )
         self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
-        self.set_metadata_externally = string_as_bool( kwargs.get( "set_metadata_externally", "False" ) )
         self.retry_metadata_internally = string_as_bool( kwargs.get( "retry_metadata_internally", "True" ) )
         self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
         self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
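For context, boolean options like retry_metadata_internally above are parsed with galaxy.util.string_as_bool. The sketch below approximates its behavior (case-insensitive 'true'/'yes'/'on' count as true); consult galaxy.util for the authoritative version:

    # Approximation of galaxy.util.string_as_bool, shown to clarify how the
    # remaining config flags round-trip from the ini file.
    def string_as_bool(value):
        return str(value).lower() in ('true', 'yes', 'on')

    assert string_as_bool('True')
    assert string_as_bool('on')
    assert not string_as_bool('False')
    assert not string_as_bool(None)
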
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -28,8 +28,6 @@

 STATEMENTS = "__galaxy_statements__" #this is the name of the property in a Datatype class where new metadata spec element Statements are stored

-DATABASE_CONNECTION_AVAILABLE = False #When False, certain metadata parameter types (see FileParameter) will behave differently
-
 class Statement( object ):
     """
     This class inserts its target into a list in the surrounding
@@ -442,19 +440,9 @@
             return None
         if isinstance( value, galaxy.model.MetadataFile ) or isinstance( value, MetadataTempFile ):
             return value
-        if DATABASE_CONNECTION_AVAILABLE:
-            try:
-                # FIXME: this query requires a monkey patch in assignmapper.py since
-                # MetadataParameters do not have a handle to the sqlalchemy session
-                # DBTODO this is problematic now.
-                return galaxy.model.MetadataFile.get( value )
-            except:
-                #value was not a valid id
-                return None
-        else:
-            mf = galaxy.model.MetadataFile()
-            mf.id = value #we assume this is a valid id, since we cannot check it
-            return mf
+        mf = galaxy.model.MetadataFile()
+        mf.id = value #we assume this is a valid id, since we cannot check it
+        return mf

     def make_copy( self, value, target_context, source_context ):
         value = self.wrap( value )
@@ -499,13 +487,13 @@
         return value

     def new_file( self, dataset = None, **kwds ):
-        if DATABASE_CONNECTION_AVAILABLE:
+        if object_session( dataset ):
             mf = galaxy.model.MetadataFile( name = self.spec.name, dataset = dataset, **kwds )
             object_session( dataset ).add( mf )
             object_session( dataset ).flush() #flush to assign id
             return mf
         else:
-            #we need to make a tmp file that is accessable to the head node, 
+            #we need to make a tmp file that is accessable to the head node,
             #we will be copying its contents into the MetadataFile objects filename after restoring from JSON
             #we do not include 'dataset' in the kwds passed, as from_JSON_value() will handle this for us
             return MetadataTempFile( **kwds )
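The new_file() change above replaces the module-level flag with a per-object check: sqlalchemy.orm.object_session() returns the Session an instance is attached to, or None for a transient object (as in scripts/set_metadata.py, where no database connection exists). A self-contained toy model demonstrating the check (not Galaxy code):

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, object_session

    Base = declarative_base()

    class Dataset(Base):
        __tablename__ = 'dataset'
        id = Column(Integer, primary_key=True)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    dataset = Dataset()
    # Transient object: new_file() would fall back to MetadataTempFile.
    assert object_session(dataset) is None

    session = Session(engine)
    session.add(dataset)
    # Session-bound object: new_file() would create a real MetadataFile.
    assert object_session(dataset) is session
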
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -397,7 +397,7 @@
         except KeyError:
             builder = data.Text()
         return builder

-    def change_datatype(self, data, ext, set_meta = True ):
+    def change_datatype(self, data, ext):
         data.extension = ext
         # call init_meta and copy metadata from itself.  The datatype
         # being converted *to* will handle any metadata copying and
@@ -405,10 +405,6 @@
         if data.has_data():
             data.set_size()
             data.init_meta( copy_from=data )
-            if set_meta:
-                #metadata is being set internally
-                data.set_meta( overwrite = False )
-                data.set_peek()
         return data

     def old_change_datatype(self, data, ext):
         """Creates and returns a new datatype based on an existing data and an extension"""

diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -953,9 +953,7 @@
                     #either use the metadata from originating output dataset, or call set_meta on the copies
                     #it would be quicker to just copy the metadata from the originating output dataset,
                    #but somewhat trickier (need to recurse up the copied_from tree), for now we'll call set_meta()
-                    if not self.app.config.set_metadata_externally or \
-                       ( not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) \
-                         and self.app.config.retry_metadata_internally ):
+                    if ( not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and self.app.config.retry_metadata_internally ):
                         dataset.datatype.set_meta( dataset, overwrite = False ) #call datatype.set_meta directly for the initial set_meta call during dataset creation
                     elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and job.states.ERROR != final_job_state:
                         dataset._state = model.Dataset.states.FAILED_METADATA
@@ -1226,8 +1224,7 @@
         try:
             for fname in self.extra_filenames:
                 os.remove( fname )
-            if self.app.config.set_metadata_externally:
-                self.external_output_metadata.cleanup_external_metadata( self.sa_session )
+            self.external_output_metadata.cleanup_external_metadata( self.sa_session )
             galaxy.tools.imp_exp.JobExportHistoryArchiveWrapper( self.job_id ).cleanup_after_job( self.sa_session )
             galaxy.tools.imp_exp.JobImportHistoryArchiveWrapper( self.app, self.job_id ).cleanup_after_job()
             galaxy.tools.genome_index.GenomeIndexToolWrapper( self.job_id ).postprocessing( self.sa_session, self.app )
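The JobWrapper.finish() rewrite above is behavior-preserving given the new invariant that metadata is always set externally. A toy equivalence check (function names are illustrative, not Galaxy's):

    # With set_externally pinned to True, the old compound condition reduces
    # to the plain retry check used by the new code.
    def old_condition(set_externally, external_ok, retry_internally):
        return (not set_externally) or ((not external_ok) and retry_internally)

    def new_condition(external_ok, retry_internally):
        return (not external_ok) and retry_internally

    for external_ok in (False, True):
        for retry in (False, True):
            assert old_condition(True, external_ok, retry) == new_condition(external_ok, retry)
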
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/deferred/data_transfer.py
--- a/lib/galaxy/jobs/deferred/data_transfer.py
+++ b/lib/galaxy/jobs/deferred/data_transfer.py
@@ -136,15 +136,11 @@
                 if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
                     if spec.get( 'default' ):
                         setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
-            if self.app.config.set_metadata_externally:
-                self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( self.app.datatypes_registry.set_external_metadata_tool,
-                                                                                            FakeTrans( self.app,
-                                                                                                       history=sample.history,
-                                                                                                       user=sample.request.user ),
-                                                                                            incoming = { 'input1':ldda } )
-            else:
-                ldda.set_meta()
-                ldda.datatype.after_setting_metadata( ldda )
+            self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( self.app.datatypes_registry.set_external_metadata_tool,
+                                                                                        FakeTrans( self.app,
+                                                                                                   history=sample.history,
+                                                                                                   user=sample.request.user ),
+                                                                                        incoming = { 'input1':ldda } )
             ldda.state = ldda.states.OK
             # TODO: not sure if this flush is necessary
             self.sa_session.add( ldda )

diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -174,7 +174,7 @@
         # Append metadata setting commands, we don't want to overwrite metadata
         # that was copied over in init_meta(), as per established behavior
-        if include_metadata and self.app.config.set_metadata_externally:
+        if include_metadata:
             commands += "; cd %s; " % os.path.abspath( os.getcwd() )
             commands += job_wrapper.setup_external_metadata(
                             exec_dir = os.path.abspath( os.getcwd() ),

diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -83,14 +83,14 @@
             stdout_file.close()
             stderr_file.close()
             log.debug('execution finished: %s' % command_line)
-        except Exception, exc:
+        except Exception:
             job_wrapper.fail( "failure running job", exception=True )
             log.exception("failure running job %d" % job_wrapper.job_id)
             return
         #run the metadata setting script here
         #this is terminate-able when output dataset/job is deleted
         #so that long running set_meta()s can be canceled without having to reboot the server
-        if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally and job_wrapper.output_paths:
+        if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and job_wrapper.output_paths:
             external_metadata_script = job_wrapper.setup_external_metadata( output_fnames = job_wrapper.get_output_fnames(),
                                                                             set_extension = True,
                                                                             tmp_dir = job_wrapper.working_directory,

diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -143,7 +143,7 @@
         #run the metadata setting script here
         #this is terminate-able when output dataset/job is deleted
         #so that long running set_meta()s can be canceled without having to reboot the server
-        if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally and job_wrapper.output_paths:
+        if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and job_wrapper.output_paths:
             external_metadata_script = job_wrapper.setup_external_metadata( output_fnames = job_wrapper.get_output_fnames(),
                                                                             set_extension = True,
                                                                             kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior

diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/runners/tasks.py
--- a/lib/galaxy/jobs/runners/tasks.py
+++ b/lib/galaxy/jobs/runners/tasks.py
@@ -123,7 +123,7 @@
         #run the metadata setting script here
         #this is terminate-able when output dataset/job is deleted
         #so that long running set_meta()s can be canceled without having to reboot the server
-        if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally and job_wrapper.output_paths:
+        if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and job_wrapper.output_paths:
             external_metadata_script = job_wrapper.setup_external_metadata( output_fnames = job_wrapper.get_output_fnames(),
                                                                             set_extension = True,
                                                                             kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior
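After this changeset the local, lwr, and tasks runners all gate the external metadata script on the same two conditions; only the set_metadata_externally test is gone. A condensed sketch of the shared shape (names hypothetical, not the actual runner API):

    ERROR, DELETED = 'error', 'deleted'

    def maybe_run_external_metadata(job_state, output_paths, run_script):
        # Skip failed/deleted jobs and jobs with no outputs; otherwise the
        # external script is now the only way metadata gets set.
        if job_state not in (ERROR, DELETED) and output_paths:
            run_script()

    maybe_run_external_metadata('ok', ['/tmp/out1.dat'], lambda: None)
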
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/tools/imp_exp/__init__.py
--- a/lib/galaxy/tools/imp_exp/__init__.py
+++ b/lib/galaxy/tools/imp_exp/__init__.py
@@ -189,15 +189,10 @@

                 # Although metadata is set above, need to set metadata to recover BAI for BAMs.
                 if hda.extension == 'bam':
-                    if self.app.config.set_metadata_externally:
-                        self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute_via_app(
-                            self.app.datatypes_registry.set_external_metadata_tool, self.app, jiha.job.session_id,
-                            new_history.id, jiha.job.user, incoming={ 'input1': hda }, overwrite=False
-                        )
-                    else:
-                        message = 'Attributes updated'
-                        hda.set_meta()
-                        hda.datatype.after_setting_metadata( hda )
+                    self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute_via_app(
+                        self.app.datatypes_registry.set_external_metadata_tool, self.app, jiha.job.session_id,
+                        new_history.id, jiha.job.user, incoming={ 'input1': hda }, overwrite=False
+                    )

             #
             # Create jobs.

diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -365,15 +365,9 @@

         # Set metadata.
         # TODO: set meta internally if dataset is small enough?
-        if trans.app.config.set_metadata_externally:
-            trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool,
-                                                                                         trans, incoming = { 'input1':new_dataset },
-                                                                                         overwrite=False, job_params={ "source" : "trackster" } )
-        else:
-            message = 'Attributes updated'
-            new_dataset.set_meta()
-            new_dataset.datatype.after_setting_metadata( new_dataset )
-
+        trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool,
+                                                                                     trans, incoming = { 'input1':new_dataset },
+                                                                                     overwrite=False, job_params={ "source" : "trackster" } )
         # Add HDA subset association.
         subset_association = trans.app.model.HistoryDatasetAssociationSubset( hda=input_dataset, subset=new_dataset, location=regions_str )
         trans.sa_session.add( subset_association )
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -379,10 +379,9 @@
                     message = "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them."
                     error = True
                 else:
-                    trans.app.datatypes_registry.change_datatype( data, params.datatype, set_meta = not trans.app.config.set_metadata_externally )
+                    trans.app.datatypes_registry.change_datatype( data, params.datatype )
                     trans.sa_session.flush()
-                    if trans.app.config.set_metadata_externally:
-                        trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior
+                    trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior
                     message = "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype )
                     refresh_frames=['history']
             else:
@@ -436,13 +435,8 @@
                     if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
                         if spec.get( 'default' ):
                             setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
-                if trans.app.config.set_metadata_externally:
-                    message = 'Attributes have been queued to be updated'
-                    trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
-                else:
-                    message = 'Attributes updated'
-                    data.set_meta()
-                    data.datatype.after_setting_metadata( data )
+                message = 'Attributes have been queued to be updated'
+                trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
                 trans.sa_session.flush()
                 refresh_frames=['history']
             elif params.convert_data:

diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/webapps/galaxy/controllers/user.py
--- a/lib/galaxy/webapps/galaxy/controllers/user.py
+++ b/lib/galaxy/webapps/galaxy/controllers/user.py
@@ -1339,9 +1339,6 @@
                 continue

             # Get chrom count file.
-            # NOTE: this conversion doesn't work well with set_metadata_externally=False
-            # because the conversion occurs before metadata can be set; the
-            # dataset is marked as deleted and a subsequent conversion is run.
             chrom_count_dataset = len_dataset.get_converted_dataset( trans, "linecount" )
             if not chrom_count_dataset or chrom_count_dataset.state != trans.app.model.Job.states.OK:
                 # No valid linecount dataset.
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a scripts/extract_dataset_part.py
--- a/scripts/extract_dataset_part.py
+++ b/scripts/extract_dataset_part.py
@@ -24,7 +24,6 @@
 # This junk is here to prevent loading errors
 import galaxy.model.mapping #need to load this before we unpickle, in order to setup properties assigned by the mappers
 galaxy.model.Job() #this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here
-galaxy.datatypes.metadata.DATABASE_CONNECTION_AVAILABLE = False #Let metadata know that there is no database connection, and to just assume object ids are valid

 def __main__():
     """

diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -185,7 +185,6 @@
                        cluster_files_directory = cluster_files_directory,
                        job_working_directory = job_working_directory,
                        outputs_to_working_directory = 'True',
-                       set_metadata_externally = 'True',
                        static_enabled = 'False',
                        debug = 'False',
                        track_jobs_in_database = 'True',

diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a scripts/set_metadata.py
--- a/scripts/set_metadata.py
+++ b/scripts/set_metadata.py
@@ -28,7 +28,6 @@
 import simplejson
 import galaxy.model.mapping #need to load this before we unpickle, in order to setup properties assigned by the mappers
 galaxy.model.Job() #this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here
-galaxy.datatypes.metadata.DATABASE_CONNECTION_AVAILABLE = False #Let metadata know that there is no database connection, and to just assume object ids are valid
 from galaxy.util import stringify_dictionary_keys
 from galaxy.util.json import from_json_string
 from sqlalchemy.orm import clear_mappers

https://bitbucket.org/galaxy/galaxy-central/commits/b72a06cf0247/
Changeset:   b72a06cf0247
User:        dannon
Date:        2013-05-06 18:53:32
Summary:     Merge
Affected #:  4 files

Diff not available.

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--
This is a commit notification from bitbucket.org. You are receiving this
because you have the service enabled, addressing the recipient of this email.