commit/galaxy-central: 6 new changesets

6 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/11597a078e65/
Changeset:   11597a078e65
User:        nsoranzo
Date:        2014-01-22 14:41:09
Summary:     Fix typo in method name.
Affected #:  1 file

diff -r 6c3c24b83d97adc4b946f01321c540fc48e6bb21 -r 11597a078e6505087064315567ad3357c2d40c56 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -586,7 +586,7 @@
         return data

     @expose_api
-    def import_shared_worflow(self, trans, payload, **kwd):
+    def import_shared_workflow(self, trans, payload, **kwd):
         """
         POST /api/workflows/import
         Import a workflow shared by other users.

https://bitbucket.org/galaxy/galaxy-central/commits/c7e4de2d6ed8/
Changeset:   c7e4de2d6ed8
User:        nsoranzo
Date:        2014-01-22 15:05:51
Summary:     Remove unused imports.
Affected #:  1 file

diff -r 11597a078e6505087064315567ad3357c2d40c56 -r c7e4de2d6ed888204c78d1f950d63a75c13a3386 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -5,16 +5,12 @@
 import operator
 import os
 import re
-import urllib
 from gettext import gettext

 import pkg_resources
 pkg_resources.require("SQLAlchemy >= 0.4")
 from sqlalchemy import func, and_, select

-pkg_resources.require( "Routes" )
-import routes
-
 from paste.httpexceptions import HTTPBadRequest, HTTPInternalServerError
 from paste.httpexceptions import HTTPNotImplemented, HTTPRequestRangeNotSatisfiable
 from galaxy.exceptions import ItemAccessibilityException, ItemDeletionException, ItemOwnershipException
@@ -40,7 +36,6 @@

 from galaxy.model import ExtendedMetadata, ExtendedMetadataIndex, LibraryDatasetDatasetAssociation, HistoryDatasetAssociation

-from galaxy.datatypes.display_applications import util as da_util
 from galaxy.datatypes.metadata import FileParameter
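
Dead imports like urllib, routes and da_util above are easy to catch mechanically; pyflakes is the usual tool for this. As a minimal standalone sketch (the ast walk below is illustrative, not Galaxy code), one can list the names a module's import statements bind and then look for names that never appear again in the module body:

    import ast

    def imported_names(path):
        """Return the names bound by import statements in a Python source file."""
        with open(path) as handle:
            tree = ast.parse(handle.read(), filename=path)
        names = []
        for node in ast.walk(tree):
            if isinstance(node, ast.Import):
                names.extend(alias.asname or alias.name.split('.')[0] for alias in node.names)
            elif isinstance(node, ast.ImportFrom):
                names.extend(alias.asname or alias.name for alias in node.names)
        return names

    # Any name reported here but never referenced in the module body (as
    # 'urllib' and 'routes' were in controller.py) is a removal candidate.
    print(imported_names('lib/galaxy/web/base/controller.py'))
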
https://bitbucket.org/galaxy/galaxy-central/commits/1f91923153d2/
Changeset:   1f91923153d2
User:        nsoranzo
Date:        2014-01-22 14:46:36
Summary:     Move duplicated method _workflow_from_dict to UsesStoredWorkflowMixin.
Affected #:  3 files

diff -r c7e4de2d6ed888204c78d1f950d63a75c13a3386 -r 1f91923153d2d64a216ddbeb06810df3ac12be06 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -37,6 +37,7 @@
 from galaxy.model import ExtendedMetadata, ExtendedMetadataIndex, LibraryDatasetDatasetAssociation, HistoryDatasetAssociation
 from galaxy.datatypes.metadata import FileParameter
+from galaxy.util.json import to_json_string

 log = logging.getLogger( __name__ )
@@ -1554,6 +1555,102 @@
         session.flush()
         return imported_stored

+    def _workflow_from_dict( self, trans, data, source=None, add_to_menu=False ):
+        """
+        Creates a workflow from a dict. Created workflow is stored in the database and returned.
+        """
+        from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
+
+        # Put parameters in workflow mode
+        trans.workflow_building_mode = True
+        # Create new workflow from incoming dict
+        workflow = model.Workflow()
+        # If there's a source, put it in the workflow name.
+        if source:
+            name = "%s (imported from %s)" % ( data['name'], source )
+        else:
+            name = data['name']
+        workflow.name = name
+        # Assume no errors until we find a step that has some
+        workflow.has_errors = False
+        # Create each step
+        steps = []
+        # The editor will provide ids for each step that we don't need to save,
+        # but do need to use to make connections
+        steps_by_external_id = {}
+        # Keep track of tools required by the workflow that are not available in
+        # the local Galaxy instance. Each tuple in the list of missing_tool_tups
+        # will be ( tool_id, tool_name, tool_version ).
+        missing_tool_tups = []
+        # First pass to build step objects and populate basic values
+        for step_dict in data[ 'steps' ].itervalues():
+            # Create the model class for the step
+            step = model.WorkflowStep()
+            steps.append( step )
+            steps_by_external_id[ step_dict['id' ] ] = step
+            # FIXME: Position should be handled inside module
+            step.position = step_dict['position']
+            module = module_factory.from_dict( trans, step_dict, secure=False )
+            module.save_to_step( step )
+            if module.type == 'tool' and module.tool is None:
+                # A required tool is not available in the local Galaxy instance.
+                missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
+                if missing_tool_tup not in missing_tool_tups:
+                    missing_tool_tups.append( missing_tool_tup )
+                # Save the entire step_dict in the unused config field, be parsed later
+                # when we do have the tool
+                step.config = to_json_string(step_dict)
+            if step.tool_errors:
+                workflow.has_errors = True
+            # Stick this in the step temporarily
+            step.temp_input_connections = step_dict['input_connections']
+            # Save step annotation.
+            annotation = step_dict[ 'annotation' ]
+            if annotation:
+                annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
+                self.add_item_annotation( trans.sa_session, trans.get_user(), step, annotation )
+        # Second pass to deal with connections between steps
+        for step in steps:
+            # Input connections
+            for input_name, conn_list in step.temp_input_connections.iteritems():
+                if not conn_list:
+                    continue
+                if not isinstance(conn_list, list):  # Older style singleton connection
+                    conn_list = [conn_list]
+                for conn_dict in conn_list:
+                    conn = model.WorkflowStepConnection()
+                    conn.input_step = step
+                    conn.input_name = input_name
+                    conn.output_name = conn_dict['output_name']
+                    conn.output_step = steps_by_external_id[ conn_dict['id'] ]
+            del step.temp_input_connections
+
+        # Order the steps if possible
+        attach_ordered_steps( workflow, steps )
+
+        # Connect up
+        stored = model.StoredWorkflow()
+        stored.name = workflow.name
+        workflow.stored_workflow = stored
+        stored.latest_workflow = workflow
+        stored.user = trans.user
+        if data[ 'annotation' ]:
+            self.add_item_annotation( trans.sa_session, stored.user, stored, data[ 'annotation' ] )
+
+        # Persist
+        trans.sa_session.add( stored )
+        trans.sa_session.flush()
+
+        if add_to_menu:
+            if trans.user.stored_workflow_menu_entries == None:
+                trans.user.stored_workflow_menu_entries = []
+            menuEntry = model.StoredWorkflowMenuEntry()
+            menuEntry.stored_workflow = stored
+            trans.user.stored_workflow_menu_entries.append( menuEntry )
+            trans.sa_session.flush()
+
+        return stored, missing_tool_tups
+

 class UsesFormDefinitionsMixin:
     """Mixin for controllers that use Galaxy form objects."""
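
The method just added builds the graph in two passes: it first creates a WorkflowStep per entry and indexes each by the editor-assigned external id, and only then wires WorkflowStepConnections, since a connection may reference a step that appears later in the dict. The same pattern in a standalone sketch (plain dicts in place of the model objects):

    def build_graph(step_dicts):
        """Two-pass construction: create all nodes, then resolve edges by id."""
        steps_by_external_id = {}
        # First pass: create every node so forward references can be resolved.
        for step_dict in step_dicts:
            steps_by_external_id[step_dict['id']] = {'inputs': {}}
        # Second pass: every referenced id now exists in the index.
        for step_dict in step_dicts:
            node = steps_by_external_id[step_dict['id']]
            for input_name, conn in step_dict.get('input_connections', {}).items():
                node['inputs'][input_name] = steps_by_external_id[conn['id']]
        return steps_by_external_id

    # Step 0 consumes the output of step 1, which is defined after it;
    # the two passes make that forward reference safe.
    graph = build_graph([
        {'id': 0, 'input_connections': {'input1': {'id': 1, 'output_name': 'out'}}},
        {'id': 1},
    ])
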
diff -r c7e4de2d6ed888204c78d1f950d63a75c13a3386 -r 1f91923153d2d64a216ddbeb06810df3ac12be06 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -9,14 +9,12 @@
 from galaxy import exceptions
 from galaxy import util
 from galaxy import web
-from galaxy import model
 from galaxy.tools.parameters import visit_input_values, DataToolParameter, RuntimeValue
 from galaxy.web import _future_expose_api as expose_api
 from galaxy.web.base.controller import BaseAPIController, url_for, UsesStoredWorkflowMixin
 from galaxy.workflow.modules import module_factory, ToolModule
 from galaxy.jobs.actions.post import ActionBox

-from ..controllers.workflow import attach_ordered_steps

 log = logging.getLogger(__name__)
@@ -367,89 +365,6 @@
         return item

-    def _workflow_from_dict( self, trans, data, source=None ):
-        """
-        RPARK: copied from galaxy.webapps.galaxy.controllers.workflows.py
-        Creates a workflow from a dict. Created workflow is stored in the database and returned.
-        """
-        # Put parameters in workflow mode
-        trans.workflow_building_mode = True
-        # Create new workflow from incoming dict
-        workflow = model.Workflow()
-        # If there's a source, put it in the workflow name.
-        if source:
-            name = "%s (imported from %s)" % ( data['name'], source )
-        else:
-            name = data['name']
-        workflow.name = name
-        # Assume no errors until we find a step that has some
-        workflow.has_errors = False
-        # Create each step
-        steps = []
-        # The editor will provide ids for each step that we don't need to save,
-        # but do need to use to make connections
-        steps_by_external_id = {}
-        # Keep track of tools required by the workflow that are not available in
-        # the local Galaxy instance. Each tuple in the list of missing_tool_tups
-        # will be ( tool_id, tool_name, tool_version ).
-        missing_tool_tups = []
-        # First pass to build step objects and populate basic values
-        for step_dict in data[ 'steps' ].itervalues():
-            # Create the model class for the step
-            step = model.WorkflowStep()
-            steps.append( step )
-            steps_by_external_id[ step_dict['id' ] ] = step
-            # FIXME: Position should be handled inside module
-            step.position = step_dict['position']
-            module = module_factory.from_dict( trans, step_dict, secure=False )
-            if module.type == 'tool' and module.tool is None:
-                # A required tool is not available in the local Galaxy instance.
-                missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
-                if missing_tool_tup not in missing_tool_tups:
-                    missing_tool_tups.append( missing_tool_tup )
-            module.save_to_step( step )
-            if step.tool_errors:
-                workflow.has_errors = True
-            # Stick this in the step temporarily
-            step.temp_input_connections = step_dict['input_connections']
-            # Save step annotation.
-            #annotation = step_dict[ 'annotation' ]
-            #if annotation:
-            #annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
-            # ------------------------------------------ #
-            # RPARK REMOVING: user annotation b/c of API
-            #self.add_item_annotation( trans.sa_session, trans.get_user(), step, annotation )
-            # ------------------------------------------ #
-            # Unpack and add post-job actions.
-            post_job_actions = step_dict.get( 'post_job_actions', {} )
-            for name, pja_dict in post_job_actions.items():
-                model.PostJobAction( pja_dict[ 'action_type' ],
-                                     step, pja_dict[ 'output_name' ],
-                                     pja_dict[ 'action_arguments' ] )
-        # Second pass to deal with connections between steps
-        for step in steps:
-            # Input connections
-            for input_name, conn_dict in step.temp_input_connections.iteritems():
-                if conn_dict:
-                    conn = model.WorkflowStepConnection()
-                    conn.input_step = step
-                    conn.input_name = input_name
-                    conn.output_name = conn_dict['output_name']
-                    conn.output_step = steps_by_external_id[ conn_dict['id'] ]
-            del step.temp_input_connections
-        # Order the steps if possible
-        attach_ordered_steps( workflow, steps )
-        # Connect up
-        stored = model.StoredWorkflow()
-        stored.name = workflow.name
-        workflow.stored_workflow = stored
-        stored.latest_workflow = workflow
-        stored.user = trans.user
-        # Persist
-        trans.sa_session.add( stored )
-        trans.sa_session.flush()
-        return stored, missing_tool_tups
-
     def _workflow_to_dict( self, trans, stored ):
         """
         RPARK: copied from galaxy.web.controllers.workflows.py

diff -r c7e4de2d6ed888204c78d1f950d63a75c13a3386 -r 1f91923153d2d64a216ddbeb06810df3ac12be06 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -24,7 +24,6 @@
 from galaxy.tools.parameters import RuntimeValue, visit_input_values
 from galaxy.tools.parameters.basic import DataToolParameter, DrillDownSelectToolParameter, SelectToolParameter, UnvalidatedValue
 from galaxy.tools.parameters.grouping import Conditional, Repeat
-from galaxy.util.json import to_json_string
 from galaxy.util.odict import odict
 from galaxy.util.sanitize_html import sanitize_html
 from galaxy.util.topsort import CycleError, topsort, topsort_levels
@@ -1734,100 +1733,6 @@
         data['steps'][step.order_index] = step_dict
         return data

-    def _workflow_from_dict( self, trans, data, source=None, add_to_menu=False ):
-        """
-        Creates a workflow from a dict. Created workflow is stored in the database and returned.
-        """
-        # Put parameters in workflow mode
-        trans.workflow_building_mode = True
-        # Create new workflow from incoming dict
-        workflow = model.Workflow()
-        # If there's a source, put it in the workflow name.
-        if source:
-            name = "%s (imported from %s)" % ( data['name'], source )
-        else:
-            name = data['name']
-        workflow.name = name
-        # Assume no errors until we find a step that has some
-        workflow.has_errors = False
-        # Create each step
-        steps = []
-        # The editor will provide ids for each step that we don't need to save,
-        # but do need to use to make connections
-        steps_by_external_id = {}
-        # Keep track of tools required by the workflow that are not available in
-        # the local Galaxy instance. Each tuple in the list of missing_tool_tups
-        # will be ( tool_id, tool_name, tool_version ).
-        missing_tool_tups = []
-        # First pass to build step objects and populate basic values
-        for key, step_dict in data[ 'steps' ].iteritems():
-            # Create the model class for the step
-            step = model.WorkflowStep()
-            steps.append( step )
-            steps_by_external_id[ step_dict['id' ] ] = step
-            # FIXME: Position should be handled inside module
-            step.position = step_dict['position']
-            module = module_factory.from_dict( trans, step_dict, secure=False )
-            module.save_to_step( step )
-            if module.type == 'tool' and module.tool is None:
-                # A required tool is not available in the local Galaxy instance.
-                missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
-                if missing_tool_tup not in missing_tool_tups:
-                    missing_tool_tups.append( missing_tool_tup )
-                # Save the entire step_dict in the unused config field, be parsed later
-                # when we do have the too when we do have the tool
-                step.config = to_json_string(step_dict)
-            if step.tool_errors:
-                workflow.has_errors = True
-            # Stick this in the step temporarily
-            step.temp_input_connections = step_dict['input_connections']
-            # Save step annotation.
-            annotation = step_dict[ 'annotation' ]
-            if annotation:
-                annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
-                self.add_item_annotation( trans.sa_session, trans.get_user(), step, annotation )
-        # Second pass to deal with connections between steps
-        for step in steps:
-            # Input connections
-            for input_name, conn_list in step.temp_input_connections.iteritems():
-                if not conn_list:
-                    continue
-                if not isinstance(conn_list, list):  # Older style singleton connection
-                    conn_list = [conn_list]
-                for conn_dict in conn_list:
-                    conn = model.WorkflowStepConnection()
-                    conn.input_step = step
-                    conn.input_name = input_name
-                    conn.output_name = conn_dict['output_name']
-                    conn.output_step = steps_by_external_id[ conn_dict['id'] ]
-            del step.temp_input_connections
-
-        # Order the steps if possible
-        attach_ordered_steps( workflow, steps )
-
-        # Connect up
-        stored = model.StoredWorkflow()
-        stored.name = workflow.name
-        workflow.stored_workflow = stored
-        stored.latest_workflow = workflow
-        stored.user = trans.user
-        if data[ 'annotation' ]:
-            self.add_item_annotation( trans.sa_session, stored.user, stored, data[ 'annotation' ] )
-
-        # Persist
-        trans.sa_session.add( stored )
-        trans.sa_session.flush()
-
-        if add_to_menu:
-            if trans.user.stored_workflow_menu_entries == None:
-                trans.user.stored_workflow_menu_entries = []
-            menuEntry = model.StoredWorkflowMenuEntry()
-            menuEntry.stored_workflow = stored
-            trans.user.stored_workflow_menu_entries.append( menuEntry )
-            trans.sa_session.flush()
-
-        return stored, missing_tool_tups
-
     def _workflow_to_svg_canvas( self, trans, stored ):
         workflow = stored.latest_workflow
         data = []
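
With that, both the web controller and the API controller inherit a single _workflow_from_dict from UsesStoredWorkflowMixin instead of maintaining drifting copies (the API copy had already dropped step annotations and grown its own post-job-action handling). Schematically, with controller names simplified rather than the real class hierarchy:

    class UsesStoredWorkflowMixin(object):
        """Shared behaviour for controllers that manipulate stored workflows."""

        def _workflow_from_dict(self, trans, data, source=None, add_to_menu=False):
            # Single canonical implementation (previously two drifting copies).
            raise NotImplementedError("sketch only")

    class WorkflowController(UsesStoredWorkflowMixin):      # web UI controller
        pass

    class WorkflowsAPIController(UsesStoredWorkflowMixin):  # REST API controller
        pass

    # Both controllers now resolve the helper to the same function object:
    assert WorkflowController._workflow_from_dict is WorkflowsAPIController._workflow_from_dict
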
+ """ + workflow = stored.latest_workflow + workflow_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, stored ) + annotation_str = "" + if workflow_annotation: + annotation_str = workflow_annotation.annotation + # Pack workflow data into a dictionary and return + data = {} + data['a_galaxy_workflow'] = 'true' # Placeholder for identifying galaxy workflow + data['format-version'] = "0.1" + data['name'] = workflow.name + data['annotation'] = annotation_str + data['steps'] = {} + # For each step, rebuild the form and encode the state + for step in workflow.steps: + # Load from database representation + module = module_factory.from_workflow_step( trans, step ) + if not module: + return None + # Get user annotation. + step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step ) + annotation_str = "" + if step_annotation: + annotation_str = step_annotation.annotation + # Step info + step_dict = { + 'id': step.order_index, + 'type': module.type, + 'tool_id': module.get_tool_id(), + 'tool_version' : step.tool_version, + 'name': module.get_name(), + 'tool_state': module.get_state( secure=False ), + 'tool_errors': module.get_errors(), + ## 'data_inputs': module.get_data_inputs(), + ## 'data_outputs': module.get_data_outputs(), + 'annotation' : annotation_str + } + # Add post-job actions to step dict. + if module.type == 'tool': + pja_dict = {} + for pja in step.post_job_actions: + pja_dict[pja.action_type+pja.output_name] = dict( action_type = pja.action_type, + output_name = pja.output_name, + action_arguments = pja.action_arguments ) + step_dict[ 'post_job_actions' ] = pja_dict + # Data inputs + step_dict['inputs'] = [] + if module.type == "data_input": + # Get input dataset name; default to 'Input Dataset' + name = module.state.get( 'name', 'Input Dataset') + step_dict['inputs'].append( { "name" : name, "description" : annotation_str } ) + else: + # Step is a tool and may have runtime inputs. + for name, val in module.state.inputs.items(): + input_type = type( val ) + if input_type == RuntimeValue: + step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } ) + elif input_type == dict: + # Input type is described by a dict, e.g. indexed parameters. 
+ for partval in val.values(): + if type( partval ) == RuntimeValue: + step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } ) + # User outputs + step_dict['user_outputs'] = [] + """ + module_outputs = module.get_data_outputs() + step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step ) + for output in step_outputs: + name = output.output_name + annotation = "" + for module_output in module_outputs: + if module_output.get( 'name', None ) == name: + output_type = module_output.get( 'extension', '' ) + break + data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type } + """ + + # All step outputs + step_dict['outputs'] = [] + if type( module ) is ToolModule: + for output in module.get_data_outputs(): + step_dict['outputs'].append( { 'name' : output['name'], 'type' : output['extensions'][0] } ) + # Connections + input_connections = step.input_connections + if step.type is None or step.type == 'tool': + # Determine full (prefixed) names of valid input datasets + data_input_names = {} + def callback( input, value, prefixed_name, prefixed_label ): + if isinstance( input, DataToolParameter ): + data_input_names[ prefixed_name ] = True + + # FIXME: this updates modules silently right now; messages from updates should be provided. + module.check_and_update_state() + visit_input_values( module.tool.inputs, module.state.inputs, callback ) + # Filter + # FIXME: this removes connection without displaying a message currently! + input_connections = [ conn for conn in input_connections if conn.input_name in data_input_names ] + # Encode input connections as dictionary + input_conn_dict = {} + unique_input_names = set( [conn.input_name for conn in input_connections] ) + for input_name in unique_input_names: + input_conn_dict[ input_name ] = \ + [ dict( id=conn.output_step.order_index, output_name=conn.output_name ) for conn in input_connections if conn.input_name == input_name ] + # Preserve backward compatability. Previously Galaxy + # assumed input connections would be dictionaries not + # lists of dictionaries, so replace any singleton list + # with just the dictionary so that workflows exported from + # newer Galaxy instances can be used with older Galaxy + # instances if they do no include multiple input + # tools. This should be removed at some point. Mirrored + # hack in _workflow_from_dict should never be removed so + # existing workflow exports continue to function. 
+ for input_name, input_conn in dict(input_conn_dict).iteritems(): + if len(input_conn) == 1: + input_conn_dict[input_name] = input_conn[0] + step_dict['input_connections'] = input_conn_dict + # Position + step_dict['position'] = step.position + # Add to return value + data['steps'][step.order_index] = step_dict + return data + class UsesFormDefinitionsMixin: """Mixin for controllers that use Galaxy form objects.""" diff -r 1f91923153d2d64a216ddbeb06810df3ac12be06 -r 7ed68c60b4546c90f24f4463b644f6fffe712596 lib/galaxy/webapps/galaxy/api/workflows.py --- a/lib/galaxy/webapps/galaxy/api/workflows.py +++ b/lib/galaxy/webapps/galaxy/api/workflows.py @@ -9,10 +9,10 @@ from galaxy import exceptions from galaxy import util from galaxy import web -from galaxy.tools.parameters import visit_input_values, DataToolParameter, RuntimeValue +from galaxy.tools.parameters import visit_input_values, DataToolParameter from galaxy.web import _future_expose_api as expose_api from galaxy.web.base.controller import BaseAPIController, url_for, UsesStoredWorkflowMixin -from galaxy.workflow.modules import module_factory, ToolModule +from galaxy.workflow.modules import module_factory from galaxy.jobs.actions.post import ActionBox @@ -365,141 +365,6 @@ return item - def _workflow_to_dict( self, trans, stored ): - """ - RPARK: copied from galaxy.web.controllers.workflows.py - Converts a workflow to a dict of attributes suitable for exporting. - """ - workflow = stored.latest_workflow - - ### ----------------------------------- ### - ## RPARK EDIT ## - workflow_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, stored ) - annotation_str = "" - if workflow_annotation: - annotation_str = workflow_annotation.annotation - ### ----------------------------------- ### - - - # Pack workflow data into a dictionary and return - data = {} - data['a_galaxy_workflow'] = 'true' # Placeholder for identifying galaxy workflow - data['format-version'] = "0.1" - data['name'] = workflow.name - ### ----------------------------------- ### - ## RPARK EDIT ## - data['annotation'] = annotation_str - ### ----------------------------------- ### - - data['steps'] = {} - # For each step, rebuild the form and encode the state - for step in workflow.steps: - # Load from database representation - module = module_factory.from_workflow_step( trans, step ) - if not module: - return None - - ### ----------------------------------- ### - ## RPARK EDIT ## - - # TODO: This is duplicated from - # lib/galaxy/webapps/controllres/workflow.py -- refactor and - # eliminate copied code. - - # Get user annotation. - step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step ) - annotation_str = "" - if step_annotation: - annotation_str = step_annotation.annotation - ### ----------------------------------- ### - - # Step info - step_dict = { - 'id': step.order_index, - 'type': module.type, - 'tool_id': module.get_tool_id(), - 'tool_version' : step.tool_version, - 'name': module.get_name(), - 'tool_state': module.get_state( secure=False ), - 'tool_errors': module.get_errors(), - ## 'data_inputs': module.get_data_inputs(), - ## 'data_outputs': module.get_data_outputs(), - - ### ----------------------------------- ### - ## RPARK EDIT ## - 'annotation' : annotation_str - ### ----------------------------------- ### - - } - # Add post-job actions to step dict. 
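
The long comment in the added exporter describes a two-sided compatibility hack: singleton connection lists are collapsed back to a bare dict so older Galaxy instances can read new exports, while _workflow_from_dict above accepts either shape so old exports keep importing. A standalone sketch of that round trip:

    def normalize_connections(conn_value):
        """Import side: accept an old-style single dict or a list of dicts."""
        if not isinstance(conn_value, list):  # older style singleton connection
            conn_value = [conn_value]
        return conn_value

    def compact_connections(conn_list):
        """Export side: collapse a singleton list so older readers still work."""
        return conn_list[0] if len(conn_list) == 1 else conn_list

    old_style = {'id': 2, 'output_name': 'out1'}
    assert normalize_connections(old_style) == [old_style]
    assert compact_connections([old_style]) == old_style
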
diff -r 1f91923153d2d64a216ddbeb06810df3ac12be06 -r 7ed68c60b4546c90f24f4463b644f6fffe712596 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -9,10 +9,10 @@
 from galaxy import exceptions
 from galaxy import util
 from galaxy import web
-from galaxy.tools.parameters import visit_input_values, DataToolParameter, RuntimeValue
+from galaxy.tools.parameters import visit_input_values, DataToolParameter
 from galaxy.web import _future_expose_api as expose_api
 from galaxy.web.base.controller import BaseAPIController, url_for, UsesStoredWorkflowMixin
-from galaxy.workflow.modules import module_factory, ToolModule
+from galaxy.workflow.modules import module_factory
 from galaxy.jobs.actions.post import ActionBox
@@ -365,141 +365,6 @@
         return item

-    def _workflow_to_dict( self, trans, stored ):
-        """
-        RPARK: copied from galaxy.web.controllers.workflows.py
-        Converts a workflow to a dict of attributes suitable for exporting.
-        """
-        workflow = stored.latest_workflow
-
-        ### ----------------------------------- ###
-        ## RPARK EDIT ##
-        workflow_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, stored )
-        annotation_str = ""
-        if workflow_annotation:
-            annotation_str = workflow_annotation.annotation
-        ### ----------------------------------- ###
-
-
-        # Pack workflow data into a dictionary and return
-        data = {}
-        data['a_galaxy_workflow'] = 'true' # Placeholder for identifying galaxy workflow
-        data['format-version'] = "0.1"
-        data['name'] = workflow.name
-        ### ----------------------------------- ###
-        ## RPARK EDIT ##
-        data['annotation'] = annotation_str
-        ### ----------------------------------- ###
-
-        data['steps'] = {}
-        # For each step, rebuild the form and encode the state
-        for step in workflow.steps:
-            # Load from database representation
-            module = module_factory.from_workflow_step( trans, step )
-            if not module:
-                return None
-
-            ### ----------------------------------- ###
-            ## RPARK EDIT ##
-
-            # TODO: This is duplicated from
-            # lib/galaxy/webapps/controllres/workflow.py -- refactor and
-            # eliminate copied code.
-
-            # Get user annotation.
-            step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step )
-            annotation_str = ""
-            if step_annotation:
-                annotation_str = step_annotation.annotation
-            ### ----------------------------------- ###
-
-            # Step info
-            step_dict = {
-                'id': step.order_index,
-                'type': module.type,
-                'tool_id': module.get_tool_id(),
-                'tool_version' : step.tool_version,
-                'name': module.get_name(),
-                'tool_state': module.get_state( secure=False ),
-                'tool_errors': module.get_errors(),
-                ## 'data_inputs': module.get_data_inputs(),
-                ## 'data_outputs': module.get_data_outputs(),
-
-                ### ----------------------------------- ###
-                ## RPARK EDIT ##
-                'annotation' : annotation_str
-                ### ----------------------------------- ###
-
-            }
-            # Add post-job actions to step dict.
-            if module.type == 'tool':
-                pja_dict = {}
-                for pja in step.post_job_actions:
-                    pja_dict[pja.action_type+pja.output_name] = dict( action_type = pja.action_type,
-                                                                      output_name = pja.output_name,
-                                                                      action_arguments = pja.action_arguments )
-                step_dict[ 'post_job_actions' ] = pja_dict
-            # Data inputs
-            step_dict['inputs'] = []
-            if module.type == "data_input":
-                # Get input dataset name; default to 'Input Dataset'
-                name = module.state.get( 'name', 'Input Dataset')
-                step_dict['inputs'].append( { "name" : name, "description" : annotation_str } )
-            else:
-                # Step is a tool and may have runtime inputs.
-                for name, val in module.state.inputs.items():
-                    input_type = type( val )
-                    if input_type == RuntimeValue:
-                        step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } )
-                    elif input_type == dict:
-                        # Input type is described by a dict, e.g. indexed parameters.
-                        for partval in val.values():
-                            if type( partval ) == RuntimeValue:
-                                step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } )
-            # User outputs
-            step_dict['user_outputs'] = []
-            """
-            module_outputs = module.get_data_outputs()
-            step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step )
-            for output in step_outputs:
-                name = output.output_name
-                annotation = ""
-                for module_output in module_outputs:
-                    if module_output.get( 'name', None ) == name:
-                        output_type = module_output.get( 'extension', '' )
-                        break
-                data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type }
-            """
-
-            # All step outputs
-            step_dict['outputs'] = []
-            if type( module ) is ToolModule:
-                for output in module.get_data_outputs():
-                    step_dict['outputs'].append( { 'name' : output['name'], 'type' : output['extensions'][0] } )
-            # Connections
-            input_connections = step.input_connections
-            if step.type is None or step.type == 'tool':
-                # Determine full (prefixed) names of valid input datasets
-                data_input_names = {}
-                def callback( input, value, prefixed_name, prefixed_label ):
-                    if isinstance( input, DataToolParameter ):
-                        data_input_names[ prefixed_name ] = True
-                visit_input_values( module.tool.inputs, module.state.inputs, callback )
-                # Filter
-                # FIXME: this removes connection without displaying a message currently!
-                input_connections = [ conn for conn in input_connections if conn.input_name in data_input_names ]
-            # Encode input connections as dictionary
-            input_conn_dict = {}
-            for conn in input_connections:
-                input_conn_dict[ conn.input_name ] = \
-                    dict( id=conn.output_step.order_index, output_name=conn.output_name )
-            step_dict['input_connections'] = input_conn_dict
-            # Position
-            step_dict['position'] = step.position
-            # Add to return value
-            data['steps'][step.order_index] = step_dict
-        return data
-
     @expose_api
     def import_shared_workflow(self, trans, payload, **kwd):
         """

diff -r 1f91923153d2d64a216ddbeb06810df3ac12be06 -r 7ed68c60b4546c90f24f4463b644f6fffe712596 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -21,7 +21,7 @@
 from galaxy.jobs.actions.post import ActionBox
 from galaxy.model.item_attrs import UsesItemRatings
 from galaxy.model.mapping import desc
-from galaxy.tools.parameters import RuntimeValue, visit_input_values
+from galaxy.tools.parameters import visit_input_values
 from galaxy.tools.parameters.basic import DataToolParameter, DrillDownSelectToolParameter, SelectToolParameter, UnvalidatedValue
 from galaxy.tools.parameters.grouping import Conditional, Repeat
 from galaxy.util.odict import odict
@@ -32,7 +32,7 @@
 from galaxy.web.framework import form
 from galaxy.web.framework.helpers import grids, time_ago
 from galaxy.web.framework.helpers import to_unicode
-from galaxy.workflow.modules import module_factory, ToolModule
+from galaxy.workflow.modules import module_factory
@@ -1608,131 +1608,6 @@
                                     shared_by_others=shared_by_others,
                                     ids_in_menu=ids_in_menu )

-    def _workflow_to_dict( self, trans, stored ):
-        """
-        Converts a workflow to a dict of attributes suitable for exporting.
-        """
-        workflow = stored.latest_workflow
-        workflow_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, stored )
-        annotation_str = ""
-        if workflow_annotation:
-            annotation_str = workflow_annotation.annotation
-        # Pack workflow data into a dictionary and return
-        data = {}
-        data['a_galaxy_workflow'] = 'true' # Placeholder for identifying galaxy workflow
-        data['format-version'] = "0.1"
-        data['name'] = workflow.name
-        data['annotation'] = annotation_str
-        data['steps'] = {}
-        # For each step, rebuild the form and encode the state
-        for step in workflow.steps:
-            # Load from database representation
-            module = module_factory.from_workflow_step( trans, step )
-            if not module:
-                return None
-            # Get user annotation.
-            step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step )
-            annotation_str = ""
-            if step_annotation:
-                annotation_str = step_annotation.annotation
-            # Step info
-            step_dict = {
-                'id': step.order_index,
-                'type': module.type,
-                'tool_id': module.get_tool_id(),
-                'tool_version' : step.tool_version,
-                'name': module.get_name(),
-                'tool_state': module.get_state( secure=False ),
-                'tool_errors': module.get_errors(),
-                ## 'data_inputs': module.get_data_inputs(),
-                ## 'data_outputs': module.get_data_outputs(),
-                'annotation' : annotation_str
-            }
-            # Add post-job actions to step dict.
-            if module.type == 'tool':
-                pja_dict = {}
-                for pja in step.post_job_actions:
-                    pja_dict[pja.action_type+pja.output_name] = dict( action_type = pja.action_type,
-                                                                      output_name = pja.output_name,
-                                                                      action_arguments = pja.action_arguments )
-                step_dict[ 'post_job_actions' ] = pja_dict
-            # Data inputs
-            step_dict['inputs'] = []
-            if module.type == "data_input":
-                # Get input dataset name; default to 'Input Dataset'
-                name = module.state.get( 'name', 'Input Dataset')
-                step_dict['inputs'].append( { "name" : name, "description" : annotation_str } )
-            else:
-                # Step is a tool and may have runtime inputs.
-                for name, val in module.state.inputs.items():
-                    input_type = type( val )
-                    if input_type == RuntimeValue:
-                        step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } )
-                    elif input_type == dict:
-                        # Input type is described by a dict, e.g. indexed parameters.
-                        for partname, partval in val.items():
-                            if type( partval ) == RuntimeValue:
-                                step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } )
-            # User outputs
-            step_dict['user_outputs'] = []
-            """
-            module_outputs = module.get_data_outputs()
-            step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step )
-            for output in step_outputs:
-                name = output.output_name
-                annotation = ""
-                for module_output in module_outputs:
-                    if module_output.get( 'name', None ) == name:
-                        output_type = module_output.get( 'extension', '' )
-                        break
-                data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type }
-            """
-
-            # All step outputs
-            step_dict['outputs'] = []
-            if type( module ) is ToolModule:
-                for output in module.get_data_outputs():
-                    step_dict['outputs'].append( { 'name' : output['name'], 'type' : output['extensions'][0] } )
-            # Connections
-            input_connections = step.input_connections
-            if step.type is None or step.type == 'tool':
-                # Determine full (prefixed) names of valid input datasets
-                data_input_names = {}
-                def callback( input, value, prefixed_name, prefixed_label ):
-                    if isinstance( input, DataToolParameter ):
-                        data_input_names[ prefixed_name ] = True
-                # FIXME: this updates modules silently right now; messages from updates should be provided.
-                module.check_and_update_state()
-                visit_input_values( module.tool.inputs, module.state.inputs, callback )
-                # Filter
-                # FIXME: this removes connection without displaying a message currently!
-                input_connections = [ conn for conn in input_connections if conn.input_name in data_input_names ]
-            # Encode input connections as dictionary
-            input_conn_dict = {}
-            unique_input_names = set( [conn.input_name for conn in input_connections] )
-            for input_name in unique_input_names:
-                input_conn_dict[ input_name ] = \
-                    [ dict( id=conn.output_step.order_index, output_name=conn.output_name ) for conn in input_connections if conn.input_name == input_name ]
-            # Preserve backward compatability. Previously Galaxy
-            # assumed input connections would be dictionaries not
-            # lists of dictionaries, so replace any singleton list
-            # with just the dictionary so that workflows exported from
-            # newer Galaxy instances can be used with older Galaxy
-            # instances if they do no include multiple input
-            # tools. This should be removed at some point. Mirrored
-            # hack in _workflow_from_dict should never be removed so
-            # existing workflow exports continue to function.
-            for input_name, input_conn in dict(input_conn_dict).iteritems():
-                if len(input_conn) == 1:
-                    input_conn_dict[input_name] = input_conn[0]
-            step_dict['input_connections'] = input_conn_dict
-            # Position
-            step_dict['position'] = step.position
-            # Add to return value
-            data['steps'][step.order_index] = step_dict
-        return data
-
     def _workflow_to_svg_canvas( self, trans, stored ):
         workflow = stored.latest_workflow
         data = []
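
One behavioural difference between the removed API copy and the consolidated exporter is the connection encoding: the old API version wrote one dict per input name, so a later connection silently overwrote an earlier one, while the shared version groups all connections for an input into a list. A standalone sketch of the grouped encoding, with plain tuples standing in for the model objects:

    def encode_input_connections(connections):
        """Group (input_name, source_step_index, output_name) by input name."""
        input_conn_dict = {}
        for input_name, step_index, output_name in connections:
            input_conn_dict.setdefault(input_name, []).append(
                {'id': step_index, 'output_name': output_name})
        return input_conn_dict

    # 'input1' is fed by two upstream steps and is preserved as a two-element
    # list; the old per-name dict encoding would have kept only one of them.
    print(encode_input_connections([
        ('input1', 0, 'out'), ('input1', 1, 'out'), ('input2', 1, 'out2')]))
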
https://bitbucket.org/galaxy/galaxy-central/commits/8c98ef649bc0/
Changeset:   8c98ef649bc0
User:        nsoranzo
Date:        2014-01-24 16:05:27
Summary:     Fix typo in action name.
Affected #:  1 file

diff -r 7ed68c60b4546c90f24f4463b644f6fffe712596 -r 8c98ef649bc0f94edf72c9adbff3947243ad33de lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -167,7 +167,7 @@
     webapp.mapper.connect( 'workflow_dict', '/api/workflows/{workflow_id}/download', controller='workflows', action='workflow_dict', conditions=dict( method=['GET'] ) )
     # Preserve the following download route for now for dependent applications -- deprecate at some point
     webapp.mapper.connect( 'workflow_dict', '/api/workflows/download/{workflow_id}', controller='workflows', action='workflow_dict', conditions=dict( method=['GET'] ) )
-    webapp.mapper.connect( 'import_shared_workflow', '/api/workflows/import', controller='workflows', action='import_shared_worflow', conditions=dict( method=['POST'] ) )
+    webapp.mapper.connect( 'import_shared_workflow', '/api/workflows/import', controller='workflows', action='import_shared_workflow', conditions=dict( method=['POST'] ) )

     # ============================
     # ===== AUTHENTICATE API =====
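
This completes the typo fix from the first changeset: Routes dispatches a matched URL to the controller method named by action, so the /api/workflows/import route existed all along but still pointed at a method name that no longer did. A standalone sketch of that resolution (simplified; the environ argument to match is how Routes honours the method condition, and exact behaviour can vary across Routes versions):

    from routes import Mapper

    mapper = Mapper()
    # The action string must exactly match the controller method name; the old
    # value 'import_shared_worflow' named a method that no longer existed.
    mapper.connect('import_shared_workflow', '/api/workflows/import',
                   controller='workflows', action='import_shared_workflow',
                   conditions=dict(method=['POST']))

    match = mapper.match('/api/workflows/import', environ={'REQUEST_METHOD': 'POST'})
    print(match)  # -> {'controller': 'workflows', 'action': 'import_shared_workflow'}
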
+ """ + from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps + + # Put parameters in workflow mode + trans.workflow_building_mode = True + # Create new workflow from incoming dict + workflow = model.Workflow() + # If there's a source, put it in the workflow name. + if source: + name = "%s (imported from %s)" % ( data['name'], source ) + else: + name = data['name'] + workflow.name = name + # Assume no errors until we find a step that has some + workflow.has_errors = False + # Create each step + steps = [] + # The editor will provide ids for each step that we don't need to save, + # but do need to use to make connections + steps_by_external_id = {} + # Keep track of tools required by the workflow that are not available in + # the local Galaxy instance. Each tuple in the list of missing_tool_tups + # will be ( tool_id, tool_name, tool_version ). + missing_tool_tups = [] + # First pass to build step objects and populate basic values + for step_dict in data[ 'steps' ].itervalues(): + # Create the model class for the step + step = model.WorkflowStep() + steps.append( step ) + steps_by_external_id[ step_dict['id' ] ] = step + # FIXME: Position should be handled inside module + step.position = step_dict['position'] + module = module_factory.from_dict( trans, step_dict, secure=False ) + module.save_to_step( step ) + if module.type == 'tool' and module.tool is None: + # A required tool is not available in the local Galaxy instance. + missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] ) + if missing_tool_tup not in missing_tool_tups: + missing_tool_tups.append( missing_tool_tup ) + # Save the entire step_dict in the unused config field, be parsed later + # when we do have the tool + step.config = to_json_string(step_dict) + if step.tool_errors: + workflow.has_errors = True + # Stick this in the step temporarily + step.temp_input_connections = step_dict['input_connections'] + # Save step annotation. 
+ annotation = step_dict[ 'annotation' ] + if annotation: + annotation = sanitize_html( annotation, 'utf-8', 'text/html' ) + self.add_item_annotation( trans.sa_session, trans.get_user(), step, annotation ) + # Second pass to deal with connections between steps + for step in steps: + # Input connections + for input_name, conn_list in step.temp_input_connections.iteritems(): + if not conn_list: + continue + if not isinstance(conn_list, list): # Older style singleton connection + conn_list = [conn_list] + for conn_dict in conn_list: + conn = model.WorkflowStepConnection() + conn.input_step = step + conn.input_name = input_name + conn.output_name = conn_dict['output_name'] + conn.output_step = steps_by_external_id[ conn_dict['id'] ] + del step.temp_input_connections + + # Order the steps if possible + attach_ordered_steps( workflow, steps ) + + # Connect up + stored = model.StoredWorkflow() + stored.name = workflow.name + workflow.stored_workflow = stored + stored.latest_workflow = workflow + stored.user = trans.user + if data[ 'annotation' ]: + self.add_item_annotation( trans.sa_session, stored.user, stored, data[ 'annotation' ] ) + + # Persist + trans.sa_session.add( stored ) + trans.sa_session.flush() + + if add_to_menu: + if trans.user.stored_workflow_menu_entries == None: + trans.user.stored_workflow_menu_entries = [] + menuEntry = model.StoredWorkflowMenuEntry() + menuEntry.stored_workflow = stored + trans.user.stored_workflow_menu_entries.append( menuEntry ) + trans.sa_session.flush() + + return stored, missing_tool_tups + + def _workflow_to_dict( self, trans, stored ): + """ + Converts a workflow to a dict of attributes suitable for exporting. + """ + workflow = stored.latest_workflow + workflow_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, stored ) + annotation_str = "" + if workflow_annotation: + annotation_str = workflow_annotation.annotation + # Pack workflow data into a dictionary and return + data = {} + data['a_galaxy_workflow'] = 'true' # Placeholder for identifying galaxy workflow + data['format-version'] = "0.1" + data['name'] = workflow.name + data['annotation'] = annotation_str + data['steps'] = {} + # For each step, rebuild the form and encode the state + for step in workflow.steps: + # Load from database representation + module = module_factory.from_workflow_step( trans, step ) + if not module: + return None + # Get user annotation. + step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step ) + annotation_str = "" + if step_annotation: + annotation_str = step_annotation.annotation + # Step info + step_dict = { + 'id': step.order_index, + 'type': module.type, + 'tool_id': module.get_tool_id(), + 'tool_version' : step.tool_version, + 'name': module.get_name(), + 'tool_state': module.get_state( secure=False ), + 'tool_errors': module.get_errors(), + ## 'data_inputs': module.get_data_inputs(), + ## 'data_outputs': module.get_data_outputs(), + 'annotation' : annotation_str + } + # Add post-job actions to step dict. 
+ if module.type == 'tool': + pja_dict = {} + for pja in step.post_job_actions: + pja_dict[pja.action_type+pja.output_name] = dict( action_type = pja.action_type, + output_name = pja.output_name, + action_arguments = pja.action_arguments ) + step_dict[ 'post_job_actions' ] = pja_dict + # Data inputs + step_dict['inputs'] = [] + if module.type == "data_input": + # Get input dataset name; default to 'Input Dataset' + name = module.state.get( 'name', 'Input Dataset') + step_dict['inputs'].append( { "name" : name, "description" : annotation_str } ) + else: + # Step is a tool and may have runtime inputs. + for name, val in module.state.inputs.items(): + input_type = type( val ) + if input_type == RuntimeValue: + step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } ) + elif input_type == dict: + # Input type is described by a dict, e.g. indexed parameters. + for partval in val.values(): + if type( partval ) == RuntimeValue: + step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } ) + # User outputs + step_dict['user_outputs'] = [] + """ + module_outputs = module.get_data_outputs() + step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step ) + for output in step_outputs: + name = output.output_name + annotation = "" + for module_output in module_outputs: + if module_output.get( 'name', None ) == name: + output_type = module_output.get( 'extension', '' ) + break + data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type } + """ + + # All step outputs + step_dict['outputs'] = [] + if type( module ) is ToolModule: + for output in module.get_data_outputs(): + step_dict['outputs'].append( { 'name' : output['name'], 'type' : output['extensions'][0] } ) + # Connections + input_connections = step.input_connections + if step.type is None or step.type == 'tool': + # Determine full (prefixed) names of valid input datasets + data_input_names = {} + def callback( input, value, prefixed_name, prefixed_label ): + if isinstance( input, DataToolParameter ): + data_input_names[ prefixed_name ] = True + + # FIXME: this updates modules silently right now; messages from updates should be provided. + module.check_and_update_state() + visit_input_values( module.tool.inputs, module.state.inputs, callback ) + # Filter + # FIXME: this removes connection without displaying a message currently! + input_connections = [ conn for conn in input_connections if conn.input_name in data_input_names ] + # Encode input connections as dictionary + input_conn_dict = {} + unique_input_names = set( [conn.input_name for conn in input_connections] ) + for input_name in unique_input_names: + input_conn_dict[ input_name ] = \ + [ dict( id=conn.output_step.order_index, output_name=conn.output_name ) for conn in input_connections if conn.input_name == input_name ] + # Preserve backward compatability. Previously Galaxy + # assumed input connections would be dictionaries not + # lists of dictionaries, so replace any singleton list + # with just the dictionary so that workflows exported from + # newer Galaxy instances can be used with older Galaxy + # instances if they do no include multiple input + # tools. This should be removed at some point. Mirrored + # hack in _workflow_from_dict should never be removed so + # existing workflow exports continue to function. 
+ for input_name, input_conn in dict(input_conn_dict).iteritems(): + if len(input_conn) == 1: + input_conn_dict[input_name] = input_conn[0] + step_dict['input_connections'] = input_conn_dict + # Position + step_dict['position'] = step.position + # Add to return value + data['steps'][step.order_index] = step_dict + return data + class UsesFormDefinitionsMixin: """Mixin for controllers that use Galaxy form objects.""" diff -r 122fe556dc384291119115d096210e4298e43f93 -r 51798d8a3c92a2290537f6dcfb8f93e6f1371509 lib/galaxy/webapps/galaxy/api/workflows.py --- a/lib/galaxy/webapps/galaxy/api/workflows.py +++ b/lib/galaxy/webapps/galaxy/api/workflows.py @@ -9,14 +9,12 @@ from galaxy import exceptions from galaxy import util from galaxy import web -from galaxy import model -from galaxy.tools.parameters import visit_input_values, DataToolParameter, RuntimeValue +from galaxy.tools.parameters import visit_input_values, DataToolParameter from galaxy.web import _future_expose_api as expose_api from galaxy.web.base.controller import BaseAPIController, url_for, UsesStoredWorkflowMixin -from galaxy.workflow.modules import module_factory, ToolModule +from galaxy.workflow.modules import module_factory from galaxy.jobs.actions.post import ActionBox -from ..controllers.workflow import attach_ordered_steps log = logging.getLogger(__name__) @@ -367,226 +365,8 @@ return item - def _workflow_from_dict( self, trans, data, source=None ): - """ - RPARK: copied from galaxy.webapps.galaxy.controllers.workflows.py - Creates a workflow from a dict. Created workflow is stored in the database and returned. - """ - # Put parameters in workflow mode - trans.workflow_building_mode = True - # Create new workflow from incoming dict - workflow = model.Workflow() - # If there's a source, put it in the workflow name. - if source: - name = "%s (imported from %s)" % ( data['name'], source ) - else: - name = data['name'] - workflow.name = name - # Assume no errors until we find a step that has some - workflow.has_errors = False - # Create each step - steps = [] - # The editor will provide ids for each step that we don't need to save, - # but do need to use to make connections - steps_by_external_id = {} - # Keep track of tools required by the workflow that are not available in - # the local Galaxy instance. Each tuple in the list of missing_tool_tups - # will be ( tool_id, tool_name, tool_version ). - missing_tool_tups = [] - # First pass to build step objects and populate basic values - for step_dict in data[ 'steps' ].itervalues(): - # Create the model class for the step - step = model.WorkflowStep() - steps.append( step ) - steps_by_external_id[ step_dict['id' ] ] = step - # FIXME: Position should be handled inside module - step.position = step_dict['position'] - module = module_factory.from_dict( trans, step_dict, secure=False ) - if module.type == 'tool' and module.tool is None: - # A required tool is not available in the local Galaxy instance. - missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] ) - if missing_tool_tup not in missing_tool_tups: - missing_tool_tups.append( missing_tool_tup ) - module.save_to_step( step ) - if step.tool_errors: - workflow.has_errors = True - # Stick this in the step temporarily - step.temp_input_connections = step_dict['input_connections'] - # Save step annotation. 
- #annotation = step_dict[ 'annotation' ] - #if annotation: - #annotation = sanitize_html( annotation, 'utf-8', 'text/html' ) - # ------------------------------------------ # - # RPARK REMOVING: user annotation b/c of API - #self.add_item_annotation( trans.sa_session, trans.get_user(), step, annotation ) - # ------------------------------------------ # - # Unpack and add post-job actions. - post_job_actions = step_dict.get( 'post_job_actions', {} ) - for name, pja_dict in post_job_actions.items(): - model.PostJobAction( pja_dict[ 'action_type' ], - step, pja_dict[ 'output_name' ], - pja_dict[ 'action_arguments' ] ) - # Second pass to deal with connections between steps - for step in steps: - # Input connections - for input_name, conn_dict in step.temp_input_connections.iteritems(): - if conn_dict: - conn = model.WorkflowStepConnection() - conn.input_step = step - conn.input_name = input_name - conn.output_name = conn_dict['output_name'] - conn.output_step = steps_by_external_id[ conn_dict['id'] ] - del step.temp_input_connections - # Order the steps if possible - attach_ordered_steps( workflow, steps ) - # Connect up - stored = model.StoredWorkflow() - stored.name = workflow.name - workflow.stored_workflow = stored - stored.latest_workflow = workflow - stored.user = trans.user - # Persist - trans.sa_session.add( stored ) - trans.sa_session.flush() - return stored, missing_tool_tups - - def _workflow_to_dict( self, trans, stored ): - """ - RPARK: copied from galaxy.web.controllers.workflows.py - Converts a workflow to a dict of attributes suitable for exporting. - """ - workflow = stored.latest_workflow - - ### ----------------------------------- ### - ## RPARK EDIT ## - workflow_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, stored ) - annotation_str = "" - if workflow_annotation: - annotation_str = workflow_annotation.annotation - ### ----------------------------------- ### - - - # Pack workflow data into a dictionary and return - data = {} - data['a_galaxy_workflow'] = 'true' # Placeholder for identifying galaxy workflow - data['format-version'] = "0.1" - data['name'] = workflow.name - ### ----------------------------------- ### - ## RPARK EDIT ## - data['annotation'] = annotation_str - ### ----------------------------------- ### - - data['steps'] = {} - # For each step, rebuild the form and encode the state - for step in workflow.steps: - # Load from database representation - module = module_factory.from_workflow_step( trans, step ) - if not module: - return None - - ### ----------------------------------- ### - ## RPARK EDIT ## - - # TODO: This is duplicated from - # lib/galaxy/webapps/controllres/workflow.py -- refactor and - # eliminate copied code. - - # Get user annotation. - step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step ) - annotation_str = "" - if step_annotation: - annotation_str = step_annotation.annotation - ### ----------------------------------- ### - - # Step info - step_dict = { - 'id': step.order_index, - 'type': module.type, - 'tool_id': module.get_tool_id(), - 'tool_version' : step.tool_version, - 'name': module.get_name(), - 'tool_state': module.get_state( secure=False ), - 'tool_errors': module.get_errors(), - ## 'data_inputs': module.get_data_inputs(), - ## 'data_outputs': module.get_data_outputs(), - - ### ----------------------------------- ### - ## RPARK EDIT ## - 'annotation' : annotation_str - ### ----------------------------------- ### - - } - # Add post-job actions to step dict. 
- if module.type == 'tool': - pja_dict = {} - for pja in step.post_job_actions: - pja_dict[pja.action_type+pja.output_name] = dict( action_type = pja.action_type, - output_name = pja.output_name, - action_arguments = pja.action_arguments ) - step_dict[ 'post_job_actions' ] = pja_dict - # Data inputs - step_dict['inputs'] = [] - if module.type == "data_input": - # Get input dataset name; default to 'Input Dataset' - name = module.state.get( 'name', 'Input Dataset') - step_dict['inputs'].append( { "name" : name, "description" : annotation_str } ) - else: - # Step is a tool and may have runtime inputs. - for name, val in module.state.inputs.items(): - input_type = type( val ) - if input_type == RuntimeValue: - step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } ) - elif input_type == dict: - # Input type is described by a dict, e.g. indexed parameters. - for partval in val.values(): - if type( partval ) == RuntimeValue: - step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } ) - # User outputs - step_dict['user_outputs'] = [] - """ - module_outputs = module.get_data_outputs() - step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step ) - for output in step_outputs: - name = output.output_name - annotation = "" - for module_output in module_outputs: - if module_output.get( 'name', None ) == name: - output_type = module_output.get( 'extension', '' ) - break - data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type } - """ - - # All step outputs - step_dict['outputs'] = [] - if type( module ) is ToolModule: - for output in module.get_data_outputs(): - step_dict['outputs'].append( { 'name' : output['name'], 'type' : output['extensions'][0] } ) - # Connections - input_connections = step.input_connections - if step.type is None or step.type == 'tool': - # Determine full (prefixed) names of valid input datasets - data_input_names = {} - def callback( input, value, prefixed_name, prefixed_label ): - if isinstance( input, DataToolParameter ): - data_input_names[ prefixed_name ] = True - visit_input_values( module.tool.inputs, module.state.inputs, callback ) - # Filter - # FIXME: this removes connection without displaying a message currently! - input_connections = [ conn for conn in input_connections if conn.input_name in data_input_names ] - # Encode input connections as dictionary - input_conn_dict = {} - for conn in input_connections: - input_conn_dict[ conn.input_name ] = \ - dict( id=conn.output_step.order_index, output_name=conn.output_name ) - step_dict['input_connections'] = input_conn_dict - # Position - step_dict['position'] = step.position - # Add to return value - data['steps'][step.order_index] = step_dict - return data - @expose_api - def import_shared_worflow(self, trans, payload, **kwd): + def import_shared_workflow(self, trans, payload, **kwd): """ POST /api/workflows/import Import a workflow shared by other users. 
diff -r 122fe556dc384291119115d096210e4298e43f93 -r 51798d8a3c92a2290537f6dcfb8f93e6f1371509 lib/galaxy/webapps/galaxy/buildapp.py --- a/lib/galaxy/webapps/galaxy/buildapp.py +++ b/lib/galaxy/webapps/galaxy/buildapp.py @@ -167,7 +167,7 @@ webapp.mapper.connect( 'workflow_dict', '/api/workflows/{workflow_id}/download', controller='workflows', action='workflow_dict', conditions=dict( method=['GET'] ) ) # Preserve the following download route for now for dependent applications -- deprecate at some point webapp.mapper.connect( 'workflow_dict', '/api/workflows/download/{workflow_id}', controller='workflows', action='workflow_dict', conditions=dict( method=['GET'] ) ) - webapp.mapper.connect( 'import_shared_workflow', '/api/workflows/import', controller='workflows', action='import_shared_worflow', conditions=dict( method=['POST'] ) ) + webapp.mapper.connect( 'import_shared_workflow', '/api/workflows/import', controller='workflows', action='import_shared_workflow', conditions=dict( method=['POST'] ) ) # ============================ # ===== AUTHENTICATE API ===== diff -r 122fe556dc384291119115d096210e4298e43f93 -r 51798d8a3c92a2290537f6dcfb8f93e6f1371509 lib/galaxy/webapps/galaxy/controllers/workflow.py --- a/lib/galaxy/webapps/galaxy/controllers/workflow.py +++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py @@ -21,10 +21,9 @@ from galaxy.jobs.actions.post import ActionBox from galaxy.model.item_attrs import UsesItemRatings from galaxy.model.mapping import desc -from galaxy.tools.parameters import RuntimeValue, visit_input_values +from galaxy.tools.parameters import visit_input_values from galaxy.tools.parameters.basic import DataToolParameter, DrillDownSelectToolParameter, SelectToolParameter, UnvalidatedValue from galaxy.tools.parameters.grouping import Conditional, Repeat -from galaxy.util.json import to_json_string from galaxy.util.odict import odict from galaxy.util.sanitize_html import sanitize_html from galaxy.util.topsort import CycleError, topsort, topsort_levels @@ -33,7 +32,7 @@ from galaxy.web.framework import form from galaxy.web.framework.helpers import grids, time_ago from galaxy.web.framework.helpers import to_unicode -from galaxy.workflow.modules import module_factory, ToolModule +from galaxy.workflow.modules import module_factory class StoredWorkflowListGrid( grids.Grid ): @@ -1609,225 +1608,6 @@ shared_by_others=shared_by_others, ids_in_menu=ids_in_menu ) - def _workflow_to_dict( self, trans, stored ): - """ - Converts a workflow to a dict of attributes suitable for exporting. - """ - workflow = stored.latest_workflow - workflow_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, stored ) - annotation_str = "" - if workflow_annotation: - annotation_str = workflow_annotation.annotation - # Pack workflow data into a dictionary and return - data = {} - data['a_galaxy_workflow'] = 'true' # Placeholder for identifying galaxy workflow - data['format-version'] = "0.1" - data['name'] = workflow.name - data['annotation'] = annotation_str - data['steps'] = {} - # For each step, rebuild the form and encode the state - for step in workflow.steps: - # Load from database representation - module = module_factory.from_workflow_step( trans, step ) - if not module: - return None - # Get user annotation. 
-            step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step )
-            annotation_str = ""
-            if step_annotation:
-                annotation_str = step_annotation.annotation
-            # Step info
-            step_dict = {
-                'id': step.order_index,
-                'type': module.type,
-                'tool_id': module.get_tool_id(),
-                'tool_version' : step.tool_version,
-                'name': module.get_name(),
-                'tool_state': module.get_state( secure=False ),
-                'tool_errors': module.get_errors(),
-                ## 'data_inputs': module.get_data_inputs(),
-                ## 'data_outputs': module.get_data_outputs(),
-                'annotation' : annotation_str
-            }
-            # Add post-job actions to step dict.
-            if module.type == 'tool':
-                pja_dict = {}
-                for pja in step.post_job_actions:
-                    pja_dict[pja.action_type+pja.output_name] = dict( action_type = pja.action_type,
-                                                                      output_name = pja.output_name,
-                                                                      action_arguments = pja.action_arguments )
-                step_dict[ 'post_job_actions' ] = pja_dict
-            # Data inputs
-            step_dict['inputs'] = []
-            if module.type == "data_input":
-                # Get input dataset name; default to 'Input Dataset'
-                name = module.state.get( 'name', 'Input Dataset')
-                step_dict['inputs'].append( { "name" : name, "description" : annotation_str } )
-            else:
-                # Step is a tool and may have runtime inputs.
-                for name, val in module.state.inputs.items():
-                    input_type = type( val )
-                    if input_type == RuntimeValue:
-                        step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } )
-                    elif input_type == dict:
-                        # Input type is described by a dict, e.g. indexed parameters.
-                        for partname, partval in val.items():
-                            if type( partval ) == RuntimeValue:
-                                step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } )
-            # User outputs
-            step_dict['user_outputs'] = []
-            """
-            module_outputs = module.get_data_outputs()
-            step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step )
-            for output in step_outputs:
-                name = output.output_name
-                annotation = ""
-                for module_output in module_outputs:
-                    if module_output.get( 'name', None ) == name:
-                        output_type = module_output.get( 'extension', '' )
-                        break
-                data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type }
-            """
-
-            # All step outputs
-            step_dict['outputs'] = []
-            if type( module ) is ToolModule:
-                for output in module.get_data_outputs():
-                    step_dict['outputs'].append( { 'name' : output['name'], 'type' : output['extensions'][0] } )
-            # Connections
-            input_connections = step.input_connections
-            if step.type is None or step.type == 'tool':
-                # Determine full (prefixed) names of valid input datasets
-                data_input_names = {}
-                def callback( input, value, prefixed_name, prefixed_label ):
-                    if isinstance( input, DataToolParameter ):
-                        data_input_names[ prefixed_name ] = True
-
-                # FIXME: this updates modules silently right now; messages from updates should be provided.
-                module.check_and_update_state()
-                visit_input_values( module.tool.inputs, module.state.inputs, callback )
-                # Filter
-                # FIXME: this removes connection without displaying a message currently!
-                input_connections = [ conn for conn in input_connections if conn.input_name in data_input_names ]
-            # Encode input connections as dictionary
-            input_conn_dict = {}
-            unique_input_names = set( [conn.input_name for conn in input_connections] )
-            for input_name in unique_input_names:
-                input_conn_dict[ input_name ] = \
-                    [ dict( id=conn.output_step.order_index, output_name=conn.output_name ) for conn in input_connections if conn.input_name == input_name ]
-            # Preserve backward compatibility. Previously Galaxy
-            # assumed input connections would be dictionaries not
-            # lists of dictionaries, so replace any singleton list
-            # with just the dictionary so that workflows exported from
-            # newer Galaxy instances can be used with older Galaxy
-            # instances if they do not include multiple input
-            # tools. This should be removed at some point. Mirrored
-            # hack in _workflow_from_dict should never be removed so
-            # existing workflow exports continue to function.
-            for input_name, input_conn in dict(input_conn_dict).iteritems():
-                if len(input_conn) == 1:
-                    input_conn_dict[input_name] = input_conn[0]
-            step_dict['input_connections'] = input_conn_dict
-            # Position
-            step_dict['position'] = step.position
-            # Add to return value
-            data['steps'][step.order_index] = step_dict
-        return data
-
-    def _workflow_from_dict( self, trans, data, source=None, add_to_menu=False ):
-        """
-        Creates a workflow from a dict. Created workflow is stored in the database and returned.
-        """
-        # Put parameters in workflow mode
-        trans.workflow_building_mode = True
-        # Create new workflow from incoming dict
-        workflow = model.Workflow()
-        # If there's a source, put it in the workflow name.
-        if source:
-            name = "%s (imported from %s)" % ( data['name'], source )
-        else:
-            name = data['name']
-        workflow.name = name
-        # Assume no errors until we find a step that has some
-        workflow.has_errors = False
-        # Create each step
-        steps = []
-        # The editor will provide ids for each step that we don't need to save,
-        # but do need to use to make connections
-        steps_by_external_id = {}
-        # Keep track of tools required by the workflow that are not available in
-        # the local Galaxy instance. Each tuple in the list of missing_tool_tups
-        # will be ( tool_id, tool_name, tool_version ).
-        missing_tool_tups = []
-        # First pass to build step objects and populate basic values
-        for key, step_dict in data[ 'steps' ].iteritems():
-            # Create the model class for the step
-            step = model.WorkflowStep()
-            steps.append( step )
-            steps_by_external_id[ step_dict['id' ] ] = step
-            # FIXME: Position should be handled inside module
-            step.position = step_dict['position']
-            module = module_factory.from_dict( trans, step_dict, secure=False )
-            module.save_to_step( step )
-            if module.type == 'tool' and module.tool is None:
-                # A required tool is not available in the local Galaxy instance.
-                missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
-                if missing_tool_tup not in missing_tool_tups:
-                    missing_tool_tups.append( missing_tool_tup )
-                # Save the entire step_dict in the unused config field, be parsed later
-                # when we do have the tool
-                step.config = to_json_string(step_dict)
-            if step.tool_errors:
-                workflow.has_errors = True
-            # Stick this in the step temporarily
-            step.temp_input_connections = step_dict['input_connections']
-            # Save step annotation.
-            annotation = step_dict[ 'annotation' ]
-            if annotation:
-                annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
-                self.add_item_annotation( trans.sa_session, trans.get_user(), step, annotation )
-        # Second pass to deal with connections between steps
-        for step in steps:
-            # Input connections
-            for input_name, conn_list in step.temp_input_connections.iteritems():
-                if not conn_list:
-                    continue
-                if not isinstance(conn_list, list): # Older style singleton connection
-                    conn_list = [conn_list]
-                for conn_dict in conn_list:
-                    conn = model.WorkflowStepConnection()
-                    conn.input_step = step
-                    conn.input_name = input_name
-                    conn.output_name = conn_dict['output_name']
-                    conn.output_step = steps_by_external_id[ conn_dict['id'] ]
-            del step.temp_input_connections
-
-        # Order the steps if possible
-        attach_ordered_steps( workflow, steps )
-
-        # Connect up
-        stored = model.StoredWorkflow()
-        stored.name = workflow.name
-        workflow.stored_workflow = stored
-        stored.latest_workflow = workflow
-        stored.user = trans.user
-        if data[ 'annotation' ]:
-            self.add_item_annotation( trans.sa_session, stored.user, stored, data[ 'annotation' ] )
-
-        # Persist
-        trans.sa_session.add( stored )
-        trans.sa_session.flush()
-
-        if add_to_menu:
-            if trans.user.stored_workflow_menu_entries == None:
-                trans.user.stored_workflow_menu_entries = []
-            menuEntry = model.StoredWorkflowMenuEntry()
-            menuEntry.stored_workflow = stored
-            trans.user.stored_workflow_menu_entries.append( menuEntry )
-            trans.sa_session.flush()
-
-        return stored, missing_tool_tups
-
     def _workflow_to_svg_canvas( self, trans, stored ):
         workflow = stored.latest_workflow
         data = []
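For readers tracking the _workflow_to_dict removal above (the method now lives on UsesStoredWorkflowMixin): the exporter builds a format-version 0.1 dict with one entry per step, keyed by step.order_index. Below is a hand-written, abbreviated example of a single tool step's entry; the keys are the ones assembled in the diff, while all values are made up:

    step_entry = {
        'id': 0,                          # step.order_index
        'type': 'tool',
        'tool_id': 'cat1',                # illustrative tool id
        'tool_version': '1.0.0',
        'name': 'Concatenate datasets',
        'tool_state': '{"input1": null}', # serialized module state
        'tool_errors': None,
        'annotation': '',
        'post_job_actions': {},           # keyed by action_type + output_name
        'inputs': [],                     # runtime parameters, if any
        'user_outputs': [],
        'outputs': [{'name': 'out_file1', 'type': 'txt'}],
        'input_connections': {'input1': {'id': 1, 'output_name': 'output'}},
        'position': {'left': 10, 'top': 10},
    }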
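The long comment in the removed _workflow_to_dict describes a backward-compatibility hack that is easy to misread in diff form: on export, any one-element connection list is collapsed to a bare dict so older Galaxy releases can still read the file; on import, the mirrored code accepts either shape. A self-contained sketch of the two directions (function names are illustrative, not Galaxy's):

    def collapse_singletons(input_conn_dict):
        # Export side: replace a one-element list with its sole dict.
        out = {}
        for name, conns in input_conn_dict.items():
            out[name] = conns[0] if len(conns) == 1 else conns
        return out

    def normalize_connections(input_conn_dict):
        # Import side: accept the old singleton-dict form and the newer
        # list-of-dicts form, always returning lists.
        out = {}
        for name, conns in input_conn_dict.items():
            out[name] = conns if isinstance(conns, list) else [conns]
        return out

    exported = collapse_singletons({'input1': [{'id': 0, 'output_name': 'out_file1'}]})
    assert exported['input1'] == {'id': 0, 'output_name': 'out_file1'}
    assert normalize_connections(exported)['input1'] == [{'id': 0, 'output_name': 'out_file1'}]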
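Likewise, the removed _workflow_from_dict relies on a two-pass build: every step must exist and be indexed by its editor-assigned external id before any connection is wired, because a connection may point at a step that appears later in the dict. A stripped-down sketch of that pattern with stand-in types (not Galaxy's models):

    class Step(object):
        def __init__(self, external_id):
            self.external_id = external_id
            self.connections = []  # (input_name, upstream step, output_name)

    def steps_from_dicts(step_dicts):
        # Pass 1: create every step, keyed by the editor's external id.
        steps = {}
        for d in step_dicts:
            steps[d['id']] = Step(d['id'])
        # Pass 2: wire connections, tolerating the singleton-dict form.
        for d in step_dicts:
            for input_name, conn in d.get('input_connections', {}).items():
                for c in (conn if isinstance(conn, list) else [conn]):
                    steps[d['id']].connections.append(
                        (input_name, steps[c['id']], c['output_name']))
        return steps

    steps = steps_from_dicts([
        {'id': 0, 'input_connections': {'input1': {'id': 1, 'output_name': 'output'}}},
        {'id': 1, 'input_connections': {}},
    ])
    assert steps[0].connections[0][1] is steps[1]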
Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this
because you have the service enabled, addressing the recipient of this email.