commit/galaxy-central: 4 new changesets
4 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/e662438a3641/
Changeset:   e662438a3641
User:        nsoranzo
Date:        2014-02-26 19:27:17
Summary:     Workflow API: add tool version and parameters for each step when
             showing a workflow. Otherwise, getting this information requires
             an extra API call to download the JSON representation, followed
             by a tricky mapping of the step ids.
Affected #:  1 file

diff -r 451b411b4b19ca34ed154352b07b3f49983d1197 -r e662438a3641ec18fdb396348f190664832d80ef lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -79,7 +79,7 @@
                 user=trans.user ).join( 'stored_workflow' ).filter(
                     trans.app.model.StoredWorkflow.deleted == False ).order_by(
                     desc( trans.app.model.StoredWorkflow.update_time ) ).all():
-            item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id })
+            item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } )
             encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
             item['url'] = url_for( 'workflow', id=encoded_id )
             rval.append(item)
@@ -127,6 +127,8 @@
         steps[step.id] = {'id': step.id,
                           'type': step.type,
                           'tool_id': step.tool_id,
+                          'tool_version': step.tool_version,
+                          'tool_inputs': step.tool_inputs,
                           'input_steps': {}}
         for conn in step.input_connections:
             steps[step.id]['input_steps'][conn.input_name] = {'source_step': conn.output_step_id,
@@ -139,7 +141,7 @@
         """
         POST /api/workflows

-        We're not creating workflows from the api.  Just execute for now.
+        We're not creating workflows from the api. Just execute for now.

         However, we will import them if installed_repository_file is specified
         """
@@ -210,7 +212,7 @@
                 else:
                     trans.response.status = 400
                     return "Unknown dataset source '%s' specified." % ds_map[k]['src']
-                if add_to_history and hda.history != history:
+                if add_to_history and hda.history != history:
                     hda = hda.copy()
                     history.add_dataset(hda)
                 ds_map[k]['hda'] = hda
@@ -260,7 +262,7 @@
                     trans.response.status = 400
                     return "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages
             else:
-                # This is an input step.  Make sure we have an available input.
+                # This is an input step. Make sure we have an available input.
                 if step.type == 'data_input' and str(step.id) not in ds_map:
                     trans.response.status = 400
                     return "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id
@@ -387,7 +389,7 @@
         """
        # Pull parameters out of payload.
        workflow_id = payload.get('workflow_id', None)
-       if workflow_id == None:
+       if workflow_id is None:
            raise exceptions.ObjectAttributeMissingException( "Missing required parameter 'workflow_id'." )
        try:
            stored_workflow = self.get_stored_workflow( trans, workflow_id, check_ownership=False )
@@ -452,7 +454,7 @@
        if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
            raise exceptions.ItemOwnershipException()
        results = trans.sa_session.query(self.app.model.WorkflowInvocation).filter(self.app.model.WorkflowInvocation.workflow_id==stored_workflow.latest_workflow_id)
-       results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
+       results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
        out = results.first()
        if out is not None:
            return self.encode_all_ids( trans, out.to_dict('element'), True)
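For illustration, here is a minimal client-side sketch (not part of the
changeset) showing how the new fields surface when a workflow is shown
through the API. The server URL, API key and workflow id are placeholders.

import requests

GALAXY_URL = "https://galaxy.example.org"  # placeholder server
API_KEY = "0123456789abcdef"               # placeholder API key
WORKFLOW_ID = "ebfb8f50c6abde6d"           # placeholder encoded workflow id

# GET /api/workflows/{id} returns the workflow description built by show().
resp = requests.get("%s/api/workflows/%s" % (GALAXY_URL, WORKFLOW_ID),
                    params={"key": API_KEY})
resp.raise_for_status()

# With this changeset each step carries its tool version and saved tool
# parameters, so no extra JSON download and step-id remapping is needed.
for step_id, step in resp.json()["steps"].items():
    print(step_id, step["tool_id"], step["tool_version"])
    print("  tool_inputs: %s" % step["tool_inputs"])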
https://bitbucket.org/galaxy/galaxy-central/commits/7a74c3b3fa1e/
Changeset:   7a74c3b3fa1e
User:        nsoranzo
Date:        2014-02-26 19:38:43
Summary:     Add documentation to workflows API create().
Affected #:  1 file

diff -r e662438a3641ec18fdb396348f190664832d80ef -r 7a74c3b3fa1e8ea52882fbfde4d3037519789390 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -143,7 +143,28 @@

         We're not creating workflows from the api. Just execute for now.

-        However, we will import them if installed_repository_file is specified
+        However, we will import them if installed_repository_file is specified.
+
+        :param installed_repository_file The path of a workflow to import. Either workflow_id or installed_repository_file must be specified
+        :type installed_repository_file str
+
+        :param workflow_id: an existing workflow id. Either workflow_id or installed_repository_file must be specified
+        :type workflow_id: str
+
+        :param parameters: See _update_step_parameters()
+        :type parameters: dict
+
+        :param ds_map: A dictionary mapping each input step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld' or 'hda') and 'id' (which should be the id of a LibraryDatasetDatasetAssociation, LibraryDataset or HistoryDatasetAssociation respectively)
+        :type ds_map: dict
+
+        :param no_add_to_history: if present in the payload with any value, the input datasets will not be added to the selected history
+        :type no_add_to_history: str
+
+        :param history: Either the name of a new history or "hist_id=HIST_ID" where HIST_ID is the id of an existing history
+        :type history: str
+
+        :param replacement_params: A dictionary used when renaming datasets
+        :type replacement_params: dict
         """

         # Pull parameters out of payload.
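To make the documented parameters concrete, a hedged example of a run
request against POST /api/workflows follows. All ids and names are
placeholders; the payload layout simply mirrors the docstring above.

import requests

GALAXY_URL = "https://galaxy.example.org"  # placeholder server
API_KEY = "0123456789abcdef"               # placeholder API key

payload = {
    "workflow_id": "ebfb8f50c6abde6d",  # placeholder encoded workflow id
    # One entry per input step: 'src' is 'hda', 'ldda' or 'ld'; 'id' is the
    # encoded id of the corresponding dataset association.
    "ds_map": {
        "100": {"src": "hda", "id": "2891970512fa2d5a"},  # placeholder ids
    },
    # Run inside an existing history; a plain string would name a new one.
    "history": "hist_id=1cd8e2f6b131e891",  # placeholder history id
    # Optional: consulted when renaming output datasets.
    "replacement_params": {"output_name": "sample1"},  # placeholder
}

resp = requests.post("%s/api/workflows" % GALAXY_URL,
                     params={"key": API_KEY}, json=payload)
print(resp.json())  # on success: the encoded 'history' id plus 'outputs' ids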
https://bitbucket.org/galaxy/galaxy-central/commits/eb8a8eddb1fc/
Changeset:   eb8a8eddb1fc
User:        nsoranzo
Date:        2014-02-26 19:54:11
Summary:     Workflows API: really allow import with installed_repository_file
             by not crashing if workflow_id, ds_map or history are not
             specified. Reorder code so that a new history is created only
             after the sanity checks pass.
Affected #:  1 file

diff -r 7a74c3b3fa1e8ea52882fbfde4d3037519789390 -r eb8a8eddb1fc79913a10832ca73be049a719949e lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -168,11 +168,11 @@
         """

         # Pull parameters out of payload.
-        workflow_id = payload['workflow_id']
+        workflow_id = payload.get('workflow_id', None)
         param_map = payload.get('parameters', {})
-        ds_map = payload['ds_map']
+        ds_map = payload.get('ds_map', {})
         add_to_history = 'no_add_to_history' not in payload
-        history_param = payload['history']
+        history_param = payload.get('history', '')

         # Get/create workflow.
         if not workflow_id:
@@ -198,6 +198,20 @@
                 return("Workflow is not owned by or shared with current user")
             workflow = stored_workflow.latest_workflow

+        # Sanity checks.
+        if not workflow:
+            trans.response.status = 400
+            return "Workflow not found."
+        if len( workflow.steps ) == 0:
+            trans.response.status = 400
+            return "Workflow cannot be run because it does not have any steps"
+        if workflow.has_cycles:
+            trans.response.status = 400
+            return "Workflow cannot be run because it contains cycles"
+        if workflow.has_errors:
+            trans.response.status = 400
+            return "Workflow cannot be run because of validation errors in some steps"
+
         # Get target history.
         if history_param.startswith('hist_id='):
             #Passing an existing history to use.
@@ -241,22 +255,7 @@
                 trans.response.status = 400
                 return "Invalid Dataset '%s' Specified" % ds_map[k]['id']

-        # Sanity checks.
-        if not workflow:
-            trans.response.status = 400
-            return "Workflow not found."
-        if len( workflow.steps ) == 0:
-            trans.response.status = 400
-            return "Workflow cannot be run because it does not have any steps"
-        if workflow.has_cycles:
-            trans.response.status = 400
-            return "Workflow cannot be run because it contains cycles"
-        if workflow.has_errors:
-            trans.response.status = 400
-            return "Workflow cannot be run because of validation errors in some steps"
-
         # Build the state for each step
-        rval = {}
         for step in workflow.steps:
             step_errors = None
             input_connections_by_name = {}
@@ -291,12 +290,7 @@
             step.state = step.module.get_runtime_state()

         # Run each step, connecting outputs to inputs
-        outputs = util.odict.odict()
-        rval['history'] = trans.security.encode_id(history.id)
-        rval['outputs'] = []
-
         replacement_dict = payload.get('replacement_params', {})
-
         outputs = invoke(
             trans=trans,
             workflow=workflow,
@@ -308,6 +302,9 @@

         # Build legacy output - should probably include more information from
         # outputs.
+        rval = {}
+        rval['history'] = trans.security.encode_id(history.id)
+        rval['outputs'] = []
         for step in workflow.steps:
             if step.type == 'tool' or step.type is None:
                 for v in outputs[ step.id ].itervalues():
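With this fix, an import-only request no longer needs any run parameters. A
hedged sketch follows, again with placeholder values; the value of
installed_repository_file must be a workflow file path readable by the
Galaxy server.

import requests

GALAXY_URL = "https://galaxy.example.org"  # placeholder server
API_KEY = "0123456789abcdef"               # placeholder API key

# Only installed_repository_file is supplied; before this changeset the
# handler raised KeyError on the absent workflow_id/ds_map/history keys.
payload = {
    "installed_repository_file": "/srv/galaxy/imports/workflow.ga",  # placeholder path
}

resp = requests.post("%s/api/workflows" % GALAXY_URL,
                     params={"key": API_KEY}, json=payload)
print(resp.status_code, resp.text)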
https://bitbucket.org/galaxy/galaxy-central/commits/d66016004bca/
Changeset:   d66016004bca
User:        jmchilton
Date:        2014-03-04 14:39:06
Summary:     Merged in nsoranzo/galaxy-central (pull request #341)

             Workflows API enhancements (pull request #337 corrected)
Affected #:  1 file

diff -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 -r d66016004bca809cb3bc1fcd630c6d336bf92a9e lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -79,7 +79,7 @@
                 user=trans.user ).join( 'stored_workflow' ).filter(
                     trans.app.model.StoredWorkflow.deleted == False ).order_by(
                     desc( trans.app.model.StoredWorkflow.update_time ) ).all():
-            item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id })
+            item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } )
             encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
             item['url'] = url_for( 'workflow', id=encoded_id )
             rval.append(item)
@@ -127,6 +127,8 @@
         steps[step.id] = {'id': step.id,
                           'type': step.type,
                           'tool_id': step.tool_id,
+                          'tool_version': step.tool_version,
+                          'tool_inputs': step.tool_inputs,
                           'input_steps': {}}
         for conn in step.input_connections:
             steps[step.id]['input_steps'][conn.input_name] = {'source_step': conn.output_step_id,
@@ -139,17 +141,38 @@
         """
         POST /api/workflows

-        We're not creating workflows from the api.  Just execute for now.
+        We're not creating workflows from the api. Just execute for now.

-        However, we will import them if installed_repository_file is specified
+        However, we will import them if installed_repository_file is specified.
+
+        :param installed_repository_file The path of a workflow to import. Either workflow_id or installed_repository_file must be specified
+        :type installed_repository_file str
+
+        :param workflow_id: an existing workflow id. Either workflow_id or installed_repository_file must be specified
+        :type workflow_id: str
+
+        :param parameters: See _update_step_parameters()
+        :type parameters: dict
+
+        :param ds_map: A dictionary mapping each input step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld' or 'hda') and 'id' (which should be the id of a LibraryDatasetDatasetAssociation, LibraryDataset or HistoryDatasetAssociation respectively)
+        :type ds_map: dict
+
+        :param no_add_to_history: if present in the payload with any value, the input datasets will not be added to the selected history
+        :type no_add_to_history: str
+
+        :param history: Either the name of a new history or "hist_id=HIST_ID" where HIST_ID is the id of an existing history
+        :type history: str
+
+        :param replacement_params: A dictionary used when renaming datasets
+        :type replacement_params: dict
         """

         # Pull parameters out of payload.
-        workflow_id = payload['workflow_id']
+        workflow_id = payload.get('workflow_id', None)
         param_map = payload.get('parameters', {})
-        ds_map = payload['ds_map']
+        ds_map = payload.get('ds_map', {})
         add_to_history = 'no_add_to_history' not in payload
-        history_param = payload['history']
+        history_param = payload.get('history', '')

         # Get/create workflow.
         if not workflow_id:
@@ -175,6 +198,20 @@
                 return("Workflow is not owned by or shared with current user")
             workflow = stored_workflow.latest_workflow

+        # Sanity checks.
+        if not workflow:
+            trans.response.status = 400
+            return "Workflow not found."
+        if len( workflow.steps ) == 0:
+            trans.response.status = 400
+            return "Workflow cannot be run because it does not have any steps"
+        if workflow.has_cycles:
+            trans.response.status = 400
+            return "Workflow cannot be run because it contains cycles"
+        if workflow.has_errors:
+            trans.response.status = 400
+            return "Workflow cannot be run because of validation errors in some steps"
+
         # Get target history.
         if history_param.startswith('hist_id='):
             #Passing an existing history to use.
@@ -210,7 +247,7 @@
                 else:
                     trans.response.status = 400
                     return "Unknown dataset source '%s' specified." % ds_map[k]['src']
-                if add_to_history and hda.history != history:
+                if add_to_history and hda.history != history:
                     hda = hda.copy()
                     history.add_dataset(hda)
                 ds_map[k]['hda'] = hda
@@ -218,22 +255,7 @@
                 trans.response.status = 400
                 return "Invalid Dataset '%s' Specified" % ds_map[k]['id']

-        # Sanity checks.
-        if not workflow:
-            trans.response.status = 400
-            return "Workflow not found."
-        if len( workflow.steps ) == 0:
-            trans.response.status = 400
-            return "Workflow cannot be run because it does not have any steps"
-        if workflow.has_cycles:
-            trans.response.status = 400
-            return "Workflow cannot be run because it contains cycles"
-        if workflow.has_errors:
-            trans.response.status = 400
-            return "Workflow cannot be run because of validation errors in some steps"
-
         # Build the state for each step
-        rval = {}
         for step in workflow.steps:
             step_errors = None
             input_connections_by_name = {}
@@ -260,7 +282,7 @@
                     trans.response.status = 400
                     return "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages
             else:
-                # This is an input step.  Make sure we have an available input.
+                # This is an input step. Make sure we have an available input.
                 if step.type == 'data_input' and str(step.id) not in ds_map:
                     trans.response.status = 400
                     return "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id
@@ -268,12 +290,7 @@
             step.state = step.module.get_runtime_state()

         # Run each step, connecting outputs to inputs
-        outputs = util.odict.odict()
-        rval['history'] = trans.security.encode_id(history.id)
-        rval['outputs'] = []
-
         replacement_dict = payload.get('replacement_params', {})
-
         outputs = invoke(
             trans=trans,
             workflow=workflow,
@@ -285,6 +302,9 @@

         # Build legacy output - should probably include more information from
         # outputs.
+        rval = {}
+        rval['history'] = trans.security.encode_id(history.id)
+        rval['outputs'] = []
         for step in workflow.steps:
             if step.type == 'tool' or step.type is None:
                 for v in outputs[ step.id ].itervalues():
@@ -387,7 +407,7 @@
         """
        # Pull parameters out of payload.
        workflow_id = payload.get('workflow_id', None)
-       if workflow_id == None:
+       if workflow_id is None:
            raise exceptions.ObjectAttributeMissingException( "Missing required parameter 'workflow_id'." )
        try:
            stored_workflow = self.get_stored_workflow( trans, workflow_id, check_ownership=False )
@@ -452,7 +472,7 @@
        if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
            raise exceptions.ItemOwnershipException()
        results = trans.sa_session.query(self.app.model.WorkflowInvocation).filter(self.app.model.WorkflowInvocation.workflow_id==stored_workflow.latest_workflow_id)
-       results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
+       results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
        out = results.first()
        if out is not None:
            return self.encode_all_ids( trans, out.to_dict('element'), True)
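The recurring pattern in the merged diff, dict.get() with defaults instead
of bare indexing plus an identity comparison with None, can be seen in
isolation below. This is a standalone sketch with illustrative names only,
not code from the repository.

def pull_run_params(payload):
    # .get() tolerates missing keys, so an import-only payload does not crash.
    workflow_id = payload.get('workflow_id', None)
    ds_map = payload.get('ds_map', {})
    history_param = payload.get('history', '')
    # Presence of the key, with any value, disables adding inputs to history.
    add_to_history = 'no_add_to_history' not in payload
    if workflow_id is None:  # identity check, preferred over '== None'
        raise ValueError("Missing required parameter 'workflow_id'.")
    return workflow_id, ds_map, history_param, add_to_history

print(pull_run_params({'workflow_id': 'ebfb8f50c6abde6d'}))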
Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this
because you have the service enabled, addressing the recipient of this email.