commit/galaxy-central: jmchilton: Improvements to yaml_to_workflow for Kyle.
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/46a3d43d779f/

Changeset: 46a3d43d779f
User: jmchilton
Date: 2014-12-04 21:18:20+00:00
Summary: Improvements to yaml_to_workflow for Kyle. Add UUID to workflows (his contribution) and add shortcuts for rename and hide actions, with tests (his request, my implementation).
Affected #: 4 files
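In short, a tool step in the YAML workflow source can now carry per-output "hide" and "rename" keys, which yaml_to_workflow expands into Galaxy's standard post-job-action dictionaries. A minimal standalone sketch of that mapping (the helper name expand_output_shortcuts is invented for illustration; the action names and argument keys come straight from the yaml_to_workflow.py diff below):

    def expand_output_shortcuts(output_name, output):
        # Expand {"hide": True, "rename": "..."} into post_job_actions entries.
        actions = {}
        if output.get("hide", False):
            actions["HideDatasetAction%s" % output_name] = {
                "action_type": "HideDatasetAction",
                "output_name": output_name,
                "action_arguments": {},
            }
        if output.get("rename", None):
            actions["RenameDatasetAction%s" % output_name] = {
                "action_type": "RenameDatasetAction",
                "output_name": output_name,
                "action_arguments": {"newname": output["rename"]},
            }
        return actions

Calling expand_output_shortcuts("out_file1", {"hide": True, "rename": "the new value"}) yields both a HideDatasetAction and a RenameDatasetAction entry keyed by action name, which is exactly the shape each step stores under post_job_actions.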
diff -r 7272cc7b9ea1fd64646fa6d7e6fb5924fd8618f8 -r 46a3d43d779fa7735029449935dad77474adf9b1 test/api/test_workflow_extraction.py
--- a/test/api/test_workflow_extraction.py
+++ b/test/api/test_workflow_extraction.py
@@ -182,68 +182,6 @@
         collection_step_state = loads( collection_step[ "tool_state" ] )
         self.assertEquals( collection_step_state[ "collection_type" ], u"list:paired" )
 
-    def _run_jobs( self, jobs_yaml ):
-        history_id = self.history_id
-        workflow_id = self._upload_yaml_workflow(
-            jobs_yaml
-        )
-        jobs_descriptions = yaml.load( jobs_yaml )
-        test_data = jobs_descriptions["test_data"]
-
-        label_map = {}
-        inputs = {}
-        for key, value in test_data.items():
-            if isinstance( value, dict ):
-                elements_data = value.get( "elements", [] )
-                elements = []
-                for element_data in elements_data:
-                    identifier = element_data[ "identifier" ]
-                    content = element_data["content"]
-                    elements.append( ( identifier, content ) )
-                collection_type = value["type"]
-                if collection_type == "list:paired":
-                    hdca = self.dataset_collection_populator.create_list_of_pairs_in_history( history_id ).json()
-                elif collection_type == "list":
-                    hdca = self.dataset_collection_populator.create_list_in_history( history_id, contents=elements ).json()
-                else:
-                    hdca = self.dataset_collection_populator.create_pair_in_history( history_id, contents=elements ).json()
-                label_map[key] = self._ds_entry( hdca )
-                inputs[key] = hdca
-            else:
-                hda = self.dataset_populator.new_dataset( history_id, content=value )
-                label_map[key] = self._ds_entry( hda )
-                inputs[key] = hda
-        workflow_request = dict(
-            history="hist_id=%s" % history_id,
-            workflow_id=workflow_id,
-        )
-        workflow_request[ "inputs" ] = dumps( label_map )
-        workflow_request[ "inputs_by" ] = 'name'
-        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
-        url = "workflows/%s/usage" % ( workflow_id )
-        invocation_response = self._post( url, data=workflow_request )
-        self._assert_status_code_is( invocation_response, 200 )
-        invocation = invocation_response.json()
-        invocation_id = invocation[ "id" ]
-        # Wait for workflow to become fully scheduled and then for all jobs
-        # complete.
-        self.wait_for_invocation( workflow_id, invocation_id )
-        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
-        jobs = self._history_jobs( history_id )
-        return RunJobsSummary(
-            history_id=history_id,
-            workflow_id=workflow_id,
-            inputs=inputs,
-            jobs=jobs,
-        )
-
-    def wait_for_invocation( self, workflow_id, invocation_id ):
-        url = "workflows/%s/usage/%s" % ( workflow_id, invocation_id )
-        return wait_on_state( lambda: self._get( url ) )
-
-    def _history_jobs( self, history_id ):
-        return self._get("jobs", { "history_id": history_id, "order_by": "create_time" } ).json()
-
     def _job_id_for_tool( self, jobs, tool_id ):
         return self._job_for_tool( jobs, tool_id )[ "id" ]
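The helper removed above is not dropped from the test suite; the next diff re-adds it on the shared base class in test_workflows.py so any workflow API test case can run YAML-defined jobs. The scheduling wait it performs amounts to polling the invocation endpoint until a terminal state is reached. A rough requests-based equivalent for readers outside the test framework (the endpoint comes from the code above; the terminal-state set is an assumption, since wait_on_state lives in test/api/helpers.py, which this commit does not touch):

    import time

    import requests

    def wait_for_invocation(galaxy_url, workflow_id, invocation_id, timeout=60):
        # Poll the invocation ("usage") endpoint until a terminal state appears.
        url = "%s/api/workflows/%s/usage/%s" % (galaxy_url, workflow_id, invocation_id)
        deadline = time.time() + timeout
        while time.time() < deadline:
            state = requests.get(url).json().get("state")
            if state in ("scheduled", "failed", "cancelled"):  # assumed terminal states
                return state
            time.sleep(1)
        raise AssertionError("Invocation %s never reached a terminal state" % invocation_id)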
diff -r 7272cc7b9ea1fd64646fa6d7e6fb5924fd8618f8 -r 46a3d43d779fa7735029449935dad77474adf9b1 test/api/test_workflows.py
--- a/test/api/test_workflows.py
+++ b/test/api/test_workflows.py
@@ -1,6 +1,12 @@
+from .helpers import wait_on_state
+
 from base import api
 from json import dumps
+from collections import namedtuple
+
 import time
+
+import yaml
 from .helpers import WorkflowPopulator
 from .helpers import DatasetPopulator
 from .helpers import DatasetCollectionPopulator
@@ -101,6 +107,69 @@
         invocation_details = invocation_details_response.json()
         return invocation_details
 
+    def _run_jobs( self, jobs_yaml, history_id=None ):
+        if history_id is None:
+            history_id = self.history_id
+        workflow_id = self._upload_yaml_workflow(
+            jobs_yaml
+        )
+        jobs_descriptions = yaml.load( jobs_yaml )
+        test_data = jobs_descriptions["test_data"]
+
+        label_map = {}
+        inputs = {}
+        for key, value in test_data.items():
+            if isinstance( value, dict ):
+                elements_data = value.get( "elements", [] )
+                elements = []
+                for element_data in elements_data:
+                    identifier = element_data[ "identifier" ]
+                    content = element_data["content"]
+                    elements.append( ( identifier, content ) )
+                collection_type = value["type"]
+                if collection_type == "list:paired":
+                    hdca = self.dataset_collection_populator.create_list_of_pairs_in_history( history_id ).json()
+                elif collection_type == "list":
+                    hdca = self.dataset_collection_populator.create_list_in_history( history_id, contents=elements ).json()
+                else:
+                    hdca = self.dataset_collection_populator.create_pair_in_history( history_id, contents=elements ).json()
+                label_map[key] = self._ds_entry( hdca )
+                inputs[key] = hdca
+            else:
+                hda = self.dataset_populator.new_dataset( history_id, content=value )
+                label_map[key] = self._ds_entry( hda )
+                inputs[key] = hda
+        workflow_request = dict(
+            history="hist_id=%s" % history_id,
+            workflow_id=workflow_id,
+        )
+        workflow_request[ "inputs" ] = dumps( label_map )
+        workflow_request[ "inputs_by" ] = 'name'
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        url = "workflows/%s/usage" % ( workflow_id )
+        invocation_response = self._post( url, data=workflow_request )
+        self._assert_status_code_is( invocation_response, 200 )
+        invocation = invocation_response.json()
+        invocation_id = invocation[ "id" ]
+        # Wait for workflow to become fully scheduled and then for all jobs
+        # complete.
+        self.wait_for_invocation( workflow_id, invocation_id )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        jobs = self._history_jobs( history_id )
+        return RunJobsSummary(
+            history_id=history_id,
+            workflow_id=workflow_id,
+            inputs=inputs,
+            jobs=jobs,
+        )
+
+    def wait_for_invocation( self, workflow_id, invocation_id ):
+        url = "workflows/%s/usage/%s" % ( workflow_id, invocation_id )
+        return wait_on_state( lambda: self._get( url ) )
+
+    def _history_jobs( self, history_id ):
+        return self._get("jobs", { "history_id": history_id, "order_by": "create_time" } ).json()
+
     # Workflow API TODO:
     # - Allow history_id as param to workflow run action. (hist_id)
@@ -641,3 +710,7 @@
             shared_workflow_id=workflow_id,
         )
         return self._post( route, import_data )
+
+
+RunJobsSummary = namedtuple('RunJobsSummary', ['history_id', 'workflow_id', 'inputs', 'jobs'])
+
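Because _run_jobs now returns a RunJobsSummary namedtuple, callers can pull results out by field name rather than unpacking a bare tuple. An illustrative usage inside a BaseWorkflowsApiTestCase subclass (the tool and assertions are invented for the example, not part of the commit):

    summary = self._run_jobs("""
steps:
  - type: input
    label: input1
  - tool_id: cat1
    state:
      input1:
        $link: 0
test_data:
  input1: "hello world"
""")
    # Named fields keep the assertions self-documenting.
    assert summary.history_id and summary.workflow_id
    cat1_jobs = [j for j in summary.jobs if j["tool_id"] == "cat1"]
    assert len(cat1_jobs) == 1, summary.jobs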
+ if "outputs" in step: + for name, output in step.get("outputs", {}).items(): + if output.get("hide", False): + action_name = "HideDatasetAction%s" % name + action = __action( + "HideDatasetAction", + name, + ) + post_job_actions[action_name] = action + + if output.get("rename", None): + new_name = output.get("rename") + action_name = "RenameDatasetAction%s" % name + arguments = dict(newname=new_name) + action = __action( + "RenameDatasetAction", + name, + arguments, + ) + post_job_actions[action_name] = action + + del step["outputs"] + class ConversionContext(object): @@ -185,6 +213,14 @@ self.labels = {} +def __action(type, name, arguments={}): + return { + "action_arguments": arguments, + "action_type": type, + "output_name": name, + } + + def __is_link(value): return isinstance(value, dict) and "$link" in value Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.