commit/galaxy-central: 7 new changesets
7 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/045da9050e44/
Changeset:   045da9050e44
Branch:      workflow_params
User:        simleo
Date:        2014-01-30 12:15:55
Summary:     Created new branch workflow_params
Affected #:  0 files

https://bitbucket.org/galaxy/galaxy-central/commits/cb881765cb99/
Changeset:   cb881765cb99
Branch:      workflow_params
User:        simleo
Date:        2014-01-30 12:18:58
Summary:     changed step_id passing convention in the workflows API
Affected #:  1 file

diff -r 045da9050e4489f7f559ec0e106790410856e35d -r cb881765cb992eb4aff08d8d62a74f2ca6dfa5de lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -214,19 +214,12 @@
             step.state = step.module.state
 
             # Update step parameters as directed by payload's parameter mapping.
-            if step.tool_id in param_map:
-                param_dict = param_map[ step.tool_id ]
-                step_id = param_dict.get( 'step_id', '' )
-
+            param_dict = param_map.get(str(step.id), param_map.get(step.tool_id))
+            if param_dict is not None:
                 # Backward compatibility: convert param/value dict to new 'name': 'value' format.
                 if 'param' in param_dict and 'value' in param_dict:
                     param_dict[ param_dict['param'] ] = param_dict['value']
-
-                # Update step if there's no step id (i.e. all steps with tool are
-                # updated) or update if step ids match.
-                if not step_id or ( step_id and int( step_id ) == step.id ):
-                    for name, value in param_dict.items():
-                        step.state.inputs[ name ] = value
+                step.state.inputs.update(param_dict)
 
             if step.tool_errors:
                 trans.response.status = 400
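The changeset above replaces the old convention, where the target step was named via a 'step_id' entry inside a per-tool parameter dict, with a lookup keyed directly by step id (falling back to tool id). As a rough illustration only - the step id, tool id and value below are made-up examples, not taken from the commits - the 'parameters' field of a run-workflow request changes shape roughly like this:

    from json import dumps

    # Old convention (removed above): keyed by tool id, with the target step
    # picked out via a 'step_id' entry and a single 'param'/'value' pair.
    old_style = dumps({"random_lines1": {"param": "num_lines", "value": 5, "step_id": 2}})

    # New convention: keyed by the step id itself (as a string), mapping
    # parameter names directly to values.
    new_style = dumps({"2": {"num_lines": 5}})

    print(old_style)
    print(new_style)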
https://bitbucket.org/galaxy/galaxy-central/commits/713b2604e8ce/
Changeset:   713b2604e8ce
Branch:      workflow_params
User:        simleo
Date:        2014-02-02 14:21:49
Summary:     workflow API: more flexible step parameter updating
Affected #:  1 file

diff -r cb881765cb992eb4aff08d8d62a74f2ca6dfa5de -r 713b2604e8cec21b762222cc193b17d057eb680e lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -19,6 +19,39 @@
 log = logging.getLogger(__name__)
 
 
+def _update_step_parameters(step, param_map):
+    """
+    Update ``step`` parameters based on the user-provided ``param_map`` dict.
+
+    ``param_map`` should be structured as follows::
+
+      PARAM_MAP = {STEP_ID: PARAM_DICT, ...}
+      PARAM_DICT = {NAME: VALUE, ...}
+
+    For backwards compatibility, the following (deprecated) formats is
+    also supported for ``param_map``::
+
+      PARAM_MAP = {TOOL_ID: PARAM_DICT, ...}
+
+    in which case PARAM_DICT affects all steps with the given tool id.
+    If both by-tool-id and by-step-id specifications are used, the
+    latter takes precedence.
+
+    Finally (again, for backwards compatibility), PARAM_DICT can also
+    be specified as::
+
+      PARAM_DICT = {'param': NAME, 'value': VALUE}
+
+    Note that this format allows only one parameter to be set per step.
+    """
+    param_dict = param_map.get(step.tool_id, {}).copy()
+    param_dict.update(param_map.get(str(step.id), {}))
+    if param_dict:
+        if 'param' in param_dict and 'value' in param_dict:
+            param_dict[param_dict['param']] = param_dict['value']
+        step.state.inputs.update(param_dict)
+
+
 class WorkflowsAPIController(BaseAPIController, UsesStoredWorkflowMixin):
 
     @web.expose_api
@@ -212,15 +245,7 @@
             # are not persisted so we need to do it every time)
             step.module.add_dummy_datasets( connections=step.input_connections )
             step.state = step.module.state
-
-            # Update step parameters as directed by payload's parameter mapping.
-            param_dict = param_map.get(str(step.id), param_map.get(step.tool_id))
-            if param_dict is not None:
-                # Backward compatibility: convert param/value dict to new 'name': 'value' format.
-                if 'param' in param_dict and 'value' in param_dict:
-                    param_dict[ param_dict['param'] ] = param_dict['value']
-                step.state.inputs.update(param_dict)
-
+            _update_step_parameters(step, param_map)
             if step.tool_errors:
                 trans.response.status = 400
                 return "Workflow cannot be run because of validation errors in some steps: %s" % step_errors

https://bitbucket.org/galaxy/galaxy-central/commits/d8a1b16c94e2/
Changeset:   d8a1b16c94e2
Branch:      workflow_params
User:        simleo
Date:        2014-02-02 14:35:57
Summary:     fixed a typo
Affected #:  1 file

diff -r 713b2604e8cec21b762222cc193b17d057eb680e -r d8a1b16c94e26e89be0ce0d1b3b696be4ac7957e lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -28,7 +28,7 @@
       PARAM_MAP = {STEP_ID: PARAM_DICT, ...}
       PARAM_DICT = {NAME: VALUE, ...}
 
-    For backwards compatibility, the following (deprecated) formats is
+    For backwards compatibility, the following (deprecated) format is
     also supported for ``param_map``::
 
       PARAM_MAP = {TOOL_ID: PARAM_DICT, ...}
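For readers following along, here is a small, self-contained sketch (not part of the commits; the step id, tool id and values are illustrative) of the three param_map shapes the docstring above describes, plus the precedence rule mirrored with plain dicts:

    # Preferred form: keyed by step id (as a string), parameter name -> value.
    by_step_id = {"2": {"num_lines": 5}}

    # Deprecated form: keyed by tool id; affects every step using that tool.
    by_tool_id = {"random_lines1": {"num_lines": 5}}

    # Deprecated single-parameter form.
    legacy_pair = {"random_lines1": {"param": "num_lines", "value": 5}}

    # Precedence: the tool-id dict is copied first and then updated with the
    # step-id dict, so the step-specific value wins for this step.
    param_map = {"random_lines1": {"num_lines": 8}, "2": {"num_lines": 5}}
    step_id, tool_id = 2, "random_lines1"
    param_dict = param_map.get(tool_id, {}).copy()
    param_dict.update(param_map.get(str(step_id), {}))
    print(param_dict)  # {'num_lines': 5}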
https://bitbucket.org/galaxy/galaxy-central/commits/19bff78d5c91/
Changeset:   19bff78d5c91
User:        jmchilton
Date:        2014-02-03 00:44:50
Summary:     Merge pull request #317.
Affected #:  1 file

diff -r 6788cffb792bc88bb6cfb3a15aff1d29a8c4a700 -r 19bff78d5c91db39a34cc18ea467faf7e5129736 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -19,6 +19,39 @@
 log = logging.getLogger(__name__)
 
 
+def _update_step_parameters(step, param_map):
+    """
+    Update ``step`` parameters based on the user-provided ``param_map`` dict.
+
+    ``param_map`` should be structured as follows::
+
+      PARAM_MAP = {STEP_ID: PARAM_DICT, ...}
+      PARAM_DICT = {NAME: VALUE, ...}
+
+    For backwards compatibility, the following (deprecated) format is
+    also supported for ``param_map``::
+
+      PARAM_MAP = {TOOL_ID: PARAM_DICT, ...}
+
+    in which case PARAM_DICT affects all steps with the given tool id.
+    If both by-tool-id and by-step-id specifications are used, the
+    latter takes precedence.
+
+    Finally (again, for backwards compatibility), PARAM_DICT can also
+    be specified as::
+
+      PARAM_DICT = {'param': NAME, 'value': VALUE}
+
+    Note that this format allows only one parameter to be set per step.
+    """
+    param_dict = param_map.get(step.tool_id, {}).copy()
+    param_dict.update(param_map.get(str(step.id), {}))
+    if param_dict:
+        if 'param' in param_dict and 'value' in param_dict:
+            param_dict[param_dict['param']] = param_dict['value']
+        step.state.inputs.update(param_dict)
+
+
 class WorkflowsAPIController(BaseAPIController, UsesStoredWorkflowMixin):
 
     @web.expose_api
@@ -212,22 +245,7 @@
             # are not persisted so we need to do it every time)
             step.module.add_dummy_datasets( connections=step.input_connections )
             step.state = step.module.state
-
-            # Update step parameters as directed by payload's parameter mapping.
-            if step.tool_id in param_map:
-                param_dict = param_map[ step.tool_id ]
-                step_id = param_dict.get( 'step_id', '' )
-
-                # Backward compatibility: convert param/value dict to new 'name': 'value' format.
-                if 'param' in param_dict and 'value' in param_dict:
-                    param_dict[ param_dict['param'] ] = param_dict['value']
-
-                # Update step if there's no step id (i.e. all steps with tool are
-                # updated) or update if step ids match.
-                if not step_id or ( step_id and int( step_id ) == step.id ):
-                    for name, value in param_dict.items():
-                        step.state.inputs[ name ] = value
-
+            _update_step_parameters(step, param_map)
             if step.tool_errors:
                 trans.response.status = 400
                 return "Workflow cannot be run because of validation errors in some steps: %s" % step_errors

https://bitbucket.org/galaxy/galaxy-central/commits/0f636b29e4f5/
Changeset:   0f636b29e4f5
User:        jmchilton
Date:        2014-02-03 00:54:28
Summary:     Workflow API functional testing - parameter replacement.

Test parameter replacement while running workflows - both by tool id
(deprecated) and new post pull request #317 replacement by step id.
Affected #:  3 files

diff -r 19bff78d5c91db39a34cc18ea467faf7e5129736 -r 0f636b29e4f58d1568e8658dcae57e3c45a14563 test/functional/api/helpers.py
--- a/test/functional/api/helpers.py
+++ b/test/functional/api/helpers.py
@@ -3,7 +3,11 @@
 from json import loads
 from pkg_resources import resource_string
 
+# Simple workflow that takes an input and call cat wrapper on it.
 workflow_str = resource_string( __name__, "test_workflow_1.ga" )
+# Simple workflow that takes an input and filters with random lines twice in a
+# row - first grabbing 8 lines at random and then 6.
+workflow_random_x2_str = resource_string( __name__, "test_workflow_2.ga" )
 
 
 class TestsDatasets:
@@ -68,8 +72,8 @@
     def __init__( self, api_test_case ):
         self.api_test_case = api_test_case
 
-    def load_workflow( self, name, add_pja=False ):
-        workflow = loads( workflow_str )
+    def load_workflow( self, name, content=workflow_str, add_pja=False ):
+        workflow = loads( content )
         workflow[ "name" ] = name
         if add_pja:
             tool_step = workflow[ "steps" ][ "2" ]
@@ -80,6 +84,9 @@
         )
         return workflow
 
+    def load_random_x2_workflow( self, name ):
+        return self.load_workflow( name, content=workflow_random_x2_str )
+
     def simple_workflow( self, name, **create_kwds ):
         workflow = self.load_workflow( name )
         return self.create_workflow( workflow, **create_kwds )

diff -r 19bff78d5c91db39a34cc18ea467faf7e5129736 -r 0f636b29e4f58d1568e8658dcae57e3c45a14563 test/functional/api/test_workflow_2.ga
--- /dev/null
+++ b/test/functional/api/test_workflow_2.ga
@@ -0,0 +1,89 @@
+{
+    "a_galaxy_workflow": "true",
+    "annotation": "",
+    "format-version": "0.1",
+    "name": "random_lines_x2",
+    "steps": {
+        "0": {
+            "annotation": "",
+            "id": 0,
+            "input_connections": {},
+            "inputs": [
+                {
+                    "description": "",
+                    "name": "Input Dataset"
+                }
+            ],
+            "name": "Input dataset",
+            "outputs": [],
+            "position": {
+                "left": 10,
+                "top": 10
+            },
+            "tool_errors": null,
+            "tool_id": null,
+            "tool_state": "{\"name\": \"Input Dataset\"}",
+            "tool_version": null,
+            "type": "data_input",
+            "user_outputs": []
+        },
+        "1": {
+            "annotation": "",
+            "id": 1,
+            "input_connections": {
+                "input": {
+                    "id": 0,
+                    "output_name": "output"
+                }
+            },
+            "inputs": [],
+            "name": "Select random lines",
+            "outputs": [
+                {
+                    "name": "out_file1",
+                    "type": "input"
+                }
+            ],
+            "position": {
+                "left": 230,
+                "top": 10
+            },
+            "post_job_actions": {},
+            "tool_errors": null,
+            "tool_id": "random_lines1",
+            "tool_state": "{\"__page__\": 0, \"num_lines\": \"\\\"8\\\"\", \"seed_source\": \"{\\\"__current_case__\\\": 0, \\\"seed_source_selector\\\": \\\"no_seed\\\"}\", \"input\": \"null\", \"chromInfo\": \"\\\"/home/john/workspace/galaxy-central-workflows-params/tool-data/shared/ucsc/chrom/?.len\\\"\", \"__rerun_remap_job_id__\": null}",
+            "tool_version": null,
+            "type": "tool",
+            "user_outputs": []
+        },
+        "2": {
+            "annotation": "",
+            "id": 2,
+            "input_connections": {
+                "input": {
"id": 1, + "output_name": "out_file1" + } + }, + "inputs": [], + "name": "Select random lines", + "outputs": [ + { + "name": "out_file1", + "type": "input" + } + ], + "position": { + "left": 450, + "top": 10 + }, + "post_job_actions": {}, + "tool_errors": null, + "tool_id": "random_lines1", + "tool_state": "{\"__page__\": 0, \"num_lines\": \"\\\"6\\\"\", \"seed_source\": \"{\\\"__current_case__\\\": 0, \\\"seed_source_selector\\\": \\\"no_seed\\\"}\", \"input\": \"null\", \"chromInfo\": \"\\\"/home/john/workspace/galaxy-central-workflows-params/tool-data/shared/ucsc/chrom/?.len\\\"\", \"__rerun_remap_job_id__\": null}", + "tool_version": null, + "type": "tool", + "user_outputs": [] + } + } +} \ No newline at end of file diff -r 19bff78d5c91db39a34cc18ea467faf7e5129736 -r 0f636b29e4f58d1568e8658dcae57e3c45a14563 test/functional/api/test_workflows.py --- a/test/functional/api/test_workflows.py +++ b/test/functional/api/test_workflows.py @@ -67,6 +67,31 @@ self._assert_status_code_is( run_workflow_response, 200 ) self._wait_for_history( history_id, assert_ok=True ) + def test_run_replace_params_by_tool( self ): + workflow_request, history_id = self._setup_random_x2_workflow( "test_for_replace_tool_params" ) + workflow_request[ "parameters" ] = dumps( dict( random_lines1=dict( num_lines=5 ) ) ) + run_workflow_response = self._post( "workflows", data=workflow_request ) + self._assert_status_code_is( run_workflow_response, 200 ) + self._wait_for_history( history_id, assert_ok=True ) + # Would be 8 and 6 without modification + self.__assert_lines_hid_line_count_is( history_id, 2, 5 ) + self.__assert_lines_hid_line_count_is( history_id, 3, 5 ) + + def test_run_replace_params_by_steps( self ): + workflow_request, history_id = self._setup_random_x2_workflow( "test_for_replace_step_params" ) + workflow_summary_response = self._get( "workflows/%s" % workflow_request[ "workflow_id" ] ) + self._assert_status_code_is( workflow_summary_response, 200 ) + steps = workflow_summary_response.json()[ "steps" ] + last_step_id = str( max( map( int, steps.keys() ) ) ) + params = dumps( { last_step_id: dict( num_lines=5 ) } ) + workflow_request[ "parameters" ] = params + run_workflow_response = self._post( "workflows", data=workflow_request ) + self._assert_status_code_is( run_workflow_response, 200 ) + self._wait_for_history( history_id, assert_ok=True ) + # Would be 8 and 6 without modification + self.__assert_lines_hid_line_count_is( history_id, 2, 8 ) + self.__assert_lines_hid_line_count_is( history_id, 3, 5 ) + def test_pja_import_export( self ): workflow = self.workflow_populator.load_workflow( name="test_for_pja_import", add_pja=True ) uploaded_workflow_id = self.workflow_populator.create_workflow( workflow ) @@ -94,9 +119,7 @@ def _setup_workflow_run( self, workflow ): uploaded_workflow_id = self.workflow_populator.create_workflow( workflow ) - workflow_show_resposne = self._get( "workflows/%s" % uploaded_workflow_id ) - self._assert_status_code_is( workflow_show_resposne, 200 ) - workflow_inputs = workflow_show_resposne.json()[ "inputs" ] + workflow_inputs = self._workflow_inputs( uploaded_workflow_id ) step_1 = step_2 = None for key, value in workflow_inputs.iteritems(): label = value[ "label" ] @@ -117,6 +140,29 @@ ) return workflow_request, history_id + def _setup_random_x2_workflow( self, name ): + workflow = self.workflow_populator.load_random_x2_workflow( name ) + uploaded_workflow_id = self.workflow_populator.create_workflow( workflow ) + workflow_inputs = self._workflow_inputs( uploaded_workflow_id 
https://bitbucket.org/galaxy/galaxy-central/commits/0617610e18a6/
Changeset:   0617610e18a6
Branch:      workflow_params
User:        jmchilton
Date:        2014-02-03 00:55:31
Summary:     Close branch workflow_params.
Affected #:  0 files

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.