3 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/89b6cf2df4de/
Changeset:   89b6cf2df4de
User:        jmchilton
Date:        2014-09-06 00:55:21
Summary:     Tighten permissions for workflow invocations.

Sharing a workflow with a user was previously sufficient to grant access to all invocations of that workflow. This isn't a huge problem since the information potentially leaking out was limited to invocation counts, various encoded ids, and update times. Still, I think no information about invocations should be available as a result of sharing a workflow - and upcoming changes to Galaxy will result in much more information being made available via the workflow invocation API.

Affected #:  3 files

diff -r 2604f7623fb459f3cbc6b3e30551e1a8ce0ea1b2 -r 89b6cf2df4deafc0740372bbeaff5ea0f954cd5d lib/galaxy/managers/workflows.py
--- /dev/null
+++ b/lib/galaxy/managers/workflows.py
@@ -0,0 +1,64 @@
+from galaxy import model
+from galaxy import exceptions
+
+
+class WorkflowsManager( object ):
+    """ Handles CRUD-type operations related to workflows. More interesting
+    stuff regarding workflow execution, step sorting, etc... can be found in
+    the galaxy.workflow module.
+    """
+
+    def __init__( self, app ):
+        self.app = app
+
+    def check_security( self, trans, has_workflow, check_ownership=True, check_accessible=True):
+        """ Check accessibility or ownership of workflows, stored workflows, and
+        workflow invocations. Throws an exception or returns True if the user has
+        the needed level of access.
+        """
+        if not check_ownership and not check_accessible:
+            return True
+
+        # If given an invocation, follow it to its workflow...
+        if isinstance( has_workflow, model.WorkflowInvocation ):
+            has_workflow = has_workflow.workflow
+
+        # The stored workflow carries the security information - follow to the
+        # stored workflow unless given one directly.
+        if hasattr( has_workflow, "stored_workflow" ):
+            stored_workflow = has_workflow.stored_workflow
+        else:
+            stored_workflow = has_workflow
+
+        if stored_workflow.user != trans.user and not trans.user_is_admin():
+            if check_ownership:
+                raise exceptions.ItemOwnershipException()
+            # else check_accessible...
+            if trans.sa_session.query( model.StoredWorkflowUserShareAssociation ).filter_by(user=trans.user, stored_workflow=stored_workflow ).count() == 0:
+                raise exceptions.ItemAccessibilityException()
+
+        return True
+
+    def get_invocation( self, trans, decoded_invocation_id ):
+        try:
+            workflow_invocation = trans.sa_session.query(
+                self.app.model.WorkflowInvocation
+            ).get( decoded_invocation_id )
+        except Exception:
+            raise exceptions.ObjectNotFound()
+        self.check_security( trans, workflow_invocation, check_ownership=True, check_accessible=False )
+        return workflow_invocation
+
+    def build_invocations_query( self, trans, decoded_stored_workflow_id ):
+        try:
+            stored_workflow = trans.sa_session.query(
+                self.app.model.StoredWorkflow
+            ).get( decoded_stored_workflow_id )
+        except Exception:
+            raise exceptions.ObjectNotFound()
+        self.check_security( trans, stored_workflow, check_ownership=True, check_accessible=False )
+        return trans.sa_session.query(
+            model.WorkflowInvocation
+        ).filter_by(
+            workflow_id=stored_workflow.latest_workflow_id
+        )

diff -r 2604f7623fb459f3cbc6b3e30551e1a8ce0ea1b2 -r 89b6cf2df4deafc0740372bbeaff5ea0f954cd5d lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -10,6 +10,7 @@
 from galaxy import exceptions, util
 from galaxy.model.item_attrs import UsesAnnotations
 from galaxy.managers import histories
+from galaxy.managers import workflows
 from galaxy.web import _future_expose_api as expose_api
 from galaxy.web.base.controller import BaseAPIController, url_for, UsesStoredWorkflowMixin
 from galaxy.web.base.controller import UsesHistoryMixin
@@ -26,6 +27,7 @@
     def __init__( self, app ):
         super( BaseAPIController, self ).__init__( app )
         self.history_manager = histories.HistoryManager()
+        self.workflow_manager = workflows.WorkflowsManager( app )

     @expose_api
     def index(self, trans, **kwd):
@@ -377,15 +379,8 @@

         :raises: exceptions.MessageException, exceptions.ObjectNotFound
         """
-        try:
-            stored_workflow = trans.sa_session.query(self.app.model.StoredWorkflow).get(trans.security.decode_id(workflow_id))
-        except Exception:
-            raise exceptions.ObjectNotFound()
-        # check to see if user has permissions to selected workflow
-        if stored_workflow.user != trans.user and not trans.user_is_admin():
-            if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
-                raise exceptions.ItemOwnershipException()
-        results = trans.sa_session.query(self.app.model.WorkflowInvocation).filter_by(workflow_id=stored_workflow.latest_workflow_id)
+        decoded_stored_workflow_invocation_id = self.__decode_id( trans, workflow_id )
+        results = self.workflow_manager.build_invocations_query( trans, decoded_stored_workflow_invocation_id )
         out = []
         for r in results:
             out.append( self.encode_all_ids( trans, r.to_dict(), True) )
@@ -405,20 +400,10 @@

         :raises: exceptions.MessageException, exceptions.ObjectNotFound
         """
-
-        try:
-            stored_workflow = trans.sa_session.query(self.app.model.StoredWorkflow).get(trans.security.decode_id(workflow_id))
-        except Exception:
-            raise exceptions.ObjectNotFound()
-        # check to see if user has permissions to selected workflow
-        if stored_workflow.user != trans.user and not trans.user_is_admin():
-            if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
-                raise exceptions.ItemOwnershipException()
-        results = trans.sa_session.query(self.app.model.WorkflowInvocation).filter_by(workflow_id=stored_workflow.latest_workflow_id)
-        results = results.filter_by(id=trans.security.decode_id(usage_id))
-        out = results.first()
-        if out is not None:
-            return self.encode_all_ids( trans, out.to_dict('element'), True)
+        decoded_workflow_invocation_id = self.__decode_id( trans, usage_id )
+        workflow_invocation = self.workflow_manager.get_invocation( trans, decoded_workflow_invocation_id )
+        if workflow_invocation:
+            return self.encode_all_ids( trans, workflow_invocation.to_dict('element'), True)
         return None

     def __get_stored_accessible_workflow( self, trans, workflow_id ):

diff -r 2604f7623fb459f3cbc6b3e30551e1a8ce0ea1b2 -r 89b6cf2df4deafc0740372bbeaff5ea0f954cd5d test/api/test_workflows.py
--- a/test/api/test_workflows.py
+++ b/test/api/test_workflows.py
@@ -506,22 +506,16 @@
         self._assert_has_keys( pja, "action_type", "output_name", "action_arguments" )

     @skip_without_tool( "cat1" )
+    def test_only_own_invocations_accessible( self ):
+        workflow_id, usage = self._run_workflow_once_get_invocation( "test_usage")
+        with self._different_user():
+            usage_details_response = self._get( "workflows/%s/usage/%s" % ( workflow_id, usage[ "id" ] ) )
+            self._assert_status_code_is( usage_details_response, 403 )
+
+    @skip_without_tool( "cat1" )
     def test_invocation_usage( self ):
-        workflow = self.workflow_populator.load_workflow( name="test_usage" )
-        workflow_request, history_id = self._setup_workflow_run( workflow )
-        workflow_id = workflow_request[ "workflow_id" ]
-        response = self._get( "workflows/%s/usage" % workflow_id )
-        self._assert_status_code_is( response, 200 )
-        assert len( response.json() ) == 0
-        run_workflow_response = self._post( "workflows", data=workflow_request )
-        self._assert_status_code_is( run_workflow_response, 200 )
-
-        response = self._get( "workflows/%s/usage" % workflow_id )
-        self._assert_status_code_is( response, 200 )
-        usages = response.json()
-        assert len( usages ) == 1
-
-        usage_details_response = self._get( "workflows/%s/usage/%s" % ( workflow_id, usages[ 0 ][ "id" ] ) )
+        workflow_id, usage = self._run_workflow_once_get_invocation( "test_usage")
+        usage_details_response = self._get( "workflows/%s/usage/%s" % ( workflow_id, usage[ "id" ] ) )
         self._assert_status_code_is( usage_details_response, 200 )
         usage_details = usage_details_response.json()
         # Assert some high-level things about the structure of data returned.
@@ -544,6 +538,22 @@
         # renamed to 'the_new_name'.
         assert "the_new_name" in map( lambda hda: hda[ "name" ], contents )

+    def _run_workflow_once_get_invocation( self, name ):
+        workflow = self.workflow_populator.load_workflow( name=name )
+        workflow_request, history_id = self._setup_workflow_run( workflow )
+        workflow_id = workflow_request[ "workflow_id" ]
+        response = self._get( "workflows/%s/usage" % workflow_id )
+        self._assert_status_code_is( response, 200 )
+        assert len( response.json() ) == 0
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 200 )
+
+        response = self._get( "workflows/%s/usage" % workflow_id )
+        self._assert_status_code_is( response, 200 )
+        usages = response.json()
+        assert len( usages ) == 1
+        return workflow_id, usages[ 0 ]
+
     def _setup_workflow_run( self, workflow, inputs_by='step_id', history_id=None ):
         uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
         if not history_id:

https://bitbucket.org/galaxy/galaxy-central/commits/14953d248e9b/
Changeset:   14953d248e9b
User:        jmchilton
Date:        2014-09-06 00:55:21
Summary:     Update workflow invocation to_dict for recent collection workflow changes.

There may now be multiple WorkflowInvocationSteps for each WorkflowStep when steps are mapped over collections, so having to_dict build a dictionary of this information indexed on step order is problematic: only one WorkflowInvocationStep can be represented per step. Instead, to_dict now returns a flat list of all of the invocation steps, which contains the same information. This is a backward-incompatible change to the workflow invocation API.

Also update the input mapping logic to deal with data collection inputs.

Affected #:  2 files

diff -r 89b6cf2df4deafc0740372bbeaff5ea0f954cd5d -r 14953d248e9b404589c1e81494517f47c1004e49 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3081,20 +3081,22 @@
     def to_dict( self, view='collection', value_mapper=None ):
         rval = super( WorkflowInvocation, self ).to_dict( view=view, value_mapper=value_mapper )
         if view == 'element':
-            steps = {}
+            steps = []
             for step in self.steps:
                 v = step.to_dict()
-                steps[str(v['order_index'])] = v
+                steps.append( v )
             rval['steps'] = steps

             inputs = {}
             for step in self.steps:
                 if step.workflow_step.type == 'tool':
                     for step_input in step.workflow_step.input_connections:
-                        if step_input.output_step.type == 'data_input':
+                        output_step_type = step_input.output_step.type
+                        if output_step_type in [ 'data_input', 'data_collection_input' ]:
+                            src = "hda" if output_step_type == 'data_input' else 'hdca'
                             for job_input in step.job.input_datasets:
                                 if job_input.name == step_input.input_name:
-                                    inputs[str(step_input.output_step.order_index)] = { "id": job_input.dataset_id, "src": "hda"}
+                                    inputs[str(step_input.output_step.order_index)] = { "id": job_input.dataset_id, "src": src }
             rval['inputs'] = inputs
         return rval

diff -r 89b6cf2df4deafc0740372bbeaff5ea0f954cd5d -r 14953d248e9b404589c1e81494517f47c1004e49 test/api/test_workflows.py
--- a/test/api/test_workflows.py
+++ b/test/api/test_workflows.py
@@ -520,8 +520,8 @@
         usage_details = usage_details_response.json()
         # Assert some high-level things about the structure of data returned.
         self._assert_has_keys( usage_details, "inputs", "steps" )
-        for step in usage_details[ "steps" ].values():
-            self._assert_has_keys( step, "workflow_step_id", "order_index" )
+        for step in usage_details[ "steps" ]:
+            self._assert_has_keys( step, "workflow_step_id", "order_index", "id" )

     @skip_without_tool( "cat1" )
     def test_post_job_action( self ):

https://bitbucket.org/galaxy/galaxy-central/commits/54efa2c365f4/
Changeset:   54efa2c365f4
User:        jmchilton
Date:        2014-09-06 00:55:21
Summary:     Refactoring out methods usable for downstream work on scheduling.

Affected #:  2 files

diff -r 14953d248e9b404589c1e81494517f47c1004e49 -r 54efa2c365f4e2c867d56e44fb202ec2262e1122 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -383,7 +383,7 @@
         results = self.workflow_manager.build_invocations_query( trans, decoded_stored_workflow_invocation_id )
         out = []
         for r in results:
-            out.append( self.encode_all_ids( trans, r.to_dict(), True) )
+            out.append( self.__encode_invocation( trans, r ) )
         return out

     @expose_api
@@ -403,7 +403,7 @@
         decoded_workflow_invocation_id = self.__decode_id( trans, usage_id )
         workflow_invocation = self.workflow_manager.get_invocation( trans, decoded_workflow_invocation_id )
         if workflow_invocation:
-            return self.encode_all_ids( trans, workflow_invocation.to_dict('element'), True)
+            return self.__encode_invocation( trans, workflow_invocation )
         return None

     def __get_stored_accessible_workflow( self, trans, workflow_id ):
@@ -435,6 +435,13 @@
             raise exceptions.ObjectNotFound( "No such workflow found." )
         return stored_workflow

+    def __encode_invocation( self, trans, invocation, view="element" ):
+        return self.encode_all_ids(
+            trans,
+            invocation.to_dict( view ),
+            True
+        )
+
     def __decode_id( self, trans, workflow_id, model_type="workflow" ):
         try:
             return trans.security.decode_id( workflow_id )

diff -r 14953d248e9b404589c1e81494517f47c1004e49 -r 54efa2c365f4e2c867d56e44fb202ec2262e1122 test/api/test_workflows.py
--- a/test/api/test_workflows.py
+++ b/test/api/test_workflows.py
@@ -515,9 +515,7 @@
     @skip_without_tool( "cat1" )
     def test_invocation_usage( self ):
         workflow_id, usage = self._run_workflow_once_get_invocation( "test_usage")
-        usage_details_response = self._get( "workflows/%s/usage/%s" % ( workflow_id, usage[ "id" ] ) )
-        self._assert_status_code_is( usage_details_response, 200 )
-        usage_details = usage_details_response.json()
+        usage_details = self._invocation_details( workflow_id, usage[ "id" ] )
         # Assert some high-level things about the structure of data returned.
         self._assert_has_keys( usage_details, "inputs", "steps" )
         for step in usage_details[ "steps" ]:
@@ -538,6 +536,12 @@
         # renamed to 'the_new_name'.
         assert "the_new_name" in map( lambda hda: hda[ "name" ], contents )

+    def _invocation_details( self, workflow_id, invocation_id ):
+        invocation_details_response = self._get( "workflows/%s/usage/%s" % ( workflow_id, invocation_id ) )
+        self._assert_status_code_is( invocation_details_response, 200 )
+        invocation_details = invocation_details_response.json()
+        return invocation_details
+
     def _run_workflow_once_get_invocation( self, name ):
         workflow = self.workflow_populator.load_workflow( name=name )
         workflow_request, history_id = self._setup_workflow_run( workflow )

Repository URL: https://bitbucket.org/galaxy/galaxy-central/
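For consumers of the workflow invocation API, the second changeset above (14953d248e9b) is the breaking one: an invocation's 'steps' value changes from a dictionary keyed by stringified order_index to a plain list, and 'inputs' entries may now report a 'src' of either 'hda' or 'hdca'. Below is a minimal Python sketch of how client code might accept both shapes during a transition; the helper name normalize_steps and the sample payloads are illustrative only and are not part of Galaxy.

    def normalize_steps( invocation_dict ):
        """ Return the invocation's steps as a list, accepting both the old
        shape (dict keyed by stringified order_index) and the new shape
        (plain list) described in changeset 14953d248e9b. """
        steps = invocation_dict.get( "steps", [] )
        if isinstance( steps, dict ):
            # Old shape: {"0": {...}, "1": {...}} - order by the numeric key.
            return [ step for _, step in sorted( steps.items(), key=lambda item: int( item[ 0 ] ) ) ]
        # New shape: already a list; a workflow step mapped over a collection
        # may appear more than once here.
        return list( steps )

    # Illustrative payloads only - the field values are made up.
    old_style = { "steps": { "1": { "order_index": 1, "workflow_step_id": 7 } } }
    new_style = { "steps": [ { "order_index": 1, "workflow_step_id": 7, "id": 3 } ] }

    assert normalize_steps( old_style ) == [ { "order_index": 1, "workflow_step_id": 7 } ]
    assert normalize_steps( new_style ) == [ { "order_index": 1, "workflow_step_id": 7, "id": 3 } ]

A similarly defensive check on each 'inputs' entry's 'src' field covers the new 'hdca' case introduced for data collection inputs.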