commit/galaxy-central: 8 new changesets
8 new commits in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/318e2dc3c8f3/ Changeset: 318e2dc3c8f3 User: jmchilton Date: 2014-02-28 01:06:36 Summary: Initial work on test cases for history contents API. Affected #: 2 files diff -r 95517f976cca49f984b89c9fdd5b9208b1a11fcb -r 318e2dc3c8f388c1f07937f9c0cda11b60022e1e test/base/interactor.py --- a/test/base/interactor.py +++ b/test/base/interactor.py @@ -461,16 +461,21 @@ def post_request( url, data, files={} ): return __multipart_request( url, data, files, verb="POST" ) - def put_request( url ): - return __urllib_request( url, 'PUT' ) + def put_request( url, data=None ): + if isinstance( data, dict ): + assert False, "This test will fail, Galaxy's webob dependency does not parse out urlencoded PUT/PATCH entity data, API will receive empty payload." + return __urllib_request( url, 'PUT', json_str=data ) def delete_request( url ): return __urllib_request( url, 'DELETE' ) - def __urllib_request( url, verb ): + def __urllib_request( url, verb, json_str=None ): opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request(url) request.get_method = lambda: verb + if json_str: + request.add_header( "content-type", "application/json" ) + request.add_data(json_str) try: response = opener.open(request) return RequestsLikeResponse( response.read(), status_code=response.getcode() ) diff -r 95517f976cca49f984b89c9fdd5b9208b1a11fcb -r 318e2dc3c8f388c1f07937f9c0cda11b60022e1e test/functional/api/test_history_contents.py --- /dev/null +++ b/test/functional/api/test_history_contents.py @@ -0,0 +1,104 @@ +from base import api +import json + +from .helpers import TestsDatasets +from .helpers import LibraryPopulator +from base.interactor import ( + put_request, + delete_request, +) + + +# TODO: Test anonymous access. 
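# (Illustrative aside, not part of the changeset.) The put_request() rework
# above exists because Galaxy's webob dependency does not parse urlencoded
# PUT/PATCH entity bodies - callers must hand-serialize JSON, and
# put_request() then attaches the application/json content-type header.
# A minimal sketch of the intended call pattern; the URL pieces are
# hypothetical placeholders:
import json
from base.interactor import put_request

update_url = "http://localhost:8080/api/histories/<history_id>/contents/<hda_id>?key=<api_key>"
body = json.dumps( dict( deleted=True ) )  # passing a raw dict would trip the assertion above
update_response = put_request( update_url, data=body )
assert update_response.status_code == 200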
+class HistoryContentsApiTestCase( api.ApiTestCase, TestsDatasets ): + + def setUp( self ): + super( HistoryContentsApiTestCase, self ).setUp() + self.history_id = self._new_history() + + def test_index_hda_summary( self ): + hda1 = self._new_dataset( self.history_id ) + contents_response = self._get( "histories/%s/contents" % self.history_id ) + hda_summary = self.__check_for_hda( contents_response, hda1 ) + assert "display_types" not in hda_summary # Quick summary, not full details + + def test_index_hda_all_details( self ): + hda1 = self._new_dataset( self.history_id ) + contents_response = self._get( "histories/%s/contents?details=all" % self.history_id ) + hda_details = self.__check_for_hda( contents_response, hda1 ) + self.__assert_hda_has_full_details( hda_details ) + + def test_index_hda_detail_by_id( self ): + hda1 = self._new_dataset( self.history_id ) + contents_response = self._get( "histories/%s/contents?details=%s" % ( self.history_id, hda1[ "id" ] ) ) + hda_details = self.__check_for_hda( contents_response, hda1 ) + self.__assert_hda_has_full_details( hda_details ) + + def test_show_hda( self ): + hda1 = self._new_dataset( self.history_id ) + show_response = self.__show( hda1 ) + self._assert_status_code_is( show_response, 200 ) + self.__assert_matches_hda( hda1, show_response.json() ) + + def test_hda_copy( self ): + hda1 = self._new_dataset( self.history_id ) + create_data = dict( + source='hda', + content=hda1[ "id" ], + ) + second_history_id = self._new_history() + assert self.__count_contents( second_history_id ) == 0 + create_response = self._post( "histories/%s/contents" % second_history_id, create_data ) + self._assert_status_code_is( create_response, 200 ) + assert self.__count_contents( second_history_id ) == 1 + + # TODO + #def test_lda_copy( self ): + # pass + + def test_update( self ): + hda1 = self._new_dataset( self.history_id ) + self._wait_for_history( self.history_id ) + assert str( hda1[ "deleted" ] ).lower() == "false" + update_url = self._api_url( "histories/%s/contents/%s" % ( self.history_id, hda1[ "id" ] ), use_key=True ) + # Awkward json.dumps required here because of https://trello.com/c/CQwmCeG6 + body = json.dumps( dict( deleted=True ) ) + update_response = put_request( update_url, data=body ) + self._assert_status_code_is( update_response, 200 ) + show_response = self.__show( hda1 ) + assert str( show_response.json()[ "deleted" ] ).lower() == "true" + + def test_delete( self ): + hda1 = self._new_dataset( self.history_id ) + self._wait_for_history( self.history_id ) + assert str( self.__show( hda1 ).json()[ "deleted" ] ).lower() == "false" + url = self._api_url( "histories/%s/contents/%s" % ( self.history_id, hda1["id" ] ), use_key=True ) + delete_response = delete_request( url ) + assert delete_response.status_code < 300 # Something in the 200s :). 
+ assert str( self.__show( hda1 ).json()[ "deleted" ] ).lower() == "true" + + def __show( self, hda ): + show_response = self._get( "histories/%s/contents/%s" % ( self.history_id, hda[ "id" ] ) ) + return show_response + + def __count_contents( self, history_id=None, **kwds ): + if history_id == None: + history_id = self.history_id + contents_response = self._get( "histories/%s/contents" % history_id, kwds ) + return len( contents_response.json() ) + + def __assert_hda_has_full_details( self, hda_details ): + self._assert_has_keys( hda_details, "display_types", "display_apps" ) + + def __check_for_hda( self, contents_response, hda ): + self._assert_status_code_is( contents_response, 200 ) + contents = contents_response.json() + assert len( contents ) == 1 + hda_summary = contents[ 0 ] + self.__assert_matches_hda( hda, hda_summary ) + return hda_summary + + def __assert_matches_hda( self, input_hda, query_hda ): + self._assert_has_keys( query_hda, "id", "name" ) + assert input_hda[ "name" ] == query_hda[ "name" ] + assert input_hda[ "id" ] == query_hda[ "id" ] https://bitbucket.org/galaxy/galaxy-central/commits/deefb8ee758d/ Changeset: deefb8ee758d User: jmchilton Date: 2014-02-28 01:06:36 Summary: Include root folder id in output when creating library. This prevents the need to hit the API one more time to get the root folder ID before creating datasets/folders/etc... in the library. Affected #: 1 file diff -r 318e2dc3c8f388c1f07937f9c0cda11b60022e1e -r deefb8ee758d334b8e558a54f284463cba938f54 lib/galaxy/webapps/galaxy/api/libraries.py --- a/lib/galaxy/webapps/galaxy/api/libraries.py +++ b/lib/galaxy/webapps/galaxy/api/libraries.py @@ -130,6 +130,7 @@ new_library['description'] = description new_library['synopsis'] = synopsis new_library['id'] = encoded_id + new_library['root_folder_id'] = trans.security.encode_id( root_folder.id ) return new_library def edit( self, trans, encoded_id, payload, **kwd ): https://bitbucket.org/galaxy/galaxy-central/commits/10d0dd63522f/ Changeset: 10d0dd63522f User: jmchilton Date: 2014-02-28 01:06:36 Summary: Include LDDA state in library dataset to_dict. Needed to monitor library dataset uploads (in particular for API tests, but really any monitoring will benefit from this). Affected #: 1 file diff -r deefb8ee758d334b8e558a54f284463cba938f54 -r 10d0dd63522fa5c777d743bcd58ba5e58aa5eb25 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2127,6 +2127,7 @@ parent_library_id = self.folder.parent_library.id, folder_id = self.folder_id, model_class = self.__class__.__name__, + state = ldda.state, name = ldda.name, file_name = ldda.file_name, uploaded_by = ldda.user.email, https://bitbucket.org/galaxy/galaxy-central/commits/1741dfeaa06c/ Changeset: 1741dfeaa06c User: jmchilton Date: 2014-02-28 01:06:36 Summary: Infrastructure for library API testing. Doesn't really test libraries extensively, but adds enough infrastructure to enable a library dataset to history copy test via API. Affected #: 3 files diff -r 10d0dd63522fa5c777d743bcd58ba5e58aa5eb25 -r 1741dfeaa06c3f63f27da96c12ab92950eda7286 test/functional/api/helpers.py --- a/test/functional/api/helpers.py +++ b/test/functional/api/helpers.py @@ -1,6 +1,6 @@ import time -from json import dumps -from json import loads +import json +import StringIO from pkg_resources import resource_string # Simple workflow that takes an input and call cat wrapper on it. 
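# (Illustrative aside, not part of the diff.) With root_folder_id now included
# in the library-create response (changeset deefb8ee758d above), a client can
# target the root folder without a second lookup. A sketch using the
# `requests` library - galaxy_url and admin_key are hypothetical placeholders:
import requests

galaxy_url = "http://localhost:8080"  # hypothetical
admin_key = "<admin_api_key>"  # hypothetical
library = requests.post( "%s/api/libraries" % galaxy_url,
                         params=dict( key=admin_key ),
                         data=dict( name="Example Library" ) ).json()
# Previously a separate GET was needed just to discover the root folder id.
requests.post( "%s/api/libraries/%s/contents" % ( galaxy_url, library[ "id" ] ),
               params=dict( key=admin_key ),
               data=dict( folder_id=library[ "root_folder_id" ],
                          create_type="folder",
                          name="New Folder" ) )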
@@ -10,6 +10,8 @@ workflow_random_x2_str = resource_string( __name__, "test_workflow_2.ga" ) +# TODO: Rework this so it is a stand-alone object like workflow +# populator below instead of mixin. class TestsDatasets: def _new_dataset( self, history_id, content='TestData123', **kwds ): @@ -19,15 +21,7 @@ return run_response.json()["outputs"][0] def _wait_for_history( self, history_id, assert_ok=False ): - while True: - history_details_response = self._get( "histories/%s" % history_id ) - self._assert_status_code_is( history_details_response, 200 ) - history_state = history_details_response.json()[ "state" ] - if history_state not in [ "running", "queued" ]: - break - time.sleep( .1 ) - if assert_ok: - self.assertEquals( history_state, 'ok' ) + wait_on_state( lambda: self._get( "histories/%s" % history_id ), assert_ok=assert_ok ) def _new_history( self, **kwds ): name = kwds.get( "name", "API Test History" ) @@ -60,7 +54,7 @@ def _run_tool_payload( self, tool_id, inputs, history_id, **kwds ): return dict( tool_id=tool_id, - inputs=dumps(inputs), + inputs=json.dumps(inputs), history_id=history_id, **kwds ) @@ -73,7 +67,7 @@ self.api_test_case = api_test_case def load_workflow( self, name, content=workflow_str, add_pja=False ): - workflow = loads( content ) + workflow = json.loads( content ) workflow[ "name" ] = name if add_pja: tool_step = workflow[ "steps" ][ "2" ] @@ -93,9 +87,99 @@ def create_workflow( self, workflow, **create_kwds ): data = dict( - workflow=dumps( workflow ), + workflow=json.dumps( workflow ), **create_kwds ) upload_response = self.api_test_case._post( "workflows/upload", data=data ) uploaded_workflow_id = upload_response.json()[ "id" ] return uploaded_workflow_id + + +class LibraryPopulator( object ): + + def __init__( self, api_test_case ): + self.api_test_case = api_test_case + self.galaxy_interactor = api_test_case.galaxy_interactor + + def new_private_library( self, name ): + library = self.new_library( name ) + library_id = library[ "id" ] + + role_id = self.user_private_role_id() + self.set_permissions( library_id, role_id ) + return library + + def new_library( self, name ): + data = dict( name=name ) + create_response = self.galaxy_interactor.post( "libraries", data=data, admin=True ) + return create_response.json() + + def set_permissions( self, library_id, role_id=None ): + if role_id: + perm_list = json.dumps( role_id ) + else: + perm_list = json.dumps( [] ) + + permissions = dict( + LIBRARY_ACCESS_in=perm_list, + LIBRARY_MODIFY_in=perm_list, + LIBRARY_ADD_in=perm_list, + LIBRARY_MANAGE_in=perm_list, + ) + self.galaxy_interactor.post( "libraries/%s/permissions" % library_id, data=permissions, admin=True ) + + def user_email( self ): + users_response = self.galaxy_interactor.get( "users" ) + users = users_response.json() + assert len( users ) == 1 + return users[ 0 ][ "email" ] + + def user_private_role_id( self ): + user_email = self.user_email() + roles_response = self.api_test_case.galaxy_interactor.get( "roles", admin=True ) + users_roles = [ r for r in roles_response.json() if r[ "name" ] == user_email ] + assert len( users_roles ) == 1 + return users_roles[ 0 ][ "id" ] + + def create_dataset_request( self, library, **kwds ): + create_data = { + "folder_id": kwds.get( "folder_id", library[ "root_folder_id" ] ), + "create_type": "file", + "files_0|NAME": kwds.get( "name", "NewFile" ), + "upload_option": kwds.get( "upload_option", "upload_file" ), + "file_type": kwds.get( "file_type", "auto" ), + "db_key": kwds.get( "db_key", "?" 
), + } + files = { + "files_0|file_data": kwds.get( "file", StringIO.StringIO( kwds.get( "contents", "TestData" ) ) ), + } + return create_data, files + + def new_library_dataset( self, name, **create_dataset_kwds ): + library = self.new_private_library( name ) + payload, files = self.create_dataset_request( library, **create_dataset_kwds ) + url_rel = "libraries/%s/contents" % ( library[ "id" ] ) + dataset = self.api_test_case.galaxy_interactor.post( url_rel, payload, files=files ).json()[0] + + def show(): + return self.api_test_case.galaxy_interactor.get( "libraries/%s/contents/%s" % ( library[ "id" ], dataset[ "id" ] ) ) + + wait_on_state(show) + return show().json() + + +def wait_on_state( state_func, assert_ok=False, timeout=5 ): + delta = .1 + iteration = 0 + while True: + if (delta * iteration) > timeout: + assert False, "Timed out waiting on state." + iteration += 1 + response = state_func() + assert response.status_code == 200, "Failed to fetch state update while waiting." + state = response.json()[ "state" ] + if state not in [ "running", "queued", "new" ]: + break + time.sleep( delta ) + if assert_ok: + assert state == "ok", "Final state - %s - not okay." % state diff -r 10d0dd63522fa5c777d743bcd58ba5e58aa5eb25 -r 1741dfeaa06c3f63f27da96c12ab92950eda7286 test/functional/api/test_history_contents.py --- a/test/functional/api/test_history_contents.py +++ b/test/functional/api/test_history_contents.py @@ -52,9 +52,16 @@ self._assert_status_code_is( create_response, 200 ) assert self.__count_contents( second_history_id ) == 1 - # TODO - #def test_lda_copy( self ): - # pass + def test_library_copy( self ): + ld = LibraryPopulator( self ).new_library_dataset( "lda_test_library" ) + create_data = dict( + source='library', + content=ld[ "id" ], + ) + assert self.__count_contents( self.history_id ) == 0 + create_response = self._post( "histories/%s/contents" % self.history_id, create_data ) + self._assert_status_code_is( create_response, 200 ) + assert self.__count_contents( self.history_id ) == 1 def test_update( self ): hda1 = self._new_dataset( self.history_id ) diff -r 10d0dd63522fa5c777d743bcd58ba5e58aa5eb25 -r 1741dfeaa06c3f63f27da96c12ab92950eda7286 test/functional/api/test_libraries.py --- /dev/null +++ b/test/functional/api/test_libraries.py @@ -0,0 +1,54 @@ +from base import api +from .helpers import TestsDatasets +from .helpers import LibraryPopulator +from .helpers import wait_on_state + + +class LibrariesApiTestCase( api.ApiTestCase, TestsDatasets ): + + def setUp( self ): + super( LibrariesApiTestCase, self ).setUp() + self.library_populator = LibraryPopulator( self ) + + def test_create( self ): + data = dict( name="CreateTestLibrary" ) + create_response = self._post( "libraries", data=data, admin=True ) + self._assert_status_code_is( create_response, 200 ) + library = create_response.json() + self._assert_has_keys( library, "name" ) + assert library[ "name" ] == "CreateTestLibrary" + + def test_create_private_library_permissions( self ): + library = self.library_populator.new_library( "PermissionTestLibrary" ) + library_id = library[ "id" ] + + role_id = self.library_populator.user_private_role_id() + self.library_populator.set_permissions( library_id, role_id ) + create_response = self._create_folder( library ) + self._assert_status_code_is( create_response, 200 ) + + def test_create_dataset( self ): + library = self.library_populator.new_private_library( "ForCreateDatasets" ) + payload, files = self.library_populator.create_dataset_request( library, file_type="txt", 
contents="create_test" ) + create_response = self._post( "libraries/%s/contents" % library[ "id" ], payload, files=files ) + self._assert_status_code_is( create_response, 200 ) + library_datasets = create_response.json() + assert len( library_datasets ) == 1 + library_dataset = library_datasets[ 0 ] + + def show(): + return self._get( "libraries/%s/contents/%s" % ( library[ "id" ], library_dataset[ "id" ] ) ) + + wait_on_state( show, assert_ok=True ) + library_dataset = show().json() + self._assert_has_keys( library_dataset, "peek", "data_type" ) + assert library_dataset[ "peek" ].find("create_test") >= 0 + assert library_dataset[ "data_type" ] == "txt" + + def _create_folder( self, library ): + create_data = dict( + folder_id=library[ "root_folder_id" ], + create_type="folder", + name="New Folder", + ) + return self._post( "libraries/%s/contents" % library[ "id" ], data=create_data ) https://bitbucket.org/galaxy/galaxy-central/commits/cb6e53fe3c9f/ Changeset: cb6e53fe3c9f User: jmchilton Date: 2014-02-28 01:06:36 Summary: Start rework of API test mixin TestsDataset into an plain object. Mirroring the newer WorkflowPopulator and LibraryPopulator. Affected #: 1 file diff -r 1741dfeaa06c3f63f27da96c12ab92950eda7286 -r cb6e53fe3c9fda4de94f3f34b6fa87ed2e218a54 test/functional/api/helpers.py --- a/test/functional/api/helpers.py +++ b/test/functional/api/helpers.py @@ -10,27 +10,46 @@ workflow_random_x2_str = resource_string( __name__, "test_workflow_2.ga" ) -# TODO: Rework this so it is a stand-alone object like workflow -# populator below instead of mixin. +# Deprecated mixin, use dataset populator instead. +# TODO: Rework existing tests to target DatasetPopulator in a setup method instead. class TestsDatasets: def _new_dataset( self, history_id, content='TestData123', **kwds ): - payload = self._upload_payload( history_id, content, **kwds ) - run_response = self._post( "tools", data=payload ) - self._assert_status_code_is( run_response, 200 ) + return DatasetPopulator( self.galaxy_interactor ).new_dataset( history_id, content=content, **kwds) + + def _wait_for_history( self, history_id, assert_ok=False ): + return DatasetPopulator( self.galaxy_interactor ).wait_for_history( history_id, assert_ok=assert_ok ) + + def _new_history( self, **kwds ): + return DatasetPopulator( self.galaxy_interactor ).new_history( **kwds ) + + def _upload_payload( self, history_id, content, **kwds ): + return DatasetPopulator( self.galaxy_interactor ).upload_payload( history_id, content, **kwds ) + + def _run_tool_payload( self, tool_id, inputs, history_id, **kwds ): + return DatasetPopulator( self.galaxy_interactor ).run_tool_payload( tool_id, inputs, history_id, **kwds ) + + +class DatasetPopulator( object ): + + def __init__( self, galaxy_interactor ): + self.galaxy_interactor = galaxy_interactor + + def new_dataset( self, history_id, content='TestData123', **kwds ): + payload = self.upload_payload( history_id, content, **kwds ) + run_response = self.galaxy_interactor.post( "tools", data=payload ) return run_response.json()["outputs"][0] - def _wait_for_history( self, history_id, assert_ok=False ): - wait_on_state( lambda: self._get( "histories/%s" % history_id ), assert_ok=assert_ok ) + def wait_for_history( self, history_id, assert_ok=False ): + wait_on_state( lambda: self.galaxy_interactor.get( "histories/%s" % history_id ), assert_ok=assert_ok ) - def _new_history( self, **kwds ): + def new_history( self, **kwds ): name = kwds.get( "name", "API Test History" ) - create_history_response = self._post( 
"histories", data=dict( name=name ) ) - self._assert_status_code_is( create_history_response, 200 ) + create_history_response = self.galaxy_interactor.post( "histories", data=dict( name=name ) ) history_id = create_history_response.json()[ "id" ] return history_id - def _upload_payload( self, history_id, content, **kwds ): + def upload_payload( self, history_id, content, **kwds ): name = kwds.get( "name", "Test Dataset" ) dbkey = kwds.get( "dbkey", "?" ) file_type = kwds.get( "file_type", 'txt' ) @@ -44,14 +63,14 @@ upload_params[ "files_0|to_posix_lines"] = kwds[ "to_posix_lines" ] if "space_to_tab" in kwds: upload_params[ "files_0|space_to_tab" ] = kwds[ "space_to_tab" ] - return self._run_tool_payload( + return self.run_tool_payload( tool_id='upload1', inputs=upload_params, history_id=history_id, upload_type='upload_dataset' ) - def _run_tool_payload( self, tool_id, inputs, history_id, **kwds ): + def run_tool_payload( self, tool_id, inputs, history_id, **kwds ): return dict( tool_id=tool_id, inputs=json.dumps(inputs), https://bitbucket.org/galaxy/galaxy-central/commits/94e9f2c1ea68/ Changeset: 94e9f2c1ea68 User: jmchilton Date: 2014-02-28 01:06:36 Summary: Rework history.contents_iter to potentially support multiple types. This is utilized downstream in collections work to allow a similar API to be used for loading HistoryDatasetCollectionAssociations as HistoryDatasetAssociations. Affected #: 3 files diff -r cb6e53fe3c9fda4de94f3f34b6fa87ed2e218a54 -r 94e9f2c1ea688ed5b66fbfc8766b50d053efed1b lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -19,6 +19,7 @@ import time from string import Template from itertools import ifilter +from itertools import chain import galaxy.datatypes import galaxy.datatypes.registry @@ -964,24 +965,37 @@ """ Fetch filtered list of contents of history. 
""" - python_filter = None + default_contents_types = [ + 'dataset', + ] + types = kwds.get('types', default_contents_types) + iters = [] + if 'dataset' in types: + iters.append( self.__dataset_contents_iter( **kwds ) ) + return galaxy.util.merge_sorted_iterables( operator.attrgetter( "hid" ), *iters ) + + def __dataset_contents_iter(self, **kwds): + return self.__filter_contents( HistoryDatasetAssociation, **kwds ) + + def __filter_contents( self, content_class, **kwds ): db_session = object_session( self ) assert db_session != None - query = db_session.query( HistoryDatasetAssociation ).filter( HistoryDatasetAssociation.table.c.history_id == self.id ) - query = query.order_by( HistoryDatasetAssociation.table.c.hid.asc() ) + query = db_session.query( content_class ).filter( content_class.table.c.history_id == self.id ) + query = query.order_by( content_class.table.c.hid.asc() ) + python_filter = None deleted = galaxy.util.string_as_bool_or_none( kwds.get( 'deleted', None ) ) if deleted is not None: - query = query.filter( HistoryDatasetAssociation.deleted == deleted ) + query = query.filter( content_class.deleted == deleted ) visible = galaxy.util.string_as_bool_or_none( kwds.get( 'visible', None ) ) if visible is not None: - query = query.filter( HistoryDatasetAssociation.visible == visible ) + query = query.filter( content_class.visible == visible ) if 'ids' in kwds: ids = kwds['ids'] max_in_filter_length = kwds.get('max_in_filter_length', MAX_IN_FILTER_LENGTH) if len(ids) < max_in_filter_length: - query = query.filter( HistoryDatasetAssociation.id.in_(ids) ) + query = query.filter( content_class.id.in_(ids) ) else: - python_filter = lambda hda: hda.id in ids + python_filter = lambda content: content.id in ids if python_filter: return ifilter(python_filter, query) else: diff -r cb6e53fe3c9fda4de94f3f34b6fa87ed2e218a54 -r 94e9f2c1ea688ed5b66fbfc8766b50d053efed1b lib/galaxy/util/__init__.py --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -427,6 +427,59 @@ return os.path.commonprefix( [ file, directory ] ) == directory +def merge_sorted_iterables( operator, *iterables ): + """ + + >>> operator = lambda x: x + >>> list( merge_sorted_iterables( operator, [1,2,3], [4,5] ) ) + [1, 2, 3, 4, 5] + >>> list( merge_sorted_iterables( operator, [4, 5], [1,2,3] ) ) + [1, 2, 3, 4, 5] + >>> list( merge_sorted_iterables( operator, [1, 4, 5], [2], [3] ) ) + [1, 2, 3, 4, 5] + """ + first_iterable = iterables[ 0 ] + if len( iterables ) == 1: + for el in first_iterable: + yield el + else: + for el in __merge_two_sorted_iterables( + operator, + iter( first_iterable ), + merge_sorted_iterables( operator, *iterables[ 1: ] ) + ): + yield el + + +def __merge_two_sorted_iterables( operator, iterable1, iterable2 ): + unset = object() + continue_merge = True + next_1 = unset + next_2 = unset + while continue_merge: + try: + if next_1 is unset: + next_1 = next( iterable1 ) + if next_2 is unset: + next_2 = next( iterable2 ) + if operator( next_2 ) < operator( next_1 ): + yield next_2 + next_2 = unset + else: + yield next_1 + next_1 = unset + except StopIteration: + continue_merge = False + if next_1 is not unset: + yield next_1 + if next_2 is not unset: + yield next_2 + for el in iterable1: + yield el + for el in iterable2: + yield el + + class Params( object ): """ Stores and 'sanitizes' parameters. 
Alphanumeric characters and the diff -r cb6e53fe3c9fda4de94f3f34b6fa87ed2e218a54 -r 94e9f2c1ea688ed5b66fbfc8766b50d053efed1b lib/galaxy/webapps/galaxy/api/history_contents.py --- a/lib/galaxy/webapps/galaxy/api/history_contents.py +++ b/lib/galaxy/webapps/galaxy/api/history_contents.py @@ -51,7 +51,13 @@ else: history = self.get_history( trans, history_id, check_ownership=True, check_accessible=True ) - contents_kwds = {} + types = kwd.get( 'types', None ) or [] + if types: + types = util.listify(types) + else: + types = ['datasets'] + + contents_kwds = {'types': types} if ids: ids = map( lambda id: trans.security.decode_id( id ), ids.split( ',' ) ) contents_kwds[ 'ids' ] = ids https://bitbucket.org/galaxy/galaxy-central/commits/eb02193da452/ Changeset: eb02193da452 User: jmchilton Date: 2014-02-28 01:06:36 Summary: Rework history_contents API to allow room for multiple 'type's of history contents. Downstream dataset collections are being added and can be associated with histories. They have hids, can be deleted, etc... many of the same properties as HDAs. This refactoring "makes room" for these in the history contents API by allowing type to be passed in. Since HDAs and HDCAs can have the same id, the path history/{history_id}/contents/{contents_id} is somewhat ambiguous, so this changeset adds the path history/{history_id}/contents/datasets/{history_dataset_id} while a matching resource path downstream history/{history_id}/contents/dataset_collections/{history_dataset_collection_id} has been added. The vanilla contents path remains but should be considered deprecated IMO. Adding type parameter to history/{history_id}/contents index route - this will always default to dataset for backward compatibility - but downstream can be specified as either dataset, dataset_collection, or "dataset,dataset_collection". Affected #: 2 files diff -r 94e9f2c1ea688ed5b66fbfc8766b50d053efed1b -r eb02193da4526756703f94ac14ceb189fd9461db lib/galaxy/webapps/galaxy/api/history_contents.py --- a/lib/galaxy/webapps/galaxy/api/history_contents.py +++ b/lib/galaxy/webapps/galaxy/api/history_contents.py @@ -33,6 +33,9 @@ :param history_id: encoded id string of the HDA's History :type ids: str :param ids: (optional) a comma separated list of encoded `HDA` ids + :param types: (optional) kinds of contents to index (currently just + dataset, but dataset_collection will be added shortly). + :type types: str :rtype: list :returns: dictionaries containing summary or detailed HDA information @@ -51,11 +54,14 @@ else: history = self.get_history( trans, history_id, check_ownership=True, check_accessible=True ) - types = kwd.get( 'types', None ) or [] + # Allow passing in type or types - for continuity rest of methods + # take in type - but this one can be passed multiple types and + # type=dataset,dataset_collection is a bit silly.
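# (Illustrative aside, not part of the diff.) From a client's perspective the
# reworked index route accepts either spelling and defaults to datasets for
# backward compatibility:
#
#     GET /api/histories/<history_id>/contents?type=dataset
#     GET /api/histories/<history_id>/contents?types=dataset,dataset_collection   (downstream)
#
# and the typed member route this changeset adds to buildapp.py disambiguates
# HDAs from collections that may share an encoded id:
#
#     GET /api/histories/<history_id>/contents/datasets/<hda_id>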
+ types = kwd.get( 'type', kwd.get( 'types', None ) ) or [] if types: types = util.listify(types) else: - types = ['datasets'] + types = [ 'dataset' ] contents_kwds = {'types': types} if ids: @@ -72,13 +78,14 @@ if details and details != 'all': details = util.listify( details ) - for hda in history.contents_iter( **contents_kwds ): - encoded_hda_id = trans.security.encode_id( hda.id ) - detailed = details == 'all' or ( encoded_hda_id in details ) - if detailed: - rval.append( self._detailed_hda_dict( trans, hda ) ) - else: - rval.append( self._summary_hda_dict( trans, history_id, hda ) ) + for content in history.contents_iter( **contents_kwds ): + if isinstance(content, trans.app.model.HistoryDatasetAssociation): + encoded_content_id = trans.security.encode_id( content.id ) + detailed = details == 'all' or ( encoded_content_id in details ) + if detailed: + rval.append( self._detailed_hda_dict( trans, content ) ) + else: + rval.append( self._summary_hda_dict( trans, history_id, content ) ) except Exception, e: # for errors that are not specific to one hda (history lookup or summary list) rval = "Error in history API at listing contents: " + str( e ) @@ -144,6 +151,13 @@ :returns: dictionary containing detailed HDA information .. seealso:: :func:`galaxy.web.base.controller.UsesHistoryDatasetAssociationMixin.get_hda_dict` """ + contents_type = kwd.get('type', 'dataset') + if contents_type == 'dataset': + return self.__show_dataset( trans, id, history_id, **kwd ) + else: + return self.__handle_unknown_contents_type( trans, contents_type ) + + def __show_dataset( self, trans, id, history_id, **kwd ): try: hda = self.get_history_dataset_association_from_ids( trans, id, history_id ) hda_dict = self.get_hda_dict( trans, hda ) @@ -184,11 +198,6 @@ #TODO: copy existing, accessible hda - dataset controller, copy_datasets #TODO: convert existing, accessible hda - model.DatasetInstance(or hda.datatype).get_converter_types # check parameters - source = payload.get('source', None) - content = payload.get('content', None) - if source not in ['library', 'hda'] or content is None: - trans.response.status = 400 - return "Please define the source ('library' or 'hda') and the content." # retrieve history try: history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False ) @@ -196,6 +205,18 @@ # no way to tell if it failed bc of perms or other (all MessageExceptions) trans.response.status = 500 return str( e ) + type = payload.get('type', 'dataset') + if type == 'dataset': + return self.__create_dataset( trans, history, payload, **kwd ) + else: + return self.__handle_unknown_contents_type( trans, type ) + + def __create_dataset( self, trans, history, payload, **kwd ): + source = payload.get('source', None) + content = payload.get('content', None) + if source not in ['library', 'hda'] or content is None: + trans.response.status = 400 + return "Please define the source ('library' or 'hda') and the content." 
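# (Illustrative aside, not part of the diff.) The request bodies this
# create() branch accepts, with hypothetical encoded ids; "type" defaults to
# "dataset" so existing clients are unaffected:
#
#     POST /api/histories/<history_id>/contents
#         {"source": "hda", "content": "<encoded_hda_id>"}
#         {"source": "library", "content": "<encoded_ldda_id>"}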
# copy from library dataset if source == 'library': # get library data set @@ -227,7 +248,7 @@ return str( msg_exc ) except Exception, exc: trans.response.status = 500 - log.exception( "history: %s, source: %s, content: %s", history_id, source, content ) + log.exception( "history: %s, source: %s, content: %s", trans.security.encode_id(history.id), source, content ) return str( exc ) data_copy=hda.copy( copy_children=True ) result=history.add_dataset( data_copy ) @@ -261,6 +282,13 @@ any values that were different from the original and, therefore, updated """ #TODO: PUT /api/histories/{encoded_history_id} payload = { rating: rating } (w/ no security checks) + contents_type = kwd.get('type', 'dataset') + if contents_type == "dataset": + return self.__update_dataset( trans, history_id, id, payload, **kwd ) + else: + return self.__handle_unknown_contents_type( contents_type ) + + def __update_dataset( self, trans, history_id, id, payload, **kwd ): changed = {} try: # anon user @@ -329,6 +357,13 @@ * deleted: if the history was marked as deleted, * purged: if the history was purged """ + contents_type = kwd.get('type', 'dataset') + if contents_type == "dataset": + return self.__delete_dataset( trans, history_id, id, purge=purge, **kwd ) + else: + return self.__handle_unknown_contents_type( trans, contents_type ) + + def __delete_dataset( self, trans, history_id, id, purge, **kwd ): # get purge from the query or from the request body payload (a request body is optional here) purge = util.string_as_bool( purge ) if kwd.get( 'payload', None ): @@ -413,3 +448,8 @@ pass #log.warn( 'unknown key: %s', str( key ) ) return validated_payload + + def __handle_unknown_contents_type( self, trans, contents_type ): + # TODO: raise a message exception instead of setting status and returning dict. + trans.response.status = 400 + return { 'error': 'Unknown contents type %s' % type } diff -r 94e9f2c1ea688ed5b66fbfc8766b50d053efed1b -r eb02193da4526756703f94ac14ceb189fd9461db lib/galaxy/webapps/galaxy/buildapp.py --- a/lib/galaxy/webapps/galaxy/buildapp.py +++ b/lib/galaxy/webapps/galaxy/buildapp.py @@ -75,6 +75,19 @@ webapp.add_api_controllers( 'galaxy.webapps.galaxy.api', app ) + valid_history_contents_types = [ + 'dataset', + ] + # This must come before history contents below. + # Accesss HDA details via histories/:history_id/contents/datasets/:hda_id + webapp.mapper.resource( "typed_content", + "{type:%s}s" % "|".join( valid_history_contents_types ), + name_prefix="history_content_", + controller='history_contents', + path_prefix='/api/histories/:history_id/contents', + parent_resources=dict( member_name='history', collection_name='histories' ), + ) + # Legacy access to HDA details via histories/:history_id/contents/:hda_id webapp.mapper.resource( 'content', 'contents', controller='history_contents', https://bitbucket.org/galaxy/galaxy-central/commits/24f45ea024e4/ Changeset: 24f45ea024e4 User: jmchilton Date: 2014-03-04 14:25:07 Summary: Merged in jmchilton/galaxy-central-fork-1 (pull request #342) Refactor History Contents to Support Multiple Types Affected #: 9 files diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -19,6 +19,7 @@ import time from string import Template from itertools import ifilter +from itertools import chain import galaxy.datatypes import galaxy.datatypes.registry @@ -964,24 +965,37 @@ """ Fetch filtered list of contents of history. 
""" - python_filter = None + default_contents_types = [ + 'dataset', + ] + types = kwds.get('types', default_contents_types) + iters = [] + if 'dataset' in types: + iters.append( self.__dataset_contents_iter( **kwds ) ) + return galaxy.util.merge_sorted_iterables( operator.attrgetter( "hid" ), *iters ) + + def __dataset_contents_iter(self, **kwds): + return self.__filter_contents( HistoryDatasetAssociation, **kwds ) + + def __filter_contents( self, content_class, **kwds ): db_session = object_session( self ) assert db_session != None - query = db_session.query( HistoryDatasetAssociation ).filter( HistoryDatasetAssociation.table.c.history_id == self.id ) - query = query.order_by( HistoryDatasetAssociation.table.c.hid.asc() ) + query = db_session.query( content_class ).filter( content_class.table.c.history_id == self.id ) + query = query.order_by( content_class.table.c.hid.asc() ) + python_filter = None deleted = galaxy.util.string_as_bool_or_none( kwds.get( 'deleted', None ) ) if deleted is not None: - query = query.filter( HistoryDatasetAssociation.deleted == deleted ) + query = query.filter( content_class.deleted == deleted ) visible = galaxy.util.string_as_bool_or_none( kwds.get( 'visible', None ) ) if visible is not None: - query = query.filter( HistoryDatasetAssociation.visible == visible ) + query = query.filter( content_class.visible == visible ) if 'ids' in kwds: ids = kwds['ids'] max_in_filter_length = kwds.get('max_in_filter_length', MAX_IN_FILTER_LENGTH) if len(ids) < max_in_filter_length: - query = query.filter( HistoryDatasetAssociation.id.in_(ids) ) + query = query.filter( content_class.id.in_(ids) ) else: - python_filter = lambda hda: hda.id in ids + python_filter = lambda content: content.id in ids if python_filter: return ifilter(python_filter, query) else: @@ -2127,6 +2141,7 @@ parent_library_id = self.folder.parent_library.id, folder_id = self.folder_id, model_class = self.__class__.__name__, + state = ldda.state, name = ldda.name, file_name = ldda.file_name, uploaded_by = ldda.user.email, diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 lib/galaxy/util/__init__.py --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -427,6 +427,59 @@ return os.path.commonprefix( [ file, directory ] ) == directory +def merge_sorted_iterables( operator, *iterables ): + """ + + >>> operator = lambda x: x + >>> list( merge_sorted_iterables( operator, [1,2,3], [4,5] ) ) + [1, 2, 3, 4, 5] + >>> list( merge_sorted_iterables( operator, [4, 5], [1,2,3] ) ) + [1, 2, 3, 4, 5] + >>> list( merge_sorted_iterables( operator, [1, 4, 5], [2], [3] ) ) + [1, 2, 3, 4, 5] + """ + first_iterable = iterables[ 0 ] + if len( iterables ) == 1: + for el in first_iterable: + yield el + else: + for el in __merge_two_sorted_iterables( + operator, + iter( first_iterable ), + merge_sorted_iterables( operator, *iterables[ 1: ] ) + ): + yield el + + +def __merge_two_sorted_iterables( operator, iterable1, iterable2 ): + unset = object() + continue_merge = True + next_1 = unset + next_2 = unset + while continue_merge: + try: + if next_1 is unset: + next_1 = next( iterable1 ) + if next_2 is unset: + next_2 = next( iterable2 ) + if operator( next_2 ) < operator( next_1 ): + yield next_2 + next_2 = unset + else: + yield next_1 + next_1 = unset + except StopIteration: + continue_merge = False + if next_1 is not unset: + yield next_1 + if next_2 is not unset: + yield next_2 + for el in iterable1: + yield el + for el in iterable2: + yield el + + class Params( object 
): """ Stores and 'sanitizes' parameters. Alphanumeric characters and the diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 lib/galaxy/webapps/galaxy/api/history_contents.py --- a/lib/galaxy/webapps/galaxy/api/history_contents.py +++ b/lib/galaxy/webapps/galaxy/api/history_contents.py @@ -33,6 +33,9 @@ :param history_id: encoded id string of the HDA's History :type ids: str :param ids: (optional) a comma separated list of encoded `HDA` ids + :param types: (optional) kinds of contents to index (currently just + dataset, but dataset_collection will be added shortly). + :type types: str :rtype: list :returns: dictionaries containing summary or detailed HDA information @@ -51,7 +54,16 @@ else: history = self.get_history( trans, history_id, check_ownership=True, check_accessible=True ) - contents_kwds = {} + # Allow passing in type or types - for continuity rest of methods + # take in type - but this one can be passed multiple types and + # type=dataset,dataset_collection is a bit silly. + types = kwd.get( 'type', kwd.get( 'types', None ) ) or [] + if types: + types = util.listify(types) + else: + types = [ 'dataset' ] + + contents_kwds = {'types': types} if ids: ids = map( lambda id: trans.security.decode_id( id ), ids.split( ',' ) ) contents_kwds[ 'ids' ] = ids @@ -66,13 +78,14 @@ if details and details != 'all': details = util.listify( details ) - for hda in history.contents_iter( **contents_kwds ): - encoded_hda_id = trans.security.encode_id( hda.id ) - detailed = details == 'all' or ( encoded_hda_id in details ) - if detailed: - rval.append( self._detailed_hda_dict( trans, hda ) ) - else: - rval.append( self._summary_hda_dict( trans, history_id, hda ) ) + for content in history.contents_iter( **contents_kwds ): + if isinstance(content, trans.app.model.HistoryDatasetAssociation): + encoded_content_id = trans.security.encode_id( content.id ) + detailed = details == 'all' or ( encoded_content_id in details ) + if detailed: + rval.append( self._detailed_hda_dict( trans, content ) ) + else: + rval.append( self._summary_hda_dict( trans, history_id, content ) ) except Exception, e: # for errors that are not specific to one hda (history lookup or summary list) rval = "Error in history API at listing contents: " + str( e ) @@ -138,6 +151,13 @@ :returns: dictionary containing detailed HDA information .. seealso:: :func:`galaxy.web.base.controller.UsesHistoryDatasetAssociationMixin.get_hda_dict` """ + contents_type = kwd.get('type', 'dataset') + if contents_type == 'dataset': + return self.__show_dataset( trans, id, history_id, **kwd ) + else: + return self.__handle_unknown_contents_type( trans, contents_type ) + + def __show_dataset( self, trans, id, history_id, **kwd ): try: hda = self.get_history_dataset_association_from_ids( trans, id, history_id ) hda_dict = self.get_hda_dict( trans, hda ) @@ -178,11 +198,6 @@ #TODO: copy existing, accessible hda - dataset controller, copy_datasets #TODO: convert existing, accessible hda - model.DatasetInstance(or hda.datatype).get_converter_types # check parameters - source = payload.get('source', None) - content = payload.get('content', None) - if source not in ['library', 'hda'] or content is None: - trans.response.status = 400 - return "Please define the source ('library' or 'hda') and the content." 
# retrieve history try: history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False ) @@ -190,6 +205,18 @@ # no way to tell if it failed bc of perms or other (all MessageExceptions) trans.response.status = 500 return str( e ) + type = payload.get('type', 'dataset') + if type == 'dataset': + return self.__create_dataset( trans, history, payload, **kwd ) + else: + return self.__handle_unknown_contents_type( trans, type ) + + def __create_dataset( self, trans, history, payload, **kwd ): + source = payload.get('source', None) + content = payload.get('content', None) + if source not in ['library', 'hda'] or content is None: + trans.response.status = 400 + return "Please define the source ('library' or 'hda') and the content." # copy from library dataset if source == 'library': # get library data set @@ -221,7 +248,7 @@ return str( msg_exc ) except Exception, exc: trans.response.status = 500 - log.exception( "history: %s, source: %s, content: %s", history_id, source, content ) + log.exception( "history: %s, source: %s, content: %s", trans.security.encode_id(history.id), source, content ) return str( exc ) data_copy=hda.copy( copy_children=True ) result=history.add_dataset( data_copy ) @@ -255,6 +282,13 @@ any values that were different from the original and, therefore, updated """ #TODO: PUT /api/histories/{encoded_history_id} payload = { rating: rating } (w/ no security checks) + contents_type = kwd.get('type', 'dataset') + if contents_type == "dataset": + return self.__update_dataset( trans, history_id, id, payload, **kwd ) + else: + return self.__handle_unknown_contents_type( contents_type ) + + def __update_dataset( self, trans, history_id, id, payload, **kwd ): changed = {} try: # anon user @@ -323,6 +357,13 @@ * deleted: if the history was marked as deleted, * purged: if the history was purged """ + contents_type = kwd.get('type', 'dataset') + if contents_type == "dataset": + return self.__delete_dataset( trans, history_id, id, purge=purge, **kwd ) + else: + return self.__handle_unknown_contents_type( trans, contents_type ) + + def __delete_dataset( self, trans, history_id, id, purge, **kwd ): # get purge from the query or from the request body payload (a request body is optional here) purge = util.string_as_bool( purge ) if kwd.get( 'payload', None ): @@ -407,3 +448,8 @@ pass #log.warn( 'unknown key: %s', str( key ) ) return validated_payload + + def __handle_unknown_contents_type( self, trans, contents_type ): + # TODO: raise a message exception instead of setting status and returning dict. 
+ trans.response.status = 400 + return { 'error': 'Unknown contents type %s' % type } diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 lib/galaxy/webapps/galaxy/api/libraries.py --- a/lib/galaxy/webapps/galaxy/api/libraries.py +++ b/lib/galaxy/webapps/galaxy/api/libraries.py @@ -130,6 +130,7 @@ new_library['description'] = description new_library['synopsis'] = synopsis new_library['id'] = encoded_id + new_library['root_folder_id'] = trans.security.encode_id( root_folder.id ) return new_library def edit( self, trans, encoded_id, payload, **kwd ): diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 lib/galaxy/webapps/galaxy/buildapp.py --- a/lib/galaxy/webapps/galaxy/buildapp.py +++ b/lib/galaxy/webapps/galaxy/buildapp.py @@ -75,6 +75,19 @@ webapp.add_api_controllers( 'galaxy.webapps.galaxy.api', app ) + valid_history_contents_types = [ + 'dataset', + ] + # This must come before history contents below. + # Accesss HDA details via histories/:history_id/contents/datasets/:hda_id + webapp.mapper.resource( "typed_content", + "{type:%s}s" % "|".join( valid_history_contents_types ), + name_prefix="history_content_", + controller='history_contents', + path_prefix='/api/histories/:history_id/contents', + parent_resources=dict( member_name='history', collection_name='histories' ), + ) + # Legacy access to HDA details via histories/:history_id/contents/:hda_id webapp.mapper.resource( 'content', 'contents', controller='history_contents', diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 test/base/interactor.py --- a/test/base/interactor.py +++ b/test/base/interactor.py @@ -461,16 +461,21 @@ def post_request( url, data, files={} ): return __multipart_request( url, data, files, verb="POST" ) - def put_request( url ): - return __urllib_request( url, 'PUT' ) + def put_request( url, data=None ): + if isinstance( data, dict ): + assert False, "This test will fail, Galaxy's webob dependency does not parse out urlencoded PUT/PATCH entity data, API will receive empty payload." + return __urllib_request( url, 'PUT', json_str=data ) def delete_request( url ): return __urllib_request( url, 'DELETE' ) - def __urllib_request( url, verb ): + def __urllib_request( url, verb, json_str=None ): opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request(url) request.get_method = lambda: verb + if json_str: + request.add_header( "content-type", "application/json" ) + request.add_data(json_str) try: response = opener.open(request) return RequestsLikeResponse( response.read(), status_code=response.getcode() ) diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 test/functional/api/helpers.py --- a/test/functional/api/helpers.py +++ b/test/functional/api/helpers.py @@ -1,6 +1,6 @@ import time -from json import dumps -from json import loads +import json +import StringIO from pkg_resources import resource_string # Simple workflow that takes an input and call cat wrapper on it. @@ -10,33 +10,46 @@ workflow_random_x2_str = resource_string( __name__, "test_workflow_2.ga" ) +# Deprecated mixin, use dataset populator instead. +# TODO: Rework existing tests to target DatasetPopulator in a setup method instead. 
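# (Illustrative aside, not part of the diff.) The setup-method pattern the
# TODO above points toward - ExampleApiTestCase is a hypothetical name:
#
#     class ExampleApiTestCase( api.ApiTestCase ):
#
#         def setUp( self ):
#             super( ExampleApiTestCase, self ).setUp()
#             self.dataset_populator = DatasetPopulator( self.galaxy_interactor )
#             self.history_id = self.dataset_populator.new_history()
#
#         def test_upload( self ):
#             hda = self.dataset_populator.new_dataset( self.history_id, content="TestData123" )
#             self.dataset_populator.wait_for_history( self.history_id, assert_ok=True )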
class TestsDatasets: def _new_dataset( self, history_id, content='TestData123', **kwds ): - payload = self._upload_payload( history_id, content, **kwds ) - run_response = self._post( "tools", data=payload ) - self._assert_status_code_is( run_response, 200 ) + return DatasetPopulator( self.galaxy_interactor ).new_dataset( history_id, content=content, **kwds) + + def _wait_for_history( self, history_id, assert_ok=False ): + return DatasetPopulator( self.galaxy_interactor ).wait_for_history( history_id, assert_ok=assert_ok ) + + def _new_history( self, **kwds ): + return DatasetPopulator( self.galaxy_interactor ).new_history( **kwds ) + + def _upload_payload( self, history_id, content, **kwds ): + return DatasetPopulator( self.galaxy_interactor ).upload_payload( history_id, content, **kwds ) + + def _run_tool_payload( self, tool_id, inputs, history_id, **kwds ): + return DatasetPopulator( self.galaxy_interactor ).run_tool_payload( tool_id, inputs, history_id, **kwds ) + + +class DatasetPopulator( object ): + + def __init__( self, galaxy_interactor ): + self.galaxy_interactor = galaxy_interactor + + def new_dataset( self, history_id, content='TestData123', **kwds ): + payload = self.upload_payload( history_id, content, **kwds ) + run_response = self.galaxy_interactor.post( "tools", data=payload ) return run_response.json()["outputs"][0] - def _wait_for_history( self, history_id, assert_ok=False ): - while True: - history_details_response = self._get( "histories/%s" % history_id ) - self._assert_status_code_is( history_details_response, 200 ) - history_state = history_details_response.json()[ "state" ] - if history_state not in [ "running", "queued" ]: - break - time.sleep( .1 ) - if assert_ok: - self.assertEquals( history_state, 'ok' ) + def wait_for_history( self, history_id, assert_ok=False ): + wait_on_state( lambda: self.galaxy_interactor.get( "histories/%s" % history_id ), assert_ok=assert_ok ) - def _new_history( self, **kwds ): + def new_history( self, **kwds ): name = kwds.get( "name", "API Test History" ) - create_history_response = self._post( "histories", data=dict( name=name ) ) - self._assert_status_code_is( create_history_response, 200 ) + create_history_response = self.galaxy_interactor.post( "histories", data=dict( name=name ) ) history_id = create_history_response.json()[ "id" ] return history_id - def _upload_payload( self, history_id, content, **kwds ): + def upload_payload( self, history_id, content, **kwds ): name = kwds.get( "name", "Test Dataset" ) dbkey = kwds.get( "dbkey", "?" 
) file_type = kwds.get( "file_type", 'txt' ) @@ -50,17 +63,17 @@ upload_params[ "files_0|to_posix_lines"] = kwds[ "to_posix_lines" ] if "space_to_tab" in kwds: upload_params[ "files_0|space_to_tab" ] = kwds[ "space_to_tab" ] - return self._run_tool_payload( + return self.run_tool_payload( tool_id='upload1', inputs=upload_params, history_id=history_id, upload_type='upload_dataset' ) - def _run_tool_payload( self, tool_id, inputs, history_id, **kwds ): + def run_tool_payload( self, tool_id, inputs, history_id, **kwds ): return dict( tool_id=tool_id, - inputs=dumps(inputs), + inputs=json.dumps(inputs), history_id=history_id, **kwds ) @@ -73,7 +86,7 @@ self.api_test_case = api_test_case def load_workflow( self, name, content=workflow_str, add_pja=False ): - workflow = loads( content ) + workflow = json.loads( content ) workflow[ "name" ] = name if add_pja: tool_step = workflow[ "steps" ][ "2" ] @@ -93,9 +106,99 @@ def create_workflow( self, workflow, **create_kwds ): data = dict( - workflow=dumps( workflow ), + workflow=json.dumps( workflow ), **create_kwds ) upload_response = self.api_test_case._post( "workflows/upload", data=data ) uploaded_workflow_id = upload_response.json()[ "id" ] return uploaded_workflow_id + + +class LibraryPopulator( object ): + + def __init__( self, api_test_case ): + self.api_test_case = api_test_case + self.galaxy_interactor = api_test_case.galaxy_interactor + + def new_private_library( self, name ): + library = self.new_library( name ) + library_id = library[ "id" ] + + role_id = self.user_private_role_id() + self.set_permissions( library_id, role_id ) + return library + + def new_library( self, name ): + data = dict( name=name ) + create_response = self.galaxy_interactor.post( "libraries", data=data, admin=True ) + return create_response.json() + + def set_permissions( self, library_id, role_id=None ): + if role_id: + perm_list = json.dumps( role_id ) + else: + perm_list = json.dumps( [] ) + + permissions = dict( + LIBRARY_ACCESS_in=perm_list, + LIBRARY_MODIFY_in=perm_list, + LIBRARY_ADD_in=perm_list, + LIBRARY_MANAGE_in=perm_list, + ) + self.galaxy_interactor.post( "libraries/%s/permissions" % library_id, data=permissions, admin=True ) + + def user_email( self ): + users_response = self.galaxy_interactor.get( "users" ) + users = users_response.json() + assert len( users ) == 1 + return users[ 0 ][ "email" ] + + def user_private_role_id( self ): + user_email = self.user_email() + roles_response = self.api_test_case.galaxy_interactor.get( "roles", admin=True ) + users_roles = [ r for r in roles_response.json() if r[ "name" ] == user_email ] + assert len( users_roles ) == 1 + return users_roles[ 0 ][ "id" ] + + def create_dataset_request( self, library, **kwds ): + create_data = { + "folder_id": kwds.get( "folder_id", library[ "root_folder_id" ] ), + "create_type": "file", + "files_0|NAME": kwds.get( "name", "NewFile" ), + "upload_option": kwds.get( "upload_option", "upload_file" ), + "file_type": kwds.get( "file_type", "auto" ), + "db_key": kwds.get( "db_key", "?" 
), + } + files = { + "files_0|file_data": kwds.get( "file", StringIO.StringIO( kwds.get( "contents", "TestData" ) ) ), + } + return create_data, files + + def new_library_dataset( self, name, **create_dataset_kwds ): + library = self.new_private_library( name ) + payload, files = self.create_dataset_request( library, **create_dataset_kwds ) + url_rel = "libraries/%s/contents" % ( library[ "id" ] ) + dataset = self.api_test_case.galaxy_interactor.post( url_rel, payload, files=files ).json()[0] + + def show(): + return self.api_test_case.galaxy_interactor.get( "libraries/%s/contents/%s" % ( library[ "id" ], dataset[ "id" ] ) ) + + wait_on_state(show) + return show().json() + + +def wait_on_state( state_func, assert_ok=False, timeout=5 ): + delta = .1 + iteration = 0 + while True: + if (delta * iteration) > timeout: + assert False, "Timed out waiting on state." + iteration += 1 + response = state_func() + assert response.status_code == 200, "Failed to fetch state update while waiting." + state = response.json()[ "state" ] + if state not in [ "running", "queued", "new" ]: + break + time.sleep( delta ) + if assert_ok: + assert state == "ok", "Final state - %s - not okay." % state diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 test/functional/api/test_history_contents.py --- /dev/null +++ b/test/functional/api/test_history_contents.py @@ -0,0 +1,111 @@ +from base import api +import json + +from .helpers import TestsDatasets +from .helpers import LibraryPopulator +from base.interactor import ( + put_request, + delete_request, +) + + +# TODO: Test anonymous access. +class HistoryContentsApiTestCase( api.ApiTestCase, TestsDatasets ): + + def setUp( self ): + super( HistoryContentsApiTestCase, self ).setUp() + self.history_id = self._new_history() + + def test_index_hda_summary( self ): + hda1 = self._new_dataset( self.history_id ) + contents_response = self._get( "histories/%s/contents" % self.history_id ) + hda_summary = self.__check_for_hda( contents_response, hda1 ) + assert "display_types" not in hda_summary # Quick summary, not full details + + def test_index_hda_all_details( self ): + hda1 = self._new_dataset( self.history_id ) + contents_response = self._get( "histories/%s/contents?details=all" % self.history_id ) + hda_details = self.__check_for_hda( contents_response, hda1 ) + self.__assert_hda_has_full_details( hda_details ) + + def test_index_hda_detail_by_id( self ): + hda1 = self._new_dataset( self.history_id ) + contents_response = self._get( "histories/%s/contents?details=%s" % ( self.history_id, hda1[ "id" ] ) ) + hda_details = self.__check_for_hda( contents_response, hda1 ) + self.__assert_hda_has_full_details( hda_details ) + + def test_show_hda( self ): + hda1 = self._new_dataset( self.history_id ) + show_response = self.__show( hda1 ) + self._assert_status_code_is( show_response, 200 ) + self.__assert_matches_hda( hda1, show_response.json() ) + + def test_hda_copy( self ): + hda1 = self._new_dataset( self.history_id ) + create_data = dict( + source='hda', + content=hda1[ "id" ], + ) + second_history_id = self._new_history() + assert self.__count_contents( second_history_id ) == 0 + create_response = self._post( "histories/%s/contents" % second_history_id, create_data ) + self._assert_status_code_is( create_response, 200 ) + assert self.__count_contents( second_history_id ) == 1 + + def test_library_copy( self ): + ld = LibraryPopulator( self ).new_library_dataset( "lda_test_library" ) + create_data = dict( + source='library', + 
content=ld[ "id" ], + ) + assert self.__count_contents( self.history_id ) == 0 + create_response = self._post( "histories/%s/contents" % self.history_id, create_data ) + self._assert_status_code_is( create_response, 200 ) + assert self.__count_contents( self.history_id ) == 1 + + def test_update( self ): + hda1 = self._new_dataset( self.history_id ) + self._wait_for_history( self.history_id ) + assert str( hda1[ "deleted" ] ).lower() == "false" + update_url = self._api_url( "histories/%s/contents/%s" % ( self.history_id, hda1[ "id" ] ), use_key=True ) + # Awkward json.dumps required here because of https://trello.com/c/CQwmCeG6 + body = json.dumps( dict( deleted=True ) ) + update_response = put_request( update_url, data=body ) + self._assert_status_code_is( update_response, 200 ) + show_response = self.__show( hda1 ) + assert str( show_response.json()[ "deleted" ] ).lower() == "true" + + def test_delete( self ): + hda1 = self._new_dataset( self.history_id ) + self._wait_for_history( self.history_id ) + assert str( self.__show( hda1 ).json()[ "deleted" ] ).lower() == "false" + url = self._api_url( "histories/%s/contents/%s" % ( self.history_id, hda1["id" ] ), use_key=True ) + delete_response = delete_request( url ) + assert delete_response.status_code < 300 # Something in the 200s :). + assert str( self.__show( hda1 ).json()[ "deleted" ] ).lower() == "true" + + def __show( self, hda ): + show_response = self._get( "histories/%s/contents/%s" % ( self.history_id, hda[ "id" ] ) ) + return show_response + + def __count_contents( self, history_id=None, **kwds ): + if history_id == None: + history_id = self.history_id + contents_response = self._get( "histories/%s/contents" % history_id, kwds ) + return len( contents_response.json() ) + + def __assert_hda_has_full_details( self, hda_details ): + self._assert_has_keys( hda_details, "display_types", "display_apps" ) + + def __check_for_hda( self, contents_response, hda ): + self._assert_status_code_is( contents_response, 200 ) + contents = contents_response.json() + assert len( contents ) == 1 + hda_summary = contents[ 0 ] + self.__assert_matches_hda( hda, hda_summary ) + return hda_summary + + def __assert_matches_hda( self, input_hda, query_hda ): + self._assert_has_keys( query_hda, "id", "name" ) + assert input_hda[ "name" ] == query_hda[ "name" ] + assert input_hda[ "id" ] == query_hda[ "id" ] diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 test/functional/api/test_libraries.py --- /dev/null +++ b/test/functional/api/test_libraries.py @@ -0,0 +1,54 @@ +from base import api +from .helpers import TestsDatasets +from .helpers import LibraryPopulator +from .helpers import wait_on_state + + +class LibrariesApiTestCase( api.ApiTestCase, TestsDatasets ): + + def setUp( self ): + super( LibrariesApiTestCase, self ).setUp() + self.library_populator = LibraryPopulator( self ) + + def test_create( self ): + data = dict( name="CreateTestLibrary" ) + create_response = self._post( "libraries", data=data, admin=True ) + self._assert_status_code_is( create_response, 200 ) + library = create_response.json() + self._assert_has_keys( library, "name" ) + assert library[ "name" ] == "CreateTestLibrary" + + def test_create_private_library_permissions( self ): + library = self.library_populator.new_library( "PermissionTestLibrary" ) + library_id = library[ "id" ] + + role_id = self.library_populator.user_private_role_id() + self.library_populator.set_permissions( library_id, role_id ) + create_response = 
self._create_folder( library ) + self._assert_status_code_is( create_response, 200 ) + + def test_create_dataset( self ): + library = self.library_populator.new_private_library( "ForCreateDatasets" ) + payload, files = self.library_populator.create_dataset_request( library, file_type="txt", contents="create_test" ) + create_response = self._post( "libraries/%s/contents" % library[ "id" ], payload, files=files ) + self._assert_status_code_is( create_response, 200 ) + library_datasets = create_response.json() + assert len( library_datasets ) == 1 + library_dataset = library_datasets[ 0 ] + + def show(): + return self._get( "libraries/%s/contents/%s" % ( library[ "id" ], library_dataset[ "id" ] ) ) + + wait_on_state( show, assert_ok=True ) + library_dataset = show().json() + self._assert_has_keys( library_dataset, "peek", "data_type" ) + assert library_dataset[ "peek" ].find("create_test") >= 0 + assert library_dataset[ "data_type" ] == "txt" + + def _create_folder( self, library ): + create_data = dict( + folder_id=library[ "root_folder_id" ], + create_type="folder", + name="New Folder", + ) + return self._post( "libraries/%s/contents" % library[ "id" ], data=create_data ) Repository URL: https://bitbucket.org/galaxy/galaxy-central/