commit/galaxy-central: 7 new changesets
7 new commits in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/9278a44330b7/ Changeset: 9278a44330b7 User: jmchilton Date: 2014-09-11 22:01:08 Summary: Initial work on a tags manager. Rest of UsesTagsMixin needs to be copied over similarly and deprecated, following that I guess all references need to be updated and then the mixin eliminated all together I guess (exhausting :)). Affected #: 3 files diff -r da6e8248d7b402783e829f27ee1ab9e6ba4367d7 -r 9278a44330b7343a7f414057669fc1e047cfc1b6 lib/galaxy/dataset_collections/__init__.py --- a/lib/galaxy/dataset_collections/__init__.py +++ b/lib/galaxy/dataset_collections/__init__.py @@ -10,9 +10,9 @@ from galaxy.web.base.controller import ( UsesHistoryDatasetAssociationMixin, UsesLibraryMixinItems, - UsesTagsMixin, ) from galaxy.managers import hdas # TODO: Refactor all mixin use into managers. +from galaxy.managers import tags from galaxy.managers.collections_util import validate_input_element_identifiers from galaxy.util import validation from galaxy.util import odict @@ -27,8 +27,7 @@ class DatasetCollectionsService( UsesHistoryDatasetAssociationMixin, - UsesLibraryMixinItems, - UsesTagsMixin, + UsesLibraryMixinItems ): """ Abstraction for interfacing with dataset collections instance - ideally abstarcts @@ -41,6 +40,7 @@ self.model = app.model self.security = app.security self.hda_manager = hdas.HDAManager() + self.tag_manager = tags.TagsManager( app ) def create( self, @@ -166,7 +166,7 @@ dataset_collection_instance.add_item_annotation( trans.sa_session, trans.get_user(), dataset_collection_instance, new_data[ 'annotation' ] ) changed[ 'annotation' ] = new_data[ 'annotation' ] if 'tags' in new_data.keys() and trans.get_user(): - self.set_tags_from_list( trans, dataset_collection_instance, new_data[ 'tags' ], user=trans.user ) + self.tag_manager.set_tags_from_list( trans, dataset_collection_instance, new_data[ 'tags' ], user=trans.user ) if changed.keys(): trans.sa_session.flush() diff -r da6e8248d7b402783e829f27ee1ab9e6ba4367d7 -r 9278a44330b7343a7f414057669fc1e047cfc1b6 lib/galaxy/managers/tags.py --- /dev/null +++ b/lib/galaxy/managers/tags.py @@ -0,0 +1,20 @@ + + +class TagsManager( object ): + """ Manages CRUD operations related to tagging objects. 
+ """ + + def __init__( self, app ): + self.app = app + self.tag_handler = app.tag_handler + + def set_tags_from_list( self, trans, item, new_tags_list, user=None ): + #precondition: item is already security checked against user + #precondition: incoming tags is a list of sanitized/formatted strings + user = user or trans.user + + self.tag_handler.delete_item_tags( trans, user, item ) + new_tags_str = ','.join( new_tags_list ) + self.tag_handler.apply_item_tags( trans, user, item, unicode( new_tags_str.encode( 'utf-8' ), 'utf-8' ) ) + trans.sa_session.flush() + return item.tags diff -r da6e8248d7b402783e829f27ee1ab9e6ba4367d7 -r 9278a44330b7343a7f414057669fc1e047cfc1b6 lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -38,6 +38,7 @@ from galaxy.model import ExtendedMetadata, ExtendedMetadataIndex, LibraryDatasetDatasetAssociation, HistoryDatasetAssociation from galaxy.managers import api_keys +from galaxy.managers import tags from galaxy.datatypes.metadata import FileParameter from galaxy.tools.parameters import RuntimeValue, visit_input_values from galaxy.tools.parameters.basic import DataToolParameter @@ -2808,16 +2809,9 @@ return self.get_tag_handler( trans )._get_item_tag_assoc( user, tagged_item, tag_name ) def set_tags_from_list( self, trans, item, new_tags_list, user=None ): - #precondition: item is already security checked against user - #precondition: incoming tags is a list of sanitized/formatted strings - user = user or trans.user - - # based on controllers/tag retag_async: delete all old, reset to entire new - trans.app.tag_handler.delete_item_tags( trans, user, item ) - new_tags_str = ','.join( new_tags_list ) - trans.app.tag_handler.apply_item_tags( trans, user, item, unicode( new_tags_str.encode( 'utf-8' ), 'utf-8' ) ) - trans.sa_session.flush() - return item.tags + # Method deprecated - try to use TagsHandler instead. + tags_manager = tags.TagsManager( trans.app ) + return tags_manager.set_tags_from_list( trans, item, new_tags_list, user=user ) def get_user_tags_used( self, trans, user=None ): """ https://bitbucket.org/galaxy/galaxy-central/commits/6f5195e15d09/ Changeset: 6f5195e15d09 User: jmchilton Date: 2014-09-11 22:01:08 Summary: Refactor generic controller mixin code for security out for reuse in managers. Affected #: 2 files diff -r 9278a44330b7343a7f414057669fc1e047cfc1b6 -r 6f5195e15d0919790db68865dba5e365d67d14c1 lib/galaxy/managers/base.py --- a/lib/galaxy/managers/base.py +++ b/lib/galaxy/managers/base.py @@ -1,7 +1,41 @@ +from galaxy import exceptions class ModelManager( object ): pass + class ModelSerializer( object ): pass + + +def security_check( trans, item, check_ownership=False, check_accessible=False ): + """ Security checks for an item: checks if (a) user owns item or (b) item + is accessible to user. This is a generic method for dealing with objects + uniformly from the older controller mixin code - however whenever possible + the managers for a particular model should be used to perform security + checks. 
+ """ + + # all items are accessible to an admin + if trans.user_is_admin(): + return item + + # Verify ownership: there is a current user and that user is the same as the item's + if check_ownership: + if not trans.user: + raise exceptions.ItemOwnershipException( "Must be logged in to manage Galaxy items", type='error' ) + if item.user != trans.user: + raise exceptions.ItemOwnershipException( "%s is not owned by the current user" % item.__class__.__name__, type='error' ) + + # Verify accessible: + # if it's part of a lib - can they access via security + # if it's something else (sharable) have they been added to the item's users_shared_with_dot_users + if check_accessible: + if type( item ) in ( trans.app.model.LibraryFolder, trans.app.model.LibraryDatasetDatasetAssociation, trans.app.model.LibraryDataset ): + if not trans.app.security_agent.can_access_library_item( trans.get_current_user_roles(), item, trans.user ): + raise exceptions.ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' ) + else: + if ( item.user != trans.user ) and ( not item.importable ) and ( trans.user not in item.users_shared_with_dot_users ): + raise exceptions.ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' ) + return item diff -r 9278a44330b7343a7f414057669fc1e047cfc1b6 -r 6f5195e15d0919790db68865dba5e365d67d14c1 lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -39,6 +39,7 @@ from galaxy.managers import api_keys from galaxy.managers import tags +from galaxy.managers import base as managers_base from galaxy.datatypes.metadata import FileParameter from galaxy.tools.parameters import RuntimeValue, visit_input_values from galaxy.tools.parameters.basic import DataToolParameter @@ -301,28 +302,7 @@ def security_check( self, trans, item, check_ownership=False, check_accessible=False ): """ Security checks for an item: checks if (a) user owns item or (b) item is accessible to user. 
""" - # all items are accessible to an admin - if trans.user_is_admin(): - return item - - # Verify ownership: there is a current user and that user is the same as the item's - if check_ownership: - if not trans.user: - raise ItemOwnershipException( "Must be logged in to manage Galaxy items", type='error' ) - if item.user != trans.user: - raise ItemOwnershipException( "%s is not owned by the current user" % item.__class__.__name__, type='error' ) - - # Verify accessible: - # if it's part of a lib - can they access via security - # if it's something else (sharable) have they been added to the item's users_shared_with_dot_users - if check_accessible: - if type( item ) in ( trans.app.model.LibraryFolder, trans.app.model.LibraryDatasetDatasetAssociation, trans.app.model.LibraryDataset ): - if not trans.app.security_agent.can_access_library_item( trans.get_current_user_roles(), item, trans.user ): - raise ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' ) - else: - if ( item.user != trans.user ) and ( not item.importable ) and ( trans.user not in item.users_shared_with_dot_users ): - raise ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' ) - return item + return managers_base.security_check( trans, item, check_ownership=check_ownership, check_accessible=check_accessible ) class UsesHistoryMixin( SharableItemSecurityMixin ): https://bitbucket.org/galaxy/galaxy-central/commits/83016af21151/ Changeset: 83016af21151 User: jmchilton Date: 2014-09-11 22:01:08 Summary: Refactor generic controller mixin code for loading objects into managers. Affected #: 2 files diff -r 6f5195e15d0919790db68865dba5e365d67d14c1 -r 83016af2115156f4ebb640ea231824f23aeecad3 lib/galaxy/managers/base.py --- a/lib/galaxy/managers/base.py +++ b/lib/galaxy/managers/base.py @@ -1,5 +1,12 @@ from galaxy import exceptions +from galaxy import model +from galaxy.model import tool_shed_install + + +import logging +log = logging.getLogger( __name__ ) + class ModelManager( object ): pass @@ -39,3 +46,75 @@ if ( item.user != trans.user ) and ( not item.importable ) and ( trans.user not in item.users_shared_with_dot_users ): raise exceptions.ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' ) return item + + +def get_class( class_name ): + """ Returns the class object that a string denotes. Without this method, we'd have to do eval(<class_name>). 
""" + if class_name == 'History': + item_class = model.History + elif class_name == 'HistoryDatasetAssociation': + item_class = model.HistoryDatasetAssociation + elif class_name == 'Page': + item_class = model.Page + elif class_name == 'StoredWorkflow': + item_class = model.StoredWorkflow + elif class_name == 'Visualization': + item_class = model.Visualization + elif class_name == 'Tool': + item_class = model.Tool + elif class_name == 'Job': + item_class = model.Job + elif class_name == 'User': + item_class = model.User + elif class_name == 'Group': + item_class = model.Group + elif class_name == 'Role': + item_class = model.Role + elif class_name == 'Quota': + item_class = model.Quota + elif class_name == 'Library': + item_class = model.Library + elif class_name == 'LibraryFolder': + item_class = model.LibraryFolder + elif class_name == 'LibraryDatasetDatasetAssociation': + item_class = model.LibraryDatasetDatasetAssociation + elif class_name == 'LibraryDataset': + item_class = model.LibraryDataset + elif class_name == 'ToolShedRepository': + item_class = tool_shed_install.ToolShedRepository + else: + item_class = None + return item_class + + +def get_object( trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None ): + """ + Convenience method to get a model object with the specified checks. This is + a generic method for dealing with objects uniformly from the older + controller mixin code - however whenever possible the managers for a + particular model should be used to load objects. + """ + try: + decoded_id = trans.security.decode_id( id ) + except: + raise exceptions.MessageException( "Malformed %s id ( %s ) specified, unable to decode" + % ( class_name, str( id ) ), type='error' ) + try: + item_class = get_class( class_name ) + assert item_class is not None + item = trans.sa_session.query( item_class ).get( decoded_id ) + assert item is not None + except Exception: + log.exception( "Invalid %s id ( %s ) specified." % ( class_name, id ) ) + raise exceptions.MessageException( "Invalid %s id ( %s ) specified" % ( class_name, id ), type="error" ) + + if check_ownership or check_accessible: + security_check( trans, item, check_ownership, check_accessible ) + if deleted is True and not item.deleted: + raise exceptions.ItemDeletionException( '%s "%s" is not deleted' + % ( class_name, getattr( item, 'name', id ) ), type="warning" ) + elif deleted is False and item.deleted: + raise exceptions.ItemDeletionException( '%s "%s" is deleted' + % ( class_name, getattr( item, 'name', id ) ), type="warning" ) + return item + diff -r 6f5195e15d0919790db68865dba5e365d67d14c1 -r 83016af2115156f4ebb640ea231824f23aeecad3 lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -78,70 +78,14 @@ def get_class( self, class_name ): """ Returns the class object that a string denotes. Without this method, we'd have to do eval(<class_name>). 
""" - if class_name == 'History': - item_class = self.app.model.History - elif class_name == 'HistoryDatasetAssociation': - item_class = self.app.model.HistoryDatasetAssociation - elif class_name == 'Page': - item_class = self.app.model.Page - elif class_name == 'StoredWorkflow': - item_class = self.app.model.StoredWorkflow - elif class_name == 'Visualization': - item_class = self.app.model.Visualization - elif class_name == 'Tool': - item_class = self.app.model.Tool - elif class_name == 'Job': - item_class = self.app.model.Job - elif class_name == 'User': - item_class = self.app.model.User - elif class_name == 'Group': - item_class = self.app.model.Group - elif class_name == 'Role': - item_class = self.app.model.Role - elif class_name == 'Quota': - item_class = self.app.model.Quota - elif class_name == 'Library': - item_class = self.app.model.Library - elif class_name == 'LibraryFolder': - item_class = self.app.model.LibraryFolder - elif class_name == 'LibraryDatasetDatasetAssociation': - item_class = self.app.model.LibraryDatasetDatasetAssociation - elif class_name == 'LibraryDataset': - item_class = self.app.model.LibraryDataset - elif class_name == 'ToolShedRepository': - item_class = self.app.install_model.ToolShedRepository - else: - item_class = None - return item_class + return managers_base.get_class( class_name ) def get_object( self, trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None ): """ Convenience method to get a model object with the specified checks. """ - try: - decoded_id = trans.security.decode_id( id ) - except: - raise MessageException( "Malformed %s id ( %s ) specified, unable to decode" - % ( class_name, str( id ) ), type='error' ) - try: - item_class = self.get_class( class_name ) - assert item_class is not None - item = trans.sa_session.query( item_class ).get( decoded_id ) - assert item is not None - except Exception: - log.exception( "Invalid %s id ( %s ) specified." % ( class_name, id ) ) - raise MessageException( "Invalid %s id ( %s ) specified" % ( class_name, id ), type="error" ) - - if check_ownership or check_accessible: - self.security_check( trans, item, check_ownership, check_accessible ) - if deleted == True and not item.deleted: - raise ItemDeletionException( '%s "%s" is not deleted' - % ( class_name, getattr( item, 'name', id ) ), type="warning" ) - elif deleted == False and item.deleted: - raise ItemDeletionException( '%s "%s" is deleted' - % ( class_name, getattr( item, 'name', id ) ), type="warning" ) - return item - + return managers_base.get_object( trans, id, class_name, check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted ) + # this should be here - but catching errors from sharable item controllers that *should* have SharableItemMixin # but *don't* then becomes difficult #def security_check( self, trans, item, check_ownership=False, check_accessible=False ): https://bitbucket.org/galaxy/galaxy-central/commits/355aee34a371/ Changeset: 355aee34a371 User: jmchilton Date: 2014-09-11 22:01:08 Summary: Introduce LDDA manager stub. Use it to eliminate awful controller dependencies (and associated circular dependency errors when controllers are not loaded before everything else) in galaxy.dataset_collections. 
Affected #: 3 files diff -r 83016af2115156f4ebb640ea231824f23aeecad3 -r 355aee34a371c99f9251b965ea625066a80419ea lib/galaxy/dataset_collections/__init__.py --- a/lib/galaxy/dataset_collections/__init__.py +++ b/lib/galaxy/dataset_collections/__init__.py @@ -7,11 +7,9 @@ from galaxy.exceptions import MessageException from galaxy.exceptions import ItemAccessibilityException from galaxy.exceptions import RequestParameterInvalidException -from galaxy.web.base.controller import ( - UsesHistoryDatasetAssociationMixin, - UsesLibraryMixinItems, -) from galaxy.managers import hdas # TODO: Refactor all mixin use into managers. +from galaxy.managers import histories +from galaxy.managers import lddas from galaxy.managers import tags from galaxy.managers.collections_util import validate_input_element_identifiers from galaxy.util import validation @@ -25,10 +23,7 @@ ERROR_NO_COLLECTION_TYPE = "Create called without specifing a collection type." -class DatasetCollectionsService( - UsesHistoryDatasetAssociationMixin, - UsesLibraryMixinItems -): +class DatasetCollectionsService( object ): """ Abstraction for interfacing with dataset collections instance - ideally abstarcts out model and plugin details. @@ -40,7 +35,9 @@ self.model = app.model self.security = app.security self.hda_manager = hdas.HDAManager() + self.history_manager = histories.HistoryManager() self.tag_manager = tags.TagsManager( app ) + self.ldda_manager = lddas.LDDAManager( ) def create( self, @@ -249,7 +246,7 @@ decoded_id = int( trans.app.security.decode_id( encoded_id ) ) element = self.hda_manager.get( trans, decoded_id, check_ownership=False ) elif src_type == 'ldda': - element = self.get_library_dataset_dataset_association( trans, encoded_id ) + element = self.ldda_manager.get( trans, encoded_id ) elif src_type == 'hdca': # TODO: Option to copy? Force copy? Copy or allow if not owned? element = self.__get_history_collection_instance( trans, encoded_id ).collection @@ -281,7 +278,7 @@ def __get_history_collection_instance( self, trans, id, check_ownership=False, check_accessible=True ): instance_id = int( trans.app.security.decode_id( id ) ) collection_instance = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( instance_id ) - self.security_check( trans, collection_instance.history, check_ownership=check_ownership, check_accessible=check_accessible ) + self.history_manager.secure( trans, collection_instance.history, check_ownership=check_ownership, check_accessible=check_accessible ) return collection_instance def __get_library_collection_instance( self, trans, id, check_ownership=False, check_accessible=True ): diff -r 83016af2115156f4ebb640ea231824f23aeecad3 -r 355aee34a371c99f9251b965ea625066a80419ea lib/galaxy/managers/lddas.py --- /dev/null +++ b/lib/galaxy/managers/lddas.py @@ -0,0 +1,16 @@ +from galaxy.managers import base as manager_base + + +class LDDAManager( manager_base.ModelManager ): + """ A fairly sparse manager for LDDAs. + """ + + def __init__( self ): + """ + Set up and initialize other managers needed by lddas. 
+ """ + pass + + def get( self, trans, id, check_accessible=True ): + return manager_base.get_object( trans, id, 'LibraryDatasetDatasetAssociation', + check_ownership=False, check_accessible=check_accessible ) diff -r 83016af2115156f4ebb640ea231824f23aeecad3 -r 355aee34a371c99f9251b965ea625066a80419ea lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -875,6 +875,9 @@ check_ownership=False, check_accessible=check_accessible ) def get_library_dataset_dataset_association( self, trans, id, check_ownership=False, check_accessible=True ): + # Deprecated in lieu to galaxy.managers.lddas.LDDAManager.get() but not + # reusing that exactly because of subtle differences in exception handling + # logic (API controller override get_object to be slightly different). return self.get_object( trans, id, 'LibraryDatasetDatasetAssociation', check_ownership=False, check_accessible=check_accessible ) https://bitbucket.org/galaxy/galaxy-central/commits/dd31ab49162d/ Changeset: dd31ab49162d User: jmchilton Date: 2014-09-11 22:01:08 Summary: Make dataset collections service a manager. It would hve been arcitected this way from the beginning but my initial work on it predated Carl's managers work and it had those pesky controller dependencies. Affected #: 3 files diff -r 355aee34a371c99f9251b965ea625066a80419ea -r dd31ab49162d3faa30818947a80a6165c2f0b4c7 lib/galaxy/app.py --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -5,7 +5,7 @@ from galaxy import config, jobs import galaxy.model import galaxy.security -from galaxy import dataset_collections +from galaxy.managers.collections import DatasetCollectionManager import galaxy.quota from galaxy.tags.tag_handler import GalaxyTagHandler from galaxy.visualization.genomes import Genomes @@ -57,7 +57,7 @@ # Tag handler self.tag_handler = GalaxyTagHandler() # Dataset Collection Plugins - self.dataset_collections_service = dataset_collections.DatasetCollectionsService(self) + self.dataset_collections_service = DatasetCollectionManager(self) # Tool Data Tables self._configure_tool_data_tables( from_shed_config=False ) diff -r 355aee34a371c99f9251b965ea625066a80419ea -r dd31ab49162d3faa30818947a80a6165c2f0b4c7 lib/galaxy/dataset_collections/__init__.py --- a/lib/galaxy/dataset_collections/__init__.py +++ b/lib/galaxy/dataset_collections/__init__.py @@ -1,292 +0,0 @@ -from .registry import DatasetCollectionTypesRegistry -from .matching import MatchingCollections -from .type_description import CollectionTypeDescriptionFactory - - -from galaxy import model -from galaxy.exceptions import MessageException -from galaxy.exceptions import ItemAccessibilityException -from galaxy.exceptions import RequestParameterInvalidException -from galaxy.managers import hdas # TODO: Refactor all mixin use into managers. -from galaxy.managers import histories -from galaxy.managers import lddas -from galaxy.managers import tags -from galaxy.managers.collections_util import validate_input_element_identifiers -from galaxy.util import validation -from galaxy.util import odict - -import logging -log = logging.getLogger( __name__ ) - - -ERROR_INVALID_ELEMENTS_SPECIFICATION = "Create called with invalid parameters, must specify element identifiers." -ERROR_NO_COLLECTION_TYPE = "Create called without specifing a collection type." - - -class DatasetCollectionsService( object ): - """ - Abstraction for interfacing with dataset collections instance - ideally abstarcts - out model and plugin details. 
- """ - - def __init__( self, app ): - self.type_registry = DatasetCollectionTypesRegistry( app ) - self.collection_type_descriptions = CollectionTypeDescriptionFactory( self.type_registry ) - self.model = app.model - self.security = app.security - self.hda_manager = hdas.HDAManager() - self.history_manager = histories.HistoryManager() - self.tag_manager = tags.TagsManager( app ) - self.ldda_manager = lddas.LDDAManager( ) - - def create( - self, - trans, - parent, # PRECONDITION: security checks on ability to add to parent occurred during load. - name, - collection_type, - element_identifiers=None, - elements=None, - implicit_collection_info=None, - ): - """ - """ - # Trust embedded, newly created objects created by tool subsystem. - trusted_identifiers = implicit_collection_info is not None - - if element_identifiers and not trusted_identifiers: - validate_input_element_identifiers( element_identifiers ) - - dataset_collection = self.__create_dataset_collection( - trans=trans, - collection_type=collection_type, - element_identifiers=element_identifiers, - elements=elements, - ) - if isinstance( parent, model.History ): - dataset_collection_instance = self.model.HistoryDatasetCollectionAssociation( - collection=dataset_collection, - name=name, - ) - if implicit_collection_info: - for input_name, input_collection in implicit_collection_info[ "implicit_inputs" ]: - dataset_collection_instance.add_implicit_input_collection( input_name, input_collection ) - for output_dataset in implicit_collection_info.get( "outputs" ): - output_dataset.hidden_beneath_collection_instance = dataset_collection_instance - trans.sa_session.add( output_dataset ) - - dataset_collection_instance.implicit_output_name = implicit_collection_info[ "implicit_output_name" ] - log.debug("Created collection with %d elements" % ( len( dataset_collection_instance.collection.elements ) ) ) - # Handle setting hid - parent.add_dataset_collection( dataset_collection_instance ) - elif isinstance( parent, model.LibraryFolder ): - dataset_collection_instance = self.model.LibraryDatasetCollectionAssociation( - collection=dataset_collection, - folder=parent, - name=name, - ) - else: - message = "Internal logic error - create called with unknown parent type %s" % type( parent ) - log.exception( message ) - raise MessageException( message ) - - return self.__persist( dataset_collection_instance ) - - def __create_dataset_collection( - self, - trans, - collection_type, - element_identifiers=None, - elements=None, - ): - if element_identifiers is None and elements is None: - raise RequestParameterInvalidException( ERROR_INVALID_ELEMENTS_SPECIFICATION ) - if not collection_type: - raise RequestParameterInvalidException( ERROR_NO_COLLECTION_TYPE ) - collection_type_description = self.collection_type_descriptions.for_collection_type( collection_type ) - # If we have elements, this is an internal request, don't need to load - # objects from identifiers. - if elements is None: - if collection_type_description.has_subcollections( ): - # Nested collection - recursively create collections and update identifiers. - self.__recursively_create_collections( trans, element_identifiers ) - elements = self.__load_elements( trans, element_identifiers ) - # else if elements is set, it better be an ordered dict! 
- - type_plugin = collection_type_description.rank_type_plugin() - dataset_collection = type_plugin.build_collection( elements ) - dataset_collection.collection_type = collection_type - return dataset_collection - - def delete( self, trans, instance_type, id ): - dataset_collection_instance = self.get_dataset_collection_instance( trans, instance_type, id, check_ownership=True ) - dataset_collection_instance.deleted = True - trans.sa_session.add( dataset_collection_instance ) - trans.sa_session.flush( ) - - def update( self, trans, instance_type, id, payload ): - dataset_collection_instance = self.get_dataset_collection_instance( trans, instance_type, id, check_ownership=True ) - if trans.user is None: - anon_allowed_payload = {} - if 'deleted' in payload: - anon_allowed_payload[ 'deleted' ] = payload[ 'deleted' ] - if 'visible' in payload: - anon_allowed_payload[ 'visible' ] = payload[ 'visible' ] - payload = self._validate_and_parse_update_payload( anon_allowed_payload ) - else: - payload = self._validate_and_parse_update_payload( payload ) - changed = self._set_from_dict( trans, dataset_collection_instance, payload ) - return changed - - def copy( - self, - trans, - parent, # PRECONDITION: security checks on ability to add to parent occurred during load. - source, - encoded_source_id, - ): - assert source == "hdca" # for now - source_hdca = self.__get_history_collection_instance( trans, encoded_source_id ) - new_hdca = source_hdca.copy() - parent.add_dataset_collection( new_hdca ) - trans.sa_session.add( new_hdca ) - trans.sa_session.flush() - return source_hdca - - def _set_from_dict( self, trans, dataset_collection_instance, new_data ): - # Blatantly stolen from UsesHistoryDatasetAssociationMixin.set_hda_from_dict. - - # send what we can down into the model - changed = dataset_collection_instance.set_from_dict( new_data ) - # the rest (often involving the trans) - do here - if 'annotation' in new_data.keys() and trans.get_user(): - dataset_collection_instance.add_item_annotation( trans.sa_session, trans.get_user(), dataset_collection_instance, new_data[ 'annotation' ] ) - changed[ 'annotation' ] = new_data[ 'annotation' ] - if 'tags' in new_data.keys() and trans.get_user(): - self.tag_manager.set_tags_from_list( trans, dataset_collection_instance, new_data[ 'tags' ], user=trans.user ) - - if changed.keys(): - trans.sa_session.flush() - - return changed - - def _validate_and_parse_update_payload( self, payload ): - validated_payload = {} - for key, val in payload.items(): - if val is None: - continue - if key in ( 'name' ): - val = validation.validate_and_sanitize_basestring( key, val ) - validated_payload[ key ] = val - if key in ( 'deleted', 'visible' ): - validated_payload[ key ] = validation.validate_boolean( key, val ) - elif key == 'tags': - validated_payload[ key ] = validation.validate_and_sanitize_basestring_list( key, val ) - return validated_payload - - def history_dataset_collections(self, history, query): - collections = history.active_dataset_collections - collections = filter( query.direct_match, collections ) - return collections - - def __persist( self, dataset_collection_instance ): - context = self.model.context - context.add( dataset_collection_instance ) - context.flush() - return dataset_collection_instance - - def __recursively_create_collections( self, trans, element_identifiers ): - for index, element_identifier in enumerate( element_identifiers ): - try: - if not element_identifier[ "src" ] == "new_collection": - # not a new collection, keep moving... 
- continue - except KeyError: - # Not a dictionary, just an id of an HDA - move along. - continue - - # element identifier is a dict with src new_collection... - collection_type = element_identifier.get( "collection_type", None ) - collection = self.__create_dataset_collection( - trans=trans, - collection_type=collection_type, - element_identifiers=element_identifier[ "element_identifiers" ], - ) - element_identifier[ "__object__" ] = collection - - return element_identifiers - - def __load_elements( self, trans, element_identifiers ): - elements = odict.odict() - for element_identifier in element_identifiers: - elements[ element_identifier[ "name" ] ] = self.__load_element( trans, element_identifier ) - return elements - - def __load_element( self, trans, element_identifier ): - #if not isinstance( element_identifier, dict ): - # # Is allowing this to just be the id of an hda too clever? Somewhat - # # consistent with other API methods though. - # element_identifier = dict( src='hda', id=str( element_identifier ) ) - - # Previously created collection already found in request, just pass - # through as is. - if "__object__" in element_identifier: - return element_identifier[ "__object__" ] - - # dateset_identifier is dict {src=hda|ldda|hdca|new_collection, id=<encoded_id>} - try: - src_type = element_identifier.get( 'src', 'hda' ) - except AttributeError: - raise MessageException( "Dataset collection element definition (%s) not dictionary-like." % element_identifier ) - encoded_id = element_identifier.get( 'id', None ) - if not src_type or not encoded_id: - raise RequestParameterInvalidException( "Problem decoding element identifier %s" % element_identifier ) - - if src_type == 'hda': - decoded_id = int( trans.app.security.decode_id( encoded_id ) ) - element = self.hda_manager.get( trans, decoded_id, check_ownership=False ) - elif src_type == 'ldda': - element = self.ldda_manager.get( trans, encoded_id ) - elif src_type == 'hdca': - # TODO: Option to copy? Force copy? Copy or allow if not owned? - element = self.__get_history_collection_instance( trans, encoded_id ).collection - # TODO: ldca. - else: - raise RequestParameterInvalidException( "Unknown src_type parameter supplied '%s'." % src_type ) - return element - - def match_collections( self, collections_to_match ): - """ - May seem odd to place it here, but planning to grow sophistication and - get plugin types involved so it will likely make sense in the future. 
- """ - return MatchingCollections.for_collections( collections_to_match, self.collection_type_descriptions ) - - def get_dataset_collection_instance( self, trans, instance_type, id, **kwds ): - """ - """ - if instance_type == "history": - return self.__get_history_collection_instance( trans, id, **kwds ) - elif instance_type == "library": - return self.__get_library_collection_instance( trans, id, **kwds ) - - def get_dataset_collection( self, trans, encoded_id ): - collection_id = int( trans.app.security.decode_id( encoded_id ) ) - collection = trans.sa_session.query( trans.app.model.DatasetCollection ).get( collection_id ) - return collection - - def __get_history_collection_instance( self, trans, id, check_ownership=False, check_accessible=True ): - instance_id = int( trans.app.security.decode_id( id ) ) - collection_instance = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( instance_id ) - self.history_manager.secure( trans, collection_instance.history, check_ownership=check_ownership, check_accessible=check_accessible ) - return collection_instance - - def __get_library_collection_instance( self, trans, id, check_ownership=False, check_accessible=True ): - if check_ownership: - raise NotImplemented( "Functionality (getting library dataset collection with ownership check) unimplemented." ) - instance_id = int( trans.security.decode_id( id ) ) - collection_instance = trans.sa_session.query( trans.app.model.LibraryDatasetCollectionAssociation ).get( instance_id ) - if check_accessible: - if not trans.app.security_agent.can_access_library_item( trans.get_current_user_roles(), collection_instance, trans.user ): - raise ItemAccessibilityException( "LibraryDatasetCollectionAssociation is not accessible to the current user", type='error' ) - return collection_instance diff -r 355aee34a371c99f9251b965ea625066a80419ea -r dd31ab49162d3faa30818947a80a6165c2f0b4c7 lib/galaxy/managers/collections.py --- a/lib/galaxy/managers/collections.py +++ b/lib/galaxy/managers/collections.py @@ -1,1 +1,291 @@ -""" Future home of dataset collections managers. """ +from galaxy.dataset_collections.registry import DatasetCollectionTypesRegistry +from galaxy.dataset_collections.matching import MatchingCollections +from galaxy.dataset_collections.type_description import CollectionTypeDescriptionFactory + +from galaxy import model +from galaxy.exceptions import MessageException +from galaxy.exceptions import ItemAccessibilityException +from galaxy.exceptions import RequestParameterInvalidException +from galaxy.managers import hdas # TODO: Refactor all mixin use into managers. +from galaxy.managers import histories +from galaxy.managers import lddas +from galaxy.managers import tags +from galaxy.managers.collections_util import validate_input_element_identifiers +from galaxy.util import validation +from galaxy.util import odict + +import logging +log = logging.getLogger( __name__ ) + + +ERROR_INVALID_ELEMENTS_SPECIFICATION = "Create called with invalid parameters, must specify element identifiers." +ERROR_NO_COLLECTION_TYPE = "Create called without specifing a collection type." + + +class DatasetCollectionManager( object ): + """ + Abstraction for interfacing with dataset collections instance - ideally abstarcts + out model and plugin details. 
+ """ + + def __init__( self, app ): + self.type_registry = DatasetCollectionTypesRegistry( app ) + self.collection_type_descriptions = CollectionTypeDescriptionFactory( self.type_registry ) + self.model = app.model + self.security = app.security + self.hda_manager = hdas.HDAManager() + self.history_manager = histories.HistoryManager() + self.tag_manager = tags.TagsManager( app ) + self.ldda_manager = lddas.LDDAManager( ) + + def create( + self, + trans, + parent, # PRECONDITION: security checks on ability to add to parent occurred during load. + name, + collection_type, + element_identifiers=None, + elements=None, + implicit_collection_info=None, + ): + """ + """ + # Trust embedded, newly created objects created by tool subsystem. + trusted_identifiers = implicit_collection_info is not None + + if element_identifiers and not trusted_identifiers: + validate_input_element_identifiers( element_identifiers ) + + dataset_collection = self.__create_dataset_collection( + trans=trans, + collection_type=collection_type, + element_identifiers=element_identifiers, + elements=elements, + ) + if isinstance( parent, model.History ): + dataset_collection_instance = self.model.HistoryDatasetCollectionAssociation( + collection=dataset_collection, + name=name, + ) + if implicit_collection_info: + for input_name, input_collection in implicit_collection_info[ "implicit_inputs" ]: + dataset_collection_instance.add_implicit_input_collection( input_name, input_collection ) + for output_dataset in implicit_collection_info.get( "outputs" ): + output_dataset.hidden_beneath_collection_instance = dataset_collection_instance + trans.sa_session.add( output_dataset ) + + dataset_collection_instance.implicit_output_name = implicit_collection_info[ "implicit_output_name" ] + log.debug("Created collection with %d elements" % ( len( dataset_collection_instance.collection.elements ) ) ) + # Handle setting hid + parent.add_dataset_collection( dataset_collection_instance ) + elif isinstance( parent, model.LibraryFolder ): + dataset_collection_instance = self.model.LibraryDatasetCollectionAssociation( + collection=dataset_collection, + folder=parent, + name=name, + ) + else: + message = "Internal logic error - create called with unknown parent type %s" % type( parent ) + log.exception( message ) + raise MessageException( message ) + + return self.__persist( dataset_collection_instance ) + + def __create_dataset_collection( + self, + trans, + collection_type, + element_identifiers=None, + elements=None, + ): + if element_identifiers is None and elements is None: + raise RequestParameterInvalidException( ERROR_INVALID_ELEMENTS_SPECIFICATION ) + if not collection_type: + raise RequestParameterInvalidException( ERROR_NO_COLLECTION_TYPE ) + collection_type_description = self.collection_type_descriptions.for_collection_type( collection_type ) + # If we have elements, this is an internal request, don't need to load + # objects from identifiers. + if elements is None: + if collection_type_description.has_subcollections( ): + # Nested collection - recursively create collections and update identifiers. + self.__recursively_create_collections( trans, element_identifiers ) + elements = self.__load_elements( trans, element_identifiers ) + # else if elements is set, it better be an ordered dict! 
+ + type_plugin = collection_type_description.rank_type_plugin() + dataset_collection = type_plugin.build_collection( elements ) + dataset_collection.collection_type = collection_type + return dataset_collection + + def delete( self, trans, instance_type, id ): + dataset_collection_instance = self.get_dataset_collection_instance( trans, instance_type, id, check_ownership=True ) + dataset_collection_instance.deleted = True + trans.sa_session.add( dataset_collection_instance ) + trans.sa_session.flush( ) + + def update( self, trans, instance_type, id, payload ): + dataset_collection_instance = self.get_dataset_collection_instance( trans, instance_type, id, check_ownership=True ) + if trans.user is None: + anon_allowed_payload = {} + if 'deleted' in payload: + anon_allowed_payload[ 'deleted' ] = payload[ 'deleted' ] + if 'visible' in payload: + anon_allowed_payload[ 'visible' ] = payload[ 'visible' ] + payload = self._validate_and_parse_update_payload( anon_allowed_payload ) + else: + payload = self._validate_and_parse_update_payload( payload ) + changed = self._set_from_dict( trans, dataset_collection_instance, payload ) + return changed + + def copy( + self, + trans, + parent, # PRECONDITION: security checks on ability to add to parent occurred during load. + source, + encoded_source_id, + ): + assert source == "hdca" # for now + source_hdca = self.__get_history_collection_instance( trans, encoded_source_id ) + new_hdca = source_hdca.copy() + parent.add_dataset_collection( new_hdca ) + trans.sa_session.add( new_hdca ) + trans.sa_session.flush() + return source_hdca + + def _set_from_dict( self, trans, dataset_collection_instance, new_data ): + # Blatantly stolen from UsesHistoryDatasetAssociationMixin.set_hda_from_dict. + + # send what we can down into the model + changed = dataset_collection_instance.set_from_dict( new_data ) + # the rest (often involving the trans) - do here + if 'annotation' in new_data.keys() and trans.get_user(): + dataset_collection_instance.add_item_annotation( trans.sa_session, trans.get_user(), dataset_collection_instance, new_data[ 'annotation' ] ) + changed[ 'annotation' ] = new_data[ 'annotation' ] + if 'tags' in new_data.keys() and trans.get_user(): + self.tag_manager.set_tags_from_list( trans, dataset_collection_instance, new_data[ 'tags' ], user=trans.user ) + + if changed.keys(): + trans.sa_session.flush() + + return changed + + def _validate_and_parse_update_payload( self, payload ): + validated_payload = {} + for key, val in payload.items(): + if val is None: + continue + if key in ( 'name' ): + val = validation.validate_and_sanitize_basestring( key, val ) + validated_payload[ key ] = val + if key in ( 'deleted', 'visible' ): + validated_payload[ key ] = validation.validate_boolean( key, val ) + elif key == 'tags': + validated_payload[ key ] = validation.validate_and_sanitize_basestring_list( key, val ) + return validated_payload + + def history_dataset_collections(self, history, query): + collections = history.active_dataset_collections + collections = filter( query.direct_match, collections ) + return collections + + def __persist( self, dataset_collection_instance ): + context = self.model.context + context.add( dataset_collection_instance ) + context.flush() + return dataset_collection_instance + + def __recursively_create_collections( self, trans, element_identifiers ): + for index, element_identifier in enumerate( element_identifiers ): + try: + if not element_identifier[ "src" ] == "new_collection": + # not a new collection, keep moving... 
+ continue + except KeyError: + # Not a dictionary, just an id of an HDA - move along. + continue + + # element identifier is a dict with src new_collection... + collection_type = element_identifier.get( "collection_type", None ) + collection = self.__create_dataset_collection( + trans=trans, + collection_type=collection_type, + element_identifiers=element_identifier[ "element_identifiers" ], + ) + element_identifier[ "__object__" ] = collection + + return element_identifiers + + def __load_elements( self, trans, element_identifiers ): + elements = odict.odict() + for element_identifier in element_identifiers: + elements[ element_identifier[ "name" ] ] = self.__load_element( trans, element_identifier ) + return elements + + def __load_element( self, trans, element_identifier ): + #if not isinstance( element_identifier, dict ): + # # Is allowing this to just be the id of an hda too clever? Somewhat + # # consistent with other API methods though. + # element_identifier = dict( src='hda', id=str( element_identifier ) ) + + # Previously created collection already found in request, just pass + # through as is. + if "__object__" in element_identifier: + return element_identifier[ "__object__" ] + + # dateset_identifier is dict {src=hda|ldda|hdca|new_collection, id=<encoded_id>} + try: + src_type = element_identifier.get( 'src', 'hda' ) + except AttributeError: + raise MessageException( "Dataset collection element definition (%s) not dictionary-like." % element_identifier ) + encoded_id = element_identifier.get( 'id', None ) + if not src_type or not encoded_id: + raise RequestParameterInvalidException( "Problem decoding element identifier %s" % element_identifier ) + + if src_type == 'hda': + decoded_id = int( trans.app.security.decode_id( encoded_id ) ) + element = self.hda_manager.get( trans, decoded_id, check_ownership=False ) + elif src_type == 'ldda': + element = self.ldda_manager.get( trans, encoded_id ) + elif src_type == 'hdca': + # TODO: Option to copy? Force copy? Copy or allow if not owned? + element = self.__get_history_collection_instance( trans, encoded_id ).collection + # TODO: ldca. + else: + raise RequestParameterInvalidException( "Unknown src_type parameter supplied '%s'." % src_type ) + return element + + def match_collections( self, collections_to_match ): + """ + May seem odd to place it here, but planning to grow sophistication and + get plugin types involved so it will likely make sense in the future. 
+ """ + return MatchingCollections.for_collections( collections_to_match, self.collection_type_descriptions ) + + def get_dataset_collection_instance( self, trans, instance_type, id, **kwds ): + """ + """ + if instance_type == "history": + return self.__get_history_collection_instance( trans, id, **kwds ) + elif instance_type == "library": + return self.__get_library_collection_instance( trans, id, **kwds ) + + def get_dataset_collection( self, trans, encoded_id ): + collection_id = int( trans.app.security.decode_id( encoded_id ) ) + collection = trans.sa_session.query( trans.app.model.DatasetCollection ).get( collection_id ) + return collection + + def __get_history_collection_instance( self, trans, id, check_ownership=False, check_accessible=True ): + instance_id = int( trans.app.security.decode_id( id ) ) + collection_instance = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( instance_id ) + self.history_manager.secure( trans, collection_instance.history, check_ownership=check_ownership, check_accessible=check_accessible ) + return collection_instance + + def __get_library_collection_instance( self, trans, id, check_ownership=False, check_accessible=True ): + if check_ownership: + raise NotImplemented( "Functionality (getting library dataset collection with ownership check) unimplemented." ) + instance_id = int( trans.security.decode_id( id ) ) + collection_instance = trans.sa_session.query( trans.app.model.LibraryDatasetCollectionAssociation ).get( instance_id ) + if check_accessible: + if not trans.app.security_agent.can_access_library_item( trans.get_current_user_roles(), collection_instance, trans.user ): + raise ItemAccessibilityException( "LibraryDatasetCollectionAssociation is not accessible to the current user", type='error' ) + return collection_instance https://bitbucket.org/galaxy/galaxy-central/commits/d2ec0f230bda/ Changeset: d2ec0f230bda User: jmchilton Date: 2014-09-11 22:01:08 Summary: Work on workflow module testing. Affected #: 3 files diff -r dd31ab49162d3faa30818947a80a6165c2f0b4c7 -r d2ec0f230bdaed4ce52607b6ac1b6d5348dc1834 lib/galaxy/workflow/modules.py --- a/lib/galaxy/workflow/modules.py +++ b/lib/galaxy/workflow/modules.py @@ -120,6 +120,11 @@ ## ---- Run time --------------------------------------------------------- def get_runtime_inputs( self ): + """ Used internally to modules and when displaying inputs in display + and run workflow templates. The ToolModule doesn't implement this and + these templates contain specialized logic for dealing with the tool and + state directly in these cases. 
+ """ raise TypeError( "Abstract method" ) def encode_runtime_state( self, trans, state ): diff -r dd31ab49162d3faa30818947a80a6165c2f0b4c7 -r d2ec0f230bdaed4ce52607b6ac1b6d5348dc1834 test/unit/workflows/test_modules.py --- a/test/unit/workflows/test_modules.py +++ b/test/unit/workflows/test_modules.py @@ -1,16 +1,19 @@ from galaxy import eggs eggs.require( "mock" ) - +import json import mock +from galaxy import model + from galaxy.workflow import modules +from galaxy.tools import parameters from .workflow_support import MockTrans def test_input_has_no_errors(): trans = MockTrans() - input_step_module = modules.module_factory.new( trans, 'data_input' ) + input_step_module = modules.module_factory.new( trans, "data_input" ) assert not input_step_module.get_errors() @@ -18,15 +21,129 @@ trans = MockTrans() mock_tool = mock.Mock() trans.app.toolbox.tools[ "cat1" ] = mock_tool - tool_module = modules.module_factory.new( trans, 'tool', tool_id="cat1" ) + tool_module = modules.module_factory.new( trans, "tool", tool_id="cat1" ) assert not tool_module.get_errors() +def test_data_input_default_state(): + trans = MockTrans() + module = modules.module_factory.new( trans, "data_input" ) + __assert_has_runtime_input( module, label="Input Dataset" ) + + +def test_data_input_modified_state(): + module = __from_state( { + "type": "data_input", + "tool_state": json.dumps({ "name": "Cool Input" }), + } ) + __assert_has_runtime_input( module, label="Cool Input" ) + + +def test_data_input_step_modified_state(): + module = __from_step( + type="data_input", + tool_inputs={ + "name": "Cool Input", + }, + ) + __assert_has_runtime_input( module, label="Cool Input" ) + + +def test_data_input_compute_state_default(): + module = __from_step( + type="data_input", + ) + state, errors = module.compute_state( module.trans ) + assert not errors + assert 'input' in state.inputs + assert state.inputs[ 'input' ] is None + + +def test_data_input_compute_state_args(): + module = __from_step( + type="data_input", + ) + tool_state = module.encode_runtime_state( module.trans, module.test_step.state ) + + hda = model.HistoryDatasetAssociation() + with mock.patch('galaxy.workflow.modules.check_param') as check_method: + check_method.return_value = ( hda, None ) + state, errors = module.compute_state( module.trans, { 'input': 4, 'tool_state': tool_state } ) + + assert not errors + assert 'input' in state.inputs + assert state.inputs[ 'input' ] is hda + + +def test_data_collection_input_default_state(): + trans = MockTrans() + module = modules.module_factory.new( trans, "data_collection_input" ) + __assert_has_runtime_input( module, label="Input Dataset Collection", collection_type="list" ) + + +def test_data_input_collection_modified_state(): + module = __from_state( { + "type": "data_collection_input", + "tool_state": json.dumps({ "name": "Cool Input Collection", "collection_type": "list:paired" }), + } ) + __assert_has_runtime_input( module, label="Cool Input Collection", collection_type="list:paired" ) + + +def test_data_input_collection_step_modified_state(): + module = __from_step( + type="data_collection_input", + tool_inputs={ + "name": "Cool Input Collection", + "collection_type": "list:paired", + }, + ) + __assert_has_runtime_input( module, label="Cool Input Collection", collection_type="list:paired" ) + + def test_cannot_create_tool_modules_for_missing_tools(): trans = MockTrans() exception = False try: - modules.module_factory.new( trans, 'tool', tool_id="cat1" ) + modules.module_factory.new( trans, "tool", 
tool_id="cat1" ) except Exception: exception = True assert exception + + +def __assert_has_runtime_input( module, label=None, collection_type=None ): + inputs = module.get_runtime_inputs() + assert len( inputs ) == 1 + assert "input" in inputs + + input_param = inputs[ "input" ] + if label is not None: + assert input_param.get_label() == label, input_param.get_label() + if collection_type is not None: + assert input_param.collection_type == collection_type + return input_param + + +def __from_state( state ): + trans = MockTrans() + module = modules.module_factory.from_dict( trans, state ) + return module + + +def __from_step( **kwds ): + trans = MockTrans() + step = __step( + **kwds + ) + injector = modules.WorkflowModuleInjector( trans ) + injector.inject( step ) + module = step.module + module.test_step = step + return module + + +def __step( **kwds ): + step = model.WorkflowStep() + for key, value in kwds.iteritems(): + setattr( step, key, value ) + + return step diff -r dd31ab49162d3faa30818947a80a6165c2f0b4c7 -r d2ec0f230bdaed4ce52607b6ac1b6d5348dc1834 test/unit/workflows/workflow_support.py --- a/test/unit/workflows/workflow_support.py +++ b/test/unit/workflows/workflow_support.py @@ -11,13 +11,25 @@ class TestApp( object ): def __init__( self ): - self.config = bunch.Bunch( ) + self.config = bunch.Bunch( + tool_secret="awesome_secret", + ) self.model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True ) self.toolbox = TestToolbox() + self.datatypes_registry = TestDatatypesRegistry() + + +class TestDatatypesRegistry( object ): + + def __init__( self ): + pass + + def get_datatype_by_extension( self, ext ): + return ext class TestToolbox( object ): https://bitbucket.org/galaxy/galaxy-central/commits/b8520d9c8269/ Changeset: b8520d9c8269 User: jmchilton Date: 2014-09-11 22:01:08 Summary: Allow uuid values in to_dict. Affected #: 1 file diff -r d2ec0f230bdaed4ce52607b6ac1b6d5348dc1834 -r b8520d9c8269c4c7deca6418d54f2e722886073e lib/galaxy/model/item_attrs.py --- a/lib/galaxy/model/item_attrs.py +++ b/lib/galaxy/model/item_attrs.py @@ -4,6 +4,7 @@ import galaxy import logging import datetime +import uuid log = logging.getLogger( __name__ ) @@ -186,6 +187,8 @@ return value_mapper.get( key )( item ) if type(item) == datetime.datetime: return item.isoformat() + elif type(item) == uuid.UUID: + return str(item) # Leaving this for future reference, though we may want a more # generic way to handle special type mappings going forward. # If the item is of a class that needs to be 'stringified' before being put into a JSON data structure Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.