1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/8b9ca63f9128/ Changeset: 8b9ca63f9128 User: carlfeberhard Date: 2013-04-24 22:24:41 Summary: HDA model: add UsesAnnotations mixin; HDA API: add update method and allow name, deleted, visible, genome_build, dbkey, info, and annotation to be updated; History & HDA API: don't error on allowed but uneditable keys; Browser tests: test hda api Affected #: 9 files diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -727,6 +727,7 @@ """ # precondition: keys are proper, values are parsed and validated changed = {} + # unknown keys are ignored here for key in [ k for k in new_data.keys() if k in self.api_element_visible_keys ]: new_val = new_data[ key ] old_val = self.__getattribute__( key ) @@ -1428,7 +1429,11 @@ return msg -class HistoryDatasetAssociation( DatasetInstance ): +class HistoryDatasetAssociation( DatasetInstance, UsesAnnotations ): + """ + Resource class that creates a relation between a dataset and a user history. + """ + def __init__( self, hid = None, history = None, @@ -1436,6 +1441,9 @@ copied_from_library_dataset_dataset_association = None, sa_session = None, **kwd ): + """ + Create a a new HDA and associate it with the given history. + """ # FIXME: sa_session is must be passed to DataSetInstance if the create_dataset # parameter is True so that the new object can be flushed. Is there a better way? DatasetInstance.__init__( self, sa_session=sa_session, **kwd ) @@ -1444,7 +1452,11 @@ self.history = history self.copied_from_history_dataset_association = copied_from_history_dataset_association self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association + def copy( self, copy_children = False, parent_id = None ): + """ + Create a copy of this HDA. + """ hda = HistoryDatasetAssociation( hid=self.hid, name=self.name, info=self.info, @@ -1471,13 +1483,20 @@ hda.set_peek() object_session( self ).flush() return hda - def to_library_dataset_dataset_association( self, trans, target_folder, replace_dataset=None, parent_id=None, user=None, roles=[], ldda_message='' ): + + def to_library_dataset_dataset_association( self, trans, target_folder, + replace_dataset=None, parent_id=None, user=None, roles=[], ldda_message='' ): + """ + Copy this HDA to a library optionally replacing an existing LDDA. + """ if replace_dataset: - # The replace_dataset param ( when not None ) refers to a LibraryDataset that is being replaced with a new version. + # The replace_dataset param ( when not None ) refers to a LibraryDataset that + # is being replaced with a new version. library_dataset = replace_dataset else: - # If replace_dataset is None, the Library level permissions will be taken from the folder and applied to the new - # LibraryDataset, and the current user's DefaultUserPermissions will be applied to the associated Dataset. + # If replace_dataset is None, the Library level permissions will be taken from the folder and + # applied to the new LibraryDataset, and the current user's DefaultUserPermissions will be applied + # to the associated Dataset. 
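
The `changed = {}` loop shown in the first hunk of lib/galaxy/model/__init__.py is the pattern this commit extends to HDAs further down: only whitelisted keys are considered, values equal to the current attribute are skipped, and the caller gets back a dict of what actually changed. A minimal standalone sketch of that pattern follows; the class and its keys are illustrative stand-ins, not the Galaxy model.

class AttributeBag( object ):
    """Illustrative stand-in for a model class using the whitelist-update pattern."""
    api_element_visible_keys = ( 'name', 'deleted', 'genome_build' )

    def __init__( self ):
        self.name = 'Unnamed'
        self.deleted = False
        self.genome_build = '?'

    def set_from_dict( self, new_data ):
        # precondition (as in the diff): keys are proper, values are parsed and validated
        changed = {}
        # unknown keys are ignored here
        for key in [ k for k in new_data.keys() if k in self.api_element_visible_keys ]:
            new_val = new_data[ key ]
            old_val = getattr( self, key )
            if new_val == old_val:
                continue
            setattr( self, key, new_val )
            changed[ key ] = new_val
        return changed

bag = AttributeBag()
print bag.set_from_dict({ 'name': 'bler', 'deleted': False, 'hid': 12 })
# -> {'name': 'bler'}  ('deleted' was unchanged, 'hid' is not in the whitelist)
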
library_dataset = LibraryDataset( folder=target_folder, name=self.name, info=self.info ) object_session( self ).add( library_dataset ) object_session( self ).flush() @@ -1502,7 +1521,8 @@ object_session( self ).flush() # If roles were selected on the upload form, restrict access to the Dataset to those roles for role in roles: - dp = trans.model.DatasetPermissions( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action, ldda.dataset, role ) + dp = trans.model.DatasetPermissions( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action, + ldda.dataset, role ) trans.sa_session.add( dp ) trans.sa_session.flush() # Must set metadata after ldda flushed, as MetadataFiles require ldda.id @@ -1527,30 +1547,47 @@ ldda.set_peek() object_session( self ).flush() return ldda + def clear_associated_files( self, metadata_safe = False, purge = False ): + """ + """ # metadata_safe = True means to only clear when assoc.metadata_safe == False for assoc in self.implicitly_converted_datasets: if not assoc.deleted and ( not metadata_safe or not assoc.metadata_safe ): assoc.clear( purge = purge ) for assoc in self.implicitly_converted_parent_datasets: assoc.clear( purge = purge, delete_dataset = False ) + def get_display_name( self ): - ## Name can be either a string or a unicode object. If string, convert to unicode object assuming 'utf-8' format. + """ + Return the name of this HDA in either ascii or utf-8 encoding. + """ + # Name can be either a string or a unicode object. + # If string, convert to unicode object assuming 'utf-8' format. hda_name = self.name if isinstance(hda_name, str): hda_name = unicode(hda_name, 'utf-8') return hda_name + def get_access_roles( self, trans ): + """ + Return The access roles associated with this HDA's dataset. + """ return self.dataset.get_access_roles( trans ) + def quota_amount( self, user ): """ - If the user has multiple instances of this dataset, it will not affect their disk usage statistic. + Return the disk space used for this HDA relevant to user quotas. + + If the user has multiple instances of this dataset, it will not affect their + disk usage statistic. """ rval = 0 # Anon users are handled just by their single history size. if not user: return rval - # Gets an HDA and its children's disk usage, if the user does not already have an association of the same dataset + # Gets an HDA and its children's disk usage, if the user does not already + # have an association of the same dataset if not self.dataset.library_associations and not self.purged and not self.dataset.purged: for hda in self.dataset.history_associations: if hda.id == self.id: @@ -1562,7 +1599,11 @@ for child in self.children: rval += child.get_disk_usage( user ) return rval + def get_api_value( self, view='collection' ): + """ + Return attributes of this HDA that are exposed using the API. + """ # Since this class is a proxy to rather complex attributes we want to # display in other objects, we can't use the simpler method used by # other model classes. @@ -1598,6 +1639,33 @@ rval['metadata_' + name] = val return rval + def set_from_dict( self, new_data ): + #AKA: set_api_value + """ + Set object attributes to the values in dictionary new_data limiting + to only the following keys: name, deleted, visible, genome_build, + info, and blurb. + + Returns a dictionary of the keys, values that have been changed. 
+ """ + # precondition: keys are proper, values are parsed and validated + #NOTE!: does not handle metadata + editable_keys = ( 'name', 'deleted', 'visible', 'dbkey', 'info', 'blurb' ) + + changed = {} + # unknown keys are ignored here + for key in [ k for k in new_data.keys() if k in editable_keys ]: + new_val = new_data[ key ] + old_val = self.__getattribute__( key ) + if new_val == old_val: + continue + + self.__setattr__( key, new_val ) + changed[ key ] = new_val + + return changed + + class HistoryDatasetAssociationDisplayAtAuthorization( object ): def __init__( self, hda=None, user=None, site=None ): self.history_dataset_association = hda diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -427,10 +427,15 @@ class UsesHistoryDatasetAssociationMixin: - """ Mixin for controllers that use HistoryDatasetAssociation objects. """ + """ + Mixin for controllers that use HistoryDatasetAssociation objects. + """ def get_dataset( self, trans, dataset_id, check_ownership=True, check_accessible=False, check_state=True ): - """ Get an HDA object by id. """ + """ + Get an HDA object by id performing security checks using + the current transaction. + """ # DEPRECATION: We still support unencoded ids for backward compatibility try: # encoded id? @@ -466,7 +471,10 @@ def get_history_dataset_association( self, trans, history, dataset_id, check_ownership=True, check_accessible=False, check_state=False ): - """Get a HistoryDatasetAssociation from the database by id, verifying ownership.""" + """ + Get a HistoryDatasetAssociation from the database by id, verifying ownership. + """ + #TODO: duplicate of above? alias to above (or vis-versa) self.security_check( trans, history, check_ownership=check_ownership, check_accessible=check_accessible ) hda = self.get_object( trans, dataset_id, 'HistoryDatasetAssociation', check_ownership=False, check_accessible=False, deleted=False ) @@ -479,8 +487,9 @@ return hda def get_data( self, dataset, preview=True ): - """ Gets a dataset's data. """ - + """ + Gets a dataset's data. + """ # Get data from file, truncating if necessary. truncated = False dataset_data = None @@ -610,6 +619,27 @@ return display_apps + def set_hda_from_dict( self, trans, hda, new_data ): + """ + Changes HDA data using the given dictionary new_data. + """ + # precondition: access of the hda has already been checked + + # send what we can down into the model + changed = hda.set_from_dict( new_data ) + # the rest (often involving the trans) - do here + if 'annotation' in new_data.keys() and trans.get_user(): + hda.add_item_annotation( trans.sa_session, trans.get_user(), hda, new_data[ 'annotation' ] ) + changed[ 'annotation' ] = new_data[ 'annotation' ] + # tags + # sharing/permissions? + # purged + + if changed.keys(): + trans.sa_session.flush() + + return changed + class UsesLibraryMixin: diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 lib/galaxy/webapps/galaxy/api/histories.py --- a/lib/galaxy/webapps/galaxy/api/histories.py +++ b/lib/galaxy/webapps/galaxy/api/histories.py @@ -2,15 +2,10 @@ API operations on a history. 
""" -import pkg_resources -pkg_resources.require("Paste") -from paste.httpexceptions import HTTPBadRequest - from galaxy import web, util from galaxy.web.base.controller import BaseAPIController, UsesHistoryMixin from galaxy.web import url_for from galaxy.model.orm import desc -from galaxy.util.bunch import Bunch import logging log = logging.getLogger( __name__ ) @@ -197,28 +192,39 @@ # - protection against bad data form/type # - protection against malicious data content # all other conversions and processing (such as permissions, etc.) should happen down the line + + # keys listed here don't error when attempting to set, but fail silently + # this allows PUT'ing an entire model back to the server without attribute errors on uneditable attrs + valid_but_uneditable_keys = ( + 'id', 'model_class', 'nice_size', 'contents_url', 'purged', 'tags', + 'state', 'state_details', 'state_ids' + ) + + validated_payload = {} for key, val in payload.items(): # TODO: lots of boilerplate here, but overhead on abstraction is equally onerous if key == 'name': if not ( isinstance( val, str ) or isinstance( val, unicode ) ): raise ValueError( 'name must be a string or unicode: %s' %( str( type( val ) ) ) ) - payload[ 'name' ] = util.sanitize_html.sanitize_html( val, 'utf-8' ) + validated_payload[ 'name' ] = util.sanitize_html.sanitize_html( val, 'utf-8' ) #TODO:?? if sanitized != val: log.warn( 'script kiddie' ) elif key == 'deleted': if not isinstance( val, bool ): raise ValueError( 'deleted must be a boolean: %s' %( str( type( val ) ) ) ) + validated_payload[ 'deleted' ] = val elif key == 'published': - if not isinstance( payload[ 'published' ], bool ): + if not isinstance( val, bool ): raise ValueError( 'published must be a boolean: %s' %( str( type( val ) ) ) ) + validated_payload[ 'published' ] = val elif key == 'genome_build': if not ( isinstance( val, str ) or isinstance( val, unicode ) ): raise ValueError( 'genome_build must be a string: %s' %( str( type( val ) ) ) ) - payload[ 'genome_build' ] = util.sanitize_html.sanitize_html( val, 'utf-8' ) + validated_payload[ 'genome_build' ] = util.sanitize_html.sanitize_html( val, 'utf-8' ) elif key == 'annotation': if not ( isinstance( val, str ) or isinstance( val, unicode ) ): raise ValueError( 'annotation must be a string or unicode: %s' %( str( type( val ) ) ) ) - payload[ 'annotation' ] = util.sanitize_html.sanitize_html( val, 'utf-8' ) - else: + validated_payload[ 'annotation' ] = util.sanitize_html.sanitize_html( val, 'utf-8' ) + elif key not in valid_but_uneditable_keys: raise AttributeError( 'unknown key: %s' %( str( key ) ) ) - return payload + return validated_payload diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 lib/galaxy/webapps/galaxy/api/history_contents.py --- a/lib/galaxy/webapps/galaxy/api/history_contents.py +++ b/lib/galaxy/webapps/galaxy/api/history_contents.py @@ -1,13 +1,13 @@ """ API operations on the contents of a history. 
""" -import logging -from galaxy import web +from galaxy import web, util from galaxy.web.base.controller import BaseAPIController, url_for from galaxy.web.base.controller import UsesHistoryDatasetAssociationMixin, UsesHistoryMixin from galaxy.web.base.controller import UsesLibraryMixin, UsesLibraryMixinItems +import logging log = logging.getLogger( __name__ ) class HistoryContentsController( BaseAPIController, UsesHistoryDatasetAssociationMixin, UsesHistoryMixin, @@ -138,11 +138,14 @@ POST /api/histories/{encoded_history_id}/contents Creates a new history content item (file, aka HistoryDatasetAssociation). """ + #TODO: copy existing, accessible hda - dataset controller, copy_datasets + #TODO: convert existing, accessible hda - model.DatasetInstance(or hda.datatype).get_converter_types from_ld_id = payload.get( 'from_ld_id', None ) - try: history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False ) except Exception, e: + #TODO: no way to tell if it failed bc of perms or other (all MessageExceptions) + trans.response.status = 500 return str( e ) if from_ld_id: @@ -164,6 +167,90 @@ else: # TODO: implement other "upload" methods here. - trans.response.status = 403 + trans.response.status = 501 return "Not implemented." + @web.expose_api + def update( self, trans, history_id, id, payload, **kwd ): + """ + PUT /api/histories/{encoded_history_id}/contents/{encoded_content_id} + Changes an existing history dataset. + """ + #TODO: PUT /api/histories/{encoded_history_id} payload = { rating: rating } (w/ no security checks) + changed = {} + try: + hda = self.get_dataset( trans, id, + check_ownership=True, check_accessible=True, check_state=True ) + # validation handled here and some parsing, processing, and conversion + payload = self._validate_and_parse_update_payload( payload ) + # additional checks here (security, etc.) + changed = self.set_hda_from_dict( trans, hda, payload ) + + except Exception, exception: + log.error( 'Update of history (%s), HDA (%s) failed: %s', + history_id, id, str( exception ), exc_info=True ) + # convert to appropo HTTP code + if( isinstance( exception, ValueError ) + or isinstance( exception, AttributeError ) ): + # bad syntax from the validater/parser + trans.response.status = 400 + else: + trans.response.status = 500 + return { 'error': str( exception ) } + + return changed + + def _validate_and_parse_update_payload( self, payload ): + """ + Validate and parse incomming data payload for an HDA. + """ + # This layer handles (most of the stricter idiot proofing): + # - unknown/unallowed keys + # - changing data keys from api key to attribute name + # - protection against bad data form/type + # - protection against malicious data content + # all other conversions and processing (such as permissions, etc.) 
should happen down the line + + # keys listed here don't error when attempting to set, but fail silently + # this allows PUT'ing an entire model back to the server without attribute errors on uneditable attrs + valid_but_uneditable_keys = ( + 'id', 'name', 'type', 'api_type', 'model_class', 'history_id', 'hid', + 'accessible', 'purged', 'state', 'data_type', 'file_ext', 'file_size', 'misc_blurb', + 'download_url', 'visualizations', 'display_apps', 'display_types', + 'metadata_dbkey', 'metadata_column_names', 'metadata_column_types', 'metadata_columns', + 'metadata_comment_lines', 'metadata_data_lines' + ) + + validated_payload = {} + for key, val in payload.items(): + # TODO: lots of boilerplate here, but overhead on abstraction is equally onerous + # typecheck, parse, remap key + if key == 'name': + if not ( isinstance( val, str ) or isinstance( val, unicode ) ): + raise ValueError( 'name must be a string or unicode: %s' %( str( type( val ) ) ) ) + validated_payload[ 'name' ] = util.sanitize_html.sanitize_html( val, 'utf-8' ) + #TODO:?? if sanitized != val: log.warn( 'script kiddie' ) + elif key == 'deleted': + if not isinstance( val, bool ): + raise ValueError( 'deleted must be a boolean: %s' %( str( type( val ) ) ) ) + validated_payload[ 'deleted' ] = val + elif key == 'visible': + if not isinstance( val, bool ): + raise ValueError( 'visible must be a boolean: %s' %( str( type( val ) ) ) ) + validated_payload[ 'visible' ] = val + elif key == 'genome_build': + if not ( isinstance( val, str ) or isinstance( val, unicode ) ): + raise ValueError( 'genome_build must be a string: %s' %( str( type( val ) ) ) ) + validated_payload[ 'dbkey' ] = util.sanitize_html.sanitize_html( val, 'utf-8' ) + elif key == 'annotation': + if not ( isinstance( val, str ) or isinstance( val, unicode ) ): + raise ValueError( 'annotation must be a string or unicode: %s' %( str( type( val ) ) ) ) + validated_payload[ 'annotation' ] = util.sanitize_html.sanitize_html( val, 'utf-8' ) + elif key == 'misc_info': + if not ( isinstance( val, str ) or isinstance( val, unicode ) ): + raise ValueError( 'misc_info must be a string or unicode: %s' %( str( type( val ) ) ) ) + validated_payload[ 'info' ] = util.sanitize_html.sanitize_html( val, 'utf-8' ) + elif key not in valid_but_uneditable_keys: + raise AttributeError( 'unknown key: %s' %( str( key ) ) ) + return validated_payload + diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 test/casperjs/api-hda-tests.js --- /dev/null +++ b/test/casperjs/api-hda-tests.js @@ -0,0 +1,406 @@ +/* Utility to load a specific page and output html, page text, or a screenshot + * Optionally wait for some time, text, or dom selector + */ +try { + //...if there's a better way - please let me know, universe + var scriptDir = require( 'system' ).args[3] + // remove the script filename + .replace( /[\w|\.|\-|_]*$/, '' ) + // if given rel. 
path, prepend the curr dir + .replace( /^(?!\/)/, './' ), + spaceghost = require( scriptDir + 'spaceghost' ).create({ + // script options here (can be overridden by CLI) + //verbose: true, + //logLevel: debug, + scriptDir: scriptDir + }); + +} catch( error ){ + console.debug( error ); + phantom.exit( 1 ); +} +spaceghost.start(); + +// =================================================================== SET UP +var utils = require( 'utils' ); + +var email = spaceghost.user.getRandomEmail(), + password = '123456'; +if( spaceghost.fixtureData.testUser ){ + email = spaceghost.fixtureData.testUser.email; + password = spaceghost.fixtureData.testUser.password; +} +spaceghost.user.loginOrRegisterUser( email, password ); + +var uploadFilename = '1.sam', + uploadFilepath = '../../test-data/' + uploadFilename, + upload = {}; +spaceghost.thenOpen( spaceghost.baseUrl ).tools.uploadFile( uploadFilepath, function( uploadInfo ){ + upload = uploadInfo; +}); + +function hasKeys( object, keysArray ){ + if( !utils.isObject( object ) ){ return false; } + for( var i=0; i<keysArray.length; i += 1 ){ + if( !object.hasOwnProperty( keysArray[i] ) ){ + spaceghost.debug( 'object missing key: ' + keysArray[i] ); + return false; + } + } + return true; +} + +function countKeys( object ){ + if( !utils.isObject( object ) ){ return 0; } + var count = 0; + for( var key in object ){ + if( object.hasOwnProperty( key ) ){ count += 1; } + } + return count; +} + +// =================================================================== TESTS +var summaryKeys = [ 'id', 'name', 'type', 'url' ], + detailKeys = [ + // the following are always present regardless of datatype + 'id', 'name', 'api_type', 'model_class', + 'history_id', 'hid', + 'accessible', 'deleted', 'visible', 'purged', + 'state', 'data_type', 'file_ext', 'file_size', + 'misc_info', 'misc_blurb', + 'download_url', 'visualizations', 'display_apps', 'display_types', + 'genome_build', + // the following are NOT always present DEPENDING ON datatype + 'metadata_dbkey', + 'metadata_column_names', 'metadata_column_types', 'metadata_columns', + 'metadata_comment_lines', 'metadata_data_lines' + ]; + +spaceghost.historypanel.waitForHdas().then( function(){ + + var uploaded = this.historypanel.hdaElementInfoByTitle( uploadFilename ); + this.info( 'found uploaded hda: ' + uploaded.attributes.id ); + this.debug( 'uploaded hda: ' + this.jsonStr( uploaded ) ); + // ------------------------------------------------------------------------------------------- INDEX + this.test.comment( 'index should return a list of summary data for each hda' ); + var histories = this.api.histories.index(), + lastHistory = histories[0], + hdaIndex = this.api.hdas.index( lastHistory.id ); + //this.debug( 'hdaIndex:' + this.jsonStr( hdaIndex ) ); + + this.test.assert( utils.isArray( hdaIndex ), "index returned an array: length " + hdaIndex.length ); + this.test.assert( hdaIndex.length >= 1, 'Has at least one hda' ); + + var firstHda = hdaIndex[0]; + this.test.assert( hasKeys( firstHda, summaryKeys ), 'Has the proper keys' ); + + this.test.assert( this.api.isEncodedId( firstHda.id ), 'Id appears well-formed: ' + firstHda.id ); + this.test.assert( uploaded.text.indexOf( firstHda.name ) !== -1, 'Title matches: ' + firstHda.name ); + // not caring about type or url here + + + // ------------------------------------------------------------------------------------------- SHOW + this.test.comment( 'show should get an HDA details object' ); + var hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + 
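
The index/show assertions this browser test is building toward can also be reproduced outside the casperjs harness. A rough equivalent in Python, assuming the requests library, a local Galaxy at GALAXY_URL, and API-key auth via the key parameter — all of which are assumptions of this sketch, not part of the commit; the summary and detail key lists come from summaryKeys/detailKeys above.

import json
import requests

GALAXY_URL = 'http://localhost:8080'   # hypothetical local instance
API_KEY    = 'your-api-key-here'

def api_get( path ):
    r = requests.get( GALAXY_URL + path, params={ 'key': API_KEY } )
    r.raise_for_status()
    return json.loads( r.text )

histories = api_get( '/api/histories' )
last_history = histories[0]

# summary objects from the contents index
hda_index = api_get( '/api/histories/%s/contents' % last_history['id'] )
first_hda = hda_index[0]
assert set( [ 'id', 'name', 'type', 'url' ] ) <= set( first_hda.keys() )

# detailed object from show; the metadata_* keys depend on the datatype
hda_show = api_get( '/api/histories/%s/contents/%s' % ( last_history['id'], first_hda['id'] ) )
for key in ( 'history_id', 'hid', 'state', 'file_ext', 'genome_build', 'misc_blurb' ):
    assert key in hda_show, 'missing key: %s' % key
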
//this.debug( this.jsonStr( hdaShow ) ); + this.test.assert( hasKeys( hdaShow, detailKeys ), 'Has the proper keys' ); + + //TODO: validate data in each hdaShow attribute... + + + // ------------------------------------------------------------------------------------------- INDEX (detailed) + this.test.comment( 'index should return a list of detailed data for each hda in "ids" when passed' ); + hdaIndex = this.api.hdas.index( lastHistory.id, [ firstHda.id ] ); + this.debug( 'hdaIndex:' + this.jsonStr( hdaIndex ) ); + + this.test.assert( utils.isArray( hdaIndex ), "index returned an array: length " + hdaIndex.length ); + this.test.assert( hdaIndex.length >= 1, 'Has at least one hda' ); + + firstHda = hdaIndex[0]; + this.test.assert( hasKeys( firstHda, detailKeys ), 'Has the proper keys' ); + + //TODO??: validate data in firstHda attribute? we ASSUME it's from a common method as show... + + + // ------------------------------------------------------------------------------------------- CREATE + //TODO: create from_ld_id + + + // ------------------------------------------------------------------------------------------- UPDATE + // ........................................................................................... idiot proofing + this.test.comment( 'updating to the current value should return no value (no change)' ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + var returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + name : hdaShow.name + }); + this.test.assert( countKeys( returned ) === 0, "No changed returned: " + this.jsonStr( returned ) ); + + this.test.comment( 'updating using a nonsense key should fail with an error' ); + var err = {}; + try { + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + konamiCode : 'uuddlrlrba' + }); + } catch( error ){ + err = error; + //this.debug( this.jsonStr( err ) ); + } + this.test.assert( !!err.message, "Error occurred: " + err.message ); + this.test.assert( err.status === 400, "Error status is 400: " + err.status ); + + this.test.comment( 'updating by attempting to change type should cause an error' ); + err = {}; + try { + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + //name : false + deleted : 'sure why not' + }); + } catch( error ){ + err = error; + //this.debug( this.jsonStr( err ) ); + } + this.test.assert( !!err.message, "Error occurred: " + err.message ); + this.test.assert( err.status === 400, "Error status is 400: " + err.status ); + //TODO??: other type checks? + + + // ........................................................................................... 
name + this.test.comment( 'update should allow changing the name' ); + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + name : 'New name' + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.name === 'New name', "Name successfully set via update: " + hdaShow.name ); + + this.test.comment( 'update should sanitize any new name' ); + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + name : 'New name<script type="text/javascript" src="bler">alert("blah");</script>' + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.name === 'New name', "Update sanitized name: " + hdaShow.name ); + + //NOTE!: this fails on sqlite3 (with default setup) + try { + this.test.comment( 'update should allow unicode in names' ); + var unicodeName = 'Ржевский сапоги'; + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + name : unicodeName + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.name === unicodeName, "Update accepted unicode name: " + hdaShow.name ); + } catch( err ){ + //this.debug( this.jsonStr( err ) ); + if( ( err instanceof this.api.APIError ) + && ( err.status === 500 ) + && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){ + this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' ); + } + } + + this.test.comment( 'update should allow escaped quotations in names' ); + var quotedName = '"Bler"'; + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + name : quotedName + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.name === quotedName, + "Update accepted escaped quotations in name: " + hdaShow.name ); + + + // ........................................................................................... deleted + this.test.comment( 'update should allow changing the deleted flag' ); + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + deleted: true + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.deleted === true, "Update set the deleted flag: " + hdaShow.deleted ); + + this.test.comment( 'update should allow changing the deleted flag back' ); + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + deleted: false + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.deleted === false, "Update set the deleted flag: " + hdaShow.deleted ); + + + // ........................................................................................... visible/hidden + this.test.comment( 'update should allow changing the visible flag' ); + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + visible: false + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.visible === false, "Update set the visible flag: " + hdaShow.visible ); + + + // ........................................................................................... 
genome_build/dbkey + this.test.comment( 'update should allow changing the genome_build' ); + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + genome_build : 'hg18' + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.genome_build === 'hg18', + "genome_build successfully set via update: " + hdaShow.genome_build ); + this.test.assert( hdaShow.metadata_dbkey === 'hg18', + "metadata_dbkey successfully set via the same update: " + hdaShow.metadata_dbkey ); + + this.test.comment( 'update should sanitize any genome_build' ); + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + genome_build : 'hg18<script type="text/javascript" src="bler">alert("blah");</script>' + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.genome_build === 'hg18', + "Update sanitized genome_build: " + hdaShow.genome_build ); + this.test.assert( hdaShow.metadata_dbkey === 'hg18', + "metadata_dbkey successfully set via the same update: " + hdaShow.metadata_dbkey ); + + this.test.comment( 'update should allow unicode in genome builds' ); + var unicodeBuild = 'Ржевский18'; + //NOTE!: this fails on sqlite3 (with default setup) + try { + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + name : unicodeBuild + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.genome_build === unicodeBuild, + "Update accepted unicode genome_build: " + hdaShow.name ); + } catch( err ){ + //this.debug( this.jsonStr( err ) ); + if( ( err instanceof this.api.APIError ) + && ( err.status === 500 ) + && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){ + this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' ); + } + } + + // ........................................................................................... 
misc_info/info + this.test.comment( 'update should allow changing the misc_info' ); + var newInfo = 'I\'ve made a huge mistake.'; + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + misc_info : newInfo + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.misc_info === newInfo, + "misc_info successfully set via update: " + hdaShow.misc_info ); + + this.test.comment( 'update should sanitize any misc_info' ); + var newInfo = 'You\'re going to get hop-ons.'; + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + misc_info : newInfo + '<script type="text/javascript" src="bler">alert("blah");</script>' + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.misc_info === newInfo, + "Update sanitized misc_info: " + hdaShow.misc_info ); + + this.test.comment( 'update should allow unicode in misc_info' ); + var unicodeInfo = '여보!'; + //NOTE!: this fails on sqlite3 (with default setup) + try { + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + misc_info : unicodeInfo + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.misc_info === unicodeInfo, + "Update accepted unicode misc_info: " + hdaShow.misc_info ); + } catch( err ){ + //this.debug( this.jsonStr( err ) ); + if( ( err instanceof this.api.APIError ) + && ( err.status === 500 ) + && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){ + this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' ); + } + } + +/* + // ........................................................................................... 
annotation + // currently fails because no annotation is returned in details + this.test.comment( 'update should allow changing the annotation' ); + var newAnnotation = 'Found this sample on a movie theatre floor'; + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + annotation : newAnnotation + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.annotation === newAnnotation, + "Annotation successfully set via update: " + hdaShow.annotation ); + + this.test.comment( 'update should sanitize any new annotation' ); + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + annotation : 'New annotation<script type="text/javascript" src="bler">alert("blah");</script>' + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.annotation === 'New annotation', + "Update sanitized annotation: " + hdaShow.annotation ); + + //NOTE!: this fails on sqlite3 (with default setup) + try { + this.test.comment( 'update should allow unicode in annotations' ); + var unicodeAnnotation = 'お願いは、それが落下させない'; + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + annotation : unicodeAnnotation + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.annotation === unicodeAnnotation, + "Update accepted unicode annotation: " + hdaShow.annotation ); + } catch( err ){ + //this.debug( this.jsonStr( err ) ); + if( ( err instanceof this.api.APIError ) + && ( err.status === 500 ) + && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){ + this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' 
); + } + } + + this.test.comment( 'update should allow escaped quotations in annotations' ); + var quotedAnnotation = '"Bler"'; + returned = this.api.hdas.update( lastHistory.id, firstHda.id, { + annotation : quotedAnnotation + }); + //this.debug( 'returned:\n' + this.jsonStr( returned ) ); + hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id ); + this.test.assert( hdaShow.annotation === quotedAnnotation, + "Update accepted escaped quotations in annotation: " + hdaShow.annotation ); +*/ + + + // ------------------------------------------------------------------------------------------- ERRORS + this.test.comment( 'create should error with "not implemented" when the param "from_ld_id" is not used' ); + var errored = false; + try { + // sending an empty object won't work + var created = this.api.hdas.create( lastHistory.id, { bler: 'bler' } ); + + } catch( err ){ + errored = true; + this.test.assert( err.message.indexOf( 'Not implemented' ) !== -1, + 'Error has the proper message: ' + err.message ); + this.test.assert( err.status === 501, 'Error has the proper status code: ' + err.status ); + } + if( !errored ){ + this.test.fail( 'create without "from_ld_id" did not cause error' ); + } + + + //var returned = this.api.hdas.update( lastHistory.id, hdaIndex[0].id, { deleted: true, blerp: 'blerp' }); + //var returned = this.api.hdas.update( lastHistory.id, { deleted: true, blerp: 'blerp' }); + //this.debug( 'returned:' + this.jsonStr( returned ) ); + //this.debug( 'page:' + this.jsonStr( this.page ) ); +}); + +// =================================================================== +spaceghost.run( function(){ +}); diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 test/casperjs/api-history-tests.js --- a/test/casperjs/api-history-tests.js +++ b/test/casperjs/api-history-tests.js @@ -36,7 +36,10 @@ function hasKeys( object, keysArray ){ if( !utils.isObject( object ) ){ return false; } for( var i=0; i<keysArray.length; i += 1 ){ - if( !object.hasOwnProperty( keysArray[i] ) ){ return false; } + if( !object.hasOwnProperty( keysArray[i] ) ){ + spaceghost.debug( 'object missing key: ' + keysArray[i] ); + return false; + } } return true; } @@ -362,6 +365,10 @@ "Update accepted escaped quotations in annotation: " + historyShow.annotation ); + // ------------------------------------------------------------------------------------------- ERRORS + //TODO: make sure expected errors are being passed back (but no permissions checks here - different suite) + // bad ids: index, show, update, delete, undelete + /* */ //this.debug( this.jsonStr( historyShow ) ); diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 test/casperjs/modules/api.js --- a/test/casperjs/modules/api.js +++ b/test/casperjs/modules/api.js @@ -232,7 +232,7 @@ }; HDAAPI.prototype.show = function show( historyId, id, deleted ){ - this.api.spaceghost.info( 'hda.show: ' + [ id, (( deleted )?( 'w deleted' ):( '' )) ] ); + this.api.spaceghost.info( 'hda.show: ' + [ historyId, id, (( deleted )?( 'w deleted' ):( '' )) ] ); id = ( id === 'most_recently_used' )?( id ):( this.api.ensureId( id ) ); deleted = deleted || false; @@ -242,7 +242,7 @@ }; HDAAPI.prototype.create = function create( historyId, payload ){ - this.api.spaceghost.info( 'hda.create: ' + this.api.spaceghost.jsonStr( payload ) ); + this.api.spaceghost.info( 'hda.create: ' + [ historyId, this.api.spaceghost.jsonStr( payload ) ] ); // py.payload <-> ajax.data payload = this.api.ensureObject( payload 
); @@ -253,8 +253,7 @@ }; HDAAPI.prototype.update = function create( historyId, id, payload ){ - this.api.spaceghost.info( 'hda.update: ' + historyId + ',' + id + ',' - + this.api.spaceghost.jsonStr( payload ) ); + this.api.spaceghost.info( 'hda.update: ' + [ historyId, id, this.api.spaceghost.jsonStr( payload ) ] ); // py.payload <-> ajax.data historyId = this.api.ensureId( historyId ); diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 test/casperjs/modules/tools.js --- a/test/casperjs/modules/tools.js +++ b/test/casperjs/modules/tools.js @@ -13,7 +13,7 @@ //??: circ ref? this.options = {}; /** Default amount of ms to wait for upload to finish */ - this.options.defaultUploadWait = ( 30 * 1000 ); + this.options.defaultUploadWait = ( 45 * 1000 ); this.spaceghost = spaceghost; }; exports.Tools = Tools; @@ -108,14 +108,22 @@ // wait for main panel, history reload ////NOTE!: assumes tool execution reloads the history panel - this.waitForMultipleNavigation( [ 'tool_runner/upload_async_message', 'history' ], function(){ - // debugging - this.jumpToMain( function(){ - var messageInfo = this.elementInfoOrNull( this.data.selectors.messages.all ); - this.debug( ( messageInfo )?( messageInfo.attributes['class'] + ':\n' + messageInfo.text ) - :( 'NO post upload message' ) ); - }); - }); + this.waitForMultipleNavigation( [ 'tool_runner/upload_async_message', 'history' ], + function thenAfterUploadRefreshes(){ + // debugging + this.jumpToMain( function(){ + var messageInfo = this.elementInfoOrNull( this.data.selectors.messages.all ); + this.debug( ( messageInfo )?( messageInfo.attributes['class'] + ':\n' + messageInfo.text ) + :( 'NO post upload message' ) ); + }); + }, + function timeoutWaitingForUploadRefreshes( urlsStillWaitingOn ){ + this.capture( 'upload-error.png' ) + throw new this.GalaxyError( 'Upload Error: ' + + 'timeout waiting for upload "' + filepath + '" refreshes: ' + urlsStillWaitingOn ); + }, + this.tools.options.defaultUploadWait + ); }); }; @@ -160,13 +168,19 @@ // error if an info message wasn't found spaceghost.withMainPanel( function checkUploadMessage(){ var infoInfo = spaceghost.elementInfoOrNull( this.data.selectors.messages.infolarge ); - if( ( !infoInfo ) - || ( infoInfo.text.indexOf( this.data.text.upload.success ) === -1 ) ){ - throw new this.GalaxyError( 'Upload Error: no info message uploading "' + filepath + '"' ); + if( ( infoInfo ) + && ( infoInfo.text.indexOf( this.data.text.upload.success ) !== -1 ) ){ + // safe to store these + uploadInfo.filename = filename; + uploadInfo.filepath = filepath; + + } else { + // capture any other messages on the page + var otherInfo = spaceghost.elementInfoOrNull( this.data.selectors.messages.all ), + message = ( otherInfo && otherInfo.text )?( otherInfo.text ):( '' ); + this.capture( 'upload-error.png' ) + throw new this.GalaxyError( 'Upload Error: no success message uploading "' + filepath + '": ' + message ); } - // safe to store these - uploadInfo.filename = filename; - uploadInfo.filepath = filepath; }); // the hpanel should refresh and display the uploading file, wait for that to go into the ok state @@ -177,6 +191,7 @@ if( hdaElement === null ){ var hdaContainer = this.historypanel.data.selectors.hdaContainer; this.warning( 'Upload Error: ' + hdaContainer + ':\n' + this.getHTML( hdaContainer ) ); + this.capture( 'upload-error.png' ) throw new this.GalaxyError( 'Upload Error: uploaded file HDA not found: ' + uploadInfo.filename ); } this.debug( 'uploaded HDA element: ' + this.jsonStr( 
this.quickInfo( hdaElement ) ) ); @@ -191,6 +206,7 @@ }, function timeoutFn( newHdaInfo ){ this.warning( 'timeout waiting for upload:\n' + this.jsonStr( this.quickInfo( newHdaInfo ) ) ); + this.capture( 'upload-error.png' ) throw new spaceghost.GalaxyError( 'Upload Error: timeout waiting for ok state: ' + '"' + uploadInfo.filepath + '" (waited ' + timeoutAfterMs + ' ms)' ); diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 test/casperjs/spaceghost.js --- a/test/casperjs/spaceghost.js +++ b/test/casperjs/spaceghost.js @@ -546,9 +546,10 @@ * @param {String} urlToWaitFor the url to wait for (rel. to spaceghost.baseUrl) * @param {Function} then the function to call after the nav request * @param {Function} timeoutFn the function to call on timeout (optional) + * @param {Integer} waitMs manual setting of ms to wait (optional) */ -SpaceGhost.prototype.waitForNavigation = function waitForNavigation( urlToWaitFor, then, timeoutFn ){ - return this.waitForMultipleNavigation( [ urlToWaitFor ], then, timeoutFn ); +SpaceGhost.prototype.waitForNavigation = function waitForNavigation( urlToWaitFor, then, timeoutFn, waitMs ){ + return this.waitForMultipleNavigation( [ urlToWaitFor ], then, timeoutFn, waitMs ); }; /** Wait for a multiple navigation requests then call a function. @@ -557,9 +558,13 @@ * @param {String[]} urlsToWaitFor the relative urls to wait for * @param {Function} then the function to call after the nav request * @param {Function} timeoutFn the function to call on timeout (optional) + * @param {Integer} waitMs manual setting of ms to wait (optional) */ -SpaceGhost.prototype.waitForMultipleNavigation = function waitForMultipleNavigation( urlsToWaitFor, then, timeoutFn ){ - this.info( 'waiting for navigation: ' + this.jsonStr( urlsToWaitFor ) ); +SpaceGhost.prototype.waitForMultipleNavigation = function waitForMultipleNavigation( urlsToWaitFor, + then, timeoutFn, waitMs ){ + waitMs = waitMs || ( this.options.waitTimeout * urlsToWaitFor.length ); + + this.info( 'waiting for navigation: ' + this.jsonStr( urlsToWaitFor ) + ', timeout after: ' + waitMs ); function urlMatches( urlToMatch, url ){ return ( url.indexOf( spaceghost.baseUrl + '/' + urlToMatch ) !== -1 ); } @@ -589,9 +594,10 @@ if( utils.isFunction( then ) ){ then.call( this ); } }, function timeout(){ - if( utils.isFunction( timeoutFn ) ){ timeoutFn.call( this ); } + this.removeListener( 'navigation.requested', catchNavReq ); + if( utils.isFunction( timeoutFn ) ){ timeoutFn.call( this, urlsToWaitFor ); } }, - this.options.waitTimeout * urlsToWaitFor.length + waitMs ); return this; }; Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
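
The update behaviour exercised by the browser tests above can be driven the same way against the new endpoint. A hedged sketch, again assuming the requests library, a local instance, and API-key auth via the key parameter; the endpoint paths and status codes come from the diff, while the field values and the instance details are arbitrary.

import json
import requests

GALAXY_URL = 'http://localhost:8080'   # hypothetical local instance
API_KEY    = 'your-api-key-here'
AUTH       = { 'key': API_KEY }
JSON_HDRS  = { 'Content-Type': 'application/json' }

histories  = json.loads( requests.get( GALAXY_URL + '/api/histories', params=AUTH ).text )
history_id = histories[0]['id']
contents   = json.loads( requests.get( '%s/api/histories/%s/contents' % ( GALAXY_URL, history_id ),
                                       params=AUTH ).text )
hda_id     = contents[0]['id']

def update_hda( history_id, hda_id, payload ):
    url = '%s/api/histories/%s/contents/%s' % ( GALAXY_URL, history_id, hda_id )
    return requests.put( url, params=AUTH, data=json.dumps( payload ), headers=JSON_HDRS )

# editable keys are sanitized/remapped server-side; only actual changes come back.
# valid-but-uneditable keys such as 'id' or 'state' are accepted silently, so an
# entire show() dict can be PUT back unmodified without raising attribute errors.
r = update_hda( history_id, hda_id, {
    'name'         : 'New name',
    'genome_build' : 'hg18',
    'misc_info'    : "I've made a huge mistake.",
})
print r.status_code, json.loads( r.text )

# an unknown key or a wrong type is rejected by the validator with a 400
r = update_hda( history_id, hda_id, { 'konamiCode': 'uuddlrlrba' } )
assert r.status_code == 400

# create without 'from_ld_id' is not implemented yet and now returns a 501
r = requests.post( '%s/api/histories/%s/contents' % ( GALAXY_URL, history_id ),
                   params=AUTH, data=json.dumps( { 'bler': 'bler' } ), headers=JSON_HDRS )
assert r.status_code == 501
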