galaxy-commits
April 2013: 1 participant, 197 discussions
commit/galaxy-central: carlfeberhard: HDA model: add UsesAnnotations mixin; HDA API: add update method and allow name, deleted, visible, genome_build, dbkey, info, and annotation to be updated; History & HDA API: don't error on allowed but uneditable keys; Browser tests: test hda api
by commits-noreply@bitbucket.org, 24 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8b9ca63f9128/
Changeset: 8b9ca63f9128
User: carlfeberhard
Date: 2013-04-24 22:24:41
Summary: HDA model: add UsesAnnotations mixin; HDA API: add update method and allow name, deleted, visible, genome_build, dbkey, info, and annotation to be updated; History & HDA API: don't error on allowed but uneditable keys; Browser tests: test hda api
Affected #: 9 files
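
[Editor's note: before the diff itself, a minimal sketch of how a client might exercise the new update endpoint. This is not part of the commit; the base URL, API key, and encoded ids are placeholder assumptions, and the Python requests library stands in for whatever HTTP client you use.]

# Hypothetical client call against the new PUT route; all values below are
# placeholders, not real Galaxy ids or keys.
import requests

base_url = 'http://localhost:8080'     # assumed local Galaxy instance
api_key = 'your-api-key'               # assumed user API key
history_id = 'ebfb8f50c6abde6d'        # hypothetical encoded history id
hda_id = 'f2db41e1fa331b3e'            # hypothetical encoded HDA id

response = requests.put(
    '%s/api/histories/%s/contents/%s' % (base_url, history_id, hda_id),
    params={'key': api_key},
    json={'name': 'Renamed dataset', 'visible': False},
)
# On success the endpoint returns only the key/value pairs that changed.
print(response.status_code, response.json())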
diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -727,6 +727,7 @@
"""
# precondition: keys are proper, values are parsed and validated
changed = {}
+ # unknown keys are ignored here
for key in [ k for k in new_data.keys() if k in self.api_element_visible_keys ]:
new_val = new_data[ key ]
old_val = self.__getattribute__( key )
@@ -1428,7 +1429,11 @@
return msg
-class HistoryDatasetAssociation( DatasetInstance ):
+class HistoryDatasetAssociation( DatasetInstance, UsesAnnotations ):
+ """
+ Resource class that creates a relation between a dataset and a user history.
+ """
+
def __init__( self,
hid = None,
history = None,
@@ -1436,6 +1441,9 @@
copied_from_library_dataset_dataset_association = None,
sa_session = None,
**kwd ):
+ """
+ Create a new HDA and associate it with the given history.
+ """
# FIXME: sa_session must be passed to DatasetInstance if the create_dataset
# parameter is True so that the new object can be flushed. Is there a better way?
DatasetInstance.__init__( self, sa_session=sa_session, **kwd )
@@ -1444,7 +1452,11 @@
self.history = history
self.copied_from_history_dataset_association = copied_from_history_dataset_association
self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association
+
def copy( self, copy_children = False, parent_id = None ):
+ """
+ Create a copy of this HDA.
+ """
hda = HistoryDatasetAssociation( hid=self.hid,
name=self.name,
info=self.info,
@@ -1471,13 +1483,20 @@
hda.set_peek()
object_session( self ).flush()
return hda
- def to_library_dataset_dataset_association( self, trans, target_folder, replace_dataset=None, parent_id=None, user=None, roles=[], ldda_message='' ):
+
+ def to_library_dataset_dataset_association( self, trans, target_folder,
+ replace_dataset=None, parent_id=None, user=None, roles=[], ldda_message='' ):
+ """
+ Copy this HDA to a library, optionally replacing an existing LDDA.
+ """
if replace_dataset:
- # The replace_dataset param ( when not None ) refers to a LibraryDataset that is being replaced with a new version.
+ # The replace_dataset param ( when not None ) refers to a LibraryDataset that
+ # is being replaced with a new version.
library_dataset = replace_dataset
else:
- # If replace_dataset is None, the Library level permissions will be taken from the folder and applied to the new
- # LibraryDataset, and the current user's DefaultUserPermissions will be applied to the associated Dataset.
+ # If replace_dataset is None, the Library level permissions will be taken from the folder and
+ # applied to the new LibraryDataset, and the current user's DefaultUserPermissions will be applied
+ # to the associated Dataset.
library_dataset = LibraryDataset( folder=target_folder, name=self.name, info=self.info )
object_session( self ).add( library_dataset )
object_session( self ).flush()
@@ -1502,7 +1521,8 @@
object_session( self ).flush()
# If roles were selected on the upload form, restrict access to the Dataset to those roles
for role in roles:
- dp = trans.model.DatasetPermissions( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action, ldda.dataset, role )
+ dp = trans.model.DatasetPermissions( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action,
+ ldda.dataset, role )
trans.sa_session.add( dp )
trans.sa_session.flush()
# Must set metadata after ldda flushed, as MetadataFiles require ldda.id
@@ -1527,30 +1547,47 @@
ldda.set_peek()
object_session( self ).flush()
return ldda
+
def clear_associated_files( self, metadata_safe = False, purge = False ):
+ """
+ """
# metadata_safe = True means to only clear when assoc.metadata_safe == False
for assoc in self.implicitly_converted_datasets:
if not assoc.deleted and ( not metadata_safe or not assoc.metadata_safe ):
assoc.clear( purge = purge )
for assoc in self.implicitly_converted_parent_datasets:
assoc.clear( purge = purge, delete_dataset = False )
+
def get_display_name( self ):
- ## Name can be either a string or a unicode object. If string, convert to unicode object assuming 'utf-8' format.
+ """
+ Return the name of this HDA in either ascii or utf-8 encoding.
+ """
+ # Name can be either a string or a unicode object.
+ # If string, convert to unicode object assuming 'utf-8' format.
hda_name = self.name
if isinstance(hda_name, str):
hda_name = unicode(hda_name, 'utf-8')
return hda_name
+
def get_access_roles( self, trans ):
+ """
+ Return the access roles associated with this HDA's dataset.
+ """
return self.dataset.get_access_roles( trans )
+
def quota_amount( self, user ):
"""
- If the user has multiple instances of this dataset, it will not affect their disk usage statistic.
+ Return the disk space used for this HDA relevant to user quotas.
+
+ If the user has multiple instances of this dataset, it will not affect their
+ disk usage statistic.
"""
rval = 0
# Anon users are handled just by their single history size.
if not user:
return rval
- # Gets an HDA and its children's disk usage, if the user does not already have an association of the same dataset
+ # Gets an HDA and its children's disk usage, if the user does not already
+ # have an association of the same dataset
if not self.dataset.library_associations and not self.purged and not self.dataset.purged:
for hda in self.dataset.history_associations:
if hda.id == self.id:
@@ -1562,7 +1599,11 @@
for child in self.children:
rval += child.get_disk_usage( user )
return rval
+
def get_api_value( self, view='collection' ):
+ """
+ Return attributes of this HDA that are exposed using the API.
+ """
# Since this class is a proxy to rather complex attributes we want to
# display in other objects, we can't use the simpler method used by
# other model classes.
@@ -1598,6 +1639,33 @@
rval['metadata_' + name] = val
return rval
+ def set_from_dict( self, new_data ):
+ #AKA: set_api_value
+ """
+ Set object attributes to the values in dictionary new_data, limited
+ to the following keys: name, deleted, visible, dbkey, info,
+ and blurb.
+
+ Returns a dictionary of the key/value pairs that were changed.
+ """
+ # precondition: keys are proper, values are parsed and validated
+ #NOTE!: does not handle metadata
+ editable_keys = ( 'name', 'deleted', 'visible', 'dbkey', 'info', 'blurb' )
+
+ changed = {}
+ # unknown keys are ignored here
+ for key in [ k for k in new_data.keys() if k in editable_keys ]:
+ new_val = new_data[ key ]
+ old_val = self.__getattribute__( key )
+ if new_val == old_val:
+ continue
+
+ self.__setattr__( key, new_val )
+ changed[ key ] = new_val
+
+ return changed
+
+
class HistoryDatasetAssociationDisplayAtAuthorization( object ):
def __init__( self, hda=None, user=None, site=None ):
self.history_dataset_association = hda
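
[Editor's note: the set_from_dict addition above boils down to a whitelist-and-diff pattern: only whitelisted keys are applied, and only actual changes are reported back. A standalone sketch of that pattern follows; the class and data are illustrative, not Galaxy code.]

EDITABLE_KEYS = ( 'name', 'deleted', 'visible', 'dbkey', 'info', 'blurb' )

class Record( object ):
    """Toy stand-in for a model object with a fixed set of editable keys."""
    def __init__( self, **attrs ):
        self.__dict__.update( attrs )

    def set_from_dict( self, new_data ):
        changed = {}
        # unknown keys are ignored; unchanged values are skipped
        for key in [ k for k in new_data if k in EDITABLE_KEYS ]:
            if new_data[ key ] != getattr( self, key ):
                setattr( self, key, new_data[ key ] )
                changed[ key ] = new_data[ key ]
        return changed

record = Record( name='old name', visible=True )
print( record.set_from_dict( { 'name': 'new name', 'bogus': 1 } ) )
# -> {'name': 'new name'}   ('bogus' is silently ignored)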
diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -427,10 +427,15 @@
class UsesHistoryDatasetAssociationMixin:
- """ Mixin for controllers that use HistoryDatasetAssociation objects. """
+ """
+ Mixin for controllers that use HistoryDatasetAssociation objects.
+ """
def get_dataset( self, trans, dataset_id, check_ownership=True, check_accessible=False, check_state=True ):
- """ Get an HDA object by id. """
+ """
+ Get an HDA object by id, performing security checks using
+ the current transaction.
+ """
# DEPRECATION: We still support unencoded ids for backward compatibility
try:
# encoded id?
@@ -466,7 +471,10 @@
def get_history_dataset_association( self, trans, history, dataset_id,
check_ownership=True, check_accessible=False, check_state=False ):
- """Get a HistoryDatasetAssociation from the database by id, verifying ownership."""
+ """
+ Get a HistoryDatasetAssociation from the database by id, verifying ownership.
+ """
+ #TODO: duplicate of above? alias to above (or vice versa)
self.security_check( trans, history, check_ownership=check_ownership, check_accessible=check_accessible )
hda = self.get_object( trans, dataset_id, 'HistoryDatasetAssociation', check_ownership=False, check_accessible=False, deleted=False )
@@ -479,8 +487,9 @@
return hda
def get_data( self, dataset, preview=True ):
- """ Gets a dataset's data. """
-
+ """
+ Gets a dataset's data.
+ """
# Get data from file, truncating if necessary.
truncated = False
dataset_data = None
@@ -610,6 +619,27 @@
return display_apps
+ def set_hda_from_dict( self, trans, hda, new_data ):
+ """
+ Changes HDA data using the given dictionary new_data.
+ """
+ # precondition: access of the hda has already been checked
+
+ # send what we can down into the model
+ changed = hda.set_from_dict( new_data )
+ # the rest (often involving the trans) - do here
+ if 'annotation' in new_data.keys() and trans.get_user():
+ hda.add_item_annotation( trans.sa_session, trans.get_user(), hda, new_data[ 'annotation' ] )
+ changed[ 'annotation' ] = new_data[ 'annotation' ]
+ # tags
+ # sharing/permissions?
+ # purged
+
+ if changed.keys():
+ trans.sa_session.flush()
+
+ return changed
+
class UsesLibraryMixin:
diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -2,15 +2,10 @@
API operations on a history.
"""
-import pkg_resources
-pkg_resources.require("Paste")
-from paste.httpexceptions import HTTPBadRequest
-
from galaxy import web, util
from galaxy.web.base.controller import BaseAPIController, UsesHistoryMixin
from galaxy.web import url_for
from galaxy.model.orm import desc
-from galaxy.util.bunch import Bunch
import logging
log = logging.getLogger( __name__ )
@@ -197,28 +192,39 @@
# - protection against bad data form/type
# - protection against malicious data content
# all other conversions and processing (such as permissions, etc.) should happen down the line
+
+ # keys listed here don't error when attempting to set, but fail silently
+ # this allows PUT'ing an entire model back to the server without attribute errors on uneditable attrs
+ valid_but_uneditable_keys = (
+ 'id', 'model_class', 'nice_size', 'contents_url', 'purged', 'tags',
+ 'state', 'state_details', 'state_ids'
+ )
+
+ validated_payload = {}
for key, val in payload.items():
# TODO: lots of boilerplate here, but overhead on abstraction is equally onerous
if key == 'name':
if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
raise ValueError( 'name must be a string or unicode: %s' %( str( type( val ) ) ) )
- payload[ 'name' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
+ validated_payload[ 'name' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
#TODO:?? if sanitized != val: log.warn( 'script kiddie' )
elif key == 'deleted':
if not isinstance( val, bool ):
raise ValueError( 'deleted must be a boolean: %s' %( str( type( val ) ) ) )
+ validated_payload[ 'deleted' ] = val
elif key == 'published':
- if not isinstance( payload[ 'published' ], bool ):
+ if not isinstance( val, bool ):
raise ValueError( 'published must be a boolean: %s' %( str( type( val ) ) ) )
+ validated_payload[ 'published' ] = val
elif key == 'genome_build':
if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
raise ValueError( 'genome_build must be a string: %s' %( str( type( val ) ) ) )
- payload[ 'genome_build' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
+ validated_payload[ 'genome_build' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
elif key == 'annotation':
if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
raise ValueError( 'annotation must be a string or unicode: %s' %( str( type( val ) ) ) )
- payload[ 'annotation' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
- else:
+ validated_payload[ 'annotation' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
+ elif key not in valid_but_uneditable_keys:
raise AttributeError( 'unknown key: %s' %( str( key ) ) )
- return payload
+ return validated_payload
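
[Editor's note: the refactor above stops mutating payload in place and instead accumulates a separate validated_payload, so only type-checked, sanitized keys survive. A condensed sketch of that shape under Python 3 assumptions; the key set and the pass-through "sanitizer" are stand-ins for the real whitelists and sanitize_html.]

def validate_payload( payload ):
    """Return only whitelisted, type-checked keys from payload."""
    valid_but_uneditable_keys = ( 'id', 'model_class', 'purged' )  # fail silently
    validated_payload = {}
    for key, val in payload.items():
        if key in ( 'name', 'annotation' ):
            if not isinstance( val, str ):
                raise ValueError( '%s must be a string: %s' % ( key, type( val ) ) )
            validated_payload[ key ] = val  # real code sanitizes HTML here
        elif key in ( 'deleted', 'published' ):
            if not isinstance( val, bool ):
                raise ValueError( '%s must be a boolean: %s' % ( key, type( val ) ) )
            validated_payload[ key ] = val
        elif key not in valid_but_uneditable_keys:
            raise AttributeError( 'unknown key: %s' % key )
    return validated_payload

print( validate_payload( { 'name': 'ok', 'purged': True } ) )  # {'name': 'ok'}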
diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -1,13 +1,13 @@
"""
API operations on the contents of a history.
"""
-import logging
-from galaxy import web
+from galaxy import web, util
from galaxy.web.base.controller import BaseAPIController, url_for
from galaxy.web.base.controller import UsesHistoryDatasetAssociationMixin, UsesHistoryMixin
from galaxy.web.base.controller import UsesLibraryMixin, UsesLibraryMixinItems
+import logging
log = logging.getLogger( __name__ )
class HistoryContentsController( BaseAPIController, UsesHistoryDatasetAssociationMixin, UsesHistoryMixin,
@@ -138,11 +138,14 @@
POST /api/histories/{encoded_history_id}/contents
Creates a new history content item (file, aka HistoryDatasetAssociation).
"""
+ #TODO: copy existing, accessible hda - dataset controller, copy_datasets
+ #TODO: convert existing, accessible hda - model.DatasetInstance(or hda.datatype).get_converter_types
from_ld_id = payload.get( 'from_ld_id', None )
-
try:
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False )
except Exception, e:
+ #TODO: no way to tell if it failed bc of perms or other (all MessageExceptions)
+ trans.response.status = 500
return str( e )
if from_ld_id:
@@ -164,6 +167,90 @@
else:
# TODO: implement other "upload" methods here.
- trans.response.status = 403
+ trans.response.status = 501
return "Not implemented."
+ @web.expose_api
+ def update( self, trans, history_id, id, payload, **kwd ):
+ """
+ PUT /api/histories/{encoded_history_id}/contents/{encoded_content_id}
+ Changes an existing history dataset.
+ """
+ #TODO: PUT /api/histories/{encoded_history_id} payload = { rating: rating } (w/ no security checks)
+ changed = {}
+ try:
+ hda = self.get_dataset( trans, id,
+ check_ownership=True, check_accessible=True, check_state=True )
+ # validation handled here and some parsing, processing, and conversion
+ payload = self._validate_and_parse_update_payload( payload )
+ # additional checks here (security, etc.)
+ changed = self.set_hda_from_dict( trans, hda, payload )
+
+ except Exception, exception:
+ log.error( 'Update of history (%s), HDA (%s) failed: %s',
+ history_id, id, str( exception ), exc_info=True )
+ # convert to the appropriate HTTP code
+ if( isinstance( exception, ValueError )
+ or isinstance( exception, AttributeError ) ):
+ # bad syntax from the validator/parser
+ trans.response.status = 400
+ else:
+ trans.response.status = 500
+ return { 'error': str( exception ) }
+
+ return changed
+
+ def _validate_and_parse_update_payload( self, payload ):
+ """
+ Validate and parse incoming data payload for an HDA.
+ """
+ # This layer handles (most of the stricter idiot proofing):
+ # - unknown/unallowed keys
+ # - changing data keys from api key to attribute name
+ # - protection against bad data form/type
+ # - protection against malicious data content
+ # all other conversions and processing (such as permissions, etc.) should happen down the line
+
+ # keys listed here don't error when attempting to set, but fail silently
+ # this allows PUT'ing an entire model back to the server without attribute errors on uneditable attrs
+ valid_but_uneditable_keys = (
+ 'id', 'name', 'type', 'api_type', 'model_class', 'history_id', 'hid',
+ 'accessible', 'purged', 'state', 'data_type', 'file_ext', 'file_size', 'misc_blurb',
+ 'download_url', 'visualizations', 'display_apps', 'display_types',
+ 'metadata_dbkey', 'metadata_column_names', 'metadata_column_types', 'metadata_columns',
+ 'metadata_comment_lines', 'metadata_data_lines'
+ )
+
+ validated_payload = {}
+ for key, val in payload.items():
+ # TODO: lots of boilerplate here, but overhead on abstraction is equally onerous
+ # typecheck, parse, remap key
+ if key == 'name':
+ if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
+ raise ValueError( 'name must be a string or unicode: %s' %( str( type( val ) ) ) )
+ validated_payload[ 'name' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
+ #TODO:?? if sanitized != val: log.warn( 'script kiddie' )
+ elif key == 'deleted':
+ if not isinstance( val, bool ):
+ raise ValueError( 'deleted must be a boolean: %s' %( str( type( val ) ) ) )
+ validated_payload[ 'deleted' ] = val
+ elif key == 'visible':
+ if not isinstance( val, bool ):
+ raise ValueError( 'visible must be a boolean: %s' %( str( type( val ) ) ) )
+ validated_payload[ 'visible' ] = val
+ elif key == 'genome_build':
+ if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
+ raise ValueError( 'genome_build must be a string: %s' %( str( type( val ) ) ) )
+ validated_payload[ 'dbkey' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
+ elif key == 'annotation':
+ if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
+ raise ValueError( 'annotation must be a string or unicode: %s' %( str( type( val ) ) ) )
+ validated_payload[ 'annotation' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
+ elif key == 'misc_info':
+ if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
+ raise ValueError( 'misc_info must be a string or unicode: %s' %( str( type( val ) ) ) )
+ validated_payload[ 'info' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
+ elif key not in valid_but_uneditable_keys:
+ raise AttributeError( 'unknown key: %s' %( str( key ) ) )
+ return validated_payload
+
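
[Editor's note: one detail worth isolating from the update() method above is that all failures funnel through a single handler that picks the HTTP status from the exception type. As a sketch:]

def status_for( exception ):
    # bad syntax from the validator/parser is the client's fault
    if isinstance( exception, ( ValueError, AttributeError ) ):
        return 400
    # anything else is treated as a server error
    return 500

print( status_for( ValueError( 'name must be a string' ) ) )  # 400
print( status_for( RuntimeError( 'unexpected failure' ) ) )   # 500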
diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 test/casperjs/api-hda-tests.js
--- /dev/null
+++ b/test/casperjs/api-hda-tests.js
@@ -0,0 +1,406 @@
+/* Utility to load a specific page and output html, page text, or a screenshot
+ * Optionally wait for some time, text, or dom selector
+ */
+try {
+ //...if there's a better way - please let me know, universe
+ var scriptDir = require( 'system' ).args[3]
+ // remove the script filename
+ .replace( /[\w|\.|\-|_]*$/, '' )
+ // if given rel. path, prepend the curr dir
+ .replace( /^(?!\/)/, './' ),
+ spaceghost = require( scriptDir + 'spaceghost' ).create({
+ // script options here (can be overridden by CLI)
+ //verbose: true,
+ //logLevel: debug,
+ scriptDir: scriptDir
+ });
+
+} catch( error ){
+ console.debug( error );
+ phantom.exit( 1 );
+}
+spaceghost.start();
+
+// =================================================================== SET UP
+var utils = require( 'utils' );
+
+var email = spaceghost.user.getRandomEmail(),
+ password = '123456';
+if( spaceghost.fixtureData.testUser ){
+ email = spaceghost.fixtureData.testUser.email;
+ password = spaceghost.fixtureData.testUser.password;
+}
+spaceghost.user.loginOrRegisterUser( email, password );
+
+var uploadFilename = '1.sam',
+ uploadFilepath = '../../test-data/' + uploadFilename,
+ upload = {};
+spaceghost.thenOpen( spaceghost.baseUrl ).tools.uploadFile( uploadFilepath, function( uploadInfo ){
+ upload = uploadInfo;
+});
+
+function hasKeys( object, keysArray ){
+ if( !utils.isObject( object ) ){ return false; }
+ for( var i=0; i<keysArray.length; i += 1 ){
+ if( !object.hasOwnProperty( keysArray[i] ) ){
+ spaceghost.debug( 'object missing key: ' + keysArray[i] );
+ return false;
+ }
+ }
+ return true;
+}
+
+function countKeys( object ){
+ if( !utils.isObject( object ) ){ return 0; }
+ var count = 0;
+ for( var key in object ){
+ if( object.hasOwnProperty( key ) ){ count += 1; }
+ }
+ return count;
+}
+
+// =================================================================== TESTS
+var summaryKeys = [ 'id', 'name', 'type', 'url' ],
+ detailKeys = [
+ // the following are always present regardless of datatype
+ 'id', 'name', 'api_type', 'model_class',
+ 'history_id', 'hid',
+ 'accessible', 'deleted', 'visible', 'purged',
+ 'state', 'data_type', 'file_ext', 'file_size',
+ 'misc_info', 'misc_blurb',
+ 'download_url', 'visualizations', 'display_apps', 'display_types',
+ 'genome_build',
+ // the following are NOT always present DEPENDING ON datatype
+ 'metadata_dbkey',
+ 'metadata_column_names', 'metadata_column_types', 'metadata_columns',
+ 'metadata_comment_lines', 'metadata_data_lines'
+ ];
+
+spaceghost.historypanel.waitForHdas().then( function(){
+
+ var uploaded = this.historypanel.hdaElementInfoByTitle( uploadFilename );
+ this.info( 'found uploaded hda: ' + uploaded.attributes.id );
+ this.debug( 'uploaded hda: ' + this.jsonStr( uploaded ) );
+ // ------------------------------------------------------------------------------------------- INDEX
+ this.test.comment( 'index should return a list of summary data for each hda' );
+ var histories = this.api.histories.index(),
+ lastHistory = histories[0],
+ hdaIndex = this.api.hdas.index( lastHistory.id );
+ //this.debug( 'hdaIndex:' + this.jsonStr( hdaIndex ) );
+
+ this.test.assert( utils.isArray( hdaIndex ), "index returned an array: length " + hdaIndex.length );
+ this.test.assert( hdaIndex.length >= 1, 'Has at least one hda' );
+
+ var firstHda = hdaIndex[0];
+ this.test.assert( hasKeys( firstHda, summaryKeys ), 'Has the proper keys' );
+
+ this.test.assert( this.api.isEncodedId( firstHda.id ), 'Id appears well-formed: ' + firstHda.id );
+ this.test.assert( uploaded.text.indexOf( firstHda.name ) !== -1, 'Title matches: ' + firstHda.name );
+ // not caring about type or url here
+
+
+ // ------------------------------------------------------------------------------------------- SHOW
+ this.test.comment( 'show should get an HDA details object' );
+ var hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ //this.debug( this.jsonStr( hdaShow ) );
+ this.test.assert( hasKeys( hdaShow, detailKeys ), 'Has the proper keys' );
+
+ //TODO: validate data in each hdaShow attribute...
+
+
+ // ------------------------------------------------------------------------------------------- INDEX (detailed)
+ this.test.comment( 'index should return a list of detailed data for each hda in "ids" when passed' );
+ hdaIndex = this.api.hdas.index( lastHistory.id, [ firstHda.id ] );
+ this.debug( 'hdaIndex:' + this.jsonStr( hdaIndex ) );
+
+ this.test.assert( utils.isArray( hdaIndex ), "index returned an array: length " + hdaIndex.length );
+ this.test.assert( hdaIndex.length >= 1, 'Has at least one hda' );
+
+ firstHda = hdaIndex[0];
+ this.test.assert( hasKeys( firstHda, detailKeys ), 'Has the proper keys' );
+
+ //TODO??: validate data in firstHda attribute? we ASSUME it's from a common method as show...
+
+
+ // ------------------------------------------------------------------------------------------- CREATE
+ //TODO: create from_ld_id
+
+
+ // ------------------------------------------------------------------------------------------- UPDATE
+ // ........................................................................................... idiot proofing
+ this.test.comment( 'updating to the current value should return no value (no change)' );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ var returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ name : hdaShow.name
+ });
+ this.test.assert( countKeys( returned ) === 0, "No changes returned: " + this.jsonStr( returned ) );
+
+ this.test.comment( 'updating using a nonsense key should fail with an error' );
+ var err = {};
+ try {
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ konamiCode : 'uuddlrlrba'
+ });
+ } catch( error ){
+ err = error;
+ //this.debug( this.jsonStr( err ) );
+ }
+ this.test.assert( !!err.message, "Error occurred: " + err.message );
+ this.test.assert( err.status === 400, "Error status is 400: " + err.status );
+
+ this.test.comment( 'updating by attempting to change type should cause an error' );
+ err = {};
+ try {
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ //name : false
+ deleted : 'sure why not'
+ });
+ } catch( error ){
+ err = error;
+ //this.debug( this.jsonStr( err ) );
+ }
+ this.test.assert( !!err.message, "Error occurred: " + err.message );
+ this.test.assert( err.status === 400, "Error status is 400: " + err.status );
+ //TODO??: other type checks?
+
+
+ // ........................................................................................... name
+ this.test.comment( 'update should allow changing the name' );
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ name : 'New name'
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.name === 'New name', "Name successfully set via update: " + hdaShow.name );
+
+ this.test.comment( 'update should sanitize any new name' );
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ name : 'New name<script type="text/javascript" src="bler">alert("blah");</script>'
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.name === 'New name', "Update sanitized name: " + hdaShow.name );
+
+ //NOTE!: this fails on sqlite3 (with default setup)
+ try {
+ this.test.comment( 'update should allow unicode in names' );
+ var unicodeName = 'Ржевский сапоги';
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ name : unicodeName
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.name === unicodeName, "Update accepted unicode name: " + hdaShow.name );
+ } catch( err ){
+ //this.debug( this.jsonStr( err ) );
+ if( ( err instanceof this.api.APIError )
+ && ( err.status === 500 )
+ && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
+ this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
+ }
+ }
+
+ this.test.comment( 'update should allow escaped quotations in names' );
+ var quotedName = '"Bler"';
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ name : quotedName
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.name === quotedName,
+ "Update accepted escaped quotations in name: " + hdaShow.name );
+
+
+ // ........................................................................................... deleted
+ this.test.comment( 'update should allow changing the deleted flag' );
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ deleted: true
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.deleted === true, "Update set the deleted flag: " + hdaShow.deleted );
+
+ this.test.comment( 'update should allow changing the deleted flag back' );
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ deleted: false
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.deleted === false, "Update set the deleted flag: " + hdaShow.deleted );
+
+
+ // ........................................................................................... visible/hidden
+ this.test.comment( 'update should allow changing the visible flag' );
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ visible: false
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.visible === false, "Update set the visible flag: " + hdaShow.visible );
+
+
+ // ........................................................................................... genome_build/dbkey
+ this.test.comment( 'update should allow changing the genome_build' );
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ genome_build : 'hg18'
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.genome_build === 'hg18',
+ "genome_build successfully set via update: " + hdaShow.genome_build );
+ this.test.assert( hdaShow.metadata_dbkey === 'hg18',
+ "metadata_dbkey successfully set via the same update: " + hdaShow.metadata_dbkey );
+
+ this.test.comment( 'update should sanitize any genome_build' );
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ genome_build : 'hg18<script type="text/javascript" src="bler">alert("blah");</script>'
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.genome_build === 'hg18',
+ "Update sanitized genome_build: " + hdaShow.genome_build );
+ this.test.assert( hdaShow.metadata_dbkey === 'hg18',
+ "metadata_dbkey successfully set via the same update: " + hdaShow.metadata_dbkey );
+
+ this.test.comment( 'update should allow unicode in genome builds' );
+ var unicodeBuild = 'Ржевский18';
+ //NOTE!: this fails on sqlite3 (with default setup)
+ try {
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ genome_build : unicodeBuild
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.genome_build === unicodeBuild,
+ "Update accepted unicode genome_build: " + hdaShow.name );
+ } catch( err ){
+ //this.debug( this.jsonStr( err ) );
+ if( ( err instanceof this.api.APIError )
+ && ( err.status === 500 )
+ && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
+ this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
+ }
+ }
+
+ // ........................................................................................... misc_info/info
+ this.test.comment( 'update should allow changing the misc_info' );
+ var newInfo = 'I\'ve made a huge mistake.';
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ misc_info : newInfo
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.misc_info === newInfo,
+ "misc_info successfully set via update: " + hdaShow.misc_info );
+
+ this.test.comment( 'update should sanitize any misc_info' );
+ var newInfo = 'You\'re going to get hop-ons.';
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ misc_info : newInfo + '<script type="text/javascript" src="bler">alert("blah");</script>'
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.misc_info === newInfo,
+ "Update sanitized misc_info: " + hdaShow.misc_info );
+
+ this.test.comment( 'update should allow unicode in misc_info' );
+ var unicodeInfo = '여보!';
+ //NOTE!: this fails on sqlite3 (with default setup)
+ try {
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ misc_info : unicodeInfo
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.misc_info === unicodeInfo,
+ "Update accepted unicode misc_info: " + hdaShow.misc_info );
+ } catch( err ){
+ //this.debug( this.jsonStr( err ) );
+ if( ( err instanceof this.api.APIError )
+ && ( err.status === 500 )
+ && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
+ this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
+ }
+ }
+
+/*
+ // ........................................................................................... annotation
+ // currently fails because no annotation is returned in details
+ this.test.comment( 'update should allow changing the annotation' );
+ var newAnnotation = 'Found this sample on a movie theatre floor';
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ annotation : newAnnotation
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.annotation === newAnnotation,
+ "Annotation successfully set via update: " + hdaShow.annotation );
+
+ this.test.comment( 'update should sanitize any new annotation' );
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ annotation : 'New annotation<script type="text/javascript" src="bler">alert("blah");</script>'
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.annotation === 'New annotation',
+ "Update sanitized annotation: " + hdaShow.annotation );
+
+ //NOTE!: this fails on sqlite3 (with default setup)
+ try {
+ this.test.comment( 'update should allow unicode in annotations' );
+ var unicodeAnnotation = 'お願いは、それが落下させない';
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ annotation : unicodeAnnotation
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.annotation === unicodeAnnotation,
+ "Update accepted unicode annotation: " + hdaShow.annotation );
+ } catch( err ){
+ //this.debug( this.jsonStr( err ) );
+ if( ( err instanceof this.api.APIError )
+ && ( err.status === 500 )
+ && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
+ this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
+ }
+ }
+
+ this.test.comment( 'update should allow escaped quotations in annotations' );
+ var quotedAnnotation = '"Bler"';
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ annotation : quotedAnnotation
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.annotation === quotedAnnotation,
+ "Update accepted escaped quotations in annotation: " + hdaShow.annotation );
+*/
+
+
+ // ------------------------------------------------------------------------------------------- ERRORS
+ this.test.comment( 'create should error with "not implemented" when the param "from_ld_id" is not used' );
+ var errored = false;
+ try {
+ // sending an empty object won't work
+ var created = this.api.hdas.create( lastHistory.id, { bler: 'bler' } );
+
+ } catch( err ){
+ errored = true;
+ this.test.assert( err.message.indexOf( 'Not implemented' ) !== -1,
+ 'Error has the proper message: ' + err.message );
+ this.test.assert( err.status === 501, 'Error has the proper status code: ' + err.status );
+ }
+ if( !errored ){
+ this.test.fail( 'create without "from_ld_id" did not cause error' );
+ }
+
+
+ //var returned = this.api.hdas.update( lastHistory.id, hdaIndex[0].id, { deleted: true, blerp: 'blerp' });
+ //var returned = this.api.hdas.update( lastHistory.id, { deleted: true, blerp: 'blerp' });
+ //this.debug( 'returned:' + this.jsonStr( returned ) );
+ //this.debug( 'page:' + this.jsonStr( this.page ) );
+});
+
+// ===================================================================
+spaceghost.run( function(){
+});
diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 test/casperjs/api-history-tests.js
--- a/test/casperjs/api-history-tests.js
+++ b/test/casperjs/api-history-tests.js
@@ -36,7 +36,10 @@
function hasKeys( object, keysArray ){
if( !utils.isObject( object ) ){ return false; }
for( var i=0; i<keysArray.length; i += 1 ){
- if( !object.hasOwnProperty( keysArray[i] ) ){ return false; }
+ if( !object.hasOwnProperty( keysArray[i] ) ){
+ spaceghost.debug( 'object missing key: ' + keysArray[i] );
+ return false;
+ }
}
return true;
}
@@ -362,6 +365,10 @@
"Update accepted escaped quotations in annotation: " + historyShow.annotation );
+ // ------------------------------------------------------------------------------------------- ERRORS
+ //TODO: make sure expected errors are being passed back (but no permissions checks here - different suite)
+ // bad ids: index, show, update, delete, undelete
+
/*
*/
//this.debug( this.jsonStr( historyShow ) );
diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 test/casperjs/modules/api.js
--- a/test/casperjs/modules/api.js
+++ b/test/casperjs/modules/api.js
@@ -232,7 +232,7 @@
};
HDAAPI.prototype.show = function show( historyId, id, deleted ){
- this.api.spaceghost.info( 'hda.show: ' + [ id, (( deleted )?( 'w deleted' ):( '' )) ] );
+ this.api.spaceghost.info( 'hda.show: ' + [ historyId, id, (( deleted )?( 'w deleted' ):( '' )) ] );
id = ( id === 'most_recently_used' )?( id ):( this.api.ensureId( id ) );
deleted = deleted || false;
@@ -242,7 +242,7 @@
};
HDAAPI.prototype.create = function create( historyId, payload ){
- this.api.spaceghost.info( 'hda.create: ' + this.api.spaceghost.jsonStr( payload ) );
+ this.api.spaceghost.info( 'hda.create: ' + [ historyId, this.api.spaceghost.jsonStr( payload ) ] );
// py.payload <-> ajax.data
payload = this.api.ensureObject( payload );
@@ -253,8 +253,7 @@
};
HDAAPI.prototype.update = function create( historyId, id, payload ){
- this.api.spaceghost.info( 'hda.update: ' + historyId + ',' + id + ','
- + this.api.spaceghost.jsonStr( payload ) );
+ this.api.spaceghost.info( 'hda.update: ' + [ historyId, id, this.api.spaceghost.jsonStr( payload ) ] );
// py.payload <-> ajax.data
historyId = this.api.ensureId( historyId );
diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 test/casperjs/modules/tools.js
--- a/test/casperjs/modules/tools.js
+++ b/test/casperjs/modules/tools.js
@@ -13,7 +13,7 @@
//??: circ ref?
this.options = {};
/** Default amount of ms to wait for upload to finish */
- this.options.defaultUploadWait = ( 30 * 1000 );
+ this.options.defaultUploadWait = ( 45 * 1000 );
this.spaceghost = spaceghost;
};
exports.Tools = Tools;
@@ -108,14 +108,22 @@
// wait for main panel, history reload
////NOTE!: assumes tool execution reloads the history panel
- this.waitForMultipleNavigation( [ 'tool_runner/upload_async_message', 'history' ], function(){
- // debugging
- this.jumpToMain( function(){
- var messageInfo = this.elementInfoOrNull( this.data.selectors.messages.all );
- this.debug( ( messageInfo )?( messageInfo.attributes['class'] + ':\n' + messageInfo.text )
- :( 'NO post upload message' ) );
- });
- });
+ this.waitForMultipleNavigation( [ 'tool_runner/upload_async_message', 'history' ],
+ function thenAfterUploadRefreshes(){
+ // debugging
+ this.jumpToMain( function(){
+ var messageInfo = this.elementInfoOrNull( this.data.selectors.messages.all );
+ this.debug( ( messageInfo )?( messageInfo.attributes['class'] + ':\n' + messageInfo.text )
+ :( 'NO post upload message' ) );
+ });
+ },
+ function timeoutWaitingForUploadRefreshes( urlsStillWaitingOn ){
+ this.capture( 'upload-error.png' )
+ throw new this.GalaxyError( 'Upload Error: '
+ + 'timeout waiting for upload "' + filepath + '" refreshes: ' + urlsStillWaitingOn );
+ },
+ this.tools.options.defaultUploadWait
+ );
});
};
@@ -160,13 +168,19 @@
// error if an info message wasn't found
spaceghost.withMainPanel( function checkUploadMessage(){
var infoInfo = spaceghost.elementInfoOrNull( this.data.selectors.messages.infolarge );
- if( ( !infoInfo )
- || ( infoInfo.text.indexOf( this.data.text.upload.success ) === -1 ) ){
- throw new this.GalaxyError( 'Upload Error: no info message uploading "' + filepath + '"' );
+ if( ( infoInfo )
+ && ( infoInfo.text.indexOf( this.data.text.upload.success ) !== -1 ) ){
+ // safe to store these
+ uploadInfo.filename = filename;
+ uploadInfo.filepath = filepath;
+
+ } else {
+ // capture any other messages on the page
+ var otherInfo = spaceghost.elementInfoOrNull( this.data.selectors.messages.all ),
+ message = ( otherInfo && otherInfo.text )?( otherInfo.text ):( '' );
+ this.capture( 'upload-error.png' )
+ throw new this.GalaxyError( 'Upload Error: no success message uploading "' + filepath + '": ' + message );
}
- // safe to store these
- uploadInfo.filename = filename;
- uploadInfo.filepath = filepath;
});
// the hpanel should refresh and display the uploading file, wait for that to go into the ok state
@@ -177,6 +191,7 @@
if( hdaElement === null ){
var hdaContainer = this.historypanel.data.selectors.hdaContainer;
this.warning( 'Upload Error: ' + hdaContainer + ':\n' + this.getHTML( hdaContainer ) );
+ this.capture( 'upload-error.png' )
throw new this.GalaxyError( 'Upload Error: uploaded file HDA not found: ' + uploadInfo.filename );
}
this.debug( 'uploaded HDA element: ' + this.jsonStr( this.quickInfo( hdaElement ) ) );
@@ -191,6 +206,7 @@
}, function timeoutFn( newHdaInfo ){
this.warning( 'timeout waiting for upload:\n' + this.jsonStr( this.quickInfo( newHdaInfo ) ) );
+ this.capture( 'upload-error.png' )
throw new spaceghost.GalaxyError( 'Upload Error: timeout waiting for ok state: '
+ '"' + uploadInfo.filepath + '" (waited ' + timeoutAfterMs + ' ms)' );
diff -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 -r 8b9ca63f9128fbe9c7f01805db64da3ec2916332 test/casperjs/spaceghost.js
--- a/test/casperjs/spaceghost.js
+++ b/test/casperjs/spaceghost.js
@@ -546,9 +546,10 @@
* @param {String} urlToWaitFor the url to wait for (rel. to spaceghost.baseUrl)
* @param {Function} then the function to call after the nav request
* @param {Function} timeoutFn the function to call on timeout (optional)
+ * @param {Integer} waitMs manual setting of ms to wait (optional)
*/
-SpaceGhost.prototype.waitForNavigation = function waitForNavigation( urlToWaitFor, then, timeoutFn ){
- return this.waitForMultipleNavigation( [ urlToWaitFor ], then, timeoutFn );
+SpaceGhost.prototype.waitForNavigation = function waitForNavigation( urlToWaitFor, then, timeoutFn, waitMs ){
+ return this.waitForMultipleNavigation( [ urlToWaitFor ], then, timeoutFn, waitMs );
};
/** Wait for a multiple navigation requests then call a function.
@@ -557,9 +558,13 @@
* @param {String[]} urlsToWaitFor the relative urls to wait for
* @param {Function} then the function to call after the nav request
* @param {Function} timeoutFn the function to call on timeout (optional)
+ * @param {Integer} waitMs manual setting of ms to wait (optional)
*/
-SpaceGhost.prototype.waitForMultipleNavigation = function waitForMultipleNavigation( urlsToWaitFor, then, timeoutFn ){
- this.info( 'waiting for navigation: ' + this.jsonStr( urlsToWaitFor ) );
+SpaceGhost.prototype.waitForMultipleNavigation = function waitForMultipleNavigation( urlsToWaitFor,
+ then, timeoutFn, waitMs ){
+ waitMs = waitMs || ( this.options.waitTimeout * urlsToWaitFor.length );
+
+ this.info( 'waiting for navigation: ' + this.jsonStr( urlsToWaitFor ) + ', timeout after: ' + waitMs );
function urlMatches( urlToMatch, url ){
return ( url.indexOf( spaceghost.baseUrl + '/' + urlToMatch ) !== -1 );
}
@@ -589,9 +594,10 @@
if( utils.isFunction( then ) ){ then.call( this ); }
},
function timeout(){
- if( utils.isFunction( timeoutFn ) ){ timeoutFn.call( this ); }
+ this.removeListener( 'navigation.requested', catchNavReq );
+ if( utils.isFunction( timeoutFn ) ){ timeoutFn.call( this, urlsToWaitFor ); }
},
- this.options.waitTimeout * urlsToWaitFor.length
+ waitMs
);
return this;
};
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: inithello: Account for the scenario where a test status is not present in the tool_test_errors dict.
by commits-noreply@bitbucket.org, 24 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2df57338a595/
Changeset: 2df57338a595
User: inithello
Date: 2013-04-24 20:55:53
Summary: Account for the scenario where a test status is not present in the tool_test_errors dict.
Affected #: 1 file
diff -r 1a915a754396da8ac986dd410ee99bed12668235 -r 2df57338a595ab96f3f54af2b0e4c2e382198a40 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -1171,9 +1171,9 @@
# Generate a citable URL for this repository with owner and changeset revision.
repository_citable_url = suc.url_join( tool_shed_url, 'view', user.username, repository.name, metadata_row.changeset_revision )
title = 'Functional test results for changeset revision %s of %s' % ( metadata_row.changeset_revision, repository.name )
- tests_passed = len( metadata_row.tool_test_errors[ 'tests_passed' ] )
- tests_failed = len( metadata_row.tool_test_errors[ 'invalid_tests' ] )
- invalid_tests = len( metadata_row.tool_test_errors[ 'test_errors' ] )
+ tests_passed = len( metadata_row.tool_test_errors.get( 'tests_passed', [] ) )
+ tests_failed = len( metadata_row.tool_test_errors.get( 'test_errors', [] ) )
+ invalid_tests = len( metadata_row.tool_test_errors.get( 'invalid_tests', [] ) )
description = '%d tests passed, %d tests failed, %d tests determined to be invalid.' % ( tests_passed, tests_failed, invalid_tests )
# The guid attribute in an RSS feed's list of items allows a feed reader to choose not to show an item as updated
# if the guid is unchanged. For functional test results, the citable URL is sufficiently unique to enable
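
[Editor's note: the fix swaps direct indexing for dict.get with an empty-list default, so a missing status key counts as zero instead of raising KeyError. A tiny sketch with made-up data:]

tool_test_errors = { 'tests_passed': [ 't1', 't2' ] }  # other status keys absent

tests_passed = len( tool_test_errors.get( 'tests_passed', [] ) )
tests_failed = len( tool_test_errors.get( 'test_errors', [] ) )
invalid_tests = len( tool_test_errors.get( 'invalid_tests', [] ) )
print( '%d tests passed, %d tests failed, %d tests determined to be invalid.'
       % ( tests_passed, tests_failed, invalid_tests ) )
# -> 2 tests passed, 0 tests failed, 0 tests determined to be invalid.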
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Add support for a new <action type="set_environment_for_install"> tag in tool_dependencies.xml files included in tool shed repositories.
by commits-noreply@bitbucket.org, 24 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1a915a754396/
Changeset: 1a915a754396
User: greg
Date: 2013-04-24 20:46:54
Summary: Add support for a new <action type="set_environment_for_install"> tag in tool_dependencies.xml files included in tool shed repositories. This tag currently can include any number of <repository> tags, each of which will contain any number of tool dependency tags (i.e., <package> or <set_environment> tags). The settings in the env.sh file for each of the tool dependency tags will be injected into the environment for all following shell commands defined in the tool_dependencies.xml file, ensuring that the required tool dependencies are used when compiling the current dependency. I'm sure this will make no sense to anyone reading this commit message.
Affected #: 7 files
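
[Editor's note: in outline, the new tag works by locating the env.sh file of each referenced dependency and prepending its settings to every shell_command that follows. A standalone sketch of the injection step, separated from the fabric machinery in the diff below; paths and command are illustrative.]

def build_command( env_shell_file_paths, command ):
    """Prepend each env.sh's settings to the shell command before running it."""
    cmd = ''
    for env_shell_file_path in env_shell_file_paths:
        with open( env_shell_file_path ) as env_file:
            for env_setting in env_file:
                cmd += '%s\n' % env_setting.rstrip( '\n' )
    return cmd + command

# e.g. build_command( [ '/deps/numpy/1.7.1/env.sh' ], 'python setup.py install' )
# runs the install with numpy's environment settings in scope.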
diff -r 333dfc42a8688ec81627ae1164e3be9ba2aa864c -r 1a915a754396da8ac986dd410ee99bed12668235 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3443,7 +3443,7 @@
"""Return the repository's tool dependencies that are currently installed."""
installed_dependencies = []
for tool_dependency in self.tool_dependencies:
- if tool_dependency.status == ToolDependency.installation_status.INSTALLED:
+ if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED, ToolDependency.installation_status.ERROR ]:
installed_dependencies.append( tool_dependency )
return installed_dependencies
@property
diff -r 333dfc42a8688ec81627ae1164e3be9ba2aa864c -r 1a915a754396da8ac986dd410ee99bed12668235 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -478,7 +478,7 @@
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )
installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
diff -r 333dfc42a8688ec81627ae1164e3be9ba2aa864c -r 1a915a754396da8ac986dd410ee99bed12668235 lib/tool_shed/galaxy_install/tool_dependencies/common_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/common_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/common_util.py
@@ -1,6 +1,14 @@
-import os, shutil, tarfile, urllib2, zipfile
+import logging
+import os
+import shutil
+import tarfile
+import urllib2
+import zipfile
+import tool_shed.util.shed_util_common as suc
from galaxy.datatypes import checkers
+log = logging.getLogger( __name__ )
+
def create_env_var_dict( elem, tool_dependency_install_dir=None, tool_shed_repository_install_dir=None ):
env_var_name = elem.get( 'name', 'PATH' )
env_var_action = elem.get( 'action', 'prepend_to' )
@@ -76,6 +84,67 @@
zip_archive.close()
return True
+def get_env_shell_file_path( installation_directory ):
+ env_shell_file_name = 'env.sh'
+ default_location = os.path.abspath( os.path.join( installation_directory, env_shell_file_name ) )
+ if os.path.exists( default_location ):
+ return default_location
+ for root, dirs, files in os.walk( installation_directory ):
+ for name in files:
+ if name == env_shell_file_name:
+ return os.path.abspath( os.path.join( root, name ) )
+ return None
+
+def get_env_shell_file_paths( app, elem ):
+ # Currently only the following tag set is supported.
+ # <repository toolshed="http://localhost:9009/" name="package_numpy_1_7" owner="test" changeset_revision="c84c6a8be056">
+ # <package name="numpy" version="1.7.1" />
+ # </repository>
+ env_shell_file_paths = []
+ toolshed = elem.get( 'toolshed', None )
+ repository_name = elem.get( 'name', None )
+ repository_owner = elem.get( 'owner', None )
+ changeset_revision = elem.get( 'changeset_revision', None )
+ if toolshed and repository_name and repository_owner and changeset_revision:
+ repository = suc.get_repository_for_dependency_relationship( app, toolshed, repository_name, repository_owner, changeset_revision )
+ if repository:
+ for sub_elem in elem:
+ tool_dependency_type = sub_elem.tag
+ tool_dependency_name = sub_elem.get( 'name' )
+ tool_dependency_version = sub_elem.get( 'version' )
+ if tool_dependency_type and tool_dependency_name and tool_dependency_version:
+ # Get the tool_dependency so we can get its installation directory.
+ tool_dependency = None
+ for tool_dependency in repository.tool_dependencies:
+ if tool_dependency.type == tool_dependency_type and tool_dependency.name == tool_dependency_name and tool_dependency.version == tool_dependency_version:
+ break
+ if tool_dependency:
+ tool_dependency_key = '%s/%s' % ( tool_dependency_name, tool_dependency_version )
+ installation_directory = tool_dependency.installation_directory( app )
+ env_shell_file_path = get_env_shell_file_path( installation_directory )
+ env_shell_file_paths.append( env_shell_file_path )
+ else:
+ error_message = "Skipping tool dependency definition because unable to locate tool dependency "
+ error_message += "type %s, name %s, version %s for repository %s" % \
+ ( str( tool_dependency_type ), str( tool_dependency_name ), str( tool_dependency_version ), str( repository.name ) )
+ log.debug( error_message )
+ continue
+ else:
+ error_message = "Skipping invalid tool dependency definition: type %s, name %s, version %s." % \
+ ( str( tool_dependency_type ), str( tool_dependency_name ), str( tool_dependency_version ) )
+ log.debug( error_message )
+ continue
+ else:
+ error_message = "Skipping set_environment_for_install definition because unable to locate required installed tool shed repository: "
+ error_message += "toolshed %s, name %s, owner %s, changeset_revision %s." % \
+ ( str( toolshed ), str( repository_name ), str( repository_owner ), str( changeset_revision ) )
+ log.debug( error_message )
+ else:
+ error_message = "Skipping invalid set_environment_for_install definition: toolshed %s, name %s, owner %s, changeset_revision %s." % \
+ ( str( toolshed ), str( repository_name ), str( repository_owner ), str( changeset_revision ) )
+ log.debug( error_message )
+ return env_shell_file_paths
+
def isbz2( file_path ):
return checkers.is_bz2( file_path )
diff -r 333dfc42a8688ec81627ae1164e3be9ba2aa864c -r 1a915a754396da8ac986dd410ee99bed12668235 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -28,30 +28,18 @@
if int( version.split( "." )[ 0 ] ) < 1:
raise NotImplementedError( "Install Fabric version 1.0 or later." )
-def set_galaxy_environment( galaxy_user, tool_dependency_dir, host='localhost', shell='/bin/bash -l -c' ):
- """General Galaxy environment configuration"""
- env.user = galaxy_user
- env.install_dir = tool_dependency_dir
- env.host_string = host
- env.shell = shell
- env.use_sudo = False
- env.safe_cmd = local
- return env
-
-@contextmanager
-def make_tmp_dir():
- work_dir = tempfile.mkdtemp()
- yield work_dir
- if os.path.exists( work_dir ):
- local( 'rm -rf %s' % work_dir )
-
def handle_command( app, tool_dependency, install_dir, cmd ):
sa_session = app.model.context.current
output = local( cmd, capture=True )
log_results( cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
if output.return_code:
tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
- tool_dependency.error_message = str( output.stderr )
+ if output.stderr:
+ tool_dependency.error_message = str( output.stderr )[ :32768 ]
+ elif output.stdout:
+ tool_dependency.error_message = str( output.stdout )[ :32768 ]
+ else:
+ tool_dependency.error_message = "Unknown error occurred executing shell command %s, return_code: %s" % ( str( cmd ), str( output.return_code ) )
sa_session.add( tool_dependency )
sa_session.flush()
return output.return_code
@@ -63,6 +51,7 @@
package_name = actions_dict[ 'package_name' ]
actions = actions_dict.get( 'actions', None )
filtered_actions = []
+ env_shell_file_paths = []
if actions:
with make_tmp_dir() as work_dir:
with lcd( work_dir ):
@@ -133,9 +122,19 @@
return_code = handle_command( app, tool_dependency, install_dir, cmd )
if return_code:
return
+ elif action_type == 'set_environment_for_install':
+ # Currently the only action supported in this category is a list of paths to one or more tool dependency env.sh files. The
+ # environment settings in each of those files will be injected into the environment for every <action type="shell_command">
+ # tag that follows this <action type="set_environment_for_install"> tag set in the tool_dependencies.xml file.
+ env_shell_file_paths = action_dict[ 'env_shell_file_paths' ]
elif action_type == 'shell_command':
with settings( warn_only=True ):
- return_code = handle_command( app, tool_dependency, install_dir, action_dict[ 'command' ] )
+ cmd = ''
+ for env_shell_file_path in env_shell_file_paths:
+ for i, env_setting in enumerate( open( env_shell_file_path ) ):
+ cmd += '%s\n' % env_setting
+ cmd += action_dict[ 'command' ]
+ return_code = handle_command( app, tool_dependency, install_dir, cmd )
if return_code:
return
@@ -157,3 +156,20 @@
logfile.write( str( fabric_AttributeString.stderr ) )
logfile.write( "\n#############################################\n" )
logfile.close()
+
+@contextmanager
+def make_tmp_dir():
+ work_dir = tempfile.mkdtemp()
+ yield work_dir
+ if os.path.exists( work_dir ):
+ local( 'rm -rf %s' % work_dir )
+
+def set_galaxy_environment( galaxy_user, tool_dependency_dir, host='localhost', shell='/bin/bash -l -c' ):
+ """General Galaxy environment configuration. This method is not currently used."""
+ env.user = galaxy_user
+ env.install_dir = tool_dependency_dir
+ env.host_string = host
+ env.shell = shell
+ env.use_sudo = False
+ env.safe_cmd = local
+ return env
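
The net effect of the set_environment_for_install handling above is that each referenced env.sh file is read line by line and prepended to the shell command before handle_command executes it. A standalone sketch of that composition (build_install_command and the example path are hypothetical):

def build_install_command( command, env_shell_file_paths ):
    # Prepend each env.sh line so its settings are visible to the command.
    cmd = ''
    for env_shell_file_path in env_shell_file_paths:
        for env_setting in open( env_shell_file_path ):
            # strip the trailing newline before re-adding one
            cmd += '%s\n' % env_setting.strip()
    cmd += command
    return cmd

# build_install_command( 'python setup.py install', [ '/deps/numpy/1.7.1/env.sh' ] )
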
diff -r 333dfc42a8688ec81627ae1164e3be9ba2aa864c -r 1a915a754396da8ac986dd410ee99bed12668235 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -282,9 +282,6 @@
tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
sa_session.add( tool_dependency )
sa_session.flush()
-
-
-
else:
package_install_version = package_elem.get( 'version', '1.0' )
tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app,
@@ -378,7 +375,28 @@
action_dict[ env_elem.tag ] = env_var_dicts
else:
continue
+ elif action_type == 'set_environment_for_install':
+ # <action type="set_environment_for_install">
+ # <repository toolshed="http://localhost:9009/" name="package_numpy_1_7" owner="test" changeset_revision="c84c6a8be056">
+ # <package name="numpy" version="1.7.1" />
+ # </repository>
+ # </action>
+ # This action type allows defining an environment in which a tool dependency can be properly compiled. Currently, tag set definitions like
+ # the one above are supported, but in the future other approaches to setting environment variables or other environment attributes can be
+ # supported. The above tag set will result in the installed and compiled numpy version 1.7.1 binary being used when compiling the current
+ # tool dependency package. See the package_matplotlib_1_2 repository in the test tool shed for a real-world example.
+ all_env_shell_file_paths = []
+ for env_elem in action_elem:
+ if env_elem.tag == 'repository':
+ env_shell_file_paths = common_util.get_env_shell_file_paths( app, env_elem )
+ if env_shell_file_paths:
+ all_env_shell_file_paths.extend( env_shell_file_paths )
+ if all_env_shell_file_paths:
+ action_dict[ 'env_shell_file_paths' ] = all_env_shell_file_paths
+ else:
+ continue
else:
+ log.debug( "Skipping unsupported action type '%s'." % str( action_type ) )
continue
actions.append( ( action_type, action_dict ) )
if actions:
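
The repository tag set quoted in the comment above can be walked with the standard library; a minimal parsing sketch reusing the example values from that comment:

from xml.etree import ElementTree

fragment = '''<action type="set_environment_for_install">
    <repository toolshed="http://localhost:9009/" name="package_numpy_1_7" owner="test" changeset_revision="c84c6a8be056">
        <package name="numpy" version="1.7.1" />
    </repository>
</action>'''

action_elem = ElementTree.fromstring( fragment )
for repository_elem in action_elem.findall( 'repository' ):
    # Each <repository> names an installed repository whose env.sh files should be borrowed.
    print repository_elem.get( 'name' ), repository_elem.get( 'changeset_revision' )
    for package_elem in repository_elem.findall( 'package' ):
        print package_elem.get( 'name' ), package_elem.get( 'version' )
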
diff -r 333dfc42a8688ec81627ae1164e3be9ba2aa864c -r 1a915a754396da8ac986dd410ee99bed12668235 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -307,6 +307,7 @@
will be installed in:
~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repo_owner>/<repo_name>/<repo_installed_changeset_revision>
"""
+ sa_session = app.model.context.current
installed_tool_dependencies = []
# Parse the tool_dependencies.xml config.
try:
@@ -327,12 +328,30 @@
if tool_dependency.name==package_name and tool_dependency.version==package_version:
break
if tool_dependency.can_install:
- tool_dependency = install_package( app, elem, tool_shed_repository, tool_dependencies=tool_dependencies )
+ try:
+ tool_dependency = install_package( app, elem, tool_shed_repository, tool_dependencies=tool_dependencies )
+ except Exception, e:
+ error_message = "Error installing tool dependency %s version %s: %s" % ( str( package_name ), str( package_version ), str( e ) )
+ log.debug( error_message )
+ if tool_dependency:
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = error_message
+ sa_session.add( tool_dependency )
+ sa_session.flush()
if tool_dependency and tool_dependency.status in [ app.model.ToolDependency.installation_status.INSTALLED,
app.model.ToolDependency.installation_status.ERROR ]:
installed_tool_dependencies.append( tool_dependency )
elif elem.tag == 'set_environment':
- tool_dependency = set_environment( app, elem, tool_shed_repository )
+ try:
+ tool_dependency = set_environment( app, elem, tool_shed_repository )
+ except Exception, e:
+ error_message = "Error setting environment for tool dependency: %s" % str( e )
+ log.debug( error_message )
+ if tool_dependency:
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = error_message
+ sa_session.add( tool_dependency )
+ sa_session.flush()
if tool_dependency and tool_dependency.status in [ app.model.ToolDependency.installation_status.INSTALLED,
app.model.ToolDependency.installation_status.ERROR ]:
installed_tool_dependencies.append( tool_dependency )
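
The catch, record an ERROR status plus message, then flush pattern added in both branches above could be factored into one helper; a sketch against the same model API (record_tool_dependency_error is a hypothetical name, not committed code):

def record_tool_dependency_error( app, tool_dependency, error_message ):
    # Persist an ERROR status and its message for a failed dependency install.
    sa_session = app.model.context.current
    tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
    tool_dependency.error_message = error_message
    sa_session.add( tool_dependency )
    sa_session.flush()
    return tool_dependency
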
diff -r 333dfc42a8688ec81627ae1164e3be9ba2aa864c -r 1a915a754396da8ac986dd410ee99bed12668235 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -32,7 +32,8 @@
log = logging.getLogger( __name__ )
INITIAL_CHANGELOG_HASH = '000000000000'
-MAX_CONTENT_SIZE = 32768
+MAX_CONTENT_SIZE = 1048576
+MAX_DISPLAY_SIZE = 32768
VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}&<>" )
new_repo_email_alert_template = """
@@ -667,10 +668,17 @@
safe_str = ''
for i, line in enumerate( open( file_path ) ):
safe_str = '%s%s' % ( safe_str, to_safe_string( line ) )
+ # Stop reading once the accumulated string exceeds MAX_CONTENT_SIZE.
if len( safe_str ) > MAX_CONTENT_SIZE:
- large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
- safe_str = '%s%s' % ( safe_str, to_safe_string( large_str ) )
+ large_str = \
+ to_safe_string( '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE ) )
+ safe_str = '%s%s' % ( safe_str, large_str )
break
+ if len( safe_str ) > MAX_DISPLAY_SIZE:
+ # Eliminate the middle of the file to display a file no larger than MAX_DISPLAY_SIZE. This may not be ideal if the file is larger than MAX_CONTENT_SIZE.
+ join_by_str = \
+ to_safe_string( "\n\n...some text eliminated here because file size is larger than maximum viewing size of %s...\n\n" % util.nice_size( MAX_DISPLAY_SIZE ) )
+ safe_str = util.shrink_string_by_size( safe_str, MAX_DISPLAY_SIZE, join_by=join_by_str, left_larger=True, beginning_on_size_error=True )
return safe_str
def get_repository_files( trans, folder_path ):
@@ -787,9 +795,8 @@
This method assumes all repository tools are defined in a single shed-related tool panel config.
"""
tool_shed = clean_tool_shed_url( repository.tool_shed )
- partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
+ relative_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
# Get the relative tool installation paths from each of the shed tool configs.
- relative_install_dir = None
shed_config_dict = repository.get_shed_config_dict( app )
if not shed_config_dict:
# Just pick a semi-random shed config.
@@ -799,7 +806,6 @@
break
shed_tool_conf = shed_config_dict[ 'config_filename' ]
tool_path = shed_config_dict[ 'tool_path' ]
- relative_install_dir = partial_install_dir
return shed_tool_conf, tool_path, relative_install_dir
def get_tool_path_by_shed_tool_conf_filename( trans, shed_tool_conf ):
@@ -1188,13 +1194,13 @@
return toolshed_base_url.rstrip( '/' ) == str( url_for( '/', qualified=True ) ).rstrip( '/' )
def translate_string( raw_text, to_html=True ):
- """Return a subset of a string (up to MAX_CONTENT_SIZE) translated to a safe string for display in a browser."""
+ """Return a subset of a string (up to MAX_DISPLAY_SIZE) translated to a safe string for display in a browser."""
if raw_text:
- if len( raw_text ) <= MAX_CONTENT_SIZE:
+ if len( raw_text ) <= MAX_DISPLAY_SIZE:
translated_string = to_safe_string( raw_text, to_html=to_html )
else:
- large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
- translated_string = to_safe_string( '%s%s' % ( raw_text[ 0:MAX_CONTENT_SIZE ], large_str ), to_html=to_html )
+ large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_DISPLAY_SIZE )
+ translated_string = to_safe_string( '%s%s' % ( raw_text[ 0:MAX_DISPLAY_SIZE ], large_str ), to_html=to_html )
else:
translated_string = ''
return translated_string
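
util.shrink_string_by_size, as called above, drops the middle of an oversized string so both the head and tail stay visible. A rough standalone equivalent of that middle elision (keyword names are taken from the call site; the real utility may handle edge cases differently):

def shrink_middle( s, size, join_by='..', left_larger=True ):
    # Keep the beginning and end of s so the result is at most size characters.
    if len( s ) <= size:
        return s
    keep = max( size - len( join_by ), 0 )
    left = ( keep + 1 ) // 2 if left_larger else keep // 2
    right = keep - left
    tail = s[ -right: ] if right else ''
    return s[ :left ] + join_by + tail

# shrink_middle( 'x' * 100, 20 ) -> 'xxxxxxxxx..xxxxxxxxx'
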
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: inithello: Retrieve functional test results by owner's username instead of user ID.
by commits-noreply@bitbucket.org 24 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/333dfc42a868/
Changeset: 333dfc42a868
User: inithello
Date: 2013-04-24 20:41:16
Summary: Retrieve functional test results by owner's username instead of user ID.
Affected #: 1 file
diff -r 322d7ae99729ac4235e3f6becc6707c9f0c359d1 -r 333dfc42a8688ec81627ae1164e3be9ba2aa864c lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -1127,14 +1127,17 @@
@web.expose
def get_functional_test_rss( self, trans, **kwd ):
- '''Return an RSS feed of the functional test results for the provided user ID, optionally filtered by the 'status' parameter.'''
- encoded_user_id = kwd.get( 'user_id', None )
- if encoded_user_id:
- user_id = trans.security.decode_id( encoded_user_id )
+ '''Return an RSS feed of the functional test results for the provided user, optionally filtered by the 'status' parameter.'''
+ owner = kwd.get( 'owner', None )
+ status = kwd.get( 'status', 'all' )
+ if owner:
+ user = suc.get_user_by_username( trans.app, owner )
else:
trans.response.status = 404
- return 'Unknown or missing user ID.'
- status = kwd.get( 'status', 'all' )
+ return 'Missing owner parameter.'
+ if user is None:
+ trans.response.status = 404
+ return 'No user found with username %s.' % owner
if status == 'passed':
# Return only metadata revisions where tools_functionally_correct is set to True.
metadata_filter = and_( trans.model.RepositoryMetadata.table.c.includes_tools == True,
@@ -1150,6 +1153,7 @@
metadata_filter = and_( trans.model.RepositoryMetadata.table.c.includes_tools == True,
trans.model.RepositoryMetadata.table.c.time_last_tested != None )
+ tool_shed_url = web.url_for( '/', qualified=True )
functional_test_results = []
for metadata_row in trans.sa_session.query( trans.model.RepositoryMetadata ) \
.filter( metadata_filter ) \
@@ -1157,17 +1161,15 @@
.filter( and_( trans.model.Repository.table.c.deleted == False,
trans.model.Repository.table.c.private == False,
trans.model.Repository.table.c.deprecated == False,
- trans.model.Repository.table.c.user_id == user_id ) ):
+ trans.model.Repository.table.c.user_id == user.id ) ):
if not metadata_row.tool_test_errors:
continue
# Per the RSS 2.0 specification, all dates in RSS feeds must be formatted as specified in RFC 822
# section 5.1, e.g. Sat, 07 Sep 2002 00:00:01 UT
time_tested = metadata_row.time_last_tested.strftime( '%a, %d %b %Y %H:%M:%S UT' )
- link = web.url_for( '/', qualified=True )
repository = metadata_row.repository
- user = repository.user
# Generate a citable URL for this repository with owner and changeset revision.
- repository_citable_url = suc.url_join( link, 'view', user.username, repository.name, metadata_row.changeset_revision )
+ repository_citable_url = suc.url_join( tool_shed_url, 'view', user.username, repository.name, metadata_row.changeset_revision )
title = 'Functional test results for changeset revision %s of %s' % ( metadata_row.changeset_revision, repository.name )
tests_passed = len( metadata_row.tool_test_errors[ 'tests_passed' ] )
tests_failed = len( metadata_row.tool_test_errors[ 'invalid_tests' ] )
@@ -1184,7 +1186,7 @@
trans.response.set_content_type( 'application/rss+xml' )
return trans.fill_template( '/rss.mako',
title='Tool functional test results',
- link=link,
+ link=tool_shed_url,
description='Functional test results for repositories owned by %s.' % user.username,
pubdate=strftime( '%a, %d %b %Y %H:%M:%S UT', gmtime() ),
items=functional_test_results )
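
After this change the feed is addressed by username rather than encoded user ID; a hypothetical client fetch (the host, owner, and query values are placeholders, and the route is assumed from this controller):

import urllib2

url = 'http://localhost:9009/repository/get_functional_test_rss?owner=test&status=failed'
rss_xml = urllib2.urlopen( url ).read()
print rss_xml[ :200 ]
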
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: inithello: Basic rss feed for tool functional test results for repositories owned by a specific user.
by commits-noreply@bitbucket.org 24 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/322d7ae99729/
Changeset: 322d7ae99729
User: inithello
Date: 2013-04-24 20:14:15
Summary: Basic rss feed for tool functional test results for repositories owned by a specific user.
Affected #: 2 files
diff -r 4126ec15fd614dff9d5a0b555623626fa947197c -r 322d7ae99729ac4235e3f6becc6707c9f0c359d1 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -1125,6 +1125,70 @@
named_tmp_file = suc.get_named_tmpfile_from_ctx( ctx, file_name, dir )
return named_tmp_file
+ @web.expose
+ def get_functional_test_rss( self, trans, **kwd ):
+ '''Return an RSS feed of the functional test results for the provided user ID, optionally filtered by the 'status' parameter.'''
+ encoded_user_id = kwd.get( 'user_id', None )
+ if encoded_user_id:
+ user_id = trans.security.decode_id( encoded_user_id )
+ else:
+ trans.response.status = 404
+ return 'Unknown or missing user ID.'
+ status = kwd.get( 'status', 'all' )
+ if status == 'passed':
+ # Return only metadata revisions where tools_functionally_correct is set to True.
+ metadata_filter = and_( trans.model.RepositoryMetadata.table.c.includes_tools == True,
+ trans.model.RepositoryMetadata.table.c.tools_functionally_correct == True,
+ trans.model.RepositoryMetadata.table.c.time_last_tested != None )
+ elif status == 'failed':
+ # Return only metadata revisions where tools_functionally_correct is set to False.
+ metadata_filter = and_( trans.model.RepositoryMetadata.table.c.includes_tools == True,
+ trans.model.RepositoryMetadata.table.c.tools_functionally_correct == False,
+ trans.model.RepositoryMetadata.table.c.time_last_tested != None )
+ else:
+ # Return all metadata entries for this user's repositories.
+ metadata_filter = and_( trans.model.RepositoryMetadata.table.c.includes_tools == True,
+ trans.model.RepositoryMetadata.table.c.time_last_tested != None )
+
+ functional_test_results = []
+ for metadata_row in trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( metadata_filter ) \
+ .join( trans.model.Repository ) \
+ .filter( and_( trans.model.Repository.table.c.deleted == False,
+ trans.model.Repository.table.c.private == False,
+ trans.model.Repository.table.c.deprecated == False,
+ trans.model.Repository.table.c.user_id == user_id ) ):
+ if not metadata_row.tool_test_errors:
+ continue
+ # Per the RSS 2.0 specification, all dates in RSS feeds must be formatted as specified in RFC 822
+ # section 5.1, e.g. Sat, 07 Sep 2002 00:00:01 UT
+ time_tested = metadata_row.time_last_tested.strftime( '%a, %d %b %Y %H:%M:%S UT' )
+ link = web.url_for( '/', qualified=True )
+ repository = metadata_row.repository
+ user = repository.user
+ # Generate a citable URL for this repository with owner and changeset revision.
+ repository_citable_url = suc.url_join( link, 'view', user.username, repository.name, metadata_row.changeset_revision )
+ title = 'Functional test results for changeset revision %s of %s' % ( metadata_row.changeset_revision, repository.name )
+ tests_passed = len( metadata_row.tool_test_errors[ 'tests_passed' ] )
+ tests_failed = len( metadata_row.tool_test_errors[ 'invalid_tests' ] )
+ invalid_tests = len( metadata_row.tool_test_errors[ 'test_errors' ] )
+ description = '%d tests passed, %d tests failed, %d tests determined to be invalid.' % ( tests_passed, tests_failed, invalid_tests )
+ # The guid attribute in an RSS feed's list of items allows a feed reader to choose not to show an item as updated
+ # if the guid is unchanged. For functional test results, the citable URL is sufficiently unique to enable
+ # that behavior.
+ functional_test_results.append( dict( title=title,
+ guid=repository_citable_url,
+ link=repository_citable_url,
+ description=description,
+ pubdate=time_tested ) )
+ trans.response.set_content_type( 'application/rss+xml' )
+ return trans.fill_template( '/rss.mako',
+ title='Tool functional test results',
+ link=link,
+ description='Functional test results for repositories owned by %s.' % user.username,
+ pubdate=strftime( '%a, %d %b %Y %H:%M:%S UT', gmtime() ),
+ items=functional_test_results )
+
def get_metadata( self, trans, repository_id, changeset_revision ):
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata and repository_metadata.metadata:
diff -r 4126ec15fd614dff9d5a0b555623626fa947197c -r 322d7ae99729ac4235e3f6becc6707c9f0c359d1 templates/rss.mako
--- /dev/null
+++ b/templates/rss.mako
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<rss version="2.0">
+ <channel>
+ <title>${title}</title>
+ <link>${link}</link>
+ <pubDate>${pubdate}</pubDate>
+ <description>${description}</description>
+ <language>en-US</language>
+ <ttl>60</ttl>
+ <docs>http://cyber.law.harvard.edu/rss/rss.html</docs>
+ %for item in items:
+ <item>
+ <pubDate>${item['pubdate']}</pubDate>
+ <title>${item['title']}</title>
+ <link>${item['link']}</link>
+ <guid>${item['guid']}</guid>
+ <description>
+ ${item['description']}
+ </description>
+ </item>
+ %endfor
+ </channel>
+</rss>
\ No newline at end of file
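
For reference, each entry in the items list that rss.mako iterates over is a plain dictionary; an illustrative item matching the fields the template reads (all values are made up):

item = dict( title='Functional test results for changeset revision c84c6a8be056 of package_numpy_1_7',
             guid='http://localhost:9009/view/test/package_numpy_1_7/c84c6a8be056',
             link='http://localhost:9009/view/test/package_numpy_1_7/c84c6a8be056',
             description='10 tests passed, 0 tests failed, 0 tests determined to be invalid.',
             pubdate='Wed, 24 Apr 2013 15:52:40 UT' )
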
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4126ec15fd61/
Changeset: 4126ec15fd61
User: inithello
Date: 2013-04-24 15:52:40
Summary: Fix functional test issue with forms having refresh_on_change.
Affected #: 1 file
diff -r fb28ceb83c379e1d792f622f9b2cbc8c3e050f37 -r 4126ec15fd614dff9d5a0b555623626fa947197c test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -1180,9 +1180,16 @@
# Check for refresh_on_change attribute, submit a change if required
if hasattr( control, 'attrs' ) and 'refresh_on_change' in control.attrs.keys():
changed = False
- item_labels = [ item.attrs[ 'label' ] for item in control.get_items() if item.selected ] #For DataToolParameter, control.value is the HDA id, but kwd contains the filename. This loop gets the filename/label for the selected values.
+ # For DataToolParameter, control.value is the HDA id, but kwd contains the filename.
+ # This loop gets the filename/label for the selected values.
+ item_labels = [ item.attrs[ 'label' ] for item in control.get_items() if item.selected ]
for value in kwd[ control.name ]:
- if value not in control.value and True not in [ value in item_label for item_label in item_labels ]:
+ # Galaxy truncates long file names in the dataset_collector in galaxy/tools/parameters/basic.py
+ if len( value ) > 30 and control.is_of_kind( 'singlelist' ):
+ field_value = '%s..%s' % ( value[:17], value[-11:] )
+ else:
+ field_value = value
+ if field_value not in control.value and True not in [ field_value in item_label for item_label in item_labels ]:
changed = True
break
if changed:
@@ -1190,7 +1197,11 @@
control.clear()
# kwd[control.name] should be a singlelist
for elem in kwd[ control.name ]:
- tc.fv( f.name, control.name, str( elem ) )
+ if len( elem ) > 30 and control.is_of_kind( 'singlelist' ):
+ elem_name = '%s..%s' % ( elem[:17], elem[-11:] )
+ else:
+ elem_name = elem
+ tc.fv( f.name, control.name, str( elem_name ) )
# Create a new submit control, allows form to refresh, instead of going to next page
control = ClientForm.SubmitControl( 'SubmitControl', '___refresh_grouping___', {'name':'refresh_grouping'} )
control.add_to_form( f )
@@ -1241,7 +1252,7 @@
tc.fv( f.name, control.name, str( elem ) )
except Exception, e2:
print "Attempting to set control '", control.name, "' to value '", elem, "' threw exception: ", e2
- # Galaxy truncates long file names in the dataset_collector in ~/parameters/basic.py
+ # Galaxy truncates long file names in the dataset_collector in galaxy/tools/parameters/basic.py
if len( elem ) > 30:
elem_name = '%s..%s' % ( elem[:17], elem[-11:] )
else:
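
The truncation applied here mirrors what the dataset_collector in galaxy/tools/parameters/basic.py does to long file names; a quick standalone check of the rule (17 leading characters, '..', 11 trailing characters, applied only past 30 characters; truncate_label is a hypothetical name):

def truncate_label( name ):
    # Apply Galaxy's long-file-name truncation used in select list labels.
    if len( name ) > 30:
        return '%s..%s' % ( name[ :17 ], name[ -11: ] )
    return name

assert truncate_label( 'a_very_long_dataset_file_name.tabular' ) == 'a_very_long_datas..ame.tabular'
assert truncate_label( 'short_name.bed' ) == 'short_name.bed'
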
https://bitbucket.org/galaxy/galaxy-central/commits/b0ea9722b9dd/
Changeset: b0ea9722b9dd
Branch: stable
User: inithello
Date: 2013-04-24 15:52:40
Summary: Fix functional test issue with forms having refresh_on_change.
Affected #: 1 file
diff -r ae53deed05b68f6febfcf3472c2a11514c12eb0c -r b0ea9722b9dd8e0da63208b59be20b90eae0cea7 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -1180,9 +1180,16 @@
# Check for refresh_on_change attribute, submit a change if required
if hasattr( control, 'attrs' ) and 'refresh_on_change' in control.attrs.keys():
changed = False
- item_labels = [ item.attrs[ 'label' ] for item in control.get_items() if item.selected ] #For DataToolParameter, control.value is the HDA id, but kwd contains the filename. This loop gets the filename/label for the selected values.
+ # For DataToolParameter, control.value is the HDA id, but kwd contains the filename.
+ # This loop gets the filename/label for the selected values.
+ item_labels = [ item.attrs[ 'label' ] for item in control.get_items() if item.selected ]
for value in kwd[ control.name ]:
- if value not in control.value and True not in [ value in item_label for item_label in item_labels ]:
+ # Galaxy truncates long file names in the dataset_collector in galaxy/tools/parameters/basic.py
+ if len( value ) > 30 and control.is_of_kind( 'singlelist' ):
+ field_value = '%s..%s' % ( value[:17], value[-11:] )
+ else:
+ field_value = value
+ if field_value not in control.value and True not in [ field_value in item_label for item_label in item_labels ]:
changed = True
break
if changed:
@@ -1190,7 +1197,11 @@
control.clear()
# kwd[control.name] should be a singlelist
for elem in kwd[ control.name ]:
- tc.fv( f.name, control.name, str( elem ) )
+ if len( elem ) > 30 and control.is_of_kind( 'singlelist' ):
+ elem_name = '%s..%s' % ( elem[:17], elem[-11:] )
+ else:
+ elem_name = elem
+ tc.fv( f.name, control.name, str( elem_name ) )
# Create a new submit control, allows form to refresh, instead of going to next page
control = ClientForm.SubmitControl( 'SubmitControl', '___refresh_grouping___', {'name':'refresh_grouping'} )
control.add_to_form( f )
@@ -1241,7 +1252,7 @@
tc.fv( f.name, control.name, str( elem ) )
except Exception, e2:
print "Attempting to set control '", control.name, "' to value '", elem, "' threw exception: ", e2
- # Galaxy truncates long file names in the dataset_collector in ~/parameters/basic.py
+ # Galaxy truncates long file names in the dataset_collector in galaxy/tools/parameters/basic.py
if len( elem ) > 30:
elem_name = '%s..%s' % ( elem[:17], elem[-11:] )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: History API: add update method and allow name, genome_build, annotation, deleted, and published as updatable fields; browser tests: test history api; root/history: fix error handling when user is anonymous
by commits-noreply@bitbucket.org 23 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/fb28ceb83c37/
Changeset: fb28ceb83c37
User: carlfeberhard
Date: 2013-04-23 23:38:00
Summary: History API: add update method and allow name, genome_build, annotation, deleted, and published as updatable fields; browser tests: test history api; root/history: fix error handling when user is anonymous
Affected #: 9 files
diff -r 7c59121055516595937b83d51eaf98b60723b622 -r fb28ceb83c379e1d792f622f9b2cbc8c3e050f37 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -575,8 +575,10 @@
self.group = group
class History( object, UsesAnnotations ):
+
api_collection_visible_keys = ( 'id', 'name', 'published', 'deleted' )
- api_element_visible_keys = ( 'id', 'name', 'published', 'deleted' )
+ api_element_visible_keys = ( 'id', 'name', 'published', 'deleted', 'genome_build', 'purged' )
+
def __init__( self, id=None, name=None, user=None ):
self.id = id
self.name = name or "Unnamed history"
@@ -589,6 +591,7 @@
self.user = user
self.datasets = []
self.galaxy_sessions = []
+
def _next_hid( self ):
# TODO: override this with something in the database that ensures
# better integrity
@@ -600,18 +603,21 @@
if dataset.hid > last_hid:
last_hid = dataset.hid
return last_hid + 1
+
def add_galaxy_session( self, galaxy_session, association=None ):
if association is None:
self.galaxy_sessions.append( GalaxySessionToHistoryAssociation( galaxy_session, self ) )
else:
self.galaxy_sessions.append( association )
+
def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid=True, quota=True ):
if isinstance( dataset, Dataset ):
dataset = HistoryDatasetAssociation(dataset=dataset)
object_session( self ).add( dataset )
object_session( self ).flush()
elif not isinstance( dataset, HistoryDatasetAssociation ):
- raise TypeError, "You can only add Dataset and HistoryDatasetAssociation instances to a history ( you tried to add %s )." % str( dataset )
+ raise TypeError, ( "You can only add Dataset and HistoryDatasetAssociation instances to a history" +
+ " ( you tried to add %s )." % str( dataset ) )
if parent_id:
for data in self.datasets:
if data.id == parent_id:
@@ -630,6 +636,7 @@
self.genome_build = genome_build
self.datasets.append( dataset )
return dataset
+
def copy( self, name=None, target_user=None, activatable=False ):
# Create new history.
if not name:
@@ -647,7 +654,7 @@
# Copy annotation.
self.copy_item_annotation( db_session, self.user, self, target_user, new_history )
- #Copy Tags
+ # Copy Tags
new_history.copy_tags_from(target_user=target_user, source_history=self)
# Copy HDAs.
@@ -667,12 +674,17 @@
db_session.add( new_history )
db_session.flush()
return new_history
+
@property
def activatable_datasets( self ):
# This needs to be a list
return [ hda for hda in self.datasets if not hda.dataset.deleted ]
+
def get_display_name( self ):
- """ History name can be either a string or a unicode object. If string, convert to unicode object assuming 'utf-8' format. """
+ """
+ History name can be either a string or a unicode object.
+ If string, convert to unicode object assuming 'utf-8' format.
+ """
history_name = self.name
if isinstance(history_name, str):
history_name = unicode(history_name, 'utf-8')
@@ -682,6 +694,7 @@
if value_mapper is None:
value_mapper = {}
rval = {}
+
try:
visible_keys = self.__getattribute__( 'api_' + view + '_visible_keys' )
except AttributeError:
@@ -693,6 +706,7 @@
rval[key] = value_mapper.get( key )( rval[key] )
except AttributeError:
rval[key] = None
+
tags_str_list = []
for tag in self.tags:
tag_str = tag.user_tname
@@ -702,25 +716,51 @@
rval['tags'] = tags_str_list
rval['model_class'] = self.__class__.__name__
return rval
+
+ def set_from_dict( self, new_data ):
+ #AKA: set_api_value
+ """
+ Set object attributes to the values in the dictionary new_data, limited
+ to only those keys in api_element_visible_keys.
+
+ Returns a dictionary of the key/value pairs that have changed.
+ """
+ # precondition: keys are proper, values are parsed and validated
+ changed = {}
+ for key in [ k for k in new_data.keys() if k in self.api_element_visible_keys ]:
+ new_val = new_data[ key ]
+ old_val = self.__getattribute__( key )
+ if new_val == old_val:
+ continue
+
+ self.__setattr__( key, new_val )
+ changed[ key ] = new_val
+
+ return changed
+
@property
def get_disk_size_bytes( self ):
return self.get_disk_size( nice_size=False )
+
def unhide_datasets( self ):
for dataset in self.datasets:
dataset.mark_unhidden()
+
def resume_paused_jobs( self ):
for dataset in self.datasets:
job = dataset.creating_job
if job is not None and job.state == Job.states.PAUSED:
job.set_state(Job.states.NEW)
+
def get_disk_size( self, nice_size=False ):
# unique datasets only
db_session = object_session( self )
- rval = db_session.query( func.sum( db_session.query( HistoryDatasetAssociation.dataset_id, Dataset.total_size ).join( Dataset )
- .filter( HistoryDatasetAssociation.table.c.history_id == self.id )
- .filter( HistoryDatasetAssociation.purged != True )
- .filter( Dataset.purged != True )
- .distinct().subquery().c.total_size ) ).first()[0]
+ rval = db_session.query(
+ func.sum( db_session.query( HistoryDatasetAssociation.dataset_id, Dataset.total_size ).join( Dataset )
+ .filter( HistoryDatasetAssociation.table.c.history_id == self.id )
+ .filter( HistoryDatasetAssociation.purged != True )
+ .filter( Dataset.purged != True )
+ .distinct().subquery().c.total_size ) ).first()[0]
if rval is None:
rval = 0
if nice_size:
@@ -733,6 +773,7 @@
new_shta.user = target_user
self.tags.append(new_shta)
+
class HistoryUserShareAssociation( object ):
def __init__( self ):
self.history = None
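
A quick illustration of the set_from_dict contract added above: keys outside api_element_visible_keys are ignored, unchanged values are skipped, and only real changes come back. This uses a standalone stand-in object, not the mapped model:

class FakeHistory( object ):
    api_element_visible_keys = ( 'name', 'deleted' )

    def __init__( self ):
        self.name = 'Unnamed history'
        self.deleted = False

    # same logic as History.set_from_dict above
    def set_from_dict( self, new_data ):
        changed = {}
        for key in [ k for k in new_data.keys() if k in self.api_element_visible_keys ]:
            new_val = new_data[ key ]
            if new_val == getattr( self, key ):
                continue
            setattr( self, key, new_val )
            changed[ key ] = new_val
        return changed

history = FakeHistory()
print history.set_from_dict( { 'name': 'RNA-seq', 'deleted': False, 'bogus': 1 } )
# {'name': 'RNA-seq'}
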
diff -r 7c59121055516595937b83d51eaf98b60723b622 -r fb28ceb83c379e1d792f622f9b2cbc8c3e050f37 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -245,6 +245,187 @@
return item
+class UsesHistoryMixin( SharableItemSecurityMixin ):
+ """ Mixin for controllers that use History objects. """
+
+ def get_history( self, trans, id, check_ownership=True, check_accessible=False, deleted=None ):
+ """Get a History from the database by id, verifying ownership."""
+ history = self.get_object( trans, id, 'History', check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted )
+ return self.security_check( trans, history, check_ownership, check_accessible )
+
+ def get_history_datasets( self, trans, history, show_deleted=False, show_hidden=False, show_purged=False ):
+ """ Returns history's datasets. """
+ query = trans.sa_session.query( trans.model.HistoryDatasetAssociation ) \
+ .filter( trans.model.HistoryDatasetAssociation.history == history ) \
+ .options( eagerload( "children" ) ) \
+ .join( "dataset" ) \
+ .options( eagerload_all( "dataset.actions" ) ) \
+ .order_by( trans.model.HistoryDatasetAssociation.hid )
+ if not show_deleted:
+ query = query.filter( trans.model.HistoryDatasetAssociation.deleted == False )
+ if not show_purged:
+ query = query.filter( trans.model.Dataset.purged == False )
+ return query.all()
+
+ def get_hda_state_counts( self, trans, history, include_deleted=False, include_hidden=False ):
+ """
+ Returns a dictionary with state counts for the history's HDAs. Key is a
+ dataset state, value is the number of HDAs in that state.
+ """
+ # Build query to get (state, count) pairs.
+ cols_to_select = [ trans.app.model.Dataset.table.c.state, func.count( '*' ) ]
+ from_obj = trans.app.model.HistoryDatasetAssociation.table.join( trans.app.model.Dataset.table )
+
+ conditions = [ trans.app.model.HistoryDatasetAssociation.table.c.history_id == history.id ]
+ if not include_deleted:
+ # Only count datasets that have not been deleted.
+ conditions.append( trans.app.model.HistoryDatasetAssociation.table.c.deleted == False )
+ if not include_hidden:
+ # Only count datasets that are visible.
+ conditions.append( trans.app.model.HistoryDatasetAssociation.table.c.visible == True )
+
+ group_by = trans.app.model.Dataset.table.c.state
+ query = select( columns=cols_to_select,
+ from_obj=from_obj,
+ whereclause=and_( *conditions ),
+ group_by=group_by )
+
+ # Initialize count dict with all states.
+ state_count_dict = {}
+ for k, state in trans.app.model.Dataset.states.items():
+ state_count_dict[ state ] = 0
+
+ # Process query results, adding to count dict.
+ for row in trans.sa_session.execute( query ):
+ state, count = row
+ state_count_dict[ state ] = count
+
+ return state_count_dict
+
+ def get_hda_summary_dicts( self, trans, history ):
+ """Returns a list of dictionaries containing summary information
+ for each HDA in the given history.
+ """
+ hda_model = trans.model.HistoryDatasetAssociation
+
+ # get state, name, etc.
+ columns = ( hda_model.name, hda_model.hid, hda_model.id, hda_model.deleted,
+ trans.model.Dataset.state )
+ column_keys = [ "name", "hid", "id", "deleted", "state" ]
+
+ query = ( trans.sa_session.query( *columns )
+ .enable_eagerloads( False )
+ .filter( hda_model.history == history )
+ .join( trans.model.Dataset )
+ .order_by( hda_model.hid ) )
+
+ # build dictionaries, adding history id and encoding all ids
+ hda_dicts = []
+ for hda_tuple in query.all():
+ hda_dict = dict( zip( column_keys, hda_tuple ) )
+ hda_dict[ 'history_id' ] = history.id
+ trans.security.encode_dict_ids( hda_dict )
+ hda_dicts.append( hda_dict )
+ return hda_dicts
+
+ def _get_hda_state_summaries( self, trans, hda_dict_list ):
+ """Returns two dictionaries (in a tuple): state_counts and state_ids.
+ Each is keyed according to the possible hda states:
+ _counts contains a sum of the datasets in each state
+ _ids contains a list of the encoded ids for each hda in that state
+
+ hda_dict_list should be a list of hda data in dictionary form.
+ """
+ #TODO: doc to rst
+ # init counts, ids for each state
+ state_counts = {}
+ state_ids = {}
+ for key, state in trans.app.model.Dataset.states.items():
+ state_counts[ state ] = 0
+ state_ids[ state ] = []
+
+ for hda_dict in hda_dict_list:
+ item_state = hda_dict['state']
+ if not hda_dict['deleted']:
+ state_counts[ item_state ] = state_counts[ item_state ] + 1
+ # needs to return all ids (no deleted check)
+ state_ids[ item_state ].append( hda_dict['id'] )
+
+ return ( state_counts, state_ids )
+
+ def _get_history_state_from_hdas( self, trans, history, hda_state_counts ):
+ """Returns the history state based on the states of the HDAs it contains.
+ """
+ states = trans.app.model.Dataset.states
+
+ num_hdas = sum( hda_state_counts.values() )
+ # (default to ERROR)
+ state = states.ERROR
+ if num_hdas == 0:
+ state = states.NEW
+
+ else:
+ if( ( hda_state_counts[ states.RUNNING ] > 0 )
+ or ( hda_state_counts[ states.SETTING_METADATA ] > 0 )
+ or ( hda_state_counts[ states.UPLOAD ] > 0 ) ):
+ state = states.RUNNING
+
+ elif hda_state_counts[ states.QUEUED ] > 0:
+ state = states.QUEUED
+
+ elif( ( hda_state_counts[ states.ERROR ] > 0 )
+ or ( hda_state_counts[ states.FAILED_METADATA ] > 0 ) ):
+ state = states.ERROR
+
+ elif hda_state_counts[ states.OK ] == num_hdas:
+ state = states.OK
+
+ return state
+
+ def get_history_dict( self, trans, history, hda_dictionaries=None ):
+ """Returns history data in the form of a dictionary.
+ """
+ history_dict = history.get_api_value( view='element', value_mapper={ 'id':trans.security.encode_id })
+
+ history_dict[ 'nice_size' ] = history.get_disk_size( nice_size=True )
+ history_dict[ 'annotation' ] = history.get_item_annotation_str( trans.sa_session, trans.user, history )
+ if not history_dict[ 'annotation' ]:
+ history_dict[ 'annotation' ] = ''
+ #TODO: item_slug url
+
+ hda_summaries = hda_dictionaries if hda_dictionaries else self.get_hda_summary_dicts( trans, history )
+ #TODO remove the following in v2
+ ( state_counts, state_ids ) = self._get_hda_state_summaries( trans, hda_summaries )
+ history_dict[ 'state_details' ] = state_counts
+ history_dict[ 'state_ids' ] = state_ids
+ history_dict[ 'state' ] = self._get_history_state_from_hdas( trans, history, state_counts )
+
+ return history_dict
+
+ def set_history_from_dict( self, trans, history, new_data ):
+ """
+ Changes history data using the given dictionary new_data.
+ """
+ # precondition: access of the history has already been checked
+
+ # send what we can down into the model
+ changed = history.set_from_dict( new_data )
+ # the rest (often involving the trans) - do here
+ if 'annotation' in new_data.keys() and trans.get_user():
+ history.add_item_annotation( trans.sa_session, trans.get_user(), history, new_data[ 'annotation' ] )
+ changed[ 'annotation' ] = new_data[ 'annotation' ]
+ # tags
+ # importable (ctrl.history.set_accessible_async)
+ # sharing/permissions?
+ # slugs?
+ # purged - duh duh duhhhhhhnnnnnnnnnn
+
+ if changed.keys():
+ trans.sa_session.flush()
+
+ return changed
+
+
class UsesHistoryDatasetAssociationMixin:
""" Mixin for controllers that use HistoryDatasetAssociation objects. """
@@ -817,165 +998,6 @@
step.input_connections_by_name = dict( ( conn.input_name, conn ) for conn in step.input_connections )
-class UsesHistoryMixin( SharableItemSecurityMixin ):
- """ Mixin for controllers that use History objects. """
-
- def get_history( self, trans, id, check_ownership=True, check_accessible=False, deleted=None ):
- """Get a History from the database by id, verifying ownership."""
- history = self.get_object( trans, id, 'History', check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted )
- return self.security_check( trans, history, check_ownership, check_accessible )
-
- def get_history_datasets( self, trans, history, show_deleted=False, show_hidden=False, show_purged=False ):
- """ Returns history's datasets. """
- query = trans.sa_session.query( trans.model.HistoryDatasetAssociation ) \
- .filter( trans.model.HistoryDatasetAssociation.history == history ) \
- .options( eagerload( "children" ) ) \
- .join( "dataset" ) \
- .options( eagerload_all( "dataset.actions" ) ) \
- .order_by( trans.model.HistoryDatasetAssociation.hid )
- if not show_deleted:
- query = query.filter( trans.model.HistoryDatasetAssociation.deleted == False )
- if not show_purged:
- query = query.filter( trans.model.Dataset.purged == False )
- return query.all()
-
- def get_hda_state_counts( self, trans, history, include_deleted=False, include_hidden=False ):
- """
- Returns a dictionary with state counts for history's HDAs. Key is a
- dataset state, value is the number of states in that count.
- """
- # Build query to get (state, count) pairs.
- cols_to_select = [ trans.app.model.Dataset.table.c.state, func.count( '*' ) ]
- from_obj = trans.app.model.HistoryDatasetAssociation.table.join( trans.app.model.Dataset.table )
-
- conditions = [ trans.app.model.HistoryDatasetAssociation.table.c.history_id == history.id ]
- if not include_deleted:
- # Only count datasets that have not been deleted.
- conditions.append( trans.app.model.HistoryDatasetAssociation.table.c.deleted == False )
- if not include_hidden:
- # Only count datasets that are visible.
- conditions.append( trans.app.model.HistoryDatasetAssociation.table.c.visible == True )
-
- group_by = trans.app.model.Dataset.table.c.state
- query = select( columns=cols_to_select,
- from_obj=from_obj,
- whereclause=and_( *conditions ),
- group_by=group_by )
-
- # Initialize count dict with all states.
- state_count_dict = {}
- for k, state in trans.app.model.Dataset.states.items():
- state_count_dict[ state ] = 0
-
- # Process query results, adding to count dict.
- for row in trans.sa_session.execute( query ):
- state, count = row
- state_count_dict[ state ] = count
-
- return state_count_dict
-
- def get_hda_summary_dicts( self, trans, history ):
- """Returns a list of dictionaries containing summary information
- for each HDA in the given history.
- """
- hda_model = trans.model.HistoryDatasetAssociation
-
- # get state, name, etc.
- columns = ( hda_model.name, hda_model.hid, hda_model.id, hda_model.deleted,
- trans.model.Dataset.state )
- column_keys = [ "name", "hid", "id", "deleted", "state" ]
-
- query = ( trans.sa_session.query( *columns )
- .enable_eagerloads( False )
- .filter( hda_model.history == history )
- .join( trans.model.Dataset )
- .order_by( hda_model.hid ) )
-
- # build dictionaries, adding history id and encoding all ids
- hda_dicts = []
- for hda_tuple in query.all():
- hda_dict = dict( zip( column_keys, hda_tuple ) )
- hda_dict[ 'history_id' ] = history.id
- trans.security.encode_dict_ids( hda_dict )
- hda_dicts.append( hda_dict )
- return hda_dicts
-
- def _get_hda_state_summaries( self, trans, hda_dict_list ):
- """Returns two dictionaries (in a tuple): state_counts and state_ids.
- Each is keyed according to the possible hda states:
- _counts contains a sum of the datasets in each state
- _ids contains a list of the encoded ids for each hda in that state
-
- hda_dict_list should be a list of hda data in dictionary form.
- """
- #TODO: doc to rst
- # init counts, ids for each state
- state_counts = {}
- state_ids = {}
- for key, state in trans.app.model.Dataset.states.items():
- state_counts[ state ] = 0
- state_ids[ state ] = []
-
- for hda_dict in hda_dict_list:
- item_state = hda_dict['state']
- if not hda_dict['deleted']:
- state_counts[ item_state ] = state_counts[ item_state ] + 1
- # needs to return all ids (no deleted check)
- state_ids[ item_state ].append( hda_dict['id'] )
-
- return ( state_counts, state_ids )
-
- def _get_history_state_from_hdas( self, trans, history, hda_state_counts ):
- """Returns the history state based on the states of the HDAs it contains.
- """
- states = trans.app.model.Dataset.states
-
- num_hdas = sum( hda_state_counts.values() )
- # (default to ERROR)
- state = states.ERROR
- if num_hdas == 0:
- state = states.NEW
-
- else:
- if( ( hda_state_counts[ states.RUNNING ] > 0 )
- or ( hda_state_counts[ states.SETTING_METADATA ] > 0 )
- or ( hda_state_counts[ states.UPLOAD ] > 0 ) ):
- state = states.RUNNING
-
- elif hda_state_counts[ states.QUEUED ] > 0:
- state = states.QUEUED
-
- elif( ( hda_state_counts[ states.ERROR ] > 0 )
- or ( hda_state_counts[ states.FAILED_METADATA ] > 0 ) ):
- state = states.ERROR
-
- elif hda_state_counts[ states.OK ] == num_hdas:
- state = states.OK
-
- return state
-
- def get_history_dict( self, trans, history, hda_dictionaries=None ):
- """Returns history data in the form of a dictionary.
- """
- history_dict = history.get_api_value( view='element', value_mapper={ 'id':trans.security.encode_id })
-
- history_dict[ 'nice_size' ] = history.get_disk_size( nice_size=True )
-
- #TODO: separate, move to annotation api, fill on the client
- history_dict[ 'annotation' ] = history.get_item_annotation_str( trans.sa_session, trans.user, history )
- if not history_dict[ 'annotation' ]:
- history_dict[ 'annotation' ] = ''
-
- hda_summaries = hda_dictionaries if hda_dictionaries else self.get_hda_summary_dicts( trans, history )
- #TODO remove the following in v2
- ( state_counts, state_ids ) = self._get_hda_state_summaries( trans, hda_summaries )
- history_dict[ 'state_details' ] = state_counts
- history_dict[ 'state_ids' ] = state_ids
- history_dict[ 'state' ] = self._get_history_state_from_hdas( trans, history, state_counts )
-
- return history_dict
-
-
class UsesFormDefinitionsMixin:
"""Mixin for controllers that use Galaxy form objects."""
diff -r 7c59121055516595937b83d51eaf98b60723b622 -r fb28ceb83c379e1d792f622f9b2cbc8c3e050f37 lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -1,12 +1,18 @@
"""
API operations on a history.
"""
-import logging
+
+import pkg_resources
+pkg_resources.require("Paste")
+from paste.httpexceptions import HTTPBadRequest
+
from galaxy import web, util
from galaxy.web.base.controller import BaseAPIController, UsesHistoryMixin
from galaxy.web import url_for
from galaxy.model.orm import desc
+from galaxy.util.bunch import Bunch
+import logging
log = logging.getLogger( __name__ )
class HistoriesController( BaseAPIController, UsesHistoryMixin ):
@@ -18,6 +24,7 @@
GET /api/histories/deleted
Displays a collection (list) of histories.
"""
+ #TODO: query (by name, date, etc.)
rval = []
deleted = util.string_as_bool( deleted )
try:
@@ -50,6 +57,8 @@
GET /api/histories/most_recently_used
Displays information about a history.
"""
+ #TODO: GET /api/histories/{encoded_history_id}?as_archive=True
+ #TODO: GET /api/histories/s/{username}/{slug}
history_id = id
deleted = util.string_as_bool( deleted )
@@ -92,6 +101,10 @@
trans.sa_session.flush()
item = new_history.get_api_value(view='element', value_mapper={'id':trans.security.encode_id})
item['url'] = url_for( 'history', id=item['id'] )
+
+ #TODO: copy own history
+ #TODO: import an importable history
+ #TODO: import from archive
return item
@web.expose_api
@@ -146,3 +159,66 @@
trans.sa_session.add( history )
trans.sa_session.flush()
return 'OK'
+
+ @web.expose_api
+ def update( self, trans, id, payload, **kwd ):
+ """
+ PUT /api/histories/{encoded_history_id}
+ Changes an existing history.
+ """
+ #TODO: PUT /api/histories/{encoded_history_id} payload = { rating: rating } (w/ no security checks)
+ try:
+ history = self.get_history( trans, id, check_ownership=True, check_accessible=True, deleted=True )
+ # validation handled here and some parsing, processing, and conversion
+ payload = self._validate_and_parse_update_payload( payload )
+ # additional checks here (security, etc.)
+ changed = self.set_history_from_dict( trans, history, payload )
+
+ except Exception, exception:
+ log.error( 'Update of history (%s) failed: %s', id, str( exception ), exc_info=True )
+ # convert to an appropriate HTTP code
+ if( isinstance( exception, ValueError )
+ or isinstance( exception, AttributeError ) ):
+ # bad syntax from the validator/parser
+ trans.response.status = 400
+ else:
+ trans.response.status = 500
+ return { 'error': str( exception ) }
+
+ return changed
+
+ def _validate_and_parse_update_payload( self, payload ):
+ """
+ Validate and parse the incoming data payload for a history.
+ """
+ # This layer handles (most of the stricter idiot proofing):
+ # - unknown/unallowed keys
+ # - changing data keys from api key to attribute name
+ # - protection against bad data form/type
+ # - protection against malicious data content
+ # all other conversions and processing (such as permissions, etc.) should happen down the line
+ for key, val in payload.items():
+ # TODO: lots of boilerplate here, but overhead on abstraction is equally onerous
+ if key == 'name':
+ if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
+ raise ValueError( 'name must be a string or unicode: %s' %( str( type( val ) ) ) )
+ payload[ 'name' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
+ #TODO:?? if sanitized != val: log.warn( 'script kiddie' )
+ elif key == 'deleted':
+ if not isinstance( val, bool ):
+ raise ValueError( 'deleted must be a boolean: %s' %( str( type( val ) ) ) )
+ elif key == 'published':
+ if not isinstance( payload[ 'published' ], bool ):
+ raise ValueError( 'published must be a boolean: %s' %( str( type( val ) ) ) )
+ elif key == 'genome_build':
+ if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
+ raise ValueError( 'genome_build must be a string: %s' %( str( type( val ) ) ) )
+ payload[ 'genome_build' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
+ elif key == 'annotation':
+ if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
+ raise ValueError( 'annotation must be a string or unicode: %s' %( str( type( val ) ) ) )
+ payload[ 'annotation' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
+ else:
+ raise AttributeError( 'unknown key: %s' %( str( key ) ) )
+ return payload
+
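
A hypothetical client call against the new update endpoint; the host, API key, and encoded history ID below are placeholders:

import json
import urllib2

url = 'http://localhost:8080/api/histories/ebfb8f50c6abde6d?key=YOUR_API_KEY'
payload = json.dumps( { 'name': 'Renamed via API', 'annotation': 'updated by a script' } )
request = urllib2.Request( url, data=payload, headers={ 'Content-Type': 'application/json' } )
# urllib2 has no native PUT support; override the method on the request object
request.get_method = lambda: 'PUT'
print urllib2.urlopen( request ).read()
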
diff -r 7c59121055516595937b83d51eaf98b60723b622 -r fb28ceb83c379e1d792f622f9b2cbc8c3e050f37 lib/galaxy/webapps/galaxy/controllers/history.py
--- a/lib/galaxy/webapps/galaxy/controllers/history.py
+++ b/lib/galaxy/webapps/galaxy/controllers/history.py
@@ -724,10 +724,10 @@
'include_hidden' : include_hidden,
'include_deleted' : include_deleted }
history_exp_tool.execute( trans, incoming = params, set_output_hid = True )
+ url = url_for( controller='history', action="export_archive", id=id, qualified=True )
return trans.show_message( "Exporting History '%(n)s'. Use this link to download \
the archive or import it to another Galaxy server: \
- <a href='%(u)s'>%(u)s</a>" \
- % ( { 'n' : history.name, 'u' : url_for(controller='history', action="export_archive", id=id, qualified=True ) } ) )
+ <a href='%(u)s'>%(u)s</a>" % ( { 'n' : history.name, 'u' : url } ) )
@web.expose
@web.json
@@ -739,7 +739,8 @@
trans.sa_session.flush()
return_dict = {
"name" : history.name,
- "link" : url_for(controller='history', action="display_by_username_and_slug", username=history.user.username, slug=history.slug ) }
+ "link" : url_for(controller='history', action="display_by_username_and_slug",
+ username=history.user.username, slug=history.slug ) }
return return_dict
@web.expose
diff -r 7c59121055516595937b83d51eaf98b60723b622 -r fb28ceb83c379e1d792f622f9b2cbc8c3e050f37 lib/galaxy/webapps/galaxy/controllers/root.py
--- a/lib/galaxy/webapps/galaxy/controllers/root.py
+++ b/lib/galaxy/webapps/galaxy/controllers/root.py
@@ -167,7 +167,8 @@
history_dictionary = self.get_history_dict( trans, history, hda_dictionaries=hda_dictionaries )
except Exception, exc:
- log.error( 'Error bootstrapping history for user %d: %s', trans.user.id, str( exc ), exc_info=True )
+ user_id = str( trans.user.id ) if trans.user else '(anonymous)'
+ log.error( 'Error bootstrapping history for user %s: %s', user_id, str( exc ), exc_info=True )
message, status = err_msg()
history_dictionary[ 'error' ] = message
diff -r 7c59121055516595937b83d51eaf98b60723b622 -r fb28ceb83c379e1d792f622f9b2cbc8c3e050f37 test/casperjs/api-history-tests.js
--- /dev/null
+++ b/test/casperjs/api-history-tests.js
@@ -0,0 +1,372 @@
+/* Browser tests for the Galaxy history API (index, show, create, delete,
+ * undelete, and update), run through the spaceghost/CasperJS harness.
+ */
+try {
+ //...if there's a better way - please let me know, universe
+ var scriptDir = require( 'system' ).args[3]
+ // remove the script filename
+ .replace( /[\w|\.|\-|_]*$/, '' )
+ // if given rel. path, prepend the curr dir
+ .replace( /^(?!\/)/, './' ),
+ spaceghost = require( scriptDir + 'spaceghost' ).create({
+ // script options here (can be overridden by CLI)
+ //verbose: true,
+ //logLevel: debug,
+ scriptDir: scriptDir
+ });
+
+} catch( error ){
+ console.debug( error );
+ phantom.exit( 1 );
+}
+spaceghost.start();
+
+
+// =================================================================== SET UP
+var utils = require( 'utils' );
+
+var email = spaceghost.user.getRandomEmail(),
+ password = '123456';
+if( spaceghost.fixtureData.testUser ){
+ email = spaceghost.fixtureData.testUser.email;
+ password = spaceghost.fixtureData.testUser.password;
+}
+spaceghost.user.loginOrRegisterUser( email, password );
+
+function hasKeys( object, keysArray ){
+ if( !utils.isObject( object ) ){ return false; }
+ for( var i=0; i<keysArray.length; i += 1 ){
+ if( !object.hasOwnProperty( keysArray[i] ) ){ return false; }
+ }
+ return true;
+}
+
+function countKeys( object ){
+ if( !utils.isObject( object ) ){ return 0; }
+ var count = 0;
+ for( var key in object ){
+ if( object.hasOwnProperty( key ) ){ count += 1; }
+ }
+ return count;
+}
+
+// =================================================================== TESTS
+spaceghost.thenOpen( spaceghost.baseUrl ).then( function(){
+
+ // ------------------------------------------------------------------------------------------- INDEX
+ this.test.comment( 'index should get a list of histories' );
+ var historyIndex = this.api.histories.index();
+ //this.debug( this.jsonStr( historyIndex ) );
+ this.test.assert( utils.isArray( historyIndex ), "index returned an array: length " + historyIndex.length );
+ this.test.assert( historyIndex.length >= 1, 'Has at least one history' );
+
+ var firstHistory = historyIndex[0];
+ this.test.assert( hasKeys( firstHistory, [ 'id', 'name', 'url' ] ), 'Has the proper keys' );
+ this.test.assert( this.api.isEncodedId( firstHistory.id ), 'Id appears well-formed' );
+
+
+ // ------------------------------------------------------------------------------------------- SHOW
+ this.test.comment( 'show should get a history details object' );
+ var historyShow = this.api.histories.show( firstHistory.id );
+ //this.debug( this.jsonStr( historyShow ) );
+ this.test.assert( hasKeys( historyShow, [
+ 'id', 'name', 'annotation', 'nice_size', 'contents_url',
+ 'state', 'state_details', 'state_ids' ]),
+ 'Has the proper keys' );
+
+ this.test.comment( 'a history details object should contain two objects named state_details and state_ids' );
+ var states = [
+ 'discarded', 'empty', 'error', 'failed_metadata', 'new',
+ 'ok', 'paused', 'queued', 'running', 'setting_metadata', 'upload' ],
+ state_details = historyShow.state_details,
+ state_ids = historyShow.state_ids;
+ this.test.assert( hasKeys( state_details, states ), 'state_details has the proper keys' );
+ this.test.assert( hasKeys( state_ids, states ), 'state_ids has the proper keys' );
+ var state_detailsAreNumbers = true,
+ state_idsAreArrays = true;
+ states.forEach( function( state ){
+ if( !utils.isArray( state_ids[ state ] ) ){ state_idsAreArrays = false; }
+ if( !utils.isNumber( state_details[ state ] ) ){ state_detailsAreNumbers = false; }
+ });
+ this.test.assert( state_idsAreArrays, 'state_ids values are arrays' );
+ this.test.assert( state_detailsAreNumbers, 'state_details values are numbers' );
+
+ this.test.comment( 'calling show with "most_recently_used" should return the first history' );
+ historyShow = this.api.histories.show( 'most_recently_used' );
+ //this.debug( this.jsonStr( historyShow ) );
+ this.test.assert( historyShow.id === firstHistory.id, 'Is the first history' );
+
+ this.test.comment( 'Should be able to combine calls' );
+ this.test.assert( this.api.histories.show( this.api.histories.index()[0].id ).id === firstHistory.id,
+ 'combining function calls works' );
+
+ // test server bad id protection
+ this.test.comment( 'A bad id to show should throw an error' );
+ this.assertRaises( function(){
+ this.api.histories.show( '1234123412341234' );
+ }, 'Error in history API at showing history detail: 400 Bad Request', 'Raises an exception' );
+
+
+ // ------------------------------------------------------------------------------------------- CREATE
+ this.test.comment( 'Calling create should create a new history and allow setting the name' );
+ var newHistoryName = 'Created History',
+ createdHistory = this.api.histories.create({ name: newHistoryName });
+ //this.debug( 'returned from create:\n' + this.jsonStr( createdHistory ) );
+ this.test.assert( createdHistory.name === newHistoryName,
+ "Name of created history (from create) is correct: " + createdHistory.name );
+
+ // check the index
+ var newFirstHistory = this.api.histories.index()[0];
+ //this.debug( 'newFirstHistory:\n' + this.jsonStr( newFirstHistory ) );
+ this.test.assert( newFirstHistory.name === newHistoryName,
+ "Name of last history (from index) is correct: " + newFirstHistory.name );
+ this.test.assert( newFirstHistory.id === createdHistory.id,
+ "Id of last history (from index) is correct: " + newFirstHistory.id );
+
+
+ // ------------------------------------------------------------------------------------------- DELETE
+ this.test.comment( 'calling delete should delete the given history and remove it from the standard index' );
+ var deletedHistory = this.api.histories.delete_( createdHistory.id );
+ //this.debug( 'returned from delete:\n' + this.jsonStr( deletedHistory ) );
+ this.test.assert( deletedHistory === 'OK',
+ "Deletion returned 'OK' - even though that's not a great, informative response: " + deletedHistory );
+
+ newFirstHistory = this.api.histories.index()[0];
+ //this.debug( 'newFirstHistory:\n' + this.jsonStr( newFirstHistory ) );
+ this.test.assert( newFirstHistory.id !== createdHistory.id,
+ "Id of last history (from index) DOES NOT appear: " + newFirstHistory.id );
+
+ this.test.comment( 'calling index with deleted=true should include the deleted history' );
+ newFirstHistory = this.api.histories.index( true )[0];
+ //this.debug( 'newFirstHistory:\n' + this.jsonStr( newFirstHistory ) );
+ this.test.assert( newFirstHistory.id === createdHistory.id,
+ "Id of last history (from index) DOES appear using index( deleted=true ): " + newFirstHistory.id );
+
+
+ // ------------------------------------------------------------------------------------------- UNDELETE
+ this.test.comment( 'calling undelete should undelete the given history and re-include it in index' );
+ var undeletedHistory = this.api.histories.undelete( createdHistory.id );
+ //this.debug( 'returned from undelete:\n' + this.jsonStr( undeletedHistory ) );
+ this.test.assert( undeletedHistory === 'OK',
+ "Undeletion returned 'OK' - even though that's not a great, informative response: " + undeletedHistory );
+
+ newFirstHistory = this.api.histories.index()[0];
+ //this.debug( 'newFirstHistory:\n' + this.jsonStr( newFirstHistory ) );
+ this.test.assert( newFirstHistory.id === createdHistory.id,
+ "Id of last history (from index) DOES appear after undeletion: " + newFirstHistory.id );
+
+
+ //TODO: show, deleted flag
+ //TODO: delete, purge flag
+ // ------------------------------------------------------------------------------------------- UPDATE
+ // ........................................................................................... idiot proofing
+ this.test.comment( 'updating to the current value should return no value (no change)' );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ var returned = this.api.histories.update( newFirstHistory.id, {
+ name : historyShow.name
+ });
+ this.test.assert( countKeys( returned ) === 0, "No changes returned: " + this.jsonStr( returned ) );
+
+ this.test.comment( 'updating using a nonsense key should fail with an error' );
+ var err = {};
+ try {
+ returned = this.api.histories.update( newFirstHistory.id, {
+ konamiCode : 'uuddlrlrba'
+ });
+ } catch( error ){
+ err = error;
+ //this.debug( this.jsonStr( err ) );
+ }
+ this.test.assert( !!err.message, "Error occurred: " + err.message );
+ this.test.assert( err.status === 400, "Error status is 400: " + err.status );
+
+ this.test.comment( 'updating by attempting to change type should cause an error' );
+ err = {};
+ try {
+ returned = this.api.histories.update( newFirstHistory.id, {
+ //name : false
+ deleted : 'sure why not'
+ });
+ } catch( error ){
+ err = error;
+ //this.debug( this.jsonStr( err ) );
+ }
+ this.test.assert( !!err.message, "Error occurred: " + err.message );
+ this.test.assert( err.status === 400, "Error status is 400: " + err.status );
+ //TODO??: other type checks?
+
+
+ // ........................................................................................... name
+ this.test.comment( 'update should allow changing the name' );
+ returned = this.api.histories.update( newFirstHistory.id, {
+ name : 'New name'
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.name === 'New name', "Name successfully set via update: " + historyShow.name );
+
+ this.test.comment( 'update should sanitize any new name' );
+ returned = this.api.histories.update( newFirstHistory.id, {
+ name : 'New name<script type="text/javascript" src="bler">alert("blah");</script>'
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.name === 'New name', "Update sanitized name: " + historyShow.name );
+
+ //NOTE!: this fails on sqlite3 (with default setup)
+ try {
+ this.test.comment( 'update should allow unicode in names' );
+ var unicodeName = '桜ゲノム';
+ returned = this.api.histories.update( newFirstHistory.id, {
+ name : unicodeName
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.name === unicodeName, "Update accepted unicode name: " + historyShow.name );
+ } catch( err ){
+ //this.debug( this.jsonStr( err ) );
+ if( ( err instanceof this.api.APIError )
+ && ( err.status === 500 )
+ && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
+ this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
+ }
+ }
+
+ this.test.comment( 'update should allow escaped quotations in names' );
+ var quotedName = '"Bler"';
+ returned = this.api.histories.update( newFirstHistory.id, {
+ name : quotedName
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.name === quotedName,
+ "Update accepted escaped quotations in name: " + historyShow.name );
+
+
+ // ........................................................................................... deleted
+ this.test.comment( 'update should allow changing the deleted flag' );
+ returned = this.api.histories.update( newFirstHistory.id, {
+ deleted: true
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.deleted === true, "Update set the deleted flag: " + historyShow.deleted );
+
+ this.test.comment( 'update should allow changing the deleted flag back' );
+ returned = this.api.histories.update( newFirstHistory.id, {
+ deleted: false
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.deleted === false, "Update set the deleted flag: " + historyShow.deleted );
+
+
+ // ........................................................................................... published
+ this.test.comment( 'update should allow changing the published flag' );
+ returned = this.api.histories.update( newFirstHistory.id, {
+ published: true
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.published === true, "Update set the published flag: " + historyShow.published );
+
+
+ // ........................................................................................... genome_build
+ this.test.comment( 'update should allow changing the genome_build' );
+ returned = this.api.histories.update( newFirstHistory.id, {
+ genome_build : 'hg18'
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.genome_build === 'hg18',
+ "genome_build successfully set via update: " + historyShow.genome_build );
+
+ this.test.comment( 'update should sanitize any genome_build' );
+ returned = this.api.histories.update( newFirstHistory.id, {
+ genome_build : 'hg18<script type="text/javascript" src="bler">alert("blah");</script>'
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.genome_build === 'hg18',
+ "Update sanitized genome_build: " + historyShow.genome_build );
+
+ this.test.comment( 'update should allow unicode in genome builds' );
+ var unicodeBuild = '桜12';
+ //NOTE!: this fails on sqlite3 (with default setup)
+ try {
+ returned = this.api.histories.update( newFirstHistory.id, {
+ genome_build : unicodeBuild
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.genome_build === unicodeBuild,
+ "Update accepted unicode genome_build: " + historyShow.name );
+ } catch( err ){
+ //this.debug( this.jsonStr( err ) );
+ if( ( err instanceof this.api.APIError )
+ && ( err.status === 500 )
+ && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
+ this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
+ }
+ }
+
+
+ // ........................................................................................... annotation
+ this.test.comment( 'update should allow changing the annotation' );
+ var newAnnotation = 'Here are some notes that I stole from the person next to me';
+ returned = this.api.histories.update( newFirstHistory.id, {
+ annotation : newAnnotation
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.annotation === newAnnotation,
+ "Annotation successfully set via update: " + historyShow.annotation );
+
+ this.test.comment( 'update should sanitize any new annotation' );
+ returned = this.api.histories.update( newFirstHistory.id, {
+ annotation : 'New annotation<script type="text/javascript" src="bler">alert("blah");</script>'
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.annotation === 'New annotation',
+ "Update sanitized annotation: " + historyShow.annotation );
+
+ //NOTE!: this fails on sqlite3 (with default setup)
+ try {
+ this.test.comment( 'update should allow unicode in annotations' );
+ var unicodeAnnotation = 'お願いは、それが落下させない';
+ returned = this.api.histories.update( newFirstHistory.id, {
+ annotation : unicodeAnnotation
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.annotation === unicodeAnnotation,
+ "Update accepted unicode annotation: " + historyShow.annotation );
+ } catch( err ){
+ //this.debug( this.jsonStr( err ) );
+ if( ( err instanceof this.api.APIError )
+ && ( err.status === 500 )
+ && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
+ this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
+ }
+ }
+
+ this.test.comment( 'update should allow escaped quotations in annotations' );
+ var quotedAnnotation = '"Bler"';
+ returned = this.api.histories.update( newFirstHistory.id, {
+ annotation : quotedAnnotation
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.annotation === quotedAnnotation,
+ "Update accepted escaped quotations in annotation: " + historyShow.annotation );
+
+
+ //this.debug( this.jsonStr( historyShow ) );
+});
+
+// ===================================================================
+spaceghost.run( function(){
+});
diff -r 7c59121055516595937b83d51eaf98b60723b622 -r fb28ceb83c379e1d792f622f9b2cbc8c3e050f37 test/casperjs/casperjs_runner.py
--- a/test/casperjs/casperjs_runner.py
+++ b/test/casperjs/casperjs_runner.py
@@ -361,6 +361,14 @@
self.run_js_script( 'hda-state-tests.js' )
+class Test_05_API( CasperJSTestCase ):
+ """Tests for API functionality and security.
+ """
+ def test_00_history_api( self ):
+ """Test history API.
+ """
+ self.run_js_script( 'api-history-tests.js' )
+
# ==================================================================== MAIN
if __name__ == '__main__':
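With Test_05_API registered above, the new browser test is picked up alongside the existing cases when the module is run directly (python test/casperjs/casperjs_runner.py), assuming the runner's unittest-style main and a Galaxy instance reachable by the tests; the Test_05 numbering keeps it ordered after the existing UI test cases.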
diff -r 7c59121055516595937b83d51eaf98b60723b622 -r fb28ceb83c379e1d792f622f9b2cbc8c3e050f37 test/casperjs/modules/api.js
--- a/test/casperjs/modules/api.js
+++ b/test/casperjs/modules/api.js
@@ -32,11 +32,13 @@
APIError.prototype = new Error();
APIError.prototype.constructor = APIError;
/** @class Thrown when the Galaxy API returns an error from a request */
-function APIError( msg ){
+function APIError( msg, status ){
Error.apply( this, arguments );
this.name = "APIError";
this.message = msg;
+ this.status = status;
}
+API.prototype.APIError = APIError;
exports.APIError = APIError;
/* ------------------------------------------------------------------- TODO:
@@ -65,7 +67,8 @@
if( resp.status !== 200 ){
// grrr... this doesn't lose the \n\r\t
- throw new APIError( resp.responseText.replace( /[\s\n\r\t]+/gm, ' ' ).replace( /"/, '' ) );
+ //throw new APIError( resp.responseText.replace( /[\s\n\r\t]+/gm, ' ' ).replace( /"/, '' ) );
+ throw new APIError( resp.responseText, resp.status );
}
return JSON.parse( resp.responseText );
};
@@ -130,7 +133,8 @@
show : 'api/histories/%s',
create : 'api/histories',
delete_ : 'api/histories/%s',
- undelete: 'api/histories/deleted/%s/undelete'
+ undelete: 'api/histories/deleted/%s/undelete',
+ update : 'api/histories/%s'
};
HistoriesAPI.prototype.index = function index( deleted ){
@@ -183,6 +187,20 @@
});
};
+HistoriesAPI.prototype.update = function update( id, payload ){
+ this.api.spaceghost.info( 'history.update: ' + id + ',' + this.api.spaceghost.jsonStr( payload ) );
+
+ // py.payload <-> ajax.data
+ id = this.api.ensureId( id );
+ payload = this.api.ensureObject( payload );
+ var url = utils.format( this.urlTpls.update, id );
+
+ return this.api._ajax( url, {
+ type : 'PUT',
+ data : payload
+ });
+};
+
// =================================================================== HDAS
var HDAAPI = function HDAAPI( api ){
@@ -201,7 +219,7 @@
};
HDAAPI.prototype.index = function index( historyId, ids ){
- this.api.spaceghost.info( 'history.index: ' + [ historyId, ids ] );
+ this.api.spaceghost.info( 'hda.index: ' + [ historyId, ids ] );
var data = {};
if( ids ){
ids = ( utils.isArray( ids ) )?( ids.join( ',' ) ):( ids );
@@ -214,7 +232,7 @@
};
HDAAPI.prototype.show = function show( historyId, id, deleted ){
- this.api.spaceghost.info( 'history.show: ' + [ id, (( deleted )?( 'w deleted' ):( '' )) ] );
+ this.api.spaceghost.info( 'hda.show: ' + [ id, (( deleted )?( 'w deleted' ):( '' )) ] );
id = ( id === 'most_recently_used' )?( id ):( this.api.ensureId( id ) );
deleted = deleted || false;
@@ -224,7 +242,7 @@
};
HDAAPI.prototype.create = function create( historyId, payload ){
- this.api.spaceghost.info( 'history.create: ' + this.api.spaceghost.jsonStr( payload ) );
+ this.api.spaceghost.info( 'hda.create: ' + this.api.spaceghost.jsonStr( payload ) );
// py.payload <-> ajax.data
payload = this.api.ensureObject( payload );
@@ -235,7 +253,8 @@
};
HDAAPI.prototype.update = function update( historyId, id, payload ){
- this.api.spaceghost.info( 'history.update: ' + this.api.spaceghost.jsonStr( payload ) );
+ this.api.spaceghost.info( 'hda.update: ' + historyId + ',' + id + ','
+ + this.api.spaceghost.jsonStr( payload ) );
// py.payload <-> ajax.data
historyId = this.api.ensureId( historyId );
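The update methods above are thin wrappers over Galaxy's REST endpoints, so the same history update can be exercised without the CasperJS harness. A minimal sketch using only the Python 2 stdlib — the host, encoded id, and API key are placeholders, not values from this commit:

    import json
    import urllib2

    # PUT api/histories/<encoded id> with a JSON body of editable fields; per the
    # commit message, the response contains only the fields that actually changed.
    url = 'http://localhost:8080/api/histories/ENCODED_ID?key=YOUR_API_KEY'
    request = urllib2.Request( url,
                               data=json.dumps( { 'name': 'New name', 'annotation': 'some notes' } ),
                               headers={ 'Content-Type': 'application/json' } )
    request.get_method = lambda: 'PUT'  # urllib2 has no native PUT support
    changed = json.loads( urllib2.urlopen( request ).read() )
    print changed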
diff -r 7c59121055516595937b83d51eaf98b60723b622 -r fb28ceb83c379e1d792f622f9b2cbc8c3e050f37 test/casperjs/spaceghost.js
--- a/test/casperjs/spaceghost.js
+++ b/test/casperjs/spaceghost.js
@@ -545,9 +545,10 @@
* NOTE: uses string indexOf - doesn't play well with urls like [ 'history', 'history/bler' ]
* @param {String} urlToWaitFor the url to wait for (rel. to spaceghost.baseUrl)
* @param {Function} then the function to call after the nav request
+ * @param {Function} timeoutFn the function to call on timeout (optional)
*/
-SpaceGhost.prototype.waitForNavigation = function waitForNavigation( urlToWaitFor, then ){
- return this.waitForMultipleNavigation( [ urlToWaitFor ], then );
+SpaceGhost.prototype.waitForNavigation = function waitForNavigation( urlToWaitFor, then, timeoutFn ){
+ return this.waitForMultipleNavigation( [ urlToWaitFor ], then, timeoutFn );
};
/** Wait for a multiple navigation requests then call a function.
@@ -555,8 +556,9 @@
* NOTE: uses string indexOf - doesn't play well with urls like [ 'history', 'history/bler' ]
* @param {String[]} urlsToWaitFor the relative urls to wait for
* @param {Function} then the function to call after the nav request
+ * @param {Function} timeoutFn the function to call on timeout (optional)
*/
-SpaceGhost.prototype.waitForMultipleNavigation = function waitForMultipleNavigation( urlsToWaitFor, then ){
+SpaceGhost.prototype.waitForMultipleNavigation = function waitForMultipleNavigation( urlsToWaitFor, then, timeoutFn ){
this.info( 'waiting for navigation: ' + this.jsonStr( urlsToWaitFor ) );
function urlMatches( urlToMatch, url ){
return ( url.indexOf( spaceghost.baseUrl + '/' + urlToMatch ) !== -1 );
@@ -586,6 +588,9 @@
function callThen(){
if( utils.isFunction( then ) ){ then.call( this ); }
},
+ function timeout(){
+ if( utils.isFunction( timeoutFn ) ){ timeoutFn.call( this ); }
+ },
this.options.waitTimeout * urlsToWaitFor.length
);
return this;
@@ -731,8 +736,9 @@
/** Casper has an (undocumented?) skip test feature. This is a convenience wrapper for that.
*/
-SpaceGhost.prototype.skipTest = function skipTest(){
- throw this.test.SKIP_MESSAGE;
+SpaceGhost.prototype.skipTest = function skipTest( msg ){
+ this.warn( 'Skipping test. ' + msg );
+ //throw this.test.SKIP_MESSAGE;
};
/** Test helper - within frame, assert selector, and assert text in selector
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: inithello: Functional tests to verify correct setting of metadata when dependency definitions are deleted.
by commits-noreply@bitbucket.org 23 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7c5912105551/
Changeset: 7c5912105551
User: inithello
Date: 2013-04-23 20:46:53
Summary: Functional tests to verify correct setting of metadata when dependency definitions are deleted.
Affected #: 1 file
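For orientation before the diff: the tests below upload and delete two kinds of small XML definition files. As a rough sketch — the toolshed URL, owner, and changeset values here are placeholders, not taken from the commit — the helper-generated files look like:

    # Simple repository dependency (repository_dependencies.xml), as generated by
    # create_repository_dependency() for convert_chars_0440 -> column_maker_0440:
    simple_dependency = '''<?xml version="1.0"?>
    <repositories description="Requires column_maker_0440">
        <repository toolshed="http://localhost:9009" name="column_maker_0440" owner="user1" changeset_revision="0123456789ab" />
    </repositories>'''

    # Complex repository dependency (tool_dependencies.xml), tying the bwa 0.5.9
    # package requirement of bwa_base_0440 to bwa_package_0440:
    complex_dependency = '''<?xml version="1.0"?>
    <tool_dependency>
        <package name="bwa" version="0.5.9">
            <repository toolshed="http://localhost:9009" name="bwa_package_0440" owner="user1" changeset_revision="0123456789ab" />
        </package>
    </tool_dependency>'''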
diff -r 1e3d92ec22b2794bfa2294edb74695c580791406 -r 7c59121055516595937b83d51eaf98b60723b622 test/tool_shed/functional/test_0440_deleting_dependency_definitions.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0440_deleting_dependency_definitions.py
@@ -0,0 +1,365 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+column_repository_name = 'column_maker_0440'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0440'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+bwa_package_repository_name = 'bwa_package_0440'
+bwa_package_repository_description = "BWA Package Repository"
+bwa_package_repository_long_description = "BWA repository with a package tool dependency defined for BWA 0.5.9."
+
+bwa_base_repository_name = 'bwa_base_0440'
+bwa_base_repository_description = "BWA Base"
+bwa_base_repository_long_description = "NT space mapping with BWA"
+
+bwa_tool_dependency_repository_name = 'bwa_tool_dependency_0440'
+bwa_tool_dependency_repository_description = "BWA Base"
+bwa_tool_dependency_repository_long_description = "NT space mapping with BWA"
+
+'''
+Simple repository dependencies:
+1. Create and populate column_maker_0440 so that it has an installable revision 0.
+2. Create and populate convert_chars_0440 so that it has an installable revision 0.
+3. Add a valid simple repository_dependencies.xml to convert_chars_0440 that points to the installable revision of column_maker_0440.
+4. Make sure the installable revision of convert_chars_0440 is now revision 1 instead of revision 0.
+5. Delete repository_dependencies.xml from convert_chars_0440, and make sure convert_chars_0440 now has two installable revisions: 1 and 2.
+
+Complex repository dependencies:
+1. Create and populate bwa_package_0440 so that it has a valid orphan tool dependency definition and an installable revision 0.
+2. Create and populate bwa_base_0440 so that it has an installable revision 0.
+3. Add a valid complex repository dependency tool_dependencies.xml to bwa_base_0440 that points to the installable revision 0 of bwa_package_0440.
+4. Make sure that bwa_base_0440 installable revision is now revision 1 instead of revision 0.
+5. Delete tool_dependencies.xml from bwa_base_0440, and make sure bwa_base_0440 now has two installable revisions: 1 and 2.
+
+Tool dependencies:
+1. Create and populate bwa_tool_dependency_0440 so that it has a valid orphan tool dependency definition and an installable revision 0.
+2. Delete tool_dependencies.xml from bwa_tool_dependency_0440, and make sure that bwa_tool_dependency_0440 still has
+ a single installable revision 0.
+3. Add the same tool_dependencies.xml file to bwa_tool_dependency_0440, and make sure that bwa_tool_dependency_0440
+ still has a single installable revision 0.
+'''
+
+
+class TestDeletedDependencies( ShedTwillTestCase ):
+ '''Test metadata setting when dependency definitions are deleted.'''
+
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ """
+ Create all the user accounts that are needed for this test script to run independently of other tests.
+ Previously created accounts will not be re-created.
+ """
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+
+ def test_0005_create_column_maker_repository( self ):
+ '''Create and populate a repository named column_maker_0440.
+
+ We are at simple repository dependencies, step 1 - Create and populate column_maker_0440 so that it has an installable revision 0.
+ '''
+ category = self.create_category( name='Test 0440 Deleted Dependency Definitions',
+ description='Description of Deleted Dependency Definitions category for test 0440' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ "Repository 'column_maker_0440' has been created" ]
+ repository = self.get_or_create_repository( name=column_repository_name,
+ description=column_repository_description,
+ long_description=column_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository,
+ filename='column_maker/column_maker.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded column maker tool tarball.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0010_create_convert_chars_repository( self ):
+ '''Create and populate a repository named convert_chars_0440.
+
+ We are at simple repository dependencies, step 2 - Create and populate convert_chars_0440 so that it has an installable revision 0.
+ '''
+ category = test_db_util.get_category_by_name( 'Test 0440 Deleted Dependency Definitions' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ "Repository 'convert_chars_0440' has been created" ]
+ repository = self.get_or_create_repository( name=convert_repository_name,
+ description=convert_repository_description,
+ long_description=convert_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository,
+ filename='convert_chars/convert_chars.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded convert chars tool tarball.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0015_create_dependency_on_convert_chars( self ):
+ '''Create a dependency definition file that specifies column_maker_0440 and upload it to convert_chars_0440.
+
+ We are at simple repository dependencies, step 3 - Add a valid simple repository_dependencies.xml to
+ convert_chars_0440 that points to the installable revision of column_maker_0440.
+ '''
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ dependency_xml_path = self.generate_temp_path( 'test_0440', additional_paths=[ 'dependencies' ] )
+ column_tuple = ( self.url, column_repository.name, column_repository.user.username, self.get_repository_tip( column_repository ) )
+ # After this, convert_chars_0440 should depend on column_maker_0440.
+ self.create_repository_dependency( repository=convert_repository,
+ repository_tuples=[ column_tuple ],
+ filepath=dependency_xml_path,
+ prior_installation_required=True )
+ self.check_repository_dependency( convert_repository, column_repository )
+
+ def test_0020_verify_dependency_metadata( self ):
+ '''Verify that uploading the dependency moved metadata to the tip.
+
+ We are at simple repository dependencies, step 4 - Make sure the installable revision of convert_chars_0440 is now
+ revision 1 (the tip) instead of revision 0.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ tip = self.get_repository_tip( repository )
+ metadata_record = self.get_repository_metadata_by_changeset_revision( repository, tip )
+ # Make sure that the new tip is now downloadable, and that there are no other downloadable revisions.
+ assert metadata_record.downloadable, 'Tip is not downloadable.'
+ assert len( repository.downloadable_revisions ) == 1, 'Repository %s has %d downloadable revisions, expected 1.' % \
+ ( repository.name, len( repository.downloadable_revisions ) )
+
+ def test_0025_delete_repository_dependency( self ):
+ '''Delete the repository_dependencies.xml from convert_chars_0440.
+
+ We are at simple repository dependencies, step 5 - Delete repository_dependencies.xml from convert_chars_0440,
+ and make sure convert_chars_0440 now has two installable revisions: 1 and 2.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ # Record the current tip, so we can verify that it's still a downloadable revision after repository_dependencies.xml
+ # is deleted and a new downloadable revision is created.
+ old_changeset_revision = self.get_repository_tip( repository )
+ self.delete_files_from_repository( repository, filenames=[ 'repository_dependencies.xml' ] )
+ new_changeset_revision = self.get_repository_tip( repository )
+ # Check that the old changeset revision is still downloadable.
+ metadata_record = self.get_repository_metadata_by_changeset_revision( repository, old_changeset_revision )
+ assert metadata_record.downloadable, 'The revision of %s that contains repository_dependencies.xml is no longer downloadable.' % \
+ repository.name
+ # Check that the new tip is also downloadable.
+ metadata_record = self.get_repository_metadata_by_changeset_revision( repository, new_changeset_revision )
+ assert metadata_record.downloadable, 'The revision of %s that does not contain repository_dependencies.xml is not downloadable.' % \
+ repository.name
+ # Explicitly reload the repository instance from the database, to avoid potential caching issues.
+ test_db_util.refresh( repository )
+ # Verify that there are only two downloadable revisions.
+ assert len( repository.downloadable_revisions ) == 2, 'Repository %s has %d downloadable revisions, expected 2.' % \
+ ( repository.name, len( repository.downloadable_revisions ) )
+
+ def test_0030_create_bwa_package_repository( self ):
+ '''Create and populate the bwa_package_0440 repository.
+
+ We are at complex repository dependencies, step 1 - Create and populate bwa_package_0440 so that it has a valid orphan
+ tool dependency definition and an installable revision 0.
+ '''
+ category = test_db_util.get_category_by_name( 'Test 0440 Deleted Dependency Definitions' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ "Repository 'bwa_package_0440' has been created" ]
+ repository = self.get_or_create_repository( name=bwa_package_repository_name,
+ description=bwa_package_repository_description,
+ long_description=bwa_package_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository,
+ filename='bwa/complex/tool_dependencies.xml',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded package tool dependency definition.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0035_create_bwa_base_repository( self ):
+ '''Create and populate the bwa_base_0440 repository.
+
+ We are at complex repository dependencies, step 2 - Create and populate bwa_base_0440 so that it has an installable revision 0.
+ This repository should contain a tool with a defined dependency that will be satisfied by the tool dependency defined in bwa_package_0440.
+ '''
+ category = test_db_util.get_category_by_name( 'Test 0440 Deleted Dependency Definitions' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ "Repository 'bwa_base_0440' has been created" ]
+ repository = self.get_or_create_repository( name=bwa_base_repository_name,
+ description=bwa_base_repository_description,
+ long_description=bwa_base_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository,
+ filename='bwa/complex/bwa_base.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded BWA nucleotide space mapping tool tarball.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0040_create_dependency_on_bwa_package_repository( self ):
+ '''Create a complex repository dependency on bwa_package_0440 and upload it to bwa_base_0440.
+
+ We are at complex repository dependencies, step 3 - Add a valid complex repository dependency tool_dependencies.xml to
+ bwa_base_0440 that points to the installable revision 0 of bwa_package_0440.
+ '''
+ bwa_package_repository = test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+ bwa_base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ dependency_path = self.generate_temp_path( 'test_0440', additional_paths=[ 'complex' ] )
+ changeset_revision = self.get_repository_tip( bwa_package_repository )
+ bwa_tuple = ( self.url, bwa_package_repository.name, bwa_package_repository.user.username, changeset_revision )
+ self.create_repository_dependency( repository=bwa_base_repository,
+ repository_tuples=[ bwa_tuple ],
+ filepath=dependency_path,
+ prior_installation_required=True,
+ complex=True,
+ package='bwa',
+ version='0.5.9' )
+
+ def test_0045_verify_dependency_metadata( self ):
+ '''Verify that uploading the dependency moved metadata to the tip.
+
+ We are at complex repository dependencies, step 4 - Make sure that bwa_base_0440 installable revision is now revision 1
+ instead of revision 0.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tip = self.get_repository_tip( repository )
+ metadata_record = self.get_repository_metadata_by_changeset_revision( repository, tip )
+ # Make sure that the new tip is now downloadable, and that there are no other downloadable revisions.
+ assert metadata_record.downloadable, 'Tip is not downloadable.'
+ assert len( repository.downloadable_revisions ) == 1, 'Repository %s has %d downloadable revisions, expected 1.' % \
+ ( repository.name, len( repository.downloadable_revisions ) )
+
+ def test_0050_delete_complex_repository_dependency( self ):
+ '''Delete the tool_dependencies.xml from bwa_base_0440.
+
+ We are at complex repository dependencies, step 5 - Delete tool_dependencies.xml from bwa_base_0440,
+ and make sure bwa_base_0440 now has two installable revisions: 1 and 2.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ # Record the current tip, so we can verify that it's still a downloadable revision after tool_dependencies.xml
+ # is deleted and a new downloadable revision is created.
+ old_changeset_revision = self.get_repository_tip( repository )
+ self.delete_files_from_repository( repository, filenames=[ 'tool_dependencies.xml' ] )
+ new_changeset_revision = self.get_repository_tip( repository )
+ # Check that the old changeset revision is still downloadable.
+ metadata_record = self.get_repository_metadata_by_changeset_revision( repository, old_changeset_revision )
+ assert metadata_record.downloadable, 'The revision of %s that contains tool_dependencies.xml is no longer downloadable.' % \
+ repository.name
+ # Check that the new tip is also downloadable.
+ metadata_record = self.get_repository_metadata_by_changeset_revision( repository, new_changeset_revision )
+ assert metadata_record.downloadable, 'The revision of %s that does not contain tool_dependencies.xml is not downloadable.' % \
+ repository.name
+ # Verify that there are only two downloadable revisions.
+ assert len( repository.downloadable_revisions ) == 2, 'Repository %s has %d downloadable revisions, expected 2.' % \
+ ( repository.name, len( repository.downloadable_revisions ) )
+
+ def test_0055_create_bwa_tool_dependency_repository( self ):
+ '''Create and populate the bwa_tool_dependency_0440 repository.
+
+ We are at tool dependencies, step 1 - Create and populate bwa_tool_dependency_0440 so that it has a valid orphan tool
+ dependency definition and an installable revision 0.
+ '''
+ category = test_db_util.get_category_by_name( 'Test 0440 Deleted Dependency Definitions' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ "Repository 'bwa_tool_dependency_0440' has been created" ]
+ repository = self.get_or_create_repository( name=bwa_tool_dependency_repository_name,
+ description=bwa_tool_dependency_repository_description,
+ long_description=bwa_tool_dependency_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository,
+ filename='bwa/complex/tool_dependencies.xml',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded package tool dependency definition.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0060_delete_bwa_tool_dependency_definition( self ):
+ '''Delete the tool_dependencies.xml file from bwa_tool_dependency_0440.
+
+ We are at tool dependencies, step 2 - Delete tool_dependencies.xml from bwa_tool_dependency_0440.
+ Make sure bwa_tool_dependency_0440 still has a downloadable changeset revision at the old tip.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_dependency_repository_name, common.test_user_1_name )
+ # Record the current tip, so we can verify that it's still a downloadable revision after tool_dependencies.xml
+ # is deleted and the new tip is left without a metadata revision.
+ old_changeset_revision = self.get_repository_tip( repository )
+ self.delete_files_from_repository( repository, filenames=[ 'tool_dependencies.xml' ] )
+ new_changeset_revision = self.get_repository_tip( repository )
+ # Check that the old changeset revision is still downloadable.
+ metadata_record = self.get_repository_metadata_by_changeset_revision( repository, old_changeset_revision )
+ assert metadata_record.downloadable, 'The revision of %s that contains tool_dependencies.xml is no longer downloadable.' % \
+ repository.name
+ # Check that the new tip does not have a metadata revision.
+ metadata_record = self.get_repository_metadata_by_changeset_revision( repository, new_changeset_revision )
+ # If a changeset revision does not have metadata, the above method will return None.
+ assert metadata_record is None, 'The tip revision of %s should not have metadata, but metadata was found.' % repository.name
+ # Verify that the new changeset revision is not downloadable.
+ assert len( repository.downloadable_revisions ) == 1, 'Repository %s has %d downloadable revisions, expected 1.' % \
+ ( repository.name, len( repository.downloadable_revisions ) )
+
+ def test_0065_reupload_bwa_tool_dependency_definition( self ):
+ '''Reupload the tool_dependencies.xml file to bwa_tool_dependency_0440.
+
+ We are at tool dependencies, step 3 - Add the same tool_dependencies.xml file to bwa_tool_dependency_0440, and make sure
+ that bwa_tool_dependency_0440 still has a single installable revision 0.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_dependency_repository_name, common.test_user_1_name )
+ # Record the current tip, so we can verify that it's still not a downloadable revision after tool_dependencies.xml
+ # is re-uploaded and a new downloadable revision is created.
+ old_changeset_revision = self.get_repository_tip( repository )
+ self.upload_file( repository,
+ filename='bwa/complex/tool_dependencies.xml',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded package tool dependency definition.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+ new_changeset_revision = self.get_repository_tip( repository )
+ # Check that the old changeset revision still has no metadata record and is not downloadable.
+ metadata_record = self.get_repository_metadata_by_changeset_revision( repository, old_changeset_revision )
+ assert metadata_record is None, 'The revision of %s that does not contain tool_dependencies.xml should not be downloadable, but is.' % \
+ repository.name
+ # Check that the new tip is also downloadable.
+ metadata_record = self.get_repository_metadata_by_changeset_revision( repository, new_changeset_revision )
+ assert metadata_record.downloadable, 'The revision of %s that contains tool_dependencies.xml is not downloadable.' % \
+ repository.name
+ # Verify that there is still only one downloadable revision.
+ assert len( repository.downloadable_revisions ) == 1, 'Repository %s has %d downloadable revisions, expected 1.' % \
+ ( repository.name, len( repository.downloadable_revisions ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dan: Have data source tools display the provided 'name' parameter as the initial dataset name.
by commits-noreply@bitbucket.org 22 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1e3d92ec22b2/
Changeset: 1e3d92ec22b2
User: dan
Date: 2013-04-22 22:58:49
Summary: Have data source tools display the provided 'name' parameter as the initial dataset name.
Affected #: 3 files
diff -r ea0b7ca55aec1552718132227d80b0ae82ca2913 -r 1e3d92ec22b2794bfa2294edb74695c580791406 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -36,6 +36,7 @@
from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper
from galaxy.jobs import ParallelismInfo
from galaxy.tools.actions import DefaultToolAction
+from galaxy.tools.actions.data_source import DataSourceToolAction
from galaxy.tools.actions.data_manager import DataManagerToolAction
from galaxy.tools.deps import DependencyManager
from galaxy.tools.parameters import check_param, params_from_strings, params_to_strings
@@ -3028,6 +3029,7 @@
allow the user to query and extract data from another web site.
"""
tool_type = 'data_source'
+ default_tool_action = DataSourceToolAction
def _build_GALAXY_URL_parameter( self ):
return ToolParameter.build( self, ElementTree.XML( '<param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=%s" />' % self.id ) )
diff -r ea0b7ca55aec1552718132227d80b0ae82ca2913 -r 1e3d92ec22b2794bfa2294edb74695c580791406 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -342,9 +342,10 @@
params['on_string'] = on_text
data.name = fill_template( output.label, context=params )
else:
- data.name = tool.name
- if on_text:
- data.name += ( " on " + on_text )
+ if params is None:
+ params = make_dict_copy( incoming )
+ wrap_values( tool.inputs, params, skip_missing_values = not tool.check_values )
+ data.name = self._get_default_data_name( data, tool, on_text=on_text, trans=trans, incoming=incoming, history=history, params=params, job_params=job_params )
# Store output
out_data[ name ] = data
if output.actions:
@@ -430,3 +431,9 @@
trans.app.job_queue.put( job.id, job.tool_id )
trans.log_event( "Added job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
return job, out_data
+
+ def _get_default_data_name( self, dataset, tool, on_text=None, trans=None, incoming=None, history=None, params=None, job_params=None, **kwd ):
+ name = tool.name
+ if on_text:
+ name += ( " on " + on_text )
+ return name
diff -r ea0b7ca55aec1552718132227d80b0ae82ca2913 -r 1e3d92ec22b2794bfa2294edb74695c580791406 lib/galaxy/tools/actions/data_source.py
--- /dev/null
+++ b/lib/galaxy/tools/actions/data_source.py
@@ -0,0 +1,12 @@
+from __init__ import DefaultToolAction
+
+import logging
+log = logging.getLogger( __name__ )
+
+class DataSourceToolAction( DefaultToolAction ):
+ """Tool action used for Data Source Tools"""
+
+ def _get_default_data_name( self, dataset, tool, on_text=None, trans=None, incoming=None, history=None, params=None, job_params=None, **kwd ):
+ if incoming and 'name' in incoming:
+ return incoming[ 'name' ]
+ return super( DataSourceToolAction, self )._get_default_data_name( dataset, tool, on_text=on_text, trans=trans, incoming=incoming, history=history, params=params, job_params=job_params )
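The practical effect of the override, sketched with invented values: a data source site POSTs parameters back to Galaxy, and when those include 'name', the new dataset now starts out with that label instead of the generic tool name.

    # Hypothetical parameters posted back by a remote data source (values invented):
    incoming = { 'URL': 'http://example.org/fetch?id=42', 'name': 'UCSC Main on Human: knownGene' }
    # DefaultToolAction._get_default_data_name would return the tool name (plus any
    # ' on <inputs>' text); DataSourceToolAction returns incoming[ 'name' ], so the
    # history item is created as 'UCSC Main on Human: knownGene'.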
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Raise exceptions when tool dependency definition <install> and <set_environment> tags define an unsupported version attribute.
by commits-noreply@bitbucket.org 22 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ea0b7ca55aec/
Changeset: ea0b7ca55aec
User: greg
Date: 2013-04-22 20:51:48
Summary: Raise exceptions when tool dependency definition <install> and <set_environment> tags define an unsupported version attribute.
Affected #: 1 file
diff -r 42632cc2a166e8933fe29496a6087309769b8987 -r ea0b7ca55aec1552718132227d80b0ae82ca2913 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -151,6 +151,8 @@
action_dict[ env_elem.tag ] = env_var_dicts
actions.append( ( action_type, action_dict ) )
return tool_dependency, actions
+ else:
+ raise NotImplementedError( 'Only install version 1.0 is currently supported (i.e., change your tag to be <install version="1.0">).' )
return None, actions
def install_and_build_package_via_fabric( app, tool_dependency, actions_dict ):
@@ -299,6 +301,8 @@
sa_session.refresh( tool_dependency )
if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
print package_name, 'version', package_version, 'installed in', install_dir
+ else:
+ raise NotImplementedError( 'Only install version 1.0 is currently supported (i.e., change your tag to be <install version="1.0">).' )
elif package_elem.tag == 'readme':
# Nothing to be done.
continue
@@ -550,6 +554,8 @@
sa_session.add( tool_dependency )
sa_session.flush()
print 'Environment variable ', env_var_name, 'set in', install_dir
+ else:
+ raise NotImplementedError( 'Only set_environment version 1.0 is currently supported (i.e., change your tag to be <set_environment version="1.0">).' )
def strip_path( fpath ):
if not fpath:
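After this change only version="1.0" is accepted on the <install> and <set_environment> tags; any other value raises the NotImplementedError above instead of being silently ignored. A conforming tool_dependencies.xml, sketched for illustration (package names, actions, and variables are placeholders):

    conforming = '''<?xml version="1.0"?>
    <tool_dependency>
        <package name="bwa" version="0.5.9">
            <install version="1.0">
                <actions>...</actions>
            </install>
        </package>
        <set_environment version="1.0">
            <environment_variable name="EXAMPLE_HOME" action="set_to">$INSTALL_DIR</environment_variable>
        </set_environment>
    </tool_dependency>'''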
Repository URL: https://bitbucket.org/galaxy/galaxy-central/