galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
October 2013
- 1 participants
- 226 discussions
commit/galaxy-central: greg: Enhance the Tool Shed API feature supporting resetting metadata on certain repositories to allow for individual repositories per request.
by commits-noreply@bitbucket.org 16 Oct '13
by commits-noreply@bitbucket.org 16 Oct '13
16 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b681b2aeddbc/
Changeset: b681b2aeddbc
User: greg
Date: 2013-10-16 22:29:00
Summary: Enhance the Tool Shed API feature supporting resetting metadata on certain repositories to allow for individual repositories per request.
Affected #: 3 files
diff -r 2a4f4be3c377de638ee280b7a3ae83c62af6038a -r b681b2aeddbc52c3d953abcb431688df6fb8a005 lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -177,6 +177,42 @@
return message
@web.expose_api
+ def repository_ids_for_setting_metadata( self, trans, my_writable=False, **kwd ):
+ """
+ GET /api/get_repository_ids_for_setting_metadata
+
+ Displays a collection (list) of repository ids ordered for setting metadata.
+
+ :param key: the API key of the Tool Shed user.
+ :param my_writable (optional): if the API key is associated with an admin user in the Tool Shed, setting this param value
+ to True will restrict resetting metadata to only repositories that are writable by the user
+ in addition to those repositories of type tool_dependency_definition. This param is ignored
+ if the current user is not an admin user, in which case this same restriction is automatic.
+ """
+ try:
+ if trans.user_is_admin():
+ my_writable = util.asbool( my_writable )
+ else:
+ my_writable = True
+ handled_repository_ids = []
+ repository_ids = []
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
+ # Make sure repositories of type tool_dependency_definition are first in the list.
+ for repository in query:
+ if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ repository_ids.append( trans.security.encode_id( repository.id ) )
+ # Now add all remaining repositories to the list.
+ for repository in query:
+ if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ repository_ids.append( trans.security.encode_id( repository.id ) )
+ return repository_ids
+ except Exception, e:
+ message = "Error in the Tool Shed repositories API in repository_ids_for_setting_metadata: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+
+ @web.expose_api
def reset_metadata_on_repositories( self, trans, payload, **kwd ):
"""
PUT /api/repositories/reset_metadata_on_repositories
@@ -239,6 +275,48 @@
trans.response.status = 500
return message
+ @web.expose_api
+ def reset_metadata_on_repository( self, trans, payload, **kwd ):
+ """
+ PUT /api/repositories/reset_metadata_on_repository
+
+ Resets all metadata on a specified repository in the Tool Shed.
+
+ :param key: the API key of the Tool Shed user.
+
+ The following parameters must be included in the payload.
+ :param repository_id: the encoded id of the repository on which metadata is to be reset.
+ """
+ def handle_repository( trans, start_time, repository ):
+ results = dict( start_time=start_time,
+ repository_status=[] )
+ try:
+ invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans,
+ trans.security.encode_id( repository.id ) )
+ if invalid_file_tups:
+ message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
+ else:
+ message = "Successfully reset metadata on repository %s" % str( repository.name )
+ except Exception, e:
+ message = "Error resetting metadata on repository %s: %s" % ( str( repository.name ), str( e ) )
+
+ results[ 'repository_status' ].append( message )
+ return results
+ try:
+ repository_id = payload.get( 'repository_id', None )
+ if repository_id is not None:
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results = handle_repository( trans, start_time, repository )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
+ except Exception, e:
+ message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+
@web.expose_api_anonymous
def show( self, trans, id, **kwd ):
"""
diff -r 2a4f4be3c377de638ee280b7a3ae83c62af6038a -r b681b2aeddbc52c3d953abcb431688df6fb8a005 lib/galaxy/webapps/tool_shed/buildapp.py
--- a/lib/galaxy/webapps/tool_shed/buildapp.py
+++ b/lib/galaxy/webapps/tool_shed/buildapp.py
@@ -85,7 +85,9 @@
controller='repositories',
collection={ 'get_repository_revision_install_info' : 'GET',
'get_ordered_installable_revisions' : 'GET',
- 'reset_metadata_on_repositories' : 'POST' },
+ 'repository_ids_for_setting_metadata' : 'GET',
+ 'reset_metadata_on_repositories' : 'POST',
+ 'reset_metadata_on_repository' : 'POST' },
name_prefix='repository_',
path_prefix='/api',
parent_resources=dict( member_name='repository', collection_name='repositories' ) )
diff -r 2a4f4be3c377de638ee280b7a3ae83c62af6038a -r b681b2aeddbc52c3d953abcb431688df6fb8a005 lib/tool_shed/scripts/api/reset_metadata_on_repositories.py
--- a/lib/tool_shed/scripts/api/reset_metadata_on_repositories.py
+++ b/lib/tool_shed/scripts/api/reset_metadata_on_repositories.py
@@ -16,20 +16,37 @@
import os
import sys
sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import get
from common import submit
+def string_as_bool( string ):
+ if str( string ).lower() in [ 'true' ]:
+ return True
+ else:
+ return False
+
def main( options ):
api_key = options.api
my_writable = options.my_writable
+ one_per_request = options.one_per_request
base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
- data = dict( my_writable=my_writable )
- url = '%s/api/repositories/reset_metadata_on_repositories' % base_tool_shed_url
- submit( url, data, options.api )
+ if string_as_bool( one_per_request ):
+ url = '%s/api/repositories/repository_ids_for_setting_metadata?key=%s&my_writable=%s' % ( base_tool_shed_url, api_key, str( my_writable ) )
+ repository_ids = get( url, api_key )
+ for repository_id in repository_ids:
+ data = dict( repository_id=repository_id )
+ url = '%s/api/repositories/reset_metadata_on_repository' % base_tool_shed_url
+ submit( url, data, options.api )
+ else:
+ data = dict( my_writable=my_writable )
+ url = '%s/api/repositories/reset_metadata_on_repositories' % base_tool_shed_url
+ submit( url, data, options.api )
if __name__ == '__main__':
parser = argparse.ArgumentParser( description='Reset metadata on certain repositories in the Tool Shed via the Tool Shed API.' )
parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
parser.add_argument( "-m", "--my_writable", dest="my_writable", required=False, default='False', help="Restrict to my writable repositories" )
+ parser.add_argument( "-o", "--one_per_request", dest="one_per_request", required=False, default='False', help="One repository per request" )
parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
options = parser.parse_args()
main( options )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jmchilton: Allow user field (__user_email__, __user_name__, etc..) in hidden fields as outlined in PR 206, but do so in a way that centralizes some related logic (i.e. logic used by Cheetah in job templates and used by dynamic filter options).
by commits-noreply@bitbucket.org 16 Oct '13
by commits-noreply@bitbucket.org 16 Oct '13
16 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2a4f4be3c377/
Changeset: 2a4f4be3c377
User: jmchilton
Date: 2013-09-19 21:46:45
Summary: Allow user field (__user_email__, __user_name__, etc..) in hidden fields as outlined in PR 206, but do so in a way that centralizes some related logic (i.e. logic used by Cheetah in job templates and used by dynamic filter options).
Affected #: 4 files
diff -r 64f283100f960c1ae7645fae17394334b1b7f349 -r 2a4f4be3c377de638ee280b7a3ae83c62af6038a lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -680,20 +680,8 @@
out_data[ "output_file" ] = FakeDatasetAssociation( dataset=special.dataset )
# These can be passed on the command line if wanted as $__user_*__
- if job.history and job.history.user:
- user = job.history.user
- user_id = '%d' % job.history.user.id
- user_email = str(job.history.user.email)
- user_name = str(job.history.user.username)
- else:
- user = None
- user_id = 'Anonymous'
- user_email = 'Anonymous'
- user_name = 'Anonymous'
- incoming['__user__'] = user
- incoming['__user_id__'] = incoming['userId'] = user_id
- incoming['__user_email__'] = incoming['userEmail'] = user_email
- incoming['__user_name__'] = user_name
+ incoming.update( model.User.user_template_environment( job.history and job.history.user ) )
+
# Build params, done before hook so hook can use
param_dict = self.tool.build_param_dict( incoming,
inp_data, out_data,
diff -r 64f283100f960c1ae7645fae17394334b1b7f349 -r 2a4f4be3c377de638ee280b7a3ae83c62af6038a lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -18,6 +18,7 @@
import simplejson
import socket
import time
+from string import Template
import galaxy.datatypes
import galaxy.datatypes.registry
@@ -159,6 +160,47 @@
total += hda.dataset.get_total_size()
return total
+ @staticmethod
+ def user_template_environment( user ):
+ """
+
+ >>> env = User.user_template_environment(None)
+ >>> env['__user_email__']
+ 'Anonymous'
+ >>> env['__user_id__']
+ 'Anonymous'
+ >>> user = User('foo@example.com')
+ >>> user.id = 6
+ >>> user.username = 'foo2'
+ >>> env = User.user_template_environment(user)
+ >>> env['__user_id__']
+ '6'
+ >>> env['__user_name__']
+ 'foo2'
+ """
+ if user:
+ user_id = '%d' % user.id
+ user_email = str( user.email )
+ user_name = str( user.username )
+ else:
+ user = None
+ user_id = 'Anonymous'
+ user_email = 'Anonymous'
+ user_name = 'Anonymous'
+ environment = {}
+ environment[ '__user__' ] = user
+ environment[ '__user_id__' ] = environment[ 'userId' ] = user_id
+ environment[ '__user_email__' ] = environment[ 'userEmail' ] = user_email
+ environment[ '__user_name__' ] = user_name
+ return environment
+
+ @staticmethod
+ def expand_user_properties( user, in_string ):
+ """
+ """
+ environment = User.user_template_environment( user )
+ return Template( in_string ).safe_substitute( environment )
+
class Job( object, Dictifiable ):
dict_collection_visible_keys = [ 'id' ]
diff -r 64f283100f960c1ae7645fae17394334b1b7f349 -r 2a4f4be3c377de638ee280b7a3ae83c62af6038a lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2291,7 +2291,7 @@
args = dict()
for key, param in self.inputs.iteritems():
if isinstance( param, HiddenToolParameter ):
- args[key] = param.value
+ args[key] = model.User.expand_user_properties( trans.user, param.value )
elif isinstance( param, BaseURLToolParameter ):
args[key] = param.get_value( trans )
else:
diff -r 64f283100f960c1ae7645fae17394334b1b7f349 -r 2a4f4be3c377de638ee280b7a3ae83c62af6038a lib/galaxy/tools/parameters/dynamic_options.py
--- a/lib/galaxy/tools/parameters/dynamic_options.py
+++ b/lib/galaxy/tools/parameters/dynamic_options.py
@@ -6,6 +6,7 @@
import operator, sys, os, logging
import basic, validation
from galaxy.util import string_as_bool
+from galaxy.model import User
import galaxy.tools
log = logging.getLogger(__name__)
@@ -55,8 +56,7 @@
rval = []
filter_value = self.value
try:
- if trans.user.email:
- filter_value = filter_value.replace('$__user_email__',trans.user.email)
+ filter_value = User.expand_user_properties( trans.user, filter_value)
except:
pass
for fields in options:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
16 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/64f283100f96/
Changeset: 64f283100f96
User: greg
Date: 2013-10-16 20:19:26
Summary: Enhance the Tool Shed and Galaxy APIs to support resetting metadata on repositories in the Tool Shed or installed into Galaxy respectively. When resetting repository metadata in the Tool Shed, if the API key is associated with an admin user, then setting the my_writable param value to True will restrict resetting metadata to only repositories that are writable by the user in addition to those repositories of type tool_dependency_definition. This param is ignored if the current user is not an admin user, in which case this same restriction is automatic. Also, when resetting metadata in the Tool Shed, repositories of type tool_dependency_definition are reset before repositories of type unrestricted. When setting metadata for installed repositories in Galaxy, neither repository types nor order are considered since these concepts are irrelevant in that environment.
Affected #: 10 files
diff -r 00764f2c46dff45f208e22203c6498777725bba7 -r 64f283100f960c1ae7645fae17394334b1b7f349 lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -1,6 +1,7 @@
import logging
from paste.httpexceptions import HTTPBadRequest, HTTPForbidden
+from time import strftime
from galaxy import util
from galaxy import web
@@ -10,6 +11,7 @@
from tool_shed.galaxy_install import repository_util
from tool_shed.util import common_util
from tool_shed.util import encoding_util
+from tool_shed.util import metadata_util
from tool_shed.util import workflow_util
import tool_shed.util.shed_util_common as suc
@@ -488,6 +490,49 @@
return tool_shed_repositories
@web.expose_api
+ def reset_metadata_on_installed_repositories( self, trans, payload, **kwd ):
+ """
+ PUT /api/tool_shed_repositories/reset_metadata_on_installed_repositories
+
+ Resets all metadata on all repositories installed into Galaxy in an "orderly fashion".
+
+ :param key: the API key of the Galaxy admin user.
+ """
+ try:
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results = dict( start_time=start_time,
+ successful_count=0,
+ unsuccessful_count=0,
+ repository_status=[] )
+ # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
+ if not trans.user_is_admin():
+ raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' )
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=False )
+ # Now reset metadata on all remaining repositories.
+ for repository in query:
+ repository_id = trans.security.encode_id( repository.id )
+ try:
+ invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans, repository_id )
+ if invalid_file_tups:
+ message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
+ results[ 'unsuccessful_count' ] += 1
+ else:
+ message = "Successfully reset metadata on repository %s" % str( repository.name )
+ results[ 'successful_count' ] += 1
+ except Exception, e:
+ message = "Error resetting metadata on repository %s: %s" % ( str( repository.name ), str( e ) )
+ results[ 'unsuccessful_count' ] += 1
+ results[ 'repository_status' ].append( message )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
+ except Exception, e:
+ message = "Error in the Galaxy tool_shed_repositories API in reset_metadata_on_installed_repositories: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+
+ @web.expose_api
def show( self, trans, id, **kwd ):
"""
GET /api/tool_shed_repositories/{encoded_tool_shed_repsository_id}
diff -r 00764f2c46dff45f208e22203c6498777725bba7 -r 64f283100f960c1ae7645fae17394334b1b7f349 lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -187,6 +187,7 @@
'exported_workflows' : 'GET',
'import_workflow' : 'POST',
'import_workflows' : 'POST' },
+ collection={ 'reset_metadata_on_installed_repositories' : 'POST' },
controller='tool_shed_repositories',
name_prefix='tool_shed_repository_',
path_prefix='/api',
diff -r 00764f2c46dff45f208e22203c6498777725bba7 -r 64f283100f960c1ae7645fae17394334b1b7f349 lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -1,11 +1,16 @@
import logging
+from time import strftime
from galaxy.web.framework.helpers import time_ago
from galaxy import eggs
from galaxy import web
from galaxy import util
+from galaxy.util import json
from galaxy.web.base.controller import BaseAPIController
+import tool_shed.repository_types.util as rt_util
import tool_shed.util.shed_util_common as suc
from tool_shed.galaxy_install import repository_util
+from tool_shed.util import metadata_util
+from tool_shed.util import tool_util
eggs.require( 'mercurial' )
@@ -171,6 +176,69 @@
trans.response.status = 500
return message
+ @web.expose_api
+ def reset_metadata_on_repositories( self, trans, payload, **kwd ):
+ """
+ PUT /api/repositories/reset_metadata_on_repositories
+
+ Resets all metadata on all repositories in the Tool Shed in an "orderly fashion". Since there are currently only two
+ repository types (tool_dependecy_definition and unrestricted), the order in which metadata is reset is repositories of
+ type tool_dependecy_definition first followed by repositories of type unrestricted, and only one pass is necessary. If
+ a new repository type is introduced, the process will undoubtedly need to be revisited. To facilitate this order, an
+ in-memory list of repository ids that have been processed is maintained.
+
+ :param key: the API key of the Tool Shed user.
+
+ The following parameters can optionally be included in the payload.
+ :param my_writable (optional): if the API key is associated with an admin user in the Tool Shed, setting this param value
+ to True will restrict resetting metadata to only repositories that are writable by the user
+ in addition to those repositories of type tool_dependency_definition. This param is ignored
+ if the current user is not an admin user, in which case this same restriction is automatic.
+ """
+ def handle_repository( trans, repository, results ):
+ repository_id = trans.security.encode_id( repository.id )
+ try:
+ invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
+ if invalid_file_tups:
+ message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
+ results[ 'unsuccessful_count' ] += 1
+ else:
+ message = "Successfully reset metadata on repository %s" % str( repository.name )
+ results[ 'successful_count' ] += 1
+ except Exception, e:
+ message = "Error resetting metadata on repository %s: %s" % ( str( repository.name ), str( e ) )
+ results[ 'unsuccessful_count' ] += 1
+ results[ 'repository_status' ].append( message )
+ return results
+ try:
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results = dict( start_time=start_time,
+ repository_status=[],
+ successful_count=0,
+ unsuccessful_count=0 )
+ handled_repository_ids = []
+ if trans.user_is_admin():
+ my_writable = util.asbool( payload.get( 'my_writable', False ) )
+ else:
+ my_writable = True
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
+ # First reset metadata on all repositories of type repository_dependency_definition.
+ for repository in query:
+ if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ results = handle_repository( trans, repository, results )
+ # Now reset metadata on all remaining repositories.
+ for repository in query:
+ if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ results = handle_repository( trans, repository, results )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
+ except Exception, e:
+ message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+
@web.expose_api_anonymous
def show( self, trans, id, **kwd ):
"""
diff -r 00764f2c46dff45f208e22203c6498777725bba7 -r 64f283100f960c1ae7645fae17394334b1b7f349 lib/galaxy/webapps/tool_shed/buildapp.py
--- a/lib/galaxy/webapps/tool_shed/buildapp.py
+++ b/lib/galaxy/webapps/tool_shed/buildapp.py
@@ -84,7 +84,8 @@
'repositories',
controller='repositories',
collection={ 'get_repository_revision_install_info' : 'GET',
- 'get_ordered_installable_revisions' : 'GET' },
+ 'get_ordered_installable_revisions' : 'GET',
+ 'reset_metadata_on_repositories' : 'POST' },
name_prefix='repository_',
path_prefix='/api',
parent_resources=dict( member_name='repository', collection_name='repositories' ) )
diff -r 00764f2c46dff45f208e22203c6498777725bba7 -r 64f283100f960c1ae7645fae17394334b1b7f349 lib/tool_shed/scripts/api/reset_metadata_on_repositories.py
--- /dev/null
+++ b/lib/tool_shed/scripts/api/reset_metadata_on_repositories.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+"""
+Script to reset metadata on certain repositories in the Tool Shed. If the received API key is associated
+with an admin user in the Tool Shed, setting the my_writable param value to True will restrict resetting
+metadata to only repositories that are writable by the user in addition to those repositories of type
+tool_dependency_definition. The my_writable param is ignored if the current user is not an admin user,
+in which case this same restriction is automatic.
+
+usage: reset_metadata_on_repositories.py key <my_writable>
+
+Here is a working example of how to use this script to reset metadata on certain repositories in a specified Tool Shed.
+python ./reset_metadata_on_repositories.py -a 22be3b -m True -u http://localhost:9009/
+"""
+
+import argparse
+import os
+import sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+def main( options ):
+ api_key = options.api
+ my_writable = options.my_writable
+ base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
+ data = dict( my_writable=my_writable )
+ url = '%s/api/repositories/reset_metadata_on_repositories' % base_tool_shed_url
+ submit( url, data, options.api )
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser( description='Reset metadata on certain repositories in the Tool Shed via the Tool Shed API.' )
+ parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
+ parser.add_argument( "-m", "--my_writable", dest="my_writable", required=False, default='False', help="Restrict to my writable repositories" )
+ parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+ options = parser.parse_args()
+ main( options )
diff -r 00764f2c46dff45f208e22203c6498777725bba7 -r 64f283100f960c1ae7645fae17394334b1b7f349 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1782,10 +1782,10 @@
log.debug( message )
unsuccessful_count += 1
else:
- log.debug( "Successfully reset metadata on repository %s" % repository.name )
+ log.debug( "Successfully reset metadata on repository %s" % str( repository.name ) )
successful_count += 1
except:
- log.exception( "Error attempting to reset metadata on repository %s", repository.name )
+ log.exception( "Error attempting to reset metadata on repository %s", str( repository.name ) )
unsuccessful_count += 1
message = "Successfully reset metadata on %d %s. " % ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
if unsuccessful_count:
diff -r 00764f2c46dff45f208e22203c6498777725bba7 -r 64f283100f960c1ae7645fae17394334b1b7f349 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -20,6 +20,7 @@
from tool_shed.util import common_util
from tool_shed.util import encoding_util
from tool_shed.util import xml_util
+import tool_shed.repository_types.util as rt_util
from xml.etree import ElementTree as XmlET
from urllib2 import HTTPError
@@ -104,45 +105,15 @@
def build_repository_ids_select_field( trans, name='repository_ids', multiple=True, display='checkboxes', my_writable=False ):
"""Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata."""
repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
- if trans.webapp.name == 'tool_shed':
- # We're in the tool shed.
- if my_writable:
- username = trans.user.username
- clause_list = []
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( trans.model.Repository.table.c.deleted == False ):
- allow_push = repository.allow_push( trans.app )
- if allow_push:
- allow_push_usernames = allow_push.split( ',' )
- if username in allow_push_usernames:
- clause_list.append( trans.model.Repository.table.c.id == repository.id )
- if clause_list:
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( or_( *clause_list ) ) \
- .order_by( trans.model.Repository.table.c.name,
- trans.model.Repository.table.c.user_id ):
- owner = repository.user.username
- option_label = '%s (%s)' % ( repository.name, owner )
- option_value = '%s' % trans.security.encode_id( repository.id )
- repositories_select_field.add_option( option_label, option_value )
+ query = get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=True )
+ for repository in query:
+ if trans.webapp.name == 'tool_shed':
+ owner = str( repository.user.username )
else:
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( trans.model.Repository.table.c.deleted == False ) \
- .order_by( trans.model.Repository.table.c.name,
- trans.model.Repository.table.c.user_id ):
- owner = repository.user.username
- option_label = '%s (%s)' % ( repository.name, owner )
- option_value = '%s' % trans.security.encode_id( repository.id )
- repositories_select_field.add_option( option_label, option_value )
- else:
- # We're in Galaxy.
- for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \
- .order_by( trans.model.ToolShedRepository.table.c.name,
- trans.model.ToolShedRepository.table.c.owner ):
- option_label = '%s (%s)' % ( repository.name, repository.owner )
- option_value = trans.security.encode_id( repository.id )
- repositories_select_field.add_option( option_label, option_value )
+ owner = str( repository.owner )
+ option_label = '%s (%s)' % ( str( repository.name ), owner )
+ option_value = '%s' % trans.security.encode_id( repository.id )
+ repositories_select_field.add_option( option_label, option_value )
return repositories_select_field
def build_tool_dependencies_select_field( trans, tool_shed_repository, name, multiple=True, display='checkboxes', uninstalled_only=False ):
@@ -836,6 +807,63 @@
prior_import_or_install_required_dict[ encoded_repository_id ] = prior_import_or_install_ids
return prior_import_or_install_required_dict
+def get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=True ):
+ """
+ Return a query containing repositories for resetting metadata. This method is called from both the Tool Shed and Galaxy. The
+ my_writable parameter is ignored unless called from the Tool Shed, and the order parameter is used for displaying the list of
+ repositories ordered alphabetically for display on a page. When called from either the Tool Shed or Galaxy API, order is False.
+ """
+ if trans.webapp.name == 'tool_shed':
+ # When called from the Tool Shed API, the metadata is reset on all repositories of type tool_dependency_definition in addition
+ # to other selected repositories.
+ if my_writable:
+ username = trans.user.username
+ clause_list = []
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.deleted == False ):
+ allow_push = repository.allow_push( trans.app )
+ if not order:
+ # We've been called from the Tool Shed API, so reset metadata on all repositories of type tool_dependency_definition.
+ if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION:
+ clause_list.append( trans.model.Repository.table.c.id == repository.id )
+ elif allow_push:
+ # Include all repositories that are writable by the current user.
+ allow_push_usernames = allow_push.split( ',' )
+ if username in allow_push_usernames:
+ clause_list.append( trans.model.Repository.table.c.id == repository.id )
+ if clause_list:
+ if order:
+ return trans.sa_session.query( trans.model.Repository ) \
+ .filter( or_( *clause_list ) ) \
+ .order_by( trans.model.Repository.table.c.name,
+ trans.model.Repository.table.c.user_id )
+ else:
+ return trans.sa_session.query( trans.model.Repository ) \
+ .filter( or_( *clause_list ) )
+ else:
+ # Return an empty query.
+ return trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.id == -1 )
+ else:
+ if order:
+ return trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.deleted == False ) \
+ .order_by( trans.model.Repository.table.c.name,
+ trans.model.Repository.table.c.user_id )
+ else:
+ return trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.deleted == False )
+ else:
+ # We're in Galaxy.
+ if order:
+ return trans.sa_session.query( trans.model.ToolShedRepository ) \
+ .filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \
+ .order_by( trans.model.ToolShedRepository.table.c.name,
+ trans.model.ToolShedRepository.table.c.owner )
+ else:
+ return trans.sa_session.query( trans.model.ToolShedRepository ) \
+ .filter( trans.model.ToolShedRepository.table.c.uninstalled == False )
+
def get_repo_info_tuple_contents( repo_info_tuple ):
"""Take care in handling the repo_info_tuple as it evolves over time as new tool shed features are introduced."""
if len( repo_info_tuple ) == 6:
diff -r 00764f2c46dff45f208e22203c6498777725bba7 -r 64f283100f960c1ae7645fae17394334b1b7f349 scripts/api/import_workflows_from_installed_tool_shed_repository.py
--- a/scripts/api/import_workflows_from_installed_tool_shed_repository.py
+++ b/scripts/api/import_workflows_from_installed_tool_shed_repository.py
@@ -45,7 +45,7 @@
data = {}
# NOTE: to import a single workflow, add an index to data (e.g.,
# data[ 'index' ] = 0
- # and change the url to be ~/import_workflow (simgular). For example,
+ # and change the url to be ~/import_workflow (singular). For example,
# url = '%s%s' % ( base_galaxy_url, '/api/tool_shed_repositories/%s/import_workflow' % str( tool_shed_repository_id ) )
url = '%s%s' % ( base_galaxy_url, '/api/tool_shed_repositories/%s/import_workflows' % str( tool_shed_repository_id ) )
submit( options.api, url, data )
diff -r 00764f2c46dff45f208e22203c6498777725bba7 -r 64f283100f960c1ae7645fae17394334b1b7f349 scripts/api/reset_metadata_on_installed_repositories.py
--- /dev/null
+++ b/scripts/api/reset_metadata_on_installed_repositories.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+"""
+Script to reset metadata on all Tool Shed repositories installed into a Galaxy instance. The received
+API key must be associated with a Galaxy admin user.
+
+usage: reset_metadata_on_installed_repositories.py key
+
+Here is a working example of how to use this script.
+python ./reset_metadata_on_installed_repositories.py -a 22be3b -u http://localhost:8763/
+"""
+
+import argparse
+import os
+import sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+def main( options ):
+ api_key = options.api
+ base_galaxy_url = options.galaxy_url.rstrip( '/' )
+ url = '%s/api/tool_shed_repositories/reset_metadata_on_installed_repositories' % base_galaxy_url
+ submit( options.api, url, {} )
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser( description='Reset metadata on all Tool Shed repositories installed into Galaxy via the Galaxy API.' )
+ parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
+ parser.add_argument( "-u", "--url", dest="galaxy_url", required=True, help="Galaxy URL" )
+ options = parser.parse_args()
+ main( options )
diff -r 00764f2c46dff45f208e22203c6498777725bba7 -r 64f283100f960c1ae7645fae17394334b1b7f349 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -969,7 +969,7 @@
self.check_for_strings( [ 'Metadata has been reset' ] )
def reset_metadata_on_selected_repositories( self, repository_ids ):
- self.visit_url( '/admin/reset_metadata_on_selected_repositories_in_tool_shed' )
+ self.visit_url( '/admin/reset_metadata_on_selected_repositories' )
kwd = dict( repository_ids=repository_ids )
self.submit_form( form_no=1, button="reset_metadata_on_selected_repositories_button", **kwd )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e786022dc67e/
Changeset: e786022dc67e
User: jmchilton
Date: 2013-10-16 20:01:15
Summary: Improved handling of tools producing unicode in standard error/output.
Without this change (modification on idea by Bjoern), sqlite reports errors as follows for such tools:
ProgrammingError: (ProgrammingError) You must not use 8-bit bytestrings unless you use a text_factory that can interpret 8-bit bytestrings (like text_factory = str). It is highly recommended that you instead just switch your application to Unicode strings. u'UPDATE job SET update_time=?, stdout=?, stderr=? WHERE job.id = ?' ('2013-10-10 05:00:05.789309', '', '/dev/fd/2 unicode \xc3\xaa\xe0\xaf\xb3\xe0\xbe\x85\xe1\x9d\xb1\xe3\x8e\xb0\n', 11).
Affected #: 1 file
diff -r 1c22e43fc21e049da34af60f603f2e19bde912d2 -r e786022dc67ed918050bd81b9ac679ac958e4f75 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -25,6 +25,7 @@
from galaxy.util.bunch import Bunch
from galaxy.util.expressions import ExpressionContext
from galaxy.util.json import from_json_string
+from galaxy.util import unicodify
from .output_checker import check_output
log = logging.getLogger( __name__ )
@@ -869,6 +870,9 @@
the output datasets based on stderr and stdout from the command, and
the contents of the output files.
"""
+ stdout = unicodify( stdout )
+ stderr = unicodify( stderr )
+
# default post job setup
self.sa_session.expunge_all()
job = self.get_job()
@@ -1518,6 +1522,9 @@
the output datasets based on stderr and stdout from the command, and
the contents of the output files.
"""
+ stdout = unicodify( stdout )
+ stderr = unicodify( stderr )
+
# This may have ended too soon
log.debug( 'task %s for job %d ended; exit code: %d'
% (self.task_id, self.job_id,
https://bitbucket.org/galaxy/galaxy-central/commits/00764f2c46df/
Changeset: 00764f2c46df
User: jmchilton
Date: 2013-10-16 20:01:15
Summary: Improved handling of tools producing unicode in standard error/output.
Without this modification paste cannot render the stderr and/or stdout of such tools. This is a modification on an idea from Simon Guest, utilizing some code (now available as galaxy.util.smart_str) from an older version of Django to solve this problem.
Affected #: 2 files
diff -r e786022dc67ed918050bd81b9ac679ac958e4f75 -r 00764f2c46dff45f208e22203c6498777725bba7 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -547,6 +547,30 @@
except:
return default
+
+def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
+ """
+ Returns a bytestring version of 's', encoded as specified in 'encoding'.
+
+ If strings_only is True, don't convert (some) non-string-like objects.
+
+ Adapted from an older, simpler version of django.utils.encoding.smart_str.
+ """
+ if strings_only and isinstance(s, (type(None), int)):
+ return s
+ if not isinstance(s, basestring):
+ try:
+ return str(s)
+ except UnicodeEncodeError:
+ return unicode(s).encode(encoding, errors)
+ elif isinstance(s, unicode):
+ return s.encode(encoding, errors)
+ elif s and encoding != 'utf-8':
+ return s.decode('utf-8', errors).encode(encoding, errors)
+ else:
+ return s
+
+
def object_to_string( obj ):
return binascii.hexlify( pickle.dumps( obj, 2 ) )
diff -r e786022dc67ed918050bd81b9ac679ac958e4f75 -r 00764f2c46dff45f208e22203c6498777725bba7 lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -8,7 +8,7 @@
from galaxy import datatypes, eggs, model, util, web
from galaxy.datatypes.display_applications.util import decode_dataset_user, encode_dataset_user
from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings
-from galaxy.util import inflector
+from galaxy.util import inflector, smart_str
from galaxy.util.sanitize_html import sanitize_html
from galaxy.web.base.controller import BaseUIController, ERROR, SUCCESS, url_for, UsesHistoryDatasetAssociationMixin, UsesHistoryMixin
from galaxy.web.framework.helpers import grids, iff, time_ago
@@ -184,7 +184,7 @@
stdout = job.stdout
except:
stdout = "Invalid dataset ID or you are not allowed to access this dataset"
- return stdout
+ return smart_str( stdout )
@web.expose
# TODO: Migrate stderr and stdout to use _get_job_for_dataset; it wasn't tested.
@@ -196,7 +196,7 @@
stderr = job.stderr
except:
stderr = "Invalid dataset ID or you are not allowed to access this dataset"
- return stderr
+ return smart_str( stderr )
@web.expose
def exit_code( self, trans, dataset_id=None, **kwargs ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3a156f9e008d/
Changeset: 3a156f9e008d
User: nsoranzo
Date: 2013-09-18 14:43:59
Summary: Prevent no choice in multiple selections.
Affected #: 2 files
diff -r 750e687b468a27978d51dfac9d542a0cc113e64b -r 3a156f9e008d3e61a951aa9efba2c982cb826e0a tools/ngs_simulation/ngs_simulation.py
--- a/tools/ngs_simulation/ngs_simulation.py
+++ b/tools/ngs_simulation/ngs_simulation.py
@@ -70,11 +70,11 @@
num_sims = int( options.num_sims )
except TypeError, e:
stop_err( 'Make sure the number of simulations is an integer value: %s' % str( e ) )
- if len( options.polymorphism ) > 0:
+ if options.polymorphism != 'None':
polymorphisms = [ float( p ) for p in options.polymorphism.split( ',' ) ]
else:
stop_err( 'Select at least one polymorphism value to use' )
- if len( options.detection_thresh ) > 0:
+ if options.detection_thresh != 'None':
detection_threshes = [ float( dt ) for dt in options.detection_thresh.split( ',' ) ]
else:
stop_err( 'Select at least one detection threshold to use' )
diff -r 750e687b468a27978d51dfac9d542a0cc113e64b -r 3a156f9e008d3e61a951aa9efba2c982cb826e0a tools/ngs_simulation/ngs_simulation.xml
--- a/tools/ngs_simulation/ngs_simulation.xml
+++ b/tools/ngs_simulation/ngs_simulation.xml
@@ -72,6 +72,7 @@
<option value="0.8">0.8</option><option value="0.9">0.9</option><option value="1.0">1.0</option>
+ <validator type="no_options" message="You must select at least one value" /></param><param name="detection_thresh" type="select" multiple="true" label="Detection thresholds"><option value="0.001">0.001</option>
@@ -102,6 +103,7 @@
<option value="0.8">0.8</option><option value="0.9">0.9</option><option value="1.0">1.0</option>
+ <validator type="no_options" message="You must select at least one value" /></param><param name="summary_out" type="boolean" truevalue="true" falsevalue="false" checked="true" label="Include a (text) summary file for all the simulations" /><!-- <param name="sim_results" type="boolean" truevalue="true" falsevalue="false" checked="false" label="Output all tabular simulation results" help="Number of polymorphisms times number of detection thresholds"/>
https://bitbucket.org/galaxy/galaxy-central/commits/ff4bbde97901/
Changeset: ff4bbde97901
User: nsoranzo
Date: 2013-09-18 14:44:42
Summary: Fix reference to 'Fetch taxonomic representation' tool.
Affected #: 1 file
diff -r 3a156f9e008d3e61a951aa9efba2c982cb826e0a -r ff4bbde97901cc3dd3ab9c36432e2d17a655265a tools/taxonomy/t2ps_wrapper.xml
--- a/tools/taxonomy/t2ps_wrapper.xml
+++ b/tools/taxonomy/t2ps_wrapper.xml
@@ -47,7 +47,7 @@
**What it does**
-Given taxonomy representation (produced by *Taxonomy manipulation->Fetch Taxonomic Ranks* tool) this utility produces a graphical representations of phylogenetic tree in PDF format.
+Given taxonomy representation (produced by *Fetch taxonomic representation* tool) this utility produces a graphical representations of phylogenetic tree in PDF format.
--------
https://bitbucket.org/galaxy/galaxy-central/commits/c61cc2f91c59/
Changeset: c61cc2f91c59
User: nsoranzo
Date: 2013-10-04 18:15:22
Summary: Fix for "syntax error near unexpected token `;;'".
<command> should not end with a semicolon, otherwise
BaseJobRunner.build_command_line() would create a bash command containing
';;' when appending metadata setting commands.
Affected #: 1 file
diff -r ff4bbde97901cc3dd3ab9c36432e2d17a655265a -r c61cc2f91c59c54e585ef92b77f650774ebdbd55 tools/sr_mapping/bowtie2_wrapper.xml
--- a/tools/sr_mapping/bowtie2_wrapper.xml
+++ b/tools/sr_mapping/bowtie2_wrapper.xml
@@ -95,15 +95,15 @@
#end if
## view/sort and output file
- | samtools view -Su - | samtools sort -o - - > $output;
+ | samtools view -Su - | samtools sort -o - - > $output
## rename unaligned sequence files
#if $library.type == "paired" and $output_unaligned_reads_l and $output_unaligned_reads_r:
#set left = str($output_unaligned_reads_l).replace( '.dat', '.1.dat' )
#set right = str($output_unaligned_reads_l).replace( '.dat', '.2.dat' )
- mv $left $output_unaligned_reads_l;
- mv $right $output_unaligned_reads_r;
+ ; mv $left $output_unaligned_reads_l;
+ mv $right $output_unaligned_reads_r
#end if
</command>
https://bitbucket.org/galaxy/galaxy-central/commits/1c22e43fc21e/
Changeset: 1c22e43fc21e
User: jmchilton
Date: 2013-10-16 19:49:34
Summary: Merged in nsoranzo/galaxy-central-tools2 (pull request #234)
Bug fixes for 3 tools (rebased #222)
Affected #: 4 files
diff -r e8fbf32ba1ccb4edec436e9ca76a40d6f64917be -r 1c22e43fc21e049da34af60f603f2e19bde912d2 tools/ngs_simulation/ngs_simulation.py
--- a/tools/ngs_simulation/ngs_simulation.py
+++ b/tools/ngs_simulation/ngs_simulation.py
@@ -70,11 +70,11 @@
num_sims = int( options.num_sims )
except TypeError, e:
stop_err( 'Make sure the number of simulations is an integer value: %s' % str( e ) )
- if len( options.polymorphism ) > 0:
+ if options.polymorphism != 'None':
polymorphisms = [ float( p ) for p in options.polymorphism.split( ',' ) ]
else:
stop_err( 'Select at least one polymorphism value to use' )
- if len( options.detection_thresh ) > 0:
+ if options.detection_thresh != 'None':
detection_threshes = [ float( dt ) for dt in options.detection_thresh.split( ',' ) ]
else:
stop_err( 'Select at least one detection threshold to use' )
diff -r e8fbf32ba1ccb4edec436e9ca76a40d6f64917be -r 1c22e43fc21e049da34af60f603f2e19bde912d2 tools/ngs_simulation/ngs_simulation.xml
--- a/tools/ngs_simulation/ngs_simulation.xml
+++ b/tools/ngs_simulation/ngs_simulation.xml
@@ -72,6 +72,7 @@
<option value="0.8">0.8</option><option value="0.9">0.9</option><option value="1.0">1.0</option>
+ <validator type="no_options" message="You must select at least one value" /></param><param name="detection_thresh" type="select" multiple="true" label="Detection thresholds"><option value="0.001">0.001</option>
@@ -102,6 +103,7 @@
<option value="0.8">0.8</option><option value="0.9">0.9</option><option value="1.0">1.0</option>
+ <validator type="no_options" message="You must select at least one value" /></param><param name="summary_out" type="boolean" truevalue="true" falsevalue="false" checked="true" label="Include a (text) summary file for all the simulations" /><!-- <param name="sim_results" type="boolean" truevalue="true" falsevalue="false" checked="false" label="Output all tabular simulation results" help="Number of polymorphisms times number of detection thresholds"/>
diff -r e8fbf32ba1ccb4edec436e9ca76a40d6f64917be -r 1c22e43fc21e049da34af60f603f2e19bde912d2 tools/sr_mapping/bowtie2_wrapper.xml
--- a/tools/sr_mapping/bowtie2_wrapper.xml
+++ b/tools/sr_mapping/bowtie2_wrapper.xml
@@ -95,15 +95,15 @@
#end if
## view/sort and output file
- | samtools view -Su - | samtools sort -o - - > $output;
+ | samtools view -Su - | samtools sort -o - - > $output
## rename unaligned sequence files
#if $library.type == "paired" and $output_unaligned_reads_l and $output_unaligned_reads_r:
#set left = str($output_unaligned_reads_l).replace( '.dat', '.1.dat' )
#set right = str($output_unaligned_reads_l).replace( '.dat', '.2.dat' )
- mv $left $output_unaligned_reads_l;
- mv $right $output_unaligned_reads_r;
+ ; mv $left $output_unaligned_reads_l;
+ mv $right $output_unaligned_reads_r
#end if
</command>
diff -r e8fbf32ba1ccb4edec436e9ca76a40d6f64917be -r 1c22e43fc21e049da34af60f603f2e19bde912d2 tools/taxonomy/t2ps_wrapper.xml
--- a/tools/taxonomy/t2ps_wrapper.xml
+++ b/tools/taxonomy/t2ps_wrapper.xml
@@ -47,7 +47,7 @@
**What it does**
-Given taxonomy representation (produced by *Taxonomy manipulation->Fetch Taxonomic Ranks* tool) this utility produces a graphical representations of phylogenetic tree in PDF format.
+Given taxonomy representation (produced by *Fetch taxonomic representation* tool) this utility produces a graphical representations of phylogenetic tree in PDF format.
--------
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6fdf96b4aef0/
Changeset: 6fdf96b4aef0
User: jmchilton
Date: 2013-10-11 17:40:43
Summary: Refactor building command lines out of job runner base into its own module.
Add unit tests. Test skipping metadata, using metadata, return code patch, dependency shell commands.
Affected #: 3 files
diff -r 829c047e05776fb2a6568f0d1076856fe6255fab -r 6fdf96b4aef00174938470f945d6931746e319f2 lib/galaxy/jobs/command_factory.py
--- /dev/null
+++ b/lib/galaxy/jobs/command_factory.py
@@ -0,0 +1,70 @@
+from os import getcwd
+from os.path import abspath
+
+
+def build_command( job, job_wrapper, include_metadata=False, include_work_dir_outputs=True ):
+ """
+ Compose the sequence of commands necessary to execute a job. This will
+ currently include:
+
+ - environment settings corresponding to any requirement tags
+ - preparing input files
+ - command line taken from job wrapper
+ - commands to set metadata (if include_metadata is True)
+ """
+
+ commands = job_wrapper.get_command_line()
+
+ # All job runners currently handle this case which should never occur
+ if not commands:
+ return None
+
+ # Prepend version string
+ if job_wrapper.version_string_cmd:
+ commands = "%s &> %s; " % ( job_wrapper.version_string_cmd, job_wrapper.get_version_string_path() ) + commands
+
+ # prepend getting input files (if defined)
+ if hasattr(job_wrapper, 'prepare_input_files_cmds') and job_wrapper.prepare_input_files_cmds is not None:
+ commands = "; ".join( job_wrapper.prepare_input_files_cmds + [ commands ] )
+
+ # Prepend dependency injection
+ if job_wrapper.dependency_shell_commands:
+ commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
+
+ # Coping work dir outputs or setting metadata will mask return code of
+ # tool command. If these are used capture the return code and ensure
+ # the last thing that happens is an exit with return code.
+ capture_return_code_command = "; return_code=$?"
+ captured_return_code = False
+
+ # Append commands to copy job outputs based on from_work_dir attribute.
+ if include_work_dir_outputs:
+ work_dir_outputs = job.get_work_dir_outputs( job_wrapper )
+ if work_dir_outputs:
+ if not captured_return_code:
+ commands += capture_return_code_command
+ captured_return_code = True
+
+ commands += "; " + "; ".join( [ "if [ -f %s ] ; then cp %s %s ; fi" %
+ ( source_file, source_file, destination ) for ( source_file, destination ) in work_dir_outputs ] )
+
+ # Append metadata setting commands, we don't want to overwrite metadata
+ # that was copied over in init_meta(), as per established behavior
+ if include_metadata and job_wrapper.requires_setting_metadata:
+ if not captured_return_code:
+ commands += capture_return_code_command
+ captured_return_code = True
+ commands += "; cd %s; " % abspath( getcwd() )
+ commands += job_wrapper.setup_external_metadata(
+ exec_dir=abspath( getcwd() ),
+ tmp_dir=job_wrapper.working_directory,
+ dataset_files_path=job.app.model.Dataset.file_path,
+ output_fnames=job_wrapper.get_output_fnames(),
+ set_extension=False,
+ kwds={ 'overwrite' : False }
+ )
+
+ if captured_return_code:
+ commands += '; sh -c "exit $return_code"'
+
+ return commands
diff -r 829c047e05776fb2a6568f0d1076856fe6255fab -r 6fdf96b4aef00174938470f945d6931746e319f2 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -12,6 +12,7 @@
from Queue import Queue, Empty
import galaxy.jobs
+from galaxy.jobs.command_factory import build_command
from galaxy import model
from galaxy.util import DATABASE_MAX_STRING_SIZE, shrink_stream_by_size
@@ -142,67 +143,7 @@
raise NotImplementedError()
def build_command_line( self, job_wrapper, include_metadata=False, include_work_dir_outputs=True ):
- """
- Compose the sequence of commands necessary to execute a job. This will
- currently include:
-
- - environment settings corresponding to any requirement tags
- - preparing input files
- - command line taken from job wrapper
- - commands to set metadata (if include_metadata is True)
- """
-
- commands = job_wrapper.get_command_line()
- # All job runners currently handle this case which should never
- # occur
- if not commands:
- return None
- # Prepend version string
- if job_wrapper.version_string_cmd:
- commands = "%s &> %s; " % ( job_wrapper.version_string_cmd, job_wrapper.get_version_string_path() ) + commands
- # prepend getting input files (if defined)
- if hasattr(job_wrapper, 'prepare_input_files_cmds') and job_wrapper.prepare_input_files_cmds is not None:
- commands = "; ".join( job_wrapper.prepare_input_files_cmds + [ commands ] )
- # Prepend dependency injection
- if job_wrapper.dependency_shell_commands:
- commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
-
- # Coping work dir outputs or setting metadata will mask return code of
- # tool command. If these are used capture the return code and ensure
- # the last thing that happens is an exit with return code.
- capture_return_code_command = "; return_code=$?"
- captured_return_code = False
-
- # Append commands to copy job outputs based on from_work_dir attribute.
- if include_work_dir_outputs:
- work_dir_outputs = self.get_work_dir_outputs( job_wrapper )
- if work_dir_outputs:
- if not captured_return_code:
- commands += capture_return_code_command
- captured_return_code = True
- commands += "; " + "; ".join( [ "if [ -f %s ] ; then cp %s %s ; fi" %
- ( source_file, source_file, destination ) for ( source_file, destination ) in work_dir_outputs ] )
-
- # Append metadata setting commands, we don't want to overwrite metadata
- # that was copied over in init_meta(), as per established behavior
- if include_metadata and job_wrapper.requires_setting_metadata:
- if not captured_return_code:
- commands += capture_return_code_command
- captured_return_code = True
- commands += "; cd %s; " % os.path.abspath( os.getcwd() )
- commands += job_wrapper.setup_external_metadata(
- exec_dir = os.path.abspath( os.getcwd() ),
- tmp_dir = job_wrapper.working_directory,
- dataset_files_path = self.app.model.Dataset.file_path,
- output_fnames = job_wrapper.get_output_fnames(),
- set_extension = False,
- kwds = { 'overwrite' : False } )
-
-
- if captured_return_code:
- commands += '; sh -c "exit $return_code"'
-
- return commands
+ return build_command( self, job_wrapper, include_metadata=include_metadata, include_work_dir_outputs=include_work_dir_outputs )
def get_work_dir_outputs( self, job_wrapper ):
"""
diff -r 829c047e05776fb2a6568f0d1076856fe6255fab -r 6fdf96b4aef00174938470f945d6931746e319f2 test/unit/test_command_factory.py
--- /dev/null
+++ b/test/unit/test_command_factory.py
@@ -0,0 +1,79 @@
+from os import getcwd
+from unittest import TestCase
+
+from galaxy.jobs.command_factory import build_command
+from galaxy.util.bunch import Bunch
+
+MOCK_COMMAND_LINE = "/opt/galaxy/tools/bowtie /mnt/galaxyData/files/000/input000.dat"
+
+
+class TestCommandFactory(TestCase):
+
+ def setUp(self):
+ self.job_wrapper = MockJobWrapper()
+ self.job = Bunch(app=Bunch(model=Bunch(Dataset=Bunch(file_path="file_path"))))
+ self.include_metadata = False
+ self.include_work_dir_outputs = True
+
+ def test_simplest_command(self):
+ self.include_work_dir_outputs = False
+ self.__assert_command_is( MOCK_COMMAND_LINE )
+
+ def test_shell_commands(self):
+ self.include_work_dir_outputs = False
+ dep_commands = [". /opt/galaxy/tools/bowtie/default/env.sh"]
+ self.job_wrapper.dependency_shell_commands = dep_commands
+ self.__assert_command_is( "%s; %s" % (dep_commands[0], MOCK_COMMAND_LINE) )
+
+ def test_set_metadata_skipped_if_unneeded(self):
+ self.include_metadata = True
+ self.include_work_dir_outputs = False
+ self.__assert_command_is( MOCK_COMMAND_LINE )
+
+ def test_set_metadata(self):
+ self.include_metadata = True
+ self.include_work_dir_outputs = False
+ metadata_line = "set_metadata_and_stuff.sh"
+ self.job_wrapper.metadata_line = metadata_line
+ expected_command = '%s; return_code=$?; cd %s; %s; sh -c "exit $return_code"' % (MOCK_COMMAND_LINE, getcwd(), metadata_line)
+ self.__assert_command_is( expected_command )
+
+ def __assert_command_is(self, expected_command):
+ command = self.__command()
+ self.assertEqual(command, expected_command)
+
+ def __command(self):
+ kwds = dict(
+ job=self.job,
+ job_wrapper=self.job_wrapper,
+ include_metadata=self.include_metadata,
+ include_work_dir_outputs=self.include_work_dir_outputs,
+ )
+ return build_command(**kwds)
+
+
+class MockJobWrapper(object):
+
+ def __init__(self):
+ self.version_string_cmd = None
+ self.command_line = MOCK_COMMAND_LINE
+ self.dependency_shell_commands = []
+ self.metadata_line = None
+ self.working_directory = "job1"
+
+ def get_command_line(self):
+ return self.command_line
+
+ @property
+ def requires_setting_metadata(self):
+ return self.metadata_line is not None
+
+ def setup_external_metadata(self, *args, **kwds):
+ return self.metadata_line
+
+ def get_output_fnames(self):
+ return []
+
+
+class MockJob(object):
+ app = Bunch()
https://bitbucket.org/galaxy/galaxy-central/commits/e0016057e164/
Changeset: e0016057e164
User: jmchilton
Date: 2013-10-11 17:40:43
Summary: Fix bug related to trailing semi-colon in tools.
Thanks to Bjoern, Nicola, Nate for helping track this down.
Affected #: 2 files
diff -r 6fdf96b4aef00174938470f945d6931746e319f2 -r e0016057e1648d7f4d29fbaf14252cbefff6d0bd lib/galaxy/jobs/command_factory.py
--- a/lib/galaxy/jobs/command_factory.py
+++ b/lib/galaxy/jobs/command_factory.py
@@ -19,6 +19,10 @@
if not commands:
return None
+ # Remove trailing semi-colon so we can start hacking up this command.
+ # TODO: Refactor to compose a list and join with ';', would be more clean.
+ commands = commands.rstrip(";")
+
# Prepend version string
if job_wrapper.version_string_cmd:
commands = "%s &> %s; " % ( job_wrapper.version_string_cmd, job_wrapper.get_version_string_path() ) + commands
diff -r 6fdf96b4aef00174938470f945d6931746e319f2 -r e0016057e1648d7f4d29fbaf14252cbefff6d0bd test/unit/test_command_factory.py
--- a/test/unit/test_command_factory.py
+++ b/test/unit/test_command_factory.py
@@ -31,6 +31,13 @@
self.__assert_command_is( MOCK_COMMAND_LINE )
def test_set_metadata(self):
+ self._test_set_metadata()
+
+ def test_strips_trailing_semicolons(self):
+ self.job_wrapper.command_line = "%s;" % MOCK_COMMAND_LINE
+ self._test_set_metadata()
+
+ def _test_set_metadata(self):
self.include_metadata = True
self.include_work_dir_outputs = False
metadata_line = "set_metadata_and_stuff.sh"
https://bitbucket.org/galaxy/galaxy-central/commits/e8fbf32ba1cc/
Changeset: e8fbf32ba1cc
User: jmchilton
Date: 2013-10-16 19:48:50
Summary: Merge pull request #235.
Fix for tools with trailing semi-colons.
Affected #: 3 files
diff -r e4d476ccf7832df0b1f65048d3784b010f84e59a -r e8fbf32ba1ccb4edec436e9ca76a40d6f64917be lib/galaxy/jobs/command_factory.py
--- /dev/null
+++ b/lib/galaxy/jobs/command_factory.py
@@ -0,0 +1,74 @@
+from os import getcwd
+from os.path import abspath
+
+
+def build_command( job, job_wrapper, include_metadata=False, include_work_dir_outputs=True ):
+ """
+ Compose the sequence of commands necessary to execute a job. This will
+ currently include:
+
+ - environment settings corresponding to any requirement tags
+ - preparing input files
+ - command line taken from job wrapper
+ - commands to set metadata (if include_metadata is True)
+ """
+
+ commands = job_wrapper.get_command_line()
+
+ # All job runners currently handle this case which should never occur
+ if not commands:
+ return None
+
+ # Remove trailing semi-colon so we can start hacking up this command.
+ # TODO: Refactor to compose a list and join with ';', would be more clean.
+ commands = commands.rstrip(";")
+
+ # Prepend version string
+ if job_wrapper.version_string_cmd:
+ commands = "%s &> %s; " % ( job_wrapper.version_string_cmd, job_wrapper.get_version_string_path() ) + commands
+
+ # prepend getting input files (if defined)
+ if hasattr(job_wrapper, 'prepare_input_files_cmds') and job_wrapper.prepare_input_files_cmds is not None:
+ commands = "; ".join( job_wrapper.prepare_input_files_cmds + [ commands ] )
+
+ # Prepend dependency injection
+ if job_wrapper.dependency_shell_commands:
+ commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
+
+ # Coping work dir outputs or setting metadata will mask return code of
+ # tool command. If these are used capture the return code and ensure
+ # the last thing that happens is an exit with return code.
+ capture_return_code_command = "; return_code=$?"
+ captured_return_code = False
+
+ # Append commands to copy job outputs based on from_work_dir attribute.
+ if include_work_dir_outputs:
+ work_dir_outputs = job.get_work_dir_outputs( job_wrapper )
+ if work_dir_outputs:
+ if not captured_return_code:
+ commands += capture_return_code_command
+ captured_return_code = True
+
+ commands += "; " + "; ".join( [ "if [ -f %s ] ; then cp %s %s ; fi" %
+ ( source_file, source_file, destination ) for ( source_file, destination ) in work_dir_outputs ] )
+
+ # Append metadata setting commands, we don't want to overwrite metadata
+ # that was copied over in init_meta(), as per established behavior
+ if include_metadata and job_wrapper.requires_setting_metadata:
+ if not captured_return_code:
+ commands += capture_return_code_command
+ captured_return_code = True
+ commands += "; cd %s; " % abspath( getcwd() )
+ commands += job_wrapper.setup_external_metadata(
+ exec_dir=abspath( getcwd() ),
+ tmp_dir=job_wrapper.working_directory,
+ dataset_files_path=job.app.model.Dataset.file_path,
+ output_fnames=job_wrapper.get_output_fnames(),
+ set_extension=False,
+ kwds={ 'overwrite' : False }
+ )
+
+ if captured_return_code:
+ commands += '; sh -c "exit $return_code"'
+
+ return commands
diff -r e4d476ccf7832df0b1f65048d3784b010f84e59a -r e8fbf32ba1ccb4edec436e9ca76a40d6f64917be lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -12,6 +12,7 @@
from Queue import Queue, Empty
import galaxy.jobs
+from galaxy.jobs.command_factory import build_command
from galaxy import model
from galaxy.util import DATABASE_MAX_STRING_SIZE, shrink_stream_by_size
@@ -142,67 +143,7 @@
raise NotImplementedError()
def build_command_line( self, job_wrapper, include_metadata=False, include_work_dir_outputs=True ):
- """
- Compose the sequence of commands necessary to execute a job. This will
- currently include:
-
- - environment settings corresponding to any requirement tags
- - preparing input files
- - command line taken from job wrapper
- - commands to set metadata (if include_metadata is True)
- """
-
- commands = job_wrapper.get_command_line()
- # All job runners currently handle this case which should never
- # occur
- if not commands:
- return None
- # Prepend version string
- if job_wrapper.version_string_cmd:
- commands = "%s &> %s; " % ( job_wrapper.version_string_cmd, job_wrapper.get_version_string_path() ) + commands
- # prepend getting input files (if defined)
- if hasattr(job_wrapper, 'prepare_input_files_cmds') and job_wrapper.prepare_input_files_cmds is not None:
- commands = "; ".join( job_wrapper.prepare_input_files_cmds + [ commands ] )
- # Prepend dependency injection
- if job_wrapper.dependency_shell_commands:
- commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
-
- # Coping work dir outputs or setting metadata will mask return code of
- # tool command. If these are used capture the return code and ensure
- # the last thing that happens is an exit with return code.
- capture_return_code_command = "; return_code=$?"
- captured_return_code = False
-
- # Append commands to copy job outputs based on from_work_dir attribute.
- if include_work_dir_outputs:
- work_dir_outputs = self.get_work_dir_outputs( job_wrapper )
- if work_dir_outputs:
- if not captured_return_code:
- commands += capture_return_code_command
- captured_return_code = True
- commands += "; " + "; ".join( [ "if [ -f %s ] ; then cp %s %s ; fi" %
- ( source_file, source_file, destination ) for ( source_file, destination ) in work_dir_outputs ] )
-
- # Append metadata setting commands, we don't want to overwrite metadata
- # that was copied over in init_meta(), as per established behavior
- if include_metadata and job_wrapper.requires_setting_metadata:
- if not captured_return_code:
- commands += capture_return_code_command
- captured_return_code = True
- commands += "; cd %s; " % os.path.abspath( os.getcwd() )
- commands += job_wrapper.setup_external_metadata(
- exec_dir = os.path.abspath( os.getcwd() ),
- tmp_dir = job_wrapper.working_directory,
- dataset_files_path = self.app.model.Dataset.file_path,
- output_fnames = job_wrapper.get_output_fnames(),
- set_extension = False,
- kwds = { 'overwrite' : False } )
-
-
- if captured_return_code:
- commands += '; sh -c "exit $return_code"'
-
- return commands
+ return build_command( self, job_wrapper, include_metadata=include_metadata, include_work_dir_outputs=include_work_dir_outputs )
def get_work_dir_outputs( self, job_wrapper ):
"""
diff -r e4d476ccf7832df0b1f65048d3784b010f84e59a -r e8fbf32ba1ccb4edec436e9ca76a40d6f64917be test/unit/test_command_factory.py
--- /dev/null
+++ b/test/unit/test_command_factory.py
@@ -0,0 +1,86 @@
+from os import getcwd
+from unittest import TestCase
+
+from galaxy.jobs.command_factory import build_command
+from galaxy.util.bunch import Bunch
+
+MOCK_COMMAND_LINE = "/opt/galaxy/tools/bowtie /mnt/galaxyData/files/000/input000.dat"
+
+
+class TestCommandFactory(TestCase):
+
+ def setUp(self):
+ self.job_wrapper = MockJobWrapper()
+ self.job = Bunch(app=Bunch(model=Bunch(Dataset=Bunch(file_path="file_path"))))
+ self.include_metadata = False
+ self.include_work_dir_outputs = True
+
+ def test_simplest_command(self):
+ self.include_work_dir_outputs = False
+ self.__assert_command_is( MOCK_COMMAND_LINE )
+
+ def test_shell_commands(self):
+ self.include_work_dir_outputs = False
+ dep_commands = [". /opt/galaxy/tools/bowtie/default/env.sh"]
+ self.job_wrapper.dependency_shell_commands = dep_commands
+ self.__assert_command_is( "%s; %s" % (dep_commands[0], MOCK_COMMAND_LINE) )
+
+ def test_set_metadata_skipped_if_unneeded(self):
+ self.include_metadata = True
+ self.include_work_dir_outputs = False
+ self.__assert_command_is( MOCK_COMMAND_LINE )
+
+ def test_set_metadata(self):
+ self._test_set_metadata()
+
+ def test_strips_trailing_semicolons(self):
+ self.job_wrapper.command_line = "%s;" % MOCK_COMMAND_LINE
+ self._test_set_metadata()
+
+ def _test_set_metadata(self):
+ self.include_metadata = True
+ self.include_work_dir_outputs = False
+ metadata_line = "set_metadata_and_stuff.sh"
+ self.job_wrapper.metadata_line = metadata_line
+ expected_command = '%s; return_code=$?; cd %s; %s; sh -c "exit $return_code"' % (MOCK_COMMAND_LINE, getcwd(), metadata_line)
+ self.__assert_command_is( expected_command )
+
+ def __assert_command_is(self, expected_command):
+ command = self.__command()
+ self.assertEqual(command, expected_command)
+
+ def __command(self):
+ kwds = dict(
+ job=self.job,
+ job_wrapper=self.job_wrapper,
+ include_metadata=self.include_metadata,
+ include_work_dir_outputs=self.include_work_dir_outputs,
+ )
+ return build_command(**kwds)
+
+
+class MockJobWrapper(object):
+
+ def __init__(self):
+ self.version_string_cmd = None
+ self.command_line = MOCK_COMMAND_LINE
+ self.dependency_shell_commands = []
+ self.metadata_line = None
+ self.working_directory = "job1"
+
+ def get_command_line(self):
+ return self.command_line
+
+ @property
+ def requires_setting_metadata(self):
+ return self.metadata_line is not None
+
+ def setup_external_metadata(self, *args, **kwds):
+ return self.metadata_line
+
+ def get_output_fnames(self):
+ return []
+
+
+class MockJob(object):
+ app = Bunch()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jmchilton: Restore improvements to job output checking code.
by commits-noreply@bitbucket.org 16 Oct '13
by commits-noreply@bitbucket.org 16 Oct '13
16 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e4d476ccf783/
Changeset: e4d476ccf783
User: jmchilton
Date: 2013-10-16 19:46:59
Summary: Restore improvements to job output checking code.
Affected #: 5 files
diff -r d966d09f77d9a3fed4401eb1e5e64d17a57d684e -r e4d476ccf7832df0b1f65048d3784b010f84e59a lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -25,6 +25,7 @@
from galaxy.util.bunch import Bunch
from galaxy.util.expressions import ExpressionContext
from galaxy.util.json import from_json_string
+from .output_checker import check_output
log = logging.getLogger( __name__ )
@@ -1079,158 +1080,7 @@
self.cleanup()
def check_tool_output( self, stdout, stderr, tool_exit_code, job ):
- """
- Check the output of a tool - given the stdout, stderr, and the tool's
- exit code, return True if the tool exited succesfully and False
- otherwise. No exceptions should be thrown. If this code encounters
- an exception, it returns True so that the workflow can continue;
- otherwise, a bug in this code could halt workflow progress.
- Note that, if the tool did not define any exit code handling or
- any stdio/stderr handling, then it reverts back to previous behavior:
- if stderr contains anything, then False is returned.
- Note that the job id is just for messages.
- """
- # By default, the tool succeeded. This covers the case where the code
- # has a bug but the tool was ok, and it lets a workflow continue.
- success = True
-
- try:
- # Check exit codes and match regular expressions against stdout and
- # stderr if this tool was configured to do so.
- # If there is a regular expression for scanning stdout/stderr,
- # then we assume that the tool writer overwrote the default
- # behavior of just setting an error if there is *anything* on
- # stderr.
- if ( len( self.tool.stdio_regexes ) > 0 or
- len( self.tool.stdio_exit_codes ) > 0 ):
- # Check the exit code ranges in the order in which
- # they were specified. Each exit_code is a StdioExitCode
- # that includes an applicable range. If the exit code was in
- # that range, then apply the error level and add a message.
- # If we've reached a fatal error rule, then stop.
- max_error_level = galaxy.tools.StdioErrorLevel.NO_ERROR
- if tool_exit_code != None:
- for stdio_exit_code in self.tool.stdio_exit_codes:
- if ( tool_exit_code >= stdio_exit_code.range_start and
- tool_exit_code <= stdio_exit_code.range_end ):
- # Tack on a generic description of the code
- # plus a specific code description. For example,
- # this might prepend "Job 42: Warning (Out of Memory)\n".
- code_desc = stdio_exit_code.desc
- if ( None == code_desc ):
- code_desc = ""
- tool_msg = ( "%s: Exit code %d (%s)" % (
- galaxy.tools.StdioErrorLevel.desc( stdio_exit_code.error_level ),
- tool_exit_code,
- code_desc ) )
- log.info( "Job %s: %s" % (job.get_id_tag(), tool_msg) )
- stderr = tool_msg + "\n" + stderr
- max_error_level = max( max_error_level,
- stdio_exit_code.error_level )
- if ( max_error_level >=
- galaxy.tools.StdioErrorLevel.FATAL ):
- break
-
- if max_error_level < galaxy.tools.StdioErrorLevel.FATAL:
- # We'll examine every regex. Each regex specifies whether
- # it is to be run on stdout, stderr, or both. (It is
- # possible for neither stdout nor stderr to be scanned,
- # but those regexes won't be used.) We record the highest
- # error level, which are currently "warning" and "fatal".
- # If fatal, then we set the job's state to ERROR.
- # If warning, then we still set the job's state to OK
- # but include a message. We'll do this if we haven't seen
- # a fatal error yet
- for regex in self.tool.stdio_regexes:
- # If ( this regex should be matched against stdout )
- # - Run the regex's match pattern against stdout
- # - If it matched, then determine the error level.
- # o If it was fatal, then we're done - break.
- # Repeat the stdout stuff for stderr.
- # TODO: Collapse this into a single function.
- if ( regex.stdout_match ):
- regex_match = re.search( regex.match, stdout,
- re.IGNORECASE )
- if ( regex_match ):
- rexmsg = self.regex_err_msg( regex_match, regex)
- log.info( "Job %s: %s"
- % ( job.get_id_tag(), rexmsg ) )
- stdout = rexmsg + "\n" + stdout
- max_error_level = max( max_error_level,
- regex.error_level )
- if ( max_error_level >=
- galaxy.tools.StdioErrorLevel.FATAL ):
- break
-
- if ( regex.stderr_match ):
- regex_match = re.search( regex.match, stderr,
- re.IGNORECASE )
- if ( regex_match ):
- rexmsg = self.regex_err_msg( regex_match, regex)
- log.info( "Job %s: %s"
- % ( job.get_id_tag(), rexmsg ) )
- stderr = rexmsg + "\n" + stderr
- max_error_level = max( max_error_level,
- regex.error_level )
- if ( max_error_level >=
- galaxy.tools.StdioErrorLevel.FATAL ):
- break
-
- # If we encountered a fatal error, then we'll need to set the
- # job state accordingly. Otherwise the job is ok:
- if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
- success = False
- else:
- success = True
-
- # When there are no regular expressions and no exit codes to check,
- # default to the previous behavior: when there's anything on stderr
- # the job has an error, and the job is ok otherwise.
- else:
- # TODO: Add in the tool and job id:
- # log.debug( "Tool did not define exit code or stdio handling; "
- # + "checking stderr for success" )
- if stderr:
- success = False
- else:
- success = True
-
- # On any exception, return True.
- except:
- tb = traceback.format_exc()
- log.warning( "Tool check encountered unexpected exception; "
- + "assuming tool was successful: " + tb )
- success = True
-
- # Store the modified stdout and stderr in the job:
- if None != job:
- job.stdout = stdout
- job.stderr = stderr
-
- return success
-
- def regex_err_msg( self, match, regex ):
- """
- Return a message about the match on tool output using the given
- ToolStdioRegex regex object. The regex_match is a MatchObject
- that will contain the string matched on.
- """
- # Get the description for the error level:
- err_msg = galaxy.tools.StdioErrorLevel.desc( regex.error_level ) + ": "
- # If there's a description for the regular expression, then use it.
- # Otherwise, we'll take the first 256 characters of the match.
- if None != regex.desc:
- err_msg += regex.desc
- else:
- mstart = match.start()
- mend = match.end()
- err_msg += "Matched on "
- # TODO: Move the constant 256 somewhere else besides here.
- if mend - mstart > 256:
- err_msg += match.string[ mstart : mstart+256 ] + "..."
- else:
- err_msg += match.string[ mstart: mend ]
- return err_msg
+ return check_output( self.tool, stdout, stderr, tool_exit_code, job )
def cleanup( self ):
# remove temporary files
diff -r d966d09f77d9a3fed4401eb1e5e64d17a57d684e -r e4d476ccf7832df0b1f65048d3784b010f84e59a lib/galaxy/jobs/error_level.py
--- /dev/null
+++ b/lib/galaxy/jobs/error_level.py
@@ -0,0 +1,25 @@
+
+
+# These determine stdio-based error levels from matching on regular expressions
+# and exit codes. They are meant to be used comparatively, such as showing
+# that warning < fatal. This is really meant to just be an enum.
+class StdioErrorLevel( object ):
+ NO_ERROR = 0
+ LOG = 1
+ WARNING = 2
+ FATAL = 3
+ MAX = 3
+ descs = {
+ NO_ERROR: 'No error',
+ LOG: 'Log',
+ WARNING: 'Warning',
+ FATAL: 'Fatal error',
+ }
+
+ @staticmethod
+ def desc( error_level ):
+ err_msg = "Unknown error"
+ if ( error_level > 0 and
+ error_level <= StdioErrorLevel.MAX ):
+ err_msg = StdioErrorLevel.descs[ error_level ]
+ return err_msg
diff -r d966d09f77d9a3fed4401eb1e5e64d17a57d684e -r e4d476ccf7832df0b1f65048d3784b010f84e59a lib/galaxy/jobs/output_checker.py
--- /dev/null
+++ b/lib/galaxy/jobs/output_checker.py
@@ -0,0 +1,164 @@
+import re
+from .error_level import StdioErrorLevel
+import traceback
+
+from logging import getLogger
+log = getLogger( __name__ )
+
+
+def check_output( tool, stdout, stderr, tool_exit_code, job ):
+ """
+ Check the output of a tool - given the stdout, stderr, and the tool's
+ exit code, return True if the tool exited succesfully and False
+ otherwise. No exceptions should be thrown. If this code encounters
+ an exception, it returns True so that the workflow can continue;
+ otherwise, a bug in this code could halt workflow progress.
+
+ Note that, if the tool did not define any exit code handling or
+ any stdio/stderr handling, then it reverts back to previous behavior:
+ if stderr contains anything, then False is returned.
+
+ Note that the job id is just for messages.
+ """
+ # By default, the tool succeeded. This covers the case where the code
+ # has a bug but the tool was ok, and it lets a workflow continue.
+ success = True
+
+ try:
+ # Check exit codes and match regular expressions against stdout and
+ # stderr if this tool was configured to do so.
+ # If there is a regular expression for scanning stdout/stderr,
+ # then we assume that the tool writer overwrote the default
+ # behavior of just setting an error if there is *anything* on
+ # stderr.
+ if ( len( tool.stdio_regexes ) > 0 or
+ len( tool.stdio_exit_codes ) > 0 ):
+ # Check the exit code ranges in the order in which
+ # they were specified. Each exit_code is a StdioExitCode
+ # that includes an applicable range. If the exit code was in
+ # that range, then apply the error level and add a message.
+ # If we've reached a fatal error rule, then stop.
+ max_error_level = StdioErrorLevel.NO_ERROR
+ if tool_exit_code != None:
+ for stdio_exit_code in tool.stdio_exit_codes:
+ if ( tool_exit_code >= stdio_exit_code.range_start and
+ tool_exit_code <= stdio_exit_code.range_end ):
+ # Tack on a generic description of the code
+ # plus a specific code description. For example,
+ # this might prepend "Job 42: Warning (Out of Memory)\n".
+ code_desc = stdio_exit_code.desc
+ if ( None == code_desc ):
+ code_desc = ""
+ tool_msg = ( "%s: Exit code %d (%s)" % (
+ StdioErrorLevel.desc( stdio_exit_code.error_level ),
+ tool_exit_code,
+ code_desc ) )
+ log.info( "Job %s: %s" % (job.get_id_tag(), tool_msg) )
+ stderr = tool_msg + "\n" + stderr
+ max_error_level = max( max_error_level,
+ stdio_exit_code.error_level )
+ if ( max_error_level >=
+ StdioErrorLevel.FATAL ):
+ break
+
+ if max_error_level < StdioErrorLevel.FATAL:
+ # We'll examine every regex. Each regex specifies whether
+ # it is to be run on stdout, stderr, or both. (It is
+ # possible for neither stdout nor stderr to be scanned,
+ # but those regexes won't be used.) We record the highest
+ # error level, which are currently "warning" and "fatal".
+ # If fatal, then we set the job's state to ERROR.
+ # If warning, then we still set the job's state to OK
+ # but include a message. We'll do this if we haven't seen
+ # a fatal error yet
+ for regex in tool.stdio_regexes:
+ # If ( this regex should be matched against stdout )
+ # - Run the regex's match pattern against stdout
+ # - If it matched, then determine the error level.
+ # o If it was fatal, then we're done - break.
+ # Repeat the stdout stuff for stderr.
+ # TODO: Collapse this into a single function.
+ if ( regex.stdout_match ):
+ regex_match = re.search( regex.match, stdout,
+ re.IGNORECASE )
+ if ( regex_match ):
+ rexmsg = __regex_err_msg( regex_match, regex)
+ log.info( "Job %s: %s"
+ % ( job.get_id_tag(), rexmsg ) )
+ stdout = rexmsg + "\n" + stdout
+ max_error_level = max( max_error_level,
+ regex.error_level )
+ if ( max_error_level >=
+ StdioErrorLevel.FATAL ):
+ break
+
+ if ( regex.stderr_match ):
+ regex_match = re.search( regex.match, stderr,
+ re.IGNORECASE )
+ if ( regex_match ):
+ rexmsg = __regex_err_msg( regex_match, regex)
+ log.info( "Job %s: %s"
+ % ( job.get_id_tag(), rexmsg ) )
+ stderr = rexmsg + "\n" + stderr
+ max_error_level = max( max_error_level,
+ regex.error_level )
+ if ( max_error_level >=
+ StdioErrorLevel.FATAL ):
+ break
+
+ # If we encountered a fatal error, then we'll need to set the
+ # job state accordingly. Otherwise the job is ok:
+ if max_error_level >= StdioErrorLevel.FATAL:
+ success = False
+ else:
+ success = True
+
+ # When there are no regular expressions and no exit codes to check,
+ # default to the previous behavior: when there's anything on stderr
+ # the job has an error, and the job is ok otherwise.
+ else:
+ # TODO: Add in the tool and job id:
+ # log.debug( "Tool did not define exit code or stdio handling; "
+ # + "checking stderr for success" )
+ if stderr:
+ success = False
+ else:
+ success = True
+
+ # On any exception, return True.
+ except:
+ tb = traceback.format_exc()
+ log.warning( "Tool check encountered unexpected exception; "
+ + "assuming tool was successful: " + tb )
+ success = True
+
+ # Store the modified stdout and stderr in the job:
+ if None != job:
+ job.stdout = stdout
+ job.stderr = stderr
+
+ return success
+
+
+def __regex_err_msg( match, regex ):
+ """
+ Return a message about the match on tool output using the given
+ ToolStdioRegex regex object. The regex_match is a MatchObject
+ that will contain the string matched on.
+ """
+ # Get the description for the error level:
+ err_msg = StdioErrorLevel.desc( regex.error_level ) + ": "
+ # If there's a description for the regular expression, then use it.
+ # Otherwise, we'll take the first 256 characters of the match.
+ if None != regex.desc:
+ err_msg += regex.desc
+ else:
+ mstart = match.start()
+ mend = match.end()
+ err_msg += "Matched on "
+ # TODO: Move the constant 256 somewhere else besides here.
+ if mend - mstart > 256:
+ err_msg += match.string[ mstart : mstart + 256 ] + "..."
+ else:
+ err_msg += match.string[ mstart: mend ]
+ return err_msg
diff -r d966d09f77d9a3fed4401eb1e5e64d17a57d684e -r e4d476ccf7832df0b1f65048d3784b010f84e59a lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -34,6 +34,7 @@
from sqlalchemy import and_
from galaxy import jobs, model
+from galaxy.jobs.error_level import StdioErrorLevel
from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper
from galaxy.jobs import ParallelismInfo
from galaxy.tools.actions import DefaultToolAction
@@ -64,33 +65,11 @@
from tool_shed.util import shed_util_common
from .loader import load_tool, template_macro_params
+
log = logging.getLogger( __name__ )
WORKFLOW_PARAMETER_REGULAR_EXPRESSION = re.compile( '''\$\{.+?\}''' )
-# These determine stdio-based error levels from matching on regular expressions
-# and exit codes. They are meant to be used comparatively, such as showing
-# that warning < fatal. This is really meant to just be an enum.
-class StdioErrorLevel( object ):
- NO_ERROR = 0
- LOG = 1
- WARNING = 2
- FATAL = 3
- MAX = 3
- descs = {
- NO_ERROR : 'No error',
- LOG: 'Log',
- WARNING : 'Warning',
- FATAL : 'Fatal error'
- }
- @staticmethod
- def desc( error_level ):
- err_msg = "Unknown error"
- if ( error_level > 0 and
- error_level <= StdioErrorLevel.MAX ):
- err_msg = StdioErrorLevel.descs[ error_level ]
- return err_msg
-
class ToolNotFoundException( Exception ):
pass
diff -r d966d09f77d9a3fed4401eb1e5e64d17a57d684e -r e4d476ccf7832df0b1f65048d3784b010f84e59a test/unit/test_job_output_checker.py
--- /dev/null
+++ b/test/unit/test_job_output_checker.py
@@ -0,0 +1,62 @@
+from unittest import TestCase
+from galaxy.util.bunch import Bunch
+from galaxy.jobs.output_checker import check_output
+from galaxy.jobs.error_level import StdioErrorLevel
+
+
+class OutputCheckerTestCase( TestCase ):
+
+ def setUp( self ):
+ self.tool = Bunch(
+ stdio_regexes=[],
+ stdio_exit_codes=[],
+ )
+ self.job = Bunch(
+ stdout=None,
+ stderr=None,
+ get_id_tag=lambda: "test_id",
+ )
+ self.stdout = ''
+ self.stderr = ''
+ self.tool_exit_code = None
+
+ def test_default_no_stderr_success( self ):
+ self.__assertSuccessful()
+
+ def test_default_stderr_failure( self ):
+ self.stderr = 'foo'
+ self.__assertNotSuccessful()
+
+ def test_exit_code_error( self ):
+ mock_exit_code = Bunch( range_start=1, range_end=1, error_level=StdioErrorLevel.FATAL, desc=None )
+ self.tool.stdio_exit_codes.append( mock_exit_code )
+ self.tool_exit_code = 1
+ self.__assertNotSuccessful()
+
+ def test_exit_code_success( self ):
+ mock_exit_code = Bunch( range_start=1, range_end=1, error_level=StdioErrorLevel.FATAL, desc=None )
+ self.tool.stdio_exit_codes.append( mock_exit_code )
+ self.tool_exit_code = 0
+ self.__assertSuccessful()
+
+ def test_problematic_strings( self ):
+ problematic_str = '\x80abc'
+ regex_rule = Bunch( match=r'.abc', stdout_match=False, stderr_match=True, error_level=StdioErrorLevel.FATAL, desc=None )
+ self.tool.stdio_regexes = [ regex_rule ]
+ self.stderr = problematic_str
+ self.__assertNotSuccessful()
+
+ problematic_str = '\x80abc'
+ regex_rule = Bunch( match=r'.abcd', stdout_match=False, stderr_match=True, error_level=StdioErrorLevel.FATAL, desc=None )
+ self.tool.stdio_regexes = [ regex_rule ]
+ self.stderr = problematic_str
+ self.__assertSuccessful()
+
+ def __assertSuccessful( self ):
+ self.assertTrue( self.__check_output() )
+
+ def __assertNotSuccessful( self ):
+ self.assertFalse( self.__check_output() )
+
+ def __check_output( self ):
+ return check_output( self.tool, self.stdout, self.stderr, self.tool_exit_code, self.job )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Remove tipsy because it has been replaced by bootstrap tooltips.
by commits-noreply@bitbucket.org 16 Oct '13
by commits-noreply@bitbucket.org 16 Oct '13
16 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d966d09f77d9/
Changeset: d966d09f77d9
User: jgoecks
Date: 2013-10-16 19:03:47
Summary: Remove tipsy because it has been replaced by bootstrap tooltips.
Affected #: 4 files
diff -r 2d78abc47928918a14f56711657696724cd9af72 -r d966d09f77d9a3fed4401eb1e5e64d17a57d684e static/scripts/galaxy.base.js
--- a/static/scripts/galaxy.base.js
+++ b/static/scripts/galaxy.base.js
@@ -666,11 +666,6 @@
});
// Tooltips
- // if ( $.fn.tipsy ) {
- // // FIXME: tipsy gravity cannot be updated, so need classes that specify N/S gravity and
- // // initialize each separately.
- // $(".tooltip").tipsy( { gravity: 's' } );
- // }
if ( $.fn.tooltip ) {
// Put tooltips below items in panel header so that they do not overlap masthead.
$(".unified-panel-header [title]").tooltip( { placement: 'bottom' } );
diff -r 2d78abc47928918a14f56711657696724cd9af72 -r d966d09f77d9a3fed4401eb1e5e64d17a57d684e static/scripts/libs/jquery/jquery.tipsy.js
--- a/static/scripts/libs/jquery/jquery.tipsy.js
+++ /dev/null
@@ -1,172 +0,0 @@
-/* NOTE: MODIFIED FROM ORIGINAL! */
-
-(function($) {
- function fixTitle($ele) {
- if ($ele.attr('title') || typeof($ele.attr('original-title')) != 'string') {
- $ele.attr('original-title', $ele.attr('title') || '').removeAttr('title');
- }
- }
-
- $.fn.tipsy = function(options) {
-
- options = $.extend({}, $.fn.tipsy.defaults, options);
-
- return this.each(function() {
-
- fixTitle($(this));
- var opts = $.fn.tipsy.elementOptions(this, options);
- var timeout = null;
-
- $(this).hover(function() {
- var self = this;
- timeout = setTimeout(function() {
- $.data(self, 'cancel.tipsy', true);
-
- var tip = $.data(self, 'active.tipsy');
- if (!tip) {
- tip = $('<div class="tipsy"><div class="tipsy-inner"/></div>');
- tip.css({position: 'absolute', zIndex: 100000});
- $.data(self, 'active.tipsy', tip);
- }
-
- fixTitle($(self));
-
- var title;
- if (typeof opts.title == 'string') {
- title = $(self).attr(opts.title == 'title' ? 'original-title' : opts.title);
- } else if (typeof opts.title == 'function') {
- title = opts.title.call(self);
- }
-
- tip.find('.tipsy-inner')[opts.html ? 'html' : 'text'](title || opts.fallback);
-
-
- var pos = $.extend({}, $(self).offset(), {width: self.offsetWidth, height: self.offsetHeight});
- tip.get(0).className = 'tipsy'; // reset classname in case of dynamic gravity
- tip.remove().css({top: 0, left: 0, visibility: 'hidden', display: 'block'}).appendTo(document.body);
-
- tip.css( { width: tip.width() + 1, height: tip.height() } );
-
- var actualWidth = tip[0].offsetWidth, actualHeight = tip[0].offsetHeight;
- var gravity = (typeof opts.gravity == 'function') ? opts.gravity.call(self) : opts.gravity;
-
- var top, left;
- switch (gravity.charAt(0)) {
- case 'n':
- top = pos.top + pos.height;
- left = pos.left + pos.width / 2 - actualWidth / 2;
- tip.addClass('tipsy-north');
- break;
- case 's':
- top = pos.top - actualHeight;
- left = pos.left + pos.width / 2 - actualWidth / 2;
- tip.addClass('tipsy-south');
- break;
- case 'e':
- top = pos.top + pos.height / 2 - actualHeight / 2;
- left = pos.left - actualWidth;
- tip.addClass('tipsy-east');
- break;
- case 'w':
- top = pos.top + pos.height / 2 - actualHeight / 2;
- left = pos.left + pos.width;
- tip.addClass('tipsy-west');
- break;
- }
- // Shift if off screen
- var w = $(window);
-
- // If off the top of the screen, flip
- if ( top < w.scrollTop() && gravity.charAt( 0 ) == 's' ) {
- top = pos.top + pos.height;
- gravity = 'north';
- tip.removeClass('tipsy-south').addClass('tipsy-north');
- }
-
- // If off bottom, just shift for now
- top = Math.min( top, w.scrollTop() + w.height() - tip.outerHeight() );
-
-
- // Shift left or right
- var left_shift = 0;
- if ( left < w.scrollLeft() ) {
- left_shift = left - w.scrollLeft();
- }
- var t = w.scrollLeft() + w.width() - tip.outerWidth();
- if ( left > t ) {
- left_shift = left - t;
- }
-
- left -= left_shift;
-
- tip.css( { left: left, top: top } );
-
- // Shift background to center over element (not implemented for east/west)
- switch (gravity.charAt(0)) {
- case 'n':
- tip.css( 'background-position', - ( 250 - tip.outerWidth() / 2 ) + left_shift + "px top" );
- break;
- case 's':
- tip.css( 'background-position', - ( 250 - tip.outerWidth() / 2 ) + left_shift + "px bottom" );
- break;
- case 'e':
- break;
- case 'w':
- break;
- }
-
- if (opts.fade) {
- tip.stop().css({opacity: 0, display: 'block', visibility: 'visible'}).animate({opacity: opts.opacity});
- } else {
- tip.css({visibility: 'visible', opacity: opts.opacity});
- }
- }, opts.delayIn);
-
- }, function() {
- $.data(this, 'cancel.tipsy', false);
- var self = this;
- clearTimeout(timeout);
- setTimeout(function() {
- if ($.data(this, 'cancel.tipsy')) { return; }
- var tip = $.data(self, 'active.tipsy');
- if (opts.fade) {
- tip.stop().fadeOut(function() { $(this).remove(); });
- } else if (tip) {
- tip.remove();
- }
- }, opts.delayOut);
-
- });
-
- });
-
- };
-
- // Overwrite this method to provide options on a per-element basis.
- // For example, you could store the gravity in a 'tipsy-gravity' attribute:
- // return $.extend({}, options, {gravity: $(ele).attr('tipsy-gravity') || 'n' });
- // (remember - do not modify 'options' in place!)
- $.fn.tipsy.elementOptions = function(ele, options) {
- return $.metadata ? $.extend({}, options, $(ele).metadata()) : options;
- };
-
- $.fn.tipsy.defaults = {
- delayIn: 0,
- delayOut: 100,
- fade: false,
- fallback: '',
- gravity: 'n',
- html: false,
- opacity: 0.8,
- title: 'title'
- };
-
- $.fn.tipsy.autoNS = function() {
- return $(this).offset().top > ($(document).scrollTop() + $(window).height() / 2) ? 's' : 'n';
- };
-
- $.fn.tipsy.autoWE = function() {
- return $(this).offset().left > ($(document).scrollLeft() + $(window).width() / 2) ? 'e' : 'w';
- };
-
-})(jQuery);
diff -r 2d78abc47928918a14f56711657696724cd9af72 -r d966d09f77d9a3fed4401eb1e5e64d17a57d684e static/scripts/packed/libs/jquery/jquery.tipsy.js
--- a/static/scripts/packed/libs/jquery/jquery.tipsy.js
+++ /dev/null
@@ -1,1 +0,0 @@
-(function(b){function a(c){if(c.attr("title")||typeof(c.attr("original-title"))!="string"){c.attr("original-title",c.attr("title")||"").removeAttr("title")}}b.fn.tipsy=function(c){c=b.extend({},b.fn.tipsy.defaults,c);return this.each(function(){a(b(this));var d=b.fn.tipsy.elementOptions(this,c);var e=null;b(this).hover(function(){var f=this;e=setTimeout(function(){b.data(f,"cancel.tipsy",true);var o=b.data(f,"active.tipsy");if(!o){o=b('<div class="tipsy"><div class="tipsy-inner"/></div>');o.css({position:"absolute",zIndex:100000});b.data(f,"active.tipsy",o)}a(b(f));var m;if(typeof d.title=="string"){m=b(f).attr(d.title=="title"?"original-title":d.title)}else{if(typeof d.title=="function"){m=d.title.call(f)}}o.find(".tipsy-inner")[d.html?"html":"text"](m||d.fallback);var k=b.extend({},b(f).offset(),{width:f.offsetWidth,height:f.offsetHeight});o.get(0).className="tipsy";o.remove().css({top:0,left:0,visibility:"hidden",display:"block"}).appendTo(document.body);o.css({width:o.width()+1,height:o.height()});var h=o[0].offsetWidth,j=o[0].offsetHeight;var q=(typeof d.gravity=="function")?d.gravity.call(f):d.gravity;var l,i;switch(q.charAt(0)){case"n":l=k.top+k.height;i=k.left+k.width/2-h/2;o.addClass("tipsy-north");break;case"s":l=k.top-j;i=k.left+k.width/2-h/2;o.addClass("tipsy-south");break;case"e":l=k.top+k.height/2-j/2;i=k.left-h;o.addClass("tipsy-east");break;case"w":l=k.top+k.height/2-j/2;i=k.left+k.width;o.addClass("tipsy-west");break}var n=b(window);if(l<n.scrollTop()&&q.charAt(0)=="s"){l=k.top+k.height;q="north";o.removeClass("tipsy-south").addClass("tipsy-north")}l=Math.min(l,n.scrollTop()+n.height()-o.outerHeight());var g=0;if(i<n.scrollLeft()){g=i-n.scrollLeft()}var p=n.scrollLeft()+n.width()-o.outerWidth();if(i>p){g=i-p}i-=g;o.css({left:i,top:l});switch(q.charAt(0)){case"n":o.css("background-position",-(250-o.outerWidth()/2)+g+"px top");break;case"s":o.css("background-position",-(250-o.outerWidth()/2)+g+"px 
bottom");break;case"e":break;case"w":break}if(d.fade){o.stop().css({opacity:0,display:"block",visibility:"visible"}).animate({opacity:d.opacity})}else{o.css({visibility:"visible",opacity:d.opacity})}},d.delayIn)},function(){b.data(this,"cancel.tipsy",false);var f=this;clearTimeout(e);setTimeout(function(){if(b.data(this,"cancel.tipsy")){return}var g=b.data(f,"active.tipsy");if(d.fade){g.stop().fadeOut(function(){b(this).remove()})}else{if(g){g.remove()}}},d.delayOut)})})};b.fn.tipsy.elementOptions=function(d,c){return b.metadata?b.extend({},c,b(d).metadata()):c};b.fn.tipsy.defaults={delayIn:0,delayOut:100,fade:false,fallback:"",gravity:"n",html:false,opacity:0.8,title:"title"};b.fn.tipsy.autoNS=function(){return b(this).offset().top>(b(document).scrollTop()+b(window).height()/2)?"s":"n"};b.fn.tipsy.autoWE=function(){return b(this).offset().left>(b(document).scrollLeft()+b(window).width()/2)?"e":"w"}})(jQuery);
\ No newline at end of file
diff -r 2d78abc47928918a14f56711657696724cd9af72 -r d966d09f77d9a3fed4401eb1e5e64d17a57d684e static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -292,7 +292,7 @@
title: "Remove",
css_class: "remove-icon",
on_click_fn: function(drawable) {
- // Tipsy for remove icon must be deleted when drawable is deleted.
+ // Tooltip for remove icon must be deleted when drawable is deleted.
$(".tooltip").remove();
drawable.remove();
}
@@ -642,7 +642,7 @@
title: "Filters",
css_class: "filters-icon",
on_click_fn: function(group) {
- // TODO: update tipsy text.
+ // TODO: update Tooltip text.
if (group.filters_manager.visible()) {
// Hiding filters.
group.filters_manager.clear_filters();
@@ -2409,7 +2409,7 @@
title: "Filters",
css_class: "filters-icon",
on_click_fn: function(drawable) {
- // TODO: update tipsy text.
+ // TODO: update Tooltip text.
if (drawable.filters_manager.visible()) {
drawable.filters_manager.clear_filters();
}
@@ -2425,7 +2425,7 @@
title: "Tool",
css_class: "hammer",
on_click_fn: function(track) {
- // TODO: update tipsy text.
+ // TODO: update Tooltip text.
track.tool.toggle();
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: martenson: honeypot trap for bot registering accounts
by commits-noreply@bitbucket.org 16 Oct '13
by commits-noreply@bitbucket.org 16 Oct '13
16 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2d78abc47928/
Changeset: 2d78abc47928
User: martenson
Date: 2013-10-16 17:40:02
Summary: honeypot trap for bot registering accounts
Affected #: 2 files
diff -r d3d4210d0abee42c4c7cb8d66b8a325f340899c1 -r 2d78abc47928918a14f56711657696724cd9af72 lib/galaxy/webapps/galaxy/controllers/user.py
--- a/lib/galaxy/webapps/galaxy/controllers/user.py
+++ b/lib/galaxy/webapps/galaxy/controllers/user.py
@@ -594,6 +594,12 @@
@web.expose
def create( self, trans, cntrller='user', redirect_url='', refresh_frames=[], **kwd ):
params = util.Params( kwd )
+
+ # If the honeypot field is not empty we are dealing with a bot.
+ honeypot_field = params.get( 'bear_field', '' )
+ if honeypot_field != '':
+ return trans.show_error_message( "You are considered a bot. If you are not one please try registering again and follow the form's legend. <a target=\"_top\" href=\"%s\">Go to the home page</a>." ) % url_for( '/' )
+
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
use_panels = util.string_as_bool( kwd.get( 'use_panels', True ) )
diff -r d3d4210d0abee42c4c7cb8d66b8a325f340899c1 -r 2d78abc47928918a14f56711657696724cd9af72 templates/user/register.mako
--- a/templates/user/register.mako
+++ b/templates/user/register.mako
@@ -162,7 +162,7 @@
%endif
<div id="for_bears">
If you see this, please leave the following field blank.
- <input type="text" name="please leave this field blank" size="1" value=""/>
+ <input type="text" name="bear_field" size="1" value=""/></div><div class="form-row"><input type="submit" id="send" name="create_user_button" value="Submit"/>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: guerler: Modify bowtie error handler
by commits-noreply@bitbucket.org 16 Oct '13
by commits-noreply@bitbucket.org 16 Oct '13
16 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d3d4210d0abe/
Changeset: d3d4210d0abe
User: guerler
Date: 2013-10-16 17:03:51
Summary: Modify bowtie error handler
Affected #: 3 files
diff -r 940a310179c4ad9cfe7dabae2ac71402608a636a -r d3d4210d0abee42c4c7cb8d66b8a325f340899c1 static/style/blue/base.css
--- a/static/style/blue/base.css
+++ b/static/style/blue/base.css
@@ -1129,7 +1129,7 @@
.upload-box .table th{text-align:center;white-space:nowrap}
.upload-box .table td{margin:0px;padding:0px}
.upload-box .title{width:130px;word-wrap:break-word;font-size:11px}
-.upload-box .text{position:absolute;display:none}.upload-box .text .text-content{font-size:11px;width:100%;height:50px;resize:none;background:inherit}
+.upload-box .text{position:absolute;display:none}.upload-box .text .text-content{font-size:11px;width:100%;height:50px;resize:none;background:inherit;color:#000}
.upload-box .text .text-info{font-size:11px;color:#999}
.upload-box .extension{width:100px;font-size:11px}
.upload-box .genome{width:150px;font-size:11px}
diff -r 940a310179c4ad9cfe7dabae2ac71402608a636a -r d3d4210d0abee42c4c7cb8d66b8a325f340899c1 static/style/src/less/upload.less
--- a/static/style/src/less/upload.less
+++ b/static/style/src/less/upload.less
@@ -45,10 +45,11 @@
.text-content {
font-size : @font-size-small;
- width : 100%;
- height : 50px;
- resize : none;
- background : inherit;
+ width: 100%;
+ height: 50px;
+ resize: none;
+ background: inherit;
+ color: @black;
}
.text-info {
diff -r 940a310179c4ad9cfe7dabae2ac71402608a636a -r d3d4210d0abee42c4c7cb8d66b8a325f340899c1 tools/sr_mapping/bowtie2_wrapper.xml
--- a/tools/sr_mapping/bowtie2_wrapper.xml
+++ b/tools/sr_mapping/bowtie2_wrapper.xml
@@ -109,7 +109,7 @@
<!-- basic error handling --><stdio>
- <regex match="Exception" source="stderr" level="fatal" description="Tool exception"/>
+ <exit_code range="1:" level="fatal" description="Tool exception" /></stdio><inputs>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0