galaxy-commits
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/758f42549867/
changeset: 758f42549867
user: dannon
date: 2012-04-02 20:14:24
summary: Update cloud launch config name.
affected #: 1 file
diff -r 5ea04ccb61e8373369c41370dd3ee216a7dc6d14 -r 758f425498670dbe992701df81d277dbc7cd6de7 templates/webapps/galaxy/base_panels.mako
--- a/templates/webapps/galaxy/base_panels.mako
+++ b/templates/webapps/galaxy/base_panels.mako
@@ -110,7 +110,7 @@
%endif
## Cloud menu.
- %if app.config.get_bool( 'enable_cloud_control', False ):
+ %if app.config.get_bool( 'enable_cloud_launch', False ):
<%
menu_options = [
[_('New Cloud Cluster'), h.url_for( controller='/cloud', action='index' ) ],
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
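The renamed option is read through the application config's boolean lookup before the Cloud menu is rendered, so only the new name has any effect. A rough standalone sketch of that lookup pattern (a hypothetical helper, not Galaxy's actual Config class):

# Illustrative get_bool-style lookup; names and parsing rules here are
# assumptions, not Galaxy's implementation.
def get_bool(options, key, default=False):
    """Interpret options[key] as a boolean, falling back to default if unset."""
    value = options.get(key)
    if value is None:
        return default
    return str(value).strip().lower() in ('true', 'yes', 'on', '1')

# Hypothetical parsed config section: only the new option name is honored.
options = {'enable_cloud_launch': 'True'}
print(get_bool(options, 'enable_cloud_launch'))   # True -> Cloud menu is shown
print(get_bool(options, 'enable_cloud_control'))  # False -> old name is ignored

Deployments that still set enable_cloud_control would need to switch to enable_cloud_launch for the menu to appear.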

commit/galaxy-central: greg: Make sure repository files displayed during installation are restricted to a max file size and contents are viewable in a browser.
by Bitbucket 02 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5ea04ccb61e8/
changeset: 5ea04ccb61e8
user: greg
date: 2012-04-02 19:27:31
summary: Make sure repository files displayed during installation are restricted to a max file size and contents are viewable in a browser.
affected #: 4 files
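The changeset pairs character-level HTML escaping with a hard size cap so arbitrary repository files can be displayed safely in a browser. A rough standalone sketch of that escape-and-truncate pattern (simplified stand-ins for the to_html_str and MAX_CONTENT_SIZE logic added below, not a copy of it):

# Sketch of the escape-and-truncate pattern; simplified relative to the diff.
MAPPED_CHARS = {'>': '&gt;', '<': '&lt;', '"': '&quot;', '&': '&amp;', "'": '&#39;'}
MAX_CONTENT_SIZE = 32768  # same viewing limit the changeset uses

def escape_line(line):
    out = []
    for c in line:
        if c in MAPPED_CHARS:
            out.append(MAPPED_CHARS[c])  # HTML-significant characters become entities
        elif c == '\n':
            out.append('<br/>')          # preserve line breaks in rendered HTML
        elif c == '\t' or c.isprintable():
            out.append(c)
        else:
            out.append('X')              # mask anything not safe to display
    return ''.join(out)

def render_file(path):
    html = ''
    with open(path) as handle:
        for line in handle:
            html += escape_line(line)
            if len(html) > MAX_CONTENT_SIZE:
                html += escape_line('\nFile contents truncated...\n')
                break
    return html

The real changeset additionally whitelists a VALID_CHARS set and substitutes &nbsp; for spaces and tabs, but the control flow is the same: escape each line, append it to the buffer, and stop with a truncation notice once the buffer exceeds the cap.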
diff -r 319c7ee6873c53e57aa6b37271cb7c6afb067612 -r 5ea04ccb61e8373369c41370dd3ee216a7dc6d14 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,4 +1,4 @@
-import os, tempfile, shutil, subprocess, logging
+import os, tempfile, shutil, subprocess, logging, string
from datetime import date, datetime, timedelta
from time import strftime
from galaxy import util
@@ -13,6 +13,14 @@
log = logging.getLogger( __name__ )
+# Characters that must be html escaped
+MAPPED_CHARS = { '>' :'&gt;',
+ '<' :'&lt;',
+ '"' : '&quot;',
+ '&' : '&amp;',
+ '\'' : '&#39;' }
+VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
+
def add_to_shed_tool_config( app, shed_tool_conf_dict, elem_list ):
# A tool shed repository is being installed so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -1096,6 +1104,34 @@
if uninstall:
# Write the current in-memory version of the integrated_tool_panel.xml file to disk.
trans.app.toolbox.write_integrated_tool_panel_config_file()
+def to_html_escaped( text ):
+ """Translates the characters in text to html values"""
+ translated = []
+ for c in text:
+ if c in [ '\r\n', '\n', ' ', '\t' ] or c in VALID_CHARS:
+ translated.append( c )
+ elif c in MAPPED_CHARS:
+ translated.append( MAPPED_CHARS[ c ] )
+ else:
+ translated.append( 'X' )
+ return ''.join( translated )
+def to_html_str( text ):
+ """Translates the characters in text to sn html string"""
+ translated = []
+ for c in text:
+ if c in VALID_CHARS:
+ translated.append( c )
+ elif c in MAPPED_CHARS:
+ translated.append( MAPPED_CHARS[ c ] )
+ elif c == ' ':
+ translated.append( '&nbsp;' )
+ elif c == '\t':
+ translated.append( '&nbsp;&nbsp;&nbsp;&nbsp;' )
+ elif c == '\n':
+ translated.append( '<br/>' )
+ elif c not in [ '\r' ]:
+ translated.append( 'X' )
+ return ''.join( translated )
def update_repository( current_working_dir, repo_files_dir, changeset_revision ):
# Update the cloned repository to changeset_revision. It is imperative that the
# installed repository is updated to the desired changeset_revision before metadata
diff -r 319c7ee6873c53e57aa6b37271cb7c6afb067612 -r 5ea04ccb61e8373369c41370dd3ee216a7dc6d14 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -6,6 +6,8 @@
log = logging.getLogger( __name__ )
+MAX_CONTENT_SIZE = 32768
+
class RepositoryListGrid( grids.Grid ):
class NameColumn( grids.TextColumn ):
def get_value( self, trans, grid, tool_shed_repository ):
@@ -377,8 +379,15 @@
owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
url = '%s/repository/get_readme?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % ( tool_shed_url, name, owner, changeset_revision )
response = urllib2.urlopen( url )
- readme_text = response.read()
+ raw_text = response.read()
response.close()
+ readme_text = ''
+ for i, line in enumerate( raw_text ):
+ readme_text = '%s%s' % ( readme_text, to_html_str( line ) )
+ if len( readme_text ) > MAX_CONTENT_SIZE:
+ large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
+ readme_text = '%s%s' % ( readme_text, to_html_str( large_str ) )
+ break
else:
readme_text = ''
else:
diff -r 319c7ee6873c53e57aa6b37271cb7c6afb067612 -r 5ea04ccb61e8373369c41370dd3ee216a7dc6d14 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -6,7 +6,7 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
from galaxy.util.shed_util import copy_sample_loc_file, generate_datatypes_metadata, generate_tool_metadata, generate_workflow_metadata
-from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file
+from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file, to_html_escaped, to_html_str
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
from galaxy.model.orm import *
diff -r 319c7ee6873c53e57aa6b37271cb7c6afb067612 -r 5ea04ccb61e8373369c41370dd3ee216a7dc6d14 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -15,14 +15,7 @@
log = logging.getLogger( __name__ )
-# Characters that must be html escaped
-MAPPED_CHARS = { '>' :'&gt;',
- '<' :'&lt;',
- '"' : '&quot;',
- '&' : '&amp;',
- '\'' : '&#39;' }
MAX_CONTENT_SIZE = 32768
-VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
VALID_REPOSITORYNAME_RE = re.compile( "^[a-z0-9\_]+$" )
class CategoryListGrid( grids.Grid ):
@@ -785,7 +778,7 @@
repo = hg.repository( get_configured_ui(), repo_dir )
for root, dirs, files in os.walk( repo_dir ):
for name in files:
- if name.lower() in [ 'readme', 'read_me' ]:
+ if name.lower() in [ 'readme', 'readme.txt', 'read_me', 'read_me.txt' ]:
f = open( os.path.join( root, name ), 'r' )
text = f.read()
f.close()
@@ -1600,7 +1593,7 @@
anchors = modified + added + removed + deleted + unknown + ignored + clean
diffs = []
for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node() ):
- diffs.append( self.to_html_escaped( diff ) )
+ diffs.append( to_html_escaped( diff ) )
is_malicious = change_set_is_malicious( trans, id, repository.tip )
return trans.fill_template( '/webapps/community/repository/view_changeset.mako',
repository=repository,
@@ -1938,20 +1931,20 @@
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
if is_gzip( file_path ):
- to_html = self.to_html_str( '\ngzip compressed file\n' )
+ to_html = to_html_str( '\ngzip compressed file\n' )
elif is_bz2( file_path ):
- to_html = self.to_html_str( '\nbz2 compressed file\n' )
+ to_html = to_html_str( '\nbz2 compressed file\n' )
elif check_zip( file_path ):
- to_html = self.to_html_str( '\nzip compressed file\n' )
+ to_html = to_html_str( '\nzip compressed file\n' )
elif check_binary( file_path ):
- to_html = self.to_html_str( '\nBinary file\n' )
+ to_html = to_html_str( '\nBinary file\n' )
else:
to_html = ''
for i, line in enumerate( open( file_path ) ):
- to_html = '%s%s' % ( to_html, self.to_html_str( line ) )
+ to_html = '%s%s' % ( to_html, to_html_str( line ) )
if len( to_html ) > MAX_CONTENT_SIZE:
large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
- to_html = '%s%s' % ( to_html, self.to_html_str( large_str ) )
+ to_html = '%s%s' % ( to_html, to_html_str( large_str ) )
break
return to_html
@web.expose
@@ -1960,34 +1953,6 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
return trans.fill_template( '/webapps/community/repository/help.mako', message=message, status=status, **kwd )
- def to_html_escaped( self, text ):
- """Translates the characters in text to html values"""
- translated = []
- for c in text:
- if c in [ '\r\n', '\n', ' ', '\t' ] or c in VALID_CHARS:
- translated.append( c )
- elif c in MAPPED_CHARS:
- translated.append( MAPPED_CHARS[ c ] )
- else:
- translated.append( 'X' )
- return ''.join( translated )
- def to_html_str( self, text ):
- """Translates the characters in text to sn html string"""
- translated = []
- for c in text:
- if c in VALID_CHARS:
- translated.append( c )
- elif c in MAPPED_CHARS:
- translated.append( MAPPED_CHARS[ c ] )
- elif c == ' ':
- translated.append( '&nbsp;' )
- elif c == '\t':
- translated.append( '&nbsp;&nbsp;&nbsp;&nbsp;' )
- elif c == '\n':
- translated.append( '<br/>' )
- elif c not in [ '\r' ]:
- translated.append( 'X' )
- return ''.join( translated )
def __build_allow_push_select_field( self, trans, current_push_list, selected_value='none' ):
options = []
for user in trans.sa_session.query( trans.model.User ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
11 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/15e75fe2efae/
changeset: 15e75fe2efae
user: jgoecks
date: 2012-03-30 17:30:49
summary: Remove full handlebars library and replace with runtime.
affected #: 2 files
diff -r de2946aca8877087d761684a72ff955f46e4f5a6 -r 15e75fe2efae761ac7182a496d5c79ef3bc3a43a static/scripts/libs/handlebars-1.0.0.beta.6.js
--- a/static/scripts/libs/handlebars-1.0.0.beta.6.js
+++ /dev/null
@@ -1,1550 +0,0 @@
-// lib/handlebars/base.js
-var Handlebars = {};
-
-Handlebars.VERSION = "1.0.beta.6";
-
-Handlebars.helpers = {};
-Handlebars.partials = {};
-
-Handlebars.registerHelper = function(name, fn, inverse) {
- if(inverse) { fn.not = inverse; }
- this.helpers[name] = fn;
-};
-
-Handlebars.registerPartial = function(name, str) {
- this.partials[name] = str;
-};
-
-Handlebars.registerHelper('helperMissing', function(arg) {
- if(arguments.length === 2) {
- return undefined;
- } else {
- throw new Error("Could not find property '" + arg + "'");
- }
-});
-
-var toString = Object.prototype.toString, functionType = "[object Function]";
-
-Handlebars.registerHelper('blockHelperMissing', function(context, options) {
- var inverse = options.inverse || function() {}, fn = options.fn;
-
-
- var ret = "";
- var type = toString.call(context);
-
- if(type === functionType) { context = context.call(this); }
-
- if(context === true) {
- return fn(this);
- } else if(context === false || context == null) {
- return inverse(this);
- } else if(type === "[object Array]") {
- if(context.length > 0) {
- for(var i=0, j=context.length; i<j; i++) {
- ret = ret + fn(context[i]);
- }
- } else {
- ret = inverse(this);
- }
- return ret;
- } else {
- return fn(context);
- }
-});
-
-Handlebars.registerHelper('each', function(context, options) {
- var fn = options.fn, inverse = options.inverse;
- var ret = "";
-
- if(context && context.length > 0) {
- for(var i=0, j=context.length; i<j; i++) {
- ret = ret + fn(context[i]);
- }
- } else {
- ret = inverse(this);
- }
- return ret;
-});
-
-Handlebars.registerHelper('if', function(context, options) {
- var type = toString.call(context);
- if(type === functionType) { context = context.call(this); }
-
- if(!context || Handlebars.Utils.isEmpty(context)) {
- return options.inverse(this);
- } else {
- return options.fn(this);
- }
-});
-
-Handlebars.registerHelper('unless', function(context, options) {
- var fn = options.fn, inverse = options.inverse;
- options.fn = inverse;
- options.inverse = fn;
-
- return Handlebars.helpers['if'].call(this, context, options);
-});
-
-Handlebars.registerHelper('with', function(context, options) {
- return options.fn(context);
-});
-
-Handlebars.registerHelper('log', function(context) {
- Handlebars.log(context);
-});
-;
-// lib/handlebars/compiler/parser.js
-/* Jison generated parser */
-var handlebars = (function(){
-
-var parser = {trace: function trace() { },
-yy: {},
-symbols_: {"error":2,"root":3,"program":4,"EOF":5,"statements":6,"simpleInverse":7,"statement":8,"openInverse":9,"closeBlock":10,"openBlock":11,"mustache":12,"partial":13,"CONTENT":14,"COMMENT":15,"OPEN_BLOCK":16,"inMustache":17,"CLOSE":18,"OPEN_INVERSE":19,"OPEN_ENDBLOCK":20,"path":21,"OPEN":22,"OPEN_UNESCAPED":23,"OPEN_PARTIAL":24,"params":25,"hash":26,"param":27,"STRING":28,"INTEGER":29,"BOOLEAN":30,"hashSegments":31,"hashSegment":32,"ID":33,"EQUALS":34,"pathSegments":35,"SEP":36,"$accept":0,"$end":1},
-terminals_: {2:"error",5:"EOF",14:"CONTENT",15:"COMMENT",16:"OPEN_BLOCK",18:"CLOSE",19:"OPEN_INVERSE",20:"OPEN_ENDBLOCK",22:"OPEN",23:"OPEN_UNESCAPED",24:"OPEN_PARTIAL",28:"STRING",29:"INTEGER",30:"BOOLEAN",33:"ID",34:"EQUALS",36:"SEP"},
-productions_: [0,[3,2],[4,3],[4,1],[4,0],[6,1],[6,2],[8,3],[8,3],[8,1],[8,1],[8,1],[8,1],[11,3],[9,3],[10,3],[12,3],[12,3],[13,3],[13,4],[7,2],[17,3],[17,2],[17,2],[17,1],[25,2],[25,1],[27,1],[27,1],[27,1],[27,1],[26,1],[31,2],[31,1],[32,3],[32,3],[32,3],[32,3],[21,1],[35,3],[35,1]],
-performAction: function anonymous(yytext,yyleng,yylineno,yy,yystate,$$,_$) {
-
-var $0 = $$.length - 1;
-switch (yystate) {
-case 1: return $$[$0-1]
-break;
-case 2: this.$ = new yy.ProgramNode($$[$0-2], $$[$0])
-break;
-case 3: this.$ = new yy.ProgramNode($$[$0])
-break;
-case 4: this.$ = new yy.ProgramNode([])
-break;
-case 5: this.$ = [$$[$0]]
-break;
-case 6: $$[$0-1].push($$[$0]); this.$ = $$[$0-1]
-break;
-case 7: this.$ = new yy.InverseNode($$[$0-2], $$[$0-1], $$[$0])
-break;
-case 8: this.$ = new yy.BlockNode($$[$0-2], $$[$0-1], $$[$0])
-break;
-case 9: this.$ = $$[$0]
-break;
-case 10: this.$ = $$[$0]
-break;
-case 11: this.$ = new yy.ContentNode($$[$0])
-break;
-case 12: this.$ = new yy.CommentNode($$[$0])
-break;
-case 13: this.$ = new yy.MustacheNode($$[$0-1][0], $$[$0-1][1])
-break;
-case 14: this.$ = new yy.MustacheNode($$[$0-1][0], $$[$0-1][1])
-break;
-case 15: this.$ = $$[$0-1]
-break;
-case 16: this.$ = new yy.MustacheNode($$[$0-1][0], $$[$0-1][1])
-break;
-case 17: this.$ = new yy.MustacheNode($$[$0-1][0], $$[$0-1][1], true)
-break;
-case 18: this.$ = new yy.PartialNode($$[$0-1])
-break;
-case 19: this.$ = new yy.PartialNode($$[$0-2], $$[$0-1])
-break;
-case 20:
-break;
-case 21: this.$ = [[$$[$0-2]].concat($$[$0-1]), $$[$0]]
-break;
-case 22: this.$ = [[$$[$0-1]].concat($$[$0]), null]
-break;
-case 23: this.$ = [[$$[$0-1]], $$[$0]]
-break;
-case 24: this.$ = [[$$[$0]], null]
-break;
-case 25: $$[$0-1].push($$[$0]); this.$ = $$[$0-1];
-break;
-case 26: this.$ = [$$[$0]]
-break;
-case 27: this.$ = $$[$0]
-break;
-case 28: this.$ = new yy.StringNode($$[$0])
-break;
-case 29: this.$ = new yy.IntegerNode($$[$0])
-break;
-case 30: this.$ = new yy.BooleanNode($$[$0])
-break;
-case 31: this.$ = new yy.HashNode($$[$0])
-break;
-case 32: $$[$0-1].push($$[$0]); this.$ = $$[$0-1]
-break;
-case 33: this.$ = [$$[$0]]
-break;
-case 34: this.$ = [$$[$0-2], $$[$0]]
-break;
-case 35: this.$ = [$$[$0-2], new yy.StringNode($$[$0])]
-break;
-case 36: this.$ = [$$[$0-2], new yy.IntegerNode($$[$0])]
-break;
-case 37: this.$ = [$$[$0-2], new yy.BooleanNode($$[$0])]
-break;
-case 38: this.$ = new yy.IdNode($$[$0])
-break;
-case 39: $$[$0-2].push($$[$0]); this.$ = $$[$0-2];
-break;
-case 40: this.$ = [$$[$0]]
-break;
-}
-},
-table: [{3:1,4:2,5:[2,4],6:3,8:4,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,11],22:[1,13],23:[1,14],24:[1,15]},{1:[3]},{5:[1,16]},{5:[2,3],7:17,8:18,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,19],20:[2,3],22:[1,13],23:[1,14],24:[1,15]},{5:[2,5],14:[2,5],15:[2,5],16:[2,5],19:[2,5],20:[2,5],22:[2,5],23:[2,5],24:[2,5]},{4:20,6:3,8:4,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,11],20:[2,4],22:[1,13],23:[1,14],24:[1,15]},{4:21,6:3,8:4,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,11],20:[2,4],22:[1,13],23:[1,14],24:[1,15]},{5:[2,9],14:[2,9],15:[2,9],16:[2,9],19:[2,9],20:[2,9],22:[2,9],23:[2,9],24:[2,9]},{5:[2,10],14:[2,10],15:[2,10],16:[2,10],19:[2,10],20:[2,10],22:[2,10],23:[2,10],24:[2,10]},{5:[2,11],14:[2,11],15:[2,11],16:[2,11],19:[2,11],20:[2,11],22:[2,11],23:[2,11],24:[2,11]},{5:[2,12],14:[2,12],15:[2,12],16:[2,12],19:[2,12],20:[2,12],22:[2,12],23:[2,12],24:[2,12]},{17:22,21:23,33:[1,25],35:24},{17:26,21:23,33:[1,25],35:24},{17:27,21:23,33:[1,25],35:24},{17:28,21:23,33:[1,25],35:24},{21:29,33:[1,25],35:24},{1:[2,1]},{6:30,8:4,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,11],22:[1,13],23:[1,14],24:[1,15]},{5:[2,6],14:[2,6],15:[2,6],16:[2,6],19:[2,6],20:[2,6],22:[2,6],23:[2,6],24:[2,6]},{17:22,18:[1,31],21:23,33:[1,25],35:24},{10:32,20:[1,33]},{10:34,20:[1,33]},{18:[1,35]},{18:[2,24],21:40,25:36,26:37,27:38,28:[1,41],29:[1,42],30:[1,43],31:39,32:44,33:[1,45],35:24},{18:[2,38],28:[2,38],29:[2,38],30:[2,38],33:[2,38],36:[1,46]},{18:[2,40],28:[2,40],29:[2,40],30:[2,40],33:[2,40],36:[2,40]},{18:[1,47]},{18:[1,48]},{18:[1,49]},{18:[1,50],21:51,33:[1,25],35:24},{5:[2,2],8:18,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,11],20:[2,2],22:[1,13],23:[1,14],24:[1,15]},{14:[2,20],15:[2,20],16:[2,20],19:[2,20],22:[2,20],23:[2,20],24:[2,20]},{5:[2,7],14:[2,7],15:[2,7],16:[2,7],19:[2,7],20:[2,7],22:[2,7],23:[2,7],24:[2,7]},{21:52,33:[1,25],35:24},{5:[2,8],14:[2,8],15:[2,8],16:[2,8],19:[2,8],20:[2,8],22:[2,8],23:[2,8],24:[2,8]},{14:[2,14],15:[2,14],16:[2,14],19:[2,14],20:[2,14],22:[2,14],23:[2,14],24:[2,14]},{18:[2,22],21:40,26:53,27:54,28:[1,41],29:[1,42],30:[1,43],31:39,32:44,33:[1,45],35:24},{18:[2,23]},{18:[2,26],28:[2,26],29:[2,26],30:[2,26],33:[2,26]},{18:[2,31],32:55,33:[1,56]},{18:[2,27],28:[2,27],29:[2,27],30:[2,27],33:[2,27]},{18:[2,28],28:[2,28],29:[2,28],30:[2,28],33:[2,28]},{18:[2,29],28:[2,29],29:[2,29],30:[2,29],33:[2,29]},{18:[2,30],28:[2,30],29:[2,30],30:[2,30],33:[2,30]},{18:[2,33],33:[2,33]},{18:[2,40],28:[2,40],29:[2,40],30:[2,40],33:[2,40],34:[1,57],36:[2,40]},{33:[1,58]},{14:[2,13],15:[2,13],16:[2,13],19:[2,13],20:[2,13],22:[2,13],23:[2,13],24:[2,13]},{5:[2,16],14:[2,16],15:[2,16],16:[2,16],19:[2,16],20:[2,16],22:[2,16],23:[2,16],24:[2,16]},{5:[2,17],14:[2,17],15:[2,17],16:[2,17],19:[2,17],20:[2,17],22:[2,17],23:[2,17],24:[2,17]},{5:[2,18],14:[2,18],15:[2,18],16:[2,18],19:[2,18],20:[2,18],22:[2,18],23:[2,18],24:[2,18]},{18:[1,59]},{18:[1,60]},{18:[2,21]},{18:[2,25],28:[2,25],29:[2,25],30:[2,25],33:[2,25]},{18:[2,32],33:[2,32]},{34:[1,57]},{21:61,28:[1,62],29:[1,63],30:[1,64],33:[1,25],35:24},{18:[2,39],28:[2,39],29:[2,39],30:[2,39],33:[2,39],36:[2,39]},{5:[2,19],14:[2,19],15:[2,19],16:[2,19],19:[2,19],20:[2,19],22:[2,19],23:[2,19],24:[2,19]},{5:[2,15],14:[2,15],15:[2,15],16:[2,15],19:[2,15],20:[2,15],22:[2,15],23:[2,15],24:[2,15]},{18:[2,34],33:[2,34]},{18:[2,35],33:[2,35]},{18:[2,36],33:[2,36]},{18:[2,37],33:[2,37]}],
-defaultActions: {16:[2,1],37:[2,23],53:[2,21]},
-parseError: function parseError(str, hash) {
- throw new Error(str);
-},
-parse: function parse(input) {
- var self = this, stack = [0], vstack = [null], lstack = [], table = this.table, yytext = "", yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1;
- this.lexer.setInput(input);
- this.lexer.yy = this.yy;
- this.yy.lexer = this.lexer;
- if (typeof this.lexer.yylloc == "undefined")
- this.lexer.yylloc = {};
- var yyloc = this.lexer.yylloc;
- lstack.push(yyloc);
- if (typeof this.yy.parseError === "function")
- this.parseError = this.yy.parseError;
- function popStack(n) {
- stack.length = stack.length - 2 * n;
- vstack.length = vstack.length - n;
- lstack.length = lstack.length - n;
- }
- function lex() {
- var token;
- token = self.lexer.lex() || 1;
- if (typeof token !== "number") {
- token = self.symbols_[token] || token;
- }
- return token;
- }
- var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected;
- while (true) {
- state = stack[stack.length - 1];
- if (this.defaultActions[state]) {
- action = this.defaultActions[state];
- } else {
- if (symbol == null)
- symbol = lex();
- action = table[state] && table[state][symbol];
- }
- if (typeof action === "undefined" || !action.length || !action[0]) {
- if (!recovering) {
- expected = [];
- for (p in table[state])
- if (this.terminals_[p] && p > 2) {
- expected.push("'" + this.terminals_[p] + "'");
- }
- var errStr = "";
- if (this.lexer.showPosition) {
- errStr = "Parse error on line " + (yylineno + 1) + ":\n" + this.lexer.showPosition() + "\nExpecting " + expected.join(", ") + ", got '" + this.terminals_[symbol] + "'";
- } else {
- errStr = "Parse error on line " + (yylineno + 1) + ": Unexpected " + (symbol == 1?"end of input":"'" + (this.terminals_[symbol] || symbol) + "'");
- }
- this.parseError(errStr, {text: this.lexer.match, token: this.terminals_[symbol] || symbol, line: this.lexer.yylineno, loc: yyloc, expected: expected});
- }
- }
- if (action[0] instanceof Array && action.length > 1) {
- throw new Error("Parse Error: multiple actions possible at state: " + state + ", token: " + symbol);
- }
- switch (action[0]) {
- case 1:
- stack.push(symbol);
- vstack.push(this.lexer.yytext);
- lstack.push(this.lexer.yylloc);
- stack.push(action[1]);
- symbol = null;
- if (!preErrorSymbol) {
- yyleng = this.lexer.yyleng;
- yytext = this.lexer.yytext;
- yylineno = this.lexer.yylineno;
- yyloc = this.lexer.yylloc;
- if (recovering > 0)
- recovering--;
- } else {
- symbol = preErrorSymbol;
- preErrorSymbol = null;
- }
- break;
- case 2:
- len = this.productions_[action[1]][1];
- yyval.$ = vstack[vstack.length - len];
- yyval._$ = {first_line: lstack[lstack.length - (len || 1)].first_line, last_line: lstack[lstack.length - 1].last_line, first_column: lstack[lstack.length - (len || 1)].first_column, last_column: lstack[lstack.length - 1].last_column};
- r = this.performAction.call(yyval, yytext, yyleng, yylineno, this.yy, action[1], vstack, lstack);
- if (typeof r !== "undefined") {
- return r;
- }
- if (len) {
- stack = stack.slice(0, -1 * len * 2);
- vstack = vstack.slice(0, -1 * len);
- lstack = lstack.slice(0, -1 * len);
- }
- stack.push(this.productions_[action[1]][0]);
- vstack.push(yyval.$);
- lstack.push(yyval._$);
- newState = table[stack[stack.length - 2]][stack[stack.length - 1]];
- stack.push(newState);
- break;
- case 3:
- return true;
- }
- }
- return true;
-}
-};/* Jison generated lexer */
-var lexer = (function(){
-
-var lexer = ({EOF:1,
-parseError:function parseError(str, hash) {
- if (this.yy.parseError) {
- this.yy.parseError(str, hash);
- } else {
- throw new Error(str);
- }
- },
-setInput:function (input) {
- this._input = input;
- this._more = this._less = this.done = false;
- this.yylineno = this.yyleng = 0;
- this.yytext = this.matched = this.match = '';
- this.conditionStack = ['INITIAL'];
- this.yylloc = {first_line:1,first_column:0,last_line:1,last_column:0};
- return this;
- },
-input:function () {
- var ch = this._input[0];
- this.yytext+=ch;
- this.yyleng++;
- this.match+=ch;
- this.matched+=ch;
- var lines = ch.match(/\n/);
- if (lines) this.yylineno++;
- this._input = this._input.slice(1);
- return ch;
- },
-unput:function (ch) {
- this._input = ch + this._input;
- return this;
- },
-more:function () {
- this._more = true;
- return this;
- },
-pastInput:function () {
- var past = this.matched.substr(0, this.matched.length - this.match.length);
- return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, "");
- },
-upcomingInput:function () {
- var next = this.match;
- if (next.length < 20) {
- next += this._input.substr(0, 20-next.length);
- }
- return (next.substr(0,20)+(next.length > 20 ? '...':'')).replace(/\n/g, "");
- },
-showPosition:function () {
- var pre = this.pastInput();
- var c = new Array(pre.length + 1).join("-");
- return pre + this.upcomingInput() + "\n" + c+"^";
- },
-next:function () {
- if (this.done) {
- return this.EOF;
- }
- if (!this._input) this.done = true;
-
- var token,
- match,
- col,
- lines;
- if (!this._more) {
- this.yytext = '';
- this.match = '';
- }
- var rules = this._currentRules();
- for (var i=0;i < rules.length; i++) {
- match = this._input.match(this.rules[rules[i]]);
- if (match) {
- lines = match[0].match(/\n.*/g);
- if (lines) this.yylineno += lines.length;
- this.yylloc = {first_line: this.yylloc.last_line,
- last_line: this.yylineno+1,
- first_column: this.yylloc.last_column,
- last_column: lines ? lines[lines.length-1].length-1 : this.yylloc.last_column + match[0].length}
- this.yytext += match[0];
- this.match += match[0];
- this.matches = match;
- this.yyleng = this.yytext.length;
- this._more = false;
- this._input = this._input.slice(match[0].length);
- this.matched += match[0];
- token = this.performAction.call(this, this.yy, this, rules[i],this.conditionStack[this.conditionStack.length-1]);
- if (token) return token;
- else return;
- }
- }
- if (this._input === "") {
- return this.EOF;
- } else {
- this.parseError('Lexical error on line '+(this.yylineno+1)+'. Unrecognized text.\n'+this.showPosition(),
- {text: "", token: null, line: this.yylineno});
- }
- },
-lex:function lex() {
- var r = this.next();
- if (typeof r !== 'undefined') {
- return r;
- } else {
- return this.lex();
- }
- },
-begin:function begin(condition) {
- this.conditionStack.push(condition);
- },
-popState:function popState() {
- return this.conditionStack.pop();
- },
-_currentRules:function _currentRules() {
- return this.conditions[this.conditionStack[this.conditionStack.length-1]].rules;
- },
-topState:function () {
- return this.conditionStack[this.conditionStack.length-2];
- },
-pushState:function begin(condition) {
- this.begin(condition);
- }});
-lexer.performAction = function anonymous(yy,yy_,$avoiding_name_collisions,YY_START) {
-
-var YYSTATE=YY_START
-switch($avoiding_name_collisions) {
-case 0:
- if(yy_.yytext.slice(-1) !== "\\") this.begin("mu");
- if(yy_.yytext.slice(-1) === "\\") yy_.yytext = yy_.yytext.substr(0,yy_.yyleng-1), this.begin("emu");
- if(yy_.yytext) return 14;
-
-break;
-case 1: return 14;
-break;
-case 2: this.popState(); return 14;
-break;
-case 3: return 24;
-break;
-case 4: return 16;
-break;
-case 5: return 20;
-break;
-case 6: return 19;
-break;
-case 7: return 19;
-break;
-case 8: return 23;
-break;
-case 9: return 23;
-break;
-case 10: yy_.yytext = yy_.yytext.substr(3,yy_.yyleng-5); this.popState(); return 15;
-break;
-case 11: return 22;
-break;
-case 12: return 34;
-break;
-case 13: return 33;
-break;
-case 14: return 33;
-break;
-case 15: return 36;
-break;
-case 16: /*ignore whitespace*/
-break;
-case 17: this.popState(); return 18;
-break;
-case 18: this.popState(); return 18;
-break;
-case 19: yy_.yytext = yy_.yytext.substr(1,yy_.yyleng-2).replace(/\\"/g,'"'); return 28;
-break;
-case 20: return 30;
-break;
-case 21: return 30;
-break;
-case 22: return 29;
-break;
-case 23: return 33;
-break;
-case 24: yy_.yytext = yy_.yytext.substr(1, yy_.yyleng-2); return 33;
-break;
-case 25: return 'INVALID';
-break;
-case 26: return 5;
-break;
-}
-};
-lexer.rules = [/^[^\x00]*?(?=(\{\{))/,/^[^\x00]+/,/^[^\x00]{2,}?(?=(\{\{))/,/^\{\{>/,/^\{\{#/,/^\{\{\//,/^\{\{\^/,/^\{\{\s*else\b/,/^\{\{\{/,/^\{\{&/,/^\{\{![\s\S]*?\}\}/,/^\{\{/,/^=/,/^\.(?=[} ])/,/^\.\./,/^[\/.]/,/^\s+/,/^\}\}\}/,/^\}\}/,/^"(\\["]|[^"])*"/,/^true(?=[}\s])/,/^false(?=[}\s])/,/^[0-9]+(?=[}\s])/,/^[a-zA-Z0-9_$-]+(?=[=}\s\/.])/,/^\[[^\]]*\]/,/^./,/^$/];
-lexer.conditions = {"mu":{"rules":[3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26],"inclusive":false},"emu":{"rules":[2],"inclusive":false},"INITIAL":{"rules":[0,1,26],"inclusive":true}};return lexer;})()
-parser.lexer = lexer;
-return parser;
-})();
-if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
-exports.parser = handlebars;
-exports.parse = function () { return handlebars.parse.apply(handlebars, arguments); }
-exports.main = function commonjsMain(args) {
- if (!args[1])
- throw new Error('Usage: '+args[0]+' FILE');
- if (typeof process !== 'undefined') {
- var source = require('fs').readFileSync(require('path').join(process.cwd(), args[1]), "utf8");
- } else {
- var cwd = require("file").path(require("file").cwd());
- var source = cwd.join(args[1]).read({charset: "utf-8"});
- }
- return exports.parser.parse(source);
-}
-if (typeof module !== 'undefined' && require.main === module) {
- exports.main(typeof process !== 'undefined' ? process.argv.slice(1) : require("system").args);
-}
-};
-;
-// lib/handlebars/compiler/base.js
-Handlebars.Parser = handlebars;
-
-Handlebars.parse = function(string) {
- Handlebars.Parser.yy = Handlebars.AST;
- return Handlebars.Parser.parse(string);
-};
-
-Handlebars.print = function(ast) {
- return new Handlebars.PrintVisitor().accept(ast);
-};
-
-Handlebars.logger = {
- DEBUG: 0, INFO: 1, WARN: 2, ERROR: 3, level: 3,
-
- // override in the host environment
- log: function(level, str) {}
-};
-
-Handlebars.log = function(level, str) { Handlebars.logger.log(level, str); };
-;
-// lib/handlebars/compiler/ast.js
-(function() {
-
- Handlebars.AST = {};
-
- Handlebars.AST.ProgramNode = function(statements, inverse) {
- this.type = "program";
- this.statements = statements;
- if(inverse) { this.inverse = new Handlebars.AST.ProgramNode(inverse); }
- };
-
- Handlebars.AST.MustacheNode = function(params, hash, unescaped) {
- this.type = "mustache";
- this.id = params[0];
- this.params = params.slice(1);
- this.hash = hash;
- this.escaped = !unescaped;
- };
-
- Handlebars.AST.PartialNode = function(id, context) {
- this.type = "partial";
-
- // TODO: disallow complex IDs
-
- this.id = id;
- this.context = context;
- };
-
- var verifyMatch = function(open, close) {
- if(open.original !== close.original) {
- throw new Handlebars.Exception(open.original + " doesn't match " + close.original);
- }
- };
-
- Handlebars.AST.BlockNode = function(mustache, program, close) {
- verifyMatch(mustache.id, close);
- this.type = "block";
- this.mustache = mustache;
- this.program = program;
- };
-
- Handlebars.AST.InverseNode = function(mustache, program, close) {
- verifyMatch(mustache.id, close);
- this.type = "inverse";
- this.mustache = mustache;
- this.program = program;
- };
-
- Handlebars.AST.ContentNode = function(string) {
- this.type = "content";
- this.string = string;
- };
-
- Handlebars.AST.HashNode = function(pairs) {
- this.type = "hash";
- this.pairs = pairs;
- };
-
- Handlebars.AST.IdNode = function(parts) {
- this.type = "ID";
- this.original = parts.join(".");
-
- var dig = [], depth = 0;
-
- for(var i=0,l=parts.length; i<l; i++) {
- var part = parts[i];
-
- if(part === "..") { depth++; }
- else if(part === "." || part === "this") { this.isScoped = true; }
- else { dig.push(part); }
- }
-
- this.parts = dig;
- this.string = dig.join('.');
- this.depth = depth;
- this.isSimple = (dig.length === 1) && (depth === 0);
- };
-
- Handlebars.AST.StringNode = function(string) {
- this.type = "STRING";
- this.string = string;
- };
-
- Handlebars.AST.IntegerNode = function(integer) {
- this.type = "INTEGER";
- this.integer = integer;
- };
-
- Handlebars.AST.BooleanNode = function(bool) {
- this.type = "BOOLEAN";
- this.bool = bool;
- };
-
- Handlebars.AST.CommentNode = function(comment) {
- this.type = "comment";
- this.comment = comment;
- };
-
-})();;
-// lib/handlebars/utils.js
-Handlebars.Exception = function(message) {
- var tmp = Error.prototype.constructor.apply(this, arguments);
-
- for (var p in tmp) {
- if (tmp.hasOwnProperty(p)) { this[p] = tmp[p]; }
- }
-
- this.message = tmp.message;
-};
-Handlebars.Exception.prototype = new Error;
-
-// Build out our basic SafeString type
-Handlebars.SafeString = function(string) {
- this.string = string;
-};
-Handlebars.SafeString.prototype.toString = function() {
- return this.string.toString();
-};
-
-(function() {
- var escape = {
- "<": "<",
- ">": ">",
- '"': """,
- "'": "'",
- "`": "`"
- };
-
- var badChars = /&(?!\w+;)|[<>"'`]/g;
- var possible = /[&<>"'`]/;
-
- var escapeChar = function(chr) {
- return escape[chr] || "&amp;";
- };
-
- Handlebars.Utils = {
- escapeExpression: function(string) {
- // don't escape SafeStrings, since they're already safe
- if (string instanceof Handlebars.SafeString) {
- return string.toString();
- } else if (string == null || string === false) {
- return "";
- }
-
- if(!possible.test(string)) { return string; }
- return string.replace(badChars, escapeChar);
- },
-
- isEmpty: function(value) {
- if (typeof value === "undefined") {
- return true;
- } else if (value === null) {
- return true;
- } else if (value === false) {
- return true;
- } else if(Object.prototype.toString.call(value) === "[object Array]" && value.length === 0) {
- return true;
- } else {
- return false;
- }
- }
- };
-})();;
-// lib/handlebars/compiler/compiler.js
-Handlebars.Compiler = function() {};
-Handlebars.JavaScriptCompiler = function() {};
-
-(function(Compiler, JavaScriptCompiler) {
- Compiler.OPCODE_MAP = {
- appendContent: 1,
- getContext: 2,
- lookupWithHelpers: 3,
- lookup: 4,
- append: 5,
- invokeMustache: 6,
- appendEscaped: 7,
- pushString: 8,
- truthyOrFallback: 9,
- functionOrFallback: 10,
- invokeProgram: 11,
- invokePartial: 12,
- push: 13,
- assignToHash: 15,
- pushStringParam: 16
- };
-
- Compiler.MULTI_PARAM_OPCODES = {
- appendContent: 1,
- getContext: 1,
- lookupWithHelpers: 2,
- lookup: 1,
- invokeMustache: 3,
- pushString: 1,
- truthyOrFallback: 1,
- functionOrFallback: 1,
- invokeProgram: 3,
- invokePartial: 1,
- push: 1,
- assignToHash: 1,
- pushStringParam: 1
- };
-
- Compiler.DISASSEMBLE_MAP = {};
-
- for(var prop in Compiler.OPCODE_MAP) {
- var value = Compiler.OPCODE_MAP[prop];
- Compiler.DISASSEMBLE_MAP[value] = prop;
- }
-
- Compiler.multiParamSize = function(code) {
- return Compiler.MULTI_PARAM_OPCODES[Compiler.DISASSEMBLE_MAP[code]];
- };
-
- Compiler.prototype = {
- compiler: Compiler,
-
- disassemble: function() {
- var opcodes = this.opcodes, opcode, nextCode;
- var out = [], str, name, value;
-
- for(var i=0, l=opcodes.length; i<l; i++) {
- opcode = opcodes[i];
-
- if(opcode === 'DECLARE') {
- name = opcodes[++i];
- value = opcodes[++i];
- out.push("DECLARE " + name + " = " + value);
- } else {
- str = Compiler.DISASSEMBLE_MAP[opcode];
-
- var extraParams = Compiler.multiParamSize(opcode);
- var codes = [];
-
- for(var j=0; j<extraParams; j++) {
- nextCode = opcodes[++i];
-
- if(typeof nextCode === "string") {
- nextCode = "\"" + nextCode.replace("\n", "\\n") + "\"";
- }
-
- codes.push(nextCode);
- }
-
- str = str + " " + codes.join(" ");
-
- out.push(str);
- }
- }
-
- return out.join("\n");
- },
-
- guid: 0,
-
- compile: function(program, options) {
- this.children = [];
- this.depths = {list: []};
- this.options = options;
-
- // These changes will propagate to the other compiler components
- var knownHelpers = this.options.knownHelpers;
- this.options.knownHelpers = {
- 'helperMissing': true,
- 'blockHelperMissing': true,
- 'each': true,
- 'if': true,
- 'unless': true,
- 'with': true,
- 'log': true
- };
- if (knownHelpers) {
- for (var name in knownHelpers) {
- this.options.knownHelpers[name] = knownHelpers[name];
- }
- }
-
- return this.program(program);
- },
-
- accept: function(node) {
- return this[node.type](node);
- },
-
- program: function(program) {
- var statements = program.statements, statement;
- this.opcodes = [];
-
- for(var i=0, l=statements.length; i<l; i++) {
- statement = statements[i];
- this[statement.type](statement);
- }
- this.isSimple = l === 1;
-
- this.depths.list = this.depths.list.sort(function(a, b) {
- return a - b;
- });
-
- return this;
- },
-
- compileProgram: function(program) {
- var result = new this.compiler().compile(program, this.options);
- var guid = this.guid++;
-
- this.usePartial = this.usePartial || result.usePartial;
-
- this.children[guid] = result;
-
- for(var i=0, l=result.depths.list.length; i<l; i++) {
- depth = result.depths.list[i];
-
- if(depth < 2) { continue; }
- else { this.addDepth(depth - 1); }
- }
-
- return guid;
- },
-
- block: function(block) {
- var mustache = block.mustache;
- var depth, child, inverse, inverseGuid;
-
- var params = this.setupStackForMustache(mustache);
-
- var programGuid = this.compileProgram(block.program);
-
- if(block.program.inverse) {
- inverseGuid = this.compileProgram(block.program.inverse);
- this.declare('inverse', inverseGuid);
- }
-
- this.opcode('invokeProgram', programGuid, params.length, !!mustache.hash);
- this.declare('inverse', null);
- this.opcode('append');
- },
-
- inverse: function(block) {
- var params = this.setupStackForMustache(block.mustache);
-
- var programGuid = this.compileProgram(block.program);
-
- this.declare('inverse', programGuid);
-
- this.opcode('invokeProgram', null, params.length, !!block.mustache.hash);
- this.declare('inverse', null);
- this.opcode('append');
- },
-
- hash: function(hash) {
- var pairs = hash.pairs, pair, val;
-
- this.opcode('push', '{}');
-
- for(var i=0, l=pairs.length; i<l; i++) {
- pair = pairs[i];
- val = pair[1];
-
- this.accept(val);
- this.opcode('assignToHash', pair[0]);
- }
- },
-
- partial: function(partial) {
- var id = partial.id;
- this.usePartial = true;
-
- if(partial.context) {
- this.ID(partial.context);
- } else {
- this.opcode('push', 'depth0');
- }
-
- this.opcode('invokePartial', id.original);
- this.opcode('append');
- },
-
- content: function(content) {
- this.opcode('appendContent', content.string);
- },
-
- mustache: function(mustache) {
- var params = this.setupStackForMustache(mustache);
-
- this.opcode('invokeMustache', params.length, mustache.id.original, !!mustache.hash);
-
- if(mustache.escaped && !this.options.noEscape) {
- this.opcode('appendEscaped');
- } else {
- this.opcode('append');
- }
- },
-
- ID: function(id) {
- this.addDepth(id.depth);
-
- this.opcode('getContext', id.depth);
-
- this.opcode('lookupWithHelpers', id.parts[0] || null, id.isScoped || false);
-
- for(var i=1, l=id.parts.length; i<l; i++) {
- this.opcode('lookup', id.parts[i]);
- }
- },
-
- STRING: function(string) {
- this.opcode('pushString', string.string);
- },
-
- INTEGER: function(integer) {
- this.opcode('push', integer.integer);
- },
-
- BOOLEAN: function(bool) {
- this.opcode('push', bool.bool);
- },
-
- comment: function() {},
-
- // HELPERS
- pushParams: function(params) {
- var i = params.length, param;
-
- while(i--) {
- param = params[i];
-
- if(this.options.stringParams) {
- if(param.depth) {
- this.addDepth(param.depth);
- }
-
- this.opcode('getContext', param.depth || 0);
- this.opcode('pushStringParam', param.string);
- } else {
- this[param.type](param);
- }
- }
- },
-
- opcode: function(name, val1, val2, val3) {
- this.opcodes.push(Compiler.OPCODE_MAP[name]);
- if(val1 !== undefined) { this.opcodes.push(val1); }
- if(val2 !== undefined) { this.opcodes.push(val2); }
- if(val3 !== undefined) { this.opcodes.push(val3); }
- },
-
- declare: function(name, value) {
- this.opcodes.push('DECLARE');
- this.opcodes.push(name);
- this.opcodes.push(value);
- },
-
- addDepth: function(depth) {
- if(depth === 0) { return; }
-
- if(!this.depths[depth]) {
- this.depths[depth] = true;
- this.depths.list.push(depth);
- }
- },
-
- setupStackForMustache: function(mustache) {
- var params = mustache.params;
-
- this.pushParams(params);
-
- if(mustache.hash) {
- this.hash(mustache.hash);
- }
-
- this.ID(mustache.id);
-
- return params;
- }
- };
-
- JavaScriptCompiler.prototype = {
- // PUBLIC API: You can override these methods in a subclass to provide
- // alternative compiled forms for name lookup and buffering semantics
- nameLookup: function(parent, name, type) {
- if (/^[0-9]+$/.test(name)) {
- return parent + "[" + name + "]";
- } else if (JavaScriptCompiler.isValidJavaScriptVariableName(name)) {
- return parent + "." + name;
- }
- else {
- return parent + "['" + name + "']";
- }
- },
-
- appendToBuffer: function(string) {
- if (this.environment.isSimple) {
- return "return " + string + ";";
- } else {
- return "buffer += " + string + ";";
- }
- },
-
- initializeBuffer: function() {
- return this.quotedString("");
- },
-
- namespace: "Handlebars",
- // END PUBLIC API
-
- compile: function(environment, options, context, asObject) {
- this.environment = environment;
- this.options = options || {};
-
- this.name = this.environment.name;
- this.isChild = !!context;
- this.context = context || {
- programs: [],
- aliases: { self: 'this' },
- registers: {list: []}
- };
-
- this.preamble();
-
- this.stackSlot = 0;
- this.stackVars = [];
-
- this.compileChildren(environment, options);
-
- var opcodes = environment.opcodes, opcode;
-
- this.i = 0;
-
- for(l=opcodes.length; this.i<l; this.i++) {
- opcode = this.nextOpcode(0);
-
- if(opcode[0] === 'DECLARE') {
- this.i = this.i + 2;
- this[opcode[1]] = opcode[2];
- } else {
- this.i = this.i + opcode[1].length;
- this[opcode[0]].apply(this, opcode[1]);
- }
- }
-
- return this.createFunctionContext(asObject);
- },
-
- nextOpcode: function(n) {
- var opcodes = this.environment.opcodes, opcode = opcodes[this.i + n], name, val;
- var extraParams, codes;
-
- if(opcode === 'DECLARE') {
- name = opcodes[this.i + 1];
- val = opcodes[this.i + 2];
- return ['DECLARE', name, val];
- } else {
- name = Compiler.DISASSEMBLE_MAP[opcode];
-
- extraParams = Compiler.multiParamSize(opcode);
- codes = [];
-
- for(var j=0; j<extraParams; j++) {
- codes.push(opcodes[this.i + j + 1 + n]);
- }
-
- return [name, codes];
- }
- },
-
- eat: function(opcode) {
- this.i = this.i + opcode.length;
- },
-
- preamble: function() {
- var out = [];
-
- // this register will disambiguate helper lookup from finding a function in
- // a context. This is necessary for mustache compatibility, which requires
- // that context functions in blocks are evaluated by blockHelperMissing, and
- // then proceed as if the resulting value was provided to blockHelperMissing.
- this.useRegister('foundHelper');
-
- if (!this.isChild) {
- var namespace = this.namespace;
- var copies = "helpers = helpers || " + namespace + ".helpers;";
- if(this.environment.usePartial) { copies = copies + " partials = partials || " + namespace + ".partials;"; }
- out.push(copies);
- } else {
- out.push('');
- }
-
- if (!this.environment.isSimple) {
- out.push(", buffer = " + this.initializeBuffer());
- } else {
- out.push("");
- }
-
- // track the last context pushed into place to allow skipping the
- // getContext opcode when it would be a noop
- this.lastContext = 0;
- this.source = out;
- },
-
- createFunctionContext: function(asObject) {
- var locals = this.stackVars;
- if (!this.isChild) {
- locals = locals.concat(this.context.registers.list);
- }
-
- if(locals.length > 0) {
- this.source[1] = this.source[1] + ", " + locals.join(", ");
- }
-
- // Generate minimizer alias mappings
- if (!this.isChild) {
- var aliases = []
- for (var alias in this.context.aliases) {
- this.source[1] = this.source[1] + ', ' + alias + '=' + this.context.aliases[alias];
- }
- }
-
- if (this.source[1]) {
- this.source[1] = "var " + this.source[1].substring(2) + ";";
- }
-
- // Merge children
- if (!this.isChild) {
- this.source[1] += '\n' + this.context.programs.join('\n') + '\n';
- }
-
- if (!this.environment.isSimple) {
- this.source.push("return buffer;");
- }
-
- var params = this.isChild ? ["depth0", "data"] : ["Handlebars", "depth0", "helpers", "partials", "data"];
-
- for(var i=0, l=this.environment.depths.list.length; i<l; i++) {
- params.push("depth" + this.environment.depths.list[i]);
- }
-
- if (asObject) {
- params.push(this.source.join("\n "));
-
- return Function.apply(this, params);
- } else {
- var functionSource = 'function ' + (this.name || '') + '(' + params.join(',') + ') {\n ' + this.source.join("\n ") + '}';
- Handlebars.log(Handlebars.logger.DEBUG, functionSource + "\n\n");
- return functionSource;
- }
- },
-
- appendContent: function(content) {
- this.source.push(this.appendToBuffer(this.quotedString(content)));
- },
-
- append: function() {
- var local = this.popStack();
- this.source.push("if(" + local + " || " + local + " === 0) { " + this.appendToBuffer(local) + " }");
- if (this.environment.isSimple) {
- this.source.push("else { " + this.appendToBuffer("''") + " }");
- }
- },
-
- appendEscaped: function() {
- var opcode = this.nextOpcode(1), extra = "";
- this.context.aliases.escapeExpression = 'this.escapeExpression';
-
- if(opcode[0] === 'appendContent') {
- extra = " + " + this.quotedString(opcode[1][0]);
- this.eat(opcode);
- }
-
- this.source.push(this.appendToBuffer("escapeExpression(" + this.popStack() + ")" + extra));
- },
-
- getContext: function(depth) {
- if(this.lastContext !== depth) {
- this.lastContext = depth;
- }
- },
-
- lookupWithHelpers: function(name, isScoped) {
- if(name) {
- var topStack = this.nextStack();
-
- this.usingKnownHelper = false;
-
- var toPush;
- if (!isScoped && this.options.knownHelpers[name]) {
- toPush = topStack + " = " + this.nameLookup('helpers', name, 'helper');
- this.usingKnownHelper = true;
- } else if (isScoped || this.options.knownHelpersOnly) {
- toPush = topStack + " = " + this.nameLookup('depth' + this.lastContext, name, 'context');
- } else {
- this.register('foundHelper', this.nameLookup('helpers', name, 'helper'));
- toPush = topStack + " = foundHelper || " + this.nameLookup('depth' + this.lastContext, name, 'context');
- }
-
- toPush += ';';
- this.source.push(toPush);
- } else {
- this.pushStack('depth' + this.lastContext);
- }
- },
-
- lookup: function(name) {
- var topStack = this.topStack();
- this.source.push(topStack + " = (" + topStack + " === null || " + topStack + " === undefined || " + topStack + " === false ? " +
- topStack + " : " + this.nameLookup(topStack, name, 'context') + ");");
- },
-
- pushStringParam: function(string) {
- this.pushStack('depth' + this.lastContext);
- this.pushString(string);
- },
-
- pushString: function(string) {
- this.pushStack(this.quotedString(string));
- },
-
- push: function(name) {
- this.pushStack(name);
- },
-
- invokeMustache: function(paramSize, original, hasHash) {
- this.populateParams(paramSize, this.quotedString(original), "{}", null, hasHash, function(nextStack, helperMissingString, id) {
- if (!this.usingKnownHelper) {
- this.context.aliases.helperMissing = 'helpers.helperMissing';
- this.context.aliases.undef = 'void 0';
- this.source.push("else if(" + id + "=== undef) { " + nextStack + " = helperMissing.call(" + helperMissingString + "); }");
- if (nextStack !== id) {
- this.source.push("else { " + nextStack + " = " + id + "; }");
- }
- }
- });
- },
-
- invokeProgram: function(guid, paramSize, hasHash) {
- var inverse = this.programExpression(this.inverse);
- var mainProgram = this.programExpression(guid);
-
- this.populateParams(paramSize, null, mainProgram, inverse, hasHash, function(nextStack, helperMissingString, id) {
- if (!this.usingKnownHelper) {
- this.context.aliases.blockHelperMissing = 'helpers.blockHelperMissing';
- this.source.push("else { " + nextStack + " = blockHelperMissing.call(" + helperMissingString + "); }");
- }
- });
- },
-
- populateParams: function(paramSize, helperId, program, inverse, hasHash, fn) {
- var needsRegister = hasHash || this.options.stringParams || inverse || this.options.data;
- var id = this.popStack(), nextStack;
- var params = [], param, stringParam, stringOptions;
-
- if (needsRegister) {
- this.register('tmp1', program);
- stringOptions = 'tmp1';
- } else {
- stringOptions = '{ hash: {} }';
- }
-
- if (needsRegister) {
- var hash = (hasHash ? this.popStack() : '{}');
- this.source.push('tmp1.hash = ' + hash + ';');
- }
-
- if(this.options.stringParams) {
- this.source.push('tmp1.contexts = [];');
- }
-
- for(var i=0; i<paramSize; i++) {
- param = this.popStack();
- params.push(param);
-
- if(this.options.stringParams) {
- this.source.push('tmp1.contexts.push(' + this.popStack() + ');');
- }
- }
-
- if(inverse) {
- this.source.push('tmp1.fn = tmp1;');
- this.source.push('tmp1.inverse = ' + inverse + ';');
- }
-
- if(this.options.data) {
- this.source.push('tmp1.data = data;');
- }
-
- params.push(stringOptions);
-
- this.populateCall(params, id, helperId || id, fn, program !== '{}');
- },
-
- populateCall: function(params, id, helperId, fn, program) {
- var paramString = ["depth0"].concat(params).join(", ");
- var helperMissingString = ["depth0"].concat(helperId).concat(params).join(", ");
-
- var nextStack = this.nextStack();
-
- if (this.usingKnownHelper) {
- this.source.push(nextStack + " = " + id + ".call(" + paramString + ");");
- } else {
- this.context.aliases.functionType = '"function"';
- var condition = program ? "foundHelper && " : ""
- this.source.push("if(" + condition + "typeof " + id + " === functionType) { " + nextStack + " = " + id + ".call(" + paramString + "); }");
- }
- fn.call(this, nextStack, helperMissingString, id);
- this.usingKnownHelper = false;
- },
-
- invokePartial: function(context) {
- params = [this.nameLookup('partials', context, 'partial'), "'" + context + "'", this.popStack(), "helpers", "partials"];
-
- if (this.options.data) {
- params.push("data");
- }
-
- this.pushStack("self.invokePartial(" + params.join(", ") + ");");
- },
-
- assignToHash: function(key) {
- var value = this.popStack();
- var hash = this.topStack();
-
- this.source.push(hash + "['" + key + "'] = " + value + ";");
- },
-
- // HELPERS
-
- compiler: JavaScriptCompiler,
-
- compileChildren: function(environment, options) {
- var children = environment.children, child, compiler;
-
- for(var i=0, l=children.length; i<l; i++) {
- child = children[i];
- compiler = new this.compiler();
-
- this.context.programs.push(''); // Placeholder to prevent name conflicts for nested children
- var index = this.context.programs.length;
- child.index = index;
- child.name = 'program' + index;
- this.context.programs[index] = compiler.compile(child, options, this.context);
- }
- },
-
- programExpression: function(guid) {
- if(guid == null) { return "self.noop"; }
-
- var child = this.environment.children[guid],
- depths = child.depths.list;
- var programParams = [child.index, child.name, "data"];
-
- for(var i=0, l = depths.length; i<l; i++) {
- depth = depths[i];
-
- if(depth === 1) { programParams.push("depth0"); }
- else { programParams.push("depth" + (depth - 1)); }
- }
-
- if(depths.length === 0) {
- return "self.program(" + programParams.join(", ") + ")";
- } else {
- programParams.shift();
- return "self.programWithDepth(" + programParams.join(", ") + ")";
- }
- },
-
- register: function(name, val) {
- this.useRegister(name);
- this.source.push(name + " = " + val + ";");
- },
-
- useRegister: function(name) {
- if(!this.context.registers[name]) {
- this.context.registers[name] = true;
- this.context.registers.list.push(name);
- }
- },
-
- pushStack: function(item) {
- this.source.push(this.nextStack() + " = " + item + ";");
- return "stack" + this.stackSlot;
- },
-
- nextStack: function() {
- this.stackSlot++;
- if(this.stackSlot > this.stackVars.length) { this.stackVars.push("stack" + this.stackSlot); }
- return "stack" + this.stackSlot;
- },
-
- popStack: function() {
- return "stack" + this.stackSlot--;
- },
-
- topStack: function() {
- return "stack" + this.stackSlot;
- },
-
- quotedString: function(str) {
- return '"' + str
- .replace(/\\/g, '\\\\')
- .replace(/"/g, '\\"')
- .replace(/\n/g, '\\n')
- .replace(/\r/g, '\\r') + '"';
- }
- };
-
- var reservedWords = (
- "break else new var" +
- " case finally return void" +
- " catch for switch while" +
- " continue function this with" +
- " default if throw" +
- " delete in try" +
- " do instanceof typeof" +
- " abstract enum int short" +
- " boolean export interface static" +
- " byte extends long super" +
- " char final native synchronized" +
- " class float package throws" +
- " const goto private transient" +
- " debugger implements protected volatile" +
- " double import public let yield"
- ).split(" ");
-
- var compilerWords = JavaScriptCompiler.RESERVED_WORDS = {};
-
- for(var i=0, l=reservedWords.length; i<l; i++) {
- compilerWords[reservedWords[i]] = true;
- }
-
- JavaScriptCompiler.isValidJavaScriptVariableName = function(name) {
- if(!JavaScriptCompiler.RESERVED_WORDS[name] && /^[a-zA-Z_$][0-9a-zA-Z_$]+$/.test(name)) {
- return true;
- }
- return false;
- }
-
-})(Handlebars.Compiler, Handlebars.JavaScriptCompiler);
-
-Handlebars.precompile = function(string, options) {
- options = options || {};
-
- var ast = Handlebars.parse(string);
- var environment = new Handlebars.Compiler().compile(ast, options);
- return new Handlebars.JavaScriptCompiler().compile(environment, options);
-};
-
-Handlebars.compile = function(string, options) {
- options = options || {};
-
- var compiled;
- function compile() {
- var ast = Handlebars.parse(string);
- var environment = new Handlebars.Compiler().compile(ast, options);
- var templateSpec = new Handlebars.JavaScriptCompiler().compile(environment, options, undefined, true);
- return Handlebars.template(templateSpec);
- }
-
- // Template is only compiled on first use and cached after that point.
- return function(context, options) {
- if (!compiled) {
- compiled = compile();
- }
- return compiled.call(this, context, options);
- };
-};
-;
-// lib/handlebars/runtime.js
-Handlebars.VM = {
- template: function(templateSpec) {
- // Just add water
- var container = {
- escapeExpression: Handlebars.Utils.escapeExpression,
- invokePartial: Handlebars.VM.invokePartial,
- programs: [],
- program: function(i, fn, data) {
- var programWrapper = this.programs[i];
- if(data) {
- return Handlebars.VM.program(fn, data);
- } else if(programWrapper) {
- return programWrapper;
- } else {
- programWrapper = this.programs[i] = Handlebars.VM.program(fn);
- return programWrapper;
- }
- },
- programWithDepth: Handlebars.VM.programWithDepth,
- noop: Handlebars.VM.noop
- };
-
- return function(context, options) {
- options = options || {};
- return templateSpec.call(container, Handlebars, context, options.helpers, options.partials, options.data);
- };
- },
-
- programWithDepth: function(fn, data, $depth) {
- var args = Array.prototype.slice.call(arguments, 2);
-
- return function(context, options) {
- options = options || {};
-
- return fn.apply(this, [context, options.data || data].concat(args));
- };
- },
- program: function(fn, data) {
- return function(context, options) {
- options = options || {};
-
- return fn(context, options.data || data);
- };
- },
- noop: function() { return ""; },
- invokePartial: function(partial, name, context, helpers, partials, data) {
- options = { helpers: helpers, partials: partials, data: data };
-
- if(partial === undefined) {
- throw new Handlebars.Exception("The partial " + name + " could not be found");
- } else if(partial instanceof Function) {
- return partial(context, options);
- } else if (!Handlebars.compile) {
- throw new Handlebars.Exception("The partial " + name + " could not be compiled when running in runtime-only mode");
- } else {
- partials[name] = Handlebars.compile(partial);
- return partials[name](context, options);
- }
- }
-};
-
-Handlebars.template = Handlebars.VM.template;
-;
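
The Handlebars.compile wrapper removed above defers parsing and compilation until a template is first rendered, then reuses the cached result. A minimal standalone sketch of that lazy compile-and-cache pattern in plain JavaScript (the lazyCompile/render names are illustrative only, not part of the changeset):

    // Lazy compile-and-cache: the expensive step runs on first call only,
    // after which the cached function is reused (mirrors Handlebars.compile above).
    function lazyCompile(compileFn) {
        var compiled;                          // cache for the compiled render function
        return function(context, options) {
            if (!compiled) {
                compiled = compileFn();        // compile on first use
            }
            return compiled.call(this, context, options);
        };
    }

    // Illustrative usage: "compilation" here just builds a trivial render function.
    var render = lazyCompile(function() {
        return function(context) { return "Hello, " + context.name; };
    });
    render({ name: "Galaxy" });   // compiles, then renders
    render({ name: "tools" });    // reuses the cached function
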
diff -r de2946aca8877087d761684a72ff955f46e4f5a6 -r 15e75fe2efae761ac7182a496d5c79ef3bc3a43a static/scripts/libs/handlebars.runtime.js
--- /dev/null
+++ b/static/scripts/libs/handlebars.runtime.js
@@ -0,0 +1,223 @@
+// lib/handlebars/base.js
+var Handlebars = {};
+
+Handlebars.VERSION = "1.0.beta.6";
+
+Handlebars.helpers = {};
+Handlebars.partials = {};
+
+Handlebars.registerHelper = function(name, fn, inverse) {
+ if(inverse) { fn.not = inverse; }
+ this.helpers[name] = fn;
+};
+
+Handlebars.registerPartial = function(name, str) {
+ this.partials[name] = str;
+};
+
+Handlebars.registerHelper('helperMissing', function(arg) {
+ if(arguments.length === 2) {
+ return undefined;
+ } else {
+ throw new Error("Could not find property '" + arg + "'");
+ }
+});
+
+var toString = Object.prototype.toString, functionType = "[object Function]";
+
+Handlebars.registerHelper('blockHelperMissing', function(context, options) {
+ var inverse = options.inverse || function() {}, fn = options.fn;
+
+
+ var ret = "";
+ var type = toString.call(context);
+
+ if(type === functionType) { context = context.call(this); }
+
+ if(context === true) {
+ return fn(this);
+ } else if(context === false || context == null) {
+ return inverse(this);
+ } else if(type === "[object Array]") {
+ if(context.length > 0) {
+ for(var i=0, j=context.length; i<j; i++) {
+ ret = ret + fn(context[i]);
+ }
+ } else {
+ ret = inverse(this);
+ }
+ return ret;
+ } else {
+ return fn(context);
+ }
+});
+
+Handlebars.registerHelper('each', function(context, options) {
+ var fn = options.fn, inverse = options.inverse;
+ var ret = "";
+
+ if(context && context.length > 0) {
+ for(var i=0, j=context.length; i<j; i++) {
+ ret = ret + fn(context[i]);
+ }
+ } else {
+ ret = inverse(this);
+ }
+ return ret;
+});
+
+Handlebars.registerHelper('if', function(context, options) {
+ var type = toString.call(context);
+ if(type === functionType) { context = context.call(this); }
+
+ if(!context || Handlebars.Utils.isEmpty(context)) {
+ return options.inverse(this);
+ } else {
+ return options.fn(this);
+ }
+});
+
+Handlebars.registerHelper('unless', function(context, options) {
+ var fn = options.fn, inverse = options.inverse;
+ options.fn = inverse;
+ options.inverse = fn;
+
+ return Handlebars.helpers['if'].call(this, context, options);
+});
+
+Handlebars.registerHelper('with', function(context, options) {
+ return options.fn(context);
+});
+
+Handlebars.registerHelper('log', function(context) {
+ Handlebars.log(context);
+});
+;
+// lib/handlebars/utils.js
+Handlebars.Exception = function(message) {
+ var tmp = Error.prototype.constructor.apply(this, arguments);
+
+ for (var p in tmp) {
+ if (tmp.hasOwnProperty(p)) { this[p] = tmp[p]; }
+ }
+
+ this.message = tmp.message;
+};
+Handlebars.Exception.prototype = new Error;
+
+// Build out our basic SafeString type
+Handlebars.SafeString = function(string) {
+ this.string = string;
+};
+Handlebars.SafeString.prototype.toString = function() {
+ return this.string.toString();
+};
+
+(function() {
+ var escape = {
+ "<": "&lt;",
+ ">": "&gt;",
+ '"': "&quot;",
+ "'": "&#x27;",
+ "`": "&#x60;"
+ };
+
+ var badChars = /&(?!\w+;)|[<>"'`]/g;
+ var possible = /[&<>"'`]/;
+
+ var escapeChar = function(chr) {
+ return escape[chr] || "&amp;";
+ };
+
+ Handlebars.Utils = {
+ escapeExpression: function(string) {
+ // don't escape SafeStrings, since they're already safe
+ if (string instanceof Handlebars.SafeString) {
+ return string.toString();
+ } else if (string == null || string === false) {
+ return "";
+ }
+
+ if(!possible.test(string)) { return string; }
+ return string.replace(badChars, escapeChar);
+ },
+
+ isEmpty: function(value) {
+ if (typeof value === "undefined") {
+ return true;
+ } else if (value === null) {
+ return true;
+ } else if (value === false) {
+ return true;
+ } else if(Object.prototype.toString.call(value) === "[object Array]" && value.length === 0) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+ };
+})();;
+// lib/handlebars/runtime.js
+Handlebars.VM = {
+ template: function(templateSpec) {
+ // Just add water
+ var container = {
+ escapeExpression: Handlebars.Utils.escapeExpression,
+ invokePartial: Handlebars.VM.invokePartial,
+ programs: [],
+ program: function(i, fn, data) {
+ var programWrapper = this.programs[i];
+ if(data) {
+ return Handlebars.VM.program(fn, data);
+ } else if(programWrapper) {
+ return programWrapper;
+ } else {
+ programWrapper = this.programs[i] = Handlebars.VM.program(fn);
+ return programWrapper;
+ }
+ },
+ programWithDepth: Handlebars.VM.programWithDepth,
+ noop: Handlebars.VM.noop
+ };
+
+ return function(context, options) {
+ options = options || {};
+ return templateSpec.call(container, Handlebars, context, options.helpers, options.partials, options.data);
+ };
+ },
+
+ programWithDepth: function(fn, data, $depth) {
+ var args = Array.prototype.slice.call(arguments, 2);
+
+ return function(context, options) {
+ options = options || {};
+
+ return fn.apply(this, [context, options.data || data].concat(args));
+ };
+ },
+ program: function(fn, data) {
+ return function(context, options) {
+ options = options || {};
+
+ return fn(context, options.data || data);
+ };
+ },
+ noop: function() { return ""; },
+ invokePartial: function(partial, name, context, helpers, partials, data) {
+ options = { helpers: helpers, partials: partials, data: data };
+
+ if(partial === undefined) {
+ throw new Handlebars.Exception("The partial " + name + " could not be found");
+ } else if(partial instanceof Function) {
+ return partial(context, options);
+ } else if (!Handlebars.compile) {
+ throw new Handlebars.Exception("The partial " + name + " could not be compiled when running in runtime-only mode");
+ } else {
+ partials[name] = Handlebars.compile(partial);
+ return partials[name](context, options);
+ }
+ }
+};
+
+Handlebars.template = Handlebars.VM.template;
+;
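
The runtime added above only renders precompiled template specs: Handlebars.template wraps a spec function and invokes it with a container whose escapeExpression does the HTML escaping. A minimal sketch of driving it directly, assuming this handlebars.runtime.js is loaded on the page (the spec function and sample data below are illustrative, not part of the changeset):

    // A hand-written template spec in the shape the VM expects:
    // function(Handlebars, context, helpers, partials, data) -> string,
    // called with the runtime's container object as `this`.
    var spec = function(Handlebars, context, helpers, partials, data) {
        return "<span>" + this.escapeExpression(context.name) + "</span>";
    };

    var tmpl = Handlebars.template(spec);      // same as Handlebars.VM.template
    tmpl({ name: 'Filter & Sort <tools>' });   // "&", "<" and ">" come back HTML-escaped
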
https://bitbucket.org/galaxy/galaxy-central/changeset/657c1295db78/
changeset: 657c1295db78
user: jgoecks
date: 2012-03-30 17:46:02
summary: Enable handlebars precompilation and template insertion.
affected #: 8 files
diff -r 15e75fe2efae761ac7182a496d5c79ef3bc3a43a -r 657c1295db78085a845ecf68075008710eba5ed5 lib/galaxy/web/framework/helpers/__init__.py
--- a/lib/galaxy/web/framework/helpers/__init__.py
+++ b/lib/galaxy/web/framework/helpers/__init__.py
@@ -45,14 +45,28 @@
"""
return "\n".join( [ stylesheet_link_tag( "/static/style/" + name + ".css?v=%s" % server_starttime ) for name in args ] )
-def js( *args ):
+def js_helper( prefix, *args ):
"""
- Take a list of javascript names (no extension) and return appropriate
+ Take a prefix and list of javascript names and return appropriate
string of script tags.
Cache-bust with time that server started running on
"""
- return "\n".join( [ javascript_include_tag( "/static/scripts/" + name + ".js?v=%s" % server_starttime ) for name in args ] )
+ return "\n".join( [ javascript_include_tag( prefix + name + ".js?v=%s" % server_starttime ) for name in args ] )
+
+def js( *args ):
+ """
+ Take a list of javascript names (no extension) and return appropriate
+ string of script tags.
+ """
+ return js_helper( '/static/scripts/', *args )
+
+def handlebars( *args ):
+ """
+ Take a list of template names (no extension) and return appropriate
+ string of script tags.
+ """
+ return js_helper( '/static/scripts/handlebars/compiled/', *args )
# Hashes
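
On the client side, each file emitted by the new handlebars() helper registers its template under Handlebars.templates (see the compiled files below), so views can render it directly. A small illustration, assuming handlebars.runtime.js and the compiled panel_label template from this changeset are loaded (the sample data is made up):

    // panel_label.handlebars is just "<span>{{name}}</span>"; its compiled form
    // (static/scripts/handlebars/compiled/panel_label.js) registers itself here.
    var html = Handlebars.templates.panel_label({ name: "NGS: Mapping" });
    // html === "<span>NGS: Mapping</span>"
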
diff -r 15e75fe2efae761ac7182a496d5c79ef3bc3a43a -r 657c1295db78085a845ecf68075008710eba5ed5 static/scripts/handlebars/compile_templates.py
--- /dev/null
+++ b/static/scripts/handlebars/compile_templates.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# Script requires handlebars compiler be installed; use node package manager
+# to install handlebars.
+
+import sys
+
+from glob import glob
+from subprocess import call
+from shutil import copyfile
+from os import path
+
+cmd = "handlebars %s -f compiled/%s.js"
+
+# If specific scripts specified on command line, just pack them, otherwise pack
+# all.
+
+if len( sys.argv ) > 1:
+ to_pack = sys.argv[1:]
+else:
+ to_pack = glob( "*.handlebars" )
+
+for fname in to_pack:
+ fname_base = path.splitext( path.split( fname )[1] )[0]
+ print fname_base
+ print "%s --> compiled/%s.js" % ( fname, fname_base )
+ out = call( cmd % ( fname, fname_base ), shell=True )
diff -r 15e75fe2efae761ac7182a496d5c79ef3bc3a43a -r 657c1295db78085a845ecf68075008710eba5ed5 static/scripts/handlebars/compiled/panel_label.js
--- /dev/null
+++ b/static/scripts/handlebars/compiled/panel_label.js
@@ -0,0 +1,15 @@
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+templates['panel_label'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ helpers = helpers || Handlebars.helpers;
+ var buffer = "", stack1, foundHelper, self=this, functionType="function", helperMissing=helpers.helperMissing, undef=void 0, escapeExpression=this.escapeExpression;
+
+
+ buffer += "<span>";
+ foundHelper = helpers.name;
+ stack1 = foundHelper || depth0.name;
+ if(typeof stack1 === functionType) { stack1 = stack1.call(depth0, { hash: {} }); }
+ else if(stack1=== undef) { stack1 = helperMissing.call(depth0, "name", { hash: {} }); }
+ buffer += escapeExpression(stack1) + "</span>";
+ return buffer;});
+})();
\ No newline at end of file
diff -r 15e75fe2efae761ac7182a496d5c79ef3bc3a43a -r 657c1295db78085a845ecf68075008710eba5ed5 static/scripts/handlebars/compiled/panel_section.js
--- /dev/null
+++ b/static/scripts/handlebars/compiled/panel_section.js
@@ -0,0 +1,25 @@
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+templates['panel_section'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ helpers = helpers || Handlebars.helpers;
+ var buffer = "", stack1, foundHelper, self=this, functionType="function", helperMissing=helpers.helperMissing, undef=void 0, escapeExpression=this.escapeExpression;
+
+
+ buffer += "<div class=\"toolSectionTitle\" id=\"title_";
+ foundHelper = helpers.id;
+ stack1 = foundHelper || depth0.id;
+ if(typeof stack1 === functionType) { stack1 = stack1.call(depth0, { hash: {} }); }
+ else if(stack1=== undef) { stack1 = helperMissing.call(depth0, "id", { hash: {} }); }
+ buffer += escapeExpression(stack1) + "\">\n <a href=\"#\"><span>";
+ foundHelper = helpers.name;
+ stack1 = foundHelper || depth0.name;
+ if(typeof stack1 === functionType) { stack1 = stack1.call(depth0, { hash: {} }); }
+ else if(stack1=== undef) { stack1 = helperMissing.call(depth0, "name", { hash: {} }); }
+ buffer += escapeExpression(stack1) + "</span></a>\n</div>\n<div id=\"";
+ foundHelper = helpers.id;
+ stack1 = foundHelper || depth0.id;
+ if(typeof stack1 === functionType) { stack1 = stack1.call(depth0, { hash: {} }); }
+ else if(stack1=== undef) { stack1 = helperMissing.call(depth0, "id", { hash: {} }); }
+ buffer += escapeExpression(stack1) + "\" class=\"toolSectionBody\" style=\"display: none; \">\n <div class=\"toolSectionBg\"></div>\n<div>";
+ return buffer;});
+})();
\ No newline at end of file
diff -r 15e75fe2efae761ac7182a496d5c79ef3bc3a43a -r 657c1295db78085a845ecf68075008710eba5ed5 static/scripts/handlebars/compiled/tool_link.js
--- /dev/null
+++ b/static/scripts/handlebars/compiled/tool_link.js
@@ -0,0 +1,40 @@
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+templates['tool_link'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ helpers = helpers || Handlebars.helpers;
+ var buffer = "", stack1, foundHelper, self=this, functionType="function", helperMissing=helpers.helperMissing, undef=void 0, escapeExpression=this.escapeExpression;
+
+
+ buffer += "<a class=\"";
+ foundHelper = helpers.id;
+ stack1 = foundHelper || depth0.id;
+ if(typeof stack1 === functionType) { stack1 = stack1.call(depth0, { hash: {} }); }
+ else if(stack1=== undef) { stack1 = helperMissing.call(depth0, "id", { hash: {} }); }
+ buffer += escapeExpression(stack1) + " tool-link\" href=\"";
+ foundHelper = helpers.link;
+ stack1 = foundHelper || depth0.link;
+ if(typeof stack1 === functionType) { stack1 = stack1.call(depth0, { hash: {} }); }
+ else if(stack1=== undef) { stack1 = helperMissing.call(depth0, "link", { hash: {} }); }
+ buffer += escapeExpression(stack1) + "\" target=\"";
+ foundHelper = helpers.target;
+ stack1 = foundHelper || depth0.target;
+ if(typeof stack1 === functionType) { stack1 = stack1.call(depth0, { hash: {} }); }
+ else if(stack1=== undef) { stack1 = helperMissing.call(depth0, "target", { hash: {} }); }
+ buffer += escapeExpression(stack1) + "\" minsizehint=\"";
+ foundHelper = helpers.min_width;
+ stack1 = foundHelper || depth0.min_width;
+ if(typeof stack1 === functionType) { stack1 = stack1.call(depth0, { hash: {} }); }
+ else if(stack1=== undef) { stack1 = helperMissing.call(depth0, "min_width", { hash: {} }); }
+ buffer += escapeExpression(stack1) + "\">";
+ foundHelper = helpers.name;
+ stack1 = foundHelper || depth0.name;
+ if(typeof stack1 === functionType) { stack1 = stack1.call(depth0, { hash: {} }); }
+ else if(stack1=== undef) { stack1 = helperMissing.call(depth0, "name", { hash: {} }); }
+ buffer += escapeExpression(stack1) + "</a> ";
+ foundHelper = helpers.description;
+ stack1 = foundHelper || depth0.description;
+ if(typeof stack1 === functionType) { stack1 = stack1.call(depth0, { hash: {} }); }
+ else if(stack1=== undef) { stack1 = helperMissing.call(depth0, "description", { hash: {} }); }
+ buffer += escapeExpression(stack1);
+ return buffer;});
+})();
\ No newline at end of file
diff -r 15e75fe2efae761ac7182a496d5c79ef3bc3a43a -r 657c1295db78085a845ecf68075008710eba5ed5 static/scripts/handlebars/panel_label.handlebars
--- /dev/null
+++ b/static/scripts/handlebars/panel_label.handlebars
@@ -0,0 +1,1 @@
+<span>{{name}}</span>
\ No newline at end of file
diff -r 15e75fe2efae761ac7182a496d5c79ef3bc3a43a -r 657c1295db78085a845ecf68075008710eba5ed5 static/scripts/handlebars/panel_section.handlebars
--- /dev/null
+++ b/static/scripts/handlebars/panel_section.handlebars
@@ -0,0 +1,6 @@
+<div class="toolSectionTitle" id="title_{{id}}">
+ <a href="#"><span>{{name}}</span></a>
+</div>
+<div id="{{id}}" class="toolSectionBody" style="display: none; ">
+ <div class="toolSectionBg"></div>
+<div>
\ No newline at end of file
diff -r 15e75fe2efae761ac7182a496d5c79ef3bc3a43a -r 657c1295db78085a845ecf68075008710eba5ed5 static/scripts/handlebars/tool_link.handlebars
--- /dev/null
+++ b/static/scripts/handlebars/tool_link.handlebars
@@ -0,0 +1,1 @@
+<a class="{{id}} tool-link" href="{{link}}" target="{{target}}" minsizehint="{{min_width}}">{{name}}</a> {{description}}
\ No newline at end of file
https://bitbucket.org/galaxy/galaxy-central/changeset/833332b039fb/
changeset: 833332b039fb
user: jgoecks
date: 2012-04-02 00:19:11
summary: New tool menu generation using a JavaScript Backbone-based MVC framework. Framework principles are (a) event-based communication from model to view and (b) Backbone views act more as controllers with the HTML as the view itself. Tool menu generation and search are completely functional; recently used tools and tool tags are not yet implemented.
affected #: 11 files
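
The pattern described in the summary keeps models free of any view references: models announce state changes through events, and Backbone views subscribe and update the DOM, acting more like controllers. A minimal sketch of that flow, assuming Backbone and jQuery are loaded as in base.mako (the Flag/FlagView names are illustrative, not from this changeset):

    // The model carries state only; it never touches the DOM.
    var Flag = Backbone.Model.extend({ defaults: { hidden: false } });

    // The Backbone "view" subscribes to model events and manipulates its element;
    // the HTML itself is the real view in this architecture.
    var FlagView = Backbone.View.extend({
        initialize: function() {
            this.model.on("change:hidden", this.update_visible, this);
        },
        update_visible: function() {
            ( this.model.get("hidden") ? this.$el.hide() : this.$el.show() );
        }
    });

    var flag = new Flag();
    var view = new FlagView({ model: flag });
    flag.set("hidden", true);   // "change:hidden" fires; the view hides its element
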
diff -r 657c1295db78085a845ecf68075008710eba5ed5 -r 833332b039fb6f00fb148ddbae03721783216347 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -31,6 +31,7 @@
from galaxy.util.hash_util import *
from galaxy.util import listify
from galaxy.util.shed_util import *
+from galaxy.web import url_for
from galaxy.visualization.tracks.visual_analytics import TracksterConfig
@@ -486,11 +487,11 @@
self.version = elem.get( "version" ) or ''
self.elems = odict()
- def to_dict( self ):
+ def to_dict( self, trans ):
""" Return a dict that includes section's attributes. """
section_elts = []
for key, val in self.elems.items():
- section_elts.append( val.to_dict() )
+ section_elts.append( val.to_dict( trans ) )
return { 'type': 'section', 'id': self.id, 'name': self.name, 'version': self.version, 'elems': section_elts }
class ToolSectionLabel( object ):
@@ -503,7 +504,7 @@
self.id = elem.get( "id" )
self.version = elem.get( "version" ) or ''
- def to_dict( self ):
+ def to_dict( self, trans ):
""" Return a dict that includes label's attributes. """
return { 'type': 'label', 'id': self.id, 'name': self.text, 'version': self.version }
@@ -2260,10 +2261,19 @@
self.sa_session.flush()
return primary_datasets
- def to_dict( self, **kwds ):
- """ Return dict that includes tool's attributes. """
- return { 'type': 'tool', 'id': self.id, 'name': self.name,
- 'version': self.version, 'description': self.description }
+ def to_dict( self, trans ):
+ """ Return dict of tool attributes. """
+
+ # Create tool link.
+ if not self.tool_type.startswith( 'data_source' ):
+ link = url_for( controller='tool_runner', tool_id=self.id )
+ else:
+ link = url_for( self.action, **self.get_static_param_values( trans ) )
+
+ return { 'type': 'tool', 'id': self.id, 'name': self.name, 'link': link,
+ 'version': self.version, 'description': self.description,
+ 'min_width': self.uihints.get( 'minwidth', -1 ),
+ 'target': self.target }
class DataSourceTool( Tool ):
"""
diff -r 657c1295db78085a845ecf68075008710eba5ed5 -r 833332b039fb6f00fb148ddbae03721783216347 lib/galaxy/web/controllers/tools.py
--- a/lib/galaxy/web/controllers/tools.py
+++ b/lib/galaxy/web/controllers/tools.py
@@ -20,12 +20,12 @@
panel_elts = []
# Taken from tool_menu.mako:
for key, val in self.app.toolbox.tool_panel.items():
- panel_elts.append( val.to_dict() )
+ panel_elts.append( val.to_dict( trans ) )
rval = panel_elts
else:
tools = []
for id, tool in self.app.toolbox.tools_by_id.items():
- tools.append( tool.to_dict() )
+ tools.append( tool.to_dict( trans ) )
rval = tools
return rval
diff -r 657c1295db78085a845ecf68075008710eba5ed5 -r 833332b039fb6f00fb148ddbae03721783216347 static/june_2007_style/blue/tool_menu.css
--- a/static/june_2007_style/blue/tool_menu.css
+++ b/static/june_2007_style/blue/tool_menu.css
@@ -0,0 +1,2 @@
+#tool-search{padding-top:5px;padding-bottom:10px;position:relative;}
+div.toolSectionWrapper{margin-bottom:5px;}
diff -r 657c1295db78085a845ecf68075008710eba5ed5 -r 833332b039fb6f00fb148ddbae03721783216347 static/june_2007_style/tool_menu.css.tmpl
--- a/static/june_2007_style/tool_menu.css.tmpl
+++ b/static/june_2007_style/tool_menu.css.tmpl
@@ -0,0 +1,8 @@
+#tool-search {
+ padding-top: 5px;
+ padding-bottom: 10px;
+ position: relative;
+}
+div.toolSectionWrapper {
+ margin-bottom: 5px;
+}
\ No newline at end of file
diff -r 657c1295db78085a845ecf68075008710eba5ed5 -r 833332b039fb6f00fb148ddbae03721783216347 static/scripts/backbone/tools.js
--- /dev/null
+++ b/static/scripts/backbone/tools.js
@@ -0,0 +1,383 @@
+/**
+ * Model classes for Galaxy tools and tool panel. Models have no references to views,
+ * instead using events to indicate state changes; this is advantageous because
+ * multiple views can use the same object and models can be used without views.
+ */
+
+/**
+ * Simple base model for any visible element. Includes useful attributes and ability
+ * to set and track visibility.
+ */
+var BaseModel = Backbone.Model.extend({
+ defaults: {
+ id: null,
+ name: null,
+ hidden: false
+ },
+
+ show: function() {
+ this.set("hidden", false);
+ },
+
+ hide: function() {
+ this.set("hidden", true);
+ },
+
+ is_visible: function() {
+ return !this.attributes.hidden;
+ }
+});
+
+/**
+ * A Galaxy tool.
+ */
+var Tool = BaseModel.extend({
+ // Default attributes.
+ defaults: {
+ description: null,
+ target: null,
+ params: []
+ },
+
+ apply_search_results: function(results) {
+ ( _.indexOf(results, this.attributes.id) !== -1 ? this.show() : this.hide() );
+ return this.is_visible();
+ }
+});
+
+/**
+ * Label or section header in tool panel.
+ */
+var ToolPanelLabel = BaseModel.extend({});
+
+/**
+ * Section of tool panel with elements (labels and tools).
+ */
+var ToolPanelSection = BaseModel.extend({
+ defaults: {
+ elems: [],
+ open: false
+ },
+
+ clear_search_results: function() {
+ _.each(this.attributes.elems, function(elt) {
+ elt.show();
+ });
+
+ this.show();
+ this.set("open", false);
+ },
+
+ apply_search_results: function(results) {
+ var all_hidden = true,
+ cur_label;
+ _.each(this.attributes.elems, function(elt) {
+ if (elt instanceof ToolPanelLabel) {
+ cur_label = elt;
+ cur_label.hide();
+ }
+ else if (elt instanceof Tool) {
+ if (elt.apply_search_results(results)) {
+ all_hidden = false;
+ if (cur_label) {
+ cur_label.show();
+ }
+ }
+ }
+ });
+
+ if (all_hidden) {
+ this.hide();
+ }
+ else {
+ this.show();
+ this.set("open", true);
+ }
+ }
+});
+
+/**
+ * Tool search that updates results when query is changed. Result value of null
+ * indicates that query was not run; if not null, results are from search using
+ * query.
+ */
+var ToolSearch = Backbone.Model.extend({
+ defaults: {
+ spinner_url: "",
+ search_url: "",
+ visible: true,
+ query: "",
+ results: null
+ },
+
+ initialize: function() {
+ this.on("change:query", this.do_search);
+ },
+
+ /**
+ * Do the search and update the results.
+ */
+ do_search: function() {
+ var query = this.attributes.query;
+
+ // If query is too short, do not search.
+ if (query.length < 3) {
+ this.set("results", null);
+ return;
+ }
+
+ // Do search via AJAX.
+ var q = query + '*';
+ // Stop previous ajax-request
+ if (this.timer) {
+ clearTimeout(this.timer);
+ }
+ // Start a new ajax-request in X ms
+ $("#search-spinner").show();
+ var self = this;
+ this.timer = setTimeout(function () {
+ $.get(self.attributes.search_url, { query: q }, function (data) {
+ self.set("results", data);
+ $("#search-spinner").hide();
+ }, "json" );
+ }, 200 );
+ }
+});
+
+/**
+ * A collection of ToolPanelSections, Tools, and ToolPanelLabels. Collection
+ * applies search results as they become available.
+ */
+var ToolPanel = Backbone.Collection.extend({
+ url: "/tools",
+ parse: function(response) {
+ // Recursive function to parse tool panel elements.
+ var parse_elt = function(elt_dict) {
+ var type = elt_dict.type;
+ if (type === 'tool') {
+ return new Tool(elt_dict);
+ }
+ else if (type === 'section') {
+ // Parse elements.
+ var elems = _.map(elt_dict.elems, parse_elt);
+ elt_dict.elems = elems;
+ return new ToolPanelSection(elt_dict);
+ }
+ else if (type === 'label') {
+ return new ToolPanelLabel(elt_dict);
+ }
+ };
+
+ return _.map(response, parse_elt);
+ },
+
+ initialize: function(options) {
+ this.tool_search = options.tool_search;
+ this.tool_search.on("change:results", this.apply_search_results, this);
+ },
+
+ clear_search_results: function() {
+ this.each(function(panel_elt) {
+ panel_elt.clear_search_results();
+ });
+ },
+
+ apply_search_results: function() {
+ var results = this.tool_search.attributes.results;
+ if (results === null) {
+ this.clear_search_results();
+ return;
+ }
+
+ this.each(function(panel_elt) {
+ panel_elt.apply_search_results(results);
+ });
+ }
+});
+
+/**
+ * View classes for Galaxy tools and tool panel.
+ *
+ * Views use precompiled Handlebars templates for rendering. Views update as needed
+ * based on (a) model/collection events and (b) user interactions; in this sense,
+ * they are controllers as well and the HTML is the real view in the MVC architecture.
+ */
+
+// TODO: implement a BaseModelView for handling model visibility.
+
+/**
+ * Link to a tool.
+ */
+var ToolLinkView = Backbone.View.extend({
+ tagName: 'div',
+ template: Handlebars.templates.tool_link,
+ initialize: function() {
+ this.model.on("change:hidden", this.update_visible, this);
+ },
+ render: function() {
+ this.$el.append( this.template(this.model.toJSON()) );
+ return this;
+ },
+ update_visible: function() {
+ ( this.model.attributes.hidden ? this.$el.hide() : this.$el.show() );
+ }
+});
+
+/**
+ * Panel label/section header.
+ */
+var ToolPanelLabelView = Backbone.View.extend({
+ tagName: 'div',
+ className: 'toolPanelLabel',
+ template: Handlebars.templates.panel_label,
+ initialize: function() {
+ this.model.on("change:hidden", this.update_visible, this);
+ },
+ render: function() {
+ this.$el.append( this.template(this.model.toJSON()) );
+ return this;
+ },
+ update_visible: function() {
+ ( this.model.attributes.hidden ? this.$el.hide() : this.$el.show() );
+ }
+});
+
+/**
+ * Panel section.
+ */
+var ToolPanelSectionView = Backbone.View.extend({
+ tagName: 'div',
+ className: 'toolSectionWrapper',
+ template: Handlebars.templates.panel_section,
+ initialize: function() {
+ this.model.on("change:hidden", this.update_visible, this);
+ this.model.on("change:open", this.update_open, this);
+ },
+ render: function() {
+ // Build using template.
+ this.$el.append( this.template(this.model.toJSON()) );
+
+ // Add tools to section.
+ var section_body = this.$el.find(".toolSectionBody");
+ _.each(this.model.attributes.elems, function(elt) {
+ if (elt instanceof Tool) {
+ var tool_view = new ToolLinkView({model: elt, className: "toolTitle"});
+ tool_view.render();
+ section_body.append(tool_view.$el);
+ }
+ else if (elt instanceof ToolPanelLabel) {
+ var label_view = new ToolPanelLabelView({model: elt});
+ label_view.render();
+ section_body.append(label_view.$el);
+ }
+ else {
+ // TODO: handle nested section bodies?
+ }
+ });
+ return this;
+ },
+
+ events: {
+ 'click .toolSectionTitle > a': 'toggle'
+ },
+
+ /**
+ * Toggle visibility of tool section.
+ */
+ toggle: function() {
+ this.$el.children(".toolSectionBody").toggle("fast");
+ this.model.set("open", !this.model.attributes.open);
+ },
+
+ /**
+ * Update whether section is open or closed.
+ */
+ update_open: function() {
+ (this.model.attributes.open ?
+ this.$el.children(".toolSectionBody").show("fast") :
+ this.$el.children(".toolSectionBody").hide("fast")
+ );
+ },
+
+ /**
+ * Update section and section elements visibility after search.
+ */
+ update_visible: function() {
+ ( this.model.attributes.hidden ? this.$el.hide() : this.$el.show() );
+ }
+});
+
+var ToolSearchView = Backbone.View.extend({
+ tagName: 'div',
+ id: 'tool-search',
+ className: 'bar',
+ template: Handlebars.templates.tool_search,
+
+ events: {
+ 'click': 'focus_and_select',
+ 'keyup :input': 'query_changed'
+ },
+
+ render: function() {
+ this.$el.append( this.template(this.model.toJSON()) );
+ return this;
+ },
+
+ focus_and_select: function() {
+ this.$el.find(":input").focus().select();
+ },
+
+ query_changed: function() {
+ this.model.set("query", this.$el.find(":input").val());
+ }
+});
+
+var ToolPanelView = Backbone.View.extend({
+ tagName: 'div',
+ className: 'toolMenu',
+
+ /**
+ * Waits for collection to load and then renders.
+ */
+ initialize: function(options) {
+ this.collection.tool_search.on("change:results", this.handle_search_results, this);
+
+ var self = this;
+ // Wait for collection to load before rendering.
+ this.collection.bind("reset", function() { self.render(); });
+ },
+
+ render: function() {
+ var this_el = this.$el;
+
+ // Render search.
+ var search_view = new ToolSearchView( {model: this.collection.tool_search} );
+ search_view.render();
+ this_el.append(search_view.$el);
+
+ // Render panel.
+ this.collection.each(function(panel_elt) {
+ if (panel_elt instanceof ToolPanelSection) {
+ var section_title_view = new ToolPanelSectionView({model: panel_elt});
+ section_title_view.render();
+ this_el.append(section_title_view.$el);
+ }
+ else if (panel_elt instanceof Tool) {
+ var tool_view = new ToolLinkView({model: panel_elt, className: "toolTitleNoSection"});
+ tool_view.render();
+ this_el.append(tool_view.$el);
+ }
+ });
+ return this;
+ },
+
+ handle_search_results: function() {
+ var results = this.collection.tool_search.attributes.results;
+ if (results && results.length === 0) {
+ $("#search-no-results").show();
+ }
+ else {
+ $("#search-no-results").hide();
+ }
+ }
+});
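
Wiring these classes together follows the same sequence the reworked tool_menu.mako below uses: create the search model, hand it to the panel collection, fetch, and render. A condensed sketch, assuming jQuery, Underscore, Backbone and this file are loaded; the literal URLs stand in for the h.url_for() values used in the template:

    // Mirrors the initialization done in templates/root/tool_menu.mako below.
    var tool_search = new ToolSearch({
        search_url: "/root/tool_search",                          // controller='root', action='tool_search'
        spinner_url: "/static/images/loading_small_white_bg.gif"
    });
    var tool_panel = new ToolPanel({ tool_search: tool_search });
    tool_panel.fetch();     // GET /tools, parsed into Tool/ToolPanelSection/ToolPanelLabel models

    var tool_panel_view = new ToolPanelView({ collection: tool_panel });
    // The view renders itself once the collection's "reset" event fires after fetch().

    // Typing in the search box ultimately does this: setting the query triggers do_search(),
    // and the resulting "change:results" event makes the collection show/hide matching tools.
    tool_search.set("query", "bwa");
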
diff -r 657c1295db78085a845ecf68075008710eba5ed5 -r 833332b039fb6f00fb148ddbae03721783216347 static/scripts/galaxy.base.js
--- a/static/scripts/galaxy.base.js
+++ b/static/scripts/galaxy.base.js
@@ -623,7 +623,6 @@
if (initValue) {
var search_input = tool_menu_frame.find("#tool-search-query");
search_input.val("search tools");
- search_input.css("font-style", "italic");
}
}
diff -r 657c1295db78085a845ecf68075008710eba5ed5 -r 833332b039fb6f00fb148ddbae03721783216347 static/scripts/handlebars/compile_templates.py
--- a/static/scripts/handlebars/compile_templates.py
+++ b/static/scripts/handlebars/compile_templates.py
@@ -22,6 +22,5 @@
for fname in to_pack:
fname_base = path.splitext( path.split( fname )[1] )[0]
- print fname_base
print "%s --> compiled/%s.js" % ( fname, fname_base )
out = call( cmd % ( fname, fname_base ), shell=True )
diff -r 657c1295db78085a845ecf68075008710eba5ed5 -r 833332b039fb6f00fb148ddbae03721783216347 static/scripts/handlebars/compiled/tool_search.js
--- /dev/null
+++ b/static/scripts/handlebars/compiled/tool_search.js
@@ -0,0 +1,15 @@
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+templates['tool_search'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ helpers = helpers || Handlebars.helpers;
+ var buffer = "", stack1, foundHelper, self=this, functionType="function", helperMissing=helpers.helperMissing, undef=void 0, escapeExpression=this.escapeExpression;
+
+
+ buffer += "<input type=\"text\" name=\"query\" value=\"search tools\" id=\"tool-search-query\" autocomplete=\"off\" class=\"search-query parent-width\" />\n<img src=\"";
+ foundHelper = helpers.spinner_url;
+ stack1 = foundHelper || depth0.spinner_url;
+ if(typeof stack1 === functionType) { stack1 = stack1.call(depth0, { hash: {} }); }
+ else if(stack1=== undef) { stack1 = helperMissing.call(depth0, "spinner_url", { hash: {} }); }
+ buffer += escapeExpression(stack1) + "\" id=\"search-spinner\" class=\"search-spinner\"/>\n";
+ return buffer;});
+})();
\ No newline at end of file
diff -r 657c1295db78085a845ecf68075008710eba5ed5 -r 833332b039fb6f00fb148ddbae03721783216347 static/scripts/handlebars/tool_search.handlebars
--- /dev/null
+++ b/static/scripts/handlebars/tool_search.handlebars
@@ -0,0 +1,2 @@
+<input type="text" name="query" value="search tools" id="tool-search-query" autocomplete="off" class="search-query parent-width" />
+<img src="{{spinner_url}}" id="search-spinner" class="search-spinner"/>
diff -r 657c1295db78085a845ecf68075008710eba5ed5 -r 833332b039fb6f00fb148ddbae03721783216347 templates/base.mako
--- a/templates/base.mako
+++ b/templates/base.mako
@@ -26,7 +26,7 @@
## <!--[if lt IE 7]>
## <script type='text/javascript' src="/static/scripts/IE7.js"></script>
## <![endif]-->
- ${h.js( "jquery", "galaxy.base" )}
+ ${h.js( "jquery", "galaxy.base", "libs/underscore", "libs/backbone", "libs/handlebars.runtime" )}
</%def>
## Additional metas can be defined by templates inheriting from this one.
diff -r 657c1295db78085a845ecf68075008710eba5ed5 -r 833332b039fb6f00fb148ddbae03721783216347 templates/root/tool_menu.mako
--- a/templates/root/tool_menu.mako
+++ b/templates/root/tool_menu.mako
@@ -1,36 +1,7 @@
-<%!
- import re
-%>
+<%inherit file="/base.mako"/><%namespace file="/tagging_common.mako" import="render_tool_tagging_elements" />
-## Render a tool
-<%def name="render_tool( tool, section )">
- %if not tool.hidden:
- %if section:
- <div class="toolTitle">
- %else:
- <div class="toolTitleNoSection">
- %endif
- <%
- if not tool.tool_type.startswith( 'data_source' ):
- link = h.url_for( controller='tool_runner', tool_id=tool.id )
- else:
- link = h.url_for( tool.action, ** tool.get_static_param_values( t ) )
- %>
- ## FIXME: This doesn't look right
- ## %if "[[" in tool.description and "]]" in tool.description:
- ## ${tool.description.replace( '[[', '<a href="link" target="galaxy_main">' % $tool.id ).replace( "]]", "</a>" )
- <% tool_id = re.sub( '[^a-z0-9_]', '_', tool.id.lower() ) %>
- %if tool.name:
- <a class="link-${tool_id} tool-link" href="${link}" target=${tool.target} minsizehint="${tool.uihints.get( 'minwidth', -1 )}">${_(tool.name)}</a> ${tool.description}
- %else:
- <a class="link-${tool_id} tool-link" href="${link}" target=${tool.target} minsizehint="${tool.uihints.get( 'minwidth', -1 )}">${tool.description}</a>
- %endif
- </div>
- %endif
-</%def>
-
## Render a workflow
<%def name="render_workflow( key, workflow, section )">
%if section:
@@ -43,385 +14,109 @@
</div></%def>
-## Render a label
-<%def name="render_label( label )">
- <div class="toolPanelLabel" id="title_${label.id}">
- <span>${label.text}</span>
- </div>
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.handlebars( "tool_link", "panel_label", "panel_section", "tool_search" )}
+ ${h.js( "galaxy.base", "json2", "autocomplete_tagging", "backbone/tools" )}
+
+ <%
+ show_tool_search = "true"
+ if trans.user and trans.user.preferences.get( "show_tool_search", "False" ) == "False":
+ show_tool_search = "false"
+ %>
+
+ <script type="text/javascript">
+ // Init. on document load.
+ var tool_panel, tool_panel_view, tool_search;
+ $(function() {
+ // Set up search.
+ tool_search = new ToolSearch( {spinner_url: "${h.url_for('/static/images/loading_small_white_bg.gif')}",
+ search_url: "${h.url_for( controller='root', action='tool_search' )}",
+ visible: ${show_tool_search} } );
+
+ // Set up tool panel.
+ tool_panel = new ToolPanel( { tool_search: tool_search } );
+ tool_panel.fetch();
+
+
+ // Set up tool panel view and initialize.
+ tool_panel_view = new ToolPanelView( {collection: tool_panel} );
+ $('body').prepend(tool_panel_view.$el);
+
+ // Minsize init hint.
+ $( "a[minsizehint]" ).click( function() {
+ if ( parent.handle_minwidth_hint ) {
+ parent.handle_minwidth_hint( $(this).attr( "minsizehint" ) );
+ }
+ });
+
+ // Log clicks on tools.
+ /*
+ $("div.toolTitle > a").click( function() {
+ var tool_title = $(this).attr('id').split("-")[1];
+ var section_title = $.trim( $(this).parents("div.toolSectionWrapper").find("div.toolSectionTitle").text() );
+ var search_active = $(this).parents("div.toolTitle").hasClass("search_match");
+
+ // Log action.
+ galaxy_async.log_user_action("tool_menu_click." + tool_title, section_title,
+ JSON.stringify({"search_active" : search_active}));
+ });
+ */
+
+ // TODO: is this necessary?
+ $( "a[minsizehint]" ).click( function() {
+ if ( parent.handle_minwidth_hint ) {
+ parent.handle_minwidth_hint( $(this).attr( "minsizehint" ) );
+ }
+ });
+ });
+ </script></%def>
-<!DOCTYPE HTML>
-<html>
- <head>
- <title>${_('Galaxy Tools')}</title>
- <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
- <link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" />
- <link href="${h.url_for('/static/style/tool_menu.css')}" rel="stylesheet" type="text/css" />
- <link href="${h.url_for('/static/style/autocomplete_tagging.css')}" rel="stylesheet" type="text/css" />
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ${h.css("tool_menu")}
+</%def>
- ${h.js( "jquery", "galaxy.base", "json2", "autocomplete_tagging" )}
- <script type="text/javascript">
- // Set up GalaxyAsync object.
- var galaxy_async = new GalaxyAsync(${str(trans.app.config.log_actions).lower()});
- galaxy_async.set_func_url(galaxy_async.log_user_action, "${h.url_for( controller='user', action='log_user_action_async' )}");
+<%def name="title()">
+ ${_('Galaxy Tools')}
+</%def>
+
+## Default body
+<body class="toolMenuPage">
+
+ <div class="toolMenu">
+ ## Feedback when search returns no results.
+ <div id="search-no-results" style="display: none; padding-top: 5px">
+ <em><strong>Search did not match any tools.</strong></em>
+ </div>
- $(document).ready(function() {
- // Init showing/hiding of tool sections.
- $( "div.toolSectionBody" ).hide();
- $( "div.toolSectionTitle > span" ).wrap( "<a href='#'></a>" )
- var last_expanded = null;
- $( "div.toolSectionTitle" ).each( function() {
- var body = $(this).next( "div.toolSectionBody" );
- $(this).click( function() {
- if ( body.is( ":hidden" ) ) {
- if ( last_expanded ) {
- last_expanded.slideUp( "fast" );
- }
- last_expanded = body;
- body.slideDown( "fast" );
- } else {
- body.slideUp( "fast" );
- last_expanded = null;
- }
- return false;
- });
- });
-
- // Log clicks on tools.
- $("div.toolTitle > a").click( function() {
- var tool_title = $(this).attr('id').split("-")[1];
- var section_title = $.trim( $(this).parents("div.toolSectionWrapper").find("div.toolSectionTitle").text() );
- var search_active = $(this).parents("div.toolTitle").hasClass("search_match");
-
- // Log action.
- galaxy_async.log_user_action("tool_menu_click." + tool_title, section_title,
- JSON.stringify({"search_active" : search_active}));
- });
-
- $( "a[minsizehint]" ).click( function() {
- if ( parent.handle_minwidth_hint ) {
- parent.handle_minwidth_hint( $(this).attr( "minsizehint" ) );
- }
- });
-
- // Init searching.
- $("#tool-search-query").click( function () {
- $(this).focus();
- $(this).select();
- }).keyup( function () {
- // Remove italics.
- $(this).css("font-style", "normal");
-
- // Don't search if the search value is < 3 chars, but clear the search if there was a previous query
- if ( this.value.length < 3 && this.lastValue && this.lastValue.length >= 3 ) {
- reset_tool_search(false);
- // Re-apply tags
- if ( current_tags.length > 0 ) {
- $.get("${h.url_for( controller='root', action='tool_search' )}", { query: "", tags: current_tags }, function (data) {
- apply_search_results(data);
- }, "json" );
- }
- // Don't update if same value as last time
- } else if ( this.value !== this.lastValue ) {
- // Add class to denote that searching is active.
- $(this).addClass("search_active");
- // input.addClass(config.loadingClass);
- // Add '*' to facilitate partial matching.
- var q = this.value + '*';
- // Stop previous ajax-request
- if (this.timer) {
- clearTimeout(this.timer);
- }
- // Start a new ajax-request in X ms
- $("#search-spinner").show();
- this.timer = setTimeout(function () {
- $.get("${h.url_for( controller='root', action='tool_search' )}", { query: q, tags: current_tags }, function (data) {
- apply_search_results(data);
- $("#search-spinner").hide();
- }, "json" );
- }, 200 );
- }
- this.lastValue = this.value;
- });
-
- // Apply stored tags
- %if trans.user and trans.user.preferences.get( 'selected_tool_tags', '' ):
- current_tags = "${trans.user.preferences['selected_tool_tags']}".split(",")
- $.get("${h.url_for( controller='root', action='tool_search' )}", { query: "", tags: current_tags }, function (data) {
- apply_search_results(data);
- }, "json" );
- $("span.tag-name").each( function() {
- for ( var i in current_tags ) {
- if ( $(this).text() == current_tags[i] ) {
- $(this).addClass("active-tag-name");
- $(this).append("<img class='delete-tag-img' src='${h.url_for('/static/images/delete_tag_icon_gray.png')}'/>")
- }
- }
- });
- %endif
- });
-
- var apply_search_results = function (data) {
- // input.removeClass(config.loadingClass);
- // Show live-search if results and search-term aren't empty
- $("#search-no-results").hide();
- // Hide all tool sections.
- $(".toolSectionWrapper").hide();
- // This hides all tools but not workflows link (which is in a .toolTitle div).
- $(".toolSectionWrapper").find(".toolTitle").hide();
- if ( data.length !== 0 ) {
- // Map tool ids to element ids and join them.
- var s = $.map( data, function( n, i ) { return ".link-" + n.toLowerCase().replace(/[^a-z0-9_]/g,'_'); } ).join( ", " );
-
- // First pass to show matching tools and their parents.
- $(s).each( function() {
- // Add class to denote match.
- $(this).parent().addClass("search_match");
- if ($(this).parents("#recently_used_wrapper").length === 0) {
- // Default behavior.
- $(this).parent().show().parent().parent().show().parent().show();
- } else if ($(this).parents(".user_pref_visible").length !== 0) {
- // RU menu is visible, so filter it as normal.
- $(this).parent().show().parent().parent().show().parent().show();
- } else {
- // RU menu is not visible, so set up classes and visibility so that if menu shown matching is
- // aleady in place.
- $(this).parent().show();
- }
- });
-
- // Hide labels that have no visible children.
- $(".toolPanelLabel").each( function() {
- var this_label = $(this);
- var next = this_label.next();
- var no_visible_tools = true;
- // Look through tools following label and, if none are visible, hide label.
- while (next.length !== 0 && next.hasClass("toolTitle")) {
- if (next.is(":visible")) {
- no_visible_tools = false;
- break;
- } else {
- next = next.next();
- }
- }
- if (no_visible_tools) {
- this_label.hide();
- }
- });
- } else {
- $("#search-no-results").show();
- }
- }
-
- // Update recently used tools menu. Function inserts a new item and removes the last item.
- function update_recently_used() {
- $.ajax({
- url: "${h.url_for( controller='/user', action='get_most_recently_used_tool_async' )}",
- dataType: 'json',
- error: function() {
- // console.log( "Failed to update recently used list." );
- },
- success: function(new_tool_info) {
- var recently_used_elts = $("#recently_used").find(".toolTitle");
- var first_elt = $(recently_used_elts.first());
- var found_in_list = false;
-
- // Look for new tool in current list. If found, rearrange list to move tool to top.
- var new_tool_info_id = new_tool_info.id.toLowerCase().replace(/[^a-z0-9_]/, "_")
- recently_used_elts.each( function(index) {
- var anchor = $(this).find("a");
- if (anchor.hasClass("link-" + new_tool_info_id)) {
- found_in_list = true;
-
- // If tool is first, do nothing.
- if (index === 0) {
- return;
- } else {
- // Tool not first; reorder.
- $(this).remove();
- first_elt.before($(this));
- }
- }
- });
-
- // If tool not in list, create new element, remove last element, and put new element first in list.
- if (!found_in_list) {
- new_tool_elt = $("<div class='toolTitle'> \
- <a class='link-" + new_tool_info.id + "' href='" + new_tool_info.link + "' target='" +
- new_tool_info.target + "' minsizehint='" + new_tool_info.minsizehint + "'>" +
- new_tool_info.name + "</a> " + new_tool_info.description + " \
- </div>");
- recently_used_elts.last().remove();
- recently_used_elts.first().before(new_tool_elt);
- }
- }
- });
-
- }
-
- var current_tags = new Array();
- function tool_tag_click(tag_name, tag_value) {
- var add = true;
- for ( var i = 0 ; i < current_tags.length ; i++ ) {
- if ( current_tags[i] == tag_name ) {
- current_tags.splice( i, 1 );
- add = false;
- }
- }
- if ( add ) {
- current_tags.push( tag_name );
- $("span.tag-name").each( function() {
- if ( $(this).text() == tag_name ) {
- $(this).addClass("active-tag-name");
- $(this).append("<img class='delete-tag-img' src='${h.url_for('/static/images/delete_tag_icon_gray.png')}'/>")
- }
- });
- } else {
- $("span.tag-name").each( function() {
- if ( $(this).text() == tag_name ) {
- $(this).removeClass("active-tag-name");
- $(this).text(tag_name);
- }
- });
- }
- if ( current_tags.length == 0 ) {
- $("#search-no-results").hide();
- $(".tool-link").each( function() {
- reset_tool_search(false);
- });
- return;
- }
- var q = $("input#tool-search-query").val();
- if ( q == "search tools" ) {
- q = "";
- } else if ( q.length > 0 ) {
- q = q + '*';
- }
- $.get("${h.url_for( controller='root', action='tool_search' )}", { query: q, tags: current_tags }, function (data) {
- apply_search_results(data);
- }, "json" );
- }
-
- </script>
- </head>
-
- <body class="toolMenuPage">
- <div class="toolMenu">
-
- ## Tool search.
- <%
- show_tool_search = True
- if trans.user:
- show_tool_search = trans.user.preferences.get( "show_tool_search", "False" ) == "True"
-
- if show_tool_search:
- display = "block"
- else:
- display = "none"
- %>
- <div id="tool-search" class="bar" style="padding-top: 5px; padding-bottom: 10px;position: relative; display: ${display};">
- %if trans.app.config.get_bool( 'enable_tool_tags', False ):
- <b>Tags:</b>
- ${render_tool_tagging_elements()}
+ ## Link to workflow management. The location of this may change, but eventually
+ ## at least some workflows will appear here (the user should be able to
+ ## configure which of their stored workflows appear in the tools menu).
+
+ %if t.user:
+ <div class="toolSectionPad"></div>
+ <div class="toolSectionPad"></div>
+ <div class="toolSectionTitle" id="title_XXinternalXXworkflow">
+ <span>Workflows</span>
+ </div>
+ <div id="XXinternalXXworkflow" class="toolSectionBody">
+ <div class="toolSectionBg">
+ %if t.user.stored_workflow_menu_entries:
+ %for m in t.user.stored_workflow_menu_entries:
+ <div class="toolTitle">
+ <a href="${h.url_for( controller='workflow', action='run', id=trans.security.encode_id(m.stored_workflow_id) )}" target="galaxy_main">${m.stored_workflow.name}</a>
+ </div>
+ %endfor
%endif
- <input type="text" name="query" value="search tools" id="tool-search-query" autocomplete="off" class="search-query parent-width" />
- <img src="${h.url_for('/static/images/loading_small_white_bg.gif')}" id="search-spinner" class="search-spinner"/>
+ <div class="toolTitle">
+ <a href="${h.url_for( controller='workflow', action='list_for_run')}" target="galaxy_main">All workflows</a>
+ </div></div>
-
- ## Recently used tools.
- %if trans.user:
- <%
- if trans.user.preferences.get( 'show_recently_used_menu', 'False' ) == 'True':
- display = "block"
- pref_class = "user_pref_visible"
- else:
- display = "none"
- pref_class = "user_pref_hidden"
- %>
- <div class="toolSectionWrapper ${pref_class}" id="recently_used_wrapper"
- style="display: ${display}; padding-bottom: 5px">
- <div class="toolSectionTitle">
- <span>Recently Used</span>
- </div>
- <div id="recently_used" class="toolSectionBody">
- <div class="toolSectionBg">
- %for tool in recent_tools:
- ${render_tool( tool, True )}
- %endfor
- </div>
- </div>
- <div class="toolSectionPad"></div>
- </div>
- %endif
-
- ## Tools.
- %for key, val in toolbox.tool_panel.items():
- <div class="toolSectionWrapper">
- %if key.startswith( 'tool' ):
- ${render_tool( val, False )}
- %elif key.startswith( 'workflow' ):
- ${render_workflow( key, val, False )}
- %elif key.startswith( 'section' ):
- <% section = val %>
- ## Render the section only if it is not empty.
- %if section.elems:
- <div class="toolSectionTitle" id="title_${section.id}">
- <span>${section.name}</span>
- </div>
- <div id="${section.id}" class="toolSectionBody">
- <div class="toolSectionBg">
- %for section_key, section_val in section.elems.items():
- %if section_key.startswith( 'tool' ):
- ${render_tool( section_val, True )}
- %elif section_key.startswith( 'workflow' ):
- ${render_workflow( section_key, section_val, True )}
- %elif section_key.startswith( 'label' ):
- ${render_label( section_val )}
- %endif
- %endfor
- </div>
- </div>
- %endif
- %elif key.startswith( 'label' ):
- ${render_label( val )}
- %endif
- <div class="toolSectionPad"></div>
- </div>
- %endfor
-
- ## Feedback when search returns no results.
- <div id="search-no-results" style="display: none; padding-top: 5px">
- <em><strong>Search did not match any tools.</strong></em>
- </div>
-
- ## Link to workflow management. The location of this may change, but eventually
- ## at least some workflows will appear here (the user should be able to
- ## configure which of their stored workflows appear in the tools menu).
-
- %if t.user:
- <div class="toolSectionPad"></div>
- <div class="toolSectionPad"></div>
- <div class="toolSectionTitle" id="title_XXinternalXXworkflow">
- <span>Workflows</span>
- </div>
- <div id="XXinternalXXworkflow" class="toolSectionBody">
- <div class="toolSectionBg">
- %if t.user.stored_workflow_menu_entries:
- %for m in t.user.stored_workflow_menu_entries:
- <div class="toolTitle">
- <a href="${h.url_for( controller='workflow', action='run', id=trans.security.encode_id(m.stored_workflow_id) )}" target="galaxy_main">${m.stored_workflow.name}</a>
- </div>
- %endfor
- %endif
- <div class="toolTitle">
- <a href="${h.url_for( controller='workflow', action='list_for_run')}" target="galaxy_main">All workflows</a>
- </div>
- </div>
- </div>
- %endif
-
</div>
- </div>
- </body>
-</html>
+ %endif
+
+ </div>
+</body>
\ No newline at end of file
https://bitbucket.org/galaxy/galaxy-central/changeset/11649eacb329/
changeset: 11649eacb329
user: jgoecks
date: 2012-04-02 00:19:34
summary: Merge
affected #: 10 files
diff -r 833332b039fb6f00fb148ddbae03721783216347 -r 11649eacb329e0f84e64d8c07d316c26182d8be1 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -193,18 +193,33 @@
# Store advanced job management config
self.job_manager = kwargs.get('job_manager', self.server_name).strip()
self.job_handlers = [ x.strip() for x in kwargs.get('job_handlers', self.server_name).split(',') ]
+ self.default_job_handlers = [ x.strip() for x in kwargs.get('default_job_handlers', ','.join( self.job_handlers ) ).split(',') ]
# Use database for IPC unless this is a standalone server (or multiple servers doing self dispatching in memory)
self.track_jobs_in_database = True
if ( len( self.job_handlers ) == 1 ) and ( self.job_handlers[0] == self.server_name ) and ( self.job_manager == self.server_name ):
self.track_jobs_in_database = False
# Store per-tool runner configs
+ self.tool_handlers = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_handlers', 'name' )
+ self.tool_runners = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_runners', 'url' )
+ self.datatypes_config = kwargs.get( 'datatypes_config_file', 'datatypes_conf.xml' )
+ # Cloud configuration options
+ self.enable_cloud_launch = string_as_bool( kwargs.get( 'enable_cloud_launch', False ) )
+ # Galaxy messaging (AMQP) configuration options
+ self.amqp = {}
try:
- tool_runners_config = global_conf_parser.items("galaxy:tool_runners")
+ amqp_config = global_conf_parser.items("galaxy_amqp")
+ except ConfigParser.NoSectionError:
+ amqp_config = {}
+ for k, v in amqp_config:
+ self.amqp[k] = v
+ def __read_tool_job_config( self, global_conf_parser, section, key ):
+ try:
+ tool_runners_config = global_conf_parser.items( section )
# Process config to group multiple configs for the same tool.
- tool_runners = {}
+ rval = {}
for entry in tool_runners_config:
- tool_config, url = entry
+ tool_config, val = entry
tool = None
runner_dict = {}
if tool_config.find("[") != -1:
@@ -219,29 +234,18 @@
tool = tool_config
# Add runner URL.
- runner_dict[ 'url' ] = url
+ runner_dict[ key ] = val
# Create tool entry if necessary.
- if tool not in tool_runners:
- tool_runners[ tool ] = []
+ if tool not in rval:
+ rval[ tool ] = []
# Add entry to runners.
- tool_runners[ tool ].append( runner_dict )
+ rval[ tool ].append( runner_dict )
- self.tool_runners = tool_runners
+ return rval
except ConfigParser.NoSectionError:
- self.tool_runners = []
- self.datatypes_config = kwargs.get( 'datatypes_config_file', 'datatypes_conf.xml' )
- # Cloud configuration options
- self.enable_cloud_launch = string_as_bool( kwargs.get( 'enable_cloud_launch', False ) )
- # Galaxy messaging (AMQP) configuration options
- self.amqp = {}
- try:
- amqp_config = global_conf_parser.items("galaxy_amqp")
- except ConfigParser.NoSectionError:
- amqp_config = {}
- for k, v in amqp_config:
- self.amqp[k] = v
+ return []
def get( self, key, default ):
return self.config_dict.get( key, default )
def get_bool( self, key, default ):
diff -r 833332b039fb6f00fb148ddbae03721783216347 -r 11649eacb329e0f84e64d8c07d316c26182d8be1 lib/galaxy/jobs/manager.py
--- a/lib/galaxy/jobs/manager.py
+++ b/lib/galaxy/jobs/manager.py
@@ -144,7 +144,7 @@
pass
for job in jobs_to_check:
- job.handler = self.__select_handler( job )
+ job.handler = self.__get_handler( job )
log.debug( "(%s) Job assigned to handler '%s'" % ( job.id, job.handler ) )
self.sa_session.add( job )
@@ -157,9 +157,15 @@
for job in jobs_to_check:
self.job_handler.job_queue.put( job.id, job.tool_id )
- def __select_handler( self, job ):
- # TODO: handler selection based on params, tool, etc.
- return random.choice( self.app.config.job_handlers )
+ def __get_handler( self, job ):
+ try:
+ params = None
+ if job.params:
+ params = from_json_string( job.params )
+ return self.app.toolbox.tools_by_id.get( job.tool_id, None ).get_job_handler( params )
+ except:
+ log.exception( "(%s) Caught exception attempting to get tool-specific job handler for tool '%s', selecting at random from available handlers instead:" % ( job.id, job.tool_id ) )
+ return random.choice( self.app.config.job_handlers )
def put( self, job_id, tool ):
"""Add a job to the queue (by job identifier)"""
diff -r 833332b039fb6f00fb148ddbae03721783216347 -r 11649eacb329e0f84e64d8c07d316c26182d8be1 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -5,7 +5,7 @@
pkg_resources.require( "simplejson" )
-import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess
+import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess, random
import simplejson
import binascii
from UserDict import DictMixin
@@ -115,17 +115,17 @@
tool_path = self.tool_root_dir
# Only load the panel_dict under certain conditions.
load_panel_dict = not self.integrated_tool_panel_config_has_contents
- for elem in root:
+ for index, elem in enumerate( root ):
if parsing_shed_tool_conf:
config_elems.append( elem )
if elem.tag == 'tool':
- self.load_tool_tag_set( elem, self.tool_panel, self.integrated_tool_panel, tool_path, load_panel_dict, guid=elem.get( 'guid' ) )
+ self.load_tool_tag_set( elem, self.tool_panel, self.integrated_tool_panel, tool_path, load_panel_dict, guid=elem.get( 'guid' ), index=index )
elif elem.tag == 'workflow':
- self.load_workflow_tag_set( elem, self.tool_panel, self.integrated_tool_panel, load_panel_dict )
+ self.load_workflow_tag_set( elem, self.tool_panel, self.integrated_tool_panel, load_panel_dict, index=index )
elif elem.tag == 'section':
- self.load_section_tag_set( elem, tool_path, load_panel_dict )
+ self.load_section_tag_set( elem, tool_path, load_panel_dict, index=index )
elif elem.tag == 'label':
- self.load_label_tag_set( elem, self.tool_panel, self.integrated_tool_panel, load_panel_dict )
+ self.load_label_tag_set( elem, self.tool_panel, self.integrated_tool_panel, load_panel_dict, index=index )
if parsing_shed_tool_conf:
shed_tool_conf_dict = dict( config_filename=config_filename,
tool_path=tool_path,
@@ -287,7 +287,7 @@
self.app.model.ToolShedRepository.table.c.owner == owner,
self.app.model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \
.first()
- def load_tool_tag_set( self, elem, panel_dict, integrated_panel_dict, tool_path, load_panel_dict, guid=None ):
+ def load_tool_tag_set( self, elem, panel_dict, integrated_panel_dict, tool_path, load_panel_dict, guid=None, index=None ):
try:
path = elem.get( "file" )
if guid is None:
@@ -355,10 +355,13 @@
if load_panel_dict:
panel_dict[ key ] = tool
# Always load the tool into the integrated_panel_dict, or it will not be included in the integrated_tool_panel.xml file.
- integrated_panel_dict[ key ] = tool
+ if key in integrated_panel_dict or index is None:
+ integrated_panel_dict[ key ] = tool
+ else:
+ integrated_panel_dict.insert( index, key, tool )
except:
log.exception( "Error reading tool from path: %s" % path )
- def load_workflow_tag_set( self, elem, panel_dict, integrated_panel_dict, load_panel_dict ):
+ def load_workflow_tag_set( self, elem, panel_dict, integrated_panel_dict, load_panel_dict, index=None ):
try:
# TODO: should id be encoded?
workflow_id = elem.get( 'id' )
@@ -368,16 +371,22 @@
if load_panel_dict:
panel_dict[ key ] = workflow
# Always load workflows into the integrated_panel_dict.
- integrated_panel_dict[ key ] = workflow
+ if key in integrated_panel_dict or index is None:
+ integrated_panel_dict[ key ] = workflow
+ else:
+ integrated_panel_dict.insert( index, key, workflow )
except:
log.exception( "Error loading workflow: %s" % workflow_id )
- def load_label_tag_set( self, elem, panel_dict, integrated_panel_dict, load_panel_dict ):
+ def load_label_tag_set( self, elem, panel_dict, integrated_panel_dict, load_panel_dict, index=None ):
label = ToolSectionLabel( elem )
key = 'label_' + label.id
if load_panel_dict:
panel_dict[ key ] = label
- integrated_panel_dict[ key ] = label
- def load_section_tag_set( self, elem, tool_path, load_panel_dict ):
+ if key in integrated_panel_dict or index is None:
+ integrated_panel_dict[ key ] = label
+ else:
+ integrated_panel_dict.insert( index, key, label )
+ def load_section_tag_set( self, elem, tool_path, load_panel_dict, index=None ):
key = 'section_' + elem.get( "id" )
if key in self.tool_panel:
section = self.tool_panel[ key ]
@@ -391,17 +400,20 @@
else:
integrated_section = ToolSection( elem )
integrated_elems = integrated_section.elems
- for sub_elem in elem:
+ for sub_index, sub_elem in enumerate( elem ):
if sub_elem.tag == 'tool':
- self.load_tool_tag_set( sub_elem, elems, integrated_elems, tool_path, load_panel_dict, guid=sub_elem.get( 'guid' ) )
+ self.load_tool_tag_set( sub_elem, elems, integrated_elems, tool_path, load_panel_dict, guid=sub_elem.get( 'guid' ), index=sub_index )
elif sub_elem.tag == 'workflow':
- self.load_workflow_tag_set( sub_elem, elems, integrated_elems, load_panel_dict )
+ self.load_workflow_tag_set( sub_elem, elems, integrated_elems, load_panel_dict, index=sub_index )
elif sub_elem.tag == 'label':
- self.load_label_tag_set( sub_elem, elems, integrated_elems, load_panel_dict )
+ self.load_label_tag_set( sub_elem, elems, integrated_elems, load_panel_dict, index=sub_index )
if load_panel_dict:
self.tool_panel[ key ] = section
# Always load sections into the integrated_tool_panel.
- self.integrated_tool_panel[ key ] = integrated_section
+ if key in self.integrated_tool_panel or index is None:
+ self.integrated_tool_panel[ key ] = integrated_section
+ else:
+ self.integrated_tool_panel.insert( index, key, integrated_section )
def load_tool( self, config_file, guid=None ):
"""Load a single tool from the file named by `config_file` and return an instance of `Tool`."""
# Parse XML configuration file and get the root element
@@ -683,31 +695,35 @@
if tool_version:
return tool_version.get_version_ids( self.app )
return []
- def get_job_runner( self, job_params=None ):
- # Look through runners to find one with matching parameters.
- selected_runner = None
- if len( self.job_runners ) == 1:
- # Most tools have a single runner.
- selected_runner = self.job_runners[0]
+ def __get_job_run_config( self, run_configs, key, job_params=None ):
+ # Look through runners/handlers to find one with matching parameters.
+ available_configs = []
+ if len( run_configs ) == 1:
+ # Most tools have a single config.
+            return run_configs[0][ key ] # a single config is the common case; return it directly and skip the random choice below
elif job_params is None:
- # Use job runner with no params
- for runner in self.job_runners:
- if "params" not in runner:
- selected_runner = runner
+ # Use job config with no params
+ for config in run_configs:
+ if "params" not in config:
+ available_configs.append( config )
else:
- # Find runner with matching parameters.
- for runner in self.job_runners:
- if "params" in runner:
+ # Find config with matching parameters.
+ for config in run_configs:
+ if "params" in config:
match = True
- runner_params = runner[ "params" ]
+ config_params = config[ "params" ]
for param, value in job_params.items():
- if param not in runner_params or \
- runner_params[ param ] != job_params[ param ]:
+ if param not in config_params or \
+ config_params[ param ] != job_params[ param ]:
match = False
break
if match:
- selected_runner = runner
- return selected_runner[ "url" ]
+ available_configs.append( config )
+ return random.choice( available_configs )[ key ]
+ def get_job_runner( self, job_params=None ):
+ return self.__get_job_run_config( self.job_runners, key='url', job_params=job_params )
+ def get_job_handler( self, job_params=None ):
+ return self.__get_job_run_config( self.job_handlers, key='name', job_params=job_params )
def parse( self, root, guid=None ):
"""
Read tool configuration from the element `root` and fill in `self`.
@@ -774,6 +790,12 @@
self.parallelism = ToolParallelismInfo(parallelism)
else:
self.parallelism = None
+        # Set job handler(s). Each handler is a dict with 'name' and, optionally, 'params'.
+ self_id = self.id.lower()
+ self.job_handlers = [ { "name" : name } for name in self.app.config.default_job_handlers ]
+ # Set custom handler(s) if they're defined.
+ if self_id in self.app.config.tool_handlers:
+ self.job_handlers = self.app.config.tool_handlers[ self_id ]
# Set job runner(s). Each runner is a dict with 'url' and, optionally, 'params'.
if self.app.config.start_job_runners is None:
# Jobs are always local regardless of tool config if no additional
@@ -783,7 +805,6 @@
# Set job runner to the cluster default
self.job_runners = [ { "url" : self.app.config.default_cluster_job_runner } ]
# Set custom runner(s) if they're defined.
- self_id = self.id.lower()
if self_id in self.app.config.tool_runners:
self.job_runners = self.app.config.tool_runners[ self_id ]
# Is this a 'hidden' tool (hidden in tool menu)
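The refactored __get_job_run_config above matches runner/handler configs against the job's params and then picks randomly among the matches to spread load. A minimal standalone sketch of that selection pattern follows; the function name and sample data here are illustrative, not part of the changeset.

import random

def select_config( run_configs, key, job_params=None ):
    # Most tools have a single config; return it directly.
    if len( run_configs ) == 1:
        return run_configs[0][ key ]
    if job_params is None:
        # No job params supplied: only configs without a "params" block qualify.
        candidates = [ c for c in run_configs if "params" not in c ]
    else:
        # Keep configs whose "params" block matches every supplied job param.
        candidates = [ c for c in run_configs if "params" in c and
                       all( c[ "params" ].get( p ) == v for p, v in job_params.items() ) ]
    # Spread load by choosing one of the matching configs at random.
    return random.choice( candidates )[ key ]

# Example shaped like the tool_handlers entries added further below:
handlers = [ { "name": "main" },
             { "name": "realtime_handler", "params": { "source": "trackster" } } ]
print( select_config( handlers, "name", job_params={ "source": "trackster" } ) )  # realtime_handler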
diff -r 833332b039fb6f00fb148ddbae03721783216347 -r 11649eacb329e0f84e64d8c07d316c26182d8be1 lib/galaxy/util/odict.py
--- a/lib/galaxy/util/odict.py
+++ b/lib/galaxy/util/odict.py
@@ -11,23 +11,22 @@
This dictionary class extends UserDict to record the order in which items are
added. Calling keys(), values(), items(), etc. will return results in this
order.
+ """
+ def __init__( self, dict = None ):
+ self._keys = []
+ UserDict.__init__( self, dict )
- I've added iterkeys, itervalues, iteritems
- """
- def __init__(self, dict = None):
- self._keys = []
- UserDict.__init__(self, dict)
+ def __delitem__( self, key ):
+ UserDict.__delitem__( self, key )
+ self._keys.remove( key )
- def __delitem__(self, key):
- UserDict.__delitem__(self, key)
- self._keys.remove(key)
+ def __setitem__( self, key, item ):
+ UserDict.__setitem__( self, key, item )
+ if key not in self._keys:
+ self._keys.append( key )
- def __setitem__(self, key, item):
- UserDict.__setitem__(self, key, item)
- if key not in self._keys: self._keys.append(key)
-
- def clear(self):
- UserDict.clear(self)
+ def clear( self ):
+ UserDict.clear( self )
self._keys = []
def copy(self):
@@ -35,49 +34,43 @@
new.update( self )
return new
- def items(self):
- return zip(self._keys, self.values())
+ def items( self ):
+ return zip( self._keys, self.values() )
- def keys(self):
+ def keys( self ):
return self._keys[:]
- def popitem(self):
+ def popitem( self ):
try:
key = self._keys[-1]
except IndexError:
- raise KeyError('dictionary is empty')
+ raise KeyError( 'dictionary is empty' )
+ val = self[ key ]
+ del self[ key ]
+ return ( key, val )
- val = self[key]
- del self[key]
+ def setdefault( self, key, failobj=None ):
+ if key not in self._keys:
+ self._keys.append( key )
+ return UserDict.setdefault( self, key, failobj )
- return (key, val)
+ def update( self, dict ):
+ for ( key, val ) in dict.items():
+ self.__setitem__( key, val )
- def setdefault(self, key, failobj = None):
- if key not in self._keys: self._keys.append(key)
- return UserDict.setdefault(self, key, failobj)
+ def values( self ):
+ return map( self.get, self._keys )
- def update(self, dict):
- UserDict.update(self, dict)
- for key in dict.keys():
- if key not in self._keys: self._keys.append(key)
-
- def update(self, dict):
- for (key,val) in dict.items():
- self.__setitem__(key,val)
-
- def values(self):
- return map(self.get, self._keys)
-
- def iterkeys(self):
+ def iterkeys( self ):
return iter( self._keys )
- def itervalues(self):
+ def itervalues( self ):
for key in self._keys:
- yield self.get(key)
+ yield self.get( key )
- def iteritems(self):
+ def iteritems( self ):
for key in self._keys:
- yield key, self.get(key)
+ yield key, self.get( key )
def __iter__( self ):
for key in self._keys:
@@ -86,3 +79,7 @@
def reverse( self ):
self._keys.reverse()
+ def insert( self, index, key, item ):
+ if key not in self._keys:
+ self._keys.insert( index, key )
+ UserDict.__setitem__( self, key, item )
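The new insert() method is what the tool panel loading code above relies on to place an entry at a specific position while keeping normal dict semantics. A quick usage sketch, assuming the class is exposed as galaxy.util.odict.odict (the keys are made-up tool ids):

from galaxy.util.odict import odict  # assumed import path for the class patched above

panel = odict()
panel[ 'tool_a' ] = 'A'
panel[ 'tool_c' ] = 'C'
# Place tool_b between the two existing entries; for a key that already
# exists, insert() leaves its position alone and only overwrites the value.
panel.insert( 1, 'tool_b', 'B' )
print( panel.keys() )  # [ 'tool_a', 'tool_b', 'tool_c' ]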
diff -r 833332b039fb6f00fb148ddbae03721783216347 -r 11649eacb329e0f84e64d8c07d316c26182d8be1 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -84,6 +84,11 @@
self.screencasts_url = kwargs.get( 'screencasts_url', None )
self.log_events = False
self.cloud_controller_instance = False
+ self.server_name = ''
+ self.job_manager = ''
+ self.job_handlers = []
+ self.tool_handlers = []
+ self.tool_runners = []
# Proxy features
self.apache_xsendfile = kwargs.get( 'apache_xsendfile', False )
self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
diff -r 833332b039fb6f00fb148ddbae03721783216347 -r 11649eacb329e0f84e64d8c07d316c26182d8be1 run_functional_tests.sh
--- a/run_functional_tests.sh
+++ b/run_functional_tests.sh
@@ -20,23 +20,23 @@
echo "==========================================================================================================================================="
echo "'run_functional_tests.sh -id bbb' for testing one tool with id 'bbb' ('bbb' is the tool id)"
echo "'run_functional_tests.sh -sid ccc' for testing one section with sid 'ccc' ('ccc' is the string after 'section::')"
-elif [ $1 = '--migrated' ]; then
+elif [ $1 = '-migrated' ]; then
if [ ! $2 ]; then
- python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --migrated
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html -migrated
elif [ $2 = '-id' ]; then
# TODO: This option is not tested...
- python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$3 --with-nosehtml --html-report-file run_functional_tests.html --migrated
+ python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$3 --with-nosehtml --html-report-file run_functional_tests.html -migrated
else
- python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --migrated
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html -migrated
fi
-elif [ $1 = '--installed' ]; then
+elif [ $1 = '-installed' ]; then
if [ ! $2 ]; then
- python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --installed
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html -installed
elif [ $2 = '-id' ]; then
# TODO: This option is not tested...
- python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$3 --with-nosehtml --html-report-file run_functional_tests.html --installed
+ python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$3 --with-nosehtml --html-report-file run_functional_tests.html -installed
else
- python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --installed
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html -installed
fi
else
python ./scripts/functional_tests.py -v --with-nosehtml --html-report-file run_functional_tests.html $1
diff -r 833332b039fb6f00fb148ddbae03721783216347 -r 11649eacb329e0f84e64d8c07d316c26182d8be1 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -135,8 +135,8 @@
tool_path = os.environ.get( 'GALAXY_TEST_TOOL_PATH', 'tools' )
if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_galaxy_locales
- testing_migrated_tools = '--migrated' in sys.argv
- testing_installed_tools = '--installed' in sys.argv
+ testing_migrated_tools = '-migrated' in sys.argv
+ testing_installed_tools = '-installed' in sys.argv
if testing_migrated_tools or testing_installed_tools:
sys.argv.pop()
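run_functional_tests.sh appends the renamed single-dash flag as the last argument, and functional_tests.py strips it from sys.argv before the remaining options reach nose. A condensed sketch of that hand-off (the argv contents are just an example):

import sys

# e.g. sys.argv == [ 'functional_tests.py', '-v', 'functional.test_toolbox', '-migrated' ]
testing_migrated_tools = '-migrated' in sys.argv
testing_installed_tools = '-installed' in sys.argv
if testing_migrated_tools or testing_installed_tools:
    # The wrapper script passes the flag last, so popping the final argument
    # removes it before the rest of the options are handed to nose.
    sys.argv.pop()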
diff -r 833332b039fb6f00fb148ddbae03721783216347 -r 11649eacb329e0f84e64d8c07d316c26182d8be1 scripts/galaxy_messaging/server/data_transfer.py
--- a/scripts/galaxy_messaging/server/data_transfer.py
+++ b/scripts/galaxy_messaging/server/data_transfer.py
@@ -177,7 +177,7 @@
self.update_status( SampleDataset.transfer_status.ADD_TO_LIBRARY )
try:
data = {}
- data[ 'folder_id' ] = api.encode_id( self.config_id_secret, '%s.%s' % ( 'folder', self.folder_id ) )
+ data[ 'folder_id' ] = 'F%s' % api.encode_id( self.config_id_secret, self.folder_id )
data[ 'file_type' ] = 'auto'
data[ 'server_dir' ] = self.server_dir
data[ 'dbkey' ] = ''
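The changed line above switches the library upload payload to the 'F'-prefixed encoded folder id. A hedged sketch of the payload construction; the api module, secret and ids below stand in for the ones data_transfer.py actually uses:

def build_add_to_library_payload( api, config_id_secret, folder_id, server_dir ):
    # Sketch only: `api` stands for the helper module imported by data_transfer.py.
    data = {}
    # Library folder ids are now sent as 'F' + the encoded numeric id instead
    # of encoding the 'folder.<id>' string.
    data[ 'folder_id' ] = 'F%s' % api.encode_id( config_id_secret, folder_id )
    data[ 'file_type' ] = 'auto'
    data[ 'server_dir' ] = server_dir
    data[ 'dbkey' ] = ''
    return data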
diff -r 833332b039fb6f00fb148ddbae03721783216347 -r 11649eacb329e0f84e64d8c07d316c26182d8be1 tools/genomespace/genomespace_exporter.py
--- a/tools/genomespace/genomespace_exporter.py
+++ b/tools/genomespace/genomespace_exporter.py
@@ -49,7 +49,8 @@
def get_directory( url_opener, dm_url, path ):
url = dm_url
- for sub_path in path:
+ i = None
+ for i, sub_path in enumerate( path ):
url = "%s/%s" % ( url, sub_path )
dir_request = urllib2.Request( url, headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' } )
dir_request.get_method = lambda: 'GET'
@@ -59,10 +60,14 @@
#print "e", e, url #punting, assuming lack of permisions at this low of a level...
continue
break
- return dir_dict
+ if i is not None:
+ path = path[i+1:]
+ else:
+ path = []
+ return ( dir_dict, path )
def get_default_directory( url_opener, dm_url ):
- return get_directory( url_opener, dm_url, ["defaultdirectory"] )
+ return get_directory( url_opener, dm_url, ["defaultdirectory"] )[0]
def create_directory( url_opener, directory_dict, new_dir, dm_url ):
payload = { "isDirectory": True }
@@ -142,8 +147,8 @@
dm_url = genomespace_site_dict['dmServer']
#get default directory
if target_directory and target_directory[0] == '/':
- directory_dict = get_directory( url_opener, dm_url, [ "%s/%s/%s" % ( GENOMESPACE_API_VERSION_STRING, 'file', target_directory[1] ) ] + target_directory[2:] )['directory']
- target_directory.pop(0)
+ directory_dict, target_directory = get_directory( url_opener, dm_url, [ "%s/%s/%s" % ( GENOMESPACE_API_VERSION_STRING, 'file', target_directory[1] ) ] + target_directory[2:] )
+ directory_dict = directory_dict['directory']
else:
directory_dict = get_default_directory( url_opener, dm_url )['directory']
#what directory to stuff this in
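get_directory() now returns the directory dictionary together with the path components it did not consume, so the caller can act on the unresolved remainder (presumably creating the missing subdirectories) instead of losing it. A self-contained sketch of that walk-and-return-remainder pattern; walk_components and try_open are illustrative stand-ins, not names from the changeset:

def walk_components( base_url, components, try_open ):
    # Walk path components, skipping ones that fail to open (assumed to be a
    # permissions issue, as in get_directory above), and stop at the first
    # success; hand back the result plus the components not yet consumed.
    result = None
    i = None
    for i, part in enumerate( components ):
        base_url = '%s/%s' % ( base_url, part )
        try:
            result = try_open( base_url )
        except Exception:
            continue
        break
    if i is not None:
        remaining = components[ i + 1: ]
    else:
        remaining = []
    return result, remaining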
diff -r 833332b039fb6f00fb148ddbae03721783216347 -r 11649eacb329e0f84e64d8c07d316c26182d8be1 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -562,6 +562,13 @@
# comma-separated list.
#job_handlers = main
+# By default, a handler from job_handlers will be selected at random if the
+# tool to run does not specify a handler below in [galaxy:tool_handlers]. If you
+# want certain handlers to only handle jobs for tools/params explicitly
+# assigned below, use default_job_handlers to specify which handlers should be
+# used for jobs without explicit handlers.
+#default_job_handlers = main
+
# This enables splitting of jobs into tasks, if specified by the particular tool config.
# This is a new feature and not recommended for production servers yet.
#use_tasked_jobs = False
@@ -664,18 +671,30 @@
#pbs_stage_path =
#pbs_dataset_server =
-# ---- Tool Job Runners -----------------------------------------------------
+# ---- Per-Tool Job Management ----------------------------------------------
-# Individual per-tool job runner overrides. Parameters can be included to define
-# multiple runners per tool. E.g. to run Cufflinks jobs initiated from Trackster
+# Per-tool job handler and runner overrides. Parameters can be included to define multiple
+# runners per tool. E.g. to run Cufflinks jobs initiated from Trackster
# differently than standard Cufflinks jobs:
-# cufflinks = local:///
-# cufflinks[source@trackster] = local:///
-# If not listed here, a tool will run with the runner defined with
-# default_cluster_job_runner.
+#
+# cufflinks = local:///
+# cufflinks[source@trackster] = local:///
+
+[galaxy:tool_handlers]
+
+# By default, Galaxy will select a handler at random from the list of
+# job_handlers set above. You can override as in the following examples:
+#
+#upload1 = upload_handler
+#cufflinks[source@trackster] = realtime_handler
[galaxy:tool_runners]
+# If not listed here, a tool will run with the runner defined with
+# default_cluster_job_runner. These overrides for local:/// are done because
+# these tools can fetch data from remote sites, which may not be suitable to
+# run on a cluster (if it does not have access to the Internet, for example).
+
biomart = local:///
encode_db1 = local:///
hbvar = local:///
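Tying the new sections back to the tool code in this changeset: each entry ends up as a list of dicts keyed by lowercased tool id, with the bracketed suffix becoming a 'params' mapping. Roughly the in-memory shape Tool.parse() reads from app.config, using example values only:

default_job_handlers = [ 'main' ]
tool_handlers = {
    'upload1':   [ { 'name': 'upload_handler' } ],
    'cufflinks': [ { 'name': 'realtime_handler', 'params': { 'source': 'trackster' } } ],
}
tool_runners = {
    'biomart': [ { 'url': 'local:///' } ],
}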
https://bitbucket.org/galaxy/galaxy-central/changeset/24501c5f17a6/
changeset: 24501c5f17a6
user: jgoecks
date: 2012-04-02 16:19:10
summary: Modify pack_scripts.py to (a) pack entire directory trees and (b) only pack if necessary. Add packed versions of many scripts.
affected #: 12 files
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/pack_scripts.py
--- a/static/scripts/pack_scripts.py
+++ b/static/scripts/pack_scripts.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-import sys
+import sys, os
from glob import glob
from subprocess import call
@@ -16,16 +16,52 @@
# If specific scripts specified on command line, just pack them, otherwise pack
# all.
+def recursive_glob( pattern, excluded_dirs ):
+ """
+ Returns all items that match pattern in root and subdirectories.
+ """
+
+ a_dir, a_pattern = path.split( pattern )
+
+ # Skip excluded dirs.
+ if a_dir in excluded_dirs:
+ return []
+
+ # Search current dir.
+ # print a_dir, a_pattern
+ rval = glob( pattern )
+ for item in glob( path.join( a_dir, "*" ) ):
+ if path.isdir( item ):
+ rval.extend( recursive_glob( path.join( item, a_pattern ), excluded_dirs ) )
+
+ return rval
+
+# Get files to pack.
if len( sys.argv ) > 1:
to_pack = sys.argv[1:]
else:
- to_pack = glob( "*.js" )
- to_pack.extend( glob( "libs/*.js" ) )
+ to_pack = recursive_glob( "*.js", [ "packed" ] )
+
for fname in to_pack:
d = dict( fname=fname )
+ packed_fname = path.join( 'packed', fname )
+
+ # Only copy if full version is newer than packed version.
+ if path.exists( packed_fname ) and ( path.getmtime( fname ) < path.getmtime( packed_fname ) ):
+ print "Packed is current: %s" % fname
+ continue
+
print "%(fname)s --> packed/%(fname)s" % d
+
+ # Create destination dir if necessary.
+ dir, name = os.path.split( packed_fname )
+ if not path.exists( dir ):
+ print "Creating needed directory %s" % dir
+ os.makedirs( dir )
+
+ # Copy/pack.
if fname in do_not_pack:
- copyfile( fname, path.join( 'packed', fname ) )
+ copyfile( fname, path.join( packed_fname ) )
else:
out = call( cmd % d, shell=True )
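The freshness check above is the key to "(b) only pack if necessary": a file is re-packed only when no packed copy exists or the source is newer. A condensed sketch of that check (the file name in the example is just one of the scripts in this tree):

from os import path

def needs_packing( fname, packed_dir='packed' ):
    # Re-pack only when there is no packed copy yet, or the source file is
    # newer than its packed copy.
    packed_fname = path.join( packed_dir, fname )
    return not ( path.exists( packed_fname ) and
                 path.getmtime( fname ) < path.getmtime( packed_fname ) )

# e.g. immediately after a full packing run, needs_packing( 'galaxy.base.js' )
# returns False when invoked from static/scripts/, as the script expects.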
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/packed/backbone/tools.js
--- /dev/null
+++ b/static/scripts/packed/backbone/tools.js
@@ -0,0 +1,1 @@
+var BaseModel=Backbone.Model.extend({defaults:{id:null,name:null,hidden:false},show:function(){this.set("hidden",false)},hide:function(){this.set("hidden",true)},is_visible:function(){return !this.attributes.hidden}});var Tool=BaseModel.extend({defaults:{description:null,target:null,params:[]},apply_search_results:function(a){(_.indexOf(a,this.attributes.id)!==-1?this.show():this.hide());return this.is_visible()}});var ToolPanelLabel=BaseModel.extend({});var ToolPanelSection=BaseModel.extend({defaults:{elems:[],open:false},clear_search_results:function(){_.each(this.attributes.elems,function(a){a.show()});this.show();this.set("open",false)},apply_search_results:function(b){var c=true,a;_.each(this.attributes.elems,function(d){if(d instanceof ToolPanelLabel){a=d;a.hide()}else{if(d instanceof Tool){if(d.apply_search_results(b)){c=false;if(a){a.show()}}}}});if(c){this.hide()}else{this.show();this.set("open",true)}}});var ToolSearch=Backbone.Model.extend({defaults:{spinner_url:"",search_url:"",visible:true,query:"",results:null},initialize:function(){this.on("change:query",this.do_search)},do_search:function(){var c=this.attributes.query;if(c.length<3){this.set("results",null);return}var b=c+"*";if(this.timer){clearTimeout(this.timer)}$("#search-spinner").show();var a=this;this.timer=setTimeout(function(){$.get(a.attributes.search_url,{query:b},function(d){a.set("results",d);$("#search-spinner").hide()},"json")},200)}});var ToolPanel=Backbone.Collection.extend({url:"/tools",parse:function(a){var b=function(e){var d=e.type;if(d==="tool"){return new Tool(e)}else{if(d==="section"){var c=_.map(e.elems,b);e.elems=c;return new ToolPanelSection(e)}else{if(d==="label"){return new ToolPanelLabel(e)}}}};return _.map(a,b)},initialize:function(a){this.tool_search=a.tool_search;this.tool_search.on("change:results",this.apply_search_results,this)},clear_search_results:function(){this.each(function(a){a.clear_search_results()})},apply_search_results:function(){var a=this.tool_search.attributes.results;if(a===null){this.clear_search_results();return}this.each(function(b){b.apply_search_results(a)})}});var ToolLinkView=Backbone.View.extend({tagName:"div",template:Handlebars.templates.tool_link,initialize:function(){this.model.on("change:hidden",this.update_visible,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolPanelLabelView=Backbone.View.extend({tagName:"div",className:"toolPanelLabel",template:Handlebars.templates.panel_label,initialize:function(){this.model.on("change:hidden",this.update_visible,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolPanelSectionView=Backbone.View.extend({tagName:"div",className:"toolSectionWrapper",template:Handlebars.templates.panel_section,initialize:function(){this.model.on("change:hidden",this.update_visible,this);this.model.on("change:open",this.update_open,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));var a=this.$el.find(".toolSectionBody");_.each(this.model.attributes.elems,function(b){if(b instanceof Tool){var c=new ToolLinkView({model:b,className:"toolTitle"});c.render();a.append(c.$el)}else{if(b instanceof ToolPanelLabel){var d=new ToolPanelLabelView({model:b});d.render();a.append(d.$el)}else{}}});return this},events:{"click .toolSectionTitle > 
a":"toggle"},toggle:function(){this.$el.children(".toolSectionBody").toggle("fast");this.model.set("open",!this.model.attributes.open)},update_open:function(){(this.model.attributes.open?this.$el.children(".toolSectionBody").show("fast"):this.$el.children(".toolSectionBody").hide("fast"))},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolSearchView=Backbone.View.extend({tagName:"div",id:"tool-search",className:"bar",template:Handlebars.templates.tool_search,events:{click:"focus_and_select","keyup :input":"query_changed"},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},focus_and_select:function(){this.$el.find(":input").focus().select()},query_changed:function(){this.model.set("query",this.$el.find(":input").val())}});var ToolPanelView=Backbone.View.extend({tagName:"div",className:"toolMenu",initialize:function(b){this.collection.tool_search.on("change:results",this.handle_search_results,this);var a=this;this.collection.bind("reset",function(){a.render()})},render:function(){var b=this.$el;var a=new ToolSearchView({model:this.collection.tool_search});a.render();b.append(a.$el);this.collection.each(function(d){if(d instanceof ToolPanelSection){var c=new ToolPanelSectionView({model:d});c.render();b.append(c.$el)}else{if(d instanceof Tool){var e=new ToolLinkView({model:elt,className:"toolTitleNoSection"});e.render();b.append(e.$el)}}});return this},handle_search_results:function(){var a=this.collection.tool_search.attributes.results;if(a&&a.length===0){$("#search-no-results").show()}else{$("#search-no-results").hide()}}});
\ No newline at end of file
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/packed/galaxy.base.js
--- a/static/scripts/packed/galaxy.base.js
+++ b/static/scripts/packed/galaxy.base.js
@@ -1,1 +1,1 @@
-if(!Array.indexOf){Array.prototype.indexOf=function(c){for(var b=0,a=this.length;b<a;b++){if(this[b]==c){return b}}return -1}}function obj_length(c){if(c.length!==undefined){return c.length}var b=0;for(var a in c){b++}return b}$.fn.makeAbsolute=function(a){return this.each(function(){var b=$(this);var c=b.position();b.css({position:"absolute",marginLeft:0,marginTop:0,top:c.top,left:c.left,right:$(window).width()-(c.left+b.width())});if(a){b.remove().appendTo("body")}})};function make_popupmenu(b,c){var a=(b.data("menu_options"));b.data("menu_options",c);if(a){return}b.bind("click.show_popup",function(d){$(".popmenu-wrapper").remove();setTimeout(function(){var g=$("<ul class='dropdown-menu' id='"+b.attr("id")+"-menu'></ul>");var f=b.data("menu_options");if(obj_length(f)<=0){$("<li>No Options.</li>").appendTo(g)}$.each(f,function(j,i){if(i){g.append($("<li></li>").append($("<a href='#'></a>").html(j).click(i)))}else{g.append($("<li></li>").addClass("head").append($("<a href='#'></a>").html(j)))}});var h=$("<div class='popmenu-wrapper' style='position: absolute;left: 0; top: -1000;'></div>").append(g).appendTo("body");var e=d.pageX-h.width()/2;e=Math.min(e,$(document).scrollLeft()+$(window).width()-$(h).width()-5);e=Math.max(e,$(document).scrollLeft()+5);h.css({top:d.pageY,left:e})},10);setTimeout(function(){var f=function(h){$(h).bind("click.close_popup",function(){$(".popmenu-wrapper").remove();h.unbind("click.close_popup")})};f($(window.document));f($(window.top.document));for(var e=window.top.frames.length;e--;){var g=$(window.top.frames[e].document);f(g)}},50);return false})}function make_popup_menus(){jQuery("div[popupmenu]").each(function(){var a={};var c=$(this);c.find("a").each(function(){var f=$(this),h=f.get(0);var d=h.getAttribute("confirm"),e=h.getAttribute("href"),g=h.getAttribute("target");if(!e){a[f.text()]=null}else{a[f.text()]=function(){if(!d||confirm(d)){var i;if(g=="_parent"){window.parent.location=e}else{if(g=="_top"){window.top.location=e}else{if(g=="demo"){if(i==undefined||i.closed){i=window.open(e,g);i.creator=self}}else{window.location=e}}}}}}});var b=$("#"+c.attr("popupmenu"));b.find("a").bind("click",function(d){d.stopPropagation();return true});make_popupmenu(b,a);b.addClass("popup");c.remove()})}function naturalSort(j,h){var p=/(-?[0-9\.]+)/g,k=j.toString().toLowerCase()||"",g=h.toString().toLowerCase()||"",l=String.fromCharCode(0),n=k.replace(p,l+"$1"+l).split(l),e=g.replace(p,l+"$1"+l).split(l),d=(new Date(k)).getTime(),o=d?(new Date(g)).getTime():null;if(o){if(d<o){return -1}else{if(d>o){return 1}}}var m,f;for(var i=0,c=Math.max(n.length,e.length);i<c;i++){m=parseFloat(n[i])||n[i];f=parseFloat(e[i])||e[i];if(m<f){return -1}else{if(m>f){return 1}}}return 0}function replace_big_select_inputs(a,b){if(!jQuery().autocomplete){return}if(a===undefined){a=20}if(b===undefined){b=3000}$("select").each(function(){var d=$(this);var g=d.find("option").length;if((g<a)||(g>b)){return}if(d.attr("multiple")==="multiple"){return}if(d.hasClass("no-autocomplete")){return}var m=d.attr("value");var c=$("<input type='text' class='text-and-autocomplete-select'></input>");c.attr("size",40);c.attr("name",d.attr("name"));c.attr("id",d.attr("id"));c.click(function(){var n=$(this).val();$(this).val("Loading...");$(this).showAllInCache();$(this).val(n);$(this).select()});var e=[];var i={};d.children("option").each(function(){var o=$(this).text();var n=$(this).attr("value");e.push(o);i[o]=n;i[n]=n;if(n==m){c.attr("value",o)}});if(m===""||m==="?"){c.attr("value","Click to Search or 
Select")}if(d.attr("name")=="dbkey"){e=e.sort(naturalSort)}var f={selectFirst:false,autoFill:false,mustMatch:false,matchContains:true,max:b,minChars:0,hideForLessThanMinChars:false};c.autocomplete(e,f);d.replaceWith(c);var k=function(){var o=c.attr("value");var n=i[o];if(n!==null&&n!==undefined){c.attr("value",n)}else{if(m!==""){c.attr("value",m)}else{c.attr("value","?")}}};c.parents("form").submit(function(){k()});$(document).bind("convert_to_values",function(){k()});if(d.attr("refresh_on_change")=="true"){var h=d.attr("refresh_on_change_values"),l=d.attr("last_selected_value");if(h!==undefined){h=h.split(",")}var j=function(){var n=i[c.attr("value")];if(l!==n&&n!==null&&n!==undefined){if(h!==undefined&&$.inArray(n,h)===-1&&$.inArray(l,h)===-1){return}c.attr("value",n);$(window).trigger("refresh_on_change");c.parents("form").submit()}};c.bind("result",j);c.keyup(function(n){if(n.keyCode===13){j()}});c.keydown(function(n){if(n.keyCode===13){return false}})}})}$.fn.make_text_editable=function(g){var d=("num_cols" in g?g.num_cols:30),c=("num_rows" in g?g.num_rows:4),e=("use_textarea" in g?g.use_textarea:false),b=("on_finish" in g?g.on_finish:null),f=("help_text" in g?g.help_text:null);var a=$(this);a.addClass("editable-text").click(function(l){if($(this).children(":input").length>0){return}a.removeClass("editable-text");var i=function(m){a.find(":input").remove();if(m!=""){a.text(m)}else{a.html("<br>")}a.addClass("editable-text");if(b){b(m)}};var h=a.text(),k,j;if(e){k=$("<textarea/>").attr({rows:c,cols:d}).text($.trim(h)).keyup(function(m){if(m.keyCode===27){i(h)}});j=$("<button/>").text("Done").click(function(){i(k.val());return false})}else{k=$("<input type='text'/>").attr({value:$.trim(h),size:d}).blur(function(){i(h)}).keyup(function(m){if(m.keyCode===27){$(this).trigger("blur")}else{if(m.keyCode===13){i($(this).val())}}})}a.text("");a.append(k);if(j){a.append(j)}k.focus();k.select();l.stopPropagation()});if(f){a.attr("title",f).tipsy({gravity:"s"})}return a};function async_save_text(d,f,e,a,c,h,i,g,b){if(c===undefined){c=30}if(i===undefined){i=4}$("#"+d).live("click",function(){if($("#renaming-active").length>0){return}var l=$("#"+f),k=l.text(),j;if(h){j=$("<textarea></textarea>").attr({rows:i,cols:c}).text($.trim(k))}else{j=$("<input type='text'></input>").attr({value:$.trim(k),size:c})}j.attr("id","renaming-active");j.blur(function(){$(this).remove();l.show();if(b){b(j)}});j.keyup(function(n){if(n.keyCode===27){$(this).trigger("blur")}else{if(n.keyCode===13){var m={};m[a]=$(this).val();$(this).trigger("blur");$.ajax({url:e,data:m,error:function(){alert("Text editing for elt "+f+" failed")},success:function(o){if(o!==""){l.text(o)}else{l.html("<em>None</em>")}if(b){b(j)}}})}}});if(g){g(j)}l.hide();j.insertAfter(l);j.focus();j.select();return})}function init_history_items(d,a,c){var b=function(){try{var e=$.jStorage.get("history_expand_state");if(e){for(var g in e){$("#"+g+" div.historyItemBody").show()}}}catch(f){$.jStorage.deleteKey("history_expand_state")}if($.browser.mozilla){$("div.historyItemBody").each(function(){if(!$(this).is(":visible")){$(this).find("pre.peek").css("overflow","hidden")}})}d.each(function(){var j=this.id,h=$(this).children("div.historyItemBody"),i=h.find("pre.peek");$(this).find(".historyItemTitleBar > .historyItemTitle").wrap("<a href='javascript:void(0);'></a>").click(function(){var k;if(h.is(":visible")){if($.browser.mozilla){i.css("overflow","hidden")}h.slideUp("fast");if(!c){k=$.jStorage.get("history_expand_state");if(k){delete 
k[j];$.jStorage.set("history_expand_state",k)}}}else{h.slideDown("fast",function(){if($.browser.mozilla){i.css("overflow","auto")}});if(!c){k=$.jStorage.get("history_expand_state");if(!k){k={}}k[j]=true;$.jStorage.set("history_expand_state",k)}}return false})});$("#top-links > a.toggle").click(function(){var h=$.jStorage.get("history_expand_state");if(!h){h={}}$("div.historyItemBody:visible").each(function(){if($.browser.mozilla){$(this).find("pre.peek").css("overflow","hidden")}$(this).slideUp("fast");if(h){delete h[$(this).parent().attr("id")]}});$.jStorage.set("history_expand_state",h)}).show()};b()}function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}function reset_tool_search(a){var c=$("#galaxy_tools").contents();if(c.length===0){c=$(document)}$(this).removeClass("search_active");c.find(".toolTitle").removeClass("search_match");c.find(".toolSectionBody").hide();c.find(".toolTitle").show();c.find(".toolPanelLabel").show();c.find(".toolSectionWrapper").each(function(){if($(this).attr("id")!="recently_used_wrapper"){$(this).show()}else{if($(this).hasClass("user_pref_visible")){$(this).show()}}});c.find("#search-no-results").hide();c.find("#search-spinner").hide();if(a){var b=c.find("#tool-search-query");b.val("search tools");b.css("font-style","italic")}}var GalaxyAsync=function(a){this.url_dict={};this.log_action=(a===undefined?false:a)};GalaxyAsync.prototype.set_func_url=function(a,b){this.url_dict[a]=b};GalaxyAsync.prototype.set_user_pref=function(a,b){var c=this.url_dict[arguments.callee];if(c===undefined){return false}$.ajax({url:c,data:{pref_name:a,pref_value:b},error:function(){return false},success:function(){return true}})};GalaxyAsync.prototype.log_user_action=function(c,b,d){if(!this.log_action){return}var a=this.url_dict[arguments.callee];if(a===undefined){return false}$.ajax({url:a,data:{action:c,context:b,params:d},error:function(){return false},success:function(){return true}})};$(document).ready(function(){$("select[refresh_on_change='true']").change(function(){var a=$(this),e=a.val(),d=false,c=a.attr("refresh_on_change_values");if(c){c=c.split(",");var b=a.attr("last_selected_value");if($.inArray(e,c)===-1&&$.inArray(b,c)===-1){return}}$(window).trigger("refresh_on_change");$(document).trigger("convert_to_values");a.get(0).form.submit()});$(":checkbox[refresh_on_change='true']").click(function(){var a=$(this),e=a.val(),d=false,c=a.attr("refresh_on_change_values");if(c){c=c.split(",");var b=a.attr("last_selected_value");if($.inArray(e,c)===-1&&$.inArray(b,c)===-1){return}}$(window).trigger("refresh_on_change");a.get(0).form.submit()});$("a[confirm]").click(function(){return confirm($(this).attr("confirm"))});if($.fn.tipsy){$(".tooltip").tipsy({gravity:"s"})}make_popup_menus();replace_big_select_inputs(20,1500);$("a").click(function(){var b=$(this);var c=(parent.frames&&parent.frames.galaxy_main);if((b.attr("target")=="galaxy_main")&&(!c)){var a=b.attr("href");if(a.indexOf("?")==-1){a+="?"}else{a+="&"}a+="use_panels=True";b.attr("href",a);b.attr("target","_self")}return b})});
\ No newline at end of file
+if(!Array.indexOf){Array.prototype.indexOf=function(c){for(var b=0,a=this.length;b<a;b++){if(this[b]==c){return b}}return -1}}function obj_length(c){if(c.length!==undefined){return c.length}var b=0;for(var a in c){b++}return b}$.fn.makeAbsolute=function(a){return this.each(function(){var b=$(this);var c=b.position();b.css({position:"absolute",marginLeft:0,marginTop:0,top:c.top,left:c.left,right:$(window).width()-(c.left+b.width())});if(a){b.remove().appendTo("body")}})};function make_popupmenu(b,c){var a=(b.data("menu_options"));b.data("menu_options",c);if(a){return}b.bind("click.show_popup",function(d){$(".popmenu-wrapper").remove();setTimeout(function(){var g=$("<ul class='dropdown-menu' id='"+b.attr("id")+"-menu'></ul>");var f=b.data("menu_options");if(obj_length(f)<=0){$("<li>No Options.</li>").appendTo(g)}$.each(f,function(j,i){if(i){g.append($("<li></li>").append($("<a href='#'></a>").html(j).click(i)))}else{g.append($("<li></li>").addClass("head").append($("<a href='#'></a>").html(j)))}});var h=$("<div class='popmenu-wrapper' style='position: absolute;left: 0; top: -1000;'></div>").append(g).appendTo("body");var e=d.pageX-h.width()/2;e=Math.min(e,$(document).scrollLeft()+$(window).width()-$(h).width()-5);e=Math.max(e,$(document).scrollLeft()+5);h.css({top:d.pageY,left:e})},10);setTimeout(function(){var f=function(h){$(h).bind("click.close_popup",function(){$(".popmenu-wrapper").remove();h.unbind("click.close_popup")})};f($(window.document));f($(window.top.document));for(var e=window.top.frames.length;e--;){var g=$(window.top.frames[e].document);f(g)}},50);return false})}function make_popup_menus(){jQuery("div[popupmenu]").each(function(){var a={};var c=$(this);c.find("a").each(function(){var f=$(this),h=f.get(0);var d=h.getAttribute("confirm"),e=h.getAttribute("href"),g=h.getAttribute("target");if(!e){a[f.text()]=null}else{a[f.text()]=function(){if(!d||confirm(d)){var i;if(g=="_parent"){window.parent.location=e}else{if(g=="_top"){window.top.location=e}else{if(g=="demo"){if(i==undefined||i.closed){i=window.open(e,g);i.creator=self}}else{window.location=e}}}}}}});var b=$("#"+c.attr("popupmenu"));b.find("a").bind("click",function(d){d.stopPropagation();return true});make_popupmenu(b,a);b.addClass("popup");c.remove()})}function naturalSort(j,h){var p=/(-?[0-9\.]+)/g,k=j.toString().toLowerCase()||"",g=h.toString().toLowerCase()||"",l=String.fromCharCode(0),n=k.replace(p,l+"$1"+l).split(l),e=g.replace(p,l+"$1"+l).split(l),d=(new Date(k)).getTime(),o=d?(new Date(g)).getTime():null;if(o){if(d<o){return -1}else{if(d>o){return 1}}}var m,f;for(var i=0,c=Math.max(n.length,e.length);i<c;i++){m=parseFloat(n[i])||n[i];f=parseFloat(e[i])||e[i];if(m<f){return -1}else{if(m>f){return 1}}}return 0}function replace_big_select_inputs(a,b){if(!jQuery().autocomplete){return}if(a===undefined){a=20}if(b===undefined){b=3000}$("select").each(function(){var d=$(this);var g=d.find("option").length;if((g<a)||(g>b)){return}if(d.attr("multiple")==="multiple"){return}if(d.hasClass("no-autocomplete")){return}var m=d.attr("value");var c=$("<input type='text' class='text-and-autocomplete-select'></input>");c.attr("size",40);c.attr("name",d.attr("name"));c.attr("id",d.attr("id"));c.click(function(){var n=$(this).val();$(this).val("Loading...");$(this).showAllInCache();$(this).val(n);$(this).select()});var e=[];var i={};d.children("option").each(function(){var o=$(this).text();var n=$(this).attr("value");e.push(o);i[o]=n;i[n]=n;if(n==m){c.attr("value",o)}});if(m===""||m==="?"){c.attr("value","Click to Search or 
Select")}if(d.attr("name")=="dbkey"){e=e.sort(naturalSort)}var f={selectFirst:false,autoFill:false,mustMatch:false,matchContains:true,max:b,minChars:0,hideForLessThanMinChars:false};c.autocomplete(e,f);d.replaceWith(c);var k=function(){var o=c.attr("value");var n=i[o];if(n!==null&&n!==undefined){c.attr("value",n)}else{if(m!==""){c.attr("value",m)}else{c.attr("value","?")}}};c.parents("form").submit(function(){k()});$(document).bind("convert_to_values",function(){k()});if(d.attr("refresh_on_change")=="true"){var h=d.attr("refresh_on_change_values"),l=d.attr("last_selected_value");if(h!==undefined){h=h.split(",")}var j=function(){var n=i[c.attr("value")];if(l!==n&&n!==null&&n!==undefined){if(h!==undefined&&$.inArray(n,h)===-1&&$.inArray(l,h)===-1){return}c.attr("value",n);$(window).trigger("refresh_on_change");c.parents("form").submit()}};c.bind("result",j);c.keyup(function(n){if(n.keyCode===13){j()}});c.keydown(function(n){if(n.keyCode===13){return false}})}})}$.fn.make_text_editable=function(g){var d=("num_cols" in g?g.num_cols:30),c=("num_rows" in g?g.num_rows:4),e=("use_textarea" in g?g.use_textarea:false),b=("on_finish" in g?g.on_finish:null),f=("help_text" in g?g.help_text:null);var a=$(this);a.addClass("editable-text").click(function(l){if($(this).children(":input").length>0){return}a.removeClass("editable-text");var i=function(m){a.find(":input").remove();if(m!=""){a.text(m)}else{a.html("<br>")}a.addClass("editable-text");if(b){b(m)}};var h=a.text(),k,j;if(e){k=$("<textarea/>").attr({rows:c,cols:d}).text($.trim(h)).keyup(function(m){if(m.keyCode===27){i(h)}});j=$("<button/>").text("Done").click(function(){i(k.val());return false})}else{k=$("<input type='text'/>").attr({value:$.trim(h),size:d}).blur(function(){i(h)}).keyup(function(m){if(m.keyCode===27){$(this).trigger("blur")}else{if(m.keyCode===13){i($(this).val())}}})}a.text("");a.append(k);if(j){a.append(j)}k.focus();k.select();l.stopPropagation()});if(f){a.attr("title",f).tipsy({gravity:"s"})}return a};function async_save_text(d,f,e,a,c,h,i,g,b){if(c===undefined){c=30}if(i===undefined){i=4}$("#"+d).live("click",function(){if($("#renaming-active").length>0){return}var l=$("#"+f),k=l.text(),j;if(h){j=$("<textarea></textarea>").attr({rows:i,cols:c}).text($.trim(k))}else{j=$("<input type='text'></input>").attr({value:$.trim(k),size:c})}j.attr("id","renaming-active");j.blur(function(){$(this).remove();l.show();if(b){b(j)}});j.keyup(function(n){if(n.keyCode===27){$(this).trigger("blur")}else{if(n.keyCode===13){var m={};m[a]=$(this).val();$(this).trigger("blur");$.ajax({url:e,data:m,error:function(){alert("Text editing for elt "+f+" failed")},success:function(o){if(o!==""){l.text(o)}else{l.html("<em>None</em>")}if(b){b(j)}}})}}});if(g){g(j)}l.hide();j.insertAfter(l);j.focus();j.select();return})}function init_history_items(d,a,c){var b=function(){try{var e=$.jStorage.get("history_expand_state");if(e){for(var g in e){$("#"+g+" div.historyItemBody").show()}}}catch(f){$.jStorage.deleteKey("history_expand_state")}if($.browser.mozilla){$("div.historyItemBody").each(function(){if(!$(this).is(":visible")){$(this).find("pre.peek").css("overflow","hidden")}})}d.each(function(){var j=this.id,h=$(this).children("div.historyItemBody"),i=h.find("pre.peek");$(this).find(".historyItemTitleBar > .historyItemTitle").wrap("<a href='javascript:void(0);'></a>").click(function(){var k;if(h.is(":visible")){if($.browser.mozilla){i.css("overflow","hidden")}h.slideUp("fast");if(!c){k=$.jStorage.get("history_expand_state");if(k){delete 
k[j];$.jStorage.set("history_expand_state",k)}}}else{h.slideDown("fast",function(){if($.browser.mozilla){i.css("overflow","auto")}});if(!c){k=$.jStorage.get("history_expand_state");if(!k){k={}}k[j]=true;$.jStorage.set("history_expand_state",k)}}return false})});$("#top-links > a.toggle").click(function(){var h=$.jStorage.get("history_expand_state");if(!h){h={}}$("div.historyItemBody:visible").each(function(){if($.browser.mozilla){$(this).find("pre.peek").css("overflow","hidden")}$(this).slideUp("fast");if(h){delete h[$(this).parent().attr("id")]}});$.jStorage.set("history_expand_state",h)}).show()};b()}function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}function reset_tool_search(a){var c=$("#galaxy_tools").contents();if(c.length===0){c=$(document)}$(this).removeClass("search_active");c.find(".toolTitle").removeClass("search_match");c.find(".toolSectionBody").hide();c.find(".toolTitle").show();c.find(".toolPanelLabel").show();c.find(".toolSectionWrapper").each(function(){if($(this).attr("id")!="recently_used_wrapper"){$(this).show()}else{if($(this).hasClass("user_pref_visible")){$(this).show()}}});c.find("#search-no-results").hide();c.find("#search-spinner").hide();if(a){var b=c.find("#tool-search-query");b.val("search tools")}}var GalaxyAsync=function(a){this.url_dict={};this.log_action=(a===undefined?false:a)};GalaxyAsync.prototype.set_func_url=function(a,b){this.url_dict[a]=b};GalaxyAsync.prototype.set_user_pref=function(a,b){var c=this.url_dict[arguments.callee];if(c===undefined){return false}$.ajax({url:c,data:{pref_name:a,pref_value:b},error:function(){return false},success:function(){return true}})};GalaxyAsync.prototype.log_user_action=function(c,b,d){if(!this.log_action){return}var a=this.url_dict[arguments.callee];if(a===undefined){return false}$.ajax({url:a,data:{action:c,context:b,params:d},error:function(){return false},success:function(){return true}})};$(document).ready(function(){$("select[refresh_on_change='true']").change(function(){var a=$(this),e=a.val(),d=false,c=a.attr("refresh_on_change_values");if(c){c=c.split(",");var b=a.attr("last_selected_value");if($.inArray(e,c)===-1&&$.inArray(b,c)===-1){return}}$(window).trigger("refresh_on_change");$(document).trigger("convert_to_values");a.get(0).form.submit()});$(":checkbox[refresh_on_change='true']").click(function(){var a=$(this),e=a.val(),d=false,c=a.attr("refresh_on_change_values");if(c){c=c.split(",");var b=a.attr("last_selected_value");if($.inArray(e,c)===-1&&$.inArray(b,c)===-1){return}}$(window).trigger("refresh_on_change");a.get(0).form.submit()});$("a[confirm]").click(function(){return confirm($(this).attr("confirm"))});if($.fn.tipsy){$(".tooltip").tipsy({gravity:"s"})}make_popup_menus();replace_big_select_inputs(20,1500);$("a").click(function(){var b=$(this);var c=(parent.frames&&parent.frames.galaxy_main);if((b.attr("target")=="galaxy_main")&&(!c)){var a=b.attr("href");if(a.indexOf("?")==-1){a+="?"}else{a+="&"}a+="use_panels=True";b.attr("href",a);b.attr("target","_self")}return b})});
\ No newline at end of file
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/packed/handlebars/compiled/panel_label.js
--- /dev/null
+++ b/static/scripts/packed/handlebars/compiled/panel_label.js
@@ -0,0 +1,1 @@
+(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a.panel_label=b(function(e,n,d,l,k){d=d||e.helpers;var i="",c,h,o=this,f="function",m=d.helperMissing,g=void 0,j=this.escapeExpression;i+="<span>";h=d.name;c=h||n.name;if(typeof c===f){c=c.call(n,{hash:{}})}else{if(c===g){c=m.call(n,"name",{hash:{}})}}i+=j(c)+"</span>";return i})})();
\ No newline at end of file
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/packed/handlebars/compiled/panel_section.js
--- /dev/null
+++ b/static/scripts/packed/handlebars/compiled/panel_section.js
@@ -0,0 +1,1 @@
+(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a.panel_section=b(function(e,n,d,l,k){d=d||e.helpers;var i="",c,h,o=this,f="function",m=d.helperMissing,g=void 0,j=this.escapeExpression;i+='<div class="toolSectionTitle" id="title_';h=d.id;c=h||n.id;if(typeof c===f){c=c.call(n,{hash:{}})}else{if(c===g){c=m.call(n,"id",{hash:{}})}}i+=j(c)+'">\n <a href="#"><span>';h=d.name;c=h||n.name;if(typeof c===f){c=c.call(n,{hash:{}})}else{if(c===g){c=m.call(n,"name",{hash:{}})}}i+=j(c)+'</span></a>\n</div>\n<div id="';h=d.id;c=h||n.id;if(typeof c===f){c=c.call(n,{hash:{}})}else{if(c===g){c=m.call(n,"id",{hash:{}})}}i+=j(c)+'" class="toolSectionBody" style="display: none; ">\n <div class="toolSectionBg"></div>\n<div>';return i})})();
\ No newline at end of file
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/packed/handlebars/compiled/tool_link.js
--- /dev/null
+++ b/static/scripts/packed/handlebars/compiled/tool_link.js
@@ -0,0 +1,1 @@
+(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a.tool_link=b(function(e,n,d,l,k){d=d||e.helpers;var i="",c,h,o=this,f="function",m=d.helperMissing,g=void 0,j=this.escapeExpression;i+='<a class="';h=d.id;c=h||n.id;if(typeof c===f){c=c.call(n,{hash:{}})}else{if(c===g){c=m.call(n,"id",{hash:{}})}}i+=j(c)+' tool-link" href="';h=d.link;c=h||n.link;if(typeof c===f){c=c.call(n,{hash:{}})}else{if(c===g){c=m.call(n,"link",{hash:{}})}}i+=j(c)+'" target="';h=d.target;c=h||n.target;if(typeof c===f){c=c.call(n,{hash:{}})}else{if(c===g){c=m.call(n,"target",{hash:{}})}}i+=j(c)+'" minsizehint="';h=d.min_width;c=h||n.min_width;if(typeof c===f){c=c.call(n,{hash:{}})}else{if(c===g){c=m.call(n,"min_width",{hash:{}})}}i+=j(c)+'">';h=d.name;c=h||n.name;if(typeof c===f){c=c.call(n,{hash:{}})}else{if(c===g){c=m.call(n,"name",{hash:{}})}}i+=j(c)+"</a> ";h=d.description;c=h||n.description;if(typeof c===f){c=c.call(n,{hash:{}})}else{if(c===g){c=m.call(n,"description",{hash:{}})}}i+=j(c);return i})})();
\ No newline at end of file
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/packed/handlebars/compiled/tool_search.js
--- /dev/null
+++ b/static/scripts/packed/handlebars/compiled/tool_search.js
@@ -0,0 +1,1 @@
+(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a.tool_search=b(function(e,n,d,l,k){d=d||e.helpers;var i="",c,h,o=this,f="function",m=d.helperMissing,g=void 0,j=this.escapeExpression;i+='<input type="text" name="query" value="search tools" id="tool-search-query" autocomplete="off" class="search-query parent-width" />\n<img src="';h=d.spinner_url;c=h||n.spinner_url;if(typeof c===f){c=c.call(n,{hash:{}})}else{if(c===g){c=m.call(n,"spinner_url",{hash:{}})}}i+=j(c)+'" id="search-spinner" class="search-spinner"/>\n';return i})})();
\ No newline at end of file
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/packed/libs/handlebars-1.0.0.beta.6.js
--- a/static/scripts/packed/libs/handlebars-1.0.0.beta.6.js
+++ /dev/null
@@ -1,1 +0,0 @@
-var Handlebars={};Handlebars.VERSION="1.0.beta.6";Handlebars.helpers={};Handlebars.partials={};Handlebars.registerHelper=function(b,c,a){if(a){c.not=a}this.helpers[b]=c};Handlebars.registerPartial=function(a,b){this.partials[a]=b};Handlebars.registerHelper("helperMissing",function(a){if(arguments.length===2){return undefined}else{throw new Error("Could not find property '"+a+"'")}});var toString=Object.prototype.toString,functionType="[object Function]";Handlebars.registerHelper("blockHelperMissing",function(f,d){var a=d.inverse||function(){},h=d.fn;var c="";var g=toString.call(f);if(g===functionType){f=f.call(this)}if(f===true){return h(this)}else{if(f===false||f==null){return a(this)}else{if(g==="[object Array]"){if(f.length>0){for(var e=0,b=f.length;e<b;e++){c=c+h(f[e])}}else{c=a(this)}return c}else{return h(f)}}}});Handlebars.registerHelper("each",function(f,d){var g=d.fn,a=d.inverse;var c="";if(f&&f.length>0){for(var e=0,b=f.length;e<b;e++){c=c+g(f[e])}}else{c=a(this)}return c});Handlebars.registerHelper("if",function(b,a){var c=toString.call(b);if(c===functionType){b=b.call(this)}if(!b||Handlebars.Utils.isEmpty(b)){return a.inverse(this)}else{return a.fn(this)}});Handlebars.registerHelper("unless",function(c,b){var d=b.fn,a=b.inverse;b.fn=a;b.inverse=d;return Handlebars.helpers["if"].call(this,c,b)});Handlebars.registerHelper("with",function(b,a){return a.fn(b)});Handlebars.registerHelper("log",function(a){Handlebars.log(a)});var handlebars=(function(){var f={trace:function c(){},yy:{},symbols_:{error:2,root:3,program:4,EOF:5,statements:6,simpleInverse:7,statement:8,openInverse:9,closeBlock:10,openBlock:11,mustache:12,partial:13,CONTENT:14,COMMENT:15,OPEN_BLOCK:16,inMustache:17,CLOSE:18,OPEN_INVERSE:19,OPEN_ENDBLOCK:20,path:21,OPEN:22,OPEN_UNESCAPED:23,OPEN_PARTIAL:24,params:25,hash:26,param:27,STRING:28,INTEGER:29,BOOLEAN:30,hashSegments:31,hashSegment:32,ID:33,EQUALS:34,pathSegments:35,SEP:36,"$accept":0,"$end":1},terminals_:{2:"error",5:"EOF",14:"CONTENT",15:"COMMENT",16:"OPEN_BLOCK",18:"CLOSE",19:"OPEN_INVERSE",20:"OPEN_ENDBLOCK",22:"OPEN",23:"OPEN_UNESCAPED",24:"OPEN_PARTIAL",28:"STRING",29:"INTEGER",30:"BOOLEAN",33:"ID",34:"EQUALS",36:"SEP"},productions_:[0,[3,2],[4,3],[4,1],[4,0],[6,1],[6,2],[8,3],[8,3],[8,1],[8,1],[8,1],[8,1],[11,3],[9,3],[10,3],[12,3],[12,3],[13,3],[13,4],[7,2],[17,3],[17,2],[17,2],[17,1],[25,2],[25,1],[27,1],[27,1],[27,1],[27,1],[26,1],[31,2],[31,1],[32,3],[32,3],[32,3],[32,3],[21,1],[35,3],[35,1]],performAction:function b(g,j,k,n,m,i,l){var h=i.length-1;switch(m){case 1:return i[h-1];break;case 2:this.$=new n.ProgramNode(i[h-2],i[h]);break;case 3:this.$=new n.ProgramNode(i[h]);break;case 4:this.$=new n.ProgramNode([]);break;case 5:this.$=[i[h]];break;case 6:i[h-1].push(i[h]);this.$=i[h-1];break;case 7:this.$=new n.InverseNode(i[h-2],i[h-1],i[h]);break;case 8:this.$=new n.BlockNode(i[h-2],i[h-1],i[h]);break;case 9:this.$=i[h];break;case 10:this.$=i[h];break;case 11:this.$=new n.ContentNode(i[h]);break;case 12:this.$=new n.CommentNode(i[h]);break;case 13:this.$=new n.MustacheNode(i[h-1][0],i[h-1][1]);break;case 14:this.$=new n.MustacheNode(i[h-1][0],i[h-1][1]);break;case 15:this.$=i[h-1];break;case 16:this.$=new n.MustacheNode(i[h-1][0],i[h-1][1]);break;case 17:this.$=new n.MustacheNode(i[h-1][0],i[h-1][1],true);break;case 18:this.$=new n.PartialNode(i[h-1]);break;case 19:this.$=new n.PartialNode(i[h-2],i[h-1]);break;case 20:break;case 21:this.$=[[i[h-2]].concat(i[h-1]),i[h]];break;case 22:this.$=[[i[h-1]].concat(i[h]),null];break;case 
23:this.$=[[i[h-1]],i[h]];break;case 24:this.$=[[i[h]],null];break;case 25:i[h-1].push(i[h]);this.$=i[h-1];break;case 26:this.$=[i[h]];break;case 27:this.$=i[h];break;case 28:this.$=new n.StringNode(i[h]);break;case 29:this.$=new n.IntegerNode(i[h]);break;case 30:this.$=new n.BooleanNode(i[h]);break;case 31:this.$=new n.HashNode(i[h]);break;case 32:i[h-1].push(i[h]);this.$=i[h-1];break;case 33:this.$=[i[h]];break;case 34:this.$=[i[h-2],i[h]];break;case 35:this.$=[i[h-2],new n.StringNode(i[h])];break;case 36:this.$=[i[h-2],new n.IntegerNode(i[h])];break;case 37:this.$=[i[h-2],new n.BooleanNode(i[h])];break;case 38:this.$=new n.IdNode(i[h]);break;case 39:i[h-2].push(i[h]);this.$=i[h-2];break;case 40:this.$=[i[h]];break}},table:[{3:1,4:2,5:[2,4],6:3,8:4,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,11],22:[1,13],23:[1,14],24:[1,15]},{1:[3]},{5:[1,16]},{5:[2,3],7:17,8:18,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,19],20:[2,3],22:[1,13],23:[1,14],24:[1,15]},{5:[2,5],14:[2,5],15:[2,5],16:[2,5],19:[2,5],20:[2,5],22:[2,5],23:[2,5],24:[2,5]},{4:20,6:3,8:4,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,11],20:[2,4],22:[1,13],23:[1,14],24:[1,15]},{4:21,6:3,8:4,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,11],20:[2,4],22:[1,13],23:[1,14],24:[1,15]},{5:[2,9],14:[2,9],15:[2,9],16:[2,9],19:[2,9],20:[2,9],22:[2,9],23:[2,9],24:[2,9]},{5:[2,10],14:[2,10],15:[2,10],16:[2,10],19:[2,10],20:[2,10],22:[2,10],23:[2,10],24:[2,10]},{5:[2,11],14:[2,11],15:[2,11],16:[2,11],19:[2,11],20:[2,11],22:[2,11],23:[2,11],24:[2,11]},{5:[2,12],14:[2,12],15:[2,12],16:[2,12],19:[2,12],20:[2,12],22:[2,12],23:[2,12],24:[2,12]},{17:22,21:23,33:[1,25],35:24},{17:26,21:23,33:[1,25],35:24},{17:27,21:23,33:[1,25],35:24},{17:28,21:23,33:[1,25],35:24},{21:29,33:[1,25],35:24},{1:[2,1]},{6:30,8:4,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,11],22:[1,13],23:[1,14],24:[1,15]},{5:[2,6],14:[2,6],15:[2,6],16:[2,6],19:[2,6],20:[2,6],22:[2,6],23:[2,6],24:[2,6]},{17:22,18:[1,31],21:23,33:[1,25],35:24},{10:32,20:[1,33]},{10:34,20:[1,33]},{18:[1,35]},{18:[2,24],21:40,25:36,26:37,27:38,28:[1,41],29:[1,42],30:[1,43],31:39,32:44,33:[1,45],35:24},{18:[2,38],28:[2,38],29:[2,38],30:[2,38],33:[2,38],36:[1,46]},{18:[2,40],28:[2,40],29:[2,40],30:[2,40],33:[2,40],36:[2,40]},{18:[1,47]},{18:[1,48]},{18:[1,49]},{18:[1,50],21:51,33:[1,25],35:24},{5:[2,2],8:18,9:5,11:6,12:7,13:8,14:[1,9],15:[1,10],16:[1,12],19:[1,11],20:[2,2],22:[1,13],23:[1,14],24:[1,15]},{14:[2,20],15:[2,20],16:[2,20],19:[2,20],22:[2,20],23:[2,20],24:[2,20]},{5:[2,7],14:[2,7],15:[2,7],16:[2,7],19:[2,7],20:[2,7],22:[2,7],23:[2,7],24:[2,7]},{21:52,33:[1,25],35:24},{5:[2,8],14:[2,8],15:[2,8],16:[2,8],19:[2,8],20:[2,8],22:[2,8],23:[2,8],24:[2,8]},{14:[2,14],15:[2,14],16:[2,14],19:[2,14],20:[2,14],22:[2,14],23:[2,14],24:[2,14]},{18:[2,22],21:40,26:53,27:54,28:[1,41],29:[1,42],30:[1,43],31:39,32:44,33:[1,45],35:24},{18:[2,23]},{18:[2,26],28:[2,26],29:[2,26],30:[2,26],33:[2,26]},{18:[2,31],32:55,33:[1,56]},{18:[2,27],28:[2,27],29:[2,27],30:[2,27],33:[2,27]},{18:[2,28],28:[2,28],29:[2,28],30:[2,28],33:[2,28]},{18:[2,29],28:[2,29],29:[2,29],30:[2,29],33:[2,29]},{18:[2,30],28:[2,30],29:[2,30],30:[2,30],33:[2,30]},{18:[2,33],33:[2,33]},{18:[2,40],28:[2,40],29:[2,40],30:[2,40],33:[2,40],34:[1,57],36:[2,40]},{33:[1,58]},{14:[2,13],15:[2,13],16:[2,13],19:[2,13],20:[2,13],22:[2,13],23:[2,13],24:[2,13]},{5:[2,16],14:[2,16],15:[2,16],16:[2,16],19:[2,16],20:[2,16],22:[2,16],23:[2,16],24:[2,16]},{5:[2,17],14:[2,17],15:[2,17],16:[2,17],19:[2,17],20:[2,17],22:[2,17],23:[2,
17],24:[2,17]},{5:[2,18],14:[2,18],15:[2,18],16:[2,18],19:[2,18],20:[2,18],22:[2,18],23:[2,18],24:[2,18]},{18:[1,59]},{18:[1,60]},{18:[2,21]},{18:[2,25],28:[2,25],29:[2,25],30:[2,25],33:[2,25]},{18:[2,32],33:[2,32]},{34:[1,57]},{21:61,28:[1,62],29:[1,63],30:[1,64],33:[1,25],35:24},{18:[2,39],28:[2,39],29:[2,39],30:[2,39],33:[2,39],36:[2,39]},{5:[2,19],14:[2,19],15:[2,19],16:[2,19],19:[2,19],20:[2,19],22:[2,19],23:[2,19],24:[2,19]},{5:[2,15],14:[2,15],15:[2,15],16:[2,15],19:[2,15],20:[2,15],22:[2,15],23:[2,15],24:[2,15]},{18:[2,34],33:[2,34]},{18:[2,35],33:[2,35]},{18:[2,36],33:[2,36]},{18:[2,37],33:[2,37]}],defaultActions:{16:[2,1],37:[2,23],53:[2,21]},parseError:function d(h,g){throw new Error(h)},parse:function e(o){var x=this,l=[0],G=[null],s=[],H=this.table,h="",q=0,E=0,j=0,n=2,u=1;this.lexer.setInput(o);this.lexer.yy=this.yy;this.yy.lexer=this.lexer;if(typeof this.lexer.yylloc=="undefined"){this.lexer.yylloc={}}var i=this.lexer.yylloc;s.push(i);if(typeof this.yy.parseError==="function"){this.parseError=this.yy.parseError}function w(p){l.length=l.length-2*p;G.length=G.length-p;s.length=s.length-p}function v(){var p;p=x.lexer.lex()||1;if(typeof p!=="number"){p=x.symbols_[p]||p}return p}var D,z,k,C,I,t,B={},y,F,g,m;while(true){k=l[l.length-1];if(this.defaultActions[k]){C=this.defaultActions[k]}else{if(D==null){D=v()}C=H[k]&&H[k][D]}if(typeof C==="undefined"||!C.length||!C[0]){if(!j){m=[];for(y in H[k]){if(this.terminals_[y]&&y>2){m.push("'"+this.terminals_[y]+"'")}}var A="";if(this.lexer.showPosition){A="Parse error on line "+(q+1)+":\n"+this.lexer.showPosition()+"\nExpecting "+m.join(", ")+", got '"+this.terminals_[D]+"'"}else{A="Parse error on line "+(q+1)+": Unexpected "+(D==1?"end of input":"'"+(this.terminals_[D]||D)+"'")}this.parseError(A,{text:this.lexer.match,token:this.terminals_[D]||D,line:this.lexer.yylineno,loc:i,expected:m})}}if(C[0] instanceof Array&&C.length>1){throw new Error("Parse Error: multiple actions possible at state: "+k+", token: "+D)}switch(C[0]){case 1:l.push(D);G.push(this.lexer.yytext);s.push(this.lexer.yylloc);l.push(C[1]);D=null;if(!z){E=this.lexer.yyleng;h=this.lexer.yytext;q=this.lexer.yylineno;i=this.lexer.yylloc;if(j>0){j--}}else{D=z;z=null}break;case 2:F=this.productions_[C[1]][1];B.$=G[G.length-F];B._$={first_line:s[s.length-(F||1)].first_line,last_line:s[s.length-1].last_line,first_column:s[s.length-(F||1)].first_column,last_column:s[s.length-1].last_column};t=this.performAction.call(B,h,E,q,this.yy,C[1],G,s);if(typeof t!=="undefined"){return t}if(F){l=l.slice(0,-1*F*2);G=G.slice(0,-1*F);s=s.slice(0,-1*F)}l.push(this.productions_[C[1]][0]);G.push(B.$);s.push(B._$);g=H[l[l.length-2]][l[l.length-1]];l.push(g);break;case 3:return true}}return true}};var a=(function(){var j=({EOF:1,parseError:function l(o,n){if(this.yy.parseError){this.yy.parseError(o,n)}else{throw new Error(o)}},setInput:function(n){this._input=n;this._more=this._less=this.done=false;this.yylineno=this.yyleng=0;this.yytext=this.matched=this.match="";this.conditionStack=["INITIAL"];this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0};return this},input:function(){var o=this._input[0];this.yytext+=o;this.yyleng++;this.match+=o;this.matched+=o;var n=o.match(/\n/);if(n){this.yylineno++}this._input=this._input.slice(1);return o},unput:function(n){this._input=n+this._input;return this},more:function(){this._more=true;return this},pastInput:function(){var 
n=this.matched.substr(0,this.matched.length-this.match.length);return(n.length>20?"...":"")+n.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var n=this.match;if(n.length<20){n+=this._input.substr(0,20-n.length)}return(n.substr(0,20)+(n.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var n=this.pastInput();var o=new Array(n.length+1).join("-");return n+this.upcomingInput()+"\n"+o+"^"},next:function(){if(this.done){return this.EOF}if(!this._input){this.done=true}var r,p,o,n;if(!this._more){this.yytext="";this.match=""}var s=this._currentRules();for(var q=0;q<s.length;q++){p=this._input.match(this.rules[s[q]]);if(p){n=p[0].match(/\n.*/g);if(n){this.yylineno+=n.length}this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:n?n[n.length-1].length-1:this.yylloc.last_column+p[0].length};this.yytext+=p[0];this.match+=p[0];this.matches=p;this.yyleng=this.yytext.length;this._more=false;this._input=this._input.slice(p[0].length);this.matched+=p[0];r=this.performAction.call(this,this.yy,this,s[q],this.conditionStack[this.conditionStack.length-1]);if(r){return r}else{return}}}if(this._input===""){return this.EOF}else{this.parseError("Lexical error on line "+(this.yylineno+1)+". Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})}},lex:function g(){var n=this.next();if(typeof n!=="undefined"){return n}else{return this.lex()}},begin:function h(n){this.conditionStack.push(n)},popState:function m(){return this.conditionStack.pop()},_currentRules:function k(){return this.conditions[this.conditionStack[this.conditionStack.length-1]].rules},topState:function(){return this.conditionStack[this.conditionStack.length-2]},pushState:function h(n){this.begin(n)}});j.performAction=function i(r,o,q,n){var p=n;switch(q){case 0:if(o.yytext.slice(-1)!=="\\"){this.begin("mu")}if(o.yytext.slice(-1)==="\\"){o.yytext=o.yytext.substr(0,o.yyleng-1),this.begin("emu")}if(o.yytext){return 14}break;case 1:return 14;break;case 2:this.popState();return 14;break;case 3:return 24;break;case 4:return 16;break;case 5:return 20;break;case 6:return 19;break;case 7:return 19;break;case 8:return 23;break;case 9:return 23;break;case 10:o.yytext=o.yytext.substr(3,o.yyleng-5);this.popState();return 15;break;case 11:return 22;break;case 12:return 34;break;case 13:return 33;break;case 14:return 33;break;case 15:return 36;break;case 16:break;case 17:this.popState();return 18;break;case 18:this.popState();return 18;break;case 19:o.yytext=o.yytext.substr(1,o.yyleng-2).replace(/\\"/g,'"');return 28;break;case 20:return 30;break;case 21:return 30;break;case 22:return 29;break;case 23:return 33;break;case 24:o.yytext=o.yytext.substr(1,o.yyleng-2);return 33;break;case 25:return"INVALID";break;case 26:return 5;break}};j.rules=[/^[^\x00]*?(?=(\{\{))/,/^[^\x00]+/,/^[^\x00]{2,}?(?=(\{\{))/,/^\{\{>/,/^\{\{#/,/^\{\{\//,/^\{\{\^/,/^\{\{\s*else\b/,/^\{\{\{/,/^\{\{&/,/^\{\{![\s\S]*?\}\}/,/^\{\{/,/^=/,/^\.(?=[} ])/,/^\.\./,/^[\/.]/,/^\s+/,/^\}\}\}/,/^\}\}/,/^"(\\["]|[^"])*"/,/^true(?=[}\s])/,/^false(?=[}\s])/,/^[0-9]+(?=[}\s])/,/^[a-zA-Z0-9_$-]+(?=[=}\s\/.])/,/^\[[^\]]*\]/,/^./,/^$/];j.conditions={mu:{rules:[3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26],inclusive:false},emu:{rules:[2],inclusive:false},INITIAL:{rules:[0,1,26],inclusive:true}};return j})();f.lexer=a;return f})();if(typeof require!=="undefined"&&typeof exports!=="undefined"){exports.parser=handlebars;exports.parse=function(){return 
handlebars.parse.apply(handlebars,arguments)};exports.main=function commonjsMain(a){if(!a[1]){throw new Error("Usage: "+a[0]+" FILE")}if(typeof process!=="undefined"){var c=require("fs").readFileSync(require("path").join(process.cwd(),a[1]),"utf8")}else{var b=require("file").path(require("file").cwd());var c=b.join(a[1]).read({charset:"utf-8"})}return exports.parser.parse(c)};if(typeof module!=="undefined"&&require.main===module){exports.main(typeof process!=="undefined"?process.argv.slice(1):require("system").args)}}Handlebars.Parser=handlebars;Handlebars.parse=function(a){Handlebars.Parser.yy=Handlebars.AST;return Handlebars.Parser.parse(a)};Handlebars.print=function(a){return new Handlebars.PrintVisitor().accept(a)};Handlebars.logger={DEBUG:0,INFO:1,WARN:2,ERROR:3,level:3,log:function(b,a){}};Handlebars.log=function(b,a){Handlebars.logger.log(b,a)};(function(){Handlebars.AST={};Handlebars.AST.ProgramNode=function(c,b){this.type="program";this.statements=c;if(b){this.inverse=new Handlebars.AST.ProgramNode(b)}};Handlebars.AST.MustacheNode=function(d,c,b){this.type="mustache";this.id=d[0];this.params=d.slice(1);this.hash=c;this.escaped=!b};Handlebars.AST.PartialNode=function(c,b){this.type="partial";this.id=c;this.context=b};var a=function(b,c){if(b.original!==c.original){throw new Handlebars.Exception(b.original+" doesn't match "+c.original)}};Handlebars.AST.BlockNode=function(c,b,d){a(c.id,d);this.type="block";this.mustache=c;this.program=b};Handlebars.AST.InverseNode=function(c,b,d){a(c.id,d);this.type="inverse";this.mustache=c;this.program=b};Handlebars.AST.ContentNode=function(b){this.type="content";this.string=b};Handlebars.AST.HashNode=function(b){this.type="hash";this.pairs=b};Handlebars.AST.IdNode=function(f){this.type="ID";this.original=f.join(".");var d=[],g=0;for(var e=0,b=f.length;e<b;e++){var c=f[e];if(c===".."){g++}else{if(c==="."||c==="this"){this.isScoped=true}else{d.push(c)}}}this.parts=d;this.string=d.join(".");this.depth=g;this.isSimple=(d.length===1)&&(g===0)};Handlebars.AST.StringNode=function(b){this.type="STRING";this.string=b};Handlebars.AST.IntegerNode=function(b){this.type="INTEGER";this.integer=b};Handlebars.AST.BooleanNode=function(b){this.type="BOOLEAN";this.bool=b};Handlebars.AST.CommentNode=function(b){this.type="comment";this.comment=b}})();Handlebars.Exception=function(b){var a=Error.prototype.constructor.apply(this,arguments);for(var c in a){if(a.hasOwnProperty(c)){this[c]=a[c]}}this.message=a.message};Handlebars.Exception.prototype=new Error;Handlebars.SafeString=function(a){this.string=a};Handlebars.SafeString.prototype.toString=function(){return this.string.toString()};(function(){var c={"<":"<",">":">",'"':""","'":"'","`":"`"};var d=/&(?!\w+;)|[<>"'`]/g;var b=/[&<>"'`]/;var a=function(e){return c[e]||"&"};Handlebars.Utils={escapeExpression:function(e){if(e instanceof Handlebars.SafeString){return e.toString()}else{if(e==null||e===false){return""}}if(!b.test(e)){return e}return e.replace(d,a)},isEmpty:function(e){if(typeof e==="undefined"){return true}else{if(e===null){return true}else{if(e===false){return true}else{if(Object.prototype.toString.call(e)==="[object Array]"&&e.length===0){return true}else{return 
false}}}}}}})();Handlebars.Compiler=function(){};Handlebars.JavaScriptCompiler=function(){};(function(f,e){f.OPCODE_MAP={appendContent:1,getContext:2,lookupWithHelpers:3,lookup:4,append:5,invokeMustache:6,appendEscaped:7,pushString:8,truthyOrFallback:9,functionOrFallback:10,invokeProgram:11,invokePartial:12,push:13,assignToHash:15,pushStringParam:16};f.MULTI_PARAM_OPCODES={appendContent:1,getContext:1,lookupWithHelpers:2,lookup:1,invokeMustache:3,pushString:1,truthyOrFallback:1,functionOrFallback:1,invokeProgram:3,invokePartial:1,push:1,assignToHash:1,pushStringParam:1};f.DISASSEMBLE_MAP={};for(var h in f.OPCODE_MAP){var g=f.OPCODE_MAP[h];f.DISASSEMBLE_MAP[g]=h}f.multiParamSize=function(i){return f.MULTI_PARAM_OPCODES[f.DISASSEMBLE_MAP[i]]};f.prototype={compiler:f,disassemble:function(){var t=this.opcodes,r,n;var q=[],v,m,w;for(var s=0,o=t.length;s<o;s++){r=t[s];if(r==="DECLARE"){m=t[++s];w=t[++s];q.push("DECLARE "+m+" = "+w)}else{v=f.DISASSEMBLE_MAP[r];var u=f.multiParamSize(r);var k=[];for(var p=0;p<u;p++){n=t[++s];if(typeof n==="string"){n='"'+n.replace("\n","\\n")+'"'}k.push(n)}v=v+" "+k.join(" ");q.push(v)}}return q.join("\n")},guid:0,compile:function(i,k){this.children=[];this.depths={list:[]};this.options=k;var l=this.options.knownHelpers;this.options.knownHelpers={helperMissing:true,blockHelperMissing:true,each:true,"if":true,unless:true,"with":true,log:true};if(l){for(var j in l){this.options.knownHelpers[j]=l[j]}}return this.program(i)},accept:function(i){return this[i.type](i)},program:function(m){var k=m.statements,o;this.opcodes=[];for(var n=0,j=k.length;n<j;n++){o=k[n];this[o.type](o)}this.isSimple=j===1;this.depths.list=this.depths.list.sort(function(l,i){return l-i});return this},compileProgram:function(m){var j=new this.compiler().compile(m,this.options);var n=this.guid++;this.usePartial=this.usePartial||j.usePartial;this.children[n]=j;for(var o=0,k=j.depths.list.length;o<k;o++){depth=j.depths.list[o];if(depth<2){continue}else{this.addDepth(depth-1)}}return n},block:function(o){var l=o.mustache;var n,p,j,k;var m=this.setupStackForMustache(l);var i=this.compileProgram(o.program);if(o.program.inverse){k=this.compileProgram(o.program.inverse);this.declare("inverse",k)}this.opcode("invokeProgram",i,m.length,!!l.hash);this.declare("inverse",null);this.opcode("append")},inverse:function(k){var j=this.setupStackForMustache(k.mustache);var i=this.compileProgram(k.program);this.declare("inverse",i);this.opcode("invokeProgram",null,j.length,!!k.mustache.hash);this.declare("inverse",null);this.opcode("append")},hash:function(n){var m=n.pairs,p,o;this.opcode("push","{}");for(var k=0,j=m.length;k<j;k++){p=m[k];o=p[1];this.accept(o);this.opcode("assignToHash",p[0])}},partial:function(i){var j=i.id;this.usePartial=true;if(i.context){this.ID(i.context)}else{this.opcode("push","depth0")}this.opcode("invokePartial",j.original);this.opcode("append")},content:function(i){this.opcode("appendContent",i.string)},mustache:function(i){var j=this.setupStackForMustache(i);this.opcode("invokeMustache",j.length,i.id.original,!!i.hash);if(i.escaped&&!this.options.noEscape){this.opcode("appendEscaped")}else{this.opcode("append")}},ID:function(m){this.addDepth(m.depth);this.opcode("getContext",m.depth);this.opcode("lookupWithHelpers",m.parts[0]||null,m.isScoped||false);for(var 
k=1,j=m.parts.length;k<j;k++){this.opcode("lookup",m.parts[k])}},STRING:function(i){this.opcode("pushString",i.string)},INTEGER:function(i){this.opcode("push",i.integer)},BOOLEAN:function(i){this.opcode("push",i.bool)},comment:function(){},pushParams:function(l){var j=l.length,k;while(j--){k=l[j];if(this.options.stringParams){if(k.depth){this.addDepth(k.depth)}this.opcode("getContext",k.depth||0);this.opcode("pushStringParam",k.string)}else{this[k.type](k)}}},opcode:function(i,l,k,j){this.opcodes.push(f.OPCODE_MAP[i]);if(l!==undefined){this.opcodes.push(l)}if(k!==undefined){this.opcodes.push(k)}if(j!==undefined){this.opcodes.push(j)}},declare:function(i,j){this.opcodes.push("DECLARE");this.opcodes.push(i);this.opcodes.push(j)},addDepth:function(i){if(i===0){return}if(!this.depths[i]){this.depths[i]=true;this.depths.list.push(i)}},setupStackForMustache:function(i){var j=i.params;this.pushParams(j);if(i.hash){this.hash(i.hash)}this.ID(i.id);return j}};e.prototype={nameLookup:function(k,i,j){if(/^[0-9]+$/.test(i)){return k+"["+i+"]"}else{if(e.isValidJavaScriptVariableName(i)){return k+"."+i}else{return k+"['"+i+"']"}}},appendToBuffer:function(i){if(this.environment.isSimple){return"return "+i+";"}else{return"buffer += "+i+";"}},initializeBuffer:function(){return this.quotedString("")},namespace:"Handlebars",compile:function(i,j,l,k){this.environment=i;this.options=j||{};this.name=this.environment.name;this.isChild=!!l;this.context=l||{programs:[],aliases:{self:"this"},registers:{list:[]}};this.preamble();this.stackSlot=0;this.stackVars=[];this.compileChildren(i,j);var n=i.opcodes,m;this.i=0;for(b=n.length;this.i<b;this.i++){m=this.nextOpcode(0);if(m[0]==="DECLARE"){this.i=this.i+2;this[m[1]]=m[2]}else{this.i=this.i+m[1].length;this[m[0]].apply(this,m[1])}}return this.createFunctionContext(k)},nextOpcode:function(r){var o=this.environment.opcodes,m=o[this.i+r],l,p;var q,i;if(m==="DECLARE"){l=o[this.i+1];p=o[this.i+2];return["DECLARE",l,p]}else{l=f.DISASSEMBLE_MAP[m];q=f.multiParamSize(m);i=[];for(var k=0;k<q;k++){i.push(o[this.i+k+1+r])}return[l,i]}},eat:function(i){this.i=this.i+i.length},preamble:function(){var i=[];this.useRegister("foundHelper");if(!this.isChild){var j=this.namespace;var k="helpers = helpers || "+j+".helpers;";if(this.environment.usePartial){k=k+" partials = partials || "+j+".partials;"}i.push(k)}else{i.push("")}if(!this.environment.isSimple){i.push(", buffer = "+this.initializeBuffer())}else{i.push("")}this.lastContext=0;this.source=i},createFunctionContext:function(p){var q=this.stackVars;if(!this.isChild){q=q.concat(this.context.registers.list)}if(q.length>0){this.source[1]=this.source[1]+", "+q.join(", ")}if(!this.isChild){var k=[];for(var o in this.context.aliases){this.source[1]=this.source[1]+", "+o+"="+this.context.aliases[o]}}if(this.source[1]){this.source[1]="var "+this.source[1].substring(2)+";"}if(!this.isChild){this.source[1]+="\n"+this.context.programs.join("\n")+"\n"}if(!this.environment.isSimple){this.source.push("return buffer;")}var r=this.isChild?["depth0","data"]:["Handlebars","depth0","helpers","partials","data"];for(var n=0,j=this.environment.depths.list.length;n<j;n++){r.push("depth"+this.environment.depths.list[n])}if(p){r.push(this.source.join("\n "));return Function.apply(this,r)}else{var m="function "+(this.name||"")+"("+r.join(",")+") {\n "+this.source.join("\n ")+"}";Handlebars.log(Handlebars.logger.DEBUG,m+"\n\n");return m}},appendContent:function(i){this.source.push(this.appendToBuffer(this.quotedString(i)))},append:function(){var 
i=this.popStack();this.source.push("if("+i+" || "+i+" === 0) { "+this.appendToBuffer(i)+" }");if(this.environment.isSimple){this.source.push("else { "+this.appendToBuffer("''")+" }")}},appendEscaped:function(){var j=this.nextOpcode(1),i="";this.context.aliases.escapeExpression="this.escapeExpression";if(j[0]==="appendContent"){i=" + "+this.quotedString(j[1][0]);this.eat(j)}this.source.push(this.appendToBuffer("escapeExpression("+this.popStack()+")"+i))},getContext:function(i){if(this.lastContext!==i){this.lastContext=i}},lookupWithHelpers:function(k,l){if(k){var i=this.nextStack();this.usingKnownHelper=false;var j;if(!l&&this.options.knownHelpers[k]){j=i+" = "+this.nameLookup("helpers",k,"helper");this.usingKnownHelper=true}else{if(l||this.options.knownHelpersOnly){j=i+" = "+this.nameLookup("depth"+this.lastContext,k,"context")}else{this.register("foundHelper",this.nameLookup("helpers",k,"helper"));j=i+" = foundHelper || "+this.nameLookup("depth"+this.lastContext,k,"context")}}j+=";";this.source.push(j)}else{this.pushStack("depth"+this.lastContext)}},lookup:function(j){var i=this.topStack();this.source.push(i+" = ("+i+" === null || "+i+" === undefined || "+i+" === false ? "+i+" : "+this.nameLookup(i,j,"context")+");")},pushStringParam:function(i){this.pushStack("depth"+this.lastContext);this.pushString(i)},pushString:function(i){this.pushStack(this.quotedString(i))},push:function(i){this.pushStack(i)},invokeMustache:function(k,j,i){this.populateParams(k,this.quotedString(j),"{}",null,i,function(l,n,m){if(!this.usingKnownHelper){this.context.aliases.helperMissing="helpers.helperMissing";this.context.aliases.undef="void 0";this.source.push("else if("+m+"=== undef) { "+l+" = helperMissing.call("+n+"); }");if(l!==m){this.source.push("else { "+l+" = "+m+"; }")}}})},invokeProgram:function(k,l,j){var i=this.programExpression(this.inverse);var m=this.programExpression(k);this.populateParams(l,null,m,i,j,function(n,p,o){if(!this.usingKnownHelper){this.context.aliases.blockHelperMissing="helpers.blockHelperMissing";this.source.push("else { "+n+" = blockHelperMissing.call("+p+"); }")}})},populateParams:function(p,k,t,q,x,w){var l=x||this.options.stringParams||q||this.options.data;var j=this.popStack(),v;var n=[],m,o,u;if(l){this.register("tmp1",t);u="tmp1"}else{u="{ hash: {} }"}if(l){var s=(x?this.popStack():"{}");this.source.push("tmp1.hash = "+s+";")}if(this.options.stringParams){this.source.push("tmp1.contexts = [];")}for(var r=0;r<p;r++){m=this.popStack();n.push(m);if(this.options.stringParams){this.source.push("tmp1.contexts.push("+this.popStack()+");")}}if(q){this.source.push("tmp1.fn = tmp1;");this.source.push("tmp1.inverse = "+q+";")}if(this.options.data){this.source.push("tmp1.data = data;")}n.push(u);this.populateCall(n,j,k||j,w,t!=="{}")},populateCall:function(n,j,k,q,o){var m=["depth0"].concat(n).join(", ");var i=["depth0"].concat(k).concat(n).join(", ");var p=this.nextStack();if(this.usingKnownHelper){this.source.push(p+" = "+j+".call("+m+");")}else{this.context.aliases.functionType='"function"';var l=o?"foundHelper && ":"";this.source.push("if("+l+"typeof "+j+" === functionType) { "+p+" = "+j+".call("+m+"); }")}q.call(this,p,i,j);this.usingKnownHelper=false},invokePartial:function(i){params=[this.nameLookup("partials",i,"partial"),"'"+i+"'",this.popStack(),"helpers","partials"];if(this.options.data){params.push("data")}this.pushStack("self.invokePartial("+params.join(", ")+");")},assignToHash:function(i){var j=this.popStack();var k=this.topStack();this.source.push(k+"['"+i+"'] = 
"+j+";")},compiler:e,compileChildren:function(j,n){var p=j.children,r,q;for(var o=0,k=p.length;o<k;o++){r=p[o];q=new this.compiler();this.context.programs.push("");var m=this.context.programs.length;r.index=m;r.name="program"+m;this.context.programs[m]=q.compile(r,n,this.context)}},programExpression:function(k){if(k==null){return"self.noop"}var p=this.environment.children[k],o=p.depths.list;var n=[p.index,p.name,"data"];for(var m=0,j=o.length;m<j;m++){depth=o[m];if(depth===1){n.push("depth0")}else{n.push("depth"+(depth-1))}}if(o.length===0){return"self.program("+n.join(", ")+")"}else{n.shift();return"self.programWithDepth("+n.join(", ")+")"}},register:function(i,j){this.useRegister(i);this.source.push(i+" = "+j+";")},useRegister:function(i){if(!this.context.registers[i]){this.context.registers[i]=true;this.context.registers.list.push(i)}},pushStack:function(i){this.source.push(this.nextStack()+" = "+i+";");return"stack"+this.stackSlot},nextStack:function(){this.stackSlot++;if(this.stackSlot>this.stackVars.length){this.stackVars.push("stack"+this.stackSlot)}return"stack"+this.stackSlot},popStack:function(){return"stack"+this.stackSlot--},topStack:function(){return"stack"+this.stackSlot},quotedString:function(i){return'"'+i.replace(/\\/g,"\\\\").replace(/"/g,'\\"').replace(/\n/g,"\\n").replace(/\r/g,"\\r")+'"'}};var a=("break else new var case finally return void catch for switch while continue function this with default if throw delete in try do instanceof typeof abstract enum int short boolean export interface static byte extends long super char final native synchronized class float package throws const goto private transient debugger implements protected volatile double import public let yield").split(" ");var d=e.RESERVED_WORDS={};for(var c=0,b=a.length;c<b;c++){d[a[c]]=true}e.isValidJavaScriptVariableName=function(i){if(!e.RESERVED_WORDS[i]&&/^[a-zA-Z_$][0-9a-zA-Z_$]+$/.test(i)){return true}return false}})(Handlebars.Compiler,Handlebars.JavaScriptCompiler);Handlebars.precompile=function(d,c){c=c||{};var b=Handlebars.parse(d);var a=new Handlebars.Compiler().compile(b,c);return new Handlebars.JavaScriptCompiler().compile(a,c)};Handlebars.compile=function(b,a){a=a||{};var d;function c(){var g=Handlebars.parse(b);var f=new Handlebars.Compiler().compile(g,a);var e=new Handlebars.JavaScriptCompiler().compile(f,a,undefined,true);return Handlebars.template(e)}return function(f,e){if(!d){d=c()}return d.call(this,f,e)}};Handlebars.VM={template:function(a){var b={escapeExpression:Handlebars.Utils.escapeExpression,invokePartial:Handlebars.VM.invokePartial,programs:[],program:function(d,e,f){var c=this.programs[d];if(f){return Handlebars.VM.program(e,f)}else{if(c){return c}else{c=this.programs[d]=Handlebars.VM.program(e);return c}}},programWithDepth:Handlebars.VM.programWithDepth,noop:Handlebars.VM.noop};return function(d,c){c=c||{};return a.call(b,Handlebars,d,c.helpers,c.partials,c.data)}},programWithDepth:function(b,d,c){var a=Array.prototype.slice.call(arguments,2);return function(f,e){e=e||{};return b.apply(this,[f,e.data||d].concat(a))}},program:function(a,b){return function(d,c){c=c||{};return a(d,c.data||b)}},noop:function(){return""},invokePartial:function(a,b,d,e,c,f){options={helpers:e,partials:c,data:f};if(a===undefined){throw new Handlebars.Exception("The partial "+b+" could not be found")}else{if(a instanceof Function){return a(d,options)}else{if(!Handlebars.compile){throw new Handlebars.Exception("The partial "+b+" could not be compiled when running in runtime-only 
mode")}else{c[b]=Handlebars.compile(a);return c[b](d,options)}}}}};Handlebars.template=Handlebars.VM.template;
\ No newline at end of file
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/packed/libs/handlebars.runtime.js
--- /dev/null
+++ b/static/scripts/packed/libs/handlebars.runtime.js
@@ -0,0 +1,1 @@
+var Handlebars={};Handlebars.VERSION="1.0.beta.6";Handlebars.helpers={};Handlebars.partials={};Handlebars.registerHelper=function(b,c,a){if(a){c.not=a}this.helpers[b]=c};Handlebars.registerPartial=function(a,b){this.partials[a]=b};Handlebars.registerHelper("helperMissing",function(a){if(arguments.length===2){return undefined}else{throw new Error("Could not find property '"+a+"'")}});var toString=Object.prototype.toString,functionType="[object Function]";Handlebars.registerHelper("blockHelperMissing",function(f,d){var a=d.inverse||function(){},h=d.fn;var c="";var g=toString.call(f);if(g===functionType){f=f.call(this)}if(f===true){return h(this)}else{if(f===false||f==null){return a(this)}else{if(g==="[object Array]"){if(f.length>0){for(var e=0,b=f.length;e<b;e++){c=c+h(f[e])}}else{c=a(this)}return c}else{return h(f)}}}});Handlebars.registerHelper("each",function(f,d){var g=d.fn,a=d.inverse;var c="";if(f&&f.length>0){for(var e=0,b=f.length;e<b;e++){c=c+g(f[e])}}else{c=a(this)}return c});Handlebars.registerHelper("if",function(b,a){var c=toString.call(b);if(c===functionType){b=b.call(this)}if(!b||Handlebars.Utils.isEmpty(b)){return a.inverse(this)}else{return a.fn(this)}});Handlebars.registerHelper("unless",function(c,b){var d=b.fn,a=b.inverse;b.fn=a;b.inverse=d;return Handlebars.helpers["if"].call(this,c,b)});Handlebars.registerHelper("with",function(b,a){return a.fn(b)});Handlebars.registerHelper("log",function(a){Handlebars.log(a)});Handlebars.Exception=function(b){var a=Error.prototype.constructor.apply(this,arguments);for(var c in a){if(a.hasOwnProperty(c)){this[c]=a[c]}}this.message=a.message};Handlebars.Exception.prototype=new Error;Handlebars.SafeString=function(a){this.string=a};Handlebars.SafeString.prototype.toString=function(){return this.string.toString()};(function(){var c={"<":"<",">":">",'"':""","'":"'","`":"`"};var d=/&(?!\w+;)|[<>"'`]/g;var b=/[&<>"'`]/;var a=function(e){return c[e]||"&"};Handlebars.Utils={escapeExpression:function(e){if(e instanceof Handlebars.SafeString){return e.toString()}else{if(e==null||e===false){return""}}if(!b.test(e)){return e}return e.replace(d,a)},isEmpty:function(e){if(typeof e==="undefined"){return true}else{if(e===null){return true}else{if(e===false){return true}else{if(Object.prototype.toString.call(e)==="[object Array]"&&e.length===0){return true}else{return false}}}}}}})();Handlebars.VM={template:function(a){var b={escapeExpression:Handlebars.Utils.escapeExpression,invokePartial:Handlebars.VM.invokePartial,programs:[],program:function(d,e,f){var c=this.programs[d];if(f){return Handlebars.VM.program(e,f)}else{if(c){return c}else{c=this.programs[d]=Handlebars.VM.program(e);return c}}},programWithDepth:Handlebars.VM.programWithDepth,noop:Handlebars.VM.noop};return function(d,c){c=c||{};return a.call(b,Handlebars,d,c.helpers,c.partials,c.data)}},programWithDepth:function(b,d,c){var a=Array.prototype.slice.call(arguments,2);return function(f,e){e=e||{};return b.apply(this,[f,e.data||d].concat(a))}},program:function(a,b){return function(d,c){c=c||{};return a(d,c.data||b)}},noop:function(){return""},invokePartial:function(a,b,d,e,c,f){options={helpers:e,partials:c,data:f};if(a===undefined){throw new Handlebars.Exception("The partial "+b+" could not be found")}else{if(a instanceof Function){return a(d,options)}else{if(!Handlebars.compile){throw new Handlebars.Exception("The partial "+b+" could not be compiled when running in runtime-only mode")}else{c[b]=Handlebars.compile(a);return c[b](d,options)}}}}};Handlebars.template=Handlebars.VM.template;
\ No newline at end of file
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/packed/tools/rgenetics/checkbox_and_radiobutton.js
--- /dev/null
+++ b/static/scripts/packed/tools/rgenetics/checkbox_and_radiobutton.js
@@ -0,0 +1,1 @@
+function checkBox(A,x,r,u,q,s,B,y,t,C,w,p,D){var z=13;var v=true;if(arguments.length==z){this.id=A;this.parentNode=x;this.x=r;this.y=u;this.checkboxId=q;this.checkcrossId=s;this.checkedStatus=B;this.labelText=y;this.textStyles=t;if(!this.textStyles["font-size"]){this.textStyles["font-size"]=12}this.labelDistance=C;this.labelYOffset=w;this.radioButtonGroup=p;this.functionToCall=D;this.exists=true;this.label=undefined}else{v=false;alert("Error in checkbox ("+A+"): wrong nr of arguments! You have to pass over "+z+" parameters.")}if(v){this.timer=new Timer(this);if(this.radioButtonGroup){this.timerMs=0}else{this.timerMs=200}this.createCheckBox()}else{alert("Could not create checkbox with id '"+A+"' due to errors in the constructor parameters")}}checkBox.prototype.createCheckBox=function(){if(typeof(this.parentNode)=="string"){this.parentNode=document.getElementById(this.parentNode)}this.checkBox=document.createElementNS(svgNS,"use");this.checkBox.setAttributeNS(null,"x",this.x);this.checkBox.setAttributeNS(null,"y",this.y);this.checkBox.setAttributeNS(xlinkNS,"href","#"+this.checkboxId);this.checkBox.addEventListener("click",this,false);this.checkBox.setAttributeNS(null,"cursor","pointer");this.parentNode.appendChild(this.checkBox);this.checkCross=document.createElementNS(svgNS,"use");this.checkCross.setAttributeNS(null,"x",this.x);this.checkCross.setAttributeNS(null,"y",this.y);this.checkCross.setAttributeNS(xlinkNS,"href","#"+this.checkcrossId);this.parentNode.appendChild(this.checkCross);if(this.checkedStatus==false){this.checkCross.setAttributeNS(null,"display","none")}if(this.labelText){if(this.labelText.length>0){this.label=document.createElementNS(svgNS,"text");for(var d in this.textStyles){var e=this.textStyles[d];if(d=="font-size"){e+="px"}this.label.setAttributeNS(null,d,e)}this.label.setAttributeNS(null,"x",(this.x+this.labelDistance));this.label.setAttributeNS(null,"y",(this.y+this.labelYOffset));this.label.setAttributeNS(null,"cursor","pointer");var 
f=document.createTextNode(this.labelText);this.label.appendChild(f);this.label.setAttributeNS(null,"pointer-events","all");this.label.addEventListener("click",this,false);this.parentNode.appendChild(this.label)}}if(this.radioButtonGroup){this.radioButtonGroup.addCheckBox(this)}};checkBox.prototype.handleEvent=function(b){if(b.type=="click"){if(this.checkedStatus==true){this.checkCross.setAttributeNS(null,"display","none");this.checkedStatus=false}else{this.checkCross.setAttributeNS(null,"display","inline");this.checkedStatus=true}}this.timer.setTimeout("fireFunction",this.timerMs)};checkBox.prototype.fireFunction=function(){if(this.radioButtonGroup){this.radioButtonGroup.selectById(this.id,true)}else{if(typeof(this.functionToCall)=="function"){this.functionToCall(this.id,this.checkedStatus,this.labelText)}if(typeof(this.functionToCall)=="object"){this.functionToCall.checkBoxChanged(this.id,this.checkedStatus,this.labelText)}if(typeof(this.functionToCall)==undefined){return}}};checkBox.prototype.check=function(b){this.checkCross.setAttributeNS(null,"display","inherit");this.checkedStatus=true;if(b){this.timer.setTimeout("fireFunction",this.timerMs)}};checkBox.prototype.uncheck=function(b){this.checkCross.setAttributeNS(null,"display","none");this.checkedStatus=false;if(b){this.timer.setTimeout("fireFunction",this.timerMs)}};checkBox.prototype.moveTo=function(c,d){this.x=c;this.y=d;this.checkBox.setAttributeNS(null,"x",this.x);this.checkBox.setAttributeNS(null,"y",this.y);this.checkCross.setAttributeNS(null,"x",this.x);this.checkCross.setAttributeNS(null,"y",this.y);if(this.labelText){this.label.setAttributeNS(null,"x",(this.x+this.labelDistance));this.label.setAttributeNS(null,"y",(this.y+this.labelYOffset))}};checkBox.prototype.remove=function(b){this.checkBox.removeEventListener("click",this,false);this.parentNode.removeChild(this.checkBox);this.parentNode.removeChild(this.checkCross);if(this.label){this.parentNode.removeChild(this.label)}this.exists=false};checkBox.prototype.setLabelText=function(e){this.labelText=e;if(this.label){this.label.firstChild.nodeValue=e}else{if(this.labelText.length>0){this.label=document.createElementNS(svgNS,"text");for(var d in this.textStyles){value=this.textStyles[d];if(d=="font-size"){value+="px"}this.label.setAttributeNS(null,d,value)}this.label.setAttributeNS(null,"x",(this.x+this.labelDistance));this.label.setAttributeNS(null,"y",(this.y+this.textStyles["font-size"]*0.3));var f=document.createTextNode(this.labelText);this.label.appendChild(f);this.parentNode.appendChild(this.label)}}};function radioButtonGroup(f,e){var d=2;if(arguments.length==d){this.id=f;if(typeof(e)=="function"||typeof(e)=="object"||typeof(e)==undefined){this.functionToCall=e}else{alert("Error in radiobutton with ("+f+"): argument functionToCall is not of type 'function', 'object' or undefined!")}this.checkBoxes=new Array();this.selectedId=undefined;this.selectedIndex=undefined;this.timer=new Timer(this);this.timerMs=200}else{alert("Error in radiobutton with ("+f+"): wrong nr of arguments! 
You have to pass over "+d+" parameters.")}}radioButtonGroup.prototype.addCheckBox=function(b){this.checkBoxes.push(b);if(b.checkedStatus){this.selectedId=b.id;this.selectedIndex=this.checkBoxes.length-1}};radioButtonGroup.prototype.selectById=function(h,f){var g=false;for(var e=0;e<this.checkBoxes.length;e++){if(this.checkBoxes[e].id==h){this.selectedId=h;this.selectedIndex=e;if(this.checkBoxes[e].checkedStatus==false){this.checkBoxes[e].check(false)}g=true}else{this.checkBoxes[e].uncheck(false)}}if(g){if(f){this.timer.setTimeout("fireFunction",this.timerMs)}}else{alert("Error in radiobutton with ("+this.id+"): could not find checkbox with id '"+h+"'")}};radioButtonGroup.prototype.selectByLabelname=function(f,e){var g=-1;for(var h=0;h<this.checkBoxes.length;h++){if(this.checkBoxes[h].labelText==f){g=this.checkBoxes[h].id}}if(g==-1){alert("Error in radiobutton with ("+this.id+"): could not find checkbox with label '"+f+"'")}else{this.selectById(g,e)}};radioButtonGroup.prototype.fireFunction=function(){if(typeof(this.functionToCall)=="function"){this.functionToCall(this.id,this.selectedId,this.checkBoxes[this.selectedIndex].labelText)}if(typeof(this.functionToCall)=="object"){this.functionToCall.radioButtonChanged(this.id,this.selectedId,this.checkBoxes[this.selectedIndex].labelText)}if(typeof(this.functionToCall)==undefined){return}};
\ No newline at end of file
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/packed/tools/rgenetics/helper_functions.js
--- /dev/null
+++ b/static/scripts/packed/tools/rgenetics/helper_functions.js
@@ -0,0 +1,1 @@
+var svgNS="http://www.w3.org/2000/svg";var xlinkNS="http://www.w3.org/1999/xlink";var cartoNS="http://www.carto.net/attrib";var attribNS="http://www.carto.net/attrib";var batikNS="http://xml.apache.org/batik/ext";function toPolarDir(e,f){var d=(Math.atan2(f,e));return(d)}function toPolarDist(e,d){var f=Math.sqrt(e*e+d*d);return(f)}function toRectX(d,f){var e=f*Math.cos(d);return(e)}function toRectY(d,c){y=c*Math.sin(d);return(y)}function DegToRad(b){return(b/180*Math.PI)}function RadToDeg(b){return(b/Math.PI*180)}function dd2dms(f){var e=(Math.abs(f)-Math.floor(Math.abs(f)))*60;var g=(e-Math.floor(e))*60;var e=Math.floor(e);if(f>=0){var j=Math.floor(f)}else{var j=Math.ceil(f)}return{deg:j,min:e,sec:g}}function dms2dd(f,e,d){if(f<0){return f-(e/60)-(d/3600)}else{return f+(e/60)+(d/3600)}}function log(d,b){if(b==null){b=Math.E}return Math.log(d)/Math.log(b)}function intBilinear(A,C,E,G,x,H,I,J,K){var f=(x-I)/K;var w=(H-J)/K;var z=(1-f)*(1-w);var B=f*(1-w);var D=f*w;var F=w*(1-f);var e=z*E+B*G+D*A+F*C;return e}function leftOfTest(k,m,q,n,r,o){var l=(n-m)*(r-q)-(q-k)*(o-n);if(l<0){var p=1}else{var p=0}return p}function distFromLine(p,o,q,u,r,m){var l=r-q;var n=m-u;var t=(n*(p-q)-l*(o-u))/Math.sqrt(Math.pow(l,2)+Math.pow(n,2));return t}function angleBetwTwoLines(f,g,j,l){var k=Math.acos((f*j+g*l)/(Math.sqrt(Math.pow(f,2)+Math.pow(g,2))*Math.sqrt(Math.pow(j,2)+Math.pow(l,2))));return k}function calcBisectorVector(c,k,m,o){var n=Math.sqrt(Math.pow(c,2)+Math.pow(k,2));var p=Math.sqrt(Math.pow(m,2)+Math.pow(o,2));var l=new Array();l[0]=c/n+m/p;l[1]=k/n+o/p;return l}function calcBisectorAngle(u,l,m,n){var r=Math.sqrt(Math.pow(u,2)+Math.pow(l,2));var t=Math.sqrt(Math.pow(m,2)+Math.pow(n,2));var o=u/r+m/t;var p=l/r+n/t;var q=toPolarDir(o,p);return q}function intersect2lines(q,w,r,x,o,t,p,u){var A=new Object();var z=(u-t)*(r-q)-(p-o)*(x-w);if(z==0){alert("lines are parallel")}else{var B=((p-o)*(w-t)-(u-t)*(q-o))/z;var C=((r-q)*(w-t)-(x-w)*(q-o))/z}A.x=q+B*(r-q);A.y=w+B*(x-w);return A}function formatNumberString(j,l){if(typeof(j)=="Number"){var n=j.toString()}else{var n=j}var g="";var m=n.split(".");var k=m[0].length;if(k>3){while(k>0){if(k>3){g=l+m[0].substr(k-3,3)+g}else{g=m[0].substr(0,k)+g}k-=3}}else{g=m[0]}if(m[1]){g=g+"."+m[1]}return g}function statusChange(b){document.getElementById("statusText").firstChild.nodeValue="Statusbar: "+b}function scaleObject(j,l){var m=j.currentTarget;var n=m.getAttributeNS(null,"x");var g=m.getAttributeNS(null,"y");var k="scale("+l+") translate("+(n*1/l-n)+" "+(g*1/l-g)+")";m.setAttributeNS(null,"transform",k)}function getTransformToRootElement(d){try{var f=d.getTransformToElement(document.documentElement)}catch(e){var f=d.getCTM();while((d=d.parentNode)!=document){f=d.getCTM().multiply(f)}}return f}function getTransformToElement(e,j){try{var g=e.getTransformToElement(j)}catch(f){var g=e.getCTM();while((e=e.parentNode)!=j){g=e.getCTM().multiply(g)}}return g}function hsv2rgb(n,r,t){var f=new Object();if(r==0){f.red=Math.round(t*255);f.green=Math.round(t*255);f.blue=Math.round(t*255)}else{var o=n/60;var p=Math.floor(o);var m=o-p;if(p%2==0){m=1-m}var q=t*(1-r);var u=t*(1-r*m);switch(p){case 0:f.red=t;f.green=u;f.blue=q;break;case 1:f.red=u;f.green=t;f.blue=q;break;case 2:f.red=q;f.green=t;f.blue=u;break;case 3:f.red=q;f.green=u;f.blue=t;break;case 4:f.red=u;f.green=q;f.blue=t;break;case 5:f.red=t;f.green=q;f.blue=u;break;case 6:f.red=t;f.green=u;f.blue=q;break}f.red=Math.round(f.red*255);f.green=Math.round(f.green*255);f.blue=Math.round(f.blue*255)}return f}function 
rgb2hsv(j,e,f){var g=new Object();j=j/255;e=e/255;f=f/255;myMax=Math.max(j,Math.max(e,f));myMin=Math.min(j,Math.min(e,f));v=myMax;if(myMax>0){s=(myMax-myMin)/myMax}else{s=0}if(s>0){myDiff=myMax-myMin;rc=(myMax-j)/myDiff;gc=(myMax-e)/myDiff;bc=(myMax-f)/myDiff;if(j==myMax){h=(bc-gc)/6}if(e==myMax){h=(2+rc-bc)/6}if(f==myMax){h=(4+gc-rc)/6}}else{h=0}if(h<0){h+=1}g.hue=Math.round(h*360);g.sat=s;g.val=v;return g}function arrayPopulate(e,d){var f=new Array();if(e.length!=d.length){alert("error: arrays do not have the same length!")}else{for(i=0;i<e.length;i++){f[e[i]]=d[i]}}return f}function getData(m,n,l,k,g,j){this.url=m;this.callBackFunction=n;this.returnFormat=l;this.method=k;this.additionalParams=j;if(k!="get"&&k!="post"){alert("Error in network request: parameter 'method' must be 'get' or 'post'")}this.postText=g;this.xmlRequest=null}getData.prototype.getData=function(){if(window.getURL){if(this.method=="get"){getURL(this.url,this)}if(this.method=="post"){postURL(this.url,this.postText,this)}}else{if(window.XMLHttpRequest){var c=this;this.xmlRequest=new XMLHttpRequest();if(this.method=="get"){if(this.returnFormat=="xml"){this.xmlRequest.overrideMimeType("text/xml")}this.xmlRequest.open("GET",this.url,true)}if(this.method=="post"){this.xmlRequest.open("POST",this.url,true)}this.xmlRequest.onreadystatechange=function(){c.handleEvent()};if(this.method=="get"){this.xmlRequest.send(null)}if(this.method=="post"){var d=true;if(!this.postText){d=false;alert("Error in network post request: missing parameter 'postText'!")}if(typeof(this.postText)!="string"){d=false;alert("Error in network post request: parameter 'postText' has to be of type 'string')")}if(d){this.xmlRequest.send(this.postText)}}}else{alert("your browser/svg viewer neither supports window.getURL nor window.XMLHttpRequest!")}}};getData.prototype.operationComplete=function(c){if(c.success){if(this.returnFormat=="xml"){var d=parseXML(c.content,document);if(typeof(this.callBackFunction)=="function"){this.callBackFunction(d.firstChild,this.additionalParams)}if(typeof(this.callBackFunction)=="object"){this.callBackFunction.receiveData(d.firstChild,this.additionalParams)}}if(this.returnFormat=="json"){if(typeof(this.callBackFunction)=="function"){this.callBackFunction(c.content,this.additionalParams)}if(typeof(this.callBackFunction)=="object"){this.callBackFunction.receiveData(c.content,this.additionalParams)}}}else{alert("something went wrong with dynamic loading of geometry!")}};getData.prototype.handleEvent=function(){if(this.xmlRequest.readyState==4){if(this.returnFormat=="xml"){var b=document.importNode(this.xmlRequest.responseXML.documentElement,true);if(typeof(this.callBackFunction)=="function"){this.callBackFunction(b,this.additionalParams)}if(typeof(this.callBackFunction)=="object"){this.callBackFunction.receiveData(b,this.additionalParams)}}if(this.returnFormat=="json"){if(typeof(this.callBackFunction)=="function"){this.callBackFunction(this.xmlRequest.responseText,this.additionalParams)}if(typeof(this.callBackFunction)=="object"){this.callBackFunction.receiveData(this.xmlRequest.responseText,this.additionalParams)}}}};function serializeNode(d){if(typeof XMLSerializer!="undefined"){return new XMLSerializer().serializeToString(d)}else{if(typeof d.xml!="undefined"){return d.xml}else{if(typeof printNode!="undefined"){return printNode(d)}else{if(typeof Packages!="undefined"){try{var e=new java.io.StringWriter();Packages.org.apache.batik.dom.util.DOMUtilities.writeNode(d,e);return e.toString()}catch(f){alert("Sorry, your SVG viewer 
does not support the printNode/serialize function.");return""}}else{alert("Sorry, your SVG viewer does not support the printNode/serialize function.");return""}}}}}function startAnimation(b){document.getElementById(b).beginElement()};
\ No newline at end of file
diff -r 11649eacb329e0f84e64d8c07d316c26182d8be1 -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 static/scripts/packed/tools/rgenetics/timer.js
--- /dev/null
+++ b/static/scripts/packed/tools/rgenetics/timer.js
@@ -0,0 +1,1 @@
+function Timer(){this.obj=(arguments.length)?arguments[0]:window;return this}Timer.prototype.setInterval=function(h,g){var e=Timer.getNew();var f=Timer.buildCall(this.obj,e,arguments);Timer.set[e].timer=window.setInterval(f,g);return e};Timer.prototype.setTimeout=function(d,f){var e=Timer.getNew();Timer.buildCall(this.obj,e,arguments);Timer.set[e].timer=window.setTimeout("Timer.callOnce("+e+");",f);return e};Timer.prototype.clearInterval=function(b){if(!Timer.set[b]){return}window.clearInterval(Timer.set[b].timer);Timer.set[b]=null};Timer.prototype.clearTimeout=function(b){if(!Timer.set[b]){return}window.clearTimeout(Timer.set[b].timer);Timer.set[b]=null};Timer.set=new Array();Timer.buildCall=function(h,i,f){var j="";Timer.set[i]=new Array();if(h!=window){Timer.set[i].obj=h;j="Timer.set["+i+"].obj."}j+=f[0]+"(";if(f.length>2){Timer.set[i][0]=f[2];j+="Timer.set["+i+"][0]";for(var g=1;(g+2)<f.length;g++){Timer.set[i][g]=f[g+2];j+=", Timer.set["+i+"]["+g+"]"}}j+=");";Timer.set[i].call=j;return j};Timer.callOnce=function(i){if(!Timer.set[i]){return}eval(Timer.set[i].call);Timer.set[i]=null};Timer.getNew=function(){var b=0;while(Timer.set[b]){b++}return b};
\ No newline at end of file
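For orientation, the handlebars.runtime.js file added above carries only the template-execution half of the library; compiling template strings at runtime still requires the full handlebars.js build. A minimal usage sketch of the 1.0.beta.6 API shown in these diffs, assuming the full build is loaded; the "bold" helper and the template string are illustrative, not part of the changeset:

    // Block helpers receive an options object whose fn() renders the block body.
    Handlebars.registerHelper("bold", function(options) {
        // SafeString keeps the generated markup from being escaped a second time.
        return new Handlebars.SafeString("<b>" + options.fn(this) + "</b>");
    });

    // Handlebars.compile lives only in the full build; handlebars.runtime.js can
    // merely execute templates precompiled with it (e.g. Handlebars.templates.*).
    var template = Handlebars.compile("<div class='toolTitle'>{{#bold}}{{name}}{{/bold}}</div>");
    var html = template({ name: "Upload File" });  // "<div class='toolTitle'><b>Upload File</b></div>"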
https://bitbucket.org/galaxy/galaxy-central/changeset/892d346ba649/
changeset: 892d346ba649
user: jgoecks
date: 2012-04-02 16:19:21
summary: Merge.
affected #: 1 file
diff -r 24501c5f17a6310f39f5e3a5d55f8a00d7a67213 -r 892d346ba6493249f503a43bface1ca8f4a4c167 tools/gatk/unified_genotyper.xml
--- a/tools/gatk/unified_genotyper.xml
+++ b/tools/gatk/unified_genotyper.xml
@@ -138,6 +138,7 @@
--excludeAnnotation "${annotation}"
#end for
#end if
+ ${analysis_param_type.multiallelic}
'
## #if str( $analysis_param_type.snpEff_rod_bind_type.snpEff_rod_bind_type_selector ) == 'set_snpEff':
## -p '--annotation "SnpEff"'
@@ -145,7 +146,6 @@
## #else:
## -p '--excludeAnnotation "SnpEff"'
## #end if
- ${analysis_param_type.multiallelic}
#end if
</command><inputs>
https://bitbucket.org/galaxy/galaxy-central/changeset/d02ee3fe8f4e/
changeset: d02ee3fe8f4e
user: jgoecks
date: 2012-04-02 16:43:14
summary: Put tool panel definition in the page rather than using AJAX to retrieve it.
affected #: 4 files
diff -r 892d346ba6493249f503a43bface1ca8f4a4c167 -r d02ee3fe8f4ec295913b4acb0b3c1932ddd3b22f lib/galaxy/web/controllers/tools.py
--- a/lib/galaxy/web/controllers/tools.py
+++ b/lib/galaxy/web/controllers/tools.py
@@ -18,7 +18,6 @@
in_panel = util.string_as_bool( kwds.get( 'in_panel', 'True' ) )
if in_panel:
panel_elts = []
- # Taken from tool_menu.mako:
for key, val in self.app.toolbox.tool_panel.items():
panel_elts.append( val.to_dict( trans ) )
rval = panel_elts
diff -r 892d346ba6493249f503a43bface1ca8f4a4c167 -r d02ee3fe8f4ec295913b4acb0b3c1932ddd3b22f static/scripts/backbone/tools.js
--- a/static/scripts/backbone/tools.js
+++ b/static/scripts/backbone/tools.js
@@ -341,10 +341,6 @@
*/
initialize: function(options) {
this.collection.tool_search.on("change:results", this.handle_search_results, this);
-
- var self = this;
- // Wait for collection to load before rendering.
- this.collection.bind("reset", function() { self.render(); });
},
render: function() {
diff -r 892d346ba6493249f503a43bface1ca8f4a4c167 -r d02ee3fe8f4ec295913b4acb0b3c1932ddd3b22f static/scripts/packed/backbone/tools.js
--- a/static/scripts/packed/backbone/tools.js
+++ b/static/scripts/packed/backbone/tools.js
@@ -1,1 +1,1 @@
-var BaseModel=Backbone.Model.extend({defaults:{id:null,name:null,hidden:false},show:function(){this.set("hidden",false)},hide:function(){this.set("hidden",true)},is_visible:function(){return !this.attributes.hidden}});var Tool=BaseModel.extend({defaults:{description:null,target:null,params:[]},apply_search_results:function(a){(_.indexOf(a,this.attributes.id)!==-1?this.show():this.hide());return this.is_visible()}});var ToolPanelLabel=BaseModel.extend({});var ToolPanelSection=BaseModel.extend({defaults:{elems:[],open:false},clear_search_results:function(){_.each(this.attributes.elems,function(a){a.show()});this.show();this.set("open",false)},apply_search_results:function(b){var c=true,a;_.each(this.attributes.elems,function(d){if(d instanceof ToolPanelLabel){a=d;a.hide()}else{if(d instanceof Tool){if(d.apply_search_results(b)){c=false;if(a){a.show()}}}}});if(c){this.hide()}else{this.show();this.set("open",true)}}});var ToolSearch=Backbone.Model.extend({defaults:{spinner_url:"",search_url:"",visible:true,query:"",results:null},initialize:function(){this.on("change:query",this.do_search)},do_search:function(){var c=this.attributes.query;if(c.length<3){this.set("results",null);return}var b=c+"*";if(this.timer){clearTimeout(this.timer)}$("#search-spinner").show();var a=this;this.timer=setTimeout(function(){$.get(a.attributes.search_url,{query:b},function(d){a.set("results",d);$("#search-spinner").hide()},"json")},200)}});var ToolPanel=Backbone.Collection.extend({url:"/tools",parse:function(a){var b=function(e){var d=e.type;if(d==="tool"){return new Tool(e)}else{if(d==="section"){var c=_.map(e.elems,b);e.elems=c;return new ToolPanelSection(e)}else{if(d==="label"){return new ToolPanelLabel(e)}}}};return _.map(a,b)},initialize:function(a){this.tool_search=a.tool_search;this.tool_search.on("change:results",this.apply_search_results,this)},clear_search_results:function(){this.each(function(a){a.clear_search_results()})},apply_search_results:function(){var a=this.tool_search.attributes.results;if(a===null){this.clear_search_results();return}this.each(function(b){b.apply_search_results(a)})}});var ToolLinkView=Backbone.View.extend({tagName:"div",template:Handlebars.templates.tool_link,initialize:function(){this.model.on("change:hidden",this.update_visible,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolPanelLabelView=Backbone.View.extend({tagName:"div",className:"toolPanelLabel",template:Handlebars.templates.panel_label,initialize:function(){this.model.on("change:hidden",this.update_visible,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolPanelSectionView=Backbone.View.extend({tagName:"div",className:"toolSectionWrapper",template:Handlebars.templates.panel_section,initialize:function(){this.model.on("change:hidden",this.update_visible,this);this.model.on("change:open",this.update_open,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));var a=this.$el.find(".toolSectionBody");_.each(this.model.attributes.elems,function(b){if(b instanceof Tool){var c=new ToolLinkView({model:b,className:"toolTitle"});c.render();a.append(c.$el)}else{if(b instanceof ToolPanelLabel){var d=new ToolPanelLabelView({model:b});d.render();a.append(d.$el)}else{}}});return this},events:{"click .toolSectionTitle > 
a":"toggle"},toggle:function(){this.$el.children(".toolSectionBody").toggle("fast");this.model.set("open",!this.model.attributes.open)},update_open:function(){(this.model.attributes.open?this.$el.children(".toolSectionBody").show("fast"):this.$el.children(".toolSectionBody").hide("fast"))},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolSearchView=Backbone.View.extend({tagName:"div",id:"tool-search",className:"bar",template:Handlebars.templates.tool_search,events:{click:"focus_and_select","keyup :input":"query_changed"},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},focus_and_select:function(){this.$el.find(":input").focus().select()},query_changed:function(){this.model.set("query",this.$el.find(":input").val())}});var ToolPanelView=Backbone.View.extend({tagName:"div",className:"toolMenu",initialize:function(b){this.collection.tool_search.on("change:results",this.handle_search_results,this);var a=this;this.collection.bind("reset",function(){a.render()})},render:function(){var b=this.$el;var a=new ToolSearchView({model:this.collection.tool_search});a.render();b.append(a.$el);this.collection.each(function(d){if(d instanceof ToolPanelSection){var c=new ToolPanelSectionView({model:d});c.render();b.append(c.$el)}else{if(d instanceof Tool){var e=new ToolLinkView({model:elt,className:"toolTitleNoSection"});e.render();b.append(e.$el)}}});return this},handle_search_results:function(){var a=this.collection.tool_search.attributes.results;if(a&&a.length===0){$("#search-no-results").show()}else{$("#search-no-results").hide()}}});
\ No newline at end of file
+var BaseModel=Backbone.Model.extend({defaults:{id:null,name:null,hidden:false},show:function(){this.set("hidden",false)},hide:function(){this.set("hidden",true)},is_visible:function(){return !this.attributes.hidden}});var Tool=BaseModel.extend({defaults:{description:null,target:null,params:[]},apply_search_results:function(a){(_.indexOf(a,this.attributes.id)!==-1?this.show():this.hide());return this.is_visible()}});var ToolPanelLabel=BaseModel.extend({});var ToolPanelSection=BaseModel.extend({defaults:{elems:[],open:false},clear_search_results:function(){_.each(this.attributes.elems,function(a){a.show()});this.show();this.set("open",false)},apply_search_results:function(b){var c=true,a;_.each(this.attributes.elems,function(d){if(d instanceof ToolPanelLabel){a=d;a.hide()}else{if(d instanceof Tool){if(d.apply_search_results(b)){c=false;if(a){a.show()}}}}});if(c){this.hide()}else{this.show();this.set("open",true)}}});var ToolSearch=Backbone.Model.extend({defaults:{spinner_url:"",search_url:"",visible:true,query:"",results:null},initialize:function(){this.on("change:query",this.do_search)},do_search:function(){var c=this.attributes.query;if(c.length<3){this.set("results",null);return}var b=c+"*";if(this.timer){clearTimeout(this.timer)}$("#search-spinner").show();var a=this;this.timer=setTimeout(function(){$.get(a.attributes.search_url,{query:b},function(d){a.set("results",d);$("#search-spinner").hide()},"json")},200)}});var ToolPanel=Backbone.Collection.extend({url:"/tools",parse:function(a){var b=function(e){var d=e.type;if(d==="tool"){return new Tool(e)}else{if(d==="section"){var c=_.map(e.elems,b);e.elems=c;return new ToolPanelSection(e)}else{if(d==="label"){return new ToolPanelLabel(e)}}}};return _.map(a,b)},initialize:function(a){this.tool_search=a.tool_search;this.tool_search.on("change:results",this.apply_search_results,this)},clear_search_results:function(){this.each(function(a){a.clear_search_results()})},apply_search_results:function(){var a=this.tool_search.attributes.results;if(a===null){this.clear_search_results();return}this.each(function(b){b.apply_search_results(a)})}});var ToolLinkView=Backbone.View.extend({tagName:"div",template:Handlebars.templates.tool_link,initialize:function(){this.model.on("change:hidden",this.update_visible,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolPanelLabelView=Backbone.View.extend({tagName:"div",className:"toolPanelLabel",template:Handlebars.templates.panel_label,initialize:function(){this.model.on("change:hidden",this.update_visible,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolPanelSectionView=Backbone.View.extend({tagName:"div",className:"toolSectionWrapper",template:Handlebars.templates.panel_section,initialize:function(){this.model.on("change:hidden",this.update_visible,this);this.model.on("change:open",this.update_open,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));var a=this.$el.find(".toolSectionBody");_.each(this.model.attributes.elems,function(b){if(b instanceof Tool){var c=new ToolLinkView({model:b,className:"toolTitle"});c.render();a.append(c.$el)}else{if(b instanceof ToolPanelLabel){var d=new ToolPanelLabelView({model:b});d.render();a.append(d.$el)}else{}}});return this},events:{"click .toolSectionTitle > 
a":"toggle"},toggle:function(){this.$el.children(".toolSectionBody").toggle("fast");this.model.set("open",!this.model.attributes.open)},update_open:function(){(this.model.attributes.open?this.$el.children(".toolSectionBody").show("fast"):this.$el.children(".toolSectionBody").hide("fast"))},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolSearchView=Backbone.View.extend({tagName:"div",id:"tool-search",className:"bar",template:Handlebars.templates.tool_search,events:{click:"focus_and_select","keyup :input":"query_changed"},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},focus_and_select:function(){this.$el.find(":input").focus().select()},query_changed:function(){this.model.set("query",this.$el.find(":input").val())}});var ToolPanelView=Backbone.View.extend({tagName:"div",className:"toolMenu",initialize:function(a){this.collection.tool_search.on("change:results",this.handle_search_results,this)},render:function(){var b=this.$el;var a=new ToolSearchView({model:this.collection.tool_search});a.render();b.append(a.$el);this.collection.each(function(d){if(d instanceof ToolPanelSection){var c=new ToolPanelSectionView({model:d});c.render();b.append(c.$el)}else{if(d instanceof Tool){var e=new ToolLinkView({model:elt,className:"toolTitleNoSection"});e.render();b.append(e.$el)}}});return this},handle_search_results:function(){var a=this.collection.tool_search.attributes.results;if(a&&a.length===0){$("#search-no-results").show()}else{$("#search-no-results").hide()}}});
\ No newline at end of file
diff -r 892d346ba6493249f503a43bface1ca8f4a4c167 -r d02ee3fe8f4ec295913b4acb0b3c1932ddd3b22f templates/root/tool_menu.mako
--- a/templates/root/tool_menu.mako
+++ b/templates/root/tool_menu.mako
@@ -20,9 +20,14 @@
${h.js( "galaxy.base", "json2", "autocomplete_tagging", "backbone/tools" )}
<%
+ # Set up for creating tool panel.
show_tool_search = "true"
if trans.user and trans.user.preferences.get( "show_tool_search", "False" ) == "False":
show_tool_search = "false"
+
+ dictified_panel = []
+ for key, val in trans.app.toolbox.tool_panel.items():
+ dictified_panel.append( val.to_dict( trans ) )
%><script type="text/javascript">
@@ -36,11 +41,11 @@
// Set up tool panel.
tool_panel = new ToolPanel( { tool_search: tool_search } );
- tool_panel.fetch();
-
+ tool_panel.reset( tool_panel.parse( ${h.to_json_string( dictified_panel )} ) );
// Set up tool panel view and initialize.
tool_panel_view = new ToolPanelView( {collection: tool_panel} );
+ tool_panel_view.render();
$('body').prepend(tool_panel_view.$el);
// Minsize init hint.
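The changeset above replaces an asynchronous fetch() of /tools with panel data serialized straight into the Mako page. A sketch of the same bootstrap flow, assuming backbone/tools.js and its Backbone, jQuery, and precompiled-template dependencies are loaded; the panel JSON below is a made-up stand-in for the server-rendered ${h.to_json_string( dictified_panel )}:

    // Hypothetical stand-in for the JSON the Mako template now embeds in the page.
    var dictified_panel = [
        { type: "section", name: "Get Data", elems: [
            { type: "tool", id: "upload1", name: "Upload File", description: "from your computer" }
        ] }
    ];

    var tool_search = new ToolSearch( {} );
    var tool_panel = new ToolPanel( { tool_search: tool_search } );

    // No AJAX round trip: run the embedded JSON through the collection's own parse()
    // and reset() so the same Tool/ToolPanelSection models are built that fetch() would build.
    tool_panel.reset( tool_panel.parse( dictified_panel ) );

    // With no "reset" event to wait on, the view is rendered explicitly and attached.
    var tool_panel_view = new ToolPanelView( {collection: tool_panel} );
    tool_panel_view.render();
    $('body').prepend( tool_panel_view.$el );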
https://bitbucket.org/galaxy/galaxy-central/changeset/de70d3eb7565/
changeset: de70d3eb7565
user: jgoecks
date: 2012-04-02 17:36:35
summary: Show/hide tool search based on user preferences.
affected #: 2 files
diff -r d02ee3fe8f4ec295913b4acb0b3c1932ddd3b22f -r de70d3eb756529bc37f00495a5f2f2357915eb1c static/scripts/backbone/tools.js
--- a/static/scripts/backbone/tools.js
+++ b/static/scripts/backbone/tools.js
@@ -101,7 +101,7 @@
* indicates that query was not run; if not null, results are from search using
* query.
*/
-var ToolSearch = Backbone.Model.extend({
+var ToolSearch = BaseModel.extend({
defaults: {
spinner_url: "",
search_url: "",
@@ -320,6 +320,9 @@
render: function() {
this.$el.append( this.template(this.model.toJSON()) );
+ if (!this.model.is_visible()) {
+ this.$el.hide();
+ }
return this;
},
diff -r d02ee3fe8f4ec295913b4acb0b3c1932ddd3b22f -r de70d3eb756529bc37f00495a5f2f2357915eb1c templates/root/tool_menu.mako
--- a/templates/root/tool_menu.mako
+++ b/templates/root/tool_menu.mako
@@ -21,9 +21,9 @@
<%
# Set up for creating tool panel.
- show_tool_search = "true"
+ tool_search_hidden = "false"
if trans.user and trans.user.preferences.get( "show_tool_search", "False" ) == "False":
- show_tool_search = "false"
+ tool_search_hidden = "true"
dictified_panel = []
for key, val in trans.app.toolbox.tool_panel.items():
@@ -37,7 +37,7 @@
// Set up search.
tool_search = new ToolSearch( {spinner_url: "${h.url_for('/static/images/loading_small_white_bg.gif')}",
search_url: "${h.url_for( controller='root', action='tool_search' )}",
- visible: ${show_tool_search} } );
+ hidden: ${tool_search_hidden} } );
// Set up tool panel.
tool_panel = new ToolPanel( { tool_search: tool_search } );
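With ToolSearch now deriving from BaseModel, the user preference becomes plain model state and the view hides itself at render time. A small sketch of the resulting flow, under the same loading assumptions as above; the preference value here is hypothetical:

    // Hypothetical preference; in the template it comes from
    // trans.user.preferences.get( "show_tool_search", "False" ).
    var tool_search_hidden = true;

    // ToolSearch now inherits hidden/show()/hide()/is_visible() from BaseModel.
    var tool_search = new ToolSearch( { hidden: tool_search_hidden } );

    var search_view = new ToolSearchView( { model: tool_search } );
    search_view.render();        // render() calls this.$el.hide() when !model.is_visible()

    tool_search.is_visible();    // false here, so the search bar starts hidden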
https://bitbucket.org/galaxy/galaxy-central/changeset/b8559675ee18/
changeset: b8559675ee18
user: jgoecks
date: 2012-04-02 17:38:44
summary: Disable recently used tools menu for now.
affected #: 1 file
diff -r de70d3eb756529bc37f00495a5f2f2357915eb1c -r b8559675ee18d77397bd572b2d22525137677afb templates/root/index.mako
--- a/templates/root/index.mako
+++ b/templates/root/index.mako
@@ -173,7 +173,8 @@
else:
action = "SHOW_RECENT"
%>
- menu_options[ ${action} ] = toggle_recently_used_fn;
+ // TODO: make compatible with new tool menu.
+ //menu_options[ ${action} ] = toggle_recently_used_fn;
%endif
https://bitbucket.org/galaxy/galaxy-central/changeset/9b403ed9ec16/
changeset: 9b403ed9ec16
user: jgoecks
date: 2012-04-02 17:48:40
summary: Add a base view that supports hiding and showing objects.
affected #: 2 files
diff -r b8559675ee18d77397bd572b2d22525137677afb -r 9b403ed9ec16789e2100e9921bed956789b47613 static/scripts/backbone/tools.js
--- a/static/scripts/backbone/tools.js
+++ b/static/scripts/backbone/tools.js
@@ -203,54 +203,54 @@
 * they are controllers as well and the HTML is the real view in the MVC architecture.
*/
-// TODO: implement a BaseModelView for handling model visibility.
+/**
+ * Base view that handles visibility based on model's hidden attribute.
+ */
+var BaseView = Backbone.View.extend({
+ initialize: function() {
+ this.model.on("change:hidden", this.update_visible, this);
+ },
+ update_visible: function() {
+ ( this.model.attributes.hidden ? this.$el.hide() : this.$el.show() );
+ }
+});
/**
* Link to a tool.
*/
-var ToolLinkView = Backbone.View.extend({
+var ToolLinkView = BaseView.extend({
tagName: 'div',
template: Handlebars.templates.tool_link,
- initialize: function() {
- this.model.on("change:hidden", this.update_visible, this);
- },
+
render: function() {
this.$el.append( this.template(this.model.toJSON()) );
return this;
- },
- update_visible: function() {
- ( this.model.attributes.hidden ? this.$el.hide() : this.$el.show() );
}
});
/**
* Panel label/section header.
*/
-var ToolPanelLabelView = Backbone.View.extend({
+var ToolPanelLabelView = BaseView.extend({
tagName: 'div',
className: 'toolPanelLabel',
template: Handlebars.templates.panel_label,
- initialize: function() {
- this.model.on("change:hidden", this.update_visible, this);
- },
+
render: function() {
this.$el.append( this.template(this.model.toJSON()) );
return this;
},
- update_visible: function() {
- ( this.model.attributes.hidden ? this.$el.hide() : this.$el.show() );
- }
});
/**
* Panel section.
*/
-var ToolPanelSectionView = Backbone.View.extend({
+var ToolPanelSectionView = BaseView.extend({
tagName: 'div',
className: 'toolSectionWrapper',
template: Handlebars.templates.panel_section,
initialize: function() {
- this.model.on("change:hidden", this.update_visible, this);
+ BaseView.prototype.initialize.call(this);
this.model.on("change:open", this.update_open, this);
},
render: function() {
@@ -297,13 +297,6 @@
this.$el.children(".toolSectionBody").show("fast") :
this.$el.children(".toolSectionBody").hide("fast")
);
- },
-
- /**
- * Update section and section elements visibility after search.
- */
- update_visible: function() {
- ( this.model.attributes.hidden ? this.$el.hide() : this.$el.show() );
}
});
diff -r b8559675ee18d77397bd572b2d22525137677afb -r 9b403ed9ec16789e2100e9921bed956789b47613 static/scripts/packed/backbone/tools.js
--- a/static/scripts/packed/backbone/tools.js
+++ b/static/scripts/packed/backbone/tools.js
@@ -1,1 +1,1 @@
-var BaseModel=Backbone.Model.extend({defaults:{id:null,name:null,hidden:false},show:function(){this.set("hidden",false)},hide:function(){this.set("hidden",true)},is_visible:function(){return !this.attributes.hidden}});var Tool=BaseModel.extend({defaults:{description:null,target:null,params:[]},apply_search_results:function(a){(_.indexOf(a,this.attributes.id)!==-1?this.show():this.hide());return this.is_visible()}});var ToolPanelLabel=BaseModel.extend({});var ToolPanelSection=BaseModel.extend({defaults:{elems:[],open:false},clear_search_results:function(){_.each(this.attributes.elems,function(a){a.show()});this.show();this.set("open",false)},apply_search_results:function(b){var c=true,a;_.each(this.attributes.elems,function(d){if(d instanceof ToolPanelLabel){a=d;a.hide()}else{if(d instanceof Tool){if(d.apply_search_results(b)){c=false;if(a){a.show()}}}}});if(c){this.hide()}else{this.show();this.set("open",true)}}});var ToolSearch=Backbone.Model.extend({defaults:{spinner_url:"",search_url:"",visible:true,query:"",results:null},initialize:function(){this.on("change:query",this.do_search)},do_search:function(){var c=this.attributes.query;if(c.length<3){this.set("results",null);return}var b=c+"*";if(this.timer){clearTimeout(this.timer)}$("#search-spinner").show();var a=this;this.timer=setTimeout(function(){$.get(a.attributes.search_url,{query:b},function(d){a.set("results",d);$("#search-spinner").hide()},"json")},200)}});var ToolPanel=Backbone.Collection.extend({url:"/tools",parse:function(a){var b=function(e){var d=e.type;if(d==="tool"){return new Tool(e)}else{if(d==="section"){var c=_.map(e.elems,b);e.elems=c;return new ToolPanelSection(e)}else{if(d==="label"){return new ToolPanelLabel(e)}}}};return _.map(a,b)},initialize:function(a){this.tool_search=a.tool_search;this.tool_search.on("change:results",this.apply_search_results,this)},clear_search_results:function(){this.each(function(a){a.clear_search_results()})},apply_search_results:function(){var a=this.tool_search.attributes.results;if(a===null){this.clear_search_results();return}this.each(function(b){b.apply_search_results(a)})}});var ToolLinkView=Backbone.View.extend({tagName:"div",template:Handlebars.templates.tool_link,initialize:function(){this.model.on("change:hidden",this.update_visible,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolPanelLabelView=Backbone.View.extend({tagName:"div",className:"toolPanelLabel",template:Handlebars.templates.panel_label,initialize:function(){this.model.on("change:hidden",this.update_visible,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolPanelSectionView=Backbone.View.extend({tagName:"div",className:"toolSectionWrapper",template:Handlebars.templates.panel_section,initialize:function(){this.model.on("change:hidden",this.update_visible,this);this.model.on("change:open",this.update_open,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));var a=this.$el.find(".toolSectionBody");_.each(this.model.attributes.elems,function(b){if(b instanceof Tool){var c=new ToolLinkView({model:b,className:"toolTitle"});c.render();a.append(c.$el)}else{if(b instanceof ToolPanelLabel){var d=new ToolPanelLabelView({model:b});d.render();a.append(d.$el)}else{}}});return this},events:{"click .toolSectionTitle > 
a":"toggle"},toggle:function(){this.$el.children(".toolSectionBody").toggle("fast");this.model.set("open",!this.model.attributes.open)},update_open:function(){(this.model.attributes.open?this.$el.children(".toolSectionBody").show("fast"):this.$el.children(".toolSectionBody").hide("fast"))},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolSearchView=Backbone.View.extend({tagName:"div",id:"tool-search",className:"bar",template:Handlebars.templates.tool_search,events:{click:"focus_and_select","keyup :input":"query_changed"},render:function(){this.$el.append(this.template(this.model.toJSON()));return this},focus_and_select:function(){this.$el.find(":input").focus().select()},query_changed:function(){this.model.set("query",this.$el.find(":input").val())}});var ToolPanelView=Backbone.View.extend({tagName:"div",className:"toolMenu",initialize:function(a){this.collection.tool_search.on("change:results",this.handle_search_results,this)},render:function(){var b=this.$el;var a=new ToolSearchView({model:this.collection.tool_search});a.render();b.append(a.$el);this.collection.each(function(d){if(d instanceof ToolPanelSection){var c=new ToolPanelSectionView({model:d});c.render();b.append(c.$el)}else{if(d instanceof Tool){var e=new ToolLinkView({model:elt,className:"toolTitleNoSection"});e.render();b.append(e.$el)}}});return this},handle_search_results:function(){var a=this.collection.tool_search.attributes.results;if(a&&a.length===0){$("#search-no-results").show()}else{$("#search-no-results").hide()}}});
\ No newline at end of file
+var BaseModel=Backbone.Model.extend({defaults:{id:null,name:null,hidden:false},show:function(){this.set("hidden",false)},hide:function(){this.set("hidden",true)},is_visible:function(){return !this.attributes.hidden}});var Tool=BaseModel.extend({defaults:{description:null,target:null,params:[]},apply_search_results:function(a){(_.indexOf(a,this.attributes.id)!==-1?this.show():this.hide());return this.is_visible()}});var ToolPanelLabel=BaseModel.extend({});var ToolPanelSection=BaseModel.extend({defaults:{elems:[],open:false},clear_search_results:function(){_.each(this.attributes.elems,function(a){a.show()});this.show();this.set("open",false)},apply_search_results:function(b){var c=true,a;_.each(this.attributes.elems,function(d){if(d instanceof ToolPanelLabel){a=d;a.hide()}else{if(d instanceof Tool){if(d.apply_search_results(b)){c=false;if(a){a.show()}}}}});if(c){this.hide()}else{this.show();this.set("open",true)}}});var ToolSearch=BaseModel.extend({defaults:{spinner_url:"",search_url:"",visible:true,query:"",results:null},initialize:function(){this.on("change:query",this.do_search)},do_search:function(){var c=this.attributes.query;if(c.length<3){this.set("results",null);return}var b=c+"*";if(this.timer){clearTimeout(this.timer)}$("#search-spinner").show();var a=this;this.timer=setTimeout(function(){$.get(a.attributes.search_url,{query:b},function(d){a.set("results",d);$("#search-spinner").hide()},"json")},200)}});var ToolPanel=Backbone.Collection.extend({url:"/tools",parse:function(a){var b=function(e){var d=e.type;if(d==="tool"){return new Tool(e)}else{if(d==="section"){var c=_.map(e.elems,b);e.elems=c;return new ToolPanelSection(e)}else{if(d==="label"){return new ToolPanelLabel(e)}}}};return _.map(a,b)},initialize:function(a){this.tool_search=a.tool_search;this.tool_search.on("change:results",this.apply_search_results,this)},clear_search_results:function(){this.each(function(a){a.clear_search_results()})},apply_search_results:function(){var a=this.tool_search.attributes.results;if(a===null){this.clear_search_results();return}this.each(function(b){b.apply_search_results(a)})}});var BaseView=Backbone.View.extend({initialize:function(){this.model.on("change:hidden",this.update_visible,this)},update_visible:function(){(this.model.attributes.hidden?this.$el.hide():this.$el.show())}});var ToolLinkView=BaseView.extend({tagName:"div",template:Handlebars.templates.tool_link,render:function(){this.$el.append(this.template(this.model.toJSON()));return this}});var ToolPanelLabelView=BaseView.extend({tagName:"div",className:"toolPanelLabel",template:Handlebars.templates.panel_label,render:function(){this.$el.append(this.template(this.model.toJSON()));return this},});var ToolPanelSectionView=BaseView.extend({tagName:"div",className:"toolSectionWrapper",template:Handlebars.templates.panel_section,initialize:function(){BaseView.prototype.initialize.call(this);this.model.on("change:open",this.update_open,this)},render:function(){this.$el.append(this.template(this.model.toJSON()));var a=this.$el.find(".toolSectionBody");_.each(this.model.attributes.elems,function(b){if(b instanceof Tool){var c=new ToolLinkView({model:b,className:"toolTitle"});c.render();a.append(c.$el)}else{if(b instanceof ToolPanelLabel){var d=new ToolPanelLabelView({model:b});d.render();a.append(d.$el)}else{}}});return this},events:{"click .toolSectionTitle > 
a":"toggle"},toggle:function(){this.$el.children(".toolSectionBody").toggle("fast");this.model.set("open",!this.model.attributes.open)},update_open:function(){(this.model.attributes.open?this.$el.children(".toolSectionBody").show("fast"):this.$el.children(".toolSectionBody").hide("fast"))}});var ToolSearchView=Backbone.View.extend({tagName:"div",id:"tool-search",className:"bar",template:Handlebars.templates.tool_search,events:{click:"focus_and_select","keyup :input":"query_changed"},render:function(){this.$el.append(this.template(this.model.toJSON()));if(!this.model.is_visible()){this.$el.hide()}return this},focus_and_select:function(){this.$el.find(":input").focus().select()},query_changed:function(){this.model.set("query",this.$el.find(":input").val())}});var ToolPanelView=Backbone.View.extend({tagName:"div",className:"toolMenu",initialize:function(a){this.collection.tool_search.on("change:results",this.handle_search_results,this)},render:function(){var b=this.$el;var a=new ToolSearchView({model:this.collection.tool_search});a.render();b.append(a.$el);this.collection.each(function(d){if(d instanceof ToolPanelSection){var c=new ToolPanelSectionView({model:d});c.render();b.append(c.$el)}else{if(d instanceof Tool){var e=new ToolLinkView({model:elt,className:"toolTitleNoSection"});e.render();b.append(e.$el)}}});return this},handle_search_results:function(){var a=this.collection.tool_search.attributes.results;if(a&&a.length===0){$("#search-no-results").show()}else{$("#search-no-results").hide()}}});
\ No newline at end of file
https://bitbucket.org/galaxy/galaxy-central/changeset/319c7ee6873c/
changeset: 319c7ee6873c
user: jgoecks
date: 2012-04-02 17:52:39
summary: Merge.
affected #: 6 files
diff -r 9b403ed9ec16789e2100e9921bed956789b47613 -r 319c7ee6873c53e57aa6b37271cb7c6afb067612 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -223,7 +223,7 @@
if not os.path.exists(path):
return self._construct_path(obj, base_dir=base_dir, dir_only=dir_only, extra_dir=extra_dir, extra_dir_at_root=extra_dir_at_root, alt_name=alt_name)
- def _construct_path(self, obj, old_style=False, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ def _construct_path(self, obj, old_style=False, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None, **kwargs):
""" Construct the expected absolute path for accessing the object
identified by `obj`.id.
diff -r 9b403ed9ec16789e2100e9921bed956789b47613 -r 319c7ee6873c53e57aa6b37271cb7c6afb067612 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2181,7 +2181,7 @@
sa_session=self.sa_session )
self.app.security_agent.copy_dataset_permissions( outdata.dataset, child_dataset.dataset )
# Move data from temp location to dataset location
- self.app.object_store.update_from_file(child_dataset.dataset, filename, create=True)
+ self.app.object_store.update_from_file(child_dataset.dataset, file_name=filename, create=True)
self.sa_session.add( child_dataset )
self.sa_session.flush()
child_dataset.set_size()
@@ -2248,7 +2248,7 @@
self.sa_session.add( primary_data )
self.sa_session.flush()
# Move data from temp location to dataset location
- self.app.object_store.update_from_file(primary_data.dataset, filename, create=True)
+ self.app.object_store.update_from_file(primary_data.dataset, file_name=filename, create=True)
primary_data.set_size()
primary_data.name = "%s (%s)" % ( outdata.name, designation )
primary_data.info = outdata.info
diff -r 9b403ed9ec16789e2100e9921bed956789b47613 -r 319c7ee6873c53e57aa6b37271cb7c6afb067612 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -367,6 +367,22 @@
shed_tool_conf = shed_tool_conf.replace( './', '', 1 )
shed_tool_conf_select_field = None
tool_panel_section_select_field = build_tool_panel_section_select_field( trans )
+ if includes_tools:
+ # If we're installing a single repository that contains a readme file, get its contents to display.
+ decoded_repo_info_dict = tool_shed_decode( repo_info_dict )
+ if len( decoded_repo_info_dict ) == 1:
+ name = decoded_repo_info_dict.keys()[ 0 ]
+ repo_info_tuple = decoded_repo_info_dict[ name ]
+ description, repository_clone_url, changeset_revision = repo_info_tuple
+ owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
+ url = '%s/repository/get_readme?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % ( tool_shed_url, name, owner, changeset_revision )
+ response = urllib2.urlopen( url )
+ readme_text = response.read()
+ response.close()
+ else:
+ readme_text = ''
+ else:
+ readme_text = ''
return trans.fill_template( '/admin/tool_shed_repository/select_tool_panel_section.mako',
tool_shed_url=tool_shed_url,
repo_info_dict=repo_info_dict,
@@ -375,6 +391,7 @@
shed_tool_conf_select_field=shed_tool_conf_select_field,
tool_panel_section_select_field=tool_panel_section_select_field,
new_tool_panel_section=new_tool_panel_section,
+ readme_text=readme_text,
message=message,
status=status )
@web.expose
diff -r 9b403ed9ec16789e2100e9921bed956789b47613 -r 319c7ee6873c53e57aa6b37271cb7c6afb067612 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -86,6 +86,7 @@
self.cloud_controller_instance = False
self.server_name = ''
self.job_manager = ''
+ self.default_job_handlers = []
self.job_handlers = []
self.tool_handlers = []
self.tool_runners = []
diff -r 9b403ed9ec16789e2100e9921bed956789b47613 -r 319c7ee6873c53e57aa6b37271cb7c6afb067612 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -773,6 +773,25 @@
( galaxy_url, url_for( '/', qualified=True ), encoded_repo_info_dict, str( includes_tools ) )
return trans.response.send_redirect( url )
@web.expose
+ def get_readme( self, trans, **kwd ):
+ """
+ If the received changeset_revision includes a file named readme (case ignored), return its contents.
+ """
+ name = kwd[ 'name' ]
+ owner = kwd[ 'owner' ]
+ changeset_revision = kwd[ 'changeset_revision' ]
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ for root, dirs, files in os.walk( repo_dir ):
+ for name in files:
+ if name.lower() in [ 'readme', 'read_me' ]:
+ f = open( os.path.join( root, name ), 'r' )
+ text = f.read()
+ f.close()
+ return str( text )
+ return ''
+ @web.expose
def get_tool_versions( self, trans, **kwd ):
"""
For each valid /downloadable change set (up to the received changeset_revision) in the
diff -r 9b403ed9ec16789e2100e9921bed956789b47613 -r 319c7ee6873c53e57aa6b37271cb7c6afb067612 templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -62,3 +62,13 @@
</form></div></div>
+%if readme_text:
+ <div class="toolForm">
+ <div class="toolFormTitle">Repository README file (may contain important installation or license information)</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <pre>${readme_text}</pre>
+ </div>
+ </div>
+ </div>
+%endif
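
The get_readme method merged in above walks the repository checkout looking for a readme file, ignoring case. A minimal standalone sketch of that lookup (repo_dir is any local directory; this is a simplification, not the controller method itself):

    import os

    def find_readme_text(repo_dir):
        # Return the contents of the first file named 'readme' or 'read_me'
        # (case ignored) anywhere under repo_dir, or '' if there is none.
        for root, dirs, files in os.walk(repo_dir):
            for file_name in files:
                if file_name.lower() in ('readme', 'read_me'):
                    f = open(os.path.join(root, file_name), 'r')
                    try:
                        return f.read()
                    finally:
                        f.close()
        return ''

    # print(find_readme_text('/tmp/checkouts/my_repo'))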
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: If installing a single tool shed repository that contains a readme file, display its contents on the tool panel section selection page.
by Bitbucket 02 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/41aa295fa871/
changeset: 41aa295fa871
user: greg
date: 2012-04-02 17:25:46
summary: If installing a single tool shed repository that contains a readme file, display its contents on the tool panel section selection page.
affected #: 4 files
diff -r e8e6bb539b85533d8aff54adbacfcfd548ba8a8e -r 41aa295fa871ef7f81f12a701ec59acc16c3ae9c lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -367,6 +367,22 @@
shed_tool_conf = shed_tool_conf.replace( './', '', 1 )
shed_tool_conf_select_field = None
tool_panel_section_select_field = build_tool_panel_section_select_field( trans )
+ if includes_tools:
+ # If we're installing a single repository that contains a readme file, get its contents to display.
+ decoded_repo_info_dict = tool_shed_decode( repo_info_dict )
+ if len( decoded_repo_info_dict ) == 1:
+ name = decoded_repo_info_dict.keys()[ 0 ]
+ repo_info_tuple = decoded_repo_info_dict[ name ]
+ description, repository_clone_url, changeset_revision = repo_info_tuple
+ owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
+ url = '%s/repository/get_readme?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % ( tool_shed_url, name, owner, changeset_revision )
+ response = urllib2.urlopen( url )
+ readme_text = response.read()
+ response.close()
+ else:
+ readme_text = ''
+ else:
+ readme_text = ''
return trans.fill_template( '/admin/tool_shed_repository/select_tool_panel_section.mako',
tool_shed_url=tool_shed_url,
repo_info_dict=repo_info_dict,
@@ -375,6 +391,7 @@
shed_tool_conf_select_field=shed_tool_conf_select_field,
tool_panel_section_select_field=tool_panel_section_select_field,
new_tool_panel_section=new_tool_panel_section,
+ readme_text=readme_text,
message=message,
status=status )
@web.expose
diff -r e8e6bb539b85533d8aff54adbacfcfd548ba8a8e -r 41aa295fa871ef7f81f12a701ec59acc16c3ae9c lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -86,6 +86,7 @@
self.cloud_controller_instance = False
self.server_name = ''
self.job_manager = ''
+ self.default_job_handlers = []
self.job_handlers = []
self.tool_handlers = []
self.tool_runners = []
diff -r e8e6bb539b85533d8aff54adbacfcfd548ba8a8e -r 41aa295fa871ef7f81f12a701ec59acc16c3ae9c lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -773,6 +773,25 @@
( galaxy_url, url_for( '/', qualified=True ), encoded_repo_info_dict, str( includes_tools ) )
return trans.response.send_redirect( url )
@web.expose
+ def get_readme( self, trans, **kwd ):
+ """
+ If the received changeset_revision includes a file named readme (case ignored), return its contents.
+ """
+ name = kwd[ 'name' ]
+ owner = kwd[ 'owner' ]
+ changeset_revision = kwd[ 'changeset_revision' ]
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ for root, dirs, files in os.walk( repo_dir ):
+ for name in files:
+ if name.lower() in [ 'readme', 'read_me' ]:
+ f = open( os.path.join( root, name ), 'r' )
+ text = f.read()
+ f.close()
+ return str( text )
+ return ''
+ @web.expose
def get_tool_versions( self, trans, **kwd ):
"""
For each valid /downloadable change set (up to the received changeset_revision) in the
diff -r e8e6bb539b85533d8aff54adbacfcfd548ba8a8e -r 41aa295fa871ef7f81f12a701ec59acc16c3ae9c templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -62,3 +62,13 @@
</form></div></div>
+%if readme_text:
+ <div class="toolForm">
+ <div class="toolFormTitle">Repository README file (may contain important installation or license information)</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <pre>${readme_text}</pre>
+ </div>
+ </div>
+ </div>
+%endif
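
On the Galaxy side, the readme text is pulled from the tool shed over HTTP, as in the admin_toolshed hunk above. A hedged sketch of that call (the URL layout mirrors the commit; the error handling and Python 3 import fallback are additions of this sketch, and the arguments are assumed to be URL-safe already):

    try:
        from urllib2 import urlopen, URLError           # Python 2, as in the code above
    except ImportError:
        from urllib.request import urlopen              # Python 3 fallback
        from urllib.error import URLError

    def fetch_readme_text(tool_shed_url, name, owner, changeset_revision):
        # Ask the tool shed's get_readme controller method for the repository
        # readme; return '' if the request fails.
        url = '%s/repository/get_readme?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
            (tool_shed_url, name, owner, changeset_revision)
        try:
            response = urlopen(url)
            try:
                return response.read()
            finally:
                response.close()
        except URLError:
            return ''

    # readme_text = fetch_readme_text('http://toolshed.g2.bx.psu.edu', 'my_repo', 'greg', 'abc123def456')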
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fix for DiskObjectStore._construct_path receiving extra keywords.
by Bitbucket 02 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/e8e6bb539b85/
changeset: e8e6bb539b85
user: dan
date: 2012-04-02 17:00:56
summary: Fix for DiskObjectStore._construct_path receiving extra keywords.
affected #: 1 file
diff -r 6ceefdcedee595904231c4bc2ee78417bfec637a -r e8e6bb539b85533d8aff54adbacfcfd548ba8a8e lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -223,7 +223,7 @@
if not os.path.exists(path):
return self._construct_path(obj, base_dir=base_dir, dir_only=dir_only, extra_dir=extra_dir, extra_dir_at_root=extra_dir_at_root, alt_name=alt_name)
- def _construct_path(self, obj, old_style=False, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ def _construct_path(self, obj, old_style=False, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None, **kwargs):
""" Construct the expected absolute path for accessing the object
identified by `obj`.id.
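
The one-line fix above adds **kwargs so DiskObjectStore._construct_path tolerates keywords intended for other object store backends. A tiny illustration of the pattern (the function and keyword names below are made up, not Galaxy's):

    def construct_path(obj_id, base_dir=None, dir_only=False, extra_dir=None, **kwargs):
        # Extra keywords that only other backends care about are accepted and
        # ignored instead of raising TypeError.
        parts = [base_dir or '/tmp/objects']
        if extra_dir:
            parts.append(extra_dir)
        if not dir_only:
            parts.append(str(obj_id))
        return '/'.join(parts)

    # 'obj_dir' is a hypothetical backend-specific keyword; the call still works.
    print(construct_path(42, base_dir='/data', obj_dir=True))   # /data/42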
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fix for call to object_store.update_from_file for collect primary data in tools/__init__.py.
by Bitbucket 02 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6ceefdcedee5/
changeset: 6ceefdcedee5
user: dan
date: 2012-04-02 16:48:27
summary: Fix for call to object_store.update_from_file for collect primary data in tools/__init__.py.
affected #: 1 file
diff -r 24b474fd29bcfddabd996e985c72872e21c8c0f8 -r 6ceefdcedee595904231c4bc2ee78417bfec637a lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2180,7 +2180,7 @@
sa_session=self.sa_session )
self.app.security_agent.copy_dataset_permissions( outdata.dataset, child_dataset.dataset )
# Move data from temp location to dataset location
- self.app.object_store.update_from_file(child_dataset.dataset, filename, create=True)
+ self.app.object_store.update_from_file(child_dataset.dataset, file_name=filename, create=True)
self.sa_session.add( child_dataset )
self.sa_session.flush()
child_dataset.set_size()
@@ -2247,7 +2247,7 @@
self.sa_session.add( primary_data )
self.sa_session.flush()
# Move data from temp location to dataset location
- self.app.object_store.update_from_file(primary_data.dataset, filename, create=True)
+ self.app.object_store.update_from_file(primary_data.dataset, file_name=filename, create=True)
primary_data.set_size()
primary_data.name = "%s (%s)" % ( outdata.name, designation )
primary_data.info = outdata.info
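
Passing file_name= by keyword matters because the source file is not the second positional parameter of update_from_file. A stand-in class (the signature below is illustrative, not the exact ObjectStore API) shows the failure mode the fix avoids:

    class FakeObjectStore(object):
        # Hypothetical signature: several optional path-related parameters come
        # before file_name, so a positional filename would bind to base_dir.
        def update_from_file(self, obj, base_dir=None, extra_dir=None,
                             alt_name=None, file_name=None, create=False):
            if file_name is None:
                raise ValueError('no source file given')
            return 'copy %s into the store for %s' % (file_name, obj)

    store = FakeObjectStore()
    print(store.update_from_file('dataset_1', file_name='/tmp/out.dat', create=True))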
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fix for --multiallelic in Unified Genotyper.
by Bitbucket 02 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/24b474fd29bc/
changeset: 24b474fd29bc
user: dan
date: 2012-04-02 16:09:00
summary: Fix for --multiallelic in Unified Genotyper.
affected #: 1 file
diff -r 4dd7564268fcd4f5bd3275fa3b5da012b7adefb5 -r 24b474fd29bcfddabd996e985c72872e21c8c0f8 tools/gatk/unified_genotyper.xml
--- a/tools/gatk/unified_genotyper.xml
+++ b/tools/gatk/unified_genotyper.xml
@@ -138,6 +138,7 @@
--excludeAnnotation "${annotation}"
#end for
#end if
+ ${analysis_param_type.multiallelic}
'
## #if str( $analysis_param_type.snpEff_rod_bind_type.snpEff_rod_bind_type_selector ) == 'set_snpEff':
## -p '--annotation "SnpEff"'
@@ -145,7 +146,6 @@
## #else:
## -p '--excludeAnnotation "SnpEff"'
## #end if
- ${analysis_param_type.multiallelic}
#end if
</command><inputs>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Allow job handlers to be exempted from the default random choice pool.
by Bitbucket 31 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4dd7564268fc/
changeset: 4dd7564268fc
user: natefoo
date: 2012-03-31 16:06:43
summary: Allow job handlers to be exempted from the default random choice pool.
affected #: 3 files
diff -r 40abca016f1ebd7be7adb362614eb16d8f2116bf -r 4dd7564268fcd4f5bd3275fa3b5da012b7adefb5 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -193,6 +193,7 @@
# Store advanced job management config
self.job_manager = kwargs.get('job_manager', self.server_name).strip()
self.job_handlers = [ x.strip() for x in kwargs.get('job_handlers', self.server_name).split(',') ]
+ self.default_job_handlers = [ x.strip() for x in kwargs.get('default_job_handlers', ','.join( self.job_handlers ) ).split(',') ]
# Use database for IPC unless this is a standalone server (or multiple servers doing self dispatching in memory)
self.track_jobs_in_database = True
if ( len( self.job_handlers ) == 1 ) and ( self.job_handlers[0] == self.server_name ) and ( self.job_manager == self.server_name ):
diff -r 40abca016f1ebd7be7adb362614eb16d8f2116bf -r 4dd7564268fcd4f5bd3275fa3b5da012b7adefb5 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -791,7 +791,7 @@
self.parallelism = None
# Set job handler(s). Each handler is a dict with 'url' and, optionally, 'params'.
self_id = self.id.lower()
- self.job_handlers = [ { "name" : name } for name in self.app.config.job_handlers ]
+ self.job_handlers = [ { "name" : name } for name in self.app.config.default_job_handlers ]
# Set custom handler(s) if they're defined.
if self_id in self.app.config.tool_handlers:
self.job_handlers = self.app.config.tool_handlers[ self_id ]
diff -r 40abca016f1ebd7be7adb362614eb16d8f2116bf -r 4dd7564268fcd4f5bd3275fa3b5da012b7adefb5 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -562,6 +562,13 @@
# comma-separated list.
#job_handlers = main
+# By default, a handler from job_handlers will be selected at random if the
+# tool to run does not specify a handler below in [galaxy:tool_handlers]. If you
+# want certain handlers to only handle jobs for tools/params explicitly
+# assigned below, use default_job_handlers to specify which handlers should be
+# used for jobs without explicit handlers.
+#default_job_handlers = main
+
# This enables splitting of jobs into tasks, if specified by the particular tool config.
# This is a new feature and not recommended for production servers yet.
#use_tasked_jobs = False
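
A compact sketch of the new fallback: default_job_handlers narrows the random-choice pool but defaults to the full job_handlers list when unset (the kwargs dict stands in for the parsed ini options; names mirror the config.py hunk above):

    def parse_handlers(kwargs, server_name='main'):
        job_handlers = [x.strip() for x in kwargs.get('job_handlers', server_name).split(',')]
        default_job_handlers = [x.strip() for x in
                                kwargs.get('default_job_handlers', ','.join(job_handlers)).split(',')]
        return job_handlers, default_job_handlers

    # realtime_handler only receives jobs explicitly assigned in [galaxy:tool_handlers]:
    print(parse_handlers({'job_handlers': 'handler0,handler1,realtime_handler',
                          'default_job_handlers': 'handler0,handler1'}))
    # Without the new option, everything stays in the pool:
    print(parse_handlers({'job_handlers': 'handler0,handler1'}))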
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Small fix for setting upload path in GenomeSpace export tool.
by Bitbucket 30 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/40abca016f1e/
changeset: 40abca016f1e
user: dan
date: 2012-03-30 23:08:35
summary: Small fix for setting upload path in GenomeSpace export tool.
affected #: 1 file
diff -r de97f00ff16feb64cd26d3393a68c303f33e5e33 -r 40abca016f1ebd7be7adb362614eb16d8f2116bf tools/genomespace/genomespace_exporter.py
--- a/tools/genomespace/genomespace_exporter.py
+++ b/tools/genomespace/genomespace_exporter.py
@@ -49,7 +49,8 @@
def get_directory( url_opener, dm_url, path ):
url = dm_url
- for sub_path in path:
+ i = None
+ for i, sub_path in enumerate( path ):
url = "%s/%s" % ( url, sub_path )
dir_request = urllib2.Request( url, headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' } )
dir_request.get_method = lambda: 'GET'
@@ -59,10 +60,14 @@
#print "e", e, url #punting, assuming lack of permisions at this low of a level...
continue
break
- return dir_dict
+ if i is not None:
+ path = path[i+1:]
+ else:
+ path = []
+ return ( dir_dict, path )
def get_default_directory( url_opener, dm_url ):
- return get_directory( url_opener, dm_url, ["defaultdirectory"] )
+ return get_directory( url_opener, dm_url, ["defaultdirectory"] )[0]
def create_directory( url_opener, directory_dict, new_dir, dm_url ):
payload = { "isDirectory": True }
@@ -142,8 +147,8 @@
dm_url = genomespace_site_dict['dmServer']
#get default directory
if target_directory and target_directory[0] == '/':
- directory_dict = get_directory( url_opener, dm_url, [ "%s/%s/%s" % ( GENOMESPACE_API_VERSION_STRING, 'file', target_directory[1] ) ] + target_directory[2:] )['directory']
- target_directory.pop(0)
+ directory_dict, target_directory = get_directory( url_opener, dm_url, [ "%s/%s/%s" % ( GENOMESPACE_API_VERSION_STRING, 'file', target_directory[1] ) ] + target_directory[2:] )
+ directory_dict = directory_dict['directory']
else:
directory_dict = get_default_directory( url_opener, dm_url )['directory']
#what directory to stuff this in
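
The GenomeSpace fix changes get_directory to also report which path segments it did not consume, so the caller can create only the missing directories. A simplified, self-contained sketch of that contract (a dict of readable URLs replaces the real data manager HTTP requests):

    def get_directory(readable, path):
        # Descend through 'path'; stop at the first prefix we can read and
        # return (its metadata, the segments still to be created). Mirrors the
        # (dir_dict, path) return value introduced above.
        url = ''
        dir_dict = None
        i = None
        for i, sub_path in enumerate(path):
            url = '%s/%s' % (url, sub_path)
            if url in readable:
                dir_dict = readable[url]
                break
        remaining = path[i + 1:] if i is not None else []
        return dir_dict, remaining

    print(get_directory({'/v1.0/file/home': {'name': 'home'}},
                        ['v1.0', 'file', 'home', 'sub', 'dir']))
    # -> ({'name': 'home'}, ['sub', 'dir'])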
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Additional setting to fix tool shed config which was broken due to the additions in the Galaxy webapp in recent commits.
by Bitbucket 30 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/de97f00ff16f/
changeset: de97f00ff16f
user: greg
date: 2012-03-30 20:37:55
summary: Additional setting to fix tool shed config which was broken due to the additions in the Galaxy webapp in recent commits.
affected #: 1 file
diff -r 25db1453bb2e55d7d58d407364ce71001bd24096 -r de97f00ff16feb64cd26d3393a68c303f33e5e33 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -84,6 +84,11 @@
self.screencasts_url = kwargs.get( 'screencasts_url', None )
self.log_events = False
self.cloud_controller_instance = False
+ self.server_name = ''
+ self.job_manager = ''
+ self.job_handlers = []
+ self.tool_handlers = []
+ self.tool_runners = []
# Proxy features
self.apache_xsendfile = kwargs.get( 'apache_xsendfile', False )
self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Enhance odict to enable insertion into a specified location in its keys, and enhance integrated_tool_panel generation to take advantage of this.
by Bitbucket 30 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/25db1453bb2e/
changeset: 25db1453bb2e
user: greg
date: 2012-03-30 20:19:44
summary: Enhance odict to enable insertion into a specified location in its keys, and enhance integrated_tool_panel generation to take advantage of this. New tool panel items will now be inserted into an existing integrated_tool_panel.xml file at the position the new item occupies in whatever tool panel configuration file it was added to.
affected #: 2 files
diff -r 0626ddb49084b94248e9621d584409d398fd546d -r 25db1453bb2e55d7d58d407364ce71001bd24096 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -114,17 +114,17 @@
tool_path = self.tool_root_dir
# Only load the panel_dict under certain conditions.
load_panel_dict = not self.integrated_tool_panel_config_has_contents
- for elem in root:
+ for index, elem in enumerate( root ):
if parsing_shed_tool_conf:
config_elems.append( elem )
if elem.tag == 'tool':
- self.load_tool_tag_set( elem, self.tool_panel, self.integrated_tool_panel, tool_path, load_panel_dict, guid=elem.get( 'guid' ) )
+ self.load_tool_tag_set( elem, self.tool_panel, self.integrated_tool_panel, tool_path, load_panel_dict, guid=elem.get( 'guid' ), index=index )
elif elem.tag == 'workflow':
- self.load_workflow_tag_set( elem, self.tool_panel, self.integrated_tool_panel, load_panel_dict )
+ self.load_workflow_tag_set( elem, self.tool_panel, self.integrated_tool_panel, load_panel_dict, index=index )
elif elem.tag == 'section':
- self.load_section_tag_set( elem, tool_path, load_panel_dict )
+ self.load_section_tag_set( elem, tool_path, load_panel_dict, index=index )
elif elem.tag == 'label':
- self.load_label_tag_set( elem, self.tool_panel, self.integrated_tool_panel, load_panel_dict )
+ self.load_label_tag_set( elem, self.tool_panel, self.integrated_tool_panel, load_panel_dict, index=index )
if parsing_shed_tool_conf:
shed_tool_conf_dict = dict( config_filename=config_filename,
tool_path=tool_path,
@@ -286,7 +286,7 @@
self.app.model.ToolShedRepository.table.c.owner == owner,
self.app.model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \
.first()
- def load_tool_tag_set( self, elem, panel_dict, integrated_panel_dict, tool_path, load_panel_dict, guid=None ):
+ def load_tool_tag_set( self, elem, panel_dict, integrated_panel_dict, tool_path, load_panel_dict, guid=None, index=None ):
try:
path = elem.get( "file" )
if guid is None:
@@ -354,10 +354,13 @@
if load_panel_dict:
panel_dict[ key ] = tool
# Always load the tool into the integrated_panel_dict, or it will not be included in the integrated_tool_panel.xml file.
- integrated_panel_dict[ key ] = tool
+ if key in integrated_panel_dict or index is None:
+ integrated_panel_dict[ key ] = tool
+ else:
+ integrated_panel_dict.insert( index, key, tool )
except:
log.exception( "Error reading tool from path: %s" % path )
- def load_workflow_tag_set( self, elem, panel_dict, integrated_panel_dict, load_panel_dict ):
+ def load_workflow_tag_set( self, elem, panel_dict, integrated_panel_dict, load_panel_dict, index=None ):
try:
# TODO: should id be encoded?
workflow_id = elem.get( 'id' )
@@ -367,16 +370,22 @@
if load_panel_dict:
panel_dict[ key ] = workflow
# Always load workflows into the integrated_panel_dict.
- integrated_panel_dict[ key ] = workflow
+ if key in integrated_panel_dict or index is None:
+ integrated_panel_dict[ key ] = workflow
+ else:
+ integrated_panel_dict.insert( index, key, workflow )
except:
log.exception( "Error loading workflow: %s" % workflow_id )
- def load_label_tag_set( self, elem, panel_dict, integrated_panel_dict, load_panel_dict ):
+ def load_label_tag_set( self, elem, panel_dict, integrated_panel_dict, load_panel_dict, index=None ):
label = ToolSectionLabel( elem )
key = 'label_' + label.id
if load_panel_dict:
panel_dict[ key ] = label
- integrated_panel_dict[ key ] = label
- def load_section_tag_set( self, elem, tool_path, load_panel_dict ):
+ if key in integrated_panel_dict or index is None:
+ integrated_panel_dict[ key ] = label
+ else:
+ integrated_panel_dict.insert( index, key, label )
+ def load_section_tag_set( self, elem, tool_path, load_panel_dict, index=None ):
key = 'section_' + elem.get( "id" )
if key in self.tool_panel:
section = self.tool_panel[ key ]
@@ -390,17 +399,20 @@
else:
integrated_section = ToolSection( elem )
integrated_elems = integrated_section.elems
- for sub_elem in elem:
+ for sub_index, sub_elem in enumerate( elem ):
if sub_elem.tag == 'tool':
- self.load_tool_tag_set( sub_elem, elems, integrated_elems, tool_path, load_panel_dict, guid=sub_elem.get( 'guid' ) )
+ self.load_tool_tag_set( sub_elem, elems, integrated_elems, tool_path, load_panel_dict, guid=sub_elem.get( 'guid' ), index=sub_index )
elif sub_elem.tag == 'workflow':
- self.load_workflow_tag_set( sub_elem, elems, integrated_elems, load_panel_dict )
+ self.load_workflow_tag_set( sub_elem, elems, integrated_elems, load_panel_dict, index=sub_index )
elif sub_elem.tag == 'label':
- self.load_label_tag_set( sub_elem, elems, integrated_elems, load_panel_dict )
+ self.load_label_tag_set( sub_elem, elems, integrated_elems, load_panel_dict, index=sub_index )
if load_panel_dict:
self.tool_panel[ key ] = section
# Always load sections into the integrated_tool_panel.
- self.integrated_tool_panel[ key ] = integrated_section
+ if key in self.integrated_tool_panel or index is None:
+ self.integrated_tool_panel[ key ] = integrated_section
+ else:
+ self.integrated_tool_panel.insert( index, key, integrated_section )
def load_tool( self, config_file, guid=None ):
"""Load a single tool from the file named by `config_file` and return an instance of `Tool`."""
# Parse XML configuration file and get the root element
diff -r 0626ddb49084b94248e9621d584409d398fd546d -r 25db1453bb2e55d7d58d407364ce71001bd24096 lib/galaxy/util/odict.py
--- a/lib/galaxy/util/odict.py
+++ b/lib/galaxy/util/odict.py
@@ -11,23 +11,22 @@
This dictionary class extends UserDict to record the order in which items are
added. Calling keys(), values(), items(), etc. will return results in this
order.
+ """
+ def __init__( self, dict = None ):
+ self._keys = []
+ UserDict.__init__( self, dict )
- I've added iterkeys, itervalues, iteritems
- """
- def __init__(self, dict = None):
- self._keys = []
- UserDict.__init__(self, dict)
+ def __delitem__( self, key ):
+ UserDict.__delitem__( self, key )
+ self._keys.remove( key )
- def __delitem__(self, key):
- UserDict.__delitem__(self, key)
- self._keys.remove(key)
+ def __setitem__( self, key, item ):
+ UserDict.__setitem__( self, key, item )
+ if key not in self._keys:
+ self._keys.append( key )
- def __setitem__(self, key, item):
- UserDict.__setitem__(self, key, item)
- if key not in self._keys: self._keys.append(key)
-
- def clear(self):
- UserDict.clear(self)
+ def clear( self ):
+ UserDict.clear( self )
self._keys = []
def copy(self):
@@ -35,49 +34,43 @@
new.update( self )
return new
- def items(self):
- return zip(self._keys, self.values())
+ def items( self ):
+ return zip( self._keys, self.values() )
- def keys(self):
+ def keys( self ):
return self._keys[:]
- def popitem(self):
+ def popitem( self ):
try:
key = self._keys[-1]
except IndexError:
- raise KeyError('dictionary is empty')
+ raise KeyError( 'dictionary is empty' )
+ val = self[ key ]
+ del self[ key ]
+ return ( key, val )
- val = self[key]
- del self[key]
+ def setdefault( self, key, failobj=None ):
+ if key not in self._keys:
+ self._keys.append( key )
+ return UserDict.setdefault( self, key, failobj )
- return (key, val)
+ def update( self, dict ):
+ for ( key, val ) in dict.items():
+ self.__setitem__( key, val )
- def setdefault(self, key, failobj = None):
- if key not in self._keys: self._keys.append(key)
- return UserDict.setdefault(self, key, failobj)
+ def values( self ):
+ return map( self.get, self._keys )
- def update(self, dict):
- UserDict.update(self, dict)
- for key in dict.keys():
- if key not in self._keys: self._keys.append(key)
-
- def update(self, dict):
- for (key,val) in dict.items():
- self.__setitem__(key,val)
-
- def values(self):
- return map(self.get, self._keys)
-
- def iterkeys(self):
+ def iterkeys( self ):
return iter( self._keys )
- def itervalues(self):
+ def itervalues( self ):
for key in self._keys:
- yield self.get(key)
+ yield self.get( key )
- def iteritems(self):
+ def iteritems( self ):
for key in self._keys:
- yield key, self.get(key)
+ yield key, self.get( key )
def __iter__( self ):
for key in self._keys:
@@ -86,3 +79,7 @@
def reverse( self ):
self._keys.reverse()
+ def insert( self, index, key, item ):
+ if key not in self._keys:
+ self._keys.insert( index, key )
+ UserDict.__setitem__( self, key, item )
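
The new odict.insert() is what lets a freshly installed item land at the position its tool panel config gives it instead of being appended to integrated_tool_panel.xml. A tiny self-contained ordered-dict sketch of that behaviour (MiniOdict is a stand-in, not galaxy.util.odict itself):

    class MiniOdict(dict):
        # Keeps key order like galaxy.util.odict and adds insert(index, key, item).
        def __init__(self):
            dict.__init__(self)
            self._keys = []
        def __setitem__(self, key, item):
            dict.__setitem__(self, key, item)
            if key not in self._keys:
                self._keys.append(key)
        def keys(self):
            return self._keys[:]
        def insert(self, index, key, item):
            if key not in self._keys:
                self._keys.insert(index, key)
            dict.__setitem__(self, key, item)

    panel = MiniOdict()
    panel['section_get_data'] = 'Get Data'
    panel['section_statistics'] = 'Statistics'
    # A newly installed section is inserted where its config file placed it:
    panel.insert(1, 'section_shed_tools', 'Shed Tools')
    print(panel.keys())   # ['section_get_data', 'section_shed_tools', 'section_statistics']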
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Allow for job handler selection based on job params (like source).
by Bitbucket 30 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0626ddb49084/
changeset: 0626ddb49084
user: natefoo
date: 2012-03-30 20:10:45
summary: Allow for job handler selection based on job params (like source).
affected #: 4 files
diff -r 12b14f3e78e915c2a87649fe406386fbcd4b8c02 -r 0626ddb49084b94248e9621d584409d398fd546d lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -198,13 +198,27 @@
if ( len( self.job_handlers ) == 1 ) and ( self.job_handlers[0] == self.server_name ) and ( self.job_manager == self.server_name ):
self.track_jobs_in_database = False
# Store per-tool runner configs
+ self.tool_handlers = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_handlers', 'name' )
+ self.tool_runners = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_runners', 'url' )
+ self.datatypes_config = kwargs.get( 'datatypes_config_file', 'datatypes_conf.xml' )
+ # Cloud configuration options
+ self.enable_cloud_launch = string_as_bool( kwargs.get( 'enable_cloud_launch', False ) )
+ # Galaxy messaging (AMQP) configuration options
+ self.amqp = {}
try:
- tool_runners_config = global_conf_parser.items("galaxy:tool_runners")
+ amqp_config = global_conf_parser.items("galaxy_amqp")
+ except ConfigParser.NoSectionError:
+ amqp_config = {}
+ for k, v in amqp_config:
+ self.amqp[k] = v
+ def __read_tool_job_config( self, global_conf_parser, section, key ):
+ try:
+ tool_runners_config = global_conf_parser.items( section )
# Process config to group multiple configs for the same tool.
- tool_runners = {}
+ rval = {}
for entry in tool_runners_config:
- tool_config, url = entry
+ tool_config, val = entry
tool = None
runner_dict = {}
if tool_config.find("[") != -1:
@@ -219,29 +233,18 @@
tool = tool_config
# Add runner URL.
- runner_dict[ 'url' ] = url
+ runner_dict[ key ] = val
# Create tool entry if necessary.
- if tool not in tool_runners:
- tool_runners[ tool ] = []
+ if tool not in rval:
+ rval[ tool ] = []
# Add entry to runners.
- tool_runners[ tool ].append( runner_dict )
+ rval[ tool ].append( runner_dict )
- self.tool_runners = tool_runners
+ return rval
except ConfigParser.NoSectionError:
- self.tool_runners = []
- self.datatypes_config = kwargs.get( 'datatypes_config_file', 'datatypes_conf.xml' )
- # Cloud configuration options
- self.enable_cloud_launch = string_as_bool( kwargs.get( 'enable_cloud_launch', False ) )
- # Galaxy messaging (AMQP) configuration options
- self.amqp = {}
- try:
- amqp_config = global_conf_parser.items("galaxy_amqp")
- except ConfigParser.NoSectionError:
- amqp_config = {}
- for k, v in amqp_config:
- self.amqp[k] = v
+ return []
def get( self, key, default ):
return self.config_dict.get( key, default )
def get_bool( self, key, default ):
diff -r 12b14f3e78e915c2a87649fe406386fbcd4b8c02 -r 0626ddb49084b94248e9621d584409d398fd546d lib/galaxy/jobs/manager.py
--- a/lib/galaxy/jobs/manager.py
+++ b/lib/galaxy/jobs/manager.py
@@ -144,7 +144,7 @@
pass
for job in jobs_to_check:
- job.handler = self.__select_handler( job )
+ job.handler = self.__get_handler( job )
log.debug( "(%s) Job assigned to handler '%s'" % ( job.id, job.handler ) )
self.sa_session.add( job )
@@ -157,9 +157,15 @@
for job in jobs_to_check:
self.job_handler.job_queue.put( job.id, job.tool_id )
- def __select_handler( self, job ):
- # TODO: handler selection based on params, tool, etc.
- return random.choice( self.app.config.job_handlers )
+ def __get_handler( self, job ):
+ try:
+ params = None
+ if job.params:
+ params = from_json_string( job.params )
+ return self.app.toolbox.tools_by_id.get( job.tool_id, None ).get_job_handler( params )
+ except:
+ log.exception( "(%s) Caught exception attempting to get tool-specific job handler for tool '%s', selecting at random from available handlers instead:" % ( job.id, job.tool_id ) )
+ return random.choice( self.app.config.job_handlers )
def put( self, job_id, tool ):
"""Add a job to the queue (by job identifier)"""
diff -r 12b14f3e78e915c2a87649fe406386fbcd4b8c02 -r 0626ddb49084b94248e9621d584409d398fd546d lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -5,7 +5,7 @@
pkg_resources.require( "simplejson" )
-import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess
+import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess, random
import simplejson
import binascii
from UserDict import DictMixin
@@ -682,31 +682,35 @@
if tool_version:
return tool_version.get_version_ids( self.app )
return []
- def get_job_runner( self, job_params=None ):
- # Look through runners to find one with matching parameters.
- selected_runner = None
- if len( self.job_runners ) == 1:
- # Most tools have a single runner.
- selected_runner = self.job_runners[0]
+ def __get_job_run_config( self, run_configs, key, job_params=None ):
+ # Look through runners/handlers to find one with matching parameters.
+ available_configs = []
+ if len( run_configs ) == 1:
+ # Most tools have a single config.
+ return run_configs[0][ key ] # return to avoid random when this will be the case most of the time
elif job_params is None:
- # Use job runner with no params
- for runner in self.job_runners:
- if "params" not in runner:
- selected_runner = runner
+ # Use job config with no params
+ for config in run_configs:
+ if "params" not in config:
+ available_configs.append( config )
else:
- # Find runner with matching parameters.
- for runner in self.job_runners:
- if "params" in runner:
+ # Find config with matching parameters.
+ for config in run_configs:
+ if "params" in config:
match = True
- runner_params = runner[ "params" ]
+ config_params = config[ "params" ]
for param, value in job_params.items():
- if param not in runner_params or \
- runner_params[ param ] != job_params[ param ]:
+ if param not in config_params or \
+ config_params[ param ] != job_params[ param ]:
match = False
break
if match:
- selected_runner = runner
- return selected_runner[ "url" ]
+ available_configs.append( config )
+ return random.choice( available_configs )[ key ]
+ def get_job_runner( self, job_params=None ):
+ return self.__get_job_run_config( self.job_runners, key='url', job_params=job_params )
+ def get_job_handler( self, job_params=None ):
+ return self.__get_job_run_config( self.job_handlers, key='name', job_params=job_params )
def parse( self, root, guid=None ):
"""
Read tool configuration from the element `root` and fill in `self`.
@@ -773,6 +777,12 @@
self.parallelism = ToolParallelismInfo(parallelism)
else:
self.parallelism = None
+ # Set job handler(s). Each handler is a dict with 'url' and, optionally, 'params'.
+ self_id = self.id.lower()
+ self.job_handlers = [ { "name" : name } for name in self.app.config.job_handlers ]
+ # Set custom handler(s) if they're defined.
+ if self_id in self.app.config.tool_handlers:
+ self.job_handlers = self.app.config.tool_handlers[ self_id ]
# Set job runner(s). Each runner is a dict with 'url' and, optionally, 'params'.
if self.app.config.start_job_runners is None:
# Jobs are always local regardless of tool config if no additional
@@ -782,7 +792,6 @@
# Set job runner to the cluster default
self.job_runners = [ { "url" : self.app.config.default_cluster_job_runner } ]
# Set custom runner(s) if they're defined.
- self_id = self.id.lower()
if self_id in self.app.config.tool_runners:
self.job_runners = self.app.config.tool_runners[ self_id ]
# Is this a 'hidden' tool (hidden in tool menu)
diff -r 12b14f3e78e915c2a87649fe406386fbcd4b8c02 -r 0626ddb49084b94248e9621d584409d398fd546d universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -664,18 +664,30 @@
#pbs_stage_path =
#pbs_dataset_server =
-# ---- Tool Job Runners -----------------------------------------------------
+# ---- Per-Tool Job Management ----------------------------------------------
-# Individual per-tool job runner overrides. Parameters can be included to define
-# multiple runners per tool. E.g. to run Cufflinks jobs initiated from Trackster
+# Per-tool job handler and runner overrides. Parameters can be included to define multiple
+# runners per tool. E.g. to run Cufflinks jobs initiated from Trackster
# differently than standard Cufflinks jobs:
-# cufflinks = local:///
-# cufflinks[source@trackster] = local:///
-# If not listed here, a tool will run with the runner defined with
-# default_cluster_job_runner.
+#
+# cufflinks = local:///
+# cufflinks[source@trackster] = local:///
+
+[galaxy:tool_handlers]
+
+# By default, Galaxy will select a handler at random from the list of
+# job_handlers set above. You can override as in the following examples:
+#
+#upload1 = upload_handler
+#cufflinks[source@trackster] = realtime_handler
[galaxy:tool_runners]
+# If not listed here, a tool will run with the runner defined with
+# default_cluster_job_runner. These overrides for local:/// are done because
+# these tools can fetch data from remote sites, which may not be suitable to
+# run on a cluster (if it does not have access to the Internet, for example).
+
biomart = local:///
encode_db1 = local:///
hbvar = local:///
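
Handler selection now reuses the same params-matching logic as runner selection. A simplified standalone version of that matching (select_config is an illustrative name; like the Galaxy code, it assumes at least one config matches):

    import random

    def select_config(run_configs, key, job_params=None):
        # With a single config, use it; otherwise collect configs whose 'params'
        # match the job's params (or that declare none when the job has none)
        # and pick one at random, as in __get_job_run_config above.
        if len(run_configs) == 1:
            return run_configs[0][key]
        available = []
        for config in run_configs:
            if job_params is None:
                if 'params' not in config:
                    available.append(config)
            elif 'params' in config and all(
                    config['params'].get(p) == v for p, v in job_params.items()):
                available.append(config)
        return random.choice(available)[key]

    handlers = [{'name': 'main'},
                {'name': 'realtime_handler', 'params': {'source': 'trackster'}}]
    print(select_config(handlers, 'name'))                            # main
    print(select_config(handlers, 'name', {'source': 'trackster'}))   # realtime_handler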
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Fix the data transfer script to work with new API library content ids.
by Bitbucket 30 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/12b14f3e78e9/
changeset: 12b14f3e78e9
user: natefoo
date: 2012-03-30 15:51:04
summary: Fix the data transfer script to work with new API library content ids.
affected #: 1 file
diff -r db6b788a1fe6f8ce12f91844bd8658f6168f2ff4 -r 12b14f3e78e915c2a87649fe406386fbcd4b8c02 scripts/galaxy_messaging/server/data_transfer.py
--- a/scripts/galaxy_messaging/server/data_transfer.py
+++ b/scripts/galaxy_messaging/server/data_transfer.py
@@ -177,7 +177,7 @@
self.update_status( SampleDataset.transfer_status.ADD_TO_LIBRARY )
try:
data = {}
- data[ 'folder_id' ] = api.encode_id( self.config_id_secret, '%s.%s' % ( 'folder', self.folder_id ) )
+ data[ 'folder_id' ] = 'F%s' % api.encode_id( self.config_id_secret, self.folder_id )
data[ 'file_type' ] = 'auto'
data[ 'server_dir' ] = self.server_dir
data[ 'dbkey' ] = ''
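
The one-line change above reflects the new library content id scheme: folder ids get an 'F' prefix and only the bare id is encoded, rather than an encoded 'folder.<id>' composite. A hedged sketch with a stand-in encoder (the real encode_id in the API scripts encrypts the id with the configured id_secret):

    def encode_id(id_secret, obj_id):
        # Stand-in only; the real helper uses id_secret to encrypt obj_id.
        return 'encoded-%s-%s' % (abs(hash(id_secret)) % 10000, obj_id)

    def library_folder_content_id(id_secret, folder_id):
        # New-style API library content id for a folder.
        return 'F%s' % encode_id(id_secret, folder_id)

    print(library_folder_content_id('changeme', 123))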
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Change new functional test flags to conform more closely to previous command line flags
by Bitbucket 30 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/db6b788a1fe6/
changeset: db6b788a1fe6
user: greg
date: 2012-03-30 15:36:21
summary: Change new functional test flags to conform more closely to previous command line flags
affected #: 2 files
diff -r de2946aca8877087d761684a72ff955f46e4f5a6 -r db6b788a1fe6f8ce12f91844bd8658f6168f2ff4 run_functional_tests.sh
--- a/run_functional_tests.sh
+++ b/run_functional_tests.sh
@@ -20,23 +20,23 @@
echo "==========================================================================================================================================="
echo "'run_functional_tests.sh -id bbb' for testing one tool with id 'bbb' ('bbb' is the tool id)"
echo "'run_functional_tests.sh -sid ccc' for testing one section with sid 'ccc' ('ccc' is the string after 'section::')"
-elif [ $1 = '--migrated' ]; then
+elif [ $1 = '-migrated' ]; then
if [ ! $2 ]; then
- python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --migrated
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html -migrated
elif [ $2 = '-id' ]; then
# TODO: This option is not tested...
- python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$3 --with-nosehtml --html-report-file run_functional_tests.html --migrated
+ python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$3 --with-nosehtml --html-report-file run_functional_tests.html -migrated
else
- python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --migrated
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html -migrated
fi
-elif [ $1 = '--installed' ]; then
+elif [ $1 = '-installed' ]; then
if [ ! $2 ]; then
- python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --installed
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html -installed
elif [ $2 = '-id' ]; then
# TODO: This option is not tested...
- python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$3 --with-nosehtml --html-report-file run_functional_tests.html --installed
+ python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$3 --with-nosehtml --html-report-file run_functional_tests.html -installed
else
- python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --installed
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html -installed
fi
else
python ./scripts/functional_tests.py -v --with-nosehtml --html-report-file run_functional_tests.html $1
diff -r de2946aca8877087d761684a72ff955f46e4f5a6 -r db6b788a1fe6f8ce12f91844bd8658f6168f2ff4 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -135,8 +135,8 @@
tool_path = os.environ.get( 'GALAXY_TEST_TOOL_PATH', 'tools' )
if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_galaxy_locales
- testing_migrated_tools = '--migrated' in sys.argv
- testing_installed_tools = '--installed' in sys.argv
+ testing_migrated_tools = '-migrated' in sys.argv
+ testing_installed_tools = '-installed' in sys.argv
if testing_migrated_tools or testing_installed_tools:
sys.argv.pop()
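A minimal sketch (not the project's code) of how the renamed flag reaches scripts/functional_tests.py under the invocation style used by run_functional_tests.sh above; because the flag is removed with sys.argv.pop(), it has to be the last argument on the command line, which the shell script ensures.

import sys

# Simulate: python ./scripts/functional_tests.py -v functional.test_toolbox -installed
sys.argv = [ 'functional_tests.py', '-v', 'functional.test_toolbox', '-installed' ]

testing_migrated_tools = '-migrated' in sys.argv      # renamed from '--migrated'
testing_installed_tools = '-installed' in sys.argv    # renamed from '--installed'
if testing_migrated_tools or testing_installed_tools:
    # pop() with no index removes the last element, so a real argument would
    # be lost if the flag were passed anywhere other than last.
    sys.argv.pop()
print sys.argv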
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Fix chart_curve (Trackster) icon omission in new styles.
by Bitbucket 30 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/de2946aca887/
changeset: de2946aca887
user: jgoecks
date: 2012-03-30 15:12:37
summary: Fix chart_curve (Trackster) icon omission in new styles.
affected #: 3 files
diff -r 1c3b74544a2f0d2d135c8d3f70803d1284278d5b -r de2946aca8877087d761684a72ff955f46e4f5a6 static/june_2007_style/base_sprites.less.tmpl
--- a/static/june_2007_style/base_sprites.less.tmpl
+++ b/static/june_2007_style/base_sprites.less.tmpl
@@ -94,6 +94,10 @@
-sprite-group: fugue;
-sprite-image: fugue/gear.png;
}
+.icon-button.chart_curve {
+ -sprite-group: fugue;
+ -sprite-image: silk/chart_curve.png;
+}
.text-and-autocomplete-select {
-sprite-group: fugue;
diff -r 1c3b74544a2f0d2d135c8d3f70803d1284278d5b -r de2946aca8877087d761684a72ff955f46e4f5a6 static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -225,7 +225,7 @@
.navbar-form .radio,.navbar-form .checkbox{margin-top:5px;}
.navbar-form input[type="image"],.navbar-form input[type="checkbox"],.navbar-form input[type="radio"]{margin-top:3px;}
.navbar-search{position:relative;float:left;margin-top:6px;margin-bottom:0;}.navbar-search .search-query{padding:4px 9px;font-family:"Lucida Grande",verdana,arial,helvetica,sans-serif;font-size:13px;font-weight:normal;line-height:1;color:#ffffff;color:rgba(255, 255, 255, 0.75);background:#666;background:rgba(255, 255, 255, 0.3);border:1px solid #111;-webkit-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1),0 1px 0px rgba(255, 255, 255, 0.15);-moz-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1),0 1px 0px rgba(255, 255, 255, 0.15);box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1),0 1px 0px rgba(255, 255, 255, 0.15);-webkit-transition:none;-moz-transition:none;-ms-transition:none;-o-transition:none;transition:none;}.navbar-search .search-query :-moz-placeholder{color:#eeeeee;}
-.navbar-search .search-query::-webkit-input-placeholder{color:#eeeeee;}
+.navbar-search .search-query ::-webkit-input-placeholder{color:#eeeeee;}
.navbar-search .search-query:hover{color:#ffffff;background-color:#999999;background-color:rgba(255, 255, 255, 0.5);}
.navbar-search .search-query:focus,.navbar-search .search-query.focused{padding:5px 10px;color:#333333;text-shadow:0 1px 0 #ffffff;background-color:#ffffff;border:0;-webkit-box-shadow:0 0 3px rgba(0, 0, 0, 0.15);-moz-box-shadow:0 0 3px rgba(0, 0, 0, 0.15);box-shadow:0 0 3px rgba(0, 0, 0, 0.15);outline:0;}
.navbar-fixed-top{position:fixed;top:0;right:0;left:0;z-index:1030;}
@@ -706,7 +706,8 @@
.icon-button.import{background:url(fugue.png) no-repeat 0px -312px;}
.icon-button.plus-button{background:url(fugue.png) no-repeat 0px -338px;}
.icon-button.gear{background:url(fugue.png) no-repeat 0px -364px;}
-.text-and-autocomplete-select{background:url(fugue.png) no-repeat right -390px;}
+.icon-button.chart_curve{background:url(fugue.png) no-repeat 0px -390px;}
+.text-and-autocomplete-select{background:url(fugue.png) no-repeat right -416px;}
.tipsy{padding:5px;font-size:10px;filter:alpha(opacity=80);background-repeat:no-repeat;background-image:url(../images/tipsy.gif);}
.tipsy-inner{padding:5px 8px 4px 8px;background-color:black;color:white;max-width:200px;text-align:center;}
.tipsy-north{background-position:top center;}
diff -r 1c3b74544a2f0d2d135c8d3f70803d1284278d5b -r de2946aca8877087d761684a72ff955f46e4f5a6 static/june_2007_style/blue/fugue.png
Binary file static/june_2007_style/blue/fugue.png has changed
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: natefoo: Remove unused and faulty import of sqlalchemy expressions in galaxy.jobs
by Bitbucket 29 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1c3b74544a2f/
changeset: 1c3b74544a2f
user: natefoo
date: 2012-03-30 04:47:17
summary: Remove unused and faulty import of sqlalchemy expressions in galaxy.jobs
affected #: 1 file
diff -r 4f6c38ca353861351e286016bbbddc126ca566c4 -r 1c3b74544a2f0d2d135c8d3f70803d1284278d5b lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -11,8 +11,6 @@
import traceback
import subprocess
-from sqlalchemy.sql.expression import and_, or_
-
import galaxy
from galaxy import util, model
from galaxy.datatypes.tabular import *
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: natefoo: Support multiple job runners by creating a job manager that designates a job handler to run jobs, thereby avoiding the "new job" race condition.
by Bitbucket 29 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4f6c38ca3538/
changeset: 4f6c38ca3538
user: natefoo
date: 2012-03-29 23:25:56
summary: Support multiple job runners by creating a job manager that designates a job handler to run jobs, thereby avoiding the "new job" race condition.
affected #: 11 files
diff -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f -r 4f6c38ca353861351e286016bbbddc126ca566c4 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -124,8 +124,9 @@
if self.config.get_bool( 'enable_beta_job_managers', False ):
from jobs import transfer_manager
self.transfer_manager = transfer_manager.TransferManager( self )
- # Start the job queue
- self.job_manager = jobs.JobManager( self )
+ # Start the job manager
+ from jobs import manager
+ self.job_manager = manager.JobManager( self )
# FIXME: These are exposed directly for backward compatibility
self.job_queue = self.job_manager.job_queue
self.job_stop_queue = self.job_manager.job_stop_queue
diff -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f -r 4f6c38ca353861351e286016bbbddc126ca566c4 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -184,7 +184,20 @@
# Heartbeat log file name override
if global_conf is not None:
self.heartbeat_log = global_conf.get( 'heartbeat_log', 'heartbeat.log' )
- #Store per-tool runner configs.
+ # Determine which 'server:' this is
+ self.server_name = 'main'
+ for arg in sys.argv:
+ # Crummy, but PasteScript does not give you a way to determine this
+ if arg.lower().startswith('--server-name='):
+ self.server_name = arg.split('=', 1)[-1]
+ # Store advanced job management config
+ self.job_manager = kwargs.get('job_manager', self.server_name).strip()
+ self.job_handlers = [ x.strip() for x in kwargs.get('job_handlers', self.server_name).split(',') ]
+ # Use database for IPC unless this is a standalone server (or multiple servers doing self dispatching in memory)
+ self.track_jobs_in_database = True
+ if ( len( self.job_handlers ) == 1 ) and ( self.job_handlers[0] == self.server_name ) and ( self.job_manager == self.server_name ):
+ self.track_jobs_in_database = False
+ # Store per-tool runner configs
try:
tool_runners_config = global_conf_parser.items("galaxy:tool_runners")
diff -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f -r 4f6c38ca353861351e286016bbbddc126ca566c4 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -1,4 +1,17 @@
-import logging, threading, sys, os, time, traceback, shutil
+"""
+Support for running a tool in Galaxy via an internal job management system
+"""
+
+import os
+import sys
+import pwd
+import time
+import logging
+import threading
+import traceback
+import subprocess
+
+from sqlalchemy.sql.expression import and_, or_
import galaxy
from galaxy import util, model
@@ -9,51 +22,16 @@
from galaxy.util.json import from_json_string
from galaxy.util.expressions import ExpressionContext
from galaxy.jobs.actions.post import ActionBox
-import subprocess, pwd
from galaxy.exceptions import ObjectInvalid
-from sqlalchemy.sql.expression import and_, or_
-
-import pkg_resources
-pkg_resources.require( "PasteDeploy" )
-
-from Queue import Queue, Empty
-
log = logging.getLogger( __name__ )
-# States for running a job. These are NOT the same as data states
-JOB_WAIT, JOB_ERROR, JOB_INPUT_ERROR, JOB_INPUT_DELETED, JOB_READY, JOB_DELETED, JOB_ADMIN_DELETED = 'wait', 'error', 'input_error', 'input_deleted', 'ready', 'deleted', 'admin_deleted'
-
# This file, if created in the job's working directory, will be used for
# setting advanced metadata properties on the job and its associated outputs.
# This interface is currently experimental, is only used by the upload tool,
# and should eventually become API'd
TOOL_PROVIDED_JOB_METADATA_FILE = 'galaxy.json'
-class JobManager( object ):
- """
- Highest level interface to job management.
-
- TODO: Currently the app accesses "job_queue" and "job_stop_queue" directly.
- This should be decoupled.
- """
- def __init__( self, app ):
- self.app = app
- if self.app.config.get_bool( "enable_job_running", True ):
- # The dispatcher launches the underlying job runners
- self.dispatcher = DefaultJobDispatcher( app )
- # Queues for starting and stopping jobs
- self.job_queue = JobQueue( app, self.dispatcher )
- self.job_stop_queue = JobStopQueue( app, self.dispatcher )
- if self.app.config.enable_beta_job_managers:
- from galaxy.jobs.deferred import DeferredJobQueue
- self.deferred_job_queue = DeferredJobQueue( app )
- else:
- self.job_queue = self.job_stop_queue = NoopQueue()
- def shutdown( self ):
- self.job_queue.shutdown()
- self.job_stop_queue.shutdown()
-
class Sleeper( object ):
"""
Provides a 'sleep' method that sleeps for a number of seconds *unless*
@@ -70,238 +48,6 @@
self.condition.notify()
self.condition.release()
-class JobQueue( object ):
- """
- Job manager, waits for jobs to be runnable and then dispatches to
- a JobRunner.
- """
- STOP_SIGNAL = object()
- def __init__( self, app, dispatcher ):
- """Start the job manager"""
- self.app = app
- self.sa_session = app.model.context
- self.job_lock = False
- # Should we read jobs form the database, or use an in memory queue
- self.track_jobs_in_database = app.config.get_bool( 'track_jobs_in_database', False )
- # Keep track of the pid that started the job manager, only it
- # has valid threads
- self.parent_pid = os.getpid()
- # Contains new jobs. Note this is not used if track_jobs_in_database is True
- self.queue = Queue()
- # Contains jobs that are waiting (only use from monitor thread)
- ## This and jobs_to_check[] are closest to a "Job Queue"
- self.waiting_jobs = []
- # Helper for interruptable sleep
- self.sleeper = Sleeper()
- self.running = True
- self.dispatcher = dispatcher
- self.monitor_thread = threading.Thread( target=self.__monitor )
- # Recover jobs at startup
- if app.config.get_bool( 'enable_job_recovery', True ):
- self.__check_jobs_at_startup()
- # Start the queue
- self.monitor_thread.start()
- log.info( "job manager started" )
-
- def __check_jobs_at_startup( self ):
- """
- Checks all jobs that are in the 'new', 'queued' or 'running' state in
- the database and requeues or cleans up as necessary. Only run as the
- job manager starts.
- """
- model = self.app.model # DBTODO Why?
- for job in self.sa_session.query( model.Job ).filter( model.Job.state == model.Job.states.NEW ):
- if job.tool_id not in self.app.toolbox.tools_by_id:
- log.warning( "Tool '%s' removed from tool config, unable to recover job: %s" % ( job.tool_id, job.id ) )
- JobWrapper( job, self ).fail( 'This tool was disabled before the job completed. Please contact your Galaxy administrator, or' )
- else:
- log.debug( "no runner: %s is still in new state, adding to the jobs queue" %job.id )
- self.queue.put( ( job.id, job.tool_id ) )
- for job in self.sa_session.query( model.Job ).enable_eagerloads( False ).filter( ( model.Job.state == model.Job.states.RUNNING ) | ( model.Job.state == model.Job.states.QUEUED ) ):
- if job.tool_id not in self.app.toolbox.tools_by_id:
- log.warning( "Tool '%s' removed from tool config, unable to recover job: %s" % ( job.tool_id, job.id ) )
- JobWrapper( job, self ).fail( 'This tool was disabled before the job completed. Please contact your Galaxy administrator, or' )
- elif job.job_runner_name is None:
- log.debug( "no runner: %s is still in queued state, adding to the jobs queue" %job.id )
- if self.track_jobs_in_database:
- job.state = model.Job.states.NEW
- else:
- self.queue.put( ( job.id, job.tool_id ) )
- else:
- job_wrapper = JobWrapper( job, self )
- self.dispatcher.recover( job, job_wrapper )
- if self.sa_session.dirty:
- self.sa_session.flush()
-
- def __monitor( self ):
- """
- Continually iterate the waiting jobs, checking is each is ready to
- run and dispatching if so.
- """
- # HACK: Delay until after forking, we need a way to do post fork notification!!!
- time.sleep( 10 )
- while self.running:
- try:
- self.__monitor_step()
- except:
- log.exception( "Exception in monitor_step" )
- # Sleep
- self.sleeper.sleep( 1 )
-
- def __monitor_step( self ):
- """
- Called repeatedly by `monitor` to process waiting jobs. Gets any new
- jobs (either from the database or from its own queue), then iterates
- over all new and waiting jobs to check the state of the jobs each
- depends on. If the job has dependencies that have not finished, it
- it goes to the waiting queue. If the job has dependencies with errors,
- it is marked as having errors and removed from the queue. Otherwise,
- the job is dispatched.
- """
- # Pull all new jobs from the queue at once
- jobs_to_check = []
- if self.track_jobs_in_database:
- # Clear the session so we get fresh states for job and all datasets
- self.sa_session.expunge_all()
- # Fetch all new jobs
- jobs_to_check = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
- .filter( model.Job.state == model.Job.states.NEW ).all()
- else:
- # Get job objects and append to watch queue for any which were
- # previously waiting
- for job_id in self.waiting_jobs:
- jobs_to_check.append( self.sa_session.query( model.Job ).get( job_id ) )
- try:
- while 1:
- message = self.queue.get_nowait()
- if message is self.STOP_SIGNAL:
- return
- # Unpack the message
- job_id, tool_id = message
- # Get the job object and append to watch queue
- jobs_to_check.append( self.sa_session.query( model.Job ).get( job_id ) )
- except Empty:
- pass
- # Iterate over new and waiting jobs and look for any that are
- # ready to run
- new_waiting_jobs = []
- for job in jobs_to_check:
- try:
- # Check the job's dependencies, requeue if they're not done
- job_state = self.__check_if_ready_to_run( job )
- if job_state == JOB_WAIT:
- if not self.track_jobs_in_database:
- new_waiting_jobs.append( job.id )
- elif job_state == JOB_INPUT_ERROR:
- log.info( "job %d unable to run: one or more inputs in error state" % job.id )
- elif job_state == JOB_INPUT_DELETED:
- log.info( "job %d unable to run: one or more inputs deleted" % job.id )
- elif job_state == JOB_READY:
- if self.job_lock:
- log.info( "Job dispatch attempted for %s, but prevented by administrative lock." % job.id )
- if not self.track_jobs_in_database:
- new_waiting_jobs.append( job.id )
- else:
- self.dispatcher.put( JobWrapper( job, self ) )
- log.info( "job %d dispatched" % job.id )
- elif job_state == JOB_DELETED:
- log.info( "job %d deleted by user while still queued" % job.id )
- elif job_state == JOB_ADMIN_DELETED:
- log.info( "job %d deleted by admin while still queued" % job.id )
- else:
- log.error( "unknown job state '%s' for job %d" % ( job_state, job.id ) )
- if not self.track_jobs_in_database:
- new_waiting_jobs.append( job.id )
- except Exception:
- log.exception( "failure running job %d" % job.id )
- # Update the waiting list
- self.waiting_jobs = new_waiting_jobs
- # Done with the session
- self.sa_session.remove()
-
- def __check_if_ready_to_run( self, job ):
- """
- Check if a job is ready to run by verifying that each of its input
- datasets is ready (specifically in the OK state). If any input dataset
- has an error, fail the job and return JOB_INPUT_ERROR. If any input
- dataset is deleted, fail the job and return JOB_INPUT_DELETED. If all
- input datasets are in OK state, return JOB_READY indicating that the
- job can be dispatched. Otherwise, return JOB_WAIT indicating that input
- datasets are still being prepared.
- """
- if job.state == model.Job.states.DELETED:
- return JOB_DELETED
- elif job.state == model.Job.states.ERROR:
- return JOB_ADMIN_DELETED
- elif self.app.config.enable_quotas:
- quota = self.app.quota_agent.get_quota( job.user )
- if quota is not None:
- try:
- usage = self.app.quota_agent.get_usage( user=job.user, history=job.history )
- if usage > quota:
- return JOB_WAIT
- except AssertionError, e:
- pass # No history, should not happen with an anon user
- for dataset_assoc in job.input_datasets + job.input_library_datasets:
- idata = dataset_assoc.dataset
- if not idata:
- continue
- # don't run jobs for which the input dataset was deleted
- if idata.deleted:
- JobWrapper( job, self ).fail( "input data %d (file: %s) was deleted before the job started" % ( idata.hid, idata.file_name ) )
- return JOB_INPUT_DELETED
- # an error in the input data causes us to bail immediately
- elif idata.state == idata.states.ERROR:
- JobWrapper( job, self ).fail( "input data %d is in error state" % ( idata.hid ) )
- return JOB_INPUT_ERROR
- elif idata.state == idata.states.FAILED_METADATA:
- JobWrapper( job, self ).fail( "input data %d failed to properly set metadata" % ( idata.hid ) )
- return JOB_INPUT_ERROR
- elif idata.state != idata.states.OK and not ( idata.state == idata.states.SETTING_METADATA and job.tool_id is not None and job.tool_id == self.app.datatypes_registry.set_external_metadata_tool.id ):
- # need to requeue
- return JOB_WAIT
- return self.__check_user_jobs( job )
-
- def __check_user_jobs( self, job ):
- if not self.app.config.user_job_limit:
- return JOB_READY
- if job.user:
- count = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
- .filter( and_( model.Job.user_id == job.user.id,
- or_( model.Job.state == model.Job.states.RUNNING,
- model.Job.state == model.Job.states.QUEUED ) ) ).count()
- elif job.galaxy_session:
- count = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
- .filter( and_( model.Job.session_id == job.galaxy_session.id,
- or_( model.Job.state == model.Job.states.RUNNING,
- model.Job.state == model.Job.states.QUEUED ) ) ).count()
- else:
- log.warning( 'Job %s is not associated with a user or session so job concurrency limit cannot be checked.' % job.id )
- return JOB_READY
- if count >= self.app.config.user_job_limit:
- return JOB_WAIT
- return JOB_READY
-
- def put( self, job_id, tool ):
- """Add a job to the queue (by job identifier)"""
- if not self.track_jobs_in_database:
- self.queue.put( ( job_id, tool.id ) )
- self.sleeper.wake()
-
- def shutdown( self ):
- """Attempts to gracefully shut down the worker thread"""
- if self.parent_pid != os.getpid():
- # We're not the real job queue, do nothing
- return
- else:
- log.info( "sending stop signal to worker thread" )
- self.running = False
- if not self.track_jobs_in_database:
- self.queue.put( self.STOP_SIGNAL )
- self.sleeper.wake()
- log.info( "job queue stopped" )
- self.dispatcher.shutdown()
-
class JobWrapper( object ):
"""
Wraps a 'model.Job' with convenience methods for running processes and
@@ -1177,179 +923,13 @@
# There is no metadata setting for tasks. This is handled after the merge, at the job level.
return ""
-class DefaultJobDispatcher( object ):
- def __init__( self, app ):
- self.app = app
- self.job_runners = {}
- start_job_runners = ["local"]
- if app.config.start_job_runners is not None:
- start_job_runners.extend( [ x.strip() for x in util.listify( app.config.start_job_runners ) ] )
- if app.config.use_tasked_jobs:
- start_job_runners.append("tasks")
- for name in start_job_runners:
- self._load_plugin( name )
-
- def _load_plugin( self, name ):
- module_name = 'galaxy.jobs.runners.' + name
- try:
- module = __import__( module_name )
- except:
- log.exception( 'Job runner is not loadable: %s' % module_name )
- return
- for comp in module_name.split( "." )[1:]:
- module = getattr( module, comp )
- if '__all__' not in dir( module ):
- log.error( 'Runner "%s" does not contain a list of exported classes in __all__' % module_name )
- return
- for obj in module.__all__:
- display_name = ':'.join( ( module_name, obj ) )
- runner = getattr( module, obj )
- self.job_runners[name] = runner( self.app )
- log.debug( 'Loaded job runner: %s' % display_name )
-
- def __get_runner_name( self, job_wrapper ):
- if self.app.config.use_tasked_jobs and job_wrapper.tool.parallelism is not None and not isinstance(job_wrapper, TaskWrapper):
- runner_name = "tasks"
- else:
- runner_name = ( job_wrapper.get_job_runner().split(":", 1) )[0]
- return runner_name
-
- def put( self, job_wrapper ):
- try:
- runner_name = self.__get_runner_name( job_wrapper )
- if self.app.config.use_tasked_jobs and job_wrapper.tool.parallelism is not None and isinstance(job_wrapper, TaskWrapper):
- #DBTODO Refactor
- log.debug( "dispatching task %s, of job %d, to %s runner" %( job_wrapper.task_id, job_wrapper.job_id, runner_name ) )
- else:
- log.debug( "dispatching job %d to %s runner" %( job_wrapper.job_id, runner_name ) )
- self.job_runners[runner_name].put( job_wrapper )
- except KeyError:
- log.error( 'put(): (%s) Invalid job runner: %s' % ( job_wrapper.job_id, runner_name ) )
- job_wrapper.fail( 'Unable to run job due to a misconfiguration of the Galaxy job running system. Please contact a site administrator.' )
-
- def stop( self, job ):
- runner_name = ( job.job_runner_name.split(":", 1) )[0]
- log.debug( "stopping job %d in %s runner" %( job.id, runner_name ) )
- try:
- self.job_runners[runner_name].stop_job( job )
- except KeyError:
- log.error( 'stop(): (%s) Invalid job runner: %s' % ( job_wrapper.job_id, runner_name ) )
- # Job and output dataset states have already been updated, so nothing is done here.
-
- def recover( self, job, job_wrapper ):
- runner_name = ( job.job_runner_name.split(":", 1) )[0]
- log.debug( "recovering job %d in %s runner" %( job.id, runner_name ) )
- try:
- self.job_runners[runner_name].recover( job, job_wrapper )
- except KeyError:
- log.error( 'recover(): (%s) Invalid job runner: %s' % ( job_wrapper.job_id, runner_name ) )
- job_wrapper.fail( 'Unable to run job due to a misconfiguration of the Galaxy job running system. Please contact a site administrator.' )
-
- def shutdown( self ):
- for runner in self.job_runners.itervalues():
- runner.shutdown()
-
-class JobStopQueue( object ):
- """
- A queue for jobs which need to be terminated prematurely.
- """
- STOP_SIGNAL = object()
- def __init__( self, app, dispatcher ):
- self.app = app
- self.sa_session = app.model.context
- self.dispatcher = dispatcher
-
- self.track_jobs_in_database = app.config.get_bool( 'track_jobs_in_database', False )
-
- # Keep track of the pid that started the job manager, only it
- # has valid threads
- self.parent_pid = os.getpid()
- # Contains new jobs. Note this is not used if track_jobs_in_database is True
- self.queue = Queue()
-
- # Contains jobs that are waiting (only use from monitor thread)
- self.waiting = []
-
- # Helper for interruptable sleep
- self.sleeper = Sleeper()
- self.running = True
- self.monitor_thread = threading.Thread( target=self.monitor )
- self.monitor_thread.start()
- log.info( "job stopper started" )
-
- def monitor( self ):
- """
- Continually iterate the waiting jobs, stop any that are found.
- """
- # HACK: Delay until after forking, we need a way to do post fork notification!!!
- time.sleep( 10 )
- while self.running:
- try:
- self.monitor_step()
- except:
- log.exception( "Exception in monitor_step" )
- # Sleep
- self.sleeper.sleep( 1 )
-
- def monitor_step( self ):
- """
- Called repeatedly by `monitor` to stop jobs.
- """
- # Pull all new jobs from the queue at once
- jobs_to_check = []
- if self.track_jobs_in_database:
- # Clear the session so we get fresh states for job and all datasets
- self.sa_session.expunge_all()
- # Fetch all new jobs
- newly_deleted_jobs = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
- .filter( model.Job.state == model.Job.states.DELETED_NEW ).all()
- for job in newly_deleted_jobs:
- jobs_to_check.append( ( job, None ) )
- # Also pull from the queue (in the case of Administrative stopped jobs)
- try:
- while 1:
- message = self.queue.get_nowait()
- if message is self.STOP_SIGNAL:
- return
- # Unpack the message
- job_id, error_msg = message
- # Get the job object and append to watch queue
- jobs_to_check.append( ( self.sa_session.query( model.Job ).get( job_id ), error_msg ) )
- except Empty:
- pass
- for job, error_msg in jobs_to_check:
- if error_msg is not None:
- job.state = job.states.ERROR
- job.info = error_msg
- else:
- job.state = job.states.DELETED
- self.sa_session.add( job )
- self.sa_session.flush()
- if job.job_runner_name is not None:
- # tell the dispatcher to stop the job
- self.dispatcher.stop( job )
-
- def put( self, job_id, error_msg=None ):
- self.queue.put( ( job_id, error_msg ) )
-
- def shutdown( self ):
- """Attempts to gracefully shut down the worker thread"""
- if self.parent_pid != os.getpid():
- # We're not the real job queue, do nothing
- return
- else:
- log.info( "sending stop signal to worker thread" )
- self.running = False
- if not self.track_jobs_in_database:
- self.queue.put( self.STOP_SIGNAL )
- self.sleeper.wake()
- log.info( "job stopper stopped" )
-
class NoopQueue( object ):
"""
Implements the JobQueue / JobStopQueue interface but does nothing
"""
def put( self, *args ):
return
+ def put_stop( self, *args ):
+ return
def shutdown( self ):
return
diff -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f -r 4f6c38ca353861351e286016bbbddc126ca566c4 lib/galaxy/jobs/handler.py
--- /dev/null
+++ b/lib/galaxy/jobs/handler.py
@@ -0,0 +1,430 @@
+"""
+Galaxy job handler, prepares, runs, tracks, and finishes Galaxy jobs
+"""
+
+import os
+import time
+import logging
+import threading
+from Queue import Queue, Empty
+
+from sqlalchemy.sql.expression import and_, or_
+
+from galaxy import util, model
+from galaxy.jobs import Sleeper, JobWrapper, TaskWrapper
+
+log = logging.getLogger( __name__ )
+
+# States for running a job. These are NOT the same as data states
+JOB_WAIT, JOB_ERROR, JOB_INPUT_ERROR, JOB_INPUT_DELETED, JOB_READY, JOB_DELETED, JOB_ADMIN_DELETED = 'wait', 'error', 'input_error', 'input_deleted', 'ready', 'deleted', 'admin_deleted'
+
+class JobHandler( object ):
+ """
+ Handle the preparation, running, tracking, and finishing of jobs
+ """
+ def __init__( self, app ):
+ self.app = app
+ # The dispatcher launches the underlying job runners
+ self.dispatcher = DefaultJobDispatcher( app )
+ # Queues for starting and stopping jobs
+ self.job_queue = JobHandlerQueue( app, self.dispatcher )
+ self.job_stop_queue = JobHandlerStopQueue( app, self.dispatcher )
+ def start( self ):
+ self.job_queue.start()
+ def shutdown( self ):
+ self.job_queue.shutdown()
+ self.job_stop_queue.shutdown()
+
+class JobHandlerQueue( object ):
+ """
+ Job manager, waits for jobs to be runnable and then dispatches to
+ a JobRunner.
+ """
+ STOP_SIGNAL = object()
+ def __init__( self, app, dispatcher ):
+ """Start the job manager"""
+ self.app = app
+ self.dispatcher = dispatcher
+
+ self.sa_session = app.model.context
+ self.track_jobs_in_database = self.app.config.track_jobs_in_database
+
+ # Keep track of the pid that started the job manager, only it
+ # has valid threads
+ self.parent_pid = os.getpid()
+ # Contains new jobs. Note this is not used if track_jobs_in_database is True
+ self.queue = Queue()
+ # Contains jobs that are waiting (only use from monitor thread)
+ ## This and jobs_to_check[] are closest to a "Job Queue"
+ self.waiting_jobs = []
+ # Helper for interruptable sleep
+ self.sleeper = Sleeper()
+ self.running = True
+ self.monitor_thread = threading.Thread( target=self.__monitor )
+
+ def start( self ):
+ """
+ The JobManager should start, and then start its Handler, if it has one.
+ """
+ # Recover jobs at startup
+ self.__check_jobs_at_startup()
+ # Start the queue
+ self.monitor_thread.start()
+ log.info( "job handler queue started" )
+
+ def __check_jobs_at_startup( self ):
+ """
+ Checks all jobs that are in the 'new', 'queued' or 'running' state in
+ the database and requeues or cleans up as necessary. Only run as the
+ job manager starts.
+ """
+ for job in self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+ .filter( ( ( model.Job.state == model.Job.states.NEW ) \
+ | ( model.Job.state == model.Job.states.RUNNING ) \
+ | ( model.Job.state == model.Job.states.QUEUED ) ) \
+ & ( model.Job.handler == self.app.config.server_name ) ):
+ if job.tool_id not in self.app.toolbox.tools_by_id:
+ log.warning( "(%s) Tool '%s' removed from tool config, unable to recover job" % ( job.id, job.tool_id ) )
+ JobWrapper( job, self ).fail( 'This tool was disabled before the job completed. Please contact your Galaxy administrator.' )
+ elif job.job_runner_name is None:
+ log.debug( "(%s) No job runner assigned and job still in '%s' state, adding to the job handler queue" % ( job.id, job.state ) )
+ if self.track_jobs_in_database:
+ job.state = model.Job.states.NEW
+ else:
+ self.queue.put( ( job.id, job.tool_id ) )
+ else:
+ job_wrapper = JobWrapper( job, self )
+ self.dispatcher.recover( job, job_wrapper )
+ if self.sa_session.dirty:
+ self.sa_session.flush()
+
+ def __monitor( self ):
+ """
+ Continually iterate the waiting jobs, checking is each is ready to
+ run and dispatching if so.
+ """
+ while self.running:
+ try:
+ self.__monitor_step()
+ except:
+ log.exception( "Exception in monitor_step" )
+ # Sleep
+ self.sleeper.sleep( 1 )
+
+ def __monitor_step( self ):
+ """
+ Called repeatedly by `monitor` to process waiting jobs. Gets any new
+ jobs (either from the database or from its own queue), then iterates
+ over all new and waiting jobs to check the state of the jobs each
+ depends on. If the job has dependencies that have not finished, it
+ it goes to the waiting queue. If the job has dependencies with errors,
+ it is marked as having errors and removed from the queue. Otherwise,
+ the job is dispatched.
+ """
+ # Pull all new jobs from the queue at once
+ jobs_to_check = []
+ if self.track_jobs_in_database:
+ # Clear the session so we get fresh states for job and all datasets
+ self.sa_session.expunge_all()
+ # Fetch all new jobs
+ jobs_to_check = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+ .filter( ( model.Job.state == model.Job.states.NEW ) \
+ & ( model.Job.handler == self.app.config.server_name ) ).all()
+ else:
+ # Get job objects and append to watch queue for any which were
+ # previously waiting
+ for job_id in self.waiting_jobs:
+ jobs_to_check.append( self.sa_session.query( model.Job ).get( job_id ) )
+ try:
+ while 1:
+ message = self.queue.get_nowait()
+ if message is self.STOP_SIGNAL:
+ return
+ # Unpack the message
+ job_id, tool_id = message
+ # Get the job object and append to watch queue
+ jobs_to_check.append( self.sa_session.query( model.Job ).get( job_id ) )
+ except Empty:
+ pass
+ # Iterate over new and waiting jobs and look for any that are
+ # ready to run
+ new_waiting_jobs = []
+ for job in jobs_to_check:
+ try:
+ # Check the job's dependencies, requeue if they're not done
+ job_state = self.__check_if_ready_to_run( job )
+ if job_state == JOB_WAIT:
+ if not self.track_jobs_in_database:
+ new_waiting_jobs.append( job.id )
+ elif job_state == JOB_INPUT_ERROR:
+ log.info( "(%d) Job unable to run: one or more inputs in error state" % job.id )
+ elif job_state == JOB_INPUT_DELETED:
+ log.info( "(%d) Job unable to run: one or more inputs deleted" % job.id )
+ elif job_state == JOB_READY:
+ self.dispatcher.put( JobWrapper( job, self ) )
+ log.info( "(%d) Job dispatched" % job.id )
+ elif job_state == JOB_DELETED:
+ log.info( "(%d) Job deleted by user while still queued" % job.id )
+ elif job_state == JOB_ADMIN_DELETED:
+ log.info( "(%d) Job deleted by admin while still queued" % job.id )
+ else:
+ log.error( "(%d) Job in unknown state '%s'" % ( job.id, job_state ) )
+ if not self.track_jobs_in_database:
+ new_waiting_jobs.append( job.id )
+ except Exception:
+ log.exception( "failure running job %d" % job.id )
+ # Update the waiting list
+ self.waiting_jobs = new_waiting_jobs
+ # Done with the session
+ self.sa_session.remove()
+
+ def __check_if_ready_to_run( self, job ):
+ """
+ Check if a job is ready to run by verifying that each of its input
+ datasets is ready (specifically in the OK state). If any input dataset
+ has an error, fail the job and return JOB_INPUT_ERROR. If any input
+ dataset is deleted, fail the job and return JOB_INPUT_DELETED. If all
+ input datasets are in OK state, return JOB_READY indicating that the
+ job can be dispatched. Otherwise, return JOB_WAIT indicating that input
+ datasets are still being prepared.
+ """
+ if job.state == model.Job.states.DELETED:
+ return JOB_DELETED
+ elif job.state == model.Job.states.ERROR:
+ return JOB_ADMIN_DELETED
+ elif self.app.config.enable_quotas:
+ quota = self.app.quota_agent.get_quota( job.user )
+ if quota is not None:
+ try:
+ usage = self.app.quota_agent.get_usage( user=job.user, history=job.history )
+ if usage > quota:
+ return JOB_WAIT
+ except AssertionError, e:
+ pass # No history, should not happen with an anon user
+ for dataset_assoc in job.input_datasets + job.input_library_datasets:
+ idata = dataset_assoc.dataset
+ if not idata:
+ continue
+ # don't run jobs for which the input dataset was deleted
+ if idata.deleted:
+ JobWrapper( job, self ).fail( "input data %d (file: %s) was deleted before the job started" % ( idata.hid, idata.file_name ) )
+ return JOB_INPUT_DELETED
+ # an error in the input data causes us to bail immediately
+ elif idata.state == idata.states.ERROR:
+ JobWrapper( job, self ).fail( "input data %d is in error state" % ( idata.hid ) )
+ return JOB_INPUT_ERROR
+ elif idata.state == idata.states.FAILED_METADATA:
+ JobWrapper( job, self ).fail( "input data %d failed to properly set metadata" % ( idata.hid ) )
+ return JOB_INPUT_ERROR
+ elif idata.state != idata.states.OK and not ( idata.state == idata.states.SETTING_METADATA and job.tool_id is not None and job.tool_id == self.app.datatypes_registry.set_external_metadata_tool.id ):
+ # need to requeue
+ return JOB_WAIT
+ return self.__check_user_jobs( job )
+
+ def __check_user_jobs( self, job ):
+ if not self.app.config.user_job_limit:
+ return JOB_READY
+ if job.user:
+ count = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+ .filter( and_( model.Job.user_id == job.user.id,
+ or_( model.Job.state == model.Job.states.RUNNING,
+ model.Job.state == model.Job.states.QUEUED ) ) ).count()
+ elif job.galaxy_session:
+ count = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+ .filter( and_( model.Job.session_id == job.galaxy_session.id,
+ or_( model.Job.state == model.Job.states.RUNNING,
+ model.Job.state == model.Job.states.QUEUED ) ) ).count()
+ else:
+ log.warning( 'Job %s is not associated with a user or session so job concurrency limit cannot be checked.' % job.id )
+ return JOB_READY
+ if count >= self.app.config.user_job_limit:
+ return JOB_WAIT
+ return JOB_READY
+
+ def put( self, job_id, tool_id ):
+ """Add a job to the queue (by job identifier)"""
+ if not self.track_jobs_in_database:
+ self.queue.put( ( job_id, tool_id ) )
+ self.sleeper.wake()
+
+ def shutdown( self ):
+ """Attempts to gracefully shut down the worker thread"""
+ if self.parent_pid != os.getpid():
+ # We're not the real job queue, do nothing
+ return
+ else:
+ log.info( "sending stop signal to worker thread" )
+ self.running = False
+ if not self.track_jobs_in_database:
+ self.queue.put( self.STOP_SIGNAL )
+ self.sleeper.wake()
+ log.info( "job handler queue stopped" )
+ self.dispatcher.shutdown()
+
+class JobHandlerStopQueue( object ):
+ """
+ A queue for jobs which need to be terminated prematurely.
+ """
+ STOP_SIGNAL = object()
+ def __init__( self, app, dispatcher ):
+ self.app = app
+ self.dispatcher = dispatcher
+
+ self.sa_session = app.model.context
+
+ # Keep track of the pid that started the job manager, only it
+ # has valid threads
+ self.parent_pid = os.getpid()
+ # Contains new jobs. Note this is not used if track_jobs_in_database is True
+ self.queue = Queue()
+
+ # Contains jobs that are waiting (only use from monitor thread)
+ self.waiting = []
+
+ # Helper for interruptable sleep
+ self.sleeper = Sleeper()
+ self.running = True
+ self.monitor_thread = threading.Thread( target=self.monitor )
+ self.monitor_thread.start()
+ log.info( "job handler stop queue started" )
+
+ def monitor( self ):
+ """
+ Continually iterate the waiting jobs, stop any that are found.
+ """
+ # HACK: Delay until after forking, we need a way to do post fork notification!!!
+ time.sleep( 10 )
+ while self.running:
+ try:
+ self.monitor_step()
+ except:
+ log.exception( "Exception in monitor_step" )
+ # Sleep
+ self.sleeper.sleep( 1 )
+
+ def monitor_step( self ):
+ """
+ Called repeatedly by `monitor` to stop jobs.
+ """
+ # Pull all new jobs from the queue at once
+ jobs_to_check = []
+ if self.app.config.track_jobs_in_database:
+ # Clear the session so we get fresh states for job and all datasets
+ self.sa_session.expunge_all()
+ # Fetch all new jobs
+ newly_deleted_jobs = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+ .filter( ( model.Job.state == model.Job.states.DELETED_NEW ) \
+ & ( model.Job.handler == self.app.config.server_name ) ).all()
+ for job in newly_deleted_jobs:
+ jobs_to_check.append( ( job, None ) )
+ # Also pull from the queue (in the case of Administrative stopped jobs)
+ try:
+ while 1:
+ message = self.queue.get_nowait()
+ if message is self.STOP_SIGNAL:
+ return
+ # Unpack the message
+ job_id, error_msg = message
+ # Get the job object and append to watch queue
+ jobs_to_check.append( ( self.sa_session.query( model.Job ).get( job_id ), error_msg ) )
+ except Empty:
+ pass
+ for job, error_msg in jobs_to_check:
+ if error_msg is not None:
+ job.state = job.states.ERROR
+ job.info = error_msg
+ else:
+ job.state = job.states.DELETED
+ self.sa_session.add( job )
+ self.sa_session.flush()
+ if job.job_runner_name is not None:
+ # tell the dispatcher to stop the job
+ self.dispatcher.stop( job )
+
+ def put( self, job_id, error_msg=None ):
+ self.queue.put( ( job_id, error_msg ) )
+
+ def shutdown( self ):
+ """Attempts to gracefully shut down the worker thread"""
+ if self.parent_pid != os.getpid():
+ # We're not the real job queue, do nothing
+ return
+ else:
+ log.info( "sending stop signal to worker thread" )
+ self.running = False
+ if not self.track_jobs_in_database:
+ self.queue.put( self.STOP_SIGNAL )
+ self.sleeper.wake()
+ log.info( "job handler stop queue stopped" )
+
+class DefaultJobDispatcher( object ):
+ def __init__( self, app ):
+ self.app = app
+ self.job_runners = {}
+ start_job_runners = ["local"]
+ if app.config.start_job_runners is not None:
+ start_job_runners.extend( [ x.strip() for x in util.listify( app.config.start_job_runners ) ] )
+ if app.config.use_tasked_jobs:
+ start_job_runners.append("tasks")
+ for name in start_job_runners:
+ self._load_plugin( name )
+
+ def _load_plugin( self, name ):
+ module_name = 'galaxy.jobs.runners.' + name
+ try:
+ module = __import__( module_name )
+ except:
+ log.exception( 'Job runner is not loadable: %s' % module_name )
+ return
+ for comp in module_name.split( "." )[1:]:
+ module = getattr( module, comp )
+ if '__all__' not in dir( module ):
+ log.error( 'Runner "%s" does not contain a list of exported classes in __all__' % module_name )
+ return
+ for obj in module.__all__:
+ display_name = ':'.join( ( module_name, obj ) )
+ runner = getattr( module, obj )
+ self.job_runners[name] = runner( self.app )
+ log.debug( 'Loaded job runner: %s' % display_name )
+
+ def __get_runner_name( self, job_wrapper ):
+ if self.app.config.use_tasked_jobs and job_wrapper.tool.parallelism is not None and not isinstance(job_wrapper, TaskWrapper):
+ runner_name = "tasks"
+ else:
+ runner_name = ( job_wrapper.get_job_runner().split(":", 1) )[0]
+ return runner_name
+
+ def put( self, job_wrapper ):
+ try:
+ runner_name = self.__get_runner_name( job_wrapper )
+ if self.app.config.use_tasked_jobs and job_wrapper.tool.parallelism is not None and isinstance(job_wrapper, TaskWrapper):
+ #DBTODO Refactor
+ log.debug( "dispatching task %s, of job %d, to %s runner" %( job_wrapper.task_id, job_wrapper.job_id, runner_name ) )
+ else:
+ log.debug( "dispatching job %d to %s runner" %( job_wrapper.job_id, runner_name ) )
+ self.job_runners[runner_name].put( job_wrapper )
+ except KeyError:
+ log.error( 'put(): (%s) Invalid job runner: %s' % ( job_wrapper.job_id, runner_name ) )
+ job_wrapper.fail( 'Unable to run job due to a misconfiguration of the Galaxy job running system. Please contact a site administrator.' )
+
+ def stop( self, job ):
+ runner_name = ( job.job_runner_name.split(":", 1) )[0]
+ log.debug( "stopping job %d in %s runner" %( job.id, runner_name ) )
+ try:
+ self.job_runners[runner_name].stop_job( job )
+ except KeyError:
+ log.error( 'stop(): (%s) Invalid job runner: %s' % ( job_wrapper.job_id, runner_name ) )
+ # Job and output dataset states have already been updated, so nothing is done here.
+
+ def recover( self, job, job_wrapper ):
+ runner_name = ( job.job_runner_name.split(":", 1) )[0]
+ log.debug( "recovering job %d in %s runner" %( job.id, runner_name ) )
+ try:
+ self.job_runners[runner_name].recover( job, job_wrapper )
+ except KeyError:
+ log.error( 'recover(): (%s) Invalid job runner: %s' % ( job_wrapper.job_id, runner_name ) )
+ job_wrapper.fail( 'Unable to run job due to a misconfiguration of the Galaxy job running system. Please contact a site administrator.' )
+
+ def shutdown( self ):
+ for runner in self.job_runners.itervalues():
+ runner.shutdown()
diff -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f -r 4f6c38ca353861351e286016bbbddc126ca566c4 lib/galaxy/jobs/manager.py
--- /dev/null
+++ b/lib/galaxy/jobs/manager.py
@@ -0,0 +1,270 @@
+"""
+Top-level Galaxy job manager, moves jobs to handler(s)
+"""
+
+import os
+import time
+import random
+import logging
+import threading
+from Queue import Queue, Empty
+
+from sqlalchemy.sql.expression import and_, or_
+
+from galaxy import model
+from galaxy.jobs import handler, Sleeper, NoopQueue
+
+log = logging.getLogger( __name__ )
+
+class JobManager( object ):
+ """
+ Highest level interface to job management.
+
+ TODO: Currently the app accesses "job_queue" and "job_stop_queue" directly.
+ This should be decoupled.
+ """
+ def __init__( self, app ):
+ self.app = app
+ self.job_handler = NoopHandler()
+ if self.app.config.server_name in self.app.config.job_handlers:
+ self.job_handler = handler.JobHandler( app )
+ if self.app.config.server_name == self.app.config.job_manager:
+ job_handler = NoopHandler()
+ # In the case that webapp == manager == handler, pass jobs in memory
+ if not self.app.config.track_jobs_in_database:
+ job_handler = self.job_handler
+ # Otherwise, even if the manager == one of the handlers, its handler will pick up jobs from the database
+ self.job_queue = JobManagerQueue( app, job_handler )
+ self.job_stop_queue = JobManagerStopQueue( app, job_handler )
+ if self.app.config.enable_beta_job_managers:
+ from galaxy.jobs.deferred import DeferredJobQueue
+ self.deferred_job_queue = DeferredJobQueue( app )
+ else:
+ self.job_queue = self.job_stop_queue = NoopQueue()
+ self.job_handler.start()
+ def shutdown( self ):
+ self.job_queue.shutdown()
+ self.job_stop_queue.shutdown()
+ self.job_handler.shutdown()
+
+class JobManagerQueue( object ):
+ """
+ Job manager, waits for jobs to be runnable and then dispatches to a
+ JobHandler.
+ """
+ STOP_SIGNAL = object()
+ def __init__( self, app, job_handler ):
+ self.app = app
+ self.job_handler = job_handler # the (singular) handler if we are passing jobs in memory
+
+ self.sa_session = app.model.context
+ self.job_lock = False
+ # Keep track of the pid that started the job manager, only it
+ # has valid threads
+ self.parent_pid = os.getpid()
+ # Contains new jobs. Note this is not used if track_jobs_in_database is True
+ self.queue = Queue()
+ # Helper for interruptable sleep
+ self.sleeper = Sleeper()
+ self.running = True
+ self.monitor_thread = threading.Thread( target=self.__monitor )
+ # Recover jobs at startup
+ self.__check_jobs_at_startup()
+ # Start the queue
+ self.monitor_thread.start()
+ log.info( "job manager queue started" )
+
+ def __check_jobs_at_startup( self ):
+ """
+ Checks all jobs that are in the 'new', 'queued' or 'running' state in
+ the database and requeues or cleans up as necessary. Only run as the
+ job manager starts.
+ """
+ for job in self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+ .filter( ( ( model.Job.state == model.Job.states.NEW ) \
+ | ( model.Job.state == model.Job.states.RUNNING ) \
+ | ( model.Job.state == model.Job.states.QUEUED ) ) \
+ & ( model.Job.handler == None ) ):
+ if job.tool_id not in self.app.toolbox.tools_by_id:
+ log.warning( "(%s) Tool '%s' removed from tool config, unable to recover job" % ( job.id, job.tool_id ) )
+ JobWrapper( job, self ).fail( 'This tool was disabled before the job completed. Please contact your Galaxy administrator.' )
+ else:
+ job.handler = self.__select_handler( job ) # handler's recovery method will take it from here
+ log.info( "(%d) Job in '%s' state had no handler at job manager startup, assigned '%s' handler" % ( job.id, job.state, job.handler ) )
+ if self.sa_session.dirty:
+ self.sa_session.flush()
+
+ def __monitor( self ):
+ """
+ Continually iterate the waiting jobs and dispatch to a handler
+ """
+ # HACK: Delay until after forking, we need a way to do post fork notification!!!
+ time.sleep( 10 )
+ while self.running:
+ try:
+ self.__monitor_step()
+ except:
+ log.exception( "Exception in monitor_step" )
+ # Sleep
+ self.sleeper.sleep( 1 )
+
+ def __monitor_step( self ):
+ """
+ Called repeatedly by `monitor` to process waiting jobs. Gets any new
+ jobs (either from the database or from its own queue), then assigns a
+ handler.
+ """
+ # Do nothing if the queue is locked
+ if self.job_lock:
+ log.info( 'Job queue is administratively locked, sleeping...' )
+ time.sleep( 10 )
+ return
+ # Pull all new jobs from the queue at once
+ jobs_to_check = []
+ if self.app.config.track_jobs_in_database:
+ # Clear the session so we get fresh states for job and all datasets
+ self.sa_session.expunge_all()
+ # Fetch all new jobs
+ jobs_to_check = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+ .filter( ( model.Job.state == model.Job.states.NEW ) \
+ & ( model.Job.handler == None ) ).all()
+ else:
+ # Get job objects and append to watch queue for any which were
+ # previously waiting
+ try:
+ while 1:
+ message = self.queue.get_nowait()
+ if message is self.STOP_SIGNAL:
+ return
+ # Unpack the message
+ job_id, tool_id = message
+ # Get the job object and append to watch queue
+ jobs_to_check.append( self.sa_session.query( model.Job ).get( job_id ) )
+ except Empty:
+ pass
+
+ for job in jobs_to_check:
+ job.handler = self.__select_handler( job )
+ log.debug( "(%s) Job assigned to handler '%s'" % ( job.id, job.handler ) )
+ self.sa_session.add( job )
+
+ # If tracking in the database, handlers will pick up the job now
+ self.sa_session.flush()
+
+ time.sleep( 5 )
+
+ # This only does something in the case that there is only one handler and it is this Galaxy process
+ for job in jobs_to_check:
+ self.job_handler.job_queue.put( job.id, job.tool_id )
+
+ def __select_handler( self, job ):
+ # TODO: handler selection based on params, tool, etc.
+ return random.choice( self.app.config.job_handlers )
+
+ def put( self, job_id, tool ):
+ """Add a job to the queue (by job identifier)"""
+ if not self.app.config.track_jobs_in_database:
+ self.queue.put( ( job_id, tool.id ) )
+ self.sleeper.wake()
+
+ def shutdown( self ):
+ """Attempts to gracefully shut down the worker thread"""
+ if self.parent_pid != os.getpid():
+ # We're not the real job queue, do nothing
+ return
+ else:
+ log.info( "sending stop signal to worker thread" )
+ self.running = False
+ if not self.app.config.track_jobs_in_database:
+ self.queue.put( self.STOP_SIGNAL )
+ self.sleeper.wake()
+ log.info( "job manager queue stopped" )
+ self.dispatcher.shutdown()
+
+class JobManagerStopQueue( object ):
+ """
+ A queue for jobs which need to be terminated prematurely.
+ """
+ STOP_SIGNAL = object()
+ def __init__( self, app, job_handler ):
+ self.app = app
+ self.job_handler = job_handler
+
+ self.sa_session = app.model.context
+
+ # Keep track of the pid that started the job manager, only it
+ # has valid threads
+ self.parent_pid = os.getpid()
+ # Contains new jobs. Note this is not used if track_jobs_in_database is True
+ self.queue = Queue()
+
+ # Contains jobs that are waiting (only use from monitor thread)
+ self.waiting = []
+
+ # Helper for interruptable sleep
+ self.sleeper = Sleeper()
+ self.running = True
+ self.monitor_thread = threading.Thread( target=self.monitor )
+ self.monitor_thread.start()
+ log.info( "job manager stop queue started" )
+
+ def monitor( self ):
+ """
+ Continually iterate the waiting jobs, stop any that are found.
+ """
+ # HACK: Delay until after forking, we need a way to do post fork notification!!!
+ time.sleep( 10 )
+ while self.running:
+ try:
+ self.monitor_step()
+ except:
+ log.exception( "Exception in monitor_step" )
+ # Sleep
+ self.sleeper.sleep( 1 )
+
+ def monitor_step( self ):
+ """
+ Called repeatedly by `monitor` to stop jobs.
+ """
+ jobs_to_check = []
+ # Pull from the queue even if tracking in the database (in the case of Administrative stopped jobs)
+ try:
+ while 1:
+ message = self.queue.get_nowait()
+ if message is self.STOP_SIGNAL:
+ return
+ # Unpack the message
+ job_id, error_msg = message
+ # Get the job object and append to watch queue
+ jobs_to_check.append( ( self.sa_session.query( model.Job ).get( job_id ), error_msg ) )
+ except Empty:
+ pass
+
+ # If tracking in the database, the handler will pick up the stop itself. Otherwise, notify the handler.
+ for job, error_msg in jobs_to_check:
+ self.job_handler.job_stop_queue.put( job.id, error_msg )
+
+ def put( self, job_id, error_msg=None ):
+ self.queue.put( ( job_id, error_msg ) )
+
+ def shutdown( self ):
+ """Attempts to gracefully shut down the worker thread"""
+ if self.parent_pid != os.getpid():
+ # We're not the real job queue, do nothing
+ return
+ else:
+ log.info( "sending stop signal to worker thread" )
+ self.running = False
+ if not self.app.config.track_jobs_in_database:
+ self.queue.put( self.STOP_SIGNAL )
+ self.sleeper.wake()
+ log.info( "job manager stop queue stopped" )
+
+class NoopHandler( object ):
+ def __init__( self, *args, **kwargs ):
+ self.job_queue = NoopQueue()
+ self.job_stop_queue = NoopQueue()
+ def start( self ):
+ pass
+ def shutdown( self, *args ):
+ pass
diff -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f -r 4f6c38ca353861351e286016bbbddc126ca566c4 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -132,6 +132,7 @@
self.job_runner_external_id = None
self.post_job_actions = []
self.imported = False
+ self.handler = None
def add_parameter( self, name, value ):
self.parameters.append( JobParameter( name, value ) )
@@ -171,14 +172,11 @@
if not dataset.deleted:
return False
return True
- def mark_deleted( self, enable_job_running=True, track_jobs_in_database=False ):
+ def mark_deleted( self, track_jobs_in_database=False ):
"""
Mark this job as deleted, and mark any output datasets as discarded.
"""
- # This could be handled with *just* track_jobs_in_database, but I
- # didn't want to make setting track_jobs_in_database required in
- # non-runner configs.
- if not enable_job_running or track_jobs_in_database:
+ if track_jobs_in_database:
self.state = Job.states.DELETED_NEW
else:
self.state = Job.states.DELETED
diff -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f -r 4f6c38ca353861351e286016bbbddc126ca566c4 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -421,7 +421,8 @@
Column( "job_runner_external_id", String( 255 ) ),
Column( "object_store_id", TrimmedString( 255 ), index=True ),
Column( "imported", Boolean, default=False, index=True ),
- Column( "params", TrimmedString(255), index=True ) )
+ Column( "params", TrimmedString(255), index=True ),
+ Column( "handler", TrimmedString( 255 ), index=True ) )
JobParameter.table = Table( "job_parameter", metadata,
Column( "id", Integer, primary_key=True ),
diff -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f -r 4f6c38ca353861351e286016bbbddc126ca566c4 lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py
@@ -0,0 +1,49 @@
+"""
+Migration script to create "handler" column in job table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import logging
+log = logging.getLogger( __name__ )
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+# Column to add.
+handler_col = Column( "handler", TrimmedString(255), index=True )
+
+def display_migration_details():
+ print ""
+ print "This migration script adds a 'handler' column to the Job table."
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+
+ # Add column to Job table.
+ try:
+ Job_table = Table( "job", metadata, autoload=True )
+ handler_col.create( Job_table )
+ assert handler_col is Job_table.c.handler
+
+ except Exception, e:
+ print str(e)
+ log.debug( "Adding column 'handler' to job table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+
+ # Drop column from Job table.
+ try:
+ Job_table = Table( "job", metadata, autoload=True )
+ handler_col = Job_table.c.handler
+ handler_col.drop()
+ except Exception, e:
+ log.debug( "Dropping column 'handler' from job table failed: %s" % ( str( e ) ) )
diff -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f -r 4f6c38ca353861351e286016bbbddc126ca566c4 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -2381,8 +2381,8 @@
deleted = []
msg = None
status = None
- if not trans.app.config.get_bool( "enable_job_running", True ):
- return trans.show_error_message( 'This Galaxy instance is not configured to run jobs. If using multiple servers, please directly access the job running instance to manage jobs.' )
+ if not self.app.config.job_manager != self.app.config.server_name:
+ return trans.show_error_message( 'This Galaxy instance is not the job manager. If using multiple servers, please directly access the job manager instance to manage jobs.' )
job_ids = util.listify( stop )
if job_ids and stop_msg in [ None, '' ]:
msg = 'Please enter an error message to display to the user describing why the job was terminated'
diff -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f -r 4f6c38ca353861351e286016bbbddc126ca566c4 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -904,8 +904,7 @@
if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]:
# Are *all* of the job's other output datasets deleted?
if job.check_if_output_datasets_deleted():
- job.mark_deleted( self.app.config.get_bool( 'enable_job_running', True ),
- self.app.config.get_bool( 'track_jobs_in_database', False ) )
+ job.mark_deleted( self.app.config.track_jobs_in_database )
self.app.job_manager.job_stop_queue.put( job.id )
trans.sa_session.flush()
except Exception, e:
diff -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f -r 4f6c38ca353861351e286016bbbddc126ca566c4 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -542,14 +542,25 @@
# -- Job Execution
-# If running multiple Galaxy processes, one can be designated as the job
-# runner. For more information, see:
-# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Web%20Application%20Scal…
-#enable_job_running = True
+# To increase performance of job execution and the web interface, you can
+# separate Galaxy into multiple processes. There is more than one way to do
+# this, and they are explained in detail in the documentation:
+#
+# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Web%20Application%20Scal…
+#
+# By default, Galaxy manages and executes jobs from within a single process and
+# notifies itself of new jobs via in-memory queues. If you change job_manager
+# and job_handlers from their default values, notification will instead be done
+# using the `state` and `handler` columns of the job table in the database.
-# Should jobs be tracked through the database, rather than in memory.
-# Necessary if you're running the load balanced setup.
-#track_jobs_in_database = False
+# Identify the server_name (the string following server: at the top of this
+# file) which should be designated as the job manager (only one):
+#job_manager = main
+
+# Identify the server_name(s) which should be designated as job handlers
+# (responsible for starting, tracking, finishing, and cleaning up jobs) as a
+# comma-separated list.
+#job_handlers = main
# This enables splitting of jobs into tasks, if specified by the particular tool config.
# This is a new feature and not recommended for production servers yet.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
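
The configuration comments in the universe_wsgi.ini.sample hunk above describe the new multi-process scheme: one server_name is designated job_manager, a comma-separated job_handlers list names the processes that run jobs, and assignments are communicated through the job table's new `handler` column (added by migration 0094 above) rather than through in-memory queues. What follows is only a minimal illustrative sketch of that routing idea, assuming invented names (JobRouter, assign_handler) that are not Galaxy code:

    import random

    class JobRouter(object):
        """Toy router mirroring the job_manager / job_handlers options described above."""
        def __init__(self, server_name, job_manager='main', job_handlers='main'):
            self.server_name = server_name
            self.job_manager = job_manager
            # job_handlers is configured as a comma-separated list of server names.
            self.job_handlers = [ name.strip() for name in job_handlers.split(',') if name.strip() ]
        def is_job_manager(self):
            # Only the process whose server_name matches job_manager should manage jobs.
            return self.server_name == self.job_manager
        def assign_handler(self):
            # Pick a handler for a new job; the chosen server_name would be written to
            # the job table's `handler` column so that process can pick the job up.
            return random.choice(self.job_handlers)

    router = JobRouter(server_name='main', job_manager='main', job_handlers='handler0,handler1')
    print(router.is_job_manager())   # True: this process is the job manager
    print(router.assign_handler())   # e.g. 'handler1'
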
commit/galaxy-central: james_taylor: style: various improvements for menubutton styles
by Bitbucket 29 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/528ca90fbd3e/
changeset: 528ca90fbd3e
user: james_taylor
date: 2012-03-29 22:27:22
summary: style: various improvements for menubutton styles
affected #: 2 files
diff -r 7e82d5412edc1617da7ebdabd0d511a96348bcbc -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f static/june_2007_style/base.less
--- a/static/june_2007_style/base.less
+++ b/static/june_2007_style/base.less
@@ -1222,6 +1222,10 @@
.menubutton {
.btn();
+ &:hover { .btn.hover(); }
+ &:active { .btn.active(); }
+ &:focus { .tab-focus(); }
+
display: inline-block;
cursor: pointer;
position: relative;
@@ -1235,11 +1239,21 @@
// padding: 1px 0.25em;
// margin: -1px -0.25em;
+ a {
+ text-decoration: none;
+ }
+
.label {
position: relative;
- // display: block;
+ display: inline-block;
border-right: none;
text-decoration: none;
+ text-align: left;
+ // The following properties truncate the text and force the button to have one line
+ max-height: 2*@baseLineHeight;
+ line-height: @baseLineHeight;
+ overflow: hidden;
+ text-overflow: ellipsis;
}
&.popup .label {
@@ -1247,21 +1261,16 @@
padding-right: 6px;
}
- &.popup {
+ &.popup, &.popup.split {
+ padding-right: 18px;
&:after {
margin-top: 6px;
+ position: absolute;
+ top: 2px;
+ right: 6px;
.caret();
}
}
-
- &.popup.split {
- &:after {
- margin-top: 6px;
- margin-left: 0px;
- .caret();
- }
- }
-
}
// A split menu button, the main button has an action, the arrow causes the
diff -r 7e82d5412edc1617da7ebdabd0d511a96348bcbc -r 528ca90fbd3e629bbe1eba92dc0faffb600a556f static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -661,10 +661,13 @@
.action-button:active{background-image:none;-webkit-box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15),0 1px 2px rgba(0, 0, 0, 0.05);-moz-box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15),0 1px 2px rgba(0, 0, 0, 0.05);box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15),0 1px 2px rgba(0, 0, 0, 0.05);background-color:#e6e6e6;background-color:#d9d9d9 \9;outline:0;}
.menubutton{display:inline-block;padding:2px 10px 2px;font-size:12px;line-height:16px;color:#111111;text-align:center;text-shadow:0 1px 1px rgba(255, 255, 255, 0.75);background-color:#fafafa;background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), color-stop(25%, #ffffff), to(#e6e6e6));background-image:-webkit-linear-gradient(#ffffff, #ffffff 25%, #e6e6e6);background-image:-moz-linear-gradient(top, #ffffff, #ffffff 25%, #e6e6e6);background-image:-ms-linear-gradient(#ffffff, #ffffff 25%, #e6e6e6);background-image:-o-linear-gradient(#ffffff, #ffffff 25%, #e6e6e6);background-image:linear-gradient(#ffffff, #ffffff 25%, #e6e6e6);background-repeat:no-repeat;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffff', endColorstr='#e6e6e6', GradientType=0);border:1px solid #999999;border-bottom-color:#888888;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.2),0 1px 2px rgba(0, 0, 0, 0.05);-moz-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.2),0 1px 2px rgba(0, 0, 0, 0.05);box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.2),0 1px 2px rgba(0, 0, 0, 0.05);cursor:pointer;*margin-left:.3em;display:inline-block;cursor:pointer;position:relative;-webkit-user-select:none;-moz-user-select:none;-o-user-select:none;user-select:none;}.menubutton:first-child{*margin-left:0;}
.menubutton [class^="ficon"],.menubutton [class*=" ficon"]{line-height:.9em;}
-.menubutton .label{position:relative;border-right:none;text-decoration:none;}
+.menubutton:hover{color:#111111;text-decoration:none;background-color:#e6e6e6;background-position:0 -15px;-webkit-transition:background-position 0.1s linear;-moz-transition:background-position 0.1s linear;-ms-transition:background-position 0.1s linear;-o-transition:background-position 0.1s linear;transition:background-position 0.1s linear;}
+.menubutton:active{background-image:none;-webkit-box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15),0 1px 2px rgba(0, 0, 0, 0.05);-moz-box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15),0 1px 2px rgba(0, 0, 0, 0.05);box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15),0 1px 2px rgba(0, 0, 0, 0.05);background-color:#e6e6e6;background-color:#d9d9d9 \9;outline:0;}
+.menubutton:focus{outline:thin dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px;}
+.menubutton a{text-decoration:none;}
+.menubutton .label{position:relative;display:inline-block;border-right:none;text-decoration:none;text-align:left;max-height:32px;line-height:16px;overflow:hidden;text-overflow:ellipsis;}
.menubutton.popup .label{border-right:solid #999999 1px;padding-right:6px;}
-.menubutton.popup:after{margin-top:6px;display:inline-block;width:0;height:0;text-indent:-99999px;*text-indent:0;vertical-align:top;border-left:4px solid transparent;border-right:4px solid transparent;border-top:4px solid #000000;opacity:0.3;filter:alpha(opacity=30);content:"\2193";}
-.menubutton.popup.split:after{margin-top:6px;margin-left:0px;display:inline-block;width:0;height:0;text-indent:-99999px;*text-indent:0;vertical-align:top;border-left:4px solid transparent;border-right:4px solid transparent;border-top:4px solid #000000;opacity:0.3;filter:alpha(opacity=30);content:"\2193";}
+.menubutton.popup,.menubutton.popup.split{padding-right:18px;}.menubutton.popup:after,.menubutton.popup.split:after{margin-top:6px;position:absolute;top:2px;right:6px;display:inline-block;width:0;height:0;text-indent:-99999px;*text-indent:0;vertical-align:top;border-left:4px solid transparent;border-right:4px solid transparent;border-top:4px solid #000000;opacity:0.3;filter:alpha(opacity=30);content:"\2193";}
div.popmenu-wrapper{position:absolute;top:100%;z-index:20000;}div.popmenu-wrapper ul.dropdown-menu{display:block;position:relative;float:none;}
ul.dropdown-menu a{text-decoration:none;}
ul.dropdown-menu li.head>a{text-shadow:0 1px 0 rgba(255, 255, 255, 0.5);font-size:11px;font-weight:bold;line-height:16px;color:#999999;text-transform:uppercase;}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7e82d5412edc/
changeset: 7e82d5412edc
user: james_taylor
date: 2012-03-29 20:52:58
summary: style: fix brand overflowing
affected #: 3 files
diff -r 59f0cdeb9afcd899727ba2d2b5b4f3d0e6383edd -r 7e82d5412edc1617da7ebdabd0d511a96348bcbc static/june_2007_style/base.less
--- a/static/june_2007_style/base.less
+++ b/static/june_2007_style/base.less
@@ -407,14 +407,26 @@
}
.title {
- .navbar.brand();
+
+ position: absolute;
+ left: 0;
+ top: 0;
+
font-family: verdana;
font-weight: bold;
+ font-size: 20px;
+ line-height: 1;
color: white;
// Override margin and padding due to shorter navbar height
padding: 5px 20px 12px;
margin-left: -15px;
z-index: 2000;
+
+ img {
+ display: inline;
+ width: 26px;
+ vertical-align: top;
+ }
a {
color: white;
text-decoration: none;
diff -r 59f0cdeb9afcd899727ba2d2b5b4f3d0e6383edd -r 7e82d5412edc1617da7ebdabd0d511a96348bcbc static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -225,7 +225,7 @@
.navbar-form .radio,.navbar-form .checkbox{margin-top:5px;}
.navbar-form input[type="image"],.navbar-form input[type="checkbox"],.navbar-form input[type="radio"]{margin-top:3px;}
.navbar-search{position:relative;float:left;margin-top:6px;margin-bottom:0;}.navbar-search .search-query{padding:4px 9px;font-family:"Lucida Grande",verdana,arial,helvetica,sans-serif;font-size:13px;font-weight:normal;line-height:1;color:#ffffff;color:rgba(255, 255, 255, 0.75);background:#666;background:rgba(255, 255, 255, 0.3);border:1px solid #111;-webkit-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1),0 1px 0px rgba(255, 255, 255, 0.15);-moz-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1),0 1px 0px rgba(255, 255, 255, 0.15);box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1),0 1px 0px rgba(255, 255, 255, 0.15);-webkit-transition:none;-moz-transition:none;-ms-transition:none;-o-transition:none;transition:none;}.navbar-search .search-query :-moz-placeholder{color:#eeeeee;}
-.navbar-search .search-query ::-webkit-input-placeholder{color:#eeeeee;}
+.navbar-search .search-query::-webkit-input-placeholder{color:#eeeeee;}
.navbar-search .search-query:hover{color:#ffffff;background-color:#999999;background-color:rgba(255, 255, 255, 0.5);}
.navbar-search .search-query:focus,.navbar-search .search-query.focused{padding:5px 10px;color:#333333;text-shadow:0 1px 0 #ffffff;background-color:#ffffff;border:0;-webkit-box-shadow:0 0 3px rgba(0, 0, 0, 0.15);-moz-box-shadow:0 0 3px rgba(0, 0, 0, 0.15);box-shadow:0 0 3px rgba(0, 0, 0, 0.15);outline:0;}
.navbar-fixed-top{position:fixed;top:0;right:0;left:0;z-index:1030;}
@@ -535,7 +535,8 @@
#masthead .btn-group .btn{margin-top:0;}
#masthead .nav>li>a{padding:6px 10px 8px;cursor:pointer;}#masthead .nav>li>a:hover{color:gold;}
#masthead .dropdown-menu a,#masthead .dropdown-menu a:hover{text-decoration:none;}
-#masthead .title{float:left;display:block;padding:8px 20px 12px;margin-left:-20px;font-size:20px;font-weight:200;line-height:1;color:#ffffff;font-family:verdana;font-weight:bold;color:white;padding:5px 20px 12px;margin-left:-15px;z-index:2000;}#masthead .title a{color:white;text-decoration:none;}
+#masthead .title{position:absolute;left:0;top:0;font-family:verdana;font-weight:bold;font-size:20px;line-height:1;color:white;padding:5px 20px 12px;margin-left:-15px;z-index:2000;}#masthead .title img{display:inline;width:26px;vertical-align:top;}
+#masthead .title a{color:white;text-decoration:none;}
#masthead .masthead-inner{padding-left:20px;padding-right:20px;background-color:#303239;background-image:-moz-linear-gradient(top, #333333, #2c3143);background-image:-ms-linear-gradient(top, #333333, #2c3143);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#333333), to(#2c3143));background-image:-webkit-linear-gradient(top, #333333, #2c3143);background-image:-o-linear-gradient(top, #333333, #2c3143);background-image:linear-gradient(top, #333333, #2c3143);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#333333', endColorstr='#2c3143', GradientType=0);-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:0 1px 3px rgba(0, 0, 0, 0.25),inset 0 -1px 0 rgba(0, 0, 0, 0.1);-moz-box-shadow:0 1px 3px rgba(0, 0, 0, 0.25),inset 0 -1px 0 rgba(0, 0, 0, 0.1);box-shadow:0 1px 3px rgba(0, 0, 0, 0.25),inset 0 -1px 0 rgba(0, 0, 0, 0.1);padding-left:0;padding-right:0;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;height:32px;}
#masthead a:hover{text-decoration:underline;}
.quota-meter-container{position:absolute;top:0;right:0;height:32px;}
diff -r 59f0cdeb9afcd899727ba2d2b5b4f3d0e6383edd -r 7e82d5412edc1617da7ebdabd0d511a96348bcbc templates/webapps/galaxy/base_panels.mako
--- a/templates/webapps/galaxy/base_panels.mako
+++ b/templates/webapps/galaxy/base_panels.mako
@@ -184,12 +184,12 @@
</div>
## Logo, layered over tabs to be clickable
- <div class="title" style="position: absolute; top: 0; left: 0; white-space: nowrap;">
+ <div class="title"><a href="${app.config.get( 'logo_url', '/' )}">
- <img border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}" style="display: inline; width: 26px; vertical-align: top;">
+ <img border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}">
Galaxy
%if app.config.brand:
- <span class='brand'>/ ${app.config.brand}</span>
+ <span>/ ${app.config.brand}</span>
%endif
</a></div>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/59f0cdeb9afc/
changeset: 59f0cdeb9afc
user: greg
date: 2012-03-29 21:02:36
summary: Enhance the functional test framework to support testing tools contained in installed repositories that were not migrated from the Galaxy distribution. To test these tools, use: sh run_functional_tests.sh --installed
affected #: 4 files
diff -r c510097f7018dbc177513a62c0ca46b4cace0c86 -r 59f0cdeb9afcd899727ba2d2b5b4f3d0e6383edd run_functional_tests.sh
--- a/run_functional_tests.sh
+++ b/run_functional_tests.sh
@@ -29,6 +29,15 @@
else
python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --migrated
fi
+elif [ $1 = '--installed' ]; then
+ if [ ! $2 ]; then
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --installed
+ elif [ $2 = '-id' ]; then
+ # TODO: This option is not tested...
+ python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$3 --with-nosehtml --html-report-file run_functional_tests.html --installed
+ else
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --installed
+ fi
else
python ./scripts/functional_tests.py -v --with-nosehtml --html-report-file run_functional_tests.html $1
fi
diff -r c510097f7018dbc177513a62c0ca46b4cace0c86 -r 59f0cdeb9afcd899727ba2d2b5b4f3d0e6383edd scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -48,21 +48,65 @@
default_galaxy_locales = 'en'
default_galaxy_test_file_dir = "test-data"
migrated_tool_panel_config = 'migrated_tools_conf.xml'
+installed_tool_panel_configs = [ 'shed_tool_conf.xml' ]
+
+def parse_tool_panel_config( config, shed_tools_dict ):
+ """
+ Parse a shed-related tool panel config to generate the shed_tools_dict. This only happens when testing tools installed from the tool shed.
+ """
+ last_galaxy_test_file_dir = None
+ last_tested_repository_name = None
+ last_tested_changeset_revision = None
+ tree = util.parse_xml( config )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'tool':
+ galaxy_test_file_dir, \
+ last_tested_repository_name, \
+ last_tested_changeset_revision = get_installed_repository_info( elem,
+ last_galaxy_test_file_dir,
+ last_tested_repository_name,
+ last_tested_changeset_revision )
+ if galaxy_test_file_dir:
+ if galaxy_test_file_dir != last_galaxy_test_file_dir:
+ if not os.path.isabs( galaxy_test_file_dir ):
+ galaxy_test_file_dir = os.path.join( os.getcwd(), galaxy_test_file_dir )
+ guid = elem.get( 'guid' )
+ shed_tools_dict[ guid ] = galaxy_test_file_dir
+ last_galaxy_test_file_dir = galaxy_test_file_dir
+ elif elem.tag == 'section':
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ galaxy_test_file_dir, \
+ last_tested_repository_name, \
+ last_tested_changeset_revision = get_installed_repository_info( section_elem,
+ last_galaxy_test_file_dir,
+ last_tested_repository_name,
+ last_tested_changeset_revision )
+ if galaxy_test_file_dir:
+ if galaxy_test_file_dir != last_galaxy_test_file_dir:
+ if not os.path.isabs( galaxy_test_file_dir ):
+ galaxy_test_file_dir = os.path.join( os.getcwd(), galaxy_test_file_dir )
+ guid = section_elem.get( 'guid' )
+ shed_tools_dict[ guid ] = galaxy_test_file_dir
+ last_galaxy_test_file_dir = galaxy_test_file_dir
+ return shed_tools_dict
def get_installed_repository_info( elem, last_galaxy_test_file_dir, last_tested_repository_name, last_tested_changeset_revision ):
"""
Return the GALAXY_TEST_FILE_DIR, the containing repository name and the change set revision for the tool elem.
- This only happens when testing tools eliminated from the distribution and now installed from the tool shed.
+ This only happens when testing tools installed from the tool shed.
"""
tool_config_path = elem.get( 'file' )
installed_tool_path_items = tool_config_path.split( '/repos/' )
sans_shed = installed_tool_path_items[ 1 ]
path_items = sans_shed.split( '/' )
+ repository_owner = path_items[ 0 ]
repository_name = path_items[ 1 ]
changeset_revision = path_items[ 2 ]
if repository_name != last_tested_repository_name or changeset_revision != last_tested_changeset_revision:
# Locate the test-data directory.
- installed_tool_path = os.path.join( installed_tool_path_items[ 0 ], 'repos', 'devteam', repository_name, changeset_revision )
+ installed_tool_path = os.path.join( installed_tool_path_items[ 0 ], 'repos', repository_owner, repository_name, changeset_revision )
for root, dirs, files in os.walk( installed_tool_path ):
if 'test-data' in dirs:
return os.path.join( root, 'test-data' ), repository_name, changeset_revision
@@ -92,12 +136,12 @@
if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_galaxy_locales
testing_migrated_tools = '--migrated' in sys.argv
+ testing_installed_tools = '--installed' in sys.argv
- if testing_migrated_tools:
+ if testing_migrated_tools or testing_installed_tools:
sys.argv.pop()
# Store a jsonified dictionary of tool_id : GALAXY_TEST_FILE_DIR pairs.
- galaxy_migrated_tools_file = 'migrated_tools_dict'
- migrated_tools_dict = {}
+ galaxy_tool_shed_test_file = 'shed_tools_dict'
# We need the upload tool for functional tests, so we'll create a temporary tool panel config that defines it.
fd, tmp_tool_panel_conf = tempfile.mkstemp()
os.write( fd, '<?xml version="1.0"?>\n' )
@@ -127,7 +171,7 @@
tool_data_table_config_path = 'tool_data_table_conf.xml'
tool_dependency_dir = os.environ.get( 'GALAXY_TOOL_DEPENDENCY_DIR', None )
use_distributed_object_store = os.environ.get( 'GALAXY_USE_DISTRIBUTED_OBJECT_STORE', False )
-
+
if start_server:
psu_production = False
galaxy_test_proxy_port = None
@@ -295,54 +339,32 @@
os.environ[ 'GALAXY_TEST_SAVE' ] = galaxy_test_save
# Pass in through script setenv, will leave a copy of ALL test validate files
os.environ[ 'GALAXY_TEST_HOST' ] = galaxy_test_host
- if testing_migrated_tools:
- last_galaxy_test_file_dir = None
- last_tested_repository_name = None
- last_tested_changeset_revision = None
- tree = util.parse_xml( migrated_tool_panel_config )
- root = tree.getroot()
- migrated_tool_path = root.get( 'tool_path' )
- counter = 0
- for elem in root:
- if elem.tag == 'tool':
- galaxy_test_file_dir, \
- last_tested_repository_name, \
- last_tested_changeset_revision = get_installed_repository_info( elem,
- last_galaxy_test_file_dir,
- last_tested_repository_name,
- last_tested_changeset_revision )
- if galaxy_test_file_dir:
- if galaxy_test_file_dir != last_galaxy_test_file_dir:
- if not os.path.isabs( galaxy_test_file_dir ):
- galaxy_test_file_dir = os.path.join( os.getcwd(), galaxy_test_file_dir )
- guid = elem.get( 'guid' )
- migrated_tools_dict[ guid ] = galaxy_test_file_dir
- last_galaxy_test_file_dir = galaxy_test_file_dir
- elif elem.tag == 'section':
- for section_elem in elem:
- if section_elem.tag == 'tool':
- galaxy_test_file_dir, \
- last_tested_repository_name, \
- last_tested_changeset_revision = get_installed_repository_info( section_elem,
- last_galaxy_test_file_dir,
- last_tested_repository_name,
- last_tested_changeset_revision )
- if galaxy_test_file_dir:
- if galaxy_test_file_dir != last_galaxy_test_file_dir:
- if not os.path.isabs( galaxy_test_file_dir ):
- galaxy_test_file_dir = os.path.join( os.getcwd(), galaxy_test_file_dir )
- guid = section_elem.get( 'guid' )
- migrated_tools_dict[ guid ] = galaxy_test_file_dir
- last_galaxy_test_file_dir = galaxy_test_file_dir
- # Persist the migrated_tools_dict to the galaxy_migrated_tools_file.
- migrated_tools_file = open( galaxy_migrated_tools_file, 'w' )
- migrated_tools_file.write( to_json_string( migrated_tools_dict ) )
- migrated_tools_file.close()
- if not os.path.isabs( galaxy_migrated_tools_file ):
- galaxy_migrated_tools_file = os.path.join( os.getcwd(), galaxy_migrated_tools_file )
- os.environ[ 'GALAXY_MIGRATED_TOOLS_FILE' ] = galaxy_migrated_tools_file
+ if testing_migrated_tools or testing_installed_tools:
+ shed_tools_dict = {}
+ if testing_migrated_tools:
+ shed_tools_dict = parse_tool_panel_config( migrated_tool_panel_config, shed_tools_dict )
+ elif testing_installed_tools:
+ for shed_tool_config in installed_tool_panel_configs:
+ shed_tools_dict = parse_tool_panel_config( shed_tool_config, shed_tools_dict )
+ # Persist the shed_tools_dict to the galaxy_tool_shed_test_file.
+ shed_tools_file = open( galaxy_tool_shed_test_file, 'w' )
+ shed_tools_file.write( to_json_string( shed_tools_dict ) )
+ shed_tools_file.close()
+ if not os.path.isabs( galaxy_tool_shed_test_file ):
+ galaxy_tool_shed_test_file = os.path.join( os.getcwd(), galaxy_tool_shed_test_file )
+ os.environ[ 'GALAXY_TOOL_SHED_TEST_FILE' ] = galaxy_tool_shed_test_file
+ if testing_installed_tools:
+ # Eliminate the migrated_tool_panel_config from the app's tool_configs, append the list of installed_tool_panel_configs,
+ # and reload the app's toolbox.
+ relative_migrated_tool_panel_config = os.path.join( app.config.root, migrated_tool_panel_config )
+ tool_configs = app.config.tool_configs
+ if relative_migrated_tool_panel_config in tool_configs:
+ tool_configs.remove( relative_migrated_tool_panel_config )
+ for installed_tool_panel_config in installed_tool_panel_configs:
+ tool_configs.append( installed_tool_panel_config )
+ app.toolbox = tools.ToolBox( tool_configs, app.config.tool_path, app )
functional.test_toolbox.toolbox = app.toolbox
- functional.test_toolbox.build_tests( testing_migrated_tools=True )
+ functional.test_toolbox.build_tests( testing_shed_tools=True )
test_config = nose.config.Config( env=os.environ, ignoreFiles=ignore_files, plugins=nose.plugins.manager.DefaultPluginManager() )
test_config.configure( sys.argv )
result = run_tests( test_config )
@@ -352,9 +374,9 @@
except:
log.info( "Unable to remove temporary file: %s" % tmp_tool_panel_conf )
try:
- os.unlink( galaxy_migrated_tools_file )
+ os.unlink( galaxy_tool_shed_test_file )
except:
- log.info( "Unable to remove file: %s" % galaxy_migrated_tools_file )
+ log.info( "Unable to remove file: %s" % galaxy_tool_shed_test_file )
else:
functional.test_toolbox.toolbox = app.toolbox
functional.test_toolbox.build_tests()
diff -r c510097f7018dbc177513a62c0ca46b4cace0c86 -r 59f0cdeb9afcd899727ba2d2b5b4f3d0e6383edd test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -36,14 +36,14 @@
self.port = os.environ.get( 'GALAXY_TEST_PORT' )
self.url = "http://%s:%s" % ( self.host, self.port )
self.file_dir = os.environ.get( 'GALAXY_TEST_FILE_DIR', None )
- self.migrated_tools_file = os.environ.get( 'GALAXY_MIGRATED_TOOLS_FILE', None )
- if self.migrated_tools_file:
- f = open( self.migrated_tools_file, 'r' )
+ self.tool_shed_test_file = os.environ.get( 'GALAXY_TOOL_SHED_TEST_FILE', None )
+ if self.tool_shed_test_file:
+ f = open( self.tool_shed_test_file, 'r' )
text = f.read()
f.close()
- self.migrated_tools_dict = from_json_string( text )
+ self.shed_tools_dict = from_json_string( text )
else:
- self.migrated_tools_dict = {}
+ self.shed_tools_dict = {}
self.keepOutdir = os.environ.get( 'GALAXY_TEST_SAVE', '' )
if self.keepOutdir > '':
try:
@@ -169,9 +169,9 @@
if line_diff_count > lines_diff:
raise AssertionError, "Failed to find '%s' in history data. (lines_diff=%i):\n" % ( contains, lines_diff )
- def get_filename( self, filename, migrated_tool_id=None ):
- if migrated_tool_id and self.migrated_tools_dict:
- file_dir = self.migrated_tools_dict[ migrated_tool_id ]
+ def get_filename( self, filename, shed_tool_id=None ):
+ if shed_tool_id and self.shed_tools_dict:
+ file_dir = self.shed_tools_dict[ shed_tool_id ]
if not file_dir:
file_dir = self.file_dir
else:
@@ -183,9 +183,9 @@
filename = os.path.join( *path )
file(filename, 'wt').write(buffer.getvalue())
- def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)', space_to_tab=False, metadata=None, composite_data=None, migrated_tool_id=None ):
+ def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)', space_to_tab=False, metadata=None, composite_data=None, shed_tool_id=None ):
"""
- Uploads a file. If migrated_tool_id has a value, we're testing tools migrated from the distribution to the tool shed,
+ Uploads a file. If shed_tool_id has a value, we're testing tools migrated from the distribution to the tool shed,
so the tool-data directory of test data files is contained in the installed tool shed repository.
"""
self.visit_url( "%s/tool_runner?tool_id=upload1" % self.url )
@@ -197,11 +197,11 @@
tc.fv( "1", "files_metadata|%s" % elem.get( 'name' ), elem.get( 'value' ) )
if composite_data:
for i, composite_file in enumerate( composite_data ):
- filename = self.get_filename( composite_file.get( 'value' ), migrated_tool_id=migrated_tool_id )
+ filename = self.get_filename( composite_file.get( 'value' ), shed_tool_id=shed_tool_id )
tc.formfile( "1", "files_%i|file_data" % i, filename )
tc.fv( "1", "files_%i|space_to_tab" % i, composite_file.get( 'space_to_tab', False ) )
else:
- filename = self.get_filename( filename, migrated_tool_id=migrated_tool_id )
+ filename = self.get_filename( filename, shed_tool_id=shed_tool_id )
tc.formfile( "1", "file_data", filename )
tc.fv( "1", "space_to_tab", space_to_tab )
tc.submit("runtool_btn")
@@ -653,7 +653,7 @@
fd,temp_prefix = tempfile.mkstemp(prefix='tmp',suffix=suffix)
return temp_prefix
- def verify_dataset_correctness( self, filename, hid=None, wait=True, maxseconds=120, attributes=None, migrated_tool_id=None ):
+ def verify_dataset_correctness( self, filename, hid=None, wait=True, maxseconds=120, attributes=None, shed_tool_id=None ):
"""Verifies that the attributes and contents of a history item meet expectations"""
if wait:
self.wait( maxseconds=maxseconds ) #wait for job to finish
@@ -692,7 +692,7 @@
errmsg += str( err )
raise AssertionError( errmsg )
if filename is not None:
- local_name = self.get_filename( filename, migrated_tool_id=migrated_tool_id )
+ local_name = self.get_filename( filename, shed_tool_id=shed_tool_id )
temp_name = self.makeTfname(fname = filename)
file( temp_name, 'wb' ).write(data)
if self.keepOutdir > '':
@@ -726,7 +726,7 @@
else:
raise Exception, 'Unimplemented Compare type: %s' % compare
if extra_files:
- self.verify_extra_files_content( extra_files, elem.get( 'id' ), migrated_tool_id=migrated_tool_id )
+ self.verify_extra_files_content( extra_files, elem.get( 'id' ), shed_tool_id=shed_tool_id )
except AssertionError, err:
errmsg = 'History item %s different than expected, difference (using %s):\n' % ( hid, compare )
errmsg += str( err )
@@ -745,21 +745,21 @@
os.remove( temp_name )
return temp_local, temp_temp
- def verify_extra_files_content( self, extra_files, hda_id, migrated_tool_id=None ):
+ def verify_extra_files_content( self, extra_files, hda_id, shed_tool_id=None ):
files_list = []
for extra_type, extra_value, extra_name, extra_attributes in extra_files:
if extra_type == 'file':
files_list.append( ( extra_name, extra_value, extra_attributes ) )
elif extra_type == 'directory':
- for filename in os.listdir( self.get_filename( extra_value, migrated_tool_id=migrated_tool_id ) ):
+ for filename in os.listdir( self.get_filename( extra_value, shed_tool_id=shed_tool_id ) ):
files_list.append( ( filename, os.path.join( extra_value, filename ), extra_attributes ) )
else:
raise ValueError, 'unknown extra_files type: %s' % extra_type
for filename, filepath, attributes in files_list:
- self.verify_composite_datatype_file_content( filepath, hda_id, base_name=filename, attributes=attributes, migrated_tool_id=migrated_tool_id )
+ self.verify_composite_datatype_file_content( filepath, hda_id, base_name=filename, attributes=attributes, shed_tool_id=shed_tool_id )
- def verify_composite_datatype_file_content( self, file_name, hda_id, base_name=None, attributes=None, migrated_tool_id=None ):
- local_name = self.get_filename( file_name, migrated_tool_id=migrated_tool_id )
+ def verify_composite_datatype_file_content( self, file_name, hda_id, base_name=None, attributes=None, shed_tool_id=None ):
+ local_name = self.get_filename( file_name, shed_tool_id=shed_tool_id )
if base_name is None:
base_name = os.path.split(file_name)[-1]
temp_name = self.makeTfname(fname = base_name)
@@ -1015,8 +1015,8 @@
def last_page( self ):
return tc.browser.get_html()
- def load_cookies( self, file, migrated_tool_id=None ):
- filename = self.get_filename( file, migrated_tool_id=migrated_tool_id )
+ def load_cookies( self, file, shed_tool_id=None ):
+ filename = self.get_filename( file, shed_tool_id=shed_tool_id )
tc.load_cookies(filename)
def reload_page( self ):
diff -r c510097f7018dbc177513a62c0ca46b4cace0c86 -r 59f0cdeb9afcd899727ba2d2b5b4f3d0e6383edd test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py
+++ b/test/functional/test_toolbox.py
@@ -10,7 +10,7 @@
class ToolTestCase( TwillTestCase ):
"""Abstract test case that runs tests based on a `galaxy.tools.test.ToolTest`"""
- def do_it( self, testdef, migrated_tool_id=None ):
+ def do_it( self, testdef, shed_tool_id=None ):
# If the test generation had an error, raise
if testdef.error:
if testdef.exception:
@@ -40,7 +40,7 @@
dbkey=extra.get( 'dbkey', 'hg17' ),
metadata=metadata,
composite_data=composite_data,
- migrated_tool_id=migrated_tool_id )
+ shed_tool_id=shed_tool_id )
print "Uploaded file: ", fname, ", ftype: ", extra.get( 'ftype', 'auto' ), ", extra: ", extra
#Post upload attribute editing
edit_attributes = extra.get( 'edit_attributes', [] )
@@ -99,7 +99,7 @@
elem_hid = elem.get( 'hid' )
elem_index += 1
try:
- self.verify_dataset_correctness( outfile, hid=elem_hid, maxseconds=testdef.maxseconds, attributes=attributes, migrated_tool_id=migrated_tool_id )
+ self.verify_dataset_correctness( outfile, hid=elem_hid, maxseconds=testdef.maxseconds, attributes=attributes, shed_tool_id=shed_tool_id )
except Exception, e:
print >>sys.stderr, self.get_job_stdout( elem.get( 'id' ), format=True )
print >>sys.stderr, self.get_job_stderr( elem.get( 'id' ), format=True )
@@ -143,7 +143,7 @@
expanded_inputs[value.name] = declared_inputs[value.name]
return expanded_inputs
-def build_tests( testing_migrated_tools=False ):
+def build_tests( testing_shed_tools=False ):
"""
If the module level variable `toolbox` is set, generate `ToolTestCase`
classes for all of its tests and put them into this modules globals() so
@@ -166,12 +166,12 @@
baseclasses = ( ToolTestCase, )
namespace = dict()
for j, testdef in enumerate( tool.tests ):
- def make_test_method( td, migrated_tool_id=None ):
+ def make_test_method( td, shed_tool_id=None ):
def test_tool( self ):
- self.do_it( td, migrated_tool_id=migrated_tool_id )
+ self.do_it( td, shed_tool_id=shed_tool_id )
return test_tool
- if testing_migrated_tools:
- test_method = make_test_method( testdef, migrated_tool_id=tool.id )
+ if testing_shed_tools:
+ test_method = make_test_method( testdef, shed_tool_id=tool.id )
else:
test_method = make_test_method( testdef )
test_method.__doc__ = "%s ( %s ) > %s" % ( tool.name, tool.id, testdef.name )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
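
For reference, here is a simplified, standalone sketch of what the new parse_tool_panel_config / get_installed_repository_info pair does: walk a shed tool panel config such as shed_tool_conf.xml, derive the repository owner, name, and changeset revision from each tool's '.../repos/<owner>/<name>/<changeset>/...' file path, and map the tool's guid to its test-data directory. The XML, hostname, and paths below are invented for illustration, and the real code locates test-data by walking the installed repository rather than assuming its location:

    import json
    import os
    from xml.etree import ElementTree

    SAMPLE_CONFIG = """<?xml version="1.0"?>
    <toolbox tool_path="../shed_tools">
      <section id="filters" name="Filters">
        <tool file="toolshed.example.org/repos/devteam/filtering/abc123def456/filtering.xml"
              guid="toolshed.example.org/repos/devteam/filtering/Filter1/1.1.0"/>
      </section>
    </toolbox>
    """

    def repository_info(tool_file):
        # Everything after '/repos/' is '<owner>/<name>/<changeset revision>/<tool config>'.
        prefix, sans_shed = tool_file.split('/repos/', 1)
        owner, name, changeset = sans_shed.split('/')[:3]
        return prefix, owner, name, changeset

    def build_shed_tools_dict(config_text):
        shed_tools_dict = {}
        root = ElementTree.fromstring(config_text)
        # Tools appear either at the top level of <toolbox> or inside <section> elements.
        for tool in root.iter('tool'):
            prefix, owner, name, changeset = repository_info(tool.get('file'))
            # The real code walks the installed repository looking for a 'test-data'
            # directory; here the conventional location is simply assumed.
            shed_tools_dict[tool.get('guid')] = os.path.join(prefix, 'repos', owner, name, changeset, 'test-data')
        return shed_tools_dict

    print(json.dumps(build_shed_tools_dict(SAMPLE_CONFIG), indent=2))

In the changeset above, running sh run_functional_tests.sh --installed parses each config listed in installed_tool_panel_configs this way, persists the resulting dict as JSON (the shed_tools_dict file), and exposes it through GALAXY_TOOL_SHED_TEST_FILE so TwillTestCase.get_filename() can resolve shed_tool_id lookups against the right test-data directory.
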
commit/galaxy-central: greg: Enhance the functional test framework to support tools migrated from the distribution and installed from the tool shed. To test these tools, use: sh run_functional_tests.sh --migrated
by Bitbucket 29 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c510097f7018/
changeset: c510097f7018
user: greg
date: 2012-03-29 18:03:14
summary: Enhance the functional test framework to support tools migrated from the distribution and installed from the tool shed. To test these tools, use: sh run_functional_tests.sh --migrated
affected #: 6 files
diff -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a -r c510097f7018dbc177513a62c0ca46b4cace0c86 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -276,7 +276,6 @@
root = tree.getroot()
tool_path = root.get( 'tool_path', None )
if tool_path is None:
- # There will be a problem here if the user has defined 2 non-shed related configs.
config_filenames.append( config_filename )
return config_filenames
def __get_url_from_tool_shed( self, tool_shed ):
diff -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a -r c510097f7018dbc177513a62c0ca46b4cace0c86 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -124,7 +124,7 @@
elif elem.tag == 'section':
self.load_section_tag_set( elem, tool_path, load_panel_dict )
elif elem.tag == 'label':
- self.load_label_tag_set( elem, self.tool_panel, self.integrated_tool_panel )
+ self.load_label_tag_set( elem, self.tool_panel, self.integrated_tool_panel, load_panel_dict )
if parsing_shed_tool_conf:
shed_tool_conf_dict = dict( config_filename=config_filename,
tool_path=tool_path,
@@ -370,10 +370,10 @@
integrated_panel_dict[ key ] = workflow
except:
log.exception( "Error loading workflow: %s" % workflow_id )
- def load_label_tag_set( self, elem, panel_dict, integrated_panel_dict ):
+ def load_label_tag_set( self, elem, panel_dict, integrated_panel_dict, load_panel_dict ):
label = ToolSectionLabel( elem )
key = 'label_' + label.id
- if not self.integrated_tool_panel_config_has_contents:
+ if load_panel_dict:
panel_dict[ key ] = label
integrated_panel_dict[ key ] = label
def load_section_tag_set( self, elem, tool_path, load_panel_dict ):
@@ -396,7 +396,7 @@
elif sub_elem.tag == 'workflow':
self.load_workflow_tag_set( sub_elem, elems, integrated_elems, load_panel_dict )
elif sub_elem.tag == 'label':
- self.load_label_tag_set( sub_elem, elems, integrated_elems )
+ self.load_label_tag_set( sub_elem, elems, integrated_elems, load_panel_dict )
if load_panel_dict:
self.tool_panel[ key ] = section
# Always load sections into the integrated_tool_panel.
diff -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a -r c510097f7018dbc177513a62c0ca46b4cace0c86 run_functional_tests.sh
--- a/run_functional_tests.sh
+++ b/run_functional_tests.sh
@@ -20,6 +20,15 @@
echo "==========================================================================================================================================="
echo "'run_functional_tests.sh -id bbb' for testing one tool with id 'bbb' ('bbb' is the tool id)"
echo "'run_functional_tests.sh -sid ccc' for testing one section with sid 'ccc' ('ccc' is the string after 'section::')"
+elif [ $1 = '--migrated' ]; then
+ if [ ! $2 ]; then
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --migrated
+ elif [ $2 = '-id' ]; then
+ # TODO: This option is not tested...
+ python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$3 --with-nosehtml --html-report-file run_functional_tests.html --migrated
+ else
+ python ./scripts/functional_tests.py -v functional.test_toolbox --with-nosehtml --html-report-file run_functional_tests.html --migrated
+ fi
else
python ./scripts/functional_tests.py -v --with-nosehtml --html-report-file run_functional_tests.html $1
fi
diff -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a -r c510097f7018dbc177513a62c0ca46b4cace0c86 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-import os, sys, shutil
+import os, sys, shutil, tempfile, re
# Assume we are run from the galaxy root directory, add lib to the python path
cwd = os.getcwd()
@@ -32,6 +32,13 @@
from galaxy.web import buildapp
from galaxy import tools
from galaxy.util import bunch
+from galaxy import util
+from galaxy.util.json import to_json_string
+
+import nose.core
+import nose.config
+import nose.loader
+import nose.plugins.manager
log = logging.getLogger( "functional_tests.py" )
@@ -40,28 +47,87 @@
default_galaxy_test_port_max = 9999
default_galaxy_locales = 'en'
default_galaxy_test_file_dir = "test-data"
+migrated_tool_panel_config = 'migrated_tools_conf.xml'
-def main():
-
+def get_installed_repository_info( elem, last_galaxy_test_file_dir, last_tested_repository_name, last_tested_changeset_revision ):
+ """
+ Return the GALAXY_TEST_FILE_DIR, the containing repository name and the change set revision for the tool elem.
+ This only happens when testing tools eliminated from the distribution and now installed from the tool shed.
+ """
+ tool_config_path = elem.get( 'file' )
+ installed_tool_path_items = tool_config_path.split( '/repos/' )
+ sans_shed = installed_tool_path_items[ 1 ]
+ path_items = sans_shed.split( '/' )
+ repository_name = path_items[ 1 ]
+ changeset_revision = path_items[ 2 ]
+ if repository_name != last_tested_repository_name or changeset_revision != last_tested_changeset_revision:
+ # Locate the test-data directory.
+ installed_tool_path = os.path.join( installed_tool_path_items[ 0 ], 'repos', 'devteam', repository_name, changeset_revision )
+ for root, dirs, files in os.walk( installed_tool_path ):
+ if 'test-data' in dirs:
+ return os.path.join( root, 'test-data' ), repository_name, changeset_revision
+ return None, repository_name, changeset_revision
+ return last_galaxy_test_file_dir, last_tested_repository_name, last_tested_changeset_revision
+
+def run_tests( test_config ):
+ loader = nose.loader.TestLoader( config=test_config )
+ plug_loader = test_config.plugins.prepareTestLoader( loader )
+ if plug_loader is not None:
+ loader = plug_loader
+ tests = loader.loadTestsFromNames( test_config.testNames )
+ test_runner = nose.core.TextTestRunner( stream=test_config.stream,
+ verbosity=test_config.verbosity,
+ config=test_config )
+ plug_runner = test_config.plugins.prepareTestRunner( test_runner )
+ if plug_runner is not None:
+ test_runner = plug_runner
+ return test_runner.run( tests )
+
+def main():
# ---- Configuration ------------------------------------------------------
-
galaxy_test_host = os.environ.get( 'GALAXY_TEST_HOST', default_galaxy_test_host )
galaxy_test_port = os.environ.get( 'GALAXY_TEST_PORT', None )
galaxy_test_save = os.environ.get( 'GALAXY_TEST_SAVE', None)
+ tool_path = os.environ.get( 'GALAXY_TEST_TOOL_PATH', 'tools' )
+ if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
+ os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_galaxy_locales
+ testing_migrated_tools = '--migrated' in sys.argv
- if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
- os.environ['HTTP_ACCEPT_LANGUAGE'] = default_galaxy_locales
- galaxy_test_file_dir = os.environ.get( 'GALAXY_TEST_FILE_DIR', default_galaxy_test_file_dir )
- if not os.path.isabs( galaxy_test_file_dir ):
- galaxy_test_file_dir = os.path.join( os.getcwd(), galaxy_test_file_dir )
- start_server = 'GALAXY_TEST_EXTERNAL' not in os.environ
- tool_path = os.environ.get( 'GALAXY_TEST_TOOL_PATH', 'tools' )
- tool_config_file = os.environ.get( 'GALAXY_TEST_TOOL_CONF', 'tool_conf.xml.sample' )
- tool_data_table_config_path = 'tool_data_table_conf.xml'
+ if testing_migrated_tools:
+ sys.argv.pop()
+ # Store a jsonified dictionary of tool_id : GALAXY_TEST_FILE_DIR pairs.
+ galaxy_migrated_tools_file = 'migrated_tools_dict'
+ migrated_tools_dict = {}
+ # We need the upload tool for functional tests, so we'll create a temporary tool panel config that defines it.
+ fd, tmp_tool_panel_conf = tempfile.mkstemp()
+ os.write( fd, '<?xml version="1.0"?>\n' )
+ os.write( fd, '<toolbox>\n' )
+ os.write( fd, '<tool file="data_source/upload.xml"/>\n' )
+ os.write( fd, '</toolbox>\n' )
+ os.close( fd )
+ tool_config_file = tmp_tool_panel_conf
+ galaxy_test_file_dir = None
+ library_import_dir = None
+ user_library_import_dir = None
+ # Exclude all files except test_toolbox.py.
+ ignore_files = ( re.compile( r'^test_[adghlmsu]*' ), re.compile( r'^test_ta*' ) )
+ else:
+ tool_config_file = os.environ.get( 'GALAXY_TEST_TOOL_CONF', 'tool_conf.xml.sample' )
+ galaxy_test_file_dir = os.environ.get( 'GALAXY_TEST_FILE_DIR', default_galaxy_test_file_dir )
+ if not os.path.isabs( galaxy_test_file_dir ):
+ galaxy_test_file_dir = os.path.join( os.getcwd(), galaxy_test_file_dir )
+ library_import_dir = galaxy_test_file_dir
+ user_library_import_dir = os.path.join( galaxy_test_file_dir, 'users' )
+ ignore_files = ()
+
+ start_server = 'GALAXY_TEST_EXTERNAL' not in os.environ
+ if os.path.exists( 'tool_data_table_conf.test.xml' ):
+ tool_data_table_config_path = 'tool_data_table_conf.test.xml'
+ else:
+ tool_data_table_config_path = 'tool_data_table_conf.xml'
tool_dependency_dir = os.environ.get( 'GALAXY_TOOL_DEPENDENCY_DIR', None )
use_distributed_object_store = os.environ.get( 'GALAXY_USE_DISTRIBUTED_OBJECT_STORE', False )
- if os.path.exists( 'tool_data_table_conf.test.xml' ):
- tool_data_table_config_path = 'tool_data_table_conf.test.xml'
+
if start_server:
psu_production = False
galaxy_test_proxy_port = None
@@ -126,38 +192,21 @@
try:
os.makedirs( dir )
except OSError:
- pass
-
+ pass
print "Database connection:", database_connection
-
- # What requires these?
- # handy for (eg) functional tests to save outputs?
- if galaxy_test_save:
- os.environ['GALAXY_TEST_SAVE'] = galaxy_test_save
- # pass in through script setenv
- # will leave a copy of ALL test validate files
- os.environ['GALAXY_TEST_HOST'] = galaxy_test_host
- os.environ['GALAXY_TEST_FILE_DIR'] = galaxy_test_file_dir
- # ---- Build Application --------------------------------------------------
-
- app = None
-
+ # ---- Build Application --------------------------------------------------
+ app = None
if start_server:
-
global_conf = { '__file__' : 'universe_wsgi.ini.sample' }
if psu_production:
global_conf = None
-
if not database_connection.startswith( 'sqlite://' ):
- kwargs['database_engine_option_max_overflow'] = '20'
-
+ kwargs[ 'database_engine_option_max_overflow' ] = '20'
if tool_dependency_dir is not None:
- kwargs['tool_dependency_dir'] = tool_dependency_dir
-
+ kwargs[ 'tool_dependency_dir' ] = tool_dependency_dir
if use_distributed_object_store:
- kwargs['object_store'] = 'distributed'
- kwargs['distributed_object_store_config_file'] = 'distributed_object_store_conf.xml.sample'
-
+ kwargs[ 'object_store' ] = 'distributed'
+ kwargs[ 'distributed_object_store_config_file' ] = 'distributed_object_store_conf.xml.sample'
# Build the Universe Application
app = UniverseApplication( job_queue_workers = 5,
id_secret = 'changethisinproductiontoo',
@@ -179,21 +228,16 @@
allow_user_deletion = True,
admin_users = 'test(a)bx.psu.edu',
allow_library_path_paste = True,
- library_import_dir = galaxy_test_file_dir,
- user_library_import_dir = os.path.join( galaxy_test_file_dir, 'users' ),
+ library_import_dir = library_import_dir,
+ user_library_import_dir = user_library_import_dir,
global_conf = global_conf,
**kwargs )
-
- log.info( "Embedded Universe application started" );
-
+ log.info( "Embedded Universe application started" )
# ---- Run webserver ------------------------------------------------------
-
server = None
if start_server:
-
- webapp = buildapp.app_factory( dict(), use_translogger = False, static_enabled = False, app=app )
-
+ webapp = buildapp.app_factory( dict(), use_translogger=False, static_enabled=False, app=app )
if galaxy_test_port is not None:
server = httpserver.serve( webapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
else:
@@ -217,7 +261,6 @@
t = threading.Thread( target=server.serve_forever )
t.start()
-
# Test if the server is up
for i in range( 10 ):
conn = httplib.HTTPConnection( galaxy_test_host, galaxy_test_port ) # directly test the app, not the proxy
@@ -227,85 +270,105 @@
time.sleep( 0.1 )
else:
raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
-
# Test if the proxy server is up
if psu_production:
conn = httplib.HTTPConnection( galaxy_test_host, galaxy_test_proxy_port ) # directly test the app, not the proxy
conn.request( "GET", "/" )
if not conn.getresponse().status == 200:
raise Exception( "Test HTTP proxy server did not return '200 OK'" )
-
log.info( "Embedded web server started" )
-
-
# ---- Load toolbox for generated tests -----------------------------------
-
# We don't add the tests to the path until everything is up and running
new_path = [ os.path.join( cwd, "test" ) ]
new_path.extend( sys.path[1:] )
sys.path = new_path
-
import functional.test_toolbox
-
- if app:
- # TODO: provisions for loading toolbox from file when using external server
- functional.test_toolbox.toolbox = app.toolbox
- functional.test_toolbox.build_tests()
- else:
- # FIXME: This doesn't work at all now that toolbox requires an 'app' instance
- # (to get at datatypes, might just pass a datatype registry directly)
- datatypes_registry = galaxy.datatypes.registry.Registry()
- datatypes_registry.load_datatypes()
- my_app = bunch.Bunch( datatypes_registry )
- test_toolbox.toolbox = tools.ToolBox( 'tool_conf.xml.test', 'tools', my_app )
-
# ---- Find tests ---------------------------------------------------------
-
if galaxy_test_proxy_port:
log.info( "Functional tests will be run against %s:%s" % ( galaxy_test_host, galaxy_test_proxy_port ) )
else:
log.info( "Functional tests will be run against %s:%s" % ( galaxy_test_host, galaxy_test_port ) )
-
success = False
-
try:
-
- import nose.core
- import nose.config
- import nose.loader
- import nose.plugins.manager
-
- test_config = nose.config.Config( env = os.environ, plugins=nose.plugins.manager.DefaultPluginManager() )
- test_config.configure( sys.argv )
-
- loader = nose.loader.TestLoader( config = test_config )
-
- plug_loader = test_config.plugins.prepareTestLoader( loader )
- if plug_loader is not None:
- loader = plug_loader
-
- tests = loader.loadTestsFromNames( test_config.testNames )
-
- test_runner = nose.core.TextTestRunner(
- stream = test_config.stream,
- verbosity = test_config.verbosity,
- config = test_config)
-
- plug_runner = test_config.plugins.prepareTestRunner( test_runner )
- if plug_runner is not None:
- test_runner = plug_runner
-
- result = test_runner.run( tests )
-
- success = result.wasSuccessful()
-
+ # What requires these? Handy for (eg) functional tests to save outputs?
+ if galaxy_test_save:
+ os.environ[ 'GALAXY_TEST_SAVE' ] = galaxy_test_save
+ # Pass in through script setenv, will leave a copy of ALL test validate files
+ os.environ[ 'GALAXY_TEST_HOST' ] = galaxy_test_host
+ if testing_migrated_tools:
+ last_galaxy_test_file_dir = None
+ last_tested_repository_name = None
+ last_tested_changeset_revision = None
+ tree = util.parse_xml( migrated_tool_panel_config )
+ root = tree.getroot()
+ migrated_tool_path = root.get( 'tool_path' )
+ counter = 0
+ for elem in root:
+ if elem.tag == 'tool':
+ galaxy_test_file_dir, \
+ last_tested_repository_name, \
+ last_tested_changeset_revision = get_installed_repository_info( elem,
+ last_galaxy_test_file_dir,
+ last_tested_repository_name,
+ last_tested_changeset_revision )
+ if galaxy_test_file_dir:
+ if galaxy_test_file_dir != last_galaxy_test_file_dir:
+ if not os.path.isabs( galaxy_test_file_dir ):
+ galaxy_test_file_dir = os.path.join( os.getcwd(), galaxy_test_file_dir )
+ guid = elem.get( 'guid' )
+ migrated_tools_dict[ guid ] = galaxy_test_file_dir
+ last_galaxy_test_file_dir = galaxy_test_file_dir
+ elif elem.tag == 'section':
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ galaxy_test_file_dir, \
+ last_tested_repository_name, \
+ last_tested_changeset_revision = get_installed_repository_info( section_elem,
+ last_galaxy_test_file_dir,
+ last_tested_repository_name,
+ last_tested_changeset_revision )
+ if galaxy_test_file_dir:
+ if galaxy_test_file_dir != last_galaxy_test_file_dir:
+ if not os.path.isabs( galaxy_test_file_dir ):
+ galaxy_test_file_dir = os.path.join( os.getcwd(), galaxy_test_file_dir )
+ guid = section_elem.get( 'guid' )
+ migrated_tools_dict[ guid ] = galaxy_test_file_dir
+ last_galaxy_test_file_dir = galaxy_test_file_dir
+ # Persist the migrated_tools_dict to the galaxy_migrated_tools_file.
+ migrated_tools_file = open( galaxy_migrated_tools_file, 'w' )
+ migrated_tools_file.write( to_json_string( migrated_tools_dict ) )
+ migrated_tools_file.close()
+ if not os.path.isabs( galaxy_migrated_tools_file ):
+ galaxy_migrated_tools_file = os.path.join( os.getcwd(), galaxy_migrated_tools_file )
+ os.environ[ 'GALAXY_MIGRATED_TOOLS_FILE' ] = galaxy_migrated_tools_file
+ functional.test_toolbox.toolbox = app.toolbox
+ functional.test_toolbox.build_tests( testing_migrated_tools=True )
+ test_config = nose.config.Config( env=os.environ, ignoreFiles=ignore_files, plugins=nose.plugins.manager.DefaultPluginManager() )
+ test_config.configure( sys.argv )
+ result = run_tests( test_config )
+ success = result.wasSuccessful()
+ try:
+ os.unlink( tmp_tool_panel_conf )
+ except:
+ log.info( "Unable to remove temporary file: %s" % tmp_tool_panel_conf )
+ try:
+ os.unlink( galaxy_migrated_tools_file )
+ except:
+ log.info( "Unable to remove file: %s" % galaxy_migrated_tools_file )
+ else:
+ functional.test_toolbox.toolbox = app.toolbox
+ functional.test_toolbox.build_tests()
+ if galaxy_test_file_dir:
+ os.environ[ 'GALAXY_TEST_FILE_DIR' ] = galaxy_test_file_dir
+ test_config = nose.config.Config( env=os.environ, ignoreFiles=ignore_files, plugins=nose.plugins.manager.DefaultPluginManager() )
+ test_config.configure( sys.argv )
+ result = run_tests( test_config )
+ success = result.wasSuccessful()
except:
log.exception( "Failure running tests" )
log.info( "Shutting down" )
-
- # ---- Teardown -----------------------------------------------------------
-
+ # ---- Tear down -----------------------------------------------------------
if server:
log.info( "Shutting down embedded web server" )
server.server_close()
@@ -330,7 +393,6 @@
shutil.rmtree( dir )
except:
pass
-
if success:
return 0
else:
diff -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a -r c510097f7018dbc177513a62c0ca46b4cace0c86 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -39,8 +39,9 @@
self.migrated_tools_file = os.environ.get( 'GALAXY_MIGRATED_TOOLS_FILE', None )
if self.migrated_tools_file:
f = open( self.migrated_tools_file, 'r' )
- self.migrated_tools_dict = from_json_string( f.readlines() )
+ text = f.read()
f.close()
+ self.migrated_tools_dict = from_json_string( text )
else:
self.migrated_tools_dict = {}
self.keepOutdir = os.environ.get( 'GALAXY_TEST_SAVE', '' )
@@ -51,8 +52,6 @@
pass
self.home()
- #self.set_history()
-
# Functions associated with files
def files_diff( self, file1, file2, attributes=None ):
"""Checks the contents of 2 files for differences"""
@@ -170,17 +169,25 @@
if line_diff_count > lines_diff:
raise AssertionError, "Failed to find '%s' in history data. (lines_diff=%i):\n" % ( contains, lines_diff )
- def get_filename( self, filename ):
- full = os.path.join( self.file_dir, filename)
- return os.path.abspath(full)
+ def get_filename( self, filename, migrated_tool_id=None ):
+ if migrated_tool_id and self.migrated_tools_dict:
+ file_dir = self.migrated_tools_dict[ migrated_tool_id ]
+ if not file_dir:
+ file_dir = self.file_dir
+ else:
+ file_dir = self.file_dir
+ return os.path.abspath( os.path.join( file_dir, filename ) )
def save_log( *path ):
"""Saves the log to a file"""
filename = os.path.join( *path )
file(filename, 'wt').write(buffer.getvalue())
- def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)', space_to_tab = False, metadata = None, composite_data = None ):
- """Uploads a file"""
+ def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)', space_to_tab=False, metadata=None, composite_data=None, migrated_tool_id=None ):
+ """
+ Uploads a file. If migrated_tool_id has a value, we're testing tools migrated from the distribution to the tool shed,
+ so the tool-data directory of test data files is contained in the installed tool shed repository.
+ """
self.visit_url( "%s/tool_runner?tool_id=upload1" % self.url )
try:
self.refresh_form( "file_type", ftype ) #Refresh, to support composite files
@@ -190,11 +197,11 @@
tc.fv( "1", "files_metadata|%s" % elem.get( 'name' ), elem.get( 'value' ) )
if composite_data:
for i, composite_file in enumerate( composite_data ):
- filename = self.get_filename( composite_file.get( 'value' ) )
+ filename = self.get_filename( composite_file.get( 'value' ), migrated_tool_id=migrated_tool_id )
tc.formfile( "1", "files_%i|file_data" % i, filename )
tc.fv( "1", "files_%i|space_to_tab" % i, composite_file.get( 'space_to_tab', False ) )
else:
- filename = self.get_filename( filename )
+ filename = self.get_filename( filename, migrated_tool_id=migrated_tool_id )
tc.formfile( "1", "file_data", filename )
tc.fv( "1", "space_to_tab", space_to_tab )
tc.submit("runtool_btn")
@@ -212,6 +219,7 @@
raise AssertionError, "Invalid hid (%s) created when uploading file %s" % ( hid, filename )
# Wait for upload processing to finish (TODO: this should be done in each test case instead)
self.wait()
+
def upload_url_paste( self, url_paste, ftype='auto', dbkey='unspecified (?)' ):
"""Pasted data in the upload utility"""
self.visit_page( "tool_runner/index?tool_id=upload1" )
@@ -620,6 +628,7 @@
check_str = '1 dataset copied to 1 history'
self.check_page_for_string( check_str )
self.home()
+
def get_hids_in_history( self ):
"""Returns the list of hid values for items in a history"""
data_list = self.get_history_as_data_list()
@@ -628,6 +637,7 @@
hid = elem.get('hid')
hids.append(hid)
return hids
+
def get_hids_in_histories( self ):
"""Returns the list of hids values for items in all histories"""
data_list = self.get_histories_as_data_list()
@@ -643,7 +653,7 @@
fd,temp_prefix = tempfile.mkstemp(prefix='tmp',suffix=suffix)
return temp_prefix
- def verify_dataset_correctness( self, filename, hid=None, wait=True, maxseconds=120, attributes=None ):
+ def verify_dataset_correctness( self, filename, hid=None, wait=True, maxseconds=120, attributes=None, migrated_tool_id=None ):
"""Verifies that the attributes and contents of a history item meet expectations"""
if wait:
self.wait( maxseconds=maxseconds ) #wait for job to finish
@@ -682,7 +692,7 @@
errmsg += str( err )
raise AssertionError( errmsg )
if filename is not None:
- local_name = self.get_filename( filename )
+ local_name = self.get_filename( filename, migrated_tool_id=migrated_tool_id )
temp_name = self.makeTfname(fname = filename)
file( temp_name, 'wb' ).write(data)
if self.keepOutdir > '':
@@ -716,7 +726,7 @@
else:
raise Exception, 'Unimplemented Compare type: %s' % compare
if extra_files:
- self.verify_extra_files_content( extra_files, elem.get( 'id' ) )
+ self.verify_extra_files_content( extra_files, elem.get( 'id' ), migrated_tool_id=migrated_tool_id )
except AssertionError, err:
errmsg = 'History item %s different than expected, difference (using %s):\n' % ( hid, compare )
errmsg += str( err )
@@ -735,21 +745,21 @@
os.remove( temp_name )
return temp_local, temp_temp
- def verify_extra_files_content( self, extra_files, hda_id ):
+ def verify_extra_files_content( self, extra_files, hda_id, migrated_tool_id=None ):
files_list = []
for extra_type, extra_value, extra_name, extra_attributes in extra_files:
if extra_type == 'file':
files_list.append( ( extra_name, extra_value, extra_attributes ) )
elif extra_type == 'directory':
- for filename in os.listdir( self.get_filename( extra_value ) ):
+ for filename in os.listdir( self.get_filename( extra_value, migrated_tool_id=migrated_tool_id ) ):
files_list.append( ( filename, os.path.join( extra_value, filename ), extra_attributes ) )
else:
raise ValueError, 'unknown extra_files type: %s' % extra_type
for filename, filepath, attributes in files_list:
- self.verify_composite_datatype_file_content( filepath, hda_id, base_name = filename, attributes = attributes )
+ self.verify_composite_datatype_file_content( filepath, hda_id, base_name=filename, attributes=attributes, migrated_tool_id=migrated_tool_id )
- def verify_composite_datatype_file_content( self, file_name, hda_id, base_name = None, attributes = None ):
- local_name = self.get_filename( file_name )
+ def verify_composite_datatype_file_content( self, file_name, hda_id, base_name=None, attributes=None, migrated_tool_id=None ):
+ local_name = self.get_filename( file_name, migrated_tool_id=migrated_tool_id )
if base_name is None:
base_name = os.path.split(file_name)[-1]
temp_name = self.makeTfname(fname = base_name)
@@ -1005,8 +1015,8 @@
def last_page( self ):
return tc.browser.get_html()
- def load_cookies( self, file ):
- filename = self.get_filename(file)
+ def load_cookies( self, file, migrated_tool_id=None ):
+ filename = self.get_filename( file, migrated_tool_id=migrated_tool_id )
tc.load_cookies(filename)
def reload_page( self ):
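
For reference, a minimal sketch of the get_filename() fallback introduced above, assuming a migrated_tools_dict that maps a migrated tool id to the test-data directory of its installed repository; the tool id and directory names below are made up for illustration.

    import os

    def resolve_test_data_dir( default_dir, migrated_tools_dict, migrated_tool_id=None ):
        if migrated_tool_id and migrated_tools_dict:
            # Fall back to the default test-data directory when the installed
            # repository does not provide one for this tool.
            return migrated_tools_dict.get( migrated_tool_id ) or default_dir
        return default_dir

    def get_filename( file_dir, filename ):
        return os.path.abspath( os.path.join( file_dir, filename ) )

    migrated_tools_dict = { 'some_migrated_tool': '/tmp/shed_tools/some_repo/test-data' }
    print get_filename( resolve_test_data_dir( 'test-data', migrated_tools_dict, 'some_migrated_tool' ), 'some_test_file.tabular' )
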
diff -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a -r c510097f7018dbc177513a62c0ca46b4cace0c86 test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py
+++ b/test/functional/test_toolbox.py
@@ -10,7 +10,7 @@
class ToolTestCase( TwillTestCase ):
"""Abstract test case that runs tests based on a `galaxy.tools.test.ToolTest`"""
- def do_it( self, testdef ):
+ def do_it( self, testdef, migrated_tool_id=None ):
# If the test generation had an error, raise
if testdef.error:
if testdef.exception:
@@ -35,7 +35,12 @@
children = extra.get( 'children', [] )
metadata = extra.get( 'metadata', [] )
composite_data = extra.get( 'composite_data', [] )
- self.upload_file( fname, ftype=extra.get( 'ftype', 'auto' ), dbkey=extra.get( 'dbkey', 'hg17' ), metadata = metadata, composite_data = composite_data )
+ self.upload_file( fname,
+ ftype=extra.get( 'ftype', 'auto' ),
+ dbkey=extra.get( 'dbkey', 'hg17' ),
+ metadata=metadata,
+ composite_data=composite_data,
+ migrated_tool_id=migrated_tool_id )
print "Uploaded file: ", fname, ", ftype: ", extra.get( 'ftype', 'auto' ), ", extra: ", extra
#Post upload attribute editing
edit_attributes = extra.get( 'edit_attributes', [] )
@@ -94,7 +99,7 @@
elem_hid = elem.get( 'hid' )
elem_index += 1
try:
- self.verify_dataset_correctness( outfile, hid=elem_hid, maxseconds=testdef.maxseconds, attributes=attributes )
+ self.verify_dataset_correctness( outfile, hid=elem_hid, maxseconds=testdef.maxseconds, attributes=attributes, migrated_tool_id=migrated_tool_id )
except Exception, e:
print >>sys.stderr, self.get_job_stdout( elem.get( 'id' ), format=True )
print >>sys.stderr, self.get_job_stderr( elem.get( 'id' ), format=True )
@@ -138,7 +143,7 @@
expanded_inputs[value.name] = declared_inputs[value.name]
return expanded_inputs
-def build_tests():
+def build_tests( testing_migrated_tools=False ):
"""
If the module level variable `toolbox` is set, generate `ToolTestCase`
classes for all of its tests and put them into this modules globals() so
@@ -148,21 +153,30 @@
return
# Push all the toolbox tests to module level
G = globals()
+ # Eliminate all previous tests from G.
+ for key, val in G.items():
+ if key.startswith( 'TestForTool_' ):
+ del G[ key ]
for i, tool_id in enumerate( toolbox.tools_by_id ):
tool = toolbox.get_tool( tool_id )
if tool.tests:
- # Create a new subclass of ToolTestCase dynamically adding methods
- # names test_tool_XXX that run each test defined in the tool.
- n = "TestForTool_" + tool.id.replace( ' ', '_' )
- s = ( ToolTestCase, )
- d = dict()
+ # Create a new subclass of ToolTestCase, dynamically adding methods
+ # named test_tool_XXX that run each test defined in the tool config.
+ name = "TestForTool_" + tool.id.replace( ' ', '_' )
+ baseclasses = ( ToolTestCase, )
+ namespace = dict()
for j, testdef in enumerate( tool.tests ):
- def make_test_method( td ):
+ def make_test_method( td, migrated_tool_id=None ):
def test_tool( self ):
- self.do_it( td )
+ self.do_it( td, migrated_tool_id=migrated_tool_id )
return test_tool
- m = make_test_method( testdef )
- m.__doc__ = "%s ( %s ) > %s" % ( tool.name, tool.id, testdef.name )
- d['test_tool_%06d' % j] = m
- G[ n ] = new.classobj( n, s, d )
-
+ if testing_migrated_tools:
+ test_method = make_test_method( testdef, migrated_tool_id=tool.id )
+ else:
+ test_method = make_test_method( testdef )
+ test_method.__doc__ = "%s ( %s ) > %s" % ( tool.name, tool.id, testdef.name )
+ namespace[ 'test_tool_%06d' % j ] = test_method
+ # The new.classobj function returns a new class object, with name name, derived
+ # from baseclasses (which should be a tuple of classes) and with namespace dict.
+ new_class_obj = new.classobj( name, baseclasses, namespace )
+ G[ name ] = new_class_obj
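
For context, the pattern build_tests() relies on, creating a TestCase subclass at runtime with new.classobj and registering it in the module's globals() so the test collector can discover it, reduces to a few lines. Everything below (the dummy class, the fake test definition, the tool id) is illustrative only.

    import new
    import unittest

    class DummyToolTestCase( unittest.TestCase ):
        # Stand-in for ToolTestCase; just records what it was asked to run.
        def do_it( self, testdef, migrated_tool_id=None ):
            print "would run", testdef, "migrated_tool_id =", migrated_tool_id

    def make_test_method( td, migrated_tool_id=None ):
        # The closure captures the test definition, as in build_tests().
        def test_tool( self ):
            self.do_it( td, migrated_tool_id=migrated_tool_id )
        return test_tool

    namespace = { 'test_tool_000000': make_test_method( 'fake_testdef', migrated_tool_id='some_tool_id' ) }
    name = 'TestForTool_some_tool_id'
    globals()[ name ] = new.classobj( name, ( DummyToolTestCase, ), namespace )
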
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7ab3012fe281/
changeset: 7ab3012fe281
user: dan
date: 2012-03-29 16:24:28
summary: Add GenomeSpace tools.
affected #: 13 files
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -27,14 +27,14 @@
<tool file="data_source/epigraph_import.xml" /><tool file="data_source/epigraph_import_test.xml" /><tool file="data_source/hbvar.xml" />
- <tool file="data_source/genomespace_file_browser_prod.xml" />
- <!-- <tool file="data_source/genomespace_file_browser_test.xml" />
- <tool file="data_source/genomespace_file_browser_dev.xml" /> -->
+ <tool file="genomespace/genomespace_file_browser_prod.xml" />
+ <tool file="genomespace/genomespace_importer.xml" /><tool file="validation/fix_errors.xml" /></section><section name="Send Data" id="send"><tool file="data_destination/epigraph.xml" /><tool file="data_destination/epigraph_test.xml" />
+ <tool file="genomespace/genomespace_exporter.xml" /></section><section name="ENCODE Tools" id="EncodeTools"><tool file="encode/gencode_partition.xml" />
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/data_source/genomespace_file_browser.py
--- a/tools/data_source/genomespace_file_browser.py
+++ /dev/null
@@ -1,148 +0,0 @@
-#Dan Blankenberg
-
-import optparse, os, urllib2, cookielib
-
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( "simplejson" )
-import simplejson
-
-GENOMESPACE_API_VERSION_STRING = "v1.0"
-GENOMESPACE_SERVER_URL_PROPERTIES = "http://www.genomespace.org/sites/genomespacefiles/config/serverurl.properti…"
-
-CHUNK_SIZE = 2**20 #1mb
-
-DEFAULT_GALAXY_EXT = "data"
-
-#genomespace format identifier is the URL
-GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT = {} #TODO: fix this so it is not a global variable
-#TODO: we should use a better way to set up this mapping
-GENOMESPACE_EXT_TO_GALAXY_EXT = {'rifles': 'rifles',
- 'lifes': 'lifes',
- 'cn': 'cn',
- 'GTF': 'gtf',
- 'res': 'res',
- 'xcn': 'xcn',
- 'lowercasetxt': 'lowercasetxt',
- 'bed': 'bed',
- 'CBS': 'cbs',
- 'genomicatab': 'genomicatab',
- 'gxp': 'gxp',
- 'reversedtxt': 'reversedtxt',
- 'nowhitespace': 'nowhitespace',
- 'unknown': 'unknown',
- 'txt': 'txt',
- 'uppercasetxt': 'uppercasetxt',
- 'GISTIC': 'gistic',
- 'GFF': 'gff',
- 'gmt': 'gmt',
- 'gct': 'gct'}
-
-
-def chunk_write( source_stream, target_stream, source_method = "read", target_method="write" ):
- source_method = getattr( source_stream, source_method )
- target_method = getattr( target_stream, target_method )
- while True:
- chunk = source_method( CHUNK_SIZE )
- if chunk:
- target_method( chunk )
- else:
- break
-
-def get_cookie_opener( gs_username, gs_token ):
- """ Create a GenomeSpace cookie opener """
- cj = cookielib.CookieJar()
- for cookie_name, cookie_value in [ ( 'gs-token', gs_token ), ( 'gs-username', gs_username ) ]:
- #create a super-cookie, valid for all domains
- cookie = cookielib.Cookie(version=0, name=cookie_name, value=cookie_value, port=None, port_specified=False, domain='', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False )
- cj.set_cookie( cookie )
- cookie_opener = urllib2.build_opener( urllib2.HTTPCookieProcessor( cj ) )
- return cookie_opener
-
-def get_galaxy_ext_from_genomespace_format_url( url_opener, file_format_url ):
- ext = GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT.get( file_format_url, None )
- if ext is not None:
- ext = GENOMESPACE_EXT_TO_GALAXY_EXT.get( ext, None )
- if ext is None:
- #could check content type, etc here
- ext = DEFAULT_GALAXY_EXT
- return ext
-
-def get_genomespace_site_urls():
- genomespace_sites = {}
- for line in urllib2.urlopen( GENOMESPACE_SERVER_URL_PROPERTIES ).read().split( '\n' ):
- line = line.rstrip()
- if not line or line.startswith( "#" ):
- continue
- server, line = line.split( '.', 1 )
- if server not in genomespace_sites:
- genomespace_sites[server] = {}
- line = line.split( "=", 1 )
- genomespace_sites[server][line[0]] = line[1]
- return genomespace_sites
-
-def set_genomespace_format_identifiers( url_opener, dm_site ):
- gs_request = urllib2.Request( "%s/%s/dataformat/list" % ( dm_site, GENOMESPACE_API_VERSION_STRING ) )
- gs_request.get_method = lambda: 'GET'
- opened_gs_request = url_opener.open( gs_request )
- genomespace_formats = simplejson.loads( opened_gs_request.read() )
- for format in genomespace_formats:
- GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT[ format['url'] ] = format['name']
-
-def download_from_genomespace_file_browser( json_parameter_file, genomespace_site ):
- json_params = simplejson.loads( open( json_parameter_file, 'r' ).read() )
- datasource_params = json_params.get( 'param_dict' )
- username = datasource_params.get( "gs-username", None )
- token = datasource_params.get( "gs-token", None )
- assert None not in [ username, token ], "Missing GenomeSpace username or token."
- output_filename = datasource_params.get( "output", None )
- dataset_id = json_params['output_data'][0]['dataset_id']
- hda_id = json_params['output_data'][0]['hda_id']
- url_opener = get_cookie_opener( username, token )
- #load and set genomespace format ids to galaxy exts
- genomespace_site_dict = get_genomespace_site_urls()[ genomespace_site ]
- set_genomespace_format_identifiers( url_opener, genomespace_site_dict['dmServer'] )
-
- file_url_prefix = "fileUrl"
- file_type_prefix = "fileFormat"
- metadata_parameter_file = open( json_params['job_config']['TOOL_PROVIDED_JOB_METADATA_FILE'], 'wb' )
- file_numbers = []
- for name in datasource_params.keys():
- if name.startswith( file_url_prefix ):
- name = name[len( file_url_prefix ):]
- file_numbers.append( int( name ) )
- file_numbers.sort()
- for file_num in file_numbers:
- url_key = "%s%i" % ( file_url_prefix, file_num )
- download_url = datasource_params.get( url_key, None )
- if download_url is None:
- break
- filetype_key = "%s%i" % ( file_type_prefix, file_num )
- filetype_url = datasource_params.get( filetype_key, None )
- galaxy_ext = get_galaxy_ext_from_genomespace_format_url( url_opener, filetype_url )
- if output_filename is None:
- output_filename = os.path.join( datasource_params['__new_file_path__'], 'primary_%i_output%i_visible_%s' % ( hda_id, file_num, galaxy_ext ) )
- else:
- if dataset_id is not None:
- metadata_parameter_file.write( "%s\n" % simplejson.dumps( dict( type = 'dataset',
- dataset_id = dataset_id,
- ext = galaxy_ext ) ) )
- output_file = open( output_filename, 'wb' )
- new_file_request = urllib2.Request( download_url )
- new_file_request.get_method = lambda: 'GET'
- target_download_url = url_opener.open( new_file_request )
- chunk_write( target_download_url, output_file )
- output_file.close()
- output_filename = None #only have one filename available
- metadata_parameter_file.close()
- return True
-
-if __name__ == '__main__':
- #Parse Command Line
- parser = optparse.OptionParser()
- parser.add_option( '-p', '--json_parameter_file', dest='json_parameter_file', action='store', type="string", default=None, help='json_parameter_file' )
- parser.add_option( '-s', '--genomespace_site', dest='genomespace_site', action='store', type="string", default=None, help='genomespace_site' )
- (options, args) = parser.parse_args()
-
- download_from_genomespace_file_browser( options.json_parameter_file, options.genomespace_site )
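
The serverurl.properties parsing in get_genomespace_site_urls(), carried over unchanged into the relocated scripts below, builds a nested dict keyed by site name and property name. A self-contained sketch, with made-up property lines standing in for the real file:

    properties_text = """
    # comments and blank lines are skipped
    prod.dmServer=https://dm.genomespace.org/datamanager
    test.dmServer=https://dmtest.genomespace.org:8444/datamanager
    """

    genomespace_sites = {}
    for line in properties_text.split( '\n' ):
        line = line.strip()
        if not line or line.startswith( '#' ):
            continue
        # 'prod.dmServer=...' -> site 'prod', key 'dmServer', value the URL.
        server, rest = line.split( '.', 1 )
        key, value = rest.split( '=', 1 )
        genomespace_sites.setdefault( server, {} )[ key ] = value

    print genomespace_sites[ 'prod' ][ 'dmServer' ]
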
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/data_source/genomespace_file_browser_dev.xml
--- a/tools/data_source/genomespace_file_browser_dev.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0"?>
-<tool name="GenomeSpace import" id="genomespace_file_browser_dev" tool_type="data_source" add_galaxy_url="False" force_history_refresh="True" version="0.0.1">
- <description>from file browser (development)</description>
- <command interpreter="python">genomespace_file_browser.py --json_parameter_file "${output}" --genomespace_site "dev"</command>
- <inputs action="https://dmdev.genomespace.org:8444/datamanager/defaultdirectory" check_values="False" method="post">
- <display>go to GenomeSpace Data Manager </display>
- <param name="appCallbackUrl" type="baseurl" value="/tool_runner?tool_id=genomespace_file_browser_dev&runtool_btn=Execute" />
- <param name="appName" type="hidden" value="Galaxy" />
- </inputs>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="auto" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/data_source/genomespace_file_browser_prod.xml
--- a/tools/data_source/genomespace_file_browser_prod.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0"?>
-<tool name="GenomeSpace import" id="genomespace_file_browser_prod" tool_type="data_source" add_galaxy_url="False" force_history_refresh="True" version="0.0.1">
- <description>from file browser</description>
- <command interpreter="python">genomespace_file_browser.py --json_parameter_file "${output}" --genomespace_site "prod"</command>
- <inputs action="https://dm.genomespace.org/datamanager/defaultdirectory" check_values="False" method="post">
- <display>go to GenomeSpace Data Manager </display>
- <param name="appCallbackUrl" type="baseurl" value="/tool_runner?tool_id=genomespace_file_browser_prod&runtool_btn=Execute" />
- <param name="appName" type="hidden" value="Galaxy" />
- </inputs>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="auto" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/data_source/genomespace_file_browser_test.xml
--- a/tools/data_source/genomespace_file_browser_test.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0"?>
-<tool name="GenomeSpace import" id="genomespace_file_browser_test" tool_type="data_source" add_galaxy_url="False" force_history_refresh="True" version="0.0.1">
- <description>from file browser (test)</description>
- <command interpreter="python">genomespace_file_browser.py --json_parameter_file "${output}" --genomespace_site "test"</command>
- <inputs action="https://dmtest.genomespace.org:8444/datamanager/defaultdirectory" check_values="False" method="post">
- <display>go to GenomeSpace Data Manager </display>
- <param name="appCallbackUrl" type="baseurl" value="/tool_runner?tool_id=genomespace_file_browser_test&runtool_btn=Execute" />
- <param name="appName" type="hidden" value="Galaxy" />
- </inputs>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="auto" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/genomespace/genomespace_exporter.py
--- /dev/null
+++ b/tools/genomespace/genomespace_exporter.py
@@ -0,0 +1,208 @@
+#Dan Blankenberg
+
+import optparse, os, urllib2, urllib, cookielib, hashlib, base64, cgi, binascii
+
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( "simplejson" )
+import simplejson
+
+GENOMESPACE_API_VERSION_STRING = "v1.0"
+GENOMESPACE_SERVER_URL_PROPERTIES = "http://www.genomespace.org/sites/genomespacefiles/config/serverurl.properti…"
+
+CHUNK_SIZE = 2**20 #1mb
+
+
+def chunk_write( source_stream, target_stream, source_method = "read", target_method="write" ):
+ source_method = getattr( source_stream, source_method )
+ target_method = getattr( target_stream, target_method )
+ while True:
+ chunk = source_method( CHUNK_SIZE )
+ if chunk:
+ target_method( chunk )
+ else:
+ break
+
+def get_cookie_opener( gs_username, gs_token ):
+ """ Create a GenomeSpace cookie opener """
+ cj = cookielib.CookieJar()
+ for cookie_name, cookie_value in [ ( 'gs-token', gs_token ), ( 'gs-username', gs_username ) ]:
+ #create a super-cookie, valid for all domains
+ cookie = cookielib.Cookie(version=0, name=cookie_name, value=cookie_value, port=None, port_specified=False, domain='', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False )
+ cj.set_cookie( cookie )
+ cookie_opener = urllib2.build_opener( urllib2.HTTPCookieProcessor( cj ) )
+ return cookie_opener
+
+def get_genomespace_site_urls():
+ genomespace_sites = {}
+ for line in urllib2.urlopen( GENOMESPACE_SERVER_URL_PROPERTIES ).read().split( '\n' ):
+ line = line.rstrip()
+ if not line or line.startswith( "#" ):
+ continue
+ server, line = line.split( '.', 1 )
+ if server not in genomespace_sites:
+ genomespace_sites[server] = {}
+ line = line.split( "=", 1 )
+ genomespace_sites[server][line[0]] = line[1]
+ return genomespace_sites
+
+def get_directory( url_opener, dm_url, path ):
+ url = dm_url
+ for sub_path in path:
+ url = "%s/%s" % ( url, sub_path )
+ dir_request = urllib2.Request( url, headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' } )
+ dir_request.get_method = lambda: 'GET'
+ try:
+ dir_dict = simplejson.loads( url_opener.open( dir_request ).read() )
+ except urllib2.HTTPError, e:
+ #print "e", e, url #punting, assuming lack of permisions at this low of a level...
+ continue
+ break
+ return dir_dict
+
+def get_default_directory( url_opener, dm_url ):
+ return get_directory( url_opener, dm_url, ["defaultdirectory"] )
+
+def create_directory( url_opener, directory_dict, new_dir, dm_url ):
+ payload = { "isDirectory": True }
+ for dir_slice in new_dir:
+ if dir_slice in ( '', '/', None ):
+ continue
+ url = '/'.join( ( directory_dict['url'], urllib.quote( dir_slice.replace( '/', '_' ), safe='' ) ) )
+ new_dir_request = urllib2.Request( url, headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' }, data = simplejson.dumps( payload ) )
+ new_dir_request.get_method = lambda: 'PUT'
+ directory_dict = simplejson.loads( url_opener.open( new_dir_request ).read() )
+ return directory_dict
+
+def get_genome_space_launch_apps( atm_url, url_opener, file_url, file_type ):
+ gs_request = urllib2.Request( "%s/%s/webtool/descriptor" % ( atm_url, GENOMESPACE_API_VERSION_STRING ) )
+ gs_request.get_method = lambda: 'GET'
+ opened_gs_request = url_opener.open( gs_request )
+ webtool_descriptors = simplejson.loads( opened_gs_request.read() )
+ webtools = []
+ for webtool in webtool_descriptors:
+ webtool_name = webtool.get( 'name' )
+ base_url = webtool.get( 'baseUrl' )
+ use_tool = False
+ for param in webtool.get( 'fileParameters', [] ):
+ for format in param.get( 'formats', [] ):
+ if format.get( 'name' ) == file_type:
+ use_tool = True
+ break
+ if use_tool:
+ file_param_name = param.get( 'name' )
+ #file_name_delimiters = param.get( 'nameDelimiters' )
+ if '?' in base_url:
+ url_delimiter = "&"
+ else:
+ url_delimiter = "?"
+ launch_url = "%s%s%s" % ( base_url, url_delimiter, urllib.urlencode( [ ( file_param_name, file_url ) ] ) )
+ webtools.append( ( launch_url, webtool_name ) )
+ break
+ return webtools
+
+def galaxy_code_get_genomespace_folders( genomespace_site='prod', trans=None, value=None, **kwd ):
+ if value:
+ value = value[0]#single select, only 1 value
+ def recurse_directory_dict( url_opener, cur_options, url ):
+ cur_directory = urllib2.Request( url )#, headers = { 'Content-Type': 'application/json', 'Accept': 'application/text' } ) #apparently http://www.genomespace.org/team/specs/updated-dm-rest-api:"Every HTTP request to the Data Manager should include the Accept header with a preference for the media types application/json and application/text." is not correct
+ cur_directory.get_method = lambda: 'GET'
+ #get url to upload to
+ cur_directory = url_opener.open( cur_directory ).read()
+ cur_directory = simplejson.loads( cur_directory )
+ directory = cur_directory.get( 'directory', {} )
+ contents = cur_directory.get( 'contents', [] )
+ if directory.get( 'isDirectory', False ):
+ selected = directory.get( 'path' ) == value
+ cur_options.append( { 'name':directory.get( 'name' ), 'value': directory.get( 'path'), 'options':[], 'selected': selected } )
+ for sub_dir in contents:
+ if sub_dir.get( 'isDirectory', False ):
+ recurse_directory_dict( url_opener, cur_options[-1]['options'], sub_dir.get( 'url' ) )
+ rval = []
+ if trans and trans.user:
+ username = trans.user.preferences.get( 'genomespace_username', None )
+ token = trans.user.preferences.get( 'genomespace_token', None )
+ if None in ( username, token ):
+ return []
+ url_opener = get_cookie_opener( username, token )
+ genomespace_site_dict = get_genomespace_site_urls()[ genomespace_site ]
+ dm_url = genomespace_site_dict['dmServer']
+ #get default directory
+ directory_dict = get_default_directory( url_opener, dm_url )['directory']
+ #what directory to stuff this in
+ recurse_directory_dict( url_opener, rval, directory_dict.get( 'url' ) )
+
+ return rval
+
+
+def send_file_to_genomespace( genomespace_site, username, token, source_filename, target_directory, target_filename, file_type, content_type, log_filename ):
+ url_opener = get_cookie_opener( username, token )
+ genomespace_site_dict = get_genomespace_site_urls()[ genomespace_site ]
+ dm_url = genomespace_site_dict['dmServer']
+ #get default directory
+ if target_directory and target_directory[0] == '/':
+ directory_dict = get_directory( url_opener, dm_url, [ "%s/%s/%s" % ( GENOMESPACE_API_VERSION_STRING, 'file', target_directory[1] ) ] + target_directory[2:] )['directory']
+ target_directory.pop(0)
+ else:
+ directory_dict = get_default_directory( url_opener, dm_url )['directory']
+ #what directory to stuff this in
+ target_directory_dict = create_directory( url_opener, directory_dict, target_directory, dm_url )
+ #get upload url
+ upload_url = "uploadurl"
+ content_length = os.path.getsize( source_filename )
+ input_file = open( source_filename )
+ content_md5 = hashlib.md5()
+ chunk_write( input_file, content_md5, target_method="update" )
+ input_file.seek( 0 ) #back to start, for uploading
+
+ upload_params = { 'Content-Length': content_length, 'Content-MD5': base64.standard_b64encode( content_md5.digest() ), 'Content-Type': content_type }
+ upload_url = "%s/%s/%s%s/%s?%s" % ( dm_url, GENOMESPACE_API_VERSION_STRING, upload_url, target_directory_dict['path'], urllib.quote( target_filename, safe='' ), urllib.urlencode( upload_params ) )
+ new_file_request = urllib2.Request( upload_url )#, headers = { 'Content-Type': 'application/json', 'Accept': 'application/text' } ) #apparently http://www.genomespace.org/team/specs/updated-dm-rest-api:"Every HTTP request to the Data Manager should include the Accept header with a preference for the media types application/json and application/text." is not correct
+ new_file_request.get_method = lambda: 'GET'
+ #get url to upload to
+ target_upload_url = url_opener.open( new_file_request ).read()
+ #upload file to determined url
+ upload_headers = dict( upload_params )
+ #upload_headers[ 'x-amz-meta-md5-hash' ] = content_md5.hexdigest()
+ upload_headers[ 'Accept' ] = 'application/json'
+ upload_file_request = urllib2.Request( target_upload_url, headers = upload_headers, data = input_file )
+ upload_file_request.get_method = lambda: 'PUT'
+ upload_result = urllib2.urlopen( upload_file_request ).read()
+
+ result_url = "%s/%s" % ( target_directory_dict['url'], urllib.quote( target_filename, safe='' ) )
+ #determine available gs launch apps
+ web_tools = get_genome_space_launch_apps( genomespace_site_dict['atmServer'], url_opener, result_url, file_type )
+ if log_filename:
+ log_file = open( log_filename, 'wb' )
+ log_file.write( "<html><head><title>File uploaded to GenomeSpace from Galaxy</title></head><body>\n" )
+ log_file.write( '<p>Uploaded <a href="%s">%s/%s</a> to GenomeSpace.</p>\n' % ( result_url, target_directory_dict['path'], target_filename ) )
+ if web_tools:
+ log_file.write( "<p>You may open this file directly in the following applications:</p>\n" )
+ log_file.write( '<p><ul>\n' )
+ for web_tool in web_tools:
+ log_file.write( '<li><a href="%s">%s</a></li>\n' % ( web_tool ) )
+ log_file.write( '</p></ul>\n' )
+ else:
+ log_file.write( '<p>There are no GenomeSpace applications available for file type: %s</p>\n' % ( file_type ) )
+ log_file.write( "</body></html>\n" )
+ return upload_result
+
+if __name__ == '__main__':
+ #Parse Command Line
+ parser = optparse.OptionParser()
+ parser.add_option( '-s', '--genomespace_site', dest='genomespace_site', action='store', type="string", default=None, help='genomespace_site' )
+ parser.add_option( '-t', '--token', dest='token', action='store', type="string", default=None, help='token' )
+ parser.add_option( '-u', '--username', dest='username', action='store', type="string", default=None, help='username' )
+ parser.add_option( '-d', '--dataset', dest='dataset', action='store', type="string", default=None, help='dataset' )
+ parser.add_option( '-f', '--filename', dest='filename', action='store', type="string", default=None, help='filename' )
+ parser.add_option( '-y', '--subdirectory', dest='subdirectory', action='append', type="string", default=None, help='subdirectory' )
+ parser.add_option( '', '--file_type', dest='file_type', action='store', type="string", default=None, help='file_type' )
+ parser.add_option( '-c', '--content_type', dest='content_type', action='store', type="string", default=None, help='content_type' )
+ parser.add_option( '-l', '--log', dest='log', action='store', type="string", default=None, help='log' )
+
+ (options, args) = parser.parse_args()
+
+ send_file_to_genomespace( options.genomespace_site, options.username, options.token, options.dataset, map( binascii.unhexlify, options.subdirectory ), options.filename, options.file_type, options.content_type, options.log )
+
+
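
One detail worth noting in send_file_to_genomespace() above: chunk_write() only requires the named methods, so an md5 object can serve as the target stream via target_method="update". The file is streamed once to compute the Content-MD5 header, then rewound for the upload. A standalone sketch; the data file name is hypothetical.

    import base64, hashlib

    CHUNK_SIZE = 2 ** 20  # 1 MB

    def chunk_write( source_stream, target_stream, source_method="read", target_method="write" ):
        read = getattr( source_stream, source_method )
        write = getattr( target_stream, target_method )
        while True:
            chunk = read( CHUNK_SIZE )
            if not chunk:
                break
            write( chunk )

    input_file = open( 'some_dataset.dat', 'rb' )  # hypothetical local file
    content_md5 = hashlib.md5()
    chunk_write( input_file, content_md5, target_method="update" )
    input_file.seek( 0 )  # rewind; the same handle is then used as the upload body
    print base64.standard_b64encode( content_md5.digest() )  # Content-MD5 header value
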
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/genomespace/genomespace_exporter.xml
--- /dev/null
+++ b/tools/genomespace/genomespace_exporter.xml
@@ -0,0 +1,51 @@
+<?xml version="1.0"?>
+<tool name="GenomeSpace Exporter" id="genomespace_exporter" require_login="True" version="0.0.1">
+ <description> - send data to GenomeSpace</description>
+ <command interpreter="python">genomespace_exporter.py
+ --genomespace_site "prod"
+ #assert $__user_id__ != 'Anonymous', Exception( 'You must be logged in to use this tool.' )
+ #set $user = $__app__.model.User.get( $__user_id__ )
+ #set $username = $user.preferences.get( 'genomespace_username', None )
+ #set $token = $user.preferences.get( 'genomespace_token', None )
+ #assert None not in ( $username, $token ), Exception( 'You must associate a GenomeSpace OpenID with your account and log in with it.' )
+ #import binascii
+ --username "${username}"
+ --token "${token}"
+ --dataset "${input1}"
+ #if $subdirectory:
+ #for $subd in str( $subdirectory ).split( '/' ):
+ #if not $subd:
+ --subdirectory "${ binascii.hexlify( '/' ) }"
+ #else:
+ --subdirectory "${ binascii.hexlify( $subd ) }"
+ #end if
+ #end for
+ #else:
+ --subdirectory "${ binascii.hexlify( 'galaxy_export' ) }"
+ --subdirectory "${ binascii.hexlify( str( $base_url ).split( '://', 1 )[-1] ) }" ##Protocol removed by request
+ #end if
+ #if $filename:
+ --filename "${filename}"
+ #else:
+ --filename "Galaxy History Item ${__app__.security.encode_id( $input1.id )} - ${input1.hid}: ${input1.name}.${input1.ext}"
+ #end if
+ --file_type "${input1.ext}"
+ --content_type "${input1.get_mime()}"
+ --log "${output_log}"
+ </command>
+ <inputs>
+ <param format="data" name="input1" type="data" label="Send this dataset to GenomeSpace" />
+ <param name="base_url" type="baseurl" />
+ <!-- <param name="subdirectory" type="text" size="80" help="Leave blank to generate automatically" /> -->
+ <param name="subdirectory" type="drill_down" display="radio" hierarchy="exact" multiple="False" label="Choose Target Directory" dynamic_options="galaxy_code_get_genomespace_folders( genomespace_site = 'prod', trans=__trans__, value=__value__, input_dataset=input1 )" help="Leave blank to generate automatically"/>
+ <param name="filename" type="text" size="80" help="Leave blank to generate automatically" />
+ </inputs>
+ <outputs>
+ <data format="html" name="output_log" />
+ </outputs>
+ <help>
+ This Tool allows you to export data to GenomeSpace. You must have logged in using your GenomeSpace OpenID. You can associate your OpenID credentials under the User Preferences panel.
+ </help>
+ <options refresh="True"/>
+ <code file="genomespace_exporter.py" />
+</tool>
\ No newline at end of file
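
The round trip between the Cheetah command template above and genomespace_exporter.py hinges on hex-encoding each target subdirectory component, so user-chosen names survive the command line intact. A small sketch of both directions; the directory name is made up.

    import binascii

    subdirectory = 'galaxy export/my folder'  # hypothetical user-chosen path
    encoded = [ binascii.hexlify( part ) for part in subdirectory.split( '/' ) if part ]
    print encoded                              # hex strings, safe to quote on a command line
    print map( binascii.unhexlify, encoded )   # back to ['galaxy export', 'my folder']
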
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/genomespace/genomespace_file_browser.py
--- /dev/null
+++ b/tools/genomespace/genomespace_file_browser.py
@@ -0,0 +1,160 @@
+#Dan Blankenberg
+
+import optparse, os, urllib, urllib2, urlparse, cookielib
+
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( "simplejson" )
+import simplejson
+
+GENOMESPACE_API_VERSION_STRING = "v1.0"
+GENOMESPACE_SERVER_URL_PROPERTIES = "http://www.genomespace.org/sites/genomespacefiles/config/serverurl.properti…"
+
+CHUNK_SIZE = 2**20 #1mb
+
+DEFAULT_GALAXY_EXT = "data"
+
+#genomespace format identifier is the URL
+GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT = {} #TODO: fix this so it is not a global variable
+#TODO: we should use a better way to set up this mapping
+GENOMESPACE_EXT_TO_GALAXY_EXT = {'rifles': 'rifles',
+ 'lifes': 'lifes',
+ 'cn': 'cn',
+ 'GTF': 'gtf',
+ 'res': 'res',
+ 'xcn': 'xcn',
+ 'lowercasetxt': 'lowercasetxt',
+ 'bed': 'bed',
+ 'CBS': 'cbs',
+ 'genomicatab': 'genomicatab',
+ 'gxp': 'gxp',
+ 'reversedtxt': 'reversedtxt',
+ 'nowhitespace': 'nowhitespace',
+ 'unknown': 'unknown',
+ 'txt': 'txt',
+ 'uppercasetxt': 'uppercasetxt',
+ 'GISTIC': 'gistic',
+ 'GFF': 'gff',
+ 'gmt': 'gmt',
+ 'gct': 'gct'}
+
+
+def chunk_write( source_stream, target_stream, source_method = "read", target_method="write" ):
+ source_method = getattr( source_stream, source_method )
+ target_method = getattr( target_stream, target_method )
+ while True:
+ chunk = source_method( CHUNK_SIZE )
+ if chunk:
+ target_method( chunk )
+ else:
+ break
+
+def get_cookie_opener( gs_username, gs_token ):
+ """ Create a GenomeSpace cookie opener """
+ cj = cookielib.CookieJar()
+ for cookie_name, cookie_value in [ ( 'gs-token', gs_token ), ( 'gs-username', gs_username ) ]:
+ #create a super-cookie, valid for all domains
+ cookie = cookielib.Cookie(version=0, name=cookie_name, value=cookie_value, port=None, port_specified=False, domain='', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False )
+ cj.set_cookie( cookie )
+ cookie_opener = urllib2.build_opener( urllib2.HTTPCookieProcessor( cj ) )
+ return cookie_opener
+
+def get_galaxy_ext_from_genomespace_format_url( url_opener, file_format_url ):
+ ext = GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT.get( file_format_url, None )
+ if ext is not None:
+ ext = GENOMESPACE_EXT_TO_GALAXY_EXT.get( ext, None )
+ if ext is None:
+ #could check content type, etc here
+ ext = DEFAULT_GALAXY_EXT
+ return ext
+
+def get_genomespace_site_urls():
+ genomespace_sites = {}
+ for line in urllib2.urlopen( GENOMESPACE_SERVER_URL_PROPERTIES ).read().split( '\n' ):
+ line = line.rstrip()
+ if not line or line.startswith( "#" ):
+ continue
+ server, line = line.split( '.', 1 )
+ if server not in genomespace_sites:
+ genomespace_sites[server] = {}
+ line = line.split( "=", 1 )
+ genomespace_sites[server][line[0]] = line[1]
+ return genomespace_sites
+
+def set_genomespace_format_identifiers( url_opener, dm_site ):
+ gs_request = urllib2.Request( "%s/%s/dataformat/list" % ( dm_site, GENOMESPACE_API_VERSION_STRING ) )
+ gs_request.get_method = lambda: 'GET'
+ opened_gs_request = url_opener.open( gs_request )
+ genomespace_formats = simplejson.loads( opened_gs_request.read() )
+ for format in genomespace_formats:
+ GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT[ format['url'] ] = format['name']
+
+def download_from_genomespace_file_browser( json_parameter_file, genomespace_site ):
+ json_params = simplejson.loads( open( json_parameter_file, 'r' ).read() )
+ datasource_params = json_params.get( 'param_dict' )
+ username = datasource_params.get( "gs-username", None )
+ token = datasource_params.get( "gs-token", None )
+ assert None not in [ username, token ], "Missing GenomeSpace username or token."
+ output_filename = datasource_params.get( "output", None )
+ dataset_id = json_params['output_data'][0]['dataset_id']
+ hda_id = json_params['output_data'][0]['hda_id']
+ url_opener = get_cookie_opener( username, token )
+ #load and set genomespace format ids to galaxy exts
+ genomespace_site_dict = get_genomespace_site_urls()[ genomespace_site ]
+ set_genomespace_format_identifiers( url_opener, genomespace_site_dict['dmServer'] )
+
+ file_url_prefix = "fileUrl"
+ file_type_prefix = "fileFormat"
+ metadata_parameter_file = open( json_params['job_config']['TOOL_PROVIDED_JOB_METADATA_FILE'], 'wb' )
+ file_numbers = []
+ for name in datasource_params.keys():
+ if name.startswith( file_url_prefix ):
+ name = name[len( file_url_prefix ):]
+ file_numbers.append( int( name ) )
+ file_numbers.sort()
+ for file_num in file_numbers:
+ url_key = "%s%i" % ( file_url_prefix, file_num )
+ download_url = datasource_params.get( url_key, None )
+ if download_url is None:
+ break
+ filetype_key = "%s%i" % ( file_type_prefix, file_num )
+ filetype_url = datasource_params.get( filetype_key, None )
+ galaxy_ext = get_galaxy_ext_from_genomespace_format_url( url_opener, filetype_url )
+ formated_download_url = "%s?%s" % ( download_url, urllib.urlencode( [ ( 'dataformat', filetype_url ) ] ) )
+ new_file_request = urllib2.Request( formated_download_url )
+ new_file_request.get_method = lambda: 'GET'
+ target_download_url = url_opener.open( new_file_request )
+ filename = None
+ if 'Content-Disposition' in target_download_url.info():
+ # If the response has Content-Disposition, try to get filename from it
+ content_disposition = dict( map( lambda x: x.strip().split('=') if '=' in x else ( x.strip(),'' ), target_download_url.info()['Content-Disposition'].split( ';' ) ) )
+ if 'filename' in content_disposition:
+ filename = content_disposition[ 'filename' ].strip( "\"'" )
+ if not filename:
+ parsed_url = urlparse.urlparse( download_url )
+ query_params = urlparse.parse_qs( parsed_url[4] )
+ filename = urllib.unquote_plus( parsed_url[2].split( '/' )[-1] )
+ if output_filename is None:
+ output_filename = os.path.join( datasource_params['__new_file_path__'], 'primary_%i_output%i_visible_%s' % ( hda_id, file_num, galaxy_ext ) )
+ else:
+ if dataset_id is not None:
+ metadata_parameter_file.write( "%s\n" % simplejson.dumps( dict( type = 'dataset',
+ dataset_id = dataset_id,
+ ext = galaxy_ext,
+ name = "GenomeSpace import on %s" % ( filename ) ) ) )
+ output_file = open( output_filename, 'wb' )
+ chunk_write( target_download_url, output_file )
+ output_file.close()
+ output_filename = None #only have one filename available
+ metadata_parameter_file.close()
+ return True
+
+if __name__ == '__main__':
+ #Parse Command Line
+ parser = optparse.OptionParser()
+ parser.add_option( '-p', '--json_parameter_file', dest='json_parameter_file', action='store', type="string", default=None, help='json_parameter_file' )
+ parser.add_option( '-s', '--genomespace_site', dest='genomespace_site', action='store', type="string", default=None, help='genomespace_site' )
+ (options, args) = parser.parse_args()
+
+ download_from_genomespace_file_browser( options.json_parameter_file, options.genomespace_site )
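
The filename logic above prefers the Content-Disposition header and falls back to the last path segment of the download URL. A standalone sketch of the same idea; the example header and URL path are made up.

    import urllib, urlparse

    def guess_filename( content_disposition, download_url ):
        # Prefer the filename advertised by the server, if any.
        filename = None
        if content_disposition:
            parts = dict( ( p.strip().split( '=', 1 ) if '=' in p else ( p.strip(), '' ) )
                          for p in content_disposition.split( ';' ) )
            if 'filename' in parts:
                filename = parts[ 'filename' ].strip( "\"'" )
        if not filename:
            # Fall back to the last path segment of the download URL.
            parsed_url = urlparse.urlparse( download_url )
            filename = urllib.unquote_plus( parsed_url[2].split( '/' )[-1] )
        return filename

    print guess_filename( 'attachment; filename="example.bed"',
                          'https://dm.genomespace.org/datamanager/file/Home/example.bed' )
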
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/genomespace/genomespace_file_browser_dev.xml
--- /dev/null
+++ b/tools/genomespace/genomespace_file_browser_dev.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<tool name="GenomeSpace import" id="genomespace_file_browser_dev" tool_type="data_source" add_galaxy_url="False" force_history_refresh="True" version="0.0.1">
+ <description>from file browser (development)</description>
+ <command interpreter="python">genomespace_file_browser.py --json_parameter_file "${output}" --genomespace_site "dev"</command>
+ <inputs action="https://dmdev.genomespace.org:8444/datamanager/defaultdirectory" check_values="False" method="post">
+ <display>go to GenomeSpace Data Manager </display>
+ <param name="appCallbackUrl" type="baseurl" value="/tool_runner?tool_id=genomespace_file_browser_dev&runtool_btn=Execute" />
+ <param name="appName" type="hidden" value="Galaxy" />
+ </inputs>
+ <uihints minwidth="800"/>
+ <outputs>
+ <data name="output" format="auto" />
+ </outputs>
+ <options sanitize="False" refresh="True"/>
+</tool>
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/genomespace/genomespace_file_browser_prod.xml
--- /dev/null
+++ b/tools/genomespace/genomespace_file_browser_prod.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<tool name="GenomeSpace import" id="genomespace_file_browser_prod" tool_type="data_source" add_galaxy_url="False" force_history_refresh="True" version="0.0.1">
+ <description>from file browser</description>
+ <command interpreter="python">genomespace_file_browser.py --json_parameter_file "${output}" --genomespace_site "prod"</command>
+ <inputs action="https://dm.genomespace.org/datamanager/defaultdirectory" check_values="False" method="post">
+ <display>go to GenomeSpace Data Manager </display>
+ <param name="appCallbackUrl" type="baseurl" value="/tool_runner?tool_id=genomespace_file_browser_prod&runtool_btn=Execute" />
+ <param name="appName" type="hidden" value="Galaxy" />
+ </inputs>
+ <uihints minwidth="800"/>
+ <outputs>
+ <data name="output" format="auto" />
+ </outputs>
+ <options sanitize="False" refresh="True"/>
+</tool>
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/genomespace/genomespace_file_browser_test.xml
--- /dev/null
+++ b/tools/genomespace/genomespace_file_browser_test.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<tool name="GenomeSpace import" id="genomespace_file_browser_test" tool_type="data_source" add_galaxy_url="False" force_history_refresh="True" version="0.0.1">
+ <description>from file browser (test)</description>
+ <command interpreter="python">genomespace_file_browser.py --json_parameter_file "${output}" --genomespace_site "test"</command>
+ <inputs action="https://dmtest.genomespace.org:8444/datamanager/defaultdirectory" check_values="False" method="post">
+ <display>go to GenomeSpace Data Manager </display>
+ <param name="appCallbackUrl" type="baseurl" value="/tool_runner?tool_id=genomespace_file_browser_test&runtool_btn=Execute" />
+ <param name="appName" type="hidden" value="Galaxy" />
+ </inputs>
+ <uihints minwidth="800"/>
+ <outputs>
+ <data name="output" format="auto" />
+ </outputs>
+ <options sanitize="False" refresh="True"/>
+</tool>
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/genomespace/genomespace_importer.py
--- /dev/null
+++ b/tools/genomespace/genomespace_importer.py
@@ -0,0 +1,156 @@
+#Dan Blankenberg
+
+import optparse, os, urllib2, urllib, cookielib, urlparse
+
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( "simplejson" )
+import simplejson
+
+GENOMESPACE_API_VERSION_STRING = "v1.0"
+GENOMESPACE_SERVER_URL_PROPERTIES = "http://www.genomespace.org/sites/genomespacefiles/config/serverurl.properti…"
+
+CHUNK_SIZE = 2**20 #1mb
+
+DEFAULT_GALAXY_EXT = "data"
+
+#genomespace format identifier is the URL
+GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT = {} #TODO: fix this so it is not a global variable
+#TODO: we should use a better way to set up this mapping
+GENOMESPACE_EXT_TO_GALAXY_EXT = {'rifles': 'rifles',
+ 'lifes': 'lifes',
+ 'cn': 'cn',
+ 'GTF': 'gtf',
+ 'res': 'res',
+ 'xcn': 'xcn',
+ 'lowercasetxt': 'lowercasetxt',
+ 'bed': 'bed',
+ 'CBS': 'cbs',
+ 'genomicatab': 'genomicatab',
+ 'gxp': 'gxp',
+ 'reversedtxt': 'reversedtxt',
+ 'nowhitespace': 'nowhitespace',
+ 'unknown': 'unknown',
+ 'txt': 'txt',
+ 'uppercasetxt': 'uppercasetxt',
+ 'GISTIC': 'gistic',
+ 'GFF': 'gff',
+ 'gmt': 'gmt',
+ 'gct': 'gct'}
+
+VALID_CHARS = '.-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+
+def chunk_write( source_stream, target_stream, source_method = "read", target_method="write" ):
+ source_method = getattr( source_stream, source_method )
+ target_method = getattr( target_stream, target_method )
+ while True:
+ chunk = source_method( CHUNK_SIZE )
+ if chunk:
+ target_method( chunk )
+ else:
+ break
+
+def get_cookie_opener( gs_username, gs_token ):
+ """ Create a GenomeSpace cookie opener """
+ cj = cookielib.CookieJar()
+ for cookie_name, cookie_value in [ ( 'gs-token', gs_token ), ( 'gs-username', gs_username ) ]:
+ #create a super-cookie, valid for all domains
+ cookie = cookielib.Cookie(version=0, name=cookie_name, value=cookie_value, port=None, port_specified=False, domain='', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False )
+ cj.set_cookie( cookie )
+ cookie_opener = urllib2.build_opener( urllib2.HTTPCookieProcessor( cj ) )
+ return cookie_opener
+
+def get_galaxy_ext_from_genomespace_format_url( url_opener, file_format_url ):
+ ext = GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT.get( file_format_url, None )
+ if ext is not None:
+ ext = GENOMESPACE_EXT_TO_GALAXY_EXT.get( ext, None )
+ if ext is None:
+ #could check content type, etc here
+ ext = DEFAULT_GALAXY_EXT
+ return ext
+
+def get_genomespace_site_urls():
+ genomespace_sites = {}
+ for line in urllib2.urlopen( GENOMESPACE_SERVER_URL_PROPERTIES ).read().split( '\n' ):
+ line = line.rstrip()
+ if not line or line.startswith( "#" ):
+ continue
+ server, line = line.split( '.', 1 )
+ if server not in genomespace_sites:
+ genomespace_sites[server] = {}
+ line = line.split( "=", 1 )
+ genomespace_sites[server][line[0]] = line[1]
+ return genomespace_sites
+
+def set_genomespace_format_identifiers( url_opener, dm_site ):
+ gs_request = urllib2.Request( "%s/%s/dataformat/list" % ( dm_site, GENOMESPACE_API_VERSION_STRING ) )
+ gs_request.get_method = lambda: 'GET'
+ opened_gs_request = url_opener.open( gs_request )
+ genomespace_formats = simplejson.loads( opened_gs_request.read() )
+ for format in genomespace_formats:
+ GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT[ format['url'] ] = format['name']
+
+def download_from_genomespace_importer( username, token, json_parameter_file, genomespace_site ):
+ json_params = simplejson.loads( open( json_parameter_file, 'r' ).read() )
+ datasource_params = json_params.get( 'param_dict' )
+ #username = datasource_params.get( "gs-username", None )
+ #token = datasource_params.get( "gs-token", None )
+ assert None not in [ username, token ], "Missing GenomeSpace username or token."
+ output_filename = datasource_params.get( "output_file1", None )
+ dataset_id = json_params['output_data'][0]['dataset_id']
+ hda_id = json_params['output_data'][0]['hda_id']
+ url_opener = get_cookie_opener( username, token )
+ #load and set genomespace format ids to galaxy exts
+ genomespace_site_dict = get_genomespace_site_urls()[ genomespace_site ]
+ set_genomespace_format_identifiers( url_opener, genomespace_site_dict['dmServer'] )
+ file_url_name = "URL"
+ metadata_parameter_file = open( json_params['job_config']['TOOL_PROVIDED_JOB_METADATA_FILE'], 'wb' )
+ url_param = datasource_params.get( file_url_name, None )
+ for download_url in url_param.split( ',' ):
+ parsed_url = urlparse.urlparse( download_url )
+ query_params = urlparse.parse_qs( parsed_url[4] )
+ file_type = DEFAULT_GALAXY_EXT
+ if 'dataformat' in query_params:
+ file_type = query_params[ 'dataformat' ][0]
+ file_type = get_galaxy_ext_from_genomespace_format_url( url_opener, file_type )
+ elif '.' in parsed_url[2]:
+ file_type = parsed_url[2].rsplit( '.', 1 )[-1]
+ file_type = GENOMESPACE_EXT_TO_GALAXY_EXT.get( file_type, file_type )
+ new_file_request = urllib2.Request( download_url )
+ new_file_request.get_method = lambda: 'GET'
+ target_download_url = url_opener.open( new_file_request )
+ filename = None
+ if 'Content-Disposition' in target_download_url.info():
+ content_disposition = dict( map( lambda x: x.strip().split('=') if '=' in x else ( x.strip(),'' ), target_download_url.info()['Content-Disposition'].split( ';' ) ) )
+ if 'filename' in content_disposition:
+ filename = content_disposition[ 'filename' ].strip( "\"'" )
+ if not filename:
+ parsed_url = urlparse.urlparse( download_url )
+ query_params = urlparse.parse_qs( parsed_url[4] )
+ filename = urllib.unquote_plus( parsed_url[2].split( '/' )[-1] )
+ if output_filename is None:
+ output_filename = os.path.join( datasource_params['__new_file_path__'], 'primary_%i_output%s_visible_%s' % ( hda_id, ''.join( c in VALID_CHARS and c or '-' for c in filename ), file_type ) )
+ else:
+ if dataset_id is not None:
+ metadata_parameter_file.write( "%s\n" % simplejson.dumps( dict( type = 'dataset',
+ dataset_id = dataset_id,
+ ext = file_type,
+ name = "GenomeSpace importer on %s" % ( filename ) ) ) )
+ output_file = open( output_filename, 'wb' )
+ chunk_write( target_download_url, output_file )
+ output_file.close()
+ output_filename = None #only have one filename available
+ metadata_parameter_file.close()
+ return True
+
+if __name__ == '__main__':
+ #Parse Command Line
+ parser = optparse.OptionParser()
+ parser.add_option( '-p', '--json_parameter_file', dest='json_parameter_file', action='store', type="string", default=None, help='json_parameter_file' )
+ parser.add_option( '-s', '--genomespace_site', dest='genomespace_site', action='store', type="string", default=None, help='genomespace_site' )
+ parser.add_option( '-t', '--token', dest='token', action='store', type="string", default=None, help='token' )
+ parser.add_option( '-u', '--username', dest='username', action='store', type="string", default=None, help='username' )
+ (options, args) = parser.parse_args()
+
+ download_from_genomespace_importer( options.username, options.token, options.json_parameter_file, options.genomespace_site )
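
The importer resolves a Galaxy datatype in three steps: the dataformat query parameter on the GenomeSpace URL, then the file suffix, then the generic 'data' type. A trimmed-down sketch; the two lookup tables and the format URL key are placeholders for the module-level tables above.

    import urlparse

    DEFAULT_GALAXY_EXT = 'data'
    FORMAT_URL_TO_GS_EXT = { 'http://www.genomespace.org/dataformat/gtf': 'GTF' }  # hypothetical key
    GS_EXT_TO_GALAXY_EXT = { 'GTF': 'gtf', 'bed': 'bed' }

    def resolve_galaxy_ext( download_url ):
        parsed = urlparse.urlparse( download_url )
        query_params = urlparse.parse_qs( parsed[4] )
        if 'dataformat' in query_params:
            gs_ext = FORMAT_URL_TO_GS_EXT.get( query_params[ 'dataformat' ][0] )
            return GS_EXT_TO_GALAXY_EXT.get( gs_ext, DEFAULT_GALAXY_EXT )
        if '.' in parsed[2]:
            suffix = parsed[2].rsplit( '.', 1 )[-1]
            return GS_EXT_TO_GALAXY_EXT.get( suffix, suffix )
        return DEFAULT_GALAXY_EXT

    print resolve_galaxy_ext( 'https://dm.genomespace.org/datamanager/file/Home/test.bed' )  # bed
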
diff -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f -r 7ab3012fe281f9219e2323e6c8c9694fc2b2628a tools/genomespace/genomespace_importer.xml
--- /dev/null
+++ b/tools/genomespace/genomespace_importer.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<tool name="GenomeSpace Importer" id="genomespace_importer" tool_type="data_source" force_history_refresh="True" hidden="True" display_interface="False" require_login="True" version="0.0.1">
+ <description> - receive data from GenomeSpace</description>
+ <command interpreter="python">genomespace_importer.py
+ --genomespace_site "prod"
+ #assert $__user_id__ != 'Anonymous', Exception( 'You must be logged in to use this tool.' )
+ #set $user = $__app__.model.User.get( $__user_id__ )
+ #set $username = $user.preferences.get( 'genomespace_username', None )
+ #set $token = $user.preferences.get( 'genomespace_token', None )
+ #assert None not in ( $username, $token ), Exception( 'You must associate a GenomeSpace OpenID with your account and log in with it.' )
+ --username "${username}"
+ --token "${token}"
+ --json_parameter_file "${output_file1}"
+ </command>
+ <inputs check_values="False">
+ <!-- <param name="file_name" type="text" value="" /> -->
+ <param name="URL" type="hidden" value="" />
+ </inputs>
+ <outputs>
+ <data format="auto" name="output_file1" />
+ </outputs>
+ <help>
+ some help text here...
+ </help>
+ <options refresh="True"/>
+</tool>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Add require_login and display_interface flags to tools. Add 'everything' target for refresh_frame.
by Bitbucket 29 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a2cd73dd4f72/
changeset: a2cd73dd4f72
user: dan
date: 2012-03-29 16:24:15
summary: Add require_login and display_interface flags to tools. Add 'everything' target for refresh_frame.
affected #: 3 files
diff -r 5aab033197616ac6d8ca1f14f672f9f3195a56dc -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -636,6 +636,8 @@
self.check_values = True
self.nginx_upload = False
self.input_required = False
+ self.display_interface = True
+ self.require_login = False
# Define a place to keep track of all input parameters. These
# differ from the inputs dictionary in that inputs can be page
# elements like conditionals, but input_params are basic form
@@ -732,6 +734,8 @@
# Useful i.e. when an indeterminate number of outputs are created by
# a tool.
self.force_history_refresh = util.string_as_bool( root.get( 'force_history_refresh', 'False' ) )
+ self.display_interface = util.string_as_bool( root.get( 'display_interface', str( self.display_interface ) ) )
+ self.require_login = util.string_as_bool( root.get( 'require_login', str( self.require_login ) ) )
# Load input translator, used by datasource tools to change
# names/values of incoming parameters
self.input_translator = root.find( "request_param_translation" )
@@ -1337,6 +1341,8 @@
# on the standard run form) or "URL" (a parameter provided by
# external data source tools).
if "runtool_btn" not in incoming and "URL" not in incoming:
+ if not self.display_interface:
+ return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
return "tool_form.mako", dict( errors={}, tool_state=state, param_values={}, incoming={} )
# Process incoming data
if not( self.check_values ):
@@ -1385,6 +1391,8 @@
state.page += 1
# Fill in the default values for the next page
self.fill_in_new_state( trans, self.inputs_by_page[ state.page ], state.inputs )
+ if not self.display_interface:
+ return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
return 'tool_form.mako', dict( errors=errors, tool_state=state )
else:
try:
@@ -1397,6 +1405,8 @@
except:
pass
# Just a refresh, render the form with updated state and errors.
+ if not self.display_interface:
+ return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
return 'tool_form.mako', dict( errors=errors, tool_state=state )
def find_fieldstorage( self, x ):
if isinstance( x, FieldStorage ):
diff -r 5aab033197616ac6d8ca1f14f672f9f3195a56dc -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py
+++ b/lib/galaxy/web/controllers/tool_runner.py
@@ -58,6 +58,8 @@
log.error( "index called with tool id '%s' but no such tool exists", tool_id )
trans.log_event( "Tool id '%s' does not exist" % tool_id )
return "Tool '%s' does not exist, kwd=%s " % (tool_id, kwd)
+ if tool.require_login and not trans.user:
+ return trans.response.send_redirect( url_for( controller='user', action='login', cntrller='user', message="You must be logged in to use this tool.", status="info", referer=url_for( controller='/tool_runner', action='index', tool_id=tool_id, **kwd ) ) )
params = util.Params( kwd, sanitize = False ) #Sanitize parameters when substituting into command line via input wrappers
#do param translation here, used by datasource tools
if tool.input_translator:
diff -r 5aab033197616ac6d8ca1f14f672f9f3195a56dc -r a2cd73dd4f7284b1d0ffa56235e478d013cc116f templates/message.mako
--- a/templates/message.mako
+++ b/templates/message.mako
@@ -24,6 +24,9 @@
<%def name="javascripts()">
${parent.javascripts()}
<script type="text/javascript">
+ %if 'everything' in refresh_frames:
+ parent.location.href="${h.url_for( controller='root' )}";
+ %endif
%if 'masthead' in refresh_frames:
## if ( parent.frames && parent.frames.galaxy_masthead ) {
## parent.frames.galaxy_masthead.location.href="${h.url_for( controller='root', action='masthead')}";
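
The two new tool flags are read from the tool XML root element with util.string_as_bool. A rough, self-contained sketch of how they default; the string_as_bool shown here is a hand-rolled stand-in for the Galaxy utility, and the tool tag is illustrative.

    from xml.etree.ElementTree import XML

    def string_as_bool( value ):
        # Rough stand-in for galaxy.util.string_as_bool.
        return str( value ).lower() in ( 'true', 'yes', 'on', '1' )

    root = XML( '<tool name="GenomeSpace Importer" require_login="True" display_interface="False"/>' )
    display_interface = string_as_bool( root.get( 'display_interface', 'True' ) )
    require_login = string_as_bool( root.get( 'require_login', 'False' ) )
    print display_interface, require_login  # False True
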
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Enable dynamic options for DrillDownSelectToolParameter. Fix for setting checked state for current value for DrillDownSelectToolParameter.
by Bitbucket 29 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5aab03319761/
changeset: 5aab03319761
user: dan
date: 2012-03-29 16:23:42
summary: Enable dynamic options for DrillDownSelectToolParameter. Fix for setting checked state for current value for DrillDownSelectToolParameter.
affected #: 2 files
diff -r 7273e1405094e931b4dc39f646cf4cd83a5bbc5d -r 5aab033197616ac6d8ca1f14f672f9f3195a56dc lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1133,7 +1133,9 @@
from_file = os.path.join( tool.app.config.tool_data_path, from_file )
elem = XML( "<root>%s</root>" % open( from_file ).read() )
self.is_dynamic = False
- self.dynamic_options = None #backwards compatibility with SelectToolParameter's old dynamic options and late validation
+ self.dynamic_options = elem.get( 'dynamic_options' , None )
+ if self.dynamic_options:
+ self.is_dynamic = True
self.options = []
self.filtered = {}
if elem.find( 'filter' ):
@@ -1148,12 +1150,23 @@
if filter.get( 'value' ) not in self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )]:
self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )][filter.get( 'value' )] = []
recurse_option_elems( self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )][filter.get( 'value' )], filter.find( 'options' ).findall( 'option' ) )
- else:
+ elif not self.dynamic_options:
recurse_option_elems( self.options, elem.find( 'options' ).findall( 'option' ) )
+ def _get_options_from_code( self, trans=None, value=None, other_values=None ):
+ assert self.dynamic_options, Exception( "dynamic_options was not specified" )
+ call_other_values = { '__trans__': trans, '__value__': value }
+ if other_values:
+ call_other_values.update( other_values.dict )
+ return eval( self.dynamic_options, self.tool.code_namespace, call_other_values )
+
+
def get_options( self, trans=None, value=None, other_values={} ):
if self.is_dynamic:
- options = []
+ if self.dynamic_options:
+ options = self._get_options_from_code( trans=trans, value=value, other_values=other_values )
+ else:
+ options = []
for filter_key, filter_value in self.filtered.iteritems():
dataset = other_values[filter_key]
if dataset.__class__.__name__.endswith( "DatasetFilenameWrapper" ): #this is a bad way to check for this, but problems importing class ( due to circular imports? )
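The hunk above lets a DrillDownSelectToolParameter declare a dynamic_options expression that is eval'd against the tool's code namespace, with __trans__ and __value__ injected into the call scope. A standalone sketch of that evaluation follows; it is not from the commit, the toy namespace and the simplified option dicts are assumptions made only so the snippet runs on its own.

    # Toy namespace standing in for the tool's <code> namespace.
    code_namespace = {
        "build_options": lambda trans=None, value=None: [
            {"name": "Group A", "value": "a", "options": [], "selected": False},
            {"name": "Group B", "value": "b", "options": [], "selected": False},
        ],
    }

    def get_options_from_code(dynamic_options, trans=None, value=None, other_values=None):
        assert dynamic_options, "dynamic_options was not specified"
        call_other_values = {"__trans__": trans, "__value__": value}
        if other_values:
            call_other_values.update(other_values)
        # evaluate the expression configured in the tool XML, as in the hunk above
        return eval(dynamic_options, code_namespace, call_other_values)

    print(get_options_from_code("build_options(__trans__, __value__)"))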
diff -r 7273e1405094e931b4dc39f646cf4cd83a5bbc5d -r 5aab033197616ac6d8ca1f14f672f9f3195a56dc lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -440,7 +440,7 @@
self.options = options
if value and not isinstance( value, list ):
value = [ value ]
- else:
+ elif not value:
value = []
self.value = value
if display == "checkbox":
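The one-line change above ('else' becoming 'elif not value') means a value that is already a list is kept rather than silently reset to the empty list. The same normalization in isolation:

    def normalize_value(value):
        if value and not isinstance(value, list):
            value = [value]          # wrap a scalar
        elif not value:
            value = []               # no value at all
        return value                 # a non-empty list now passes through unchanged

    assert normalize_value("a") == ["a"]
    assert normalize_value(["a", "b"]) == ["a", "b"]   # was reset to [] by the old bare 'else'
    assert normalize_value(None) == []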
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Fixes for transferring sample datasets and viewing / managing transferred sample datasets.
by Bitbucket 29 Mar '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7273e1405094/
changeset: 7273e1405094
user: greg
date: 2012-03-29 16:17:51
summary: Fixes for transferring sample datasets and viewing / managing transferred sample datasets.
affected #: 7 files
diff -r 9f5750e19c1ea88fa0c694bb42642589659b0750 -r 7273e1405094e931b4dc39f646cf4cd83a5bbc5d lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py
+++ b/lib/galaxy/web/controllers/requests_admin.py
@@ -493,6 +493,7 @@
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='select_datasets_to_transfer',
request_id=trans.security.encode_id( request.id ),
+ external_service_id=trans.security.encode_id( external_service.id ),
status=status,
message=message ) )
def __create_sample_datasets( self, trans, sample, selected_datasets_to_transfer, external_service ):
diff -r 9f5750e19c1ea88fa0c694bb42642589659b0750 -r 7273e1405094e931b4dc39f646cf4cd83a5bbc5d lib/galaxy/web/controllers/requests_common.py
--- a/lib/galaxy/web/controllers/requests_common.py
+++ b/lib/galaxy/web/controllers/requests_common.py
@@ -1011,6 +1011,8 @@
sample = trans.sa_session.query( trans.model.Sample ).get( trans.security.decode_id( sample_id ) )
except:
return invalid_id_redirect( trans, cntrller, sample_id, 'sample' )
+ external_service_id = params.get( 'external_service_id', None )
+ external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
# See if a library and folder have been set for this sample.
if is_admin and not sample.library or not sample.folder:
status = 'error'
@@ -1043,6 +1045,7 @@
return trans.fill_template( '/requests/common/view_sample_datasets.mako',
cntrller=cntrller,
title=title,
+ external_service=external_service,
sample=sample,
sample_datasets=sample_datasets,
transfer_status=transfer_status,
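The hunk above threads an external_service_id parameter into view_sample_datasets and decodes it into an ExternalService before rendering. A standalone sketch of that decode-and-fetch step follows; SecurityStub and the plain dict stand in for trans.security and the SQLAlchemy session, and the hex encoding and the None guard are illustrative assumptions only (the committed code expects the id to be present).

    class SecurityStub(object):
        def decode_id(self, encoded_id):
            return int(encoded_id, 16)   # assumption: hex-encoded ids, for illustration

    def load_external_service(params, security, external_services):
        encoded_id = params.get("external_service_id", None)
        if encoded_id is None:
            return None                  # illustrative guard, not part of the commit
        return external_services.get(security.decode_id(encoded_id))

    services = {26: "sequencer external service"}
    print(load_external_service({"external_service_id": "1a"}, SecurityStub(), services))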
diff -r 9f5750e19c1ea88fa0c694bb42642589659b0750 -r 7273e1405094e931b4dc39f646cf4cd83a5bbc5d templates/requests/common/common.mako
--- a/templates/requests/common/common.mako
+++ b/templates/requests/common/common.mako
@@ -298,9 +298,14 @@
## This link will direct the admin to a page allowing them to manage datasets.
<a id="sampleDatasets-${sample.id}" href="${h.url_for( controller='requests_admin', action='manage_datasets', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a>
%elif sample.datasets:
+ <%
+ # Get an external_service from one of the sample datasets. This assumes all sample datasets are associated with
+ # the same external service - hopefully this is a good assumption.
+ external_service = sample.datasets[0].external_service
+ %>
## Since this is a regular user, only display a link if there is at least 1
## selected dataset for the sample.
- <a id="sampleDatasets-${sample.id}" href="${h.url_for( controller='requests_common', action='view_sample_datasets', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a>
+ <a id="sampleDatasets-${sample.id}" href="${h.url_for( controller='requests_common', action='view_sample_datasets', cntrller=cntrller, external_service_id=trans.security.encode_id( external_service.id ), sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a>
%else:
## Since this is a regular user, do not display a link if there are no datasets.
<a id="sampleDatasets-${sample.id}">${len( sample.datasets )}</a>
@@ -424,8 +429,13 @@
%endif
%if sample.datasets and len( sample.datasets ) > len( transferred_dataset_files ) and sample.library and sample.folder:
<li><a class="action-button" href="${h.url_for( controller='requests_admin', action='manage_datasets', sample_id=trans.security.encode_id( sample.id ) )}">Manage selected datasets</a></li>
- %elif sample.datasets and len(sample.datasets ) == len( transferred_dataset_files ):
- <li><a class="action-button" href="${h.url_for( controller='requests_common', action='view_sample_datasets', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ), transfer_status=trans.model.SampleDataset.transfer_status.COMPLETE )}">View transferred datasets</a></li>
+ %elif sample.datasets and len( sample.datasets ) == len( transferred_dataset_files ):
+ <%
+ # Get an external_service from one of the sample datasets. This assumes all sample datasets are associated with
+ # the same external service - hopefully this is a good assumption.
+ external_service = sample.datasets[0].external_service
+ %>
+ <li><a class="action-button" href="${h.url_for( controller='requests_common', action='view_sample_datasets', cntrller=cntrller, external_service_id=trans.security.encode_id( external_service.id ), sample_id=trans.security.encode_id( sample.id ), transfer_status=trans.model.SampleDataset.transfer_status.COMPLETE )}">View transferred datasets</a></li>
%endif
</div>
%else:
@@ -483,6 +493,11 @@
## This link will direct the admin to a page allowing them to manage datasets.
<a id="sampleDatasets-${sample.id}" href="${h.url_for( controller='requests_admin', action='manage_datasets', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a>
%elif sample.library and sample.datasets:
+ <%
+ # Get an external_service from one of the sample datasets. This assumes all sample datasets are associated with
+ # the same external service - hopefully this is a good assumption.
+ external_service = sample.datasets[0].external_service
+ %>
## Since this is a regular user, only display a link if there is at least 1
## selected dataset for the sample.
<a id="sampleDatasets-${sample.id}" href="${h.url_for( controller='requests_common', action='view_sample_datasets', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a>
diff -r 9f5750e19c1ea88fa0c694bb42642589659b0750 -r 7273e1405094e931b4dc39f646cf4cd83a5bbc5d templates/requests/common/find_samples.mako
--- a/templates/requests/common/find_samples.mako
+++ b/templates/requests/common/find_samples.mako
@@ -78,7 +78,12 @@
%else:
State: ${sample.state.name}<br/>
%endif
- Datasets: <a href="${h.url_for( controller='requests_common', action='view_sample_datasets', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a><br/>
+ <%
+ # Get an external_service from one of the sample datasets. This assumes all sample datasets are associated with
+ # the same external service - hopefully this is a good assumption.
+ external_service = sample.datasets[0].external_service
+ %>
+ Datasets: <a href="${h.url_for( controller='requests_common', action='view_sample_datasets', cntrller=cntrller, external_service_id=trans.security.encode_id( external_service.id ), sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a><br/>
%if is_admin:
<i>User: ${sample.request.user.email}</i>
%endif
diff -r 9f5750e19c1ea88fa0c694bb42642589659b0750 -r 7273e1405094e931b4dc39f646cf4cd83a5bbc5d templates/requests/common/view_sample_datasets.mako
--- a/templates/requests/common/view_sample_datasets.mako
+++ b/templates/requests/common/view_sample_datasets.mako
@@ -25,7 +25,7 @@
<li><a class="action-button" id="sample-${sample.id}-popup" class="menubutton">Dataset Actions</a></li><div popupmenu="sample-${sample.id}-popup">
%if can_select_datasets:
- <li><a class="action-button" href="${h.url_for( controller='requests_admin', action='select_datasets_to_transfer', cntrller=cntrller, request_id=trans.security.encode_id( sample.request.id ), sample_id=trans.security.encode_id( sample.id ) )}">Select more datasets</a></li>
+ <li><a class="action-button" href="${h.url_for( controller='requests_admin', action='select_datasets_to_transfer', cntrller=cntrller, external_service_id=trans.security.encode_id( external_service.id ), request_id=trans.security.encode_id( sample.request.id ), sample_id=trans.security.encode_id( sample.id ) )}">Select more datasets</a></li>
%endif
<li><a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( sample.library.id ) )}">View target Data Library</a></li><li><a class="action-button" href="${h.url_for( controller='requests_common', action='view_request', cntrller=cntrller, id=trans.security.encode_id( sample.request.id ) )}">Browse this request</a></li>
diff -r 9f5750e19c1ea88fa0c694bb42642589659b0750 -r 7273e1405094e931b4dc39f646cf4cd83a5bbc5d test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -13,6 +13,7 @@
from elementtree import ElementTree
from galaxy.web import security
from galaxy.web.framework.helpers import iff
+from galaxy.util.json import from_json_string
from base.asserts import verify_assertions
buffer = StringIO.StringIO()
@@ -34,7 +35,14 @@
self.host = os.environ.get( 'GALAXY_TEST_HOST' )
self.port = os.environ.get( 'GALAXY_TEST_PORT' )
self.url = "http://%s:%s" % ( self.host, self.port )
- self.file_dir = os.environ.get( 'GALAXY_TEST_FILE_DIR' )
+ self.file_dir = os.environ.get( 'GALAXY_TEST_FILE_DIR', None )
+ self.migrated_tools_file = os.environ.get( 'GALAXY_MIGRATED_TOOLS_FILE', None )
+ if self.migrated_tools_file:
+ f = open( self.migrated_tools_file, 'r' )
+ self.migrated_tools_dict = from_json_string( f.readlines() )
+ f.close()
+ else:
+ self.migrated_tools_dict = {}
self.keepOutdir = os.environ.get( 'GALAXY_TEST_SAVE', '' )
if self.keepOutdir > '':
try:
@@ -1792,9 +1800,10 @@
tc.submit( "save_samples_button" )
for check_str in strings_displayed_after_submit:
self.check_page_for_string( check_str )
- def add_datasets_to_sample( self, request_id, sample_id, sample_datasets, strings_displayed=[], strings_displayed_after_submit=[] ):
+ def add_datasets_to_sample( self, request_id, sample_id, external_service_id, sample_datasets, strings_displayed=[], strings_displayed_after_submit=[] ):
# visit the dataset selection page
- url = "%s/requests_admin/select_datasets_to_transfer?cntrller=requests_admin&sample_id=%s&request_id=%s" % ( self.url, sample_id, request_id )
+ url = "%s/requests_admin/select_datasets_to_transfer?cntrller=requests_admin&sample_id=%s&request_id=%s&external_service_id=%s" % \
+ ( self.url, sample_id, request_id, external_service_id )
self.visit_url( url )
for check_str in strings_displayed:
self.check_page_for_string( check_str )
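The base test case above now optionally loads a JSON file named by GALAXY_MIGRATED_TOOLS_FILE into a dict. A standalone sketch of that loading step, using the stdlib json module instead of galaxy.util.json and reading the file as one string before decoding:

    import json
    import os

    def load_migrated_tools_dict():
        path = os.environ.get("GALAXY_MIGRATED_TOOLS_FILE", None)
        if not path:
            return {}
        with open(path, "r") as migrated_tools_file:
            return json.loads(migrated_tools_file.read())

    print(load_migrated_tools_dict())   # {} unless the environment variable is set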
diff -r 9f5750e19c1ea88fa0c694bb42642589659b0750 -r 7273e1405094e931b4dc39f646cf4cd83a5bbc5d test/functional/test_sample_tracking.py
--- a/test/functional/test_sample_tracking.py
+++ b/test/functional/test_sample_tracking.py
@@ -800,11 +800,13 @@
sample_dataset_file_names = [ dataset.split( '/' )[-1] for dataset in sample_datasets ]
global request1_sample1
request1_sample1 = request1.get_sample( 'Sample1_renamed' )
+ external_service = request1_sample1.external_service
strings_displayed_after_submit = [ 'Datasets (%s) have been selected for sample (%s)' % \
( str( sample_dataset_file_names )[1:-1].replace( "'", "" ), request1_sample1.name ) ]
strings_displayed = [ 'Select datasets to transfer from data directory configured for the sequencer' ]
self.add_datasets_to_sample( request_id=self.security.encode_id( request2.id ),
sample_id= self.security.encode_id( request1_sample1.id ),
+ external_service_id=self.security.encode_id( external_service.id ),
sample_datasets=sample_datasets,
strings_displayed=strings_displayed,
strings_displayed_after_submit=strings_displayed_after_submit )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.