galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
October 2013
- 1 participant
- 226 discussions
commit/galaxy-central: carlfeberhard: UI, Popupmenu: fix for IE 9; pack scripts
by commits-noreply@bitbucket.org 17 Oct '13
by commits-noreply@bitbucket.org 17 Oct '13
17 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5c9cfd1dea8b/
Changeset: 5c9cfd1dea8b
User: carlfeberhard
Date: 2013-10-17 22:05:29
Summary: UI, Popupmenu: fix for IE 9; pack scripts
Affected #: 7 files
diff -r f888228f48d89ade8b8f49a4d777c573174392d1 -r 5c9cfd1dea8bfb85bcc1ee07035629a77ee3a051 static/scripts/mvc/ui.js
--- a/static/scripts/mvc/ui.js
+++ b/static/scripts/mvc/ui.js
@@ -213,226 +213,159 @@
});
// =============================================================================
-/**
+/**
* view for a popup menu
*/
-var PopupMenu = Backbone.View.extend(
-{
- /* TODO:
- add submenus
- add hrefs
- test various html keys
- add make_popupmenus style
- */
+var PopupMenu = Backbone.View.extend({
/** Cache the desired button element and options, set up the button click handler
* NOTE: attaches this view as HTML/jQ data on the button for later use.
*/
- //TODO: include docs on special option keys (divider, checked, etc.)
- initialize: function($button, options)
- {
+ initialize: function( $button, options ){
// default settings
- this.$button = $button || $('<div/>');
+ this.$button = $button || $( '<div/>' );
this.options = options || [];
// set up button click -> open menu behavior
var menu = this;
- this.$button.click(function(event)
- {
- menu._renderAndShow(event);
+ this.$button.click( function( event ){
+ menu._renderAndShow( event );
return false;
});
+ },
- // attach this view as a data object on the button - for later access
- //TODO:?? memleak?
- this.$button.data('PopupMenu', this);
+ // render the menu, append to the page body at the click position, and set up the 'click-away' handlers, show
+ _renderAndShow: function( clickEvent ){
+ this.render();
+ this.$el.appendTo( 'body' );
+ this.$el.css( this._getShownPosition( clickEvent ));
+ this._setUpCloseBehavior();
+ this.$el.show();
},
// render the menu
- // this menu doesn't attach itself to the DOM (see _renderAndShow)
- render: function()
- {
- // link this popup
- var menu = this;
-
- // render the menu body
- this.$el.addClass('popmenu-wrapper')
- .css(
- {
- position : 'absolute',
- display : 'none'
- });
-
- // use template
- this.$el.html(this.template(this.$button.attr('id'), this.options));
+ // this menu doesn't attach itself to the DOM ( see _renderAndShow )
+ render: function(){
+ // render the menu body absolute and hidden, fill with template
+ this.$el.addClass( 'popmenu-wrapper' ).hide()
+ .css({ position : 'absolute' })
+ .html( this.template( this.$button.attr( 'id' ), this.options ));
// set up behavior on each link/anchor elem
- if(this.options.length)
- {
- this.$el.find('li').each(function(i, li)
- {
- var $li = $(li),
- $anchor = $li.children( 'a.popupmenu-option' ),
- menuFunc = menu.options[i].func;
+ if( this.options.length ){
+ var menu = this;
+ //precondition: there should be one option per li
+ this.$el.find( 'li' ).each( function( i, li ){
+ var option = menu.options[i];
- // click event
- if($anchor.length && menuFunc)
- {
- $anchor.click(function(event)
- {
- menuFunc(event, menu.options[i]);
+ // if the option has 'func', call that function when the anchor is clicked
+ if( option.func ){
+ $( this ).children( 'a.popupmenu-option' ).click( function( event ){
+ option.func.call( menu, event, option );
+ // bubble up so that an option click will call the close behavior
+ //return false;
});
}
-
- // cache the anchor as a jq obj within the options obj
- menu.options[i].$li = $li;
});
}
return this;
},
+ template : function( id, options ){
+ return [
+ '<ul id="', id, '-menu" class="dropdown-menu">', this._templateOptions( options ), '</ul>'
+ ].join( '' );
+ },
+
+ _templateOptions : function( options ){
+ if( !options.length ){
+ return '<li>(no options)</li>';
+ }
+ return _.map( options, function( option ){
+ if( option.divider ){
+ return '<li class="divider"></li>';
+ } else if( option.header ){
+ return [ '<li class="head"><a href="javascript:void(0);">', option.html, '</a></li>' ].join( '' );
+ }
+ var href = option.href || 'javascript:void(0);',
+ target = ( option.target )?( ' target="' + target + '"' ):( '' ),
+ check = ( option.checked )?( '<span class="fa-icon-ok"></span>' ):( '' );
+ return [
+ '<li><a class="popupmenu-option" href="', href, '"', target, '>',
+ check, option.html,
+ '</a></li>'
+ ].join( '' );
+ }).join( '' );
+ },
+
// get the absolute position/offset for the menu
- _getShownPosition : function( clickEvent )
- {
- // get element width
+ _getShownPosition : function( clickEvent ){
+
+ // display menu horiz. centered on click...
var menuWidth = this.$el.width();
-
- // display menu horiz. centered on click...
var x = clickEvent.pageX - menuWidth / 2 ;
- // adjust to handle horiz. scroll and window dimensions (draw entirely on visible screen area)
+ // adjust to handle horiz. scroll and window dimensions ( draw entirely on visible screen area )
x = Math.min( x, $( document ).scrollLeft() + $( window ).width() - menuWidth - 5 );
x = Math.max( x, $( document ).scrollLeft() + 5 );
-
- // return
return {
top: clickEvent.pageY,
left: x
};
},
- // render the menu, append to the page body at the click position, and set up the 'click-away' handlers, show
- _renderAndShow: function(clickEvent)
- {
- this.render();
- this.$el.appendTo('body');
- this.$el.css( this._getShownPosition(clickEvent));
- this._setUpCloseBehavior();
- this.$el.show();
- },
-
// bind an event handler to all available frames so that when anything is clicked
// the menu is removed from the DOM and the event handler unbinds itself
- _setUpCloseBehavior: function()
- {
+ _setUpCloseBehavior: function(){
// function to close popup and unbind itself
var menu = this;
- var closePopupWhenClicked = function($elClicked)
- {
- $elClicked.bind("click.close_popup", function()
- {
+ var closePopupWhenClicked = function( $elClicked ){
+ $elClicked.one( "click.close_popup", function(){
menu.remove();
- $elClicked.unbind("click.close_popup");
});
};
// bind to current, parent, and sibling frames
- closePopupWhenClicked($(window.document));
- closePopupWhenClicked($(window.top.document));
- _.each(window.top.frames, function(siblingFrame)
- {
- closePopupWhenClicked($(siblingFrame.document));
+ closePopupWhenClicked( $( window.document ));
+ closePopupWhenClicked( $( window.top.document ));
+ _.each( window.top.frames, function( siblingFrame ){
+ closePopupWhenClicked( $( siblingFrame.document ));
});
},
// add a menu option/item at the given index
- addItem: function(item, index)
- {
+ addItem: function( item, index ){
// append to end if no index
- index = (index >= 0) ? index : this.options.length;
- this.options.splice(index, 0, item);
+ index = ( index >= 0 ) ? index : this.options.length;
+ this.options.splice( index, 0, item );
return this;
},
// remove a menu option/item at the given index
- removeItem: function(index)
- {
- if(index >=0)
- this.options.splice(index, 1);
+ removeItem: function( index ){
+ if( index >=0 ){
+ this.options.splice( index, 1 );
+ }
return this;
},
// search for a menu option by it's html
- findIndexByHtml: function(html)
- {
- for(var i = 0; i < this.options.length; i++)
- if(_.has(this.options[i], 'html') && (this.options[i].html === html))
+ findIndexByHtml: function( html ){
+ for( var i = 0; i < this.options.length; i++ ){
+ if( _.has( this.options[i], 'html' ) && ( this.options[i].html === html )){
return i;
+ }
+ }
return null;
},
// search for a menu option by it's html
- findItemByHtml: function(html)
- {
- return this.options[(this.findIndexByHtml(html))];
+ findItemByHtml: function( html ){
+ return this.options[( this.findIndexByHtml( html ))];
},
// string representation
- toString: function()
- {
+ toString: function(){
return 'PopupMenu';
- },
-
- // template
- template: function(id, options)
- {
- // initialize template
- var tmpl = '<ul id="' + id + '-menu" class="dropdown-menu">';
-
- // check item number
- if (options.length > 0)
- {
- // add option
- for (var i in options)
- {
- // get item
- var item = options[i];
-
- // check for divider
- if (item.divider)
- {
- // add divider
- tmpl += '<li class="divider"></li>';
- } else {
- // identify header
- if(item.header)
- {
- tmpl += '<li class="head"><a href="javascript:void(0);">' + item.html + '</a></li>';
- } else {
- // add href
- if (item.href)
- {
- tmpl += '<li><a href="' + item.href + '"';
- tmpl += 'target="' + item.target + '"';
- } else
- tmpl += '<li><a href="javascript:void(0);"';
-
- // add class
- tmpl += 'class="popupmenu-option">'
-
- // add target
- if (item.checked)
- tmpl += '<span class="fa-icon-ok"></span>';
-
- // add html
- tmpl += item.html;
- }
- }
- }
- } else
- tmpl += '<li>No Options.</li>';
-
- // return
- return tmpl + '</ul>';
}
});
diff -r f888228f48d89ade8b8f49a4d777c573174392d1 -r 5c9cfd1dea8bfb85bcc1ee07035629a77ee3a051 static/scripts/packed/galaxy.frame.js
--- a/static/scripts/packed/galaxy.frame.js
+++ b/static/scripts/packed/galaxy.frame.js
@@ -1,1 +1,1 @@
-define(["galaxy.master","libs/backbone/backbone-relational"],function(b){var a=Backbone.View.extend({el_main:"#everything",options:{frame:{cols:6,rows:3},rows:1000,cell:130,margin:5,scroll:5,top_min:40,frame_max:9},cols:0,top:0,top_max:0,frame_z:0,frame_counter:0,frame_counter_id:0,frame_list:[],frame_shadow:null,visible:false,active:false,button_active:null,button_load:null,initialize:function(d){var c=this;this.button_active=new b.GalaxyMasterIcon({icon:"fa-icon-th",tooltip:"Enable/Disable Scratchbook",on_click:function(f){c.event_panel_active(f)}});Galaxy.master.append(this.button_active);this.button_load=new b.GalaxyMasterIcon({icon:"fa-icon-eye-open",tooltip:"Show/Hide Scratchbook",on_click:function(f){c.event_panel_load(f)},with_number:true});Galaxy.master.append(this.button_load);if(d){this.options=_.defaults(d,this.options)}this.top=this.top_max=this.options.top_min;this.setElement(this.template());$(this.el).append(this.template_background());$(this.el).append(this.template_menu());$(this.el_main).append($(this.el));var e="#frame-shadow";$(this.el).append(this.template_shadow(e.substring(1)));this.frame_shadow={id:e,screen_location:{},grid_location:{},grid_rank:null,grid_lock:false};this.frame_resize(this.frame_shadow,{width:0,height:0});this.frame_list[e]=this.frame_shadow;this.panel_refresh();var c=this;$(window).resize(function(){if(c.visible){c.panel_refresh()}});window.onbeforeunload=function(){if(c.frame_counter>0){return"You opened "+c.frame_counter+" frame(s) which will be lost."}}},event:{type:null,target:null,xy:null},events:{mousemove:"event_frame_mouse_move",mouseup:"event_frame_mouse_up",mouseleave:"event_frame_mouse_up",mousewheel:"event_panel_scroll",DOMMouseScroll:"event_panel_scroll","mousedown .frame":"event_frame_mouse_down","mousedown .frame-background":"event_panel_load","mousedown .frame-scroll-up":"event_panel_scroll_up","mousedown .frame-scroll-down":"event_panel_scroll_down","mousedown .f-close":"event_frame_close","mousedown 
.f-pin":"event_frame_lock"},event_frame_mouse_down:function(c){if(this.event.type!==null){return}if($(c.target).hasClass("f-header")||$(c.target).hasClass("f-title")){this.event.type="drag"}if($(c.target).hasClass("f-resize")){this.event.type="resize"}if(this.event.type===null){return}c.preventDefault();this.event.target=this.event_get_frame(c.target);if(this.event.target.grid_lock){this.event.type=null;return}this.event.xy={x:c.originalEvent.pageX,y:c.originalEvent.pageY};this.frame_drag_start(this.event.target)},event_frame_mouse_move:function(i){if(this.event.type!="drag"&&this.event.type!="resize"){return}var g={x:i.originalEvent.pageX,y:i.originalEvent.pageY};var d={x:g.x-this.event.xy.x,y:g.y-this.event.xy.y};this.event.xy=g;var h=this.frame_screen(this.event.target);if(this.event.type=="resize"){h.width+=d.x;h.height+=d.y;var f=this.options.cell-this.options.margin-1;h.width=Math.max(h.width,f);h.height=Math.max(h.height,f);this.frame_resize(this.event.target,h);h.width=this.to_grid_coord("width",h.width)+1;h.height=this.to_grid_coord("height",h.height)+1;h.width=this.to_pixel_coord("width",h.width);h.height=this.to_pixel_coord("height",h.height);this.frame_resize(this.frame_shadow,h);this.frame_insert(this.frame_shadow,{top:this.to_grid_coord("top",h.top),left:this.to_grid_coord("left",h.left)})}if(this.event.type=="drag"){h.left+=d.x;h.top+=d.y;this.frame_offset(this.event.target,h);var c={top:this.to_grid_coord("top",h.top),left:this.to_grid_coord("left",h.left)};if(c.left!==0){c.left++}this.frame_insert(this.frame_shadow,c)}},event_frame_mouse_up:function(c){if(this.event.type!="drag"&&this.event.type!="resize"){return}this.frame_drag_stop(this.event.target);this.event.type=null},event_frame_close:function(d){if(this.event.type!==null){return}d.preventDefault();var f=this.event_get_frame(d.target);var c=this;$(f.id).fadeOut("fast",function(){$(f.id).remove();delete 
c.frame_list[f.id];c.frame_counter--;c.panel_refresh(true);c.panel_animation_complete();if(c.visible&&c.frame_counter==0){c.panel_show_hide()}})},event_frame_lock:function(c){if(this.event.type!==null){return}c.preventDefault();var d=this.event_get_frame(c.target);if(d.grid_lock){d.grid_lock=false;$(d.id).find(".f-pin").removeClass("toggle");$(d.id).find(".f-header").removeClass("f-not-allowed");$(d.id).find(".f-title").removeClass("f-not-allowed");$(d.id).find(".f-resize").show();$(d.id).find(".f-close").show()}else{d.grid_lock=true;$(d.id).find(".f-pin").addClass("toggle");$(d.id).find(".f-header").addClass("f-not-allowed");$(d.id).find(".f-title").addClass("f-not-allowed");$(d.id).find(".f-resize").hide();$(d.id).find(".f-close").hide()}},event_panel_load:function(c){if(this.event.type!==null){return}this.panel_show_hide()},event_panel_active:function(c){if(this.event.type!==null){return}this.panel_active_disable()},event_panel_scroll:function(c){if(this.event.type!==null||!this.visible){return}c.preventDefault();var d=c.originalEvent.detail?c.originalEvent.detail:c.originalEvent.wheelDelta/-3;this.panel_scroll(d)},event_panel_scroll_up:function(c){if(this.event.type!==null){return}c.preventDefault();this.panel_scroll(-this.options.scroll)},event_panel_scroll_down:function(c){if(this.event.type!==null){return}c.preventDefault();this.panel_scroll(this.options.scroll)},event_get_frame:function(c){return this.frame_list["#"+$(c).closest(".frame").attr("id")]},frame_drag_start:function(d){this.frame_focus(d,true);var c=this.frame_screen(d);this.frame_resize(this.frame_shadow,c);this.frame_grid(this.frame_shadow,d.grid_location);d.grid_location=null;$(this.frame_shadow.id).show();$(".f-cover").show()},frame_drag_stop:function(d){this.frame_focus(d,false);var 
c=this.frame_screen(this.frame_shadow);this.frame_resize(d,c);this.frame_grid(d,this.frame_shadow.grid_location,true);this.frame_shadow.grid_location=null;$(this.frame_shadow.id).hide();$(".f-cover").hide();this.panel_animation_complete()},to_grid_coord:function(e,d){var c=(e=="width"||e=="height")?1:-1;if(e=="top"){d-=this.top}return parseInt((d+c*this.options.margin)/this.options.cell,10)},to_pixel_coord:function(e,f){var c=(e=="width"||e=="height")?1:-1;var d=(f*this.options.cell)-c*this.options.margin;if(e=="top"){d+=this.top}return d},to_grid:function(c){return{top:this.to_grid_coord("top",c.top),left:this.to_grid_coord("left",c.left),width:this.to_grid_coord("width",c.width),height:this.to_grid_coord("height",c.height)}},to_pixel:function(c){return{top:this.to_pixel_coord("top",c.top),left:this.to_pixel_coord("left",c.left),width:this.to_pixel_coord("width",c.width),height:this.to_pixel_coord("height",c.height)}},is_collision:function(e){function c(h,g){return !(h.left>g.left+g.width-1||h.left+h.width-1<g.left||h.top>g.top+g.height-1||h.top+h.height-1<g.top)}for(var d in this.frame_list){var f=this.frame_list[d];if(f.grid_location===null){continue}if(c(e,f.grid_location)){return true}}return false},location_rank:function(c){return(c.top*this.cols)+c.left},menu_refresh:function(){this.button_load.number(this.frame_counter);if(this.frame_counter==0){this.button_load.hide()}else{this.button_load.show()}if(this.top==this.options.top_min){$(".frame-scroll-up").hide()}else{$(".frame-scroll-up").show()}if(this.top==this.top_max){$(".frame-scroll-down").hide()}else{$(".frame-scroll-down").show()}},panel_animation_complete:function(){var c=this;$(".frame").promise().done(function(){c.panel_scroll(0,true)})},panel_refresh:function(c){this.cols=parseInt($(window).width()/this.options.cell,10)+1;this.frame_insert(null,null,c)},panel_scroll:function(h,c){var 
e=this.top-this.options.scroll*h;e=Math.max(e,this.top_max);e=Math.min(e,this.options.top_min);if(this.top!=e){for(var d in this.frame_list){var g=this.frame_list[d];if(g.grid_location!==null){var f={top:g.screen_location.top-(this.top-e),left:g.screen_location.left};this.frame_offset(g,f,c)}}this.top=e}this.menu_refresh()},panel_show_hide:function(){if(this.visible){this.visible=false;$(".frame").fadeOut("fast");this.button_load.icon("fa-icon-eye-close");this.button_load.untoggle();$(".frame-background").hide();$(".frame-menu").hide()}else{this.visible=true;$(".frame").fadeIn("fast");this.button_load.icon("fa-icon-eye-open");this.button_load.toggle();$(this.frame_shadow.id).hide();$(".frame-background").show();this.panel_refresh()}},panel_active_disable:function(){if(this.active){this.active=false;this.button_active.untoggle();if(this.visible){this.panel_show_hide()}}else{this.active=true;this.button_active.toggle()}},frame_new:function(d){if(!this.active){if(d.location=="center"){var c=$(window.parent.document).find("iframe#galaxy_main");c.attr("src",d.content)}else{window.location=d.content}return}if(this.frame_counter>=this.options.frame_max){alert("You have reached the maximum number of allowed frames ("+this.options.frame_max+").");return}var e="#frame-"+(this.frame_counter_id++);if($(e).length!==0){alert("This frame already exists. 
This page might contain multiple frame managers.");return}this.top=this.options.top_min;$(this.el).append(this.template_frame(e.substring(1),d.title,d.type,d.content));var f={id:e,screen_location:{},grid_location:{},grid_rank:null,grid_lock:false};d.width=this.to_pixel_coord("width",this.options.frame.cols);d.height=this.to_pixel_coord("height",this.options.frame.rows);this.frame_z=parseInt($(f.id).css("z-index"));this.frame_list[e]=f;this.frame_counter++;this.frame_resize(f,{width:d.width,height:d.height});this.frame_insert(f,{top:0,left:0},true);if(!this.visible){this.panel_show_hide()}},frame_insert:function(j,c,e){var d=[];if(j){j.grid_location=null;d.push([j,this.location_rank(c)])}var g=null;for(g in this.frame_list){var h=this.frame_list[g];if(h.grid_location!==null&&!h.grid_lock){h.grid_location=null;d.push([h,h.grid_rank])}}d.sort(function(k,f){var m=k[1];var l=f[1];return m<l?-1:(m>l?1:0)});for(g=0;g<d.length;g++){this.frame_place(d[g][0],e)}this.top_max=0;for(var g in this.frame_list){var j=this.frame_list[g];if(j.grid_location!==null){this.top_max=Math.max(this.top_max,j.grid_location.top+j.grid_location.height)}}this.top_max=$(window).height()-this.top_max*this.options.cell-2*this.options.margin;this.top_max=Math.min(this.top_max,this.options.top_min);this.menu_refresh()},frame_place:function(k,d){k.grid_location=null;var h=this.to_grid(this.frame_screen(k));var c=false;for(var f=0;f<this.options.rows;f++){for(var e=0;e<Math.max(1,this.cols-h.width);e++){h.top=f;h.left=e;if(!this.is_collision(h)){c=true;break}}if(c){break}}if(c){this.frame_grid(k,h,d)}else{console.log("Grid dimensions exceeded.")}},frame_focus:function(e,c){var d=this.frame_z+(c?1:0);$(e.id).css("z-index",d)},frame_offset:function(f,e,d){f.screen_location.left=e.left;f.screen_location.top=e.top;if(d){this.frame_focus(f,true);var 
c=this;$(f.id).animate({top:e.top,left:e.left},"fast",function(){c.frame_focus(f,false)})}else{$(f.id).css({top:e.top,left:e.left})}},frame_resize:function(d,c){$(d.id).css({width:c.width,height:c.height});d.screen_location.width=c.width;d.screen_location.height=c.height},frame_grid:function(e,c,d){e.grid_location=c;this.frame_offset(e,this.to_pixel(c),d);e.grid_rank=this.location_rank(c)},frame_screen:function(d){var c=d.screen_location;return{top:c.top,left:c.left,width:c.width,height:c.height}},template:function(){return'<div class="galaxy-frame"></div>'},template_frame:function(f,e,c,d){if(!e){e=""}if(c=="url"){d='<iframe scrolling="auto" class="f-iframe" src="'+d+'"></iframe>'}return'<div id="'+f+'" class="frame corner"><div class="f-header corner"><span class="f-title">'+e+'</span><span class="f-icon f-pin fa-icon-pushpin"></span><span class="f-icon f-close fa-icon-trash"></span></div><div class="f-content">'+d+'<div class="f-cover"></div></div><span class="f-resize f-icon corner fa-icon-resize-full"></span></div>'},template_shadow:function(c){return'<div id="'+c+'" class="frame-shadow corner"></div>'},template_background:function(){return'<div class="frame-background"></div>'},template_menu:function(){return'<div class="frame-scroll-up frame-menu fa-icon-chevron-up fa-icon-2x"></div><div class="frame-scroll-down frame-menu fa-icon-chevron-down fa-icon-2x"></div>'}});return{GalaxyFrameManager:a}});
\ No newline at end of file
+define(["galaxy.master","libs/backbone/backbone-relational"],function(b){var a=Backbone.View.extend({el_main:"#everything",options:{frame:{cols:6,rows:3},rows:1000,cell:130,margin:5,scroll:5,top_min:40,frame_max:9},cols:0,top:0,top_max:0,frame_z:0,frame_counter:0,frame_counter_id:0,frame_list:[],frame_shadow:null,visible:false,active:false,button_active:null,button_load:null,initialize:function(d){var c=this;this.button_active=new b.GalaxyMasterIcon({icon:"fa-icon-th",tooltip:"Enable/Disable Scratchbook",on_click:function(f){c.event_panel_active(f)},on_unload:function(){if(c.frame_counter>0){return"You opened "+c.frame_counter+" frame(s) which will be lost."}}});Galaxy.master.append(this.button_active);this.button_load=new b.GalaxyMasterIcon({icon:"fa-icon-eye-open",tooltip:"Show/Hide Scratchbook",on_click:function(f){c.event_panel_load(f)},with_number:true});Galaxy.master.append(this.button_load);if(d){this.options=_.defaults(d,this.options)}this.top=this.top_max=this.options.top_min;this.setElement(this.template());$(this.el).append(this.template_background());$(this.el).append(this.template_menu());$(this.el_main).append($(this.el));var e="#frame-shadow";$(this.el).append(this.template_shadow(e.substring(1)));this.frame_shadow={id:e,screen_location:{},grid_location:{},grid_rank:null,grid_lock:false};this.frame_resize(this.frame_shadow,{width:0,height:0});this.frame_list[e]=this.frame_shadow;this.panel_refresh();var c=this;$(window).resize(function(){if(c.visible){c.panel_refresh()}})},event:{type:null,target:null,xy:null},events:{mousemove:"event_frame_mouse_move",mouseup:"event_frame_mouse_up",mouseleave:"event_frame_mouse_up",mousewheel:"event_panel_scroll",DOMMouseScroll:"event_panel_scroll","mousedown .frame":"event_frame_mouse_down","mousedown .frame-background":"event_panel_load","mousedown .frame-scroll-up":"event_panel_scroll_up","mousedown .frame-scroll-down":"event_panel_scroll_down","mousedown .f-close":"event_frame_close","mousedown 
.f-pin":"event_frame_lock"},event_frame_mouse_down:function(c){if(this.event.type!==null){return}if($(c.target).hasClass("f-header")||$(c.target).hasClass("f-title")){this.event.type="drag"}if($(c.target).hasClass("f-resize")){this.event.type="resize"}if(this.event.type===null){return}c.preventDefault();this.event.target=this.event_get_frame(c.target);if(this.event.target.grid_lock){this.event.type=null;return}this.event.xy={x:c.originalEvent.pageX,y:c.originalEvent.pageY};this.frame_drag_start(this.event.target)},event_frame_mouse_move:function(i){if(this.event.type!="drag"&&this.event.type!="resize"){return}var g={x:i.originalEvent.pageX,y:i.originalEvent.pageY};var d={x:g.x-this.event.xy.x,y:g.y-this.event.xy.y};this.event.xy=g;var h=this.frame_screen(this.event.target);if(this.event.type=="resize"){h.width+=d.x;h.height+=d.y;var f=this.options.cell-this.options.margin-1;h.width=Math.max(h.width,f);h.height=Math.max(h.height,f);this.frame_resize(this.event.target,h);h.width=this.to_grid_coord("width",h.width)+1;h.height=this.to_grid_coord("height",h.height)+1;h.width=this.to_pixel_coord("width",h.width);h.height=this.to_pixel_coord("height",h.height);this.frame_resize(this.frame_shadow,h);this.frame_insert(this.frame_shadow,{top:this.to_grid_coord("top",h.top),left:this.to_grid_coord("left",h.left)})}if(this.event.type=="drag"){h.left+=d.x;h.top+=d.y;this.frame_offset(this.event.target,h);var c={top:this.to_grid_coord("top",h.top),left:this.to_grid_coord("left",h.left)};if(c.left!==0){c.left++}this.frame_insert(this.frame_shadow,c)}},event_frame_mouse_up:function(c){if(this.event.type!="drag"&&this.event.type!="resize"){return}this.frame_drag_stop(this.event.target);this.event.type=null},event_frame_close:function(d){if(this.event.type!==null){return}d.preventDefault();var f=this.event_get_frame(d.target);var c=this;$(f.id).fadeOut("fast",function(){$(f.id).remove();delete 
c.frame_list[f.id];c.frame_counter--;c.panel_refresh(true);c.panel_animation_complete();if(c.visible&&c.frame_counter==0){c.panel_show_hide()}})},event_frame_lock:function(c){if(this.event.type!==null){return}c.preventDefault();var d=this.event_get_frame(c.target);if(d.grid_lock){d.grid_lock=false;$(d.id).find(".f-pin").removeClass("toggle");$(d.id).find(".f-header").removeClass("f-not-allowed");$(d.id).find(".f-title").removeClass("f-not-allowed");$(d.id).find(".f-resize").show();$(d.id).find(".f-close").show()}else{d.grid_lock=true;$(d.id).find(".f-pin").addClass("toggle");$(d.id).find(".f-header").addClass("f-not-allowed");$(d.id).find(".f-title").addClass("f-not-allowed");$(d.id).find(".f-resize").hide();$(d.id).find(".f-close").hide()}},event_panel_load:function(c){if(this.event.type!==null){return}this.panel_show_hide()},event_panel_active:function(c){if(this.event.type!==null){return}this.panel_active_disable()},event_panel_scroll:function(c){if(this.event.type!==null||!this.visible){return}c.preventDefault();var d=c.originalEvent.detail?c.originalEvent.detail:c.originalEvent.wheelDelta/-3;this.panel_scroll(d)},event_panel_scroll_up:function(c){if(this.event.type!==null){return}c.preventDefault();this.panel_scroll(-this.options.scroll)},event_panel_scroll_down:function(c){if(this.event.type!==null){return}c.preventDefault();this.panel_scroll(this.options.scroll)},event_get_frame:function(c){return this.frame_list["#"+$(c).closest(".frame").attr("id")]},frame_drag_start:function(d){this.frame_focus(d,true);var c=this.frame_screen(d);this.frame_resize(this.frame_shadow,c);this.frame_grid(this.frame_shadow,d.grid_location);d.grid_location=null;$(this.frame_shadow.id).show();$(".f-cover").show()},frame_drag_stop:function(d){this.frame_focus(d,false);var 
c=this.frame_screen(this.frame_shadow);this.frame_resize(d,c);this.frame_grid(d,this.frame_shadow.grid_location,true);this.frame_shadow.grid_location=null;$(this.frame_shadow.id).hide();$(".f-cover").hide();this.panel_animation_complete()},to_grid_coord:function(e,d){var c=(e=="width"||e=="height")?1:-1;if(e=="top"){d-=this.top}return parseInt((d+c*this.options.margin)/this.options.cell,10)},to_pixel_coord:function(e,f){var c=(e=="width"||e=="height")?1:-1;var d=(f*this.options.cell)-c*this.options.margin;if(e=="top"){d+=this.top}return d},to_grid:function(c){return{top:this.to_grid_coord("top",c.top),left:this.to_grid_coord("left",c.left),width:this.to_grid_coord("width",c.width),height:this.to_grid_coord("height",c.height)}},to_pixel:function(c){return{top:this.to_pixel_coord("top",c.top),left:this.to_pixel_coord("left",c.left),width:this.to_pixel_coord("width",c.width),height:this.to_pixel_coord("height",c.height)}},is_collision:function(e){function c(h,g){return !(h.left>g.left+g.width-1||h.left+h.width-1<g.left||h.top>g.top+g.height-1||h.top+h.height-1<g.top)}for(var d in this.frame_list){var f=this.frame_list[d];if(f.grid_location===null){continue}if(c(e,f.grid_location)){return true}}return false},location_rank:function(c){return(c.top*this.cols)+c.left},menu_refresh:function(){this.button_load.number(this.frame_counter);if(this.frame_counter==0){this.button_load.hide()}else{this.button_load.show()}if(this.top==this.options.top_min){$(".frame-scroll-up").hide()}else{$(".frame-scroll-up").show()}if(this.top==this.top_max){$(".frame-scroll-down").hide()}else{$(".frame-scroll-down").show()}},panel_animation_complete:function(){var c=this;$(".frame").promise().done(function(){c.panel_scroll(0,true)})},panel_refresh:function(c){this.cols=parseInt($(window).width()/this.options.cell,10)+1;this.frame_insert(null,null,c)},panel_scroll:function(h,c){var 
e=this.top-this.options.scroll*h;e=Math.max(e,this.top_max);e=Math.min(e,this.options.top_min);if(this.top!=e){for(var d in this.frame_list){var g=this.frame_list[d];if(g.grid_location!==null){var f={top:g.screen_location.top-(this.top-e),left:g.screen_location.left};this.frame_offset(g,f,c)}}this.top=e}this.menu_refresh()},panel_show_hide:function(){if(this.visible){this.visible=false;$(".frame").fadeOut("fast");this.button_load.icon("fa-icon-eye-close");this.button_load.untoggle();$(".frame-background").hide();$(".frame-menu").hide()}else{this.visible=true;$(".frame").fadeIn("fast");this.button_load.icon("fa-icon-eye-open");this.button_load.toggle();$(this.frame_shadow.id).hide();$(".frame-background").show();this.panel_refresh()}},panel_active_disable:function(){if(this.active){this.active=false;this.button_active.untoggle();if(this.visible){this.panel_show_hide()}}else{this.active=true;this.button_active.toggle()}},frame_new:function(d){if(!this.active){if(d.location=="center"){var c=$(window.parent.document).find("iframe#galaxy_main");c.attr("src",d.content)}else{window.location=d.content}return}if(this.frame_counter>=this.options.frame_max){alert("You have reached the maximum number of allowed frames ("+this.options.frame_max+").");return}var e="#frame-"+(this.frame_counter_id++);if($(e).length!==0){alert("This frame already exists. 
This page might contain multiple frame managers.");return}this.top=this.options.top_min;$(this.el).append(this.template_frame(e.substring(1),d.title,d.type,d.content));var f={id:e,screen_location:{},grid_location:{},grid_rank:null,grid_lock:false};d.width=this.to_pixel_coord("width",this.options.frame.cols);d.height=this.to_pixel_coord("height",this.options.frame.rows);this.frame_z=parseInt($(f.id).css("z-index"));this.frame_list[e]=f;this.frame_counter++;this.frame_resize(f,{width:d.width,height:d.height});this.frame_insert(f,{top:0,left:0},true);if(!this.visible){this.panel_show_hide()}},frame_insert:function(j,c,e){var d=[];if(j){j.grid_location=null;d.push([j,this.location_rank(c)])}var g=null;for(g in this.frame_list){var h=this.frame_list[g];if(h.grid_location!==null&&!h.grid_lock){h.grid_location=null;d.push([h,h.grid_rank])}}d.sort(function(k,f){var m=k[1];var l=f[1];return m<l?-1:(m>l?1:0)});for(g=0;g<d.length;g++){this.frame_place(d[g][0],e)}this.top_max=0;for(var g in this.frame_list){var j=this.frame_list[g];if(j.grid_location!==null){this.top_max=Math.max(this.top_max,j.grid_location.top+j.grid_location.height)}}this.top_max=$(window).height()-this.top_max*this.options.cell-2*this.options.margin;this.top_max=Math.min(this.top_max,this.options.top_min);this.menu_refresh()},frame_place:function(k,d){k.grid_location=null;var h=this.to_grid(this.frame_screen(k));var c=false;for(var f=0;f<this.options.rows;f++){for(var e=0;e<Math.max(1,this.cols-h.width);e++){h.top=f;h.left=e;if(!this.is_collision(h)){c=true;break}}if(c){break}}if(c){this.frame_grid(k,h,d)}else{console.log("Grid dimensions exceeded.")}},frame_focus:function(e,c){var d=this.frame_z+(c?1:0);$(e.id).css("z-index",d)},frame_offset:function(f,e,d){f.screen_location.left=e.left;f.screen_location.top=e.top;if(d){this.frame_focus(f,true);var 
c=this;$(f.id).animate({top:e.top,left:e.left},"fast",function(){c.frame_focus(f,false)})}else{$(f.id).css({top:e.top,left:e.left})}},frame_resize:function(d,c){$(d.id).css({width:c.width,height:c.height});d.screen_location.width=c.width;d.screen_location.height=c.height},frame_grid:function(e,c,d){e.grid_location=c;this.frame_offset(e,this.to_pixel(c),d);e.grid_rank=this.location_rank(c)},frame_screen:function(d){var c=d.screen_location;return{top:c.top,left:c.left,width:c.width,height:c.height}},template:function(){return'<div class="galaxy-frame"></div>'},template_frame:function(f,e,c,d){if(!e){e=""}if(c=="url"){d='<iframe scrolling="auto" class="f-iframe" src="'+d+'"></iframe>'}return'<div id="'+f+'" class="frame corner"><div class="f-header corner"><span class="f-title">'+e+'</span><span class="f-icon f-pin fa-icon-pushpin"></span><span class="f-icon f-close fa-icon-trash"></span></div><div class="f-content">'+d+'<div class="f-cover"></div></div><span class="f-resize f-icon corner fa-icon-resize-full"></span></div>'},template_shadow:function(c){return'<div id="'+c+'" class="frame-shadow corner"></div>'},template_background:function(){return'<div class="frame-background"></div>'},template_menu:function(){return'<div class="frame-scroll-up frame-menu fa-icon-chevron-up fa-icon-2x"></div><div class="frame-scroll-down frame-menu fa-icon-chevron-down fa-icon-2x"></div>'}});return{GalaxyFrameManager:a}});
\ No newline at end of file
diff -r f888228f48d89ade8b8f49a4d777c573174392d1 -r 5c9cfd1dea8bfb85bcc1ee07035629a77ee3a051 static/scripts/packed/galaxy.master.js
--- a/static/scripts/packed/galaxy.master.js
+++ b/static/scripts/packed/galaxy.master.js
@@ -1,1 +1,1 @@
-define(["libs/backbone/backbone-relational"],function(){var a=Backbone.View.extend({el_master:"#masthead",initialize:function(c){this.setElement($(this.template()));$(this.el_master).append($(this.el))},events:{mousedown:function(c){c.preventDefault()}},append:function(c){$(this.el).append($(c.el))},prepend:function(c){$(this.el).prepend($(c.el))},template:function(){return'<div class="iconbar"></div>'}});var b=Backbone.View.extend({options:{id:"galaxy-icon",icon:"fa-icon-cog",tooltip:"galaxy-icon",with_number:false,on_click:function(){alert("clicked")},visible:true},initialize:function(d){if(d){this.options=_.defaults(d,this.options)}this.setElement($(this.template(this.options)));var c=this;$(this.el).find(".icon").tooltip({title:this.options.tooltip}).on("click",c.options.on_click);if(!this.options.visible){this.hide()}},show:function(){$(this.el).css({visibility:"visible"})},hide:function(){$(this.el).css({visibility:"hidden"})},icon:function(c){$(this.el).find(".icon").removeClass(this.options.icon).addClass(c);this.options.icon=c},toggle:function(){$(this.el).addClass("toggle")},untoggle:function(){$(this.el).removeClass("toggle")},number:function(c){$(this.el).find(".number").text(c)},template:function(d){var c="<div id="+d.id+' class="symbol"><div class="icon fa-icon-2x '+d.icon+'"></div>';if(d.with_number){c+='<div class="number"></div>'}c+="</div>";return c}});return{GalaxyMaster:a,GalaxyMasterIcon:b}});
\ No newline at end of file
+define(["libs/backbone/backbone-relational"],function(){var a=Backbone.View.extend({el_master:"#masthead",list:[],initialize:function(d){this.setElement($(this.template()));$(this.el_master).append($(this.el));var c=this;window.onbeforeunload=function(){var f="";for(key in c.list){if(c.list[key].options.on_unload){var e=c.list[key].options.on_unload();if(e){f+=e+" "}}}if(f!=""){return f}}},events:{mousedown:function(c){c.preventDefault()}},append:function(c){$(this.el).append($(c.el));this.list.push(c)},prepend:function(c){$(this.el).prepend($(c.el));this.list.push(c)},template:function(){return'<div class="iconbar"></div>'}});var b=Backbone.View.extend({options:{id:"galaxy-icon",icon:"fa-icon-cog",tooltip:"galaxy-icon",with_number:false,on_click:function(){alert("clicked")},on_unload:null,visible:true},initialize:function(d){if(d){this.options=_.defaults(d,this.options)}this.setElement($(this.template(this.options)));var c=this;$(this.el).find(".icon").tooltip({title:this.options.tooltip}).on("click",c.options.on_click);if(!this.options.visible){this.hide()}},show:function(){$(this.el).css({visibility:"visible"})},hide:function(){$(this.el).css({visibility:"hidden"})},icon:function(c){$(this.el).find(".icon").removeClass(this.options.icon).addClass(c);this.options.icon=c},toggle:function(){$(this.el).addClass("toggle")},untoggle:function(){$(this.el).removeClass("toggle")},number:function(c){$(this.el).find(".number").text(c)},template:function(d){var c="<div id="+d.id+' class="symbol"><div class="icon fa-icon-2x '+d.icon+'"></div>';if(d.with_number){c+='<div class="number"></div>'}c+="</div>";return c}});return{GalaxyMaster:a,GalaxyMasterIcon:b}});
\ No newline at end of file
diff -r f888228f48d89ade8b8f49a4d777c573174392d1 -r 5c9cfd1dea8bfb85bcc1ee07035629a77ee3a051 static/scripts/packed/galaxy.modal.js
--- a/static/scripts/packed/galaxy.modal.js
+++ b/static/scripts/packed/galaxy.modal.js
@@ -1,1 +1,1 @@
-define(["libs/backbone/backbone-relational"],function(){var a=Backbone.View.extend({elMain:"#everything",optionsDefault:{title:"galaxy-modal",body:"",backdrop:true},options:{},initialize:function(b){if(b){this.create(b)}},show:function(b){this.initialize(b);var c=$(document).height()/2;this.$body.css("max-height",c);if(this.options.height){this.$body.css("height",Math.min(this.options.height,c))}if(this.options.height){this.$body.css("overflow","hidden")}if(this.visible){this.$el.show()}else{this.$el.fadeIn("fast")}this.visible=true},hide:function(){this.$el.fadeOut("fast");this.visible=false},create:function(c){this.options=_.defaults(c,this.optionsDefault);if(this.options.body=="progress"){this.options.body=$('<div class="progress progress-striped active"><div class="progress-bar progress-bar-info" style="width:100%"></div></div>')}if(this.$el){this.$el.remove()}this.setElement(this.template(this.options.title));this.$body=(this.$el).find(".modal-body");this.$footer=(this.$el).find(".modal-footer");this.$buttons=(this.$el).find(".buttons");this.$backdrop=(this.$el).find(".modal-backdrop");this.$body.html(this.options.body);this.$body.css("min-width",this.$body.width());if(!this.options.backdrop){this.$backdrop.removeClass("in")}if(this.options.buttons){var b=this;$.each(this.options.buttons,function(d,e){b.$buttons.append($('<button id="'+String(d).toLowerCase()+'"></button>').text(d).click(e)).append(" ")})}else{this.$footer.hide()}$(this.elMain).append($(this.el))},enableButton:function(b){this.$buttons.find("#"+String(b).toLowerCase()).prop("disabled",false)},disableButton:function(b){this.$buttons.find("#"+String(b).toLowerCase()).prop("disabled",true)},scrollTop:function(){return this.$body.scrollTop()},template:function(b){return'<div class="modal"><div class="modal-backdrop fade in" style="z-index: -1;"></div><div class="modal-dialog"><div class="modal-content"><div class="modal-header"><button type="button" class="close" style="display: 
none;">×</button><h4 class="title">'+b+'</h4></div><div class="modal-body"></div><div class="modal-footer"><div class="buttons" style="float: right;"></div></div></div</div></div>'}});return{GalaxyModal:a}});
\ No newline at end of file
+define(["libs/backbone/backbone-relational"],function(){var a=Backbone.View.extend({elMain:"#everything",optionsDefault:{title:"galaxy-modal",body:"",backdrop:true,height:null,width:null},options:{},initialize:function(b){if(b){this.create(b)}},show:function(b){this.initialize(b);if(this.options.height){this.$body.css("height",this.options.height);this.$body.css("overflow","hidden")}else{this.$body.css("max-height",$(window).height()/2)}if(this.options.width){this.$dialog.css("width",this.options.width)}if(this.visible){this.$el.show()}else{this.$el.fadeIn("fast")}this.visible=true},hide:function(){this.$el.fadeOut("fast");this.visible=false},create:function(c){this.options=_.defaults(c,this.optionsDefault);if(this.options.body=="progress"){this.options.body=$('<div class="progress progress-striped active"><div class="progress-bar progress-bar-info" style="width:100%"></div></div>')}if(this.$el){this.$el.remove()}this.setElement(this.template(this.options.title));this.$dialog=(this.$el).find(".modal-dialog");this.$body=(this.$el).find(".modal-body");this.$footer=(this.$el).find(".modal-footer");this.$buttons=(this.$el).find(".buttons");this.$backdrop=(this.$el).find(".modal-backdrop");this.$body.html(this.options.body);if(!this.options.backdrop){this.$backdrop.removeClass("in")}if(this.options.buttons){var b=this;$.each(this.options.buttons,function(d,e){b.$buttons.append($('<button id="'+String(d).toLowerCase()+'"></button>').text(d).click(e)).append(" ")})}else{this.$footer.hide()}$(this.elMain).append($(this.el))},enableButton:function(b){this.$buttons.find("#"+String(b).toLowerCase()).prop("disabled",false)},disableButton:function(b){this.$buttons.find("#"+String(b).toLowerCase()).prop("disabled",true)},scrollTop:function(){return this.$body.scrollTop()},template:function(b){return'<div class="modal"><div class="modal-backdrop fade in" style="z-index: -1;"></div><div class="modal-dialog"><div class="modal-content"><div class="modal-header"><button 
type="button" class="close" style="display: none;">×</button><h4 class="title">'+b+'</h4></div><div class="modal-body"></div><div class="modal-footer"><div class="buttons" style="float: right;"></div></div></div</div></div>'}});return{GalaxyModal:a}});
\ No newline at end of file
diff -r f888228f48d89ade8b8f49a4d777c573174392d1 -r 5c9cfd1dea8bfb85bcc1ee07035629a77ee3a051 static/scripts/packed/galaxy.upload.js
--- a/static/scripts/packed/galaxy.upload.js
+++ b/static/scripts/packed/galaxy.upload.js
@@ -1,1 +1,1 @@
-define(["galaxy.modal","galaxy.master","utils/galaxy.utils","utils/galaxy.uploadbox","libs/backbone/backbone-relational"],function(b,d,c){var a=Backbone.View.extend({modal:null,button_show:null,uploadbox:null,select_extension:{auto:"Auto-detect"},state:{init:"fa-icon-trash",queued:"fa-icon-spinner fa-icon-spin",running:"__running__",success:"fa-icon-ok",error:"fa-icon-warning-sign"},counter:{announce:0,success:0,error:0,running:0,reset:function(){this.announce=this.success=this.error=this.running=0}},initialize:function(){if(!Galaxy.currHistoryPanel){var e=this;window.setTimeout(function(){e.initialize()},500);return}var e=this;this.button_show=new d.GalaxyMasterIcon({icon:"fa-icon-upload",tooltip:"Upload Files",on_click:function(f){e.event_show(f)},with_number:true});Galaxy.master.prepend(this.button_show);var e=this;c.jsonFromUrl(galaxy_config.root+"api/datatypes",function(g){for(key in g){var f=g[key];e.select_extension[f]=f}})},event_dragover:function(f){},event_dragleave:function(f){},event_announce:function(f,g,i){var j="#upload-"+f;$(this.el).find("tbody:last").append(this.template_row(j,this.select_extension));var h=this.get_upload_item(f);h.fadeIn();h.find("#title").html(g.name);h.find("#size").html(this.size_to_string(g.size));var e=this;h.find("#symbol").on("click",function(){e.event_remove(f)});this.event_progress(f,g,0);this.counter.announce++;this.update_screen()},event_initialize:function(g,h,k){this.button_show.number(this.counter.announce);var i=this.get_upload_item(g);var l=i.find("#symbol");l.addClass(this.state.running);var e=Galaxy.currHistoryPanel.model.get("id");var f=i.find("#extension").val();var 
j=i.find("#space_to_tabs").is(":checked");this.uploadbox.configure({url:galaxy_config.root+"api/tools/",paramname:"files_0|file_data"});tool_input={};tool_input.dbkey="?";tool_input.file_type=f;tool_input["files_0|NAME"]=h.name;tool_input["files_0|type"]="upload_dataset";tool_input.space_to_tabs=j;data={};data.history_id=e;data.tool_id="upload1";data.inputs=JSON.stringify(tool_input);return data},event_progress:function(f,g,i){var h=this.get_upload_item(f);var e=parseInt(i);h.find(".progress-bar").css({width:e+"%"});if(e!=100){h.find("#percentage").html(e+"%")}else{h.find("#percentage").html("Adding to history...")}},event_success:function(e,f,h){this.event_progress(e,f,100);this.button_show.number("");this.counter.announce--;this.counter.success++;this.update_screen();var g=this.get_upload_item(e);g.addClass("success");g.find("#percentage").html("100%");var i=g.find("#symbol");i.removeClass(this.state.running);i.removeClass(this.state.queued);i.addClass(this.state.success);Galaxy.currHistoryPanel.refresh()},event_error:function(e,f,h){this.event_progress(e,f,0);this.button_show.number("");this.counter.announce--;this.counter.error++;this.update_screen();var g=this.get_upload_item(e);g.addClass("danger");g.find(".progress").remove();g.find("#info").html("<strong>Failed: </strong>"+h).show();var i=g.find("#symbol");i.removeClass(this.state.running);i.removeClass(this.state.queued);i.addClass(this.state.error)},event_upload:function(){if(this.counter.announce==0||this.counter.running>0){return}var f=$(this.el).find(".upload-item");var e=this;f.each(function(){var 
g=$(this).find("#symbol");if(g.hasClass(e.state.init)){g.removeClass(e.state.init);g.addClass(e.state.queued);$(this).find("#extension").attr("disabled",true);$(this).find("#space_to_tabs").attr("disabled",true)}});this.counter.running=this.counter.announce;this.update_screen();this.uploadbox.upload()},event_pause:function(){if(this.counter.running==0){return}this.uploadbox.pause();$("#upload-info").html("Queueing will pause after completing the current file...")},event_complete:function(){this.counter.running=0;this.update_screen();var f=$(this.el).find(".upload-item");var e=this;f.each(function(){var g=$(this).find("#symbol");if(g.hasClass(e.state.queued)&&!g.hasClass(e.state.running)){g.removeClass(e.state.queued);g.addClass(e.state.init);$(this).find("#extension").attr("disabled",false);$(this).find("#space_to_tabs").attr("disabled",false)}})},event_reset:function(){if(this.counter.running==0){var e=$(this.el).find(".upload-item");$(this.el).find("table").fadeOut({complete:function(){e.remove()}});this.counter.reset();this.update_screen();this.uploadbox.reset()}},event_remove:function(e){var f=this.get_upload_item(e);var g=f.find("#symbol");if(g.hasClass(this.state.init)||g.hasClass(this.state.success)||g.hasClass(this.state.error)){if(f.hasClass("success")){this.counter.success--}else{if(f.hasClass("danger")){this.counter.error--}else{this.counter.announce--}}this.update_screen();this.uploadbox.remove(e);f.remove()}},event_show:function(g){g.preventDefault();if(!this.modal){var f=this;this.modal=new b.GalaxyModal({title:"Upload files from your local drive",body:this.template("upload-box","upload-info"),buttons:{Select:function(){f.uploadbox.select()},Upload:function(){f.event_upload()},Pause:function(){f.event_pause()},Reset:function(){f.event_reset()},Close:function(){f.modal.hide()}},height:"350"});this.setElement("#upload-box");var 
f=this;this.uploadbox=this.$el.uploadbox({dragover:function(){f.event_dragover()},dragleave:function(){f.event_dragleave()},announce:function(e,h,i){f.event_announce(e,h,i)},initialize:function(e,h,i){return f.event_initialize(e,h,i)},success:function(e,h,i){f.event_success(e,h,i)},progress:function(e,h,i){f.event_progress(e,h,i)},error:function(e,h,i){f.event_error(e,h,i)},complete:function(){f.event_complete()},});this.update_screen()}this.modal.show()},get_upload_item:function(e){return $(this.el).find("#upload-"+e)},size_to_string:function(e){var f="";if(e>=100000000000){e=e/100000000000;f="TB"}else{if(e>=100000000){e=e/100000000;f="GB"}else{if(e>=100000){e=e/100000;f="MB"}else{if(e>=100){e=e/100;f="KB"}else{e=e*10;f="b"}}}}return"<strong>"+(Math.round(e)/10)+"</strong> "+f},update_screen:function(){if(this.counter.announce==0){if(this.uploadbox.compatible){message="Drag&drop files into this box or click 'Select' to select files!"}else{message="Unfortunately, your browser does not support multiple file uploads or drag&drop.<br>Please upgrade to i.e. Firefox 4+, Chrome 7+, IE 10+, Opera 12+ or Safari 6+."}}else{if(this.counter.running==0){message="You added "+this.counter.announce+" file(s) to the queue. 
Add more files or click 'Upload' to proceed."}else{message="Please wait..."+this.counter.announce+" out of "+this.counter.running+" remaining."}}$("#upload-info").html(message);if(this.counter.running==0&&this.counter.announce+this.counter.success+this.counter.error>0){this.modal.enableButton("Reset")}else{this.modal.disableButton("Reset")}if(this.counter.running==0&&this.counter.announce>0){this.modal.enableButton("Upload")}else{this.modal.disableButton("Upload")}if(this.counter.running>0){this.modal.enableButton("Pause")}else{this.modal.disableButton("Pause")}if(this.counter.running==0){this.modal.enableButton("Select")}else{this.modal.disableButton("Select")}if(this.counter.announce+this.counter.success+this.counter.error>0){$(this.el).find("table").show()}else{$(this.el).find("table").hide()}},template:function(f,e){return'<div id="'+f+'" class="upload-box"><table class="table table-striped" style="display: none;"><thead><tr><th>Name</th><th>Size</th><th>Type</th><th>Space→Tab</th><th>Status</th><th></th></tr></thead><tbody></tbody></table></div><h6 id="'+e+'" class="upload-info"></h6>'},template_row:function(g,f){var e='<tr id="'+g.substr(1)+'" class="upload-item"><td><div id="title" class="title"></div></td><td><div id="size" class="size"></div></td><td><select id="extension" class="extension">';for(key in f){e+='<option value="'+key+'">'+f[key]+"</option>"}e+='</select></td><td><input id="space_to_tabs" type="checkbox"></input></td><td><div id="info" class="info"><div class="progress"><div class="progress-bar progress-bar-success"></div><div id="percentage" class="percentage">0%</div></div></div></td><td><div id="symbol" class="symbol '+this.state.init+'"></div></td></tr>';return e}});return{GalaxyUpload:a}});
\ No newline at end of file
+define(["galaxy.modal","galaxy.master","utils/galaxy.utils","utils/galaxy.uploadbox","libs/backbone/backbone-relational"],function(b,d,c){var a=Backbone.View.extend({modal:null,button_show:null,uploadbox:null,select_extension:[["Auto-detect","auto"]],select_genome:[["Unspecified (?)","?"]],state:{init:"fa-icon-trash",queued:"fa-icon-spinner fa-icon-spin",running:"__running__",success:"fa-icon-ok",error:"fa-icon-warning-sign"},counter:{announce:0,success:0,error:0,running:0,reset:function(){this.announce=this.success=this.error=this.running=0}},initialize:function(){if(!Galaxy.currHistoryPanel){var e=this;window.setTimeout(function(){e.initialize()},500);return}if(!Galaxy.currUser.get("id")){return}var e=this;this.button_show=new d.GalaxyMasterIcon({icon:"fa-icon-upload",tooltip:"Upload Files",on_click:function(f){e.event_show(f)},on_unload:function(){if(e.counter.running>0){return"Several uploads are still processing."}},with_number:true});Galaxy.master.prepend(this.button_show);var e=this;c.jsonFromUrl(galaxy_config.root+"api/datatypes",function(f){for(key in f){e.select_extension.push([f[key],f[key]])}});c.jsonFromUrl(galaxy_config.root+"api/genomes",function(f){var g=e.select_genome[0];e.select_genome=[];for(key in f){if(f[key].length>1){if(f[key][1]!==g[1]){e.select_genome.push(f[key])}}}e.select_genome.sort(function(i,h){return i[0]>h[0]?1:i[0]<h[0]?-1:0});e.select_genome.unshift(g)})},event_dragover:function(f){},event_dragleave:function(f){},event_announce:function(i,g,n){var f="#upload-"+i;$(this.el).find("tbody:last").append(this.template_row(f));var h=this.get_upload_item(i);h.fadeIn();h.find("#title").html(g.name);h.find("#size").html(this.size_to_string(g.size));var m=this;h.find("#symbol").on("click",function(){m.event_remove(i)});h.find("#text-content").on("keyup",function(){var o=h.find("#text-content").val().length;h.find("#size").html(m.size_to_string(o))});this.event_progress(i,g,0);this.counter.announce++;this.update_screen();if(g.size==-1){var 
l=h.find("#text");var j=8;var e=h.width()-2*j;var k=h.height()-j;l.css("width",e+"px");l.css("top",k+"px");h.height(k+l.height()+2*j);l.show()}},event_initialize:function(i,e,n){this.button_show.number(this.counter.announce);var g=this.get_upload_item(i);var k=g.find("#symbol");k.addClass(this.state.running);var j=Galaxy.currHistoryPanel.model.get("id");var f=g.find("#extension").val();var m=g.find("#genome").val();var l=g.find("#text-content").val();var h=g.find("#space_to_tabs").is(":checked");if(!l&&!(e.size>0)){return null}this.uploadbox.configure({url:galaxy_config.root+"api/tools/",paramname:"files_0|file_data"});tool_input={};tool_input.dbkey=m;tool_input.file_type=f;tool_input["files_0|NAME"]=e.name;tool_input["files_0|type"]="upload_dataset";tool_input["files_0|url_paste"]=l;tool_input.space_to_tabs=h;data={};data.history_id=j;data.tool_id="upload1";data.inputs=JSON.stringify(tool_input);return data},event_progress:function(f,g,i){var h=this.get_upload_item(f);var e=parseInt(i);h.find(".progress-bar").css({width:e+"%"});if(e!=100){h.find("#percentage").html(e+"%")}else{h.find("#percentage").html("Adding to history...")}},event_success:function(e,f,h){this.event_progress(e,f,100);this.button_show.number("");this.counter.announce--;this.counter.success++;this.update_screen();var g=this.get_upload_item(e);g.addClass("success");g.find("#percentage").html("100%");var i=g.find("#symbol");i.removeClass(this.state.running);i.removeClass(this.state.queued);i.addClass(this.state.success);Galaxy.currHistoryPanel.refresh()},event_error:function(e,f,h){this.event_progress(e,f,0);this.button_show.number("");this.counter.announce--;this.counter.error++;this.update_screen();var g=this.get_upload_item(e);g.addClass("danger");g.find(".progress").remove();g.find("#info").html("<strong>Failed: </strong>"+h).show();var 
i=g.find("#symbol");i.removeClass(this.state.running);i.removeClass(this.state.queued);i.addClass(this.state.error)},event_start:function(){if(this.counter.announce==0||this.counter.running>0){return}var f=$(this.el).find(".upload-item");var e=this;f.each(function(){var g=$(this).find("#symbol");if(g.hasClass(e.state.init)){g.removeClass(e.state.init);g.addClass(e.state.queued);$(this).find("#text-content").attr("disabled",true);$(this).find("#genome").attr("disabled",true);$(this).find("#extension").attr("disabled",true);$(this).find("#space_to_tabs").attr("disabled",true)}});this.counter.running=this.counter.announce;this.update_screen();this.uploadbox.start()},event_stop:function(){if(this.counter.running==0){return}this.uploadbox.stop();$("#upload-info").html("Queue will pause after completing the current file...")},event_complete:function(){this.counter.running=0;this.update_screen();var f=$(this.el).find(".upload-item");var e=this;f.each(function(){var g=$(this).find("#symbol");if(g.hasClass(e.state.queued)&&!g.hasClass(e.state.running)){g.removeClass(e.state.queued);g.addClass(e.state.init);$(this).find("#text-content").attr("disabled",false);$(this).find("#genome").attr("disabled",false);$(this).find("#extension").attr("disabled",false);$(this).find("#space_to_tabs").attr("disabled",false)}})},event_reset:function(){if(this.counter.running==0){var e=$(this.el).find(".upload-item");$(this.el).find("table").fadeOut({complete:function(){e.remove()}});this.counter.reset();this.update_screen();this.uploadbox.reset()}},event_remove:function(e){var f=this.get_upload_item(e);var g=f.find("#symbol");if(g.hasClass(this.state.init)||g.hasClass(this.state.success)||g.hasClass(this.state.error)){if(f.hasClass("success")){this.counter.success--}else{if(f.hasClass("danger")){this.counter.error--}else{this.counter.announce--}}this.update_screen();this.uploadbox.remove(e);f.remove()}},event_create:function(){this.uploadbox.add([{name:"New 
File",size:-1}])},event_show:function(g){g.preventDefault();if(!this.modal){var f=this;this.modal=new b.GalaxyModal({title:"Upload files from your local drive",body:this.template("upload-box","upload-info"),buttons:{Select:function(){f.uploadbox.select()},Create:function(){f.event_create()},Upload:function(){f.event_start()},Pause:function(){f.event_stop()},Reset:function(){f.event_reset()},Close:function(){f.modal.hide()},},height:"400",width:"900"});this.setElement("#upload-box");var f=this;this.uploadbox=this.$el.uploadbox({dragover:function(){f.event_dragover()},dragleave:function(){f.event_dragleave()},announce:function(e,h,i){f.event_announce(e,h,i)},initialize:function(e,h,i){return f.event_initialize(e,h,i)},success:function(e,h,i){f.event_success(e,h,i)},progress:function(e,h,i){f.event_progress(e,h,i)},error:function(e,h,i){f.event_error(e,h,i)},complete:function(){f.event_complete()},});this.update_screen()}this.modal.show()},get_upload_item:function(e){return $(this.el).find("#upload-"+e)},size_to_string:function(e){var f="";if(e>=100000000000){e=e/100000000000;f="TB"}else{if(e>=100000000){e=e/100000000;f="GB"}else{if(e>=100000){e=e/100000;f="MB"}else{if(e>=100){e=e/100;f="KB"}else{if(e>0){e=e*10;f="b"}else{return"<strong>-</strong>"}}}}}return"<strong>"+(Math.round(e)/10)+"</strong> "+f},update_screen:function(){if(this.counter.announce==0){if(this.uploadbox.compatible()){message="Drag&drop files into this box or click 'Select' to select files!"}else{message="Unfortunately, your browser does not support multiple file uploads or drag&drop.<br>Please upgrade to i.e. Firefox 4+, Chrome 7+, IE 10+, Opera 12+ or Safari 6+."}}else{if(this.counter.running==0){message="You added "+this.counter.announce+" file(s) to the queue. 
Add more files or click 'Upload' to proceed."}else{message="Please wait..."+this.counter.announce+" out of "+this.counter.running+" remaining."}}$("#upload-info").html(message);if(this.counter.running==0&&this.counter.announce+this.counter.success+this.counter.error>0){this.modal.enableButton("Reset")}else{this.modal.disableButton("Reset")}if(this.counter.running==0&&this.counter.announce>0){this.modal.enableButton("Upload")}else{this.modal.disableButton("Upload")}if(this.counter.running>0){this.modal.enableButton("Pause")}else{this.modal.disableButton("Pause")}if(this.counter.running==0){this.modal.enableButton("Select");this.modal.enableButton("Create")}else{this.modal.disableButton("Select");this.modal.disableButton("Create")}if(this.counter.announce+this.counter.success+this.counter.error>0){$(this.el).find("table").show()}else{$(this.el).find("table").hide()}},template:function(f,e){return'<div id="'+f+'" class="upload-box"><table class="table table-striped" style="display: none;"><thead><tr><th>Name</th><th>Size</th><th>Type</th><th>Genome</th><th>Space→Tab</th><th>Status</th><th></th></tr></thead><tbody></tbody></table></div><h6 id="'+e+'" class="upload-info"></h6>'},template_row:function(f){var e='<tr id="'+f.substr(1)+'" class="upload-item"><td><div style="position: relative;"><div id="title" class="title"></div><div id="text" class="text"><div class="text-info">You may specify a list of URLs (one per line) or paste the contents of a file.</div><textarea id="text-content" class="text-content form-control"></textarea></div></div></td><td><div id="size" class="size"></div></td>';e+='<td><select id="extension" class="extension">';for(key in this.select_extension){e+='<option value="'+this.select_extension[key][1]+'">'+this.select_extension[key][0]+"</option>"}e+="</select></td>";e+='<td><select id="genome" class="genome">';for(key in this.select_genome){e+='<option 
value="'+this.select_genome[key][1]+'">'+this.select_genome[key][0]+"</option>"}e+="</select></td>";e+='<td><input id="space_to_tabs" type="checkbox"></input></td><td><div id="info" class="info"><div class="progress"><div class="progress-bar progress-bar-success"></div><div id="percentage" class="percentage">0%</div></div></div></td><td><div id="symbol" class="symbol '+this.state.init+'"></div></td></tr>';return e}});return{GalaxyUpload:a}});
\ No newline at end of file
diff -r f888228f48d89ade8b8f49a4d777c573174392d1 -r 5c9cfd1dea8bfb85bcc1ee07035629a77ee3a051 static/scripts/packed/mvc/ui.js
--- a/static/scripts/packed/mvc/ui.js
+++ b/static/scripts/packed/mvc/ui.js
@@ -1,1 +1,1 @@
-var IconButton=Backbone.Model.extend({defaults:{title:"",icon_class:"",on_click:null,menu_options:null,is_menu_button:true,id:null,href:null,target:null,enabled:true,visible:true,tooltip_config:{}}});var IconButtonView=Backbone.View.extend({initialize:function(){this.model.attributes.tooltip_config={placement:"bottom"};this.model.bind("change",this.render,this)},render:function(){this.$el.tooltip("hide");var a=this.template(this.model.attributes);a.tooltip(this.model.get("tooltip_config"));this.$el.replaceWith(a);this.setElement(a);return this},events:{click:"click"},click:function(a){if(this.model.attributes.on_click){this.model.attributes.on_click(a);return false}return true},template:function(b){var a='title="'+b.title+'" class="icon-button';if(b.is_menu_button){a+=" menu-button"}a+=" "+b.icon_class;if(!b.enabled){a+="_disabled"}a+='"';if(b.id){a+=' id="'+b.id+'"'}a+=' href="'+b.href+'"';if(b.target){a+=' target="'+b.target+'"'}if(!b.visible){a+=' style="display: none;"'}if(b.enabled){a="<a "+a+"/>"}else{a="<span "+a+"/>"}return $(a)}});var IconButtonCollection=Backbone.Collection.extend({model:IconButton});var IconButtonMenuView=Backbone.View.extend({tagName:"div",initialize:function(){this.render()},render:function(){var a=this;this.collection.each(function(d){var b=$("<a/>").attr("href","javascript:void(0)").attr("title",d.attributes.title).addClass("icon-button menu-button").addClass(d.attributes.icon_class).appendTo(a.$el).click(d.attributes.on_click);if(d.attributes.tooltip_config){b.tooltip(d.attributes.tooltip_config)}var c=d.get("options");if(c){make_popupmenu(b,c)}});return this}});var create_icon_buttons_menu=function(b,a){if(!a){a={}}var c=new IconButtonCollection(_.map(b,function(d){return new IconButton(_.extend(d,a))}));return new IconButtonMenuView({collection:c})};var Grid=Backbone.Collection.extend({});var GridView=Backbone.View.extend({});var 
PopupMenu=Backbone.View.extend({initialize:function(b,a){this.$button=b||$("<div/>");this.options=a||[];var c=this;this.$button.click(function(d){c._renderAndShow(d);return false});this.$button.data("PopupMenu",this)},render:function(){var a=this;this.$el.addClass("popmenu-wrapper").css({position:"absolute",display:"none"});this.$el.html(this.template(this.$button.attr("id"),this.options));if(this.options.length){this.$el.find("li").each(function(c,b){var f=$(b),e=f.children("a.popupmenu-option"),d=a.options[c].func;if(e.length&&d){e.click(function(g){d(g,a.options[c])})}a.options[c].$li=f})}return this},_getShownPosition:function(b){var c=this.$el.width();var a=b.pageX-c/2;a=Math.min(a,$(document).scrollLeft()+$(window).width()-c-5);a=Math.max(a,$(document).scrollLeft()+5);return{top:b.pageY,left:a}},_renderAndShow:function(a){this.render();this.$el.appendTo("body");this.$el.css(this._getShownPosition(a));this._setUpCloseBehavior();this.$el.show()},_setUpCloseBehavior:function(){var b=this;var a=function(c){c.bind("click.close_popup",function(){b.remove();c.unbind("click.close_popup")})};a($(window.document));a($(window.top.document));_.each(window.top.frames,function(c){a($(c.document))})},addItem:function(b,a){a=(a>=0)?a:this.options.length;this.options.splice(a,0,b);return this},removeItem:function(a){if(a>=0){this.options.splice(a,1)}return this},findIndexByHtml:function(b){for(var a=0;a<this.options.length;a++){if(_.has(this.options[a],"html")&&(this.options[a].html===b)){return a}}return null},findItemByHtml:function(a){return this.options[(this.findIndexByHtml(a))]},toString:function(){return"PopupMenu"},template:function(e,b){var a='<ul id="'+e+'-menu" class="dropdown-menu">';if(b.length>0){for(var c in b){var d=b[c];if(d.divider){a+='<li class="divider"></li>'}else{if(d.header){a+='<li class="head"><a href="javascript:void(0);">'+d.html+"</a></li>"}else{if(d.href){a+='<li><a href="'+d.href+'"';a+='target="'+d.target+'"'}else{a+='<li><a 
href="javascript:void(0);"'}a+='class="popupmenu-option">';if(d.checked){a+='<span class="fa-icon-ok"></span>'}a+=d.html}}}}else{a+="<li>No Options.</li>"}return a+"</ul>"}});PopupMenu.make_popupmenu=function(b,c){var a=[];_.each(c,function(f,d){var e={html:d};if(f===null){e.header=true}else{if(jQuery.type(f)==="function"){e.func=f}}a.push(e)});return new PopupMenu($(b),a)};PopupMenu.convertLinksToOptions=function(c,a){c=$(c);a=a||"a";var b=[];c.find(a).each(function(g,e){var f={},d=$(g);f.html=d.text();if(j){var j=d.attr("href"),k=d.attr("target"),h=d.attr("confirm");f.func=function(){if((h)&&(!confirm(h))){return}var i;switch(k){case"_parent":window.parent.location=j;break;case"_top":window.top.location=j;break;case"demo":if(i===undefined||i.closed){i=window.open(j,k);i.creator=self}break;default:window.location=j}}}b.push(f)});return b};PopupMenu.fromExistingDom=function(d,c,a){d=$(d);c=$(c);var b=PopupMenu.convertLinksToOptions(c,a);c.remove();return new PopupMenu(d,b)};PopupMenu.make_popup_menus=function(c,b,d){c=c||document;b=b||"div[popupmenu]";d=d||function(e,f){return"#"+e.attr("popupmenu")};var a=[];$(c).find(b).each(function(){var e=$(this),f=$(c).find(d(e,c));a.push(PopupMenu.fromDom(f,e));f.addClass("popup")});return a};
\ No newline at end of file
+var IconButton=Backbone.Model.extend({defaults:{title:"",icon_class:"",on_click:null,menu_options:null,is_menu_button:true,id:null,href:null,target:null,enabled:true,visible:true,tooltip_config:{}}});var IconButtonView=Backbone.View.extend({initialize:function(){this.model.attributes.tooltip_config={placement:"bottom"};this.model.bind("change",this.render,this)},render:function(){this.$el.tooltip("hide");var a=this.template(this.model.attributes);a.tooltip(this.model.get("tooltip_config"));this.$el.replaceWith(a);this.setElement(a);return this},events:{click:"click"},click:function(a){if(this.model.attributes.on_click){this.model.attributes.on_click(a);return false}return true},template:function(b){var a='title="'+b.title+'" class="icon-button';if(b.is_menu_button){a+=" menu-button"}a+=" "+b.icon_class;if(!b.enabled){a+="_disabled"}a+='"';if(b.id){a+=' id="'+b.id+'"'}a+=' href="'+b.href+'"';if(b.target){a+=' target="'+b.target+'"'}if(!b.visible){a+=' style="display: none;"'}if(b.enabled){a="<a "+a+"/>"}else{a="<span "+a+"/>"}return $(a)}});var IconButtonCollection=Backbone.Collection.extend({model:IconButton});var IconButtonMenuView=Backbone.View.extend({tagName:"div",initialize:function(){this.render()},render:function(){var a=this;this.collection.each(function(d){var b=$("<a/>").attr("href","javascript:void(0)").attr("title",d.attributes.title).addClass("icon-button menu-button").addClass(d.attributes.icon_class).appendTo(a.$el).click(d.attributes.on_click);if(d.attributes.tooltip_config){b.tooltip(d.attributes.tooltip_config)}var c=d.get("options");if(c){make_popupmenu(b,c)}});return this}});var create_icon_buttons_menu=function(b,a){if(!a){a={}}var c=new IconButtonCollection(_.map(b,function(d){return new IconButton(_.extend(d,a))}));return new IconButtonMenuView({collection:c})};var Grid=Backbone.Collection.extend({});var GridView=Backbone.View.extend({});var 
PopupMenu=Backbone.View.extend({initialize:function(b,a){this.$button=b||$("<div/>");this.options=a||[];var c=this;this.$button.click(function(d){c._renderAndShow(d);return false})},_renderAndShow:function(a){this.render();this.$el.appendTo("body");this.$el.css(this._getShownPosition(a));this._setUpCloseBehavior();this.$el.show()},render:function(){this.$el.addClass("popmenu-wrapper").hide().css({position:"absolute"}).html(this.template(this.$button.attr("id"),this.options));if(this.options.length){var a=this;this.$el.find("li").each(function(c,b){var d=a.options[c];if(d.func){$(this).children("a.popupmenu-option").click(function(e){d.func.call(a,e,d)})}})}return this},template:function(b,a){return['<ul id="',b,'-menu" class="dropdown-menu">',this._templateOptions(a),"</ul>"].join("")},_templateOptions:function(a){if(!a.length){return"<li>(no options)</li>"}return _.map(a,function(d){if(d.divider){return'<li class="divider"></li>'}else{if(d.header){return['<li class="head"><a href="javascript:void(0);">',d.html,"</a></li>"].join("")}}var c=d.href||"javascript:void(0);",e=(d.target)?(' target="'+e+'"'):(""),b=(d.checked)?('<span class="fa-icon-ok"></span>'):("");return['<li><a class="popupmenu-option" href="',c,'"',e,">",b,d.html,"</a></li>"].join("")}).join("")},_getShownPosition:function(b){var c=this.$el.width();var a=b.pageX-c/2;a=Math.min(a,$(document).scrollLeft()+$(window).width()-c-5);a=Math.max(a,$(document).scrollLeft()+5);return{top:b.pageY,left:a}},_setUpCloseBehavior:function(){var b=this;var a=function(c){c.one("click.close_popup",function(){b.remove()})};a($(window.document));a($(window.top.document));_.each(window.top.frames,function(c){a($(c.document))})},addItem:function(b,a){a=(a>=0)?a:this.options.length;this.options.splice(a,0,b);return this},removeItem:function(a){if(a>=0){this.options.splice(a,1)}return this},findIndexByHtml:function(b){for(var a=0;a<this.options.length;a++){if(_.has(this.options[a],"html")&&(this.options[a].html===b)){return 
a}}return null},findItemByHtml:function(a){return this.options[(this.findIndexByHtml(a))]},toString:function(){return"PopupMenu"}});PopupMenu.make_popupmenu=function(b,c){var a=[];_.each(c,function(f,d){var e={html:d};if(f===null){e.header=true}else{if(jQuery.type(f)==="function"){e.func=f}}a.push(e)});return new PopupMenu($(b),a)};PopupMenu.convertLinksToOptions=function(c,a){c=$(c);a=a||"a";var b=[];c.find(a).each(function(g,e){var f={},d=$(g);f.html=d.text();if(j){var j=d.attr("href"),k=d.attr("target"),h=d.attr("confirm");f.func=function(){if((h)&&(!confirm(h))){return}var i;switch(k){case"_parent":window.parent.location=j;break;case"_top":window.top.location=j;break;case"demo":if(i===undefined||i.closed){i=window.open(j,k);i.creator=self}break;default:window.location=j}}}b.push(f)});return b};PopupMenu.fromExistingDom=function(d,c,a){d=$(d);c=$(c);var b=PopupMenu.convertLinksToOptions(c,a);c.remove();return new PopupMenu(d,b)};PopupMenu.make_popup_menus=function(c,b,d){c=c||document;b=b||"div[popupmenu]";d=d||function(e,f){return"#"+e.attr("popupmenu")};var a=[];$(c).find(b).each(function(){var e=$(this),f=$(c).find(d(e,c));a.push(PopupMenu.fromDom(f,e));f.addClass("popup")});return a};
\ No newline at end of file
diff -r f888228f48d89ade8b8f49a4d777c573174392d1 -r 5c9cfd1dea8bfb85bcc1ee07035629a77ee3a051 static/scripts/packed/utils/galaxy.uploadbox.js
--- a/static/scripts/packed/utils/galaxy.uploadbox.js
+++ b/static/scripts/packed/utils/galaxy.uploadbox.js
@@ -1,1 +1,1 @@
-(function(d){jQuery.event.props.push("dataTransfer");var h={url:"",paramname:"content",maxfilesize:2048,maxfilenumber:20,dragover:function(){},dragleave:function(){},announce:function(){},initialize:function(){},progress:function(){},success:function(){},error:function(j,k,l){alert(l)},complete:function(){},error_filesize:"File exceeds 2GB. Please use an FTP client.",error_default:"Please make sure the file is available.",error_server:"The server is unavailable.",error_toomany:"You can only queue <20 files per upload session."};var b={};var f={};var g=0;var i=0;var e=false;var a=false;var c=null;d.fn.uploadbox=function(z){b=d.extend({},h,z);c=this;c.append('<input id="uploadbox_input" type="file" style="display: none" multiple>');c.on("drop",m);c.on("dragover",n);c.on("dragleave",u);d("#uploadbox_input").change(function(A){w(A.target.files);d(this).val("")});function m(A){if(!A.dataTransfer){return}w(A.dataTransfer.files);A.preventDefault();return false}function n(A){A.preventDefault();b.dragover.call(A)}function u(A){A.stopPropagation();b.dragleave.call(A)}function j(A){if(A.lengthComputable){b.progress(this.index,this.file,Math.round((A.loaded*100)/A.total))}}function w(C){if(e){return}for(var B=0;B<C.length;B++){if(i>=b.maxfilenumber){break}var A=String(g++);f[A]=C[B];b.announce(A,f[A],"");i++}}function p(A){if(f[A]){delete f[A];i--}}function k(){if(i==0||a){a=false;e=false;b.complete();return}else{e=true}var D=-1;for(var F in f){D=F;break}var E=f[D];p(D);var H=b.initialize(D,E);try{var B=new FileReader();var C=E.size;var A=1048576*b.maxfilesize;B.index=D;if(C<A){B.onload=function(I){o(D,E,H)};B.onerror=function(I){s(D,E,b.error_default)};B.onabort=function(I){s(D,E,b.error_default)};B.readAsDataURL(E)}else{s(D,E,b.error_filesize)}}catch(G){s(D,E,G)}}function o(A,C,D){var E=new FormData();for(var B in D){E.append(B,D[B])}E.append(b.paramname,C,C.name);var F=new XMLHttpRequest();F.onreadystatechange=function(){if(F.readyState!=F.DONE){return}var 
G=null;if(F.responseText){try{G=jQuery.parseJSON(F.responseText)}catch(H){G=F.responseText}}if(F.status<200||F.status>299){var I=F.statusText;if(!F.statusText){if(F.status==0){I=b.error_server}else{I=b.error_default}}s(A,C,I+" ("+F.status+")")}else{v(A,C,G)}};F.upload.index=A;F.upload.file=C;F.upload.addEventListener("progress",j,false);F.open("POST",b.url,true);F.setRequestHeader("Accept","application/json");F.setRequestHeader("Cache-Control","no-cache");F.setRequestHeader("X-Requested-With","XMLHttpRequest");F.send(E)}function v(A,B,C){b.success(A,B,C);k()}function s(A,B,C){b.error(A,B,C);k()}function t(){d("#uploadbox_input").trigger("click")}function r(A){for(A in f){p(A)}}function y(){if(!e){e=true;k()}}function x(){a=true}function l(A){b=d.extend({},b,A);return b}function q(){return window.File&&window.FileReader&&window.FormData&&window.XMLHttpRequest&&window.FileList}return{select:t,remove:p,upload:y,pause:x,reset:r,configure:l,compatible:q}}})(jQuery);
\ No newline at end of file
+(function(c){jQuery.event.props.push("dataTransfer");var h={url:"",paramname:"content",maxfilesize:2048,maxfilenumber:20,dragover:function(){},dragleave:function(){},announce:function(){},initialize:function(){},progress:function(){},success:function(){},error:function(k,l,m){alert(m)},complete:function(){},error_filesize:"File exceeds 2GB. Please use an FTP client.",error_default:"Please make sure the file is available.",error_server:"Upload request failed.",error_toomany:"You can only queue <20 files per upload session.",error_login:"Uploads require you to log in.",error_missing:"No upload content available."};var a={};var e={};var f=0;var j=0;var d=false;var g=false;var b=null;var i=null;c.fn.uploadbox=function(A){a=c.extend({},h,A);b=this;b.append('<input id="uploadbox_input" type="file" style="display: none" multiple>');b.on("drop",o);b.on("dragover",p);b.on("dragleave",x);c("#uploadbox_input").change(function(B){z(B.target.files);c(this).val("")});function o(B){if(!B.dataTransfer){return}z(B.dataTransfer.files);B.preventDefault();return false}function p(B){B.preventDefault();a.dragover.call(B)}function x(B){B.stopPropagation();a.dragleave.call(B)}function k(B){if(B.lengthComputable){a.progress(this.index,this.file,Math.round((B.loaded*100)/B.total))}}function z(D){if(d){return}for(var C=0;C<D.length;C++){if(j>=a.maxfilenumber){break}var B=String(f++);e[B]=D[C];a.announce(B,e[B],"");j++}}function r(B){if(e[B]){delete e[B];j--}}function m(){if(j==0||g){g=false;d=false;a.complete();return}else{d=true}var D=-1;for(var F in e){D=F;break}var E=e[D];r(D);var C=E.size;var B=1048576*a.maxfilesize;if(C<B){var G=a.initialize(D,E);if(G){q(D,E,G)}else{u(D,E,a.error_missing)}}else{u(D,E,a.error_filesize)}}function q(B,D,E){var F=new FormData();for(var C in E){F.append(C,E[C])}if(D.size>0){F.append(a.paramname,D,D.name)}i=new 
XMLHttpRequest();i.open("POST",a.url,true);i.setRequestHeader("Accept","application/json");i.setRequestHeader("Cache-Control","no-cache");i.setRequestHeader("X-Requested-With","XMLHttpRequest");i.onreadystatechange=function(){if(i.readyState!=i.DONE){return}var G=null;if(i.responseText){try{G=jQuery.parseJSON(i.responseText)}catch(H){G=i.responseText}}if(i.status<200||i.status>299){var I=i.statusText;if(i.status==403){I=a.error_login}else{if(i.status==0){I=a.error_server}else{if(!I){I=a.error_default}}}u(B,D,I+" ("+i.status+")")}else{y(B,D,G)}};i.upload.index=B;i.upload.file=D;i.upload.addEventListener("progress",k,false);i.send(F)}function y(B,C,D){a.success(B,C,D);m()}function u(B,C,D){a.error(B,C,D);m()}function w(){c("#uploadbox_input").trigger("click")}function t(B){for(B in e){r(B)}}function l(){if(!d){d=true;m()}}function v(){g=true}function n(B){a=c.extend({},a,B);return a}function s(){return window.File&&window.FormData&&window.XMLHttpRequest&&window.FileList}return{select:w,add:z,remove:r,start:l,stop:v,reset:t,configure:n,compatible:s}}})(jQuery);
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
6 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d816d6602e18/
Changeset: d816d6602e18
Branch: custom-toolbox2
User: BjoernGruening
Date: 2013-06-13 22:46:22
Summary: Implement the ability to change the tool panel as a user preference.
Affected #: 10 files
diff -r 6f9e73ec3e99d43f14e3d735ecfe7b66ea38472a -r d816d6602e18fd8e04c5d141527ca77a3e39e6cf lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -6,8 +6,9 @@
import logging, logging.config
import ConfigParser
from datetime import timedelta
-from galaxy.util import string_as_bool, listify, parse_xml
-
+from galaxy.util import string_as_bool
+from galaxy.util import listify
+from galaxy.util import parse_xml
from galaxy import eggs
import pkg_resources
@@ -64,9 +65,14 @@
tcf = kwargs[ 'tool_config_files' ]
else:
tcf = 'tool_conf.xml'
- self.tool_filters = listify( kwargs.get( "tool_filters", [] ) )
- self.tool_label_filters = listify( kwargs.get( "tool_label_filters", [] ) )
- self.tool_section_filters = listify( kwargs.get( "tool_section_filters", [] ) )
+ self.tool_filters = listify( kwargs.get( "tool_filters", [] ), do_strip=True )
+ self.tool_label_filters = listify( kwargs.get( "tool_label_filters", [] ), do_strip=True )
+ self.tool_section_filters = listify( kwargs.get( "tool_section_filters", [] ), do_strip=True )
+
+ self.user_tool_filters = listify( kwargs.get( "user_tool_filters", [] ), do_strip=True )
+ self.user_label_filters = listify( kwargs.get( "user_tool_label_filters", [] ), do_strip=True )
+ self.user_section_filters = listify( kwargs.get( "user_tool_section_filters", [] ), do_strip=True )
+
self.tool_configs = [ resolve_path( p, self.root ) for p in listify( tcf ) ]
self.shed_tool_data_path = kwargs.get( "shed_tool_data_path", None )
if self.shed_tool_data_path:
@@ -459,3 +465,4 @@
sentry_handler = SentryHandler( config.sentry_dsn )
sentry_handler.setLevel( logging.WARN )
root.addHandler( sentry_handler )
+
diff -r 6f9e73ec3e99d43f14e3d735ecfe7b66ea38472a -r d816d6602e18fd8e04c5d141527ca77a3e39e6cf lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3267,7 +3267,7 @@
#end of Data Manager Classes
class UserPreference ( object ):
- def __init__( self, name=None, value=None ):
+ def __init__( self, name=None, value=None):
self.name = name
self.value = value
diff -r 6f9e73ec3e99d43f14e3d735ecfe7b66ea38472a -r d816d6602e18fd8e04c5d141527ca77a3e39e6cf lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -3426,3 +3426,4 @@
class InterruptedUpload( Exception ):
pass
+
diff -r 6f9e73ec3e99d43f14e3d735ecfe7b66ea38472a -r d816d6602e18fd8e04c5d141527ca77a3e39e6cf lib/galaxy/tools/filters/__init__.py
--- a/lib/galaxy/tools/filters/__init__.py
+++ b/lib/galaxy/tools/filters/__init__.py
@@ -1,4 +1,5 @@
-
+from galaxy.util import listify
+from copy import deepcopy
class FilterFactory( object ):
"""
@@ -15,17 +16,29 @@
self.default_filters = dict( tool=[ _not_hidden ], section=[], label=[] )
# Add dynamic filters to these default filters.
config = toolbox.app.config
- self.__init_filters( "tool", config.tool_filters )
- self.__init_filters( "section", config.tool_section_filters )
- self.__init_filters( "label", config.tool_label_filters )
+ self.__init_filters( "tool", config.tool_filters, self.default_filters )
+ self.__init_filters( "section", config.tool_section_filters, self.default_filters )
+ self.__init_filters( "label", config.tool_label_filters, self.default_filters )
def build_filters( self, trans, **kwds ):
"""
Build list of filters to check tools against given current context.
"""
- filters = self.default_filters.copy()
-
- if not trans.user:
+ filters = deepcopy( self.default_filters )
+ if trans.user:
+ for name, value in trans.user.preferences.items():
+ if value.strip():
+ user_filters = listify( value, do_strip=True )
+ category = ''
+ if name == 'toolbox_tool_filters':
+ category = "tool"
+ elif name == 'toolbox_section_filters':
+ category = "section"
+ elif name == 'toolbox_label_filters':
+ category = "label"
+ if category:
+ self.__init_filters( category, user_filters, filters )
+ else:
filters[ "tool" ].append( _requires_login )
if kwds.get( "trackster", False ):
@@ -33,10 +46,11 @@
return filters
- def __init_filters( self, key, filters ):
+ def __init_filters( self, key, filters, toolbox_filters ):
for filter in filters:
filter_function = self.__build_filter_function( filter )
- self.default_filters[ key ].append( filter_function )
+ toolbox_filters[ key ].append( filter_function )
+ return toolbox_filters
def __build_filter_function( self, filter_name ):
"""Obtain python function (importing a submodule if needed)
diff -r 6f9e73ec3e99d43f14e3d735ecfe7b66ea38472a -r d816d6602e18fd8e04c5d141527ca77a3e39e6cf lib/galaxy/tools/filters/examples.py
--- /dev/null
+++ b/lib/galaxy/tools/filters/examples.py
@@ -0,0 +1,115 @@
+import logging
+log = logging.getLogger( __name__ )
+
+
+def restrict_encode( content, tool ):
+ """
+ Disable the random interval ENCODE tool
+
+ This tool filter will disable all the ENCODE tool when enabled.
+ """
+ if tool.id == 'random_intervals1':
+ return False
+ return True
+
+
+def restrict_text( content, section ):
+ """
+ Disable Text sections
+
+ This tool filter will disable all Tools groups under a 'Text' section when enabled.
+ """
+ if section.name.find('Text') != -1:
+ return False
+ return True
+
+
+def restrict_upload_to_admins( context, tool ):
+ """
+ Disable Upload tool for all non-admin users.
+
+ This tool filter will hide the upload tool from all users except admin
+ users.
+
+ This can be enabled by renaming this file to examples.py and adding
+ the following to the ``app:main`` section of ``universe_wsgi.ini``:
+
+ tool_filters = examples:restrict_upload_to_admins
+ """
+ if tool.name == "Upload File":
+ return context.trans.user_is_admin()
+ return True
+
+
+def disable_gatk( context, tool ):
+ """
+ This tool filter will disable all gatk tools when enabled.
+
+ This can be enabled by renaming this file to examples.py and adding the following to the
+ ``app:main`` section of ``universe_wsgi.ini``:
+
+ tool_filters = examples:disable_gatk
+ """
+ return not any( [ requirement.name == "gatk" for requirement in tool.requirements ] )
+
+
+def explicit_user_mapping( context, section ):
+ """
+ This tool section filter uses an explicit mapping to describe what users can view
+ which tool sections. Anonymous users will only be able to view the "Get Data"
+ tool section (with id getext). This can be enabled by renaming this file to
+ examples.py and adding the following to the ``app:main`` section of
+ ``universe_wsgi.ini``:
+
+ tool_section_filters = examples:explicit_user_mapping
+ """
+ users_sections = {
+ None: [ "getext" ],
+ "bob(a)example.com": [ "getext", "textutil", "filter" ],
+ "mary(a)example.com": [ "getext", "textutil", "filter", "ngs" ],
+ }
+ user = context.trans.user
+ email = user and user.email
+ valid_sections = users_sections.get( email, [] )
+ return section.id in valid_sections
+
+
+DEVELOPERS = [ "mary(a)example.com" ]
+
+
+def restrict_development_tools( context, tool ):
+ """
+ This tool filter will disable all tools with the string alpha appearing in
+ the version for all users except those explicitly appearing in the DEVELOPERS list
+ defined above. This can be enabled by renaming this file to examples.py and
+ adding the following to the ``app:main`` section of ``universe_wsgi.ini``:
+
+ tool_filters = examples:restrict_development_tools
+ """
+ version = tool.version
+ user = context.trans.user
+ email = user and user.email
+ return "alpha" not in version or email in DEVELOPERS
+
+
+def per_host_tool_sections( context, section ):
+ """
+ This tool section filter results in different sections being display based on
+ the URL the user is making the request to. This could allow a single Galaxy instance
+ to seem like several different instances hosting different tools based on the URL used
+ to access the Galxy. This can be enabled by renaming this file to examples.py and adding
+ the following to the ``app:main`` section of ``universe_wsgi.ini``:
+
+ tool_section_filters = examples:per_host_tool_sections
+ """
+ host = context.trans.request.host
+ # Core tools used by all virtual hosts.
+ valid_sections = [ "getext", "textutil", "filter" ]
+ if "ngs.galaxy.example.com" in host:
+ valid_sections += [ "ngs" ]
+ elif "microarray.galaxy.example.com" in host:
+ valid_sections += [ "microarray" ]
+ elif "proteomics.galaxy.example.com" in host:
+ valid_sections += [ "proteomics" ]
+ return section.id in valid_sections
+
diff -r 6f9e73ec3e99d43f14e3d735ecfe7b66ea38472a -r d816d6602e18fd8e04c5d141527ca77a3e39e6cf lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -496,7 +496,7 @@
else:
return False
-def listify( item ):
+def listify( item, do_strip=False ):
"""
Make a single item a single item list, or return a list if passed a
list. Passing a None returns an empty list.
@@ -506,7 +506,10 @@
elif isinstance( item, list ):
return item
elif isinstance( item, basestring ) and item.count( ',' ):
- return item.split( ',' )
+ if do_strip:
+ return [token.strip() for token in item.split( ',' )]
+ else:
+ return item.split( ',' )
else:
return [ item ]
@@ -746,6 +749,32 @@
current_group,
e ) )
+def docstring_trim(docstring):
+ """Trimming python doc strings. Taken from: http://www.python.org/dev/peps/pep-0257/"""
+ if not docstring:
+ return ''
+ # Convert tabs to spaces (following the normal Python rules)
+ # and split into a list of lines:
+ lines = docstring.expandtabs().splitlines()
+ # Determine minimum indentation (first line doesn't count):
+ indent = sys.maxint
+ for line in lines[1:]:
+ stripped = line.lstrip()
+ if stripped:
+ indent = min(indent, len(line) - len(stripped))
+ # Remove indentation (first line is special):
+ trimmed = [lines[0].strip()]
+ if indent < sys.maxint:
+ for line in lines[1:]:
+ trimmed.append(line[indent:].rstrip())
+ # Strip off trailing and leading blank lines:
+ while trimmed and not trimmed[-1]:
+ trimmed.pop()
+ while trimmed and not trimmed[0]:
+ trimmed.pop(0)
+ # Return a single string:
+ return '\n'.join(trimmed)
+
def nice_size(size):
"""
Returns a readably formatted string with the size
diff -r 6f9e73ec3e99d43f14e3d735ecfe7b66ea38472a -r d816d6602e18fd8e04c5d141527ca77a3e39e6cf lib/galaxy/webapps/galaxy/controllers/user.py
--- a/lib/galaxy/webapps/galaxy/controllers/user.py
+++ b/lib/galaxy/webapps/galaxy/controllers/user.py
@@ -9,14 +9,24 @@
import string
import random
from galaxy import web
-from galaxy import util, model
+from galaxy import util
+from galaxy import model
from galaxy.model.orm import and_
-from galaxy.security.validate_user_input import validate_email, validate_publicname, validate_password, transform_publicname
-from galaxy.util.json import from_json_string, to_json_string
+from galaxy.security.validate_user_input import validate_email
+from galaxy.security.validate_user_input import validate_publicname
+from galaxy.security.validate_user_input import validate_password
+from galaxy.security.validate_user_input import transform_publicname
+from galaxy.util.json import from_json_string
+from galaxy.util.json import to_json_string
+from galaxy.util import listify
+from galaxy.util import docstring_trim
from galaxy.web import url_for
-from galaxy.web.base.controller import BaseUIController, UsesFormDefinitionsMixin
-from galaxy.web.form_builder import CheckboxField, build_select_field
+from galaxy.web.base.controller import BaseUIController
+from galaxy.web.base.controller import UsesFormDefinitionsMixin
+from galaxy.web.form_builder import CheckboxField
+from galaxy.web.form_builder import build_select_field
from galaxy.web.framework.helpers import time_ago, grids
+from galaxy.web.framework.helpers import grids
log = logging.getLogger( __name__ )
@@ -101,6 +111,7 @@
use_panels=use_panels,
message=message,
status='error' ) )
+
@web.expose
def openid_process( self, trans, **kwd ):
'''Handle's response from OpenID Providers'''
@@ -221,6 +232,7 @@
redirect=redirect,
message=message,
status=status ) )
+
@web.expose
def openid_associate( self, trans, cntrller='user', **kwd ):
'''Associates a user with an OpenID log in'''
@@ -359,6 +371,7 @@
user_type_form_definition=user_type_form_definition,
widgets=widgets,
openids=openids )
+
@web.expose
@web.require_login( 'manage OpenIDs' )
def openid_disassociate( self, trans, **kwd ):
@@ -465,6 +478,7 @@
openid_providers=trans.app.openid_providers,
form_input_auto_focus=True,
active_view="user" )
+
def __validate_login( self, trans, **kwd ):
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
@@ -662,6 +676,7 @@
message = 'Now logged in as %s.<br><a target="_top" href="%s">Return to the home page.</a>' % ( user.email, url_for( '/' ) )
success = True
return ( message, status, user, success )
+
def __get_user_type_form_definition( self, trans, user=None, **kwd ):
params = util.Params( kwd )
if user and user.values:
@@ -673,6 +688,7 @@
else:
user_type_form_definition = None
return user_type_form_definition
+
def __get_widgets( self, trans, user_type_form_definition, user=None, **kwd ):
widgets = []
if user_type_form_definition:
@@ -774,6 +790,7 @@
username=user.username,
message=message,
status=status )
+
@web.expose
def edit_info( self, trans, cntrller, **kwd ):
params = util.Params( kwd )
@@ -882,6 +899,7 @@
action='manage_user_info',
cntrller=cntrller,
**kwd ) )
+
@web.expose
def reset_password( self, trans, email=None, **kwd ):
if trans.app.config.smtp_server is None:
@@ -929,6 +947,7 @@
return trans.fill_template( '/user/reset_password.mako',
message=message,
status=status )
+
def __validate( self, trans, params, email, password, confirm, username ):
# If coming from the tool shed webapp, we'll require a public user name
if trans.webapp.name == 'tool_shed':
@@ -950,6 +969,7 @@
if user_type_fd_id in [ 'none' ]:
return "Select the user's type and information"
return message
+
@web.expose
def set_default_permissions( self, trans, cntrller, **kwd ):
"""Sets the user's default permissions for the new histories"""
@@ -975,7 +995,128 @@
status=status )
else:
# User not logged in, history group must be only public
- return trans.show_error_message( "You must be logged in to change your default permitted actions." )
+ return trans.show_error_message( "You must be logged in to change your default permitted actions." )
+
+ @web.expose
+ @web.require_login()
+ def toolbox_filters( self, trans, cntrller, **kwd ):
+ """
+ Sets the user's default filters for the toolbox.
+ Toolbox filters are specified in universe_wsgi.ini.
+ The user can activate them and the choice is stored in user_preferences.
+ """
+
+ def get_filter_mapping( db_filters, config_filters ):
+ """
+ Compare the allowed filters from the universe_wsgi.ini config file with the previously saved or default filters from the database.
+ We need that to toggle the checkboxes for the form in the right way.
+ Furthermore we extract all information associated with a filter to display it in the form.
+ """
+ filters = list()
+ for filter_name in config_filters:
+ if ":" in filter_name:
+ # Should be a submodule of filters (e.g. examples:restrict_development_tools)
+ (module_name, function_name) = filter_name.rsplit(":", 1)
+ module_name = 'galaxy.tools.filters.%s' % module_name.strip()
+ module = __import__( module_name, globals(), fromlist=['temp_module'] )
+ function = getattr( module, function_name.strip() )
+ else:
+ # No module found; it has to be explicitly imported.
+ module = __import__( 'galaxy.tools.filters', globals(), fromlist=['temp_module'] )
+ function = getattr( globals(), filter_name.strip() )
+
+ doc_string = docstring_trim( function.__doc__ )
+ split = doc_string.split('\n\n')
+ if split:
+ sdesc = split[0]
+ else:
+ log.error( 'No description specified in the __doc__ string for %s.' % filter_name )
+ if len(split) > 1:
+ description = split[1]
+ else:
+ description = ''
+
+ if filter_name in db_filters:
+ filters.append( dict( filterpath=filter_name, short_desc=sdesc, desc=description, checked=True ) )
+ else:
+ filters.append( dict( filterpath=filter_name, short_desc=sdesc, desc=description, checked=False ) )
+ return filters
+
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+
+ user_id = params.get( 'user_id', False )
+ if user_id:
+ user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+ else:
+ user = trans.user
+
+ if user:
+ saved_user_tool_filters = list()
+ saved_user_section_filters = list()
+ saved_user_label_filters = list()
+
+ for name, value in user.preferences.items():
+ if name == 'toolbox_tool_filters':
+ saved_user_tool_filters = listify( value, do_strip=True )
+ elif name == 'toolbox_section_filters':
+ saved_user_section_filters = listify( value, do_strip=True )
+ elif name == 'toolbox_label_filters':
+ saved_user_label_filters = listify( value, do_strip=True )
+
+ tool_filters = get_filter_mapping( saved_user_tool_filters, trans.app.config.user_tool_filters )
+ section_filters = get_filter_mapping( saved_user_section_filters, trans.app.config.user_section_filters )
+ label_filters = get_filter_mapping( saved_user_label_filters, trans.app.config.user_label_filters )
+
+ return trans.fill_template( 'user/toolbox_filters.mako',
+ cntrller=cntrller,
+ message=message,
+ tool_filters=tool_filters,
+ section_filters=section_filters,
+ label_filters=label_filters,
+ user=user,
+ status=status )
+ else:
+ # User not logged in, history group must be only public
+ return trans.show_error_message( "You must be logged in to change private toolbox filters." )
+
+ @web.expose
+ @web.require_login( "to change the private toolbox filters" )
+ def edit_toolbox_filters( self, trans, cntrller, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ user_id = params.get( 'user_id', False )
+ if not user_id:
+ # User must be logged in to create a new address
+ return trans.show_error_message( "You must be logged in to change the ToolBox filters." )
+
+ user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+
+ if params.get( 'edit_toolbox_filter_button', False ):
+ tool_filters = list()
+ section_filters = list()
+ label_filters = list()
+ for name, state in params.flatten():
+ if state == 'on':
+ if name.startswith('t_'):
+ tool_filters.append( name[2:] )
+ elif name.startswith('l_'):
+ label_filters.append( name[2:] )
+ elif name.startswith('s_'):
+ section_filters.append( name[2:] )
+ user.preferences['toolbox_tool_filters'] = ','.join( tool_filters )
+ user.preferences['toolbox_section_filters'] = ','.join( section_filters )
+ user.preferences['toolbox_label_filters'] = ','.join( label_filters )
+
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
+ message = 'ToolBox filters have been updated.'
+ kwd = dict( message=message, status='done' )
+
+ # Display the ToolBox filters form with the current values filled in
+ return self.toolbox_filters( trans, cntrller, **kwd )
+
@web.expose
@web.require_login( "to get most recently used tool" )
@web.json_pretty
@@ -998,7 +1139,8 @@
"minsizehint" : tool.uihints.get( 'minwidth', -1 ),
"description" : tool.description
}
- return tool_info
+ return tool_info
+
@web.expose
def manage_addresses(self, trans, **kwd):
if trans.user:
@@ -1020,6 +1162,7 @@
else:
# User not logged in, history group must be only public
return trans.show_error_message( "You must be logged in to change your default permitted actions." )
+
@web.expose
def new_address( self, trans, cntrller, **kwd ):
params = util.Params( kwd )
@@ -1104,6 +1247,7 @@
phone=phone,
message=message,
status=status )
+
@web.expose
def edit_address( self, trans, cntrller, **kwd ):
params = util.Params( kwd )
@@ -1118,7 +1262,7 @@
address_id = params.get( 'address_id', None )
if not address_id:
return trans.show_error_message( "No address id received for editing." )
- address_obj = trans.sa_session.query( trans.app.model.UserAddress ).get( trans.security.decode_id( address_id ) )
+ address_obj = trans.sa_session.query( trans.app.model.UserAddress ).get( trans.security.decode_id( address_id ) )
if params.get( 'edit_address_button', False ):
short_desc = util.restore_text( params.get( 'short_desc', '' ) )
name = util.restore_text( params.get( 'name', '' ) )
@@ -1181,6 +1325,7 @@
address_obj=address_obj,
message=message,
status=status )
+
@web.expose
def delete_address( self, trans, cntrller, address_id=None, user_id=None ):
try:
@@ -1201,6 +1346,7 @@
user_id=user_id,
message=message,
status=status ) )
+
@web.expose
def undelete_address( self, trans, cntrller, address_id=None, user_id=None ):
try:
@@ -1220,6 +1366,7 @@
user_id=user_id,
message=message,
status=status ) )
+
@web.expose
def set_user_pref_async( self, trans, pref_name, pref_value ):
""" Set a user preference asynchronously. If user is not logged in, do nothing. """
@@ -1232,7 +1379,7 @@
""" Log a user action asynchronously. If user is not logged in, do nothing. """
if trans.user:
trans.log_action( trans.get_user(), action, context, params )
-
+
@web.expose
@web.require_login()
def dbkeys( self, trans, **kwds ):
diff -r 6f9e73ec3e99d43f14e3d735ecfe7b66ea38472a -r d816d6602e18fd8e04c5d141527ca77a3e39e6cf templates/user/index.mako
--- a/templates/user/index.mako
+++ b/templates/user/index.mako
@@ -13,6 +13,7 @@
<li><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller )}">${_('Manage your information')}</a></li><li><a href="${h.url_for( controller='user', action='set_default_permissions', cntrller=cntrller )}">${_('Change default permissions')}</a> for new histories</li><li><a href="${h.url_for( controller='user', action='api_keys', cntrller=cntrller )}">${_('Manage your API keys')}</a></li>
+ <li><a href="${h.url_for( controller='user', action='toolbox_filters', cntrller=cntrller )}">${_('Manage your ToolBox filters')}</a></li>
%if trans.app.config.enable_openid:
<li><a href="${h.url_for( controller='user', action='openid_manage', cntrller=cntrller )}">${_('Manage OpenIDs')}</a> linked to your account</li>
%endif
diff -r 6f9e73ec3e99d43f14e3d735ecfe7b66ea38472a -r d816d6602e18fd8e04c5d141527ca77a3e39e6cf templates/user/toolbox_filters.mako
--- /dev/null
+++ b/templates/user/toolbox_filters.mako
@@ -0,0 +1,91 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+</br>
+</br>
+
+<ul class="manage-table-actions">
+ <li>
+ <a class="action-button" href="${h.url_for( controller='user', action='index', cntrller=cntrller )}">User preferences</a>
+ </li>
+</ul>
+
+%if tool_filters or section_filters or label_filters:
+ <div class="toolForm">
+ <form name="toolbox_filter" id="toolbox_filter" action="${h.url_for( controller='user', action='edit_toolbox_filters', cntrller=cntrller, user_id=trans.security.encode_id( user.id ) )}" method="post" >
+ % if tool_filters:
+ <div class="toolFormTitle">Edit ToolBox filters :: Tools</div>
+ <div class="toolFormBody">
+ % for filter in tool_filters:
+ <div class="form-row">
+ <div style="float: left; width: 40px; margin-right: 10px;">
+ % if filter['checked']:
+ <input type="checkbox" name="t_${filter['filterpath']}" checked="checked">
+ % else:
+ <input type="checkbox" name="t_${filter['filterpath']}">
+ % endif
+ </div>
+ <div style="float: left; margin-right: 10px;">
+ ${filter['short_desc']}
+ <div class="toolParamHelp" style="clear: both;">${filter['desc']}</div>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ % endfor
+ </div>
+ % endif
+
+ % if section_filters:
+ <div class="toolFormTitle">Edit ToolBox filters :: Sections</div>
+ <div class="toolFormBody">
+ % for filter in section_filters:
+ <div class="form-row">
+ <div style="float: left; width: 40px; margin-right: 10px;">
+ % if filter['checked']:
+ <input type="checkbox" name="s_${filter['filterpath']}" checked="checked">
+ % else:
+ <input type="checkbox" name="s_${filter['filterpath']}">
+ % endif
+ </div>
+ <div style="float: left; margin-right: 10px;">
+ ${filter['short_desc']}
+ <div class="toolParamHelp" style="clear: both;">${filter['desc']}</div>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ % endfor
+ </div>
+ % endif
+
+ % if label_filters:
+ <div class="toolFormTitle">Edit ToolBox filters :: Labels</div>
+ <div class="toolFormBody">
+ % for filter in label_filters:
+ <div class="form-row">
+ <div style="float: left; width: 40px; margin-right: 10px;">
+ % if filter['checked']:
+ <input type="checkbox" name="l_${filter['filterpath']}" checked="checked">
+ % else:
+ <input type="checkbox" name="l_${filter['filterpath']}">
+ % endif
+ </div>
+ <div style="float: left; margin-right: 10px;">
+ ${filter['short_desc']}
+ <div class="toolParamHelp" style="clear: both;">${filter['desc']}</div>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ % endfor
+ </div>
+ % endif
+ <div class="form-row">
+ <input type="submit" name="edit_toolbox_filter_button" value="Save changes">
+ </div>
+ </form>
+ </div>
+%else:
+ ${render_msg( 'No filter available. Contact your system administrator or check your configuration file.', 'info' )}
+%endif
diff -r 6f9e73ec3e99d43f14e3d735ecfe7b66ea38472a -r d816d6602e18fd8e04c5d141527ca77a3e39e6cf universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -682,6 +682,18 @@
# if running many handlers.
#cache_user_job_count = False
+# ToolBox filtering
+# Modules from lib/galaxy/tools/filters/ can be specified in the following lines.
+# tool_* filters will be applied for all users and can not be changed by them.
+# user_tool_* filters will be shown under user preferences and can be toggled
+# on and off at runtime
+#tool_filters
+#tool_label_filters
+#tool_section_filters
+#user_tool_filters = examples:restrict_upload_to_admins, examples:restrict_encode
+#user_tool_section_filters = examples:restrict_text
+#user_tool_label_filters = examples:restrict_upload_to_admins, examples:restrict_encode
+
# ---- Galaxy Message Queue -------------------------------------------------
# Galaxy uses AMQ protocol to receive messages from external sources like
https://bitbucket.org/galaxy/galaxy-central/commits/fb1b676f51b2/
Changeset: fb1b676f51b2
User: dannon
Date: 2013-10-17 19:40:04
Summary: Merged custom-toolbox2
Affected #: 10 files
diff -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 -r fb1b676f51b224a4f3b830402d79652850dd9261 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -6,8 +6,9 @@
import logging, logging.config
import ConfigParser
from datetime import timedelta
-from galaxy.util import string_as_bool, listify, parse_xml
-
+from galaxy.util import string_as_bool
+from galaxy.util import listify
+from galaxy.util import parse_xml
from galaxy import eggs
import pkg_resources
@@ -64,9 +65,14 @@
tcf = kwargs[ 'tool_config_files' ]
else:
tcf = 'tool_conf.xml,shed_tool_conf.xml'
- self.tool_filters = listify( kwargs.get( "tool_filters", [] ) )
- self.tool_label_filters = listify( kwargs.get( "tool_label_filters", [] ) )
- self.tool_section_filters = listify( kwargs.get( "tool_section_filters", [] ) )
+ self.tool_filters = listify( kwargs.get( "tool_filters", [] ), do_strip=True )
+ self.tool_label_filters = listify( kwargs.get( "tool_label_filters", [] ), do_strip=True )
+ self.tool_section_filters = listify( kwargs.get( "tool_section_filters", [] ), do_strip=True )
+
+ self.user_tool_filters = listify( kwargs.get( "user_tool_filters", [] ), do_strip=True )
+ self.user_label_filters = listify( kwargs.get( "user_tool_label_filters", [] ), do_strip=True )
+ self.user_section_filters = listify( kwargs.get( "user_tool_section_filters", [] ), do_strip=True )
+
self.tool_configs = [ resolve_path( p, self.root ) for p in listify( tcf ) ]
self.shed_tool_data_path = kwargs.get( "shed_tool_data_path", None )
if self.shed_tool_data_path:
@@ -489,3 +495,4 @@
sentry_handler = SentryHandler( config.sentry_dsn )
sentry_handler.setLevel( logging.WARN )
root.addHandler( sentry_handler )
+
diff -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 -r fb1b676f51b224a4f3b830402d79652850dd9261 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3359,7 +3359,7 @@
#end of Data Manager Classes
class UserPreference ( object ):
- def __init__( self, name=None, value=None ):
+ def __init__( self, name=None, value=None):
self.name = name
self.value = value
diff -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 -r fb1b676f51b224a4f3b830402d79652850dd9261 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -3433,3 +3433,4 @@
class InterruptedUpload( Exception ):
pass
+
diff -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 -r fb1b676f51b224a4f3b830402d79652850dd9261 lib/galaxy/tools/filters/__init__.py
--- a/lib/galaxy/tools/filters/__init__.py
+++ b/lib/galaxy/tools/filters/__init__.py
@@ -1,4 +1,5 @@
-
+from galaxy.util import listify
+from copy import deepcopy
class FilterFactory( object ):
"""
@@ -15,25 +16,39 @@
self.default_filters = dict( tool=[ _not_hidden, _handle_requires_login ], section=[], label=[] )
# Add dynamic filters to these default filters.
config = toolbox.app.config
- self.__init_filters( "tool", config.tool_filters )
- self.__init_filters( "section", config.tool_section_filters )
- self.__init_filters( "label", config.tool_label_filters )
+ self.__init_filters( "tool", config.tool_filters, self.default_filters )
+ self.__init_filters( "section", config.tool_section_filters, self.default_filters )
+ self.__init_filters( "label", config.tool_label_filters, self.default_filters )
def build_filters( self, trans, **kwds ):
"""
Build list of filters to check tools against given current context.
"""
- filters = self.default_filters.copy()
-
+ filters = deepcopy( self.default_filters )
+ if trans.user:
+ for name, value in trans.user.preferences.items():
+ if value.strip():
+ user_filters = listify( value, do_strip=True )
+ category = ''
+ if name == 'toolbox_tool_filters':
+ category = "tool"
+ elif name == 'toolbox_section_filters':
+ category = "section"
+ elif name == 'toolbox_label_filters':
+ category = "label"
+ if category:
+ self.__init_filters( category, user_filters, filters )
+ else:
if kwds.get( "trackster", False ):
filters[ "tool" ].append( _has_trackster_conf )
return filters
- def __init_filters( self, key, filters ):
+ def __init_filters( self, key, filters, toolbox_filters ):
for filter in filters:
filter_function = self.__build_filter_function( filter )
- self.default_filters[ key ].append( filter_function )
+ toolbox_filters[ key ].append( filter_function )
+ return toolbox_filters
def __build_filter_function( self, filter_name ):
"""Obtain python function (importing a submodule if needed)
diff -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 -r fb1b676f51b224a4f3b830402d79652850dd9261 lib/galaxy/tools/filters/examples.py
--- /dev/null
+++ b/lib/galaxy/tools/filters/examples.py
@@ -0,0 +1,115 @@
+import logging
+log = logging.getLogger( __name__ )
+
+
+def restrict_encode( content, tool ):
+ """
+ Disable the random interval ENCODE tool
+
+ This tool filter will disable all the ENCODE tool when enabled.
+ """
+ if tool.id == 'random_intervals1':
+ return False
+ return True
+
+
+def restrict_text( content, section ):
+ """
+ Disable Text sections
+
+ This tool filter will disable all Tools groups under a 'Text' section when enabled.
+ """
+ if section.name.find('Text') != -1:
+ return False
+ return True
+
+
+def restrict_upload_to_admins( context, tool ):
+ """
+ Disable Upload tool for all non-admin users.
+
+ This tool filter will hide the upload tool from all users except admin
+ users.
+
+ This can be enabled by renaming this file to examples.py and adding
+ the following to the ``app:main`` section of ``universe_wsgi.ini``:
+
+ tool_filters = examples:restrict_upload_to_admins
+ """
+ if tool.name == "Upload File":
+ return context.trans.user_is_admin()
+ return True
+
+
+def disable_gatk( context, tool ):
+ """
+ This tool filter will disable all gatk tools when enabled.
+
+ This can be enabled by renaming this file to examples.py and adding the following to the
+ ``app:main`` section of ``universe_wsgi.ini``:
+
+ tool_filters = examples:disable_gatk
+ """
+ return not any( [ requirement.name == "gatk" for requirement in tool.requirements ] )
+
+
+def explicit_user_mapping( context, section ):
+ """
+ This tool section filter uses an explicit mapping to describe what users can view
+ which tool sections. Anonymous users will only be able to view the "Get Data"
+ tool section (with id getext). This can be enabled by renaming this file to
+ examples.py and adding the following to the ``app:main`` section of
+ ``universe_wsgi.ini``:
+
+ tool_section_filters = examples:explicit_user_mapping
+ """
+ users_sections = {
+ None: [ "getext" ],
+ "bob(a)example.com": [ "getext", "textutil", "filter" ],
+ "mary(a)example.com": [ "getext", "textutil", "filter", "ngs" ],
+ }
+ user = context.trans.user
+ email = user and user.email
+ valid_sections = users_sections.get( email, [] )
+ return section.id in valid_sections
+
+
+DEVELOPERS = [ "mary(a)example.com" ]
+
+
+def restrict_development_tools( context, tool ):
+ """
+ This tool filter will disable all tools with the string alpha appearing in
+ the version for all users except those explicitly appearing in the DEVELOPERS list
+ defined above. This can be enabled by renaming this file to examples.py and
+ adding the following to the ``app:main`` section of ``universe_wsgi.ini``:
+
+ tool_filters = examples:restrict_development_tools
+ """
+ version = tool.version
+ user = context.trans.user
+ email = user and user.email
+ return "alpha" not in version or email in DEVELOPERS
+
+
+def per_host_tool_sections( context, section ):
+ """
+ This tool section filter results in different sections being displayed based on
+ the URL the user is making the request to. This could allow a single Galaxy instance
+ to seem like several different instances hosting different tools based on the URL used
+ to access the Galaxy. This can be enabled by renaming this file to examples.py and adding
+ the following to the ``app:main`` section of ``universe_wsgi.ini``:
+
+ tool_section_filters = examples:per_host_tool_sections
+ """
+ host = context.trans.request.host
+ # Core tools used by all virtual hosts.
+ valid_sections = [ "getext", "textutil", "filter" ]
+ if "ngs.galaxy.example.com" in host:
+ valid_sections += [ "ngs" ]
+ elif "microarray.galaxy.example.com" in host:
+ valid_sections += [ "microarray" ]
+ elif "proteomics.galaxy.example.com" in host:
+ valid_sections += [ "proteomics" ]
+ return section.id in valid_sections
+
diff -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 -r fb1b676f51b224a4f3b830402d79652850dd9261 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -504,7 +504,7 @@
else:
return False
-def listify( item ):
+def listify( item, do_strip=False ):
"""
Make a single item a single item list, or return a list if passed a
list. Passing a None returns an empty list.
@@ -514,7 +514,10 @@
elif isinstance( item, list ):
return item
elif isinstance( item, basestring ) and item.count( ',' ):
- return item.split( ',' )
+ if do_strip:
+ return [token.strip() for token in item.split( ',' )]
+ else:
+ return item.split( ',' )
else:
return [ item ]
@@ -778,6 +781,32 @@
current_group,
e ) )
+def docstring_trim(docstring):
+ """Trimming python doc strings. Taken from: http://www.python.org/dev/peps/pep-0257/"""
+ if not docstring:
+ return ''
+ # Convert tabs to spaces (following the normal Python rules)
+ # and split into a list of lines:
+ lines = docstring.expandtabs().splitlines()
+ # Determine minimum indentation (first line doesn't count):
+ indent = sys.maxint
+ for line in lines[1:]:
+ stripped = line.lstrip()
+ if stripped:
+ indent = min(indent, len(line) - len(stripped))
+ # Remove indentation (first line is special):
+ trimmed = [lines[0].strip()]
+ if indent < sys.maxint:
+ for line in lines[1:]:
+ trimmed.append(line[indent:].rstrip())
+ # Strip off trailing and leading blank lines:
+ while trimmed and not trimmed[-1]:
+ trimmed.pop()
+ while trimmed and not trimmed[0]:
+ trimmed.pop(0)
+ # Return a single string:
+ return '\n'.join(trimmed)
+
def nice_size(size):
"""
Returns a readably formatted string with the size
diff -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 -r fb1b676f51b224a4f3b830402d79652850dd9261 lib/galaxy/webapps/galaxy/controllers/user.py
--- a/lib/galaxy/webapps/galaxy/controllers/user.py
+++ b/lib/galaxy/webapps/galaxy/controllers/user.py
@@ -10,15 +10,25 @@
import random
import urllib
from galaxy import web
-from galaxy import util, model
+from galaxy import util
+from galaxy import model
from galaxy.model.orm import and_
-from galaxy.security.validate_user_input import validate_email, validate_publicname, validate_password, transform_publicname
-from galaxy.util.json import from_json_string, to_json_string
+from galaxy.security.validate_user_input import validate_email
+from galaxy.security.validate_user_input import validate_publicname
+from galaxy.security.validate_user_input import validate_password
+from galaxy.security.validate_user_input import transform_publicname
+from galaxy.util.json import from_json_string
+from galaxy.util.json import to_json_string
+from galaxy.util import listify
+from galaxy.util import docstring_trim
from galaxy.web import url_for
-from galaxy.web.base.controller import BaseUIController, UsesFormDefinitionsMixin
-from galaxy.web.form_builder import CheckboxField, build_select_field
+from galaxy.web.base.controller import BaseUIController
+from galaxy.web.base.controller import UsesFormDefinitionsMixin
+from galaxy.web.form_builder import CheckboxField
+from galaxy.web.form_builder import build_select_field
from galaxy.web.framework.helpers import time_ago, grids
from datetime import datetime, timedelta
+from galaxy.web.framework.helpers import grids
from galaxy.util import hash_util
log = logging.getLogger( __name__ )
@@ -104,6 +114,7 @@
use_panels=use_panels,
message=message,
status='error' ) )
+
@web.expose
def openid_process( self, trans, **kwd ):
'''Handle's response from OpenID Providers'''
@@ -231,6 +242,7 @@
redirect=redirect,
message=message,
status=status ) )
+
@web.expose
def openid_associate( self, trans, cntrller='user', **kwd ):
'''Associates a user with an OpenID log in'''
@@ -369,6 +381,7 @@
user_type_form_definition=user_type_form_definition,
widgets=widgets,
openids=openids )
+
@web.expose
@web.require_login( 'manage OpenIDs' )
def openid_disassociate( self, trans, **kwd ):
@@ -849,6 +862,7 @@
else:
user_type_form_definition = None
return user_type_form_definition
+
def __get_widgets( self, trans, user_type_form_definition, user=None, **kwd ):
widgets = []
if user_type_form_definition:
@@ -950,6 +964,7 @@
username=user.username,
message=message,
status=status )
+
@web.expose
def edit_info( self, trans, cntrller, **kwd ):
params = util.Params( kwd )
@@ -1058,6 +1073,7 @@
action='manage_user_info',
cntrller=cntrller,
**kwd ) )
+
@web.expose
def reset_password( self, trans, email=None, **kwd ):
if trans.app.config.smtp_server is None:
@@ -1105,6 +1121,7 @@
return trans.fill_template( '/user/reset_password.mako',
message=message,
status=status )
+
def __validate( self, trans, params, email, password, confirm, username ):
# If coming from the tool shed webapp, we'll require a public user name
if trans.webapp.name == 'tool_shed':
@@ -1126,6 +1143,7 @@
if user_type_fd_id in [ 'none' ]:
return "Select the user's type and information"
return message
+
@web.expose
def set_default_permissions( self, trans, cntrller, **kwd ):
"""Sets the user's default permissions for the new histories"""
@@ -1152,6 +1170,127 @@
else:
# User not logged in, history group must be only public
return trans.show_error_message( "You must be logged in to change your default permitted actions." )
+
+ @web.expose
+ @web.require_login()
+ def toolbox_filters( self, trans, cntrller, **kwd ):
+ """
+ Sets the user's default filters for the toolbox.
+ Toolbox filters are specified in universe_wsgi.ini.
+ The user can activate them and the choice is stored in user_preferences.
+ """
+
+ def get_filter_mapping( db_filters, config_filters ):
+ """
+ Compare the allowed filters from the universe_wsgi.ini config file with the previously saved or default filters from the database.
+ We need that to toggle the checkboxes for the form in the right way.
+ Furthermore we extract all information associated with a filter to display it in the form.
+ """
+ filters = list()
+ for filter_name in config_filters:
+ if ":" in filter_name:
+ # Should be a submodule of filters (e.g. examples:restrict_development_tools)
+ (module_name, function_name) = filter_name.rsplit(":", 1)
+ module_name = 'galaxy.tools.filters.%s' % module_name.strip()
+ module = __import__( module_name, globals(), fromlist=['temp_module'] )
+ function = getattr( module, function_name.strip() )
+ else:
+ # No module found; it has to be explicitly imported.
+ module = __import__( 'galaxy.tools.filters', globals(), fromlist=['temp_module'] )
+ function = getattr( globals(), filter_name.strip() )
+
+ doc_string = docstring_trim( function.__doc__ )
+ split = doc_string.split('\n\n')
+ if split:
+ sdesc = split[0]
+ else:
+ log.error( 'No description specified in the __doc__ string for %s.' % filter_name )
+ if len(split) > 1:
+ description = split[1]
+ else:
+ description = ''
+
+ if filter_name in db_filters:
+ filters.append( dict( filterpath=filter_name, short_desc=sdesc, desc=description, checked=True ) )
+ else:
+ filters.append( dict( filterpath=filter_name, short_desc=sdesc, desc=description, checked=False ) )
+ return filters
+
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+
+ user_id = params.get( 'user_id', False )
+ if user_id:
+ user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+ else:
+ user = trans.user
+
+ if user:
+ saved_user_tool_filters = list()
+ saved_user_section_filters = list()
+ saved_user_label_filters = list()
+
+ for name, value in user.preferences.items():
+ if name == 'toolbox_tool_filters':
+ saved_user_tool_filters = listify( value, do_strip=True )
+ elif name == 'toolbox_section_filters':
+ saved_user_section_filters = listify( value, do_strip=True )
+ elif name == 'toolbox_label_filters':
+ saved_user_label_filters = listify( value, do_strip=True )
+
+ tool_filters = get_filter_mapping( saved_user_tool_filters, trans.app.config.user_tool_filters )
+ section_filters = get_filter_mapping( saved_user_section_filters, trans.app.config.user_section_filters )
+ label_filters = get_filter_mapping( saved_user_label_filters, trans.app.config.user_label_filters )
+
+ return trans.fill_template( 'user/toolbox_filters.mako',
+ cntrller=cntrller,
+ message=message,
+ tool_filters=tool_filters,
+ section_filters=section_filters,
+ label_filters=label_filters,
+ user=user,
+ status=status )
+ else:
+ # User not logged in, history group must be only public
+ return trans.show_error_message( "You must be logged in to change private toolbox filters." )
+
+ @web.expose
+ @web.require_login( "to change the private toolbox filters" )
+ def edit_toolbox_filters( self, trans, cntrller, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ user_id = params.get( 'user_id', False )
+ if not user_id:
+ # User must be logged in to create a new address
+ return trans.show_error_message( "You must be logged in to change the ToolBox filters." )
+
+ user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+
+ if params.get( 'edit_toolbox_filter_button', False ):
+ tool_filters = list()
+ section_filters = list()
+ label_filters = list()
+ for name, state in params.flatten():
+ if state == 'on':
+ if name.startswith('t_'):
+ tool_filters.append( name[2:] )
+ elif name.startswith('l_'):
+ label_filters.append( name[2:] )
+ elif name.startswith('s_'):
+ section_filters.append( name[2:] )
+ user.preferences['toolbox_tool_filters'] = ','.join( tool_filters )
+ user.preferences['toolbox_section_filters'] = ','.join( section_filters )
+ user.preferences['toolbox_label_filters'] = ','.join( label_filters )
+
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
+ message = 'ToolBox filters have been updated.'
+ kwd = dict( message=message, status='done' )
+
+ # Display the ToolBox filters form with the current values filled in
+ return self.toolbox_filters( trans, cntrller, **kwd )
+
@web.expose
@web.require_login( "to get most recently used tool" )
@web.json_pretty
@@ -1175,6 +1314,7 @@
"description" : tool.description
}
return tool_info
+
@web.expose
def manage_addresses(self, trans, **kwd):
if trans.user:
@@ -1196,6 +1336,7 @@
else:
# User not logged in, history group must be only public
return trans.show_error_message( "You must be logged in to change your default permitted actions." )
+
@web.expose
def new_address( self, trans, cntrller, **kwd ):
params = util.Params( kwd )
@@ -1280,6 +1421,7 @@
phone=phone,
message=message,
status=status )
+
@web.expose
def edit_address( self, trans, cntrller, **kwd ):
params = util.Params( kwd )
@@ -1357,6 +1499,7 @@
address_obj=address_obj,
message=message,
status=status )
+
@web.expose
def delete_address( self, trans, cntrller, address_id=None, user_id=None ):
try:
@@ -1377,6 +1520,7 @@
user_id=user_id,
message=message,
status=status ) )
+
@web.expose
def undelete_address( self, trans, cntrller, address_id=None, user_id=None ):
try:
@@ -1396,6 +1540,7 @@
user_id=user_id,
message=message,
status=status ) )
+
@web.expose
def set_user_pref_async( self, trans, pref_name, pref_value ):
""" Set a user preference asynchronously. If user is not logged in, do nothing. """
diff -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 -r fb1b676f51b224a4f3b830402d79652850dd9261 templates/user/index.mako
--- a/templates/user/index.mako
+++ b/templates/user/index.mako
@@ -13,6 +13,7 @@
<li><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller )}">${_('Manage your information')}</a></li><li><a href="${h.url_for( controller='user', action='set_default_permissions', cntrller=cntrller )}">${_('Change default permissions')}</a> for new histories</li><li><a href="${h.url_for( controller='user', action='api_keys', cntrller=cntrller )}">${_('Manage your API keys')}</a></li>
+ <li><a href="${h.url_for( controller='user', action='toolbox_filters', cntrller=cntrller )}">${_('Manage your ToolBox filters')}</a></li>
%if trans.app.config.enable_openid:
<li><a href="${h.url_for( controller='user', action='openid_manage', cntrller=cntrller )}">${_('Manage OpenIDs')}</a> linked to your account</li>
%endif
diff -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 -r fb1b676f51b224a4f3b830402d79652850dd9261 templates/user/toolbox_filters.mako
--- /dev/null
+++ b/templates/user/toolbox_filters.mako
@@ -0,0 +1,91 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+</br>
+</br>
+
+<ul class="manage-table-actions">
+ <li>
+ <a class="action-button" href="${h.url_for( controller='user', action='index', cntrller=cntrller )}">User preferences</a>
+ </li>
+</ul>
+
+%if tool_filters or section_filters or label_filters:
+ <div class="toolForm">
+ <form name="toolbox_filter" id="toolbox_filter" action="${h.url_for( controller='user', action='edit_toolbox_filters', cntrller=cntrller, user_id=trans.security.encode_id( user.id ) )}" method="post" >
+ % if tool_filters:
+ <div class="toolFormTitle">Edit ToolBox filters :: Tools</div>
+ <div class="toolFormBody">
+ % for filter in tool_filters:
+ <div class="form-row">
+ <div style="float: left; width: 40px; margin-right: 10px;">
+ % if filter['checked']:
+ <input type="checkbox" name="t_${filter['filterpath']}" checked="checked">
+ % else:
+ <input type="checkbox" name="t_${filter['filterpath']}">
+ % endif
+ </div>
+ <div style="float: left; margin-right: 10px;">
+ ${filter['short_desc']}
+ <div class="toolParamHelp" style="clear: both;">${filter['desc']}</div>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ % endfor
+ </div>
+ % endif
+
+ % if section_filters:
+ <div class="toolFormTitle">Edit ToolBox filters :: Sections</div>
+ <div class="toolFormBody">
+ % for filter in section_filters:
+ <div class="form-row">
+ <div style="float: left; width: 40px; margin-right: 10px;">
+ % if filter['checked']:
+ <input type="checkbox" name="s_${filter['filterpath']}" checked="checked">
+ % else:
+ <input type="checkbox" name="s_${filter['filterpath']}">
+ % endif
+ </div>
+ <div style="float: left; margin-right: 10px;">
+ ${filter['short_desc']}
+ <div class="toolParamHelp" style="clear: both;">${filter['desc']}</div>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ % endfor
+ </div>
+ % endif
+
+ % if label_filters:
+ <div class="toolFormTitle">Edit ToolBox filters :: Labels</div>
+ <div class="toolFormBody">
+ % for filter in label_filters:
+ <div class="form-row">
+ <div style="float: left; width: 40px; margin-right: 10px;">
+ % if filter['checked']:
+ <input type="checkbox" name="l_${filter['filterpath']}" checked="checked">
+ % else:
+ <input type="checkbox" name="l_${filter['filterpath']}">
+ % endif
+ </div>
+ <div style="float: left; margin-right: 10px;">
+ ${filter['short_desc']}
+ <div class="toolParamHelp" style="clear: both;">${filter['desc']}</div>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ % endfor
+ </div>
+ % endif
+ <div class="form-row">
+ <input type="submit" name="edit_toolbox_filter_button" value="Save changes">
+ </div>
+ </form>
+ </div>
+%else:
+ ${render_msg( 'No filter available. Contact you system administrator or check your configuration file.', 'info' )}
+%endif
diff -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 -r fb1b676f51b224a4f3b830402d79652850dd9261 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -758,6 +758,18 @@
# if running many handlers.
#cache_user_job_count = False
+# ToolBox filtering
+# Modules from lib/galaxy/tools/filters/ can be specified in the following lines.
+# tool_* filters will be applied for all users and can not be changed by them.
+# user_tool_* filters will be shown under user preferences and can be toggled
+# on and off at runtime
+#tool_filters
+#tool_label_filters
+#tool_section_filters
+#user_tool_filters = examples:restrict_upload_to_admins, examples:restrict_encode
+#user_tool_section_filters = examples:restrict_text
+#user_tool_label_filters = examples:restrict_upload_to_admins, examples:restrict_encode
+
# ---- Galaxy Message Queue -------------------------------------------------
# Galaxy uses AMQ protocol to receive messages from external sources like
https://bitbucket.org/galaxy/galaxy-central/commits/74100455218f/
Changeset: 74100455218f
User: dannon
Date: 2013-10-17 19:41:45
Summary: Fix duplicate import from bad merge
Affected #: 1 file
diff -r fb1b676f51b224a4f3b830402d79652850dd9261 -r 74100455218f1dbfb2ef39aceda4f88ae15e50ef lib/galaxy/webapps/galaxy/controllers/user.py
--- a/lib/galaxy/webapps/galaxy/controllers/user.py
+++ b/lib/galaxy/webapps/galaxy/controllers/user.py
@@ -28,7 +28,6 @@
from galaxy.web.form_builder import build_select_field
from galaxy.web.framework.helpers import time_ago, grids
from datetime import datetime, timedelta
-from galaxy.web.framework.helpers import grids
from galaxy.util import hash_util
log = logging.getLogger( __name__ )
https://bitbucket.org/galaxy/galaxy-central/commits/bdf393fb4155/
Changeset: bdf393fb4155
User: dannon
Date: 2013-10-17 20:09:07
Summary: Fix indentation from merge
Affected #: 1 file
diff -r 74100455218f1dbfb2ef39aceda4f88ae15e50ef -r bdf393fb4155bed5f633d2daf9cd6a96ab46e7ce lib/galaxy/tools/filters/__init__.py
--- a/lib/galaxy/tools/filters/__init__.py
+++ b/lib/galaxy/tools/filters/__init__.py
@@ -39,8 +39,8 @@
if category:
self.__init_filters( category, user_filters, filters )
else:
- if kwds.get( "trackster", False ):
- filters[ "tool" ].append( _has_trackster_conf )
+ if kwds.get( "trackster", False ):
+ filters[ "tool" ].append( _has_trackster_conf )
return filters
https://bitbucket.org/galaxy/galaxy-central/commits/f888228f48d8/
Changeset: f888228f48d8
User: dannon
Date: 2013-10-17 21:31:15
Summary: Merge w/ central
Affected #: 1 file
diff -r bdf393fb4155bed5f633d2daf9cd6a96ab46e7ce -r f888228f48d89ade8b8f49a4d777c573174392d1 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -293,7 +293,7 @@
full_path_to_dir = os.path.abspath( install_dir )
td_common_util.move_file( current_dir=work_dir,
source=downloaded_filename,
- destination_dir=full_path_to_dir )
+ destination=full_path_to_dir )
if action_type == 'download_by_url':
# Eliminate the download_by_url action so remaining actions can be processed correctly.
filtered_actions = actions[ 1: ]
@@ -551,7 +551,7 @@
full_path_to_dir = os.path.abspath( install_dir )
td_common_util.move_file( current_dir=work_dir,
source=downloaded_filename,
- destination_dir=full_path_to_dir )
+ destination=full_path_to_dir )
def log_results( command, fabric_AttributeString, file_path ):
"""
https://bitbucket.org/galaxy/galaxy-central/commits/acf1a0062ea8/
Changeset: acf1a0062ea8
Branch: custom-toolbox2
User: dannon
Date: 2013-10-17 21:32:40
Summary: Close feature branch for toolbox filtering 2
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix for the call to td_common_util.move_file reported by Jim Johnson - thanks Jim!
by commits-noreply@bitbucket.org 17 Oct '13
by commits-noreply@bitbucket.org 17 Oct '13
17 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1846eddc72a8/
Changeset: 1846eddc72a8
User: greg
Date: 2013-10-17 19:46:03
Summary: Fix for the call to td_common_util.move_file reported by Jim Johnson - thanks Jim!
Affected #: 1 file
diff -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 -r 1846eddc72a8b4138a2835d93b5db764524c216d lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -293,7 +293,7 @@
full_path_to_dir = os.path.abspath( install_dir )
td_common_util.move_file( current_dir=work_dir,
source=downloaded_filename,
- destination_dir=full_path_to_dir )
+ destination=full_path_to_dir )
if action_type == 'download_by_url':
# Eliminate the download_by_url action so remaining actions can be processed correctly.
filtered_actions = actions[ 1: ]
@@ -551,7 +551,7 @@
full_path_to_dir = os.path.abspath( install_dir )
td_common_util.move_file( current_dir=work_dir,
source=downloaded_filename,
- destination_dir=full_path_to_dir )
+ destination=full_path_to_dir )
def log_results( command, fabric_AttributeString, file_path ):
"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: carlfeberhard: Graph datatypes: re-add to conf.sample returning False from all sniffers; the user must now manually change the type
by commits-noreply@bitbucket.org 17 Oct '13
by commits-noreply@bitbucket.org 17 Oct '13
17 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f1d77cc91367/
Changeset: f1d77cc91367
User: carlfeberhard
Date: 2013-10-17 19:14:11
Summary: Graph datatypes: re-add to conf.sample returning False from all sniffers; the user must now manually change the type
Affected #: 2 files
diff -r 01975234fe5b2ea1058dee188b5032155a43499b -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -245,6 +245,10 @@
<datatype extension="snpmatrix" type="galaxy.datatypes.genetics:SNPMatrix" display_in_upload="true"/><datatype extension="xls" type="galaxy.datatypes.tabular:Tabular"/><!-- End RGenetics Datatypes -->
+ <!-- graph datatypes -->
+ <datatype extension="xgmml" type="galaxy.datatypes.graph:Xgmml" display_in_upload="true"/>
+ <datatype extension="sif" type="galaxy.datatypes.graph:Sif" display_in_upload="true"/>
+ <datatype extension="rdf" type="galaxy.datatypes.graph:Rdf" display_in_upload="true"/></registration><sniffers><!--
diff -r 01975234fe5b2ea1058dee188b5032155a43499b -r f1d77cc9136794e0272f2ed9b1b33f976f3a0f10 lib/galaxy/datatypes/graph.py
--- a/lib/galaxy/datatypes/graph.py
+++ b/lib/galaxy/datatypes/graph.py
@@ -34,14 +34,9 @@
def sniff( self, filename ):
"""
- Determines whether the file is XML or not, should probably actually check if it is a real xgmml file....
+ Returns false and the user must manually set.
"""
- line = ''
- with open( filename ) as handle:
- line = handle.readline()
-
- #TODO - Is there a more robust way to do this?
- return line.startswith( '<?xml ' )
+ return False
@staticmethod
def merge( split_files, output_file ):
@@ -85,19 +80,9 @@
def sniff( self, filename ):
"""
- Determines whether the file is SIF
+ Returns false and the user must manually set.
"""
- line = ''
- with open( filename ) as infile:
- correct = True
- for line in infile:
- if not line.strip():
- continue
- tlen = len( line.split( "\t" ) )
- # may contain 1 or >= 3 columns
- if tlen == 2:
- correct = False
- return correct
+ return False
@staticmethod
def merge( split_files, output_file ):
@@ -116,6 +101,12 @@
"""
file_ext = "rdf"
+ def sniff( self, filename ):
+ """
+ Returns false and the user must manually set.
+ """
+ return False
+
def set_peek( self, dataset, is_multi_byte=False ):
"""Set the peek and blurb text"""
if not dataset.dataset.purged:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Allow for skipping specified repositories when setting metadata in the tool shed.
by commits-noreply@bitbucket.org 17 Oct '13
by commits-noreply@bitbucket.org 17 Oct '13
17 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/01975234fe5b/
Changeset: 01975234fe5b
User: greg
Date: 2013-10-17 17:18:05
Summary: Allow for skipping specified repositories when setting metadata in the tool shed.
Affected #: 3 files
diff -r 329ea7a83af4f389a7c95ee4559d88c6fec0211b -r 01975234fe5b2ea1058dee188b5032155a43499b lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -230,6 +230,7 @@
to True will restrict resetting metadata to only repositories that are writable by the user
in addition to those repositories of type tool_dependency_definition. This param is ignored
if the current user is not an admin user, in which case this same restriction is automatic.
+ :param encoded_ids_to_skip (optional): a list of encoded repository ids for repositories that should not be processed.
"""
def handle_repository( trans, repository, results ):
repository_id = trans.security.encode_id( repository.id )
@@ -253,6 +254,7 @@
successful_count=0,
unsuccessful_count=0 )
handled_repository_ids = []
+ encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] )
if trans.user_is_admin():
my_writable = util.asbool( payload.get( 'my_writable', False ) )
else:
@@ -260,11 +262,19 @@
query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
# First reset metadata on all repositories of type repository_dependency_definition.
for repository in query:
- if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ encoded_id = trans.security.encode_id( repository.id )
+ if encoded_id in encoded_ids_to_skip:
+ log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
+ ( str( repository_id ), str( encoded_ids_to_skip ) ) )
+ elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
results = handle_repository( trans, repository, results )
# Now reset metadata on all remaining repositories.
for repository in query:
- if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ encoded_id = trans.security.encode_id( repository.id )
+ if encoded_id in encoded_ids_to_skip:
+ log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
+ ( str( repository_id ), str( encoded_ids_to_skip ) ) )
+ elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
results = handle_repository( trans, repository, results )
stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
results[ 'stop_time' ] = stop_time
diff -r 329ea7a83af4f389a7c95ee4559d88c6fec0211b -r 01975234fe5b2ea1058dee188b5032155a43499b lib/tool_shed/scripts/api/reset_metadata_on_repositories.py
--- a/lib/tool_shed/scripts/api/reset_metadata_on_repositories.py
+++ b/lib/tool_shed/scripts/api/reset_metadata_on_repositories.py
@@ -27,18 +27,29 @@
def main( options ):
api_key = options.api
+ base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
my_writable = options.my_writable
one_per_request = options.one_per_request
- base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
+ skip_file = options.skip_file
+ encoded_ids_to_skip = []
+ if skip_file and os.path.exists( skip_file ):
+ # Contents of file must be 1 encoded repository id per line.
+ contents = open( skip_file, 'rb' ).read()
+ if contents:
+ encoded_ids_to_skip = contents.split( '\n' )
if string_as_bool( one_per_request ):
url = '%s/api/repositories/repository_ids_for_setting_metadata?key=%s&my_writable=%s' % ( base_tool_shed_url, api_key, str( my_writable ) )
repository_ids = get( url, api_key )
for repository_id in repository_ids:
- data = dict( repository_id=repository_id )
- url = '%s/api/repositories/reset_metadata_on_repository' % base_tool_shed_url
- submit( url, data, options.api )
+ if repository_id in encoded_ids_to_skip:
+ print "Skipping repository with id %s because it is in skip file %s" % ( str( repository_id ), str( skip_file ) )
+ else:
+ data = dict( repository_id=repository_id )
+ url = '%s/api/repositories/reset_metadata_on_repository' % base_tool_shed_url
+ submit( url, data, options.api )
else:
- data = dict( my_writable=my_writable )
+ data = dict( encoded_ids_to_skip=encoded_ids_to_skip,
+ my_writable=my_writable )
url = '%s/api/repositories/reset_metadata_on_repositories' % base_tool_shed_url
submit( url, data, options.api )
@@ -46,7 +57,8 @@
parser = argparse.ArgumentParser( description='Reset metadata on certain repositories in the Tool Shed via the Tool Shed API.' )
parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
parser.add_argument( "-m", "--my_writable", dest="my_writable", required=False, default='False', help="Restrict to my writable repositories" )
- parser.add_argument( "-o", "--one_per_request", dest="one_per_request", required=False, default='False', help="One repository per request" )
+ parser.add_argument( "-o", "--one_per_request", dest="one_per_request", required=False, default='True', help="One repository per request" )
+ parser.add_argument( "-s", "--skip_file", dest="skip_file", required=False, help="Name of local file containing encoded repository ids to skip" )
parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
options = parser.parse_args()
main( options )
diff -r 329ea7a83af4f389a7c95ee4559d88c6fec0211b -r 01975234fe5b2ea1058dee188b5032155a43499b lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -821,12 +821,12 @@
clause_list = []
for repository in trans.sa_session.query( trans.model.Repository ) \
.filter( trans.model.Repository.table.c.deleted == False ):
- allow_push = repository.allow_push( trans.app )
- if not order:
- # We've been called from the Tool Shed API, so reset metadata on all repositories of type tool_dependency_definition.
- if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION:
- clause_list.append( trans.model.Repository.table.c.id == repository.id )
- elif allow_push:
+ # Always reset metadata on all repositories of type tool_dependency_definition.
+ if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION:
+ clause_list.append( trans.model.Repository.table.c.id == repository.id )
+ else:
+ allow_push = repository.allow_push( trans.app )
+ if allow_push:
# Include all repositories that are writable by the current user.
allow_push_usernames = allow_push.split( ',' )
if username in allow_push_usernames:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jmchilton: Bug fix related to return code handling in TaskWrapper.
by commits-noreply@bitbucket.org 17 Oct '13
by commits-noreply@bitbucket.org 17 Oct '13
17 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/329ea7a83af4/
Changeset: 329ea7a83af4
User: jmchilton
Date: 2013-10-17 12:30:13
Summary: Bug fix related to return code handling in TaskWrapper.
Thanks to Peter Cock for finding this.
Affected #: 2 files
diff -r 8e001dc9675c105d83d83f6d5a756c11759e8bb2 -r 329ea7a83af4f389a7c95ee4559d88c6fec0211b lib/galaxy/jobs/command_factory.py
--- a/lib/galaxy/jobs/command_factory.py
+++ b/lib/galaxy/jobs/command_factory.py
@@ -55,18 +55,20 @@
# Append metadata setting commands, we don't want to overwrite metadata
# that was copied over in init_meta(), as per established behavior
if include_metadata and job_wrapper.requires_setting_metadata:
- if not captured_return_code:
- commands += capture_return_code_command
- captured_return_code = True
- commands += "; cd %s; " % abspath( getcwd() )
- commands += job_wrapper.setup_external_metadata(
+ metadata_command = job_wrapper.setup_external_metadata(
exec_dir=abspath( getcwd() ),
tmp_dir=job_wrapper.working_directory,
dataset_files_path=job.app.model.Dataset.file_path,
output_fnames=job_wrapper.get_output_fnames(),
set_extension=False,
kwds={ 'overwrite' : False }
- )
+ ) or ''
+ metadata_command = metadata_command.strip()
+ if metadata_command:
+ if not captured_return_code:
+ commands += capture_return_code_command
+ captured_return_code = True
+ commands += "; cd %s; %s" % (abspath( getcwd() ), metadata_command)
if captured_return_code:
commands += '; sh -c "exit $return_code"'
diff -r 8e001dc9675c105d83d83f6d5a756c11759e8bb2 -r 329ea7a83af4f389a7c95ee4559d88c6fec0211b test/unit/test_command_factory.py
--- a/test/unit/test_command_factory.py
+++ b/test/unit/test_command_factory.py
@@ -45,6 +45,18 @@
expected_command = '%s; return_code=$?; cd %s; %s; sh -c "exit $return_code"' % (MOCK_COMMAND_LINE, getcwd(), metadata_line)
self.__assert_command_is( expected_command )
+ def test_empty_metadata(self):
+ """
+ As produced by TaskWrapper.
+ """
+ self.include_metadata = True
+ self.include_work_dir_outputs = False
+ self.job_wrapper.metadata_line = ' '
+ # Empty metadata command do not touch command line.
+ expected_command = '%s' % (MOCK_COMMAND_LINE)
+ self.__assert_command_is( expected_command )
+
+
def __assert_command_is(self, expected_command):
command = self.__command()
self.assertEqual(command, expected_command)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
24 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/be16cffdcb68/
Changeset: be16cffdcb68
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Convert James Taylor's dependency testing file lib/galaxy/tools/deps/tests.py into a formal unit test.
Affected #: 2 files
diff -r ebd092733e552ef6a846d55b69d22dc779dfcc5f -r be16cffdcb68709e3368bcd599bfef9f8aab72a4 lib/galaxy/tools/deps/tests.py
--- a/lib/galaxy/tools/deps/tests.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import tempfile
-import os.path
-from os import makedirs, mkdir
-import galaxy.tools.deps
-
-def touch( fname, data=None ):
- f = open( fname, 'w' )
- if data:
- f.write( data )
- f.close()
-
-def test():
-
- # Setup directories
- base_path = tempfile.mkdtemp()
- # mkdir( base_path )
- for name, version, sub in [ ( "dep1", "1.0", "env.sh" ), ( "dep1", "2.0", "bin" ), ( "dep2", "1.0", None ) ]:
- if sub == "bin":
- p = os.path.join( base_path, name, version, "bin" )
- else:
- p = os.path.join( base_path, name, version )
- try:
- makedirs( p )
- except:
- pass
- if sub == "env.sh":
- touch( os.path.join( p, "env.sh" ) )
-
- dm = galaxy.tools.deps.DependencyManager( [ base_path ] )
-
- print dm.find_dep( "dep1", "1.0" )
- print dm.find_dep( "dep1", "2.0" )
diff -r ebd092733e552ef6a846d55b69d22dc779dfcc5f -r be16cffdcb68709e3368bcd599bfef9f8aab72a4 test/unit/test_tool_deps.py
--- /dev/null
+++ b/test/unit/test_tool_deps.py
@@ -0,0 +1,40 @@
+import tempfile
+import os.path
+from os import makedirs
+import galaxy.tools.deps
+
+
+def touch( fname, data=None ):
+ f = open( fname, 'w' )
+ if data:
+ f.write( data )
+ f.close()
+
+
+def test_tool_dependencies():
+
+ # Setup directories
+ base_path = tempfile.mkdtemp()
+ # mkdir( base_path )
+ for name, version, sub in [ ( "dep1", "1.0", "env.sh" ), ( "dep1", "2.0", "bin" ), ( "dep2", "1.0", None ) ]:
+ if sub == "bin":
+ p = os.path.join( base_path, name, version, "bin" )
+ else:
+ p = os.path.join( base_path, name, version )
+ try:
+ makedirs( p )
+ except:
+ pass
+ if sub == "env.sh":
+ touch( os.path.join( p, "env.sh" ) )
+
+ dm = galaxy.tools.deps.DependencyManager( [ base_path ] )
+
+ d1_script, d1_path, d1_version = dm.find_dep( "dep1", "1.0" )
+ assert d1_script == os.path.join( base_path, 'dep1', '1.0', 'env.sh' )
+ assert d1_path == os.path.join( base_path, 'dep1', '1.0' )
+ assert d1_version == "1.0"
+ d2_script, d2_path, d2_version = dm.find_dep( "dep1", "2.0" )
+ assert d2_script == None
+ assert d2_path == os.path.join( base_path, 'dep1', '2.0' )
+ assert d2_version == "2.0"
https://bitbucket.org/galaxy/galaxy-central/commits/e276f3fc9a74/
Changeset: e276f3fc9a74
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Add tests for additional existing behavior for tool dependencies.
Namely test the following:
- Test default package will not be fallen back upon when exactly specified version is used. (Not my favorite behavior. -John)
- Test tool shed package and set_environment requirement types (standard use).
- Test tool shed installed tools will fallback on traditional Galaxy dependencies if a requirement is unmatched in installed dependencies.
- Test tool shed dependency takes precedence over traditional Galaxy dependency when both are available.
Other small clean ups of test/unit/test_tool_deps.py.
Affected #: 1 file
diff -r be16cffdcb68709e3368bcd599bfef9f8aab72a4 -r e276f3fc9a7426923d968e7e56da1e3b26292056 test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -1,40 +1,140 @@
import tempfile
import os.path
-from os import makedirs
-import galaxy.tools.deps
-
-
-def touch( fname, data=None ):
- f = open( fname, 'w' )
- if data:
- f.write( data )
- f.close()
+from os import makedirs, symlink
+from shutil import rmtree
+from galaxy.tools.deps import DependencyManager
+from galaxy.util.bunch import Bunch
+from contextlib import contextmanager
def test_tool_dependencies():
+ # Setup directories
+ with __test_base_path() as base_path:
+ for name, version, sub in [ ( "dep1", "1.0", "env.sh" ), ( "dep1", "2.0", "bin" ), ( "dep2", "1.0", None ) ]:
+ if sub == "bin":
+ p = os.path.join( base_path, name, version, "bin" )
+ else:
+ p = os.path.join( base_path, name, version )
+ try:
+ makedirs( p )
+ except:
+ pass
+ if sub == "env.sh":
+ __touch( os.path.join( p, "env.sh" ) )
- # Setup directories
+ dm = DependencyManager( [base_path] )
+ d1_script, d1_path, d1_version = dm.find_dep( "dep1", "1.0" )
+ assert d1_script == os.path.join( base_path, 'dep1', '1.0', 'env.sh' )
+ assert d1_path == os.path.join( base_path, 'dep1', '1.0' )
+ assert d1_version == "1.0"
+ d2_script, d2_path, d2_version = dm.find_dep( "dep1", "2.0" )
+ assert d2_script == None
+ assert d2_path == os.path.join( base_path, 'dep1', '2.0' )
+ assert d2_version == "2.0"
+
+ ## Test default versions
+ symlink( os.path.join( base_path, 'dep1', '2.0'), os.path.join( base_path, 'dep1', 'default' ) )
+ default_script, default_path, default_version = dm.find_dep( "dep1", None )
+ assert default_version == "2.0"
+
+ ## Test default will not be fallen back upon by default
+ default_script, default_path, default_version = dm.find_dep( "dep1", "2.1" )
+ assert default_script == None
+ assert default_version == None
+
+
+TEST_REPO_USER = "devteam"
+TEST_REPO_NAME = "bwa"
+TEST_REPO_CHANGESET = "12abcd41223da"
+TEST_VERSION = "0.5.9"
+
+
+def test_toolshed_set_enviornment_requiremetns():
+ with __test_base_path() as base_path:
+ test_repo = __build_test_repo('set_environment')
+ dm = DependencyManager( [base_path] )
+ env_settings_dir = os.path.join(base_path, "environment_settings", TEST_REPO_NAME, TEST_REPO_USER, TEST_REPO_NAME, TEST_REPO_CHANGESET)
+ os.makedirs(env_settings_dir)
+ d1_script, d1_path, d1_version = dm.find_dep( TEST_REPO_NAME, version=None, type='set_environment', installed_tool_dependencies=[test_repo] )
+ assert d1_version == None
+ assert d1_script == os.path.join(env_settings_dir, "env.sh"), d1_script
+
+
+def test_toolshed_package_requirements():
+ with __test_base_path() as base_path:
+ test_repo = __build_test_repo('package', version=TEST_VERSION)
+ dm = DependencyManager( [base_path] )
+ package_dir = __build_ts_test_package(base_path)
+ d1_script, d1_path, d1_version = dm.find_dep( TEST_REPO_NAME, version=TEST_VERSION, type='package', installed_tool_dependencies=[test_repo] )
+ assert d1_version == TEST_VERSION, d1_version
+ assert d1_script == os.path.join(package_dir, "env.sh"), d1_script
+
+
+def test_toolshed_tools_fallback_on_manual_dependencies():
+ with __test_base_path() as base_path:
+ dm = DependencyManager( [base_path] )
+ test_repo = __build_test_repo('package', version=TEST_VERSION)
+ env_path = __setup_galaxy_package_dep(base_path, "dep1", "1.0")
+ d1_script, d1_path, d1_version = dm.find_dep( "dep1", version="1.0", type='package', installed_tool_dependencies=[test_repo] )
+ assert d1_version == "1.0"
+ assert d1_script == env_path
+
+
+def test_toolshed_greater_precendence():
+ with __test_base_path() as base_path:
+ dm = DependencyManager( [base_path] )
+ test_repo = __build_test_repo('package', version=TEST_VERSION)
+ ts_package_dir = __build_ts_test_package(base_path)
+ gx_env_path = __setup_galaxy_package_dep(base_path, TEST_REPO_NAME, TEST_VERSION)
+ ts_env_path = os.path.join(ts_package_dir, "env.sh")
+ d1_script, d1_path, d1_version = dm.find_dep( TEST_REPO_NAME, version=TEST_VERSION, type='package', installed_tool_dependencies=[test_repo] )
+ assert d1_script != gx_env_path # Not the galaxy path, it should be the tool shed path used.
+ assert d1_script == ts_env_path
+
+
+def __build_ts_test_package(base_path, script_contents=''):
+ package_dir = os.path.join(base_path, TEST_REPO_NAME, TEST_VERSION, TEST_REPO_USER, TEST_REPO_NAME, TEST_REPO_CHANGESET)
+ __touch(os.path.join(package_dir, 'env.sh'), script_contents)
+ return package_dir
+
+
+def __setup_galaxy_package_dep(base_path, name, version, contents=""):
+ dep_directory = os.path.join( base_path, name, version )
+ env_path = os.path.join( dep_directory, "env.sh" )
+ __touch( env_path, contents )
+ return env_path
+
+
+def __touch( fname, data=None ):
+ dirname = os.path.dirname( fname )
+ if not os.path.exists( dirname ):
+ makedirs( dirname )
+ f = open( fname, 'w' )
+ try:
+ if data:
+ f.write( data )
+ finally:
+ f.close()
+
+
+def __build_test_repo(type, version=None):
+ return Bunch(
+ owner=TEST_REPO_USER,
+ name=TEST_REPO_NAME,
+ type=type,
+ version=version,
+ tool_shed_repository=Bunch(
+ owner=TEST_REPO_USER,
+ name=TEST_REPO_NAME,
+ installed_changeset_revision=TEST_REPO_CHANGESET
+ )
+ )
+
+
+@contextmanager
+def __test_base_path():
base_path = tempfile.mkdtemp()
- # mkdir( base_path )
- for name, version, sub in [ ( "dep1", "1.0", "env.sh" ), ( "dep1", "2.0", "bin" ), ( "dep2", "1.0", None ) ]:
- if sub == "bin":
- p = os.path.join( base_path, name, version, "bin" )
- else:
- p = os.path.join( base_path, name, version )
- try:
- makedirs( p )
- except:
- pass
- if sub == "env.sh":
- touch( os.path.join( p, "env.sh" ) )
-
- dm = galaxy.tools.deps.DependencyManager( [ base_path ] )
-
- d1_script, d1_path, d1_version = dm.find_dep( "dep1", "1.0" )
- assert d1_script == os.path.join( base_path, 'dep1', '1.0', 'env.sh' )
- assert d1_path == os.path.join( base_path, 'dep1', '1.0' )
- assert d1_version == "1.0"
- d2_script, d2_path, d2_version = dm.find_dep( "dep1", "2.0" )
- assert d2_script == None
- assert d2_path == os.path.join( base_path, 'dep1', '2.0' )
- assert d2_version == "2.0"
+ try:
+ yield base_path
+ finally:
+ rmtree(base_path)
https://bitbucket.org/galaxy/galaxy-central/commits/b16ab0d407a0/
Changeset: b16ab0d407a0
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: PEP8 fixes for lib/galaxy/tools/deps/__init__.py
Affected #: 1 file
diff -r e276f3fc9a7426923d968e7e56da1e3b26292056 -r b16ab0d407a09695aaa0dcbffcd9661b12c4b701 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -7,6 +7,7 @@
import logging
log = logging.getLogger( __name__ )
+
class DependencyManager( object ):
"""
A DependencyManager attempts to resolve named and versioned dependencies by searching for them under a list of directories. Directories should be
@@ -28,6 +29,7 @@
if not os.path.isdir( base_path ):
log.warn( "Path '%s' is not directory, ignoring", base_path )
self.base_paths.append( os.path.abspath( base_path ) )
+
def find_dep( self, name, version=None, type='package', installed_tool_dependencies=None ):
"""
Attempt to find a dependency named `name` at version `version`. If version is None, return the "default" version as determined using a
@@ -37,6 +39,7 @@
return self._find_dep_default( name, type=type, installed_tool_dependencies=installed_tool_dependencies )
else:
return self._find_dep_versioned( name, version, type=type, installed_tool_dependencies=installed_tool_dependencies )
+
def _find_dep_versioned( self, name, version, type='package', installed_tool_dependencies=None ):
installed_tool_dependency = self._get_installed_dependency( installed_tool_dependencies, name, type, version=version )
for base_path in self.base_paths:
@@ -51,6 +54,7 @@
return None, path, version
else:
return None, None, None
+
def _find_dep_default( self, name, type='package', installed_tool_dependencies=None ):
if type == 'set_environment' and installed_tool_dependencies:
installed_tool_dependency = self._get_installed_dependency( installed_tool_dependencies, name, type, version=None )
@@ -68,20 +72,22 @@
script = os.path.join( real_path, 'env.sh' )
if os.path.exists( script ):
return script, real_path, real_version
- elif os.path.exists( os.path.join( real_path, 'bin' ) ):
+ elif os.path.exists( real_bin ):
return None, real_path, real_version
else:
return None, None, None
+
def _get_installed_dependency( self, installed_tool_dependencies, name, type, version=None ):
if installed_tool_dependencies:
for installed_tool_dependency in installed_tool_dependencies:
if version:
- if installed_tool_dependency.name==name and installed_tool_dependency.type==type and installed_tool_dependency.version==version:
+ if installed_tool_dependency.name == name and installed_tool_dependency.type == type and installed_tool_dependency.version == version:
return installed_tool_dependency
else:
- if installed_tool_dependency.name==name and installed_tool_dependency.type==type:
+ if installed_tool_dependency.name == name and installed_tool_dependency.type == type:
return installed_tool_dependency
return None
+
def _get_package_installed_dependency_path( self, installed_tool_dependency, base_path, name, version ):
tool_shed_repository = installed_tool_dependency.tool_shed_repository
return os.path.join( base_path,
@@ -90,6 +96,7 @@
tool_shed_repository.owner,
tool_shed_repository.name,
tool_shed_repository.installed_changeset_revision )
+
def _get_set_environment_installed_dependency_script_path( self, installed_tool_dependency, name ):
tool_shed_repository = installed_tool_dependency.tool_shed_repository
for base_path in self.base_paths:
https://bitbucket.org/galaxy/galaxy-central/commits/630cfe0030ab/
Changeset: 630cfe0030ab
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Drop multiple tool_dependency_dir implementation for now.
Will reimplement once I have added more granular, plugin style tool dependency resolvers. They will benefit from there being just one default_base_path as implemented here, but allowing it to be overridden on a per resolver basis.
Affected #: 3 files
diff -r b16ab0d407a09695aaa0dcbffcd9661b12c4b701 -r 630cfe0030abf40e46c9839254278531d657e554 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -696,7 +696,7 @@
def init_dependency_manager( self ):
if self.app.config.use_tool_dependencies:
- self.dependency_manager = DependencyManager( [ self.app.config.tool_dependency_dir ] )
+ self.dependency_manager = DependencyManager( self.app.config.tool_dependency_dir )
else:
self.dependency_manager = None
diff -r b16ab0d407a09695aaa0dcbffcd9661b12c4b701 -r 630cfe0030abf40e46c9839254278531d657e554 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -17,18 +17,16 @@
and should each contain a file 'env.sh' which can be sourced to make the dependency available in the current shell environment.
"""
- def __init__( self, base_paths=[] ):
+ def __init__( self, default_base_path ):
"""
Create a new dependency manager looking for packages under the paths listed
in `base_paths`. The default base path is app.config.tool_dependency_dir.
"""
- self.base_paths = []
- for base_path in base_paths:
- if not os.path.exists( base_path ):
- log.warn( "Path '%s' does not exist, ignoring", base_path )
- if not os.path.isdir( base_path ):
- log.warn( "Path '%s' is not directory, ignoring", base_path )
- self.base_paths.append( os.path.abspath( base_path ) )
+ if not os.path.exists( default_base_path ):
+ log.warn( "Path '%s' does not exist, ignoring", default_base_path )
+ if not os.path.isdir( default_base_path ):
+ log.warn( "Path '%s' is not directory, ignoring", default_base_path )
+ self.default_base_path = os.path.abspath( default_base_path )
def find_dep( self, name, version=None, type='package', installed_tool_dependencies=None ):
"""
@@ -42,18 +40,17 @@
def _find_dep_versioned( self, name, version, type='package', installed_tool_dependencies=None ):
installed_tool_dependency = self._get_installed_dependency( installed_tool_dependencies, name, type, version=version )
- for base_path in self.base_paths:
- if installed_tool_dependency:
- path = self._get_package_installed_dependency_path( installed_tool_dependency, base_path, name, version )
- else:
- path = os.path.join( base_path, name, version )
- script = os.path.join( path, 'env.sh' )
- if os.path.exists( script ):
- return script, path, version
- elif os.path.exists( os.path.join( path, 'bin' ) ):
- return None, path, version
+ base_path = self.default_base_path
+ if installed_tool_dependency:
+ path = self._get_package_installed_dependency_path( installed_tool_dependency, base_path, name, version )
else:
- return None, None, None
+ path = os.path.join( base_path, name, version )
+ script = os.path.join( path, 'env.sh' )
+ if os.path.exists( script ):
+ return script, path, version
+ elif os.path.exists( os.path.join( path, 'bin' ) ):
+ return None, path, version
+ return None, None, None
def _find_dep_default( self, name, type='package', installed_tool_dependencies=None ):
if type == 'set_environment' and installed_tool_dependencies:
@@ -63,19 +60,18 @@
if script and path:
# Environment settings do not use versions.
return script, path, None
- for base_path in self.base_paths:
- path = os.path.join( base_path, name, 'default' )
- if os.path.islink( path ):
- real_path = os.path.realpath( path )
- real_bin = os.path.join( real_path, 'bin' )
- real_version = os.path.basename( real_path )
- script = os.path.join( real_path, 'env.sh' )
- if os.path.exists( script ):
- return script, real_path, real_version
- elif os.path.exists( real_bin ):
- return None, real_path, real_version
- else:
- return None, None, None
+ base_path = self.default_base_path
+ path = os.path.join( base_path, name, 'default' )
+ if os.path.islink( path ):
+ real_path = os.path.realpath( path )
+ real_bin = os.path.join( real_path, 'bin' )
+ real_version = os.path.basename( real_path )
+ script = os.path.join( real_path, 'env.sh' )
+ if os.path.exists( script ):
+ return script, real_path, real_version
+ elif os.path.exists( real_bin ):
+ return None, real_path, real_version
+ return None, None, None
def _get_installed_dependency( self, installed_tool_dependencies, name, type, version=None ):
if installed_tool_dependencies:
@@ -99,14 +95,14 @@
def _get_set_environment_installed_dependency_script_path( self, installed_tool_dependency, name ):
tool_shed_repository = installed_tool_dependency.tool_shed_repository
- for base_path in self.base_paths:
- path = os.path.abspath( os.path.join( base_path,
- 'environment_settings',
- name,
- tool_shed_repository.owner,
- tool_shed_repository.name,
- tool_shed_repository.installed_changeset_revision ) )
- if os.path.exists( path ):
- script = os.path.join( path, 'env.sh' )
- return script, path, None
+ base_path = self.default_base_path
+ path = os.path.abspath( os.path.join( base_path,
+ 'environment_settings',
+ name,
+ tool_shed_repository.owner,
+ tool_shed_repository.name,
+ tool_shed_repository.installed_changeset_revision ) )
+ if os.path.exists( path ):
+ script = os.path.join( path, 'env.sh' )
+ return script, path, None
return None, None, None
diff -r b16ab0d407a09695aaa0dcbffcd9661b12c4b701 -r 630cfe0030abf40e46c9839254278531d657e554 test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -9,6 +9,7 @@
def test_tool_dependencies():
# Setup directories
+
with __test_base_path() as base_path:
for name, version, sub in [ ( "dep1", "1.0", "env.sh" ), ( "dep1", "2.0", "bin" ), ( "dep2", "1.0", None ) ]:
if sub == "bin":
@@ -22,7 +23,7 @@
if sub == "env.sh":
__touch( os.path.join( p, "env.sh" ) )
- dm = DependencyManager( [base_path] )
+ dm = DependencyManager( default_base_path=base_path )
d1_script, d1_path, d1_version = dm.find_dep( "dep1", "1.0" )
assert d1_script == os.path.join( base_path, 'dep1', '1.0', 'env.sh' )
assert d1_path == os.path.join( base_path, 'dep1', '1.0' )
@@ -52,7 +53,7 @@
def test_toolshed_set_enviornment_requiremetns():
with __test_base_path() as base_path:
test_repo = __build_test_repo('set_environment')
- dm = DependencyManager( [base_path] )
+ dm = DependencyManager( default_base_path=base_path )
env_settings_dir = os.path.join(base_path, "environment_settings", TEST_REPO_NAME, TEST_REPO_USER, TEST_REPO_NAME, TEST_REPO_CHANGESET)
os.makedirs(env_settings_dir)
d1_script, d1_path, d1_version = dm.find_dep( TEST_REPO_NAME, version=None, type='set_environment', installed_tool_dependencies=[test_repo] )
@@ -63,7 +64,7 @@
def test_toolshed_package_requirements():
with __test_base_path() as base_path:
test_repo = __build_test_repo('package', version=TEST_VERSION)
- dm = DependencyManager( [base_path] )
+ dm = DependencyManager( default_base_path=base_path )
package_dir = __build_ts_test_package(base_path)
d1_script, d1_path, d1_version = dm.find_dep( TEST_REPO_NAME, version=TEST_VERSION, type='package', installed_tool_dependencies=[test_repo] )
assert d1_version == TEST_VERSION, d1_version
@@ -72,7 +73,7 @@
def test_toolshed_tools_fallback_on_manual_dependencies():
with __test_base_path() as base_path:
- dm = DependencyManager( [base_path] )
+ dm = DependencyManager( default_base_path=base_path )
test_repo = __build_test_repo('package', version=TEST_VERSION)
env_path = __setup_galaxy_package_dep(base_path, "dep1", "1.0")
d1_script, d1_path, d1_version = dm.find_dep( "dep1", version="1.0", type='package', installed_tool_dependencies=[test_repo] )
@@ -82,7 +83,7 @@
def test_toolshed_greater_precendence():
with __test_base_path() as base_path:
- dm = DependencyManager( [base_path] )
+ dm = DependencyManager( default_base_path=base_path )
test_repo = __build_test_repo('package', version=TEST_VERSION)
ts_package_dir = __build_ts_test_package(base_path)
gx_env_path = __setup_galaxy_package_dep(base_path, TEST_REPO_NAME, TEST_VERSION)
https://bitbucket.org/galaxy/galaxy-central/commits/a54a058fa777/
Changeset: a54a058fa777
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Small clean ups.
Affected #: 1 file
diff -r 630cfe0030abf40e46c9839254278531d657e554 -r a54a058fa77737cd1f9d51393cb46f5bf8dda95f lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -7,15 +7,19 @@
import logging
log = logging.getLogger( __name__ )
+INDETERMINATE_DEPENDENCY = (None, None, None)
+
class DependencyManager( object ):
"""
- A DependencyManager attempts to resolve named and versioned dependencies by searching for them under a list of directories. Directories should be
+ A DependencyManager attempts to resolve named and versioned dependencies by
+ searching for them under a list of directories. Directories should be
of the form:
$BASE/name/version/...
- and should each contain a file 'env.sh' which can be sourced to make the dependency available in the current shell environment.
+ and should each contain a file 'env.sh' which can be sourced to make the
+ dependency available in the current shell environment.
"""
def __init__( self, default_base_path ):
"""
@@ -50,7 +54,7 @@
return script, path, version
elif os.path.exists( os.path.join( path, 'bin' ) ):
return None, path, version
- return None, None, None
+ return INDETERMINATE_DEPENDENCY
def _find_dep_default( self, name, type='package', installed_tool_dependencies=None ):
if type == 'set_environment' and installed_tool_dependencies:
@@ -71,16 +75,17 @@
return script, real_path, real_version
elif os.path.exists( real_bin ):
return None, real_path, real_version
- return None, None, None
+ return INDETERMINATE_DEPENDENCY
def _get_installed_dependency( self, installed_tool_dependencies, name, type, version=None ):
if installed_tool_dependencies:
for installed_tool_dependency in installed_tool_dependencies:
+ name_and_type_equal = installed_tool_dependency.name == name and installed_tool_dependency.type == type
if version:
- if installed_tool_dependency.name == name and installed_tool_dependency.type == type and installed_tool_dependency.version == version:
+ if name_and_type_equal and installed_tool_dependency.version == version:
return installed_tool_dependency
else:
- if installed_tool_dependency.name == name and installed_tool_dependency.type == type:
+ if name_and_type_equal:
return installed_tool_dependency
return None
@@ -105,4 +110,4 @@
if os.path.exists( path ):
script = os.path.join( path, 'env.sh' )
return script, path, None
- return None, None, None
+ return INDETERMINATE_DEPENDENCY
https://bitbucket.org/galaxy/galaxy-central/commits/8ca4fdce3052/
Changeset: 8ca4fdce3052
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Begin rearranging arguments so all ones not of interest to every potential dependency resolution plugin can be kwds
Affected #: 1 file
diff -r a54a058fa77737cd1f9d51393cb46f5bf8dda95f -r 8ca4fdce30526d58737d2b0d21af15cb750ddffc lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -43,7 +43,7 @@
return self._find_dep_versioned( name, version, type=type, installed_tool_dependencies=installed_tool_dependencies )
def _find_dep_versioned( self, name, version, type='package', installed_tool_dependencies=None ):
- installed_tool_dependency = self._get_installed_dependency( installed_tool_dependencies, name, type, version=version )
+ installed_tool_dependency = self._get_installed_dependency( name, type, version=version, installed_tool_dependencies=installed_tool_dependencies )
base_path = self.default_base_path
if installed_tool_dependency:
path = self._get_package_installed_dependency_path( installed_tool_dependency, base_path, name, version )
@@ -58,7 +58,7 @@
def _find_dep_default( self, name, type='package', installed_tool_dependencies=None ):
if type == 'set_environment' and installed_tool_dependencies:
- installed_tool_dependency = self._get_installed_dependency( installed_tool_dependencies, name, type, version=None )
+ installed_tool_dependency = self._get_installed_dependency( name, type, version=None, installed_tool_dependencies=installed_tool_dependencies )
if installed_tool_dependency:
script, path, version = self._get_set_environment_installed_dependency_script_path( installed_tool_dependency, name )
if script and path:
@@ -77,16 +77,15 @@
return None, real_path, real_version
return INDETERMINATE_DEPENDENCY
- def _get_installed_dependency( self, installed_tool_dependencies, name, type, version=None ):
- if installed_tool_dependencies:
- for installed_tool_dependency in installed_tool_dependencies:
- name_and_type_equal = installed_tool_dependency.name == name and installed_tool_dependency.type == type
- if version:
- if name_and_type_equal and installed_tool_dependency.version == version:
- return installed_tool_dependency
- else:
- if name_and_type_equal:
- return installed_tool_dependency
+ def _get_installed_dependency( self, name, type, version=None, **kwds ):
+ for installed_tool_dependency in kwds.get("installed_tool_dependencies", []):
+ name_and_type_equal = installed_tool_dependency.name == name and installed_tool_dependency.type == type
+ if version:
+ if name_and_type_equal and installed_tool_dependency.version == version:
+ return installed_tool_dependency
+ else:
+ if name_and_type_equal:
+ return installed_tool_dependency
return None
def _get_package_installed_dependency_path( self, installed_tool_dependency, base_path, name, version ):
https://bitbucket.org/galaxy/galaxy-central/commits/22f3aca8ad9c/
Changeset: 22f3aca8ad9c
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Finish refactoring toward generic keyword arguments.
Affected #: 1 file
diff -r 8ca4fdce30526d58737d2b0d21af15cb750ddffc -r 22f3aca8ad9cb9a0e606a1c69cb4c211fe5cc133 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -32,18 +32,18 @@
log.warn( "Path '%s' is not directory, ignoring", default_base_path )
self.default_base_path = os.path.abspath( default_base_path )
- def find_dep( self, name, version=None, type='package', installed_tool_dependencies=None ):
+ def find_dep( self, name, version=None, type='package', **kwds ):
"""
Attempt to find a dependency named `name` at version `version`. If version is None, return the "default" version as determined using a
symbolic link (if found). Returns a triple of: env_script, base_path, real_version
"""
if version is None:
- return self._find_dep_default( name, type=type, installed_tool_dependencies=installed_tool_dependencies )
+ return self._find_dep_default( name, type=type, **kwds )
else:
- return self._find_dep_versioned( name, version, type=type, installed_tool_dependencies=installed_tool_dependencies )
+ return self._find_dep_versioned( name, version, type=type, **kwds )
- def _find_dep_versioned( self, name, version, type='package', installed_tool_dependencies=None ):
- installed_tool_dependency = self._get_installed_dependency( name, type, version=version, installed_tool_dependencies=installed_tool_dependencies )
+ def _find_dep_versioned( self, name, version, type='package', **kwds ):
+ installed_tool_dependency = self._get_installed_dependency( name, type, version=version, **kwds )
base_path = self.default_base_path
if installed_tool_dependency:
path = self._get_package_installed_dependency_path( installed_tool_dependency, base_path, name, version )
@@ -56,9 +56,9 @@
return None, path, version
return INDETERMINATE_DEPENDENCY
- def _find_dep_default( self, name, type='package', installed_tool_dependencies=None ):
- if type == 'set_environment' and installed_tool_dependencies:
- installed_tool_dependency = self._get_installed_dependency( name, type, version=None, installed_tool_dependencies=installed_tool_dependencies )
+ def _find_dep_default( self, name, type='package', **kwds ):
+ if type == 'set_environment' and kwds.get('installed_tool_dependencies', None):
+ installed_tool_dependency = self._get_installed_dependency( name, type, version=None, **kwds )
if installed_tool_dependency:
script, path, version = self._get_set_environment_installed_dependency_script_path( installed_tool_dependency, name )
if script and path:
https://bitbucket.org/galaxy/galaxy-central/commits/dd2b36c4fc66/
Changeset: dd2b36c4fc66
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Introduce the concept of a dependency resolver, DependencyManager will check each in turn.
Affected #: 1 file
diff -r 22f3aca8ad9cb9a0e606a1c69cb4c211fe5cc133 -r dd2b36c4fc6682e76cceef10de2ae3154cc107c6 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -31,8 +31,29 @@
if not os.path.isdir( default_base_path ):
log.warn( "Path '%s' is not directory, ignoring", default_base_path )
self.default_base_path = os.path.abspath( default_base_path )
+ self.dependency_resolvers = [ GalaxyPackageDependencyResolver(self) ]
+
def find_dep( self, name, version=None, type='package', **kwds ):
+ for resolver in self.dependency_resolvers:
+ dependency = resolver.resolve( name, version, type, **kwds )
+ if dependency != INDETERMINATE_DEPENDENCY:
+ return dependency
+ return INDETERMINATE_DEPENDENCY
+
+
+class DependencyResolver(object):
+
+ def resolve( self, name, version, type, **kwds ):
+ raise NotImplementedError()
+
+
+class GalaxyPackageDependencyResolver(DependencyResolver):
+
+ def __init__(self, dependency_manager):
+ self.default_base_path = dependency_manager.default_base_path
+
+ def resolve( self, name, version, type, **kwds ):
"""
Attempt to find a dependency named `name` at version `version`. If version is None, return the "default" version as determined using a
symbolic link (if found). Returns a triple of: env_script, base_path, real_version
https://bitbucket.org/galaxy/galaxy-central/commits/ae5dbc930415/
Changeset: ae5dbc930415
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Refactor common code out of _find_dep_versioned and _find_dep_default.
Affected #: 1 file
diff -r dd2b36c4fc6682e76cceef10de2ae3154cc107c6 -r ae5dbc9304152e034ed5565f5243ab6b5c0cbe77 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -70,12 +70,7 @@
path = self._get_package_installed_dependency_path( installed_tool_dependency, base_path, name, version )
else:
path = os.path.join( base_path, name, version )
- script = os.path.join( path, 'env.sh' )
- if os.path.exists( script ):
- return script, path, version
- elif os.path.exists( os.path.join( path, 'bin' ) ):
- return None, path, version
- return INDETERMINATE_DEPENDENCY
+ return self._galaxy_package_dep(path, version)
def _find_dep_default( self, name, type='package', **kwds ):
if type == 'set_environment' and kwds.get('installed_tool_dependencies', None):
@@ -89,13 +84,17 @@
path = os.path.join( base_path, name, 'default' )
if os.path.islink( path ):
real_path = os.path.realpath( path )
- real_bin = os.path.join( real_path, 'bin' )
real_version = os.path.basename( real_path )
- script = os.path.join( real_path, 'env.sh' )
- if os.path.exists( script ):
- return script, real_path, real_version
- elif os.path.exists( real_bin ):
- return None, real_path, real_version
+ return self._galaxy_package_dep(real_path, real_version)
+ else:
+ return INDETERMINATE_DEPENDENCY
+
+ def _galaxy_package_dep( self, path, version ):
+ script = os.path.join( path, 'env.sh' )
+ if os.path.exists( script ):
+ return script, path, version
+ elif os.path.exists( os.path.join( path, 'bin' ) ):
+ return None, path, version
return INDETERMINATE_DEPENDENCY
def _get_installed_dependency( self, name, type, version=None, **kwds ):
https://bitbucket.org/galaxy/galaxy-central/commits/8928cf0be809/
Changeset: 8928cf0be809
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Split package resolvers into Tool Shed and traditional Galaxy versions.
Affected #: 1 file
diff -r ae5dbc9304152e034ed5565f5243ab6b5c0cbe77 -r 8928cf0be80994703689e89c96e2dbfa20a47319 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -31,7 +31,10 @@
if not os.path.isdir( default_base_path ):
log.warn( "Path '%s' is not directory, ignoring", default_base_path )
self.default_base_path = os.path.abspath( default_base_path )
- self.dependency_resolvers = [ GalaxyPackageDependencyResolver(self) ]
+ self.dependency_resolvers = [
+ ToolShedPackageDependencyResolver(self),
+ GalaxyPackageDependencyResolver(self),
+ ]
def find_dep( self, name, version=None, type='package', **kwds ):
@@ -64,22 +67,11 @@
return self._find_dep_versioned( name, version, type=type, **kwds )
def _find_dep_versioned( self, name, version, type='package', **kwds ):
- installed_tool_dependency = self._get_installed_dependency( name, type, version=version, **kwds )
base_path = self.default_base_path
- if installed_tool_dependency:
- path = self._get_package_installed_dependency_path( installed_tool_dependency, base_path, name, version )
- else:
- path = os.path.join( base_path, name, version )
+ path = os.path.join( base_path, name, version )
return self._galaxy_package_dep(path, version)
def _find_dep_default( self, name, type='package', **kwds ):
- if type == 'set_environment' and kwds.get('installed_tool_dependencies', None):
- installed_tool_dependency = self._get_installed_dependency( name, type, version=None, **kwds )
- if installed_tool_dependency:
- script, path, version = self._get_set_environment_installed_dependency_script_path( installed_tool_dependency, name )
- if script and path:
- # Environment settings do not use versions.
- return script, path, None
base_path = self.default_base_path
path = os.path.join( base_path, name, 'default' )
if os.path.islink( path ):
@@ -97,6 +89,31 @@
return None, path, version
return INDETERMINATE_DEPENDENCY
+
+class ToolShedPackageDependencyResolver(GalaxyPackageDependencyResolver):
+
+ def __init__(self, dependency_manager):
+ super(ToolShedPackageDependencyResolver, self).__init__(dependency_manager)
+
+ def _find_dep_versioned( self, name, version, type='package', **kwds ):
+ installed_tool_dependency = self._get_installed_dependency( name, type, version=version, **kwds )
+ base_path = self.default_base_path
+ if installed_tool_dependency:
+ path = self._get_package_installed_dependency_path( installed_tool_dependency, base_path, name, version )
+ return self._galaxy_package_dep(path, version)
+ else:
+ return INDETERMINATE_DEPENDENCY
+
+ def _find_dep_default( self, name, type='package', **kwds ):
+ if type == 'set_environment' and kwds.get('installed_tool_dependencies', None):
+ installed_tool_dependency = self._get_installed_dependency( name, type, version=None, **kwds )
+ if installed_tool_dependency:
+ script, path, version = self._get_set_environment_installed_dependency_script_path( installed_tool_dependency, name )
+ if script and path:
+ # Environment settings do not use versions.
+ return script, path, None
+ return INDETERMINATE_DEPENDENCY
+
def _get_installed_dependency( self, name, type, version=None, **kwds ):
for installed_tool_dependency in kwds.get("installed_tool_dependencies", []):
name_and_type_equal = installed_tool_dependency.name == name and installed_tool_dependency.type == type
https://bitbucket.org/galaxy/galaxy-central/commits/0762bd90e85b/
Changeset: 0762bd90e85b
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Implement XML configuration of tool dependency resolver.
This is largely modelled off of @natefoo's job_conf.xml. This is a minimally configurable XML layout so far, but will be adding more options.
It can already decide whether to load tool shed installed packages first (default) or manually configured ones. The advantages of loading manually configured ones first are outlined here - http://dev.list.galaxyproject.org/Test-Toolshed-Biopython-package-dependenc….
Affected #: 4 files
diff -r 8928cf0be80994703689e89c96e2dbfa20a47319 -r 0762bd90e85b94e643458c22670de8455dc97f71 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -114,6 +114,7 @@
self.collect_outputs_from = [ x.strip() for x in kwargs.get( 'collect_outputs_from', 'new_file_path,job_working_directory' ).lower().split(',') ]
self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates" ), self.root )
+ self.dependency_resolvers_config_file = resolve_path( kwargs.get( 'dependency_resolvers_config_file', 'dependency_resolvers_conf.xml' ), self.root )
self.job_config_file = resolve_path( kwargs.get( 'job_config_file', 'job_conf.xml' ), self.root )
self.local_job_queue_workers = int( kwargs.get( "local_job_queue_workers", "5" ) )
self.cluster_job_queue_workers = int( kwargs.get( "cluster_job_queue_workers", "3" ) )
diff -r 8928cf0be80994703689e89c96e2dbfa20a47319 -r 0762bd90e85b94e643458c22670de8455dc97f71 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -696,7 +696,11 @@
def init_dependency_manager( self ):
if self.app.config.use_tool_dependencies:
- self.dependency_manager = DependencyManager( self.app.config.tool_dependency_dir )
+ dependency_manager_kwds = {
+ 'default_base_path': self.app.config.tool_dependency_dir,
+ 'conf_file': self.app.config.dependency_resolvers_config_file,
+ }
+ self.dependency_manager = DependencyManager( **dependency_manager_kwds )
else:
self.dependency_manager = None
diff -r 8928cf0be80994703689e89c96e2dbfa20a47319 -r 0762bd90e85b94e643458c22670de8455dc97f71 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -7,6 +7,8 @@
import logging
log = logging.getLogger( __name__ )
+from galaxy.util import parse_xml
+
INDETERMINATE_DEPENDENCY = (None, None, None)
@@ -21,7 +23,7 @@
and should each contain a file 'env.sh' which can be sourced to make the
dependency available in the current shell environment.
"""
- def __init__( self, default_base_path ):
+ def __init__( self, default_base_path, conf_file=None ):
"""
Create a new dependency manager looking for packages under the paths listed
in `base_paths`. The default base path is app.config.tool_dependency_dir.
@@ -31,10 +33,7 @@
if not os.path.isdir( default_base_path ):
log.warn( "Path '%s' is not directory, ignoring", default_base_path )
self.default_base_path = os.path.abspath( default_base_path )
- self.dependency_resolvers = [
- ToolShedPackageDependencyResolver(self),
- GalaxyPackageDependencyResolver(self),
- ]
+ self.dependency_resolvers = self.__build_dependency_resolvers( conf_file )
def find_dep( self, name, version=None, type='package', **kwds ):
@@ -44,6 +43,32 @@
return dependency
return INDETERMINATE_DEPENDENCY
+ def __build_dependency_resolvers( self, conf_file ):
+ if not conf_file or not os.path.exists( conf_file ):
+ return self.__default_dependency_resolvers()
+ tree = parse_xml( conf_file )
+ return self.__parse_resolver_conf_xml( tree )
+
+ def __default_dependency_resolvers( self ):
+ return [
+ ToolShedPackageDependencyResolver(self),
+ GalaxyPackageDependencyResolver(self),
+ ]
+
+ def __parse_resolver_conf_xml(self, tree):
+ """
+
+ :param tree: Object representing the root ``<dependency_resolvers>`` object in the file.
+ :type tree: ``xml.etree.ElementTree.Element``
+ """
+ resolvers = []
+ resolvers_element = tree.getroot()
+ for resolver_element in resolvers_element.getchildren():
+ resolver_type = resolver_element.tag
+ resolver = RESOLVER_CLASSES[resolver_type](self)
+ resolvers.append(resolver)
+ return resolvers
+
class DependencyResolver(object):
@@ -147,3 +172,9 @@
script = os.path.join( path, 'env.sh' )
return script, path, None
return INDETERMINATE_DEPENDENCY
+
+
+RESOLVER_CLASSES = {
+ 'tool_shed_package': ToolShedPackageDependencyResolver,
+ 'galaxy_package': GalaxyPackageDependencyResolver,
+}
diff -r 8928cf0be80994703689e89c96e2dbfa20a47319 -r 0762bd90e85b94e643458c22670de8455dc97f71 test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -139,3 +139,32 @@
yield base_path
finally:
rmtree(base_path)
+
+
+def test_parse():
+ with __parse_resolvers('''<dependency_resolvers>
+ <tool_shed_package />
+ <galaxy_package />
+</dependency_resolvers>
+''') as dependency_resolvers:
+ assert 'ToolShed' in dependency_resolvers[0].__class__.__name__
+ assert 'Galaxy' in dependency_resolvers[1].__class__.__name__
+
+ with __parse_resolvers('''<dependency_resolvers>
+ <galaxy_package />
+ <tool_shed_package />
+</dependency_resolvers>
+''') as dependency_resolvers:
+ assert 'Galaxy' in dependency_resolvers[0].__class__.__name__
+ assert 'ToolShed' in dependency_resolvers[1].__class__.__name__
+
+
+@contextmanager
+def __parse_resolvers(xml_content):
+ with __test_base_path() as base_path:
+ f = tempfile.NamedTemporaryFile()
+ f.write(xml_content)
+ f.flush()
+ dm = DependencyManager( default_base_path=base_path, conf_file=f.name )
+ yield dm.dependency_resolvers
+
https://bitbucket.org/galaxy/galaxy-central/commits/4711ced22745/
Changeset: 4711ced22745
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Add ability to pass arguments to tool resolvers.
Use new functionality to allow a much cleaner, more general solution to the problem outlined in Pull Request 220. Allow creation of a Galaxy package dependency resolver that always falls back to 'default' dependency. I think this should be the default last-ditch behavior if a specified version of a tag is not found, but I understand the desire for it not to be, hopefully making it optional in this fashion is an acceptable middle ground.
The original problem is essentially, tool shed installs require an exact version of package requirements to be used, but this is very encumbering in rapidly evolving and/or non-toolshed use cases (e.g. my Galaxy-P production and development servers). It is tedious to maintain different code bases and environments in such cases.
Affected #: 2 files
diff -r 0762bd90e85b94e643458c22670de8455dc97f71 -r 4711ced227459680997ffe448ea57abd65349fb2 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -7,7 +7,7 @@
import logging
log = logging.getLogger( __name__ )
-from galaxy.util import parse_xml
+from galaxy.util import parse_xml, string_as_bool
INDETERMINATE_DEPENDENCY = (None, None, None)
@@ -65,7 +65,8 @@
resolvers_element = tree.getroot()
for resolver_element in resolvers_element.getchildren():
resolver_type = resolver_element.tag
- resolver = RESOLVER_CLASSES[resolver_type](self)
+ resolver_kwds = dict(resolver_element.items())
+ resolver = RESOLVER_CLASSES[resolver_type](self, **resolver_kwds)
resolvers.append(resolver)
return resolvers
@@ -78,15 +79,22 @@
class GalaxyPackageDependencyResolver(DependencyResolver):
- def __init__(self, dependency_manager):
+ def __init__(self, dependency_manager, **kwds):
self.default_base_path = dependency_manager.default_base_path
+ ## Galaxy tool shed requires explicit versions on XML elements,
+ ## this in inconvient for testing or Galaxy instances not utilizing
+ ## the tool shed so allow a fallback version of the Galaxy package
+ ## resolver that will just grab 'default' version of exact version
+ ## unavailable.
+ self.versionless = string_as_bool(kwds.get('versionless', "false"))
+
def resolve( self, name, version, type, **kwds ):
"""
Attempt to find a dependency named `name` at version `version`. If version is None, return the "default" version as determined using a
symbolic link (if found). Returns a triple of: env_script, base_path, real_version
"""
- if version is None:
+ if version is None or self.versionless:
return self._find_dep_default( name, type=type, **kwds )
else:
return self._find_dep_versioned( name, version, type=type, **kwds )
@@ -117,8 +125,8 @@
class ToolShedPackageDependencyResolver(GalaxyPackageDependencyResolver):
- def __init__(self, dependency_manager):
- super(ToolShedPackageDependencyResolver, self).__init__(dependency_manager)
+ def __init__(self, dependency_manager, **kwds):
+ super(ToolShedPackageDependencyResolver, self).__init__(dependency_manager, **kwds)
def _find_dep_versioned( self, name, version, type='package', **kwds ):
installed_tool_dependency = self._get_installed_dependency( name, type, version=version, **kwds )
diff -r 0762bd90e85b94e643458c22670de8455dc97f71 -r 4711ced227459680997ffe448ea57abd65349fb2 test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -158,6 +158,15 @@
assert 'Galaxy' in dependency_resolvers[0].__class__.__name__
assert 'ToolShed' in dependency_resolvers[1].__class__.__name__
+ with __parse_resolvers('''<dependency_resolvers>
+ <galaxy_package />
+ <tool_shed_package />
+ <galaxy_package versionless="true" />
+</dependency_resolvers>
+''') as dependency_resolvers:
+ assert not dependency_resolvers[0].versionless
+ assert dependency_resolvers[2].versionless
+
@contextmanager
def __parse_resolvers(xml_content):
https://bitbucket.org/galaxy/galaxy-central/commits/73c22fa3ae10/
Changeset: 73c22fa3ae10
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Allow overriding the base_path on 'galaxy_package' tool dependency resolvers.
There was some code for doing this earlier but it wasn't configurable in any way. This version looks like it should work and is cleaner.
Affected #: 2 files
diff -r 4711ced227459680997ffe448ea57abd65349fb2 -r 73c22fa3ae109e14bd70128c6e00174beae45629 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -80,14 +80,13 @@
class GalaxyPackageDependencyResolver(DependencyResolver):
def __init__(self, dependency_manager, **kwds):
- self.default_base_path = dependency_manager.default_base_path
-
## Galaxy tool shed requires explicit versions on XML elements,
## this in inconvient for testing or Galaxy instances not utilizing
## the tool shed so allow a fallback version of the Galaxy package
## resolver that will just grab 'default' version of exact version
## unavailable.
self.versionless = string_as_bool(kwds.get('versionless', "false"))
+ self.base_path = kwds.get('base_path', dependency_manager.default_base_path)
def resolve( self, name, version, type, **kwds ):
"""
@@ -100,12 +99,12 @@
return self._find_dep_versioned( name, version, type=type, **kwds )
def _find_dep_versioned( self, name, version, type='package', **kwds ):
- base_path = self.default_base_path
+ base_path = self.base_path
path = os.path.join( base_path, name, version )
return self._galaxy_package_dep(path, version)
def _find_dep_default( self, name, type='package', **kwds ):
- base_path = self.default_base_path
+ base_path = self.base_path
path = os.path.join( base_path, name, 'default' )
if os.path.islink( path ):
real_path = os.path.realpath( path )
@@ -130,7 +129,7 @@
def _find_dep_versioned( self, name, version, type='package', **kwds ):
installed_tool_dependency = self._get_installed_dependency( name, type, version=version, **kwds )
- base_path = self.default_base_path
+ base_path = self.base_path
if installed_tool_dependency:
path = self._get_package_installed_dependency_path( installed_tool_dependency, base_path, name, version )
return self._galaxy_package_dep(path, version)
@@ -169,7 +168,7 @@
def _get_set_environment_installed_dependency_script_path( self, installed_tool_dependency, name ):
tool_shed_repository = installed_tool_dependency.tool_shed_repository
- base_path = self.default_base_path
+ base_path = self.base_path
path = os.path.abspath( os.path.join( base_path,
'environment_settings',
name,
diff -r 4711ced227459680997ffe448ea57abd65349fb2 -r 73c22fa3ae109e14bd70128c6e00174beae45629 test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -167,6 +167,19 @@
assert not dependency_resolvers[0].versionless
assert dependency_resolvers[2].versionless
+ with __parse_resolvers('''<dependency_resolvers>
+ <galaxy_package />
+ <tool_shed_package />
+ <galaxy_package base_path="/opt/galaxy/legacy/"/>
+</dependency_resolvers>
+''') as dependency_resolvers:
+ # Unspecified base_paths are both default_base_paths
+ assert dependency_resolvers[0].base_path == dependency_resolvers[1].base_path
+ # Can specify custom base path...
+ assert dependency_resolvers[2].base_path == "/opt/galaxy/legacy/"
+ # ... that is different from the default.
+ assert dependency_resolvers[0].base_path != dependency_resolvers[2].base_path
+
@contextmanager
def __parse_resolvers(xml_content):
https://bitbucket.org/galaxy/galaxy-central/commits/36d6cca02ff2/
Changeset: 36d6cca02ff2
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Modify the tool resolver syntax slightly.
<galaxy_packages /> looks better than <galaxy_package /> since it is a whole class of possible dependencies the corresponding resolver could resolve.
Affected #: 2 files
diff -r 73c22fa3ae109e14bd70128c6e00174beae45629 -r 36d6cca02ff256889ea3a65b256b098431d35192 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -182,6 +182,6 @@
RESOLVER_CLASSES = {
- 'tool_shed_package': ToolShedPackageDependencyResolver,
- 'galaxy_package': GalaxyPackageDependencyResolver,
+ 'tool_shed_packages': ToolShedPackageDependencyResolver,
+ 'galaxy_packages': GalaxyPackageDependencyResolver,
}
diff -r 73c22fa3ae109e14bd70128c6e00174beae45629 -r 36d6cca02ff256889ea3a65b256b098431d35192 test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -143,34 +143,34 @@
def test_parse():
with __parse_resolvers('''<dependency_resolvers>
- <tool_shed_package />
- <galaxy_package />
+ <tool_shed_packages />
+ <galaxy_packages /></dependency_resolvers>
''') as dependency_resolvers:
assert 'ToolShed' in dependency_resolvers[0].__class__.__name__
assert 'Galaxy' in dependency_resolvers[1].__class__.__name__
with __parse_resolvers('''<dependency_resolvers>
- <galaxy_package />
- <tool_shed_package />
+ <galaxy_packages />
+ <tool_shed_packages /></dependency_resolvers>
''') as dependency_resolvers:
assert 'Galaxy' in dependency_resolvers[0].__class__.__name__
assert 'ToolShed' in dependency_resolvers[1].__class__.__name__
with __parse_resolvers('''<dependency_resolvers>
- <galaxy_package />
- <tool_shed_package />
- <galaxy_package versionless="true" />
+ <galaxy_packages />
+ <tool_shed_packages />
+ <galaxy_packages versionless="true" /></dependency_resolvers>
''') as dependency_resolvers:
assert not dependency_resolvers[0].versionless
assert dependency_resolvers[2].versionless
with __parse_resolvers('''<dependency_resolvers>
- <galaxy_package />
- <tool_shed_package />
- <galaxy_package base_path="/opt/galaxy/legacy/"/>
+ <galaxy_packages />
+ <tool_shed_packages />
+ <galaxy_packages base_path="/opt/galaxy/legacy/"/></dependency_resolvers>
''') as dependency_resolvers:
# Unspecified base_paths are both default_base_paths
https://bitbucket.org/galaxy/galaxy-central/commits/b7380008b339/
Changeset: b7380008b339
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Refactor actual tool resolvers out into their own module.
Cleans up lib/galaxy/tools/deps/__init__.py significantly and makes inter-dependencies more clear.
Affected #: 4 files
diff -r 36d6cca02ff256889ea3a65b256b098431d35192 -r b7380008b339394d0f2957f778d2b80f00e72727 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -7,9 +7,10 @@
import logging
log = logging.getLogger( __name__ )
-from galaxy.util import parse_xml, string_as_bool
-
-INDETERMINATE_DEPENDENCY = (None, None, None)
+from galaxy.util import parse_xml
+from .resolvers import INDETERMINATE_DEPENDENCY
+from .resolvers.galaxy_packages import GalaxyPackageDependencyResolver
+from .resolvers.tool_shed_packages import ToolShedPackageDependencyResolver
class DependencyManager( object ):
@@ -70,117 +71,6 @@
resolvers.append(resolver)
return resolvers
-
-class DependencyResolver(object):
-
- def resolve( self, name, version, type, **kwds ):
- raise NotImplementedError()
-
-
-class GalaxyPackageDependencyResolver(DependencyResolver):
-
- def __init__(self, dependency_manager, **kwds):
- ## Galaxy tool shed requires explicit versions on XML elements,
- ## this in inconvient for testing or Galaxy instances not utilizing
- ## the tool shed so allow a fallback version of the Galaxy package
- ## resolver that will just grab 'default' version of exact version
- ## unavailable.
- self.versionless = string_as_bool(kwds.get('versionless', "false"))
- self.base_path = kwds.get('base_path', dependency_manager.default_base_path)
-
- def resolve( self, name, version, type, **kwds ):
- """
- Attempt to find a dependency named `name` at version `version`. If version is None, return the "default" version as determined using a
- symbolic link (if found). Returns a triple of: env_script, base_path, real_version
- """
- if version is None or self.versionless:
- return self._find_dep_default( name, type=type, **kwds )
- else:
- return self._find_dep_versioned( name, version, type=type, **kwds )
-
- def _find_dep_versioned( self, name, version, type='package', **kwds ):
- base_path = self.base_path
- path = os.path.join( base_path, name, version )
- return self._galaxy_package_dep(path, version)
-
- def _find_dep_default( self, name, type='package', **kwds ):
- base_path = self.base_path
- path = os.path.join( base_path, name, 'default' )
- if os.path.islink( path ):
- real_path = os.path.realpath( path )
- real_version = os.path.basename( real_path )
- return self._galaxy_package_dep(real_path, real_version)
- else:
- return INDETERMINATE_DEPENDENCY
-
- def _galaxy_package_dep( self, path, version ):
- script = os.path.join( path, 'env.sh' )
- if os.path.exists( script ):
- return script, path, version
- elif os.path.exists( os.path.join( path, 'bin' ) ):
- return None, path, version
- return INDETERMINATE_DEPENDENCY
-
-
-class ToolShedPackageDependencyResolver(GalaxyPackageDependencyResolver):
-
- def __init__(self, dependency_manager, **kwds):
- super(ToolShedPackageDependencyResolver, self).__init__(dependency_manager, **kwds)
-
- def _find_dep_versioned( self, name, version, type='package', **kwds ):
- installed_tool_dependency = self._get_installed_dependency( name, type, version=version, **kwds )
- base_path = self.base_path
- if installed_tool_dependency:
- path = self._get_package_installed_dependency_path( installed_tool_dependency, base_path, name, version )
- return self._galaxy_package_dep(path, version)
- else:
- return INDETERMINATE_DEPENDENCY
-
- def _find_dep_default( self, name, type='package', **kwds ):
- if type == 'set_environment' and kwds.get('installed_tool_dependencies', None):
- installed_tool_dependency = self._get_installed_dependency( name, type, version=None, **kwds )
- if installed_tool_dependency:
- script, path, version = self._get_set_environment_installed_dependency_script_path( installed_tool_dependency, name )
- if script and path:
- # Environment settings do not use versions.
- return script, path, None
- return INDETERMINATE_DEPENDENCY
-
- def _get_installed_dependency( self, name, type, version=None, **kwds ):
- for installed_tool_dependency in kwds.get("installed_tool_dependencies", []):
- name_and_type_equal = installed_tool_dependency.name == name and installed_tool_dependency.type == type
- if version:
- if name_and_type_equal and installed_tool_dependency.version == version:
- return installed_tool_dependency
- else:
- if name_and_type_equal:
- return installed_tool_dependency
- return None
-
- def _get_package_installed_dependency_path( self, installed_tool_dependency, base_path, name, version ):
- tool_shed_repository = installed_tool_dependency.tool_shed_repository
- return os.path.join( base_path,
- name,
- version,
- tool_shed_repository.owner,
- tool_shed_repository.name,
- tool_shed_repository.installed_changeset_revision )
-
- def _get_set_environment_installed_dependency_script_path( self, installed_tool_dependency, name ):
- tool_shed_repository = installed_tool_dependency.tool_shed_repository
- base_path = self.base_path
- path = os.path.abspath( os.path.join( base_path,
- 'environment_settings',
- name,
- tool_shed_repository.owner,
- tool_shed_repository.name,
- tool_shed_repository.installed_changeset_revision ) )
- if os.path.exists( path ):
- script = os.path.join( path, 'env.sh' )
- return script, path, None
- return INDETERMINATE_DEPENDENCY
-
-
RESOLVER_CLASSES = {
'tool_shed_packages': ToolShedPackageDependencyResolver,
'galaxy_packages': GalaxyPackageDependencyResolver,
diff -r 36d6cca02ff256889ea3a65b256b098431d35192 -r b7380008b339394d0f2957f778d2b80f00e72727 lib/galaxy/tools/deps/resolvers/__init__.py
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/__init__.py
@@ -0,0 +1,18 @@
+from abc import ABCMeta, abstractmethod
+
+INDETERMINATE_DEPENDENCY = (None, None, None)
+
+
+class DependencyResolver(object):
+ __metaclass__ = ABCMeta
+
+ @abstractmethod
+ def resolve( self, name, version, type, **kwds ):
+ """
+ Given inputs describing dependency in the abstract, yield tuple of
+ (script, bin, version). Here script is the env.sh file to source
+ before running a job, if that is not found the bin directory will be
+ appended to the path (if it is not None). Finally, version is the
+ resolved tool dependency version (which may differ from requested
+ version for instance if the request version is 'default'.)
+ """
diff -r 36d6cca02ff256889ea3a65b256b098431d35192 -r b7380008b339394d0f2957f778d2b80f00e72727 lib/galaxy/tools/deps/resolvers/galaxy_packages.py
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/galaxy_packages.py
@@ -0,0 +1,51 @@
+from os.path import join, islink, realpath, basename, exists, abspath
+
+from ..resolvers import DependencyResolver, INDETERMINATE_DEPENDENCY
+from galaxy.util import string_as_bool
+
+
+class GalaxyPackageDependencyResolver(DependencyResolver):
+
+ def __init__(self, dependency_manager, **kwds):
+ ## Galaxy tool shed requires explicit versions on XML elements,
+ ## this in inconvient for testing or Galaxy instances not utilizing
+ ## the tool shed so allow a fallback version of the Galaxy package
+ ## resolver that will just grab 'default' version of exact version
+ ## unavailable.
+ self.versionless = string_as_bool(kwds.get('versionless', "false"))
+ self.base_path = abspath( kwds.get('base_path', dependency_manager.default_base_path) )
+
+ def resolve( self, name, version, type, **kwds ):
+ """
+ Attempt to find a dependency named `name` at version `version`. If version is None, return the "default" version as determined using a
+ symbolic link (if found). Returns a triple of: env_script, base_path, real_version
+ """
+ if version is None or self.versionless:
+ return self._find_dep_default( name, type=type, **kwds )
+ else:
+ return self._find_dep_versioned( name, version, type=type, **kwds )
+
+ def _find_dep_versioned( self, name, version, type='package', **kwds ):
+ base_path = self.base_path
+ path = join( base_path, name, version )
+ return self._galaxy_package_dep(path, version)
+
+ def _find_dep_default( self, name, type='package', **kwds ):
+ base_path = self.base_path
+ path = join( base_path, name, 'default' )
+ if islink( path ):
+ real_path = realpath( path )
+ real_version = basename( real_path )
+ return self._galaxy_package_dep(real_path, real_version)
+ else:
+ return INDETERMINATE_DEPENDENCY
+
+ def _galaxy_package_dep( self, path, version ):
+ script = join( path, 'env.sh' )
+ if exists( script ):
+ return script, path, version
+ elif exists( join( path, 'bin' ) ):
+ return None, path, version
+ return INDETERMINATE_DEPENDENCY
+
+__all__ = [GalaxyPackageDependencyResolver]
diff -r 36d6cca02ff256889ea3a65b256b098431d35192 -r b7380008b339394d0f2957f778d2b80f00e72727 lib/galaxy/tools/deps/resolvers/tool_shed_packages.py
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/tool_shed_packages.py
@@ -0,0 +1,66 @@
+from os.path import abspath, join, exists
+
+from .galaxy_packages import GalaxyPackageDependencyResolver
+from ..resolvers import INDETERMINATE_DEPENDENCY
+
+
+class ToolShedPackageDependencyResolver(GalaxyPackageDependencyResolver):
+
+ def __init__(self, dependency_manager, **kwds):
+ super(ToolShedPackageDependencyResolver, self).__init__(dependency_manager, **kwds)
+
+ def _find_dep_versioned( self, name, version, type='package', **kwds ):
+ installed_tool_dependency = self._get_installed_dependency( name, type, version=version, **kwds )
+ base_path = self.base_path
+ if installed_tool_dependency:
+ path = self._get_package_installed_dependency_path( installed_tool_dependency, base_path, name, version )
+ return self._galaxy_package_dep(path, version)
+ else:
+ return INDETERMINATE_DEPENDENCY
+
+ def _find_dep_default( self, name, type='package', **kwds ):
+ if type == 'set_environment' and kwds.get('installed_tool_dependencies', None):
+ installed_tool_dependency = self._get_installed_dependency( name, type, version=None, **kwds )
+ if installed_tool_dependency:
+ script, path, version = self._get_set_environment_installed_dependency_script_path( installed_tool_dependency, name )
+ if script and path:
+ # Environment settings do not use versions.
+ return script, path, None
+ return INDETERMINATE_DEPENDENCY
+
+ def _get_installed_dependency( self, name, type, version=None, **kwds ):
+ installed_tool_dependencies = kwds.get("installed_tool_dependencies", [])
+ for installed_tool_dependency in (installed_tool_dependencies or []):
+ name_and_type_equal = installed_tool_dependency.name == name and installed_tool_dependency.type == type
+ if version:
+ if name_and_type_equal and installed_tool_dependency.version == version:
+ return installed_tool_dependency
+ else:
+ if name_and_type_equal:
+ return installed_tool_dependency
+ return None
+
+ def _get_package_installed_dependency_path( self, installed_tool_dependency, base_path, name, version ):
+ tool_shed_repository = installed_tool_dependency.tool_shed_repository
+ return join( base_path,
+ name,
+ version,
+ tool_shed_repository.owner,
+ tool_shed_repository.name,
+ tool_shed_repository.installed_changeset_revision )
+
+ def _get_set_environment_installed_dependency_script_path( self, installed_tool_dependency, name ):
+ tool_shed_repository = installed_tool_dependency.tool_shed_repository
+ base_path = self.base_path
+ path = abspath( join( base_path,
+ 'environment_settings',
+ name,
+ tool_shed_repository.owner,
+ tool_shed_repository.name,
+ tool_shed_repository.installed_changeset_revision ) )
+ if exists( path ):
+ script = join( path, 'env.sh' )
+ return script, path, None
+ return INDETERMINATE_DEPENDENCY
+
+__all__ = [ToolShedPackageDependencyResolver]
https://bitbucket.org/galaxy/galaxy-central/commits/21b9e99a73fe/
Changeset: 21b9e99a73fe
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Introduce higher-level abstraction describing dependency.
Passing the tuple (script, path, version) around is insufficient to represent other kinds of potential dependency load commands (namely module loads), this generalization allows more expressivity. I believe this abstraction also moves some logic out of the tool class that is better encapsulated in the deps module and submodules.
Affected #: 5 files
diff -r b7380008b339394d0f2957f778d2b80f00e72727 -r 21b9e99a73fe96721498dda9751680741f78e946 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -40,7 +40,7 @@
from galaxy.tools.actions import DefaultToolAction
from galaxy.tools.actions.data_source import DataSourceToolAction
from galaxy.tools.actions.data_manager import DataManagerToolAction
-from galaxy.tools.deps import DependencyManager
+from galaxy.tools.deps import DependencyManager, INDETERMINATE_DEPENDENCY
from galaxy.tools.parameters import check_param, params_from_strings, params_to_strings
from galaxy.tools.parameters.basic import (BaseURLToolParameter,
DataToolParameter, HiddenToolParameter, LibraryDatasetToolParameter,
@@ -2681,20 +2681,17 @@
installed_tool_dependencies = None
for requirement in self.requirements:
log.debug( "Building dependency shell command for dependency '%s'", requirement.name )
- script_file = None
- base_path = None
- version = None
+ dependency = INDETERMINATE_DEPENDENCY
if requirement.type in [ 'package', 'set_environment' ]:
- script_file, base_path, version = self.app.toolbox.dependency_manager.find_dep( name=requirement.name,
- version=requirement.version,
- type=requirement.type,
- installed_tool_dependencies=installed_tool_dependencies )
- if script_file is None and base_path is None:
+ dependency = self.app.toolbox.dependency_manager.find_dep( name=requirement.name,
+ version=requirement.version,
+ type=requirement.type,
+ installed_tool_dependencies=installed_tool_dependencies )
+ dependency_commands = dependency.shell_commands( requirement )
+ if not dependency_commands:
log.warn( "Failed to resolve dependency on '%s', ignoring", requirement.name )
- elif requirement.type == 'package' and script_file is None:
- commands.append( 'PACKAGE_BASE=%s; export PACKAGE_BASE; PATH="%s/bin:$PATH"; export PATH' % ( base_path, base_path ) )
else:
- commands.append( 'PACKAGE_BASE=%s; export PACKAGE_BASE; . %s' % ( base_path, script_file ) )
+ commands.append(dependency_commands)
return commands
def build_redirect_url_params( self, param_dict ):
"""
diff -r b7380008b339394d0f2957f778d2b80f00e72727 -r 21b9e99a73fe96721498dda9751680741f78e946 lib/galaxy/tools/deps/resolvers/__init__.py
--- a/lib/galaxy/tools/deps/resolvers/__init__.py
+++ b/lib/galaxy/tools/deps/resolvers/__init__.py
@@ -1,9 +1,7 @@
from abc import ABCMeta, abstractmethod
-INDETERMINATE_DEPENDENCY = (None, None, None)
-
-class DependencyResolver(object):
+class DependencyResolver( object ):
__metaclass__ = ABCMeta
@abstractmethod
@@ -16,3 +14,21 @@
resolved tool dependency version (which may differ from requested
version for instance if the request version is 'default'.)
"""
+
+
+class Dependency( object ):
+ __metaclass__ = ABCMeta
+
+ @abstractmethod
+ def shell_commands( self, requirement ):
+ """
+ Return shell commands to enable this dependency.
+ """
+
+
+class NullDependency( Dependency ):
+
+ def shell_commands( self, requirement ):
+ return None
+
+INDETERMINATE_DEPENDENCY = NullDependency()
diff -r b7380008b339394d0f2957f778d2b80f00e72727 -r 21b9e99a73fe96721498dda9751680741f78e946 lib/galaxy/tools/deps/resolvers/galaxy_packages.py
--- a/lib/galaxy/tools/deps/resolvers/galaxy_packages.py
+++ b/lib/galaxy/tools/deps/resolvers/galaxy_packages.py
@@ -1,8 +1,11 @@
from os.path import join, islink, realpath, basename, exists, abspath
-from ..resolvers import DependencyResolver, INDETERMINATE_DEPENDENCY
+from ..resolvers import DependencyResolver, INDETERMINATE_DEPENDENCY, Dependency
from galaxy.util import string_as_bool
+import logging
+log = logging.getLogger( __name__ )
+
class GalaxyPackageDependencyResolver(DependencyResolver):
@@ -43,9 +46,28 @@
def _galaxy_package_dep( self, path, version ):
script = join( path, 'env.sh' )
if exists( script ):
- return script, path, version
+ return GalaxyPackageDependency(script, path, version)
elif exists( join( path, 'bin' ) ):
- return None, path, version
+ return GalaxyPackageDependency(None, path, version)
return INDETERMINATE_DEPENDENCY
-__all__ = [GalaxyPackageDependencyResolver]
+
+class GalaxyPackageDependency(Dependency):
+
+ def __init__( self, script, path, version ):
+ self.script = script
+ self.path = path
+ self.version = version
+
+ def shell_commands( self, requirement ):
+ base_path = self.path
+ if self.script is None and base_path is None:
+ log.warn( "Failed to resolve dependency on '%s', ignoring", requirement.name )
+ commands = None
+ elif requirement.type == 'package' and self.script is None:
+ commands = 'PACKAGE_BASE=%s; export PACKAGE_BASE; PATH="%s/bin:$PATH"; export PATH' % ( base_path, base_path )
+ else:
+ commands = 'PACKAGE_BASE=%s; export PACKAGE_BASE; . %s' % ( base_path, self.script )
+ return commands
+
+__all__ = [GalaxyPackageDependencyResolver, GalaxyPackageDependency]
diff -r b7380008b339394d0f2957f778d2b80f00e72727 -r 21b9e99a73fe96721498dda9751680741f78e946 lib/galaxy/tools/deps/resolvers/tool_shed_packages.py
--- a/lib/galaxy/tools/deps/resolvers/tool_shed_packages.py
+++ b/lib/galaxy/tools/deps/resolvers/tool_shed_packages.py
@@ -1,6 +1,6 @@
from os.path import abspath, join, exists
-from .galaxy_packages import GalaxyPackageDependencyResolver
+from .galaxy_packages import GalaxyPackageDependencyResolver, GalaxyPackageDependency
from ..resolvers import INDETERMINATE_DEPENDENCY
@@ -22,10 +22,10 @@
if type == 'set_environment' and kwds.get('installed_tool_dependencies', None):
installed_tool_dependency = self._get_installed_dependency( name, type, version=None, **kwds )
if installed_tool_dependency:
- script, path, version = self._get_set_environment_installed_dependency_script_path( installed_tool_dependency, name )
- if script and path:
+ dependency = self._get_set_environment_installed_dependency_script_path( installed_tool_dependency, name )
+ if dependency.script and dependency.path:
# Environment settings do not use versions.
- return script, path, None
+ return GalaxyPackageDependency(dependency.script, dependency.path, None)
return INDETERMINATE_DEPENDENCY
def _get_installed_dependency( self, name, type, version=None, **kwds ):
@@ -60,7 +60,7 @@
tool_shed_repository.installed_changeset_revision ) )
if exists( path ):
script = join( path, 'env.sh' )
- return script, path, None
+ return GalaxyPackageDependency(script, path, None)
return INDETERMINATE_DEPENDENCY
__all__ = [ToolShedPackageDependencyResolver]
diff -r b7380008b339394d0f2957f778d2b80f00e72727 -r 21b9e99a73fe96721498dda9751680741f78e946 test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -2,9 +2,11 @@
import os.path
from os import makedirs, symlink
from shutil import rmtree
-from galaxy.tools.deps import DependencyManager
+from galaxy.tools.deps import DependencyManager, INDETERMINATE_DEPENDENCY
+from galaxy.tools.deps.resolvers.galaxy_packages import GalaxyPackageDependency
from galaxy.util.bunch import Bunch
from contextlib import contextmanager
+from subprocess import Popen, PIPE
def test_tool_dependencies():
@@ -24,24 +26,23 @@
__touch( os.path.join( p, "env.sh" ) )
dm = DependencyManager( default_base_path=base_path )
- d1_script, d1_path, d1_version = dm.find_dep( "dep1", "1.0" )
- assert d1_script == os.path.join( base_path, 'dep1', '1.0', 'env.sh' )
- assert d1_path == os.path.join( base_path, 'dep1', '1.0' )
- assert d1_version == "1.0"
- d2_script, d2_path, d2_version = dm.find_dep( "dep1", "2.0" )
- assert d2_script == None
- assert d2_path == os.path.join( base_path, 'dep1', '2.0' )
- assert d2_version == "2.0"
+ dependency = dm.find_dep( "dep1", "1.0" )
+ assert dependency.script == os.path.join( base_path, 'dep1', '1.0', 'env.sh' )
+ assert dependency.path == os.path.join( base_path, 'dep1', '1.0' )
+ assert dependency.version == "1.0"
+ dependency = dm.find_dep( "dep1", "2.0" )
+ assert dependency.script == None
+ assert dependency.path == os.path.join( base_path, 'dep1', '2.0' )
+ assert dependency.version == "2.0"
## Test default versions
symlink( os.path.join( base_path, 'dep1', '2.0'), os.path.join( base_path, 'dep1', 'default' ) )
- default_script, default_path, default_version = dm.find_dep( "dep1", None )
- assert default_version == "2.0"
+ dependency = dm.find_dep( "dep1", None )
+ assert dependency.version == "2.0"
## Test default will not be fallen back upon by default
- default_script, default_path, default_version = dm.find_dep( "dep1", "2.1" )
- assert default_script == None
- assert default_version == None
+ dependency = dm.find_dep( "dep1", "2.1" )
+ assert dependency == INDETERMINATE_DEPENDENCY
TEST_REPO_USER = "devteam"
@@ -56,9 +57,9 @@
dm = DependencyManager( default_base_path=base_path )
env_settings_dir = os.path.join(base_path, "environment_settings", TEST_REPO_NAME, TEST_REPO_USER, TEST_REPO_NAME, TEST_REPO_CHANGESET)
os.makedirs(env_settings_dir)
- d1_script, d1_path, d1_version = dm.find_dep( TEST_REPO_NAME, version=None, type='set_environment', installed_tool_dependencies=[test_repo] )
- assert d1_version == None
- assert d1_script == os.path.join(env_settings_dir, "env.sh"), d1_script
+ dependency = dm.find_dep( TEST_REPO_NAME, version=None, type='set_environment', installed_tool_dependencies=[test_repo] )
+ assert dependency.version == None
+ assert dependency.script == os.path.join(env_settings_dir, "env.sh")
def test_toolshed_package_requirements():
@@ -66,9 +67,9 @@
test_repo = __build_test_repo('package', version=TEST_VERSION)
dm = DependencyManager( default_base_path=base_path )
package_dir = __build_ts_test_package(base_path)
- d1_script, d1_path, d1_version = dm.find_dep( TEST_REPO_NAME, version=TEST_VERSION, type='package', installed_tool_dependencies=[test_repo] )
- assert d1_version == TEST_VERSION, d1_version
- assert d1_script == os.path.join(package_dir, "env.sh"), d1_script
+ dependency = dm.find_dep( TEST_REPO_NAME, version=TEST_VERSION, type='package', installed_tool_dependencies=[test_repo] )
+ assert dependency.version == TEST_VERSION
+ assert dependency.script == os.path.join(package_dir, "env.sh")
def test_toolshed_tools_fallback_on_manual_dependencies():
@@ -76,9 +77,9 @@
dm = DependencyManager( default_base_path=base_path )
test_repo = __build_test_repo('package', version=TEST_VERSION)
env_path = __setup_galaxy_package_dep(base_path, "dep1", "1.0")
- d1_script, d1_path, d1_version = dm.find_dep( "dep1", version="1.0", type='package', installed_tool_dependencies=[test_repo] )
- assert d1_version == "1.0"
- assert d1_script == env_path
+ dependency = dm.find_dep( "dep1", version="1.0", type='package', installed_tool_dependencies=[test_repo] )
+ assert dependency.version == "1.0"
+ assert dependency.script == env_path
def test_toolshed_greater_precendence():
@@ -88,9 +89,9 @@
ts_package_dir = __build_ts_test_package(base_path)
gx_env_path = __setup_galaxy_package_dep(base_path, TEST_REPO_NAME, TEST_VERSION)
ts_env_path = os.path.join(ts_package_dir, "env.sh")
- d1_script, d1_path, d1_version = dm.find_dep( TEST_REPO_NAME, version=TEST_VERSION, type='package', installed_tool_dependencies=[test_repo] )
- assert d1_script != gx_env_path # Not the galaxy path, it should be the tool shed path used.
- assert d1_script == ts_env_path
+ dependency = dm.find_dep( TEST_REPO_NAME, version=TEST_VERSION, type='package', installed_tool_dependencies=[test_repo] )
+ assert dependency.script != gx_env_path # Not the galaxy path, it should be the tool shed path used.
+ assert dependency.script == ts_env_path
def __build_ts_test_package(base_path, script_contents=''):
@@ -99,6 +100,18 @@
return package_dir
+def test_galaxy_dependency_object_script():
+ with __test_base_path() as base_path:
+ ## Create env.sh file that just exports variable FOO and verify it
+ ## shell_commands export it correctly.
+ env_path = __setup_galaxy_package_dep(base_path, TEST_REPO_NAME, TEST_VERSION, "export FOO=\"bar\"")
+ dependency = GalaxyPackageDependency(env_path, os.path.dirname(env_path), TEST_VERSION)
+ command = ["bash", "-c", "%s; echo \"$FOO\"" % dependency.shell_commands(Bunch(type="package"))]
+ process = Popen(command, stdout=PIPE)
+ output = process.communicate()[0].strip()
+ assert output == 'bar'
+
+
def __setup_galaxy_package_dep(base_path, name, version, contents=""):
dep_directory = os.path.join( base_path, name, version )
env_path = os.path.join( dep_directory, "env.sh" )
https://bitbucket.org/galaxy/galaxy-central/commits/31f6c565ca43/
Changeset: 31f6c565ca43
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Outline of integrating environment modules.
This is completely untested and undoubtedly needs some fixes. The point is largely to outline how it could be done and let someone with interest in using this go in and flesh out the implementation.
Note: This is a community contributed feature and support from the core Galaxy team for it will be minimal.
Affected #: 3 files
diff -r 21b9e99a73fe96721498dda9751680741f78e946 -r 31f6c565ca43d21b12cd78ffe72509517cd9cfea lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -11,6 +11,7 @@
from .resolvers import INDETERMINATE_DEPENDENCY
from .resolvers.galaxy_packages import GalaxyPackageDependencyResolver
from .resolvers.tool_shed_packages import ToolShedPackageDependencyResolver
+from .resolvers.modules import ModuleDependencyResolver
class DependencyManager( object ):
@@ -74,4 +75,5 @@
RESOLVER_CLASSES = {
'tool_shed_packages': ToolShedPackageDependencyResolver,
'galaxy_packages': GalaxyPackageDependencyResolver,
+ 'modules': ModuleDependencyResolver,
}
diff -r 21b9e99a73fe96721498dda9751680741f78e946 -r 31f6c565ca43d21b12cd78ffe72509517cd9cfea lib/galaxy/tools/deps/resolvers/modules.py
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/modules.py
@@ -0,0 +1,137 @@
+"""
+This file contains the outline of an implementation to load environment modules
+(http://modules.sourceforge.net/).
+
+This is a community contributed feature and the core Galaxy team does utilize
+it, hence support for it will be minimal. The Galaxy team eagerly welcomes
+community contribution and maintenance however.
+"""
+from os.path import exists, isdir, join
+from StringIO import StringIO
+from subprocess import Popen, PIPE
+
+from ..resolvers import DependencyResolver, INDETERMINATE_DEPENDENCY, Dependency
+from galaxy.util import string_as_bool
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+DEFAULT_MODULE_COMMAND = 'module'
+DEFAULT_MODULE_DIRECTORY = '/usr/share/modules/modulefiles'
+DEFAULT_INDICATOR = '(default)'
+DEFAULT_MODULE_PREFETCH = "true"
+UNKNOWN_FIND_BY_MESSAGE = "ModuleDependencyResolver does not know how to find modules by [%s], find_by should be one of %s"
+
+
+class ModuleDependencyResolver(DependencyResolver):
+
+ def __init__(self, dependency_manager, **kwds):
+ self.module_command = kwds.get('command', DEFAULT_MODULE_COMMAND)
+ self.versionless = string_as_bool(kwds.get('versionless', 'false'))
+ find_by = kwds.get('find_by', 'avail')
+ prefetch = string_as_bool(kwds.get('prefetch', DEFAULT_MODULE_PREFETCH))
+ if find_by == 'directory':
+ directory = kwds.get('directory', DEFAULT_MODULE_DIRECTORY)
+ self.module_checker = DirectoryModuleChecker(self, directory, prefetch)
+ elif find_by == 'avail':
+ self.module_checker = AvailModuleChecker(self, prefetch)
+ else:
+ raise Exception(UNKNOWN_FIND_BY_MESSAGE % (find_by, ["avail", "directory"]))
+
+ def resolve( self, name, version, type, **kwds ):
+ if type != "package":
+ return INDETERMINATE_DEPENDENCY
+
+ if self.versionless:
+ version = None
+
+ if self.__has_module(name, version):
+ return ModuleDependency(self, name, version)
+
+ return INDETERMINATE_DEPENDENCY
+
+ def __has_module(self, name, version):
+ return self.module_checker.has_module(name, version)
+
+
+class DirectoryModuleChecker(object):
+
+ def __init__(self, module_dependency_resolver, directory, prefetch):
+ self.module_dependency_resolver = module_dependency_resolver
+ self.directory = directory
+ if prefetch:
+ log.warn("Created module dependency resolver with prefetch enabled, but directory module checker does not support this.")
+ pass
+
+ def has_module(self, module, version):
+ module_directory = join(self.directory, module)
+ has_module_directory = isdir( join( self.directory, module ) )
+ if not version:
+ has_module = has_module_directory
+ else:
+ modulefile = join( module_directory, version )
+ has_modulefile = exists( modulefile )
+ has_module = has_module_directory and has_modulefile
+ return has_module
+
+
+class AvailModuleChecker(object):
+
+ def __init__(self, module_dependency_resolver, prefetch):
+ self.module_dependency_resolver = module_dependency_resolver
+ if prefetch:
+ prefetched_modules = []
+ for module in self.__modules():
+ prefetched_modules.append(module)
+ else:
+ prefetched_modules = None
+ self.prefetched_modules = prefetched_modules
+
+ def has_module(self, module, version):
+ module_generator = self.prefetched_modules
+ if module_generator is None:
+ module_generator = self.__modules()
+
+ for module_name, module_version in module_generator:
+ names_match = module == module_name
+ module_match = names_match and (version == None or module_version == version)
+ if module_match:
+ return True
+ return False
+
+ def __modules(self):
+ raw_output = self.__module_avail_ouptut()
+ for line in StringIO(raw_output):
+ line = line and line.strip()
+ if not line or line.startswith("-"):
+ continue
+
+ line_modules = line.split()
+ for module in line_modules:
+ if module.endswith(DEFAULT_INDICATOR):
+ module = module[0:-len(DEFAULT_INDICATOR)].strip()
+ module_parts = module.split('/')
+ module_version = None
+ if len(module_parts) == 2:
+ module_version = module_parts[1]
+ module_name = module_parts[0]
+ yield module_name, module_version
+
+ def __module_avail_ouptut(self):
+ avail_command = '%s avail' % self.module_dependency_resolver.module_command
+ return Popen([avail_command], shell=True, stderr=PIPE).communicate()[1]
+
+
+class ModuleDependency(Dependency):
+
+ def __init__(self, module_dependency_resolver, module_name, module_version=None):
+ self.module_dependency_resolver = module_dependency_resolver
+ self.module_name = module_name
+ self.module_version = module_version
+
+ def shell_commands(self, requirement):
+ command = '%s load %s' % (self.module_dependency_resolver.module_command, self.module_name)
+ if self.module_version:
+ command = '%s/%s' % self.module_version
+ return command
diff -r 21b9e99a73fe96721498dda9751680741f78e946 -r 31f6c565ca43d21b12cd78ffe72509517cd9cfea test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -1,9 +1,11 @@
import tempfile
import os.path
-from os import makedirs, symlink
+from stat import S_IXUSR
+from os import makedirs, symlink, stat, chmod
from shutil import rmtree
from galaxy.tools.deps import DependencyManager, INDETERMINATE_DEPENDENCY
from galaxy.tools.deps.resolvers.galaxy_packages import GalaxyPackageDependency
+from galaxy.tools.deps.resolvers.modules import ModuleDependencyResolver
from galaxy.util.bunch import Bunch
from contextlib import contextmanager
from subprocess import Popen, PIPE
@@ -100,6 +102,50 @@
return package_dir
+def test_module_dependency_resolver():
+ with __test_base_path() as temp_directory:
+ module_script = os.path.join(temp_directory, "module")
+ with open(module_script, 'w') as f:
+ f.write('''#!/bin/sh
+cat %s/example_output 1>&2;
+''' % temp_directory)
+ with open(os.path.join(temp_directory, "example_output"), "w") as f:
+ # Subset of module avail from MSI cluster.
+ f.write('''
+-------------------------- /soft/modules/modulefiles ---------------------------
+JAGS/3.2.0-gcc45
+JAGS/3.3.0-gcc4.7.2
+ProbABEL/0.1-3
+ProbABEL/0.1-9e
+R/2.12.2
+R/2.13.1
+R/2.14.1
+R/2.15.0
+R/2.15.1
+R/3.0.1(default)
+abokia-blast/2.0.2-130524/ompi_intel
+abokia-blast/2.0.2-130630/ompi_intel
+
+--------------------------- /soft/intel/modulefiles ----------------------------
+advisor/2013/update1 intel/11.1.075 mkl/10.2.1.017
+advisor/2013/update2 intel/11.1.080 mkl/10.2.5.035
+advisor/2013/update3 intel/12.0 mkl/10.2.7.041
+''')
+ st = os.stat(module_script)
+ chmod(module_script, st.st_mode | S_IXUSR)
+ resolver = ModuleDependencyResolver(None, command=module_script)
+ module = resolver.resolve( name="R", version=None, type="package" )
+ assert module.module_name == "R"
+ assert module.module_version == None
+
+ module = resolver.resolve( name="R", version="3.0.1", type="package" )
+ assert module.module_name == "R"
+ assert module.module_version == "3.0.1"
+
+ module = resolver.resolve( name="R", version="3.0.4", type="package" )
+ assert module == INDETERMINATE_DEPENDENCY
+
+
def test_galaxy_dependency_object_script():
with __test_base_path() as base_path:
## Create env.sh file that just exports variable FOO and verify it
@@ -189,11 +235,30 @@
# Unspecified base_paths are both default_base_paths
assert dependency_resolvers[0].base_path == dependency_resolvers[1].base_path
# Can specify custom base path...
- assert dependency_resolvers[2].base_path == "/opt/galaxy/legacy/"
+ assert dependency_resolvers[2].base_path == "/opt/galaxy/legacy"
# ... that is different from the default.
assert dependency_resolvers[0].base_path != dependency_resolvers[2].base_path
+def test_config_module_defaults():
+ with __parse_resolvers('''<dependency_resolvers>
+ <modules />
+</dependency_resolvers>
+''') as dependency_resolvers:
+ module_resolver = dependency_resolvers[0]
+ assert module_resolver.module_command == "module"
+ assert module_resolver.module_checker.__class__.__name__ == "AvailModuleChecker"
+
+
+def test_config_module_directory_searcher():
+ with __parse_resolvers('''<dependency_resolvers>
+ <modules find_by="directory" directory="/opt/Modules/modulefiles" />
+</dependency_resolvers>
+''') as dependency_resolvers:
+ module_resolver = dependency_resolvers[0]
+ assert module_resolver.module_checker.directory == "/opt/Modules/modulefiles"
+
+
@contextmanager
def __parse_resolvers(xml_content):
with __test_base_path() as base_path:
https://bitbucket.org/galaxy/galaxy-central/commits/54b9d1a2b1a9/
Changeset: 54b9d1a2b1a9
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Dynamically load tool dependency resolver plugins.
This largely mirrors the code for the dynamic job runner, so common util code meant to be shared between dynamic job destinations and dynamic dependency resolvers was created out into galaxy.util.submodules.
TODO: Update job mapper to use this.
Affected #: 5 files
diff -r 31f6c565ca43d21b12cd78ffe72509517cd9cfea -r 54b9d1a2b1a971274c2d02f6856c4cd08b9edf88 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -8,10 +8,11 @@
log = logging.getLogger( __name__ )
from galaxy.util import parse_xml
+
from .resolvers import INDETERMINATE_DEPENDENCY
from .resolvers.galaxy_packages import GalaxyPackageDependencyResolver
from .resolvers.tool_shed_packages import ToolShedPackageDependencyResolver
-from .resolvers.modules import ModuleDependencyResolver
+from galaxy.util.submodules import submodules
class DependencyManager( object ):
@@ -35,6 +36,7 @@
if not os.path.isdir( default_base_path ):
log.warn( "Path '%s' is not directory, ignoring", default_base_path )
self.default_base_path = os.path.abspath( default_base_path )
+ self.resolver_classes = self.__resolvers_dict()
self.dependency_resolvers = self.__build_dependency_resolvers( conf_file )
@@ -68,12 +70,19 @@
for resolver_element in resolvers_element.getchildren():
resolver_type = resolver_element.tag
resolver_kwds = dict(resolver_element.items())
- resolver = RESOLVER_CLASSES[resolver_type](self, **resolver_kwds)
+ resolver = self.resolver_classes[resolver_type](self, **resolver_kwds)
resolvers.append(resolver)
return resolvers
-RESOLVER_CLASSES = {
- 'tool_shed_packages': ToolShedPackageDependencyResolver,
- 'galaxy_packages': GalaxyPackageDependencyResolver,
- 'modules': ModuleDependencyResolver,
-}
+ def __resolvers_dict( self ):
+ resolver_dict = {}
+ for resolver_module in self.__resolver_modules():
+ for clazz in resolver_module.__all__:
+ resolver_type = getattr(clazz, 'resolver_type', None)
+ if resolver_type:
+ resolver_dict[resolver_type] = clazz
+ return resolver_dict
+
+ def __resolver_modules( self ):
+ import galaxy.tools.deps.resolvers
+ return submodules( galaxy.tools.deps.resolvers )
diff -r 31f6c565ca43d21b12cd78ffe72509517cd9cfea -r 54b9d1a2b1a971274c2d02f6856c4cd08b9edf88 lib/galaxy/tools/deps/resolvers/galaxy_packages.py
--- a/lib/galaxy/tools/deps/resolvers/galaxy_packages.py
+++ b/lib/galaxy/tools/deps/resolvers/galaxy_packages.py
@@ -8,6 +8,7 @@
class GalaxyPackageDependencyResolver(DependencyResolver):
+ resolver_type = "galaxy_packages"
def __init__(self, dependency_manager, **kwds):
## Galaxy tool shed requires explicit versions on XML elements,
diff -r 31f6c565ca43d21b12cd78ffe72509517cd9cfea -r 54b9d1a2b1a971274c2d02f6856c4cd08b9edf88 lib/galaxy/tools/deps/resolvers/modules.py
--- a/lib/galaxy/tools/deps/resolvers/modules.py
+++ b/lib/galaxy/tools/deps/resolvers/modules.py
@@ -25,6 +25,7 @@
class ModuleDependencyResolver(DependencyResolver):
+ resolver_type = "modules"
def __init__(self, dependency_manager, **kwds):
self.module_command = kwds.get('command', DEFAULT_MODULE_COMMAND)
@@ -135,3 +136,5 @@
if self.module_version:
command = '%s/%s' % self.module_version
return command
+
+__all__ = [ModuleDependencyResolver]
diff -r 31f6c565ca43d21b12cd78ffe72509517cd9cfea -r 54b9d1a2b1a971274c2d02f6856c4cd08b9edf88 lib/galaxy/tools/deps/resolvers/tool_shed_packages.py
--- a/lib/galaxy/tools/deps/resolvers/tool_shed_packages.py
+++ b/lib/galaxy/tools/deps/resolvers/tool_shed_packages.py
@@ -5,6 +5,7 @@
class ToolShedPackageDependencyResolver(GalaxyPackageDependencyResolver):
+ resolver_type = "tool_shed_packages"
def __init__(self, dependency_manager, **kwds):
super(ToolShedPackageDependencyResolver, self).__init__(dependency_manager, **kwds)
diff -r 31f6c565ca43d21b12cd78ffe72509517cd9cfea -r 54b9d1a2b1a971274c2d02f6856c4cd08b9edf88 lib/galaxy/util/submodules.py
--- /dev/null
+++ b/lib/galaxy/util/submodules.py
@@ -0,0 +1,30 @@
+from os import listdir
+import logging
+log = logging.getLogger( __name__ )
+
+
+def submodules( module ):
+ unsorted_submodule_names = __submodule_names( module )
+ submodule_names = sorted( unsorted_submodule_names, reverse=True )
+ submodules = []
+ for submodule_name in submodule_names:
+ full_submodule = "%s.%s" % ( module.__name__, submodule_name )
+ try:
+ __import__( full_submodule )
+ submodule = getattr( module, submodule_name )
+ submodules.append( submodule )
+ except BaseException, exception:
+ exception_str = str( exception )
+ message = "%s dynamic module could not be loaded: %s" % ( full_submodule, exception_str )
+ log.debug( message )
+ return submodules
+
+
+def __submodule_names( module ):
+ module_dir = module.__path__[ 0 ]
+ names = []
+ for fname in listdir( module_dir ):
+ if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
+ submodule_name = fname[ :-len( ".py" ) ]
+ names.append( submodule_name )
+ return names
https://bitbucket.org/galaxy/galaxy-central/commits/6f54a6d1fc23/
Changeset: 6f54a6d1fc23
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Tool Dependencies: Move requirement/dependency logic out of galaxy.tools and into galaxy.tools.deps, test.
Moved ToolRequirement out into its own module along with parsing. Also moved all requirements handling logic into DependencyManager. Now the Tool class knows nothing about the internals of requirements or how to process dependencies. There is also greater unit test coverage for these concepts.
Affected #: 4 files
diff -r 54b9d1a2b1a971274c2d02f6856c4cd08b9edf88 -r 6f54a6d1fc23b16e11005f166fe84e73754f6cae lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -41,6 +41,7 @@
from galaxy.tools.actions.data_source import DataSourceToolAction
from galaxy.tools.actions.data_manager import DataManagerToolAction
from galaxy.tools.deps import DependencyManager, INDETERMINATE_DEPENDENCY
+from galaxy.tools.deps.requirements import parse_requirements_from_xml
from galaxy.tools.parameters import check_param, params_from_strings, params_to_strings
from galaxy.tools.parameters.basic import (BaseURLToolParameter,
DataToolParameter, HiddenToolParameter, LibraryDatasetToolParameter,
@@ -938,15 +939,6 @@
def __iter__( self ):
return iter( ( self.format, self.metadata_source, self.parent ) )
-class ToolRequirement( object ):
- """
- Represents an external requirement that must be available for the tool to run (for example, a program, package, or library).
- Requirements can optionally assert a specific version.
- """
- def __init__( self, name=None, type=None, version=None ):
- self.name = name
- self.type = type
- self.version = version
class Tool( object, Dictifiable ):
"""
@@ -1236,10 +1228,7 @@
else:
self.tests = None
# Requirements (dependencies)
- self.requirements = []
- requirements_elem = root.find( "requirements" )
- if requirements_elem:
- self.parse_requirements( requirements_elem )
+ self.requirements = parse_requirements_from_xml( root )
# Determine if this tool can be used in workflows
self.is_workflow_compatible = self.check_workflow_compatible(root)
# Trackster configuration.
@@ -1810,17 +1799,6 @@
for name in param.get_dependencies():
context[ name ].refresh_on_change = True
return param
- def parse_requirements( self, requirements_elem ):
- """
- Parse each requirement from the <requirements> element and add to
- self.requirements
- """
- for requirement_elem in requirements_elem.findall( 'requirement' ):
- name = xml_text( requirement_elem )
- type = requirement_elem.get( "type", "package" )
- version = requirement_elem.get( "version", None )
- requirement = ToolRequirement( name=name, type=type, version=version )
- self.requirements.append( requirement )
def populate_tool_shed_info( self ):
if self.repository_id is not None and 'ToolShedRepository' in self.app.model:
@@ -2672,27 +2650,16 @@
command_line = command_line.replace(executable, abs_executable, 1)
command_line = self.interpreter + " " + command_line
return command_line
+
def build_dependency_shell_commands( self ):
"""Return a list of commands to be run to populate the current environment to include this tools requirements."""
- commands = []
if self.tool_shed_repository:
installed_tool_dependencies = self.tool_shed_repository.installed_tool_dependencies
else:
installed_tool_dependencies = None
- for requirement in self.requirements:
- log.debug( "Building dependency shell command for dependency '%s'", requirement.name )
- dependency = INDETERMINATE_DEPENDENCY
- if requirement.type in [ 'package', 'set_environment' ]:
- dependency = self.app.toolbox.dependency_manager.find_dep( name=requirement.name,
- version=requirement.version,
- type=requirement.type,
- installed_tool_dependencies=installed_tool_dependencies )
- dependency_commands = dependency.shell_commands( requirement )
- if not dependency_commands:
- log.warn( "Failed to resolve dependency on '%s', ignoring", requirement.name )
- else:
- commands.append(dependency_commands)
- return commands
+ return self.app.toolbox.dependency_manager.dependency_shell_commands( self.requirements,
+ installed_tool_dependencies=installed_tool_dependencies )
+
def build_redirect_url_params( self, param_dict ):
"""
Substitute parameter values into self.redirect_url_params
diff -r 54b9d1a2b1a971274c2d02f6856c4cd08b9edf88 -r 6f54a6d1fc23b16e11005f166fe84e73754f6cae lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -39,6 +39,22 @@
self.resolver_classes = self.__resolvers_dict()
self.dependency_resolvers = self.__build_dependency_resolvers( conf_file )
+ def dependency_shell_commands( self, requirements, **kwds ):
+ commands = []
+ for requirement in requirements:
+ log.debug( "Building dependency shell command for dependency '%s'", requirement.name )
+ dependency = INDETERMINATE_DEPENDENCY
+ if requirement.type in [ 'package', 'set_environment' ]:
+ dependency = self.find_dep( name=requirement.name,
+ version=requirement.version,
+ type=requirement.type,
+ **kwds )
+ dependency_commands = dependency.shell_commands( requirement )
+ if not dependency_commands:
+ log.warn( "Failed to resolve dependency on '%s', ignoring", requirement.name )
+ else:
+ commands.append( dependency_commands )
+ return commands
def find_dep( self, name, version=None, type='package', **kwds ):
for resolver in self.dependency_resolvers:
diff -r 54b9d1a2b1a971274c2d02f6856c4cd08b9edf88 -r 6f54a6d1fc23b16e11005f166fe84e73754f6cae lib/galaxy/tools/deps/requirements.py
--- /dev/null
+++ b/lib/galaxy/tools/deps/requirements.py
@@ -0,0 +1,57 @@
+from galaxy.util import xml_text
+
+DEFAULT_REQUIREMENT_TYPE = "package"
+DEFAULT_REQUIREMENT_VERSION = None
+
+
+class ToolRequirement( object ):
+ """
+ Represents an external requirement that must be available for the tool to
+ run (for example, a program, package, or library). Requirements can
+ optionally assert a specific version.
+ """
+ def __init__( self, name=None, type=None, version=None ):
+ self.name = name
+ self.type = type
+ self.version = version
+
+
+def parse_requirements_from_xml( xml_root ):
+ """
+
+ >>> from galaxy.util import parse_xml
+ >>> from elementtree import ElementTree
+ >>> def load_requirements( contents ):
+ ... contents_document = '''<tool><requirements>%s</requirements></tool>'''
+ ... root = ElementTree.fromstring( contents_document % contents )
+ ... return parse_requirements_from_xml( root )
+ >>> reqs = load_requirements('''<requirement>bwa</requirement>''')
+ >>> reqs[0].name
+ 'bwa'
+ >>> reqs[0].version is None
+ True
+ >>> reqs[0].type
+ 'package'
+ >>> reqs = load_requirements('''<requirement type="binary" version="1.3.3">cufflinks</requirement>''')
+ >>> reqs[0].name
+ 'cufflinks'
+ >>> reqs[0].version
+ '1.3.3'
+ >>> reqs[0].type
+ 'binary'
+ """
+ requirements_elem = xml_root.find( "requirements" )
+
+ requirement_elems = []
+ if requirements_elem:
+ requirement_elems = requirements_elem.findall( 'requirement' )
+
+ requirements = []
+ for requirement_elem in requirement_elems:
+ name = xml_text( requirement_elem )
+ type = requirement_elem.get( "type", DEFAULT_REQUIREMENT_TYPE )
+ version = requirement_elem.get( "version", DEFAULT_REQUIREMENT_VERSION )
+ requirement = ToolRequirement( name=name, type=type, version=version )
+ requirements.append( requirement )
+
+ return requirements
diff -r 54b9d1a2b1a971274c2d02f6856c4cd08b9edf88 -r 6f54a6d1fc23b16e11005f166fe84e73754f6cae test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -152,10 +152,25 @@
## shell_commands export it correctly.
env_path = __setup_galaxy_package_dep(base_path, TEST_REPO_NAME, TEST_VERSION, "export FOO=\"bar\"")
dependency = GalaxyPackageDependency(env_path, os.path.dirname(env_path), TEST_VERSION)
- command = ["bash", "-c", "%s; echo \"$FOO\"" % dependency.shell_commands(Bunch(type="package"))]
- process = Popen(command, stdout=PIPE)
- output = process.communicate()[0].strip()
- assert output == 'bar'
+ __assert_foo_exported( dependency.shell_commands( Bunch( type="package" ) ) )
+
+
+def test_shell_commands_built():
+ ## Test that dependency manager builds valid shell commands for a list of
+ ## requirements.
+ with __test_base_path() as base_path:
+ dm = DependencyManager( default_base_path=base_path )
+ __setup_galaxy_package_dep( base_path, TEST_REPO_NAME, TEST_VERSION, contents="export FOO=\"bar\"" )
+ mock_requirements = [ Bunch(type="package", version=TEST_VERSION, name=TEST_REPO_NAME ) ]
+ commands = dm.dependency_shell_commands( mock_requirements )
+ __assert_foo_exported( commands )
+
+
+def __assert_foo_exported( commands ):
+ command = ["bash", "-c", "%s; echo \"$FOO\"" % "".join(commands)]
+ process = Popen(command, stdout=PIPE)
+ output = process.communicate()[0].strip()
+ assert output == 'bar'
def __setup_galaxy_package_dep(base_path, name, version, contents=""):
https://bitbucket.org/galaxy/galaxy-central/commits/dc9c0e065be0/
Changeset: dc9c0e065be0
User: sjguest
Date: 2013-10-17 06:15:21
Summary: Fixed ModuleDependencyResolver, versionless fallback, and modulepath
Affected #: 1 file
diff -r 6f54a6d1fc23b16e11005f166fe84e73754f6cae -r dc9c0e065be01f8d16b35eec7b53098fdfa0fd05 lib/galaxy/tools/deps/resolvers/modules.py
--- a/lib/galaxy/tools/deps/resolvers/modules.py
+++ b/lib/galaxy/tools/deps/resolvers/modules.py
@@ -6,6 +6,7 @@
it, hence support for it will be minimal. The Galaxy team eagerly welcomes
community contribution and maintenance however.
"""
+from os import environ
from os.path import exists, isdir, join
from StringIO import StringIO
from subprocess import Popen, PIPE
@@ -17,8 +18,12 @@
log = logging.getLogger( __name__ )
-DEFAULT_MODULE_COMMAND = 'module'
-DEFAULT_MODULE_DIRECTORY = '/usr/share/modules/modulefiles'
+if environ.has_key('MODULEPATH'):
+ DEFAULT_MODULE_PATH = environ['MODULEPATH']
+elif environ.has_key('MODULESHOME'):
+ DEFAULT_MODULE_PATH = join(environ['MODULESHOME'], 'modulefiles')
+else:
+ DEFAULT_MODULE_PATH = '/usr/share/modules/modulefiles'
DEFAULT_INDICATOR = '(default)'
DEFAULT_MODULE_PREFETCH = "true"
UNKNOWN_FIND_BY_MESSAGE = "ModuleDependencyResolver does not know how to find modules by [%s], find_by should be one of %s"
@@ -28,13 +33,12 @@
resolver_type = "modules"
def __init__(self, dependency_manager, **kwds):
- self.module_command = kwds.get('command', DEFAULT_MODULE_COMMAND)
self.versionless = string_as_bool(kwds.get('versionless', 'false'))
find_by = kwds.get('find_by', 'avail')
prefetch = string_as_bool(kwds.get('prefetch', DEFAULT_MODULE_PREFETCH))
if find_by == 'directory':
- directory = kwds.get('directory', DEFAULT_MODULE_DIRECTORY)
- self.module_checker = DirectoryModuleChecker(self, directory, prefetch)
+ modulepath = kwds.get('modulepath', DEFAULT_MODULE_PATH)
+ self.module_checker = DirectoryModuleChecker(self, modulepath, prefetch)
elif find_by == 'avail':
self.module_checker = AvailModuleChecker(self, prefetch)
else:
@@ -44,11 +48,10 @@
if type != "package":
return INDETERMINATE_DEPENDENCY
- if self.versionless:
- version = None
-
if self.__has_module(name, version):
return ModuleDependency(self, name, version)
+ elif self.versionless and self.__has_module(name, None):
+ return ModuleDependency(self, name, None)
return INDETERMINATE_DEPENDENCY
@@ -58,23 +61,26 @@
class DirectoryModuleChecker(object):
- def __init__(self, module_dependency_resolver, directory, prefetch):
+ def __init__(self, module_dependency_resolver, modulepath, prefetch):
self.module_dependency_resolver = module_dependency_resolver
- self.directory = directory
+ self.directories = modulepath.split(':')
if prefetch:
log.warn("Created module dependency resolver with prefetch enabled, but directory module checker does not support this.")
pass
def has_module(self, module, version):
- module_directory = join(self.directory, module)
- has_module_directory = isdir( join( self.directory, module ) )
- if not version:
- has_module = has_module_directory
- else:
- modulefile = join( module_directory, version )
- has_modulefile = exists( modulefile )
- has_module = has_module_directory and has_modulefile
- return has_module
+ for directory in self.directories:
+ module_directory = join(directory, module)
+ has_module_directory = isdir( module_directory )
+ if not version:
+ has_module = has_module_directory or exists(module_directory) # could be a bare modulefile
+ else:
+ modulefile = join( module_directory, version )
+ has_modulefile = exists( modulefile )
+ has_module = has_module_directory and has_modulefile
+ if has_module:
+ return True
+ return False
class AvailModuleChecker(object):
@@ -102,7 +108,7 @@
return False
def __modules(self):
- raw_output = self.__module_avail_ouptut()
+ raw_output = self.__module_avail_output()
for line in StringIO(raw_output):
line = line and line.strip()
if not line or line.startswith("-"):
@@ -119,10 +125,9 @@
module_name = module_parts[0]
yield module_name, module_version
- def __module_avail_ouptut(self):
- avail_command = '%s avail' % self.module_dependency_resolver.module_command
- return Popen([avail_command], shell=True, stderr=PIPE).communicate()[1]
-
+ def __module_avail_output(self):
+ avail_command = ['modulecmd', 'sh', 'avail']
+ return Popen(avail_command, stderr=PIPE).communicate()[1]
class ModuleDependency(Dependency):
@@ -132,9 +137,10 @@
self.module_version = module_version
def shell_commands(self, requirement):
- command = '%s load %s' % (self.module_dependency_resolver.module_command, self.module_name)
+ module_to_load = self.module_name
if self.module_version:
- command = '%s/%s' % self.module_version
+ module_to_load = '%s/%s' % (self.module_name, self.module_version)
+ command = 'eval `modulecmd sh load %s`' % (module_to_load)
return command
__all__ = [ModuleDependencyResolver]
https://bitbucket.org/galaxy/galaxy-central/commits/93286e74e18b/
Changeset: 93286e74e18b
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Small touch ups to module code enhancements.
Affected #: 1 file
diff -r dc9c0e065be01f8d16b35eec7b53098fdfa0fd05 -r 93286e74e18b7dcb84aa11a326f08d5b9398a90d lib/galaxy/tools/deps/resolvers/modules.py
--- a/lib/galaxy/tools/deps/resolvers/modules.py
+++ b/lib/galaxy/tools/deps/resolvers/modules.py
@@ -6,7 +6,7 @@
it, hence support for it will be minimal. The Galaxy team eagerly welcomes
community contribution and maintenance however.
"""
-from os import environ
+from os import environ, pathsep
from os.path import exists, isdir, join
from StringIO import StringIO
from subprocess import Popen, PIPE
@@ -18,12 +18,7 @@
log = logging.getLogger( __name__ )
-if environ.has_key('MODULEPATH'):
- DEFAULT_MODULE_PATH = environ['MODULEPATH']
-elif environ.has_key('MODULESHOME'):
- DEFAULT_MODULE_PATH = join(environ['MODULESHOME'], 'modulefiles')
-else:
- DEFAULT_MODULE_PATH = '/usr/share/modules/modulefiles'
+DEFAULT_MODULE_PATH = '/usr/share/modules/modulefiles'
DEFAULT_INDICATOR = '(default)'
DEFAULT_MODULE_PREFETCH = "true"
UNKNOWN_FIND_BY_MESSAGE = "ModuleDependencyResolver does not know how to find modules by [%s], find_by should be one of %s"
@@ -44,6 +39,15 @@
else:
raise Exception(UNKNOWN_FIND_BY_MESSAGE % (find_by, ["avail", "directory"]))
+ def __default_modulespath(self):
+ if 'MODULEPATH' in environ:
+ module_path = environ['MODULEPATH']
+ elif 'MODULESHOME' in environ:
+ module_path = join(environ['MODULESHOME'], 'modulefiles')
+ else:
+ module_path = DEFAULT_MODULE_PATH
+ return module_path
+
def resolve( self, name, version, type, **kwds ):
if type != "package":
return INDETERMINATE_DEPENDENCY
@@ -63,24 +67,25 @@
def __init__(self, module_dependency_resolver, modulepath, prefetch):
self.module_dependency_resolver = module_dependency_resolver
- self.directories = modulepath.split(':')
+ self.directories = modulepath.split(pathsep)
if prefetch:
log.warn("Created module dependency resolver with prefetch enabled, but directory module checker does not support this.")
pass
def has_module(self, module, version):
+ has_module = False
for directory in self.directories:
module_directory = join(directory, module)
has_module_directory = isdir( module_directory )
if not version:
- has_module = has_module_directory or exists(module_directory) # could be a bare modulefile
+ has_module = has_module_directory or exists(module_directory) # could be a bare modulefile
else:
modulefile = join( module_directory, version )
has_modulefile = exists( modulefile )
has_module = has_module_directory and has_modulefile
if has_module:
- return True
- return False
+ break
+ return has_module
class AvailModuleChecker(object):
@@ -129,6 +134,7 @@
avail_command = ['modulecmd', 'sh', 'avail']
return Popen(avail_command, stderr=PIPE).communicate()[1]
+
class ModuleDependency(Dependency):
def __init__(self, module_dependency_resolver, module_name, module_version=None):
https://bitbucket.org/galaxy/galaxy-central/commits/66222d30c816/
Changeset: 66222d30c816
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Add unit tests for module behavior contributed by Simon Guest.
Affected #: 2 files
diff -r 93286e74e18b7dcb84aa11a326f08d5b9398a90d -r 66222d30c81606b57d09e195365707c9f61b2946 lib/galaxy/tools/deps/resolvers/modules.py
--- a/lib/galaxy/tools/deps/resolvers/modules.py
+++ b/lib/galaxy/tools/deps/resolvers/modules.py
@@ -17,7 +17,7 @@
import logging
log = logging.getLogger( __name__ )
-
+DEFAULT_MODULECMD_PATH = "modulecmd" # Just check path
DEFAULT_MODULE_PATH = '/usr/share/modules/modulefiles'
DEFAULT_INDICATOR = '(default)'
DEFAULT_MODULE_PREFETCH = "true"
@@ -31,8 +31,9 @@
self.versionless = string_as_bool(kwds.get('versionless', 'false'))
find_by = kwds.get('find_by', 'avail')
prefetch = string_as_bool(kwds.get('prefetch', DEFAULT_MODULE_PREFETCH))
+ self.modulecmd = kwds.get('modulecmd', DEFAULT_MODULECMD_PATH)
if find_by == 'directory':
- modulepath = kwds.get('modulepath', DEFAULT_MODULE_PATH)
+ modulepath = kwds.get('modulepath', self.__default_modulespath())
self.module_checker = DirectoryModuleChecker(self, modulepath, prefetch)
elif find_by == 'avail':
self.module_checker = AvailModuleChecker(self, prefetch)
@@ -131,7 +132,7 @@
yield module_name, module_version
def __module_avail_output(self):
- avail_command = ['modulecmd', 'sh', 'avail']
+ avail_command = [self.module_dependency_resolver.modulecmd, 'sh', 'avail']
return Popen(avail_command, stderr=PIPE).communicate()[1]
@@ -146,7 +147,7 @@
module_to_load = self.module_name
if self.module_version:
module_to_load = '%s/%s' % (self.module_name, self.module_version)
- command = 'eval `modulecmd sh load %s`' % (module_to_load)
+ command = 'eval `%s sh load %s`' % (self.module_dependency_resolver.modulecmd, module_to_load)
return command
__all__ = [ModuleDependencyResolver]
diff -r 93286e74e18b7dcb84aa11a326f08d5b9398a90d -r 66222d30c81606b57d09e195365707c9f61b2946 test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -1,11 +1,11 @@
import tempfile
import os.path
from stat import S_IXUSR
-from os import makedirs, symlink, stat, chmod
+from os import makedirs, stat, symlink, chmod, environ
from shutil import rmtree
from galaxy.tools.deps import DependencyManager, INDETERMINATE_DEPENDENCY
from galaxy.tools.deps.resolvers.galaxy_packages import GalaxyPackageDependency
-from galaxy.tools.deps.resolvers.modules import ModuleDependencyResolver
+from galaxy.tools.deps.resolvers.modules import ModuleDependencyResolver, ModuleDependency
from galaxy.util.bunch import Bunch
from contextlib import contextmanager
from subprocess import Popen, PIPE
@@ -92,7 +92,7 @@
gx_env_path = __setup_galaxy_package_dep(base_path, TEST_REPO_NAME, TEST_VERSION)
ts_env_path = os.path.join(ts_package_dir, "env.sh")
dependency = dm.find_dep( TEST_REPO_NAME, version=TEST_VERSION, type='package', installed_tool_dependencies=[test_repo] )
- assert dependency.script != gx_env_path # Not the galaxy path, it should be the tool shed path used.
+ assert dependency.script != gx_env_path # Not the galaxy path, it should be the tool shed path used.
assert dependency.script == ts_env_path
@@ -104,9 +104,8 @@
def test_module_dependency_resolver():
with __test_base_path() as temp_directory:
- module_script = os.path.join(temp_directory, "module")
- with open(module_script, 'w') as f:
- f.write('''#!/bin/sh
+ module_script = os.path.join(temp_directory, "modulecmd")
+ __write_script(module_script, '''#!/bin/sh
cat %s/example_output 1>&2;
''' % temp_directory)
with open(os.path.join(temp_directory, "example_output"), "w") as f:
@@ -131,9 +130,7 @@
advisor/2013/update2 intel/11.1.080 mkl/10.2.5.035
advisor/2013/update3 intel/12.0 mkl/10.2.7.041
''')
- st = os.stat(module_script)
- chmod(module_script, st.st_mode | S_IXUSR)
- resolver = ModuleDependencyResolver(None, command=module_script)
+ resolver = ModuleDependencyResolver(None, modulecmd=module_script)
module = resolver.resolve( name="R", version=None, type="package" )
assert module.module_name == "R"
assert module.module_version == None
@@ -146,6 +143,31 @@
assert module == INDETERMINATE_DEPENDENCY
+def test_module_dependency():
+ with __test_base_path() as temp_directory:
+ ## Create mock modulecmd script that just exports a variable
+ ## the way modulecmd sh load would, but also validate correct
+ ## module name and version are coming through.
+ mock_modulecmd = os.path.join(temp_directory, 'modulecmd')
+ __write_script(mock_modulecmd, '''#!/bin/sh
+if [ $3 != "foomodule/1.0" ];
+then
+ exit 1
+fi
+echo 'FOO="bar"'
+''')
+ resolver = Bunch(modulecmd=mock_modulecmd)
+ dependency = ModuleDependency(resolver, "foomodule", "1.0")
+ __assert_foo_exported( dependency.shell_commands( Bunch( type="package" ) ) )
+
+
+def __write_script(path, contents):
+ with open(path, 'w') as f:
+ f.write(contents)
+ st = stat(path)
+ chmod(path, st.st_mode | S_IXUSR)
+
+
def test_galaxy_dependency_object_script():
with __test_base_path() as base_path:
## Create env.sh file that just exports variable FOO and verify it
@@ -170,7 +192,7 @@
command = ["bash", "-c", "%s; echo \"$FOO\"" % "".join(commands)]
process = Popen(command, stdout=PIPE)
output = process.communicate()[0].strip()
- assert output == 'bar'
+ assert output == 'bar', "Command %s exports FOO as %s, not bar" % (command, output)
def __setup_galaxy_package_dep(base_path, name, version, contents=""):
@@ -261,17 +283,68 @@
</dependency_resolvers>
''') as dependency_resolvers:
module_resolver = dependency_resolvers[0]
- assert module_resolver.module_command == "module"
assert module_resolver.module_checker.__class__.__name__ == "AvailModuleChecker"
+def test_config_modulepath():
+ # Test reads and splits MODULEPATH if modulepath is not specified.
+ with __parse_resolvers('''<dependency_resolvers>
+ <modules find_by="directory" modulepath="/opt/modules/modulefiles:/usr/local/modules/modulefiles" />
+</dependency_resolvers>
+''') as dependency_resolvers:
+ assert dependency_resolvers[0].module_checker.directories == ["/opt/modules/modulefiles", "/usr/local/modules/modulefiles"]
+
+
+def test_config_MODULEPATH():
+ # Test reads and splits MODULEPATH if modulepath is not specified.
+ with __environ({"MODULEPATH": "/opt/modules/modulefiles:/usr/local/modules/modulefiles"}):
+ with __parse_resolvers('''<dependency_resolvers>
+ <modules find_by="directory" />
+</dependency_resolvers>
+''') as dependency_resolvers:
+ assert dependency_resolvers[0].module_checker.directories == ["/opt/modules/modulefiles", "/usr/local/modules/modulefiles"]
+
+
+def test_config_MODULESHOME():
+ # Test fallbacks to read MODULESHOME if modulepath is not specified and
+ # neither is MODULEPATH.
+ with __environ({"MODULESHOME": "/opt/modules"}, remove="MODULEPATH"):
+ with __parse_resolvers('''<dependency_resolvers>
+ <modules find_by="directory" />
+</dependency_resolvers>
+''') as dependency_resolvers:
+ assert dependency_resolvers[0].module_checker.directories == ["/opt/modules/modulefiles"]
+
+
def test_config_module_directory_searcher():
with __parse_resolvers('''<dependency_resolvers>
- <modules find_by="directory" directory="/opt/Modules/modulefiles" />
+ <modules find_by="directory" modulepath="/opt/Modules/modulefiles" /></dependency_resolvers>
''') as dependency_resolvers:
module_resolver = dependency_resolvers[0]
- assert module_resolver.module_checker.directory == "/opt/Modules/modulefiles"
+ assert module_resolver.module_checker.directories == ["/opt/Modules/modulefiles"]
+
+
+@contextmanager
+def __environ(values, remove=[]):
+ """
+ Modify the environment for a test, adding/updating values in dict `values` and
+ removing any environment variables mentioned in list `remove`.
+ """
+ new_keys = set(environ.keys()) - set(values.keys())
+ old_environ = environ.copy()
+ try:
+ environ.update(values)
+ for to_remove in remove:
+ try:
+ del environ[remove]
+ except KeyError:
+ pass
+ yield
+ finally:
+ environ.update(old_environ)
+ for key in new_keys:
+ del environ[key]
@contextmanager
@@ -282,4 +355,3 @@
f.flush()
dm = DependencyManager( default_base_path=base_path, conf_file=f.name )
yield dm.dependency_resolvers
-
https://bitbucket.org/galaxy/galaxy-central/commits/a96f6548bfc3/
Changeset: a96f6548bfc3
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Add method to DependencyManager determining if tool shed dependencies used.
Affected #: 2 files
diff -r 66222d30c81606b57d09e195365707c9f61b2946 -r a96f6548bfc3f223f93a8b619e0ad2d4634a3f3f lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -56,6 +56,9 @@
commands.append( dependency_commands )
return commands
+ def uses_tool_shed_dependencies(self):
+ return any( map( lambda r: isinstance( r, ToolShedPackageDependencyResolver ), self.dependency_resolvers ) )
+
def find_dep( self, name, version=None, type='package', **kwds ):
for resolver in self.dependency_resolvers:
dependency = resolver.resolve( name, version, type, **kwds )
diff -r 66222d30c81606b57d09e195365707c9f61b2946 -r a96f6548bfc3f223f93a8b619e0ad2d4634a3f3f test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -277,6 +277,20 @@
assert dependency_resolvers[0].base_path != dependency_resolvers[2].base_path
+def test_uses_tool_shed_dependencies():
+ with __dependency_manager('''<dependency_resolvers>
+ <galaxy_packages />
+</dependency_resolvers>
+''') as dm:
+ assert not dm.uses_tool_shed_dependencies()
+
+ with __dependency_manager('''<dependency_resolvers>
+ <tool_shed_packages />
+</dependency_resolvers>
+''') as dm:
+ assert dm.uses_tool_shed_dependencies()
+
+
def test_config_module_defaults():
with __parse_resolvers('''<dependency_resolvers><modules />
@@ -349,9 +363,15 @@
@contextmanager
def __parse_resolvers(xml_content):
+ with __dependency_manager(xml_content) as dm:
+ yield dm.dependency_resolvers
+
+
+@contextmanager
+def __dependency_manager(xml_content):
with __test_base_path() as base_path:
f = tempfile.NamedTemporaryFile()
f.write(xml_content)
f.flush()
dm = DependencyManager( default_base_path=base_path, conf_file=f.name )
- yield dm.dependency_resolvers
+ yield dm
https://bitbucket.org/galaxy/galaxy-central/commits/8e001dc9675c/
Changeset: 8e001dc9675c
User: jmchilton
Date: 2013-10-17 06:15:21
Summary: Dependencies: Do not attempt to install package dependencies in tool shed if they will be ignored.
That is if dependency resolvers are configured to ignore tool shed dependencies as outlined by Simon Guest, though stopping short of marking these dependencies as installed - they will still be marked as errors for now. TODO: Determine what to do with dependencies that can be resolved (just mark as installed? a brand new state? mark as never installed?).
Affected #: 1 file
diff -r a96f6548bfc3f223f93a8b619e0ad2d4634a3f3f -r 8e001dc9675c105d83d83f6d5a756c11759e8bb2 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -6,6 +6,7 @@
from galaxy import util
from galaxy import web
from galaxy.util import json
+from galaxy.tools.deps.resolvers import INDETERMINATE_DEPENDENCY
import tool_shed.util.shed_util_common as suc
from tool_shed.util import common_util
from tool_shed.util import container_util
@@ -457,7 +458,19 @@
tool_dependency = tool_dependencies[ index ]
if tool_dependency.can_install:
try:
- tool_dependency = install_package( app, elem, tool_shed_repository, tool_dependencies=tool_dependencies )
+ dependencies_ignored = app.toolbox.dependency_manager and not app.toolbox.dependency_manager.uses_tool_shed_dependencies()
+ if dependencies_ignored:
+ log.info("Skipping package %s, tool shed dependency resolver not enabled." % package_name)
+ # Tool dependency resolves have been configured and they do not
+ # include the tool shed. Do not install package.
+ status = app.model.ToolDependency.installation_status.ERROR
+ if app.toolbox.dependency_manager.find_dep( package_name, package_version, type='package') != INDETERMINATE_DEPENDENCY:
+ ## TODO: Do something here such as marking it installed or
+ ## configured externally.
+ pass
+ tool_dependency.status = status
+ else:
+ tool_dependency = install_package( app, elem, tool_shed_repository, tool_dependencies=tool_dependencies )
except Exception, e:
error_message = "Error installing tool dependency %s version %s: %s" % ( str( package_name ), str( package_version ), str( e ) )
log.exception( error_message )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jmchilton: Tool shed install move_file bug fix (divergence between 67be847 and 8f2b113).
by commits-noreply@bitbucket.org 16 Oct '13
by commits-noreply@bitbucket.org 16 Oct '13
16 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ebd092733e55/
Changeset: ebd092733e55
User: jmchilton
Date: 2013-10-17 06:00:55
Summary: Tool shed install move_file bug fix (divergence between 67be847 and 8f2b113).
Affected #: 1 file
diff -r 7c05567cdc1b9d1c08e59c2927fb9e85009262e4 -r ebd092733e552ef6a846d55b69d22dc779dfcc5f lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -560,8 +560,8 @@
# <source>misc/some_file</source>
# <destination>$INSTALL_DIR/bin</destination>
# </action>
- action_dict[ 'source' ] = evaluate_template( action_elem.find( 'source' ).text )
- action_dict[ 'destination' ] = evaluate_template( action_elem.find( 'destination' ).text )
+ action_dict[ 'source' ] = td_common_util.evaluate_template( action_elem.find( 'source' ).text, install_dir )
+ action_dict[ 'destination' ] = td_common_util.evaluate_template( action_elem.find( 'destination' ).text, install_dir )
action_dict[ 'rename_to' ] = action_elem.get( 'rename_to' )
elif action_type == 'set_environment':
# <action type="set_environment">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/913f3ab84629/
Changeset: 913f3ab84629
Branch: abrenner/histogram2xml-python-module-rpy-requires-1379010965302
User: abrenner
Date: 2013-09-12 20:36:20
Summary: histogram2.xml python module rpy requires R package.
Affected #: 1 file
diff -r b9505d30f85f6e5e49640c6428185ab427899ec4 -r 913f3ab84629b1c2dff39a8788e1e0201f82ecaa tools/plotting/histogram2.xml
--- a/tools/plotting/histogram2.xml
+++ b/tools/plotting/histogram2.xml
@@ -27,6 +27,7 @@
</tests><requirements><requirement type="python-module">rpy</requirement>
+ <requirement type="package">R</requirement></requirements><help>
https://bitbucket.org/galaxy/galaxy-central/commits/f3e736fe03df/
Changeset: f3e736fe03df
User: abrenner
Date: 2013-09-12 20:39:39
Summary: Merged in abrenner/histogram2xml-python-module-rpy-requires-1379010965302 (pull request #1)
histogram2.xml python module rpy requires R package.
Affected #: 1 file
diff -r b9505d30f85f6e5e49640c6428185ab427899ec4 -r f3e736fe03df3a6dd5438c12ba35ea791a1eaca9 tools/plotting/histogram2.xml
--- a/tools/plotting/histogram2.xml
+++ b/tools/plotting/histogram2.xml
@@ -27,6 +27,7 @@
</tests><requirements><requirement type="python-module">rpy</requirement>
+ <requirement type="package">R</requirement></requirements><help>
https://bitbucket.org/galaxy/galaxy-central/commits/7c05567cdc1b/
Changeset: 7c05567cdc1b
User: jmchilton
Date: 2013-10-17 05:12:40
Summary: Merged in abrenner/galaxy-central (pull request #215)
histogram2 - RPY requires R - missing from requirements tag
Thanks for the contribution!
Affected #: 1 file
diff -r 3825c8d493649a72c6fc65bdd06b2362f0bf5930 -r 7c05567cdc1b9d1c08e59c2927fb9e85009262e4 tools/plotting/histogram2.xml
--- a/tools/plotting/histogram2.xml
+++ b/tools/plotting/histogram2.xml
@@ -27,6 +27,7 @@
</tests><requirements><requirement type="python-module">rpy</requirement>
+ <requirement type="package">R</requirement></requirements><help>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
6 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/432999eabbaa/
Changeset: 432999eabbaa
User: jmchilton
Date: 2013-10-17 00:37:37
Summary: Add job_script module from LWR including GALAXY_SLOTS logic.
Use new job_script module in the DRMAA runner. This implements the long discussed GALAXY_SLOTS logic for PBS/TORQUE, SLURM, and grid engine when coming through the DRMAA runner.
Affected #: 6 files
diff -r b681b2aeddbc52c3d953abcb431688df6fb8a005 -r 432999eabbaa97528a2bd37f1b99de86b39a924f lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -15,6 +15,7 @@
from galaxy.jobs.command_factory import build_command
from galaxy import model
from galaxy.util import DATABASE_MAX_STRING_SIZE, shrink_stream_by_size
+from galaxy.jobs.runners.util.job_script import job_script
log = logging.getLogger( __name__ )
@@ -218,6 +219,16 @@
external_metadata_proc.wait()
log.debug( 'execution of external set_meta for job %d finished' % job_wrapper.job_id )
+ def get_job_file(self, job_wrapper, **kwds):
+ options = dict(
+ galaxy_lib=job_wrapper.galaxy_lib_dir,
+ env_setup_commands=job_wrapper.get_env_setup_clause(),
+ working_directory=os.path.abspath( job_wrapper.working_directory ),
+ command=job_wrapper.runner_command_line,
+ )
+ options.update(**kwds)
+ return job_script(**options)
+
class AsynchronousJobState( object ):
"""
diff -r b681b2aeddbc52c3d953abcb431688df6fb8a005 -r 432999eabbaa97528a2bd37f1b99de86b39a924f lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -37,29 +37,10 @@
drmaa.JobState.FAILED: 'job finished, but failed',
}
-# The last four lines (following the last fi) will:
-# - setup the env
-# - move to the job wrapper's working directory
-# - execute the command
-# - take the command's exit code ($?) and write it to a file.
-drm_template = """#!/bin/sh
-GALAXY_LIB="%s"
-if [ "$GALAXY_LIB" != "None" ]; then
- if [ -n "$PYTHONPATH" ]; then
- PYTHONPATH="$GALAXY_LIB:$PYTHONPATH"
- else
- PYTHONPATH="$GALAXY_LIB"
- fi
- export PYTHONPATH
-fi
-%s
-cd %s
-%s
-echo $? > %s
-"""
DRMAA_jobTemplate_attributes = [ 'args', 'remoteCommand', 'outputPath', 'errorPath', 'nativeSpecification',
- 'jobName','email','project' ]
+ 'jobName', 'email', 'project' ]
+
class DRMAAJobRunner( AsynchronousJobRunner ):
"""
@@ -138,12 +119,7 @@
jt.nativeSpecification = native_spec
# fill in the DRM's job run template
- script = drm_template % ( job_wrapper.galaxy_lib_dir,
- job_wrapper.get_env_setup_clause(),
- os.path.abspath( job_wrapper.working_directory ),
- command_line,
- ajs.exit_code_file )
-
+ script = self.get_job_file(job_wrapper, exit_code_path=ajs.exit_code_file)
try:
fh = file( ajs.job_file, "w" )
fh.write( script )
diff -r b681b2aeddbc52c3d953abcb431688df6fb8a005 -r 432999eabbaa97528a2bd37f1b99de86b39a924f lib/galaxy/jobs/runners/util/__init__.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/__init__.py
@@ -0,0 +1,9 @@
+"""
+This module and its submodules contains utilities for running external
+processes and interfacing with job managers. This module should contain
+functionality shared between Galaxy and the LWR.
+"""
+try:
+ from galaxy.util.bunch import Bunch
+except ImportError:
+ from lwr.util import Bunch
diff -r b681b2aeddbc52c3d953abcb431688df6fb8a005 -r 432999eabbaa97528a2bd37f1b99de86b39a924f lib/galaxy/jobs/runners/util/job_script/CLUSTER_SLOTS_STATEMENT.sh
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/job_script/CLUSTER_SLOTS_STATEMENT.sh
@@ -0,0 +1,11 @@
+export GALAXY_SLOTS_CONFIGURED="1"
+if [ -n "$SLURM_JOB_NUM_NODES" ]; then
+ GALAXY_SLOTS="$SLURM_JOB_NUM_NODES"
+elif [ -n "$NSLOTS" ]; then
+ GALAXY_SLOTS="$NSLOTS"
+elif [ -f "$PBS_NODEFILE" ]; then
+ GALAXY_SLOTS=`wc -l < $PBS_NODEFILE`
+else
+ GALAXY_SLOTS="1"
+ unset GALAXY_SLOTS_CONFIGURED
+fi
diff -r b681b2aeddbc52c3d953abcb431688df6fb8a005 -r 432999eabbaa97528a2bd37f1b99de86b39a924f lib/galaxy/jobs/runners/util/job_script/DEFAULT_JOB_FILE_TEMPLATE.sh
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/job_script/DEFAULT_JOB_FILE_TEMPLATE.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+$headers
+$slots_statement
+export GALAXY_SLOTS
+GALAXY_LIB="$galaxy_lib"
+if [ "$GALAXY_LIB" != "None" ]; then
+ if [ -n "$PYTHONPATH" ]; then
+ PYTHONPATH="$GALAXY_LIB:$PYTHONPATH"
+ else
+ PYTHONPATH="$GALAXY_LIB"
+ fi
+ export PYTHONPATH
+fi
+$env_setup_commands
+cd $working_directory
+$command
+echo $? > $exit_code_path
diff -r b681b2aeddbc52c3d953abcb431688df6fb8a005 -r 432999eabbaa97528a2bd37f1b99de86b39a924f lib/galaxy/jobs/runners/util/job_script/__init__.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/job_script/__init__.py
@@ -0,0 +1,54 @@
+from string import Template
+from pkg_resources import resource_string
+
+DEFAULT_JOB_FILE_TEMPLATE = Template(
+ resource_string(__name__, 'DEFAULT_JOB_FILE_TEMPLATE.sh')
+)
+
+SLOTS_STATEMENT_CLUSTER_DEFAULT = \
+ resource_string(__name__, 'CLUSTER_SLOTS_STATEMENT.sh')
+
+SLOTS_STATEMENT_SINGLE = """
+GALAXY_SLOTS="1"
+"""
+
+REQUIRED_TEMPLATE_PARAMS = ['working_directory', 'command', 'exit_code_path']
+OPTIONAL_TEMPLATE_PARAMS = {
+ 'galaxy_lib': None,
+ 'headers': '',
+ 'env_setup_commands': '',
+ 'slots_statement': SLOTS_STATEMENT_CLUSTER_DEFAULT,
+}
+
+
+def job_script(template=DEFAULT_JOB_FILE_TEMPLATE, **kwds):
+ """
+
+ >>> has_exception = False
+ >>> try: job_script()
+ ... except Exception as e: has_exception = True
+ >>> has_exception
+ True
+ >>> script = job_script(working_directory='wd', command='uptime', exit_code_path='ec')
+ >>> '\\nuptime\\n' in script
+ True
+ >>> 'echo $? > ec' in script
+ True
+ >>> 'GALAXY_LIB="None"' in script
+ True
+ >>> script.startswith('#!/bin/sh\\n#PBS -test\\n')
+ False
+ >>> script = job_script(working_directory='wd', command='uptime', exit_code_path='ec', headers='#PBS -test')
+ >>> script.startswith('#!/bin/sh\\n#PBS -test\\n')
+ True
+ >>> script = job_script(working_directory='wd', command='uptime', exit_code_path='ec', slots_statement='GALAXY_SLOTS="$SLURM_JOB_NUM_NODES"')
+ >>> script.find('GALAXY_SLOTS="$SLURM_JOB_NUM_NODES"\\nexport GALAXY_SLOTS\\n') > 0
+ True
+ """
+ if any([param not in kwds for param in REQUIRED_TEMPLATE_PARAMS]):
+ raise Exception("Failed to create job_script, a required parameter is missing.")
+ template_params = OPTIONAL_TEMPLATE_PARAMS.copy()
+ template_params.update(**kwds)
+ if not isinstance(template, Template):
+ template = Template(template)
+ return template.safe_substitute(template_params)
https://bitbucket.org/galaxy/galaxy-central/commits/126d5c5b9a49/
Changeset: 126d5c5b9a49
User: jmchilton
Date: 2013-10-17 00:37:37
Summary: Condor job runner enhancements.
Implement exit code handling and submission script parameters (job destination parameter <param id="submit_xxxx">yyyy</param> will be passed through as xxxx = yyyy).
Bring in LWR helper scripts to implement most of these and clean up things. Bring in LWR external.py for parsing IDs coming from resource managers, seems like overkill for now in Galaxy because just used in condor, but in LWR also used for its version of the CLI runners (will merge those changes in down the road). Utilize job_scripts module for building script (this is what fixes exit code handling).
TODO: GALAXY_SLOTS logic still not implemented, but should be easier now.
Affected #: 3 files
diff -r 432999eabbaa97528a2bd37f1b99de86b39a924f -r 126d5c5b9a4938909b518cb176dc47890e1c13e5 lib/galaxy/jobs/runners/condor.py
--- a/lib/galaxy/jobs/runners/condor.py
+++ b/lib/galaxy/jobs/runners/condor.py
@@ -3,40 +3,18 @@
"""
import os
-import re
-import sys
-import time
import logging
-import subprocess
from galaxy import model
from galaxy.jobs.runners import AsynchronousJobState, AsynchronousJobRunner
-
-from galaxy.util import asbool
+from galaxy.jobs.runners.util.condor import submission_params, build_submit_description
+from galaxy.jobs.runners.util.condor import condor_submit, condor_stop
+from galaxy.jobs.runners.util.condor import summarize_condor_log
log = logging.getLogger( __name__ )
__all__ = [ 'CondorJobRunner' ]
-drm_template = """#!/bin/sh
-GALAXY_LIB="%s"
-if [ "$GALAXY_LIB" != "None" ]; then
- if [ -n "$PYTHONPATH" ]; then
- PYTHONPATH="$GALAXY_LIB:$PYTHONPATH"
- else
- PYTHONPATH="$GALAXY_LIB"
- fi
- export PYTHONPATH
-fi
-cd %s
-%s
-"""
-
-default_query_classad = dict(
- universe = 'vanilla',
- getenv = 'true',
- notification = 'NEVER',
-)
class CondorJobState( AsynchronousJobState ):
def __init__( self, **kwargs ):
@@ -49,21 +27,22 @@
self.user_log = None
self.user_log_size = 0
+
class CondorJobRunner( AsynchronousJobRunner ):
"""
Job runner backed by a finite pool of worker threads. FIFO scheduling
"""
runner_name = "CondorRunner"
+
def __init__( self, app, nworkers ):
"""Initialize this job runner and start the monitor thread"""
super( CondorJobRunner, self ).__init__( app, nworkers )
self._init_monitor_thread()
self._init_worker_threads()
- # superclass url_to_destination is fine - condor runner does not take params
-
def queue_job( self, job_wrapper ):
"""Create job script and submit it to the DRM"""
+
# prepare the job
if not self.prepare_job( job_wrapper, include_metadata=True ):
return
@@ -78,25 +57,30 @@
galaxy_id_tag = job_wrapper.get_id_tag()
# get destination params
- query_params = default_query_classad.copy()
- query_params.update( job_destination.params )
+ query_params = submission_params(**job_destination.params)
# define job attributes
- cjs = CondorJobState( files_dir=self.app.config.cluster_files_directory, job_wrapper=job_wrapper )
- cjs.user_log = os.path.join( self.app.config.cluster_files_directory, 'galaxy_%s.condor.log' % galaxy_id_tag )
+ cjs = CondorJobState(
+ files_dir=self.app.config.cluster_files_directory,
+ job_wrapper=job_wrapper
+ )
+
+ cluster_directory = self.app.config.cluster_files_directory
+ cjs.user_log = os.path.join( cluster_directory, 'galaxy_%s.condor.log' % galaxy_id_tag )
cjs.register_cleanup_file_attribute( 'user_log' )
- submit_file = os.path.join( self.app.config.cluster_files_directory, 'galaxy_%s.condor.desc' % galaxy_id_tag )
+ submit_file = os.path.join( cluster_directory, 'galaxy_%s.condor.desc' % galaxy_id_tag )
executable = cjs.job_file
- submit_desc = [ ]
- for k, v in query_params.items():
- submit_desc.append( '%s = %s' % ( k, v ) )
- submit_desc.append( 'executable = ' + executable )
- submit_desc.append( 'output = ' + cjs.output_file )
- submit_desc.append( 'error = ' + cjs.error_file )
- submit_desc.append( 'log = ' + cjs.user_log )
- submit_desc.append( 'queue' )
- script = drm_template % (job_wrapper.galaxy_lib_dir, os.path.abspath( job_wrapper.working_directory ), command_line)
+ build_submit_params = dict(
+ executable=executable,
+ output=cjs.output_file,
+ error=cjs.error_file,
+ user_log=cjs.user_log,
+ query_params=query_params,
+ )
+
+ submit_file_contents = build_submit_description(**build_submit_params)
+ script = self.get_job_file( job_wrapper, exit_code_path=cjs.exit_code_file )
try:
fh = file( executable, "w" )
fh.write( script )
@@ -108,10 +92,7 @@
return
try:
- fh = file( submit_file, 'w' )
- for line in submit_desc:
- fh.write( line + '\n' )
- fh.close()
+ open(submit_file, "w").write(submit_file_contents)
except:
if self.app.config.cleanup_job == "always":
cjs.cleanup()
@@ -132,23 +113,9 @@
log.debug( "(%s) submitting file %s" % ( galaxy_id_tag, executable ) )
log.debug( "(%s) command is: %s" % ( galaxy_id_tag, command_line ) )
- s_out = ''
- external_job_id = None
- try:
- submit = subprocess.Popen( ( 'condor_submit', submit_file ), stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
- s_out, s_err = submit.communicate()
- if submit.returncode == 0:
- match = re.search( 'submitted to cluster (\\d+).', s_out )
- if match is None:
- s_out = 'Failed to find job id from condor_submit'
- else:
- external_job_id = match.group( 1 )
- except Exception, e:
- # TODO Add extra except for OSError?
- s_out = str(e)
-
+ external_job_id, message = condor_submit(submit_file)
if external_job_id is None:
- log.debug( "condor_submit failed for job %s: %s" % (job_wrapper.get_id_tag(), s_out) )
+ log.debug( "condor_submit failed for job %s: %s" % (job_wrapper.get_id_tag(), message) )
if self.app.config.cleanup_job == "always":
os.unlink( submit_file )
cjs.cleanup()
@@ -177,29 +144,17 @@
new_watched = []
for cjs in self.watched:
job_id = cjs.job_id
- log_job_id = job_id.zfill(3)
galaxy_id_tag = cjs.job_wrapper.get_id_tag()
- job_running = False
- job_complete = False
- job_failed = False
try:
if os.stat( cjs.user_log ).st_size == cjs.user_log_size:
new_watched.append( cjs )
continue
- with open(cjs.user_log, 'r') as fh:
- for line in fh:
- if '001 (' + log_job_id + '.' in line:
- job_running = True
- if '004 (' + log_job_id + '.' in line:
- job_running = False
- if '007 (' + log_job_id + '.' in line:
- job_running = False
- if '005 (' + log_job_id + '.' in line:
- job_complete = True
- if '009 (' + log_job_id + '.' in line:
- job_failed = True
- cjs.user_log_size = fh.tell()
- except Exception, e:
+ s1, s4, s7, s5, s9, log_size = summarize_condor_log(cjs.user_log, job_id)
+ job_running = s1 and not (s4 or s7)
+ job_complete = s5
+ job_failed = s9
+ cjs.user_log_size = log_size
+ except Exception:
# so we don't kill the monitor thread
log.exception( "(%s/%s) Unable to check job status" % ( galaxy_id_tag, job_id ) )
log.warning( "(%s/%s) job will now be errored" % ( galaxy_id_tag, job_id ) )
@@ -230,13 +185,10 @@
def stop_job( self, job ):
"""Attempts to delete a job from the DRM queue"""
- try:
- subprocess.check_call( ( 'condor_rm', job.job_runner_external_id ) )
- log.debug( "(%s/%s) Removed from DRM queue at user's request" % ( job.id, job.job_runner_external_id ) )
- except subprocess.CalledProcessError:
- log.debug( "(%s/%s) User killed running job, but condor_rm failed" % ( job.id, job.job_runner_external_id ) )
- except Exception, e:
- log.debug( "(%s/%s) User killed running job, but error encountered removing from Condor queue: %s" % ( job.id, job.job_runner_external_id, e ) )
+ external_id = job.job_runner_external_id
+ failure_message = condor_stop(external_id)
+ if failure_message:
+ log.debug("(%s/%s). Failed to stop condor %s" % (external_id, failure_message))
def recover( self, job, job_wrapper ):
"""Recovers jobs stuck in the queued/running state when Galaxy started"""
diff -r 432999eabbaa97528a2bd37f1b99de86b39a924f -r 126d5c5b9a4938909b518cb176dc47890e1c13e5 lib/galaxy/jobs/runners/util/condor/__init__.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/condor/__init__.py
@@ -0,0 +1,114 @@
+"""
+Condor helper utilities.
+"""
+from subprocess import Popen, PIPE, STDOUT, check_call, CalledProcessError
+from ..external import parse_external_id
+
+DEFAULT_QUERY_CLASSAD = dict(
+ universe='vanilla',
+ getenv='true',
+ notification='NEVER',
+)
+
+PROBLEM_RUNNING_CONDOR_SUBMIT = \
+ "Problem encountered while running condor_submit."
+PROBLEM_PARSING_EXTERNAL_ID = \
+ "Failed to find job id from condor_submit"
+
+SUBMIT_PARAM_PREFIX = "submit_"
+
+
+def submission_params(**kwds):
+ submission_params = {}
+ for key, value in kwds.iteritems():
+ key = key.lower()
+ if key.startswith(SUBMIT_PARAM_PREFIX):
+ condor_key = key[len(SUBMIT_PARAM_PREFIX):]
+ submission_params[condor_key] = value
+ return submission_params
+
+
+def build_submit_description(executable, output, error, user_log, query_params):
+ """
+ Build up the contents of a condor submit description file.
+
+ >>> submit_args = dict(executable='/path/to/script', output='o', error='e', user_log='ul')
+ >>> submit_args['query_params'] = dict()
+ >>> default_description = build_submit_description(**submit_args)
+ >>> assert 'executable = /path/to/script' in default_description
+ >>> assert 'output = o' in default_description
+ >>> assert 'error = e' in default_description
+ >>> assert 'queue' in default_description
+ >>> assert 'universe = vanilla' in default_description
+ >>> assert 'universe = standard' not in default_description
+ >>> submit_args['query_params'] = dict(universe='standard')
+ >>> std_description = build_submit_description(**submit_args)
+ >>> assert 'universe = vanilla' not in std_description
+ >>> assert 'universe = standard' in std_description
+ """
+ all_query_params = DEFAULT_QUERY_CLASSAD.copy()
+ all_query_params.update(query_params)
+
+ submit_description = []
+ for key, value in all_query_params.items():
+ submit_description.append('%s = %s' % (key, value))
+ submit_description.append('executable = ' + executable)
+ submit_description.append('output = ' + output)
+ submit_description.append('error = ' + error)
+ submit_description.append('log = ' + user_log)
+ submit_description.append('queue')
+ return '\n'.join(submit_description)
+
+
+def condor_submit(submit_file):
+ """
+ Submit a condor job described by the given file. Parse an external id for
+ the submission or return None and a reason for the failure.
+ """
+ external_id = None
+ try:
+ submit = Popen(('condor_submit', submit_file), stdout=PIPE, stderr=STDOUT)
+ message, _ = submit.communicate()
+ if submit.returncode == 0:
+ external_id = parse_external_id(message, type='condor')
+ else:
+ message = PROBLEM_PARSING_EXTERNAL_ID
+ except Exception as e:
+ message = str(e)
+ return external_id, message
+
+
+def condor_stop(external_id):
+ """
+ Stop running condor job and return a failure_message if this
+ fails.
+ """
+ failure_message = None
+ try:
+ check_call(('condor_rm', external_id))
+ except CalledProcessError:
+ failure_message = "condor_rm failed"
+ except Exception as e:
+ "error encountered calling condor_rm: %s" % e
+ return failure_message
+
+
+def summarize_condor_log(log_file, external_id):
+ """
+ """
+ log_job_id = external_id.zfill(3)
+ s1 = s4 = s7 = s5 = s9 = False
+ with open(log_file, 'r') as log_handle:
+ for line in log_handle:
+ if '001 (' + log_job_id + '.' in line:
+ s1 = True
+ if '004 (' + log_job_id + '.' in line:
+ s4 = True
+ if '007 (' + log_job_id + '.' in line:
+ s7 = True
+ if '005 (' + log_job_id + '.' in line:
+ s5 = True
+ if '009 (' + log_job_id + '.' in line:
+ s9 = True
+ file_size = log_handle.tell()
+ return s1, s4, s7, s5, s9, file_size
diff -r 432999eabbaa97528a2bd37f1b99de86b39a924f -r 126d5c5b9a4938909b518cb176dc47890e1c13e5 lib/galaxy/jobs/runners/util/external.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/external.py
@@ -0,0 +1,37 @@
+from re import search
+
+EXTERNAL_ID_TYPE_ANY = None
+
+EXTERNAL_ID_PATTERNS = [
+ ('condor', r'submitted to cluster (\d+)\.'),
+ ('slurm', r'Submitted batch job (\w+)'),
+ ('torque', r'(.+)'), # Default 'pattern' assumed by Galaxy code circa August 2013.
+]
+
+
+def parse_external_id(output, type=EXTERNAL_ID_TYPE_ANY):
+ """
+ Attempt to parse the output of job submission commands for an external id.__doc__
+
+ >>> parse_external_id("12345.pbsmanager")
+ '12345.pbsmanager'
+ >>> parse_external_id('Submitted batch job 185')
+ '185'
+ >>> parse_external_id('Submitted batch job 185', type='torque')
+ 'Submitted batch job 185'
+ >>> parse_external_id('submitted to cluster 125.')
+ '125'
+ >>> parse_external_id('submitted to cluster 125.', type='slurm')
+ >>>
+ """
+ external_id = None
+ for pattern_type, pattern in EXTERNAL_ID_PATTERNS:
+ if type != EXTERNAL_ID_TYPE_ANY and type != pattern_type:
+ continue
+
+ match = search(pattern, output)
+ if match:
+ external_id = match.group(1)
+ break
+
+ return external_id
https://bitbucket.org/galaxy/galaxy-central/commits/93caaf7f9bdd/
Changeset: 93caaf7f9bdd
User: jmchilton
Date: 2013-10-17 00:37:37
Summary: Implement GALAXY_SLOTS logic for condor runner.
Just pass through submit_request_cpus as GALAXY_SLOTS if set in job_conf.xml.
Affected #: 1 file
diff -r 126d5c5b9a4938909b518cb176dc47890e1c13e5 -r 93caaf7f9bdd6429a250921fa080bfe71028f8c0 lib/galaxy/jobs/runners/condor.py
--- a/lib/galaxy/jobs/runners/condor.py
+++ b/lib/galaxy/jobs/runners/condor.py
@@ -58,6 +58,11 @@
# get destination params
query_params = submission_params(**job_destination.params)
+ galaxy_slots = query_params.get('request_cpus', None)
+ if galaxy_slots:
+ galaxy_slots_statement = 'GALAXY_SLOTS="%s"; export GALAXY_SLOTS_CONFIGURED="1"' % galaxy_slots
+ else:
+ galaxy_slots_statement = 'GALAXY_SLOTS="1"'
# define job attributes
cjs = CondorJobState(
@@ -80,7 +85,11 @@
)
submit_file_contents = build_submit_description(**build_submit_params)
- script = self.get_job_file( job_wrapper, exit_code_path=cjs.exit_code_file )
+ script = self.get_job_file(
+ job_wrapper,
+ exit_code_path=cjs.exit_code_file,
+ slots_statement=galaxy_slots_statement,
+ )
try:
fh = file( executable, "w" )
fh.write( script )
https://bitbucket.org/galaxy/galaxy-central/commits/5715b876f91b/
Changeset: 5715b876f91b
User: jmchilton
Date: 2013-10-17 00:37:37
Summary: PBS job runner improvements (including GALAXY_SLOTS).
Rework to use get_job_file; this will enable GALAXY_SLOTS logic for the PBS runner. Bring in staging fixes found in pull request #194 from CSIRO (stephen_mcmahon_). Small PEP-8 tweaks.
Affected #: 1 file
diff -r 93caaf7f9bdd6429a250921fa080bfe71028f8c0 -r 5715b876f91bd34d179dc9d48e7ad1a9e6baf285 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -1,9 +1,10 @@
-import os, logging, threading, time, traceback
+import os
+import logging
+import time
+import traceback
from datetime import timedelta
-from Queue import Queue, Empty
from galaxy import model
-from galaxy.datatypes.data import nice_size
from galaxy.util.bunch import Bunch
from galaxy.util import DATABASE_MAX_STRING_SIZE, shrink_stream_by_size
from galaxy.jobs import JobDestination
@@ -35,42 +36,14 @@
# The last two lines execute the command and then retrieve the command's
# exit code ($?) and write it to a file.
-pbs_template = """#!/bin/sh
-GALAXY_LIB="%s"
-if [ "$GALAXY_LIB" != "None" ]; then
- if [ -n "$PYTHONPATH" ]; then
- export PYTHONPATH="$GALAXY_LIB:$PYTHONPATH"
- else
- export PYTHONPATH="$GALAXY_LIB"
- fi
-fi
-%s
-cd %s
-%s
-echo $? > %s
-"""
-
-# The last two lines execute the command and then retrieve the command's
-# exit code ($?) and write it to a file.
-pbs_symlink_template = """#!/bin/sh
-GALAXY_LIB="%s"
-if [ "$GALAXY_LIB" != "None" ]; then
- if [ -n "$PYTHONPATH" ]; then
- export PYTHONPATH="$GALAXY_LIB:$PYTHONPATH"
- else
- export PYTHONPATH="$GALAXY_LIB"
- fi
-fi
+pbs_symlink_template = """
for dataset in %s; do
dir=`dirname $dataset`
file=`basename $dataset`
[ ! -d $dir ] && mkdir -p $dir
[ ! -e $dataset ] && ln -s %s/$file $dataset
done
-%s
-cd %s
-%s
-echo $? > %s
+mkdir -p %s
"""
PBS_ARGMAP = {
@@ -109,6 +82,7 @@
-8: "exec() of user command failed",
}
+
class PBSJobRunner( AsynchronousJobRunner ):
"""
Job runner backed by a finite pool of worker threads. FIFO scheduling
@@ -273,6 +247,7 @@
pbs_ofile = self.app.config.pbs_application_server + ':' + ofile
pbs_efile = self.app.config.pbs_application_server + ':' + efile
output_files = [ str( o ) for o in output_fnames ]
+ output_files.append(ecfile)
stagein = self.get_stage_in_out( job_wrapper.get_input_fnames() + output_files, symlink=True )
stageout = self.get_stage_in_out( output_files )
attrs = [
@@ -300,20 +275,20 @@
# write the job script
if self.app.config.pbs_stage_path != '':
- script = pbs_symlink_template % ( job_wrapper.galaxy_lib_dir,
- " ".join( job_wrapper.get_input_fnames() + output_files ),
- self.app.config.pbs_stage_path,
- job_wrapper.get_env_setup_clause(),
- exec_dir,
- command_line,
- ecfile )
+ # touch the ecfile so that it gets staged
+ with file(ecfile, 'a'):
+ os.utime(ecfile, None)
+ stage_commands = pbs_symlink_template % (
+ " ".join( job_wrapper.get_input_fnames() + output_files ),
+ self.app.config.pbs_stage_path,
+ exec_dir,
+ )
else:
- script = pbs_template % ( job_wrapper.galaxy_lib_dir,
- job_wrapper.get_env_setup_clause(),
- exec_dir,
- command_line,
- ecfile )
+ stage_commands = ''
+
+ env_setup_commands = '%s\n%s' % (stage_commands, job_wrapper.get_env_setup_clause())
+ script = self.get_job_file(job_wrapper, exit_code_path=ecfile, env_setup_commands=env_setup_commands)
job_file = "%s/%s.sh" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
fh = file(job_file, "w")
fh.write(script)
https://bitbucket.org/galaxy/galaxy-central/commits/84784515c048/
Changeset: 84784515c048
User: jmchilton
Date: 2013-10-17 00:37:37
Summary: Add GALAXY_SLOTS logic to local runner.
Affected #: 1 file
diff -r 5715b876f91bd34d179dc9d48e7ad1a9e6baf285 -r 84784515c0481cfddbfc05e983efced55972e9a0 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -42,6 +42,21 @@
super( LocalJobRunner, self ).__init__( app, nworkers )
self._init_worker_threads()
+ def __command_line( self, job_wrapper ):
+ """
+ """
+ command_line = job_wrapper.runner_command_line
+
+ ## slots would be cleaner name, but don't want deployers to see examples and think it
+ ## is going to work with other job runners.
+ slots = job_wrapper.job_destination.params.get( "local_slots", None )
+ command_line = command_line.lstrip( " ;" )
+ if slots:
+ command_line = 'export GALAXY_SLOTS="%d"; export GALAXY_SLOTS_CONFIGURED="1"; %s' % ( int( slots ), command_line )
+ else:
+ command_line = 'export GALAXY_SLOTS="1"; %s' % command_line
+ return command_line
+
def queue_job( self, job_wrapper ):
# prepare the job
if not self.prepare_job( job_wrapper ):
@@ -51,7 +66,7 @@
exit_code = 0
# command line has been added to the wrapper by prepare_job()
- command_line = job_wrapper.runner_command_line
+ command_line = self.__command_line( job_wrapper )
job_id = job_wrapper.get_id_tag()
https://bitbucket.org/galaxy/galaxy-central/commits/3825c8d49364/
Changeset: 3825c8d49364
User: jmchilton
Date: 2013-10-17 00:37:37
Summary: Improved fix for tools producing command lines with trailing semi-colons.
Affected #: 1 file
diff -r 84784515c0481cfddbfc05e983efced55972e9a0 -r 3825c8d493649a72c6fc65bdd06b2362f0bf5930 lib/galaxy/jobs/command_factory.py
--- a/lib/galaxy/jobs/command_factory.py
+++ b/lib/galaxy/jobs/command_factory.py
@@ -21,7 +21,7 @@
# Remove trailing semi-colon so we can start hacking up this command.
# TODO: Refactor to compose a list and join with ';', would be more clean.
- commands = commands.rstrip(";")
+ commands = commands.rstrip("; ")
# Prepend version string
if job_wrapper.version_string_cmd:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0