galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
December 2013
- 1 participants
- 207 discussions
commit/galaxy-central: carlfeberhard: HDA API: raise error when attempting to undelete purged HDA
by commits-noreply@bitbucket.org 05 Dec '13
05 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d500c13a819f/
Changeset: d500c13a819f
User: carlfeberhard
Date: 2013-12-05 19:27:01
Summary: HDA API: raise error when attempting to undelete purged HDA
Affected #: 1 file
diff -r 1941221a1eacf48035827e845be1d21e67ba87e5 -r d500c13a819f7a2a6dd5540b5f2ceaa020ecfe2e lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -1802,6 +1802,10 @@
if new_val == old_val:
continue
+ # special cases here
+ if key == 'deleted' and new_val is False and self.purged:
+ raise Exception( 'Cannot undelete a purged dataset' )
+
self.__setattr__( key, new_val )
changed[ key ] = new_val
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: carlfeberhard: Popupmenus: correctly assign default button div
by commits-noreply@bitbucket.org 05 Dec '13
05 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1941221a1eac/
Changeset: 1941221a1eac
User: carlfeberhard
Date: 2013-12-05 19:19:37
Summary: Popupmenus: correctly assign default button div
Affected #: 2 files
diff -r 48d838357ceea59c58640dabf7580bc1947d7577 -r 1941221a1eacf48035827e845be1d21e67ba87e5 static/scripts/mvc/ui.js
--- a/static/scripts/mvc/ui.js
+++ b/static/scripts/mvc/ui.js
@@ -196,7 +196,10 @@
*/
initialize: function( $button, options ){
// default settings
- this.$button = $button || $( '<div/>' );
+ this.$button = $button;
+ if( !this.$button.size() ){
+ this.$button = $( '<div/>' );
+ }
this.options = options || [];
// set up button click -> open menu behavior
diff -r 48d838357ceea59c58640dabf7580bc1947d7577 -r 1941221a1eacf48035827e845be1d21e67ba87e5 static/scripts/packed/mvc/ui.js
--- a/static/scripts/packed/mvc/ui.js
+++ b/static/scripts/packed/mvc/ui.js
@@ -1,1 +1,1 @@
-var IconButton=Backbone.Model.extend({defaults:{title:"",icon_class:"",on_click:null,menu_options:null,is_menu_button:true,id:null,href:null,target:null,enabled:true,visible:true,tooltip_config:{}}});var IconButtonView=Backbone.View.extend({initialize:function(){this.model.attributes.tooltip_config={placement:"bottom"};this.model.bind("change",this.render,this)},render:function(){this.$el.tooltip("hide");var a=this.template(this.model.toJSON());a.tooltip(this.model.get("tooltip_config"));this.$el.replaceWith(a);this.setElement(a);return this},events:{click:"click"},click:function(a){if(_.isFunction(this.model.get("on_click"))){this.model.get("on_click")(a);return false}return true},template:function(b){var a='title="'+b.title+'" class="icon-button';if(b.is_menu_button){a+=" menu-button"}a+=" "+b.icon_class;if(!b.enabled){a+="_disabled"}a+='"';if(b.id){a+=' id="'+b.id+'"'}a+=' href="'+b.href+'"';if(b.target){a+=' target="'+b.target+'"'}if(!b.visible){a+=' style="display: none;"'}if(b.enabled){a="<a "+a+"/>"}else{a="<span "+a+"/>"}return $(a)}});var IconButtonCollection=Backbone.Collection.extend({model:IconButton});var IconButtonMenuView=Backbone.View.extend({tagName:"div",initialize:function(){this.render()},render:function(){var a=this;this.collection.each(function(d){var b=$("<a/>").attr("href","javascript:void(0)").attr("title",d.attributes.title).addClass("icon-button menu-button").addClass(d.attributes.icon_class).appendTo(a.$el).click(d.attributes.on_click);if(d.attributes.tooltip_config){b.tooltip(d.attributes.tooltip_config)}var c=d.get("options");if(c){make_popupmenu(b,c)}});return this}});var create_icon_buttons_menu=function(b,a){if(!a){a={}}var c=new IconButtonCollection(_.map(b,function(d){return new IconButton(_.extend(d,a))}));return new IconButtonMenuView({collection:c})};var Grid=Backbone.Collection.extend({});var GridView=Backbone.View.extend({});var 
PopupMenu=Backbone.View.extend({initialize:function(b,a){this.$button=b||$("<div/>");this.options=a||[];var c=this;this.$button.click(function(d){$(".popmenu-wrapper").remove();c._renderAndShow(d);return false})},_renderAndShow:function(a){this.render();this.$el.appendTo("body").css(this._getShownPosition(a)).show();this._setUpCloseBehavior()},render:function(){this.$el.addClass("popmenu-wrapper").hide().css({position:"absolute"}).html(this.template(this.$button.attr("id"),this.options));if(this.options.length){var a=this;this.$el.find("li").each(function(c,b){var d=a.options[c];if(d.func){$(this).children("a.popupmenu-option").click(function(e){d.func.call(a,e,d)})}})}return this},template:function(b,a){return['<ul id="',b,'-menu" class="dropdown-menu">',this._templateOptions(a),"</ul>"].join("")},_templateOptions:function(a){if(!a.length){return"<li>(no options)</li>"}return _.map(a,function(d){if(d.divider){return'<li class="divider"></li>'}else{if(d.header){return['<li class="head"><a href="javascript:void(0);">',d.html,"</a></li>"].join("")}}var c=d.href||"javascript:void(0);",e=(d.target)?(' target="'+d.target+'"'):(""),b=(d.checked)?('<span class="fa fa-check"></span>'):("");return['<li><a class="popupmenu-option" href="',c,'"',e,">",b,d.html,"</a></li>"].join("")}).join("")},_getShownPosition:function(b){var c=this.$el.width();var a=b.pageX-c/2;a=Math.min(a,$(document).scrollLeft()+$(window).width()-c-5);a=Math.max(a,$(document).scrollLeft()+5);return{top:b.pageY,left:a}},_setUpCloseBehavior:function(){var c=this;function 
a(e){$(document).off("click.close_popup");if(window.parent!==window){try{$(window.parent.document).off("click.close_popup")}catch(d){}}else{try{$("iframe#galaxy_main").contents().off("click.close_popup")}catch(d){}}c.remove()}$("html").one("click.close_popup",a);if(window.parent!==window){try{$(window.parent.document).find("html").one("click.close_popup",a)}catch(b){}}else{try{$("iframe#galaxy_main").contents().one("click.close_popup",a)}catch(b){}}},addItem:function(b,a){a=(a>=0)?a:this.options.length;this.options.splice(a,0,b);return this},removeItem:function(a){if(a>=0){this.options.splice(a,1)}return this},findIndexByHtml:function(b){for(var a=0;a<this.options.length;a++){if(_.has(this.options[a],"html")&&(this.options[a].html===b)){return a}}return null},findItemByHtml:function(a){return this.options[(this.findIndexByHtml(a))]},toString:function(){return"PopupMenu"}});PopupMenu.make_popupmenu=function(b,c){var a=[];_.each(c,function(f,d){var e={html:d};if(f===null){e.header=true}else{if(jQuery.type(f)==="function"){e.func=f}}a.push(e)});return new PopupMenu($(b),a)};PopupMenu.convertLinksToOptions=function(c,a){c=$(c);a=a||"a";var b=[];c.find(a).each(function(g,e){var f={},d=$(g);f.html=d.text();if(d.attr("href")){var j=d.attr("href"),k=d.attr("target"),h=d.attr("confirm");f.func=function(){if((h)&&(!confirm(h))){return}switch(k){case"_parent":window.parent.location=j;break;case"_top":window.top.location=j;break;default:window.location=j}}}b.push(f)});return b};PopupMenu.fromExistingDom=function(d,c,a){d=$(d);c=$(c);var b=PopupMenu.convertLinksToOptions(c,a);c.remove();return new PopupMenu(d,b)};PopupMenu.make_popup_menus=function(c,b,d){c=c||document;b=b||"div[popupmenu]";d=d||function(e,f){return"#"+e.attr("popupmenu")};var a=[];$(c).find(b).each(function(){var e=$(this),f=$(c).find(d(e,c));a.push(PopupMenu.fromDom(f,e));f.addClass("popup")});return a};var 
faIconButton=function(a){a=a||{};a.tooltipConfig=a.tooltipConfig||{placement:"bottom"};a.classes=["icon-btn"].concat(a.classes||[]);if(a.disabled){a.classes.push("disabled")}var b=['<a class="',a.classes.join(" "),'"',((a.title)?(' title="'+a.title+'"'):("")),((a.target)?(' target="'+a.target+'"'):("")),' href="',((a.href)?(a.href):("javascript:void(0);")),'">','<span class="fa ',a.faIcon,'"></span>',"</a>"].join("");var c=$(b).tooltip(a.tooltipConfig);if(_.isFunction(a.onclick)){c.click(a.onclick)}return c};var searchInput=function(k){var a=27,h=13,i=$("<div/>"),b={initialVal:"",name:"search",placeholder:"search",classes:"",onclear:function(){},onsearch:function(l){},minSearchLen:0,escWillClear:true,oninit:function(){}};if(jQuery.type(k)==="object"){k=jQuery.extend(true,b,k)}function d(l){var m=$(this).parent().children("input");m.val("");m.trigger("clear:searchInput");k.onclear()}function j(m,l){$(this).trigger("search:searchInput",l);k.onsearch(l)}function c(){return['<input type="text" name="',k.name,'" placeholder="',k.placeholder,'" ','class="search-query ',k.classes,'" ',"/>"].join("")}function g(){return $(c()).css({width:"100%","padding-right":"24px"}).focus(function(l){$(this).select()}).keyup(function(m){if(m.which===a&&k.escWillClear){d.call(this,m)}else{var l=$(this).val();if((m.which===h)||(k.minSearchLen&&l.length>=k.minSearchLen)){j.call(this,m,l)}else{if(!l.length){d.call(this,m)}}}}).val(k.initialVal)}function f(){return'<span class="search-clear fa fa-times-circle" title="'+_l("clear search (esc)")+'"></span>'}function e(){return $(f()).css({position:"absolute",right:"15px","font-size":"1.4em","line-height":"23px",color:"grey"}).tooltip({placement:"bottom"}).click(function(l){d.call(this,l)})}return i.append([g(),e()])};function LoadingIndicator(a,c){var b=this;c=jQuery.extend({cover:false},c||{});function d(){var e=['<div class="loading-indicator">','<div class="loading-indicator-text">','<span class="fa fa-spinner fa-spin fa-lg"></span>','<span 
class="loading-indicator-message">loading...</span>',"</div>","</div>"].join("\n");var g=$(e).hide().css(c.css||{position:"fixed"}),f=g.children(".loading-indicator-text");if(c.cover){g.css({"z-index":2,top:a.css("top"),bottom:a.css("bottom"),left:a.css("left"),right:a.css("right"),opacity:0.5,"background-color":"white","text-align":"center"});f=g.children(".loading-indicator-text").css({"margin-top":"20px"})}else{f=g.children(".loading-indicator-text").css({margin:"12px 0px 0px 10px",opacity:"0.85",color:"grey"});f.children(".loading-indicator-message").css({margin:"0px 8px 0px 0px","font-style":"italic"})}return g}b.show=function(f,e,g){f=f||"loading...";e=e||"fast";b.$indicator=d().insertBefore(a);b.message(f);b.$indicator.fadeIn(e,g);return b};b.message=function(e){b.$indicator.find("i").text(e)};b.hide=function(e,f){e=e||"fast";if(b.$indicator&&b.$indicator.size()){b.$indicator.fadeOut(e,function(){b.$indicator.remove();if(f){f()}})}else{if(f){f()}}return b};return b};
\ No newline at end of file
+var IconButton=Backbone.Model.extend({defaults:{title:"",icon_class:"",on_click:null,menu_options:null,is_menu_button:true,id:null,href:null,target:null,enabled:true,visible:true,tooltip_config:{}}});var IconButtonView=Backbone.View.extend({initialize:function(){this.model.attributes.tooltip_config={placement:"bottom"};this.model.bind("change",this.render,this)},render:function(){this.$el.tooltip("hide");var a=this.template(this.model.toJSON());a.tooltip(this.model.get("tooltip_config"));this.$el.replaceWith(a);this.setElement(a);return this},events:{click:"click"},click:function(a){if(_.isFunction(this.model.get("on_click"))){this.model.get("on_click")(a);return false}return true},template:function(b){var a='title="'+b.title+'" class="icon-button';if(b.is_menu_button){a+=" menu-button"}a+=" "+b.icon_class;if(!b.enabled){a+="_disabled"}a+='"';if(b.id){a+=' id="'+b.id+'"'}a+=' href="'+b.href+'"';if(b.target){a+=' target="'+b.target+'"'}if(!b.visible){a+=' style="display: none;"'}if(b.enabled){a="<a "+a+"/>"}else{a="<span "+a+"/>"}return $(a)}});var IconButtonCollection=Backbone.Collection.extend({model:IconButton});var IconButtonMenuView=Backbone.View.extend({tagName:"div",initialize:function(){this.render()},render:function(){var a=this;this.collection.each(function(d){var b=$("<a/>").attr("href","javascript:void(0)").attr("title",d.attributes.title).addClass("icon-button menu-button").addClass(d.attributes.icon_class).appendTo(a.$el).click(d.attributes.on_click);if(d.attributes.tooltip_config){b.tooltip(d.attributes.tooltip_config)}var c=d.get("options");if(c){make_popupmenu(b,c)}});return this}});var create_icon_buttons_menu=function(b,a){if(!a){a={}}var c=new IconButtonCollection(_.map(b,function(d){return new IconButton(_.extend(d,a))}));return new IconButtonMenuView({collection:c})};var Grid=Backbone.Collection.extend({});var GridView=Backbone.View.extend({});var 
PopupMenu=Backbone.View.extend({initialize:function(b,a){this.$button=b;if(!this.$button.size()){this.$button=$("<div/>")}this.options=a||[];var c=this;this.$button.click(function(d){$(".popmenu-wrapper").remove();c._renderAndShow(d);return false})},_renderAndShow:function(a){this.render();this.$el.appendTo("body").css(this._getShownPosition(a)).show();this._setUpCloseBehavior()},render:function(){this.$el.addClass("popmenu-wrapper").hide().css({position:"absolute"}).html(this.template(this.$button.attr("id"),this.options));if(this.options.length){var a=this;this.$el.find("li").each(function(c,b){var d=a.options[c];if(d.func){$(this).children("a.popupmenu-option").click(function(e){d.func.call(a,e,d)})}})}return this},template:function(b,a){return['<ul id="',b,'-menu" class="dropdown-menu">',this._templateOptions(a),"</ul>"].join("")},_templateOptions:function(a){if(!a.length){return"<li>(no options)</li>"}return _.map(a,function(d){if(d.divider){return'<li class="divider"></li>'}else{if(d.header){return['<li class="head"><a href="javascript:void(0);">',d.html,"</a></li>"].join("")}}var c=d.href||"javascript:void(0);",e=(d.target)?(' target="'+d.target+'"'):(""),b=(d.checked)?('<span class="fa fa-check"></span>'):("");return['<li><a class="popupmenu-option" href="',c,'"',e,">",b,d.html,"</a></li>"].join("")}).join("")},_getShownPosition:function(b){var c=this.$el.width();var a=b.pageX-c/2;a=Math.min(a,$(document).scrollLeft()+$(window).width()-c-5);a=Math.max(a,$(document).scrollLeft()+5);return{top:b.pageY,left:a}},_setUpCloseBehavior:function(){var c=this;function 
a(e){$(document).off("click.close_popup");if(window.parent!==window){try{$(window.parent.document).off("click.close_popup")}catch(d){}}else{try{$("iframe#galaxy_main").contents().off("click.close_popup")}catch(d){}}c.remove()}$("html").one("click.close_popup",a);if(window.parent!==window){try{$(window.parent.document).find("html").one("click.close_popup",a)}catch(b){}}else{try{$("iframe#galaxy_main").contents().one("click.close_popup",a)}catch(b){}}},addItem:function(b,a){a=(a>=0)?a:this.options.length;this.options.splice(a,0,b);return this},removeItem:function(a){if(a>=0){this.options.splice(a,1)}return this},findIndexByHtml:function(b){for(var a=0;a<this.options.length;a++){if(_.has(this.options[a],"html")&&(this.options[a].html===b)){return a}}return null},findItemByHtml:function(a){return this.options[(this.findIndexByHtml(a))]},toString:function(){return"PopupMenu"}});PopupMenu.make_popupmenu=function(b,c){var a=[];_.each(c,function(f,d){var e={html:d};if(f===null){e.header=true}else{if(jQuery.type(f)==="function"){e.func=f}}a.push(e)});return new PopupMenu($(b),a)};PopupMenu.convertLinksToOptions=function(c,a){c=$(c);a=a||"a";var b=[];c.find(a).each(function(g,e){var f={},d=$(g);f.html=d.text();if(d.attr("href")){var j=d.attr("href"),k=d.attr("target"),h=d.attr("confirm");f.func=function(){if((h)&&(!confirm(h))){return}switch(k){case"_parent":window.parent.location=j;break;case"_top":window.top.location=j;break;default:window.location=j}}}b.push(f)});return b};PopupMenu.fromExistingDom=function(d,c,a){d=$(d);c=$(c);var b=PopupMenu.convertLinksToOptions(c,a);c.remove();return new PopupMenu(d,b)};PopupMenu.make_popup_menus=function(c,b,d){c=c||document;b=b||"div[popupmenu]";d=d||function(e,f){return"#"+e.attr("popupmenu")};var a=[];$(c).find(b).each(function(){var e=$(this),f=$(c).find(d(e,c));a.push(PopupMenu.fromDom(f,e));f.addClass("popup")});return a};var 
faIconButton=function(a){a=a||{};a.tooltipConfig=a.tooltipConfig||{placement:"bottom"};a.classes=["icon-btn"].concat(a.classes||[]);if(a.disabled){a.classes.push("disabled")}var b=['<a class="',a.classes.join(" "),'"',((a.title)?(' title="'+a.title+'"'):("")),((a.target)?(' target="'+a.target+'"'):("")),' href="',((a.href)?(a.href):("javascript:void(0);")),'">','<span class="fa ',a.faIcon,'"></span>',"</a>"].join("");var c=$(b).tooltip(a.tooltipConfig);if(_.isFunction(a.onclick)){c.click(a.onclick)}return c};var searchInput=function(k){var a=27,h=13,i=$("<div/>"),b={initialVal:"",name:"search",placeholder:"search",classes:"",onclear:function(){},onsearch:function(l){},minSearchLen:0,escWillClear:true,oninit:function(){}};if(jQuery.type(k)==="object"){k=jQuery.extend(true,b,k)}function d(l){var m=$(this).parent().children("input");m.val("");m.trigger("clear:searchInput");k.onclear()}function j(m,l){$(this).trigger("search:searchInput",l);k.onsearch(l)}function c(){return['<input type="text" name="',k.name,'" placeholder="',k.placeholder,'" ','class="search-query ',k.classes,'" ',"/>"].join("")}function g(){return $(c()).css({width:"100%","padding-right":"24px"}).focus(function(l){$(this).select()}).keyup(function(m){if(m.which===a&&k.escWillClear){d.call(this,m)}else{var l=$(this).val();if((m.which===h)||(k.minSearchLen&&l.length>=k.minSearchLen)){j.call(this,m,l)}else{if(!l.length){d.call(this,m)}}}}).val(k.initialVal)}function f(){return'<span class="search-clear fa fa-times-circle" title="'+_l("clear search (esc)")+'"></span>'}function e(){return $(f()).css({position:"absolute",right:"15px","font-size":"1.4em","line-height":"23px",color:"grey"}).tooltip({placement:"bottom"}).click(function(l){d.call(this,l)})}return i.append([g(),e()])};function LoadingIndicator(a,c){var b=this;c=jQuery.extend({cover:false},c||{});function d(){var e=['<div class="loading-indicator">','<div class="loading-indicator-text">','<span class="fa fa-spinner fa-spin fa-lg"></span>','<span 
class="loading-indicator-message">loading...</span>',"</div>","</div>"].join("\n");var g=$(e).hide().css(c.css||{position:"fixed"}),f=g.children(".loading-indicator-text");if(c.cover){g.css({"z-index":2,top:a.css("top"),bottom:a.css("bottom"),left:a.css("left"),right:a.css("right"),opacity:0.5,"background-color":"white","text-align":"center"});f=g.children(".loading-indicator-text").css({"margin-top":"20px"})}else{f=g.children(".loading-indicator-text").css({margin:"12px 0px 0px 10px",opacity:"0.85",color:"grey"});f.children(".loading-indicator-message").css({margin:"0px 8px 0px 0px","font-style":"italic"})}return g}b.show=function(f,e,g){f=f||"loading...";e=e||"fast";b.$indicator=d().insertBefore(a);b.message(f);b.$indicator.fadeIn(e,g);return b};b.message=function(e){b.$indicator.find("i").text(e)};b.hide=function(e,f){e=e||"fast";if(b.$indicator&&b.$indicator.size()){b.$indicator.fadeOut(e,function(){b.$indicator.remove();if(f){f()}})}else{if(f){f()}}return b};return b};
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d38f79874652/
Changeset: d38f79874652
User: jmchilton
Date: 2013-12-05 18:02:01
Summary: More fixes to install_and_test_tool_shed_repositories for session globals going away.
Affected #: 2 files
diff -r e962e0406fc631aa9915e3a0522e10a35d8b0def -r d38f79874652f7a6bd898aa582ec186e9445513c test/install_and_test_tool_shed_repositories/base/test_db_util.py
--- a/test/install_and_test_tool_shed_repositories/base/test_db_util.py
+++ b/test/install_and_test_tool_shed_repositories/base/test_db_util.py
@@ -2,8 +2,7 @@
import galaxy.model as model
import galaxy.model.tool_shed_install as install_model
from galaxy.model.orm import and_
-from functional.database_contexts import galaxy_context as sa_session
-from functional.database_contexts import install_context as install_session
+from functional import database_contexts
log = logging.getLogger(__name__)
@@ -13,25 +12,25 @@
# twilltestcase.py (prehaps rename to install_refresh or refresh_repository).
def delete_obj( obj ):
- sa_session.delete( obj )
- sa_session.flush()
+ database_contexts.galaxy_context.delete( obj )
+ database_contexts.galaxy_context.flush()
def delete_user_roles( user ):
for ura in user.roles:
- sa_session.delete( ura )
- sa_session.flush()
+ database_contexts.galaxy_context.delete( ura )
+ database_contexts.galaxy_context.flush()
def flush( obj ):
- sa_session.add( obj )
- sa_session.flush()
+ database_contexts.galaxy_context.add( obj )
+ database_contexts.galaxy_context.flush()
def get_repository( repository_id ):
- return install_session.query( install_model.ToolShedRepository ) \
+ return database_contexts.install_context.query( install_model.ToolShedRepository ) \
.filter( install_model.ToolShedRepository.table.c.id == repository_id ) \
.first()
def get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision ):
- return install_session.query( install_model.ToolShedRepository ) \
+ return database_contexts.install_context.query( install_model.ToolShedRepository ) \
.filter( and_( install_model.ToolShedRepository.table.c.name == name,
install_model.ToolShedRepository.table.c.owner == owner,
install_model.ToolShedRepository.table.c.installed_changeset_revision == changeset_revision ) ) \
@@ -46,27 +45,27 @@
def get_tool_dependencies_for_installed_repository( repository_id, status=None, exclude_status=None ):
if status is not None:
- return install_session.query( install_model.ToolDependency ) \
+ return database_contexts.install_context.query( install_model.ToolDependency ) \
.filter( and_( install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
install_model.ToolDependency.table.c.status == status ) ) \
.all()
elif exclude_status is not None:
- return install_session.query( install_model.ToolDependency ) \
+ return database_contexts.install_context.query( install_model.ToolDependency ) \
.filter( and_( install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
install_model.ToolDependency.table.c.status != exclude_status ) ) \
.all()
else:
- return install_session.query( install_model.ToolDependency ) \
+ return database_contexts.install_context.query( install_model.ToolDependency ) \
.filter( install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id ) \
.all()
def mark_obj_deleted( obj ):
obj.deleted = True
- sa_session.add( obj )
- sa_session.flush()
+ database_contexts.galaxy_context.add( obj )
+ database_contexts.galaxy_context.flush()
def refresh( obj ):
- install_session.refresh( obj ) # only used by twilltest
+ database_contexts.install_context.refresh( obj ) # only used by twilltest
def get_private_role( user ):
for role in user.all_roles():
@@ -75,6 +74,6 @@
raise AssertionError( "Private role not found for user '%s'" % user.email )
def get_user( email ):
- return sa_session.query( model.User ) \
+ return database_contexts.galaxy_context.query( model.User ) \
.filter( model.User.table.c.email==email ) \
.first()
diff -r e962e0406fc631aa9915e3a0522e10a35d8b0def -r d38f79874652f7a6bd898aa582ec186e9445513c test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -24,7 +24,7 @@
# http://code.google.com/p/python-nose/issues/detail?id=284
eggs.require( "pysqlite" )
-import functional.test_toolbox as test_toolbox
+test_toolbox = None
import httplib
import install_and_test_tool_shed_repositories.base.test_db_util as test_db_util
import install_and_test_tool_shed_repositories.functional.test_install_repositories as test_install_repositories
@@ -939,6 +939,9 @@
app = UniverseApplication( **kwargs )
database_contexts.galaxy_context = app.model.context
database_contexts.install_context = app.install_model.context
+ global test_toolbox
+ import functional.test_toolbox as imported_test_toolbox
+ test_toolbox = imported_test_toolbox
log.debug( "Embedded Galaxy application started..." )
# ---- Run galaxy webserver ------------------------------------------------------
https://bitbucket.org/galaxy/galaxy-central/commits/48d838357cee/
Changeset: 48d838357cee
User: jmchilton
Date: 2013-12-05 18:02:57
Summary: Merge latest central.
Affected #: 1 file
diff -r d38f79874652f7a6bd898aa582ec186e9445513c -r 48d838357ceea59c58640dabf7580bc1947d7577 lib/tool_shed/util/container_util.py
--- a/lib/tool_shed/util/container_util.py
+++ b/lib/tool_shed/util/container_util.py
@@ -327,19 +327,30 @@
# "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}]
new_set_environment_dict_list = []
for env_requirements_dict in requirements_dict:
- name = env_requirements_dict[ 'name' ]
- type = env_requirements_dict[ 'type' ]
- if tool_dependency_is_orphan( type, name, None, tools ):
- env_requirements_dict[ 'is_orphan' ] = True
+ try:
+ name = env_requirements_dict[ 'name' ]
+ type = env_requirements_dict[ 'type' ]
+ if tool_dependency_is_orphan( type, name, None, tools ):
+ env_requirements_dict[ 'is_orphan' ] = True
+ except Exception, e:
+ name = str( e )
+ type = 'unknown'
+ is_orphan = 'unknown'
new_set_environment_dict_list.append( env_requirements_dict )
new_tool_dependencies[ td_key ] = new_set_environment_dict_list
else:
# {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"}
- name = requirements_dict[ 'name' ]
- type = requirements_dict[ 'type' ]
- version = requirements_dict[ 'version']
- if tool_dependency_is_orphan( type, name, version, tools ):
- requirements_dict[ 'is_orphan' ] = True
+ try:
+ name = requirements_dict[ 'name' ]
+ type = requirements_dict[ 'type' ]
+ version = requirements_dict[ 'version']
+ if tool_dependency_is_orphan( type, name, version, tools ):
+ requirements_dict[ 'is_orphan' ] = True
+ except Exception, e:
+ name = str( e )
+ type = 'unknown'
+ version = 'unknown'
+ is_orphan = 'unknown'
new_tool_dependencies[ td_key ] = requirements_dict
return new_tool_dependencies
@@ -358,16 +369,24 @@
# Insert a header row.
data_manager_id += 1
data_manager = DataManager( id=data_manager_id,
- name='Name',
- version='Version',
- data_tables='Data Tables' )
+ name='Name',
+ version='Version',
+ data_tables='Data Tables' )
folder.valid_data_managers.append( data_manager )
for data_manager_dict in data_managers.itervalues():
data_manager_id += 1
+ try:
+ name = data_manager_dict.get( 'name', '' )
+ version = data_manager_dict.get( 'version', '' )
+ data_tables = ", ".join( data_manager_dict.get( 'data_tables', '' ) )
+ except Exception, e:
+ name = str( e )
+ version = 'unknown'
+ data_tables = 'unknown'
data_manager = DataManager( id=data_manager_id,
- name=data_manager_dict.get( 'name', '' ),
- version=data_manager_dict.get( 'version', '' ),
- data_tables=", ".join( data_manager_dict.get( 'data_tables', '' ) ) )
+ name=name,
+ version=version,
+ data_tables=data_tables )
folder.valid_data_managers.append( data_manager )
else:
data_managers_root_folder = None
@@ -412,13 +431,27 @@
else:
num_display_app_containers = 0
datatype_id += 1
+ try:
+ extension = datatypes_dict.get( 'extension', '' )
+ type = datatypes_dict.get( 'dtype', '' )
+ mimetype = datatypes_dict.get( 'mimetype', '' )
+ subclass = datatypes_dict.get( 'subclass', '' )
+ converters = num_converters
+ display_app_containers = num_display_app_containers
+ except Exception, e:
+ extension = str( e )
+ type = 'unknown'
+ mimetype = 'unknown'
+ subclass = 'unknown'
+ converters = 'unknown'
+ display_app_containers = 'unknown'
datatype = Datatype( id=datatype_id,
- extension=datatypes_dict.get( 'extension', '' ),
- type=datatypes_dict.get( 'dtype', '' ),
- mimetype=datatypes_dict.get( 'mimetype', '' ),
- subclass=datatypes_dict.get( 'subclass', '' ),
- converters=num_converters,
- display_app_containers=num_display_app_containers )
+ extension=extension,
+ type=type,
+ mimetype=mimetype,
+ subclass=subclass,
+ converters=converters,
+ display_app_containers=display_app_containers )
folder.datatypes.append( datatype )
else:
datatypes_root_folder = None
@@ -439,22 +472,22 @@
# Insert a header row.
data_manager_id += 1
data_manager = InvalidDataManager( id=data_manager_id,
- index='Element Index',
- error='Error' )
+ index='Element Index',
+ error='Error' )
folder.invalid_data_managers.append( data_manager )
if error_messages:
for error_message in error_messages:
data_manager_id += 1
data_manager = InvalidDataManager( id=data_manager_id,
- index=0,
- error=error_message )
+ index=0,
+ error=error_message )
folder.invalid_data_managers.append( data_manager )
has_errors = True
for data_manager_dict in data_managers:
data_manager_id += 1
data_manager = InvalidDataManager( id=data_manager_id,
- index=data_manager_dict.get( 'index', 0 ) + 1,
- error=data_manager_dict.get( 'error_message', '' ) )
+ index=data_manager_dict.get( 'index', 0 ) + 1,
+ error=data_manager_dict.get( 'error_message', '' ) )
folder.invalid_data_managers.append( data_manager )
has_errors = True
else:
@@ -523,10 +556,16 @@
for td_key, requirements_dict in invalid_tool_dependencies_dict.items():
folder_id += 1
invalid_tool_dependency_id += 1
- name = requirements_dict[ 'name' ]
- type = requirements_dict[ 'type' ]
- version = requirements_dict[ 'version' ]
- error = requirements_dict[ 'error' ]
+ try:
+ name = requirements_dict[ 'name' ]
+ type = requirements_dict[ 'type' ]
+ version = requirements_dict[ 'version' ]
+ error = requirements_dict[ 'error' ]
+ except Exception, e:
+ name = 'unknown'
+ type = 'unknown'
+ version = 'unknown'
+ error = str( e )
key = generate_tool_dependencies_key( name, version, type )
label = "Version <b>%s</b> of the <b>%s</b><b>%s</b>" % ( version, name, type )
folder = Folder( id=folder_id,
@@ -923,8 +962,8 @@
repository_installation_status = None
for tool_dict in tool_dicts:
if not isinstance( tool_dict, dict ):
- # Due to some previous bug (hopefully not current) invalid tool strings may be included in the recived
- # list of tool_dicts. For example, the picard repository metadata has 2 invalid tools in the recieved
+ # Due to some previous bug (hopefully not current) invalid tool strings may be included in the received
+ # list of tool_dicts. For example, the picard repository metadata has 2 invalid tools in the received
# list of supposedly valid tools: 'rgPicardASMetrics.xml', 'rgPicardGCBiasMetrics.xml'.
continue
container_object_tool_id += 1
@@ -933,17 +972,28 @@
# 'requirements': [{'version': '1.56.0', 'type': 'package', 'name': 'picard'}],
requirements_str = ''
for requirement_dict in requirements:
- requirement_name = str( requirement_dict.get( 'name', 'unknown' ) )
- requirement_type = str( requirement_dict.get( 'type', 'unknown' ) )
+ try:
+ requirement_name = str( requirement_dict.get( 'name', 'unknown' ) )
+ requirement_type = str( requirement_dict.get( 'type', 'unknown' ) )
+ except Exception, e:
+ requirement_name = str( e )
+ requirement_type = 'unknown'
requirements_str += '%s (%s), ' % ( requirement_name, requirement_type )
requirements_str = requirements_str.rstrip( ', ' )
else:
requirements_str = 'none'
- tool_config = str( tool_dict.get( 'tool_config', 'missing' ) )
- tool_id = str( tool_dict.get( 'id', 'missing' ) )
- name = str( tool_dict.get( 'name', 'missing' ) )
- description = str( tool_dict.get( 'description', '' ) )
- version = str( tool_dict.get( 'description', 'missing' ) )
+ try:
+ tool_config = str( tool_dict.get( 'tool_config', 'missing' ) )
+ tool_id = str( tool_dict.get( 'id', 'unknown' ) )
+ name = str( tool_dict.get( 'name', 'unknown' ) )
+ description = str( tool_dict.get( 'description', '' ) )
+ version = str( tool_dict.get( 'description', 'unknown' ) )
+ except Exception, e:
+ tool_config = str( e )
+ tool_id = 'unknown'
+ name = 'unknown'
+ description = ''
+ version = 'unknown'
tool = Tool( id=container_object_tool_id,
tool_config=tool_config,
tool_id=tool_id,
@@ -1012,12 +1062,21 @@
is_orphan = False
if is_orphan:
folder.description = not_used_by_local_tools_description
- name = set_environment_dict.get( 'name', None )
- type = set_environment_dict[ 'type' ]
- repository_id = set_environment_dict.get( 'repository_id', None )
- td_id = set_environment_dict.get( 'tool_dependency_id', None )
+ try:
+ name = set_environment_dict.get( 'name', None )
+ type = set_environment_dict[ 'type' ]
+ repository_id = set_environment_dict.get( 'repository_id', None )
+ td_id = set_environment_dict.get( 'tool_dependency_id', None )
+ except Exception, e:
+ name = str( e )
+ type = 'unknown'
+ repository_id = 'unknown'
+ td_id = 'unknown'
if trans.webapp.name == 'galaxy':
- installation_status = set_environment_dict.get( 'status', 'Never installed' )
+ try:
+ installation_status = set_environment_dict.get( 'status', 'Never installed' )
+ except Exception, e:
+ installation_status = str( e )
else:
installation_status = None
tool_dependency = ToolDependency( id=tool_dependency_id,
@@ -1037,13 +1096,23 @@
is_orphan = False
if is_orphan:
folder.description = not_used_by_local_tools_description
- name = requirements_dict[ 'name' ]
- version = requirements_dict[ 'version' ]
- type = requirements_dict[ 'type' ]
- repository_id = requirements_dict.get( 'repository_id', None )
- td_id = requirements_dict.get( 'tool_dependency_id', None )
+ try:
+ name = requirements_dict[ 'name' ]
+ version = requirements_dict[ 'version' ]
+ type = requirements_dict[ 'type' ]
+ repository_id = requirements_dict.get( 'repository_id', None )
+ td_id = requirements_dict.get( 'tool_dependency_id', None )
+ except Exception, e:
+ name = str( e )
+ version = 'unknown'
+ type = 'unknown'
+ repository_id = 'unknown'
+ td_id = 'unknown'
if trans.webapp.name == 'galaxy':
- installation_status = requirements_dict.get( 'status', 'Never installed' )
+ try:
+ installation_status = requirements_dict.get( 'status', 'Never installed' )
+ except Exception, e:
+ installation_status = str( e )
else:
installation_status = None
tool_dependency = ToolDependency( id=tool_dependency_id,
@@ -1109,14 +1178,24 @@
folder_id += 1
test_environment_folder = Folder( id=folder_id, key='test_environment', label='Automated test environment', parent=containing_folder )
containing_folder.folders.append( test_environment_folder )
- architecture = test_environment_dict.get( 'architecture', '' )
- galaxy_database_version = test_environment_dict.get( 'galaxy_database_version', '' )
- galaxy_revision = test_environment_dict.get( 'galaxy_revision', '' )
- python_version = test_environment_dict.get( 'python_version', '' )
- system = test_environment_dict.get( 'system', '' )
- tool_shed_database_version = test_environment_dict.get( 'tool_shed_database_version', '' )
- tool_shed_mercurial_version = test_environment_dict.get( 'tool_shed_mercurial_version', '' )
- tool_shed_revision = test_environment_dict.get( 'tool_shed_revision', '' )
+ try:
+ architecture = test_environment_dict.get( 'architecture', '' )
+ galaxy_database_version = test_environment_dict.get( 'galaxy_database_version', '' )
+ galaxy_revision = test_environment_dict.get( 'galaxy_revision', '' )
+ python_version = test_environment_dict.get( 'python_version', '' )
+ system = test_environment_dict.get( 'system', '' )
+ tool_shed_database_version = test_environment_dict.get( 'tool_shed_database_version', '' )
+ tool_shed_mercurial_version = test_environment_dict.get( 'tool_shed_mercurial_version', '' )
+ tool_shed_revision = test_environment_dict.get( 'tool_shed_revision', '' )
+ except Exception, e:
+ architecture = str( e )
+ galaxy_database_version = ''
+ galaxy_revision = ''
+ python_version = ''
+ system = ''
+ tool_shed_database_version = ''
+ tool_shed_mercurial_version = ''
+ tool_shed_revision = ''
test_environment = TestEnvironment( id=1,
architecture=architecture,
galaxy_database_version=galaxy_database_version,
@@ -1134,8 +1213,11 @@
not_tested_folder = Folder( id=folder_id, key='not_tested', label='Not tested', parent=containing_folder )
containing_folder.folders.append( not_tested_folder )
not_tested_id = 0
- not_tested = NotTested( id=not_tested_id,
- reason=not_tested_dict.get( 'reason', '' ) )
+ try:
+ reason = not_tested_dict.get( 'reason', '' )
+ except Exception, e:
+ reason = str( e )
+ not_tested = NotTested( id=not_tested_id, reason=reason )
not_tested_folder.not_tested.append( not_tested )
passed_tests_dicts = tool_test_results_dict.get( 'passed_tests', [] )
if len( passed_tests_dicts ) > 0:
@@ -1145,10 +1227,18 @@
passed_test_id = 0
for passed_tests_dict in passed_tests_dicts:
passed_test_id += 1
+ try:
+ test_id = passed_tests_dict.get( 'test_id' '' )
+ tool_id = passed_tests_dict.get( 'tool_id', '' )
+ tool_version = passed_tests_dict.get( 'tool_version', '' )
+ except Exception, e:
+ test_id = str( e )
+ tool_id = 'unknown'
+ tool_version = 'unknown'
passed_test = PassedTest( id=passed_test_id,
- test_id=passed_tests_dict.get( 'test_id' '' ),
- tool_id=passed_tests_dict.get( 'tool_id', '' ),
- tool_version=passed_tests_dict.get( 'tool_version', '' ) )
+ test_id=test_id,
+ tool_id=tool_id,
+ tool_version=tool_version )
passed_tests_folder.passed_tests.append( passed_test )
failed_tests_dicts = tool_test_results_dict.get( 'failed_tests', [] )
if len( failed_tests_dicts ) > 0:
@@ -1157,16 +1247,25 @@
containing_folder.folders.append( failed_tests_folder )
failed_test_id = 0
for failed_tests_dict in failed_tests_dicts:
- # TODO: Remove this when invalid test data is eliminated.
- if isinstance( failed_tests_dict, list ):
- failed_tests_dict = failed_tests_dict[ 0 ]
failed_test_id += 1
+ try:
+ stderr = failed_tests_dict.get( 'stderr', '' )
+ test_id = failed_tests_dict.get( 'test_id', '' )
+ tool_id = failed_tests_dict.get( 'tool_id', '' )
+ tool_version = failed_tests_dict.get( 'tool_version', '' )
+ traceback = failed_tests_dict.get( 'traceback', '' )
+ except Exception, e:
+ stderr = 'unknown'
+ test_id = 'unknown'
+ tool_id = 'unknown'
+ tool_version = 'unknown'
+ traceback = str( e )
failed_test = FailedTest( id=failed_test_id,
- stderr=failed_tests_dict.get( 'stderr', '' ),
- test_id=failed_tests_dict.get( 'test_id', '' ),
- tool_id=failed_tests_dict.get( 'tool_id', '' ),
- tool_version=failed_tests_dict.get( 'tool_version', '' ),
- traceback=failed_tests_dict.get( 'traceback', '' ) )
+ stderr=stderr,
+ test_id=test_id,
+ tool_id=tool_id,
+ tool_version=tool_version,
+ traceback=traceback )
failed_tests_folder.failed_tests.append( failed_test )
missing_test_components_dicts = tool_test_results_dict.get( 'missing_test_components', [] )
if len( missing_test_components_dicts ) > 0:
@@ -1179,11 +1278,21 @@
missing_test_component_id = 0
for missing_test_components_dict in missing_test_components_dicts:
missing_test_component_id += 1
+ try:
+ missing_components = missing_test_components_dict.get( 'missing_components', '' )
+ tool_guid = missing_test_components_dict.get( 'tool_guid', '' )
+ tool_id = missing_test_components_dict.get( 'tool_id', '' )
+ tool_version = missing_test_components_dict.get( 'tool_version', '' )
+ except Exception, e:
+ missing_components = str( e )
+ tool_guid = 'unknown'
+ tool_id = 'unknown'
+ tool_version = 'unknown'
missing_test_component = MissingTestComponent( id=missing_test_component_id,
- missing_components=missing_test_components_dict.get( 'missing_components', '' ),
- tool_guid=missing_test_components_dict.get( 'tool_guid', '' ),
- tool_id=missing_test_components_dict.get( 'tool_id', '' ),
- tool_version=missing_test_components_dict.get( 'tool_version', '' ) )
+ missing_components=missing_components,
+ tool_guid=tool_guid,
+ tool_id=tool_id,
+ tool_version=tool_version )
missing_test_components_folder.missing_test_components.append( missing_test_component )
installation_error_dict = tool_test_results_dict.get( 'installation_errors', {} )
if len( installation_error_dict ) > 0:
@@ -1215,13 +1324,24 @@
installation_error_base_folder.folders.append( current_repository_folder )
for current_repository_error_dict in current_repository_installation_error_dicts:
repository_installation_error_id += 1
- repository_installation_error = \
- RepositoryInstallationError( id=repository_installation_error_id,
- tool_shed=str( current_repository_error_dict.get( 'tool_shed', '' ) ),
- name=str( current_repository_error_dict.get( 'name', '' ) ),
- owner=str( current_repository_error_dict.get( 'owner', '' ) ),
- changeset_revision=str( current_repository_error_dict.get( 'changeset_revision', '' ) ),
- error_message=current_repository_error_dict.get( 'error_message', '' ) )
+ try:
+ r_tool_shed = str( current_repository_error_dict.get( 'tool_shed', '' ) )
+ r_name = str( current_repository_error_dict.get( 'name', '' ) )
+ r_owner = str( current_repository_error_dict.get( 'owner', '' ) )
+ r_changeset_revision = str( current_repository_error_dict.get( 'changeset_revision', '' ) )
+ r_error_message = current_repository_error_dict.get( 'error_message', '' )
+ except Exception, e:
+ r_tool_shed = 'unknown'
+ r_name = 'unknown'
+ r_owner = 'unknown'
+ r_changeset_revision = 'unknown'
+ r_error_message = str( e )
+ repository_installation_error = RepositoryInstallationError( id=repository_installation_error_id,
+ tool_shed=r_tool_shed,
+ name=r_name,
+ owner=r_owner,
+ changeset_revision=r_changeset_revision,
+ error_message=r_error_message )
current_repository_folder.current_repository_installation_errors.append( repository_installation_error )
if len( repository_dependency_installation_error_dicts ) > 0:
folder_id += 1
@@ -1232,13 +1352,24 @@
installation_error_base_folder.folders.append( repository_dependencies_folder )
for repository_dependency_error_dict in repository_dependency_installation_error_dicts:
repository_installation_error_id += 1
- repository_installation_error = \
- RepositoryInstallationError( id=repository_installation_error_id,
- tool_shed=str( repository_dependency_error_dict.get( 'tool_shed', '' ) ),
- name=str( repository_dependency_error_dict.get( 'name', '' ) ),
- owner=str( repository_dependency_error_dict.get( 'owner', '' ) ),
- changeset_revision=str( repository_dependency_error_dict.get( 'changeset_revision', '' ) ),
- error_message=repository_dependency_error_dict.get( 'error_message', '' ) )
+ try:
+ rd_tool_shed = str( repository_dependency_error_dict.get( 'tool_shed', '' ) )
+ rd_name = str( repository_dependency_error_dict.get( 'name', '' ) )
+ rd_owner = str( repository_dependency_error_dict.get( 'owner', '' ) )
+ rd_changeset_revision = str( repository_dependency_error_dict.get( 'changeset_revision', '' ) )
+ rd_error_message = repository_dependency_error_dict.get( 'error_message', '' )
+ except Exception, e:
+ rd_tool_shed = 'unknown'
+ rd_name = 'unknown'
+ rd_owner = 'unknown'
+ rd_changeset_revision = 'unknown'
+ rd_error_message = str( e )
+ repository_installation_error = RepositoryInstallationError( id=repository_installation_error_id,
+ tool_shed=rd_tool_shed,
+ name=rd_name,
+ owner=rd_owner,
+ changeset_revision=rd_changeset_revision,
+ error_message=rd_error_message )
repository_dependencies_folder.repository_installation_errors.append( repository_installation_error )
if len( tool_dependency_installation_error_dicts ) > 0:
# [{'error_message': 'some traceback string' 'type': 'package', 'name': 'MIRA', 'version': '4.0'}]
@@ -1251,12 +1382,21 @@
tool_dependency_error_id = 0
for tool_dependency_error_dict in tool_dependency_installation_error_dicts:
tool_dependency_error_id += 1
- tool_dependency_installation_error = \
- ToolDependencyInstallationError( id=tool_dependency_error_id,
- type=str( tool_dependency_error_dict.get( 'type', '' ) ),
- name=str( tool_dependency_error_dict.get( 'name', '' ) ),
- version=str( tool_dependency_error_dict.get( 'version', '' ) ),
- error_message=tool_dependency_error_dict.get( 'error_message', '' ) )
+ try:
+ td_type = str( tool_dependency_error_dict.get( 'type', '' ) )
+ td_name = str( tool_dependency_error_dict.get( 'name', '' ) )
+ td_version = str( tool_dependency_error_dict.get( 'version', '' ) )
+ td_error_message = tool_dependency_error_dict.get( 'error_message', '' )
+ except Exception, e:
+ td_type = 'unknown'
+ td_name = 'unknown'
+ td_version = 'unknown'
+ td_error_message = str( e )
+ tool_dependency_installation_error = ToolDependencyInstallationError( id=tool_dependency_error_id,
+ type=td_type,
+ name=td_name,
+ version=td_version,
+ error_message=td_error_message )
tool_dependencies_folder.tool_dependency_installation_errors.append( tool_dependency_installation_error )
else:
tool_test_results_root_folder = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Improved exception handling in the Tool Shed's container_util.
by commits-noreply@bitbucket.org 05 Dec '13
by commits-noreply@bitbucket.org 05 Dec '13
05 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5369ad06627a/
Changeset: 5369ad06627a
User: greg
Date: 2013-12-05 17:10:20
Summary: Improved exception handling in the Tool Shed's container_util.
Affected #: 1 file
diff -r e962e0406fc631aa9915e3a0522e10a35d8b0def -r 5369ad06627a1992cd822894c549b8c0eb61aee1 lib/tool_shed/util/container_util.py
--- a/lib/tool_shed/util/container_util.py
+++ b/lib/tool_shed/util/container_util.py
@@ -327,19 +327,30 @@
# "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}]
new_set_environment_dict_list = []
for env_requirements_dict in requirements_dict:
- name = env_requirements_dict[ 'name' ]
- type = env_requirements_dict[ 'type' ]
- if tool_dependency_is_orphan( type, name, None, tools ):
- env_requirements_dict[ 'is_orphan' ] = True
+ try:
+ name = env_requirements_dict[ 'name' ]
+ type = env_requirements_dict[ 'type' ]
+ if tool_dependency_is_orphan( type, name, None, tools ):
+ env_requirements_dict[ 'is_orphan' ] = True
+ except Exception, e:
+ name = str( e )
+ type = 'unknown'
+ is_orphan = 'unknown'
new_set_environment_dict_list.append( env_requirements_dict )
new_tool_dependencies[ td_key ] = new_set_environment_dict_list
else:
# {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"}
- name = requirements_dict[ 'name' ]
- type = requirements_dict[ 'type' ]
- version = requirements_dict[ 'version']
- if tool_dependency_is_orphan( type, name, version, tools ):
- requirements_dict[ 'is_orphan' ] = True
+ try:
+ name = requirements_dict[ 'name' ]
+ type = requirements_dict[ 'type' ]
+ version = requirements_dict[ 'version']
+ if tool_dependency_is_orphan( type, name, version, tools ):
+ requirements_dict[ 'is_orphan' ] = True
+ except Exception, e:
+ name = str( e )
+ type = 'unknown'
+ version = 'unknown'
+ is_orphan = 'unknown'
new_tool_dependencies[ td_key ] = requirements_dict
return new_tool_dependencies
@@ -358,16 +369,24 @@
# Insert a header row.
data_manager_id += 1
data_manager = DataManager( id=data_manager_id,
- name='Name',
- version='Version',
- data_tables='Data Tables' )
+ name='Name',
+ version='Version',
+ data_tables='Data Tables' )
folder.valid_data_managers.append( data_manager )
for data_manager_dict in data_managers.itervalues():
data_manager_id += 1
+ try:
+ name = data_manager_dict.get( 'name', '' )
+ version = data_manager_dict.get( 'version', '' )
+ data_tables = ", ".join( data_manager_dict.get( 'data_tables', '' ) )
+ except Exception, e:
+ name = str( e )
+ version = 'unknown'
+ data_tables = 'unknown'
data_manager = DataManager( id=data_manager_id,
- name=data_manager_dict.get( 'name', '' ),
- version=data_manager_dict.get( 'version', '' ),
- data_tables=", ".join( data_manager_dict.get( 'data_tables', '' ) ) )
+ name=name,
+ version=version,
+ data_tables=data_tables )
folder.valid_data_managers.append( data_manager )
else:
data_managers_root_folder = None
@@ -412,13 +431,27 @@
else:
num_display_app_containers = 0
datatype_id += 1
+ try:
+ extension = datatypes_dict.get( 'extension', '' )
+ type = datatypes_dict.get( 'dtype', '' )
+ mimetype = datatypes_dict.get( 'mimetype', '' )
+ subclass = datatypes_dict.get( 'subclass', '' )
+ converters = num_converters
+ display_app_containers = num_display_app_containers
+ except Exception, e:
+ extension = str( e )
+ type = 'unknown'
+ mimetype = 'unknown'
+ subclass = 'unknown'
+ converters = 'unknown'
+ display_app_containers = 'unknown'
datatype = Datatype( id=datatype_id,
- extension=datatypes_dict.get( 'extension', '' ),
- type=datatypes_dict.get( 'dtype', '' ),
- mimetype=datatypes_dict.get( 'mimetype', '' ),
- subclass=datatypes_dict.get( 'subclass', '' ),
- converters=num_converters,
- display_app_containers=num_display_app_containers )
+ extension=extension,
+ type=type,
+ mimetype=mimetype,
+ subclass=subclass,
+ converters=converters,
+ display_app_containers=display_app_containers )
folder.datatypes.append( datatype )
else:
datatypes_root_folder = None
@@ -439,22 +472,22 @@
# Insert a header row.
data_manager_id += 1
data_manager = InvalidDataManager( id=data_manager_id,
- index='Element Index',
- error='Error' )
+ index='Element Index',
+ error='Error' )
folder.invalid_data_managers.append( data_manager )
if error_messages:
for error_message in error_messages:
data_manager_id += 1
data_manager = InvalidDataManager( id=data_manager_id,
- index=0,
- error=error_message )
+ index=0,
+ error=error_message )
folder.invalid_data_managers.append( data_manager )
has_errors = True
for data_manager_dict in data_managers:
data_manager_id += 1
data_manager = InvalidDataManager( id=data_manager_id,
- index=data_manager_dict.get( 'index', 0 ) + 1,
- error=data_manager_dict.get( 'error_message', '' ) )
+ index=data_manager_dict.get( 'index', 0 ) + 1,
+ error=data_manager_dict.get( 'error_message', '' ) )
folder.invalid_data_managers.append( data_manager )
has_errors = True
else:
@@ -523,10 +556,16 @@
for td_key, requirements_dict in invalid_tool_dependencies_dict.items():
folder_id += 1
invalid_tool_dependency_id += 1
- name = requirements_dict[ 'name' ]
- type = requirements_dict[ 'type' ]
- version = requirements_dict[ 'version' ]
- error = requirements_dict[ 'error' ]
+ try:
+ name = requirements_dict[ 'name' ]
+ type = requirements_dict[ 'type' ]
+ version = requirements_dict[ 'version' ]
+ error = requirements_dict[ 'error' ]
+ except Exception, e:
+ name = 'unknown'
+ type = 'unknown'
+ version = 'unknown'
+ error = str( e )
key = generate_tool_dependencies_key( name, version, type )
label = "Version <b>%s</b> of the <b>%s</b><b>%s</b>" % ( version, name, type )
folder = Folder( id=folder_id,
@@ -923,8 +962,8 @@
repository_installation_status = None
for tool_dict in tool_dicts:
if not isinstance( tool_dict, dict ):
- # Due to some previous bug (hopefully not current) invalid tool strings may be included in the recived
- # list of tool_dicts. For example, the picard repository metadata has 2 invalid tools in the recieved
+ # Due to some previous bug (hopefully not current) invalid tool strings may be included in the received
+ # list of tool_dicts. For example, the picard repository metadata has 2 invalid tools in the received
# list of supposedly valid tools: 'rgPicardASMetrics.xml', 'rgPicardGCBiasMetrics.xml'.
continue
container_object_tool_id += 1
@@ -933,17 +972,28 @@
# 'requirements': [{'version': '1.56.0', 'type': 'package', 'name': 'picard'}],
requirements_str = ''
for requirement_dict in requirements:
- requirement_name = str( requirement_dict.get( 'name', 'unknown' ) )
- requirement_type = str( requirement_dict.get( 'type', 'unknown' ) )
+ try:
+ requirement_name = str( requirement_dict.get( 'name', 'unknown' ) )
+ requirement_type = str( requirement_dict.get( 'type', 'unknown' ) )
+ except Exception, e:
+ requirement_name = str( e )
+ requirement_type = 'unknown'
requirements_str += '%s (%s), ' % ( requirement_name, requirement_type )
requirements_str = requirements_str.rstrip( ', ' )
else:
requirements_str = 'none'
- tool_config = str( tool_dict.get( 'tool_config', 'missing' ) )
- tool_id = str( tool_dict.get( 'id', 'missing' ) )
- name = str( tool_dict.get( 'name', 'missing' ) )
- description = str( tool_dict.get( 'description', '' ) )
- version = str( tool_dict.get( 'description', 'missing' ) )
+ try:
+ tool_config = str( tool_dict.get( 'tool_config', 'missing' ) )
+ tool_id = str( tool_dict.get( 'id', 'unknown' ) )
+ name = str( tool_dict.get( 'name', 'unknown' ) )
+ description = str( tool_dict.get( 'description', '' ) )
+ version = str( tool_dict.get( 'description', 'unknown' ) )
+ except Exception, e:
+ tool_config = str( e )
+ tool_id = 'unknown'
+ name = 'unknown'
+ description = ''
+ version = 'unknown'
tool = Tool( id=container_object_tool_id,
tool_config=tool_config,
tool_id=tool_id,
@@ -1012,12 +1062,21 @@
is_orphan = False
if is_orphan:
folder.description = not_used_by_local_tools_description
- name = set_environment_dict.get( 'name', None )
- type = set_environment_dict[ 'type' ]
- repository_id = set_environment_dict.get( 'repository_id', None )
- td_id = set_environment_dict.get( 'tool_dependency_id', None )
+ try:
+ name = set_environment_dict.get( 'name', None )
+ type = set_environment_dict[ 'type' ]
+ repository_id = set_environment_dict.get( 'repository_id', None )
+ td_id = set_environment_dict.get( 'tool_dependency_id', None )
+ except Exception, e:
+ name = str( e )
+ type = 'unknown'
+ repository_id = 'unknown'
+ td_id = 'unknown'
if trans.webapp.name == 'galaxy':
- installation_status = set_environment_dict.get( 'status', 'Never installed' )
+ try:
+ installation_status = set_environment_dict.get( 'status', 'Never installed' )
+ except Exception, e:
+ installation_status = str( e )
else:
installation_status = None
tool_dependency = ToolDependency( id=tool_dependency_id,
@@ -1037,13 +1096,23 @@
is_orphan = False
if is_orphan:
folder.description = not_used_by_local_tools_description
- name = requirements_dict[ 'name' ]
- version = requirements_dict[ 'version' ]
- type = requirements_dict[ 'type' ]
- repository_id = requirements_dict.get( 'repository_id', None )
- td_id = requirements_dict.get( 'tool_dependency_id', None )
+ try:
+ name = requirements_dict[ 'name' ]
+ version = requirements_dict[ 'version' ]
+ type = requirements_dict[ 'type' ]
+ repository_id = requirements_dict.get( 'repository_id', None )
+ td_id = requirements_dict.get( 'tool_dependency_id', None )
+ except Exception, e:
+ name = str( e )
+ version = 'unknown'
+ type = 'unknown'
+ repository_id = 'unknown'
+ td_id = 'unknown'
if trans.webapp.name == 'galaxy':
- installation_status = requirements_dict.get( 'status', 'Never installed' )
+ try:
+ installation_status = requirements_dict.get( 'status', 'Never installed' )
+ except Exception, e:
+ installation_status = str( e )
else:
installation_status = None
tool_dependency = ToolDependency( id=tool_dependency_id,
@@ -1109,14 +1178,24 @@
folder_id += 1
test_environment_folder = Folder( id=folder_id, key='test_environment', label='Automated test environment', parent=containing_folder )
containing_folder.folders.append( test_environment_folder )
- architecture = test_environment_dict.get( 'architecture', '' )
- galaxy_database_version = test_environment_dict.get( 'galaxy_database_version', '' )
- galaxy_revision = test_environment_dict.get( 'galaxy_revision', '' )
- python_version = test_environment_dict.get( 'python_version', '' )
- system = test_environment_dict.get( 'system', '' )
- tool_shed_database_version = test_environment_dict.get( 'tool_shed_database_version', '' )
- tool_shed_mercurial_version = test_environment_dict.get( 'tool_shed_mercurial_version', '' )
- tool_shed_revision = test_environment_dict.get( 'tool_shed_revision', '' )
+ try:
+ architecture = test_environment_dict.get( 'architecture', '' )
+ galaxy_database_version = test_environment_dict.get( 'galaxy_database_version', '' )
+ galaxy_revision = test_environment_dict.get( 'galaxy_revision', '' )
+ python_version = test_environment_dict.get( 'python_version', '' )
+ system = test_environment_dict.get( 'system', '' )
+ tool_shed_database_version = test_environment_dict.get( 'tool_shed_database_version', '' )
+ tool_shed_mercurial_version = test_environment_dict.get( 'tool_shed_mercurial_version', '' )
+ tool_shed_revision = test_environment_dict.get( 'tool_shed_revision', '' )
+ except Exception, e:
+ architecture = str( e )
+ galaxy_database_version = ''
+ galaxy_revision = ''
+ python_version = ''
+ system = ''
+ tool_shed_database_version = ''
+ tool_shed_mercurial_version = ''
+ tool_shed_revision = ''
test_environment = TestEnvironment( id=1,
architecture=architecture,
galaxy_database_version=galaxy_database_version,
@@ -1134,8 +1213,11 @@
not_tested_folder = Folder( id=folder_id, key='not_tested', label='Not tested', parent=containing_folder )
containing_folder.folders.append( not_tested_folder )
not_tested_id = 0
- not_tested = NotTested( id=not_tested_id,
- reason=not_tested_dict.get( 'reason', '' ) )
+ try:
+ reason = not_tested_dict.get( 'reason', '' )
+ except Exception, e:
+ reason = str( e )
+ not_tested = NotTested( id=not_tested_id, reason=reason )
not_tested_folder.not_tested.append( not_tested )
passed_tests_dicts = tool_test_results_dict.get( 'passed_tests', [] )
if len( passed_tests_dicts ) > 0:
@@ -1145,10 +1227,18 @@
passed_test_id = 0
for passed_tests_dict in passed_tests_dicts:
passed_test_id += 1
+ try:
+ test_id = passed_tests_dict.get( 'test_id' '' )
+ tool_id = passed_tests_dict.get( 'tool_id', '' )
+ tool_version = passed_tests_dict.get( 'tool_version', '' )
+ except Exception, e:
+ test_id = str( e )
+ tool_id = 'unknown'
+ tool_version = 'unknown'
passed_test = PassedTest( id=passed_test_id,
- test_id=passed_tests_dict.get( 'test_id' '' ),
- tool_id=passed_tests_dict.get( 'tool_id', '' ),
- tool_version=passed_tests_dict.get( 'tool_version', '' ) )
+ test_id=test_id,
+ tool_id=tool_id,
+ tool_version=tool_version )
passed_tests_folder.passed_tests.append( passed_test )
failed_tests_dicts = tool_test_results_dict.get( 'failed_tests', [] )
if len( failed_tests_dicts ) > 0:
@@ -1157,16 +1247,25 @@
containing_folder.folders.append( failed_tests_folder )
failed_test_id = 0
for failed_tests_dict in failed_tests_dicts:
- # TODO: Remove this when invalid test data is eliminated.
- if isinstance( failed_tests_dict, list ):
- failed_tests_dict = failed_tests_dict[ 0 ]
failed_test_id += 1
+ try:
+ stderr = failed_tests_dict.get( 'stderr', '' )
+ test_id = failed_tests_dict.get( 'test_id', '' )
+ tool_id = failed_tests_dict.get( 'tool_id', '' )
+ tool_version = failed_tests_dict.get( 'tool_version', '' )
+ traceback = failed_tests_dict.get( 'traceback', '' )
+ except Exception, e:
+ stderr = 'unknown'
+ test_id = 'unknown'
+ tool_id = 'unknown'
+ tool_version = 'unknown'
+ traceback = str( e )
failed_test = FailedTest( id=failed_test_id,
- stderr=failed_tests_dict.get( 'stderr', '' ),
- test_id=failed_tests_dict.get( 'test_id', '' ),
- tool_id=failed_tests_dict.get( 'tool_id', '' ),
- tool_version=failed_tests_dict.get( 'tool_version', '' ),
- traceback=failed_tests_dict.get( 'traceback', '' ) )
+ stderr=stderr,
+ test_id=test_id,
+ tool_id=tool_id,
+ tool_version=tool_version,
+ traceback=traceback )
failed_tests_folder.failed_tests.append( failed_test )
missing_test_components_dicts = tool_test_results_dict.get( 'missing_test_components', [] )
if len( missing_test_components_dicts ) > 0:
@@ -1179,11 +1278,21 @@
missing_test_component_id = 0
for missing_test_components_dict in missing_test_components_dicts:
missing_test_component_id += 1
+ try:
+ missing_components = missing_test_components_dict.get( 'missing_components', '' )
+ tool_guid = missing_test_components_dict.get( 'tool_guid', '' )
+ tool_id = missing_test_components_dict.get( 'tool_id', '' )
+ tool_version = missing_test_components_dict.get( 'tool_version', '' )
+ except Exception, e:
+ missing_components = str( e )
+ tool_guid = 'unknown'
+ tool_id = 'unknown'
+ tool_version = 'unknown'
missing_test_component = MissingTestComponent( id=missing_test_component_id,
- missing_components=missing_test_components_dict.get( 'missing_components', '' ),
- tool_guid=missing_test_components_dict.get( 'tool_guid', '' ),
- tool_id=missing_test_components_dict.get( 'tool_id', '' ),
- tool_version=missing_test_components_dict.get( 'tool_version', '' ) )
+ missing_components=missing_components,
+ tool_guid=tool_guid,
+ tool_id=tool_id,
+ tool_version=tool_version )
missing_test_components_folder.missing_test_components.append( missing_test_component )
installation_error_dict = tool_test_results_dict.get( 'installation_errors', {} )
if len( installation_error_dict ) > 0:
@@ -1215,13 +1324,24 @@
installation_error_base_folder.folders.append( current_repository_folder )
for current_repository_error_dict in current_repository_installation_error_dicts:
repository_installation_error_id += 1
- repository_installation_error = \
- RepositoryInstallationError( id=repository_installation_error_id,
- tool_shed=str( current_repository_error_dict.get( 'tool_shed', '' ) ),
- name=str( current_repository_error_dict.get( 'name', '' ) ),
- owner=str( current_repository_error_dict.get( 'owner', '' ) ),
- changeset_revision=str( current_repository_error_dict.get( 'changeset_revision', '' ) ),
- error_message=current_repository_error_dict.get( 'error_message', '' ) )
+ try:
+ r_tool_shed = str( current_repository_error_dict.get( 'tool_shed', '' ) )
+ r_name = str( current_repository_error_dict.get( 'name', '' ) )
+ r_owner = str( current_repository_error_dict.get( 'owner', '' ) )
+ r_changeset_revision = str( current_repository_error_dict.get( 'changeset_revision', '' ) )
+ r_error_message = current_repository_error_dict.get( 'error_message', '' )
+ except Exception, e:
+ r_tool_shed = 'unknown'
+ r_name = 'unknown'
+ r_owner = 'unknown'
+ r_changeset_revision = 'unknown'
+ r_error_message = str( e )
+ repository_installation_error = RepositoryInstallationError( id=repository_installation_error_id,
+ tool_shed=r_tool_shed,
+ name=r_name,
+ owner=r_owner,
+ changeset_revision=r_changeset_revision,
+ error_message=r_error_message )
current_repository_folder.current_repository_installation_errors.append( repository_installation_error )
if len( repository_dependency_installation_error_dicts ) > 0:
folder_id += 1
@@ -1232,13 +1352,24 @@
installation_error_base_folder.folders.append( repository_dependencies_folder )
for repository_dependency_error_dict in repository_dependency_installation_error_dicts:
repository_installation_error_id += 1
- repository_installation_error = \
- RepositoryInstallationError( id=repository_installation_error_id,
- tool_shed=str( repository_dependency_error_dict.get( 'tool_shed', '' ) ),
- name=str( repository_dependency_error_dict.get( 'name', '' ) ),
- owner=str( repository_dependency_error_dict.get( 'owner', '' ) ),
- changeset_revision=str( repository_dependency_error_dict.get( 'changeset_revision', '' ) ),
- error_message=repository_dependency_error_dict.get( 'error_message', '' ) )
+ try:
+ rd_tool_shed = str( repository_dependency_error_dict.get( 'tool_shed', '' ) )
+ rd_name = str( repository_dependency_error_dict.get( 'name', '' ) )
+ rd_owner = str( repository_dependency_error_dict.get( 'owner', '' ) )
+ rd_changeset_revision = str( repository_dependency_error_dict.get( 'changeset_revision', '' ) )
+ rd_error_message = repository_dependency_error_dict.get( 'error_message', '' )
+ except Exception, e:
+ rd_tool_shed = 'unknown'
+ rd_name = 'unknown'
+ rd_owner = 'unknown'
+ rd_changeset_revision = 'unknown'
+ rd_error_message = str( e )
+ repository_installation_error = RepositoryInstallationError( id=repository_installation_error_id,
+ tool_shed=rd_tool_shed,
+ name=rd_name,
+ owner=rd_owner,
+ changeset_revision=rd_changeset_revision,
+ error_message=rd_error_message )
repository_dependencies_folder.repository_installation_errors.append( repository_installation_error )
if len( tool_dependency_installation_error_dicts ) > 0:
# [{'error_message': 'some traceback string' 'type': 'package', 'name': 'MIRA', 'version': '4.0'}]
@@ -1251,12 +1382,21 @@
tool_dependency_error_id = 0
for tool_dependency_error_dict in tool_dependency_installation_error_dicts:
tool_dependency_error_id += 1
- tool_dependency_installation_error = \
- ToolDependencyInstallationError( id=tool_dependency_error_id,
- type=str( tool_dependency_error_dict.get( 'type', '' ) ),
- name=str( tool_dependency_error_dict.get( 'name', '' ) ),
- version=str( tool_dependency_error_dict.get( 'version', '' ) ),
- error_message=tool_dependency_error_dict.get( 'error_message', '' ) )
+ try:
+ td_type = str( tool_dependency_error_dict.get( 'type', '' ) )
+ td_name = str( tool_dependency_error_dict.get( 'name', '' ) )
+ td_version = str( tool_dependency_error_dict.get( 'version', '' ) )
+ td_error_message = tool_dependency_error_dict.get( 'error_message', '' )
+ except Exception, e:
+ td_type = 'unknown'
+ td_name = 'unknown'
+ td_version = 'unknown'
+ td_error_message = str( e )
+ tool_dependency_installation_error = ToolDependencyInstallationError( id=tool_dependency_error_id,
+ type=td_type,
+ name=td_name,
+ version=td_version,
+ error_message=td_error_message )
tool_dependencies_folder.tool_dependency_installation_errors.append( tool_dependency_installation_error )
else:
tool_test_results_root_folder = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
15 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/03a70b0e0b1e/
Changeset: 03a70b0e0b1e
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Clean up stand-alone database scripts.
Namely create_db.py, db_shell.py, and manage_db.py.
Move duplicated code into lib/galaxy/model/orm/scripts.py - add unit tests. PEP-8 clean up of all 4 files. Add incoherent comment at top of manage_db.py.
As a result of the code de-duplication create_db.py should be usable with the tool shed now.
Will be refactoring lib/galaxy/model/orm/scripts.py to work with new tool shed install database - it will be good to have a place to test these and allow manage_db.py and clean_db.py to work immediately.
Affected #: 4 files
diff -r bbdb9255aa9a1b4d30e08e8da0276d28422c5805 -r 03a70b0e0b1ec1a6399b8d7ef115967d3829cffd lib/galaxy/model/orm/scripts.py
--- /dev/null
+++ b/lib/galaxy/model/orm/scripts.py
@@ -0,0 +1,104 @@
+"""
+Code to support database helper scripts (create_db.py, manage_db.py, etc...).
+"""
+import logging
+import os.path
+from ConfigParser import SafeConfigParser
+
+from galaxy import eggs
+
+eggs.require( "decorator" )
+eggs.require( "Tempita" )
+eggs.require( "SQLAlchemy" )
+eggs.require( "sqlalchemy_migrate" )
+
+from galaxy.model.orm import dialect_to_egg
+
+import pkg_resources
+
+log = logging.getLogger( __name__ )
+
+DEFAULT_CONFIG_FILE = 'universe_wsgi.ini'
+DEFAULT_CONFIG_PREFIX = ''
+DEFAULT_DATABASE = 'galaxy'
+
+DATABASE = {
+ "galaxy":
+ {
+ 'repo': 'lib/galaxy/model/migrate',
+ 'default_sqlite_file': './database/universe.sqlite',
+ },
+ "tool_shed":
+ {
+ 'repo': 'lib/galaxy/webapps/tool_shed/model/migrate',
+ 'config_file': 'tool_shed_wsgi.ini',
+ 'default_sqlite_file': './database/community.sqlite',
+ },
+}
+
+
+def get_config( argv, cwd=None ):
+ """
+ Read sys.argv and parse out repository of migrations and database url.
+
+ >>> from tempfile import mkdtemp
+ >>> config_dir = mkdtemp()
+ >>> def write_ini(path, property, value):
+ ... p = SafeConfigParser()
+ ... p.add_section('app:main')
+ ... p.set('app:main', property, value)
+ ... with open(os.path.join(config_dir, path), 'w') as f: p.write(f)
+ >>> write_ini('tool_shed_wsgi.ini', 'database_connection', 'sqlite:///pg/testdb1')
+ >>> config = get_config(['manage_db.py', 'tool_shed'], cwd=config_dir)
+ >>> config['repo']
+ 'lib/galaxy/webapps/tool_shed/model/migrate'
+ >>> config['db_url']
+ 'sqlite:///pg/testdb1'
+ >>> write_ini('universe_wsgi.ini', 'database_file', 'moo.sqlite')
+ >>> config = get_config(['manage_db.py'], cwd=config_dir)
+ >>> config['db_url']
+ 'sqlite:///moo.sqlite?isolation_level=IMMEDIATE'
+ >>> config['repo']
+ 'lib/galaxy/model/migrate'
+ """
+ if argv and (argv[-1] in DATABASE):
+ database = argv.pop() # database name tool_shed, galaxy, or install.
+ else:
+ database = 'galaxy'
+ database_defaults = DATABASE[ database ]
+
+ if '-c' in argv:
+ pos = argv.index( '-c' )
+ argv.pop(pos)
+ config_file = argv.pop( pos )
+ else:
+ config_file = database_defaults.get( 'config_file', DEFAULT_CONFIG_FILE )
+ repo = database_defaults[ 'repo' ]
+ config_prefix = database_defaults.get( 'config_prefix', DEFAULT_CONFIG_PREFIX )
+ default_sqlite_file = database_defaults[ 'default_sqlite_file' ]
+ if cwd:
+ config_file = os.path.join( cwd, config_file )
+
+ cp = SafeConfigParser()
+ cp.read( config_file )
+
+ if cp.has_option( "app:main", "%sdatabase_connection" % config_prefix):
+ db_url = cp.get( "app:main", "%sdatabase_connection" % config_prefix )
+ elif cp.has_option( "app:main", "%sdatabase_file" % config_prefix ):
+ db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % cp.get( "app:main", "database_file" )
+ else:
+ db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % default_sqlite_file
+
+ dialect = ( db_url.split( ':', 1 ) )[0]
+ try:
+ egg = dialect_to_egg[dialect]
+ try:
+ pkg_resources.require( egg )
+ log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
+ except:
+ # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
+ log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
+ except KeyError:
+ # Let this go, it could possibly work with db's we don't support
+ log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
+ return dict(db_url=db_url, repo=repo, config_file=config_file, database=database)
diff -r bbdb9255aa9a1b4d30e08e8da0276d28422c5805 -r 03a70b0e0b1ec1a6399b8d7ef115967d3829cffd scripts/create_db.py
--- a/scripts/create_db.py
+++ b/scripts/create_db.py
@@ -16,39 +16,24 @@
and database file
"""
-import sys, os.path, logging
+import sys
+import os.path
new_path = [ os.path.join( os.getcwd(), "lib" ) ]
-new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
sys.path = new_path
-from galaxy import eggs
+from galaxy.model.orm.scripts import get_config
from galaxy.model.migrate.check import create_or_verify_database as create_db
+from galaxy.webapps.tool_shed.model.migrate.check import create_or_verify_database as create_tool_shed_db
-import pkg_resources
-from ConfigParser import SafeConfigParser
+def invoke_create():
+ config = get_config(sys.argv)
+ if config['database'] == 'galaxy':
+ create_db(config['db_url'], config['config_file'])
+ elif config['database'] == 'tool_shed':
+ create_tool_shed_db(config['db_url'])
-log = logging.getLogger( __name__ )
-
-# Poor man's optparse
-config_file = 'universe_wsgi.ini'
-if '-c' in sys.argv:
- pos = sys.argv.index( '-c' )
- sys.argv.pop(pos)
- config_file = sys.argv.pop( pos )
-if not os.path.exists( config_file ):
- print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % config_file
- sys.exit( 1 )
-
-cp = SafeConfigParser()
-cp.read( config_file )
-
-if cp.has_option( "app:main", "database_connection" ):
- db_url = cp.get( "app:main", "database_connection" )
-elif cp.has_option( "app:main", "database_file" ):
- db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % cp.get( "app:main", "database_file" )
-else:
- db_url = "sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE"
-
-create_db(db_url, config_file)
+if __name__ == "__main__":
+ invoke_create()
diff -r bbdb9255aa9a1b4d30e08e8da0276d28422c5805 -r 03a70b0e0b1ec1a6399b8d7ef115967d3829cffd scripts/db_shell.py
--- a/scripts/db_shell.py
+++ b/scripts/db_shell.py
@@ -6,79 +6,35 @@
# >>> sa_session.add(new_user)
# >>> sa_session.commit()
# >>> sa_session.query(User).all()
-#
+#
# You can also use this script as a library, for instance see https://gist.github.com/1979583
# TODO: This script overlaps a lot with manage_db.py and create_db.py,
# these should maybe be refactored to remove duplication.
-import sys, os.path, logging
+import sys
+import os.path
-new_path = [ os.path.join( os.getcwd(), "lib" ) ]
-new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+db_shell_path = __file__
+new_path = [ os.path.join( os.path.dirname( db_shell_path ), os.path.pardir, "lib" ) ]
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
sys.path = new_path
+from galaxy.model.orm.scripts import get_config
+
from galaxy import eggs
+eggs.require( "decorator" )
+eggs.require( "Tempita" )
+eggs.require( "SQLAlchemy" )
-import pkg_resources
-pkg_resources.require( "decorator" )
-pkg_resources.require( "Tempita" )
-pkg_resources.require( "sqlalchemy-migrate" )
-pkg_resources.require( "SQLAlchemy" )
+db_url = get_config( sys.argv )['db_url']
-from ConfigParser import SafeConfigParser
-from galaxy.model.orm import dialect_to_egg
-
-log = logging.getLogger( __name__ )
-
-if sys.argv[-1] in [ 'tool_shed' ]:
- # Need to pop the last arg so the command line args will be correct
- # for sqlalchemy-migrate
- webapp = sys.argv.pop()
- config_file = 'tool_shed_wsgi.ini'
- repo = 'lib/galaxy/webapps/tool_shed/model/migrate'
-else:
- # Poor man's optparse
- config_file = 'universe_wsgi.ini'
- if '-c' in sys.argv:
- pos = sys.argv.index( '-c' )
- sys.argv.pop(pos)
- config_file = sys.argv.pop( pos )
- if not os.path.exists( config_file ):
- print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % config_file
- sys.exit( 1 )
- repo = 'lib/galaxy/model/migrate'
-
-cp = SafeConfigParser()
-cp.read( config_file )
-
-if cp.has_option( "app:main", "database_connection" ):
- db_url = cp.get( "app:main", "database_connection" )
-elif cp.has_option( "app:main", "database_file" ):
- db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % cp.get( "app:main", "database_file" )
-else:
- db_url = "sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE"
-
-dialect = ( db_url.split( ':', 1 ) )[0]
-try:
- egg = dialect_to_egg[dialect]
- try:
- pkg_resources.require( egg )
- log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
- except:
- # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
- log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
-except KeyError:
- # Let this go, it could possibly work with db's we don't support
- log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-
-# Setup DB scripting environment
+# Setup DB scripting environment
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.exc import *
engine = create_engine(db_url, echo=True)
-db_session = scoped_session( sessionmaker( bind = engine ) )
+db_session = scoped_session( sessionmaker( bind=engine ) )
from galaxy.model.mapping import context as sa_session
sa_session.bind = engine
from galaxy.model import *
-
diff -r bbdb9255aa9a1b4d30e08e8da0276d28422c5805 -r 03a70b0e0b1ec1a6399b8d7ef115967d3829cffd scripts/manage_db.py
--- a/scripts/manage_db.py
+++ b/scripts/manage_db.py
@@ -1,7 +1,11 @@
-import sys, os.path, logging
+""" This script parses Galaxy or Tool Shed config file for database connection
+and then delegates to sqlalchemy_migrate shell main function in
+migrate.versioning.shell. """
+import sys
+import os.path
new_path = [ os.path.join( os.getcwd(), "lib" ) ]
-new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
sys.path = new_path
from galaxy import eggs
@@ -12,51 +16,16 @@
eggs.require( "sqlalchemy_migrate" )
from migrate.versioning.shell import main
-from ConfigParser import SafeConfigParser
-from galaxy.model.orm import dialect_to_egg
+from galaxy.model.orm.scripts import get_config
-log = logging.getLogger( __name__ )
-if sys.argv[-1] in [ 'tool_shed' ]:
- # Need to pop the last arg so the command line args will be correct
- # for sqlalchemy-migrate
- webapp = sys.argv.pop()
- config_file = 'tool_shed_wsgi.ini'
- repo = 'lib/galaxy/webapps/tool_shed/model/migrate'
-else:
- # Poor man's optparse
- config_file = 'universe_wsgi.ini'
- if '-c' in sys.argv:
- pos = sys.argv.index( '-c' )
- sys.argv.pop(pos)
- config_file = sys.argv.pop( pos )
- if not os.path.exists( config_file ):
- print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % config_file
- sys.exit( 1 )
- repo = 'lib/galaxy/model/migrate'
+def invoke_migrate_main():
+ config = get_config( sys.argv )
+ db_url = config['db_url']
+ repo = config['repo']
-cp = SafeConfigParser()
-cp.read( config_file )
+ main( repository=repo, url=db_url )
-if cp.has_option( "app:main", "database_connection" ):
- db_url = cp.get( "app:main", "database_connection" )
-elif cp.has_option( "app:main", "database_file" ):
- db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % cp.get( "app:main", "database_file" )
-else:
- db_url = "sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE"
-
-dialect = ( db_url.split( ':', 1 ) )[0]
-try:
- egg = dialect_to_egg[dialect]
- try:
- eggs.require( egg )
- log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
- except:
- # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
- log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
-except KeyError:
- # Let this go, it could possibly work with db's we don't support
- log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-
-main( repository=repo, url=db_url )
+if __name__ == "__main__":
+ invoke_migrate_main()
https://bitbucket.org/galaxy/galaxy-central/commits/317df34ad00c/
Changeset: 317df34ad00c
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Refactor some common code between tool shed and Galaxy into galaxy.model.orm.
Affected #: 3 files
diff -r 03a70b0e0b1ec1a6399b8d7ef115967d3829cffd -r 317df34ad00c8087a8116a5ccb4679788ce3821c lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -16,7 +16,7 @@
from sqlalchemy.orm.collections import attribute_mapped_collection
from galaxy import model
-from galaxy.model.orm import dialect_to_egg
+from galaxy.model.orm import load_egg_for_url
from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType
from galaxy.security import GalaxyRBACAgent
from galaxy.util.bunch import Bunch
@@ -1998,23 +1998,6 @@
model.History._next_hid = db_next_hid
-def guess_dialect_for_url( url ):
- return (url.split(':', 1))[0]
-
-def load_egg_for_url( url ):
- # Load the appropriate db module
- dialect = guess_dialect_for_url( url )
- try:
- egg = dialect_to_egg[dialect]
- try:
- pkg_resources.require( egg )
- log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
- except:
- # If the module's in the path elsewhere (i.e. non-egg), it'll still load.
- log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
- except KeyError:
- # Let this go, it could possibly work with db's we don't support
- log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None, trace_logger=None, use_pbkdf2=True ):
"""Connect mappings to the database"""
diff -r 03a70b0e0b1ec1a6399b8d7ef115967d3829cffd -r 317df34ad00c8087a8116a5ccb4679788ce3821c lib/galaxy/model/orm/__init__.py
--- a/lib/galaxy/model/orm/__init__.py
+++ b/lib/galaxy/model/orm/__init__.py
@@ -4,13 +4,34 @@
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.interfaces import *
-import sqlalchemy.exc
-from sqlalchemy.ext.orderinglist import ordering_list
+import logging
+log = logging.getLogger( __name__ )
+
dialect_to_egg = {
- "sqlite" : "pysqlite>=2",
- "postgres" : "psycopg2",
- "postgresql" : "psycopg2",
- "mysql" : "MySQL_python"
+ "sqlite": "pysqlite>=2",
+ "postgres": "psycopg2",
+ "postgresql": "psycopg2",
+ "mysql": "MySQL_python"
}
+
+
+def load_egg_for_url( url ):
+ # Load the appropriate db module
+ dialect = __guess_dialect_for_url( url )
+ try:
+ egg = dialect_to_egg[dialect]
+ try:
+ pkg_resources.require( egg )
+ log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
+ except:
+ # If the module's in the path elsewhere (i.e. non-egg), it'll still load.
+ log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
+ except KeyError:
+ # Let this go, it could possibly work with db's we don't support
+ log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
+
+
+def __guess_dialect_for_url( url ):
+ return (url.split(':', 1))[0]
diff -r 03a70b0e0b1ec1a6399b8d7ef115967d3829cffd -r 317df34ad00c8087a8116a5ccb4679788ce3821c lib/galaxy/webapps/tool_shed/model/mapping.py
--- a/lib/galaxy/webapps/tool_shed/model/mapping.py
+++ b/lib/galaxy/webapps/tool_shed/model/mapping.py
@@ -5,14 +5,13 @@
import logging
log = logging.getLogger( __name__ )
-import sys
import datetime
from galaxy.webapps.tool_shed.model import *
from galaxy.model.orm import *
from galaxy.model.custom_types import *
from galaxy.util.bunch import Bunch
-from galaxy.model.orm import dialect_to_egg
+from galaxy.model.orm import load_egg_for_url
import galaxy.webapps.tool_shed.util.shed_statistics as shed_statistics
import galaxy.webapps.tool_shed.util.hgweb_config
from galaxy.webapps.tool_shed.security import CommunityRBACAgent
@@ -307,23 +306,6 @@
category=relation( Category ),
repository=relation( Repository ) ) )
-def guess_dialect_for_url( url ):
- return (url.split(':', 1))[0]
-
-def load_egg_for_url( url ):
- # Load the appropriate db module
- dialect = guess_dialect_for_url( url )
- try:
- egg = dialect_to_egg[dialect]
- try:
- pkg_resources.require( egg )
- log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
- except:
- # If the module's in the path elsewhere (i.e. non-egg), it'll still load.
- log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
- except KeyError:
- # Let this go, it could possibly work with db's we don't support
- log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
def init( file_path, url, engine_options={}, create_tables=False ):
"""Connect mappings to the database"""
https://bitbucket.org/galaxy/galaxy-central/commits/be3b177c9bf2/
Changeset: be3b177c9bf2
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Refactor common code out between Galaxy and Tool Shed related to create_engine.
Affected #: 3 files
diff -r 317df34ad00c8087a8116a5ccb4679788ce3821c -r be3b177c9bf23abef7f64c9ee64f39ff23524764 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -8,7 +8,7 @@
import pkg_resources
import inspect
-from sqlalchemy import and_, asc, Boolean, Column, create_engine, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint
+from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.types import BigInteger
@@ -16,12 +16,11 @@
from sqlalchemy.orm.collections import attribute_mapped_collection
from galaxy import model
-from galaxy.model.orm import load_egg_for_url
+from galaxy.model.orm.engine_factory import build_engine
from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType
from galaxy.security import GalaxyRBACAgent
from galaxy.util.bunch import Bunch
-
log = logging.getLogger( __name__ )
metadata = MetaData()
@@ -2008,19 +2007,8 @@
# Use PBKDF2 password hashing?
model.User.use_pbkdf2 = use_pbkdf2
# Load the appropriate db module
- load_egg_for_url( url )
- # Should we use the logging proxy?
- if database_query_profiling_proxy:
- import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
- proxy = logging_connection_proxy.LoggingProxy()
- # If metlog is enabled, do micrologging
- elif trace_logger:
- import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
- proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
- else:
- proxy = None
- # Create the database engine
- engine = create_engine( url, proxy=proxy, **engine_options )
+ engine = build_engine( url, engine_options, database_query_profiling_proxy, trace_logger )
+
# Connect the metadata to the database.
metadata.bind = engine
# Clear any existing contextual sessions and reconfigure
diff -r 317df34ad00c8087a8116a5ccb4679788ce3821c -r be3b177c9bf23abef7f64c9ee64f39ff23524764 lib/galaxy/model/orm/engine_factory.py
--- /dev/null
+++ b/lib/galaxy/model/orm/engine_factory.py
@@ -0,0 +1,24 @@
+import logging
+log = logging.getLogger( __name__ )
+
+from sqlalchemy import create_engine
+from galaxy.model.orm import load_egg_for_url
+
+
+def build_engine(url, engine_options, database_query_profiling_proxy=False, trace_logger=None):
+ load_egg_for_url( url )
+
+ # Should we use the logging proxy?
+ if database_query_profiling_proxy:
+ import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
+ proxy = logging_connection_proxy.LoggingProxy()
+ # If metlog is enabled, do micrologging
+ elif trace_logger:
+ import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
+ proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
+ else:
+ proxy = None
+
+ # Create the database engine
+ engine = create_engine( url, proxy=proxy, **engine_options )
+ return engine
diff -r 317df34ad00c8087a8116a5ccb4679788ce3821c -r be3b177c9bf23abef7f64c9ee64f39ff23524764 lib/galaxy/webapps/tool_shed/model/mapping.py
--- a/lib/galaxy/webapps/tool_shed/model/mapping.py
+++ b/lib/galaxy/webapps/tool_shed/model/mapping.py
@@ -11,7 +11,7 @@
from galaxy.model.orm import *
from galaxy.model.custom_types import *
from galaxy.util.bunch import Bunch
-from galaxy.model.orm import load_egg_for_url
+from galaxy.model.orm.engine_factory import build_engine
import galaxy.webapps.tool_shed.util.shed_statistics as shed_statistics
import galaxy.webapps.tool_shed.util.hgweb_config
from galaxy.webapps.tool_shed.security import CommunityRBACAgent
@@ -309,10 +309,8 @@
def init( file_path, url, engine_options={}, create_tables=False ):
"""Connect mappings to the database"""
- # Load the appropriate db module
- load_egg_for_url( url )
# Create the database engine
- engine = create_engine( url, **engine_options )
+ engine = build_engine( url, engine_options )
# Connect the metadata to the database.
metadata.bind = engine
# Clear any existing contextual sessions and reconfigure
https://bitbucket.org/galaxy/galaxy-central/commits/c0b3c4a3b438/
Changeset: c0b3c4a3b438
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Introduce real class for app.model (ModelMapping).
Shared across Tool Shed and Galaxy to reduce code duplication. Previously this was just a bunch, making it a real class allows placing some shared logic in there.
This refactoring also eliminates some global variables galaxy.model.mapping.Session, galaxy.model.mapping.context, and same for tool shed. This may break things, but these are things that should probably be fixed anyway.
In particular small changes to db_shell.py and a unit test that depended on these global variables have been updated. The functional test frameworks likewise needed to be updated to not depend on these - these changes were more substantial.
To fix these functional tests, I essentially replace old references to global variables in Galaxy with references to global variables just defined in the test framework in test/functional/database_contexts.py (a slight improvement).
Affected #: 19 files
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 lib/galaxy/model/base.py
--- /dev/null
+++ b/lib/galaxy/model/base.py
@@ -0,0 +1,39 @@
+"""
+Shared model and mapping code between Galaxy and Tool Shed, trying to
+generalize to generic database connections.
+"""
+
+from sqlalchemy.orm import scoped_session, sessionmaker
+from galaxy.util.bunch import Bunch
+from inspect import getmembers, isclass
+
+
+# TODO: Refactor this to be a proper class, not a bunch.
+class ModelMapping(Bunch):
+
+ def __init__(self, model_modules, engine):
+ self.engine = engine
+ context = scoped_session( sessionmaker( autoflush=False, autocommit=True ) )
+ # For backward compatibility with "context.current"
+ # deprecated?
+ context.current = context
+ self.context = context
+ self.session = context
+
+ model_classes = {}
+ for module in model_modules:
+ m_obs = getmembers(module, isclass)
+ m_obs = dict([m for m in m_obs if m[1].__module__ == module.__name__])
+ model_classes.update(m_obs)
+
+ super(ModelMapping, self).__init__(**model_classes)
+
+ context.remove()
+ context.configure( bind=engine )
+
+ @property
+ def Session(self):
+ """
+ For backward compat., deprecated.
+ """
+ return self.context
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -6,29 +6,23 @@
import datetime
import logging
import pkg_resources
-import inspect
from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.types import BigInteger
-from sqlalchemy.orm import backref, object_session, relation, scoped_session, sessionmaker, mapper, class_mapper
+from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper
from sqlalchemy.orm.collections import attribute_mapped_collection
from galaxy import model
from galaxy.model.orm.engine_factory import build_engine
from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType
+from galaxy.model.base import ModelMapping
from galaxy.security import GalaxyRBACAgent
-from galaxy.util.bunch import Bunch
log = logging.getLogger( __name__ )
metadata = MetaData()
-context = Session = scoped_session( sessionmaker( autoflush=False, autocommit=True ) )
-
-# For backward compatibility with "context.current"
-context.current = Session
-
# NOTE REGARDING TIMESTAMPS:
# It is currently difficult to have the timestamps calculated by the
@@ -2011,23 +2005,14 @@
# Connect the metadata to the database.
metadata.bind = engine
- # Clear any existing contextual sessions and reconfigure
- Session.remove()
- Session.configure( bind=engine )
+
+ result = ModelMapping([model], engine=engine)
+
# Create tables if needed
if create_tables:
metadata.create_all()
# metadata.engine.commit()
- # Pack everything into a bunch -- inspecting should work better than
- # grabbing everything in globals(), but it's possible that there's code
- # working somewhere that expects random stuff we'd incidentally included.
- m_obs = inspect.getmembers(model, inspect.isclass)
- m_obs = dict([m for m in m_obs if m[1].__module__ == 'galaxy.model'])
- result = Bunch( **m_obs )
- result.engine = engine
- result.session = Session
- # For backward compatibility with "model.context.current"
- result.context = Session
+
result.create_tables = create_tables
#load local galaxy security policy
result.security_agent = GalaxyRBACAgent( result )
@@ -2037,4 +2022,3 @@
"""Get unittest suite for this module"""
import unittest, mapping_tests
return unittest.makeSuite( mapping_tests.MappingTests )
-
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 lib/galaxy/model/mapping_tests.py
--- a/lib/galaxy/model/mapping_tests.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import unittest
-import galaxy.model.mapping as mapping
-from galaxy.model import directory_hash_id
-import os.path
-
-class MappingTests( unittest.TestCase ):
- def test_basic( self ):
- # Start the database and connect the mapping
- model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True )
- assert model.engine is not None
- # Make some changes and commit them
- u = model.User( email="james(a)foo.bar.baz", password="password" )
- # gs = model.GalaxySession()
- h1 = model.History( name="History 1", user=u)
- #h1.queries.append( model.Query( "h1->q1" ) )
- #h1.queries.append( model.Query( "h1->q2" ) )
- h2 = model.History( name=( "H" * 1024 ) )
- model.session.add_all( ( u, h1, h2 ) )
- #q1 = model.Query( "h2->q1" )
- d1 = model.HistoryDatasetAssociation( extension="interval", metadata=dict(chromCol=1,startCol=2,endCol=3 ), history=h2, create_dataset=True, sa_session=model.session )
- #h2.queries.append( q1 )
- #h2.queries.append( model.Query( "h2->q2" ) )
- model.session.add( ( d1 ) )
- model.session.flush()
- model.session.expunge_all()
- # Check
- users = model.session.query( model.User ).all()
- assert len( users ) == 1
- assert users[0].email == "james(a)foo.bar.baz"
- assert users[0].password == "password"
- assert len( users[0].histories ) == 1
- assert users[0].histories[0].name == "History 1"
- hists = model.session.query( model.History ).all()
- assert hists[0].name == "History 1"
- assert hists[1].name == ( "H" * 255 )
- assert hists[0].user == users[0]
- assert hists[1].user is None
- assert hists[1].datasets[0].metadata.chromCol == 1
- # The filename test has moved to objecstore
- #id = hists[1].datasets[0].id
- #assert hists[1].datasets[0].file_name == os.path.join( "/tmp", *directory_hash_id( id ) ) + ( "/dataset_%d.dat" % id )
- # Do an update and check
- hists[1].name = "History 2b"
- model.session.flush()
- model.session.expunge_all()
- hists = model.session.query( model.History ).all()
- assert hists[0].name == "History 1"
- assert hists[1].name == "History 2b"
- # gvk TODO need to ad test for GalaxySessions, but not yet sure what they should look like.
-
-def get_suite():
- suite = unittest.TestSuite()
- suite.addTest( MappingTests( "test_basic" ) )
- return suite
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1062,7 +1062,8 @@
>>> # Mock up a history (not connected to database)
>>> from galaxy.model import History, HistoryDatasetAssociation
>>> from galaxy.util.bunch import Bunch
- >>> from galaxy.model.mapping import context as sa_session
+ >>> from galaxy.model.mapping import init
+ >>> sa_session = init( "/tmp", "sqlite:///:memory:", create_tables=True ).session
>>> hist = History()
>>> sa_session.add( hist )
>>> sa_session.flush()
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 lib/galaxy/webapps/tool_shed/model/mapping.py
--- a/lib/galaxy/webapps/tool_shed/model/mapping.py
+++ b/lib/galaxy/webapps/tool_shed/model/mapping.py
@@ -8,19 +8,16 @@
import datetime
from galaxy.webapps.tool_shed.model import *
+import galaxy.webapps.tool_shed.model
from galaxy.model.orm import *
from galaxy.model.custom_types import *
-from galaxy.util.bunch import Bunch
from galaxy.model.orm.engine_factory import build_engine
+from galaxy.model.base import ModelMapping
import galaxy.webapps.tool_shed.util.shed_statistics as shed_statistics
import galaxy.webapps.tool_shed.util.hgweb_config
from galaxy.webapps.tool_shed.security import CommunityRBACAgent
metadata = MetaData()
-context = Session = scoped_session( sessionmaker( autoflush=False, autocommit=True ) )
-
-# For backward compatibility with "context.current"
-context.current = Session
# NOTE REGARDING TIMESTAMPS:
# It is currently difficult to have the timestamps calculated by the
@@ -313,17 +310,14 @@
engine = build_engine( url, engine_options )
# Connect the metadata to the database.
metadata.bind = engine
- # Clear any existing contextual sessions and reconfigure
- Session.remove()
- Session.configure( bind=engine )
- # Create tables if needed
+
+ result = ModelMapping([galaxy.webapps.tool_shed.model], engine=engine)
+
if create_tables:
metadata.create_all()
- # Pack everything into a bunch
- result = Bunch( **globals() )
- result.engine = engine
- result.session = Session
+
result.create_tables = create_tables
+
# Load local tool shed security policy
result.security_agent = CommunityRBACAgent( result )
result.shed_counter = shed_statistics.ShedCounter( result )
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 scripts/db_shell.py
--- a/scripts/db_shell.py
+++ b/scripts/db_shell.py
@@ -33,8 +33,6 @@
from sqlalchemy.orm import *
from sqlalchemy.exc import *
-engine = create_engine(db_url, echo=True)
-db_session = scoped_session( sessionmaker( bind=engine ) )
-from galaxy.model.mapping import context as sa_session
-sa_session.bind = engine
+from galaxy.model.mapping import init
+sa_session = init( '/tmp/', db_url ).context
from galaxy.model import *
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -50,6 +50,8 @@
from galaxy import util
from galaxy.util.json import to_json_string
+from functional import database_contexts
+
import nose.core
import nose.config
import nose.loader
@@ -368,6 +370,7 @@
kwargs[ 'config_file' ] = galaxy_config_file
# Build the Universe Application
app = UniverseApplication( **kwargs )
+ database_contexts.galaxy_context = app.model.context
log.info( "Embedded Universe application started" )
# ---- Run webserver ------------------------------------------------------
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/base/interactor.py
--- a/test/base/interactor.py
+++ b/test/base/interactor.py
@@ -4,7 +4,7 @@
from galaxy.util.odict import odict
import galaxy.model
from galaxy.model.orm import and_, desc
-from galaxy.model.mapping import context as sa_session
+from base.test_db_util import sa_session
from simplejson import dumps, loads
from logging import getLogger
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/base/test_db_util.py
--- a/test/base/test_db_util.py
+++ b/test/base/test_db_util.py
@@ -1,6 +1,6 @@
import galaxy.model
from galaxy.model.orm import *
-from galaxy.model.mapping import context as sa_session
+from functional.database_contexts import galaxy_context as sa_session
from base.twilltestcase import *
import sys
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/functional/database_contexts.py
--- /dev/null
+++ b/test/functional/database_contexts.py
@@ -0,0 +1,5 @@
+# Global variables to pass database contexts around. Fairly hackish that they
+# are shared this way, but at least they have been moved out of Galaxy's lib/
+# code base.
+galaxy_context = None
+tool_shed_context = None
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/functional/test_DNAse_flanked_genes.py
--- a/test/functional/test_DNAse_flanked_genes.py
+++ b/test/functional/test_DNAse_flanked_genes.py
@@ -1,6 +1,6 @@
import galaxy.model
from galaxy.model.orm import *
-from galaxy.model.mapping import context as sa_session
+from base.test_db_util import sa_session
from base.twilltestcase import TwillTestCase
""" A sample analysis"""
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/functional/test_get_data.py
--- a/test/functional/test_get_data.py
+++ b/test/functional/test_get_data.py
@@ -1,6 +1,5 @@
import galaxy.model
from galaxy.model.orm import *
-from galaxy.model.mapping import context as sa_session
from base.twilltestcase import TwillTestCase
from base.test_db_util import *
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py
+++ b/test/functional/test_history_functions.py
@@ -1,7 +1,7 @@
import urllib
import galaxy.model
from galaxy.model.orm import *
-from galaxy.model.mapping import context as sa_session
+from base.test_db_util import sa_session
from base.twilltestcase import *
class TestHistory( TwillTestCase ):
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/functional/test_metadata_editing.py
--- a/test/functional/test_metadata_editing.py
+++ b/test/functional/test_metadata_editing.py
@@ -1,6 +1,6 @@
import galaxy.model
from galaxy.model.orm import *
-from galaxy.model.mapping import context as sa_session
+from base.test_db_util import sa_session
from base.twilltestcase import TwillTestCase
class TestMetadataEdit( TwillTestCase ):
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/install_and_test_tool_shed_repositories/base/test_db_util.py
--- a/test/install_and_test_tool_shed_repositories/base/test_db_util.py
+++ b/test/install_and_test_tool_shed_repositories/base/test_db_util.py
@@ -1,7 +1,7 @@
import logging
import galaxy.model as model
from galaxy.model.orm import and_
-from galaxy.model.mapping import context as sa_session
+from functional.database_contexts import galaxy_context as sa_session
log = logging.getLogger(__name__)
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -61,6 +61,9 @@
from tool_shed.util import tool_dependency_util
from tool_shed.util.xml_util import parse_xml
+from functional import database_contexts
+
+
log = logging.getLogger( 'install_and_test_repositories' )
assert sys.version_info[ :2 ] >= ( 2, 6 )
@@ -934,6 +937,8 @@
kwargs[ 'database_engine_option_pool_size' ] = '10'
kwargs[ 'config_file' ] = galaxy_config_file
app = UniverseApplication( **kwargs )
+ database_contexts.galaxy_context = app.model.context
+
log.debug( "Embedded Galaxy application started..." )
# ---- Run galaxy webserver ------------------------------------------------------
server = None
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/tool_shed/base/test_db_util.py
--- a/test/tool_shed/base/test_db_util.py
+++ b/test/tool_shed/base/test_db_util.py
@@ -1,8 +1,10 @@
import galaxy.model, logging
import galaxy.webapps.tool_shed.model as model
from galaxy.model.orm import *
-from galaxy.webapps.tool_shed.model.mapping import context as sa_session
-from galaxy.model.mapping import context as ga_session
+
+from functional.database_contexts import tool_shed_context as sa_session
+from functional.database_contexts import galaxy_context as ga_session
+
log = logging.getLogger( 'test.tool_shed.test_db_util' )
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -11,7 +11,7 @@
# the full path to the temporary directroy wher eht repositories are located cannot contain invalid url characters.
tool_shed_test_tmp_dir = os.path.join( tool_shed_home_directory, 'tmp' )
os.environ[ 'TOOL_SHED_TEST_TMP_DIR' ] = tool_shed_test_tmp_dir
-new_path = [ os.path.join( cwd, "lib" ) ]
+new_path = [ os.path.join( cwd, "lib" ), os.path.join( cwd, "test" ) ]
new_path.extend( sys.path[1:] )
sys.path = new_path
@@ -47,6 +47,8 @@
import nose.loader
import nose.plugins.manager
+from functional import database_contexts
+
log = logging.getLogger( "tool_shed_functional_tests.py" )
default_tool_shed_test_host = "localhost"
@@ -254,7 +256,7 @@
kwargs[ 'database_engine_option_pool_size' ] = '10'
toolshedapp = ToolshedUniverseApplication( **kwargs )
-
+ database_contexts.tool_shed_context = toolshedapp.model.context
log.info( "Embedded Toolshed application started" )
# ---- Run tool shed webserver ------------------------------------------------------
@@ -352,7 +354,7 @@
kwargs[ 'database_engine_option_pool_size' ] = '10'
kwargs[ 'database_engine_option_max_overflow' ] = '20'
galaxyapp = GalaxyUniverseApplication( **kwargs )
-
+
log.info( "Embedded Galaxy application started" )
# ---- Run galaxy webserver ------------------------------------------------------
@@ -362,7 +364,7 @@
use_translogger=False,
static_enabled=True,
app=galaxyapp )
-
+ database_contexts.galaxy_context = galaxyapp.model.context
if galaxy_test_port is not None:
galaxy_server = httpserver.serve( galaxywebapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
else:
diff -r be3b177c9bf23abef7f64c9ee64f39ff23524764 -r c0b3c4a3b43881ababfa066672aa1f1734c44484 test/unit/test_galaxy_mapping.py
--- /dev/null
+++ b/test/unit/test_galaxy_mapping.py
@@ -0,0 +1,54 @@
+import unittest
+import galaxy.model.mapping as mapping
+from galaxy.model import directory_hash_id
+import os.path
+
+class MappingTests( unittest.TestCase ):
+ def test_basic( self ):
+ # Start the database and connect the mapping
+ model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True )
+ assert model.engine is not None
+ # Make some changes and commit them
+ u = model.User( email="james(a)foo.bar.baz", password="password" )
+ # gs = model.GalaxySession()
+ h1 = model.History( name="History 1", user=u)
+ #h1.queries.append( model.Query( "h1->q1" ) )
+ #h1.queries.append( model.Query( "h1->q2" ) )
+ h2 = model.History( name=( "H" * 1024 ) )
+ model.session.add_all( ( u, h1, h2 ) )
+ #q1 = model.Query( "h2->q1" )
+ d1 = model.HistoryDatasetAssociation( extension="interval", metadata=dict(chromCol=1,startCol=2,endCol=3 ), history=h2, create_dataset=True, sa_session=model.session )
+ #h2.queries.append( q1 )
+ #h2.queries.append( model.Query( "h2->q2" ) )
+ model.session.add( ( d1 ) )
+ model.session.flush()
+ model.session.expunge_all()
+ # Check
+ users = model.session.query( model.User ).all()
+ assert len( users ) == 1
+ assert users[0].email == "james(a)foo.bar.baz"
+ assert users[0].password == "password"
+ assert len( users[0].histories ) == 1
+ assert users[0].histories[0].name == "History 1"
+ hists = model.session.query( model.History ).all()
+ assert hists[0].name == "History 1"
+ assert hists[1].name == ( "H" * 255 )
+ assert hists[0].user == users[0]
+ assert hists[1].user is None
+ assert hists[1].datasets[0].metadata.chromCol == 1
+ # The filename test has moved to objecstore
+ #id = hists[1].datasets[0].id
+ #assert hists[1].datasets[0].file_name == os.path.join( "/tmp", *directory_hash_id( id ) ) + ( "/dataset_%d.dat" % id )
+ # Do an update and check
+ hists[1].name = "History 2b"
+ model.session.flush()
+ model.session.expunge_all()
+ hists = model.session.query( model.History ).all()
+ assert hists[0].name == "History 1"
+ assert hists[1].name == "History 2b"
+ # gvk TODO need to ad test for GalaxySessions, but not yet sure what they should look like.
+
+def get_suite():
+ suite = unittest.TestSuite()
+ suite.addTest( MappingTests( "test_basic" ) )
+ return suite
https://bitbucket.org/galaxy/galaxy-central/commits/e85f672b0129/
Changeset: e85f672b0129
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Random fixes for mapping tests.
PEP-8. Get rid of old test suite code mixed into real code.
Affected #: 2 files
diff -r c0b3c4a3b43881ababfa066672aa1f1734c44484 -r e85f672b01290ab03563d21e853a6e726633efd2 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -2017,8 +2017,3 @@
#load local galaxy security policy
result.security_agent = GalaxyRBACAgent( result )
return result
-
-def get_suite():
- """Get unittest suite for this module"""
- import unittest, mapping_tests
- return unittest.makeSuite( mapping_tests.MappingTests )
diff -r c0b3c4a3b43881ababfa066672aa1f1734c44484 -r e85f672b01290ab03563d21e853a6e726633efd2 test/unit/test_galaxy_mapping.py
--- a/test/unit/test_galaxy_mapping.py
+++ b/test/unit/test_galaxy_mapping.py
@@ -1,7 +1,6 @@
import unittest
import galaxy.model.mapping as mapping
-from galaxy.model import directory_hash_id
-import os.path
+
class MappingTests( unittest.TestCase ):
def test_basic( self ):
@@ -17,7 +16,8 @@
h2 = model.History( name=( "H" * 1024 ) )
model.session.add_all( ( u, h1, h2 ) )
#q1 = model.Query( "h2->q1" )
- d1 = model.HistoryDatasetAssociation( extension="interval", metadata=dict(chromCol=1,startCol=2,endCol=3 ), history=h2, create_dataset=True, sa_session=model.session )
+ metadata = dict( chromCol=1, startCol=2, endCol=3 )
+ d1 = model.HistoryDatasetAssociation( extension="interval", metadata=metadata, history=h2, create_dataset=True, sa_session=model.session )
#h2.queries.append( q1 )
#h2.queries.append( model.Query( "h2->q2" ) )
model.session.add( ( d1 ) )
@@ -48,6 +48,7 @@
assert hists[1].name == "History 2b"
# gvk TODO need to ad test for GalaxySessions, but not yet sure what they should look like.
+
def get_suite():
suite = unittest.TestSuite()
suite.addTest( MappingTests( "test_basic" ) )
https://bitbucket.org/galaxy/galaxy-central/commits/23577dc89ace/
Changeset: 23577dc89ace
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Unify 'now' import and comment across galaxy and toolshed.
Affected #: 3 files
diff -r e85f672b01290ab03563d21e853a6e726633efd2 -r 23577dc89ace1f775003153f87a4941feeeda8f4 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -3,7 +3,6 @@
are encapsulated here.
"""
-import datetime
import logging
import pkg_resources
@@ -16,6 +15,7 @@
from galaxy import model
from galaxy.model.orm.engine_factory import build_engine
+from galaxy.model.orm.now import now
from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType
from galaxy.model.base import ModelMapping
from galaxy.security import GalaxyRBACAgent
@@ -24,15 +24,6 @@
metadata = MetaData()
-# NOTE REGARDING TIMESTAMPS:
-# It is currently difficult to have the timestamps calculated by the
-# database in a portable way, so we're doing it in the client. This
-# also saves us from needing to postfetch on postgres. HOWEVER: it
-# relies on the client's clock being set correctly, so if clustering
-# web servers, use a time server to ensure synchronization
-
-# Return the current time in UTC without any timezone information
-now = datetime.datetime.utcnow
model.User.table = Table( "galaxy_user", metadata,
Column( "id", Integer, primary_key=True),
diff -r e85f672b01290ab03563d21e853a6e726633efd2 -r 23577dc89ace1f775003153f87a4941feeeda8f4 lib/galaxy/model/orm/now.py
--- /dev/null
+++ b/lib/galaxy/model/orm/now.py
@@ -0,0 +1,13 @@
+from datetime import datetime
+
+# NOTE REGARDING TIMESTAMPS:
+# It is currently difficult to have the timestamps calculated by the
+# database in a portable way, so we're doing it in the client. This
+# also saves us from needing to postfetch on postgres. HOWEVER: it
+# relies on the client's clock being set correctly, so if clustering
+# web servers, use a time server to ensure synchronization
+
+# Return the current time in UTC without any timezone information
+now = datetime.utcnow
+
+__all__ = [now]
diff -r e85f672b01290ab03563d21e853a6e726633efd2 -r 23577dc89ace1f775003153f87a4941feeeda8f4 lib/galaxy/webapps/tool_shed/model/mapping.py
--- a/lib/galaxy/webapps/tool_shed/model/mapping.py
+++ b/lib/galaxy/webapps/tool_shed/model/mapping.py
@@ -5,13 +5,12 @@
import logging
log = logging.getLogger( __name__ )
-import datetime
-
from galaxy.webapps.tool_shed.model import *
import galaxy.webapps.tool_shed.model
from galaxy.model.orm import *
from galaxy.model.custom_types import *
from galaxy.model.orm.engine_factory import build_engine
+from galaxy.model.orm.now import now
from galaxy.model.base import ModelMapping
import galaxy.webapps.tool_shed.util.shed_statistics as shed_statistics
import galaxy.webapps.tool_shed.util.hgweb_config
@@ -19,15 +18,6 @@
metadata = MetaData()
-# NOTE REGARDING TIMESTAMPS:
-# It is currently difficult to have the timestamps calculated by the
-# database in a portable way, so we're doing it in the client. This
-# also saves us from needing to postfetch on postgres. HOWEVER: it
-# relies on the client's clock being set correctly, so if clustering
-# web servers, use a time server to ensure synchronization
-
-# Return the current time in UTC without any timezone information
-now = datetime.datetime.utcnow
APIKeys.table = Table( "api_keys", metadata,
Column( "id", Integer, primary_key=True ),
https://bitbucket.org/galaxy/galaxy-central/commits/920cbad98f9f/
Changeset: 920cbad98f9f
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Move tool shed install related models and mapping into own package.
Specifically, refactor all tool shed installation related models and mapping information in Galaxy's models into new module galaxy.model.tool_shed_install.
This is work toward being able to store tool shed installation data into a second database. There is still a lot TODO, all access to these models are still through galaxy.model and the existing ModelMapping object, these references need to be refactored.
More information on the end goal here: http://dev.list.galaxyproject.org/Galaxy-less-Tool-Installing-tp4660655.html.
Affected #: 6 files
diff -r 23577dc89ace1f775003153f87a4941feeeda8f4 -r 920cbad98f9f25b7b7612d64639e901680233715 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -26,7 +26,7 @@
from galaxy.datatypes.metadata import MetadataCollection
from galaxy.model.item_attrs import Dictifiable, UsesAnnotations
from galaxy.security import get_permitted_actions
-from galaxy.util import asbool, is_multi_byte, nice_size, Params, restore_text, send_mail
+from galaxy.util import is_multi_byte, nice_size, Params, restore_text, send_mail
from galaxy.util.bunch import Bunch
from galaxy.util.hash_util import new_secure_hash
from galaxy.util.directory_hash import directory_hash_id
@@ -36,7 +36,6 @@
WorkflowMappingField)
from sqlalchemy.orm import object_session
from sqlalchemy.sql.expression import func
-from tool_shed.util import common_util
log = logging.getLogger( __name__ )
@@ -44,6 +43,12 @@
# Default Value Required for unit tests
datatypes_registry.load_datatypes()
+# TODO: Refactor references to these classes to eliminate need for this
+# import.
+from .tool_shed_install import ToolShedRepository, RepositoryRepositoryDependencyAssociation, RepositoryDependency
+from .tool_shed_install import ToolDependency, ToolVersion, ToolVersionAssociation, MigrateTools
+
+
class NoConverterException(Exception):
def __init__(self, value):
self.value = value
@@ -3405,611 +3410,3 @@
class APIKeys( object ):
pass
-
-
-class ToolShedRepository( object ):
- dict_collection_visible_keys = ( 'id', 'tool_shed', 'name', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
- 'tool_shed_status', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
- dict_element_visible_keys = ( 'id', 'tool_shed', 'name', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
- 'tool_shed_status', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
- installation_status = Bunch( NEW='New',
- CLONING='Cloning',
- SETTING_TOOL_VERSIONS='Setting tool versions',
- INSTALLING_REPOSITORY_DEPENDENCIES='Installing repository dependencies',
- INSTALLING_TOOL_DEPENDENCIES='Installing tool dependencies',
- LOADING_PROPRIETARY_DATATYPES='Loading proprietary datatypes',
- INSTALLED='Installed',
- DEACTIVATED='Deactivated',
- ERROR='Error',
- UNINSTALLED='Uninstalled' )
- states = Bunch( INSTALLING = 'running',
- OK = 'ok',
- WARNING = 'queued',
- ERROR = 'error',
- UNINSTALLED = 'deleted_new' )
-
- def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None, installed_changeset_revision=None,
- changeset_revision=None, ctx_rev=None, metadata=None, includes_datatypes=False, tool_shed_status=None, deleted=False,
- uninstalled=False, dist_to_shed=False, status=None, error_message=None ):
- self.id = id
- self.create_time = create_time
- self.tool_shed = tool_shed
- self.name = name
- self.description = description
- self.owner = owner
- self.installed_changeset_revision = installed_changeset_revision
- self.changeset_revision = changeset_revision
- self.ctx_rev = ctx_rev
- self.metadata = metadata
- self.includes_datatypes = includes_datatypes
- self.tool_shed_status = tool_shed_status
- self.deleted = deleted
- self.uninstalled = uninstalled
- self.dist_to_shed = dist_to_shed
- self.status = status
- self.error_message = error_message
-
- def as_dict( self, value_mapper=None ):
- return self.to_dict( view='element', value_mapper=value_mapper )
-
- @property
- def can_install( self ):
- return self.status == self.installation_status.NEW
-
- @property
- def can_reset_metadata( self ):
- return self.status == self.installation_status.INSTALLED
-
- @property
- def can_uninstall( self ):
- return self.status != self.installation_status.UNINSTALLED
-
- @property
- def can_deactivate( self ):
- return self.status not in [ self.installation_status.DEACTIVATED, self.installation_status.UNINSTALLED ]
-
- @property
- def can_reinstall_or_activate( self ):
- return self.deleted
-
- def get_shed_config_filename( self ):
- shed_config_filename = None
- if self.metadata:
- shed_config_filename = self.metadata.get( 'shed_config_filename', shed_config_filename )
- return shed_config_filename
-
- def get_shed_config_dict( self, app, default=None ):
- """
- Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
- in the shed_tool_conf_dict.
- """
- if not self.shed_config_filename:
- self.guess_shed_config( app, default=default )
- if self.shed_config_filename:
- for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
- if self.shed_config_filename == shed_tool_conf_dict[ 'config_filename' ]:
- return shed_tool_conf_dict
- return default
-
- def get_tool_relative_path( self, app ):
- shed_conf_dict = self.get_shed_config_dict( app )
- tool_path = None
- relative_path = None
- if shed_conf_dict:
- tool_path = shed_conf_dict[ 'tool_path' ]
- relative_path = os.path.join( self.tool_shed_path_name, 'repos', self.owner, self.name, self.installed_changeset_revision )
- return tool_path, relative_path
-
- def guess_shed_config( self, app, default=None ):
- tool_ids = []
- metadata = self.metadata or {}
- for tool in metadata.get( 'tools', [] ):
- tool_ids.append( tool.get( 'guid' ) )
- for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
- name = shed_tool_conf_dict[ 'config_filename' ]
- for elem in shed_tool_conf_dict[ 'config_elems' ]:
- if elem.tag == 'tool':
- for sub_elem in elem.findall( 'id' ):
- tool_id = sub_elem.text.strip()
- if tool_id in tool_ids:
- self.shed_config_filename = name
- return shed_tool_conf_dict
- elif elem.tag == "section":
- for tool_elem in elem.findall( 'tool' ):
- for sub_elem in tool_elem.findall( 'id' ):
- tool_id = sub_elem.text.strip()
- if tool_id in tool_ids:
- self.shed_config_filename = name
- return shed_tool_conf_dict
- if self.includes_datatypes:
- #we need to search by filepaths here, which is less desirable
- tool_shed_url = self.tool_shed
- if tool_shed_url.find( ':' ) > 0:
- # Eliminate the port, if any, since it will result in an invalid directory name.
- tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
- tool_shed = tool_shed_url.rstrip( '/' )
- for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
- tool_path = shed_tool_conf_dict[ 'tool_path' ]
- relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
- if os.path.exists( relative_path ):
- self.shed_config_filename = shed_tool_conf_dict[ 'config_filename' ]
- return shed_tool_conf_dict
- return default
-
- @property
- def has_readme_files( self ):
- if self.metadata:
- return 'readme_files' in self.metadata
- return False
-
- @property
- def has_repository_dependencies( self ):
- if self.metadata:
- repository_dependencies_dict = self.metadata.get( 'repository_dependencies', {} )
- repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
- # [["http://localhost:9009", "package_libgtextutils_0_6", "test", "e2003cbf18cd", "True", "True"]]
- for rd_tup in repository_dependencies:
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( rd_tup )
- if not asbool( only_if_compiling_contained_td ):
- return True
- return False
-
- @property
- def has_repository_dependencies_only_if_compiling_contained_td( self ):
- if self.metadata:
- repository_dependencies_dict = self.metadata.get( 'repository_dependencies', {} )
- repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
- # [["http://localhost:9009", "package_libgtextutils_0_6", "test", "e2003cbf18cd", "True", "True"]]
- for rd_tup in repository_dependencies:
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( rd_tup )
- if not asbool( only_if_compiling_contained_td ):
- return False
- return True
- return False
-
- @property
- def in_error_state( self ):
- return self.status == self.installation_status.ERROR
-
- @property
- def includes_data_managers( self ):
- if self.metadata:
- return bool( len( self.metadata.get( 'data_manager', {} ).get( 'data_managers', {} ) ) )
- return False
-
- @property
- def includes_tools( self ):
- if self.metadata:
- return 'tools' in self.metadata
- return False
-
- @property
- def includes_tools_for_display_in_tool_panel( self ):
- if self.includes_tools:
- tool_dicts = self.metadata[ 'tools' ]
- for tool_dict in tool_dicts:
- if tool_dict.get( 'add_to_tool_panel', True ):
- return True
- return False
-
- @property
- def includes_tool_dependencies( self ):
- if self.metadata:
- return 'tool_dependencies' in self.metadata
- return False
-
- @property
- def includes_workflows( self ):
- if self.metadata:
- return 'workflows' in self.metadata
- return False
-
- @property
- def installed_repository_dependencies( self ):
- """Return the repository's repository dependencies that are currently installed."""
- installed_required_repositories = []
- for required_repository in self.repository_dependencies:
- if required_repository.status == self.installation_status.INSTALLED:
- installed_required_repositories.append( required_repository )
- return installed_required_repositories
-
- @property
- def installed_tool_dependencies( self ):
- """Return the repository's tool dependencies that are currently installed."""
- installed_dependencies = []
- for tool_dependency in self.tool_dependencies:
- if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED,
- ToolDependency.installation_status.ERROR ]:
- installed_dependencies.append( tool_dependency )
- return installed_dependencies
-
- @property
- def is_deprecated_in_tool_shed( self ):
- if self.tool_shed_status:
- return galaxy.util.asbool( self.tool_shed_status.get( 'repository_deprecated', False ) )
- return False
-
- @property
- def is_deactivated_or_installed( self ):
- return self.status in [ self.installation_status.DEACTIVATED,
- self.installation_status.INSTALLED ]
-
- @property
- def is_latest_installable_revision( self ):
- if self.tool_shed_status:
- return galaxy.util.asbool( self.tool_shed_status.get( 'latest_installable_revision', False ) )
- return False
-
- @property
- def missing_repository_dependencies( self ):
- """Return the repository's repository dependencies that are not currently installed, and may not ever have been installed."""
- missing_required_repositories = []
- for required_repository in self.repository_dependencies:
- if required_repository.status not in [ self.installation_status.INSTALLED ]:
- missing_required_repositories.append( required_repository )
- return missing_required_repositories
-
- @property
- def missing_tool_dependencies( self ):
- """Return the repository's tool dependencies that are not currently installed, and may not ever have been installed."""
- missing_dependencies = []
- for tool_dependency in self.tool_dependencies:
- if tool_dependency.status not in [ ToolDependency.installation_status.INSTALLED ]:
- missing_dependencies.append( tool_dependency )
- return missing_dependencies
-
- def repo_files_directory( self, app ):
- repo_path = self.repo_path( app )
- if repo_path:
- return os.path.join( repo_path, self.name )
- return None
-
- def repo_path( self, app ):
- tool_shed_url = self.tool_shed
- if tool_shed_url.find( ':' ) > 0:
- # Eliminate the port, if any, since it will result in an invalid directory name.
- tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
- tool_shed = tool_shed_url.rstrip( '/' )
- for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ):
- tool_path = shed_tool_conf_dict[ 'tool_path' ]
- relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
- if os.path.exists( relative_path ):
- return relative_path
- return None
-
- @property
- def repository_dependencies( self ):
- """
- Return all of this repository's repository dependencies, ignoring their attributes like prior_installation_required and
- only_if_compiling_contained_td.
- """
- required_repositories = []
- for rrda in self.required_repositories:
- repository_dependency = rrda.repository_dependency
- required_repository = repository_dependency.repository
- if required_repository:
- required_repositories.append( required_repository )
- return required_repositories
-
- @property
- def repository_dependencies_being_installed( self ):
- """Return the repository's repository dependencies that are currently being installed."""
- required_repositories_being_installed = []
- for required_repository in self.repository_dependencies:
- if required_repository.status in [ self.installation_status.CLONING,
- self.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
- self.installation_status.INSTALLING_TOOL_DEPENDENCIES,
- self.installation_status.LOADING_PROPRIETARY_DATATYPES,
- self.installation_status.SETTING_TOOL_VERSIONS ]:
- required_repositories_being_installed.append( required_repository )
- return required_repositories_being_installed
-
- @property
- def repository_dependencies_missing_or_being_installed( self ):
- """Return the repository's repository dependencies that are either missing or currently being installed."""
- required_repositories_missing_or_being_installed = []
- for required_repository in self.repository_dependencies:
- if required_repository.status in [ self.installation_status.ERROR,
- self.installation_status.INSTALLING,
- self.installation_status.NEVER_INSTALLED,
- self.installation_status.UNINSTALLED ]:
- required_repositories_missing_or_being_installed.append( required_repository )
- return required_repositories_missing_or_being_installed
-
- @property
- def repository_dependencies_with_installation_errors( self ):
- """Return the repository's repository dependencies that have installation errors."""
- required_repositories_with_installation_errors = []
- for required_repository in self.repository_dependencies:
- if required_repository.status == self.installation_status.ERROR:
- required_repositories_with_installation_errors.append( required_repository )
- return required_repositories_with_installation_errors
-
- @property
- def requires_prior_installation_of( self ):
- """
- Return a list of repository dependency tuples like (tool_shed, name, owner, changeset_revision, prior_installation_required) for this
- repository's repository dependencies where prior_installation_required is True. By definition, repository dependencies are required to
- be installed in order for this repository to function correctly. However, those repository dependencies that are defined for this
- repository with prior_installation_required set to True place them in a special category in that the required repositories must be
- installed before this repository is installed. Among other things, this enables these "special" repository dependencies to include
- information that enables the successful installation of this repository. This method is not used during the initial installation of
- this repository, but only after it has been installed (metadata must be set for this repository in order for this method to be useful).
- """
- required_rd_tups_that_must_be_installed = []
- if self.has_repository_dependencies:
- rd_tups = self.metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
- for rd_tup in rd_tups:
- if len( rd_tup ) == 5:
- tool_shed, name, owner, changeset_revision, prior_installation_required = rd_tup
- if galaxy.util.asbool( prior_installation_required ):
- required_rd_tups_that_must_be_installed.append( ( tool_shed, name, owner, changeset_revision, 'True', 'False' ) )
- elif len( rd_tup ) == 6:
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup
- # The repository dependency will only be required to be previously installed if it does not fall into the category of
- # a repository that must be installed only so that its contained tool dependency can be used for compiling the tool
- # dependency of the dependent repository.
- if not galaxy.util.asbool( only_if_compiling_contained_td ):
- if galaxy.util.asbool( prior_installation_required ):
- required_rd_tups_that_must_be_installed.append( ( tool_shed, name, owner, changeset_revision, 'True', 'False' ) )
- return required_rd_tups_that_must_be_installed
-
- @property
- def revision_update_available( self ):
- # This method should be named update_available, but since it is no longer possible to drop a table column using migration scripts
- # with the sqlite database (see ~/galaxy/model/migrate/versions/0016_drop_update_available_col_add_tool_shed_status_col.py), we
- # have to name it in such a way that it will not conflict with the eliminated tool_shed_repository.update_available column (which
- # cannot be eliminated if using the sqlite database).
- if self.tool_shed_status:
- return galaxy.util.asbool( self.tool_shed_status.get( 'revision_update', False ) )
- return False
-
- def set_shed_config_filename( self, value ):
- self.metadata[ 'shed_config_filename' ] = value
-
- shed_config_filename = property( get_shed_config_filename, set_shed_config_filename )
-
- def to_dict( self, view='collection', value_mapper=None ):
- if value_mapper is None:
- value_mapper = {}
- rval = {}
- try:
- visible_keys = self.__getattribute__( 'dict_' + view + '_visible_keys' )
- except AttributeError:
- raise Exception( 'Unknown API view: %s' % view )
- for key in visible_keys:
- try:
- rval[ key ] = self.__getattribute__( key )
- if key in value_mapper:
- rval[ key ] = value_mapper.get( key, rval[ key ] )
- except AttributeError:
- rval[ key ] = None
- return rval
-
- @property
- def tool_dependencies_being_installed( self ):
- dependencies_being_installed = []
- for tool_dependency in self.tool_dependencies:
- if tool_dependency.status == ToolDependency.installation_status.INSTALLING:
- dependencies_being_installed.append( tool_dependency )
- return dependencies_being_installed
-
- @property
- def tool_dependencies_missing_or_being_installed( self ):
- dependencies_missing_or_being_installed = []
- for tool_dependency in self.tool_dependencies:
- if tool_dependency.status in [ ToolDependency.installation_status.ERROR,
- ToolDependency.installation_status.INSTALLING,
- ToolDependency.installation_status.NEVER_INSTALLED,
- ToolDependency.installation_status.UNINSTALLED ]:
- dependencies_missing_or_being_installed.append( tool_dependency )
- return dependencies_missing_or_being_installed
-
- @property
- def tool_dependencies_with_installation_errors( self ):
- dependencies_with_installation_errors = []
- for tool_dependency in self.tool_dependencies:
- if tool_dependency.status == ToolDependency.installation_status.ERROR:
- dependencies_with_installation_errors.append( tool_dependency )
- return dependencies_with_installation_errors
-
- @property
- def tool_shed_path_name( self ):
- tool_shed_url = self.tool_shed
- if tool_shed_url.find( ':' ) > 0:
- # Eliminate the port, if any, since it will result in an invalid directory name.
- tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
- return tool_shed_url.rstrip( '/' )
-
- @property
- def tuples_of_repository_dependencies_needed_for_compiling_td( self ):
- """
- Return tuples defining this repository's repository dependencies that are necessary only for compiling this repository's tool
- dependencies.
- """
- rd_tups_of_repositories_needed_for_compiling_td = []
- if self.metadata:
- repository_dependencies = self.metadata.get( 'repository_dependencies', None )
- rd_tups = repository_dependencies[ 'repository_dependencies' ]
- for rd_tup in rd_tups:
- if len( rd_tup ) == 6:
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup
- if galaxy.util.asbool( only_if_compiling_contained_td ):
- rd_tups_of_repositories_needed_for_compiling_td.append( ( tool_shed, name, owner, changeset_revision, 'False', 'True' ) )
- return rd_tups_of_repositories_needed_for_compiling_td
-
- @property
- def uninstalled_repository_dependencies( self ):
- """Return the repository's repository dependencies that have been uninstalled."""
- uninstalled_required_repositories = []
- for required_repository in self.repository_dependencies:
- if required_repository.status == self.installation_status.UNINSTALLED:
- uninstalled_required_repositories.append( required_repository )
- return uninstalled_required_repositories
-
- @property
- def uninstalled_tool_dependencies( self ):
- """Return the repository's tool dependencies that have been uninstalled."""
- uninstalled_tool_dependencies = []
- for tool_dependency in self.tool_dependencies:
- if tool_dependency.status == ToolDependency.installation_status.UNINSTALLED:
- uninstalled_tool_dependencies.append( tool_dependency )
- return uninstalled_tool_dependencies
-
- @property
- def upgrade_available( self ):
- if self.tool_shed_status:
- if self.is_deprecated_in_tool_shed:
- # Only allow revision upgrades if the repository is not deprecated in the tool shed.
- return False
- return galaxy.util.asbool( self.tool_shed_status.get( 'revision_upgrade', False ) )
- return False
-
-
-class RepositoryRepositoryDependencyAssociation( object ):
- def __init__( self, tool_shed_repository_id=None, repository_dependency_id=None ):
- self.tool_shed_repository_id = tool_shed_repository_id
- self.repository_dependency_id = repository_dependency_id
-
-
-class RepositoryDependency( object ):
- def __init__( self, tool_shed_repository_id=None ):
- self.tool_shed_repository_id = tool_shed_repository_id
-
-
-class ToolDependency( object ):
- installation_status = Bunch( NEVER_INSTALLED='Never installed',
- INSTALLING='Installing',
- INSTALLED='Installed',
- ERROR='Error',
- UNINSTALLED='Uninstalled' )
- states = Bunch( INSTALLING = 'running',
- OK = 'ok',
- WARNING = 'queued',
- ERROR = 'error',
- UNINSTALLED = 'deleted_new' )
-
- def __init__( self, tool_shed_repository_id=None, name=None, version=None, type=None, status=None, error_message=None ):
- self.tool_shed_repository_id = tool_shed_repository_id
- self.name = name
- self.version = version
- self.type = type
- self.status = status
- self.error_message = error_message
-
- @property
- def can_install( self ):
- return self.status in [ self.installation_status.NEVER_INSTALLED, self.installation_status.UNINSTALLED ]
-
- @property
- def can_uninstall( self ):
- return self.status in [ self.installation_status.ERROR, self.installation_status.INSTALLED ]
-
- @property
- def can_update( self ):
- return self.status in [ self.installation_status.NEVER_INSTALLED,
- self.installation_status.INSTALLED,
- self.installation_status.ERROR,
- self.installation_status.UNINSTALLED ]
-
- @property
- def in_error_state( self ):
- return self.status == self.installation_status.ERROR
-
- def installation_directory( self, app ):
- if self.type == 'package':
- return os.path.join( app.config.tool_dependency_dir,
- self.name,
- self.version,
- self.tool_shed_repository.owner,
- self.tool_shed_repository.name,
- self.tool_shed_repository.installed_changeset_revision )
- if self.type == 'set_environment':
- return os.path.join( app.config.tool_dependency_dir,
- 'environment_settings',
- self.name,
- self.tool_shed_repository.owner,
- self.tool_shed_repository.name,
- self.tool_shed_repository.installed_changeset_revision )
-
-
-class ToolVersion( object, Dictifiable ):
- dict_element_visible_keys = ( 'id', 'tool_shed_repository' )
- def __init__( self, id=None, create_time=None, tool_id=None, tool_shed_repository=None ):
- self.id = id
- self.create_time = create_time
- self.tool_id = tool_id
- self.tool_shed_repository = tool_shed_repository
- def get_previous_version( self, app ):
- sa_session = app.model.context.current
- tva = sa_session.query( app.model.ToolVersionAssociation ) \
- .filter( app.model.ToolVersionAssociation.table.c.tool_id == self.id ) \
- .first()
- if tva:
- return sa_session.query( app.model.ToolVersion ) \
- .filter( app.model.ToolVersion.table.c.id == tva.parent_id ) \
- .first()
- return None
- def get_next_version( self, app ):
- sa_session = app.model.context.current
- tva = sa_session.query( app.model.ToolVersionAssociation ) \
- .filter( app.model.ToolVersionAssociation.table.c.parent_id == self.id ) \
- .first()
- if tva:
- return sa_session.query( app.model.ToolVersion ) \
- .filter( app.model.ToolVersion.table.c.id == tva.tool_id ) \
- .first()
- return None
- def get_versions( self, app ):
- tool_versions = []
- # Prepend ancestors.
- def __ancestors( app, tool_version ):
- # Should we handle multiple parents at each level?
- previous_version = tool_version.get_previous_version( app )
- if previous_version:
- if previous_version not in tool_versions:
- tool_versions.insert( 0, previous_version )
- __ancestors( app, previous_version )
- # Append descendants.
- def __descendants( app, tool_version ):
- # Should we handle multiple child siblings at each level?
- next_version = tool_version.get_next_version( app )
- if next_version:
- if next_version not in tool_versions:
- tool_versions.append( next_version )
- __descendants( app, next_version )
- __ancestors( app, self )
- if self not in tool_versions:
- tool_versions.append( self )
- __descendants( app, self )
- return tool_versions
- def get_version_ids( self, app, reverse=False ):
- if reverse:
- version_ids = []
- for tool_version in self.get_versions( app ):
- version_ids.insert( 0, tool_version.tool_id )
- return version_ids
- return [ tool_version.tool_id for tool_version in self.get_versions( app ) ]
-
- def to_dict( self, view='element' ):
- rval = super( ToolVersion, self ).to_dict( view=view )
- rval['tool_name'] = self.tool_id
- for a in self.parent_tool_association:
- rval['parent_tool_id'] = a.parent_id
- for a in self.child_tool_association:
- rval['child_tool_id'] = a.tool_id
- return rval
-
-class ToolVersionAssociation( object ):
- def __init__( self, id=None, tool_id=None, parent_id=None ):
- self.id = id
- self.tool_id = tool_id
- self.parent_id = parent_id
-
-class MigrateTools( object ):
- def __init__( self, repository_id=None, repository_path=None, version=None ):
- self.repository_id = repository_id
- self.repository_path = repository_path
- self.version = version
diff -r 23577dc89ace1f775003153f87a4941feeeda8f4 -r 920cbad98f9f25b7b7612d64639e901680233715 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -14,6 +14,7 @@
from sqlalchemy.orm.collections import attribute_mapped_collection
from galaxy import model
+from galaxy.model import tool_shed_install
from galaxy.model.orm.engine_factory import build_engine
from galaxy.model.orm.now import now
from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType
@@ -24,6 +25,10 @@
metadata = MetaData()
+# import tool shed mappings, TODO: update all references to eliminate
+# need for this import.
+from .tool_shed_install.mapping import *
+
model.User.table = Table( "galaxy_user", metadata,
Column( "id", Integer, primary_key=True),
@@ -40,6 +45,7 @@
Column( "active", Boolean, index=True, default=True, nullable=False ),
Column( "activation_token", TrimmedString( 64 ), nullable=True, index=True ) )
+
model.UserAddress.table = Table( "user_address", metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
@@ -378,67 +384,6 @@
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
-model.ToolShedRepository.table = Table( "tool_shed_repository", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_shed", TrimmedString( 255 ), index=True ),
- Column( "name", TrimmedString( 255 ), index=True ),
- Column( "description" , TEXT ),
- Column( "owner", TrimmedString( 255 ), index=True ),
- Column( "installed_changeset_revision", TrimmedString( 255 ) ),
- Column( "changeset_revision", TrimmedString( 255 ), index=True ),
- Column( "ctx_rev", TrimmedString( 10 ) ),
- Column( "metadata", JSONType, nullable=True ),
- Column( "includes_datatypes", Boolean, index=True, default=False ),
- Column( "tool_shed_status", JSONType, nullable=True ),
- Column( "deleted", Boolean, index=True, default=False ),
- Column( "uninstalled", Boolean, default=False ),
- Column( "dist_to_shed", Boolean, default=False ),
- Column( "status", TrimmedString( 255 ) ),
- Column( "error_message", TEXT ) )
-
-model.RepositoryRepositoryDependencyAssociation.table = Table( 'repository_repository_dependency_association', metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ),
- Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) )
-
-model.RepositoryDependency.table = Table( "repository_dependency", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) )
-
-model.ToolDependency.table = Table( "tool_dependency", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ),
- Column( "name", TrimmedString( 255 ) ),
- Column( "version", TEXT ),
- Column( "type", TrimmedString( 40 ) ),
- Column( "status", TrimmedString( 255 ), nullable=False ),
- Column( "error_message", TEXT ) )
-
-model.ToolVersion.table = Table( "tool_version", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_id", String( 255 ) ),
- Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=True ) )
-
-model.ToolVersionAssociation.table = Table( "tool_version_association", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "tool_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ),
- Column( "parent_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ) )
-
-model.MigrateTools.table = Table( "migrate_tools", metadata,
- Column( "repository_id", TrimmedString( 255 ) ),
- Column( "repository_path", TEXT ),
- Column( "version", Integer ) )
-
model.Job.table = Table( "job", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
@@ -1775,40 +1720,6 @@
ratings=relation( model.PageRatingAssociation, order_by=model.PageRatingAssociation.table.c.id, backref="pages" )
) )
-mapper( model.ToolShedRepository, model.ToolShedRepository.table,
- properties=dict( tool_versions=relation( model.ToolVersion,
- primaryjoin=( model.ToolShedRepository.table.c.id == model.ToolVersion.table.c.tool_shed_repository_id ),
- backref='tool_shed_repository' ),
- tool_dependencies=relation( model.ToolDependency,
- primaryjoin=( model.ToolShedRepository.table.c.id == model.ToolDependency.table.c.tool_shed_repository_id ),
- order_by=model.ToolDependency.table.c.name,
- backref='tool_shed_repository' ),
- required_repositories=relation( model.RepositoryRepositoryDependencyAssociation,
- primaryjoin=( model.ToolShedRepository.table.c.id == model.RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id ) ) ) )
-
-mapper( model.RepositoryRepositoryDependencyAssociation, model.RepositoryRepositoryDependencyAssociation.table,
- properties=dict( repository=relation( model.ToolShedRepository,
- primaryjoin=( model.RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id == model.ToolShedRepository.table.c.id ) ),
- repository_dependency=relation( model.RepositoryDependency,
- primaryjoin=( model.RepositoryRepositoryDependencyAssociation.table.c.repository_dependency_id == model.RepositoryDependency.table.c.id ) ) ) )
-
-mapper( model.RepositoryDependency, model.RepositoryDependency.table,
- properties=dict( repository=relation( model.ToolShedRepository,
- primaryjoin=( model.RepositoryDependency.table.c.tool_shed_repository_id == model.ToolShedRepository.table.c.id ) ) ) )
-
-mapper( model.ToolDependency, model.ToolDependency.table )
-
-mapper( model.ToolVersion, model.ToolVersion.table,
- properties=dict(
- parent_tool_association=relation( model.ToolVersionAssociation,
- primaryjoin=( model.ToolVersion.table.c.id == model.ToolVersionAssociation.table.c.tool_id ) ),
- child_tool_association=relation( model.ToolVersionAssociation,
- primaryjoin=( model.ToolVersion.table.c.id == model.ToolVersionAssociation.table.c.parent_id ) )
- )
- )
-
-mapper( model.ToolVersionAssociation, model.ToolVersionAssociation.table )
-
# Set up proxy so that
# Page.users_shared_with
# returns a list of users that page is shared with.
@@ -1997,7 +1908,7 @@
# Connect the metadata to the database.
metadata.bind = engine
- result = ModelMapping([model], engine=engine)
+ result = ModelMapping([model, tool_shed_install], engine=engine)
# Create tables if needed
if create_tables:
diff -r 23577dc89ace1f775003153f87a4941feeeda8f4 -r 920cbad98f9f25b7b7612d64639e901680233715 lib/galaxy/model/search.py
--- a/lib/galaxy/model/search.py
+++ b/lib/galaxy/model/search.py
@@ -35,8 +35,9 @@
History, Library, LibraryFolder, LibraryDataset,StoredWorkflowTagAssociation,
StoredWorkflow, HistoryTagAssociation,HistoryDatasetAssociationTagAssociation,
ExtendedMetadata, ExtendedMetadataIndex, HistoryAnnotationAssociation, Job, JobParameter,
-JobToInputLibraryDatasetAssociation, JobToInputDatasetAssociation, JobToOutputDatasetAssociation, ToolVersion,
+JobToInputLibraryDatasetAssociation, JobToInputDatasetAssociation, JobToOutputDatasetAssociation,
Page, PageRevision )
+from galaxy.model.tool_shed_install import ToolVersion
from galaxy.util.json import to_json_string
from sqlalchemy import and_
@@ -270,6 +271,7 @@
}
def search(self, trans):
+ # Likely this will break in subsequent model refactoring. Need to revisit.
self.query = trans.sa_session.query( ToolVersion )
##################
diff -r 23577dc89ace1f775003153f87a4941feeeda8f4 -r 920cbad98f9f25b7b7612d64639e901680233715 lib/galaxy/model/tool_shed_install/__init__.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/__init__.py
@@ -0,0 +1,626 @@
+import os
+
+from galaxy.model.item_attrs import Dictifiable
+
+from galaxy.util.bunch import Bunch
+from galaxy.util import asbool
+
+from tool_shed.util import common_util
+
+
+class ToolShedRepository( object ):
+ dict_collection_visible_keys = ( 'id', 'tool_shed', 'name', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
+ 'tool_shed_status', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
+ dict_element_visible_keys = ( 'id', 'tool_shed', 'name', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
+ 'tool_shed_status', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
+ installation_status = Bunch( NEW='New',
+ CLONING='Cloning',
+ SETTING_TOOL_VERSIONS='Setting tool versions',
+ INSTALLING_REPOSITORY_DEPENDENCIES='Installing repository dependencies',
+ INSTALLING_TOOL_DEPENDENCIES='Installing tool dependencies',
+ LOADING_PROPRIETARY_DATATYPES='Loading proprietary datatypes',
+ INSTALLED='Installed',
+ DEACTIVATED='Deactivated',
+ ERROR='Error',
+ UNINSTALLED='Uninstalled' )
+ states = Bunch( INSTALLING='running',
+ OK='ok',
+ WARNING='queued',
+ ERROR='error',
+ UNINSTALLED='deleted_new' )
+
+ def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None, installed_changeset_revision=None,
+ changeset_revision=None, ctx_rev=None, metadata=None, includes_datatypes=False, tool_shed_status=None, deleted=False,
+ uninstalled=False, dist_to_shed=False, status=None, error_message=None ):
+ self.id = id
+ self.create_time = create_time
+ self.tool_shed = tool_shed
+ self.name = name
+ self.description = description
+ self.owner = owner
+ self.installed_changeset_revision = installed_changeset_revision
+ self.changeset_revision = changeset_revision
+ self.ctx_rev = ctx_rev
+ self.metadata = metadata
+ self.includes_datatypes = includes_datatypes
+ self.tool_shed_status = tool_shed_status
+ self.deleted = deleted
+ self.uninstalled = uninstalled
+ self.dist_to_shed = dist_to_shed
+ self.status = status
+ self.error_message = error_message
+
+ def as_dict( self, value_mapper=None ):
+ return self.to_dict( view='element', value_mapper=value_mapper )
+
+ @property
+ def can_install( self ):
+ return self.status == self.installation_status.NEW
+
+ @property
+ def can_reset_metadata( self ):
+ return self.status == self.installation_status.INSTALLED
+
+ @property
+ def can_uninstall( self ):
+ return self.status != self.installation_status.UNINSTALLED
+
+ @property
+ def can_deactivate( self ):
+ return self.status not in [ self.installation_status.DEACTIVATED, self.installation_status.UNINSTALLED ]
+
+ @property
+ def can_reinstall_or_activate( self ):
+ return self.deleted
+
+ def get_shed_config_filename( self ):
+ shed_config_filename = None
+ if self.metadata:
+ shed_config_filename = self.metadata.get( 'shed_config_filename', shed_config_filename )
+ return shed_config_filename
+
+ def get_shed_config_dict( self, app, default=None ):
+ """
+ Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
+ in the shed_tool_conf_dict.
+ """
+ if not self.shed_config_filename:
+ self.guess_shed_config( app, default=default )
+ if self.shed_config_filename:
+ for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
+ if self.shed_config_filename == shed_tool_conf_dict[ 'config_filename' ]:
+ return shed_tool_conf_dict
+ return default
+
+ def get_tool_relative_path( self, app ):
+ shed_conf_dict = self.get_shed_config_dict( app )
+ tool_path = None
+ relative_path = None
+ if shed_conf_dict:
+ tool_path = shed_conf_dict[ 'tool_path' ]
+ relative_path = os.path.join( self.tool_shed_path_name, 'repos', self.owner, self.name, self.installed_changeset_revision )
+ return tool_path, relative_path
+
+ def guess_shed_config( self, app, default=None ):
+ tool_ids = []
+ metadata = self.metadata or {}
+ for tool in metadata.get( 'tools', [] ):
+ tool_ids.append( tool.get( 'guid' ) )
+ for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
+ name = shed_tool_conf_dict[ 'config_filename' ]
+ for elem in shed_tool_conf_dict[ 'config_elems' ]:
+ if elem.tag == 'tool':
+ for sub_elem in elem.findall( 'id' ):
+ tool_id = sub_elem.text.strip()
+ if tool_id in tool_ids:
+ self.shed_config_filename = name
+ return shed_tool_conf_dict
+ elif elem.tag == "section":
+ for tool_elem in elem.findall( 'tool' ):
+ for sub_elem in tool_elem.findall( 'id' ):
+ tool_id = sub_elem.text.strip()
+ if tool_id in tool_ids:
+ self.shed_config_filename = name
+ return shed_tool_conf_dict
+ if self.includes_datatypes:
+ #we need to search by filepaths here, which is less desirable
+ tool_shed_url = self.tool_shed
+ if tool_shed_url.find( ':' ) > 0:
+ # Eliminate the port, if any, since it will result in an invalid directory name.
+ tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
+ tool_shed = tool_shed_url.rstrip( '/' )
+ for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
+ tool_path = shed_tool_conf_dict[ 'tool_path' ]
+ relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
+ if os.path.exists( relative_path ):
+ self.shed_config_filename = shed_tool_conf_dict[ 'config_filename' ]
+ return shed_tool_conf_dict
+ return default
+
+ @property
+ def has_readme_files( self ):
+ if self.metadata:
+ return 'readme_files' in self.metadata
+ return False
+
+ @property
+ def has_repository_dependencies( self ):
+ if self.metadata:
+ repository_dependencies_dict = self.metadata.get( 'repository_dependencies', {} )
+ repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ # [["http://localhost:9009", "package_libgtextutils_0_6", "test", "e2003cbf18cd", "True", "True"]]
+ for rd_tup in repository_dependencies:
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( rd_tup )
+ if not asbool( only_if_compiling_contained_td ):
+ return True
+ return False
+
+ @property
+ def has_repository_dependencies_only_if_compiling_contained_td( self ):
+ if self.metadata:
+ repository_dependencies_dict = self.metadata.get( 'repository_dependencies', {} )
+ repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ # [["http://localhost:9009", "package_libgtextutils_0_6", "test", "e2003cbf18cd", "True", "True"]]
+ for rd_tup in repository_dependencies:
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( rd_tup )
+ if not asbool( only_if_compiling_contained_td ):
+ return False
+ return True
+ return False
+
+ @property
+ def in_error_state( self ):
+ return self.status == self.installation_status.ERROR
+
+ @property
+ def includes_data_managers( self ):
+ if self.metadata:
+ return bool( len( self.metadata.get( 'data_manager', {} ).get( 'data_managers', {} ) ) )
+ return False
+
+ @property
+ def includes_tools( self ):
+ if self.metadata:
+ return 'tools' in self.metadata
+ return False
+
+ @property
+ def includes_tools_for_display_in_tool_panel( self ):
+ if self.includes_tools:
+ tool_dicts = self.metadata[ 'tools' ]
+ for tool_dict in tool_dicts:
+ if tool_dict.get( 'add_to_tool_panel', True ):
+ return True
+ return False
+
+ @property
+ def includes_tool_dependencies( self ):
+ if self.metadata:
+ return 'tool_dependencies' in self.metadata
+ return False
+
+ @property
+ def includes_workflows( self ):
+ if self.metadata:
+ return 'workflows' in self.metadata
+ return False
+
+ @property
+ def installed_repository_dependencies( self ):
+ """Return the repository's repository dependencies that are currently installed."""
+ installed_required_repositories = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status == self.installation_status.INSTALLED:
+ installed_required_repositories.append( required_repository )
+ return installed_required_repositories
+
+ @property
+ def installed_tool_dependencies( self ):
+ """Return the repository's tool dependencies that are currently installed."""
+ installed_dependencies = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED,
+ ToolDependency.installation_status.ERROR ]:
+ installed_dependencies.append( tool_dependency )
+ return installed_dependencies
+
+ @property
+ def is_deprecated_in_tool_shed( self ):
+ if self.tool_shed_status:
+ return asbool( self.tool_shed_status.get( 'repository_deprecated', False ) )
+ return False
+
+ @property
+ def is_deactivated_or_installed( self ):
+ return self.status in [ self.installation_status.DEACTIVATED,
+ self.installation_status.INSTALLED ]
+
+ @property
+ def is_latest_installable_revision( self ):
+ if self.tool_shed_status:
+ return asbool( self.tool_shed_status.get( 'latest_installable_revision', False ) )
+ return False
+
+ @property
+ def missing_repository_dependencies( self ):
+ """Return the repository's repository dependencies that are not currently installed, and may not ever have been installed."""
+ missing_required_repositories = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status not in [ self.installation_status.INSTALLED ]:
+ missing_required_repositories.append( required_repository )
+ return missing_required_repositories
+
+ @property
+ def missing_tool_dependencies( self ):
+ """Return the repository's tool dependencies that are not currently installed, and may not ever have been installed."""
+ missing_dependencies = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status not in [ ToolDependency.installation_status.INSTALLED ]:
+ missing_dependencies.append( tool_dependency )
+ return missing_dependencies
+
+ def repo_files_directory( self, app ):
+ repo_path = self.repo_path( app )
+ if repo_path:
+ return os.path.join( repo_path, self.name )
+ return None
+
+ def repo_path( self, app ):
+ tool_shed_url = self.tool_shed
+ if tool_shed_url.find( ':' ) > 0:
+ # Eliminate the port, if any, since it will result in an invalid directory name.
+ tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
+ tool_shed = tool_shed_url.rstrip( '/' )
+ for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ):
+ tool_path = shed_tool_conf_dict[ 'tool_path' ]
+ relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
+ if os.path.exists( relative_path ):
+ return relative_path
+ return None
+
+ @property
+ def repository_dependencies( self ):
+ """
+ Return all of this repository's repository dependencies, ignoring their attributes like prior_installation_required and
+ only_if_compiling_contained_td.
+ """
+ required_repositories = []
+ for rrda in self.required_repositories:
+ repository_dependency = rrda.repository_dependency
+ required_repository = repository_dependency.repository
+ if required_repository:
+ required_repositories.append( required_repository )
+ return required_repositories
+
+ @property
+ def repository_dependencies_being_installed( self ):
+ """Return the repository's repository dependencies that are currently being installed."""
+ required_repositories_being_installed = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status in [ self.installation_status.CLONING,
+ self.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
+ self.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+ self.installation_status.LOADING_PROPRIETARY_DATATYPES,
+ self.installation_status.SETTING_TOOL_VERSIONS ]:
+ required_repositories_being_installed.append( required_repository )
+ return required_repositories_being_installed
+
+ @property
+ def repository_dependencies_missing_or_being_installed( self ):
+ """Return the repository's repository dependencies that are either missing or currently being installed."""
+ required_repositories_missing_or_being_installed = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status in [ self.installation_status.ERROR,
+ self.installation_status.INSTALLING,
+ self.installation_status.NEVER_INSTALLED,
+ self.installation_status.UNINSTALLED ]:
+ required_repositories_missing_or_being_installed.append( required_repository )
+ return required_repositories_missing_or_being_installed
+
+ @property
+ def repository_dependencies_with_installation_errors( self ):
+ """Return the repository's repository dependencies that have installation errors."""
+ required_repositories_with_installation_errors = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status == self.installation_status.ERROR:
+ required_repositories_with_installation_errors.append( required_repository )
+ return required_repositories_with_installation_errors
+
+ @property
+ def requires_prior_installation_of( self ):
+ """
+ Return a list of repository dependency tuples like (tool_shed, name, owner, changeset_revision, prior_installation_required) for this
+ repository's repository dependencies where prior_installation_required is True. By definition, repository dependencies are required to
+ be installed in order for this repository to function correctly. However, those repository dependencies that are defined for this
+ repository with prior_installation_required set to True place them in a special category in that the required repositories must be
+ installed before this repository is installed. Among other things, this enables these "special" repository dependencies to include
+        information that enables the successful installation of this repository. This method is not used during the initial installation of
+ this repository, but only after it has been installed (metadata must be set for this repository in order for this method to be useful).
+ """
+ required_rd_tups_that_must_be_installed = []
+ if self.has_repository_dependencies:
+ rd_tups = self.metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
+ for rd_tup in rd_tups:
+ if len( rd_tup ) == 5:
+ tool_shed, name, owner, changeset_revision, prior_installation_required = rd_tup
+ if asbool( prior_installation_required ):
+ required_rd_tups_that_must_be_installed.append( ( tool_shed, name, owner, changeset_revision, 'True', 'False' ) )
+ elif len( rd_tup ) == 6:
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup
+                # The repository dependency will only be required to be previously installed if it does not fall into the category of
+                # a repository that must be installed only so that its contained tool dependency can be used for compiling the tool
+                # dependency of the dependent repository.
+ if not asbool( only_if_compiling_contained_td ):
+ if asbool( prior_installation_required ):
+ required_rd_tups_that_must_be_installed.append( ( tool_shed, name, owner, changeset_revision, 'True', 'False' ) )
+ return required_rd_tups_that_must_be_installed
+
+ @property
+ def revision_update_available( self ):
+ # This method should be named update_available, but since it is no longer possible to drop a table column using migration scripts
+ # with the sqlite database (see ~/galaxy/model/migrate/versions/0016_drop_update_available_col_add_tool_shed_status_col.py), we
+ # have to name it in such a way that it will not conflict with the eliminated tool_shed_repository.update_available column (which
+ # cannot be eliminated if using the sqlite database).
+ if self.tool_shed_status:
+ return asbool( self.tool_shed_status.get( 'revision_update', False ) )
+ return False
+
+ def set_shed_config_filename( self, value ):
+ self.metadata[ 'shed_config_filename' ] = value
+
+ shed_config_filename = property( get_shed_config_filename, set_shed_config_filename )
+
+ def to_dict( self, view='collection', value_mapper=None ):
+ if value_mapper is None:
+ value_mapper = {}
+ rval = {}
+ try:
+ visible_keys = self.__getattribute__( 'dict_' + view + '_visible_keys' )
+ except AttributeError:
+ raise Exception( 'Unknown API view: %s' % view )
+ for key in visible_keys:
+ try:
+ rval[ key ] = self.__getattribute__( key )
+ if key in value_mapper:
+ rval[ key ] = value_mapper.get( key, rval[ key ] )
+ except AttributeError:
+ rval[ key ] = None
+ return rval
+
+ @property
+ def tool_dependencies_being_installed( self ):
+ dependencies_being_installed = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status == ToolDependency.installation_status.INSTALLING:
+ dependencies_being_installed.append( tool_dependency )
+ return dependencies_being_installed
+
+ @property
+ def tool_dependencies_missing_or_being_installed( self ):
+ dependencies_missing_or_being_installed = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status in [ ToolDependency.installation_status.ERROR,
+ ToolDependency.installation_status.INSTALLING,
+ ToolDependency.installation_status.NEVER_INSTALLED,
+ ToolDependency.installation_status.UNINSTALLED ]:
+ dependencies_missing_or_being_installed.append( tool_dependency )
+ return dependencies_missing_or_being_installed
+
+ @property
+ def tool_dependencies_with_installation_errors( self ):
+ dependencies_with_installation_errors = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status == ToolDependency.installation_status.ERROR:
+ dependencies_with_installation_errors.append( tool_dependency )
+ return dependencies_with_installation_errors
+
+ @property
+ def tool_shed_path_name( self ):
+ tool_shed_url = self.tool_shed
+ if tool_shed_url.find( ':' ) > 0:
+ # Eliminate the port, if any, since it will result in an invalid directory name.
+ tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
+ return tool_shed_url.rstrip( '/' )
+
+ @property
+ def tuples_of_repository_dependencies_needed_for_compiling_td( self ):
+ """
+ Return tuples defining this repository's repository dependencies that are necessary only for compiling this repository's tool
+ dependencies.
+ """
+ rd_tups_of_repositories_needed_for_compiling_td = []
+ if self.metadata:
+ repository_dependencies = self.metadata.get( 'repository_dependencies', None )
+ rd_tups = repository_dependencies[ 'repository_dependencies' ]
+ for rd_tup in rd_tups:
+ if len( rd_tup ) == 6:
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup
+ if asbool( only_if_compiling_contained_td ):
+ rd_tups_of_repositories_needed_for_compiling_td.append( ( tool_shed, name, owner, changeset_revision, 'False', 'True' ) )
+ return rd_tups_of_repositories_needed_for_compiling_td
+
+ @property
+ def uninstalled_repository_dependencies( self ):
+ """Return the repository's repository dependencies that have been uninstalled."""
+ uninstalled_required_repositories = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status == self.installation_status.UNINSTALLED:
+ uninstalled_required_repositories.append( required_repository )
+ return uninstalled_required_repositories
+
+ @property
+ def uninstalled_tool_dependencies( self ):
+ """Return the repository's tool dependencies that have been uninstalled."""
+ uninstalled_tool_dependencies = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status == ToolDependency.installation_status.UNINSTALLED:
+ uninstalled_tool_dependencies.append( tool_dependency )
+ return uninstalled_tool_dependencies
+
+ @property
+ def upgrade_available( self ):
+ if self.tool_shed_status:
+ if self.is_deprecated_in_tool_shed:
+ # Only allow revision upgrades if the repository is not deprecated in the tool shed.
+ return False
+ return asbool( self.tool_shed_status.get( 'revision_upgrade', False ) )
+ return False
+
+
+class RepositoryRepositoryDependencyAssociation( object ):
+ def __init__( self, tool_shed_repository_id=None, repository_dependency_id=None ):
+ self.tool_shed_repository_id = tool_shed_repository_id
+ self.repository_dependency_id = repository_dependency_id
+
+
+class RepositoryDependency( object ):
+ def __init__( self, tool_shed_repository_id=None ):
+ self.tool_shed_repository_id = tool_shed_repository_id
+
+
+class ToolDependency( object ):
+ installation_status = Bunch( NEVER_INSTALLED='Never installed',
+ INSTALLING='Installing',
+ INSTALLED='Installed',
+ ERROR='Error',
+ UNINSTALLED='Uninstalled' )
+
+ states = Bunch( INSTALLING='running',
+ OK='ok',
+ WARNING='queued',
+ ERROR='error',
+ UNINSTALLED='deleted_new' )
+
+ def __init__( self, tool_shed_repository_id=None, name=None, version=None, type=None, status=None, error_message=None ):
+ self.tool_shed_repository_id = tool_shed_repository_id
+ self.name = name
+ self.version = version
+ self.type = type
+ self.status = status
+ self.error_message = error_message
+
+ @property
+ def can_install( self ):
+ return self.status in [ self.installation_status.NEVER_INSTALLED, self.installation_status.UNINSTALLED ]
+
+ @property
+ def can_uninstall( self ):
+ return self.status in [ self.installation_status.ERROR, self.installation_status.INSTALLED ]
+
+ @property
+ def can_update( self ):
+ return self.status in [ self.installation_status.NEVER_INSTALLED,
+ self.installation_status.INSTALLED,
+ self.installation_status.ERROR,
+ self.installation_status.UNINSTALLED ]
+
+ @property
+ def in_error_state( self ):
+ return self.status == self.installation_status.ERROR
+
+ def installation_directory( self, app ):
+ if self.type == 'package':
+ return os.path.join( app.config.tool_dependency_dir,
+ self.name,
+ self.version,
+ self.tool_shed_repository.owner,
+ self.tool_shed_repository.name,
+ self.tool_shed_repository.installed_changeset_revision )
+ if self.type == 'set_environment':
+ return os.path.join( app.config.tool_dependency_dir,
+ 'environment_settings',
+ self.name,
+ self.tool_shed_repository.owner,
+ self.tool_shed_repository.name,
+ self.tool_shed_repository.installed_changeset_revision )
+
+
+class ToolVersion( object, Dictifiable ):
+ dict_element_visible_keys = ( 'id', 'tool_shed_repository' )
+
+ def __init__( self, id=None, create_time=None, tool_id=None, tool_shed_repository=None ):
+ self.id = id
+ self.create_time = create_time
+ self.tool_id = tool_id
+ self.tool_shed_repository = tool_shed_repository
+
+ def get_previous_version( self, app ):
+ sa_session = app.model.context.current
+ tva = sa_session.query( app.model.ToolVersionAssociation ) \
+ .filter( app.model.ToolVersionAssociation.table.c.tool_id == self.id ) \
+ .first()
+ if tva:
+ return sa_session.query( app.model.ToolVersion ) \
+ .filter( app.model.ToolVersion.table.c.id == tva.parent_id ) \
+ .first()
+ return None
+
+ def get_next_version( self, app ):
+ sa_session = app.model.context.current
+ tva = sa_session.query( app.model.ToolVersionAssociation ) \
+ .filter( app.model.ToolVersionAssociation.table.c.parent_id == self.id ) \
+ .first()
+ if tva:
+ return sa_session.query( app.model.ToolVersion ) \
+ .filter( app.model.ToolVersion.table.c.id == tva.tool_id ) \
+ .first()
+ return None
+
+ def get_versions( self, app ):
+ tool_versions = []
+ # Prepend ancestors.
+
+ def __ancestors( app, tool_version ):
+ # Should we handle multiple parents at each level?
+ previous_version = tool_version.get_previous_version( app )
+ if previous_version:
+ if previous_version not in tool_versions:
+ tool_versions.insert( 0, previous_version )
+ __ancestors( app, previous_version )
+
+ # Append descendants.
+ def __descendants( app, tool_version ):
+ # Should we handle multiple child siblings at each level?
+ next_version = tool_version.get_next_version( app )
+ if next_version:
+ if next_version not in tool_versions:
+ tool_versions.append( next_version )
+ __descendants( app, next_version )
+ __ancestors( app, self )
+ if self not in tool_versions:
+ tool_versions.append( self )
+ __descendants( app, self )
+ return tool_versions
+
+ def get_version_ids( self, app, reverse=False ):
+ if reverse:
+ version_ids = []
+ for tool_version in self.get_versions( app ):
+ version_ids.insert( 0, tool_version.tool_id )
+ return version_ids
+ return [ tool_version.tool_id for tool_version in self.get_versions( app ) ]
+
+ def to_dict( self, view='element' ):
+ rval = super( ToolVersion, self ).to_dict( view=view )
+ rval['tool_name'] = self.tool_id
+ for a in self.parent_tool_association:
+ rval['parent_tool_id'] = a.parent_id
+ for a in self.child_tool_association:
+ rval['child_tool_id'] = a.tool_id
+ return rval
+
+
+class ToolVersionAssociation( object ):
+ def __init__( self, id=None, tool_id=None, parent_id=None ):
+ self.id = id
+ self.tool_id = tool_id
+ self.parent_id = parent_id
+
+
+class MigrateTools( object ):
+ def __init__( self, repository_id=None, repository_path=None, version=None ):
+ self.repository_id = repository_id
+ self.repository_path = repository_path
+ self.version = version
diff -r 23577dc89ace1f775003153f87a4941feeeda8f4 -r 920cbad98f9f25b7b7612d64639e901680233715 lib/galaxy/model/tool_shed_install/mapping.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/mapping.py
@@ -0,0 +1,105 @@
+#from galaxy.model import tool_shed_models as install_model
+from galaxy import model as install_model
+#from sqlalchemy import MetaData
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, Table, TEXT
+from sqlalchemy.orm import relation, mapper
+from galaxy.model.custom_types import JSONType, TrimmedString
+from galaxy.model.orm.now import now
+from galaxy.model.mapping import metadata
+
+
+install_model.ToolShedRepository.table = Table( "tool_shed_repository", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed", TrimmedString( 255 ), index=True ),
+ Column( "name", TrimmedString( 255 ), index=True ),
+ Column( "description" , TEXT ),
+ Column( "owner", TrimmedString( 255 ), index=True ),
+ Column( "installed_changeset_revision", TrimmedString( 255 ) ),
+ Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+ Column( "ctx_rev", TrimmedString( 10 ) ),
+ Column( "metadata", JSONType, nullable=True ),
+ Column( "includes_datatypes", Boolean, index=True, default=False ),
+ Column( "tool_shed_status", JSONType, nullable=True ),
+ Column( "deleted", Boolean, index=True, default=False ),
+ Column( "uninstalled", Boolean, default=False ),
+ Column( "dist_to_shed", Boolean, default=False ),
+ Column( "status", TrimmedString( 255 ) ),
+ Column( "error_message", TEXT ) )
+
+install_model.RepositoryRepositoryDependencyAssociation.table = Table( 'repository_repository_dependency_association', metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ),
+ Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) )
+
+install_model.RepositoryDependency.table = Table( "repository_dependency", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) )
+
+install_model.ToolDependency.table = Table( "tool_dependency", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ),
+ Column( "name", TrimmedString( 255 ) ),
+ Column( "version", TEXT ),
+ Column( "type", TrimmedString( 40 ) ),
+ Column( "status", TrimmedString( 255 ), nullable=False ),
+ Column( "error_message", TEXT ) )
+
+install_model.ToolVersion.table = Table( "tool_version", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_id", String( 255 ) ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=True ) )
+
+install_model.ToolVersionAssociation.table = Table( "tool_version_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "tool_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ),
+ Column( "parent_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ) )
+
+install_model.MigrateTools.table = Table( "migrate_tools", metadata,
+ Column( "repository_id", TrimmedString( 255 ) ),
+ Column( "repository_path", TEXT ),
+ Column( "version", Integer ) )
+
+
+mapper( install_model.ToolShedRepository, install_model.ToolShedRepository.table,
+ properties=dict( tool_versions=relation( install_model.ToolVersion,
+ primaryjoin=( install_model.ToolShedRepository.table.c.id == install_model.ToolVersion.table.c.tool_shed_repository_id ),
+ backref='tool_shed_repository' ),
+ tool_dependencies=relation( install_model.ToolDependency,
+ primaryjoin=( install_model.ToolShedRepository.table.c.id == install_model.ToolDependency.table.c.tool_shed_repository_id ),
+ order_by=install_model.ToolDependency.table.c.name,
+ backref='tool_shed_repository' ),
+ required_repositories=relation( install_model.RepositoryRepositoryDependencyAssociation,
+ primaryjoin=( install_model.ToolShedRepository.table.c.id == install_model.RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id ) ) ) )
+
+mapper( install_model.RepositoryRepositoryDependencyAssociation, install_model.RepositoryRepositoryDependencyAssociation.table,
+ properties=dict( repository=relation( install_model.ToolShedRepository,
+ primaryjoin=( install_model.RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id == install_model.ToolShedRepository.table.c.id ) ),
+ repository_dependency=relation( install_model.RepositoryDependency,
+ primaryjoin=( install_model.RepositoryRepositoryDependencyAssociation.table.c.repository_dependency_id == install_model.RepositoryDependency.table.c.id ) ) ) )
+
+mapper( install_model.RepositoryDependency, install_model.RepositoryDependency.table,
+ properties=dict( repository=relation( install_model.ToolShedRepository,
+ primaryjoin=( install_model.RepositoryDependency.table.c.tool_shed_repository_id == install_model.ToolShedRepository.table.c.id ) ) ) )
+
+mapper( install_model.ToolDependency, install_model.ToolDependency.table )
+
+mapper( install_model.ToolVersion, install_model.ToolVersion.table,
+ properties=dict(
+ parent_tool_association=relation( install_model.ToolVersionAssociation,
+ primaryjoin=( install_model.ToolVersion.table.c.id == install_model.ToolVersionAssociation.table.c.tool_id ) ),
+ child_tool_association=relation( install_model.ToolVersionAssociation,
+ primaryjoin=( install_model.ToolVersion.table.c.id == install_model.ToolVersionAssociation.table.c.parent_id ) )
+ )
+ )
+
+mapper( install_model.ToolVersionAssociation, install_model.ToolVersionAssociation.table )
diff -r 23577dc89ace1f775003153f87a4941feeeda8f4 -r 920cbad98f9f25b7b7612d64639e901680233715 lib/galaxy/webapps/galaxy/controllers/admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -4,6 +4,7 @@
import galaxy.util
from galaxy import model
+from galaxy.model import tool_shed_install as install_model
from galaxy import web
from galaxy.actions.admin import AdminActions
from galaxy.exceptions import MessageException
@@ -406,7 +407,7 @@
return tool_ids_str
# Grid definition
title = "Tool versions"
- model_class = model.ToolVersion
+ model_class = install_model.ToolVersion
template='/admin/tool_version/grid.mako'
default_sort_key = "tool_id"
columns = [
https://bitbucket.org/galaxy/galaxy-central/commits/c1603460642a/
Changeset: c1603460642a
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Update references galaxy.model.ToolShedRepository.
Replace references to galaxy.model.Tool{ShedRepository,Dependency} with new galaxy.model.tool_shed_install path.
Affected #: 5 files
diff -r 920cbad98f9f25b7b7612d64639e901680233715 -r c1603460642a33821ed17b6a3d53812a116c927e lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
--- a/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
+++ b/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
@@ -1,6 +1,7 @@
import logging
-from galaxy import model, util
+from galaxy import util
+from galaxy.model import tool_shed_install
from galaxy.web.framework.helpers import iff, grids
from galaxy.web import url_for
from galaxy.model.orm import or_
@@ -123,7 +124,7 @@
return accepted_filters
# Grid definition
title = "Installed tool shed repositories"
- model_class = model.ToolShedRepository
+ model_class = tool_shed_install.ToolShedRepository
template='/admin/tool_shed_repository/grid.mako'
default_sort_key = "name"
columns = [
@@ -131,7 +132,7 @@
attach_popup=False ),
NameColumn( "Name",
key="name",
- link=( lambda item: iff( item.status in [ model.ToolShedRepository.installation_status.CLONING ],
+ link=( lambda item: iff( item.status in [ tool_shed_install.ToolShedRepository.installation_status.CLONING ],
None,
dict( operation="manage_repository", id=item.id ) ) ),
attach_popup=True ),
@@ -164,23 +165,23 @@
grids.GridOperation( "Get updates",
allow_multiple=False,
condition=( lambda item: not item.deleted and item.revision_update_available and item.status not in \
- [ model.ToolShedRepository.installation_status.ERROR, model.ToolShedRepository.installation_status.NEW ] ),
+ [ tool_shed_install.ToolShedRepository.installation_status.ERROR, tool_shed_install.ToolShedRepository.installation_status.NEW ] ),
async_compatible=False,
url_args=dict( controller='admin_toolshed', action='browse_repositories', operation='get updates' ) ),
grids.GridOperation( "Install",
allow_multiple=False,
- condition=( lambda item: not item.deleted and item.status == model.ToolShedRepository.installation_status.NEW ),
+ condition=( lambda item: not item.deleted and item.status == tool_shed_install.ToolShedRepository.installation_status.NEW ),
async_compatible=False,
url_args=dict( controller='admin_toolshed', action='manage_repository', operation='install' ) ),
grids.GridOperation( "Deactivate or uninstall",
allow_multiple=False,
condition=( lambda item: not item.deleted and item.status not in \
- [ model.ToolShedRepository.installation_status.ERROR, model.ToolShedRepository.installation_status.NEW ] ),
+ [ tool_shed_install.ToolShedRepository.installation_status.ERROR, tool_shed_install.ToolShedRepository.installation_status.NEW ] ),
async_compatible=False,
url_args=dict( controller='admin_toolshed', action='browse_repositories', operation='deactivate or uninstall' ) ),
grids.GridOperation( "Reset to install",
allow_multiple=False,
- condition=( lambda item: ( item.status == model.ToolShedRepository.installation_status.ERROR ) ),
+ condition=( lambda item: ( item.status == tool_shed_install.ToolShedRepository.installation_status.ERROR ) ),
async_compatible=False,
url_args=dict( controller='admin_toolshed', action='browse_repositories', operation='reset to install' ) ),
grids.GridOperation( "Activate or reinstall",
@@ -274,7 +275,7 @@
title = "Monitor installing tool shed repositories"
template = "admin/tool_shed_repository/repository_installation_grid.mako"
- model_class = model.ToolShedRepository
+ model_class = tool_shed_install.ToolShedRepository
default_sort_key = "-create_time"
num_rows_per_page = 50
preserve_state = True
@@ -282,13 +283,13 @@
columns = [
NameColumn( "Name",
link=( lambda item: iff( item.status in \
- [ model.ToolShedRepository.installation_status.NEW,
- model.ToolShedRepository.installation_status.CLONING,
- model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
- model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
- model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
- model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES,
- model.ToolShedRepository.installation_status.UNINSTALLED ], \
+ [ tool_shed_install.ToolShedRepository.installation_status.NEW,
+ tool_shed_install.ToolShedRepository.installation_status.CLONING,
+ tool_shed_install.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+ tool_shed_install.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
+ tool_shed_install.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+ tool_shed_install.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES,
+ tool_shed_install.ToolShedRepository.installation_status.UNINSTALLED ], \
None, dict( action="manage_repository", id=item.id ) ) ),
filterable="advanced" ),
DescriptionColumn( "Description",
@@ -372,16 +373,16 @@
title = "Tool Dependencies"
template = "admin/tool_shed_repository/tool_dependencies_grid.mako"
- model_class = model.ToolDependency
+ model_class = tool_shed_install.ToolDependency
default_sort_key = "-create_time"
num_rows_per_page = 50
preserve_state = True
use_paging = False
columns = [
NameColumn( "Name",
- link=( lambda item: iff( item.status in [ model.ToolDependency.installation_status.NEVER_INSTALLED,
- model.ToolDependency.installation_status.INSTALLING,
- model.ToolDependency.installation_status.UNINSTALLED ],
+ link=( lambda item: iff( item.status in [ tool_shed_install.ToolDependency.installation_status.NEVER_INSTALLED,
+ tool_shed_install.ToolDependency.installation_status.INSTALLING,
+ tool_shed_install.ToolDependency.installation_status.UNINSTALLED ],
None,
dict( action="manage_tool_dependencies", operation='browse', id=item.id ) ) ),
filterable="advanced" ),
diff -r 920cbad98f9f25b7b7612d64639e901680233715 -r c1603460642a33821ed17b6a3d53812a116c927e test/install_and_test_tool_shed_repositories/base/test_db_util.py
--- a/test/install_and_test_tool_shed_repositories/base/test_db_util.py
+++ b/test/install_and_test_tool_shed_repositories/base/test_db_util.py
@@ -1,5 +1,6 @@
import logging
import galaxy.model as model
+import galaxy.model.tool_shed_install as install_model
from galaxy.model.orm import and_
from functional.database_contexts import galaxy_context as sa_session
@@ -19,15 +20,15 @@
sa_session.flush()
def get_repository( repository_id ):
- return sa_session.query( model.ToolShedRepository ) \
- .filter( model.ToolShedRepository.table.c.id == repository_id ) \
+ return sa_session.query( install_model.ToolShedRepository ) \
+ .filter( install_model.ToolShedRepository.table.c.id == repository_id ) \
.first()
def get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision ):
- return sa_session.query( model.ToolShedRepository ) \
- .filter( and_( model.ToolShedRepository.table.c.name == name,
- model.ToolShedRepository.table.c.owner == owner,
- model.ToolShedRepository.table.c.installed_changeset_revision == changeset_revision ) ) \
+ return sa_session.query( install_model.ToolShedRepository ) \
+ .filter( and_( install_model.ToolShedRepository.table.c.name == name,
+ install_model.ToolShedRepository.table.c.owner == owner,
+ install_model.ToolShedRepository.table.c.installed_changeset_revision == changeset_revision ) ) \
.one()
@@ -39,18 +40,18 @@
def get_tool_dependencies_for_installed_repository( repository_id, status=None, exclude_status=None ):
if status is not None:
- return sa_session.query( model.ToolDependency ) \
- .filter( and_( model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
- model.ToolDependency.table.c.status == status ) ) \
+ return sa_session.query( install_model.ToolDependency ) \
+ .filter( and_( install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
+ install_model.ToolDependency.table.c.status == status ) ) \
.all()
elif exclude_status is not None:
- return sa_session.query( model.ToolDependency ) \
- .filter( and_( model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
- model.ToolDependency.table.c.status != exclude_status ) ) \
+ return sa_session.query( install_model.ToolDependency ) \
+ .filter( and_( install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
+ install_model.ToolDependency.table.c.status != exclude_status ) ) \
.all()
else:
- return sa_session.query( model.ToolDependency ) \
- .filter( model.ToolDependency.table.c.tool_shed_repository_id == repository_id ) \
+ return sa_session.query( install_model.ToolDependency ) \
+ .filter( install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id ) \
.all()
def mark_obj_deleted( obj ):
diff -r 920cbad98f9f25b7b7612d64639e901680233715 -r c1603460642a33821ed17b6a3d53812a116c927e test/install_and_test_tool_shed_repositories/base/twilltestcase.py
--- a/test/install_and_test_tool_shed_repositories/base/twilltestcase.py
+++ b/test/install_and_test_tool_shed_repositories/base/twilltestcase.py
@@ -1,4 +1,5 @@
import galaxy.model as model
+import galaxy.model.tool_shed_install as install_model
import common, string, os, re, test_db_util, simplejson, logging, time, sys
import galaxy.util as util
from base.twilltestcase import tc, from_json_string, TwillTestCase, security, urllib
@@ -137,8 +138,8 @@
return new_url
def wait_for_repository_installation( self, repository_ids ):
- final_states = [ model.ToolShedRepository.installation_status.ERROR,
- model.ToolShedRepository.installation_status.INSTALLED ]
+ final_states = [ install_model.ToolShedRepository.installation_status.ERROR,
+ install_model.ToolShedRepository.installation_status.INSTALLED ]
# Wait until all repositories are in a final state before returning. This ensures that subsequent tests
# are running against an installed repository, and not one that is still in the process of installing.
if repository_ids:
diff -r 920cbad98f9f25b7b7612d64639e901680233715 -r c1603460642a33821ed17b6a3d53812a116c927e test/tool_shed/base/test_db_util.py
--- a/test/tool_shed/base/test_db_util.py
+++ b/test/tool_shed/base/test_db_util.py
@@ -1,4 +1,5 @@
import galaxy.model, logging
+import galaxy.model.tool_shed_install
import galaxy.webapps.tool_shed.model as model
from galaxy.model.orm import *
@@ -26,13 +27,13 @@
def get_all_installed_repositories( actually_installed=False ):
if actually_installed:
- return ga_session.query( galaxy.model.ToolShedRepository ) \
- .filter( and_( galaxy.model.ToolShedRepository.table.c.deleted == False,
- galaxy.model.ToolShedRepository.table.c.uninstalled == False,
- galaxy.model.ToolShedRepository.table.c.status == galaxy.model.ToolShedRepository.installation_status.INSTALLED ) ) \
+ return ga_session.query( galaxy.model.tool_shed_install.ToolShedRepository ) \
+ .filter( and_( galaxy.model.tool_shed_install.ToolShedRepository.table.c.deleted == False,
+ galaxy.model.tool_shed_install.ToolShedRepository.table.c.uninstalled == False,
+ galaxy.model.tool_shed_install.ToolShedRepository.table.c.status == galaxy.model.tool_shed_install.ToolShedRepository.installation_status.INSTALLED ) ) \
.all()
else:
- return ga_session.query( galaxy.model.ToolShedRepository ).all()
+ return ga_session.query( galaxy.model.tool_shed_install.ToolShedRepository ).all()
def get_category_by_name( name ):
return sa_session.query( model.Category ) \
@@ -50,21 +51,21 @@
.all()
def get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision ):
- return ga_session.query( galaxy.model.ToolShedRepository ) \
- .filter( and_( galaxy.model.ToolShedRepository.table.c.name == repository_name,
- galaxy.model.ToolShedRepository.table.c.owner == owner,
- galaxy.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
+ return ga_session.query( galaxy.model.tool_shed_install.ToolShedRepository ) \
+ .filter( and_( galaxy.model.tool_shed_install.ToolShedRepository.table.c.name == repository_name,
+ galaxy.model.tool_shed_install.ToolShedRepository.table.c.owner == owner,
+ galaxy.model.tool_shed_install.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
.first()
def get_installed_repository_by_id( repository_id ):
- return ga_session.query( galaxy.model.ToolShedRepository ) \
- .filter( galaxy.model.ToolShedRepository.table.c.id == repository_id ) \
+ return ga_session.query( galaxy.model.tool_shed_install.ToolShedRepository ) \
+ .filter( galaxy.model.tool_shed_install.ToolShedRepository.table.c.id == repository_id ) \
.first()
def get_installed_repository_by_name_owner( repository_name, owner ):
- return ga_session.query( galaxy.model.ToolShedRepository ) \
- .filter( and_( galaxy.model.ToolShedRepository.table.c.name == repository_name,
- galaxy.model.ToolShedRepository.table.c.owner == owner ) ) \
+ return ga_session.query( galaxy.model.tool_shed_install.ToolShedRepository ) \
+ .filter( and_( galaxy.model.tool_shed_install.ToolShedRepository.table.c.name == repository_name,
+ galaxy.model.tool_shed_install.ToolShedRepository.table.c.owner == owner ) ) \
.first()
def get_private_role( user ):
diff -r 920cbad98f9f25b7b7612d64639e901680233715 -r c1603460642a33821ed17b6a3d53812a116c927e test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -10,7 +10,7 @@
import tempfile
import tarfile
import galaxy.webapps.tool_shed.util.hgweb_config
-import galaxy.model as galaxy_model
+import galaxy.model.tool_shed_install as galaxy_model
import galaxy.util as util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import xml_util
https://bitbucket.org/galaxy/galaxy-central/commits/87739d395b20/
Changeset: 87739d395b20
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Introduce app.install_model.
All references to galaxy.model.tool_shed_install models should go through this instance. Right now it is a reference to the same ModelMapping instance, but the goal is to allow them to be two different mappings.
Affected #: 36 files
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -66,6 +66,10 @@
object_store = self.object_store,
trace_logger=self.trace_logger,
use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ) )
+ # Want tool_shed_install models accessed through new attribute
+ # (install_model). This is the same object for now, but should ultimately
+ # be allowed to be separate ModelMapping instances.
+ self.install_model = self.model
# Manage installed tool shed repositories.
self.installed_repository_manager = tool_shed.galaxy_install.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/galaxy/model/search.py
--- a/lib/galaxy/model/search.py
+++ b/lib/galaxy/model/search.py
@@ -272,7 +272,7 @@
def search(self, trans):
# Likely this will break in subsequent model refactoring. Need to revisit.
- self.query = trans.sa_session.query( ToolVersion )
+ self.query = trans.install_model.context.query( ToolVersion )
##################
#History Dataset Searching
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/galaxy/model/tool_shed_install/__init__.py
--- a/lib/galaxy/model/tool_shed_install/__init__.py
+++ b/lib/galaxy/model/tool_shed_install/__init__.py
@@ -547,24 +547,24 @@
self.tool_shed_repository = tool_shed_repository
def get_previous_version( self, app ):
- sa_session = app.model.context.current
- tva = sa_session.query( app.model.ToolVersionAssociation ) \
- .filter( app.model.ToolVersionAssociation.table.c.tool_id == self.id ) \
+ context = app.install_model.context
+ tva = context.query( app.install_model.ToolVersionAssociation ) \
+ .filter( app.install_model.ToolVersionAssociation.table.c.tool_id == self.id ) \
.first()
if tva:
- return sa_session.query( app.model.ToolVersion ) \
- .filter( app.model.ToolVersion.table.c.id == tva.parent_id ) \
+ return context.query( app.install_model.ToolVersion ) \
+ .filter( app.install_model.ToolVersion.table.c.id == tva.parent_id ) \
.first()
return None
def get_next_version( self, app ):
- sa_session = app.model.context.current
- tva = sa_session.query( app.model.ToolVersionAssociation ) \
- .filter( app.model.ToolVersionAssociation.table.c.parent_id == self.id ) \
+ context = app.install_model.context
+ tva = context.query( app.install_model.ToolVersionAssociation ) \
+ .filter( app.install_model.ToolVersionAssociation.table.c.parent_id == self.id ) \
.first()
if tva:
- return sa_session.query( app.model.ToolVersion ) \
- .filter( app.model.ToolVersion.table.c.id == tva.tool_id ) \
+ return context.query( app.install_model.ToolVersion ) \
+ .filter( app.install_model.ToolVersion.table.c.id == tva.tool_id ) \
.first()
return None
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -414,15 +414,15 @@
def __get_tool_version( self, tool_id ):
"""Return a ToolVersion if one exists for the tool_id"""
- return self.sa_session.query( self.app.model.ToolVersion ) \
- .filter( self.app.model.ToolVersion.table.c.tool_id == tool_id ) \
+ return self.app.install_model.context.query( self.app.install_model.ToolVersion ) \
+ .filter( self.app.install_model.ToolVersion.table.c.tool_id == tool_id ) \
.first()
def __get_tool_shed_repository( self, tool_shed, name, owner, installed_changeset_revision ):
- return self.sa_session.query( self.app.model.ToolShedRepository ) \
- .filter( and_( self.app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
- self.app.model.ToolShedRepository.table.c.name == name,
- self.app.model.ToolShedRepository.table.c.owner == owner,
- self.app.model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \
+ return self.app.install_model.context.query( self.app.install_model.ToolShedRepository ) \
+ .filter( and_( self.app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed,
+ self.app.install_model.ToolShedRepository.table.c.name == name,
+ self.app.install_model.ToolShedRepository.table.c.owner == owner,
+ self.app.install_model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \
.first()
def get_tool_components( self, tool_id, tool_version=None, get_loaded_tools_by_lineage=False, set_selected=False ):
@@ -503,9 +503,9 @@
tool.version = elem.find( "version" ).text
# Make sure the tool has a tool_version.
if not self.__get_tool_version( tool.id ):
- tool_version = self.app.model.ToolVersion( tool_id=tool.id, tool_shed_repository=tool_shed_repository )
- self.sa_session.add( tool_version )
- self.sa_session.flush()
+ tool_version = self.app.install_model.ToolVersion( tool_id=tool.id, tool_shed_repository=tool_shed_repository )
+ self.app.install_model.context.add( tool_version )
+ self.app.install_model.context.flush()
# Load the tool's lineage ids.
tool.lineage_ids = tool.tool_version.get_version_ids( self.app )
if self.app.config.get_bool( 'enable_tool_tags', False ):
@@ -1003,8 +1003,8 @@
@property
def tool_version( self ):
"""Return a ToolVersion if one exists for our id"""
- return self.sa_session.query( self.app.model.ToolVersion ) \
- .filter( self.app.model.ToolVersion.table.c.tool_id == self.id ) \
+ return self.app.install_model.context.query( self.app.install_model.ToolVersion ) \
+ .filter( self.app.install_model.ToolVersion.table.c.tool_id == self.id ) \
.first()
@property
def tool_versions( self ):
@@ -1698,9 +1698,9 @@
return param
def populate_tool_shed_info( self ):
- if self.repository_id is not None and 'ToolShedRepository' in self.app.model:
+ if self.repository_id is not None and self.app.name == 'galaxy':
repository_id = self.app.security.decode_id( self.repository_id )
- tool_shed_repository = self.sa_session.query( self.app.model.ToolShedRepository ).get( repository_id )
+ tool_shed_repository = self.app.install_model.context.query( self.app.install_model.ToolShedRepository ).get( repository_id )
if tool_shed_repository:
self.tool_shed = tool_shed_repository.tool_shed
self.repository_name = tool_shed_repository.name
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -103,7 +103,7 @@
elif class_name == 'LibraryDataset':
item_class = self.app.model.LibraryDataset
elif class_name == 'ToolShedRepository':
- item_class = self.app.model.ToolShedRepository
+ item_class = self.app.install_model.ToolShedRepository
else:
item_class = None
return item_class
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/galaxy/web/base/controllers/admin.py
--- a/lib/galaxy/web/base/controllers/admin.py
+++ b/lib/galaxy/web/base/controllers/admin.py
@@ -27,7 +27,7 @@
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
if trans.webapp.name == 'galaxy':
- installed_repositories = trans.sa_session.query( trans.model.ToolShedRepository ).first()
+ installed_repositories = trans.install_model.context.query( trans.install_model.ToolShedRepository ).first()
installing_repository_ids = get_ids_of_tool_shed_repositories_being_installed( trans, as_string=True )
return trans.fill_template( '/webapps/galaxy/admin/index.mako',
installed_repositories=installed_repositories,
@@ -1067,17 +1067,17 @@
def get_ids_of_tool_shed_repositories_being_installed( trans, as_string=False ):
installing_repository_ids = []
- new_status = trans.model.ToolShedRepository.installation_status.NEW
- cloning_status = trans.model.ToolShedRepository.installation_status.CLONING
- setting_tool_versions_status = trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS
- installing_dependencies_status = trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES
- loading_datatypes_status = trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
- for tool_shed_repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( or_( trans.model.ToolShedRepository.status == new_status,
- trans.model.ToolShedRepository.status == cloning_status,
- trans.model.ToolShedRepository.status == setting_tool_versions_status,
- trans.model.ToolShedRepository.status == installing_dependencies_status,
- trans.model.ToolShedRepository.status == loading_datatypes_status ) ):
+ new_status = trans.install_model.ToolShedRepository.installation_status.NEW
+ cloning_status = trans.install_model.ToolShedRepository.installation_status.CLONING
+ setting_tool_versions_status = trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS
+ installing_dependencies_status = trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES
+ loading_datatypes_status = trans.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
+ for tool_shed_repository in trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
+ .filter( or_( trans.install_model.ToolShedRepository.status == new_status,
+ trans.install_model.ToolShedRepository.status == cloning_status,
+ trans.install_model.ToolShedRepository.status == setting_tool_versions_status,
+ trans.install_model.ToolShedRepository.status == installing_dependencies_status,
+ trans.install_model.ToolShedRepository.status == loading_datatypes_status ) ):
installing_repository_ids.append( trans.security.encode_id( tool_shed_repository.id ) )
if as_string:
return ','.join( installing_repository_ids )
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -341,7 +341,7 @@
self.security = webapp.security
base.DefaultWebTransaction.__init__( self, environ )
self.setup_i18n()
- self.sa_session.expunge_all()
+ self.expunge_all()
self.debug = asbool( self.app.config.get( 'debug', False ) )
# Flag indicating whether we are in workflow building mode (means
# that the current history should not be used for parameter values
@@ -403,6 +403,15 @@
"""
return self.app.model.context.current
+ def expunge_all( self ):
+ app = self.app
+ context = app.model.context
+ context.expunge_all()
+ # This is a bit hacky, should refactor this. Maybe refactor to app -> expunge_all()
+ if hasattr(app, 'install_model'):
+ install_model = app.install_model
+ if install_model != app.model:
+ install_model.context.expunge_all()
def get_user( self ):
"""Return the current user if logged in or None."""
@@ -953,6 +962,10 @@
def model( self ):
return self.app.model
+ @property
+ def install_model( self ):
+ return self.app.install_model
+
def make_form_data( self, name, **kwargs ):
rval = self.template_context[name] = FormData()
rval.values.update( kwargs )
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -143,8 +143,8 @@
# Example URL: http://localhost:8763/api/tool_shed_repositories
tool_shed_repository_dicts = []
try:
- query = trans.sa_session.query( trans.app.model.ToolShedRepository ) \
- .order_by( trans.app.model.ToolShedRepository.table.c.name ) \
+ query = trans.install_model.context.query( trans.app.install_model.ToolShedRepository ) \
+ .order_by( trans.app.install_model.ToolShedRepository.table.c.name ) \
.all()
for tool_shed_repository in query:
tool_shed_repository_dict = tool_shed_repository.to_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
@@ -325,9 +325,9 @@
repository_util.order_components_for_installation( trans, tsr_ids, repo_info_dicts, tool_panel_section_keys=tool_panel_section_keys )
# Install the repositories, keeping track of each one for later display.
for index, tsr_id in enumerate( ordered_tsr_ids ):
- tool_shed_repository = trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
- if tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.NEW,
- trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ tool_shed_repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
+ if tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.NEW,
+ trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
repo_info_dict = ordered_repo_info_dicts[ index ]
tool_panel_section_key = ordered_tool_panel_section_keys[ index ]
@@ -470,7 +470,7 @@
ordered_repo_info_dicts = repair_dict.get( 'ordered_repo_info_dicts', [] )
if ordered_tsr_ids and ordered_repo_info_dicts:
for index, tsr_id in enumerate( ordered_tsr_ids ):
- repository = trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
+ repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
repo_info_dict = ordered_repo_info_dicts[ index ]
# TODO: handle errors in repair_dict.
repair_dict = repository_util.repair_tool_shed_repository( trans,
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/galaxy/webapps/galaxy/controllers/admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -428,8 +428,9 @@
num_rows_per_page = 50
preserve_state = False
use_paging = True
+
def build_initial_query( self, trans, **kwd ):
- return trans.sa_session.query( self.model_class )
+ return trans.install_model.context.query( self.model_class )
class AdminGalaxy( BaseUIController, Admin, AdminActions, UsesQuotaMixin, QuotaParamParser ):
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -263,12 +263,12 @@
errors = '%s %s' % ( errors, error_message )
tool_shed_repository.deleted = True
if remove_from_disk_checked:
- tool_shed_repository.status = trans.model.ToolShedRepository.installation_status.UNINSTALLED
+ tool_shed_repository.status = trans.install_model.ToolShedRepository.installation_status.UNINSTALLED
tool_shed_repository.error_message = None
else:
- tool_shed_repository.status = trans.model.ToolShedRepository.installation_status.DEACTIVATED
- trans.sa_session.add( tool_shed_repository )
- trans.sa_session.flush()
+ tool_shed_repository.status = trans.install_model.ToolShedRepository.installation_status.DEACTIVATED
+ trans.install_model.context.add( tool_shed_repository )
+ trans.install_model.context.flush()
if remove_from_disk_checked:
message = 'The repository named <b>%s</b> has been uninstalled. ' % tool_shed_repository.name
if errors:
@@ -438,7 +438,7 @@
tool_dependencies_config=tool_dependencies_config,
tool_dependencies=tool_dependencies )
for installed_tool_dependency in installed_tool_dependencies:
- if installed_tool_dependency.status == trans.app.model.ToolDependency.installation_status.ERROR:
+ if installed_tool_dependency.status == trans.app.install_model.ToolDependency.installation_status.ERROR:
text = util.unicodify( installed_tool_dependency.error_message )
if text is not None:
message += ' %s' % text
@@ -469,8 +469,8 @@
# Filter tool dependencies to only those that are installed.
tool_dependencies_for_installation = []
for tool_dependency in tool_dependencies:
- if tool_dependency.status in [ trans.model.ToolDependency.installation_status.UNINSTALLED,
- trans.model.ToolDependency.installation_status.ERROR ]:
+ if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.UNINSTALLED,
+ trans.install_model.ToolDependency.installation_status.ERROR ]:
tool_dependencies_for_installation.append( tool_dependency )
if tool_dependencies_for_installation:
# Redirect back to the ToolDependencyGrid before initiating installation.
@@ -544,9 +544,9 @@
# TODO: I believe this block should be removed, but make sure..
repositories_for_uninstallation = []
for repository_id in tsridslist:
- repository = trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( repository_id ) )
- if repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED,
- trans.model.ToolShedRepository.installation_status.ERROR ]:
+ repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( repository_id ) )
+ if repository.status in [ trans.install_model.ToolShedRepository.installation_status.INSTALLED,
+ trans.install_model.ToolShedRepository.installation_status.ERROR ]:
repositories_for_uninstallation.append( repository )
if repositories_for_uninstallation:
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
@@ -570,9 +570,9 @@
ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys = \
repository_util.order_components_for_installation( trans, tsr_ids, repo_info_dicts, tool_panel_section_keys=tool_panel_section_keys )
for tsr_id in ordered_tsr_ids:
- repository = trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
- if repository.status in [ trans.model.ToolShedRepository.installation_status.NEW,
- trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
+ if repository.status in [ trans.install_model.ToolShedRepository.installation_status.NEW,
+ trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
repositories_for_installation.append( repository )
repo_info_dict, tool_panel_section_key = repository_util.get_repository_components_for_installation( tsr_id,
ordered_tsr_ids,
@@ -600,7 +600,7 @@
if not repository:
return trans.show_error_message( 'Invalid repository specified.' )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
- if repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING ]:
+ if repository.status in [ trans.install_model.ToolShedRepository.installation_status.CLONING ]:
tool_shed_repository_ids = [ repository_id ]
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='monitor_repository_installation',
@@ -628,8 +628,8 @@
elif kwd.get( 'edit_repository_button', False ):
if description != repository.description:
repository.description = description
- trans.sa_session.add( repository )
- trans.sa_session.flush()
+ trans.install_model.context.add( repository )
+ trans.install_model.context.flush()
message = "The repository information has been updated."
containers_dict = metadata_util.populate_containers_dict_from_repository_metadata( trans=trans,
tool_shed_url=tool_shed_url,
@@ -678,8 +678,8 @@
tool_dependencies_for_uninstallation = []
for tool_dependency_id in tool_dependency_ids:
tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id )
- if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED,
- trans.model.ToolDependency.installation_status.ERROR ]:
+ if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.INSTALLED,
+ trans.install_model.ToolDependency.installation_status.ERROR ]:
tool_dependencies_for_uninstallation.append( tool_dependency )
if tool_dependencies_for_uninstallation:
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
@@ -693,8 +693,8 @@
tool_dependencies_for_installation = []
for tool_dependency_id in tool_dependency_ids:
tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id )
- if tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
- trans.model.ToolDependency.installation_status.UNINSTALLED ]:
+ if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+ trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
tool_dependencies_for_installation.append( tool_dependency )
if tool_dependencies_for_installation:
self.initiate_tool_dependency_installation( trans, tool_dependencies_for_installation )
@@ -757,8 +757,8 @@
tool_dependencies_for_installation = []
for tool_dependency_id in tool_dependency_ids:
tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id )
- if tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
- trans.model.ToolDependency.installation_status.UNINSTALLED ]:
+ if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+ trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
tool_dependencies_for_installation.append( tool_dependency )
if tool_dependencies_for_installation:
self.initiate_tool_dependency_installation( trans, tool_dependencies_for_installation )
@@ -1054,7 +1054,7 @@
no_changes_checked=no_changes_checked,
tool_panel_section=tool_panel_section,
new_tool_panel_section=new_tool_panel_section )
- if tool_shed_repository.status == trans.model.ToolShedRepository.installation_status.UNINSTALLED:
+ if tool_shed_repository.status == trans.install_model.ToolShedRepository.installation_status.UNINSTALLED:
# The repository's status must be updated from 'Uninstalled' to 'New' when initiating reinstall so the repository_installation_updater will function.
tool_shed_repository = suc.create_or_update_tool_shed_repository( trans.app,
tool_shed_repository.name,
@@ -1063,7 +1063,7 @@
tool_shed_repository.ctx_rev,
repository_clone_url,
metadata,
- trans.model.ToolShedRepository.installation_status.NEW,
+ trans.install_model.ToolShedRepository.installation_status.NEW,
tool_shed_repository.changeset_revision,
tool_shed_repository.owner,
tool_shed_repository.dist_to_shed )
@@ -1133,12 +1133,12 @@
tsr_ids = [ r.id for r in created_or_updated_tool_shed_repositories ]
tool_shed_repositories = []
for tsr_id in tsr_ids:
- tsr = trans.sa_session.query( trans.model.ToolShedRepository ).get( tsr_id )
+ tsr = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( tsr_id )
tool_shed_repositories.append( tsr )
clause_list = []
for tsr_id in tsr_ids:
- clause_list.append( trans.model.ToolShedRepository.table.c.id == tsr_id )
- query = trans.sa_session.query( trans.model.ToolShedRepository ).filter( or_( *clause_list ) )
+ clause_list.append( trans.install_model.ToolShedRepository.table.c.id == tsr_id )
+ query = trans.install_model.context.current.query( trans.install_model.ToolShedRepository ).filter( or_( *clause_list ) )
return trans.fill_template( 'admin/tool_shed_repository/initiate_repository_installation.mako',
encoded_kwd=encoded_kwd,
query=query,
@@ -1177,7 +1177,7 @@
if ordered_tsr_ids and ordered_repo_info_dicts:
repositories_for_repair = []
for tsr_id in ordered_tsr_ids:
- repository = trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
+ repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
repositories_for_repair.append( repository )
return self.repair_tool_shed_repositories( trans, repositories_for_repair, ordered_repo_info_dicts )
tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
@@ -1219,7 +1219,7 @@
status_list = util.listify( status_list )
for tup in zip( ids, status_list ):
id, status = tup
- repository = trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+ repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( id ) )
if repository.status != status:
rval.append( dict( id=id,
status=repository.status,
@@ -1440,8 +1440,8 @@
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
suc.update_in_shed_tool_config( trans.app, repository )
- trans.sa_session.add( repository )
- trans.sa_session.flush()
+ trans.install_model.context.add( repository )
+ trans.install_model.context.flush()
message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name
status = 'done'
else:
@@ -1464,7 +1464,7 @@
if kwd.get( 'reset_repository', False ):
repository_util.set_repository_attributes( trans,
repository,
- status=trans.model.ToolShedRepository.installation_status.NEW,
+ status=trans.install_model.ToolShedRepository.installation_status.NEW,
error_message=None,
deleted=False,
uninstalled=False,
@@ -1530,7 +1530,7 @@
status_list = util.listify( status_list )
for tup in zip( ids, status_list ):
id, status = tup
- tool_dependency = trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) )
+ tool_dependency = trans.install_model.context.query( trans.install_model.ToolDependency ).get( trans.security.decode_id( id ) )
if tool_dependency.status != status:
rval.append( dict( id=id,
status=tool_dependency.status,
@@ -1634,8 +1634,8 @@
repository.tool_shed_status = tool_shed_status_dict
else:
repository.tool_shed_status = None
- trans.sa_session.add( repository )
- trans.sa_session.flush()
+ trans.install_model.context.add( repository )
+ trans.install_model.context.flush()
if 'tools' in metadata_dict:
tool_panel_dict = metadata_dict.get( 'tool_panel_section', None )
if tool_panel_dict is None:
@@ -1688,8 +1688,8 @@
success_count = 0
repository_names_not_updated = []
updated_count = 0
- for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( trans.model.ToolShedRepository.table.c.deleted == False ):
+ for repository in trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
+ .filter( trans.install_model.ToolShedRepository.table.c.deleted == False ):
ok, updated = suc.check_or_update_tool_shed_status_for_installed_repository( trans, repository )
if ok:
success_count += 1
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -261,8 +261,8 @@
@classmethod
def __get_tool_version( cls, trans, tool_id ):
# Return a ToolVersion if one exists for tool_id.
- return trans.sa_session.query( trans.app.model.ToolVersion ) \
- .filter( trans.app.model.ToolVersion.table.c.tool_id == tool_id ) \
+ return trans.install_model.context.query( trans.install_model.ToolVersion ) \
+ .filter( trans.install_model.ToolVersion.table.c.tool_id == tool_id ) \
.first()
def save_to_step( self, step ):
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/tool_shed/galaxy_install/__init__.py
--- a/lib/tool_shed/galaxy_install/__init__.py
+++ b/lib/tool_shed/galaxy_install/__init__.py
@@ -12,8 +12,8 @@
class InstalledRepositoryManager( object ):
def __init__( self, app ):
self.app = app
- self.model = self.app.model
- self.sa_session = self.model.context.current
+ self.install_model = self.app.install_model
+ self.context = self.install_model.context
self.tool_configs = self.app.config.tool_configs
if self.app.config.migrated_tools_config not in self.tool_configs:
self.tool_configs.append( self.app.config.migrated_tools_config )
@@ -37,10 +37,10 @@
return relative_path
return None
def load_proprietary_datatypes( self ):
- for tool_shed_repository in self.sa_session.query( self.model.ToolShedRepository ) \
- .filter( and_( self.model.ToolShedRepository.table.c.includes_datatypes==True,
- self.model.ToolShedRepository.table.c.deleted==False ) ) \
- .order_by( self.model.ToolShedRepository.table.c.id ):
+ for tool_shed_repository in self.context.query( self.install_model.ToolShedRepository ) \
+ .filter( and_( self.install_model.ToolShedRepository.table.c.includes_datatypes==True,
+ self.install_model.ToolShedRepository.table.c.deleted==False ) ) \
+ .order_by( self.install_model.ToolShedRepository.table.c.id ):
relative_install_dir = self.get_repository_install_dir( tool_shed_repository )
if relative_install_dir:
installed_repository_dict = tool_shed.util.datatype_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir )
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
--- a/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
+++ b/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
@@ -196,7 +196,7 @@
use_paging = False
def build_initial_query( self, trans, **kwd ):
- return trans.sa_session.query( self.model_class ) \
+ return trans.install_model.context.query( self.model_class ) \
.order_by( self.model_class.table.c.tool_shed,
self.model_class.table.c.name,
self.model_class.table.c.owner,
@@ -244,21 +244,21 @@
def get_value( self, trans, grid, tool_shed_repository ):
status_label = tool_shed_repository.status
- if tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING,
- trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
- trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
- trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
- bgcolor = trans.model.ToolShedRepository.states.INSTALLING
- elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.NEW,
- trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
- bgcolor = trans.model.ToolShedRepository.states.UNINSTALLED
- elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR ]:
- bgcolor = trans.model.ToolShedRepository.states.ERROR
- elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
- bgcolor = trans.model.ToolShedRepository.states.WARNING
- elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED ]:
+ if tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.CLONING,
+ trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+ trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+ trans.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
+ bgcolor = trans.install_model.ToolShedRepository.states.INSTALLING
+ elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.NEW,
+ trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ bgcolor = trans.install_model.ToolShedRepository.states.UNINSTALLED
+ elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.ERROR ]:
+ bgcolor = trans.install_model.ToolShedRepository.states.ERROR
+ elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ bgcolor = trans.install_model.ToolShedRepository.states.WARNING
+ elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.INSTALLED ]:
if tool_shed_repository.missing_tool_dependencies or tool_shed_repository.missing_repository_dependencies:
- bgcolor = trans.model.ToolShedRepository.states.WARNING
+ bgcolor = trans.install_model.ToolShedRepository.states.WARNING
if tool_shed_repository.missing_tool_dependencies and not tool_shed_repository.missing_repository_dependencies:
status_label = '%s, missing tool dependencies' % status_label
if tool_shed_repository.missing_repository_dependencies and not tool_shed_repository.missing_tool_dependencies:
@@ -266,9 +266,9 @@
if tool_shed_repository.missing_tool_dependencies and tool_shed_repository.missing_repository_dependencies:
status_label = '%s, missing both tool and repository dependencies' % status_label
if not tool_shed_repository.missing_tool_dependencies and not tool_shed_repository.missing_repository_dependencies:
- bgcolor = trans.model.ToolShedRepository.states.OK
+ bgcolor = trans.install_model.ToolShedRepository.states.OK
else:
- bgcolor = trans.model.ToolShedRepository.states.ERROR
+ bgcolor = trans.install_model.ToolShedRepository.states.ERROR
rval = '<div class="count-box state-color-%s" id="RepositoryStatus-%s">%s</div>' % \
( bgcolor, trans.security.encode_id( tool_shed_repository.id ), status_label )
return rval
@@ -311,21 +311,21 @@
for tool_shed_repository_id in tool_shed_repository_ids:
clause_list.append( self.model_class.table.c.id == trans.security.decode_id( tool_shed_repository_id ) )
if clause_list:
- return trans.sa_session.query( self.model_class ) \
+ return trans.install_model.context.query( self.model_class ) \
.filter( or_( *clause_list ) )
- for tool_shed_repository in trans.sa_session.query( self.model_class ) \
+ for tool_shed_repository in trans.install_model.context.query( self.model_class ) \
.filter( self.model_class.table.c.deleted == False ):
- if tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.NEW,
- trans.model.ToolShedRepository.installation_status.CLONING,
- trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
- trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
- trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
+ if tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.NEW,
+ trans.install_model.ToolShedRepository.installation_status.CLONING,
+ trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+ trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+ trans.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
clause_list.append( self.model_class.table.c.id == tool_shed_repository.id )
if clause_list:
- return trans.sa_session.query( self.model_class ) \
+ return trans.install_model.context.query( self.model_class ) \
.filter( or_( *clause_list ) )
- return trans.sa_session.query( self.model_class ) \
- .filter( self.model_class.table.c.status == trans.model.ToolShedRepository.installation_status.NEW )
+ return trans.install_model.context.query( self.model_class ) \
+ .filter( self.model_class.table.c.status == trans.install_model.ToolShedRepository.installation_status.NEW )
def apply_query_filter( self, trans, query, **kwd ):
tool_shed_repository_id = kwd.get( 'tool_shed_repository_id', None )
@@ -358,15 +358,15 @@
class StatusColumn( grids.TextColumn ):
def get_value( self, trans, grid, tool_dependency ):
- if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLING ]:
- bgcolor = trans.model.ToolDependency.states.INSTALLING
- elif tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
- trans.model.ToolDependency.installation_status.UNINSTALLED ]:
- bgcolor = trans.model.ToolDependency.states.UNINSTALLED
- elif tool_dependency.status in [ trans.model.ToolDependency.installation_status.ERROR ]:
- bgcolor = trans.model.ToolDependency.states.ERROR
- elif tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED ]:
- bgcolor = trans.model.ToolDependency.states.OK
+ if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.INSTALLING ]:
+ bgcolor = trans.install_model.ToolDependency.states.INSTALLING
+ elif tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+ trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+ bgcolor = trans.install_model.ToolDependency.states.UNINSTALLED
+ elif tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.ERROR ]:
+ bgcolor = trans.install_model.ToolDependency.states.ERROR
+ elif tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.INSTALLED ]:
+ bgcolor = trans.install_model.ToolDependency.states.OK
rval = '<div class="count-box state-color-%s" id="ToolDependencyStatus-%s">%s</div>' % \
( bgcolor, trans.security.encode_id( tool_dependency.id ), tool_dependency.status )
return rval
@@ -400,9 +400,9 @@
clause_list = []
for tool_dependency_id in tool_dependency_ids:
clause_list.append( self.model_class.table.c.id == trans.security.decode_id( tool_dependency_id ) )
- return trans.sa_session.query( self.model_class ) \
+ return trans.install_model.context.query( self.model_class ) \
.filter( or_( *clause_list ) )
- return trans.sa_session.query( self.model_class )
+ return trans.install_model.context.query( self.model_class )
def apply_query_filter( self, trans, query, **kwd ):
tool_dependency_id = kwd.get( 'tool_dependency_id', None )
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -145,7 +145,7 @@
ctx_rev=ctx_rev,
repository_clone_url=repository_clone_url,
metadata_dict={},
- status=self.app.model.ToolShedRepository.installation_status.NEW,
+ status=self.app.install_model.ToolShedRepository.installation_status.NEW,
current_changeset_revision=None,
owner=self.repository_owner,
dist_to_shed=True )
@@ -387,8 +387,8 @@
updating_installed_repository=False,
persist=True )
tool_shed_repository.metadata = metadata_dict
- self.app.sa_session.add( tool_shed_repository )
- self.app.sa_session.flush()
+ self.app.install_model.context.add( tool_shed_repository )
+ self.app.install_model.context.flush()
has_tool_dependencies = self.__has_tool_dependencies( metadata_dict )
if has_tool_dependencies:
# All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed.
@@ -427,7 +427,7 @@
# Install tool dependencies.
suc.update_tool_shed_repository_status( self.app,
tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from disk.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=self.app,
@@ -435,15 +435,15 @@
tool_dependencies_config=tool_dependencies_config,
tool_dependencies=tool_dependencies )
for installed_tool_dependency in installed_tool_dependencies:
- if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR:
+ if installed_tool_dependency.status == self.app.install_model.ToolDependency.installation_status.ERROR:
print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':'
print installed_tool_dependency.error_message, '\n\n'
if 'datatypes' in metadata_dict:
- tool_shed_repository.status = self.app.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
+ tool_shed_repository.status = self.app.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
tool_shed_repository.includes_datatypes = True
- self.app.sa_session.add( tool_shed_repository )
- self.app.sa_session.flush()
+ self.app.install_model.context.add( tool_shed_repository )
+ self.app.install_model.context.flush()
work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-hrc" )
datatypes_config = suc.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, repo_install_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
@@ -478,10 +478,10 @@
cloned_ok = self.__iscloned( clone_dir )
is_installed = False
# Any of the following states should count as installed in this context.
- if tool_shed_repository.status in [ self.app.model.ToolShedRepository.installation_status.INSTALLED,
- self.app.model.ToolShedRepository.installation_status.ERROR,
- self.app.model.ToolShedRepository.installation_status.UNINSTALLED,
- self.app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ if tool_shed_repository.status in [ self.app.install_model.ToolShedRepository.installation_status.INSTALLED,
+ self.app.install_model.ToolShedRepository.installation_status.ERROR,
+ self.app.install_model.ToolShedRepository.installation_status.UNINSTALLED,
+ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
is_installed = True
if cloned_ok and is_installed:
print "Skipping automatic install of repository '", tool_shed_repository.name, "' because it has already been installed in location ", clone_dir
@@ -495,7 +495,7 @@
tool_shed_repository.owner,
tool_shed_repository.installed_changeset_revision )
if not cloned_ok:
- suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
+ suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.install_model.ToolShedRepository.installation_status.CLONING )
cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok and not is_installed:
self.handle_repository_contents( tool_shed_repository=tool_shed_repository,
@@ -504,12 +504,12 @@
repository_elem=repository_elem,
install_dependencies=install_dependencies,
is_repository_dependency=is_repository_dependency )
- self.app.sa_session.refresh( tool_shed_repository )
+ self.app.install_model.context.refresh( tool_shed_repository )
metadata_dict = tool_shed_repository.metadata
if 'tools' in metadata_dict:
suc.update_tool_shed_repository_status( self.app,
tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+ self.app.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
( self.tool_shed_url, tool_shed_repository.name, self.repository_owner, tool_shed_repository.installed_changeset_revision )
@@ -528,30 +528,30 @@
tool_version_using_old_id = tool_util.get_tool_version( self.app, old_tool_id )
tool_version_using_guid = tool_util.get_tool_version( self.app, tool_id )
if not tool_version_using_old_id:
- tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id,
+ tool_version_using_old_id = self.app.install_model.ToolVersion( tool_id=old_tool_id,
tool_shed_repository=tool_shed_repository )
- self.app.sa_session.add( tool_version_using_old_id )
- self.app.sa_session.flush()
+ self.app.install_model.context.add( tool_version_using_old_id )
+ self.app.install_model.context.flush()
if not tool_version_using_guid:
- tool_version_using_guid = self.app.model.ToolVersion( tool_id=tool_id,
+ tool_version_using_guid = self.app.install_model.ToolVersion( tool_id=tool_id,
tool_shed_repository=tool_shed_repository )
- self.app.sa_session.add( tool_version_using_guid )
- self.app.sa_session.flush()
+ self.app.install_model.context.add( tool_version_using_guid )
+ self.app.install_model.context.flush()
# Associate the two versions as parent / child.
tool_version_association = tool_util.get_tool_version_association( self.app,
tool_version_using_old_id,
tool_version_using_guid )
if not tool_version_association:
- tool_version_association = self.app.model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
+ tool_version_association = self.app.install_model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
parent_id=tool_version_using_old_id.id )
- self.app.sa_session.add( tool_version_association )
- self.app.sa_session.flush()
- suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
+ self.app.install_model.context.add( tool_version_association )
+ self.app.install_model.context.flush()
+ suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.install_model.ToolShedRepository.installation_status.INSTALLED )
else:
print 'Error attempting to clone repository %s: %s' % ( str( tool_shed_repository.name ), str( error_message ) )
suc.update_tool_shed_repository_status( self.app,
tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.ERROR,
+ self.app.install_model.ToolShedRepository.installation_status.ERROR,
error_message=error_message )
@property
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -360,8 +360,8 @@
tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( trans.app, tool_shed_repository )
if tool_shed_status_dict:
tool_shed_repository.tool_shed_status = tool_shed_status_dict
- trans.sa_session.add( tool_shed_repository )
- trans.sa_session.flush()
+ trans.install_model.context.add( tool_shed_repository )
+ trans.install_model.context.flush()
if 'tool_dependencies' in metadata_dict and not reinstalling:
tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
if 'sample_files' in metadata_dict:
@@ -402,11 +402,11 @@
tool_shed_repository,
repository_tools_tups )
if 'datatypes' in metadata_dict:
- tool_shed_repository.status = trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
+ tool_shed_repository.status = trans.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
tool_shed_repository.includes_datatypes = True
- trans.sa_session.add( tool_shed_repository )
- trans.sa_session.flush()
+ trans.install_model.context.add( tool_shed_repository )
+ trans.install_model.context.flush()
files_dir = relative_install_dir
if shed_config_dict.get( 'tool_path' ):
files_dir = os.path.join( shed_config_dict[ 'tool_path' ], files_dir )
@@ -494,12 +494,12 @@
tsr_ids = [ r.id for r in created_or_updated_tool_shed_repositories ]
tool_shed_repositories = []
for tsr_id in tsr_ids:
- tsr = trans.sa_session.query( trans.model.ToolShedRepository ).get( tsr_id )
+ tsr = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( tsr_id )
tool_shed_repositories.append( tsr )
clause_list = []
for tsr_id in tsr_ids:
- clause_list.append( trans.model.ToolShedRepository.table.c.id == tsr_id )
- query = trans.sa_session.query( trans.model.ToolShedRepository ).filter( or_( *clause_list ) )
+ clause_list.append( trans.install_model.ToolShedRepository.table.c.id == tsr_id )
+ query = trans.install_model.context.query( trans.install_model.ToolShedRepository ).filter( or_( *clause_list ) )
return encoded_kwd, query, tool_shed_repositories, encoded_repository_ids
def install_tool_shed_repository( trans, tool_shed_repository, repo_info_dict, tool_panel_section_key, shed_tool_conf, tool_path, install_tool_dependencies,
@@ -516,7 +516,7 @@
if isinstance( repo_info_dict, basestring ):
repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
- suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
+ suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.install_model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
relative_clone_dir = suc.generate_tool_shed_repository_install_dir( repository_clone_url, tool_shed_repository.installed_changeset_revision )
@@ -543,13 +543,13 @@
tool_section=tool_section,
shed_tool_conf=shed_tool_conf,
reinstalling=reinstalling )
- trans.sa_session.refresh( tool_shed_repository )
+ trans.install_model.context.refresh( tool_shed_repository )
metadata = tool_shed_repository.metadata
if 'tools' in metadata:
# Get the tool_versions from the tool shed for each tool in the installed change set.
suc.update_tool_shed_repository_status( trans.app,
tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+ trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed )
url = suc.url_join( tool_shed_url,
'/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' %
@@ -568,7 +568,7 @@
# Install tool dependencies.
suc.update_tool_shed_repository_status( trans.app,
tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )
installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
@@ -576,12 +576,12 @@
tool_dependencies_config=tool_dependencies_config,
tool_dependencies=tool_shed_repository.tool_dependencies )
suc.remove_dir( work_dir )
- suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
+ suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.install_model.ToolShedRepository.installation_status.INSTALLED )
else:
# An error occurred while cloning the repository, so reset everything necessary to enable another attempt.
set_repository_attributes( trans,
tool_shed_repository,
- status=trans.model.ToolShedRepository.installation_status.ERROR,
+ status=trans.install_model.ToolShedRepository.installation_status.ERROR,
error_message=error_message,
deleted=False,
uninstalled=False,
@@ -751,14 +751,14 @@
metadata = repository.metadata
repair_dict = {}
- if repository.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ if repository.status in [ trans.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
try:
common_install_util.activate_repository( trans, repository )
except Exception, e:
error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) )
log.debug( error_message )
repair_dict [ repository.name ] = error_message
- elif repository.status not in [ trans.model.ToolShedRepository.installation_status.INSTALLED ]:
+ elif repository.status not in [ trans.install_model.ToolShedRepository.installation_status.INSTALLED ]:
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
# Reset the repository attributes to the New state for installation.
if metadata:
@@ -772,7 +772,7 @@
tool_panel_section_key = None
set_repository_attributes( trans,
repository,
- status=trans.model.ToolShedRepository.installation_status.NEW,
+ status=trans.install_model.ToolShedRepository.installation_status.NEW,
error_message=None,
deleted=False,
uninstalled=False,
@@ -785,7 +785,7 @@
tool_path,
install_tool_dependencies=True,
reinstalling=True )
- if repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR ]:
+ if repository.status in [ trans.install_model.ToolShedRepository.installation_status.ERROR ]:
repair_dict = add_repair_dict_entry( repository.name, repository.error_message )
else:
# We have an installed tool shed repository, so handle tool dependencies if necessary.
@@ -793,17 +793,17 @@
work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itdep" )
# Reset missing tool dependencies.
for tool_dependency in repository.missing_tool_dependencies:
- if tool_dependency.status in [ trans.model.ToolDependency.installation_status.ERROR,
- trans.model.ToolDependency.installation_status.INSTALLING ]:
+ if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.ERROR,
+ trans.install_model.ToolDependency.installation_status.INSTALLING ]:
tool_dependency = tool_dependency_util.set_tool_dependency_attributes( trans.app,
tool_dependency=tool_dependency,
- status=trans.model.ToolDependency.installation_status.UNINSTALLED,
+ status=trans.install_model.ToolDependency.installation_status.UNINSTALLED,
error_message=None,
remove_from_disk=True )
# Install tool dependencies.
suc.update_tool_shed_repository_status( trans.app,
repository,
- trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repository.repo_path( trans.app ) )
installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
@@ -811,10 +811,10 @@
tool_dependencies_config=tool_dependencies_config,
tool_dependencies=repository.tool_dependencies )
for installed_tool_dependency in installed_tool_dependencies:
- if installed_tool_dependency.status in [ trans.model.ToolDependency.installation_status.ERROR ]:
+ if installed_tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.ERROR ]:
repair_dict = add_repair_dict_entry( repository.name, installed_tool_dependency.error_message )
suc.remove_dir( work_dir )
- suc.update_tool_shed_repository_status( trans.app, repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
+ suc.update_tool_shed_repository_status( trans.app, repository, trans.install_model.ToolShedRepository.installation_status.INSTALLED )
return repair_dict
def set_repository_attributes( trans, repository, status, error_message, deleted, uninstalled, remove_from_disk=False ):
@@ -831,5 +831,5 @@
repository.status = status
repository.deleted = deleted
repository.uninstalled = uninstalled
- trans.sa_session.add( repository )
- trans.sa_session.flush()
+ trans.install_model.context.add( repository )
+ trans.install_model.context.flush()
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -169,20 +169,20 @@
env_file_builder.append_line( action="source", value=shell_file_path )
def handle_command( app, tool_dependency, install_dir, cmd, return_output=False ):
- sa_session = app.model.context.current
+ context = app.install_model.context
with settings( warn_only=True ):
output = local( cmd, capture=True )
log_results( cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
if output.return_code:
- tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.status = app.install_model.ToolDependency.installation_status.ERROR
if output.stderr:
tool_dependency.error_message = unicodify( str( output.stderr )[ :32768 ] )
elif output.stdout:
tool_dependency.error_message = unicodify( str( output.stdout )[ :32768 ] )
else:
tool_dependency.error_message = "Unknown error occurred executing shell command %s, return_code: %s" % ( str( cmd ), str( output.return_code ) )
- sa_session.add( tool_dependency )
- sa_session.flush()
+ context.add( tool_dependency )
+ context.flush()
if return_output:
return output
return output.return_code
@@ -294,7 +294,6 @@
def install_and_build_package( app, tool_dependency, actions_dict ):
"""Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
- sa_session = app.model.context.current
install_dir = actions_dict[ 'install_dir' ]
package_name = actions_dict[ 'package_name' ]
actions = actions_dict.get( 'actions', None )
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -82,7 +82,7 @@
name=package_name,
version=package_version,
type='package',
- status=app.model.ToolDependency.installation_status.NEVER_INSTALLED,
+ status=app.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
set_status=True )
# Create an env.sh file for the tool_dependency whose first line will source the env.sh file located in
# the path defined by required_tool_dependency_env_file_path. It doesn't matter if the required env.sh
@@ -109,7 +109,7 @@
tool_dependency = \
tool_dependency_util.set_tool_dependency_attributes( app,
tool_dependency=tool_dependency,
- status=app.model.ToolDependency.installation_status.INSTALLED )
+ status=app.install_model.ToolDependency.installation_status.INSTALLED )
tool_dependencies.append( tool_dependency )
return tool_dependencies
@@ -125,13 +125,13 @@
return file_path
def get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app, tool_shed_url, name, owner, changeset_revision ):
- sa_session = app.model.context.current
+ sa_session = app.install_model.context
tool_shed = td_common_util.clean_tool_shed_url( tool_shed_url )
- tool_shed_repository = sa_session.query( app.model.ToolShedRepository ) \
- .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
- app.model.ToolShedRepository.table.c.name == name,
- app.model.ToolShedRepository.table.c.owner == owner,
- app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
+ tool_shed_repository = sa_session.query( app.install_model.ToolShedRepository ) \
+ .filter( and_( app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed,
+ app.install_model.ToolShedRepository.table.c.name == name,
+ app.install_model.ToolShedRepository.table.c.owner == owner,
+ app.install_model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
.first()
if tool_shed_repository:
return tool_shed_repository
@@ -141,11 +141,11 @@
if text:
changeset_revisions = listify( text )
for changeset_revision in changeset_revisions:
- tool_shed_repository = sa_session.query( app.model.ToolShedRepository ) \
- .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
- app.model.ToolShedRepository.table.c.name == name,
- app.model.ToolShedRepository.table.c.owner == owner,
- app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
+ tool_shed_repository = sa_session.query( app.install_model.ToolShedRepository ) \
+ .filter( and_( app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed,
+ app.install_model.ToolShedRepository.table.c.name == name,
+ app.install_model.ToolShedRepository.table.c.owner == owner,
+ app.install_model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
.first()
if tool_shed_repository:
return tool_shed_repository
@@ -263,7 +263,7 @@
return handled_tool_dependencies
def install_and_build_package_via_fabric( app, tool_dependency, actions_dict ):
- sa_session = app.model.context.current
+ sa_session = app.install_model.context
try:
# There is currently only one fabric method.
tool_dependency = fabric_util.install_and_build_package( app, tool_dependency, actions_dict )
@@ -281,7 +281,7 @@
def install_package( app, elem, tool_shed_repository, tool_dependencies=None ):
# The value of tool_dependencies is a partial or full list of ToolDependency records associated with the tool_shed_repository.
- sa_session = app.model.context.current
+ sa_session = app.install_model.context
tool_dependency = None
# The value of package_name should match the value of the "package" type in the tool config's <requirements> tag set, but it's not required.
package_name = elem.get( 'name', None )
@@ -296,7 +296,7 @@
package_version,
tool_shed_repository )
for rd_tool_dependency in rd_tool_dependencies:
- if rd_tool_dependency.status == app.model.ToolDependency.installation_status.ERROR:
+ if rd_tool_dependency.status == app.install_model.ToolDependency.installation_status.ERROR:
# We'll log the error here, but continue installing packages since some may not require this dependency.
print "Error installing tool dependency for required repository: %s" % str( rd_tool_dependency.error_message )
elif package_elem.tag == 'install':
@@ -328,7 +328,7 @@
name=package_name,
version=package_version,
type='package',
- status=app.model.ToolDependency.installation_status.INSTALLING,
+ status=app.install_model.ToolDependency.installation_status.INSTALLING,
set_status=True )
# Get the information about the current platform in case the tool dependency definition includes tag sets for installing
# compiled binaries.
@@ -366,7 +366,7 @@
package_name=package_name,
actions_elem=actions_elem,
action_elem=None )
- if tool_dependency.status == app.model.ToolDependency.installation_status.INSTALLED:
+ if tool_dependency.status == app.install_model.ToolDependency.installation_status.INSTALLED:
# If an <actions> tag was found that matches the current platform, and the install_via_fabric method
# did not result in an error state, set binary_installed to True in order to skip any remaining
# platform-specific <actions> tags.
@@ -384,7 +384,7 @@
log.debug( 'Proceeding with install and compile recipe for tool dependency %s.' % str( tool_dependency.name ) )
# Make sure to reset for installation if attempt at binary installation resulted in an error.
can_install = True
- if tool_dependency.status != app.model.ToolDependency.installation_status.NEVER_INSTALLED:
+ if tool_dependency.status != app.install_model.ToolDependency.installation_status.NEVER_INSTALLED:
removed, error_message = tool_dependency_util.remove_tool_dependency( app, tool_dependency )
if not removed:
log.debug( 'Error removing old files from installation directory %s: %s' % \
@@ -401,7 +401,7 @@
# an <actions> tag, such as a set_environment entry, or a download_file or download_by_url command to
# retrieve extra data for this tool dependency. Only do this if the tool dependency is not in an error
# state, otherwise skip this action.
- if actions_elem.tag == 'action' and tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ if actions_elem.tag == 'action' and tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR:
tool_dependency = install_via_fabric( app,
tool_dependency,
install_dir,
@@ -418,7 +418,7 @@
package_name=package_name,
actions_elem=actions_elems,
action_elem=None )
- if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ if tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR:
log.debug( 'Tool dependency %s version %s has been installed in %s.' % \
( str( package_name ), str( package_version ), str( install_dir ) ) )
else:
@@ -450,7 +450,7 @@
def install_via_fabric( app, tool_dependency, install_dir, package_name=None, proprietary_fabfile_path=None, actions_elem=None, action_elem=None, **kwd ):
"""Parse a tool_dependency.xml file's <actions> tag set to gather information for the installation via fabric."""
- sa_session = app.model.context.current
+ sa_session = app.install_model.context
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
actions_dict = dict( install_dir=install_dir )
@@ -816,7 +816,7 @@
# <set_environment version="1.0">
# <repository toolshed="<tool shed>" name="<repository name>" owner="<repository owner>" changeset_revision="<changeset revision>" />
# </set_environment>
- sa_session = app.model.context.current
+ sa_session = app.install_model.context
tool_dependency = None
env_var_version = elem.get( 'version', '1.0' )
for env_var_elem in elem:
@@ -846,7 +846,7 @@
name=env_var_name,
version=None,
type='set_environment',
- status=app.model.ToolDependency.installation_status.INSTALLING,
+ status=app.install_model.ToolDependency.installation_status.INSTALLING,
set_status=True )
if env_var_version == '1.0':
# Create this tool dependency's env.sh file.
@@ -858,15 +858,15 @@
log.debug( error_message )
tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
tool_dependency=tool_dependency,
- status=app.model.ToolDependency.installation_status.ERROR,
+ status=app.install_model.ToolDependency.installation_status.ERROR,
error_message=error_message,
remove_from_disk=False )
else:
- if tool_dependency.status not in [ app.model.ToolDependency.installation_status.ERROR,
- app.model.ToolDependency.installation_status.INSTALLED ]:
+ if tool_dependency.status not in [ app.install_model.ToolDependency.installation_status.ERROR,
+ app.install_model.ToolDependency.installation_status.INSTALLED ]:
tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
tool_dependency=tool_dependency,
- status=app.model.ToolDependency.installation_status.INSTALLED,
+ status=app.install_model.ToolDependency.installation_status.INSTALLED,
error_message=None,
remove_from_disk=False )
log.debug( 'Environment variable %s set in %s for tool dependency %s.' % \
@@ -875,7 +875,7 @@
error_message = 'Only set_environment version 1.0 is currently supported (i.e., change your tag to be <set_environment version="1.0">).'
tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
tool_dependency=tool_dependency,
- status=app.model.ToolDependency.installation_status.ERROR,
+ status=app.install_model.ToolDependency.installation_status.ERROR,
error_message=error_message,
remove_from_disk=False )
return tool_dependency
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/tool_shed/galaxy_install/update_manager.py
--- a/lib/tool_shed/galaxy_install/update_manager.py
+++ b/lib/tool_shed/galaxy_install/update_manager.py
@@ -15,7 +15,7 @@
def __init__( self, app ):
self.app = app
- self.sa_session = self.app.model.context.current
+ self.context = self.app.install_model.context
# Ideally only one Galaxy server process should be able to check for repository updates.
self.running = True
self.sleeper = Sleeper()
@@ -29,19 +29,19 @@
# Make a call to the tool shed for each installed repository to get the latest status information in the tool shed for the
# repository. This information includes items like newer installable repository revisions, current revision updates, whether
# the repository revision is the latest installable revision, and whether the repository has been deprecated in the tool shed.
- for repository in self.sa_session.query( self.app.model.ToolShedRepository ) \
- .filter( self.app.model.ToolShedRepository.table.c.deleted == False ):
+ for repository in self.context.query( self.app.install_model.ToolShedRepository ) \
+ .filter( self.app.install_model.ToolShedRepository.table.c.deleted == False ):
tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( self.app, repository )
if tool_shed_status_dict:
if tool_shed_status_dict != repository.tool_shed_status:
repository.tool_shed_status = tool_shed_status_dict
- self.sa_session.flush()
+ self.context.flush()
else:
# The received tool_shed_status_dict is an empty dictionary, so coerce to None.
tool_shed_status_dict = None
if tool_shed_status_dict != repository.tool_shed_status:
repository.tool_shed_status = tool_shed_status_dict
- self.sa_session.flush()
+ self.context.flush()
self.sleeper.sleep( self.seconds_to_sleep )
log.info( 'Update manager restarter shutting down...' )
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -26,7 +26,7 @@
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, repository )
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repository.deleted = False
- repository.status = trans.model.ToolShedRepository.installation_status.INSTALLED
+ repository.status = trans.install_model.ToolShedRepository.installation_status.INSTALLED
if repository.includes_tools_for_display_in_tool_panel:
metadata = repository.metadata
repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata )
@@ -52,8 +52,8 @@
data_manager_relative_install_dir,
repository,
repository_tools_tups )
- trans.sa_session.add( repository )
- trans.sa_session.flush()
+ trans.install_model.context.add( repository )
+ trans.install_model.context.flush()
if repository.includes_datatypes:
if tool_path:
repository_install_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir ) )
@@ -186,7 +186,7 @@
only_if_compiling_contained_td,
tsr.id,
tsr.status ]
- if tsr.status == trans.model.ToolShedRepository.installation_status.INSTALLED:
+ if tsr.status == trans.install_model.ToolShedRepository.installation_status.INSTALLED:
installed_rd_tups.append( rd_tup )
else:
# We'll only add the rd_tup to the missing_rd_tups list if the received repository has tool dependencies that are not
@@ -257,7 +257,7 @@
only_if_compiling_contained_td,
repository.id,
repository.status ]
- if repository.status == trans.model.ToolShedRepository.installation_status.INSTALLED:
+ if repository.status == trans.install_model.ToolShedRepository.installation_status.INSTALLED:
if new_rd_tup not in installed_rd_tups:
installed_rd_tups.append( new_rd_tup )
else:
@@ -299,7 +299,7 @@
if tool_dependencies_dict:
for td_key, val in tool_dependencies_dict.items():
# Default the status to NEVER_INSTALLED.
- tool_dependency_status = trans.model.ToolDependency.installation_status.NEVER_INSTALLED
+ tool_dependency_status = trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED
# Set environment tool dependencies are a list.
if td_key == 'set_environment':
new_val = []
@@ -313,7 +313,7 @@
tool_dependency_status = tool_dependency.status
requirement_dict[ 'status' ] = tool_dependency_status
new_val.append( requirement_dict )
- if tool_dependency_status in [ trans.model.ToolDependency.installation_status.INSTALLED ]:
+ if tool_dependency_status in [ trans.install_model.ToolDependency.installation_status.INSTALLED ]:
installed_tool_dependencies[ td_key ] = new_val
else:
missing_tool_dependencies[ td_key ] = new_val
@@ -327,7 +327,7 @@
if tool_dependency:
tool_dependency_status = tool_dependency.status
val[ 'status' ] = tool_dependency_status
- if tool_dependency_status in [ trans.model.ToolDependency.installation_status.INSTALLED ]:
+ if tool_dependency_status in [ trans.install_model.ToolDependency.installation_status.INSTALLED ]:
installed_tool_dependencies[ td_key ] = val
else:
missing_tool_dependencies[ td_key ] = val
@@ -435,7 +435,7 @@
# that should be installed. This allows for filtering out dependencies that have not been checked for installation on the 'Manage tool
# dependencies' page for an installed tool shed repository.
attr_tups_of_dependencies_for_install = [ ( td.name, td.version, td.type ) for td in tool_dependencies ]
- sa_session = app.model.context.current
+ context = app.install_model.context
installed_tool_dependencies = []
# Parse the tool_dependencies.xml config.
tree, error_message = xml_util.parse_xml( tool_dependencies_config )
@@ -471,7 +471,7 @@
tool_dependency = \
tool_dependency_util.set_tool_dependency_attributes( app,
tool_dependency=tool_dependency,
- status=app.model.ToolDependency.installation_status.ERROR,
+ status=app.install_model.ToolDependency.installation_status.ERROR,
error_message=None,
remove_from_disk=False )
else:
@@ -486,8 +486,8 @@
tool_dependency,
error_message,
remove_installation_path=False )
- if tool_dependency and tool_dependency.status in [ app.model.ToolDependency.installation_status.INSTALLED,
- app.model.ToolDependency.installation_status.ERROR ]:
+ if tool_dependency and tool_dependency.status in [ app.install_model.ToolDependency.installation_status.INSTALLED,
+ app.install_model.ToolDependency.installation_status.ERROR ]:
installed_tool_dependencies.append( tool_dependency )
elif elem.tag == 'set_environment':
# <set_environment version="1.0">
@@ -505,8 +505,8 @@
tool_dependency,
error_message,
remove_installation_path=False )
- if tool_dependency and tool_dependency.status in [ app.model.ToolDependency.installation_status.INSTALLED,
- app.model.ToolDependency.installation_status.ERROR ]:
+ if tool_dependency and tool_dependency.status in [ app.install_model.ToolDependency.installation_status.INSTALLED,
+ app.install_model.ToolDependency.installation_status.ERROR ]:
installed_tool_dependencies.append( tool_dependency )
return installed_tool_dependencies
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1487,8 +1487,8 @@
invalid_tools = metadata.get( 'invalid_tools', None )
# Handle README files.
if repository.has_readme_files:
- if reinstalling or repository.status not in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED,
- trans.model.ToolShedRepository.installation_status.INSTALLED ]:
+ if reinstalling or repository.status not in [ trans.install_model.ToolShedRepository.installation_status.DEACTIVATED,
+ trans.install_model.ToolShedRepository.installation_status.INSTALLED ]:
# Since we're reinstalling, we need to send a request to the tool shed to get the README files.
url = suc.url_join( tool_shed_url,
'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
@@ -1582,8 +1582,8 @@
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
suc.update_in_shed_tool_config( trans.app, repository )
- trans.sa_session.add( repository )
- trans.sa_session.flush()
+ trans.install_model.context.add( repository )
+ trans.install_model.context.flush()
log.debug( 'Metadata has been reset on repository %s.' % repository.name )
else:
log.debug( 'Metadata did not need to be reset on repository %s.' % repository.name )
@@ -1916,7 +1916,7 @@
dependency_install_dir = tool_dependency.installation_directory( app )
removed_from_disk, error_message = tool_dependency_util.remove_tool_dependency_installation_directory( dependency_install_dir )
if removed_from_disk:
- sa_session = app.model.context.current
+ context = app.install_model.context
new_dependency_name = None
new_dependency_type = None
new_dependency_version = None
@@ -1933,17 +1933,17 @@
( str( tool_dependency.name ), str( tool_dependency.type ), str( tool_dependency.version ), str( new_dependency_type ), str( new_dependency_version ) ) )
tool_dependency.type = new_dependency_type
tool_dependency.version = new_dependency_version
- tool_dependency.status = app.model.ToolDependency.installation_status.UNINSTALLED
+ tool_dependency.status = app.install_model.ToolDependency.installation_status.UNINSTALLED
tool_dependency.error_message = None
- sa_session.add( tool_dependency )
- sa_session.flush()
+ context.add( tool_dependency )
+ context.flush()
new_tool_dependency = tool_dependency
else:
# We have no new tool dependency definition based on a matching dependency name, so remove the existing tool dependency record from the database.
log.debug( "Deleting tool dependency with name '%s', type '%s' and version '%s' from the database since it is no longer defined." % \
( str( tool_dependency.name ), str( tool_dependency.type ), str( tool_dependency.version ) ) )
- sa_session.delete( tool_dependency )
- sa_session.flush()
+ context.delete( tool_dependency )
+ context.flush()
return new_tool_dependency
def update_repository_dependencies_metadata( metadata, repository_dependency_tups, is_valid, description ):
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -73,14 +73,14 @@
# Make sure required_repository is in the repository_dependency table.
repository_dependency = get_repository_dependency_by_repository_id( trans, required_repository.id )
if not repository_dependency:
- repository_dependency = trans.model.RepositoryDependency( tool_shed_repository_id=required_repository.id )
- trans.sa_session.add( repository_dependency )
- trans.sa_session.flush()
+ repository_dependency = trans.install_model.RepositoryDependency( tool_shed_repository_id=required_repository.id )
+ trans.install_model.context.add( repository_dependency )
+ trans.install_model.context.flush()
# Build the relationship between the d_repository and the required_repository.
- rrda = trans.model.RepositoryRepositoryDependencyAssociation( tool_shed_repository_id=d_repository.id,
+ rrda = trans.install_model.RepositoryRepositoryDependencyAssociation( tool_shed_repository_id=d_repository.id,
repository_dependency_id=repository_dependency.id )
- trans.sa_session.add( rrda )
- trans.sa_session.flush()
+ trans.install_model.context.add( rrda )
+ trans.install_model.context.flush()
def can_add_to_key_rd_dicts( key_rd_dict, key_rd_dicts ):
"""Handle the case where an update to the changeset revision was done."""
@@ -134,31 +134,31 @@
repository_db_record, installed_changeset_revision = \
suc.repository_was_previously_installed( trans, tool_shed_url, name, repo_info_tuple )
if repository_db_record:
- if repository_db_record.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED,
- trans.model.ToolShedRepository.installation_status.CLONING,
- trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
- trans.model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
- trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
- trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
+ if repository_db_record.status in [ trans.install_model.ToolShedRepository.installation_status.INSTALLED,
+ trans.install_model.ToolShedRepository.installation_status.CLONING,
+ trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+ trans.install_model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
+ trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+ trans.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
log.debug( "Skipping installation of tool_shed_repository '%s' because it's installation status is '%s'." % \
( str( repository_db_record.name ), str( repository_db_record.status ) ) )
else:
- if repository_db_record.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
- trans.model.ToolShedRepository.installation_status.NEW,
- trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ if repository_db_record.status in [ trans.install_model.ToolShedRepository.installation_status.ERROR,
+ trans.install_model.ToolShedRepository.installation_status.NEW,
+ trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
# The current tool shed repository is not currently installed, so we can update it's record in the database.
name = repository_db_record.name
installed_changeset_revision = repository_db_record.installed_changeset_revision
metadata_dict = repository_db_record.metadata
dist_to_shed = repository_db_record.dist_to_shed
can_update_db_record = True
- elif repository_db_record.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ elif repository_db_record.status in [ trans.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
# The current tool shed repository is deactivated, so updating it's database record is not necessary - just activate it.
log.debug( "Reactivating deactivated tool_shed_repository '%s'." % str( repository_db_record.name ) )
common_install_util.activate_repository( trans, repository_db_record )
# No additional updates to the database record are necessary.
can_update_db_record = False
- elif repository_db_record.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]:
+ elif repository_db_record.status not in [ trans.install_model.ToolShedRepository.installation_status.NEW ]:
# Set changeset_revision here so suc.create_or_update_tool_shed_repository will find the previously installed
# and uninstalled repository instead of creating a new record.
changeset_revision = repository_db_record.installed_changeset_revision
@@ -192,7 +192,7 @@
ctx_rev=ctx_rev,
repository_clone_url=repository_clone_url,
metadata_dict={},
- status=trans.model.ToolShedRepository.installation_status.NEW,
+ status=trans.install_model.ToolShedRepository.installation_status.NEW,
current_changeset_revision=changeset_revision,
owner=repository_owner,
dist_to_shed=False )
@@ -727,8 +727,8 @@
only_if_compiling_contained_td )
def get_repository_dependency_by_repository_id( trans, decoded_repository_id ):
- return trans.sa_session.query( trans.model.RepositoryDependency ) \
- .filter( trans.model.RepositoryDependency.table.c.tool_shed_repository_id == decoded_repository_id ) \
+ return trans.install_model.context.query( trans.install_model.RepositoryDependency ) \
+ .filter( trans.install_model.RepositoryDependency.table.c.tool_shed_repository_id == decoded_repository_id ) \
.first()
def update_circular_repository_dependencies( repository_key, repository_dependency, repository_dependencies, circular_repository_dependencies ):
diff -r c1603460642a33821ed17b6a3d53812a116c927e -r 87739d395b202170770dca1d13ff80fcc2975881 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -122,12 +122,12 @@
tool_dependencies_select_field = SelectField( name=name, multiple=multiple, display=display )
for tool_dependency in tool_shed_repository.tool_dependencies:
if uninstalled_only:
- if tool_dependency.status not in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
- trans.model.ToolDependency.installation_status.UNINSTALLED ]:
+ if tool_dependency.status not in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+ trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
continue
else:
- if tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
- trans.model.ToolDependency.installation_status.UNINSTALLED ]:
+ if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+ trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
continue
option_label = '%s version %s' % ( str( tool_dependency.name ), str( tool_dependency.version ) )
option_value = trans.security.encode_id( tool_dependency.id )
@@ -157,8 +157,8 @@
ok = True
if tool_shed_status_dict != repository.tool_shed_status:
repository.tool_shed_status = tool_shed_status_dict
- trans.sa_session.add( repository )
- trans.sa_session.flush()
+ trans.install_model.context.add( repository )
+ trans.install_model.context.flush()
updated = True
else:
ok = False
@@ -240,15 +240,15 @@
# was later uninstalled, this value should be received as the value of that change set to which the repository had been updated just prior
# to it being uninstalled.
current_changeset_revision = installed_changeset_revision
- sa_session = app.model.context.current
+ context = app.install_model.context
tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
if not owner:
owner = get_repository_owner_from_clone_url( repository_clone_url )
includes_datatypes = 'datatypes' in metadata_dict
- if status in [ app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ if status in [ app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
deleted = True
uninstalled = False
- elif status in [ app.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ elif status in [ app.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
deleted = True
uninstalled = True
else:
@@ -271,7 +271,7 @@
tool_shed_repository.status = status
else:
log.debug( "Adding new row for repository '%s' in the tool_shed_repository table, status set to '%s'." % ( str( name ), str( status ) ) )
- tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed,
+ tool_shed_repository = app.install_model.ToolShedRepository( tool_shed=tool_shed,
name=name,
description=description,
owner=owner,
@@ -284,8 +284,8 @@
deleted=deleted,
uninstalled=uninstalled,
status=status )
- sa_session.add( tool_shed_repository )
- sa_session.flush()
+ context.add( tool_shed_repository )
+ context.flush()
return tool_shed_repository
def extract_components_from_tuple( repository_components_tuple ):
@@ -596,17 +596,17 @@
def get_ids_of_tool_shed_repositories_being_installed( trans, as_string=False ):
installing_repository_ids = []
- new_status = trans.model.ToolShedRepository.installation_status.NEW
- cloning_status = trans.model.ToolShedRepository.installation_status.CLONING
- setting_tool_versions_status = trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS
- installing_dependencies_status = trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES
- loading_datatypes_status = trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
- for tool_shed_repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( or_( trans.model.ToolShedRepository.status == new_status,
- trans.model.ToolShedRepository.status == cloning_status,
- trans.model.ToolShedRepository.status == setting_tool_versions_status,
- trans.model.ToolShedRepository.status == installing_dependencies_status,
- trans.model.ToolShedRepository.status == loading_datatypes_status ) ):
+ new_status = trans.install_model.ToolShedRepository.installation_status.NEW
+ cloning_status = trans.install_model.ToolShedRepository.installation_status.CLONING
+ setting_tool_versions_status = trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS
+ installing_dependencies_status = trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES
+ loading_datatypes_status = trans.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
+ for tool_shed_repository in trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
+ .filter( or_( trans.install_model.ToolShedRepository.status == new_status,
+ trans.install_model.ToolShedRepository.status == cloning_status,
+ trans.install_model.ToolShedRepository.status == setting_tool_versions_status,
+ trans.install_model.ToolShedRepository.status == installing_dependencies_status,
+ trans.install_model.ToolShedRepository.status == loading_datatypes_status ) ):
installing_repository_ids.append( trans.security.encode_id( tool_shed_repository.id ) )
if as_string:
return ','.join( installing_repository_ids )
@@ -614,7 +614,7 @@
def get_installed_tool_shed_repository( trans, id ):
"""Get a tool shed repository record from the Galaxy database defined by the id."""
- return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+ return trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( id ) )
def get_latest_changeset_revision( trans, repository, repo ):
repository_tip = repository.tip( trans.app )
@@ -740,7 +740,7 @@
ctx_rev=ctx_rev,
repository_clone_url=repository_clone_url,
metadata_dict={},
- status=trans.model.ToolShedRepository.installation_status.NEW,
+ status=trans.install_model.ToolShedRepository.installation_status.NEW,
current_changeset_revision=None,
owner=owner,
dist_to_shed=False )
@@ -858,13 +858,13 @@
else:
# We're in Galaxy.
if order:
- return trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \
- .order_by( trans.model.ToolShedRepository.table.c.name,
- trans.model.ToolShedRepository.table.c.owner )
+ return trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
+ .filter( trans.install_model.ToolShedRepository.table.c.uninstalled == False ) \
+ .order_by( trans.install_model.ToolShedRepository.table.c.name,
+ trans.install_model.ToolShedRepository.table.c.owner )
else:
- return trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( trans.model.ToolShedRepository.table.c.uninstalled == False )
+ return trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
+ .filter( trans.install_model.ToolShedRepository.table.c.uninstalled == False )
def get_repo_info_tuple_contents( repo_info_tuple ):
"""Take care in handling the repo_info_tuple as it evolves over time as new tool shed features are introduced."""
@@ -892,30 +892,30 @@
def get_repository_by_id( trans, id ):
"""Get a repository from the database via id."""
if trans.webapp.name == 'galaxy':
- return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+ return trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( id ) )
else:
return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
def get_repository_by_name( app, name ):
"""Get a repository from the database via name."""
- sa_session = app.model.context.current
+ repository_query = __repository_query( app )
if app.name == 'galaxy':
- return sa_session.query( app.model.ToolShedRepository ).filter_by( name=name ).first()
+ return repository_query.filter_by( name=name ).first()
else:
- return sa_session.query( app.model.Repository ).filter_by( name=name ).first()
+ return repository_query.filter_by( name=name ).first()
def get_repository_by_name_and_owner( app, name, owner ):
"""Get a repository from the database via name and owner"""
- sa_session = app.model.context.current
+ repository_query = __repository_query( app )
if app.name == 'galaxy':
- return sa_session.query( app.model.ToolShedRepository ) \
- .filter( and_( app.model.ToolShedRepository.table.c.name == name,
- app.model.ToolShedRepository.table.c.owner == owner ) ) \
+ return repository_query \
+ .filter( and_( app.install_model.ToolShedRepository.table.c.name == name,
+ app.install_model.ToolShedRepository.table.c.owner == owner ) ) \
.first()
# We're in the tool shed.
user = get_user_by_username( app, owner )
if user:
- return sa_session.query( app.model.Repository ) \
+ return repository_query \
.filter( and_( app.model.Repository.table.c.name == name,
app.model.Repository.table.c.user_id == user.id ) ) \
.first()
@@ -1218,36 +1218,36 @@
def get_tool_shed_repository_by_id( trans, repository_id ):
"""Return a tool shed repository database record defined by the id."""
# This method is used only in Galaxy, not the tool shed.
- return trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( trans.model.ToolShedRepository.table.c.id == trans.security.decode_id( repository_id ) ) \
+ return trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
+ .filter( trans.install_model.ToolShedRepository.table.c.id == trans.security.decode_id( repository_id ) ) \
.first()
def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
"""Return a tool shed repository database record defined by the combination of a tool_shed, repository name, repository owner and current changeet_revision."""
# This method is used only in Galaxy, not the tool shed.
- sa_session = app.model.context.current
+ repository_query = __repository_query( app )
if tool_shed.find( '//' ) > 0:
tool_shed = tool_shed.split( '//' )[1]
tool_shed = tool_shed.rstrip( '/' )
- return sa_session.query( app.model.ToolShedRepository ) \
- .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
- app.model.ToolShedRepository.table.c.name == name,
- app.model.ToolShedRepository.table.c.owner == owner,
- app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
+ return repository_query \
+ .filter( and_( app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed,
+ app.install_model.ToolShedRepository.table.c.name == name,
+ app.install_model.ToolShedRepository.table.c.owner == owner,
+ app.install_model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
.first()
def get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision ):
"""Return a tool shed repository database record defined by the combination of a tool_shed, repository name, repository owner and installed_changeet_revision."""
# This method is used only in Galaxy, not the tool shed.
- sa_session = app.model.context.current
+ repository_query = __repository_query( app )
if tool_shed.find( '//' ) > 0:
tool_shed = tool_shed.split( '//' )[1]
tool_shed = tool_shed.rstrip( '/' )
- return sa_session.query( app.model.ToolShedRepository ) \
- .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
- app.model.ToolShedRepository.table.c.name == name,
- app.model.ToolShedRepository.table.c.owner == owner,
- app.model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \
+ return repository_query \
+ .filter( and_( app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed,
+ app.install_model.ToolShedRepository.table.c.name == name,
+ app.install_model.ToolShedRepository.table.c.owner == owner,
+ app.install_model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \
.first()
def get_tool_shed_status_for_installed_repository( app, repository ):
@@ -1295,38 +1295,38 @@
changeset_revision )
if tool_shed_repository:
status_label = tool_shed_repository.status
- if tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING,
- trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
- trans.model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
- trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
- trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
- bgcolor = trans.model.ToolShedRepository.states.INSTALLING
- elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.NEW,
- trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
- bgcolor = trans.model.ToolShedRepository.states.UNINSTALLED
- elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR ]:
- bgcolor = trans.model.ToolShedRepository.states.ERROR
- elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
- bgcolor = trans.model.ToolShedRepository.states.WARNING
- elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED ]:
+ if tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.CLONING,
+ trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+ trans.install_model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
+ trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+ trans.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
+ bgcolor = trans.install_model.ToolShedRepository.states.INSTALLING
+ elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.NEW,
+ trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ bgcolor = trans.install_model.ToolShedRepository.states.UNINSTALLED
+ elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.ERROR ]:
+ bgcolor = trans.install_model.ToolShedRepository.states.ERROR
+ elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ bgcolor = trans.install_model.ToolShedRepository.states.WARNING
+ elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.INSTALLED ]:
if tool_shed_repository.repository_dependencies_being_installed:
- bgcolor = trans.model.ToolShedRepository.states.WARNING
- status_label = '%s, %s' % ( status_label, trans.model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES )
+ bgcolor = trans.install_model.ToolShedRepository.states.WARNING
+ status_label = '%s, %s' % ( status_label, trans.install_model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES )
elif tool_shed_repository.missing_repository_dependencies:
- bgcolor = trans.model.ToolShedRepository.states.WARNING
+ bgcolor = trans.install_model.ToolShedRepository.states.WARNING
status_label = '%s, missing repository dependencies' % status_label
elif tool_shed_repository.tool_dependencies_being_installed:
- bgcolor = trans.model.ToolShedRepository.states.WARNING
- status_label = '%s, %s' % (status_label, trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ bgcolor = trans.install_model.ToolShedRepository.states.WARNING
+ status_label = '%s, %s' % (status_label, trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
elif tool_shed_repository.missing_tool_dependencies:
- bgcolor = trans.model.ToolShedRepository.states.WARNING
+ bgcolor = trans.install_model.ToolShedRepository.states.WARNING
status_label = '%s, missing tool dependencies' % status_label
else:
- bgcolor = trans.model.ToolShedRepository.states.OK
+ bgcolor = trans.install_model.ToolShedRepository.states.OK
else:
- bgcolor = trans.model.ToolShedRepository.states.ERROR
+ bgcolor = trans.install_model.ToolShedRepository.states.ERROR
else:
- bgcolor = trans.model.ToolShedRepository.states.WARNING
+ bgcolor = trans.install_model.ToolShedRepository.states.WARNING
status_label = '%s, unknown status' % status_label
return '<div class="count-box state-color-%s">%s</div>' % ( bgcolor, status_label )
@@ -1589,10 +1589,10 @@
repository.deleted = False
repository.tool_shed_status = None
repository.uninstalled = False
- repository.status = trans.model.ToolShedRepository.installation_status.NEW
+ repository.status = trans.install_model.ToolShedRepository.installation_status.NEW
repository.error_message = None
- trans.sa_session.add( repository )
- trans.sa_session.flush()
+ trans.install_model.context.add( repository )
+ trans.install_model.context.flush()
def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
"""
@@ -1771,12 +1771,12 @@
def update_tool_shed_repository_status( app, tool_shed_repository, status, error_message=None ):
"""Update the status of a tool shed repository in the process of being installed into Galaxy."""
- sa_session = app.model.context.current
+ context = app.install_model.context
tool_shed_repository.status = status
if error_message:
tool_shed_repository.error_message = str( error_message )
- sa_session.add( tool_shed_repository )
- sa_session.flush()
+ context.add( tool_shed_repository )
+ context.flush()
def url_join( *args ):
"""Return a valid URL produced by appending a base URL and a set of request parameters."""
@@ -1784,3 +1784,10 @@
for arg in args:
parts.append( arg.strip( '/' ) )
return '/'.join( parts )
+
+def __repository_query( app ):
+ if app.name == "galaxy":
+ query = app.install_model.context.query( app.install_model.ToolShedRepository )
+ else:
+ query = app.model.context.query( app.model.Repository )
+ return query
This diff is so big that we needed to truncate the remainder.
https://bitbucket.org/galaxy/galaxy-central/commits/bdba92c000c5/
Changeset: bdba92c000c5
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Allow core Galaxy models and install tool dependencies to target different databases.
Now app.model and app.install_model may be two distinct objects not just two references to the same object. Add ModelMapping class for galaxy.model.tool_shed_install.
If install_database_connection is set in Galaxy configuration, a separate database will be targeted for the persistence of the tool shed repository information. Additional database options may likewise be specified in this case: install_database_engine_option_pool_size, etc....
Affected #: 6 files
diff -r 87739d395b202170770dca1d13ff80fcc2975881 -r bdba92c000c58a6d41af045c3283466131fb1e27 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -43,6 +43,10 @@
db_url = self.config.database_connection
else:
db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
+ install_db_url = self.config.install_database_connection
+ # TODO: Consider more aggressive check here that this is not the same
+ # database file under the hood.
+ combined_install_database = not( install_db_url and install_db_url != db_url )
# Set up the tool sheds registry
if os.path.isfile( self.config.tool_sheds_config ):
self.tool_shed_registry = tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
@@ -52,6 +56,10 @@
# is a new installation, we'll restrict the tool migration messaging.
from galaxy.model.migrate.check import create_or_verify_database
create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options, app=self )
+ if not combined_install_database:
+ from galaxy.model.tool_shed_install.migrate.check import create_or_verify_database as tsi_create_or_verify_database
+ tsi_create_or_verify_database( install_db_url, self.config.install_database_engine_options, app=self )
+
# Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
from tool_shed.galaxy_install.migrate.check import verify_tools
verify_tools( self, db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options )
@@ -62,14 +70,20 @@
self.model = mapping.init( self.config.file_path,
db_url,
self.config.database_engine_options,
+ map_install_models=combined_install_database,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
object_store = self.object_store,
trace_logger=self.trace_logger,
use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ) )
- # Want tool_shed_install models accessed through new attribute
- # (install_model). This is the same object for now, but should ultimately
- # be allowed to be a separate ModelMapping instances.
- self.install_model = self.model
+
+ if combined_install_database:
+ self.install_model = self.model
+ else:
+ from galaxy.model.tool_shed_install import mapping as install_mapping
+ install_db_url = self.config.install_database_connection
+ install_db_engine_options = self.config.install_database_engine_options
+ self.install_model = install_mapping.init( install_db_url,
+ install_db_engine_options )
# Manage installed tool shed repositories.
self.installed_repository_manager = tool_shed.galaxy_install.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
diff -r 87739d395b202170770dca1d13ff80fcc2975881 -r bdba92c000c58a6d41af045c3283466131fb1e27 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -33,15 +33,22 @@
self.umask = os.umask( 077 ) # get the current umask
os.umask( self.umask ) # can't get w/o set, so set it back
self.gid = os.getgid() # if running under newgrp(1) we'll need to fix the group of data created on the cluster
+
# Database related configuration
self.database = resolve_path( kwargs.get( "database_file", "database/universe.sqlite" ), self.root )
self.database_connection = kwargs.get( "database_connection", False )
self.database_engine_options = get_database_engine_options( kwargs )
self.database_create_tables = string_as_bool( kwargs.get( "database_create_tables", "True" ) )
self.database_query_profiling_proxy = string_as_bool( kwargs.get( "database_query_profiling_proxy", "False" ) )
+
# Don't set this to true for production databases, but probably should
# default to True for sqlite databases.
self.database_auto_migrate = string_as_bool( kwargs.get( "database_auto_migrate", "False" ) )
+
+ # Install database related configuration (if different).
+ self.install_database_connection = kwargs.get( "install_database_connection", None )
+ self.install_database_engine_options = get_database_engine_options( kwargs, model_prefix="install_" )
+
# Where dataset files are stored
self.file_path = resolve_path( kwargs.get( "file_path", "database/files" ), self.root )
self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
@@ -439,7 +446,7 @@
admin_users = [ x.strip() for x in self.get( "admin_users", "" ).split( "," ) ]
return ( user is not None and user.email in admin_users )
-def get_database_engine_options( kwargs ):
+def get_database_engine_options( kwargs, model_prefix='' ):
"""
Allow options for the SQLAlchemy database engine to be passed by using
the prefix "database_engine_option".
@@ -455,7 +462,7 @@
'pool_threadlocal': string_as_bool,
'server_side_cursors': string_as_bool
}
- prefix = "database_engine_option_"
+ prefix = "%sdatabase_engine_option_" % model_prefix
prefix_len = len( prefix )
rval = {}
for key, value in kwargs.iteritems():
diff -r 87739d395b202170770dca1d13ff80fcc2975881 -r bdba92c000c58a6d41af045c3283466131fb1e27 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -43,11 +43,6 @@
# Default Value Required for unit tests
datatypes_registry.load_datatypes()
-# TODO: Refactor references to these classes to eliminate need for this
-# import.
-from .tool_shed_install import ToolShedRepository, RepositoryRepositoryDependencyAssociation, RepositoryDependency
-from .tool_shed_install import ToolDependency, ToolVersion, ToolVersionAssociation, MigrateTools
-
class NoConverterException(Exception):
def __init__(self, value):
diff -r 87739d395b202170770dca1d13ff80fcc2975881 -r bdba92c000c58a6d41af045c3283466131fb1e27 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -14,7 +14,6 @@
from sqlalchemy.orm.collections import attribute_mapped_collection
from galaxy import model
-from galaxy.model import tool_shed_install
from galaxy.model.orm.engine_factory import build_engine
from galaxy.model.orm.now import now
from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType
@@ -25,10 +24,6 @@
metadata = MetaData()
-# import tool shed mappings, TODO: update all references to eliminate
-# need for this import.
-from .tool_shed_install.mapping import *
-
model.User.table = Table( "galaxy_user", metadata,
Column( "id", Integer, primary_key=True),
@@ -1894,7 +1889,7 @@
model.History._next_hid = db_next_hid
-def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None, trace_logger=None, use_pbkdf2=True ):
+def init( file_path, url, engine_options={}, create_tables=False, map_install_models=False, database_query_profiling_proxy=False, object_store=None, trace_logger=None, use_pbkdf2=True ):
"""Connect mappings to the database"""
# Connect dataset to the file path
model.Dataset.file_path = file_path
@@ -1908,7 +1903,13 @@
# Connect the metadata to the database.
metadata.bind = engine
- result = ModelMapping([model, tool_shed_install], engine=engine)
+ model_modules = [model]
+ if map_install_models:
+ import galaxy.model.tool_shed_install.mapping
+ from galaxy.model import tool_shed_install
+ model_modules.append(tool_shed_install)
+
+ result = ModelMapping(model_modules, engine=engine)
# Create tables if needed
if create_tables:
diff -r 87739d395b202170770dca1d13ff80fcc2975881 -r bdba92c000c58a6d41af045c3283466131fb1e27 lib/galaxy/model/tool_shed_install/mapping.py
--- a/lib/galaxy/model/tool_shed_install/mapping.py
+++ b/lib/galaxy/model/tool_shed_install/mapping.py
@@ -1,11 +1,14 @@
-#from galaxy.model import tool_shed_models as install_model
-from galaxy import model as install_model
-#from sqlalchemy import MetaData
+from galaxy.model import tool_shed_install as install_model
+from sqlalchemy import MetaData
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, Table, TEXT
from sqlalchemy.orm import relation, mapper
from galaxy.model.custom_types import JSONType, TrimmedString
from galaxy.model.orm.now import now
-from galaxy.model.mapping import metadata
+from galaxy.model.base import ModelMapping
+from galaxy.model.orm.engine_factory import build_engine
+
+
+metadata = MetaData()
install_model.ToolShedRepository.table = Table( "tool_shed_repository", metadata,
@@ -103,3 +106,23 @@
)
mapper( install_model.ToolVersionAssociation, install_model.ToolVersionAssociation.table )
+
+
+def init( url, engine_options={}, create_tables=False ):
+ """Connect mappings to the database"""
+ # Load the appropriate db module
+ engine = build_engine( url, engine_options )
+
+ # Connect the metadata to the database.
+ metadata.bind = engine
+
+ result = ModelMapping([install_model], engine=engine)
+
+ # Create tables if needed
+ if create_tables:
+ metadata.create_all()
+ # metadata.engine.commit()
+
+ result.create_tables = create_tables
+ #load local galaxy security policy
+ return result
diff -r 87739d395b202170770dca1d13ff80fcc2975881 -r bdba92c000c58a6d41af045c3283466131fb1e27 lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -221,6 +221,12 @@
galaxy.model.mapping.metadata.engine.connection_provider._pool.dispose()
except:
pass
+ # Close any pooled database connections before forking
+ try:
+ galaxy.model.tool_shed_install.mapping.metadata.engine.connection_provider._pool.dispose()
+ except:
+ pass
+
# Return
return webapp
https://bitbucket.org/galaxy/galaxy-central/commits/d0c988049677/
Changeset: d0c988049677
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Database migrations for stand-alone tool_shed_install models.
create_db and manage_db should now work with install as well.
Migrations are symbolic links into lib/galaxy/model/migrate/versions of migrations affecting these tables. All future migrations to these tables should be created in one place like this and linked in the other.
Affected #: 22 files
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/orm/scripts.py
--- a/lib/galaxy/model/orm/scripts.py
+++ b/lib/galaxy/model/orm/scripts.py
@@ -34,6 +34,12 @@
'config_file': 'tool_shed_wsgi.ini',
'default_sqlite_file': './database/community.sqlite',
},
+ "install":
+ {
+ 'repo': 'lib/galaxy/model/tool_shed_install/migrate',
+ 'config_prefix': 'install_',
+ 'default_sqlite_file': './database/install.sqlite',
+ },
}
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/check.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/check.py
@@ -0,0 +1,113 @@
+import sys
+import os.path
+import logging
+
+from galaxy import eggs
+eggs.require( "SQLAlchemy" )
+eggs.require( "decorator" ) # Required by sqlalchemy-migrate
+eggs.require( "Tempita " ) # Required by sqlalchemy-migrate
+eggs.require( "sqlalchemy-migrate" )
+
+from sqlalchemy import *
+from sqlalchemy.exc import NoSuchTableError
+from migrate.versioning import repository, schema
+
+from galaxy.model.orm import dialect_to_egg
+
+log = logging.getLogger( __name__ )
+
+# path relative to galaxy
+migrate_repository_directory = os.path.dirname( __file__ ).replace( os.getcwd() + os.path.sep, '', 1 )
+migrate_repository = repository.Repository( migrate_repository_directory )
+
+
+def create_or_verify_database( url, engine_options={}, app=None ):
+ """
+ """
+ dialect = ( url.split( ':', 1 ) )[0]
+ try:
+ egg = dialect_to_egg[dialect]
+ try:
+ eggs.require( egg )
+ log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
+ except:
+ # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
+ log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
+ except KeyError:
+ # Let this go, it could possibly work with db's we don't support
+ log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
+ # Create engine and metadata
+ engine = create_engine( url, **engine_options )
+
+ def migrate():
+ try:
+ # Declare the database to be under a repository's version control
+ db_schema = schema.ControlledSchema.create( engine, migrate_repository )
+ except:
+ # The database is already under version control
+ db_schema = schema.ControlledSchema( engine, migrate_repository )
+ # Apply all scripts to get to current version
+ migrate_to_current_version( engine, db_schema )
+
+ meta = MetaData( bind=engine )
+ if app and getattr( app.config, 'database_auto_migrate', False ):
+ migrate()
+ return
+
+ # Try to load tool_shed_repository table
+ try:
+ Table( "tool_shed_repository", meta, autoload=True )
+ except NoSuchTableError:
+ # No table means a completely uninitialized database. If we
+ # have an app, we'll set it's new_installation setting to True
+ # so the tool migration process will be skipped.
+ migrate()
+ return
+
+ try:
+ Table( "migrate_version", meta, autoload=True )
+ except NoSuchTableError:
+ # The database exists but is not yet under migrate version control, so init with version 1
+ log.info( "Adding version control to existing database" )
+ try:
+ Table( "metadata_file", meta, autoload=True )
+ schema.ControlledSchema.create( engine, migrate_repository, version=2 )
+ except NoSuchTableError:
+ schema.ControlledSchema.create( engine, migrate_repository, version=1 )
+
+ # Verify that the code and the DB are in sync
+ db_schema = schema.ControlledSchema( engine, migrate_repository )
+ if migrate_repository.versions.latest != db_schema.version:
+ exception_msg = "Your database has version '%d' but this code expects version '%d'. " % ( db_schema.version, migrate_repository.versions.latest )
+ exception_msg += "Back up your database and then migrate the schema by running the following from your Galaxy installation directory:"
+ exception_msg += "\n\nsh manage_db.sh upgrade install\n"
+
+ else:
+ log.info( "At database version %d" % db_schema.version )
+
+
+def migrate_to_current_version( engine, schema ):
+ # Changes to get to current version
+ changeset = schema.changeset( None )
+ for ver, change in changeset:
+ nextver = ver + changeset.step
+ log.info( 'Migrating %s -> %s... ' % ( ver, nextver ) )
+ old_stdout = sys.stdout
+
+ class FakeStdout( object ):
+ def __init__( self ):
+ self.buffer = []
+
+ def write( self, s ):
+ self.buffer.append( s )
+
+ def flush( self ):
+ pass
+
+ sys.stdout = FakeStdout()
+ try:
+ schema.runchange( ver, change, changeset.step )
+ finally:
+ for message in "".join( sys.stdout.buffer ).split( "\n" ):
+ log.info( message )
+ sys.stdout = old_stdout
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/migrate.cfg
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/migrate.cfg
@@ -0,0 +1,20 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=ToolShedInstall
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to
+# change the table name in each database too.
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the
+# commit continues, perhaps ending successfully.
+# Databases in this list MUST compile successfully during a commit, or the
+# entire commit will fail. List the databases your application will actually
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0001_add_tool_shed_repository_table.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0001_add_tool_shed_repository_table.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0082_add_tool_shed_repository_table.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0002_add_tool_shed_repository_table_columns.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0002_add_tool_shed_repository_table_columns.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0086_add_tool_shed_repository_table_columns.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0003_tool_id_guid_map_table.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0003_tool_id_guid_map_table.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0087_tool_id_guid_map_table.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0004_add_installed_changeset_revison_column.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0004_add_installed_changeset_revison_column.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0088_add_installed_changeset_revison_column.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0005_add_tool_shed_repository_table_columns.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0005_add_tool_shed_repository_table_columns.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0090_add_tool_shed_repository_table_columns.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0006_add_tool_version_tables.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0006_add_tool_version_tables.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0091_add_tool_version_tables.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0007_add_migrate_tools_table.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0007_add_migrate_tools_table.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0092_add_migrate_tools_table.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0008_add_ctx_rev_column.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0008_add_ctx_rev_column.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0097_add_ctx_rev_column.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0009_add_tool_dependency_table.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0009_add_tool_dependency_table.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0099_add_tool_dependency_table.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0010_alter_tool_dependency_table_version_column.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0010_alter_tool_dependency_table_version_column.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0100_alter_tool_dependency_table_version_column.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0011_drop_installed_changeset_revision_column.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0011_drop_installed_changeset_revision_column.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0101_drop_installed_changeset_revision_column.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0012_add_tool_dependency_status_columns.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0012_add_tool_dependency_status_columns.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0102_add_tool_dependency_status_columns.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0013_add_tool_shed_repository_status_columns.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0013_add_tool_shed_repository_status_columns.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0103_add_tool_shed_repository_status_columns.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0014_add_repository_dependency_tables.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0014_add_repository_dependency_tables.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0109_add_repository_dependency_tables.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0015_update_migrate_tools_table.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0015_update_migrate_tools_table.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0113_update_migrate_tools_table.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0016_update_migrate_tools_table_again.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0016_update_migrate_tools_table_again.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0114_update_migrate_tools_table_again.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 lib/galaxy/model/tool_shed_install/migrate/versions/0017_drop_update_available_col_add_tool_shed_status_col.py
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0017_drop_update_available_col_add_tool_shed_status_col.py
@@ -0,0 +1,1 @@
+../../../migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py
\ No newline at end of file
diff -r bdba92c000c58a6d41af045c3283466131fb1e27 -r d0c9880496777e52081b6f813bf5ca0cdda64b85 scripts/create_db.py
--- a/scripts/create_db.py
+++ b/scripts/create_db.py
@@ -25,6 +25,7 @@
from galaxy.model.orm.scripts import get_config
from galaxy.model.migrate.check import create_or_verify_database as create_db
+from galaxy.model.tool_shed_install.migrate.check import create_or_verify_database as create_install_db
from galaxy.webapps.tool_shed.model.migrate.check import create_or_verify_database as create_tool_shed_db
@@ -34,6 +35,8 @@
create_db(config['db_url'], config['config_file'])
elif config['database'] == 'tool_shed':
create_tool_shed_db(config['db_url'])
+ elif config['database'] == 'install':
+ create_install_db(config['db_url'])
if __name__ == "__main__":
invoke_create()
https://bitbucket.org/galaxy/galaxy-central/commits/cc8cd43967a1/
Changeset: cc8cd43967a1
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Models: Allow setting install database target when running tool shed functional tests.
Affected #: 2 files
diff -r d0c9880496777e52081b6f813bf5ca0cdda64b85 -r cc8cd43967a1894a7533e3220e146bd9f73de387 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -77,10 +77,12 @@
use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ) )
if combined_install_database:
+ log.info("Install database targetting Galaxy's database configuration.")
self.install_model = self.model
else:
from galaxy.model.tool_shed_install import mapping as install_mapping
install_db_url = self.config.install_database_connection
+ log.info("Install database using its own connection %s" % install_db_url)
install_db_engine_options = self.config.install_database_engine_options
self.install_model = install_mapping.init( install_db_url,
install_db_engine_options )
diff -r d0c9880496777e52081b6f813bf5ca0cdda64b85 -r cc8cd43967a1894a7533e3220e146bd9f73de387 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -41,12 +41,14 @@
# This is for the galaxy application.
from galaxy.app import UniverseApplication as GalaxyUniverseApplication
from galaxy.web import buildapp as galaxybuildapp
+from galaxy.util import asbool
import nose.core
import nose.config
import nose.loader
import nose.plugins.manager
+
from functional import database_contexts
log = logging.getLogger( "tool_shed_functional_tests.py" )
@@ -59,6 +61,11 @@
default_galaxy_test_port_max = 9999
default_galaxy_test_host = 'localhost'
+# Use separate databases for Galaxy and tool shed install info by default,
+# set GALAXY_TEST_INSTALL_DB_MERGED to True to revert to merged databases
+# behavior.
+default_install_db_merged = False
+
# should this serve static resources (scripts, images, styles, etc.)
STATIC_ENABLED = True
@@ -203,6 +210,13 @@
__copy_database_template(os.environ['GALAXY_TEST_DB_TEMPLATE'], db_path)
galaxy_database_auto_migrate = True
galaxy_database_connection = 'sqlite:///%s' % db_path
+ if 'GALAXY_TEST_INSTALL_DBURI' in os.environ:
+ install_galaxy_database_connection = os.environ[ 'GALAXY_TEST_INSTALL_DBURI' ]
+ elif asbool( os.environ.get( 'GALAXY_TEST_INSTALL_DB_MERGED', default_install_db_merged ) ):
+ install_galaxy_database_connection = galaxy_database_connection
+ else:
+ install_galaxy_db_path = os.path.join( galaxy_db_path, 'install.sqlite' )
+ install_galaxy_database_connection = 'sqlite:///%s' % install_galaxy_db_path
tool_shed_global_conf = get_webapp_global_conf()
tool_shed_global_conf[ '__file__' ] = 'tool_shed_wsgi.ini.sample'
kwargs = dict( admin_users = 'test(a)bx.psu.edu',
@@ -323,6 +337,7 @@
allow_user_deletion = True,
admin_users = 'test(a)bx.psu.edu',
allow_library_path_paste = True,
+ install_database_connection = install_galaxy_database_connection,
database_connection = galaxy_database_connection,
database_auto_migrate = galaxy_database_auto_migrate,
datatype_converters_config_file = "datatype_converters_conf.xml.sample",
https://bitbucket.org/galaxy/galaxy-central/commits/8de536533089/
Changeset: 8de536533089
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Fixup to error handling in repository_util.py
Affected #: 1 file
diff -r cc8cd43967a1894a7533e3220e146bd9f73de387 -r 8de5365330899d1d3328901797e39871d8187d47 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -559,10 +559,11 @@
tool_version_dicts = json.from_json_string( text )
tool_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
else:
- message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
- message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
- message += "from the installed repository's <b>Repository Actions</b> menu. "
- status = 'error'
+ if not error_message:
+ error_message = ""
+ error_message += "Version information for the tools included in the <b>%s</b> repository is missing. " % tool_shed_repository.name
+ error_message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
+ error_message += "from the installed repository's <b>Repository Actions</b> menu. "
if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itsr" )
# Install tool dependencies.
https://bitbucket.org/galaxy/galaxy-central/commits/8f2b98ab3629/
Changeset: 8f2b98ab3629
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: PEP-8 fixes for search.py.
Affected #: 1 file
diff -r 8de5365330899d1d3328901797e39871d8187d47 -r 8f2b98ab3629e9f884f00a560ef4447c947491f5 lib/galaxy/model/search.py
--- a/lib/galaxy/model/search.py
+++ b/lib/galaxy/model/search.py
@@ -32,11 +32,11 @@
import parsley
from galaxy.model import (HistoryDatasetAssociation, LibraryDatasetDatasetAssociation,
-History, Library, LibraryFolder, LibraryDataset,StoredWorkflowTagAssociation,
-StoredWorkflow, HistoryTagAssociation,HistoryDatasetAssociationTagAssociation,
+History, Library, LibraryFolder, LibraryDataset, StoredWorkflowTagAssociation,
+StoredWorkflow, HistoryTagAssociation, HistoryDatasetAssociationTagAssociation,
ExtendedMetadata, ExtendedMetadataIndex, HistoryAnnotationAssociation, Job, JobParameter,
JobToInputLibraryDatasetAssociation, JobToInputDatasetAssociation, JobToOutputDatasetAssociation,
-Page, PageRevision )
+Page, PageRevision)
from galaxy.model.tool_shed_install import ToolVersion
from galaxy.util.json import to_json_string
@@ -95,7 +95,6 @@
if field.id_decode:
conditional.right = trans.security.decode_id( conditional.right )
-
def filter(self, left, operator, right):
if operator == 'and':
self.filter(left.left, left.operator, left.right)
@@ -110,7 +109,7 @@
#print field.sqlalchemy_field == right, field.sqlalchemy_field, right
self.query = self.query.filter( field.sqlalchemy_field == right )
elif operator == "!=":
- self.query = self.query.filter( field.sqlalchemy_field != right )
+ self.query = self.query.filter( field.sqlalchemy_field != right )
elif operator == "like":
self.query = self.query.filter( field.sqlalchemy_field.like(right) )
else:
@@ -133,7 +132,7 @@
for row in self.query.distinct().all():
selected = True
for f in self.post_filter:
- if not f[0](row, f[1],f[2],f[3]):
+ if not f[0](row, f[1], f[2], f[3]):
selected = False
if selected:
yield row
@@ -143,6 +142,7 @@
#Library Dataset Searching
##################
+
def library_extended_metadata_filter(view, left, operator, right):
view.do_query = True
if 'extended_metadata_joined' not in view.state:
@@ -159,6 +159,7 @@
)
)
+
def ldda_parent_library_filter(item, left, operator, right):
if operator == '=':
return right == item.library_dataset.folder.parent_library.id
@@ -170,12 +171,12 @@
class LibraryDatasetDatasetView(ViewQueryBaseClass):
VIEW_NAME = "library_dataset_dataset"
FIELDS = {
- 'extended_metadata' : ViewField('extended_metadata', handler=library_extended_metadata_filter),
- 'name' : ViewField('name', sqlalchemy_field=LibraryDatasetDatasetAssociation.name ),
- 'id' : ViewField('id', sqlalchemy_field=LibraryDatasetDatasetAssociation.id, id_decode=True),
- 'deleted' : ViewField('deleted', sqlalchemy_field=LibraryDatasetDatasetAssociation.deleted),
- 'parent_library_id' : ViewField('parent_library_id', id_decode=True, post_filter=ldda_parent_library_filter),
- 'data_type' : ViewField('data_type', sqlalchemy_field=LibraryDatasetDatasetAssociation.extension)
+ 'extended_metadata': ViewField('extended_metadata', handler=library_extended_metadata_filter),
+ 'name': ViewField('name', sqlalchemy_field=LibraryDatasetDatasetAssociation.name),
+ 'id': ViewField('id', sqlalchemy_field=LibraryDatasetDatasetAssociation.id, id_decode=True),
+ 'deleted': ViewField('deleted', sqlalchemy_field=LibraryDatasetDatasetAssociation.deleted),
+ 'parent_library_id': ViewField('parent_library_id', id_decode=True, post_filter=ldda_parent_library_filter),
+ 'data_type': ViewField('data_type', sqlalchemy_field=LibraryDatasetDatasetAssociation.extension)
}
def search(self, trans):
@@ -189,20 +190,18 @@
class LibraryView(ViewQueryBaseClass):
VIEW_NAME = "library"
FIELDS = {
- 'name' : ViewField('name', sqlalchemy_field=Library.name ),
- 'id' : ViewField('id', sqlalchemy_field=Library.id, id_decode=True),
- 'deleted' : ViewField('deleted', sqlalchemy_field=Library.deleted)
+ 'name': ViewField('name', sqlalchemy_field=Library.name),
+ 'id': ViewField('id', sqlalchemy_field=Library.id, id_decode=True),
+ 'deleted': ViewField('deleted', sqlalchemy_field=Library.deleted)
}
def search(self, trans):
self.query = trans.sa_session.query( Library )
-
##################
#Library Folder Searching
##################
-
def library_folder_parent_library_id_filter(item, left, operator, right):
if operator == '=':
return item.parent_library.id == right
@@ -210,6 +209,7 @@
return item.parent_library.id != right
raise GalaxyParseError("Invalid comparison operator: %s" % (operator))
+
def library_path_filter(item, left, operator, right):
lpath = "/" + "/".join(item.library_path)
if operator == '=':
@@ -222,22 +222,20 @@
class LibraryFolderView(ViewQueryBaseClass):
VIEW_NAME = "library_folder"
FIELDS = {
- 'name' : ViewField('name', sqlalchemy_field=LibraryFolder.name ),
- 'id' : ViewField('id', sqlalchemy_field=LibraryFolder.id, id_decode=True),
- 'parent_id' : ViewField('parent_id', sqlalchemy_field=LibraryFolder.parent_id, id_decode=True ),
- 'parent_library_id' : ViewField('parent_library_id', post_filter=library_folder_parent_library_id_filter, id_decode=True),
- 'library_path' : ViewField('library_path', post_filter=library_path_filter)
+ 'name': ViewField('name', sqlalchemy_field=LibraryFolder.name),
+ 'id': ViewField('id', sqlalchemy_field=LibraryFolder.id, id_decode=True),
+ 'parent_id': ViewField('parent_id', sqlalchemy_field=LibraryFolder.parent_id, id_decode=True),
+ 'parent_library_id': ViewField('parent_library_id', post_filter=library_folder_parent_library_id_filter, id_decode=True),
+ 'library_path': ViewField('library_path', post_filter=library_path_filter)
}
def search(self, trans):
self.query = trans.sa_session.query( LibraryFolder )
-
##################
#Library Dataset Searching
##################
-
def library_dataset_name_filter(item, left, operator, right):
if operator == '=':
return item.name == right
@@ -249,35 +247,33 @@
class LibraryDatasetView(ViewQueryBaseClass):
VIEW_NAME = "library_dataset"
FIELDS = {
- 'name' : ViewField('name', post_filter=library_dataset_name_filter),
- 'id' : ViewField('id', sqlalchemy_field=LibraryDataset.id, id_decode=True),
- 'folder_id' : ViewField('folder_id', sqlalchemy_field=LibraryDataset.folder_id, id_decode=True)
+ 'name': ViewField('name', post_filter=library_dataset_name_filter),
+ 'id': ViewField('id', sqlalchemy_field=LibraryDataset.id, id_decode=True),
+ 'folder_id': ViewField('folder_id', sqlalchemy_field=LibraryDataset.folder_id, id_decode=True)
}
def search(self, trans):
self.query = trans.sa_session.query( LibraryDataset )
-
##################
#Tool Searching
##################
-
class ToolView(ViewQueryBaseClass):
VIEW_NAME = "tool"
FIELDS = {
- 'tool_id' : ViewField('name', sqlalchemy_field=ToolVersion.tool_id ),
- 'id' : ViewField('id', sqlalchemy_field=ToolVersion.id)
+ 'tool_id': ViewField('name', sqlalchemy_field=ToolVersion.tool_id),
+ 'id': ViewField('id', sqlalchemy_field=ToolVersion.id)
}
def search(self, trans):
# Likely this will break in subsequent model refactoring. Need to revisit.
self.query = trans.install_model.context.query( ToolVersion )
+
##################
#History Dataset Searching
##################
-
def history_dataset_handle_tag(view, left, operator, right):
if operator == "=":
view.do_query = True
@@ -298,17 +294,17 @@
class HistoryDatasetView(ViewQueryBaseClass):
DOMAIN = "history_dataset"
FIELDS = {
- 'name' : ViewField('name', sqlalchemy_field=HistoryDatasetAssociation.name),
- 'id' : ViewField('id',sqlalchemy_field=HistoryDatasetAssociation.id, id_decode=True),
- 'history_id' : ViewField('history_id',sqlalchemy_field=HistoryDatasetAssociation.history_id, id_decode=True),
- 'tag' : ViewField("tag", handler=history_dataset_handle_tag),
- 'copied_from_ldda_id' : ViewField("copied_from_ldda_id",
+ 'name': ViewField('name', sqlalchemy_field=HistoryDatasetAssociation.name),
+ 'id': ViewField('id', sqlalchemy_field=HistoryDatasetAssociation.id, id_decode=True),
+ 'history_id': ViewField('history_id', sqlalchemy_field=HistoryDatasetAssociation.history_id, id_decode=True),
+ 'tag': ViewField("tag", handler=history_dataset_handle_tag),
+ 'copied_from_ldda_id': ViewField("copied_from_ldda_id",
sqlalchemy_field=HistoryDatasetAssociation.copied_from_library_dataset_dataset_association_id,
id_decode=True),
- 'copied_from_hda_id' : ViewField("copied_from_hda_id",
+ 'copied_from_hda_id': ViewField("copied_from_hda_id",
sqlalchemy_field=HistoryDatasetAssociation.copied_from_history_dataset_association_id,
id_decode=True),
- 'deleted' : ViewField('deleted', sqlalchemy_field=HistoryDatasetAssociation.deleted)
+ 'deleted': ViewField('deleted', sqlalchemy_field=HistoryDatasetAssociation.deleted)
}
def search(self, trans):
@@ -357,10 +353,10 @@
class HistoryView(ViewQueryBaseClass):
DOMAIN = "history"
FIELDS = {
- 'name' : ViewField('name', sqlalchemy_field=History.name),
- 'id' : ViewField('id', sqlalchemy_field=History.id, id_decode=True),
- 'tag' : ViewField("tag", handler=history_handle_tag),
- 'annotation' : ViewField("annotation", handler=history_handle_annotation)
+ 'name': ViewField('name', sqlalchemy_field=History.name),
+ 'id': ViewField('id', sqlalchemy_field=History.id, id_decode=True),
+ 'tag': ViewField("tag", handler=history_handle_tag),
+ 'annotation': ViewField("annotation", handler=history_handle_annotation)
}
def search(self, trans):
@@ -389,22 +385,20 @@
class WorkflowView(ViewQueryBaseClass):
DOMAIN = "workflow"
FIELDS = {
- 'name' : ViewField('name', sqlalchemy_field=StoredWorkflow.name),
- 'id' : ViewField('id', sqlalchemy_field=StoredWorkflow.id, id_decode=True),
- 'tag' : ViewField('tag', handler=workflow_tag_handler)
+ 'name': ViewField('name', sqlalchemy_field=StoredWorkflow.name),
+ 'id': ViewField('id', sqlalchemy_field=StoredWorkflow.id, id_decode=True),
+ 'tag': ViewField('tag', handler=workflow_tag_handler)
}
def search(self, trans):
self.query = trans.sa_session.query( StoredWorkflow )
-
##################
#Job Searching
##################
-
def job_param_filter(view, left, operator, right):
view.do_query = True
alias = aliased( JobParameter )
@@ -417,6 +411,7 @@
)
)
+
def job_input_hda_filter(view, left, operator, right):
view.do_query = True
alias = aliased( JobToInputDatasetAssociation )
@@ -429,6 +424,7 @@
)
)
+
def job_input_ldda_filter(view, left, operator, right):
view.do_query = True
alias = aliased( JobToInputLibraryDatasetAssociation )
@@ -442,7 +438,6 @@
)
-
def job_output_hda_filter(view, left, operator, right):
view.do_query = True
alias = aliased( JobToOutputDatasetAssociation )
@@ -459,36 +454,34 @@
class JobView(ViewQueryBaseClass):
DOMAIN = "job"
FIELDS = {
- 'tool_name' : ViewField('tool_name', sqlalchemy_field=Job.tool_id),
- 'state' : ViewField('state', sqlalchemy_field=Job.state),
- 'param' : ViewField('param', handler=job_param_filter),
- 'input_ldda' : ViewField('input_ldda', handler=job_input_ldda_filter, id_decode=True),
- 'input_hda' : ViewField('input_hda', handler=job_input_hda_filter, id_decode=True),
- 'output_hda' : ViewField('output_hda', handler=job_output_hda_filter, id_decode=True)
+ 'tool_name': ViewField('tool_name', sqlalchemy_field=Job.tool_id),
+ 'state': ViewField('state', sqlalchemy_field=Job.state),
+ 'param': ViewField('param', handler=job_param_filter),
+ 'input_ldda': ViewField('input_ldda', handler=job_input_ldda_filter, id_decode=True),
+ 'input_hda': ViewField('input_hda', handler=job_input_hda_filter, id_decode=True),
+ 'output_hda': ViewField('output_hda', handler=job_output_hda_filter, id_decode=True)
}
def search(self, trans):
self.query = trans.sa_session.query( Job )
-
##################
#Page Searching
##################
+
class PageView(ViewQueryBaseClass):
DOMAIN = "page"
FIELDS = {
- 'id' : ViewField('id', sqlalchemy_field=Page.id, id_decode=True),
- 'title' : ViewField('title', sqlalchemy_field=Page.title),
+ 'id': ViewField('id', sqlalchemy_field=Page.id, id_decode=True),
+ 'title': ViewField('title', sqlalchemy_field=Page.title),
}
def search(self, trans):
self.query = trans.sa_session.query( Page )
-
-
##################
#Page Revision Searching
##################
@@ -497,35 +490,34 @@
class PageRevisionView(ViewQueryBaseClass):
DOMAIN = "page_revision"
FIELDS = {
- 'id' : ViewField('id', sqlalchemy_field=PageRevision.id, id_decode=True),
- 'title' : ViewField('title', sqlalchemy_field=PageRevision.title),
- 'page_id' : ViewField('page_id', sqlalchemy_field=PageRevision.page_id, id_decode=True),
+ 'id': ViewField('id', sqlalchemy_field=PageRevision.id, id_decode=True),
+ 'title': ViewField('title', sqlalchemy_field=PageRevision.title),
+ 'page_id': ViewField('page_id', sqlalchemy_field=PageRevision.page_id, id_decode=True),
}
def search(self, trans):
self.query = trans.sa_session.query( PageRevision )
-
"""
The view mapping takes a user's name for a table and maps it to a View class that will
handle queries
"""
view_mapping = {
- 'library' : LibraryView,
- 'library_folder' : LibraryFolderView,
- 'library_dataset_dataset' : LibraryDatasetDatasetView,
- 'library_dataset' : LibraryDatasetView,
- 'lda' : LibraryDatasetView,
- 'ldda' : LibraryDatasetDatasetView,
- 'history_dataset' : HistoryDatasetView,
- 'hda' : HistoryDatasetView,
- 'history' : HistoryView,
- 'workflow' : WorkflowView,
- 'tool' : ToolView,
- 'job' : JobView,
- 'page' : PageView,
- 'page_revision' : PageRevisionView,
+ 'library': LibraryView,
+ 'library_folder': LibraryFolderView,
+ 'library_dataset_dataset': LibraryDatasetDatasetView,
+ 'library_dataset': LibraryDatasetView,
+ 'lda': LibraryDatasetView,
+ 'ldda': LibraryDatasetDatasetView,
+ 'history_dataset': HistoryDatasetView,
+ 'hda': HistoryDatasetView,
+ 'history': HistoryView,
+ 'workflow': WorkflowView,
+ 'tool': ToolView,
+ 'job': JobView,
+ 'page': PageView,
+ 'page_revision': PageRevisionView,
}
"""
@@ -575,6 +567,7 @@
not_dquote = anything:x ?(x != '"') -> x
"""
+
class GalaxyQuery:
"""
This class represents a data structure of a compiled GQL query
@@ -584,6 +577,7 @@
self.table_name = table_name
self.conditional = conditional
+
class GalaxyQueryComparison:
"""
This class represents the data structure of the comparison arguments of a
@@ -594,6 +588,7 @@
self.operator = operator
self.right = right
+
class GalaxyQueryAnd:
"""
This class represents the data structure of the comparison arguments of a
@@ -608,6 +603,7 @@
class GalaxyParseError(Exception):
pass
+
class SearchQuery:
def __init__(self, view, query):
self.view = view
@@ -623,7 +619,8 @@
self.view.filter(
self.query.conditional.left,
self.query.conditional.operator,
- self.query.conditional.right )
+ self.query.conditional.right
+ )
return self.view.get_results(True)
def item_to_api_value(self, item):
@@ -637,17 +634,16 @@
return o
-
class GalaxySearchEngine:
"""
Primary class for searching. Parses GQL (Galaxy Query Language) queries and returns a 'SearchQuery' class
"""
def __init__(self):
self.parser = parsley.makeGrammar(gqlGrammar, {
- 're' : re,
- 'GalaxyQuery' : GalaxyQuery,
- 'GalaxyQueryComparison' : GalaxyQueryComparison,
- 'GalaxyQueryAnd' : GalaxyQueryAnd
+ 're': re,
+ 'GalaxyQuery': GalaxyQuery,
+ 'GalaxyQueryComparison': GalaxyQueryComparison,
+ 'GalaxyQueryAnd': GalaxyQueryAnd
})
def query(self, query_text):
@@ -657,4 +653,3 @@
view = view_mapping[q.table_name]()
return SearchQuery(view, q)
raise GalaxyParseError("No such table %s" % (q.table_name))
-
https://bitbucket.org/galaxy/galaxy-central/commits/e962e0406fc6/
Changeset: e962e0406fc6
User: jmchilton
Date: 2013-12-05 07:20:54
Summary: Rework search.py to reflect galaxy.model can be imported and not mapped.
Basically search.py was assuming certain class attributes are available that are not available until after mapping.init has done its thing. This delays evaluation of those class attributes until runtime.
Affected #: 1 file
diff -r 8f2b98ab3629e9f884f00a560ef4447c947491f5 -r e962e0406fc631aa9915e3a0522e10a35d8b0def lib/galaxy/model/search.py
--- a/lib/galaxy/model/search.py
+++ b/lib/galaxy/model/search.py
@@ -105,13 +105,15 @@
self.do_query = True
field = self.FIELDS[left_base]
if field.sqlalchemy_field is not None:
+ clazz, attribute = field.sqlalchemy_field
+ sqlalchemy_field_value = getattr(clazz, attribute)
if operator == "=":
#print field.sqlalchemy_field == right, field.sqlalchemy_field, right
- self.query = self.query.filter( field.sqlalchemy_field == right )
+ self.query = self.query.filter( sqlalchemy_field_value == right )
elif operator == "!=":
- self.query = self.query.filter( field.sqlalchemy_field != right )
+ self.query = self.query.filter( sqlalchemy_field_value != right )
elif operator == "like":
- self.query = self.query.filter( field.sqlalchemy_field.like(right) )
+ self.query = self.query.filter( sqlalchemy_field_value.like(right) )
else:
raise GalaxyParseError("Invalid comparison operator: %s" % (operator))
elif field.handler is not None:
@@ -172,11 +174,11 @@
VIEW_NAME = "library_dataset_dataset"
FIELDS = {
'extended_metadata': ViewField('extended_metadata', handler=library_extended_metadata_filter),
- 'name': ViewField('name', sqlalchemy_field=LibraryDatasetDatasetAssociation.name),
+ 'name': ViewField('name', sqlalchemy_field=(LibraryDatasetDatasetAssociation, "name")),
'id': ViewField('id', sqlalchemy_field=LibraryDatasetDatasetAssociation.id, id_decode=True),
- 'deleted': ViewField('deleted', sqlalchemy_field=LibraryDatasetDatasetAssociation.deleted),
+ 'deleted': ViewField('deleted', sqlalchemy_field=(LibraryDatasetDatasetAssociation, "deleted")),
'parent_library_id': ViewField('parent_library_id', id_decode=True, post_filter=ldda_parent_library_filter),
- 'data_type': ViewField('data_type', sqlalchemy_field=LibraryDatasetDatasetAssociation.extension)
+ 'data_type': ViewField('data_type', sqlalchemy_field=(LibraryDatasetDatasetAssociation, "extension")),
}
def search(self, trans):
@@ -190,9 +192,9 @@
class LibraryView(ViewQueryBaseClass):
VIEW_NAME = "library"
FIELDS = {
- 'name': ViewField('name', sqlalchemy_field=Library.name),
+ 'name': ViewField('name', sqlalchemy_field=(Library, "name")),
'id': ViewField('id', sqlalchemy_field=Library.id, id_decode=True),
- 'deleted': ViewField('deleted', sqlalchemy_field=Library.deleted)
+ 'deleted': ViewField('deleted', sqlalchemy_field=(Library, "deleted")),
}
def search(self, trans):
@@ -222,9 +224,9 @@
class LibraryFolderView(ViewQueryBaseClass):
VIEW_NAME = "library_folder"
FIELDS = {
- 'name': ViewField('name', sqlalchemy_field=LibraryFolder.name),
- 'id': ViewField('id', sqlalchemy_field=LibraryFolder.id, id_decode=True),
- 'parent_id': ViewField('parent_id', sqlalchemy_field=LibraryFolder.parent_id, id_decode=True),
+ 'name': ViewField('name', sqlalchemy_field=(LibraryFolder, "name")),
+ 'id': ViewField('id', sqlalchemy_field=(LibraryFolder, "id"), id_decode=True),
+ 'parent_id': ViewField('parent_id', sqlalchemy_field=(LibraryFolder, "parent_id"), id_decode=True),
'parent_library_id': ViewField('parent_library_id', post_filter=library_folder_parent_library_id_filter, id_decode=True),
'library_path': ViewField('library_path', post_filter=library_path_filter)
}
@@ -248,8 +250,8 @@
VIEW_NAME = "library_dataset"
FIELDS = {
'name': ViewField('name', post_filter=library_dataset_name_filter),
- 'id': ViewField('id', sqlalchemy_field=LibraryDataset.id, id_decode=True),
- 'folder_id': ViewField('folder_id', sqlalchemy_field=LibraryDataset.folder_id, id_decode=True)
+ 'id': ViewField('id', sqlalchemy_field=(LibraryDataset, "id"), id_decode=True),
+ 'folder_id': ViewField('folder_id', sqlalchemy_field=(LibraryDataset, "folder_id"), id_decode=True)
}
def search(self, trans):
@@ -262,12 +264,11 @@
class ToolView(ViewQueryBaseClass):
VIEW_NAME = "tool"
FIELDS = {
- 'tool_id': ViewField('name', sqlalchemy_field=ToolVersion.tool_id),
- 'id': ViewField('id', sqlalchemy_field=ToolVersion.id)
+ 'tool_id': ViewField('name', sqlalchemy_field=(ToolVersion, "tool_id")),
+ 'id': ViewField('id', sqlalchemy_field=(ToolVersion, "id")),
}
def search(self, trans):
- # Likely this will break in subsequent model refactoring. Need to revisit.
self.query = trans.install_model.context.query( ToolVersion )
@@ -294,17 +295,17 @@
class HistoryDatasetView(ViewQueryBaseClass):
DOMAIN = "history_dataset"
FIELDS = {
- 'name': ViewField('name', sqlalchemy_field=HistoryDatasetAssociation.name),
- 'id': ViewField('id', sqlalchemy_field=HistoryDatasetAssociation.id, id_decode=True),
- 'history_id': ViewField('history_id', sqlalchemy_field=HistoryDatasetAssociation.history_id, id_decode=True),
+ 'name': ViewField('name', sqlalchemy_field=(HistoryDatasetAssociation, "name")),
+ 'id': ViewField('id', sqlalchemy_field=(HistoryDatasetAssociation, "id"), id_decode=True),
+ 'history_id': ViewField('history_id', sqlalchemy_field=(HistoryDatasetAssociation, "history_id"), id_decode=True),
'tag': ViewField("tag", handler=history_dataset_handle_tag),
'copied_from_ldda_id': ViewField("copied_from_ldda_id",
- sqlalchemy_field=HistoryDatasetAssociation.copied_from_library_dataset_dataset_association_id,
+ sqlalchemy_field=(HistoryDatasetAssociation, "copied_from_library_dataset_dataset_association_id"),
id_decode=True),
'copied_from_hda_id': ViewField("copied_from_hda_id",
- sqlalchemy_field=HistoryDatasetAssociation.copied_from_history_dataset_association_id,
+ sqlalchemy_field=(HistoryDatasetAssociation, "copied_from_history_dataset_association_id"),
id_decode=True),
- 'deleted': ViewField('deleted', sqlalchemy_field=HistoryDatasetAssociation.deleted)
+ 'deleted': ViewField('deleted', sqlalchemy_field=(HistoryDatasetAssociation, "deleted")),
}
def search(self, trans):
@@ -353,8 +354,8 @@
class HistoryView(ViewQueryBaseClass):
DOMAIN = "history"
FIELDS = {
- 'name': ViewField('name', sqlalchemy_field=History.name),
- 'id': ViewField('id', sqlalchemy_field=History.id, id_decode=True),
+ 'name': ViewField('name', sqlalchemy_field=(History, "name")),
+ 'id': ViewField('id', sqlalchemy_field=(History, "id"), id_decode=True),
'tag': ViewField("tag", handler=history_handle_tag),
'annotation': ViewField("annotation", handler=history_handle_annotation)
}
@@ -385,8 +386,8 @@
class WorkflowView(ViewQueryBaseClass):
DOMAIN = "workflow"
FIELDS = {
- 'name': ViewField('name', sqlalchemy_field=StoredWorkflow.name),
- 'id': ViewField('id', sqlalchemy_field=StoredWorkflow.id, id_decode=True),
+ 'name': ViewField('name', sqlalchemy_field=(StoredWorkflow, "name")),
+ 'id': ViewField('id', sqlalchemy_field=(StoredWorkflow, "id"), id_decode=True),
'tag': ViewField('tag', handler=workflow_tag_handler)
}
@@ -454,8 +455,8 @@
class JobView(ViewQueryBaseClass):
DOMAIN = "job"
FIELDS = {
- 'tool_name': ViewField('tool_name', sqlalchemy_field=Job.tool_id),
- 'state': ViewField('state', sqlalchemy_field=Job.state),
+ 'tool_name': ViewField('tool_name', sqlalchemy_field=(Job, "tool_id")),
+ 'state': ViewField('state', sqlalchemy_field=(Job, "state")),
'param': ViewField('param', handler=job_param_filter),
'input_ldda': ViewField('input_ldda', handler=job_input_ldda_filter, id_decode=True),
'input_hda': ViewField('input_hda', handler=job_input_hda_filter, id_decode=True),
@@ -474,8 +475,8 @@
class PageView(ViewQueryBaseClass):
DOMAIN = "page"
FIELDS = {
- 'id': ViewField('id', sqlalchemy_field=Page.id, id_decode=True),
- 'title': ViewField('title', sqlalchemy_field=Page.title),
+ 'id': ViewField('id', sqlalchemy_field=(Page, "id"), id_decode=True),
+ 'title': ViewField('title', sqlalchemy_field=(Page, "title")),
}
def search(self, trans):
@@ -490,9 +491,9 @@
class PageRevisionView(ViewQueryBaseClass):
DOMAIN = "page_revision"
FIELDS = {
- 'id': ViewField('id', sqlalchemy_field=PageRevision.id, id_decode=True),
- 'title': ViewField('title', sqlalchemy_field=PageRevision.title),
- 'page_id': ViewField('page_id', sqlalchemy_field=PageRevision.page_id, id_decode=True),
+ 'id': ViewField('id', sqlalchemy_field=(PageRevision, "id"), id_decode=True),
+ 'title': ViewField('title', sqlalchemy_field=(PageRevision, "title")),
+ 'page_id': ViewField('page_id', sqlalchemy_field=(PageRevision, "page_id"), id_decode=True),
}
def search(self, trans):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e6ad98134741/
Changeset: e6ad98134741
User: jmchilton
Date: 2013-12-04 18:42:43
Summary: Load plugins in a deterministic order.
Fixes plugin unit tests on my dev box and is a good idea overall anyway.
Affected #: 1 file
diff -r 1ff7ac24ec0011ed3da190164e4e5b1877741e62 -r e6ad9813474196d2ebd110c6716fb6650ea2ae9e lib/galaxy/web/base/pluginframework.py
--- a/lib/galaxy/web/base/pluginframework.py
+++ b/lib/galaxy/web/base/pluginframework.py
@@ -146,7 +146,7 @@
# due to the ordering of listdir, there is an implicit plugin loading order here
# could instead explicitly list on/off in master config file
for directory in self.directories:
- for plugin_dir in os.listdir( directory ):
+ for plugin_dir in sorted( os.listdir( directory ) ):
plugin_path = os.path.join( directory, plugin_dir )
if self.is_plugin( plugin_path ):
yield plugin_path
https://bitbucket.org/galaxy/galaxy-central/commits/bbdb9255aa9a/
Changeset: bbdb9255aa9a
User: jmchilton
Date: 2013-12-04 18:46:32
Summary: Allow run_unit_tests.sh to execute all unit tests in test/unit.
Basically adds __init__.py files to each subdirectory so nose detects these as Python module directories. Also modify dynamic loading of modules in Carl's test cases so the test case can be run with any cwd - including the Galaxy root directory as run_unit_tests.sh will use.
This should roughly double the number of unit tests buildbot runs.
Affected #: 6 files
diff -r e6ad9813474196d2ebd110c6716fb6650ea2ae9e -r bbdb9255aa9a1b4d30e08e8da0276d28422c5805 test/unit/datatypes/dataproviders/test_base_dataproviders.py
--- a/test/unit/datatypes/dataproviders/test_base_dataproviders.py
+++ b/test/unit/datatypes/dataproviders/test_base_dataproviders.py
@@ -7,13 +7,14 @@
#TODO: fix off by ones in FilteredDataProvider counters
+import os.path
import imp
import unittest
import StringIO
import tempfilecache
-utility = imp.load_source( 'utility', '../../util/utility.py' )
+utility = imp.load_source( 'utility', os.path.join( os.path.dirname( __file__), '../../util/utility.py' ) )
log = utility.set_up_filelogger( __name__ + '.log' )
utility.add_galaxy_lib_to_path( 'test/unit/datatypes/dataproviders' )
diff -r e6ad9813474196d2ebd110c6716fb6650ea2ae9e -r bbdb9255aa9a1b4d30e08e8da0276d28422c5805 test/unit/datatypes/dataproviders/test_line_dataproviders.py
--- a/test/unit/datatypes/dataproviders/test_line_dataproviders.py
+++ b/test/unit/datatypes/dataproviders/test_line_dataproviders.py
@@ -8,13 +8,14 @@
#TODO: fix off by ones in FilteredDataProvider counters
import imp
+import os
import unittest
import StringIO
import tempfilecache
import test_base_dataproviders
-utility = imp.load_source( 'utility', '../../util/utility.py' )
+utility = imp.load_source( 'utility', os.path.join( os.path.dirname( __file__ ), '../../util/utility.py' ) )
log = utility.set_up_filelogger( __name__ + '.log' )
utility.add_galaxy_lib_to_path( 'test/unit/datatypes/dataproviders' )
diff -r e6ad9813474196d2ebd110c6716fb6650ea2ae9e -r bbdb9255aa9a1b4d30e08e8da0276d28422c5805 test/unit/visualizations/registry/test_VisualizationsRegistry.py
--- a/test/unit/visualizations/registry/test_VisualizationsRegistry.py
+++ b/test/unit/visualizations/registry/test_VisualizationsRegistry.py
@@ -4,7 +4,7 @@
import imp
import unittest
-utility = imp.load_source( 'utility', '../../util/utility.py' )
+utility = imp.load_source( 'utility', os.path.join( os.path.dirname( __file__ ), '../../util/utility.py' ) )
log = utility.set_up_filelogger( __name__ + '.log' )
relative_test_path = '/test/unit/visualizations/registry'
@@ -12,7 +12,7 @@
from galaxy.visualization.registry import VisualizationsRegistry
-base_mock = imp.load_source( 'mock', '../../web/base/mock.py' )
+base_mock = imp.load_source( 'mock', os.path.join( os.path.dirname( __file__ ), '../../web/base/mock.py' ) )
# ----------------------------------------------------------------------------- globals
glx_dir = os.getcwd().replace( relative_test_path, '' )
diff -r e6ad9813474196d2ebd110c6716fb6650ea2ae9e -r bbdb9255aa9a1b4d30e08e8da0276d28422c5805 test/unit/web/base/test_HookPluginManager.py
--- a/test/unit/web/base/test_HookPluginManager.py
+++ b/test/unit/web/base/test_HookPluginManager.py
@@ -5,7 +5,7 @@
import unittest
import types
-utility = imp.load_source( 'utility', '../../util/utility.py' )
+utility = imp.load_source( 'utility', os.path.join( os.path.dirname( __file__ ), '../../util/utility.py' ) )
log = utility.set_up_filelogger( __name__ + '.log' )
utility.add_galaxy_lib_to_path( 'test/unit/web/base' )
diff -r e6ad9813474196d2ebd110c6716fb6650ea2ae9e -r bbdb9255aa9a1b4d30e08e8da0276d28422c5805 test/unit/web/base/test_PageServingPluginManager.py
--- a/test/unit/web/base/test_PageServingPluginManager.py
+++ b/test/unit/web/base/test_PageServingPluginManager.py
@@ -4,7 +4,7 @@
import imp
import unittest
-utility = imp.load_source( 'utility', '../../util/utility.py' )
+utility = imp.load_source( 'utility', os.path.join( os.path.dirname( __file__ ), '../../util/utility.py' ) )
log = utility.set_up_filelogger( __name__ + '.log' )
utility.add_galaxy_lib_to_path( 'test/unit/web/base' )
diff -r e6ad9813474196d2ebd110c6716fb6650ea2ae9e -r bbdb9255aa9a1b4d30e08e8da0276d28422c5805 test/unit/web/base/test_PluginManager.py
--- a/test/unit/web/base/test_PluginManager.py
+++ b/test/unit/web/base/test_PluginManager.py
@@ -5,7 +5,7 @@
import imp
import unittest
-utility = imp.load_source( 'utility', '../../util/utility.py' )
+utility = imp.load_source( 'utility', os.path.join( os.path.dirname( __file__ ), '../../util/utility.py' ) )
log = utility.set_up_filelogger( __name__ + '.log' )
utility.add_galaxy_lib_to_path( 'test/unit/web/base' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
04 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1ff7ac24ec00/
Changeset: 1ff7ac24ec00
User: guerler
Date: 2013-12-05 06:05:20
Summary: UI: Fix iconbar css
Affected #: 2 files
diff -r d787fe355c86b11f4f70534d9a4b2e4631aa9782 -r 1ff7ac24ec0011ed3da190164e4e5b1877741e62 static/style/blue/base.css
--- a/static/style/blue/base.css
+++ b/static/style/blue/base.css
@@ -1311,7 +1311,7 @@
#masthead li.dropdown>a:hover .caret{border-top-color:gold;border-bottom-color:gold}
#masthead .navbar-brand{position:absolute;left:0;top:0;font-family:verdana;font-weight:bold;font-size:20px;line-height:1;color:white;padding:5px 20px 12px;margin-left:-15px;z-index:2000}#masthead .navbar-brand img{display:inline;width:26px;vertical-align:top}
#masthead .navbar-brand a{color:white;text-decoration:none}
-#masthead .iconbar{position:absolute;top:5px;right:110px;cursor:pointer;color:#999;overflow:hidden}#masthead .iconbar .symbol{float:left;margin:0px 10px}
+#masthead .iconbar{position:absolute;top:5px;right:110px;cursor:pointer;color:#999;overflow:hidden}#masthead .iconbar .symbol{float:left;margin:0px 10px;height:26px}
#masthead .iconbar .symbol .number{font-weight:bold;font-size:10px;font-family:"Lucida Grande",verdana,arial,helvetica,sans-serif;position:relative;left:23px;top:-12px}
#masthead .iconbar .toggle{color:#BCC800}
.quota-meter-container{position:absolute;top:0;right:0;height:32px}
diff -r d787fe355c86b11f4f70534d9a4b2e4631aa9782 -r 1ff7ac24ec0011ed3da190164e4e5b1877741e62 static/style/src/less/base.less
--- a/static/style/src/less/base.less
+++ b/static/style/src/less/base.less
@@ -431,6 +431,7 @@
{
float : left;
margin : 0px 10px;
+ height : 26px;
}
.symbol .number
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: guerler: Grids: Fix tag click event
by commits-noreply@bitbucket.org 04 Dec '13
by commits-noreply@bitbucket.org 04 Dec '13
04 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d787fe355c86/
Changeset: d787fe355c86
User: guerler
Date: 2013-12-05 01:59:07
Summary: Grids: Fix tag click event
Affected #: 3 files
diff -r 44b0d21b4c7744d27d97241375b953abc1da7830 -r d787fe355c86b11f4f70534d9a4b2e4631aa9782 static/scripts/galaxy.grids.js
--- a/static/scripts/galaxy.grids.js
+++ b/static/scripts/galaxy.grids.js
@@ -200,7 +200,7 @@
var text_input_obj = $('#input-' + column_key + '-filter');
var text_input = text_input_obj.val();
text_input_obj.val('');
- self.add_filter_condition(column_key, text_input, true);
+ self.add_filter_condition(column_key, text_input);
return false;
});
});
@@ -221,7 +221,7 @@
// Initialize standard, advanced search toggles.
$('.advanced-search-toggle').each( function() {
$(this).click( function() {
- $("#standard-search").slideToggle('fast');
+ $('#standard-search').slideToggle('fast');
$('#advanced-search').slideToggle('fast');
return false;
});
@@ -335,14 +335,14 @@
},
// Add a condition to the grid filter; this adds the condition and refreshes the grid.
- add_filter_condition: function (name, value, append) {
+ add_filter_condition: function (name, value) {
// Do nothing is value is empty.
if (value === "") {
return false;
}
// Add condition to grid.
- this.grid.add_filter(name, value, append);
+ this.grid.add_filter(name, value, true);
// Add button that displays filter and provides a button to delete it.
var t = $("<span>" + value + "<a href='javascript:void(0);'><span class='delete-search-icon' /></span></a>");
diff -r 44b0d21b4c7744d27d97241375b953abc1da7830 -r d787fe355c86b11f4f70534d9a4b2e4631aa9782 static/scripts/packed/galaxy.grids.js
--- a/static/scripts/packed/galaxy.grids.js
+++ b/static/scripts/packed/galaxy.grids.js
@@ -1,1 +1,1 @@
-jQuery.ajaxSettings.traditional=true;define(["mvc/ui"],function(){var a=Backbone.Model.extend({defaults:{url_base:"",async:false,async_ops:[],categorical_filters:[],filters:{},sort_key:null,show_item_checkboxes:false,advanced_search:false,cur_page:1,num_pages:1,operation:undefined,item_ids:undefined},can_async_op:function(c){return _.indexOf(this.attributes.async_ops,c)!==-1},add_filter:function(g,h,d){if(d){var e=this.attributes.filters[g],c;if(e===null||e===undefined){c=h}else{if(typeof(e)=="string"){if(e=="All"){c=h}else{var f=[];f[0]=e;f[1]=h;c=f}}else{c=e;c.push(h)}}this.attributes.filters[g]=c}else{this.attributes.filters[g]=h}},remove_filter:function(d,g){var c=this.attributes.filters[d];if(c===null||c===undefined){return false}var f=true;if(typeof(c)==="string"){if(c=="All"){f=false}else{delete this.attributes.filters[d]}}else{var e=_.indexOf(c,g);if(e!==-1){c.splice(e,1)}else{f=false}}return f},get_url_data:function(){var c={async:this.attributes.async,sort:this.attributes.sort_key,page:this.attributes.cur_page,show_item_checkboxes:this.attributes.show_item_checkboxes,advanced_search:this.attributes.advanced_search};if(this.attributes.operation){c.operation=this.attributes.operation}if(this.attributes.item_ids){c.id=this.attributes.item_ids}var d=this;_.each(_.pairs(d.attributes.filters),function(e){c["f-"+e[0]]=e[1]});return c}});var b=Backbone.View.extend({grid:null,initialize:function(c){this.init_grid(c);this.init_grid_controls();$("input[type=text]").each(function(){$(this).click(function(){$(this).select()}).keyup(function(){$(this).css("font-style","normal")})})},init_grid:function(c){this.grid=c;this.init_grid_elements()},init_grid_controls:function(){$(".submit-image").each(function(){$(this).mousedown(function(){$(this).addClass("gray-background")});$(this).mouseup(function(){$(this).removeClass("gray-background")})});var c=this;$(".sort-link").each(function(){$(this).click(function(){c.set_sort_condition($(this).attr("sort_key"));return 
false})});$(".categorical-filter > a").each(function(){$(this).click(function(){c.set_categorical_filter($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});$(".text-filter-form").each(function(){$(this).submit(function(){var g=$(this).attr("column_key");var f=$("#input-"+g+"-filter");var h=f.val();f.val("");c.add_filter_condition(g,h,true);return false})});var d=$("#input-tags-filter");if(d.length){d.autocomplete(this.grid.history_tag_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}var e=$("#input-name-filter");if(e.length){e.autocomplete(this.grid.history_name_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}$(".advanced-search-toggle").each(function(){$(this).click(function(){$("#standard-search").slideToggle("fast");$("#advanced-search").slideToggle("fast");return false})})},init_grid_elements:function(){$(".grid").each(function(){var j=$(this).find("input.grid-row-select-checkbox");var i=$(this).find("span.grid-selected-count");var q=function(){i.text($(j).filter(":checked").length)};$(j).each(function(){$(this).change(q)});q()});if($(".community_rating_star").length!==0){$(".community_rating_star").rating({})}var p=this.grid.attributes;var o=this;$(".page-link > a").each(function(){$(this).click(function(){o.set_page($(this).attr("page_num"));return false})});$(".use-inbound").each(function(){$(this).click(function(i){o.execute({href:$(this).attr("href"),inbound:true});return false})});$(".use-outbound").each(function(){$(this).click(function(i){o.execute({href:$(this).attr("href")});return false})});for(var h in p.items){var k=$("#grid-"+h+"-popup");k.off();var d=new PopupMenu(k);var n=p.items[h];for(var g in p.operations){var e=p.operations[g];var l=e.label;var c=n.operation_config[l];var f=n.encode_id;if(c.allowed&&e.allow_popup){var 
m={html:e.label,href:c.url_args,target:c.target,confirmation_text:e.confirm,inbound:e.inbound};m.func=function(q){q.preventDefault();var j=$(q.target).html();var i=this.findItemByHtml(j);o.execute(i)};d.addItem(m)}}}},add_filter_condition:function(f,h,c){if(h===""){return false}this.grid.add_filter(f,h,c);var g=$("<span>"+h+"<a href='javascript:void(0);'><span class='delete-search-icon' /></span></a>");g.addClass("text-filter-val");var e=this;g.click(function(){e.grid.remove_filter(f,h);$(this).remove();e.go_page_one();e.execute()});var d=$("#"+f+"-filtering-criteria");d.append(g);this.go_page_one();this.execute()},set_sort_condition:function(h){var g=this.grid.get("sort_key");var f=h;if(g.indexOf(h)!==-1){if(g.substring(0,1)!=="-"){f="-"+h}else{}}$(".sort-arrow").remove();var e=(f.substring(0,1)=="-")?"↑":"↓";var c=$("<span>"+e+"</span>").addClass("sort-arrow");var d=$("#"+h+"-header");d.append(c);this.grid.set("sort_key",f);this.go_page_one();this.execute()},set_categorical_filter:function(e,g){var d=this.grid.get("categorical_filters")[e],f=this.grid.get("filters")[e];var c=this;$("."+e+"-filter").each(function(){var k=$.trim($(this).text());var i=d[k];var j=i[e];if(j==g){$(this).empty();$(this).addClass("current-filter");$(this).append(k)}else{if(j==f){$(this).empty();var h=$("<a href='#'>"+k+"</a>");h.click(function(){c.set_categorical_filter(e,j)});$(this).removeClass("current-filter");$(this).append(h)}}});this.grid.add_filter(e,g);this.go_page_one();this.execute()},set_page:function(c){var d=this;$(".page-link").each(function(){var i=$(this).attr("id"),g=parseInt(i.split("-")[2],10),e=d.grid.get("cur_page"),h;if(g===c){h=$(this).children().text();$(this).empty();$(this).addClass("inactive-link");$(this).text(h)}else{if(g===e){h=$(this).text();$(this).empty();$(this).removeClass("inactive-link");var f=$("<a 
href='#'>"+h+"</a>");f.click(function(){d.set_page(g)});$(this).append(f)}}});if(c==="all"){this.grid.set("cur_page",c)}else{this.grid.set("cur_page",parseInt(c,10))}this.execute()},submit_operation:function(f,g){var e=$('input[name="id"]:checked').length;if(!e>0){return false}var d=$(f).val();var c=[];$("input[name=id]:checked").each(function(){c.push($(this).val())});this.execute({operation:d,id:c,confirmation_text:g});return true},execute:function(l){var f=null;var e=null;var g=null;var c=null;var k=null;if(l){e=l.href;g=l.operation;f=l.id;c=l.confirmation_text;k=l.inbound;if(e!==undefined&&e.indexOf("operation=")!=-1){var j=e.split("?");if(j.length>1){var i=j[1];var d=i.split("&");for(var h=0;h<d.length;h++){if(d[h].indexOf("operation")!=-1){g=d[h].split("=")[1];g=g.replace(/\+/g," ")}else{if(d[h].indexOf("id")!=-1){f=d[h].split("=")[1]}}}}}}if(g&&f){if(c&&c!=""&&c!="None"){if(!confirm(c)){return false}}g=g.toLowerCase();this.grid.set({operation:g,item_ids:f});if(this.grid.can_async_op(g)){this.update_grid()}else{this.go_to(k,"")}return false}if(e){this.go_to(k,e);return false}if(this.grid.get("async")){this.update_grid()}else{this.go_to(k,"")}return false},go_to:function(f,d){var e=this.grid.get("async");this.grid.set("async",false);advanced_search=$("#advanced-search").is(":visible");this.grid.set("advanced_search",advanced_search);if(!d){d=this.grid.get("url_base")+"?"+$.param(this.grid.get_url_data())}this.grid.set({operation:undefined,item_ids:undefined,async:e});if(f){var c=$(".grid-header").closest(".inbound");if(c.length!==0){c.load(d);return}}window.location=d},update_grid:function(){var d=(this.grid.get("operation")?"POST":"GET");$(".loading-elt-overlay").show();var c=this;$.ajax({type:d,url:c.grid.get("url_base"),data:c.grid.get_url_data(),error:function(e){alert("Grid refresh failed")},success:function(f){var 
e=f.split("*****");$("#grid-table-body").html(e[0]);$("#grid-table-footer").html(e[1]);$("#grid-table-body").trigger("update");$(".loading-elt-overlay").hide();var g=$.trim(e[2]);if(g!==""){$("#grid-message").html(g).show();setTimeout(function(){$("#grid-message").hide()},5000)}},complete:function(){c.grid.set({operation:undefined,item_ids:undefined})}})},check_all_items:function(){var c=document.getElementById("check_all"),d=document.getElementsByTagName("input"),f=0,e;if(c.checked===true){for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=true;f++}}}else{for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=false}}}this.init_grid_elements()},go_page_one:function(){var c=this.grid.get("cur_page");if(c!==null&&c!==undefined&&c!=="all"){this.grid.set("cur_page",1)}}});return{Grid:a,GridView:b}});
\ No newline at end of file
+jQuery.ajaxSettings.traditional=true;define(["mvc/ui"],function(){var a=Backbone.Model.extend({defaults:{url_base:"",async:false,async_ops:[],categorical_filters:[],filters:{},sort_key:null,show_item_checkboxes:false,advanced_search:false,cur_page:1,num_pages:1,operation:undefined,item_ids:undefined},can_async_op:function(c){return _.indexOf(this.attributes.async_ops,c)!==-1},add_filter:function(g,h,d){if(d){var e=this.attributes.filters[g],c;if(e===null||e===undefined){c=h}else{if(typeof(e)=="string"){if(e=="All"){c=h}else{var f=[];f[0]=e;f[1]=h;c=f}}else{c=e;c.push(h)}}this.attributes.filters[g]=c}else{this.attributes.filters[g]=h}},remove_filter:function(d,g){var c=this.attributes.filters[d];if(c===null||c===undefined){return false}var f=true;if(typeof(c)==="string"){if(c=="All"){f=false}else{delete this.attributes.filters[d]}}else{var e=_.indexOf(c,g);if(e!==-1){c.splice(e,1)}else{f=false}}return f},get_url_data:function(){var c={async:this.attributes.async,sort:this.attributes.sort_key,page:this.attributes.cur_page,show_item_checkboxes:this.attributes.show_item_checkboxes,advanced_search:this.attributes.advanced_search};if(this.attributes.operation){c.operation=this.attributes.operation}if(this.attributes.item_ids){c.id=this.attributes.item_ids}var d=this;_.each(_.pairs(d.attributes.filters),function(e){c["f-"+e[0]]=e[1]});return c}});var b=Backbone.View.extend({grid:null,initialize:function(c){this.init_grid(c);this.init_grid_controls();$("input[type=text]").each(function(){$(this).click(function(){$(this).select()}).keyup(function(){$(this).css("font-style","normal")})})},init_grid:function(c){this.grid=c;this.init_grid_elements()},init_grid_controls:function(){$(".submit-image").each(function(){$(this).mousedown(function(){$(this).addClass("gray-background")});$(this).mouseup(function(){$(this).removeClass("gray-background")})});var c=this;$(".sort-link").each(function(){$(this).click(function(){c.set_sort_condition($(this).attr("sort_key"));return 
false})});$(".categorical-filter > a").each(function(){$(this).click(function(){c.set_categorical_filter($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});$(".text-filter-form").each(function(){$(this).submit(function(){var g=$(this).attr("column_key");var f=$("#input-"+g+"-filter");var h=f.val();f.val("");c.add_filter_condition(g,h);return false})});var d=$("#input-tags-filter");if(d.length){d.autocomplete(this.grid.history_tag_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}var e=$("#input-name-filter");if(e.length){e.autocomplete(this.grid.history_name_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}$(".advanced-search-toggle").each(function(){$(this).click(function(){$("#standard-search").slideToggle("fast");$("#advanced-search").slideToggle("fast");return false})})},init_grid_elements:function(){$(".grid").each(function(){var j=$(this).find("input.grid-row-select-checkbox");var i=$(this).find("span.grid-selected-count");var q=function(){i.text($(j).filter(":checked").length)};$(j).each(function(){$(this).change(q)});q()});if($(".community_rating_star").length!==0){$(".community_rating_star").rating({})}var p=this.grid.attributes;var o=this;$(".page-link > a").each(function(){$(this).click(function(){o.set_page($(this).attr("page_num"));return false})});$(".use-inbound").each(function(){$(this).click(function(i){o.execute({href:$(this).attr("href"),inbound:true});return false})});$(".use-outbound").each(function(){$(this).click(function(i){o.execute({href:$(this).attr("href")});return false})});for(var h in p.items){var k=$("#grid-"+h+"-popup");k.off();var d=new PopupMenu(k);var n=p.items[h];for(var g in p.operations){var e=p.operations[g];var l=e.label;var c=n.operation_config[l];var f=n.encode_id;if(c.allowed&&e.allow_popup){var m={html:e.label,href:c.url_args,target:c.target,confirmation_text:e.confirm,inbound:e.inbound};m.func=function(q){q.preventDefault();var 
j=$(q.target).html();var i=this.findItemByHtml(j);o.execute(i)};d.addItem(m)}}}},add_filter_condition:function(e,g){if(g===""){return false}this.grid.add_filter(e,g,true);var f=$("<span>"+g+"<a href='javascript:void(0);'><span class='delete-search-icon' /></span></a>");f.addClass("text-filter-val");var d=this;f.click(function(){d.grid.remove_filter(e,g);$(this).remove();d.go_page_one();d.execute()});var c=$("#"+e+"-filtering-criteria");c.append(f);this.go_page_one();this.execute()},set_sort_condition:function(h){var g=this.grid.get("sort_key");var f=h;if(g.indexOf(h)!==-1){if(g.substring(0,1)!=="-"){f="-"+h}else{}}$(".sort-arrow").remove();var e=(f.substring(0,1)=="-")?"↑":"↓";var c=$("<span>"+e+"</span>").addClass("sort-arrow");var d=$("#"+h+"-header");d.append(c);this.grid.set("sort_key",f);this.go_page_one();this.execute()},set_categorical_filter:function(e,g){var d=this.grid.get("categorical_filters")[e],f=this.grid.get("filters")[e];var c=this;$("."+e+"-filter").each(function(){var k=$.trim($(this).text());var i=d[k];var j=i[e];if(j==g){$(this).empty();$(this).addClass("current-filter");$(this).append(k)}else{if(j==f){$(this).empty();var h=$("<a href='#'>"+k+"</a>");h.click(function(){c.set_categorical_filter(e,j)});$(this).removeClass("current-filter");$(this).append(h)}}});this.grid.add_filter(e,g);this.go_page_one();this.execute()},set_page:function(c){var d=this;$(".page-link").each(function(){var i=$(this).attr("id"),g=parseInt(i.split("-")[2],10),e=d.grid.get("cur_page"),h;if(g===c){h=$(this).children().text();$(this).empty();$(this).addClass("inactive-link");$(this).text(h)}else{if(g===e){h=$(this).text();$(this).empty();$(this).removeClass("inactive-link");var f=$("<a href='#'>"+h+"</a>");f.click(function(){d.set_page(g)});$(this).append(f)}}});if(c==="all"){this.grid.set("cur_page",c)}else{this.grid.set("cur_page",parseInt(c,10))}this.execute()},submit_operation:function(f,g){var e=$('input[name="id"]:checked').length;if(!e>0){return false}var 
d=$(f).val();var c=[];$("input[name=id]:checked").each(function(){c.push($(this).val())});this.execute({operation:d,id:c,confirmation_text:g});return true},execute:function(l){var f=null;var e=null;var g=null;var c=null;var k=null;if(l){e=l.href;g=l.operation;f=l.id;c=l.confirmation_text;k=l.inbound;if(e!==undefined&&e.indexOf("operation=")!=-1){var j=e.split("?");if(j.length>1){var i=j[1];var d=i.split("&");for(var h=0;h<d.length;h++){if(d[h].indexOf("operation")!=-1){g=d[h].split("=")[1];g=g.replace(/\+/g," ")}else{if(d[h].indexOf("id")!=-1){f=d[h].split("=")[1]}}}}}}if(g&&f){if(c&&c!=""&&c!="None"){if(!confirm(c)){return false}}g=g.toLowerCase();this.grid.set({operation:g,item_ids:f});if(this.grid.can_async_op(g)){this.update_grid()}else{this.go_to(k,"")}return false}if(e){this.go_to(k,e);return false}if(this.grid.get("async")){this.update_grid()}else{this.go_to(k,"")}return false},go_to:function(f,d){var e=this.grid.get("async");this.grid.set("async",false);advanced_search=$("#advanced-search").is(":visible");this.grid.set("advanced_search",advanced_search);if(!d){d=this.grid.get("url_base")+"?"+$.param(this.grid.get_url_data())}this.grid.set({operation:undefined,item_ids:undefined,async:e});if(f){var c=$(".grid-header").closest(".inbound");if(c.length!==0){c.load(d);return}}window.location=d},update_grid:function(){var d=(this.grid.get("operation")?"POST":"GET");$(".loading-elt-overlay").show();var c=this;$.ajax({type:d,url:c.grid.get("url_base"),data:c.grid.get_url_data(),error:function(e){alert("Grid refresh failed")},success:function(f){var e=f.split("*****");$("#grid-table-body").html(e[0]);$("#grid-table-footer").html(e[1]);$("#grid-table-body").trigger("update");$(".loading-elt-overlay").hide();var g=$.trim(e[2]);if(g!==""){$("#grid-message").html(g).show();setTimeout(function(){$("#grid-message").hide()},5000)}},complete:function(){c.grid.set({operation:undefined,item_ids:undefined})}})},check_all_items:function(){var 
c=document.getElementById("check_all"),d=document.getElementsByTagName("input"),f=0,e;if(c.checked===true){for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=true;f++}}}else{for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=false}}}this.init_grid_elements()},go_page_one:function(){var c=this.grid.get("cur_page");if(c!==null&&c!==undefined&&c!=="all"){this.grid.set("cur_page",1)}}});return{Grid:a,GridView:b}});
\ No newline at end of file
diff -r 44b0d21b4c7744d27d97241375b953abc1da7830 -r d787fe355c86b11f4f70534d9a4b2e4631aa9782 templates/grid_base.mako
--- a/templates/grid_base.mako
+++ b/templates/grid_base.mako
@@ -235,8 +235,13 @@
{
// Put tag name and value together.
var tag = tag_name + (tag_value !== undefined && tag_value !== "" ? ":" + tag_value : "");
- $('#advanced-search').show('fast');
- gridView.add_filter_condition("tags", tag, true);
+ var advanced_search = $('#advanced-search').is(":visible");
+ if (!advanced_search)
+ {
+ $('#standard-search').slideToggle('fast');
+ $('#advanced-search').slideToggle('fast');
+ }
+ gridView.add_filter_condition("tags", tag);
};
</script></%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: guerler: Grids: Refactoring and fixes
by commits-noreply@bitbucket.org 04 Dec '13
by commits-noreply@bitbucket.org 04 Dec '13
04 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/44b0d21b4c77/
Changeset: 44b0d21b4c77
User: guerler
Date: 2013-12-05 00:51:44
Summary: Grids: Refactoring and fixes
iG: removed templates/grid_common.mako
Affected #: 8 files
diff -r 60128a7ab160f23cf4fe4cb4231729cfa56369fb -r 44b0d21b4c7744d27d97241375b953abc1da7830 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py
+++ b/lib/galaxy/web/framework/helpers/grids.py
@@ -264,6 +264,7 @@
return url_for( **new_kwargs)
self.use_panels = ( kwargs.get( 'use_panels', False ) in [ True, 'True', 'true' ] )
+ self.advanced_search = ( kwargs.get( 'advanced_search', False ) in [ True, 'True', 'true' ] )
async_request = ( ( self.use_async ) and ( kwargs.get( 'async', False ) in [ True, 'True', 'true'] ) )
# Currently, filling the template returns a str object; this requires decoding the string into a
# unicode object within mako templates. What probably should be done is to return the template as
@@ -285,6 +286,7 @@
status = status,
message = message,
use_panels=self.use_panels,
+ advanced_search=self.advanced_search,
show_item_checkboxes = ( self.show_item_checkboxes or
kwargs.get( 'show_item_checkboxes', '' ) in [ 'True', 'true' ] ),
# Pass back kwargs so that grid template can set and use args without
diff -r 60128a7ab160f23cf4fe4cb4231729cfa56369fb -r 44b0d21b4c7744d27d97241375b953abc1da7830 static/scripts/galaxy.grids.js
--- a/static/scripts/galaxy.grids.js
+++ b/static/scripts/galaxy.grids.js
@@ -19,6 +19,7 @@
filters: {},
sort_key: null,
show_item_checkboxes: false,
+ advanced_search: false,
cur_page: 1,
num_pages: 1,
operation: undefined,
@@ -110,7 +111,8 @@
async: this.attributes.async,
sort: this.attributes.sort_key,
page: this.attributes.cur_page,
- show_item_checkboxes: this.attributes.show_item_checkboxes
+ show_item_checkboxes: this.attributes.show_item_checkboxes,
+ advanced_search: this.attributes.advanced_search
};
// Add operation, item_ids only if they have values.
@@ -343,7 +345,7 @@
this.grid.add_filter(name, value, append);
// Add button that displays filter and provides a button to delete it.
- var t = $("<span>" + value + "<a href='javascript:void(0);'><span class='delete-search-icon' /></a></span>");
+ var t = $("<span>" + value + "<a href='javascript:void(0);'><span class='delete-search-icon' /></span></a>");
t.addClass('text-filter-val');
var self = this;
t.click(function() {
@@ -579,15 +581,19 @@
// go to url
go_to: function (inbound, href) {
-
+ // get aysnc status
var async = this.grid.get('async');
this.grid.set('async', false);
+ // get slide status
+ advanced_search = $('#advanced-search').is(":visible");
+ this.grid.set('advanced_search', advanced_search);
+
// get default url
if(!href)
href = this.grid.get('url_base') + "?" + $.param(this.grid.get_url_data());
- // Clear grid of transient request attributes.
+ // clear grid of transient request attributes.
this.grid.set({
operation: undefined,
item_ids: undefined,
diff -r 60128a7ab160f23cf4fe4cb4231729cfa56369fb -r 44b0d21b4c7744d27d97241375b953abc1da7830 static/scripts/packed/galaxy.grids.js
--- a/static/scripts/packed/galaxy.grids.js
+++ b/static/scripts/packed/galaxy.grids.js
@@ -1,1 +1,1 @@
-jQuery.ajaxSettings.traditional=true;define(["mvc/ui"],function(){var a=Backbone.Model.extend({defaults:{url_base:"",async:false,async_ops:[],categorical_filters:[],filters:{},sort_key:null,show_item_checkboxes:false,cur_page:1,num_pages:1,operation:undefined,item_ids:undefined},can_async_op:function(c){return _.indexOf(this.attributes.async_ops,c)!==-1},add_filter:function(g,h,d){if(d){var e=this.attributes.filters[g],c;if(e===null||e===undefined){c=h}else{if(typeof(e)=="string"){if(e=="All"){c=h}else{var f=[];f[0]=e;f[1]=h;c=f}}else{c=e;c.push(h)}}this.attributes.filters[g]=c}else{this.attributes.filters[g]=h}},remove_filter:function(d,g){var c=this.attributes.filters[d];if(c===null||c===undefined){return false}var f=true;if(typeof(c)==="string"){if(c=="All"){f=false}else{delete this.attributes.filters[d]}}else{var e=_.indexOf(c,g);if(e!==-1){c.splice(e,1)}else{f=false}}return f},get_url_data:function(){var c={async:this.attributes.async,sort:this.attributes.sort_key,page:this.attributes.cur_page,show_item_checkboxes:this.attributes.show_item_checkboxes};if(this.attributes.operation){c.operation=this.attributes.operation}if(this.attributes.item_ids){c.id=this.attributes.item_ids}var d=this;_.each(_.pairs(d.attributes.filters),function(e){c["f-"+e[0]]=e[1]});return c}});var b=Backbone.View.extend({grid:null,initialize:function(c){this.init_grid(c);this.init_grid_controls();$("input[type=text]").each(function(){$(this).click(function(){$(this).select()}).keyup(function(){$(this).css("font-style","normal")})})},init_grid:function(c){this.grid=c;this.init_grid_elements()},init_grid_controls:function(){$(".submit-image").each(function(){$(this).mousedown(function(){$(this).addClass("gray-background")});$(this).mouseup(function(){$(this).removeClass("gray-background")})});var c=this;$(".sort-link").each(function(){$(this).click(function(){c.set_sort_condition($(this).attr("sort_key"));return false})});$(".categorical-filter > 
a").each(function(){$(this).click(function(){c.set_categorical_filter($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});$(".text-filter-form").each(function(){$(this).submit(function(){var g=$(this).attr("column_key");var f=$("#input-"+g+"-filter");var h=f.val();f.val("");c.add_filter_condition(g,h,true);return false})});var d=$("#input-tags-filter");if(d.length){d.autocomplete(this.grid.history_tag_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}var e=$("#input-name-filter");if(e.length){e.autocomplete(this.grid.history_name_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}$(".advanced-search-toggle").each(function(){$(this).click(function(){$("#standard-search").slideToggle("fast");$("#advanced-search").slideToggle("fast");return false})})},init_grid_elements:function(){$(".grid").each(function(){var j=$(this).find("input.grid-row-select-checkbox");var i=$(this).find("span.grid-selected-count");var q=function(){i.text($(j).filter(":checked").length)};$(j).each(function(){$(this).change(q)});q()});if($(".community_rating_star").length!==0){$(".community_rating_star").rating({})}var p=this.grid.attributes;var o=this;$(".page-link > a").each(function(){$(this).click(function(){o.set_page($(this).attr("page_num"));return false})});$(".use-inbound").each(function(){$(this).click(function(i){o.execute({href:$(this).attr("href"),inbound:true});return false})});$(".use-outbound").each(function(){$(this).click(function(i){o.execute({href:$(this).attr("href")});return false})});for(var h in p.items){var k=$("#grid-"+h+"-popup");k.off();var d=new PopupMenu(k);var n=p.items[h];for(var g in p.operations){var e=p.operations[g];var l=e.label;var c=n.operation_config[l];var f=n.encode_id;if(c.allowed&&e.allow_popup){var m={html:e.label,href:c.url_args,target:c.target,confirmation_text:e.confirm,inbound:e.inbound};m.func=function(q){q.preventDefault();var j=$(q.target).html();var 
i=this.findItemByHtml(j);o.execute(i)};d.addItem(m)}}}},add_filter_condition:function(f,h,c){if(h===""){return false}this.grid.add_filter(f,h,c);var g=$("<span>"+h+"<a href='javascript:void(0);'><span class='delete-search-icon' /></a></span>");g.addClass("text-filter-val");var e=this;g.click(function(){e.grid.remove_filter(f,h);$(this).remove();e.go_page_one();e.execute()});var d=$("#"+f+"-filtering-criteria");d.append(g);this.go_page_one();this.execute()},set_sort_condition:function(h){var g=this.grid.get("sort_key");var f=h;if(g.indexOf(h)!==-1){if(g.substring(0,1)!=="-"){f="-"+h}else{}}$(".sort-arrow").remove();var e=(f.substring(0,1)=="-")?"↑":"↓";var c=$("<span>"+e+"</span>").addClass("sort-arrow");var d=$("#"+h+"-header");d.append(c);this.grid.set("sort_key",f);this.go_page_one();this.execute()},set_categorical_filter:function(e,g){var d=this.grid.get("categorical_filters")[e],f=this.grid.get("filters")[e];var c=this;$("."+e+"-filter").each(function(){var k=$.trim($(this).text());var i=d[k];var j=i[e];if(j==g){$(this).empty();$(this).addClass("current-filter");$(this).append(k)}else{if(j==f){$(this).empty();var h=$("<a href='#'>"+k+"</a>");h.click(function(){c.set_categorical_filter(e,j)});$(this).removeClass("current-filter");$(this).append(h)}}});this.grid.add_filter(e,g);this.go_page_one();this.execute()},set_page:function(c){var d=this;$(".page-link").each(function(){var i=$(this).attr("id"),g=parseInt(i.split("-")[2],10),e=d.grid.get("cur_page"),h;if(g===c){h=$(this).children().text();$(this).empty();$(this).addClass("inactive-link");$(this).text(h)}else{if(g===e){h=$(this).text();$(this).empty();$(this).removeClass("inactive-link");var f=$("<a href='#'>"+h+"</a>");f.click(function(){d.set_page(g)});$(this).append(f)}}});if(c==="all"){this.grid.set("cur_page",c)}else{this.grid.set("cur_page",parseInt(c,10))}this.execute()},submit_operation:function(f,g){var e=$('input[name="id"]:checked').length;if(!e>0){return false}var d=$(f).val();var 
c=[];$("input[name=id]:checked").each(function(){c.push($(this).val())});this.execute({operation:d,id:c,confirmation_text:g});return true},execute:function(l){var f=null;var e=null;var g=null;var c=null;var k=null;if(l){e=l.href;g=l.operation;f=l.id;c=l.confirmation_text;k=l.inbound;if(e!==undefined&&e.indexOf("operation=")!=-1){var j=e.split("?");if(j.length>1){var i=j[1];var d=i.split("&");for(var h=0;h<d.length;h++){if(d[h].indexOf("operation")!=-1){g=d[h].split("=")[1];g=g.replace(/\+/g," ")}else{if(d[h].indexOf("id")!=-1){f=d[h].split("=")[1]}}}}}}if(g&&f){if(c&&c!=""&&c!="None"){if(!confirm(c)){return false}}g=g.toLowerCase();this.grid.set({operation:g,item_ids:f});if(this.grid.can_async_op(g)){this.update_grid()}else{this.go_to(k,"")}return false}if(e){this.go_to(k,e);return false}if(this.grid.get("async")){this.update_grid()}else{this.go_to(k,"")}return false},go_to:function(f,d){var e=this.grid.get("async");this.grid.set("async",false);if(!d){d=this.grid.get("url_base")+"?"+$.param(this.grid.get_url_data())}this.grid.set({operation:undefined,item_ids:undefined,async:e});if(f){var c=$(".grid-header").closest(".inbound");if(c.length!==0){c.load(d);return}}window.location=d},update_grid:function(){var d=(this.grid.get("operation")?"POST":"GET");$(".loading-elt-overlay").show();var c=this;$.ajax({type:d,url:c.grid.get("url_base"),data:c.grid.get_url_data(),error:function(e){alert("Grid refresh failed")},success:function(f){var e=f.split("*****");$("#grid-table-body").html(e[0]);$("#grid-table-footer").html(e[1]);$("#grid-table-body").trigger("update");$(".loading-elt-overlay").hide();var g=$.trim(e[2]);if(g!==""){$("#grid-message").html(g).show();setTimeout(function(){$("#grid-message").hide()},5000)}},complete:function(){c.grid.set({operation:undefined,item_ids:undefined})}})},check_all_items:function(){var 
c=document.getElementById("check_all"),d=document.getElementsByTagName("input"),f=0,e;if(c.checked===true){for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=true;f++}}}else{for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=false}}}this.init_grid_elements()},go_page_one:function(){var c=this.grid.get("cur_page");if(c!==null&&c!==undefined&&c!=="all"){this.grid.set("cur_page",1)}}});return{Grid:a,GridView:b}});
\ No newline at end of file
+jQuery.ajaxSettings.traditional=true;define(["mvc/ui"],function(){var a=Backbone.Model.extend({defaults:{url_base:"",async:false,async_ops:[],categorical_filters:[],filters:{},sort_key:null,show_item_checkboxes:false,advanced_search:false,cur_page:1,num_pages:1,operation:undefined,item_ids:undefined},can_async_op:function(c){return _.indexOf(this.attributes.async_ops,c)!==-1},add_filter:function(g,h,d){if(d){var e=this.attributes.filters[g],c;if(e===null||e===undefined){c=h}else{if(typeof(e)=="string"){if(e=="All"){c=h}else{var f=[];f[0]=e;f[1]=h;c=f}}else{c=e;c.push(h)}}this.attributes.filters[g]=c}else{this.attributes.filters[g]=h}},remove_filter:function(d,g){var c=this.attributes.filters[d];if(c===null||c===undefined){return false}var f=true;if(typeof(c)==="string"){if(c=="All"){f=false}else{delete this.attributes.filters[d]}}else{var e=_.indexOf(c,g);if(e!==-1){c.splice(e,1)}else{f=false}}return f},get_url_data:function(){var c={async:this.attributes.async,sort:this.attributes.sort_key,page:this.attributes.cur_page,show_item_checkboxes:this.attributes.show_item_checkboxes,advanced_search:this.attributes.advanced_search};if(this.attributes.operation){c.operation=this.attributes.operation}if(this.attributes.item_ids){c.id=this.attributes.item_ids}var d=this;_.each(_.pairs(d.attributes.filters),function(e){c["f-"+e[0]]=e[1]});return c}});var b=Backbone.View.extend({grid:null,initialize:function(c){this.init_grid(c);this.init_grid_controls();$("input[type=text]").each(function(){$(this).click(function(){$(this).select()}).keyup(function(){$(this).css("font-style","normal")})})},init_grid:function(c){this.grid=c;this.init_grid_elements()},init_grid_controls:function(){$(".submit-image").each(function(){$(this).mousedown(function(){$(this).addClass("gray-background")});$(this).mouseup(function(){$(this).removeClass("gray-background")})});var c=this;$(".sort-link").each(function(){$(this).click(function(){c.set_sort_condition($(this).attr("sort_key"));return 
false})});$(".categorical-filter > a").each(function(){$(this).click(function(){c.set_categorical_filter($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});$(".text-filter-form").each(function(){$(this).submit(function(){var g=$(this).attr("column_key");var f=$("#input-"+g+"-filter");var h=f.val();f.val("");c.add_filter_condition(g,h,true);return false})});var d=$("#input-tags-filter");if(d.length){d.autocomplete(this.grid.history_tag_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}var e=$("#input-name-filter");if(e.length){e.autocomplete(this.grid.history_name_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}$(".advanced-search-toggle").each(function(){$(this).click(function(){$("#standard-search").slideToggle("fast");$("#advanced-search").slideToggle("fast");return false})})},init_grid_elements:function(){$(".grid").each(function(){var j=$(this).find("input.grid-row-select-checkbox");var i=$(this).find("span.grid-selected-count");var q=function(){i.text($(j).filter(":checked").length)};$(j).each(function(){$(this).change(q)});q()});if($(".community_rating_star").length!==0){$(".community_rating_star").rating({})}var p=this.grid.attributes;var o=this;$(".page-link > a").each(function(){$(this).click(function(){o.set_page($(this).attr("page_num"));return false})});$(".use-inbound").each(function(){$(this).click(function(i){o.execute({href:$(this).attr("href"),inbound:true});return false})});$(".use-outbound").each(function(){$(this).click(function(i){o.execute({href:$(this).attr("href")});return false})});for(var h in p.items){var k=$("#grid-"+h+"-popup");k.off();var d=new PopupMenu(k);var n=p.items[h];for(var g in p.operations){var e=p.operations[g];var l=e.label;var c=n.operation_config[l];var f=n.encode_id;if(c.allowed&&e.allow_popup){var 
m={html:e.label,href:c.url_args,target:c.target,confirmation_text:e.confirm,inbound:e.inbound};m.func=function(q){q.preventDefault();var j=$(q.target).html();var i=this.findItemByHtml(j);o.execute(i)};d.addItem(m)}}}},add_filter_condition:function(f,h,c){if(h===""){return false}this.grid.add_filter(f,h,c);var g=$("<span>"+h+"<a href='javascript:void(0);'><span class='delete-search-icon' /></span></a>");g.addClass("text-filter-val");var e=this;g.click(function(){e.grid.remove_filter(f,h);$(this).remove();e.go_page_one();e.execute()});var d=$("#"+f+"-filtering-criteria");d.append(g);this.go_page_one();this.execute()},set_sort_condition:function(h){var g=this.grid.get("sort_key");var f=h;if(g.indexOf(h)!==-1){if(g.substring(0,1)!=="-"){f="-"+h}else{}}$(".sort-arrow").remove();var e=(f.substring(0,1)=="-")?"↑":"↓";var c=$("<span>"+e+"</span>").addClass("sort-arrow");var d=$("#"+h+"-header");d.append(c);this.grid.set("sort_key",f);this.go_page_one();this.execute()},set_categorical_filter:function(e,g){var d=this.grid.get("categorical_filters")[e],f=this.grid.get("filters")[e];var c=this;$("."+e+"-filter").each(function(){var k=$.trim($(this).text());var i=d[k];var j=i[e];if(j==g){$(this).empty();$(this).addClass("current-filter");$(this).append(k)}else{if(j==f){$(this).empty();var h=$("<a href='#'>"+k+"</a>");h.click(function(){c.set_categorical_filter(e,j)});$(this).removeClass("current-filter");$(this).append(h)}}});this.grid.add_filter(e,g);this.go_page_one();this.execute()},set_page:function(c){var d=this;$(".page-link").each(function(){var i=$(this).attr("id"),g=parseInt(i.split("-")[2],10),e=d.grid.get("cur_page"),h;if(g===c){h=$(this).children().text();$(this).empty();$(this).addClass("inactive-link");$(this).text(h)}else{if(g===e){h=$(this).text();$(this).empty();$(this).removeClass("inactive-link");var f=$("<a 
href='#'>"+h+"</a>");f.click(function(){d.set_page(g)});$(this).append(f)}}});if(c==="all"){this.grid.set("cur_page",c)}else{this.grid.set("cur_page",parseInt(c,10))}this.execute()},submit_operation:function(f,g){var e=$('input[name="id"]:checked').length;if(!e>0){return false}var d=$(f).val();var c=[];$("input[name=id]:checked").each(function(){c.push($(this).val())});this.execute({operation:d,id:c,confirmation_text:g});return true},execute:function(l){var f=null;var e=null;var g=null;var c=null;var k=null;if(l){e=l.href;g=l.operation;f=l.id;c=l.confirmation_text;k=l.inbound;if(e!==undefined&&e.indexOf("operation=")!=-1){var j=e.split("?");if(j.length>1){var i=j[1];var d=i.split("&");for(var h=0;h<d.length;h++){if(d[h].indexOf("operation")!=-1){g=d[h].split("=")[1];g=g.replace(/\+/g," ")}else{if(d[h].indexOf("id")!=-1){f=d[h].split("=")[1]}}}}}}if(g&&f){if(c&&c!=""&&c!="None"){if(!confirm(c)){return false}}g=g.toLowerCase();this.grid.set({operation:g,item_ids:f});if(this.grid.can_async_op(g)){this.update_grid()}else{this.go_to(k,"")}return false}if(e){this.go_to(k,e);return false}if(this.grid.get("async")){this.update_grid()}else{this.go_to(k,"")}return false},go_to:function(f,d){var e=this.grid.get("async");this.grid.set("async",false);advanced_search=$("#advanced-search").is(":visible");this.grid.set("advanced_search",advanced_search);if(!d){d=this.grid.get("url_base")+"?"+$.param(this.grid.get_url_data())}this.grid.set({operation:undefined,item_ids:undefined,async:e});if(f){var c=$(".grid-header").closest(".inbound");if(c.length!==0){c.load(d);return}}window.location=d},update_grid:function(){var d=(this.grid.get("operation")?"POST":"GET");$(".loading-elt-overlay").show();var c=this;$.ajax({type:d,url:c.grid.get("url_base"),data:c.grid.get_url_data(),error:function(e){alert("Grid refresh failed")},success:function(f){var 
e=f.split("*****");$("#grid-table-body").html(e[0]);$("#grid-table-footer").html(e[1]);$("#grid-table-body").trigger("update");$(".loading-elt-overlay").hide();var g=$.trim(e[2]);if(g!==""){$("#grid-message").html(g).show();setTimeout(function(){$("#grid-message").hide()},5000)}},complete:function(){c.grid.set({operation:undefined,item_ids:undefined})}})},check_all_items:function(){var c=document.getElementById("check_all"),d=document.getElementsByTagName("input"),f=0,e;if(c.checked===true){for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=true;f++}}}else{for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=false}}}this.init_grid_elements()},go_page_one:function(){var c=this.grid.get("cur_page");if(c!==null&&c!==undefined&&c!=="all"){this.grid.set("cur_page",1)}}});return{Grid:a,GridView:b}});
\ No newline at end of file
diff -r 60128a7ab160f23cf4fe4cb4231729cfa56369fb -r 44b0d21b4c7744d27d97241375b953abc1da7830 templates/grid_base.mako
--- a/templates/grid_base.mako
+++ b/templates/grid_base.mako
@@ -1,5 +1,7 @@
<%!
- from galaxy.web.framework.helpers.grids import TextColumn
+ from galaxy.web.framework.helpers.grids import TextColumn, StateColumn, GridColumnFilter
+ from galaxy.web.framework.helpers import iff
+
import galaxy.util
def inherit(context):
if context.get('use_panels'):
@@ -12,16 +14,16 @@
return '/base.mako'
%><%inherit file="${inherit(context)}"/>
-<%namespace file="./grid_common.mako" import="*" /><%namespace file="/refresh_frames.mako" import="handle_refresh_frames" /><%namespace file="/display_common.mako" import="get_class_plural" />
-<%def name="load(embedded = False)">
+<%def name="load(embedded = False, insert = None)"><%
+ self.init(insert)
+ self.stylesheets()
+ self.javascripts()
self.grid_javascripts()
if embedded:
- self.init()
- self.stylesheets()
self.render_grid_header( grid, False )
self.render_grid_table( grid, show_item_checkboxes=show_item_checkboxes )
else:
@@ -30,7 +32,7 @@
%></%def>
-<%def name="init()">
+<%def name="init(insert=None)"><%
self.has_left_panel = False
self.has_right_panel = False
@@ -62,9 +64,12 @@
'use_paging' : grid.use_paging,
'legend' : grid.legend,
'current_item_id' : False,
- 'use_panels' : context.get('use_panels')
+ 'use_panels' : context.get('use_panels'),
+ 'insert' : insert,
+ 'default_filter_dict' : default_filter_dict,
+ 'advanced_search' : advanced_search
}
-
+
## add current item if exists
if current_item:
self.grid_config['current_item_id'] = current_item.id
@@ -96,6 +101,8 @@
'label_id_prefix' : column.label_id_prefix,
'sortable' : column.sortable,
'label' : column.label,
+ 'filterable' : column.filterable,
+ 'is_text' : isinstance(column, TextColumn),
'href' : href,
'extra' : extra
})
@@ -309,7 +316,9 @@
%endif
</ul>
%endif
-
+ %if self.grid_config['insert']:
+ ${self.grid_config['insert']}
+ %endif
${render_grid_filters( grid )}
</div></%def>
@@ -583,3 +592,191 @@
%endif
</%def>
+## Print grid search/filtering UI.
+<%def name="render_grid_filters( grid, render_advanced_search=True )">
+ <%
+ default_filter_dict = self.grid_config['default_filter_dict']
+ filters = self.grid_config['filters']
+
+ # Show advanced search if flag set or if there are filters for advanced search fields.
+ advanced_search_display = "none"
+
+ if self.grid_config['advanced_search']:
+ advanced_search_display = "block"
+
+ for column in self.grid_config['columns']:
+ if column['filterable'] == "advanced":
+ ## Show div if current filter has value that is different from the default filter.
+ column_key = column['key']
+ if column_key in filters and column_key in default_filter_dict and \
+ filters[column_key] != default_filter_dict[column_key]:
+ advanced_search_display = "block"
+
+ # do not show standard search if showing adv.
+ standard_search_display = "block"
+ if advanced_search_display == "block":
+ standard_search_display = "none"
+ %>
+ ## Standard search.
+ <div id="standard-search" style="display: ${standard_search_display};">
+ <table>
+ <tr><td style="padding: 0;">
+ <table>
+ %for column in self.grid_config['columns']:
+ %if column['filterable'] == "standard":
+ ${render_grid_column_filter( grid, column )}
+ %endif
+ %endfor
+ </table>
+ </td></tr>
+ <tr><td>
+ ## Only show advanced search if there are filterable columns.
+ <%
+ show_advanced_search_link = False
+ if render_advanced_search:
+ for column in self.grid_config['columns']:
+ if column['filterable'] == "advanced":
+ show_advanced_search_link = True
+ break
+ endif
+ %>
+ %if show_advanced_search_link:
+ <a href="" class="advanced-search-toggle">Advanced Search</a>
+ %endif
+ </td></tr>
+ </table>
+ </div>
+
+ ## Advanced search.
+ <div id="advanced-search" style="display: ${advanced_search_display}; margin-top: 5px; border: 1px solid #ccc;">
+ <table>
+ <tr><td style="text-align: left" colspan="100">
+ <a href="" class="advanced-search-toggle">Close Advanced Search</a>
+ </td></tr>
+ %for column in self.grid_config['columns']:
+ %if column['filterable'] == "advanced":
+ ## Show div if current filter has value that is different from the default filter.
+ <%
+ column_key = column['key']
+ %>
+ % if column_key in filters and column_key in default_filter_dict and \
+ filters[column_key] != default_filter_dict[column_key]:
+ <script type="text/javascript">
+ $('#advanced-search').css("display", "block");
+ </script>
+ % endif
+
+ ${render_grid_column_filter( grid, column )}
+ %endif
+ %endfor
+ </table>
+ </div>
+</%def>
+
+## Render a filter UI for a grid column. Filter is rendered as a table row.
+<%def name="render_grid_column_filter( grid, column )">
+ <tr>
+ <%
+ default_filter_dict = self.grid_config['default_filter_dict']
+ filters = self.grid_config['filters']
+ column_label = column['label']
+ column_key = column['key']
+ if column['filterable'] == "advanced":
+ column_label = column_label.lower()
+ %>
+ %if column['filterable'] == "advanced":
+ <td align="left" style="padding-left: 10px">${column_label}:</td>
+ %endif
+ <td style="padding: 0;">
+ %if column['is_text']:
+ <form class="text-filter-form" column_key="${column_key}" action="${url(dict())}" method="get" >
+ ## Carry forward filtering criteria with hidden inputs.
+ %for temp_column in self.grid_config['columns']:
+ %if temp_column['key'] in filters:
+ <% value = filters[ temp_column['key'] ] %>
+ %if value != "All":
+ <%
+ if temp_column['is_text']:
+ value = h.to_json_string( value )
+ %>
+ <input type="hidden" id="${temp_column['key']}" name="f-${temp_column['key']}" value='${value}'/>
+ %endif
+ %endif
+ %endfor
+ ## Print current filtering criteria and links to delete.
+ <span id="${column_key}-filtering-criteria">
+ %if column_key in filters:
+ <% column_filter = filters[column_key] %>
+ %if isinstance( column_filter, basestring ):
+ %if column_filter != "All":
+ <span class='text-filter-val'>
+ ${filters[column_key]}
+ <% filter_all = GridColumnFilter( "", { column_key : "All" } ) %>
+ <a href="${url(filter_all.get_url_args())}"><span class="delete-search-icon" /></a>
+ </span>
+ %endif
+ %elif isinstance( column_filter, list ):
+ %for i, filter in enumerate( column_filter ):
+ <span class='text-filter-val'>${filter}
+ <%
+ new_filter = list( column_filter )
+ del new_filter[ i ]
+ new_column_filter = GridColumnFilter( "", { column_key : h.to_json_string( new_filter ) } )
+ %>
+ <a href="${url(new_column_filter.get_url_args())}"><span class="delete-search-icon" /></a>
+ </span>
+ %endfor
+ %endif
+ %endif
+ </span>
+ ## Print input field for column.
+ <span class="search-box">
+ <%
+ # Set value, size of search input field. Minimum size is 20 characters.
+ value = iff( column['filterable'] == "standard", column['label'].lower(), "")
+ size = len( value )
+ if size < 20:
+ size = 20
+ # +4 to account for search icon/button.
+ size = size + 4
+ %>
+ <input class="search-box-input" id="input-${column_key}-filter" name="f-${column_key}" type="text" value="${value}" size="${size}"/>
+ <button class="submit-image" type="submit" title='Search'><span style="display: none;"></button>
+ </span>
+ </form>
+ %else:
+ <span id="${column_key}-filtering-criteria">
+ <%
+ seperator = False
+ %>
+ %for filter_label in self.grid_config['categorical_filters'][column_key]:
+ <%
+ # get filter
+ filter = self.grid_config['categorical_filters'][column_key][filter_label]
+
+ # each filter will have only a single argument, so get that single argument
+ for key in filter:
+ filter_key = key
+ filter_arg = filter[key]
+ %>
+ %if seperator:
+ |
+ %endif
+
+ <%
+ seperator = True
+ %>
+ %if column_key in cur_filter_dict and column_key in filter and cur_filter_dict[column_key] == filter_arg:
+ <span class="categorical-filter ${column_key}-filter current-filter">${filter_label}</span>
+ %else:
+ <span class="categorical-filter ${column_key}-filter">
+ <a href="" filter_key="${filter_key}" filter_val="${filter_arg}">${filter_label}</a>
+ </span>
+ %endif
+ %endfor
+ </span>
+ %endif
+ </td>
+ </tr>
+</%def>
+
diff -r 60128a7ab160f23cf4fe4cb4231729cfa56369fb -r 44b0d21b4c7744d27d97241375b953abc1da7830 templates/grid_common.mako
--- a/templates/grid_common.mako
+++ /dev/null
@@ -1,187 +0,0 @@
-<%!
- from galaxy.web.framework.helpers.grids import TextColumn, StateColumn, GridColumnFilter
- from galaxy.web.framework.helpers import iff
-%>
-
-## Render a filter UI for a grid column. Filter is rendered as a table row.
-<%def name="render_grid_column_filter( grid, column )">
- <tr>
- <%
- column_label = column.label
- if column.filterable == "advanced":
- column_label = column_label.lower()
- %>
- %if column.filterable == "advanced":
- <td align="left" style="padding-left: 10px">${column_label}:</td>
- %endif
- <td style="padding: 0;">
- %if isinstance(column, TextColumn):
- <form class="text-filter-form" column_key="${column.key}" action="${url(dict())}" method="get" >
- ## Carry forward filtering criteria with hidden inputs.
- %for temp_column in grid.columns:
- %if temp_column.key in cur_filter_dict:
- <% value = cur_filter_dict[ temp_column.key ] %>
- %if value != "All":
- <%
- if isinstance( temp_column, TextColumn ):
- value = h.to_json_string( value )
- %>
- <input type="hidden" id="${temp_column.key}" name="f-${temp_column.key}" value='${value}'/>
- %endif
- %endif
- %endfor
- ## Print current filtering criteria and links to delete.
- <span id="${column.key}-filtering-criteria">
- %if column.key in cur_filter_dict:
- <% column_filter = cur_filter_dict[column.key] %>
- %if isinstance( column_filter, basestring ):
- %if column_filter != "All":
- <span class='text-filter-val'>
- ${cur_filter_dict[column.key]}
- <% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
- <a href="${url(filter_all.get_url_args())}"><span class="delete-search-icon" /></a>
- </span>
- %endif
- %elif isinstance( column_filter, list ):
- %for i, filter in enumerate( column_filter ):
- %if i > 0:
- ,
- %endif
- <span class='text-filter-val'>${filter}
- <%
- new_filter = list( column_filter )
- del new_filter[ i ]
- new_column_filter = GridColumnFilter( "", { column.key : h.to_json_string( new_filter ) } )
- %>
- <a href="${url(new_column_filter.get_url_args())}"><span class="delete-search-icon" /></a>
- </span>
- %endfor
- %endif
- %endif
- </span>
- ## Print input field for column.
- <span class="search-box">
- <%
- # Set value, size of search input field. Minimum size is 20 characters.
- value = iff( column.filterable == "standard", column.label.lower(), "")
- size = len( value )
- if size < 20:
- size = 20
- # +4 to account for search icon/button.
- size = size + 4
- %>
- <input class="search-box-input" id="input-${column.key}-filter" name="f-${column.key}" type="text" value="${value}" size="${size}"/>
- <button class="submit-image" type="submit" title='Search'><span style="display: none;"></button>
- </span>
- </form>
- %else:
- <span id="${column.key}-filtering-criteria">
- %for i, filter in enumerate( column.get_accepted_filters() ):
- <%
- # HACK: we know that each filter will have only a single argument, so get that single argument.
- for key, arg in filter.args.items():
- filter_key = key
- filter_arg = arg
- %>
- %if i > 0:
- |
- %endif
- %if column.key in cur_filter_dict and column.key in filter.args and cur_filter_dict[column.key] == filter.args[column.key]:
- <span class="categorical-filter ${column.key}-filter current-filter">${filter.label}</span>
- %else:
- <span class="categorical-filter ${column.key}-filter">
- <a href="${url(filter.get_url_args())}" filter_key="${filter_key}" filter_val="${filter_arg}">${filter.label}</a>
- </span>
- %endif
- %endfor
- </span>
- %endif
- </td>
- </tr>
-</%def>
-
-## Print grid search/filtering UI.
-<%def name="render_grid_filters( grid, render_advanced_search=True )">
- <%
- # Show advanced search if flag set or if there are filters for advanced search fields.
- advanced_search_display = "none"
- if 'advanced-search' in kwargs and kwargs['advanced-search'] in ['True', 'true']:
- advanced_search_display = "block"
-
- for column in grid.columns:
- if column.filterable == "advanced":
- ## Show div if current filter has value that is different from the default filter.
- if column.key in cur_filter_dict and column.key in default_filter_dict and \
- cur_filter_dict[column.key] != default_filter_dict[column.key]:
- advanced_search_display = "block"
-
- # do not show standard search if showing adv.
- standard_search_display = "block"
- if advanced_search_display == "block":
- standard_search_display = "none"
- %>
- ## Standard search.
- <div id="standard-search" style="display: ${standard_search_display};">
- <table>
- <tr><td style="padding: 0;">
- <table>
- %for column in grid.columns:
- %if column.filterable == "standard":
- ${render_grid_column_filter( grid, column )}
- %endif
- %endfor
- </table>
- </td></tr>
- <tr><td>
- ## Clear the standard search.
- ##|
- ##<% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
- ##<a href="${url(filter_all.get_url_args())}">Clear All</a>
-
- ## Only show advanced search if there are filterable columns.
- <%
- show_advanced_search_link = False
- if render_advanced_search:
- for column in grid.columns:
- if column.filterable == "advanced":
- show_advanced_search_link = True
- break
- endif
- %>
- %if show_advanced_search_link:
- <% args = { "advanced-search" : True } %>
- <a href="${url(args)}" class="advanced-search-toggle">Advanced Search</a>
- %endif
- </td></tr>
- </table>
- </div>
-
- ## Advanced search.
- <div id="advanced-search" style="display: ${advanced_search_display}; margin-top: 5px; border: 1px solid #ccc;">
- <table>
- <tr><td style="text-align: left" colspan="100">
- <% args = { "advanced-search" : False } %>
- <a href="${url(args)}" class="advanced-search-toggle">Close Advanced Search</a>
- ## Link to clear all filters.
- ##|
- ##<%
- ## no_filter = GridColumnFilter("Clear All", default_filter_dict)
- ##%>
- ##<a href="${url(no_filter.get_url_args())}">${no_filter.label}</a>
- </td></tr>
- %for column in grid.columns:
- %if column.filterable == "advanced":
- ## Show div if current filter has value that is different from the default filter.
- %if column.key in cur_filter_dict and column.key in default_filter_dict and \
- cur_filter_dict[column.key] != default_filter_dict[column.key]:
- <script type="text/javascript">
- $('#advanced-search').css("display", "block");
- </script>
- %endif
-
- ${render_grid_column_filter( grid, column )}
- %endif
- %endfor
- </table>
- </div>
-</%def>
diff -r 60128a7ab160f23cf4fe4cb4231729cfa56369fb -r 44b0d21b4c7744d27d97241375b953abc1da7830 templates/webapps/tool_shed/category/grid.mako
--- a/templates/webapps/tool_shed/category/grid.mako
+++ b/templates/webapps/tool_shed/category/grid.mako
@@ -1,66 +1,13 @@
-<%namespace file="/display_common.mako" import="render_message" />
-<%namespace file="/grid_base.mako" import="*" />
-<%namespace file="/grid_common.mako" import="*" />
-<%inherit file="/grid_base.mako" />
+<%namespace name="grid_base" file="/grid_base.mako" import="*" />
+<%namespace name="grid_common" file="../common/grid_common.mako" import="*" />
-## Render grid header.
-## TODO: This is very similar to this directory's valid_grid.mako, so see if we can re-use this code in a better way.
-<%def name="render_grid_header( grid, repo_grid, render_title=True)">
- ${init()}
- ${grid_javascripts()}
- <div class="grid-header">
- %if render_title:
- ${grid_title()}
- %endif
- %if grid.global_actions:
- <ul class="manage-table-actions">
- %if len( grid.global_actions ) < 4:
- %for action in grid.global_actions:
- <li><a class="action-button" href="${h.url_for( **action.url_args )}">${action.label | h}</a></li>
- %endfor
- %else:
- <li><a class="action-button" id="action-8675309-popup" class="menubutton">Actions</a></li>
- <div popupmenu="action-8675309-popup">
- %for action in grid.global_actions:
- <a class="action-button" href="${h.url_for( **action.url_args )}">${action.label | h}</a>
- %endfor
- </div>
- %endif
- </ul>
- %endif
- ${render_grid_filters( repo_grid, render_advanced_search=False )}
- </div>
+<%def name="insert()">
+<%
+ from tool_shed.grids.repository_grids import RepositoryGrid
+ repo_grid = RepositoryGrid()
+ grid_common.render_grid_filters(repo_grid)
+%></%def>
-<%def name="make_grid( grid, repo_grid )">
- <div class="loading-elt-overlay"></div>
- <table>
- <tr>
- <td width="75%">${self.render_grid_header( grid, repo_grid )}</td>
- <td></td>
- <td></td>
- </tr>
- <tr>
- <td width="100%" id="grid-message" valign="top">${render_message( message, status )}</td>
- <td></td>
- <td></td>
- </tr>
- </table>
- ${render_grid_table( grid, show_item_checkboxes )}
-</%def>
-
-<%def name="grid_body()">
- <%
- from tool_shed.grids.repository_grids import RepositoryGrid
- repo_grid = RepositoryGrid()
- %>
- ${self.make_grid( grid, repo_grid )}
-</%def>
-
-<%def name="center_panel()">
- <div style="overflow: auto; height: 100%">
- <div class="page-container" style="padding: 10px;">
- ${self.grid_body( grid )}
- </div>
- </div>
-</%def>
+<!DOCTYPE HTML>
+${grid_base.load(False, capture(self.insert))}
\ No newline at end of file
diff -r 60128a7ab160f23cf4fe4cb4231729cfa56369fb -r 44b0d21b4c7744d27d97241375b953abc1da7830 templates/webapps/tool_shed/category/valid_grid.mako
--- a/templates/webapps/tool_shed/category/valid_grid.mako
+++ b/templates/webapps/tool_shed/category/valid_grid.mako
@@ -1,65 +1,13 @@
-<%namespace file="/display_common.mako" import="render_message" />
-<%namespace file="/grid_base.mako" import="*" />
-<%namespace file="/grid_common.mako" import="*" />
-<%inherit file="/grid_base.mako" />
+<%namespace name="grid_base" file="/grid_base.mako" import="*" />
+<%namespace name="grid_common" file="../common/grid_common.mako" import="*" />
-## Render grid header.
-<%def name="render_grid_header( grid, repo_grid, render_title=True)">
- ${init()}
- ${grid_javascripts()}
- <div class="grid-header">
- %if render_title:
- ${grid_title()}
- %endif
- %if grid.global_actions:
- <ul class="manage-table-actions">
- %if len( grid.global_actions ) < 4:
- %for action in grid.global_actions:
- <li><a class="action-button" href="${h.url_for( **action.url_args )}">${action.label | h}</a></li>
- %endfor
- %else:
- <li><a class="action-button" id="action-8675309-popup" class="menubutton">Actions</a></li>
- <div popupmenu="action-8675309-popup">
- %for action in grid.global_actions:
- <a class="action-button" href="${h.url_for( **action.url_args )}">${action.label | h}</a>
- %endfor
- </div>
- %endif
- </ul>
- %endif
- ${render_grid_filters( repo_grid, render_advanced_search=False )}
- </div>
+<%def name="insert()">
+<%
+ from tool_shed.grids.repository_grids import ValidRepositoryGrid
+ repo_grid = ValidRepositoryGrid()
+ grid_common.render_grid_filters(repo_grid)
+%></%def>
-<%def name="make_grid( grid, repo_grid )">
- <div class="loading-elt-overlay"></div>
- <table>
- <tr>
- <td width="75%">${self.render_grid_header( grid, repo_grid )}</td>
- <td></td>
- <td></td>
- </tr>
- <tr>
- <td width="100%" id="grid-message" valign="top">${render_message( message, status )}</td>
- <td></td>
- <td></td>
- </tr>
- </table>
- ${render_grid_table( grid, show_item_checkboxes )}
-</%def>
-
-<%def name="grid_body()">
- <%
- from tool_shed.grids.repository_grids import ValidRepositoryGrid
- repo_grid = ValidRepositoryGrid()
- %>
- ${self.make_grid( grid, repo_grid )}
-</%def>
-
-<%def name="center_panel()">
- <div style="overflow: auto; height: 100%">
- <div class="page-container" style="padding: 10px;">
- ${self.grid_body( grid )}
- </div>
- </div>
-</%def>
+<!DOCTYPE HTML>
+${grid_base.load(False, capture(self.insert))}
\ No newline at end of file
diff -r 60128a7ab160f23cf4fe4cb4231729cfa56369fb -r 44b0d21b4c7744d27d97241375b953abc1da7830 templates/webapps/tool_shed/common/grid_common.mako
--- /dev/null
+++ b/templates/webapps/tool_shed/common/grid_common.mako
@@ -0,0 +1,187 @@
+<%!
+ from galaxy.web.framework.helpers.grids import TextColumn, StateColumn, GridColumnFilter
+ from galaxy.web.framework.helpers import iff
+%>
+
+## Render a filter UI for a grid column. Filter is rendered as a table row.
+<%def name="render_grid_column_filter( grid, column )">
+ <tr>
+ <%
+ column_label = column.label
+ if column.filterable == "advanced":
+ column_label = column_label.lower()
+ %>
+ %if column.filterable == "advanced":
+ <td align="left" style="padding-left: 10px">${column_label}:</td>
+ %endif
+ <td style="padding: 0;">
+ %if isinstance(column, TextColumn):
+ <form class="text-filter-form" column_key="${column.key}" action="${url(dict())}" method="get" >
+ ## Carry forward filtering criteria with hidden inputs.
+ %for temp_column in grid.columns:
+ %if temp_column.key in cur_filter_dict:
+ <% value = cur_filter_dict[ temp_column.key ] %>
+ %if value != "All":
+ <%
+ if isinstance( temp_column, TextColumn ):
+ value = h.to_json_string( value )
+ %>
+ <input type="hidden" id="${temp_column.key}" name="f-${temp_column.key}" value='${value}'/>
+ %endif
+ %endif
+ %endfor
+ ## Print current filtering criteria and links to delete.
+ <span id="${column.key}-filtering-criteria">
+ %if column.key in cur_filter_dict:
+ <% column_filter = cur_filter_dict[column.key] %>
+ %if isinstance( column_filter, basestring ):
+ %if column_filter != "All":
+ <span class='text-filter-val'>
+ ${cur_filter_dict[column.key]}
+ <% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
+ <a href="${url(filter_all.get_url_args())}"><span class="delete-search-icon" /></a>
+ </span>
+ %endif
+ %elif isinstance( column_filter, list ):
+ %for i, filter in enumerate( column_filter ):
+ %if i > 0:
+ ,
+ %endif
+ <span class='text-filter-val'>${filter}
+ <%
+ new_filter = list( column_filter )
+ del new_filter[ i ]
+ new_column_filter = GridColumnFilter( "", { column.key : h.to_json_string( new_filter ) } )
+ %>
+ <a href="${url(new_column_filter.get_url_args())}"><span class="delete-search-icon" /></a>
+ </span>
+ %endfor
+ %endif
+ %endif
+ </span>
+ ## Print input field for column.
+ <span class="search-box">
+ <%
+ # Set value, size of search input field. Minimum size is 20 characters.
+ value = iff( column.filterable == "standard", column.label.lower(), "")
+ size = len( value )
+ if size < 20:
+ size = 20
+ # +4 to account for search icon/button.
+ size = size + 4
+ %>
+ <input class="search-box-input" id="input-${column.key}-filter" name="f-${column.key}" type="text" value="${value}" size="${size}"/>
+ <button class="submit-image" type="submit" title='Search'><span style="display: none;"></button>
+ </span>
+ </form>
+ %else:
+ <span id="${column.key}-filtering-criteria">
+ %for i, filter in enumerate( column.get_accepted_filters() ):
+ <%
+ # HACK: we know that each filter will have only a single argument, so get that single argument.
+ for key, arg in filter.args.items():
+ filter_key = key
+ filter_arg = arg
+ %>
+ %if i > 0:
+ |
+ %endif
+ %if column.key in cur_filter_dict and column.key in filter.args and cur_filter_dict[column.key] == filter.args[column.key]:
+ <span class="categorical-filter ${column.key}-filter current-filter">${filter.label}</span>
+ %else:
+ <span class="categorical-filter ${column.key}-filter">
+ <a href="${url(filter.get_url_args())}" filter_key="${filter_key}" filter_val="${filter_arg}">${filter.label}</a>
+ </span>
+ %endif
+ %endfor
+ </span>
+ %endif
+ </td>
+ </tr>
+</%def>
+
+## Print grid search/filtering UI.
+<%def name="render_grid_filters( grid, render_advanced_search=True )">
+ <%
+ # Show advanced search if flag set or if there are filters for advanced search fields.
+ advanced_search_display = "none"
+ if 'advanced-search' in kwargs and kwargs['advanced-search'] in ['True', 'true']:
+ advanced_search_display = "block"
+
+ for column in grid.columns:
+ if column.filterable == "advanced":
+ ## Show div if current filter has value that is different from the default filter.
+ if column.key in cur_filter_dict and column.key in default_filter_dict and \
+ cur_filter_dict[column.key] != default_filter_dict[column.key]:
+ advanced_search_display = "block"
+
+ # do not show standard search if showing adv.
+ standard_search_display = "block"
+ if advanced_search_display == "block":
+ standard_search_display = "none"
+ %>
+ ## Standard search.
+ <div id="standard-search" style="display: ${standard_search_display};">
+ <table>
+ <tr><td style="padding: 0;">
+ <table>
+ %for column in grid.columns:
+ %if column.filterable == "standard":
+ ${render_grid_column_filter( grid, column )}
+ %endif
+ %endfor
+ </table>
+ </td></tr>
+ <tr><td>
+ ## Clear the standard search.
+ ##|
+ ##<% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
+ ##<a href="${url(filter_all.get_url_args())}">Clear All</a>
+
+ ## Only show advanced search if there are filterable columns.
+ <%
+ show_advanced_search_link = False
+ if render_advanced_search:
+ for column in grid.columns:
+ if column.filterable == "advanced":
+ show_advanced_search_link = True
+ break
+ endif
+ %>
+ %if show_advanced_search_link:
+ <% args = { "advanced-search" : True } %>
+ <a href="${url(args)}" class="advanced-search-toggle">Advanced Search</a>
+ %endif
+ </td></tr>
+ </table>
+ </div>
+
+ ## Advanced search.
+ <div id="advanced-search" style="display: ${advanced_search_display}; margin-top: 5px; border: 1px solid #ccc;">
+ <table>
+ <tr><td style="text-align: left" colspan="100">
+ <% args = { "advanced-search" : False } %>
+ <a href="${url(args)}" class="advanced-search-toggle">Close Advanced Search</a>
+ ## Link to clear all filters.
+ ##|
+ ##<%
+ ## no_filter = GridColumnFilter("Clear All", default_filter_dict)
+ ##%>
+ ##<a href="${url(no_filter.get_url_args())}">${no_filter.label}</a>
+ </td></tr>
+ %for column in grid.columns:
+ %if column.filterable == "advanced":
+ ## Show div if current filter has value that is different from the default filter.
+ %if column.key in cur_filter_dict and column.key in default_filter_dict and \
+ cur_filter_dict[column.key] != default_filter_dict[column.key]:
+ <script type="text/javascript">
+ $('#advanced-search').css("display", "block");
+ </script>
+ %endif
+
+ ${render_grid_column_filter( grid, column )}
+ %endif
+ %endfor
+ </table>
+ </div>
+</%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes for rendering tools and invalid tools in the tools container.
by commits-noreply@bitbucket.org 04 Dec '13
by commits-noreply@bitbucket.org 04 Dec '13
04 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/60128a7ab160/
Changeset: 60128a7ab160
User: greg
Date: 2013-12-04 21:48:16
Summary: Fixes for rendering tools and invalid tools in the tools container.
Affected #: 1 file
diff -r dc3944242df936a5026988aaf11f0a90fbe7b115 -r 60128a7ab160f23cf4fe4cb4231729cfa56369fb lib/tool_shed/util/container_util.py
--- a/lib/tool_shed/util/container_util.py
+++ b/lib/tool_shed/util/container_util.py
@@ -892,7 +892,7 @@
def build_tools_folder( trans, folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools' ):
"""Return a folder hierarchy containing valid tools."""
if tool_dicts:
- tool_id = 0
+ container_object_tool_id = 0
folder_id += 1
tools_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
@@ -901,8 +901,8 @@
folder.description = 'click the name to inspect the tool metadata'
tools_root_folder.folders.append( folder )
# Insert a header row.
- tool_id += 1
- tool = Tool( id=tool_id,
+ container_object_tool_id += 1
+ tool = Tool( id=container_object_tool_id,
tool_config='',
tool_id='',
name='Name',
@@ -922,21 +922,34 @@
repository_id = None
repository_installation_status = None
for tool_dict in tool_dicts:
- tool_id += 1
- if 'requirements' in tool_dict:
- requirements = tool_dict[ 'requirements' ]
+ if not isinstance( tool_dict, dict ):
+ # Due to some previous bug (hopefully not current) invalid tool strings may be included in the received
+ # list of tool_dicts. For example, the picard repository metadata has 2 invalid tools in the received
+ # list of supposedly valid tools: 'rgPicardASMetrics.xml', 'rgPicardGCBiasMetrics.xml'.
+ continue
+ container_object_tool_id += 1
+ requirements = tool_dict.get( 'requirements', None )
+ if requirements is not None:
+ # 'requirements': [{'version': '1.56.0', 'type': 'package', 'name': 'picard'}],
requirements_str = ''
for requirement_dict in requirements:
- requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
+ requirement_name = str( requirement_dict.get( 'name', 'unknown' ) )
+ requirement_type = str( requirement_dict.get( 'type', 'unknown' ) )
+ requirements_str += '%s (%s), ' % ( requirement_name, requirement_type )
requirements_str = requirements_str.rstrip( ', ' )
else:
requirements_str = 'none'
- tool = Tool( id=tool_id,
- tool_config=tool_dict[ 'tool_config' ],
- tool_id=tool_dict[ 'id' ],
- name=tool_dict[ 'name' ],
- description=tool_dict[ 'description' ],
- version=tool_dict[ 'version' ],
+ tool_config = str( tool_dict.get( 'tool_config', 'missing' ) )
+ tool_id = str( tool_dict.get( 'id', 'missing' ) )
+ name = str( tool_dict.get( 'name', 'missing' ) )
+ description = str( tool_dict.get( 'description', '' ) )
+ version = str( tool_dict.get( 'description', 'missing' ) )
+ tool = Tool( id=container_object_tool_id,
+ tool_config=tool_config,
+ tool_id=tool_id,
+ name=name,
+ description=description,
+ version=version,
requirements=requirements_str,
repository_id=repository_id,
changeset_revision=changeset_revision,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0