galaxy-commits
January 2014 (1 participant, 280 discussions)
commit/galaxy-central: guerler: Fix refresh events for select fields in toolshed grid
by commits-noreply@bitbucket.org, 10 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c04e79918595/
Changeset: c04e79918595
User: guerler
Date: 2014-01-10 19:07:37
Summary: Fix refresh events for select fields in toolshed grid
Affected #: 4 files
diff -r 984377efb491a30e4afd320c9fbff2ff1e1241af -r c04e79918595e0cefe85edac156ff5a962f4a686 static/scripts/galaxy.base.js
--- a/static/scripts/galaxy.base.js
+++ b/static/scripts/galaxy.base.js
@@ -623,8 +623,8 @@
});
};
-$(document).ready( function() {
-
+// Initialize refresh events.
+function init_refresh_on_change () {
$("select[refresh_on_change='true']").change( function() {
var select_field = $(this),
select_val = select_field.val(),
@@ -663,7 +663,13 @@
$( "a[confirm]" ).click( function() {
return confirm( $(this).attr("confirm") );
});
+};
+$(document).ready( function() {
+
+ // Refresh events for form fields.
+ init_refresh_on_change();
+
// Tooltips
if ( $.fn.tooltip ) {
// Put tooltips below items in panel header so that they do not overlap masthead.
diff -r 984377efb491a30e4afd320c9fbff2ff1e1241af -r c04e79918595e0cefe85edac156ff5a962f4a686 static/scripts/galaxy.grids.js
--- a/static/scripts/galaxy.grids.js
+++ b/static/scripts/galaxy.grids.js
@@ -199,6 +199,9 @@
// configure elements
this.init_grid_elements();
+
+ // attach global event handler
+ init_refresh_on_change();
},
// Initialize grid controls
diff -r 984377efb491a30e4afd320c9fbff2ff1e1241af -r c04e79918595e0cefe85edac156ff5a962f4a686 static/scripts/packed/galaxy.base.js
--- a/static/scripts/packed/galaxy.base.js
+++ b/static/scripts/packed/galaxy.base.js
@@ -1,1 +1,1 @@
[Minified one-line bundle: the -/+ lines replay the galaxy.base.js changes above in packed form; the full packed diff is omitted here.]
diff -r 984377efb491a30e4afd320c9fbff2ff1e1241af -r c04e79918595e0cefe85edac156ff5a962f4a686 static/scripts/packed/galaxy.grids.js
--- a/static/scripts/packed/galaxy.grids.js
+++ b/static/scripts/packed/galaxy.grids.js
@@ -1,1 +1,1 @@
[Minified one-line bundle: the -/+ lines replay the galaxy.grids.js changes above in packed form; the full packed diff is omitted here.]
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: History & HDA API: fix unicode support for tags in update
by commits-noreply@bitbucket.org, 10 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/984377efb491/
Changeset: 984377efb491
User: carlfeberhard
Date: 2014-01-10 19:02:26
Summary: History & HDA API: fix unicode support for tags in update
Affected #: 1 file
diff -r b142bc80747e5079b7f406259dc346cdba86a044 -r 984377efb491a30e4afd320c9fbff2ff1e1241af lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -2469,7 +2469,7 @@
# based on controllers/tag retag_async: delete all old, reset to entire new
trans.app.tag_handler.delete_item_tags( trans, user, item )
new_tags_str = ','.join( new_tags_list )
- trans.app.tag_handler.apply_item_tags( trans, user, item, new_tags_str.encode( 'utf-8' ) )
+ trans.app.tag_handler.apply_item_tags( trans, user, item, unicode( new_tags_str.encode( 'utf-8' ), 'utf-8' ) )
trans.sa_session.flush()
return item.tags
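The fix round-trips the joined tag string through UTF-8 so that a unicode object, not an 8-bit bytestring, reaches the tag handler; on sqlite3 a bytestring fails with "(ProgrammingError) You must not use 8-bit bytestrings" (the error the casperjs try/catch workarounds, removed in the commit below, used to catch). A minimal Python 2 sketch of the distinction; apply_tags() is a hypothetical stand-in for trans.app.tag_handler.apply_item_tags():

# -*- coding: utf-8 -*-
# Python 2 sketch; apply_tags() is a hypothetical stand-in for
# trans.app.tag_handler.apply_item_tags().
def apply_tags( tags_str ):
    # sqlite3 rejects 8-bit bytestrings bound to TEXT columns
    assert isinstance( tags_str, unicode ), 'must be unicode, not bytes'
    return tags_str.split( u',' )

new_tags_list = [ u'rna-seq', u'Ржевский' ]   # tags may contain non-ASCII
new_tags_str = ','.join( new_tags_list )      # joining unicode items yields unicode
as_bytes = new_tags_str.encode( 'utf-8' )     # old code passed this str (bytes)
as_text = unicode( as_bytes, 'utf-8' )        # new code decodes it back first
print apply_tags( as_text )                   # ok; apply_tags( as_bytes ) would trip the assert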
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: History & HDA API: fix unicode support in update
by commits-noreply@bitbucket.org, 10 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b142bc80747e/
Changeset: b142bc80747e
User: carlfeberhard
Date: 2014-01-10 18:55:43
Summary: History & HDA API: fix unicode support in update
Affected #: 5 files
diff -r e41c77b8ba69351cb243fa4ba651e9b270b4b91c -r b142bc80747e5079b7f406259dc346cdba86a044 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -179,6 +179,29 @@
rval[k] = self.encode_all_ids(trans, v, recursive)
return rval
+ # incoming param validation
+ # should probably be in sep. serializer class/object _used_ by controller
+ def validate_and_sanitize_basestring( self, key, val ):
+ if not isinstance( val, basestring ):
+ raise ValueError( '%s must be a string or unicode: %s' %( key, str( type( val ) ) ) )
+ return unicode( sanitize_html( val, 'utf-8', 'text/html' ), 'utf-8' )
+
+ def validate_and_sanitize_basestring_list( self, key, val ):
+ if not isinstance( val, list ):
+ raise ValueError( '%s must be a list: %s' %( key, str( type( val ) ) ) )
+ return [ unicode( sanitize_html( t, 'utf-8', 'text/html' ), 'utf-8' ) for t in val ]
+
+ def validate_boolean( self, key, val ):
+ if not isinstance( val, bool ):
+ raise ValueError( '%s must be a boolean: %s' %( key, str( type( val ) ) ) )
+ return val
+
+ #TODO:
+ #def validate_integer( self, key, val, min, max ):
+ #def validate_float( self, key, val, min, max ):
+ #def validate_number( self, key, val, min, max ):
+ #def validate_genome_build( self, key, val ):
+
Root = BaseController
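These helpers collapse the per-key type-check/sanitize boilerplate that the API controllers below previously repeated. A self-contained Python 2 sketch of the pattern, with cgi.escape as a toy stand-in for Galaxy's sanitize_html( val, 'utf-8', 'text/html' ):

# Python 2 sketch of the consolidated validators; cgi.escape is a toy
# stand-in for Galaxy's sanitize_html.
import cgi

class PayloadValidator( object ):
    def validate_and_sanitize_basestring( self, key, val ):
        if not isinstance( val, basestring ):
            raise ValueError( '%s must be a string or unicode: %s' %( key, str( type( val ) ) ) )
        return unicode( cgi.escape( val ) )

    def validate_and_sanitize_basestring_list( self, key, val ):
        if not isinstance( val, list ):
            raise ValueError( '%s must be a list: %s' %( key, str( type( val ) ) ) )
        return [ unicode( cgi.escape( t ) ) for t in val ]

    def validate_boolean( self, key, val ):
        if not isinstance( val, bool ):
            raise ValueError( '%s must be a boolean: %s' %( key, str( type( val ) ) ) )
        return val

v = PayloadValidator()
print v.validate_and_sanitize_basestring( 'name', u'my <b>history</b>' )  # escapes markup
print v.validate_boolean( 'published', True )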
diff -r e41c77b8ba69351cb243fa4ba651e9b270b4b91c -r b142bc80747e5079b7f406259dc346cdba86a044 lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -342,34 +342,14 @@
'id', 'model_class', 'nice_size', 'contents_url', 'purged', 'tags',
'state', 'state_details', 'state_ids'
)
-
validated_payload = {}
for key, val in payload.items():
- # TODO: lots of boilerplate here, but overhead on abstraction is equally onerous
- if key == 'name':
- if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( 'name must be a string or unicode: %s' %( str( type( val ) ) ) )
- validated_payload[ 'name' ] = sanitize_html( val, 'utf-8' )
- #TODO:?? if sanitized != val: log.warn( 'script kiddie' )
- elif key == 'deleted':
- if not isinstance( val, bool ):
- raise ValueError( 'deleted must be a boolean: %s' %( str( type( val ) ) ) )
- validated_payload[ 'deleted' ] = val
- elif key == 'published':
- if not isinstance( val, bool ):
- raise ValueError( 'published must be a boolean: %s' %( str( type( val ) ) ) )
- validated_payload[ 'published' ] = val
- elif key == 'genome_build' and val is not None:
- if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( 'genome_build must be a string: %s' %( str( type( val ) ) ) )
- validated_payload[ 'genome_build' ] = sanitize_html( val, 'utf-8' )
- elif key == 'annotation':
- if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( 'annotation must be a string or unicode: %s' %( str( type( val ) ) ) )
- validated_payload[ 'annotation' ] = sanitize_html( val, 'utf-8' )
+ if key in ( 'name', 'genome_build', 'annotation' ):
+ validated_payload[ key ] = self.validate_and_sanitize_basestring( key, val )
+ if key in ( 'deleted', 'published' ):
+ validated_payload[ key ] = self.validate_boolean( key, val )
elif key == 'tags':
- if isinstance( val, list ):
- validated_payload[ 'tags' ] = [ sanitize_html( t, 'utf-8' ) for t in val ]
+ validated_payload[ key ] = self.validate_and_sanitize_basestring_list( key, val )
elif key not in valid_but_uneditable_keys:
pass
#log.warn( 'unknown key: %s', str( key ) )
diff -r e41c77b8ba69351cb243fa4ba651e9b270b4b91c -r b142bc80747e5079b7f406259dc346cdba86a044 lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -385,38 +385,17 @@
)
validated_payload = {}
for key, val in payload.items():
- # TODO: lots of boilerplate here, but overhead on abstraction is equally onerous
- # typecheck, parse, remap key
- if key == 'name':
- if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( 'name must be a string or unicode: %s' %( str( type( val ) ) ) )
- validated_payload[ 'name' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
- #TODO:?? if sanitized != val: log.warn( 'script kiddie' )
- elif key == 'deleted':
- if not isinstance( val, bool ):
- raise ValueError( 'deleted must be a boolean: %s' %( str( type( val ) ) ) )
- validated_payload[ 'deleted' ] = val
- elif key == 'visible':
- if not isinstance( val, bool ):
- raise ValueError( 'visible must be a boolean: %s' %( str( type( val ) ) ) )
- validated_payload[ 'visible' ] = val
- elif key == 'genome_build':
- if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( 'genome_build must be a string: %s' %( str( type( val ) ) ) )
- validated_payload[ 'dbkey' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
- elif key == 'annotation' and val is not None:
- if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( 'annotation must be a string or unicode: %s' %( str( type( val ) ) ) )
- validated_payload[ 'annotation' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
- elif key == 'misc_info':
- if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( 'misc_info must be a string or unicode: %s' %( str( type( val ) ) ) )
- validated_payload[ 'info' ] = util.sanitize_html.sanitize_html( val, 'utf-8' )
+ if key in ( 'name', 'genome_build', 'misc_info', 'annotation' ):
+ val = self.validate_and_sanitize_basestring( key, val )
+ #TODO: need better remap system or eliminate the need altogether
+ key = 'dbkey' if key == 'genome_build' else key
+ key = 'info' if key == 'misc_info' else key
+ validated_payload[ key ] = val
+ if key in ( 'deleted', 'visible' ):
+ validated_payload[ key ] = self.validate_boolean( key, val )
elif key == 'tags':
- if isinstance( val, list ):
- validated_payload[ 'tags' ] = [ sanitize_html( t, 'utf-8' ) for t in val ]
+ validated_payload[ key ] = self.validate_and_sanitize_basestring_list( key, val )
elif key not in valid_but_uneditable_keys:
pass
#log.warn( 'unknown key: %s', str( key ) )
return validated_payload
-
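One subtlety in the history_contents version: after validation, the API keys genome_build and misc_info are remapped to the model fields dbkey and info. A short Python 2 sketch of that remap with a plain dict (the mapping comes from the diff; the helper name is illustrative):

# Python 2 sketch of the key remap above; API_TO_MODEL_KEY holds the
# mapping from the diff, remap_key() is an illustrative helper.
API_TO_MODEL_KEY = { 'genome_build': 'dbkey', 'misc_info': 'info' }

def remap_key( key ):
    return API_TO_MODEL_KEY.get( key, key )

payload = { 'genome_build': u'hg18', 'misc_info': u'mapped reads' }
validated_payload = {}
for key, val in payload.items():
    validated_payload[ remap_key( key ) ] = val
print validated_payload  # {'info': u'mapped reads', 'dbkey': u'hg18'}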
diff -r e41c77b8ba69351cb243fa4ba651e9b270b4b91c -r b142bc80747e5079b7f406259dc346cdba86a044 test/casperjs/api-hda-tests.js
--- a/test/casperjs/api-hda-tests.js
+++ b/test/casperjs/api-hda-tests.js
@@ -174,24 +174,14 @@
hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
this.test.assert( hdaShow.name === 'New name', "Update sanitized name: " + hdaShow.name );
- //NOTE!: this fails on sqlite3 (with default setup)
- try {
- this.test.comment( 'update should allow unicode in names' );
- var unicodeName = 'Ржевский сапоги';
- returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
- name : unicodeName
- });
- //this.debug( 'returned:\n' + this.jsonStr( returned ) );
- hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
- this.test.assert( hdaShow.name === unicodeName, "Update accepted unicode name: " + hdaShow.name );
- } catch( err ){
- //this.debug( this.jsonStr( err ) );
- if( ( err instanceof this.api.APIError )
- && ( err.status === 500 )
- && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
- this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
- }
- }
+ this.test.comment( 'update should allow unicode in names' );
+ var unicodeName = 'Ржевский сапоги';
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ name : unicodeName
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.name === unicodeName, "Update accepted unicode name: " + hdaShow.name );
this.test.comment( 'update should allow escaped quotations in names' );
var quotedName = '"Bler"';
@@ -239,11 +229,11 @@
});
//this.debug( 'returned:\n' + this.jsonStr( returned ) );
hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ //this.debug( 'hdaShow:\n' + this.jsonStr( hdaShow ) );
this.test.assert( hdaShow.genome_build === 'hg18',
"genome_build successfully set via update: " + hdaShow.genome_build );
this.test.assert( hdaShow.metadata_dbkey === 'hg18',
"metadata_dbkey successfully set via the same update: " + hdaShow.metadata_dbkey );
-
this.test.comment( 'update should sanitize any genome_build' );
returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
genome_build : 'hg18<script type="text/javascript" src="bler">alert("blah");</script>'
@@ -257,23 +247,14 @@
this.test.comment( 'update should allow unicode in genome builds' );
var unicodeBuild = 'Ржевский18';
- //NOTE!: this fails on sqlite3 (with default setup)
- try {
- returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
- name : unicodeBuild
- });
- //this.debug( 'returned:\n' + this.jsonStr( returned ) );
- hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
- this.test.assert( hdaShow.genome_build === unicodeBuild,
- "Update accepted unicode genome_build: " + hdaShow.name );
- } catch( err ){
- //this.debug( this.jsonStr( err ) );
- if( ( err instanceof this.api.APIError )
- && ( err.status === 500 )
- && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
- this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
- }
- }
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ genome_build : unicodeBuild
+ });
+ this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.debug( 'hdaShow:\n' + this.jsonStr( hdaShow ) );
+ this.test.assert( hdaShow.genome_build === unicodeBuild,
+ "Update accepted unicode genome_build: " + hdaShow.genome_build );
// ........................................................................................... misc_info/info
this.test.comment( 'update should allow changing the misc_info' );
@@ -298,25 +279,14 @@
this.test.comment( 'update should allow unicode in misc_info' );
var unicodeInfo = '여보!';
- //NOTE!: this fails on sqlite3 (with default setup)
- try {
- returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
- misc_info : unicodeInfo
- });
- //this.debug( 'returned:\n' + this.jsonStr( returned ) );
- hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
- this.test.assert( hdaShow.misc_info === unicodeInfo,
- "Update accepted unicode misc_info: " + hdaShow.misc_info );
- } catch( err ){
- //this.debug( this.jsonStr( err ) );
- if( ( err instanceof this.api.APIError )
- && ( err.status === 500 )
- && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
- this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
- }
- }
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ misc_info : unicodeInfo
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.misc_info === unicodeInfo,
+ "Update accepted unicode misc_info: " + hdaShow.misc_info );
-/*
// ........................................................................................... annotation
// currently fails because no annotation is returned in details
this.test.comment( 'update should allow changing the annotation' );
@@ -338,25 +308,15 @@
this.test.assert( hdaShow.annotation === 'New annotation',
"Update sanitized annotation: " + hdaShow.annotation );
- //NOTE!: this fails on sqlite3 (with default setup)
- try {
- this.test.comment( 'update should allow unicode in annotations' );
- var unicodeAnnotation = 'お願いは、それが落下させない';
- returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
- annotation : unicodeAnnotation
- });
- //this.debug( 'returned:\n' + this.jsonStr( returned ) );
- hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
- this.test.assert( hdaShow.annotation === unicodeAnnotation,
- "Update accepted unicode annotation: " + hdaShow.annotation );
- } catch( err ){
- //this.debug( this.jsonStr( err ) );
- if( ( err instanceof this.api.APIError )
- && ( err.status === 500 )
- && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
- this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
- }
- }
+ this.test.comment( 'update should allow unicode in annotations' );
+ var unicodeAnnotation = 'お願いは、それが落下させない';
+ returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+ annotation : unicodeAnnotation
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+ this.test.assert( hdaShow.annotation === unicodeAnnotation,
+ "Update accepted unicode annotation: " + hdaShow.annotation );
this.test.comment( 'update should allow escaped quotations in annotations' );
var quotedAnnotation = '"Bler"';
@@ -367,8 +327,6 @@
hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
this.test.assert( hdaShow.annotation === quotedAnnotation,
"Update accepted escaped quotations in annotation: " + hdaShow.annotation );
-*/
-
// ------------------------------------------------------------------------------------------- ERRORS
this.test.comment( 'create should error with "Please define the source" when the param "from_ld_id" is not used' );
@@ -391,6 +349,8 @@
//var returned = this.api.hdas.update( lastHistory.id, { deleted: true, blerp: 'blerp' });
//this.debug( 'returned:' + this.jsonStr( returned ) );
//this.debug( 'page:' + this.jsonStr( this.page ) );
+/*
+*/
});
// ===================================================================
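As an aside on the guards removed above: the "(ProgrammingError) You must not use 8-bit bytestrings" message they matched comes from Python 2's sqlite3 module when it is handed non-ASCII byte strings. A minimal reproduction (illustrative only, assuming a default sqlite3 setup; decoding to unicode before the database layer, as the reworked server-side validation presumably now ensures, avoids the error):

    import sqlite3

    conn = sqlite3.connect( ":memory:" )
    conn.execute( "CREATE TABLE t ( name TEXT )" )
    # Under Python 2, passing a non-ASCII byte string here raises
    # sqlite3.ProgrammingError: "You must not use 8-bit bytestrings ...".
    # Passing unicode succeeds:
    conn.execute( "INSERT INTO t VALUES ( ? )", ( u'Ржевский', ) )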
diff -r e41c77b8ba69351cb243fa4ba651e9b270b4b91c -r b142bc80747e5079b7f406259dc346cdba86a044 test/casperjs/api-history-tests.js
--- a/test/casperjs/api-history-tests.js
+++ b/test/casperjs/api-history-tests.js
@@ -218,23 +218,14 @@
this.test.assert( historyShow.name === 'New name', "Update sanitized name: " + historyShow.name );
//NOTE!: this fails on sqlite3 (with default setup)
- try {
- this.test.comment( 'update should allow unicode in names' );
- var unicodeName = '桜ゲノム';
- returned = this.api.histories.update( newFirstHistory.id, {
- name : unicodeName
- });
- //this.debug( 'returned:\n' + this.jsonStr( returned ) );
- historyShow = this.api.histories.show( newFirstHistory.id );
- this.test.assert( historyShow.name === unicodeName, "Update accepted unicode name: " + historyShow.name );
- } catch( err ){
- //this.debug( this.jsonStr( err ) );
- if( ( err instanceof this.api.APIError )
- && ( err.status === 500 )
- && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
- this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
- }
- }
+ this.test.comment( 'update should allow unicode in names' );
+ var unicodeName = '桜ゲノム';
+ returned = this.api.histories.update( newFirstHistory.id, {
+ name : unicodeName
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.name === unicodeName, "Update accepted unicode name: " + historyShow.name );
this.test.comment( 'update should allow escaped quotations in names' );
var quotedName = '"Bler"';
@@ -294,27 +285,16 @@
this.test.assert( historyShow.genome_build === 'hg18',
"Update sanitized genome_build: " + historyShow.genome_build );
+ // removing for now until I can determine the relationship between unicode and genome_builds
this.test.comment( 'update should allow unicode in genome builds' );
var unicodeBuild = '桜12';
- //NOTE!: this fails on sqlite3 (with default setup)
- try {
- returned = this.api.histories.update( newFirstHistory.id, {
- name : unicodeBuild
- });
- //this.debug( 'returned:\n' + this.jsonStr( returned ) );
- historyShow = this.api.histories.show( newFirstHistory.id );
- this.test.assert( historyShow.genome_build === unicodeBuild,
- "Update accepted unicode genome_build: " + historyShow.genome_build );
- } catch( err ){
- //this.debug( this.jsonStr( err ) );
- if( ( err instanceof this.api.APIError )
- && ( err.status === 500 )
- && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
- this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
- } else {
- throw err;
- }
- }
+ returned = this.api.histories.update( newFirstHistory.id, {
+ genome_build : unicodeBuild
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.genome_build === unicodeBuild,
+ "Update accepted unicode genome_build: " + historyShow.genome_build );
// ........................................................................................... annotation
@@ -337,25 +317,15 @@
this.test.assert( historyShow.annotation === 'New annotation',
"Update sanitized annotation: " + historyShow.annotation );
- //NOTE!: this fails on sqlite3 (with default setup)
- try {
- this.test.comment( 'update should allow unicode in annotations' );
- var unicodeAnnotation = 'お願いは、それが落下させない';
- returned = this.api.histories.update( newFirstHistory.id, {
- annotation : unicodeAnnotation
- });
- //this.debug( 'returned:\n' + this.jsonStr( returned ) );
- historyShow = this.api.histories.show( newFirstHistory.id );
- this.test.assert( historyShow.annotation === unicodeAnnotation,
- "Update accepted unicode annotation: " + historyShow.annotation );
- } catch( err ){
- //this.debug( this.jsonStr( err ) );
- if( ( err instanceof this.api.APIError )
- && ( err.status === 500 )
- && ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
- this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
- }
- }
+ this.test.comment( 'update should allow unicode in annotations' );
+ var unicodeAnnotation = 'お願いは、それが落下させない';
+ returned = this.api.histories.update( newFirstHistory.id, {
+ annotation : unicodeAnnotation
+ });
+ //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+ historyShow = this.api.histories.show( newFirstHistory.id );
+ this.test.assert( historyShow.annotation === unicodeAnnotation,
+ "Update accepted unicode annotation: " + historyShow.annotation );
this.test.comment( 'update should allow escaped quotations in annotations' );
var quotedAnnotation = '"Bler"';
@@ -371,7 +341,6 @@
// ------------------------------------------------------------------------------------------- ERRORS
//TODO: make sure expected errors are being passed back (but no permissions checks here - different suite)
// bad ids: index, show, update, delete, undelete
-
/*
*/
//this.debug( this.jsonStr( historyShow ) );
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d96d5309b5c5/
Changeset: d96d5309b5c5
Branch: stable
User: inithello
Date: 2014-01-08 18:27:14
Summary: When a push is received in the hg controller, update the repository on disk after the push is complete.
Affected #: 1 file
diff -r 3e76aceca9d0ab563e285d630dd7575140236b0d -r d96d5309b5c52bb43c410c7a943808294bee12c0 lib/galaxy/webapps/tool_shed/controllers/hg.py
--- a/lib/galaxy/webapps/tool_shed/controllers/hg.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/hg.py
@@ -2,6 +2,7 @@
from galaxy import web
from galaxy.web.base.controller import BaseUIController
from tool_shed.util.shed_util_common import get_repository_by_name_and_owner
+from tool_shed.util.shed_util_common import update_repository
from tool_shed.util.metadata_util import set_repository_metadata
from galaxy import eggs
@@ -9,6 +10,8 @@
import mercurial.__version__
from mercurial.hgweb.hgwebdir_mod import hgwebdir
from mercurial.hgweb.request import wsgiapplication
+from mercurial import hg
+from mercurial import ui
log = logging.getLogger(__name__)
@@ -36,6 +39,11 @@
repository = get_repository_by_name_and_owner( trans.app, name, owner )
if repository:
if hg_version >= '2.2.3':
+ # Update the repository on disk to the tip revision, because the web upload form uses the on-disk working
+ # directory. If the repository is not updated on disk, pushing from the command line and then uploading
+ # via the web interface will result in a new head being created.
+ repo = hg.repository( ui.ui(), repository.repo_path( trans.app ) )
+ update_repository( repo, ctx_rev=None )
# Set metadata using the repository files on disk.
error_message, status = set_repository_metadata( trans, repository )
if status == 'ok' and error_message:
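For reference, a standalone sketch of what the update step does (update_repository is the tool shed helper imported above; this approximation goes through the plain mercurial API and is an assumption about the helper's behavior, not its actual code):

    from mercurial import hg, ui, commands

    def update_working_dir_to_tip( repo_path ):
        # Bring the on-disk working directory up to the tip revision so web
        # uploads and command-line pushes share the same parent and no
        # extra head is created.
        repo = hg.repository( ui.ui(), repo_path )
        commands.update( ui.ui(), repo, rev='tip' )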
https://bitbucket.org/galaxy/galaxy-central/commits/e41c77b8ba69/
Changeset: e41c77b8ba69
User: inithello
Date: 2014-01-10 15:52:10
Summary: Merge stable.
Affected #: 0 files
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f238040cc9a2/
Changeset: f238040cc9a2
User: jmchilton
Date: 2014-01-10 15:45:36
Summary: Rework key injection logic in test framework.
Pull the logic that determines the master and user API keys for testing out of functional_tests.py so it can be reused elsewhere.
This refactoring will help the creation of an API test framework.
Affected #: 2 files
diff -r 66df8b51245cd141fb7180e401e725ef3b1a0e37 -r f238040cc9a261db98edfa316384b33851f22486 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -51,6 +51,8 @@
from galaxy.util.json import to_json_string
from functional import database_contexts
+from base.api_util import get_master_api_key
+from base.api_util import get_user_api_key
import nose.core
import nose.config
@@ -64,8 +66,6 @@
default_galaxy_test_port_max = 9999
default_galaxy_locales = 'en'
default_galaxy_test_file_dir = "test-data"
-default_galaxy_master_key = "TEST123"
-default_galaxy_user_key = None
migrated_tool_panel_config = 'migrated_tools_conf.xml'
installed_tool_panel_configs = [ 'shed_tool_conf.xml' ]
@@ -342,7 +342,7 @@
galaxy_data_manager_data_path = tempfile.mkdtemp( prefix='data_manager_tool-data', dir=data_manager_test_tmp_path )
# ---- Build Application --------------------------------------------------
- master_api_key = os.environ.get( "GALAXY_TEST_MASTER_API_KEY", default_galaxy_master_key )
+ master_api_key = get_master_api_key()
app = None
if start_server:
kwargs = dict( admin_users='test@bx.psu.edu',
@@ -477,7 +477,7 @@
import functional.test_workflow
functional.test_workflow.WorkflowTestCase.workflow_test_file = workflow_test
functional.test_workflow.WorkflowTestCase.master_api_key = master_api_key
- functional.test_workflow.WorkflowTestCase.user_api_key = os.environ.get( "GALAXY_TEST_USER_API_KEY", default_galaxy_user_key )
+ functional.test_workflow.WorkflowTestCase.user_api_key = get_user_api_key()
data_manager_test = __check_arg( '-data_managers', param=False )
if data_manager_test:
import functional.test_data_managers
@@ -486,7 +486,7 @@
tmp_dir=data_manager_test_tmp_path,
testing_shed_tools=testing_shed_tools,
master_api_key=master_api_key,
- user_api_key=os.environ.get( "GALAXY_TEST_USER_API_KEY", default_galaxy_user_key ),
+ user_api_key=get_user_api_key(),
)
else:
# We must make sure that functional.test_toolbox is always imported after
@@ -499,7 +499,7 @@
functional.test_toolbox.build_tests(
testing_shed_tools=testing_shed_tools,
master_api_key=master_api_key,
- user_api_key=os.environ.get( "GALAXY_TEST_USER_API_KEY", default_galaxy_user_key ),
+ user_api_key=get_user_api_key(),
)
test_config = nose.config.Config( env=os.environ, ignoreFiles=ignore_files, plugins=nose.plugins.manager.DefaultPluginManager() )
test_config.configure( sys.argv )
diff -r 66df8b51245cd141fb7180e401e725ef3b1a0e37 -r f238040cc9a261db98edfa316384b33851f22486 test/base/api_util.py
--- /dev/null
+++ b/test/base/api_util.py
@@ -0,0 +1,20 @@
+import os
+
+DEFAULT_GALAXY_MASTER_API_KEY = "TEST123"
+DEFAULT_GALAXY_USER_API_KEY = None
+
+
+def get_master_api_key():
+ """ Test master API key to use for functional test. This key should be
+ configured as a master API key and should be able to create additional
+ users and keys.
+ """
+ return os.environ.get( "GALAXY_TEST_MASTER_API_KEY", DEFAULT_GALAXY_MASTER_API_KEY )
+
+
+def get_user_api_key():
+ """ Test user API key to use for functional tests. If set, this should drive
+ API-based testing - if not set, the master API key should be used to create a new
+ user and API key for tests.
+ """
+ return os.environ.get( "GALAXY_TEST_USER_API_KEY", DEFAULT_GALAXY_USER_API_KEY )
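Example use of the new helpers (the environment variable names come from the diff; the key value is made up):

    import os
    from base.api_util import get_master_api_key, get_user_api_key

    os.environ[ "GALAXY_TEST_MASTER_API_KEY" ] = "secret-master-key"
    print( get_master_api_key() )  # secret-master-key
    # With GALAXY_TEST_USER_API_KEY unset this returns None, signalling the
    # framework to create a test user and key via the master key:
    print( get_user_api_key() )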
https://bitbucket.org/galaxy/galaxy-central/commits/efa607692e17/
Changeset: efa607692e17
User: jmchilton
Date: 2014-01-10 15:45:36
Summary: Flesh out API interactor...
... this will enable functional testing of API.
Affected #: 1 file
diff -r f238040cc9a261db98edfa316384b33851f22486 -r efa607692e171a5aa820d0a4b5ac9c8471431886 test/base/interactor.py
--- a/test/base/interactor.py
+++ b/test/base/interactor.py
@@ -31,10 +31,11 @@
class GalaxyInteractorApi( object ):
- def __init__( self, twill_test_case ):
+ def __init__( self, twill_test_case, test_user=None ):
self.twill_test_case = twill_test_case
self.api_url = "%s/api" % twill_test_case.url.rstrip("/")
- self.api_key = self.__get_user_key( twill_test_case.user_api_key, twill_test_case.master_api_key )
+ self.master_api_key = twill_test_case.master_api_key
+ self.api_key = self.__get_user_key( twill_test_case.user_api_key, twill_test_case.master_api_key, test_user=test_user )
self.uploads = {}
def verify_output( self, history_id, output_data, outfile, attributes, shed_tool_id, maxseconds ):
@@ -251,19 +252,26 @@
)
return self._post( "tools", files=files, data=data )
- def __get_user_key( self, user_key, admin_key ):
- if user_key:
- return user_key
+ def ensure_user_with_email( self, email ):
+ admin_key = self.master_api_key
all_users = self._get( 'users', key=admin_key ).json()
try:
- test_user = [ user for user in all_users if user["email"] == 'test@bx.psu.edu' ][0]
+ test_user = [ user for user in all_users if user["email"] == email ][0]
except IndexError:
data = dict(
- email='test@bx.psu.edu',
+ email=email,
password='testuser',
username='admin-user',
)
test_user = self._post( 'users', data, key=admin_key ).json()
+ return test_user
+
+ def __get_user_key( self, user_key, admin_key, test_user=None ):
+ if not test_user:
+ test_user = "test(a)bx.psu.edu"
+ if user_key:
+ return user_key
+ test_user = self.ensure_user_with_email(test_user)
return self._post( "users/%s/api_key" % test_user['id'], key=admin_key ).json()
def __dataset_fetcher( self, history_id ):
@@ -275,16 +283,16 @@
return fetcher
- def _post( self, path, data={}, files=None, key=None):
+ def _post( self, path, data={}, files=None, key=None, admin=False):
if not key:
- key = self.api_key
+ key = self.api_key if not admin else self.master_api_key
data = data.copy()
data['key'] = key
return post_request( "%s/%s" % (self.api_url, path), data=data, files=files )
- def _get( self, path, data={}, key=None ):
+ def _get( self, path, data={}, key=None, admin=False ):
if not key:
- key = self.api_key
+ key = self.api_key if not admin else self.master_api_key
data = data.copy()
data['key'] = key
if path.startswith("/api"):
@@ -413,14 +421,17 @@
try:
from requests import get as get_request
from requests import post as post_request
+ from requests import put as put_request
+ from requests import delete as delete_request
except ImportError:
import urllib2
import httplib
class RequestsLikeResponse( object ):
- def __init__( self, content ):
+ def __init__( self, content, status_code ):
self.content = content
+ self.status_code = status_code
def json( self ):
return loads( self.content )
@@ -431,23 +442,44 @@
argsep = '?'
url = url + argsep + '&'.join( [ '%s=%s' % (k, v) for k, v in params.iteritems() ] )
#req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' } )
- return RequestsLikeResponse(urllib2.urlopen( url ).read() )
+ try:
+ response = urllib2.urlopen( url )
+ return RequestsLikeResponse( response.read(), status_code=response.getcode() )
+ except urllib2.HTTPError as e:
+ return RequestsLikeResponse( e.read(), status_code=e.code )
- def post_request( url, data, files ):
+ def post_request( url, data, files={} ):
+ return __multipart_request( url, data, files, verb="POST" )
+
+ def put_request( url, data, files={} ):
+ return __multipart_request( url, data, files, verb="PUT" )
+
+ def delete_request( url ):
+ opener = urllib2.build_opener(urllib2.HTTPHandler)
+ request = urllib2.Request(url)
+ request.get_method = lambda: 'DELETE'
+ try:
+ response = opener.open(request)
+ return RequestsLikeResponse( response.read(), status_code=response.getcode() )
+ except urllib2.HTTPError as e:
+ return RequestsLikeResponse( e.read(), status_code=e.code )
+
+
+ def __multipart_request( url, data, files={}, verb="POST" ):
parsed_url = urllib2.urlparse.urlparse( url )
- return __post_multipart( host=parsed_url.netloc, selector=parsed_url.path, fields=data.iteritems(), files=(files or {}).iteritems() )
+ return __multipart( host=parsed_url.netloc, selector=parsed_url.path, fields=data.iteritems(), files=(files or {}).iteritems(), verb=verb )
# http://stackoverflow.com/a/681182
- def __post_multipart(host, selector, fields, files):
+ def __multipart(host, selector, fields, files, verb="POST"):
+ h = httplib.HTTP(host)
+ h.putrequest(verb, selector)
content_type, body = __encode_multipart_formdata(fields, files)
- h = httplib.HTTP(host)
- h.putrequest('POST', selector)
h.putheader('content-type', content_type)
h.putheader('content-length', str(len(body)))
h.endheaders()
h.send(body)
errcode, errmsg, headers = h.getreply()
- return RequestsLikeResponse(h.file.read())
+ return RequestsLikeResponse(h.file.read(), status_code=errcode)
def __encode_multipart_formdata(fields, files):
LIMIT = '----------lImIt_of_THE_fIle_eW_$'
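A hypothetical call against the new put_request fallback (URL, history id, and key are invented for illustration; when the requests library is available, put_request is simply requests.put and accepts the same keywords):

    response = put_request(
        "http://localhost:8080/api/histories/abc123",
        data={ "name": "renamed", "key": "0123456789abcdef" },
    )
    print( response.status_code )
    print( response.json() )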
https://bitbucket.org/galaxy/galaxy-central/commits/1cd7fd029a01/
Changeset: 1cd7fd029a01
User: jmchilton
Date: 2014-01-10 15:45:36
Summary: Eliminate interactor dependency on deprecated global variable.
Lessens the need to import things in a specific order - as long as the database context is initialized before interactor methods are called, everything should be fine now. TODO: Eliminate the rest of the dependencies on this variable.
Affected #: 1 file
diff -r efa607692e171a5aa820d0a4b5ac9c8471431886 -r 1cd7fd029a0195fe8ee4c3e5d423591fb4403a8f test/base/interactor.py
--- a/test/base/interactor.py
+++ b/test/base/interactor.py
@@ -4,7 +4,7 @@
from galaxy.util.odict import odict
import galaxy.model
from galaxy.model.orm import and_, desc
-from base.test_db_util import sa_session
+from functional import database_contexts
from json import dumps, loads
from logging import getLogger
@@ -391,9 +391,9 @@
# Start with a new history
self.twill_test_case.logout()
self.twill_test_case.login( email='test@bx.psu.edu' )
- admin_user = sa_session.query( galaxy.model.User ).filter( galaxy.model.User.table.c.email == 'test@bx.psu.edu' ).one()
+ admin_user = database_contexts.galaxy_context.query( galaxy.model.User ).filter( galaxy.model.User.table.c.email == 'test@bx.psu.edu' ).one()
self.twill_test_case.new_history()
- latest_history = sa_session.query( galaxy.model.History ) \
+ latest_history = database_contexts.galaxy_context.query( galaxy.model.History ) \
.filter( and_( galaxy.model.History.table.c.deleted == False,
galaxy.model.History.table.c.user_id == admin_user.id ) ) \
.order_by( desc( galaxy.model.History.table.c.create_time ) ) \
@@ -417,7 +417,7 @@
# Lets just try to use requests if it is available, but if not provide fallback
-# on custom implementations of limited requests get/post functionality.
+# on custom implementations of limited requests get, put, etc... functionality.
try:
from requests import get as get_request
from requests import post as post_request
@@ -464,7 +464,6 @@
except urllib2.HTTPError as e:
return RequestsLikeResponse( e.read(), status_code=e.code )
-
def __multipart_request( url, data, files={}, verb="POST" ):
parsed_url = urllib2.urlparse.urlparse( url )
return __multipart( host=parsed_url.netloc, selector=parsed_url.path, fields=data.iteritems(), files=(files or {}).iteritems(), verb=verb )
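The resulting lookup pattern, sketched outside the diff (galaxy_context is assigned elsewhere during test startup; the helper below is illustrative, not part of the commit):

    import galaxy.model
    from functional import database_contexts

    def get_admin_user( email='test@bx.psu.edu' ):
        # Resolve the session at call time rather than importing a global
        # sa_session at module import time.
        sa_session = database_contexts.galaxy_context
        return sa_session.query( galaxy.model.User ) \
            .filter( galaxy.model.User.table.c.email == email ).one()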
https://bitbucket.org/galaxy/galaxy-central/commits/95314eb284fa/
Changeset: 95314eb284fa
User: jmchilton
Date: 2014-01-10 15:45:36
Summary: Implement framework for testing API...
... if you can call one new class a framework. Includes a few test cases to exercise/drive it: a histories API test (a typical API test) and a general test of the API framework itself (mostly the run_as functionality).
This also changes interactor.py to make it more useful outside the context of tool/workflow testing, and tweaks the test Galaxy instance that gets started so that the run_as feature can be tested.
Affected #: 5 files
diff -r 1cd7fd029a0195fe8ee4c3e5d423591fb4403a8f -r 95314eb284faf3024207f70ef0dafe63c285e379 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -346,6 +346,7 @@
app = None
if start_server:
kwargs = dict( admin_users='test@bx.psu.edu',
+ api_allow_run_as='test@bx.psu.edu',
allow_library_path_paste=True,
allow_user_creation=True,
allow_user_deletion=True,
diff -r 1cd7fd029a0195fe8ee4c3e5d423591fb4403a8f -r 95314eb284faf3024207f70ef0dafe63c285e379 test/base/api.py
--- /dev/null
+++ b/test/base/api.py
@@ -0,0 +1,85 @@
+# TODO: We don't need all of TwillTestCase, strip down to a common super class
+# shared by API and Twill test cases.
+from .twilltestcase import TwillTestCase
+
+from base.interactor import GalaxyInteractorApi as BaseInteractor
+
+from .api_util import get_master_api_key
+from .api_util import get_user_api_key
+
+from urllib import urlencode
+
+
+TEST_USER = "user@bx.psu.edu"
+
+
+# TODO: Allow these to point at existing Galaxy instances.
+class ApiTestCase( TwillTestCase ):
+
+ def setUp( self ):
+ super( ApiTestCase, self ).setUp( )
+ self.user_api_key = get_user_api_key()
+ self.master_api_key = get_master_api_key()
+ self.galaxy_interactor = ApiTestInteractor( self )
+
+ def _api_url( self, path, params=None, use_key=None ):
+ if not params:
+ params = {}
+ url = "%s/api/%s" % ( self.url, path )
+ if use_key:
+ params[ "key" ] = self.galaxy_interactor.api_key
+ query = urlencode( params )
+ if query:
+ url = "%s?%s" % ( url, query )
+ return url
+
+ def _setup_user( self, email ):
+ self.galaxy_interactor.ensure_user_with_email(email)
+ users = self._get( "users", admin=True ).json()
+ user = [ user for user in users if user["email"] == email ][0]
+ return user
+
+ def _get( self, *args, **kwds ):
+ return self.galaxy_interactor.get( *args, **kwds )
+
+ def _post( self, *args, **kwds ):
+ return self.galaxy_interactor.post( *args, **kwds )
+
+ def _assert_status_code_is( self, response, expected_status_code ):
+ response_status_code = response.status_code
+ if expected_status_code != response_status_code:
+ try:
+ body = response.json()
+ except Exception:
+ body = "INVALID JSON RESPONSE"
+ assertion_message_template = "Request status code (%d) was not expected value %d. Body was %s"
+ assertion_message = assertion_message_template % ( response_status_code, expected_status_code, body )
+ raise AssertionError( assertion_message )
+
+ def _assert_has_keys( self, response, *keys ):
+ for key in keys:
+ assert key in response, "Response [%s] does not contain key [%s]" % ( response, key )
+
+ def _random_key( self ): # Used for invalid request testing...
+ return "1234567890123456"
+
+ _assert_has_key = _assert_has_keys
+
+
+class ApiTestInteractor( BaseInteractor ):
+ """ Specialized variant of the API interactor (originally developed for
+ tool functional tests) for testing the API generally.
+ """
+
+ def __init__( self, test_case ):
+ super( ApiTestInteractor, self ).__init__( test_case, test_user=TEST_USER )
+
+ # In this variant, the lower-level get and post methods are meant to be
+ # used directly to test the API - instead of relying on higher-level
+ # constructs for specific pieces of the API (the way the tool-testing
+ # variant does).
+ def get( self, *args, **kwds ):
+ return self._get( *args, **kwds )
+
+ def post( self, *args, **kwds ):
+ return self._post( *args, **kwds )
diff -r 1cd7fd029a0195fe8ee4c3e5d423591fb4403a8f -r 95314eb284faf3024207f70ef0dafe63c285e379 test/functional/api/test_framework.py
--- /dev/null
+++ b/test/functional/api/test_framework.py
@@ -0,0 +1,27 @@
+# This file doesn't test any API in particular but is meant to functionally
+# test the API framework itself.
+from base import api
+
+
+class ApiFrameworkTestCase( api.ApiTestCase ):
+
+ # Next several tests test the API's run_as functionality.
+ def test_user_cannont_run_as( self ):
+ post_data = dict( name="TestHistory1", run_as="another_user" )
+ # Normal user cannot run_as...
+ create_response = self._post( "histories", data=post_data )
+ self._assert_status_code_is( create_response, 403 )
+
+ def test_run_as_invalid_user( self ):
+ post_data = dict( name="TestHistory1", run_as="another_user" )
+ # admin user can run_as, but this user doesn't exist, expect 400.
+ create_response = self._post( "histories", data=post_data, admin=True )
+ self._assert_status_code_is( create_response, 400 )
+
+ def test_run_as_valid_user( self ):
+ run_as_user = self._setup_user( "for_run_as@bx.psu.edu" )
+ post_data = dict( name="TestHistory1", run_as=run_as_user[ "id" ] )
+ # Use run_as with admin user and for another user just created, this
+ # should work.
+ create_response = self._post( "histories", data=post_data, admin=True )
+ self._assert_status_code_is( create_response, 200 )
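For context, the run_as mechanic these tests exercise boils down to a request like the following sketch (the key and user id are invented; post_request is the helper from base.interactor shown in the earlier changeset):

    from base.interactor import post_request

    response = post_request(
        "http://localhost:8080/api/histories",
        data={ "name": "TestHistory1", "run_as": "f2db41e1fa331b3e", "key": "MASTERKEY" },
    )
    # 200 when the key is a master key and run_as names a valid user id.
    print( response.status_code )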
diff -r 1cd7fd029a0195fe8ee4c3e5d423591fb4403a8f -r 95314eb284faf3024207f70ef0dafe63c285e379 test/functional/api/test_histories.py
--- /dev/null
+++ b/test/functional/api/test_histories.py
@@ -0,0 +1,26 @@
+from base import api
+# requests.post or something like it if unavailable
+from base.interactor import post_request
+
+
+class HistoriesApiTestCase( api.ApiTestCase ):
+
+ def test_create_history( self ):
+ # Create a history.
+ post_data = dict( name="TestHistory1" )
+ create_response = self._post( "histories", data=post_data ).json()
+ self._assert_has_keys( create_response, "name", "id" )
+ self.assertEquals( create_response[ "name" ], "TestHistory1" )
+ created_id = create_response[ "id" ]
+
+ # Make sure new history appears in index of user's histories.
+ index_response = self._get( "histories" ).json()
+ indexed_history = [ h for h in index_response if h[ "id" ] == created_id ][0]
+ self.assertEquals(indexed_history[ "name" ], "TestHistory1")
+
+ def test_create_anonymous_fails( self ):
+ post_data = dict( name="CannotCreate" )
+ # Using lower-level _api_url will cause key to not be injected.
+ histories_url = self._api_url( "histories" )
+ create_response = post_request( url=histories_url, data=post_data )
+ self._assert_status_code_is( create_response, 403 )
commit/galaxy-central: carlfeberhard: Remove history panel prefs from session storage on logout; possible fix for duplicating tags_used behavior
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/66df8b51245c/
Changeset: 66df8b51245c
User: carlfeberhard
Date: 2014-01-10 15:32:39
Summary: Remove history panel prefs from session storage on logout; possible fix for duplicating tags_used behavior
Affected #: 4 files
diff -r 3dde0f4765fbff33789badeaa17503349559ff72 -r 66df8b51245cd141fb7180e401e725ef3b1a0e37 static/scripts/mvc/tags.js
--- a/static/scripts/mvc/tags.js
+++ b/static/scripts/mvc/tags.js
@@ -56,7 +56,7 @@
/** @returns {jQuery} the input for this view */
$input : function(){
- return this.$el.find( '.tags-input' );
+ return this.$el.find( 'input.tags-input' );
},
/** @returns {String[]} all tags used by the current user */
@@ -73,7 +73,9 @@
view.model.save({ tags: event.val }, { silent: true });
// if it's new, add the tag to the users tags
if( event.added ){
- view._addNewTagToTagsUsed( event.added.text );
+ //??: solve weird behavior in FF on test.galaxyproject.org where
+ // event.added.text is string object: 'String{ 0="o", 1="n", 2="e" }'
+ view._addNewTagToTagsUsed( event.added.text + '' );
}
});
},
diff -r 3dde0f4765fbff33789badeaa17503349559ff72 -r 66df8b51245cd141fb7180e401e725ef3b1a0e37 static/scripts/mvc/user/user-model.js
--- a/static/scripts/mvc/user/user-model.js
+++ b/static/scripts/mvc/user/user-model.js
@@ -74,8 +74,12 @@
*/
clearSessionStorage : function(){
for( var key in sessionStorage ){
- //TODO: currently only history
- if( key.indexOf( 'HistoryView.' ) === 0 ){
+ //TODO: store these under the user key so we don't have to do this
+ // currently only history
+ if( key.indexOf( 'history:' ) === 0 ){
+ sessionStorage.removeItem( key );
+
+ } else if( key === 'history-panel' ){
sessionStorage.removeItem( key );
}
}
diff -r 3dde0f4765fbff33789badeaa17503349559ff72 -r 66df8b51245cd141fb7180e401e725ef3b1a0e37 static/scripts/packed/mvc/tags.js
--- a/static/scripts/packed/mvc/tags.js
+++ b/static/scripts/packed/mvc/tags.js
@@ -1,1 +1,1 @@
-var TagsEditor=Backbone.View.extend(LoggableMixin).extend(HiddenUntilActivatedViewMixin).extend({tagName:"div",className:"tags-display",initialize:function(a){this.listenTo(this.model,"change:tags",function(){this.render()});this.hiddenUntilActivated(a.$activator,a)},render:function(){var a=this;this.$el.html(this._template());this.$input().select2({placeholder:"Add tags",width:"100%",tags:function(){return a._getTagsUsed()}});this._setUpBehaviors();return this},_template:function(){return['<label class="prompt">',_l("Tags"),"</label>",'<input class="tags-input" value="',this.tagsToCSV(),'" />'].join("")},tagsToCSV:function(){var a=this.model.get("tags");if(!_.isArray(a)||_.isEmpty(a)){return""}return a.sort().join(",")},$input:function(){return this.$el.find(".tags-input")},_getTagsUsed:function(){return Galaxy.currUser.get("tags_used")},_setUpBehaviors:function(){var a=this;this.$input().on("change",function(b){a.model.save({tags:b.val},{silent:true});if(b.added){a._addNewTagToTagsUsed(b.added.text)}})},_addNewTagToTagsUsed:function(a){var b=Galaxy.currUser.get("tags_used");if(!_.contains(b,a)){b.push(a);b.sort();Galaxy.currUser.set("tags_used",b)}},remove:function(){this.$input.off();this.stopListening(this.model);Backbone.View.prototype.remove.call(this)},toString:function(){return["TagsEditor(",this.model+"",")"].join("")}});
\ No newline at end of file
+var TagsEditor=Backbone.View.extend(LoggableMixin).extend(HiddenUntilActivatedViewMixin).extend({tagName:"div",className:"tags-display",initialize:function(a){this.listenTo(this.model,"change:tags",function(){this.render()});this.hiddenUntilActivated(a.$activator,a)},render:function(){var a=this;this.$el.html(this._template());this.$input().select2({placeholder:"Add tags",width:"100%",tags:function(){return a._getTagsUsed()}});this._setUpBehaviors();return this},_template:function(){return['<label class="prompt">',_l("Tags"),"</label>",'<input class="tags-input" value="',this.tagsToCSV(),'" />'].join("")},tagsToCSV:function(){var a=this.model.get("tags");if(!_.isArray(a)||_.isEmpty(a)){return""}return a.sort().join(",")},$input:function(){return this.$el.find("input.tags-input")},_getTagsUsed:function(){return Galaxy.currUser.get("tags_used")},_setUpBehaviors:function(){var a=this;this.$input().on("change",function(b){a.model.save({tags:b.val},{silent:true});if(b.added){a._addNewTagToTagsUsed(b.added.text+"")}})},_addNewTagToTagsUsed:function(a){var b=Galaxy.currUser.get("tags_used");if(!_.contains(b,a)){b.push(a);b.sort();Galaxy.currUser.set("tags_used",b)}},remove:function(){this.$input.off();this.stopListening(this.model);Backbone.View.prototype.remove.call(this)},toString:function(){return["TagsEditor(",this.model+"",")"].join("")}});
\ No newline at end of file
diff -r 3dde0f4765fbff33789badeaa17503349559ff72 -r 66df8b51245cd141fb7180e401e725ef3b1a0e37 static/scripts/packed/mvc/user/user-model.js
--- a/static/scripts/packed/mvc/user/user-model.js
+++ b/static/scripts/packed/mvc/user/user-model.js
@@ -1,1 +1,1 @@
-var User=Backbone.Model.extend(LoggableMixin).extend({urlRoot:galaxy_config.root+"api/users",defaults:{id:null,username:"("+_l("anonymous user")+")",email:"",total_disk_usage:0,nice_total_disk_usage:"",quota_percent:null},initialize:function(a){this.log("User.initialize:",a);this.on("loaded",function(b,c){this.log(this+" has loaded:",b,c)});this.on("change",function(b,c){this.log(this+" has changed:",b,c.changes)})},isAnonymous:function(){return(!this.get("email"))},loadFromApi:function(d,b){d=d||User.CURRENT_ID_STR;b=b||{};var a=this,c=b.success;b.success=function(f,e){a.trigger("loaded",f,e);if(c){c(f,e)}};if(d===User.CURRENT_ID_STR){b.url=this.urlRoot+"/"+User.CURRENT_ID_STR}return Backbone.Model.prototype.fetch.call(this,b)},clearSessionStorage:function(){for(var a in sessionStorage){if(a.indexOf("HistoryView.")===0){sessionStorage.removeItem(a)}}},toString:function(){var a=[this.get("username")];if(this.get("id")){a.unshift(this.get("id"));a.push(this.get("email"))}return"User("+a.join(":")+")"}});User.CURRENT_ID_STR="current";User.getCurrentUserFromApi=function(b){var a=new User();a.loadFromApi(User.CURRENT_ID_STR,b);return a};var UserCollection=Backbone.Collection.extend(LoggableMixin).extend({model:User,urlRoot:galaxy_config.root+"api/users"});
\ No newline at end of file
+var User=Backbone.Model.extend(LoggableMixin).extend({urlRoot:galaxy_config.root+"api/users",defaults:{id:null,username:"("+_l("anonymous user")+")",email:"",total_disk_usage:0,nice_total_disk_usage:"",quota_percent:null},initialize:function(a){this.log("User.initialize:",a);this.on("loaded",function(b,c){this.log(this+" has loaded:",b,c)});this.on("change",function(b,c){this.log(this+" has changed:",b,c.changes)})},isAnonymous:function(){return(!this.get("email"))},loadFromApi:function(d,b){d=d||User.CURRENT_ID_STR;b=b||{};var a=this,c=b.success;b.success=function(f,e){a.trigger("loaded",f,e);if(c){c(f,e)}};if(d===User.CURRENT_ID_STR){b.url=this.urlRoot+"/"+User.CURRENT_ID_STR}return Backbone.Model.prototype.fetch.call(this,b)},clearSessionStorage:function(){for(var a in sessionStorage){if(a.indexOf("history:")===0){sessionStorage.removeItem(a)}else{if(a==="history-panel"){sessionStorage.removeItem(a)}}}},toString:function(){var a=[this.get("username")];if(this.get("id")){a.unshift(this.get("id"));a.push(this.get("email"))}return"User("+a.join(":")+")"}});User.CURRENT_ID_STR="current";User.getCurrentUserFromApi=function(b){var a=new User();a.loadFromApi(User.CURRENT_ID_STR,b);return a};var UserCollection=Backbone.Collection.extend(LoggableMixin).extend({model:User,urlRoot:galaxy_config.root+"api/users"});
\ No newline at end of file
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/995ca7707640/
Changeset: 995ca7707640
User: dannon
Date: 2014-01-10 13:29:05
Summary: Remove dependency on simplejson; convert everything over to using the json module in the Python standard library.
Affected #: 44 files
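The conversion pattern repeated across these 44 files, in miniature (a sketch of the before/after, not any specific hunk):

    # Before: simplejson pulled in through Galaxy's egg machinery:
    #     from galaxy import eggs
    #     eggs.require( "simplejson" )
    #     import simplejson
    #     data = simplejson.loads( payload )
    # After: the stdlib json module (in Python since 2.6) exposes the same
    # loads/dumps/load/dump API:
    import json
    data = json.loads( '{"a": 1}' )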
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -26,7 +26,6 @@
pysam = 0.4.2
pysqlite = 2.5.6
python_lzo = 1.08_2.03_static
-simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
SQLAlchemy = 0.7.9
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -2,15 +2,13 @@
Galaxy Metadata
"""
-from galaxy import eggs
-eggs.require("simplejson")
import copy
import cPickle
+import json
import logging
import os
import shutil
-import simplejson
import sys
import tempfile
import weakref
@@ -130,7 +128,7 @@
def from_JSON_dict( self, filename ):
dataset = self.parent
log.debug( 'loading metadata from file for: %s %s' % ( dataset.__class__.__name__, dataset.id ) )
- JSONified_dict = simplejson.load( open( filename ) )
+ JSONified_dict = json.load( open( filename ) )
for name, spec in self.spec.items():
if name in JSONified_dict:
dataset._metadata[ name ] = spec.param.from_external_value( JSONified_dict[ name ], dataset )
@@ -146,7 +144,7 @@
for name, spec in self.spec.items():
if name in dataset_meta_dict:
meta_dict[ name ] = spec.param.to_external_value( dataset_meta_dict[ name ] )
- simplejson.dump( meta_dict, open( filename, 'wb+' ) )
+ json.dump( meta_dict, open( filename, 'wb+' ) )
def __getstate__( self ):
return None #cannot pickle a weakref item (self._parent), when data._metadata_collection is None, it will be recreated on demand
@@ -456,7 +454,7 @@
class DictParameter( MetadataParameter ):
def to_string( self, value ):
- return simplejson.dumps( value )
+ return json.dumps( value )
class PythonObjectParameter( MetadataParameter ):
@@ -594,7 +592,7 @@
@classmethod
def cleanup_from_JSON_dict_filename( cls, filename ):
try:
- for key, value in simplejson.load( open( filename ) ).items():
+ for key, value in json.load( open( filename ) ).items():
if cls.is_JSONified_value( value ):
value = cls.from_JSON( value )
if isinstance( value, cls ) and os.path.exists( value.file_name ):
@@ -686,10 +684,10 @@
#file to store a 'return code' indicating the results of the set_meta() call
#results code is like (True/False - if setting metadata was successful/failed , exception or string of reason of success/failure )
metadata_files.filename_results_code = abspath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_results_%s_" % key ).name )
- simplejson.dump( ( False, 'External set_meta() not called' ), open( metadata_files.filename_results_code, 'wb+' ) ) # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible)
+ json.dump( ( False, 'External set_meta() not called' ), open( metadata_files.filename_results_code, 'wb+' ) ) # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible)
#file to store kwds passed to set_meta()
metadata_files.filename_kwds = abspath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_kwds_%s_" % key ).name )
- simplejson.dump( kwds, open( metadata_files.filename_kwds, 'wb+' ), ensure_ascii=True )
+ json.dump( kwds, open( metadata_files.filename_kwds, 'wb+' ), ensure_ascii=True )
#existing metadata file parameters need to be overridden with cluster-writable file locations
metadata_files.filename_override_metadata = abspath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_override_%s_" % key ).name )
open( metadata_files.filename_override_metadata, 'wb+' ) # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible)
@@ -699,7 +697,7 @@
metadata_temp = MetadataTempFile()
shutil.copy( dataset.metadata.get( meta_key, None ).file_name, metadata_temp.file_name )
override_metadata.append( ( meta_key, metadata_temp.to_JSON() ) )
- simplejson.dump( override_metadata, open( metadata_files.filename_override_metadata, 'wb+' ) )
+ json.dump( override_metadata, open( metadata_files.filename_override_metadata, 'wb+' ) )
#add to session and flush
sa_session.add( metadata_files )
sa_session.flush()
@@ -711,7 +709,7 @@
metadata_files = self.get_output_filenames_by_dataset( dataset, sa_session )
if not metadata_files:
return False # this file doesn't exist
- rval, rstring = simplejson.load( open( metadata_files.filename_results_code ) )
+ rval, rstring = json.load( open( metadata_files.filename_results_code ) )
if not rval:
log.debug( 'setting metadata externally failed for %s %s: %s' % ( dataset.__class__.__name__, dataset.id, rstring ) )
return rval
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/datatypes/sequence.py
--- a/lib/galaxy/datatypes/sequence.py
+++ b/lib/galaxy/datatypes/sequence.py
@@ -2,12 +2,14 @@
Sequence classes
"""
-import data
+from . import data
import gzip
+import json
import logging
import os
import re
import string
+
from cgi import escape
from galaxy import eggs, util
@@ -16,8 +18,6 @@
from galaxy.datatypes.sniff import get_test_fname, get_headers
from galaxy.datatypes.metadata import MetadataElement
-eggs.require("simplejson")
-import simplejson
try:
eggs.require( "bx-python" )
@@ -44,8 +44,8 @@
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
try:
- parsed_data = simplejson.load(open(dataset.file_name))
- # dataset.peek = simplejson.dumps(data, sort_keys=True, indent=4)
+ parsed_data = json.load(open(dataset.file_name))
+ # dataset.peek = json.dumps(data, sort_keys=True, indent=4)
dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
dataset.blurb = '%d sections' % len(parsed_data['sections'])
except Exception, e:
@@ -60,7 +60,7 @@
def sniff( self, filename ):
if os.path.getsize(filename) < 50000:
try:
- data = simplejson.load(open(filename))
+ data = json.load(open(filename))
sections = data['sections']
for section in sections:
if 'start' not in section or 'end' not in section or 'sequences' not in section:
@@ -155,7 +155,7 @@
do_slow_split = classmethod(do_slow_split)
def do_fast_split( cls, input_datasets, toc_file_datasets, subdir_generator_function, split_params):
- data = simplejson.load(open(toc_file_datasets[0].file_name))
+ data = json.load(open(toc_file_datasets[0].file_name))
sections = data['sections']
total_sequences = long(0)
for section in sections:
@@ -191,7 +191,7 @@
toc = toc_file_datasets[ds_no]
split_data['args']['toc_file'] = toc.file_name
f = open(os.path.join(dir, 'split_info_%s.json' % base_name), 'w')
- simplejson.dump(split_data, f)
+ json.dump(split_data, f)
f.close()
start_sequence += sequences_per_file[part_no]
return directories
@@ -557,7 +557,7 @@
sequence_count = long(args['num_sequences'])
if 'toc_file' in args:
- toc_file = simplejson.load(open(args['toc_file'], 'r'))
+ toc_file = json.load(open(args['toc_file'], 'r'))
commands = Sequence.get_split_commands_with_toc(input_name, output_name, toc_file, start_sequence, sequence_count)
else:
commands = Sequence.get_split_commands_sequential(is_gzip(input_name), input_name, output_name, start_sequence, sequence_count)
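From the accesses in this hunk, the TOC file evidently carries a structure like the sketch below (only a 'sections' list whose entries have 'start', 'end', and 'sequences' is certain from the code; any other fields are assumptions):

    # json.load( open( args['toc_file'], 'r' ) ) yields something like:
    toc_example = {
        "sections": [
            { "start": 0,    "end": 2499, "sequences": 2500 },
            { "start": 2500, "end": 4999, "sequences": 2500 },
        ],
    }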
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -2,13 +2,13 @@
Job control via the DRMAA API.
"""
+import json
+import logging
import os
+import string
+import subprocess
import sys
import time
-import string
-import logging
-import subprocess
-import simplejson as json
from galaxy import eggs
from galaxy import model
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/jobs/runners/lwr_client/action_mapper.py
--- a/lib/galaxy/jobs/runners/lwr_client/action_mapper.py
+++ b/lib/galaxy/jobs/runners/lwr_client/action_mapper.py
@@ -1,4 +1,4 @@
-from simplejson import load
+from json import load
from os.path import abspath
from os.path import dirname
from os.path import join
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/jobs/runners/lwr_client/client.py
--- a/lib/galaxy/jobs/runners/lwr_client/client.py
+++ b/lib/galaxy/jobs/runners/lwr_client/client.py
@@ -1,7 +1,6 @@
import os
import shutil
-import simplejson
-from simplejson import dumps
+from json import dumps, loads
from time import sleep
from .destination import submit_params
@@ -16,7 +15,7 @@
def __call__(self, func):
def replacement(*args, **kwargs):
response = func(*args, **kwargs)
- return simplejson.loads(response)
+ return loads(response)
return replacement
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -6,7 +6,6 @@
"""
from galaxy import eggs
-eggs.require("simplejson")
eggs.require("pexpect")
import codecs
@@ -15,7 +14,7 @@
import operator
import os
import pexpect
-import simplejson
+import json
import socket
import time
from string import Template
@@ -2250,7 +2249,7 @@
template_data[template.name] = tmp_dict
return template_data
def templates_json( self, use_name=False ):
- return simplejson.dumps( self.templates_dict( use_name=use_name ) )
+ return json.dumps( self.templates_dict( use_name=use_name ) )
def get_display_name( self ):
"""
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/model/custom_types.py
--- a/lib/galaxy/model/custom_types.py
+++ b/lib/galaxy/model/custom_types.py
@@ -1,8 +1,6 @@
from sqlalchemy.types import *
-import pkg_resources
-pkg_resources.require("simplejson")
-import simplejson
+import json
import pickle
import copy
import uuid
@@ -19,8 +17,8 @@
log = logging.getLogger( __name__ )
# Default JSON encoder and decoder
-json_encoder = simplejson.JSONEncoder( sort_keys=True )
-json_decoder = simplejson.JSONDecoder( )
+json_encoder = json.JSONEncoder( sort_keys=True )
+json_decoder = json.JSONDecoder( )
def _sniffnfix_pg9_hex(value):
"""
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -4,6 +4,7 @@
import binascii
import glob
+import json
import logging
import os
import pipes
@@ -19,14 +20,12 @@
from math import isinf
from galaxy import eggs
-eggs.require( "simplejson" )
eggs.require( "MarkupSafe" ) #MarkupSafe must load before mako
eggs.require( "Mako" )
eggs.require( "elementtree" )
eggs.require( "Paste" )
eggs.require( "SQLAlchemy >= 0.4" )
-import simplejson
from cgi import FieldStorage
from elementtree import ElementTree
from mako.template import Template
@@ -869,7 +868,7 @@
value = params_to_strings( tool.inputs, self.inputs, app )
value["__page__"] = self.page
value["__rerun_remap_job_id__"] = self.rerun_remap_job_id
- value = simplejson.dumps( value )
+ value = json.dumps( value )
# Make it secure
if secure:
a = hmac_new( app.config.tool_secret, value )
@@ -888,7 +887,7 @@
test = hmac_new( app.config.tool_secret, value )
assert a == test
# Restore from string
- values = json_fix( simplejson.loads( value ) )
+ values = json_fix( json.loads( value ) )
self.page = values.pop( "__page__" )
if '__rerun_remap_job_id__' in values:
self.rerun_remap_job_id = values.pop( "__rerun_remap_job_id__" )
@@ -2921,7 +2920,7 @@
try:
json_file = open( os.path.join( job_working_directory, jobs.TOOL_PROVIDED_JOB_METADATA_FILE ), 'r' )
for line in json_file:
- line = simplejson.loads( line )
+ line = json.loads( line )
if line.get( 'type' ) == 'new_primary_dataset':
new_primary_datasets[ os.path.split( line.get( 'filename' ) )[-1] ] = line
except Exception:
@@ -3085,7 +3084,7 @@
if json_filename is None:
json_filename = file_name
out = open( json_filename, 'w' )
- out.write( simplejson.dumps( json_params ) )
+ out.write( json.dumps( json_params ) )
out.close()
class DataSourceTool( OutputParameterJSONTool ):
@@ -3145,7 +3144,7 @@
if json_filename is None:
json_filename = file_name
out = open( json_filename, 'w' )
- out.write( simplejson.dumps( json_params ) )
+ out.write( json.dumps( json_params ) )
out.close()
class AsyncDataSourceTool( DataSourceTool ):
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/tools/data_manager/manager.py
--- a/lib/galaxy/tools/data_manager/manager.py
+++ b/lib/galaxy/tools/data_manager/manager.py
@@ -1,9 +1,6 @@
-import pkg_resources
-
-pkg_resources.require( "simplejson" )
-
-import os, errno
-import simplejson
+import errno
+import json
+import os
from galaxy import util
from galaxy.util.odict import odict
@@ -226,7 +223,7 @@
#TODO: fix this merging below
for output_name, output_dataset in out_data.iteritems():
try:
- output_dict = simplejson.loads( open( output_dataset.file_name ).read() )
+ output_dict = json.loads( open( output_dataset.file_name ).read() )
except Exception, e:
log.warning( 'Error reading DataManagerTool json for "%s": %s' % ( output_name, e ) )
continue
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/tools/genome_index/__init__.py
--- a/lib/galaxy/tools/genome_index/__init__.py
+++ b/lib/galaxy/tools/genome_index/__init__.py
@@ -1,6 +1,11 @@
from __future__ import with_statement
-import os, shutil, logging, tempfile, tarfile
+import json
+import logging
+import os
+import shutil
+import tarfile
+import tempfile
from galaxy import model, util
from galaxy.web.framework.helpers import to_unicode
@@ -9,9 +14,6 @@
from galaxy.web.base.controller import UsesHistoryMixin
from galaxy.tools.data import ToolDataTableManager
-import pkg_resources
-pkg_resources.require("simplejson")
-import simplejson
log = logging.getLogger(__name__)
@@ -74,7 +76,7 @@
fp = open( gitd.dataset.get_file_name(), 'r' )
deferred = sa_session.query( model.DeferredJob ).filter_by( id=gitd.deferred_job_id ).first()
try:
- logloc = simplejson.load( fp )
+ logloc = json.load( fp )
except ValueError:
deferred.state = app.model.DeferredJob.states.ERROR
sa_session.add( deferred )
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/tools/genome_index/index_genome.py
--- a/lib/galaxy/tools/genome_index/index_genome.py
+++ b/lib/galaxy/tools/genome_index/index_genome.py
@@ -7,11 +7,17 @@
"""
from __future__ import with_statement
-import optparse, sys, os, tempfile, time, subprocess, shlex, tarfile, shutil
+import json
+import optparse
+import os
+import shlex
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import time
-import pkg_resources
-pkg_resources.require("simplejson")
-import simplejson
class ManagedIndexer():
def __init__( self, output_file, infile, workingdir, rsync_url, tooldata ):
@@ -76,7 +82,7 @@
return result
def _flush_files( self ):
- simplejson.dump( self.locations, self.outfile )
+ json.dump( self.locations, self.outfile )
self.outfile.close()
self.logfile.close()
@@ -318,4 +324,4 @@
returncode = idxobj.run_indexer( indexer )
if not returncode:
exit(1)
- exit(0)
\ No newline at end of file
+ exit(0)
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/tools/imp_exp/__init__.py
--- a/lib/galaxy/tools/imp_exp/__init__.py
+++ b/lib/galaxy/tools/imp_exp/__init__.py
@@ -1,4 +1,4 @@
-import os, shutil, logging, tempfile, simplejson
+import os, shutil, logging, tempfile, json
from galaxy import model
from galaxy.tools.parameters.basic import UnvalidatedValue
from galaxy.web.framework.helpers import to_unicode
@@ -226,13 +226,13 @@
self.sa_session.add( imported_job )
self.sa_session.flush()
- class HistoryDatasetAssociationIDEncoder( simplejson.JSONEncoder ):
+ class HistoryDatasetAssociationIDEncoder( json.JSONEncoder ):
""" Custom JSONEncoder for a HistoryDatasetAssociation that encodes an HDA as its ID. """
def default( self, obj ):
""" Encode an HDA, default encoding for everything else. """
if isinstance( obj, model.HistoryDatasetAssociation ):
return obj.id
- return simplejson.JSONEncoder.default( self, obj )
+ return json.JSONEncoder.default( self, obj )
# Set parameters. May be useful to look at metadata.py for creating parameters.
# TODO: there may be a better way to set parameters, e.g.:
@@ -311,7 +311,7 @@
del metadata[ name ]
return metadata
- class HistoryDatasetAssociationEncoder( simplejson.JSONEncoder ):
+ class HistoryDatasetAssociationEncoder( json.JSONEncoder ):
""" Custom JSONEncoder for a HistoryDatasetAssociation. """
def default( self, obj ):
""" Encode an HDA, default encoding for everything else. """
@@ -337,7 +337,7 @@
}
if isinstance( obj, UnvalidatedValue ):
return obj.__str__()
- return simplejson.JSONEncoder.default( self, obj )
+ return json.JSONEncoder.default( self, obj )
#
# Create attributes/metadata files for export.
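The encoder subclasses above rely on a single hook: json calls default() only for objects it cannot serialize natively, so overriding it is enough to map model objects to IDs. A self-contained sketch of the same pattern, with an invented Thing class standing in for an HDA:

import json

class Thing( object ):
    def __init__( self, id ):
        self.id = id

class ThingIDEncoder( json.JSONEncoder ):
    """ Encode a Thing as its ID; defer everything else to the base class. """
    def default( self, obj ):
        if isinstance( obj, Thing ):
            return obj.id
        return json.JSONEncoder.default( self, obj )

print json.dumps( { "thing": Thing( 42 ) }, cls=ThingIDEncoder )  # prints {"thing": 42}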
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -2,7 +2,26 @@
Utility functions used systemwide.
"""
-import binascii, errno, grp, logging, os, pickle, random, re, shutil, smtplib, stat, string, sys, tempfile, threading
+
+from __future__ import absolute_import
+
+import binascii
+import errno
+import grp
+import json
+import logging
+import os
+import pickle
+import random
+import re
+import shutil
+import smtplib
+import stat
+import string
+import sys
+import tempfile
+import threading
+
from email.MIMEText import MIMEText
from os.path import relpath
@@ -21,12 +40,9 @@
eggs.require( "wchartype" )
import wchartype
-from inflection import Inflector, English
+from .inflection import Inflector, English
inflector = Inflector(English)
-eggs.require( "simplejson" )
-import simplejson
-
log = logging.getLogger(__name__)
_lock = threading.RLock()
@@ -292,8 +308,8 @@
def pretty_print_json(json_data, is_json_string=False):
if is_json_string:
- json_data = simplejson.loads(json_data)
- return simplejson.dumps(json_data, sort_keys=True, indent=4 * ' ')
+ json_data = json.loads(json_data)
+ return json.dumps(json_data, sort_keys=True, indent=4 * ' ')
# characters that are valid
valid_chars = set(string.letters + string.digits + " -=_.()/+*^,:?!")
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/util/json.py
--- a/lib/galaxy/util/json.py
+++ b/lib/galaxy/util/json.py
@@ -1,16 +1,15 @@
+from __future__ import absolute_import
__all__ = [ "to_json_string", "from_json_string", "json_fix", "validate_jsonrpc_request", "validate_jsonrpc_response", "jsonrpc_request", "jsonrpc_response" ]
-import random, string, logging
+import json
+import logging
+import random
import socket
+import string
-import pkg_resources
-pkg_resources.require( "simplejson" )
-
-import simplejson
-
-to_json_string = simplejson.dumps
-from_json_string = simplejson.loads
+to_json_string = json.dumps
+from_json_string = json.loads
log = logging.getLogger( __name__ )
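Since to_json_string and from_json_string now alias the stdlib functions directly, existing callers are unaffected; a quick round-trip check:

from galaxy.util.json import to_json_string, from_json_string

payload = to_json_string( { "state": "ok" } )
assert from_json_string( payload ) == { "state": "ok" }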
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/visualization/data_providers/phyloviz/baseparser.py
--- a/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py
+++ b/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py
@@ -1,6 +1,4 @@
-import pkg_resources
-pkg_resources.require("simplejson")
-import simplejson
+import json
class Node(object):
"""Node class of PhyloTree, which represents a CLAUDE in a phylogenetic tree"""
@@ -118,7 +116,7 @@
def toJson(self, jsonDict):
"""Convenience method to get a json string from a python json dict"""
- return simplejson.dumps(jsonDict)
+ return json.dumps(jsonDict)
def _writeJsonToFile(self, filepath, json):
"""Writes the file out to the system"""
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -2,6 +2,7 @@
Galaxy web application framework
"""
+import hashlib
import inspect
import os
import pkg_resources
@@ -9,27 +10,24 @@
import socket
import string
import time
-import hashlib
+
+from functools import wraps
from Cookie import CookieError
-
pkg_resources.require( "Cheetah" )
from Cheetah.Template import Template
+
+#TODO: Relative imports to be removed
import base
-from functools import wraps
+import helpers
+
from galaxy import util
from galaxy.exceptions import MessageException
-from galaxy.util.json import to_json_string, from_json_string
+from galaxy.util import asbool
+from galaxy.util import safe_str_cmp
from galaxy.util.backports.importlib import import_module
+from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.sanitize_html import sanitize_html
-from galaxy.util import safe_str_cmp
-
-pkg_resources.require( "simplejson" )
-import simplejson
-
-import helpers
-
-from galaxy.util import asbool
import paste.httpexceptions
@@ -76,7 +74,7 @@
@wraps(func)
def decorator( self, trans, *args, **kwargs ):
trans.response.set_content_type( "text/javascript" )
- return simplejson.dumps( func( self, trans, *args, **kwargs ) )
+ return to_json_string( func( self, trans, *args, **kwargs ) )
if not hasattr(func, '_orig'):
decorator._orig = func
decorator.exposed = True
@@ -86,7 +84,7 @@
@wraps(func)
def decorator( self, trans, *args, **kwargs ):
trans.response.set_content_type( "text/javascript" )
- return simplejson.dumps( func( self, trans, *args, **kwargs ), indent=4, sort_keys=True )
+ return to_json_string( func( self, trans, *args, **kwargs ), indent=4, sort_keys=True )
if not hasattr(func, '_orig'):
decorator._orig = func
decorator.exposed = True
@@ -158,7 +156,7 @@
for k, v in payload.iteritems():
if isinstance(v, (str, unicode)):
try:
- payload[k] = simplejson.loads(v)
+ payload[k] = from_json_string(v)
except:
# may not actually be json, just continue
pass
@@ -167,7 +165,7 @@
# Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check
# should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding
# such as multipart/form-data. Leaving it as is for backward compatibility, just in case.
- payload = util.recursively_stringify_dictionary_keys( simplejson.loads( trans.request.body ) )
+ payload = util.recursively_stringify_dictionary_keys( from_json_string( trans.request.body ) )
return payload
try:
kwargs['payload'] = extract_payload_from_request(trans, func, kwargs)
@@ -198,9 +196,9 @@
try:
rval = func( self, trans, *args, **kwargs)
if to_json and trans.debug:
- rval = simplejson.dumps( rval, indent=4, sort_keys=True )
+ rval = to_json_string( rval, indent=4, sort_keys=True )
elif to_json:
- rval = simplejson.dumps( rval )
+ rval = to_json_string( rval )
return rval
except paste.httpexceptions.HTTPException:
raise # handled
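The three decorators above share one shape: set the response content type, call through to the wrapped controller method, and JSON-encode whatever it returns. A self-contained sketch of that shape (json_exposed is an invented name; the real decorators also stash the wrapped function as decorator._orig):

import json
from functools import wraps

def json_exposed( func ):
    # Set the response content type, call through, and JSON-encode the result.
    @wraps( func )
    def decorator( self, trans, *args, **kwargs ):
        trans.response.set_content_type( "text/javascript" )
        return json.dumps( func( self, trans, *args, **kwargs ) )
    decorator.exposed = True
    return decorator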
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/webapps/demo_sequencer/framework/__init__.py
--- a/lib/galaxy/webapps/demo_sequencer/framework/__init__.py
+++ b/lib/galaxy/webapps/demo_sequencer/framework/__init__.py
@@ -2,9 +2,15 @@
Demo sequencer web application framework
"""
+import json
+import os
import pkg_resources
+import random
+import socket
+import string
+import sys
+import time
-import os, sys, time, socket, random, string
pkg_resources.require( "Cheetah" )
from Cheetah.Template import Template
@@ -19,9 +25,6 @@
from galaxy.util import asbool
-pkg_resources.require( "simplejson" )
-import simplejson
-
pkg_resources.require( "Mako" )
import mako.template
import mako.lookup
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/webapps/galaxy/controllers/data_admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/data_admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/data_admin.py
@@ -1,14 +1,13 @@
-import sys, ftplib
+import ftplib
+import json
+import sys
from galaxy import model, util
from galaxy.jobs import transfer_manager
+from galaxy.model.orm import *
from galaxy.web.base.controller import *
-from galaxy.web.framework.helpers import time_ago, iff, grids
-from galaxy.model.orm import *
+from galaxy.web.framework.helpers import grids, iff, time_ago
from library_common import get_comptypes, lucene_search, whoosh_search
-import pkg_resources
-pkg_resources.require("simplejson")
-import simplejson
# Older py compatibility
try:
@@ -161,7 +160,7 @@
gname = deferred.params[ 'intname' ]
indexers = ', '.join( deferred.params[ 'indexes' ] )
jobs = self._get_jobs( deferred, trans )
- jsonjobs = simplejson.dumps( jobs )
+ jsonjobs = json.dumps( jobs )
return trans.fill_template( '/admin/data_admin/download_status.mako', name=gname, indexers=indexers, mainjob=jobid, jobs=jobs, jsonjobs=jsonjobs )
@web.expose
@@ -173,7 +172,7 @@
jobid = params.get( 'jobid', '' )
job = sa_session.query( model.DeferredJob ).filter_by( id=jobid ).first()
jobs = self._get_jobs( job, trans )
- return trans.fill_template( '/admin/data_admin/ajax_status.mako', json=simplejson.dumps( jobs ) )
+ return trans.fill_template( '/admin/data_admin/ajax_status.mako', json=json.dumps( jobs ) )
def _get_job( self, jobid, jobtype, trans ):
sa = trans.app.model.context.current
@@ -297,4 +296,4 @@
params = dict( status='ok', dbkey=dbkey, datatype='fasta', url=url, user=trans.user.id, liftover=newlift, longname=longname, indexers=indexers )
- return params
\ No newline at end of file
+ return params
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/webapps/galaxy/controllers/root.py
--- a/lib/galaxy/webapps/galaxy/controllers/root.py
+++ b/lib/galaxy/webapps/galaxy/controllers/root.py
@@ -508,7 +508,7 @@
Attempts to parse values passed as boolean, float, then int. Defaults
to string. Non-recursive (will not parse lists).
"""
- #TODO: use simplejson or json
+ #TODO: use json
rval = {}
for k in kwd:
rval[ k ] = kwd[k]
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -1,13 +1,12 @@
import pkg_resources
-pkg_resources.require( "simplejson" )
pkg_resources.require( "SVGFig" )
import base64
import httplib
+import json
import math
import os
import sgmllib
-import simplejson
import svgfig
import urllib2
@@ -20,17 +19,17 @@
from galaxy import web
from galaxy.datatypes.data import Data
from galaxy.jobs.actions.post import ActionBox
+from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings
from galaxy.model.mapping import desc
from galaxy.tools.parameters import RuntimeValue, visit_input_values
from galaxy.tools.parameters.basic import DataToolParameter, DrillDownSelectToolParameter, SelectToolParameter, UnvalidatedValue
from galaxy.tools.parameters.grouping import Conditional, Repeat
+from galaxy.util.json import to_json_string
from galaxy.util.odict import odict
-from galaxy.util.json import to_json_string
from galaxy.util.sanitize_html import sanitize_html
from galaxy.util.topsort import CycleError, topsort, topsort_levels
from galaxy.web import error, url_for
from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesStoredWorkflowMixin
-from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings
from galaxy.web.framework import form
from galaxy.web.framework.helpers import grids, time_ago
from galaxy.web.framework.helpers import to_unicode
@@ -812,7 +811,7 @@
# Put parameters in workflow mode
trans.workflow_building_mode = True
# Convert incoming workflow data from json
- data = simplejson.loads( workflow_data )
+ data = json.loads( workflow_data )
# Create new workflow from incoming data
workflow = model.Workflow()
# Just keep the last name (user can rename later)
@@ -920,7 +919,7 @@
#
# Create workflow content JSON.
- workflow_content = simplejson.dumps( workflow_dict, indent=4, sort_keys=True )
+ workflow_content = json.dumps( workflow_dict, indent=4, sort_keys=True )
# Create myExperiment request.
request_raw = trans.fill_template( "workflow/myexp_export.mako", \
@@ -1073,7 +1072,7 @@
if workflow_data:
# Convert incoming workflow data from json
try:
- data = simplejson.loads( workflow_data )
+ data = json.loads( workflow_data )
except Exception, e:
data = None
message = "The data content does not appear to be a Galaxy workflow.<br/>Exception: %s" % str( e )
@@ -1294,7 +1293,7 @@
# It is possible for a workflow to have 0 steps
if len( workflow.steps ) == 0:
error( "Workflow cannot be run because it does not have any steps" )
- #workflow = Workflow.from_simple( simplejson.loads( stored.encoded_value ), trans.app )
+ #workflow = Workflow.from_simple( json.loads( stored.encoded_value ), trans.app )
if workflow.has_cycles:
error( "Workflow cannot be run because it contains cycles" )
if workflow.has_errors:
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/tool_shed/scripts/api/common.py
--- a/lib/tool_shed/scripts/api/common.py
+++ b/lib/tool_shed/scripts/api/common.py
@@ -1,4 +1,8 @@
-import os, sys, urllib, urllib2
+import json
+import os
+import sys
+import urllib
+import urllib2
new_path = [ os.path.join( os.path.dirname( __file__ ), '..', '..', '..', '..', 'lib' ) ]
new_path.extend( sys.path[ 1: ] )
@@ -7,9 +11,6 @@
from galaxy import eggs
import pkg_resources
-pkg_resources.require( "simplejson" )
-import simplejson
-
pkg_resources.require( "pycrypto" )
from Crypto.Cipher import Blowfish
from Crypto.Util.randpool import RandomPool
@@ -29,9 +30,9 @@
# Sends an API DELETE request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
try:
url = make_url( api_key, url )
- req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ))
+ req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ))
req.get_method = lambda: 'DELETE'
- r = simplejson.loads( urllib2.urlopen( req ).read() )
+ r = json.loads( urllib2.urlopen( req ).read() )
except urllib2.HTTPError, e:
if return_formatted:
print e
@@ -86,8 +87,8 @@
# Do the actual GET.
url = make_url( url, api_key=api_key )
try:
- return simplejson.loads( urllib2.urlopen( url ).read() )
- except simplejson.decoder.JSONDecodeError, e:
+ return json.loads( urllib2.urlopen( url ).read() )
+ except ValueError, e:
print "URL did not return JSON data"
sys.exit(1)
@@ -106,15 +107,15 @@
def post( url, data, api_key=None ):
# Do the actual POST.
url = make_url( url, api_key=api_key )
- req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ) )
- return simplejson.loads( urllib2.urlopen( req ).read() )
+ req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ) )
+ return json.loads( urllib2.urlopen( req ).read() )
def put( url, data, api_key=None ):
# Do the actual PUT.
url = make_url( url, api_key=api_key )
- req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ))
+ req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ))
req.get_method = lambda: 'PUT'
- return simplejson.loads( urllib2.urlopen( req ).read() )
+ return json.loads( urllib2.urlopen( req ).read() )
def submit( url, data, api_key=None, return_formatted=True ):
# Sends an API POST request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
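Every helper above builds the same kind of request: a urllib2.Request carrying a JSON body and Content-Type header, with get_method overridden to coerce the HTTP verb, since urllib2 only issues GET and POST natively. A minimal sketch with a placeholder URL:

import json
import urllib2

def json_request( url, data, method='PUT' ):
    # urllib2 picks GET or POST based on the presence of data; overriding
    # get_method lets the same Request carry PUT or DELETE instead.
    req = urllib2.Request( url, headers={ 'Content-Type': 'application/json' }, data=json.dumps( data ) )
    req.get_method = lambda: method
    return json.loads( urllib2.urlopen( req ).read() )

# response = json_request( 'http://example.org/api/items/1', { 'name': 'test' } )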
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/tool_shed/scripts/api/tool_shed_repository_revision_update.py
--- a/lib/tool_shed/scripts/api/tool_shed_repository_revision_update.py
+++ b/lib/tool_shed/scripts/api/tool_shed_repository_revision_update.py
@@ -5,17 +5,16 @@
usage: tool_shed_repository_revision_update.py key url key1=value1 key2=value2 ...
"""
-import os, sys
+import json
+import os
+import sys
+
sys.path.insert( 0, os.path.dirname( __file__ ) )
from common import update
-import pkg_resources
-pkg_resources.require( "simplejson" )
-import simplejson
-
-to_json_string = simplejson.dumps
-from_json_string = simplejson.loads
+to_json_string = json.dumps
+from_json_string = json.loads
data = {}
for key, value in [ kwarg.split( '=', 1 ) for kwarg in sys.argv[ 3: ] ]:
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/tool_shed/util/encoding_util.py
--- a/lib/tool_shed/util/encoding_util.py
+++ b/lib/tool_shed/util/encoding_util.py
@@ -1,14 +1,10 @@
import binascii
+import json
import logging
from galaxy import eggs
from galaxy.util.hash_util import hmac_new
from galaxy.util.json import json_fix
-import pkg_resources
-
-pkg_resources.require( "simplejson" )
-import simplejson
-
log = logging.getLogger( __name__ )
encoding_sep = '__esep__'
@@ -23,7 +19,7 @@
# Restore from string
values = None
try:
- values = simplejson.loads( value )
+ values = json.loads( value )
except Exception, e:
#log.debug( "Decoding json value from tool shed for value '%s' threw exception: %s" % ( str( value ), str( e ) ) )
pass
@@ -39,7 +35,7 @@
def tool_shed_encode( val ):
if isinstance( val, dict ):
- value = simplejson.dumps( val )
+ value = json.dumps( val )
else:
value = val
a = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value )
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/api/common.py
--- a/scripts/api/common.py
+++ b/scripts/api/common.py
@@ -1,3 +1,4 @@
+import json
import logging
import os
import sys
@@ -10,9 +11,6 @@
from galaxy import eggs
import pkg_resources
-pkg_resources.require( "simplejson" )
-import simplejson
-
pkg_resources.require( "pycrypto" )
from Crypto.Cipher import Blowfish
from Crypto.Util.randpool import RandomPool
@@ -35,30 +33,30 @@
# Do the actual GET.
url = make_url( api_key, url )
try:
- return simplejson.loads( urllib2.urlopen( url ).read() )
- except simplejson.decoder.JSONDecodeError, e:
+ return json.loads( urllib2.urlopen( url ).read() )
+ except ValueError, e:
print "URL did not return JSON data"
sys.exit(1)
def post( api_key, url, data ):
# Do the actual POST.
url = make_url( api_key, url )
- req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ) )
- return simplejson.loads( urllib2.urlopen( req ).read() )
+ req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ) )
+ return json.loads( urllib2.urlopen( req ).read() )
def put( api_key, url, data ):
# Do the actual PUT
url = make_url( api_key, url )
- req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ))
+ req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ))
req.get_method = lambda: 'PUT'
- return simplejson.loads( urllib2.urlopen( req ).read() )
+ return json.loads( urllib2.urlopen( req ).read() )
def __del( api_key, url, data ):
# Do the actual DELETE
url = make_url( api_key, url )
- req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ))
+ req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ))
req.get_method = lambda: 'DELETE'
- return simplejson.loads( urllib2.urlopen( req ).read() )
+ return json.loads( urllib2.urlopen( req ).read() )
def display( api_key, url, return_formatted=True ):
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/api/workflow_import_from_file_rpark.py
--- a/scripts/api/workflow_import_from_file_rpark.py
+++ b/scripts/api/workflow_import_from_file_rpark.py
@@ -11,11 +11,11 @@
from common import submit
### Rpark edit ###
-import simplejson
+import json
def openWorkflow(in_file):
with open(in_file) as f:
- temp_data = simplejson.load(f)
+ temp_data = json.load(f)
return temp_data;
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/drmaa_external_killer.py
--- a/scripts/drmaa_external_killer.py
+++ b/scripts/drmaa_external_killer.py
@@ -4,10 +4,11 @@
Terminates a DRMAA job if given a job id and (appropriate) user id.
"""
+import errno
+import json
import os
+import pwd
import sys
-import errno
-import pwd
#import drmaa
new_path = [ os.path.join( os.getcwd(), "lib" ) ]
new_path.extend( sys.path[1:] ) # remove scripts/ from the path
@@ -15,8 +16,6 @@
from galaxy import eggs
import pkg_resources
-pkg_resources.require("simplejson")
-import simplejson as json
pkg_resources.require("drmaa")
import drmaa
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/drmaa_external_runner.py
--- a/scripts/drmaa_external_runner.py
+++ b/scripts/drmaa_external_runner.py
@@ -10,8 +10,8 @@
import sys
import errno
import pwd
+import json
-#import simplejson as json
#import drmaa
new_path = [ os.path.join( os.getcwd(), "lib" ) ]
new_path.extend( sys.path[1:] ) # remove scripts/ from the path
@@ -19,8 +19,6 @@
from galaxy import eggs
import pkg_resources
-pkg_resources.require("simplejson")
-import simplejson as json
pkg_resources.require("drmaa")
import drmaa
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/external_chown_script.py
--- a/scripts/external_chown_script.py
+++ b/scripts/external_chown_script.py
@@ -1,17 +1,17 @@
#!/usr/bin/env python
+import errno
+import json
import os
+import pwd
import sys
-import errno
-import pwd
#import drmaa
+
new_path = [ os.path.join( os.getcwd(), "lib" ) ]
new_path.extend( sys.path[1:] ) # remove scripts/ from the path
sys.path = new_path
from galaxy import eggs
import pkg_resources
-pkg_resources.require("simplejson")
-import simplejson as json
pkg_resources.require("drmaa")
import drmaa
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/extract_dataset_part.py
--- a/scripts/extract_dataset_part.py
+++ b/scripts/extract_dataset_part.py
@@ -6,9 +6,10 @@
on a gid in a scatter-gather mode. This does part of the scatter.
"""
+import json
+import logging
import os
import sys
-import logging
logging.basicConfig()
log = logging.getLogger( __name__ )
@@ -16,11 +17,6 @@
new_path.extend( sys.path[1:] ) # remove scripts/ from the path
sys.path = new_path
-from galaxy import eggs
-import pkg_resources
-pkg_resources.require("simplejson")
-import simplejson
-
# This junk is here to prevent loading errors
import galaxy.model.mapping #need to load this before we unpickle, in order to setup properties assigned by the mappers
galaxy.model.Job() #this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here
@@ -33,7 +29,7 @@
if not os.path.isfile(file_path):
#Nothing to do - some splitters don't write a JSON file
sys.exit(0)
- data = simplejson.load(open(file_path, 'r'))
+ data = json.load(open(file_path, 'r'))
try:
class_name_parts = data['class_name'].split('.')
module_name = '.'.join(class_name_parts[:-1])
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/galaxy_messaging/server/data_transfer.py
--- a/scripts/galaxy_messaging/server/data_transfer.py
+++ b/scripts/galaxy_messaging/server/data_transfer.py
@@ -13,12 +13,21 @@
"""
import ConfigParser
-import sys, os, time, traceback
+import cookielib
+import datetime
+import logging
import optparse
-import urllib,urllib2, cookielib, shutil
-import logging, time, datetime
+import os
+import shutil
+import sys
+import time
+import traceback
+import urllib
+import urllib2
import xml.dom.minidom
+
from xml_helper import get_value, get_value_index
log = logging.getLogger( "datatx_" + str( os.getpid() ) )
@@ -39,14 +48,11 @@
sys.path = new_path
from galaxy import eggs
-from galaxy.util.json import from_json_string, to_json_string
from galaxy.model import SampleDataset
from galaxy.web.api.samples import SamplesAPIController
import pkg_resources
pkg_resources.require( "pexpect" )
import pexpect
-pkg_resources.require( "simplejson" )
-import simplejson
log.debug(str(dir(api)))
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/set_metadata.py
--- a/scripts/set_metadata.py
+++ b/scripts/set_metadata.py
@@ -10,9 +10,11 @@
logging.basicConfig()
log = logging.getLogger( __name__ )
+import cPickle
+import json
import os
import sys
-import cPickle
+
# ensure supported version
from check_python import check_python
try:
@@ -26,8 +28,6 @@
from galaxy import eggs
import pkg_resources
-pkg_resources.require("simplejson")
-import simplejson
import galaxy.model.mapping # need to load this before we unpickle, in order to setup properties assigned by the mappers
galaxy.model.Job() # this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here
from galaxy.util import stringify_dictionary_keys
@@ -107,17 +107,17 @@
dataset.extension = ext_override[ dataset.dataset.id ]
# Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles
if override_metadata:
- override_metadata = simplejson.load( open( override_metadata ) )
+ override_metadata = json.load( open( override_metadata ) )
for metadata_name, metadata_file_override in override_metadata:
if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value( metadata_file_override ):
metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON( metadata_file_override )
setattr( dataset.metadata, metadata_name, metadata_file_override )
- kwds = stringify_dictionary_keys( simplejson.load( open( filename_kwds ) ) ) # load kwds; need to ensure our keywords are not unicode
+ kwds = stringify_dictionary_keys( json.load( open( filename_kwds ) ) ) # load kwds; need to ensure our keywords are not unicode
dataset.datatype.set_meta( dataset, **kwds )
dataset.metadata.to_JSON_dict( filename_out ) # write out results of set_meta
- simplejson.dump( ( True, 'Metadata has been set successfully' ), open( filename_results_code, 'wb+' ) ) # setting metadata has succeeded
+ json.dump( ( True, 'Metadata has been set successfully' ), open( filename_results_code, 'wb+' ) ) # setting metadata has succeeded
except Exception, e:
- simplejson.dump( ( False, str( e ) ), open( filename_results_code, 'wb+' ) ) # setting metadata has failed somehow
+ json.dump( ( False, str( e ) ), open( filename_results_code, 'wb+' ) ) # setting metadata has failed somehow
clear_mappers()
# Shut down any additional threads that might have been created via the ObjectStore
object_store.shutdown()
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 test/base/interactor.py
--- a/test/base/interactor.py
+++ b/test/base/interactor.py
@@ -5,7 +5,7 @@
import galaxy.model
from galaxy.model.orm import and_, desc
from base.test_db_util import sa_session
-from simplejson import dumps, loads
+from json import dumps, loads
from logging import getLogger
log = getLogger( __name__ )
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 test/functional/test_workflow.py
--- a/test/functional/test_workflow.py
+++ b/test/functional/test_workflow.py
@@ -5,7 +5,7 @@
from galaxy.util import parse_xml
from galaxy.tools.test import parse_param_elem, require_file, test_data_iter, parse_output_elems
-from simplejson import load, dumps
+from json import load, dumps
from logging import getLogger
log = getLogger( __name__ )
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -3,7 +3,6 @@
import os
import re
import test_db_util
-import simplejson
import shutil
import logging
import time
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/filters/join.py
--- a/tools/filters/join.py
+++ b/tools/filters/join.py
@@ -8,20 +8,15 @@
"""
-import optparse, os, sys, tempfile, struct
+import json
+import optparse
+import os
import psyco_full
-
-try:
- simple_json_exception = None
- from galaxy import eggs
- from galaxy.util.bunch import Bunch
- from galaxy.util import stringify_dictionary_keys
- import pkg_resources
- pkg_resources.require("simplejson")
- import simplejson
-except Exception, e:
- simplejson_exception = e
- simplejson = None
+import struct
+import sys
+import tempfile
+from galaxy.util.bunch import Bunch
+from galaxy.util import stringify_dictionary_keys
class OffsetList:
@@ -337,11 +332,9 @@
fill_options = None
if options.fill_options_file is not None:
try:
- if simplejson is None:
- raise simplejson_exception
- fill_options = Bunch( **stringify_dictionary_keys( simplejson.load( open( options.fill_options_file ) ) ) ) #simplejson.load( open( options.fill_options_file ) )
+ fill_options = Bunch( **stringify_dictionary_keys( json.load( open( options.fill_options_file ) ) ) ) #json.load( open( options.fill_options_file ) )
except Exception, e:
- print "Warning: Ignoring fill options due to simplejson error (%s)." % e
+ print "Warning: Ignoring fill options due to json error (%s)." % e
if fill_options is None:
fill_options = Bunch()
if 'fill_unjoined_only' not in fill_options:
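One subtlety in the rewritten fill-options block: json.load returns unicode keys on Python 2, and stringify_dictionary_keys converts them to plain str before they are splatted into Bunch(**kwargs). A sketch of the pattern, with fill_options.json as a hypothetical path:

import json

from galaxy.util import stringify_dictionary_keys
from galaxy.util.bunch import Bunch

try:
    # Convert unicode keys to str so they are usable as keyword arguments.
    fill_options = Bunch( **stringify_dictionary_keys( json.load( open( 'fill_options.json' ) ) ) )
except Exception, e:
    print "Warning: Ignoring fill options due to json error (%s)." % e
    fill_options = Bunch()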
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/filters/joiner.xml
--- a/tools/filters/joiner.xml
+++ b/tools/filters/joiner.xml
@@ -51,7 +51,7 @@
</inputs><configfiles><configfile name="fill_options_file"><%
-import simplejson
+import json
%>
#set $__fill_options = {}
#if $fill_empty_columns['fill_empty_columns_switch'] == 'fill_empty':
@@ -72,7 +72,7 @@
#end for
#end if
#end if
-${simplejson.dumps( __fill_options )}
+${json.dumps( __fill_options )}
</configfile></configfiles><outputs>
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/genomespace/genomespace_exporter.py
--- a/tools/genomespace/genomespace_exporter.py
+++ b/tools/genomespace/genomespace_exporter.py
@@ -1,12 +1,16 @@
#Dan Blankenberg
-import optparse, os, urllib2, urllib, cookielib, hashlib, base64, cgi, binascii, logging
-
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( "simplejson" )
-import simplejson
+import base64
+import binascii
+import cgi
+import cookielib
+import hashlib
+import json
+import logging
+import optparse
+import os
+import urllib
+import urllib2
log = logging.getLogger( "tools.genomespace.genomespace_exporter" )#( __name__ )
@@ -58,7 +62,7 @@
dir_request = urllib2.Request( url, headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' } )
dir_request.get_method = lambda: 'GET'
try:
- dir_dict = simplejson.loads( url_opener.open( dir_request ).read() )
+ dir_dict = json.loads( url_opener.open( dir_request ).read() )
except urllib2.HTTPError, e:
#print "e", e, url #punting, assuming lack of permissions at this low of a level...
continue
@@ -81,16 +85,16 @@
if dir_slice in ( '', '/', None ):
continue
url = '/'.join( ( directory_dict['url'], urllib.quote( dir_slice.replace( '/', '_' ), safe='' ) ) )
- new_dir_request = urllib2.Request( url, headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' }, data = simplejson.dumps( payload ) )
+ new_dir_request = urllib2.Request( url, headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' }, data = json.dumps( payload ) )
new_dir_request.get_method = lambda: 'PUT'
- directory_dict = simplejson.loads( url_opener.open( new_dir_request ).read() )
+ directory_dict = json.loads( url_opener.open( new_dir_request ).read() )
return directory_dict
def get_genome_space_launch_apps( atm_url, url_opener, file_url, file_type ):
gs_request = urllib2.Request( "%s/%s/webtool/descriptor" % ( atm_url, GENOMESPACE_API_VERSION_STRING ) )
gs_request.get_method = lambda: 'GET'
opened_gs_request = url_opener.open( gs_request )
- webtool_descriptors = simplejson.loads( opened_gs_request.read() )
+ webtool_descriptors = json.loads( opened_gs_request.read() )
webtools = []
for webtool in webtool_descriptors:
webtool_name = webtool.get( 'name' )
@@ -125,7 +129,7 @@
except urllib2.HTTPError, e:
log.debug( 'GenomeSpace export tool failed reading a directory "%s": %s' % ( url, e ) )
return #bad url, go to next
- cur_directory = simplejson.loads( cur_directory )
+ cur_directory = json.loads( cur_directory )
directory = cur_directory.get( 'directory', {} )
contents = cur_directory.get( 'contents', [] )
if directory.get( 'isDirectory', False ):
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/genomespace/genomespace_file_browser.py
--- a/tools/genomespace/genomespace_file_browser.py
+++ b/tools/genomespace/genomespace_file_browser.py
@@ -1,12 +1,13 @@
#Dan Blankenberg
-import optparse, os, urllib, urllib2, urlparse, cookielib
+import cookielib
+import json
+import optparse
+import os
+import urllib
+import urllib2
+import urlparse
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( "simplejson" )
-import simplejson
GENOMESPACE_API_VERSION_STRING = "v1.0"
GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING )
@@ -87,12 +88,12 @@
gs_request = urllib2.Request( "%s/%s/dataformat/list" % ( dm_site, GENOMESPACE_API_VERSION_STRING ) )
gs_request.get_method = lambda: 'GET'
opened_gs_request = url_opener.open( gs_request )
- genomespace_formats = simplejson.loads( opened_gs_request.read() )
+ genomespace_formats = json.loads( opened_gs_request.read() )
for format in genomespace_formats:
GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT[ format['url'] ] = format['name']
def download_from_genomespace_file_browser( json_parameter_file, genomespace_site ):
- json_params = simplejson.loads( open( json_parameter_file, 'r' ).read() )
+ json_params = json.loads( open( json_parameter_file, 'r' ).read() )
datasource_params = json_params.get( 'param_dict' )
username = datasource_params.get( "gs-username", None )
token = datasource_params.get( "gs-token", None )
@@ -150,14 +151,14 @@
filename = "-%s" % filename
used_filenames.append( filename )
output_filename = os.path.join( datasource_params['__new_file_path__'], 'primary_%i_%s_visible_%s' % ( hda_id, filename, galaxy_ext ) )
- metadata_parameter_file.write( "%s\n" % simplejson.dumps( dict( type = 'new_primary_dataset',
+ metadata_parameter_file.write( "%s\n" % json.dumps( dict( type = 'new_primary_dataset',
base_dataset_id = dataset_id,
ext = galaxy_ext,
filename = output_filename,
name = "GenomeSpace import on %s" % ( original_filename ) ) ) )
else:
if dataset_id is not None:
- metadata_parameter_file.write( "%s\n" % simplejson.dumps( dict( type = 'dataset',
+ metadata_parameter_file.write( "%s\n" % json.dumps( dict( type = 'dataset',
dataset_id = dataset_id,
ext = galaxy_ext,
name = "GenomeSpace import on %s" % ( filename ) ) ) )
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/genomespace/genomespace_importer.py
--- a/tools/genomespace/genomespace_importer.py
+++ b/tools/genomespace/genomespace_importer.py
@@ -1,12 +1,14 @@
#Dan Blankenberg
-import optparse, os, urllib2, urllib, cookielib, urlparse, tempfile, shutil
-
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( "simplejson" )
-import simplejson
+import cookielib
+import json
+import optparse
+import os
+import shutil
+import tempfile
+import urllib
+import urllib2
+import urlparse
import galaxy.model # need to import model before sniff to resolve a circular import dependency
from galaxy.datatypes import sniff
@@ -91,12 +93,12 @@
gs_request = urllib2.Request( "%s/%s/dataformat/list" % ( dm_site, GENOMESPACE_API_VERSION_STRING ) )
gs_request.get_method = lambda: 'GET'
opened_gs_request = url_opener.open( gs_request )
- genomespace_formats = simplejson.loads( opened_gs_request.read() )
+ genomespace_formats = json.loads( opened_gs_request.read() )
for format in genomespace_formats:
GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT[ format['url'] ] = format['name']
def download_from_genomespace_importer( username, token, json_parameter_file, genomespace_site ):
- json_params = simplejson.loads( open( json_parameter_file, 'r' ).read() )
+ json_params = json.loads( open( json_parameter_file, 'r' ).read() )
datasource_params = json_params.get( 'param_dict' )
assert None not in [ username, token ], "Missing GenomeSpace username or token."
output_filename = datasource_params.get( "output_file1", None )
@@ -152,7 +154,7 @@
metadata_request = urllib2.Request( "%s/%s/filemetadata/%s" % ( genomespace_site_dict['dmServer'], GENOMESPACE_API_VERSION_STRING, download_file_path ) )
metadata_request.get_method = lambda: 'GET'
metadata_url = url_opener.open( metadata_request )
- file_metadata_dict = simplejson.loads( metadata_url.read() )
+ file_metadata_dict = json.loads( metadata_url.read() )
metadata_url.close()
file_type = file_metadata_dict.get( 'dataFormat', None )
if file_type and file_type.get( 'url' ):
@@ -176,7 +178,7 @@
#save json info for single primary dataset
if dataset_id is not None:
- metadata_parameter_file.write( "%s\n" % simplejson.dumps( dict( type = 'dataset',
+ metadata_parameter_file.write( "%s\n" % json.dumps( dict( type = 'dataset',
dataset_id = dataset_id,
ext = file_type,
name = "GenomeSpace importer on %s" % ( filename ) ) ) )
@@ -189,7 +191,7 @@
used_filenames.append( filename )
target_output_filename = os.path.join( datasource_params['__new_file_path__'], 'primary_%i_%s_visible_%s' % ( hda_id, filename, file_type ) )
shutil.move( output_filename, target_output_filename )
- metadata_parameter_file.write( "%s\n" % simplejson.dumps( dict( type = 'new_primary_dataset',
+ metadata_parameter_file.write( "%s\n" % json.dumps( dict( type = 'new_primary_dataset',
base_dataset_id = base_dataset_id,
ext = file_type,
filename = target_output_filename,
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/new_operations/column_join.py
--- a/tools/new_operations/column_join.py
+++ b/tools/new_operations/column_join.py
@@ -13,19 +13,16 @@
other_inputs: the other input files to join
"""
-import optparse, os, re, struct, sys, tempfile
+import json
+import optparse
+import os
+import re
+import struct
+import sys
+import tempfile
-try:
- simple_json_exception = None
- from galaxy import eggs
- from galaxy.util.bunch import Bunch
- from galaxy.util import stringify_dictionary_keys
- import pkg_resources
- pkg_resources.require("simplejson")
- import simplejson
-except Exception, e:
- simplejson_exception = e
- simplejson = None
+from galaxy.util.bunch import Bunch
+from galaxy.util import stringify_dictionary_keys
def stop_err( msg ):
sys.stderr.write( msg )
@@ -162,11 +159,9 @@
fill_options = None
if options.fill_options_file != 'None' and options.fill_options_file is not None:
try:
- if simplejson is None:
- raise simplejson_exception
- fill_options = Bunch( **stringify_dictionary_keys( simplejson.load( open( options.fill_options_file ) ) ) )
+ fill_options = Bunch( **stringify_dictionary_keys( json.load( open( options.fill_options_file ) ) ) )
except Exception, e:
- print 'Warning: Ignoring fill options due to simplejson error (%s).' % e
+ print 'Warning: Ignoring fill options due to json error (%s).' % e
if fill_options is None:
fill_options = Bunch()
if 'file1_columns' not in fill_options:
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/new_operations/column_join.xml
--- a/tools/new_operations/column_join.xml
+++ b/tools/new_operations/column_join.xml
@@ -49,7 +49,7 @@
</inputs><configfiles><configfile name="fill_options_file"><%
-import simplejson
+import json
%>
#set $__fill_options = {}
#if $fill_empty_columns['fill_empty_columns_switch'] == 'fill_empty':
@@ -65,7 +65,7 @@
#end for
#end if
#end if
-${simplejson.dumps( __fill_options )}
+${json.dumps( __fill_options )}
</configfile></configfiles><outputs>
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/peak_calling/macs_wrapper.py
--- a/tools/peak_calling/macs_wrapper.py
+++ b/tools/peak_calling/macs_wrapper.py
@@ -1,8 +1,12 @@
-import sys, subprocess, tempfile, shutil, glob, os, os.path, gzip
-from galaxy import eggs
-import pkg_resources
-pkg_resources.require( "simplejson" )
-import simplejson
+import glob
+import gzip
+import json
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+import tempfile
CHUNK_SIZE = 1024
@@ -42,7 +46,7 @@
out.close()
def main():
- options = simplejson.load( open( sys.argv[1] ) )
+ options = json.load( open( sys.argv[1] ) )
output_bed = sys.argv[2]
output_extra_html = sys.argv[3]
output_extra_path = sys.argv[4]
diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/peak_calling/macs_wrapper.xml
--- a/tools/peak_calling/macs_wrapper.xml
+++ b/tools/peak_calling/macs_wrapper.xml
@@ -93,7 +93,7 @@
</outputs><configfiles><configfile name="options_file"><%
-import simplejson
+import json
%>
#set $__options = { 'experiment_name':str( $experiment_name ), 'gsize':int( float( str( $gsize ) ) ), 'tsize':str( $tsize ), 'bw':str( $bw ), 'pvalue':str( $pvalue ), 'mfold':str( $mfold ), 'nolambda':str( $nolambda ), 'lambdaset': str( $lambdaset ), 'futurefdr':str( $futurefdr ) }
#if str( $xls_to_interval ) == 'create':
@@ -135,7 +135,7 @@
#if $diag_type['diag_type_selector'] == 'diag':
#set $__options['diag'] = { 'fe-min':str( $diag_type['fe-min'] ), 'fe-max':str( $diag_type['fe-max'] ), 'fe-step':str( $diag_type['fe-step'] ) }
#end if
-${ simplejson.dumps( __options ) }
+${ json.dumps( __options ) }
</configfile></configfiles><tests>
https://bitbucket.org/galaxy/galaxy-central/commits/2483cc9e597d/
Changeset: 2483cc9e597d
User: dannon
Date: 2014-01-10 13:40:02
Summary: Merge.
Affected #: 172 files
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 job_conf.xml.sample_advanced
--- a/job_conf.xml.sample_advanced
+++ b/job_conf.xml.sample_advanced
@@ -6,7 +6,19 @@
--><plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner"/><plugin id="pbs" type="runner" load="galaxy.jobs.runners.pbs:PBSJobRunner" workers="2"/>
- <plugin id="drmaa" type="runner" load="galaxy.jobs.runners.drmaa:DRMAAJobRunner"/>
+ <plugin id="drmaa" type="runner" load="galaxy.jobs.runners.drmaa:DRMAAJobRunner">
+ <!-- Different DRMs handle successfully completed jobs differently,
+ these options can be changed to handle such differences and
+ are explained in detail on the Galaxy wiki. Defaults are shown -->
+ <param id="invalidjobexception_state">ok</param>
+ <param id="invalidjobexception_retries">0</param>
+ <param id="internalexception_state">ok</param>
+ <param id="internalexception_retries">0</param>
+ </plugin>
+ <plugin id="sge" type="runner" load="galaxy.jobs.runners.drmaa:DRMAAJobRunner">
+ <!-- Override the $DRMAA_LIBRARY_PATH environment variable -->
+ <param id="drmaa_library_path">/sge/lib/libdrmaa.so</param>
+ </plugin><plugin id="lwr" type="runner" load="galaxy.jobs.runners.lwr:LwrJobRunner"><!-- More information on LWR can be found at https://lwr.readthedocs.org --><!-- Uncomment following line to use libcurl to perform HTTP calls (defaults to urllib) -->
@@ -14,6 +26,7 @@
</plugin><plugin id="cli" type="runner" load="galaxy.jobs.runners.cli:ShellJobRunner" /><plugin id="condor" type="runner" load="galaxy.jobs.runners.condor:CondorJobRunner" />
+ <plugin id="slurm" type="runner" load="galaxy.jobs.runners.slurm:SlurmJobRunner" /></plugins><handlers default="handlers"><!-- Additional job handlers - the id should match the name of a
@@ -21,6 +34,15 @@
--><handler id="handler0" tags="handlers"/><handler id="handler1" tags="handlers"/>
+ <!-- Handlers will load all plugins defined in the <plugins> collection
+ above by default, but can be limited to a subset using <plugin>
+ tags. This is useful for heterogenous environments where the DRMAA
+ plugin would need to be loaded more than once with different
+ configs.
+ -->
+ <handler id="sge_handler">
+ <plugin id="sge"/>
+ </handler><handler id="special_handler0" tags="special_handlers"/><handler id="special_handler1" tags="special_handlers"/><handler id="trackster_handler"/>
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -86,6 +86,7 @@
self.app = app
self.runner_plugins = []
self.handlers = {}
+ self.handler_runner_plugins = {}
self.default_handler_id = None
self.destinations = {}
self.destination_tags = {}
@@ -138,6 +139,10 @@
else:
log.debug("Read definition for handler '%s'" % id)
self.handlers[id] = (id,)
+ for plugin in handler.findall('plugin'):
+ if id not in self.handler_runner_plugins:
+ self.handler_runner_plugins[id] = []
+ self.handler_runner_plugins[id].append( plugin.get('id') )
if handler.get('tags', None) is not None:
for tag in [ x.strip() for x in handler.get('tags').split(',') ]:
if tag in self.handlers:
@@ -420,13 +425,19 @@
"""
return self.destinations.get(id_or_tag, None)
- def get_job_runner_plugins(self):
+ def get_job_runner_plugins(self, handler_id):
"""Load all configured job runner plugins
:returns: list of job runner plugins
"""
rval = {}
- for runner in self.runner_plugins:
+ if handler_id in self.handler_runner_plugins:
+ plugins_to_load = [ rp for rp in self.runner_plugins if rp['id'] in self.handler_runner_plugins[handler_id] ]
+ log.info( "Handler '%s' will load specified runner plugins: %s", handler_id, ', '.join( [ rp['id'] for rp in plugins_to_load ] ) )
+ else:
+ plugins_to_load = self.runner_plugins
+ log.info( "Handler '%s' will load all configured runner plugins", handler_id )
+ for runner in plugins_to_load:
class_names = []
module = None
id = runner['id']
@@ -477,7 +488,7 @@
try:
rval[id] = runner_class( self.app, runner[ 'workers' ], **runner.get( 'kwds', {} ) )
except TypeError:
- log.warning( "Job runner '%s:%s' has not been converted to a new-style runner" % ( module_name, class_name ) )
+ log.exception( "Job runner '%s:%s' has not been converted to a new-style runner or encountered TypeError on load" % ( module_name, class_name ) )
rval[id] = runner_class( self.app )
log.debug( "Loaded job runner '%s:%s' as '%s'" % ( module_name, class_name, id ) )
return rval
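On the Galaxy side, the new <plugin> tags under a <handler> are collected into handler_runner_plugins, and get_job_runner_plugins() filters the global plugin list down to that subset; handlers without an explicit list still load everything. The filtering reduces to a list comprehension; a toy sketch with made-up ids:

runner_plugins = [ { 'id': 'local' }, { 'id': 'drmaa' }, { 'id': 'sge' } ]
handler_runner_plugins = { 'sge_handler': [ 'sge' ] }

def plugins_for_handler( handler_id ):
    # Handlers with an explicit <plugin> list load only that subset;
    # all other handlers load every configured runner plugin.
    if handler_id in handler_runner_plugins:
        return [ rp for rp in runner_plugins if rp[ 'id' ] in handler_runner_plugins[ handler_id ] ]
    return runner_plugins

assert [ rp[ 'id' ] for rp in plugins_for_handler( 'sge_handler' ) ] == [ 'sge' ]
assert len( plugins_for_handler( 'handler0' ) ) == 3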
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -565,7 +565,7 @@
class DefaultJobDispatcher( object ):
def __init__( self, app ):
self.app = app
- self.job_runners = self.app.job_config.get_job_runner_plugins()
+ self.job_runners = self.app.job_config.get_job_runner_plugins( self.app.config.server_name )
# Once plugins are loaded, all job destinations that were created from
# URLs can have their URL params converted to the destination's param
# dict by the plugin.
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -22,13 +22,39 @@
STOP_SIGNAL = object()
+
+class RunnerParams( object ):
+
+ def __init__( self, specs = None, params = None ):
+ self.specs = specs or dict()
+ self.params = params or dict()
+ for name, value in self.params.items():
+ assert name in self.specs, 'Invalid job runner parameter for this plugin: %s' % name
+ if 'map' in self.specs[ name ]:
+ try:
+ self.params[ name ] = self.specs[ name ][ 'map' ]( value )
+ except Exception, e:
+ raise Exception( 'Job runner parameter "%s" value "%s" could not be converted to the correct type: %s' % ( name, value, e ) )
+ if 'valid' in self.specs[ name ]:
+ assert self.specs[ name ][ 'valid' ]( value ), 'Job runner parameter %s failed validation' % name
+
+ def __getattr__( self, name ):
+ return self.params.get( name, self.specs[ name ][ 'default' ] )
+
+
class BaseJobRunner( object ):
- def __init__( self, app, nworkers ):
+ def __init__( self, app, nworkers, **kwargs ):
"""Start the job runner
"""
self.app = app
self.sa_session = app.model.context
self.nworkers = nworkers
+ runner_param_specs = dict( recheck_missing_job_retries = dict( map = int, valid = lambda x: x >= 0, default = 0 ) )
+ if 'runner_param_specs' in kwargs:
+ runner_param_specs.update( kwargs.pop( 'runner_param_specs' ) )
+ if kwargs:
+ log.debug( 'Loading %s with params: %s', self.runner_name, kwargs )
+ self.runner_params = RunnerParams( specs = runner_param_specs, params = kwargs )
def _init_worker_threads(self):
"""Start ``nworkers`` worker threads.
@@ -115,7 +141,7 @@
job_wrapper.cleanup()
return False
elif job_state != model.Job.states.QUEUED:
- log.info( "(%d) Job is in state %s, skipping execution" % ( job_id, job_state ) )
+ log.info( "(%s) Job is in state %s, skipping execution" % ( job_id, job_state ) )
# cleanup may not be safe in all states
return False
@@ -226,6 +252,10 @@
options.update(**kwds)
return job_script(**options)
+ def _complete_terminal_job( self, ajs, **kwargs ):
+ if ajs.job_wrapper.get_state() != model.Job.states.DELETED:
+ self.work_queue.put( ( self.finish_job, ajs ) )
+
class AsynchronousJobState( object ):
"""
@@ -287,8 +317,8 @@
to the correct methods (queue, finish, cleanup) at appropriate times..
"""
- def __init__( self, app, nworkers ):
- super( AsynchronousJobRunner, self ).__init__( app, nworkers )
+ def __init__( self, app, nworkers, **kwargs ):
+ super( AsynchronousJobRunner, self ).__init__( app, nworkers, **kwargs )
# 'watched' and 'queue' are both used to keep track of jobs to watch.
# 'queue' is used to add new watched jobs, and can be called from
# any thread (usually by the 'queue_job' method). 'watched' must only
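RunnerParams validates each plugin parameter against a spec: an optional 'map' callable converts the raw value, an optional 'valid' predicate gates it, and __getattr__ falls back to the spec's default for parameters that were never set. A usage sketch, assuming the RunnerParams class above is importable from galaxy.jobs.runners:

from galaxy.jobs.runners import RunnerParams

specs = dict( internalexception_retries = dict( map = int, valid = lambda x: x >= 0, default = 0 ) )

params = RunnerParams( specs = specs, params = dict( internalexception_retries = '3' ) )
assert params.internalexception_retries == 3    # '3' converted via the 'map' spec

defaults = RunnerParams( specs = specs )
assert defaults.internalexception_retries == 0  # falls back to the spec default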
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -16,27 +16,12 @@
from galaxy.jobs.runners import AsynchronousJobState, AsynchronousJobRunner
eggs.require( "drmaa" )
-# We foolishly named this file the same as the name exported by the drmaa
-# library... 'import drmaa' imports itself.
-drmaa = __import__( "drmaa" )
log = logging.getLogger( __name__ )
__all__ = [ 'DRMAAJobRunner' ]
-drmaa_state = {
- drmaa.JobState.UNDETERMINED: 'process status cannot be determined',
- drmaa.JobState.QUEUED_ACTIVE: 'job is queued and active',
- drmaa.JobState.SYSTEM_ON_HOLD: 'job is queued and in system hold',
- drmaa.JobState.USER_ON_HOLD: 'job is queued and in user hold',
- drmaa.JobState.USER_SYSTEM_ON_HOLD: 'job is queued and in user and system hold',
- drmaa.JobState.RUNNING: 'job is running',
- drmaa.JobState.SYSTEM_SUSPENDED: 'job is system suspended',
- drmaa.JobState.USER_SUSPENDED: 'job is user suspended',
- drmaa.JobState.DONE: 'job finished normally',
- drmaa.JobState.FAILED: 'job finished, but failed',
-}
-
+drmaa = None
DRMAA_jobTemplate_attributes = [ 'args', 'remoteCommand', 'outputPath', 'errorPath', 'nativeSpecification',
'jobName', 'email', 'project' ]
@@ -48,8 +33,50 @@
"""
runner_name = "DRMAARunner"
- def __init__( self, app, nworkers ):
+ def __init__( self, app, nworkers, **kwargs ):
"""Start the job runner"""
+
+ global drmaa
+
+ runner_param_specs = dict(
+ drmaa_library_path = dict( map = str, default = os.environ.get( 'DRMAA_LIBRARY_PATH', None ) ),
+ invalidjobexception_state = dict( map = str, valid = lambda x: x in ( model.Job.states.OK, model.Job.states.ERROR ), default = model.Job.states.OK ),
+ invalidjobexception_retries = dict( map = int, valid = lambda x: x >= 0, default = 0 ),
+ internalexception_state = dict( map = str, valid = lambda x: x in ( model.Job.states.OK, model.Job.states.ERROR ), default = model.Job.states.OK ),
+ internalexception_retries = dict( map = int, valid = lambda x: x >= 0, default = 0 ) )
+
+ if 'runner_param_specs' not in kwargs:
+ kwargs[ 'runner_param_specs' ] = dict()
+ kwargs[ 'runner_param_specs' ].update( runner_param_specs )
+
+ super( DRMAAJobRunner, self ).__init__( app, nworkers, **kwargs )
+
+ # This allows multiple drmaa runners (although only one per handler) in the same job config file
+ if 'drmaa_library_path' in kwargs:
+ log.info( 'Overriding DRMAA_LIBRARY_PATH due to runner plugin parameter: %s', self.runner_params.drmaa_library_path )
+ os.environ['DRMAA_LIBRARY_PATH'] = self.runner_params.drmaa_library_path
+
+ # We foolishly named this file the same as the name exported by the drmaa
+ # library... 'import drmaa' imports itself.
+ drmaa = __import__( "drmaa" )
+
+ # Subclasses may need access to state constants
+ self.drmaa_job_states = drmaa.JobState
+
+ # Descriptive state strings pulled from the drmaa lib itself
+ self.drmaa_job_state_strings = {
+ drmaa.JobState.UNDETERMINED: 'process status cannot be determined',
+ drmaa.JobState.QUEUED_ACTIVE: 'job is queued and active',
+ drmaa.JobState.SYSTEM_ON_HOLD: 'job is queued and in system hold',
+ drmaa.JobState.USER_ON_HOLD: 'job is queued and in user hold',
+ drmaa.JobState.USER_SYSTEM_ON_HOLD: 'job is queued and in user and system hold',
+ drmaa.JobState.RUNNING: 'job is running',
+ drmaa.JobState.SYSTEM_SUSPENDED: 'job is system suspended',
+ drmaa.JobState.USER_SUSPENDED: 'job is user suspended',
+ drmaa.JobState.DONE: 'job finished normally',
+ drmaa.JobState.FAILED: 'job finished, but failed',
+ }
+
self.ds = drmaa.Session()
self.ds.initialize()
@@ -58,7 +85,6 @@
self.external_killJob_script = app.config.drmaa_external_killjob_script
self.userid = None
- super( DRMAAJobRunner, self ).__init__( app, nworkers )
self._init_monitor_thread()
self._init_worker_threads()
@@ -137,8 +163,10 @@
job_wrapper.cleanup()
return
- log.debug( "(%s) submitting file %s" % ( galaxy_id_tag, ajs.job_file ) )
- log.debug( "(%s) command is: %s" % ( galaxy_id_tag, command_line ) )
+ log.debug( "(%s) submitting file %s", galaxy_id_tag, ajs.job_file )
+ log.debug( "(%s) command is: %s", galaxy_id_tag, command_line )
+ if native_spec:
+ log.debug( "(%s) native specification is: %s", galaxy_id_tag, native_spec )
# runJob will raise if there's a submit problem
if self.external_runJob_script is None:
@@ -175,6 +203,20 @@
# Add to our 'queue' of jobs to monitor
self.monitor_queue.put( ajs )
+ def _complete_terminal_job( self, ajs, drmaa_state, **kwargs ):
+ """
+ Handle a job upon its termination in the DRM. This method is meant to
+ be overridden by subclasses to improve post-mortem and reporting of
+ failures.
+ """
+ if drmaa_state == drmaa.JobState.FAILED:
+ if ajs.job_wrapper.get_state() != model.Job.states.DELETED:
+ ajs.stop_job = False
+ ajs.fail_message = "The cluster DRM system terminated this job"
+ self.work_queue.put( ( self.fail_job, ajs ) )
+ elif drmaa_state == drmaa.JobState.DONE:
+ super( DRMAAJobRunner, self )._complete_terminal_job( ajs )
+
def check_watched_items( self ):
"""
Called by the monitor thread to look at each watched job and deal
@@ -188,16 +230,27 @@
try:
assert external_job_id not in ( None, 'None' ), '(%s/%s) Invalid job id' % ( galaxy_id_tag, external_job_id )
state = self.ds.jobStatus( external_job_id )
- # InternalException was reported to be necessary on some DRMs, but
- # this could cause failures to be detected as completion! Please
- # report if you experience problems with this.
- except ( drmaa.InvalidJobException, drmaa.InternalException ), e:
- # we should only get here if an orphaned job was put into the queue at app startup
- log.info( "(%s/%s) job left DRM queue with following message: %s" % ( galaxy_id_tag, external_job_id, e ) )
- self.work_queue.put( ( self.finish_job, ajs ) )
+ except ( drmaa.InternalException, drmaa.InvalidJobException ), e:
+ ecn = e.__class__.__name__
+ retry_param = ecn.lower() + '_retries'
+ state_param = ecn.lower() + '_state'
+ retries = getattr( ajs, retry_param, 0 )
+ if self.runner_params[ retry_param ] > 0:
+ if retries < self.runner_params[ retry_param ]:
+ # will retry check on next iteration
+ setattr( ajs, retry_param, retries + 1 )
+ continue
+ if self.runner_params[ state_param ] == model.Job.states.OK:
+ log.info( "(%s/%s) job left DRM queue with following message: %s", galaxy_id_tag, external_job_id, e )
+ self.work_queue.put( ( self.finish_job, ajs ) )
+ elif self.runner_params[ state_param ] == model.Job.states.ERROR:
+ log.info( "(%s/%s) job check resulted in %s after %s tries: %s", galaxy_id_tag, external_job_id, ecn, retries, e )
+ self.work_queue.put( ( self.fail_job, ajs ) )
+ else:
+ raise Exception( "%s is set to an invalid value (%s), this should not be possible. See galaxy.jobs.drmaa.__init__()" % ( state_param, self.runner_params[ state_param ] ) )
continue
except drmaa.DrmCommunicationException, e:
- log.warning( "(%s/%s) unable to communicate with DRM: %s" % ( galaxy_id_tag, external_job_id, e ))
+ log.warning( "(%s/%s) unable to communicate with DRM: %s", galaxy_id_tag, external_job_id, e )
new_watched.append( ajs )
continue
except Exception, e:
@@ -208,19 +261,12 @@
self.work_queue.put( ( self.fail_job, ajs ) )
continue
if state != old_state:
- log.debug( "(%s/%s) state change: %s" % ( galaxy_id_tag, external_job_id, drmaa_state[state] ) )
+ log.debug( "(%s/%s) state change: %s" % ( galaxy_id_tag, external_job_id, self.drmaa_job_state_strings[state] ) )
if state == drmaa.JobState.RUNNING and not ajs.running:
ajs.running = True
ajs.job_wrapper.change_state( model.Job.states.RUNNING )
- if state == drmaa.JobState.FAILED:
- if ajs.job_wrapper.get_state() != model.Job.states.DELETED:
- ajs.stop_job = False
- ajs.fail_message = "The cluster DRM system terminated this job"
- self.work_queue.put( ( self.fail_job, ajs ) )
- continue
- if state == drmaa.JobState.DONE:
- if ajs.job_wrapper.get_state() != model.Job.states.DELETED:
- self.work_queue.put( ( self.finish_job, ajs ) )
+ if state in ( drmaa.JobState.FAILED, drmaa.JobState.DONE ):
+ self._complete_terminal_job( ajs, drmaa_state = state )
continue
ajs.old_state = state
new_watched.append( ajs )
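
The retry handling above keys a pair of runner parameters off the exception class name, so drmaa.InvalidJobException is governed by invalidjobexception_retries / invalidjobexception_state and drmaa.InternalException by internalexception_retries / internalexception_state. Below is a minimal standalone sketch of that bookkeeping; the runner_params dict literal and the 'ok' / 'error' markers are illustrative stand-ins for the runner's real configuration plumbing and for model.Job.states, not part of the commit:

    # Sketch of the per-exception retry bookkeeping in check_watched_items().
    class InvalidJobException( Exception ):  # stand-in for drmaa.InvalidJobException
        pass

    class AsyncJobState( object ):  # stand-in for the watched job state object
        pass

    runner_params = {                          # assumed configuration values
        'invalidjobexception_retries': 2,
        'invalidjobexception_state': 'ok',     # stand-in for model.Job.states.OK
        'internalexception_retries': 0,
        'internalexception_state': 'ok',
    }

    def handle_check_exception( ajs, e ):
        ecn = e.__class__.__name__
        retry_param = ecn.lower() + '_retries'
        state_param = ecn.lower() + '_state'
        retries = getattr( ajs, retry_param, 0 )
        if runner_params[ retry_param ] > 0 and retries < runner_params[ retry_param ]:
            # leave the job in the watch list and check again on the next pass
            setattr( ajs, retry_param, retries + 1 )
            return 'retry'
        return runner_params[ state_param ]  # 'ok' -> finish_job, 'error' -> fail_job

    ajs = AsyncJobState()
    print( handle_check_exception( ajs, InvalidJobException() ) )  # retry
    print( handle_check_exception( ajs, InvalidJobException() ) )  # retry
    print( handle_check_exception( ajs, InvalidJobException() ) )  # ok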
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/jobs/runners/slurm.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/slurm.py
@@ -0,0 +1,57 @@
+"""
+SLURM job control via the DRMAA API.
+"""
+
+import time
+import logging
+import subprocess
+
+from galaxy import model
+from galaxy.jobs.runners.drmaa import DRMAAJobRunner
+
+log = logging.getLogger( __name__ )
+
+__all__ = [ 'SlurmJobRunner' ]
+
+
+class SlurmJobRunner( DRMAAJobRunner ):
+ runner_name = "SlurmRunner"
+
+ def _complete_terminal_job( self, ajs, drmaa_state, **kwargs ):
+ def __get_jobinfo():
+ scontrol_out = subprocess.check_output( ( 'scontrol', '-o', 'show', 'job', ajs.job_id ) )
+ return dict( [ out_param.split( '=', 1 ) for out_param in scontrol_out.split() ] )
+ if drmaa_state == self.drmaa_job_states.FAILED:
+ try:
+ job_info = __get_jobinfo()
+ sleep = 1
+ while job_info['JobState'] == 'COMPLETING':
+ log.debug( '(%s/%s) Waiting %s seconds for failed job to exit COMPLETING state for post-mortem', ajs.job_wrapper.get_id_tag(), ajs.job_id, sleep )
+ time.sleep( sleep )
+ sleep *= 2
+ if sleep > 64:
+ ajs.fail_message = "This job failed and the system timed out while trying to determine the cause of the failure."
+ break
+ job_info = __get_jobinfo()
+ if job_info['JobState'] == 'TIMEOUT':
+ ajs.fail_message = "This job was terminated because it ran longer than the maximum allowed job run time."
+ elif job_info['JobState'] == 'NODE_FAIL':
+ log.warning( '(%s/%s) Job failed due to node failure, attempting resubmission', ajs.job_wrapper.get_id_tag(), ajs.job_id )
+ ajs.job_wrapper.change_state( model.Job.states.QUEUED, info = 'Job was resubmitted due to node failure' )
+ try:
+ self.queue_job( ajs.job_wrapper )
+ return
+ except Exception:
+ ajs.fail_message = "This job failed due to a cluster node failure, and an attempt to resubmit the job failed."
+ elif job_info['JobState'] == 'CANCELLED':
+ ajs.fail_message = "This job failed because it was cancelled by an administrator."
+ else:
+ ajs.fail_message = "This job failed for reasons that could not be determined."
+ ajs.fail_message += '\nPlease click the bug icon to report this problem if you need help.'
+ ajs.stop_job = False
+ self.work_queue.put( ( self.fail_job, ajs ) )
+ except Exception, e:
+ log.exception( '(%s/%s) Unable to inspect failed slurm job using scontrol, job will be unconditionally failed: %s', ajs.job_wrapper.get_id_tag(), ajs.job_id, e )
+ super( SlurmJobRunner, self )._complete_terminal_job( ajs, drmaa_state = drmaa_state )
+ elif drmaa_state == self.drmaa_job_states.DONE:
+ super( SlurmJobRunner, self )._complete_terminal_job( ajs, drmaa_state = drmaa_state )
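
The SLURM post-mortem above shells out to 'scontrol -o show job <id>' and, for a failed job still in the COMPLETING state, polls with an exponential backoff of 1, 2, 4 ... 64 seconds (about two minutes in total) before giving up. The -o flag makes scontrol print a single line of space-separated key=value fields, which __get_jobinfo() splits into a dict. A quick sketch of that parsing, using made-up output rather than anything captured from a real cluster:

    # Sketch of the parsing in SlurmJobRunner.__get_jobinfo(); the sample
    # line is illustrative only.
    sample_scontrol_out = 'JobId=123 JobName=g1_tophat JobState=TIMEOUT ExitCode=0:1'
    job_info = dict( [ field.split( '=', 1 ) for field in sample_scontrol_out.split() ] )
    print( job_info[ 'JobState' ] )  # TIMEOUT

Note that values containing spaces would defeat this split, but the field the runner inspects (JobState) is always a single token, so the simple approach holds up here.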
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -18,6 +18,7 @@
import socket
import time
from string import Template
+from itertools import ifilter
import galaxy.datatypes
import galaxy.datatypes.registry
@@ -42,6 +43,11 @@
# Default Value Required for unit tests
datatypes_registry.load_datatypes()
+# When constructing filters with in for a fixed set of ids, maximum
+# number of items to place in the IN statement. Different databases
+# are going to have different limits so it is likely best to not let
+# this be unlimited - filter in Python if over this limit.
+MAX_IN_FILTER_LENGTH = 100
class NoConverterException(Exception):
def __init__(self, value):
@@ -892,6 +898,33 @@
rval = galaxy.datatypes.data.nice_size( rval )
return rval
+ def contents_iter( self, **kwds ):
+ """
+ Fetch filtered list of contents of history.
+ """
+ python_filter = None
+ db_session = object_session( self )
+ assert db_session is not None
+ query = db_session.query( HistoryDatasetAssociation ).filter( HistoryDatasetAssociation.table.c.history_id == self.id )
+ query = query.order_by( HistoryDatasetAssociation.table.c.hid.asc() )
+ deleted = galaxy.util.string_as_bool_or_none( kwds.get( 'deleted', None ) )
+ if deleted is not None:
+ query = query.filter( HistoryDatasetAssociation.deleted == deleted )
+ visible = galaxy.util.string_as_bool_or_none( kwds.get( 'visible', None ) )
+ if visible is not None:
+ query = query.filter( HistoryDatasetAssociation.visible == visible )
+ if 'ids' in kwds:
+ ids = kwds['ids']
+ max_in_filter_length = kwds.get('max_in_filter_length', MAX_IN_FILTER_LENGTH)
+ if len(ids) < max_in_filter_length:
+ query = query.filter( HistoryDatasetAssociation.id.in_(ids) )
+ else:
+ python_filter = lambda hda: hda.id in ids
+ if python_filter:
+ return ifilter(python_filter, query)
+ else:
+ return query
+
def copy_tags_from(self,target_user,source_history):
for src_shta in source_history.tags:
new_shta = src_shta.copy()
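
MAX_IN_FILTER_LENGTH caps how many ids contents_iter() will place in a single SQL IN clause; different databases bound the size of such expressions, so id sets at or over the limit are filtered in Python instead via itertools.ifilter. A standalone sketch of the switch, with FakeQuery as a hypothetical stand-in for the SQLAlchemy query:

    # Sketch of the IN-clause / Python-filter switch in History.contents_iter().
    from itertools import ifilter  # Python 2, as in the diff

    MAX_IN_FILTER_LENGTH = 100

    class FakeQuery( object ):  # hypothetical stand-in for the SQLAlchemy query
        def __init__( self, hdas ):
            self.hdas = hdas
        def filter_id_in( self, ids ):  # stands in for .filter( HDA.id.in_( ids ) )
            wanted = set( ids )
            return FakeQuery( [ hda for hda in self.hdas if hda.id in wanted ] )
        def __iter__( self ):
            return iter( self.hdas )

    def contents_iter( query, ids, max_in_filter_length=MAX_IN_FILTER_LENGTH ):
        if len( ids ) < max_in_filter_length:
            return query.filter_id_in( ids )  # let the database do the filtering
        return ifilter( lambda hda: hda.id in ids, query )  # too many ids for IN

    class HDA( object ):
        def __init__( self, id ):
            self.id = id

    hdas = FakeQuery( [ HDA( i ) for i in range( 5 ) ] )
    print( [ hda.id for hda in contents_iter( hdas, [ 1, 3 ] ) ] )  # [1, 3]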
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -51,47 +51,28 @@
else:
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=True )
- # if ids, return _FULL_ data (as show) for each id passed
+ contents_kwds = {}
if ids:
- ids = ids.split( ',' )
- for index, hda in enumerate( history.datasets ):
- encoded_hda_id = trans.security.encode_id( hda.id )
- if encoded_hda_id in ids:
- #TODO: share code with show
- rval.append( self._detailed_hda_dict( trans, hda ) )
-
- # if no ids passed, return a _SUMMARY_ of _all_ datasets in the history
+ ids = map( lambda id: trans.security.decode_id( id ), ids.split( ',' ) )
+ contents_kwds[ 'ids' ] = ids
+ # If explicit ids are given, always use detailed results.
+ details = 'all'
else:
+ contents_kwds[ 'deleted' ] = kwd.get( 'deleted', None )
+ contents_kwds[ 'visible' ] = kwd.get( 'visible', None )
# details param allows a mixed set of summary and detailed hdas
#TODO: this is getting convoluted due to backwards compat
details = kwd.get( 'details', None ) or []
if details and details != 'all':
details = util.listify( details )
- # by default return all datasets - even if deleted or hidden (defaulting the next switches to None)
- # if specified return those datasets that match the setting
- # backwards compat
- return_deleted = util.string_as_bool_or_none( kwd.get( 'deleted', None ) )
- return_visible = util.string_as_bool_or_none( kwd.get( 'visible', None ) )
-
- for hda in history.datasets:
- # if either return_ setting has been requested (!= None), skip hdas that don't match the request
- if return_deleted is not None:
- if( ( return_deleted and not hda.deleted )
- or ( not return_deleted and hda.deleted ) ):
- continue
- if return_visible is not None:
- if( ( return_visible and not hda.visible )
- or ( not return_visible and hda.visible ) ):
- continue
-
- encoded_hda_id = trans.security.encode_id( hda.id )
- if( ( encoded_hda_id in details )
- or ( details == 'all' ) ):
- rval.append( self._detailed_hda_dict( trans, hda ) )
- else:
- rval.append( self._summary_hda_dict( trans, history_id, hda ) )
-
+ for hda in history.contents_iter( **contents_kwds ):
+ encoded_hda_id = trans.security.encode_id( hda.id )
+ detailed = details == 'all' or ( encoded_hda_id in details )
+ if detailed:
+ rval.append( self._detailed_hda_dict( trans, hda ) )
+ else:
+ rval.append( self._summary_hda_dict( trans, history_id, hda ) )
except Exception, e:
# for errors that are not specific to one hda (history lookup or summary list)
rval = "Error in history API at listing contents: " + str( e )
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -19,11 +19,6 @@
log = logging.getLogger( __name__ )
-def default_tool_shed_repository_value_mapper( trans, tool_shed_repository ):
- value_mapper={ 'id' : trans.security.encode_id( tool_shed_repository.id ),
- 'error_message' : tool_shed_repository.error_message or '' }
- return value_mapper
-
def get_message_for_no_shed_tool_config():
# This Galaxy instance is not configured with a shed-related tool panel configuration file.
message = 'The tool_config_file setting in universe_wsgi.ini must include at least one shed tool configuration file name with a <toolbox> '
@@ -48,8 +43,8 @@
:param id: the encoded id of the ToolShedRepository object
"""
# Example URL: http://localhost:8763/api/tool_shed_repositories/f2db41e1fa331b3e/exported_…
- # Since exported workflows are dictionaries with very few attributes that differentiate them from each other, we'll build the
- # list based on the following dictionary of those few attributes.
+ # Since exported workflows are dictionaries with very few attributes that differentiate them from each
+ # other, we'll build the list based on the following dictionary of those few attributes.
exported_workflows = []
repository = suc.get_tool_shed_repository_by_id( trans, id )
metadata = repository.metadata
@@ -58,17 +53,23 @@
else:
exported_workflow_tups = []
for index, exported_workflow_tup in enumerate( exported_workflow_tups ):
- # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of relative_path is the location
- # on disk (relative to the root of the installed repository) where the exported_workflow_dict file (.ga file) is located.
+ # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of
+ # relative_path is the location on disk (relative to the root of the installed repository) where the
+ # exported_workflow_dict file (.ga file) is located.
exported_workflow_dict = exported_workflow_tup[ 1 ]
annotation = exported_workflow_dict.get( 'annotation', '' )
format_version = exported_workflow_dict.get( 'format-version', '' )
workflow_name = exported_workflow_dict.get( 'name', '' )
- # Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's location (i.e., index) in the list.
+ # Since we don't have an in-memory object with an id, we'll identify the exported workflow via its
+ # location (i.e., index) in the list.
display_dict = dict( index=index, annotation=annotation, format_version=format_version, workflow_name=workflow_name )
exported_workflows.append( display_dict )
return exported_workflows
+ def __get_value_mapper( self, trans ):
+ value_mapper = { 'id' : trans.security.encode_id }
+ return value_mapper
+
@web.expose_api
def import_workflow( self, trans, payload, **kwd ):
"""
@@ -96,13 +97,11 @@
# Since we don't have an in-memory object with an id, we'll identify the exported workflow via its location (i.e., index) in the list.
exported_workflow = exported_workflows[ int( index ) ]
workflow_name = exported_workflow[ 'workflow_name' ]
- workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name )
+ workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name )
if status == 'error':
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
- else:
- return workflow.to_dict( view='element' )
+ log.debug( error_message )
+ return {}
+ return workflow.to_dict( view='element' )
@web.expose_api
def import_workflows( self, trans, **kwd ):
@@ -125,11 +124,9 @@
imported_workflow_dicts = []
for exported_workflow_dict in exported_workflows:
workflow_name = exported_workflow_dict[ 'workflow_name' ]
- workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name )
+ workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name )
if status == 'error':
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ log.debug( error_message )
else:
imported_workflow_dicts.append( workflow.to_dict( view='element' ) )
return imported_workflow_dicts
@@ -142,22 +139,15 @@
"""
# Example URL: http://localhost:8763/api/tool_shed_repositories
tool_shed_repository_dicts = []
- try:
- query = trans.install_model.context.query( trans.app.install_model.ToolShedRepository ) \
- .order_by( trans.app.install_model.ToolShedRepository.table.c.name ) \
- .all()
- for tool_shed_repository in query:
- tool_shed_repository_dict = tool_shed_repository.to_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
- tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
- action='show',
- id=trans.security.encode_id( tool_shed_repository.id ) )
- tool_shed_repository_dicts.append( tool_shed_repository_dict )
- return tool_shed_repository_dicts
- except Exception, e:
- message = "Error in the tool_shed_repositories API in index: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ for tool_shed_repository in trans.install_model.context.query( trans.app.install_model.ToolShedRepository ) \
+ .order_by( trans.app.install_model.ToolShedRepository.table.c.name ):
+ tool_shed_repository_dict = \
+ tool_shed_repository.to_dict( value_mapper=self.__get_value_mapper( trans ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ tool_shed_repository_dicts.append( tool_shed_repository_dict )
+ return tool_shed_repository_dicts
@web.expose_api
def install_repository_revision( self, trans, payload, **kwd ):
@@ -208,8 +198,7 @@
# Make sure this Galaxy instance is configured with a shed-related tool panel configuration file.
if not suc.have_shed_tool_conf_for_install( trans ):
message = get_message_for_no_shed_tool_config()
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
# Make sure the current user's API key proves he is an admin user in this Galaxy instance.
if not trans.user_is_admin():
@@ -225,18 +214,20 @@
except Exception, e:
message = "Error attempting to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \
( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
if raw_text:
+ # If successful, the response from get_repository_revision_install_info will be three
+ # dictionaries: one defining the Repository, one defining the Repository revision
+ # (RepositoryMetadata), and one including the additional information required to
+ # install the repository.
items = json.from_json_string( raw_text )
repository_revision_dict = items[ 1 ]
repo_info_dict = items[ 2 ]
else:
message = "Unable to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \
( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
repo_info_dicts = [ repo_info_dict ]
# Make sure the tool shed returned everything we need for installing the repository.
@@ -345,7 +336,7 @@
tool_path,
install_tool_dependencies,
reinstalling=False )
- tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
+ tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans ) )
tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
action='show',
id=trans.security.encode_id( tool_shed_repository.id ) )
@@ -394,8 +385,7 @@
if not suc.have_shed_tool_conf_for_install( trans ):
# This Galaxy instance is not configured with a shed-related tool panel configuration file.
message = get_message_for_no_shed_tool_config()
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
if not trans.user_is_admin():
raise HTTPForbidden( detail='You are not authorized to install a tool shed repository into this Galaxy instance.' )
@@ -410,8 +400,7 @@
len( changeset_revisions ) != num_specified_repositories:
message = 'Error in tool_shed_repositories API in install_repository_revisions: the received parameters must be ordered '
message += 'lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.'
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
# Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain information
# about each of the repositories being installed.
@@ -482,7 +471,7 @@
repair_dict = repository_util.repair_tool_shed_repository( trans,
repository,
encoding_util.tool_shed_encode( repo_info_dict ) )
- repository_dict = repository.to_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, repository ) )
+ repository_dict = repository.to_dict( value_mapper=self.__get_value_mapper( trans ) )
repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
action='show',
id=trans.security.encode_id( repository.id ) )
@@ -502,39 +491,39 @@
:param key: the API key of the Galaxy admin user.
"""
- try:
- start_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results = dict( start_time=start_time,
- successful_count=0,
- unsuccessful_count=0,
- repository_status=[] )
- # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
- if not trans.user_is_admin():
- raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' )
- query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=False )
- # Now reset metadata on all remaining repositories.
- for repository in query:
- repository_id = trans.security.encode_id( repository.id )
- try:
- invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans, repository_id )
- if invalid_file_tups:
- message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
- results[ 'unsuccessful_count' ] += 1
- else:
- message = "Successfully reset metadata on repository %s owned by %s" % ( str( repository.name ), str( repository.owner ) )
- results[ 'successful_count' ] += 1
- except Exception, e:
- message = "Error resetting metadata on repository %s owned by %s: %s" % ( str( repository.name ), str( repository.owner ), str( e ) )
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results = dict( start_time=start_time,
+ successful_count=0,
+ unsuccessful_count=0,
+ repository_status=[] )
+ # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
+ if not trans.user_is_admin():
+ raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' )
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=False )
+ # Now reset metadata on all remaining repositories.
+ for repository in query:
+ repository_id = trans.security.encode_id( repository.id )
+ try:
+ invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans, repository_id )
+ if invalid_file_tups:
+ message = tool_util.generate_message_for_invalid_tools( trans,
+ invalid_file_tups,
+ repository,
+ None,
+ as_html=False )
results[ 'unsuccessful_count' ] += 1
- results[ 'repository_status' ].append( message )
- stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results[ 'stop_time' ] = stop_time
- return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
- except Exception, e:
- message = "Error in the Galaxy tool_shed_repositories API in reset_metadata_on_installed_repositories: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ else:
+ message = "Successfully reset metadata on repository %s owned by %s" % \
+ ( str( repository.name ), str( repository.owner ) )
+ results[ 'successful_count' ] += 1
+ except Exception, e:
+ message = "Error resetting metadata on repository %s owned by %s: %s" % \
+ ( str( repository.name ), str( repository.owner ), str( e ) )
+ results[ 'unsuccessful_count' ] += 1
+ results[ 'repository_status' ].append( message )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
@web.expose_api
def show( self, trans, id, **kwd ):
@@ -545,15 +534,12 @@
:param id: the encoded id of the ToolShedRepository object
"""
# Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e
- try:
- tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id )
- tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
- tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
- action='show',
- id=trans.security.encode_id( tool_shed_repository.id ) )
- return tool_shed_repository_dict
- except Exception, e:
- message = "Error in tool_shed_repositories API in index: " + str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id )
+ if tool_shed_repository is None:
+ log.debug( "Unable to locate tool_shed_repository record for id %s." % ( str( id ) ) )
+ return {}
+ tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ return tool_shed_repository_dict
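
The repeated replacement of default_tool_shed_repository_value_mapper with a private __get_value_mapper() in this file relies on the fact that a value_mapper maps a column name to a callable applied to that column's raw value while the dictionary is being built (the comment removed from repositories.py below notes that Dictifiable.to_dict() requires a function rather than a mapped value). A miniature illustration of how such a mapper is consumed; this is not Galaxy's to_dict() implementation, and fake_encode_id merely stands in for trans.security.encode_id:

    # Miniature illustration of value_mapper consumption during to_dict().
    def to_dict( obj, keys, value_mapper=None ):
        value_mapper = value_mapper or {}
        rval = {}
        for key in keys:
            value = getattr( obj, key, None )
            if key in value_mapper:
                value = value_mapper[ key ]( value )  # e.g. trans.security.encode_id
            rval[ key ] = value
        return rval

    class Repo( object ):
        id = 42
        name = 'filter_tool'

    fake_encode_id = lambda value: 'hashed-%s' % value  # stand-in for trans.security.encode_id
    print( to_dict( Repo(), [ 'id', 'name' ], { 'id': fake_encode_id } ) )
    # {'id': 'hashed-42', 'name': 'filter_tool'}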
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -36,22 +36,27 @@
:param name: the name of the Repository
:param owner: the owner of the Repository
- Returns the ordered list of changeset revision hash strings that are associated with installable revisions. As in the changelog, the
- list is ordered oldest to newest.
+ Returns the ordered list of changeset revision hash strings that are associated with installable revisions.
+ As in the changelog, the list is ordered oldest to newest.
"""
# Example URL: http://localhost:9009/api/repositories/get_installable_revisions?name=add_c…
- try:
+ if name and owner:
# Get the repository information.
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository is None:
+ error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
+ error_message += "cannot locate repository %s owned by %s." % ( str( name ), str( owner ) )
+ log.debug( error_message )
+ return []
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
ordered_installable_revisions = suc.get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True )
return ordered_installable_revisions
- except Exception, e:
- message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ else:
+ error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
+ error_message += "invalid name %s or owner %s received." % ( str( name ), str( owner ) )
+ log.debug( error_message )
+ return []
@web.expose_api_anonymous
def get_repository_revision_install_info( self, trans, name, owner, changeset_revision, **kwd ):
@@ -106,49 +111,65 @@
]
}
"""
- repository_value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
- # Example URL: http://localhost:9009/api/repositories/get_repository_revision_install_info…
- try:
+ # Example URL:
+ # http://<xyz>/api/repositories/get_repository_revision_install_info?name=<n>&owner=<o>&changeset_revision=<cr>
+ if name and owner and changeset_revision:
# Get the repository information.
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository is None:
+ log.debug( 'Cannot locate repository %s owned by %s' % ( str( name ), str( owner ) ) )
+ return {}, {}, {}
encoded_repository_id = trans.security.encode_id( repository.id )
- repository_dict = repository.to_dict( view='element', value_mapper=repository_value_mapper )
+ repository_dict = repository.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
repository_dict[ 'url' ] = web.url_for( controller='repositories',
action='show',
id=encoded_repository_id )
# Get the repository_metadata information.
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ encoded_repository_id,
+ changeset_revision )
if not repository_metadata:
- # The changeset_revision column in the repository_metadata table has been updated with a new value value, so find the
- # changeset_revision to which we need to update.
+ # The changeset_revision column in the repository_metadata table has been updated with a new
+ # value, so find the changeset_revision to which we need to update.
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
new_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, new_changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ encoded_repository_id,
+ new_changeset_revision )
changeset_revision = new_changeset_revision
if repository_metadata:
encoded_repository_metadata_id = trans.security.encode_id( repository_metadata.id )
repository_metadata_dict = repository_metadata.to_dict( view='collection',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
+ value_mapper=self.__get_value_mapper( trans ) )
repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
action='show',
id=encoded_repository_metadata_id )
# Get the repo_info_dict for installing the repository.
- repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, \
- has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+ repo_info_dict, \
+ includes_tools, \
+ includes_tool_dependencies, \
+ includes_tools_for_display_in_tool_panel, \
+ has_repository_dependencies, \
+ has_repository_dependencies_only_if_compiling_contained_td = \
repository_util.get_repo_info_dict( trans, encoded_repository_id, changeset_revision )
return repository_dict, repository_metadata_dict, repo_info_dict
else:
- message = "Unable to locate repository_metadata record for repository id %d and changeset_revision %s" % ( repository.id, changeset_revision )
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( "Unable to locate repository_metadata record for repository id %s and changeset_revision %s" % \
+ ( str( repository.id ), str( changeset_revision ) ) )
return repository_dict, {}, {}
- except Exception, e:
- message = "Error in the Tool Shed repositories API in get_repository_revision_install_info: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ else:
+ debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: "
+ debug_msg += "Invalid name %s or owner %s or changeset_revision %s received." % \
+ ( str( name ), str( owner ), str( changeset_revision ) )
+ log.debug( debug_msg )
+ return {}, {}, {}
+
+ def __get_value_mapper( self, trans ):
+ value_mapper = { 'id' : trans.security.encode_id,
+ 'repository_id' : trans.security.encode_id }
+ return value_mapper
@web.expose_api
def import_capsule( self, trans, payload, **kwd ):
@@ -177,29 +198,27 @@
uploaded_file=None,
capsule_file_name=None )
if os.path.getsize( os.path.abspath( capsule_file_name ) ) == 0:
- message = 'Your capsule file is empty.'
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ log.debug( 'Your capsule file %s is empty.' % str( capsule_file_name ) )
+ return {}
try:
# Open for reading with transparent compression.
tar_archive = tarfile.open( capsule_file_path, 'r:*' )
except tarfile.ReadError, e:
- message = 'Error opening file %s: %s' % ( str( capsule_file_name ), str( e ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ log.debug( 'Error opening capsule file %s: %s' % ( str( capsule_file_name ), str( e ) ) )
+ return {}
capsule_dict[ 'tar_archive' ] = tar_archive
capsule_dict[ 'capsule_file_name' ] = capsule_file_name
capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict )
capsule_dict = import_util.validate_capsule( trans, **capsule_dict )
status = capsule_dict.get( 'status', 'error' )
if status == 'error':
- message = 'The capsule contents are invalid and cannpt be imported:<br/>%s' % str( capsule_dict.get( 'error_message', '' ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ log.debug( 'The capsule contents are invalid and cannot be imported:<br/>%s' % \
+ str( capsule_dict.get( 'error_message', '' ) ) )
+ return {}
encoded_file_path = capsule_dict.get( 'encoded_file_path', None )
+ if encoded_file_path is None:
+ log.debug( 'The capsule_dict %s is missing the required encoded_file_path entry.' % str( capsule_dict ) )
+ return {}
file_path = encoding_util.tool_shed_decode( encoded_file_path )
export_info_file_path = os.path.join( file_path, 'export_info.xml' )
export_info_dict = import_util.get_export_info_dict( export_info_file_path )
@@ -216,12 +235,14 @@
# Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name
repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path
- import_results_tups = repository_maintenance_util.create_repository_and_import_archive( trans,
- repository_status_info_dict,
- import_results_tups )
+ import_results_tups = \
+ repository_maintenance_util.create_repository_and_import_archive( trans,
+ repository_status_info_dict,
+ import_results_tups )
import_util.check_status_and_reset_downloadable( trans, import_results_tups )
suc.remove_dir( file_path )
- # NOTE: the order of installation is defined in import_results_tups, but order will be lost when transferred to return_dict.
+ # NOTE: the order of installation is defined in import_results_tups, but order will be lost
+ # when transferred to return_dict.
return_dict = {}
for import_results_tup in import_results_tups:
ok, name_owner, message = import_results_tup
@@ -237,28 +258,19 @@
GET /api/repositories
Displays a collection (list) of repositories.
"""
- value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
# Example URL: http://localhost:9009/api/repositories
repository_dicts = []
- deleted = util.string_as_bool( deleted )
- try:
- query = trans.sa_session.query( trans.app.model.Repository ) \
- .filter( trans.app.model.Repository.table.c.deleted == deleted ) \
- .order_by( trans.app.model.Repository.table.c.name ) \
- .all()
- for repository in query:
- repository_dict = repository.to_dict( view='collection', value_mapper=value_mapper )
- repository_dict[ 'url' ] = web.url_for( controller='repositories',
- action='show',
- id=trans.security.encode_id( repository.id ) )
- repository_dicts.append( repository_dict )
- return repository_dicts
- except Exception, e:
- message = "Error in the Tool Shed repositories API in index: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ deleted = util.asbool( deleted )
+ for repository in trans.sa_session.query( trans.app.model.Repository ) \
+ .filter( trans.app.model.Repository.table.c.deleted == deleted ) \
+ .order_by( trans.app.model.Repository.table.c.name ):
+ repository_dict = repository.to_dict( view='collection',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=trans.security.encode_id( repository.id ) )
+ repository_dicts.append( repository_dict )
+ return repository_dicts
@web.expose_api
def repository_ids_for_setting_metadata( self, trans, my_writable=False, **kwd ):
@@ -273,28 +285,22 @@
in addition to those repositories of type tool_dependency_definition. This param is ignored
if the current user is not an admin user, in which case this same restriction is automatic.
"""
- try:
- if trans.user_is_admin():
- my_writable = util.asbool( my_writable )
- else:
- my_writable = True
- handled_repository_ids = []
- repository_ids = []
- query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
- # Make sure repositories of type tool_dependency_definition are first in the list.
- for repository in query:
- if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- repository_ids.append( trans.security.encode_id( repository.id ) )
- # Now add all remaining repositories to the list.
- for repository in query:
- if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- repository_ids.append( trans.security.encode_id( repository.id ) )
- return repository_ids
- except Exception, e:
- message = "Error in the Tool Shed repositories API in repository_ids_for_setting_metadata: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ if trans.user_is_admin():
+ my_writable = util.asbool( my_writable )
+ else:
+ my_writable = True
+ handled_repository_ids = []
+ repository_ids = []
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
+ # Make sure repositories of type tool_dependency_definition are first in the list.
+ for repository in query:
+ if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ repository_ids.append( trans.security.encode_id( repository.id ) )
+ # Now add all remaining repositories to the list.
+ for repository in query:
+ if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ repository_ids.append( trans.security.encode_id( repository.id ) )
+ return repository_ids
@web.expose_api
def reset_metadata_on_repositories( self, trans, payload, **kwd ):
@@ -318,6 +324,7 @@
:param skip_file (optional): A local file name that contains the encoded repository ids associated with repositories to skip.
This param can be used as an alternative to the above encoded_ids_to_skip.
"""
+
def handle_repository( trans, repository, results ):
log.debug( "Resetting metadata on repository %s" % str( repository.name ) )
repository_id = trans.security.encode_id( repository.id )
@@ -335,53 +342,48 @@
status = '%s : %s' % ( str( repository.name ), message )
results[ 'repository_status' ].append( status )
return results
- try:
- start_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results = dict( start_time=start_time,
- repository_status=[],
- successful_count=0,
- unsuccessful_count=0 )
- handled_repository_ids = []
- encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] )
- skip_file = payload.get( 'skip_file', None )
- if skip_file and os.path.exists( skip_file ) and not encoded_ids_to_skip:
- # Load the list of encoded_ids_to_skip from the skip_file.
- # Contents of file must be 1 encoded repository id per line.
- lines = open( skip_file, 'rb' ).readlines()
- for line in lines:
- if line.startswith( '#' ):
- # Skip comments.
- continue
- encoded_ids_to_skip.append( line.rstrip( '\n' ) )
- if trans.user_is_admin():
- my_writable = util.asbool( payload.get( 'my_writable', False ) )
- else:
- my_writable = True
- query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
- # First reset metadata on all repositories of type repository_dependency_definition.
- for repository in query:
- encoded_id = trans.security.encode_id( repository.id )
- if encoded_id in encoded_ids_to_skip:
- log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
- ( str( repository.id ), str( encoded_ids_to_skip ) ) )
- elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- results = handle_repository( trans, repository, results )
- # Now reset metadata on all remaining repositories.
- for repository in query:
- encoded_id = trans.security.encode_id( repository.id )
- if encoded_id in encoded_ids_to_skip:
- log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
- ( str( repository.id ), str( encoded_ids_to_skip ) ) )
- elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- results = handle_repository( trans, repository, results )
- stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results[ 'stop_time' ] = stop_time
- return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
- except Exception, e:
- message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results = dict( start_time=start_time,
+ repository_status=[],
+ successful_count=0,
+ unsuccessful_count=0 )
+ handled_repository_ids = []
+ encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] )
+ skip_file = payload.get( 'skip_file', None )
+ if skip_file and os.path.exists( skip_file ) and not encoded_ids_to_skip:
+ # Load the list of encoded_ids_to_skip from the skip_file.
+ # Contents of file must be 1 encoded repository id per line.
+ lines = open( skip_file, 'rb' ).readlines()
+ for line in lines:
+ if line.startswith( '#' ):
+ # Skip comments.
+ continue
+ encoded_ids_to_skip.append( line.rstrip( '\n' ) )
+ if trans.user_is_admin():
+ my_writable = util.asbool( payload.get( 'my_writable', False ) )
+ else:
+ my_writable = True
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
+ # First reset metadata on all repositories of type repository_dependency_definition.
+ for repository in query:
+ encoded_id = trans.security.encode_id( repository.id )
+ if encoded_id in encoded_ids_to_skip:
+ log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
+ ( str( repository.id ), str( encoded_ids_to_skip ) ) )
+ elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ results = handle_repository( trans, repository, results )
+ # Now reset metadata on all remaining repositories.
+ for repository in query:
+ encoded_id = trans.security.encode_id( repository.id )
+ if encoded_id in encoded_ids_to_skip:
+ log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
+ ( str( repository.id ), str( encoded_ids_to_skip ) ) )
+ elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ results = handle_repository( trans, repository, results )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
@web.expose_api
def reset_metadata_on_repository( self, trans, payload, **kwd ):
@@ -395,6 +397,7 @@
The following parameters must be included in the payload.
:param repository_id: the encoded id of the repository on which metadata is to be reset.
"""
+
def handle_repository( trans, start_time, repository ):
results = dict( start_time=start_time,
repository_status=[] )
@@ -410,21 +413,16 @@
status = '%s : %s' % ( str( repository.name ), message )
results[ 'repository_status' ].append( status )
return results
- try:
- repository_id = payload.get( 'repository_id', None )
- if repository_id is not None:
- repository = suc.get_repository_in_tool_shed( trans, repository_id )
- start_time = strftime( "%Y-%m-%d %H:%M:%S" )
- log.debug( "%s...resetting metadata on repository %s" % ( start_time, str( repository.name ) ) )
- results = handle_repository( trans, start_time, repository )
- stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results[ 'stop_time' ] = stop_time
- return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
- except Exception, e:
- message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+
+ repository_id = payload.get( 'repository_id', None )
+ if repository_id is not None:
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ log.debug( "%s...resetting metadata on repository %s" % ( start_time, str( repository.name ) ) )
+ results = handle_repository( trans, start_time, repository )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
@web.expose_api_anonymous
def show( self, trans, id, **kwd ):
@@ -434,27 +432,14 @@
:param id: the encoded id of the Repository object
"""
- value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
# Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135
- try:
- repository = suc.get_repository_in_tool_shed( trans, id )
- repository_dict = repository.to_dict( view='element', value_mapper=value_mapper )
- repository_dict[ 'url' ] = web.url_for( controller='repositories',
- action='show',
- id=trans.security.encode_id( repository.id ) )
- return repository_dict
- except Exception, e:
- message = "Error in the Tool Shed repositories API in show: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
-
- def __get_value_mapper( self, trans, repository_metadata ):
- value_mapper = { 'id' : trans.security.encode_id,
- 'repository_id' : trans.security.encode_id }
- if repository_metadata.time_last_tested is not None:
- # For some reason the Dictifiable.to_dict() method in ~/galaxy/model/item_attrs.py requires
- # a function rather than a mapped value, so just pass the time_ago function here.
- value_mapper[ 'time_last_tested' ] = time_ago
- return value_mapper
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ if repository is None:
+ log.debug( "Unable to locate repository record for id %s." % ( str( id ) ) )
+ return {}
+ repository_dict = repository.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=trans.security.encode_id( repository.id ) )
+ return repository_dict
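
Across these Tool Shed API methods the commit trades broad try/except blocks that set trans.response.status = 500 and return a message string for up-front guards that log at debug level and return an empty container of the method's normal return type. A condensed sketch of that convention; get_repository and the installable_revisions attribute are hypothetical stand-ins, not the suc helpers:

    # Condensed sketch of the guard-and-log convention used above.
    import logging
    logging.basicConfig()
    log = logging.getLogger( __name__ )

    def get_ordered_installable_revisions( name, owner, get_repository ):
        if not ( name and owner ):
            log.debug( "Invalid name %s or owner %s received." % ( str( name ), str( owner ) ) )
            return []
        repository = get_repository( name, owner )
        if repository is None:
            log.debug( "Cannot locate repository %s owned by %s." % ( str( name ), str( owner ) ) )
            return []
        return repository.installable_revisions  # assumed attribute for the sketch

    print( get_ordered_installable_revisions( '', '', lambda name, owner: None ) )  # []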
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/webapps/tool_shed/api/repository_revisions.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
+++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
@@ -1,6 +1,5 @@
import datetime
import logging
-from galaxy.web.framework.helpers import time_ago
from tool_shed.util import metadata_util
from galaxy import web
from galaxy import util
@@ -48,61 +47,29 @@
if not changeset_revision:
raise HTTPBadRequest( detail="Missing required parameter 'changeset_revision'." )
export_repository_dependencies = payload.get( 'export_repository_dependencies', False )
- try:
- # We'll currently support only gzip-compressed tar archives.
- file_type = 'gz'
- export_repository_dependencies = util.string_as_bool( export_repository_dependencies )
- # Get the repository information.
- repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
- repository_id = trans.security.encode_id( repository.id )
- response = export_util.export_repository( trans,
- tool_shed_url,
- repository_id,
- str( repository.name ),
- changeset_revision,
- file_type,
- export_repository_dependencies,
- api=True )
- return response
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in export: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ # We'll currently support only gzip-compressed tar archives.
+ file_type = 'gz'
+ export_repository_dependencies = util.asbool( export_repository_dependencies )
+ # Get the repository information.
+ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository is None:
+ error_message = 'Cannot locate repository with name %s and owner %s.' % ( str( name ), str( owner ) )
+ log.debug( error_message )
+ return None, error_message
+ repository_id = trans.security.encode_id( repository.id )
+ return export_util.export_repository( trans,
+ tool_shed_url,
+ repository_id,
+ str( repository.name ),
+ changeset_revision,
+ file_type,
+ export_repository_dependencies,
+ api=True )
- @web.expose_api_anonymous
- def repository_dependencies( self, trans, id, **kwd ):
- """
- GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies
- Displays information about a repository_metadata record in the Tool Shed.
-
- :param id: the encoded id of the `RepositoryMetadata` object
- """
- # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb12…
+ def __get_value_mapper( self, trans ):
value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
- repository_dependencies_dicts = []
- try:
- repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
- metadata = repository_metadata.metadata
- if metadata and 'repository_dependencies' in metadata:
- rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
- for rd_tup in rd_tups:
- tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ]
- repository_dependency = suc.get_repository_by_name_and_owner( trans.app, name, owner )
- repository_dependency_dict = repository_dependency.to_dict( view='element', value_mapper=value_mapper )
- # We have to add the changeset_revision of of the repository dependency.
- repository_dependency_dict[ 'changeset_revision' ] = changeset_revision
- repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories',
- action='show',
- id=trans.security.encode_id( repository_dependency.id ) )
- repository_dependencies_dicts.append( repository_dependency_dict )
- return repository_dependencies_dicts
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in repository_dependencies: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ 'repository_id' : trans.security.encode_id }
+ return value_mapper
@web.expose_api_anonymous
def index( self, trans, **kwd ):
@@ -117,59 +84,94 @@
# Filter by downloadable if received.
downloadable = kwd.get( 'downloadable', None )
if downloadable is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.string_as_bool( downloadable ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.asbool( downloadable ) )
# Filter by malicious if received.
malicious = kwd.get( 'malicious', None )
if malicious is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.string_as_bool( malicious ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.asbool( malicious ) )
# Filter by tools_functionally_correct if received.
tools_functionally_correct = kwd.get( 'tools_functionally_correct', None )
if tools_functionally_correct is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.string_as_bool( tools_functionally_correct ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.asbool( tools_functionally_correct ) )
# Filter by missing_test_components if received.
missing_test_components = kwd.get( 'missing_test_components', None )
if missing_test_components is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.string_as_bool( missing_test_components ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.asbool( missing_test_components ) )
# Filter by do_not_test if received.
do_not_test = kwd.get( 'do_not_test', None )
if do_not_test is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.string_as_bool( do_not_test ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.asbool( do_not_test ) )
# Filter by includes_tools if received.
includes_tools = kwd.get( 'includes_tools', None )
if includes_tools is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.string_as_bool( includes_tools ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.asbool( includes_tools ) )
# Filter by test_install_error if received.
test_install_error = kwd.get( 'test_install_error', None )
if test_install_error is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.test_install_error == util.string_as_bool( test_install_error ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.test_install_error == util.asbool( test_install_error ) )
# Filter by skip_tool_test if received.
skip_tool_test = kwd.get( 'skip_tool_test', None )
if skip_tool_test is not None:
- skip_tool_test = util.string_as_bool( skip_tool_test )
+ skip_tool_test = util.asbool( skip_tool_test )
skipped_metadata_ids_subquery = select( [ trans.app.model.SkipToolTest.table.c.repository_metadata_id ] )
if skip_tool_test:
clause_list.append( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) )
else:
clause_list.append( not_( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) ) )
- # Generate and execute the query.
- try:
- query = trans.sa_session.query( trans.app.model.RepositoryMetadata ) \
- .filter( and_( *clause_list ) ) \
- .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ) \
- .all()
- for repository_metadata in query:
- repository_metadata_dict = repository_metadata.to_dict( view='collection',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
- repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
- action='show',
- id=trans.security.encode_id( repository_metadata.id ) )
- repository_metadata_dicts.append( repository_metadata_dict )
- return repository_metadata_dicts
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in index: " + str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ for repository_metadata in trans.sa_session.query( trans.app.model.RepositoryMetadata ) \
+ .filter( and_( *clause_list ) ) \
+ .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ):
+ repository_metadata_dict = repository_metadata.to_dict( view='collection',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+ action='show',
+ id=trans.security.encode_id( repository_metadata.id ) )
+ repository_metadata_dicts.append( repository_metadata_dict )
+ return repository_metadata_dicts
+
+ @web.expose_api_anonymous
+ def repository_dependencies( self, trans, id, **kwd ):
+ """
+ GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies
+ Displays information about a repository_metadata record in the Tool Shed.
+
+ :param id: the encoded id of the `RepositoryMetadata` object
+ """
+ # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb12…
+ repository_dependencies_dicts = []
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
+ if repository_metadata is None:
+ log.debug( 'Invalid repository_metadata id received: %s' % str( id ) )
+ return repository_dependencies_dicts
+ metadata = repository_metadata.metadata
+ if metadata is None:
+ log.debug( 'The repository_metadata record with id %s has no metadata.' % str( id ) )
+ return repository_dependencies_dicts
+ if 'repository_dependencies' in metadata:
+ rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
+ for rd_tup in rd_tups:
+ tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ]
+ repository_dependency = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository_dependency is None:
+ log.debug( 'Cannot locate repository dependency %s owned by %s.' % ( name, owner ) )
+ continue
+ repository_dependency_id = trans.security.encode_id( repository_dependency.id )
+ repository_dependency_repository_metadata = \
+ suc.get_repository_metadata_by_changeset_revision( trans, repository_dependency_id, changeset_revision )
+ if repository_dependency_repository_metadata is None:
+ log.debug( 'Cannot locate repository_metadata with id %s for repository dependency %s owned by %s.' % \
+ ( str( repository_dependency_id ), str( name ), str( owner ) ) )
+ continue
+ repository_dependency_repository_metadata_id = trans.security.encode_id( repository_dependency_repository_metadata.id )
+ repository_dependency_dict = repository_dependency.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
+ # We have to add the changeset_revision of the repository dependency.
+ repository_dependency_dict[ 'changeset_revision' ] = changeset_revision
+ repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=repository_dependency_repository_metadata_id )
+ repository_dependencies_dicts.append( repository_dependency_dict )
+ return repository_dependencies_dicts
@web.expose_api_anonymous
def show( self, trans, id, **kwd ):
@@ -180,19 +182,16 @@
:param id: the encoded id of the `RepositoryMetadata` object
"""
# Example URL: http://localhost:9009/api/repository_revisions/bb125606ff9ea620
- try:
- repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
- repository_metadata_dict = repository_metadata.to_dict( view='element',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
- repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
- action='show',
- id=trans.security.encode_id( repository_metadata.id ) )
- return repository_metadata_dict
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in show: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
+ if repository_metadata is None:
+ log.debug( 'Cannot locate repository_metadata with id %s' % str( id ) )
+ return {}
+ repository_metadata_dict = repository_metadata.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+ action='show',
+ id=trans.security.encode_id( repository_metadata.id ) )
+ return repository_metadata_dict
@web.expose_api
def update( self, trans, payload, **kwd ):
@@ -201,41 +200,32 @@
Updates the value of specified columns of the repository_metadata table based on the key / value pairs in payload.
"""
repository_metadata_id = kwd.get( 'id', None )
- try:
- repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
- flush_needed = False
- for key, new_value in payload.items():
- if key == 'time_last_tested':
- repository_metadata.time_last_tested = datetime.datetime.utcnow()
- flush_needed = True
- elif hasattr( repository_metadata, key ):
- # log information when setting attributes associated with the Tool Shed's install and test framework.
- if key in [ 'do_not_test', 'includes_tools', 'missing_test_components', 'test_install_error',
- 'tools_functionally_correct' ]:
- log.debug( 'Setting repository_metadata table column %s to value %s for changeset_revision %s via the Tool Shed API.' % \
- ( str( key ), str( new_value ), str( repository_metadata.changeset_revision ) ) )
- setattr( repository_metadata, key, new_value )
- flush_needed = True
- if flush_needed:
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in update: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ if repository_metadata_id is None:
+ raise HTTPBadRequest( detail="Missing required parameter 'id'." )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
+ if repository_metadata is None:
+ log.debug( 'Cannot locate repository_metadata with id %s' % str( repository_metadata_id ) )
+ return {}
+ flush_needed = False
+ for key, new_value in payload.items():
+ if key == 'time_last_tested':
+ repository_metadata.time_last_tested = datetime.datetime.utcnow()
+ flush_needed = True
+ elif hasattr( repository_metadata, key ):
+ # log information when setting attributes associated with the Tool Shed's install and test framework.
+ if key in [ 'do_not_test', 'includes_tools', 'missing_test_components', 'test_install_error',
+ 'tools_functionally_correct' ]:
+ log.debug( 'Setting repository_metadata column %s to value %s for changeset_revision %s via the Tool Shed API.' % \
+ ( str( key ), str( new_value ), str( repository_metadata.changeset_revision ) ) )
+ setattr( repository_metadata, key, new_value )
+ flush_needed = True
+ if flush_needed:
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ trans.sa_session.refresh( repository_metadata )
repository_metadata_dict = repository_metadata.to_dict( view='element',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
+ value_mapper=self.__get_value_mapper( trans ) )
repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
action='show',
id=trans.security.encode_id( repository_metadata.id ) )
return repository_metadata_dict
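
For illustration, a hypothetical client sketch for this PUT endpoint using the update() helper from lib/tool_shed/scripts/api/common.py; the API key, host, and encoded id are placeholders:

    data = dict( time_last_tested=True,           # the value is ignored; the column is set to utcnow()
                 tools_functionally_correct=False,
                 do_not_test=False )
    update( '<your API key>', 'http://localhost:9009/api/repository_revisions/bb125606ff9ea620', data )
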
-
- def __get_value_mapper( self, trans, repository_metadata ):
- value_mapper = { 'id' : trans.security.encode_id,
- 'repository_id' : trans.security.encode_id }
- if repository_metadata.time_last_tested is not None:
- # For some reason the Dictifiable.to_dict() method in ~/galaxy/model/item_attrs.py requires
- # a function rather than a mapped value, so just pass the time_ago function here.
- value_mapper[ 'time_last_tested' ] = time_ago
- return value_mapper
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/webapps/tool_shed/controllers/hg.py
--- a/lib/galaxy/webapps/tool_shed/controllers/hg.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/hg.py
@@ -2,6 +2,7 @@
from galaxy import web
from galaxy.web.base.controller import BaseUIController
from tool_shed.util.shed_util_common import get_repository_by_name_and_owner
+from tool_shed.util.shed_util_common import update_repository
from tool_shed.util.metadata_util import set_repository_metadata
from galaxy import eggs
@@ -9,6 +10,8 @@
import mercurial.__version__
from mercurial.hgweb.hgwebdir_mod import hgwebdir
from mercurial.hgweb.request import wsgiapplication
+from mercurial import hg
+from mercurial import ui
log = logging.getLogger(__name__)
@@ -36,6 +39,11 @@
repository = get_repository_by_name_and_owner( trans.app, name, owner )
if repository:
if hg_version >= '2.2.3':
+ # Update the repository on disk to the tip revision, because the web upload form uses the on-disk working
+ # directory. If the repository is not updated on disk, pushing from the command line and then uploading
+ # via the web interface will result in a new head being created.
+ repo = hg.repository( ui.ui(), repository.repo_path( trans.app ) )
+ update_repository( repo, ctx_rev=None )
# Set metadata using the repository files on disk.
error_message, status = set_repository_metadata( trans, repository )
if status == 'ok' and error_message:
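
For context, a minimal sketch of what the update step amounts to, assuming update_repository wraps Mercurial's update command (the wrapper's internals are not shown in this diff):

    from mercurial import commands, hg, ui

    def update_working_dir_to_tip( repo_path ):
        # Open the on-disk repository and update its working directory so that
        # command-line pushes and web uploads share the same parent revision.
        repo = hg.repository( ui.ui(), repo_path )
        commands.update( ui.ui(), repo, rev=None )  # rev=None updates toward tip
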
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -567,14 +567,17 @@
def order_repositories_for_installation( self, tool_shed_repositories, repository_dependencies_dict ):
"""
- Some repositories may have repository dependencies that are required to be installed before the dependent repository. This method will
- inspect the list of repositories about to be installed and make sure to order them appropriately. For each repository about to be installed,
- if required repositories are not contained in the list of repositories about to be installed, then they are not considered. Repository
- dependency definitions that contain circular dependencies should not result in an infinite loop, but obviously prior installation will not be
- handled for one or more of the repositories that require prior installation. This process is similar to the process used when installing tool
- shed repositories (i.e., the order_components_for_installation() method in ~/lib/tool_shed/galaxy_install/repository_util), but does not handle
- managing tool panel sections and other components since repository dependency definitions contained in tool shed repositories with migrated
- tools must never define a relationship to a repository dependency that contains a tool.
+ Some repositories may have repository dependencies that are required to be installed before the dependent
+ repository. This method will inspect the list of repositories about to be installed and make sure to order
+ them appropriately. For each repository about to be installed, if required repositories are not contained
+ in the list of repositories about to be installed, then they are not considered. Repository dependency
+ definitions that contain circular dependencies should not result in an infinite loop, but obviously prior
+ installation will not be handled for one or more of the repositories that require prior installation. This
+ process is similar to the process used when installing tool shed repositories (i.e., the
+ order_components_for_installation() method in ~/lib/tool_shed/galaxy_install/repository_util), but does not
+ handle managing tool panel sections and other components since repository dependency definitions contained
+ in tool shed repositories with migrated tools must never define a relationship to a repository dependency
+ that contains a tool.
"""
ordered_tool_shed_repositories = []
ordered_tsr_ids = []
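
A simplified, self-contained sketch of the ordering idea described in the docstring above (not the actual implementation): each repository is placed after its required repositories, requirements outside the installation list are ignored, and the visiting set keeps circular definitions from looping forever.

    def order_for_installation( tsr_ids, required_ids_by_tsr_id ):
        # tsr_ids: ids of the repositories about to be installed.
        # required_ids_by_tsr_id: id -> list of ids that must be installed first.
        ordered, visiting = [], set()
        def add( tsr_id ):
            if tsr_id in ordered or tsr_id in visiting:
                # Already placed, or part of a circular dependency definition.
                return
            visiting.add( tsr_id )
            for required_id in required_ids_by_tsr_id.get( tsr_id, [] ):
                # Requirements not contained in the installation list are not considered.
                if required_id in tsr_ids:
                    add( required_id )
            visiting.discard( tsr_id )
            ordered.append( tsr_id )
        for tsr_id in tsr_ids:
            add( tsr_id )
        return ordered
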
diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/tool_shed/scripts/api/common.py
--- a/lib/tool_shed/scripts/api/common.py
+++ b/lib/tool_shed/scripts/api/common.py
@@ -8,28 +8,18 @@
new_path.extend( sys.path[ 1: ] )
sys.path = new_path
+import tool_shed.util.shed_util_common as suc
+
from galaxy import eggs
import pkg_resources
-pkg_resources.require( "pycrypto" )
-from Crypto.Cipher import Blowfish
-from Crypto.Util.randpool import RandomPool
-from Crypto.Util import number
-
-def encode_id( config_id_secret, obj_id ):
- # Utility method to encode ID's
- id_cipher = Blowfish.new( config_id_secret )
- # Convert to string
- s = str( obj_id )
- # Pad to a multiple of 8 with leading "!"
- s = ( "!" * ( 8 - len(s) % 8 ) ) + s
- # Encrypt
- return id_cipher.encrypt( s ).encode( 'hex' )
-
def delete( api_key, url, data, return_formatted=True ):
- # Sends an API DELETE request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ """
+ Sends an API DELETE request and acts as a generic formatter for the JSON response. The
+ 'data' will become the JSON payload read by the Tool Shed.
+ """
try:
- url = make_url( api_key, url )
+ url = make_url( url, api_key=api_key, args=None )
req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ))
req.get_method = lambda: 'DELETE'
r = json.loads( urllib2.urlopen( req ).read() )
@@ -47,12 +37,13 @@
print r
def display( url, api_key=None, return_formatted=True ):
- # Sends an API GET request and acts as a generic formatter for the JSON response.
+ """Sends an API GET request and acts as a generic formatter for the JSON response."""
try:
r = get( url, api_key=api_key )
except urllib2.HTTPError, e:
print e
- print e.read( 1024 ) # Only return the first 1K of errors.
+ # Only return the first 1K of errors.
+ print e.read( 1024 )
sys.exit( 1 )
if type( r ) == unicode:
print 'error: %s' % r
@@ -84,16 +75,94 @@
print 'response is unknown type: %s' % type( r )
def get( url, api_key=None ):
- # Do the actual GET.
- url = make_url( url, api_key=api_key )
+ """Do the GET."""
+ url = make_url( url, api_key=api_key, args=None )
try:
return json.loads( urllib2.urlopen( url ).read() )
except ValueError, e:
print "URL did not return JSON data"
- sys.exit(1)
+ sys.exit( 1 )
+
+def get_api_url( base, parts=[], params=None ):
+ """Compose and return a URL for the Tool Shed API."""
+ if 'api' in parts and parts.index( 'api' ) != 0:
+ parts.pop( parts.index( 'api' ) )
+ parts.insert( 0, 'api' )
+ elif 'api' not in parts:
+ parts.insert( 0, 'api' )
+ url = suc.url_join( base, *parts )
+ if params is not None:
+ try:
+ query_string = urllib.urlencode( params )
+ except Exception, e:
+ # The value of params must be a string.
+ query_string = params
+ url += '?%s' % query_string
+ return url
+
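
For example, a hypothetical call (repository name and owner are placeholders; query parameter order may vary):

    get_api_url( base='http://localhost:9009',
                 parts=[ 'repositories', 'get_ordered_installable_revisions' ],
                 params=dict( name='fastqc', owner='devteam' ) )
    # -> 'http://localhost:9009/api/repositories/get_ordered_installable_revisions?name=fastqc&owner=devteam'
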
+def get_latest_downloadable_changeset_revision_via_api( url, name, owner ):
+ """
+ Return the latest downloadable changeset revision for the repository defined by the received
+ name and owner.
+ """
+ error_message = ''
+ parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ]
+ params = dict( name=name, owner=owner )
+ api_url = get_api_url( base=url, parts=parts, params=params )
+ changeset_revisions, error_message = json_from_url( api_url )
+ if changeset_revisions is None or error_message:
+ return None, error_message
+ if len( changeset_revisions ) >= 1:
+ return changeset_revisions[ -1 ], error_message
+ return suc.INITIAL_CHANGELOG_HASH, error_message
+
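
A hypothetical usage, following the ( value, error_message ) return convention these helpers share:

    latest_revision, error_message = \
        get_latest_downloadable_changeset_revision_via_api( 'http://localhost:9009', 'fastqc', 'devteam' )
    if error_message:
        print 'Error: %s' % error_message
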
+def get_repository_dict( url, repository_dict ):
+ """
+ Send a request to the Tool Shed to get additional information about the repository defined
+ by the received repository_dict. Add the information to the repository_dict and return it.
+ """
+ error_message = ''
+ if not isinstance( repository_dict, dict ):
+ error_message = 'Invalid repository_dict received: %s' % str( repository_dict )
+ return None, error_message
+ repository_id = repository_dict.get( 'repository_id', None )
+ if repository_id is None:
+ error_message = 'The received repository_dict does not contain a repository_id entry: %s' % str( repository_dict )
+ return None, error_message
+ parts = [ 'api', 'repositories', repository_id ]
+ api_url = get_api_url( base=url, parts=parts )
+ extended_dict, error_message = json_from_url( api_url )
+ if extended_dict is None or error_message:
+ return None, error_message
+ name = extended_dict.get( 'name', None )
+ owner = extended_dict.get( 'owner', None )
+ if name is not None and owner is not None:
+ name = str( name )
+ owner = str( owner )
+ latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision_via_api( url, name, owner )
+ if latest_changeset_revision is None or error_message:
+ return None, error_message
+ extended_dict[ 'latest_revision' ] = str( latest_changeset_revision )
+ return extended_dict, error_message
+ else:
+ error_message = 'Invalid extended_dict does not contain name or owner entries: %s' % str( extended_dict )
+ return None, error_message
+
+def json_from_url( url ):
+ """Send a request to the Tool Shed via the Tool Shed API and handle the response."""
+ error_message = ''
+ url_handle = urllib.urlopen( url )
+ url_contents = url_handle.read()
+ try:
+ parsed_json = simplejson.loads( url_contents )
+ except Exception, e:
+ error_message = str( url_contents )
+ print 'Error parsing JSON data in json_from_url(): ', str( e )
+ return None, error_message
+ return parsed_json, error_message
def make_url( url, api_key=None, args=None ):
- # Adds the API Key to the URL if it's not already there.
+ """Adds the API Key to the URL if it's not already there."""
if args is None:
args = []
argsep = '&'
@@ -105,20 +174,23 @@
return url + argsep + '&'.join( [ '='.join( t ) for t in args ] )
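
For illustration, assuming the API key is passed as the standard 'key' query parameter:

    make_url( 'http://localhost:9009/api/repositories', api_key='<your API key>', args=None )
    # -> 'http://localhost:9009/api/repositories?key=<your API key>'
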
def post( url, data, api_key=None ):
- # Do the actual POST.
- url = make_url( url, api_key=api_key )
+ """Do the POST."""
+ url = make_url( url, api_key=api_key, args=None )
req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ) )
return json.loads( urllib2.urlopen( req ).read() )
def put( url, data, api_key=None ):
- # Do the actual PUT.
- url = make_url( url, api_key=api_key )
+ """Do the PUT."""
+ url = make_url( url, api_key=api_key, args=None )
req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ))
req.get_method = lambda: 'PUT'
return json.loads( urllib2.urlopen( req ).read() )
def submit( url, data, api_key=None, return_formatted=True ):
- # Sends an API POST request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ """
+ Sends an API POST request and acts as a generic formatter for the JSON response. The
+ 'data' will become the JSON payload read by the Tool Shed.
+ """
try:
r = post( url, data, api_key=api_key )
except urllib2.HTTPError, e:
@@ -133,7 +205,8 @@
print 'Response'
print '--------'
if type( r ) == list:
- # Currently the only implemented responses are lists of dicts, because submission creates some number of collection elements.
+ # Currently the only implemented responses are lists of dicts, because submission creates
+ # some number of collection elements.
for i in r:
if type( i ) == dict:
if 'url' in i:
@@ -150,9 +223,12 @@
print r
def update( api_key, url, data, return_formatted=True ):
- # Sends an API PUT request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ """
+ Sends an API PUT request and acts as a generic formatter for the JSON response. The
+ 'data' will become the JSON payload read by the Tool Shed.
+ """
try:
- r = put( api_key, url, data )
+ r = put( url, data, api_key=api_key )
except urllib2.HTTPError, e:
if return_formatted:
print e
This diff is so big that we needed to truncate the remainder.
https://bitbucket.org/galaxy/galaxy-central/commits/3dde0f4765fb/
Changeset: 3dde0f4765fb
User: dannon
Date: 2014-01-10 13:42:58
Summary: One more simplejson/json fix that slipped through the merge.
Affected #: 1 file
diff -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 -r 3dde0f4765fbff33789badeaa17503349559ff72 lib/tool_shed/scripts/api/common.py
--- a/lib/tool_shed/scripts/api/common.py
+++ b/lib/tool_shed/scripts/api/common.py
@@ -154,7 +154,7 @@
url_handle = urllib.urlopen( url )
url_contents = url_handle.read()
try:
- parsed_json = simplejson.loads( url_contents )
+ parsed_json = json.loads( url_contents )
except Exception, e:
error_message = str( url_contents )
print 'Error parsing JSON data in json_from_url(): ', str( e )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e72b8fd2d28b/
Changeset: e72b8fd2d28b
User: carlfeberhard
Date: 2014-01-09 22:00:07
Summary: Functional twill tests (history): remove check box assertion on history/list (as it's rendered in js now), fix history json scanning errors
Affected #: 2 files
diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r e72b8fd2d28b0a5371d13a77469746d0786496de test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -335,6 +335,7 @@
self.visit_page( "history?show_deleted=False" )
else:
self.visit_page( "history" )
+ json_data = {}
try:
tc.find( pattern, flags=( 'm' if multiline else '' ) )
# twill stores the regex match in a special stack variable
@@ -541,7 +542,6 @@
self.home()
self.visit_page( "history/list" )
self.check_page_for_string( 'Saved Histories' )
- self.check_page_for_string( '<input type="checkbox" name="id" value=' )
self.check_page_for_string( 'operation=Rename' )
self.check_page_for_string( 'operation=Switch' )
self.check_page_for_string( 'operation=Delete' )
@@ -553,7 +553,6 @@
self.home()
self.visit_page( "history/list?f-deleted=True" )
self.check_page_for_string( 'Saved Histories' )
- self.check_page_for_string( '<input type="checkbox" name="id" value=' )
self.check_page_for_string( 'operation=Undelete' )
for check_str in strings_displayed:
self.check_page_for_string( check_str )
diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r e72b8fd2d28b0a5371d13a77469746d0786496de test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py
+++ b/test/functional/test_history_functions.py
@@ -82,6 +82,7 @@
.first()
assert historyB is not None, "Problem retrieving historyB from database"
assert historyA.id == historyB.id, "After the same user logged out and back in, their last used history was not associated with their new session"
+
def test_005_deleting_histories( self ):
"""Testing deleting histories"""
# Logged in as admin_user
@@ -144,6 +145,7 @@
raise AssertionError, "Default permissions were incorrectly deleted from the db for history id %d when it was deleted" % history2.id
# Current history is empty
self.history_options( user=True )
+
def test_010_history_rename( self ):
"""Testing renaming a history"""
# Logged in as admin_user
@@ -157,10 +159,12 @@
raise AssertionError, "History id %d deleted when it should not be" % latest_history.id
self.rename_history( self.security.encode_id( history3.id ), history3.name, new_name=urllib.quote( 'history 3' ) )
sa_session.refresh( history3 )
+
def test_015_history_list( self ):
"""Testing viewing previously stored active histories"""
# Logged in as admin_user
self.view_stored_active_histories()
+
def test_020_share_current_history( self ):
"""Testing sharing the current history which contains only public datasets"""
# Logged in as admin_user
@@ -210,10 +214,13 @@
# Test sharing history3 with an invalid user
self.share_current_history( 'jack@jill.com',
strings_displayed_after_submit=[ 'jack@jill.com is not a valid Galaxy user.' ] )
+
def test_025_delete_shared_current_history( self ):
"""Testing deleting the current history after it was shared"""
# Logged in as admin_user
- self.delete_current_history( strings_displayed=[ "History (%s) has been shared with others, unshare it before deleting it." % history3.name ] )
+ self.delete_current_history(
+ strings_displayed=[ "History (%s) has been shared with others, unshare it before deleting it." % history3.name ] )
+
def test_030_clone_shared_history( self ):
"""Testing copying a shared history"""
# logged in as admin user
@@ -656,7 +663,7 @@
if hda[ 'id' ] == self.security.encode_id( hda_2_bed.id ):
return ( not hda[ 'accessible' ] )
return False
- self.check_history_json( r'\bhdaJson\s*=\s*(.*);', hda_2_bed_is_inaccessible )
+ self.check_history_json( r'\bhdaJSON\s*=\s*(.*);', hda_2_bed_is_inaccessible )
# Admin users can view all datasets ( using the history/view feature ), so make sure 2.bed is accessible to the admin
self.logout()
@@ -734,7 +741,7 @@
log.info( 'deleting last hda' )
self.delete_history_item( str( latest_hda.id ) )
# check the historyPanel settings.show_deleted for a null json value (no show_deleted in query string)
- self.check_history_json( r'\bpage_show_deleted\s*=\s*(.*),', lambda x: x == None )
+ self.check_history_json( r'\bshow_deleted\s*:\s*(.*),', lambda x: x == None )
# reload this history with the show_deleted flag set in the query string
# the deleted dataset should be there with the proper 'deleted' text
@@ -742,7 +749,7 @@
log.info( 'turning show_deleted on' )
#self.visit_url( "%s/history/?show_deleted=True" % self.url )
# check the historyPanel settings.show_deleted for a true json value
- self.check_history_json( r'\bpage_show_deleted\s*=\s*(.*),', lambda x: x == True, show_deleted=True )
+ self.check_history_json( r'\bshow_deleted\s*:\s*(.*),', lambda x: x == True, show_deleted=True )
# reload this history again with the show_deleted flag set TO FALSE in the query string
# make sure the 'history empty' message shows
@@ -750,7 +757,7 @@
log.info( 'turning show_deleted off' )
#self.visit_url( "%s/history/?show_deleted=False" % self.url )
# check the historyPanel settings.show_deleted for a false json value
- self.check_history_json( r'\bpage_show_deleted\s*=\s*(.*),', lambda x: x == False, show_deleted=False )
+ self.check_history_json( r'\bshow_deleted\s*:\s*(.*),', lambda x: x == False, show_deleted=False )
# delete this history
self.delete_history( self.security.encode_id( latest_history.id ) )
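
For context, these assertions assume the history page now embeds the panel settings as JSON-style object properties rather than standalone page variables. A minimal sketch of what the new regexes match; the page fragment is hypothetical:

    import re

    page = 'settings = { show_deleted : true, }'  # hypothetical rendered fragment
    match = re.search( r'\bshow_deleted\s*:\s*(.*),', page )
    print match.group( 1 )  # prints: true
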
https://bitbucket.org/galaxy/galaxy-central/commits/d02540589348/
Changeset: d02540589348
User: carlfeberhard
Date: 2014-01-09 22:01:23
Summary: Merge
Affected #: 7 files
diff -r e72b8fd2d28b0a5371d13a77469746d0786496de -r d02540589348a2840f0492c91dbabe441fa2fb26 lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -19,11 +19,6 @@
log = logging.getLogger( __name__ )
-def default_tool_shed_repository_value_mapper( trans, tool_shed_repository ):
- value_mapper={ 'id' : trans.security.encode_id( tool_shed_repository.id ),
- 'error_message' : tool_shed_repository.error_message or '' }
- return value_mapper
-
def get_message_for_no_shed_tool_config():
# This Galaxy instance is not configured with a shed-related tool panel configuration file.
message = 'The tool_config_file setting in universe_wsgi.ini must include at least one shed tool configuration file name with a <toolbox> '
@@ -48,8 +43,8 @@
:param id: the encoded id of the ToolShedRepository object
"""
# Example URL: http://localhost:8763/api/tool_shed_repositories/f2db41e1fa331b3e/exported_…
- # Since exported workflows are dictionaries with very few attributes that differentiate them from each other, we'll build the
- # list based on the following dictionary of those few attributes.
+ # Since exported workflows are dictionaries with very few attributes that differentiate them from each
+ # other, we'll build the list based on the following dictionary of those few attributes.
exported_workflows = []
repository = suc.get_tool_shed_repository_by_id( trans, id )
metadata = repository.metadata
@@ -58,17 +53,23 @@
else:
exported_workflow_tups = []
for index, exported_workflow_tup in enumerate( exported_workflow_tups ):
- # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of relative_path is the location
- # on disk (relative to the root of the installed repository) where the exported_workflow_dict file (.ga file) is located.
+ # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of
+ # relative_path is the location on disk (relative to the root of the installed repository) where the
+ # exported_workflow_dict file (.ga file) is located.
exported_workflow_dict = exported_workflow_tup[ 1 ]
annotation = exported_workflow_dict.get( 'annotation', '' )
format_version = exported_workflow_dict.get( 'format-version', '' )
workflow_name = exported_workflow_dict.get( 'name', '' )
- # Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's location (i.e., index) in the list.
+ # Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's
+ # location (i.e., index) in the list.
display_dict = dict( index=index, annotation=annotation, format_version=format_version, workflow_name=workflow_name )
exported_workflows.append( display_dict )
return exported_workflows
+ def __get_value_mapper( self, trans ):
+ value_mapper = { 'id' : trans.security.encode_id }
+ return value_mapper
+
@web.expose_api
def import_workflow( self, trans, payload, **kwd ):
"""
@@ -96,13 +97,11 @@
# Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's location (i.e., index) in the list.
exported_workflow = exported_workflows[ int( index ) ]
workflow_name = exported_workflow[ 'workflow_name' ]
- workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name )
+ workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name )
if status == 'error':
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
- else:
- return workflow.to_dict( view='element' )
+ log.debug( error_message )
+ return {}
+ return workflow.to_dict( view='element' )
@web.expose_api
def import_workflows( self, trans, **kwd ):
@@ -125,11 +124,9 @@
imported_workflow_dicts = []
for exported_workflow_dict in exported_workflows:
workflow_name = exported_workflow_dict[ 'workflow_name' ]
- workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name )
+ workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name )
if status == 'error':
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ log.debug( error_message )
else:
imported_workflow_dicts.append( workflow.to_dict( view='element' ) )
return imported_workflow_dicts
@@ -142,22 +139,15 @@
"""
# Example URL: http://localhost:8763/api/tool_shed_repositories
tool_shed_repository_dicts = []
- try:
- query = trans.install_model.context.query( trans.app.install_model.ToolShedRepository ) \
- .order_by( trans.app.install_model.ToolShedRepository.table.c.name ) \
- .all()
- for tool_shed_repository in query:
- tool_shed_repository_dict = tool_shed_repository.to_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
- tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
- action='show',
- id=trans.security.encode_id( tool_shed_repository.id ) )
- tool_shed_repository_dicts.append( tool_shed_repository_dict )
- return tool_shed_repository_dicts
- except Exception, e:
- message = "Error in the tool_shed_repositories API in index: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ for tool_shed_repository in trans.install_model.context.query( trans.app.install_model.ToolShedRepository ) \
+ .order_by( trans.app.install_model.ToolShedRepository.table.c.name ):
+ tool_shed_repository_dict = \
+ tool_shed_repository.to_dict( value_mapper=self.__get_value_mapper( trans ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ tool_shed_repository_dicts.append( tool_shed_repository_dict )
+ return tool_shed_repository_dicts
@web.expose_api
def install_repository_revision( self, trans, payload, **kwd ):
@@ -208,8 +198,7 @@
# Make sure this Galaxy instance is configured with a shed-related tool panel configuration file.
if not suc.have_shed_tool_conf_for_install( trans ):
message = get_message_for_no_shed_tool_config()
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
# Make sure the current user's API key proves he is an admin user in this Galaxy instance.
if not trans.user_is_admin():
@@ -225,18 +214,20 @@
except Exception, e:
message = "Error attempting to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \
( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
if raw_text:
+ # If successful, the response from get_repository_revision_install_info will be 3
+ # dictionaries, a dictionary defining the Repository, a dictionary defining the
+ # Repository revision (RepositoryMetadata), and a dictionary including the additional
+ # information required to install the repository.
items = json.from_json_string( raw_text )
repository_revision_dict = items[ 1 ]
repo_info_dict = items[ 2 ]
else:
message = "Unable to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \
( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
repo_info_dicts = [ repo_info_dict ]
# Make sure the tool shed returned everything we need for installing the repository.
@@ -345,7 +336,7 @@
tool_path,
install_tool_dependencies,
reinstalling=False )
- tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
+ tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans ) )
tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
action='show',
id=trans.security.encode_id( tool_shed_repository.id ) )
@@ -394,8 +385,7 @@
if not suc.have_shed_tool_conf_for_install( trans ):
# This Galaxy instance is not configured with a shed-related tool panel configuration file.
message = get_message_for_no_shed_tool_config()
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
if not trans.user_is_admin():
raise HTTPForbidden( detail='You are not authorized to install a tool shed repository into this Galaxy instance.' )
@@ -410,8 +400,7 @@
len( changeset_revisions ) != num_specified_repositories:
message = 'Error in tool_shed_repositories API in install_repository_revisions: the received parameters must be ordered '
message += 'lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.'
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
# Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain information
# about each of the repositories being installed.
@@ -482,7 +471,7 @@
repair_dict = repository_util.repair_tool_shed_repository( trans,
repository,
encoding_util.tool_shed_encode( repo_info_dict ) )
- repository_dict = repository.to_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, repository ) )
+ repository_dict = repository.to_dict( value_mapper=self.__get_value_mapper( trans ) )
repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
action='show',
id=trans.security.encode_id( repository.id ) )
@@ -502,39 +491,39 @@
:param key: the API key of the Galaxy admin user.
"""
- try:
- start_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results = dict( start_time=start_time,
- successful_count=0,
- unsuccessful_count=0,
- repository_status=[] )
- # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
- if not trans.user_is_admin():
- raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' )
- query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=False )
- # Now reset metadata on all remaining repositories.
- for repository in query:
- repository_id = trans.security.encode_id( repository.id )
- try:
- invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans, repository_id )
- if invalid_file_tups:
- message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
- results[ 'unsuccessful_count' ] += 1
- else:
- message = "Successfully reset metadata on repository %s owned by %s" % ( str( repository.name ), str( repository.owner ) )
- results[ 'successful_count' ] += 1
- except Exception, e:
- message = "Error resetting metadata on repository %s owned by %s: %s" % ( str( repository.name ), str( repository.owner ), str( e ) )
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results = dict( start_time=start_time,
+ successful_count=0,
+ unsuccessful_count=0,
+ repository_status=[] )
+ # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
+ if not trans.user_is_admin():
+ raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' )
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=False )
+ # Now reset metadata on all remaining repositories.
+ for repository in query:
+ repository_id = trans.security.encode_id( repository.id )
+ try:
+ invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans, repository_id )
+ if invalid_file_tups:
+ message = tool_util.generate_message_for_invalid_tools( trans,
+ invalid_file_tups,
+ repository,
+ None,
+ as_html=False )
results[ 'unsuccessful_count' ] += 1
- results[ 'repository_status' ].append( message )
- stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results[ 'stop_time' ] = stop_time
- return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
- except Exception, e:
- message = "Error in the Galaxy tool_shed_repositories API in reset_metadata_on_installed_repositories: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ else:
+ message = "Successfully reset metadata on repository %s owned by %s" % \
+ ( str( repository.name ), str( repository.owner ) )
+ results[ 'successful_count' ] += 1
+ except Exception, e:
+ message = "Error resetting metadata on repository %s owned by %s: %s" % \
+ ( str( repository.name ), str( repository.owner ), str( e ) )
+ results[ 'unsuccessful_count' ] += 1
+ results[ 'repository_status' ].append( message )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
@web.expose_api
def show( self, trans, id, **kwd ):
@@ -545,15 +534,12 @@
:param id: the encoded id of the ToolShedRepository object
"""
# Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e
- try:
- tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id )
- tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
- tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
- action='show',
- id=trans.security.encode_id( tool_shed_repository.id ) )
- return tool_shed_repository_dict
- except Exception, e:
- message = "Error in tool_shed_repositories API in index: " + str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id )
+ if tool_shed_repository is None:
+ log.debug( "Unable to locate tool_shed_repository record for id %s." % ( str( id ) ) )
+ return {}
+ tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ return tool_shed_repository_dict
diff -r e72b8fd2d28b0a5371d13a77469746d0786496de -r d02540589348a2840f0492c91dbabe441fa2fb26 lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -36,22 +36,27 @@
:param name: the name of the Repository
:param owner: the owner of the Repository
- Returns the ordered list of changeset revision hash strings that are associated with installable revisions. As in the changelog, the
- list is ordered oldest to newest.
+ Returns the ordered list of changeset revision hash strings that are associated with installable revisions.
+ As in the changelog, the list is ordered oldest to newest.
"""
# Example URL: http://localhost:9009/api/repositories/get_installable_revisions?name=add_c…
- try:
+ if name and owner:
# Get the repository information.
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository is None:
+ error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
+ error_message += "cannot locate repository %s owned by %s." % ( str( name ), str( owner ) )
+ log.debug( error_message )
+ return []
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
ordered_installable_revisions = suc.get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True )
return ordered_installable_revisions
- except Exception, e:
- message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ else:
+ error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
+ error_message += "invalid name %s or owner %s received." % ( str( name ), str( owner ) )
+ log.debug( error_message )
+ return []
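
A hypothetical anonymous client call (the name and owner values are placeholders):

    import json
    import urllib2

    url = 'http://localhost:9009/api/repositories/get_ordered_installable_revisions?name=fastqc&owner=devteam'
    # The response lists the installable changeset revision hashes, oldest first.
    revisions = json.loads( urllib2.urlopen( url ).read() )
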
@web.expose_api_anonymous
def get_repository_revision_install_info( self, trans, name, owner, changeset_revision, **kwd ):
@@ -106,49 +111,65 @@
]
}
"""
- repository_value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
- # Example URL: http://localhost:9009/api/repositories/get_repository_revision_install_info…
- try:
+ # Example URL:
+ # http://<xyz>/api/repositories/get_repository_revision_install_info?name=<n>&owner=<o>&changeset_revision=<cr>
+ if name and owner and changeset_revision:
# Get the repository information.
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository is None:
+ log.debug( 'Cannot locate repository %s owned by %s' % ( str( name ), str( owner ) ) )
+ return {}, {}, {}
encoded_repository_id = trans.security.encode_id( repository.id )
- repository_dict = repository.to_dict( view='element', value_mapper=repository_value_mapper )
+ repository_dict = repository.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
repository_dict[ 'url' ] = web.url_for( controller='repositories',
action='show',
id=encoded_repository_id )
# Get the repository_metadata information.
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ encoded_repository_id,
+ changeset_revision )
if not repository_metadata:
- # The changeset_revision column in the repository_metadata table has been updated with a new value value, so find the
- # changeset_revision to which we need to update.
+ # The changeset_revision column in the repository_metadata table has been updated with a new
+ # value, so find the changeset_revision to which we need to update.
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
new_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, new_changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ encoded_repository_id,
+ new_changeset_revision )
changeset_revision = new_changeset_revision
if repository_metadata:
encoded_repository_metadata_id = trans.security.encode_id( repository_metadata.id )
repository_metadata_dict = repository_metadata.to_dict( view='collection',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
+ value_mapper=self.__get_value_mapper( trans ) )
repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
action='show',
id=encoded_repository_metadata_id )
# Get the repo_info_dict for installing the repository.
- repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, \
- has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+ repo_info_dict, \
+ includes_tools, \
+ includes_tool_dependencies, \
+ includes_tools_for_display_in_tool_panel, \
+ has_repository_dependencies, \
+ has_repository_dependencies_only_if_compiling_contained_td = \
repository_util.get_repo_info_dict( trans, encoded_repository_id, changeset_revision )
return repository_dict, repository_metadata_dict, repo_info_dict
else:
- message = "Unable to locate repository_metadata record for repository id %d and changeset_revision %s" % ( repository.id, changeset_revision )
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( "Unable to locate repository_metadata record for repository id %s and changeset_revision %s" % \
+ ( str( repository.id ), str( changeset_revision ) ) )
return repository_dict, {}, {}
- except Exception, e:
- message = "Error in the Tool Shed repositories API in get_repository_revision_install_info: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ else:
+ debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: "
+ debug_msg += "Invalid name %s or owner %s or changeset_revision %s received." % \
+ ( str( name ), str( owner ), str( changeset_revision ) )
+ log.debug( debug_msg )
+ return {}, {}, {}
+
+ def __get_value_mapper( self, trans ):
+ value_mapper = { 'id' : trans.security.encode_id,
+ 'repository_id' : trans.security.encode_id }
+ return value_mapper
@web.expose_api
def import_capsule( self, trans, payload, **kwd ):
@@ -177,29 +198,27 @@
uploaded_file=None,
capsule_file_name=None )
if os.path.getsize( os.path.abspath( capsule_file_name ) ) == 0:
- message = 'Your capsule file is empty.'
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ log.debug( 'Your capsule file %s is empty.' % str( capsule_file_name ) )
+ return {}
try:
# Open for reading with transparent compression.
tar_archive = tarfile.open( capsule_file_path, 'r:*' )
except tarfile.ReadError, e:
- message = 'Error opening file %s: %s' % ( str( capsule_file_name ), str( e ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ log.debug( 'Error opening capsule file %s: %s' % ( str( capsule_file_name ), str( e ) ) )
+ return {}
capsule_dict[ 'tar_archive' ] = tar_archive
capsule_dict[ 'capsule_file_name' ] = capsule_file_name
capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict )
capsule_dict = import_util.validate_capsule( trans, **capsule_dict )
status = capsule_dict.get( 'status', 'error' )
if status == 'error':
- message = 'The capsule contents are invalid and cannpt be imported:<br/>%s' % str( capsule_dict.get( 'error_message', '' ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ log.debug( 'The capsule contents are invalid and cannot be imported:<br/>%s' % \
+ str( capsule_dict.get( 'error_message', '' ) ) )
+ return {}
encoded_file_path = capsule_dict.get( 'encoded_file_path', None )
+ if encoded_file_path is None:
+ log.debug( 'The capsule_dict %s is missing the required encoded_file_path entry.' % str( capsule_dict ) )
+ return {}
file_path = encoding_util.tool_shed_decode( encoded_file_path )
export_info_file_path = os.path.join( file_path, 'export_info.xml' )
export_info_dict = import_util.get_export_info_dict( export_info_file_path )
@@ -216,12 +235,14 @@
# Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name
repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path
- import_results_tups = repository_maintenance_util.create_repository_and_import_archive( trans,
- repository_status_info_dict,
- import_results_tups )
+ import_results_tups = \
+ repository_maintenance_util.create_repository_and_import_archive( trans,
+ repository_status_info_dict,
+ import_results_tups )
import_util.check_status_and_reset_downloadable( trans, import_results_tups )
suc.remove_dir( file_path )
- # NOTE: the order of installation is defined in import_results_tups, but order will be lost when transferred to return_dict.
+ # NOTE: the order of installation is defined in import_results_tups, but order will be lost
+ # when transferred to return_dict.
return_dict = {}
for import_results_tup in import_results_tups:
ok, name_owner, message = import_results_tup
@@ -237,28 +258,19 @@
GET /api/repositories
Displays a collection (list) of repositories.
"""
- value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
# Example URL: http://localhost:9009/api/repositories
repository_dicts = []
- deleted = util.string_as_bool( deleted )
- try:
- query = trans.sa_session.query( trans.app.model.Repository ) \
- .filter( trans.app.model.Repository.table.c.deleted == deleted ) \
- .order_by( trans.app.model.Repository.table.c.name ) \
- .all()
- for repository in query:
- repository_dict = repository.to_dict( view='collection', value_mapper=value_mapper )
- repository_dict[ 'url' ] = web.url_for( controller='repositories',
- action='show',
- id=trans.security.encode_id( repository.id ) )
- repository_dicts.append( repository_dict )
- return repository_dicts
- except Exception, e:
- message = "Error in the Tool Shed repositories API in index: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ deleted = util.asbool( deleted )
+ for repository in trans.sa_session.query( trans.app.model.Repository ) \
+ .filter( trans.app.model.Repository.table.c.deleted == deleted ) \
+ .order_by( trans.app.model.Repository.table.c.name ):
+ repository_dict = repository.to_dict( view='collection',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=trans.security.encode_id( repository.id ) )
+ repository_dicts.append( repository_dict )
+ return repository_dicts
@web.expose_api
def repository_ids_for_setting_metadata( self, trans, my_writable=False, **kwd ):
@@ -273,28 +285,22 @@
in addition to those repositories of type tool_dependency_definition. This param is ignored
if the current user is not an admin user, in which case this same restriction is automatic.
"""
- try:
- if trans.user_is_admin():
- my_writable = util.asbool( my_writable )
- else:
- my_writable = True
- handled_repository_ids = []
- repository_ids = []
- query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
- # Make sure repositories of type tool_dependency_definition are first in the list.
- for repository in query:
- if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- repository_ids.append( trans.security.encode_id( repository.id ) )
- # Now add all remaining repositories to the list.
- for repository in query:
- if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- repository_ids.append( trans.security.encode_id( repository.id ) )
- return repository_ids
- except Exception, e:
- message = "Error in the Tool Shed repositories API in repository_ids_for_setting_metadata: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ if trans.user_is_admin():
+ my_writable = util.asbool( my_writable )
+ else:
+ my_writable = True
+ handled_repository_ids = []
+ repository_ids = []
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
+ # Make sure repositories of type tool_dependency_definition are first in the list.
+ for repository in query:
+ if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ repository_ids.append( trans.security.encode_id( repository.id ) )
+ # Now add all remaining repositories to the list.
+ for repository in query:
+ if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ repository_ids.append( trans.security.encode_id( repository.id ) )
+ return repository_ids
@web.expose_api
def reset_metadata_on_repositories( self, trans, payload, **kwd ):
@@ -318,6 +324,7 @@
:param skip_file (optional): A local file name that contains the encoded repository ids associated with repositories to skip.
This param can be used as an alternative to the above encoded_ids_to_skip.
"""
+
def handle_repository( trans, repository, results ):
log.debug( "Resetting metadata on repository %s" % str( repository.name ) )
repository_id = trans.security.encode_id( repository.id )
@@ -335,53 +342,48 @@
status = '%s : %s' % ( str( repository.name ), message )
results[ 'repository_status' ].append( status )
return results
- try:
- start_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results = dict( start_time=start_time,
- repository_status=[],
- successful_count=0,
- unsuccessful_count=0 )
- handled_repository_ids = []
- encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] )
- skip_file = payload.get( 'skip_file', None )
- if skip_file and os.path.exists( skip_file ) and not encoded_ids_to_skip:
- # Load the list of encoded_ids_to_skip from the skip_file.
- # Contents of file must be 1 encoded repository id per line.
- lines = open( skip_file, 'rb' ).readlines()
- for line in lines:
- if line.startswith( '#' ):
- # Skip comments.
- continue
- encoded_ids_to_skip.append( line.rstrip( '\n' ) )
- if trans.user_is_admin():
- my_writable = util.asbool( payload.get( 'my_writable', False ) )
- else:
- my_writable = True
- query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
- # First reset metadata on all repositories of type repository_dependency_definition.
- for repository in query:
- encoded_id = trans.security.encode_id( repository.id )
- if encoded_id in encoded_ids_to_skip:
- log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
- ( str( repository.id ), str( encoded_ids_to_skip ) ) )
- elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- results = handle_repository( trans, repository, results )
- # Now reset metadata on all remaining repositories.
- for repository in query:
- encoded_id = trans.security.encode_id( repository.id )
- if encoded_id in encoded_ids_to_skip:
- log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
- ( str( repository.id ), str( encoded_ids_to_skip ) ) )
- elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- results = handle_repository( trans, repository, results )
- stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results[ 'stop_time' ] = stop_time
- return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
- except Exception, e:
- message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results = dict( start_time=start_time,
+ repository_status=[],
+ successful_count=0,
+ unsuccessful_count=0 )
+ handled_repository_ids = []
+ encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] )
+ skip_file = payload.get( 'skip_file', None )
+ if skip_file and os.path.exists( skip_file ) and not encoded_ids_to_skip:
+ # Load the list of encoded_ids_to_skip from the skip_file.
+ # Contents of file must be 1 encoded repository id per line.
+ lines = open( skip_file, 'rb' ).readlines()
+ for line in lines:
+ if line.startswith( '#' ):
+ # Skip comments.
+ continue
+ encoded_ids_to_skip.append( line.rstrip( '\n' ) )
+ if trans.user_is_admin():
+ my_writable = util.asbool( payload.get( 'my_writable', False ) )
+ else:
+ my_writable = True
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
+ # First reset metadata on all repositories of type repository_dependency_definition.
+ for repository in query:
+ encoded_id = trans.security.encode_id( repository.id )
+ if encoded_id in encoded_ids_to_skip:
+ log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
+ ( str( repository.id ), str( encoded_ids_to_skip ) ) )
+ elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ results = handle_repository( trans, repository, results )
+ # Now reset metadata on all remaining repositories.
+ for repository in query:
+ encoded_id = trans.security.encode_id( repository.id )
+ if encoded_id in encoded_ids_to_skip:
+ log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
+ ( str( repository.id ), str( encoded_ids_to_skip ) ) )
+ elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ results = handle_repository( trans, repository, results )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
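
For illustration, a hypothetical skip_file accepted by this endpoint: one encoded repository id per line, with lines beginning with '#' treated as comments (the ids below are placeholders):

    # repositories that have already been handled
    f9cad7b01a472135
    bb125606ff9ea620
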
@web.expose_api
def reset_metadata_on_repository( self, trans, payload, **kwd ):
@@ -395,6 +397,7 @@
The following parameters must be included in the payload.
:param repository_id: the encoded id of the repository on which metadata is to be reset.
"""
+
def handle_repository( trans, start_time, repository ):
results = dict( start_time=start_time,
repository_status=[] )
@@ -410,21 +413,16 @@
status = '%s : %s' % ( str( repository.name ), message )
results[ 'repository_status' ].append( status )
return results
- try:
- repository_id = payload.get( 'repository_id', None )
- if repository_id is not None:
- repository = suc.get_repository_in_tool_shed( trans, repository_id )
- start_time = strftime( "%Y-%m-%d %H:%M:%S" )
- log.debug( "%s...resetting metadata on repository %s" % ( start_time, str( repository.name ) ) )
- results = handle_repository( trans, start_time, repository )
- stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results[ 'stop_time' ] = stop_time
- return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
- except Exception, e:
- message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+
+ repository_id = payload.get( 'repository_id', None )
+ if repository_id is not None:
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ log.debug( "%s...resetting metadata on repository %s" % ( start_time, str( repository.name ) ) )
+ results = handle_repository( trans, start_time, repository )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
@web.expose_api_anonymous
def show( self, trans, id, **kwd ):
@@ -434,27 +432,14 @@
:param id: the encoded id of the Repository object
"""
- value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
# Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135
- try:
- repository = suc.get_repository_in_tool_shed( trans, id )
- repository_dict = repository.to_dict( view='element', value_mapper=value_mapper )
- repository_dict[ 'url' ] = web.url_for( controller='repositories',
- action='show',
- id=trans.security.encode_id( repository.id ) )
- return repository_dict
- except Exception, e:
- message = "Error in the Tool Shed repositories API in show: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
-
- def __get_value_mapper( self, trans, repository_metadata ):
- value_mapper = { 'id' : trans.security.encode_id,
- 'repository_id' : trans.security.encode_id }
- if repository_metadata.time_last_tested is not None:
- # For some reason the Dictifiable.to_dict() method in ~/galaxy/model/item_attrs.py requires
- # a function rather than a mapped value, so just pass the time_ago function here.
- value_mapper[ 'time_last_tested' ] = time_ago
- return value_mapper
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ if repository is None:
+ log.debug( "Unable to locate repository record for id %s." % ( str( id ) ) )
+ return {}
+ repository_dict = repository.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=trans.security.encode_id( repository.id ) )
+ return repository_dict
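Since show() now returns an empty JSON object instead of an HTTP 500 when the repository
cannot be located, callers should test for an empty dict. A minimal client sketch, assuming
a local Tool Shed; the host and encoded id come from the example URL above:

    import urllib2
    import simplejson

    def show_repository( tool_shed_url, encoded_repository_id ):
        # GET /api/repositories/{encoded_repository_id} and return the parsed dict, or None.
        url = '%s/api/repositories/%s' % ( tool_shed_url.rstrip( '/' ), encoded_repository_id )
        repository_dict = simplejson.loads( urllib2.urlopen( url ).read() )
        if not repository_dict:
            # The Tool Shed logged the failure and returned {} rather than raising.
            return None
        return repository_dict

    # repository_dict = show_repository( 'http://localhost:9009', 'f9cad7b01a472135' )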
diff -r e72b8fd2d28b0a5371d13a77469746d0786496de -r d02540589348a2840f0492c91dbabe441fa2fb26 lib/galaxy/webapps/tool_shed/api/repository_revisions.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
+++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
@@ -47,65 +47,29 @@
if not changeset_revision:
raise HTTPBadRequest( detail="Missing required parameter 'changeset_revision'." )
export_repository_dependencies = payload.get( 'export_repository_dependencies', False )
- try:
- # We'll currently support only gzip-compressed tar archives.
- file_type = 'gz'
- export_repository_dependencies = util.string_as_bool( export_repository_dependencies )
- # Get the repository information.
- repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
- repository_id = trans.security.encode_id( repository.id )
- response = export_util.export_repository( trans,
- tool_shed_url,
- repository_id,
- str( repository.name ),
- changeset_revision,
- file_type,
- export_repository_dependencies,
- api=True )
- return response
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in export: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ # We'll currently support only gzip-compressed tar archives.
+ file_type = 'gz'
+ export_repository_dependencies = util.asbool( export_repository_dependencies )
+ # Get the repository information.
+ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository is None:
+ error_message = 'Cannot locate repository with name %s and owner %s.' % ( str( name ), str( owner ) )
+ log.debug( error_message )
+ return None, error_message
+ repository_id = trans.security.encode_id( repository.id )
+ return export_util.export_repository( trans,
+ tool_shed_url,
+ repository_id,
+ str( repository.name ),
+ changeset_revision,
+ file_type,
+ export_repository_dependencies,
+ api=True )
- @web.expose_api_anonymous
- def repository_dependencies( self, trans, id, **kwd ):
- """
- GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies
- Displays information about a repository_metadata record in the Tool Shed.
-
- :param id: the encoded id of the `RepositoryMetadata` object
- """
- # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb12…
+ def __get_value_mapper( self, trans ):
value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
- repository_dependencies_dicts = []
- try:
- repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
- metadata = repository_metadata.metadata
- if metadata and 'repository_dependencies' in metadata:
- rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
- for rd_tup in rd_tups:
- tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ]
- repository_dependency = suc.get_repository_by_name_and_owner( trans.app, name, owner )
- repository_dependency_id = trans.security.encode_id( repository_dependency.id )
- repository_dependency_repository_metadata = \
- suc.get_repository_metadata_by_changeset_revision( trans, repository_dependency_id, changeset_revision )
- repository_dependency_repository_metadata_id = trans.security.encode_id( repository_dependency_repository_metadata.id )
- repository_dependency_dict = repository_dependency.to_dict( view='element', value_mapper=value_mapper )
- # We have to add the changeset_revision of of the repository dependency.
- repository_dependency_dict[ 'changeset_revision' ] = changeset_revision
- repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories',
- action='show',
- id=repository_dependency_repository_metadata_id )
- repository_dependencies_dicts.append( repository_dependency_dict )
- return repository_dependencies_dicts
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in repository_dependencies: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ 'repository_id' : trans.security.encode_id }
+ return value_mapper
@web.expose_api_anonymous
def index( self, trans, **kwd ):
@@ -120,59 +84,94 @@
# Filter by downloadable if received.
downloadable = kwd.get( 'downloadable', None )
if downloadable is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.string_as_bool( downloadable ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.asbool( downloadable ) )
# Filter by malicious if received.
malicious = kwd.get( 'malicious', None )
if malicious is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.string_as_bool( malicious ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.asbool( malicious ) )
# Filter by tools_functionally_correct if received.
tools_functionally_correct = kwd.get( 'tools_functionally_correct', None )
if tools_functionally_correct is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.string_as_bool( tools_functionally_correct ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.asbool( tools_functionally_correct ) )
# Filter by missing_test_components if received.
missing_test_components = kwd.get( 'missing_test_components', None )
if missing_test_components is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.string_as_bool( missing_test_components ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.asbool( missing_test_components ) )
# Filter by do_not_test if received.
do_not_test = kwd.get( 'do_not_test', None )
if do_not_test is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.string_as_bool( do_not_test ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.asbool( do_not_test ) )
# Filter by includes_tools if received.
includes_tools = kwd.get( 'includes_tools', None )
if includes_tools is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.string_as_bool( includes_tools ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.asbool( includes_tools ) )
# Filter by test_install_error if received.
test_install_error = kwd.get( 'test_install_error', None )
if test_install_error is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.test_install_error == util.string_as_bool( test_install_error ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.test_install_error == util.asbool( test_install_error ) )
# Filter by skip_tool_test if received.
skip_tool_test = kwd.get( 'skip_tool_test', None )
if skip_tool_test is not None:
- skip_tool_test = util.string_as_bool( skip_tool_test )
+ skip_tool_test = util.asbool( skip_tool_test )
skipped_metadata_ids_subquery = select( [ trans.app.model.SkipToolTest.table.c.repository_metadata_id ] )
if skip_tool_test:
clause_list.append( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) )
else:
clause_list.append( not_( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) ) )
- # Generate and execute the query.
- try:
- query = trans.sa_session.query( trans.app.model.RepositoryMetadata ) \
- .filter( and_( *clause_list ) ) \
- .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ) \
- .all()
- for repository_metadata in query:
- repository_metadata_dict = repository_metadata.to_dict( view='collection',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
- repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
- action='show',
- id=trans.security.encode_id( repository_metadata.id ) )
- repository_metadata_dicts.append( repository_metadata_dict )
- return repository_metadata_dicts
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in index: " + str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ for repository_metadata in trans.sa_session.query( trans.app.model.RepositoryMetadata ) \
+ .filter( and_( *clause_list ) ) \
+ .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ):
+ repository_metadata_dict = repository_metadata.to_dict( view='collection',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+ action='show',
+ id=trans.security.encode_id( repository_metadata.id ) )
+ repository_metadata_dicts.append( repository_metadata_dict )
+ return repository_metadata_dicts
+
+ @web.expose_api_anonymous
+ def repository_dependencies( self, trans, id, **kwd ):
+ """
+ GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies
+ Displays information about a repository_metadata record in the Tool Shed.
+
+ :param id: the encoded id of the `RepositoryMetadata` object
+ """
+ # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb12…
+ repository_dependencies_dicts = []
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
+ if repository_metadata is None:
+ log.debug( 'Invalid repository_metadata id received: %s' % str( id ) )
+ return repository_dependencies_dicts
+ metadata = repository_metadata.metadata
+ if metadata is None:
+ log.debug( 'The repository_metadata record with id %s has no metadata.' % str( id ) )
+ return repository_dependencies_dicts
+ if 'repository_dependencies' in metadata:
+ rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
+ for rd_tup in rd_tups:
+ tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ]
+ repository_dependency = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository_dependency is None:
+ log.debug( 'Cannot locate repository dependency %s owned by %s.' % ( name, owner ) )
+ continue
+ repository_dependency_id = trans.security.encode_id( repository_dependency.id )
+ repository_dependency_repository_metadata = \
+ suc.get_repository_metadata_by_changeset_revision( trans, repository_dependency_id, changeset_revision )
+ if repository_dependency_repository_metadata is None:
+ log.debug( 'Cannot locate repository_metadata with id %s for repository dependency %s owned by %s.' % \
+ ( str( repository_dependency_id ), str( name ), str( owner ) ) )
+ continue
+ repository_dependency_repository_metadata_id = trans.security.encode_id( repository_dependency_repository_metadata.id )
+ repository_dependency_dict = repository_dependency.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
+ # We have to add the changeset_revision of the repository dependency.
+ repository_dependency_dict[ 'changeset_revision' ] = changeset_revision
+ repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=repository_dependency_repository_metadata_id )
+ repository_dependencies_dicts.append( repository_dependency_dict )
+ return repository_dependencies_dicts
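The rd_tup handling above assumes the stored metadata dictionary has roughly the following
shape; the values here are hypothetical, and each rd_tup may carry additional trailing
fields, which is why only the first four are unpacked via rd_tup[ 0:4 ]:

    metadata = {
        'repository_dependencies': {
            'repository_dependencies': [
                # tool shed URL, name, owner, changeset revision, ...
                [ 'http://localhost:9009', 'package_example_1_0', 'devteam', 'abc123def456' ],
            ],
        },
    }
    for rd_tup in metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]:
        tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ]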
@web.expose_api_anonymous
def show( self, trans, id, **kwd ):
@@ -183,19 +182,16 @@
:param id: the encoded id of the `RepositoryMetadata` object
"""
# Example URL: http://localhost:9009/api/repository_revisions/bb125606ff9ea620
- try:
- repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
- repository_metadata_dict = repository_metadata.to_dict( view='element',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
- repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
- action='show',
- id=trans.security.encode_id( repository_metadata.id ) )
- return repository_metadata_dict
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in show: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
+ if repository_metadata is None:
+ log.debug( 'Cannot locate repository_metadata with id %s' % str( id ) )
+ return {}
+ repository_metadata_dict = repository_metadata.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+ action='show',
+ id=trans.security.encode_id( repository_metadata.id ) )
+ return repository_metadata_dict
@web.expose_api
def update( self, trans, payload, **kwd ):
@@ -204,37 +200,32 @@
Updates the value of specified columns of the repository_metadata table based on the key / value pairs in payload.
"""
repository_metadata_id = kwd.get( 'id', None )
- try:
- repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
- flush_needed = False
- for key, new_value in payload.items():
- if key == 'time_last_tested':
- repository_metadata.time_last_tested = datetime.datetime.utcnow()
- flush_needed = True
- elif hasattr( repository_metadata, key ):
- # log information when setting attributes associated with the Tool Shed's install and test framework.
- if key in [ 'do_not_test', 'includes_tools', 'missing_test_components', 'test_install_error',
- 'tools_functionally_correct' ]:
- log.debug( 'Setting repository_metadata table column %s to value %s for changeset_revision %s via the Tool Shed API.' % \
- ( str( key ), str( new_value ), str( repository_metadata.changeset_revision ) ) )
- setattr( repository_metadata, key, new_value )
- flush_needed = True
- if flush_needed:
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in update: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ if repository_metadata_id is None:
+ raise HTTPBadRequest( detail="Missing required parameter 'id'." )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
+ if repository_metadata is None:
+ log.debug( 'Cannot locate repository_metadata with id %s' % str( repository_metadata_id ) )
+ return {}
+ flush_needed = False
+ for key, new_value in payload.items():
+ if key == 'time_last_tested':
+ repository_metadata.time_last_tested = datetime.datetime.utcnow()
+ flush_needed = True
+ elif hasattr( repository_metadata, key ):
+ # log information when setting attributes associated with the Tool Shed's install and test framework.
+ if key in [ 'do_not_test', 'includes_tools', 'missing_test_components', 'test_install_error',
+ 'tools_functionally_correct' ]:
+ log.debug( 'Setting repository_metadata column %s to value %s for changeset_revision %s via the Tool Shed API.' % \
+ ( str( key ), str( new_value ), str( repository_metadata.changeset_revision ) ) )
+ setattr( repository_metadata, key, new_value )
+ flush_needed = True
+ if flush_needed:
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ trans.sa_session.refresh( repository_metadata )
repository_metadata_dict = repository_metadata.to_dict( view='element',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
+ value_mapper=self.__get_value_mapper( trans ) )
repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
action='show',
id=trans.security.encode_id( repository_metadata.id ) )
return repository_metadata_dict
-
- def __get_value_mapper( self, trans, repository_metadata ):
- value_mapper = { 'id' : trans.security.encode_id,
- 'repository_id' : trans.security.encode_id }
- return value_mapper
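The update() method above sets any repository_metadata column named in the payload, with
'time_last_tested' special-cased to datetime.datetime.utcnow() regardless of the submitted
value. A hypothetical invocation via the common.py update() helper below; the API key, host
and encoded id are placeholders, and the encoded id is assumed to be supplied to the
controller from the URL path by the API routing:

    from common import update

    data = dict( tools_functionally_correct='True',
                 do_not_test='False',
                 # Any value works here; the server substitutes the current UTC time.
                 time_last_tested='' )
    update( 'your_api_key',
            'http://localhost:9009/api/repository_revisions/bb125606ff9ea620',
            data,
            return_formatted=True )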
diff -r e72b8fd2d28b0a5371d13a77469746d0786496de -r d02540589348a2840f0492c91dbabe441fa2fb26 lib/tool_shed/scripts/api/common.py
--- a/lib/tool_shed/scripts/api/common.py
+++ b/lib/tool_shed/scripts/api/common.py
@@ -1,34 +1,27 @@
-import os, sys, urllib, urllib2
+import os
+import sys
+import urllib
+import urllib2
new_path = [ os.path.join( os.path.dirname( __file__ ), '..', '..', '..', '..', 'lib' ) ]
new_path.extend( sys.path[ 1: ] )
sys.path = new_path
+import tool_shed.util.shed_util_common as suc
+
from galaxy import eggs
import pkg_resources
pkg_resources.require( "simplejson" )
import simplejson
-pkg_resources.require( "pycrypto" )
-from Crypto.Cipher import Blowfish
-from Crypto.Util.randpool import RandomPool
-from Crypto.Util import number
-
-def encode_id( config_id_secret, obj_id ):
- # Utility method to encode ID's
- id_cipher = Blowfish.new( config_id_secret )
- # Convert to string
- s = str( obj_id )
- # Pad to a multiple of 8 with leading "!"
- s = ( "!" * ( 8 - len(s) % 8 ) ) + s
- # Encrypt
- return id_cipher.encrypt( s ).encode( 'hex' )
-
def delete( api_key, url, data, return_formatted=True ):
- # Sends an API DELETE request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ """
+ Sends an API DELETE request and acts as a generic formatter for the JSON response. The
+ 'data' will become the JSON payload read by the Tool Shed.
+ """
try:
- url = make_url( api_key, url )
+ url = make_url( url, api_key=api_key, args=None )
req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ))
req.get_method = lambda: 'DELETE'
r = simplejson.loads( urllib2.urlopen( req ).read() )
@@ -46,12 +39,13 @@
print r
def display( url, api_key=None, return_formatted=True ):
- # Sends an API GET request and acts as a generic formatter for the JSON response.
+ """Sends an API GET request and acts as a generic formatter for the JSON response."""
try:
r = get( url, api_key=api_key )
except urllib2.HTTPError, e:
print e
- print e.read( 1024 ) # Only return the first 1K of errors.
+ # Only return the first 1K of errors.
+ print e.read( 1024 )
sys.exit( 1 )
if type( r ) == unicode:
print 'error: %s' % r
@@ -83,16 +77,94 @@
print 'response is unknown type: %s' % type( r )
def get( url, api_key=None ):
- # Do the actual GET.
- url = make_url( url, api_key=api_key )
+ """Do the GET."""
+ url = make_url( url, api_key=api_key, args=None )
try:
return simplejson.loads( urllib2.urlopen( url ).read() )
except simplejson.decoder.JSONDecodeError, e:
print "URL did not return JSON data"
- sys.exit(1)
+ sys.exit( 1 )
+
+def get_api_url( base, parts=[], params=None ):
+ """Compose and return a URL for the Tool Shed API."""
+ if 'api' in parts and parts.index( 'api' ) != 0:
+ parts.pop( parts.index( 'api' ) )
+ parts.insert( 0, 'api' )
+ elif 'api' not in parts:
+ parts.insert( 0, 'api' )
+ url = suc.url_join( base, *parts )
+ if params is not None:
+ try:
+ query_string = urllib.urlencode( params )
+ except Exception, e:
+ # The value of params must be a string.
+ query_string = params
+ url += '?%s' % query_string
+ return url
+
+def get_latest_downloadable_changeset_revision_via_api( url, name, owner ):
+ """
+ Return the latest downloadable changeset revision for the repository defined by the received
+ name and owner.
+ """
+ error_message = ''
+ parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ]
+ params = dict( name=name, owner=owner )
+ api_url = get_api_url( base=url, parts=parts, params=params )
+ changeset_revisions, error_message = json_from_url( api_url )
+ if changeset_revisions is None or error_message:
+ return None, error_message
+ if len( changeset_revisions ) >= 1:
+ return changeset_revisions[ -1 ], error_message
+ return suc.INITIAL_CHANGELOG_HASH, error_message
+
+def get_repository_dict( url, repository_dict ):
+ """
+ Send a request to the Tool Shed to get additional information about the repository defined
+ by the received repository_dict. Add the information to the repository_dict and return it.
+ """
+ error_message = ''
+ if not isinstance( repository_dict, dict ):
+ error_message = 'Invalid repository_dict received: %s' % str( repository_dict )
+ return None, error_message
+ repository_id = repository_dict.get( 'repository_id', None )
+ if repository_id is None:
+ error_message = 'Invalid repository_dict does not contain a repository_id entry: %s' % str( repository_dict )
+ return None, error_message
+ parts = [ 'api', 'repositories', repository_id ]
+ api_url = get_api_url( base=url, parts=parts )
+ extended_dict, error_message = json_from_url( api_url )
+ if extended_dict is None or error_message:
+ return None, error_message
+ name = extended_dict.get( 'name', None )
+ owner = extended_dict.get( 'owner', None )
+ if name is not None and owner is not None:
+ name = str( name )
+ owner = str( owner )
+ latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision_via_api( url, name, owner )
+ if latest_changeset_revision is None or error_message:
+ return None, error_message
+ extended_dict[ 'latest_revision' ] = str( latest_changeset_revision )
+ return extended_dict, error_message
+ else:
+ error_message = 'Invalid extended_dict does not contain name or owner entries: %s' % str( extended_dict )
+ return None, error_message
+
+def json_from_url( url ):
+ """Send a request to the Tool Shed via the Tool Shed API and handle the response."""
+ error_message = ''
+ url_handle = urllib.urlopen( url )
+ url_contents = url_handle.read()
+ try:
+ parsed_json = simplejson.loads( url_contents )
+ except Exception, e:
+ error_message = str( url_contents )
+ print 'Error parsing JSON data in json_from_url(): ', str( e )
+ return None, error_message
+ return parsed_json, error_message
def make_url( url, api_key=None, args=None ):
- # Adds the API Key to the URL if it's not already there.
+ """Adds the API Key to the URL if it's not already there."""
if args is None:
args = []
argsep = '&'
@@ -104,20 +176,23 @@
return url + argsep + '&'.join( [ '='.join( t ) for t in args ] )
def post( url, data, api_key=None ):
- # Do the actual POST.
- url = make_url( url, api_key=api_key )
+ """Do the POST."""
+ url = make_url( url, api_key=api_key, args=None )
req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ) )
return simplejson.loads( urllib2.urlopen( req ).read() )
def put( url, data, api_key=None ):
- # Do the actual PUT.
- url = make_url( url, api_key=api_key )
+ """Do the PUT."""
+ url = make_url( url, api_key=api_key, args=None )
req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ))
req.get_method = lambda: 'PUT'
return simplejson.loads( urllib2.urlopen( req ).read() )
def submit( url, data, api_key=None, return_formatted=True ):
- # Sends an API POST request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ """
+ Sends an API POST request and acts as a generic formatter for the JSON response. The
+ 'data' will become the JSON payload read by the Tool Shed.
+ """
try:
r = post( url, data, api_key=api_key )
except urllib2.HTTPError, e:
@@ -132,7 +207,8 @@
print 'Response'
print '--------'
if type( r ) == list:
- # Currently the only implemented responses are lists of dicts, because submission creates some number of collection elements.
+ # Currently the only implemented responses are lists of dicts, because submission creates
+ # some number of collection elements.
for i in r:
if type( i ) == dict:
if 'url' in i:
@@ -149,9 +225,12 @@
print r
def update( api_key, url, data, return_formatted=True ):
- # Sends an API PUT request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ """
+ Sends an API PUT request and acts as a generic formatter for the JSON response. The
+ 'data' will become the JSON payload read by the Tool Shed.
+ """
try:
- r = put( api_key, url, data )
+ r = put( url, data, api_key=api_key )
except urllib2.HTTPError, e:
if return_formatted:
print e
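The helpers consolidated into common.py compose as follows; note that json_from_url() does
not raise on malformed JSON but returns a ( None, error_message ) pair instead. A minimal
sketch against a hypothetical local Tool Shed (the repository name and owner are placeholders):

    from common import get_api_url
    from common import json_from_url

    base_url = 'http://localhost:9009'
    api_url = get_api_url( base=base_url,
                           parts=[ 'repositories', 'get_ordered_installable_revisions' ],
                           params=dict( name='filtering', owner='devteam' ) )
    changeset_revisions, error_message = json_from_url( api_url )
    if changeset_revisions is None or error_message:
        print 'Error: ', error_message
    else:
        print changeset_revisions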
diff -r e72b8fd2d28b0a5371d13a77469746d0786496de -r d02540589348a2840f0492c91dbabe441fa2fb26 lib/tool_shed/scripts/api/get_filtered_repository_revisions.py
--- a/lib/tool_shed/scripts/api/get_filtered_repository_revisions.py
+++ b/lib/tool_shed/scripts/api/get_filtered_repository_revisions.py
@@ -26,69 +26,19 @@
import urllib
sys.path.insert( 0, os.path.dirname( __file__ ) )
+
from common import get
+from common import get_api_url
+from common import get_repository_dict
+from common import json_from_url
+
+from galaxy.util import asbool
from galaxy.util.json import from_json_string
import tool_shed.util.shed_util_common as suc
-def get_api_url( base, parts=[], params=None ):
- if 'api' in parts and parts.index( 'api' ) != 0:
- parts.pop( parts.index( 'api' ) )
- parts.insert( 0, 'api' )
- elif 'api' not in parts:
- parts.insert( 0, 'api' )
- url = suc.url_join( base, *parts )
- if params:
- url += '?%s' % params
- return url
-
-def get_latest_downloadable_changeset_revision( url, name, owner ):
- error_message = ''
- parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ]
- params = urllib.urlencode( dict( name=name, owner=owner ) )
- api_url = get_api_url( base=url, parts=parts, params=params )
- changeset_revisions, error_message = json_from_url( api_url )
- if error_message:
- return None, error_message
- if changeset_revisions:
- return changeset_revisions[ -1 ], error_message
- else:
- return suc.INITIAL_CHANGELOG_HASH, error_message
-
-def get_repository_dict( url, repository_dict ):
- error_message = ''
- parts = [ 'api', 'repositories', repository_dict[ 'repository_id' ] ]
- api_url = get_api_url( base=url, parts=parts )
- extended_dict, error_message = json_from_url( api_url )
- if error_message:
- return None, error_message
- name = str( extended_dict[ 'name' ] )
- owner = str( extended_dict[ 'owner' ] )
- latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision( url, name, owner )
- if error_message:
- print error_message
- extended_dict[ 'latest_revision' ] = str( latest_changeset_revision )
- return extended_dict, error_message
-
-def json_from_url( url ):
- error_message = ''
- url_handle = urllib.urlopen( url )
- url_contents = url_handle.read()
- try:
- parsed_json = from_json_string( url_contents )
- except Exception, e:
- error_message = str( url_contents )
- return None, error_message
- return parsed_json, error_message
-
-def string_as_bool( string ):
- if str( string ).lower() in [ 'true' ]:
- return True
- else:
- return False
-
def main( options ):
base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
- latest_revision_only = string_as_bool( options.latest_revision_only )
+ latest_revision_only = asbool( options.latest_revision_only )
do_not_test = str( options.do_not_test )
downloadable = str( options.downloadable )
includes_tools = str( options.includes_tools )
@@ -108,30 +58,31 @@
tools_functionally_correct=tools_functionally_correct ) )
api_url = get_api_url( base=base_tool_shed_url, parts=parts, params=params )
baseline_repository_dicts, error_message = json_from_url( api_url )
- if error_message:
+ if baseline_repository_dicts is None or error_message:
print error_message
- repository_dicts = []
- for baseline_repository_dict in baseline_repository_dicts:
- # We need to get some details from the tool shed API, such as repository name and owner, to pass on to the
- # module that will generate the install methods.
- repository_dict, error_message = get_repository_dict( base_tool_shed_url, baseline_repository_dict )
- if error_message:
- print 'Error getting additional details from the API: ', error_message
- repository_dicts.append( baseline_repository_dict )
- else:
- # Don't test empty repositories.
- changeset_revision = baseline_repository_dict[ 'changeset_revision' ]
- if changeset_revision != suc.INITIAL_CHANGELOG_HASH:
- # Merge the dictionary returned from /api/repository_revisions with the detailed repository_dict and
- # append it to the list of repository_dicts to install and test.
- if latest_revision_only:
- latest_revision = repository_dict[ 'latest_revision' ]
- if changeset_revision == latest_revision:
+ else:
+ repository_dicts = []
+ for baseline_repository_dict in baseline_repository_dicts:
+ # We need to get additional details from the tool shed API to pass on to the
+ # module that will generate the install methods.
+ repository_dict, error_message = get_repository_dict( base_tool_shed_url, baseline_repository_dict )
+ if error_message:
+ print 'Error getting additional details from the API: ', error_message
+ repository_dicts.append( baseline_repository_dict )
+ else:
+ # Don't test empty repositories.
+ changeset_revision = baseline_repository_dict.get( 'changeset_revision', suc.INITIAL_CHANGELOG_HASH )
+ if changeset_revision != suc.INITIAL_CHANGELOG_HASH:
+ # Merge the dictionary returned from /api/repository_revisions with the detailed repository_dict and
+ # append it to the list of repository_dicts to install and test.
+ if latest_revision_only:
+ latest_revision = repository_dict.get( 'latest_revision', suc.INITIAL_CHANGELOG_HASH )
+ if changeset_revision == latest_revision:
+ repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) )
+ else:
repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) )
- else:
- repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) )
- print '\n\n', repository_dicts
- print '\nThe url:\n\n', api_url, '\n\nreturned ', len( repository_dicts ), ' repository dictionaries...'
+ print '\n\n', repository_dicts
+ print '\nThe url:\n\n', api_url, '\n\nreturned ', len( repository_dicts ), ' repository dictionaries...'
if __name__ == '__main__':
parser = argparse.ArgumentParser( description='Get a filtered list of repository dictionaries.' )
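The dict( a.items() + b.items() ) construction above is the Python 2 idiom for merging two
dictionaries into a new one; when a key appears in both, the pair from the second dictionary
wins. A small illustration with hypothetical values:

    repository_dict = dict( name='filtering', latest_revision='abc123' )
    baseline_repository_dict = dict( name='filtering', changeset_revision='abc123' )
    merged = dict( repository_dict.items() + baseline_repository_dict.items() )
    # merged == { 'name': 'filtering', 'latest_revision': 'abc123', 'changeset_revision': 'abc123' }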
diff -r e72b8fd2d28b0a5371d13a77469746d0786496de -r d02540589348a2840f0492c91dbabe441fa2fb26 test/install_and_test_tool_shed_repositories/base/util.py
--- a/test/install_and_test_tool_shed_repositories/base/util.py
+++ b/test/install_and_test_tool_shed_repositories/base/util.py
@@ -3,10 +3,9 @@
cwd = os.getcwd()
sys.path.append( cwd )
-new_path = [ os.path.join( cwd, "scripts" ),
- os.path.join( cwd, "lib" ),
+new_path = [ os.path.join( cwd, "lib" ),
os.path.join( cwd, 'test' ),
- os.path.join( cwd, 'scripts', 'api' ) ]
+ os.path.join( cwd, 'lib', 'tool_shed', 'scripts', 'api' ) ]
new_path.extend( sys.path )
sys.path = new_path
@@ -27,6 +26,10 @@
from datetime import datetime
from datetime import timedelta
+from common import get_api_url
+from common import get_latest_downloadable_changeset_revision_via_api
+from common import get_repository_dict
+from common import json_from_url
from common import update
from galaxy.util import asbool
@@ -267,18 +270,6 @@
version = str( tool_dependency_dict[ 'version' ] )
print "# %s %s version %s" % ( type, name, version )
-def get_api_url( base, parts=[], params=None ):
- if 'api' in parts and parts.index( 'api' ) != 0:
- parts.pop( parts.index( 'api' ) )
- parts.insert( 0, 'api' )
- elif 'api' not in parts:
- parts.insert( 0, 'api' )
- url = suc.url_join( base, *parts )
- if params is not None:
- query_string = urllib.urlencode( params )
- url += '?%s' % query_string
- return url
-
def get_database_version( app ):
'''
This method returns the value of the version column from the migrate_version table, using the provided app's SQLAlchemy session to determine
@@ -296,19 +287,6 @@
break
return version
-def get_latest_downloadable_changeset_revision( url, name, owner ):
- error_message = ''
- parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ]
- params = dict( name=name, owner=owner )
- api_url = get_api_url( base=url, parts=parts, params=params )
- changeset_revisions, error_message = json_from_url( api_url )
- if error_message:
- return None, error_message
- if changeset_revisions:
- return changeset_revisions[ -1 ], error_message
- else:
- return suc.INITIAL_CHANGELOG_HASH, error_message
-
def get_missing_repository_dependencies( repository ):
"""
Return the entire list of missing repository dependencies for the received repository. The entire
@@ -467,28 +445,6 @@
return None, error_message
return repository_dependency_dicts, error_message
-def get_repository_dict( url, repository_dict ):
- error_message = ''
- if not isinstance( repository_dict, dict ):
- error_message = 'Invalid repository_dict received: %s' % str( repository_dict )
- return None, error_message
- repository_id = repository_dict.get( 'repository_id', None )
- if repository_id is None:
- error_message = 'Invalid repository_dict does not contain a repository_id entry: %s' % str( repository_dict )
- return None, error_message
- parts = [ 'api', 'repositories', repository_id ]
- api_url = get_api_url( base=url, parts=parts )
- extended_dict, error_message = json_from_url( api_url )
- if error_message:
- return None, error_message
- name = str( extended_dict[ 'name' ] )
- owner = str( extended_dict[ 'owner' ] )
- latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision( url, name, owner )
- if error_message:
- return None, error_message
- extended_dict[ 'latest_revision' ] = str( latest_changeset_revision )
- return extended_dict, error_message
-
def get_repository_dependencies_dicts( url, encoded_repository_metadata_id ):
"""
Return a list of dictionaries that define the repository dependencies of the repository defined by the
@@ -709,23 +665,23 @@
return False, None
def is_latest_downloadable_revision( url, repository_dict ):
- name = str( repository_dict[ 'name' ] )
- owner = str( repository_dict[ 'owner' ] )
- changeset_revision = str( repository_dict[ 'changeset_revision' ] )
- latest_revision = get_latest_downloadable_changeset_revision( url, name=name, owner=owner )
- return changeset_revision == str( latest_revision )
-
-def json_from_url( url ):
+ """
+ Return True if the changeset_revision defined in the received repository_dict is the latest
+ installable revision for the repository.
+ """
error_message = ''
- url_handle = urllib.urlopen( url )
- url_contents = url_handle.read()
- try:
- parsed_json = from_json_string( url_contents )
- except Exception, e:
- error_message = str( url_contents )
- log.exception( 'Error parsing JSON data in json_from_url(): %s.' % str( e ) )
- return None, error_message
- return parsed_json, error_message
+ name = repository_dict.get( 'name', None )
+ owner = repository_dict.get( 'owner', None )
+ changeset_revision = repository_dict.get( 'changeset_revision', None )
+ if name is not None and owner is not None and changeset_revision is not None:
+ name = str( name )
+ owner = str( owner )
+ changeset_revision = str( changeset_revision )
+ latest_revision, error_message = get_latest_downloadable_changeset_revision_via_api( url, name=name, owner=owner )
+ if latest_revision is None or error_message:
+ return None, error_message
+ is_latest_downloadable = changeset_revision == str( latest_revision )
+ return is_latest_downloadable, error_message
def parse_exclude_list( xml_filename ):
"""Return a list of repositories to exclude from testing."""
@@ -1024,9 +980,15 @@
if can_update_tool_shed:
metadata_revision_id = repository_dict.get( 'id', None )
if metadata_revision_id is not None:
- name = str( repository_dict[ 'name' ] )
- owner = str( repository_dict[ 'owner' ] )
- changeset_revision = str( repository_dict[ 'changeset_revision' ] )
+ name = repository_dict.get( 'name', None )
+ owner = repository_dict.get( 'owner', None )
+ changeset_revision = repository_dict.get( 'changeset_revision', None )
+ if name is None or owner is None or changeset_revision is None:
+ log.debug( 'Entries for name, owner or changeset_revision missing from repository_dict %s' % str( repository_dict ) )
+ return {}
+ name = str( name )
+ owner = str( owner )
+ changeset_revision = str( changeset_revision )
log.debug('\n=============================================================\n' )
log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
( changeset_revision, name, owner, str( tool_test_results_dict ) ) )
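Most helpers in this module now follow a ( value, error_message ) return convention rather
than raising, so callers are expected to check both members. A hedged caller sketch, assuming
the is_latest_downloadable_revision() defined above:

    def revision_is_current( url, repository_dict ):
        is_latest, error_message = is_latest_downloadable_revision( url, repository_dict )
        if is_latest is None or error_message:
            # A lookup failed somewhere down the call chain; the reason is in error_message.
            print 'Could not determine the latest revision: %s' % str( error_message )
            return False
        return is_latest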
diff -r e72b8fd2d28b0a5371d13a77469746d0786496de -r d02540589348a2840f0492c91dbabe441fa2fb26 test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
@@ -685,9 +685,16 @@
tool_test_results_dict[ 'failed_tests' ] = failed_test_dicts
failed_repository_dict = repository_identifier_dict
install_and_test_statistics_dict[ 'at_least_one_test_failed' ].append( failed_repository_dict )
- set_do_not_test = \
- not install_and_test_base_util.is_latest_downloadable_revision( install_and_test_base_util.galaxy_tool_shed_url,
- repository_dict )
+ is_latest_downloadable_revision, error_message = \
+ install_and_test_base_util.is_latest_downloadable_revision( install_and_test_base_util.galaxy_tool_shed_url,
+ repository_dict )
+ if is_latest_downloadable_revision is None or error_message:
+ log.debug( 'Error attempting to determine if revision %s of repository %s owned by %s ' % \
+ ( changeset_revision, name, owner ) )
+ log.debug( 'is the latest downloadable revision: %s' % str( error_message ) )
+ set_do_not_test = False
+ else:
+ set_do_not_test = not is_latest_downloadable_revision
params = dict( tools_functionally_correct=False,
test_install_error=False,
do_not_test=str( set_do_not_test ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Tool Shed API fixes and code cleanup.
by commits-noreply@bitbucket.org 09 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0643bbc23cc1/
Changeset: 0643bbc23cc1
User: greg
Date: 2014-01-09 21:59:18
Summary: Tool Shed API fixes and code cleanup.
Affected #: 7 files
diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -19,11 +19,6 @@
log = logging.getLogger( __name__ )
-def default_tool_shed_repository_value_mapper( trans, tool_shed_repository ):
- value_mapper={ 'id' : trans.security.encode_id( tool_shed_repository.id ),
- 'error_message' : tool_shed_repository.error_message or '' }
- return value_mapper
-
def get_message_for_no_shed_tool_config():
# This Galaxy instance is not configured with a shed-related tool panel configuration file.
message = 'The tool_config_file setting in universe_wsgi.ini must include at least one shed tool configuration file name with a <toolbox> '
@@ -48,8 +43,8 @@
:param id: the encoded id of the ToolShedRepository object
"""
# Example URL: http://localhost:8763/api/tool_shed_repositories/f2db41e1fa331b3e/exported_…
- # Since exported workflows are dictionaries with very few attributes that differentiate them from each other, we'll build the
- # list based on the following dictionary of those few attributes.
+ # Since exported workflows are dictionaries with very few attributes that differentiate them from each
+ # other, we'll build the list based on the following dictionary of those few attributes.
exported_workflows = []
repository = suc.get_tool_shed_repository_by_id( trans, id )
metadata = repository.metadata
@@ -58,17 +53,23 @@
else:
exported_workflow_tups = []
for index, exported_workflow_tup in enumerate( exported_workflow_tups ):
- # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of relative_path is the location
- # on disk (relative to the root of the installed repository) where the exported_workflow_dict file (.ga file) is located.
+ # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of
+ # relative_path is the location on disk (relative to the root of the installed repository) where the
+ # exported_workflow_dict file (.ga file) is located.
exported_workflow_dict = exported_workflow_tup[ 1 ]
annotation = exported_workflow_dict.get( 'annotation', '' )
format_version = exported_workflow_dict.get( 'format-version', '' )
workflow_name = exported_workflow_dict.get( 'name', '' )
- # Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's location (i.e., index) in the list.
+ # Since we don't have an in-memory object with an id, we'll identify the exported workflow via its
+ # location (i.e., index) in the list.
display_dict = dict( index=index, annotation=annotation, format_version=format_version, workflow_name=workflow_name )
exported_workflows.append( display_dict )
return exported_workflows
+ def __get_value_mapper( self, trans ):
+ value_mapper = { 'id' : trans.security.encode_id }
+ return value_mapper
+
@web.expose_api
def import_workflow( self, trans, payload, **kwd ):
"""
@@ -96,13 +97,11 @@
# Since we don't have an in-memory object with an id, we'll identify the exported workflow via its location (i.e., index) in the list.
exported_workflow = exported_workflows[ int( index ) ]
workflow_name = exported_workflow[ 'workflow_name' ]
- workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name )
+ workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name )
if status == 'error':
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
- else:
- return workflow.to_dict( view='element' )
+ log.debug( error_message )
+ return {}
+ return workflow.to_dict( view='element' )
@web.expose_api
def import_workflows( self, trans, **kwd ):
@@ -125,11 +124,9 @@
imported_workflow_dicts = []
for exported_workflow_dict in exported_workflows:
workflow_name = exported_workflow_dict[ 'workflow_name' ]
- workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name )
+ workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name )
if status == 'error':
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ log.debug( error_message )
else:
imported_workflow_dicts.append( workflow.to_dict( view='element' ) )
return imported_workflow_dicts
@@ -142,22 +139,15 @@
"""
# Example URL: http://localhost:8763/api/tool_shed_repositories
tool_shed_repository_dicts = []
- try:
- query = trans.install_model.context.query( trans.app.install_model.ToolShedRepository ) \
- .order_by( trans.app.install_model.ToolShedRepository.table.c.name ) \
- .all()
- for tool_shed_repository in query:
- tool_shed_repository_dict = tool_shed_repository.to_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
- tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
- action='show',
- id=trans.security.encode_id( tool_shed_repository.id ) )
- tool_shed_repository_dicts.append( tool_shed_repository_dict )
- return tool_shed_repository_dicts
- except Exception, e:
- message = "Error in the tool_shed_repositories API in index: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ for tool_shed_repository in trans.install_model.context.query( trans.app.install_model.ToolShedRepository ) \
+ .order_by( trans.app.install_model.ToolShedRepository.table.c.name ):
+ tool_shed_repository_dict = \
+ tool_shed_repository.to_dict( value_mapper=self.__get_value_mapper( trans ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ tool_shed_repository_dicts.append( tool_shed_repository_dict )
+ return tool_shed_repository_dicts
@web.expose_api
def install_repository_revision( self, trans, payload, **kwd ):
@@ -208,8 +198,7 @@
# Make sure this Galaxy instance is configured with a shed-related tool panel configuration file.
if not suc.have_shed_tool_conf_for_install( trans ):
message = get_message_for_no_shed_tool_config()
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
# Make sure the current user's API key proves he is an admin user in this Galaxy instance.
if not trans.user_is_admin():
@@ -225,18 +214,20 @@
except Exception, e:
message = "Error attempting to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \
( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
if raw_text:
+ # If successful, the response from get_repository_revision_install_info will be 3
+ # dictionaries, a dictionary defining the Repository, a dictionary defining the
+ # Repository revision (RepositoryMetadata), and a dictionary including the additional
+ # information required to install the repository.
items = json.from_json_string( raw_text )
repository_revision_dict = items[ 1 ]
repo_info_dict = items[ 2 ]
else:
message = "Unable to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \
( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
repo_info_dicts = [ repo_info_dict ]
# Make sure the tool shed returned everything we need for installing the repository.
@@ -345,7 +336,7 @@
tool_path,
install_tool_dependencies,
reinstalling=False )
- tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
+ tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans ) )
tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
action='show',
id=trans.security.encode_id( tool_shed_repository.id ) )
@@ -394,8 +385,7 @@
if not suc.have_shed_tool_conf_for_install( trans ):
# This Galaxy instance is not configured with a shed-related tool panel configuration file.
message = get_message_for_no_shed_tool_config()
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
if not trans.user_is_admin():
raise HTTPForbidden( detail='You are not authorized to install a tool shed repository into this Galaxy instance.' )
@@ -410,8 +400,7 @@
len( changeset_revisions ) != num_specified_repositories:
message = 'Error in tool_shed_repositories API in install_repository_revisions: the received parameters must be ordered '
message += 'lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.'
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( message )
return dict( status='error', error=message )
# Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain information
# about each of the repositories being installed.
@@ -482,7 +471,7 @@
repair_dict = repository_util.repair_tool_shed_repository( trans,
repository,
encoding_util.tool_shed_encode( repo_info_dict ) )
- repository_dict = repository.to_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, repository ) )
+ repository_dict = repository.to_dict( value_mapper=self.__get_value_mapper( trans ) )
repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
action='show',
id=trans.security.encode_id( repository.id ) )
@@ -502,39 +491,39 @@
:param key: the API key of the Galaxy admin user.
"""
- try:
- start_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results = dict( start_time=start_time,
- successful_count=0,
- unsuccessful_count=0,
- repository_status=[] )
- # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
- if not trans.user_is_admin():
- raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' )
- query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=False )
- # Now reset metadata on all remaining repositories.
- for repository in query:
- repository_id = trans.security.encode_id( repository.id )
- try:
- invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans, repository_id )
- if invalid_file_tups:
- message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
- results[ 'unsuccessful_count' ] += 1
- else:
- message = "Successfully reset metadata on repository %s owned by %s" % ( str( repository.name ), str( repository.owner ) )
- results[ 'successful_count' ] += 1
- except Exception, e:
- message = "Error resetting metadata on repository %s owned by %s: %s" % ( str( repository.name ), str( repository.owner ), str( e ) )
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results = dict( start_time=start_time,
+ successful_count=0,
+ unsuccessful_count=0,
+ repository_status=[] )
+ # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
+ if not trans.user_is_admin():
+ raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' )
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=False )
+ # Now reset metadata on all remaining repositories.
+ for repository in query:
+ repository_id = trans.security.encode_id( repository.id )
+ try:
+ invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans, repository_id )
+ if invalid_file_tups:
+ message = tool_util.generate_message_for_invalid_tools( trans,
+ invalid_file_tups,
+ repository,
+ None,
+ as_html=False )
results[ 'unsuccessful_count' ] += 1
- results[ 'repository_status' ].append( message )
- stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results[ 'stop_time' ] = stop_time
- return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
- except Exception, e:
- message = "Error in the Galaxy tool_shed_repositories API in reset_metadata_on_installed_repositories: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ else:
+ message = "Successfully reset metadata on repository %s owned by %s" % \
+ ( str( repository.name ), str( repository.owner ) )
+ results[ 'successful_count' ] += 1
+ except Exception, e:
+ message = "Error resetting metadata on repository %s owned by %s: %s" % \
+ ( str( repository.name ), str( repository.owner ), str( e ) )
+ results[ 'unsuccessful_count' ] += 1
+ results[ 'repository_status' ].append( message )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
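For reference, the value returned by reset_metadata_on_installed_repositories is an indented
JSON rendering of a dictionary shaped like the following (all values hypothetical):

    results = dict( start_time='2014-01-09 21:59:18',
                    stop_time='2014-01-09 22:01:03',
                    successful_count=1,
                    unsuccessful_count=0,
                    repository_status=[ 'Successfully reset metadata on repository filtering owned by devteam' ] )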
@web.expose_api
def show( self, trans, id, **kwd ):
@@ -545,15 +534,12 @@
:param id: the encoded id of the ToolShedRepository object
"""
# Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e
- try:
- tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id )
- tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
- tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
- action='show',
- id=trans.security.encode_id( tool_shed_repository.id ) )
- return tool_shed_repository_dict
- except Exception, e:
- message = "Error in tool_shed_repositories API in index: " + str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id )
+ if tool_shed_repository is None:
+ log.debug( "Unable to locate tool_shed_repository record for id %s." % ( str( id ) ) )
+ return {}
+ tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ return tool_shed_repository_dict
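The __get_value_mapper() pattern used throughout these controllers maps selected column names
to callables that to_dict() applies while serializing a model object. A minimal standalone
sketch of the mechanism, not Galaxy's actual Dictifiable implementation:

    def to_dict_with_value_mapper( obj, keys, value_mapper ):
        rval = {}
        for key in keys:
            value = getattr( obj, key, None )
            if key in value_mapper:
                # Each entry maps an attribute name to a function, e.g. trans.security.encode_id.
                value = value_mapper[ key ]( value )
            rval[ key ] = value
        return rval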
diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -36,22 +36,27 @@
:param name: the name of the Repository
:param owner: the owner of the Repository
- Returns the ordered list of changeset revision hash strings that are associated with installable revisions. As in the changelog, the
- list is ordered oldest to newest.
+ Returns the ordered list of changeset revision hash strings that are associated with installable revisions.
+ As in the changelog, the list is ordered oldest to newest.
"""
# Example URL: http://localhost:9009/api/repositories/get_installable_revisions?name=add_c…
- try:
+ if name and owner:
# Get the repository information.
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository is None:
+ error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
+ error_message += "cannot locate repository %s owned by %s." % ( str( name ), str( owner ) )
+ log.debug( error_message )
+ return []
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
ordered_installable_revisions = suc.get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True )
return ordered_installable_revisions
- except Exception, e:
- message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ else:
+ error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
+ error_message += "invalid name %s or owner %s received." % ( str( name ), str( owner ) )
+ log.debug( error_message )
+ return []
@web.expose_api_anonymous
def get_repository_revision_install_info( self, trans, name, owner, changeset_revision, **kwd ):
@@ -106,49 +111,65 @@
]
}
"""
- repository_value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
- # Example URL: http://localhost:9009/api/repositories/get_repository_revision_install_info…
- try:
+ # Example URL:
+ # http://<xyz>/api/repositories/get_repository_revision_install_info?name=<n>&owner=<o>&changeset_revision=<cr>
+ if name and owner and changeset_revision:
# Get the repository information.
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository is None:
+ log.debug( 'Cannot locate repository %s owned by %s' % ( str( name ), str( owner ) ) )
+ return {}, {}, {}
encoded_repository_id = trans.security.encode_id( repository.id )
- repository_dict = repository.to_dict( view='element', value_mapper=repository_value_mapper )
+ repository_dict = repository.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
repository_dict[ 'url' ] = web.url_for( controller='repositories',
action='show',
id=encoded_repository_id )
# Get the repository_metadata information.
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ encoded_repository_id,
+ changeset_revision )
if not repository_metadata:
- # The changeset_revision column in the repository_metadata table has been updated with a new value value, so find the
- # changeset_revision to which we need to update.
+                # The changeset_revision column in the repository_metadata table has been updated with a new
+                # value, so find the changeset_revision to which we need to update.
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
new_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, new_changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ encoded_repository_id,
+ new_changeset_revision )
changeset_revision = new_changeset_revision
if repository_metadata:
encoded_repository_metadata_id = trans.security.encode_id( repository_metadata.id )
repository_metadata_dict = repository_metadata.to_dict( view='collection',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
+ value_mapper=self.__get_value_mapper( trans ) )
repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
action='show',
id=encoded_repository_metadata_id )
# Get the repo_info_dict for installing the repository.
- repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, \
- has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+ repo_info_dict, \
+ includes_tools, \
+ includes_tool_dependencies, \
+ includes_tools_for_display_in_tool_panel, \
+ has_repository_dependencies, \
+ has_repository_dependencies_only_if_compiling_contained_td = \
repository_util.get_repo_info_dict( trans, encoded_repository_id, changeset_revision )
return repository_dict, repository_metadata_dict, repo_info_dict
else:
- message = "Unable to locate repository_metadata record for repository id %d and changeset_revision %s" % ( repository.id, changeset_revision )
- log.error( message, exc_info=True )
- trans.response.status = 500
+ log.debug( "Unable to locate repository_metadata record for repository id %s and changeset_revision %s" % \
+ ( str( repository.id ), str( changeset_revision ) ) )
return repository_dict, {}, {}
- except Exception, e:
- message = "Error in the Tool Shed repositories API in get_repository_revision_install_info: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ else:
+ debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: "
+ debug_msg += "Invalid name %s or owner %s or changeset_revision %s received." % \
+ ( str( name ), str( owner ), str( changeset_revision ) )
+ log.debug( debug_msg )
+ return {}, {}, {}
+
+ def __get_value_mapper( self, trans ):
+ value_mapper = { 'id' : trans.security.encode_id,
+ 'repository_id' : trans.security.encode_id }
+ return value_mapper
@web.expose_api
def import_capsule( self, trans, payload, **kwd ):
@@ -177,29 +198,27 @@
uploaded_file=None,
capsule_file_name=None )
if os.path.getsize( os.path.abspath( capsule_file_name ) ) == 0:
- message = 'Your capsule file is empty.'
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ log.debug( 'Your capsule file %s is empty.' % str( capsule_file_name ) )
+ return {}
try:
# Open for reading with transparent compression.
tar_archive = tarfile.open( capsule_file_path, 'r:*' )
except tarfile.ReadError, e:
- message = 'Error opening file %s: %s' % ( str( capsule_file_name ), str( e ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ log.debug( 'Error opening capsule file %s: %s' % ( str( capsule_file_name ), str( e ) ) )
+ return {}
capsule_dict[ 'tar_archive' ] = tar_archive
capsule_dict[ 'capsule_file_name' ] = capsule_file_name
capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict )
capsule_dict = import_util.validate_capsule( trans, **capsule_dict )
status = capsule_dict.get( 'status', 'error' )
if status == 'error':
- message = 'The capsule contents are invalid and cannpt be imported:<br/>%s' % str( capsule_dict.get( 'error_message', '' ) )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+            log.debug( 'The capsule contents are invalid and cannot be imported:<br/>%s' % \
+ str( capsule_dict.get( 'error_message', '' ) ) )
+ return {}
encoded_file_path = capsule_dict.get( 'encoded_file_path', None )
+ if encoded_file_path is None:
+ log.debug( 'The capsule_dict %s is missing the required encoded_file_path entry.' % str( capsule_dict ) )
+ return {}
file_path = encoding_util.tool_shed_decode( encoded_file_path )
export_info_file_path = os.path.join( file_path, 'export_info.xml' )
export_info_dict = import_util.get_export_info_dict( export_info_file_path )
@@ -216,12 +235,14 @@
# Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name
repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path
- import_results_tups = repository_maintenance_util.create_repository_and_import_archive( trans,
- repository_status_info_dict,
- import_results_tups )
+ import_results_tups = \
+ repository_maintenance_util.create_repository_and_import_archive( trans,
+ repository_status_info_dict,
+ import_results_tups )
import_util.check_status_and_reset_downloadable( trans, import_results_tups )
suc.remove_dir( file_path )
- # NOTE: the order of installation is defined in import_results_tups, but order will be lost when transferred to return_dict.
+ # NOTE: the order of installation is defined in import_results_tups, but order will be lost
+ # when transferred to return_dict.
return_dict = {}
for import_results_tup in import_results_tups:
ok, name_owner, message = import_results_tup
@@ -237,28 +258,19 @@
GET /api/repositories
Displays a collection (list) of repositories.
"""
- value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
# Example URL: http://localhost:9009/api/repositories
repository_dicts = []
- deleted = util.string_as_bool( deleted )
- try:
- query = trans.sa_session.query( trans.app.model.Repository ) \
- .filter( trans.app.model.Repository.table.c.deleted == deleted ) \
- .order_by( trans.app.model.Repository.table.c.name ) \
- .all()
- for repository in query:
- repository_dict = repository.to_dict( view='collection', value_mapper=value_mapper )
- repository_dict[ 'url' ] = web.url_for( controller='repositories',
- action='show',
- id=trans.security.encode_id( repository.id ) )
- repository_dicts.append( repository_dict )
- return repository_dicts
- except Exception, e:
- message = "Error in the Tool Shed repositories API in index: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ deleted = util.asbool( deleted )
+ for repository in trans.sa_session.query( trans.app.model.Repository ) \
+ .filter( trans.app.model.Repository.table.c.deleted == deleted ) \
+ .order_by( trans.app.model.Repository.table.c.name ):
+ repository_dict = repository.to_dict( view='collection',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=trans.security.encode_id( repository.id ) )
+ repository_dicts.append( repository_dict )
+ return repository_dicts
@web.expose_api
def repository_ids_for_setting_metadata( self, trans, my_writable=False, **kwd ):
@@ -273,28 +285,22 @@
in addition to those repositories of type tool_dependency_definition. This param is ignored
if the current user is not an admin user, in which case this same restriction is automatic.
"""
- try:
- if trans.user_is_admin():
- my_writable = util.asbool( my_writable )
- else:
- my_writable = True
- handled_repository_ids = []
- repository_ids = []
- query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
- # Make sure repositories of type tool_dependency_definition are first in the list.
- for repository in query:
- if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- repository_ids.append( trans.security.encode_id( repository.id ) )
- # Now add all remaining repositories to the list.
- for repository in query:
- if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- repository_ids.append( trans.security.encode_id( repository.id ) )
- return repository_ids
- except Exception, e:
- message = "Error in the Tool Shed repositories API in repository_ids_for_setting_metadata: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ if trans.user_is_admin():
+ my_writable = util.asbool( my_writable )
+ else:
+ my_writable = True
+ handled_repository_ids = []
+ repository_ids = []
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
+ # Make sure repositories of type tool_dependency_definition are first in the list.
+ for repository in query:
+ if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ repository_ids.append( trans.security.encode_id( repository.id ) )
+ # Now add all remaining repositories to the list.
+ for repository in query:
+ if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ repository_ids.append( trans.security.encode_id( repository.id ) )
+ return repository_ids
@web.expose_api
def reset_metadata_on_repositories( self, trans, payload, **kwd ):
@@ -318,6 +324,7 @@
:param skip_file (optional): A local file name that contains the encoded repository ids associated with repositories to skip.
This param can be used as an alternative to the above encoded_ids_to_skip.
"""
+
def handle_repository( trans, repository, results ):
log.debug( "Resetting metadata on repository %s" % str( repository.name ) )
repository_id = trans.security.encode_id( repository.id )
@@ -335,53 +342,48 @@
status = '%s : %s' % ( str( repository.name ), message )
results[ 'repository_status' ].append( status )
return results
- try:
- start_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results = dict( start_time=start_time,
- repository_status=[],
- successful_count=0,
- unsuccessful_count=0 )
- handled_repository_ids = []
- encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] )
- skip_file = payload.get( 'skip_file', None )
- if skip_file and os.path.exists( skip_file ) and not encoded_ids_to_skip:
- # Load the list of encoded_ids_to_skip from the skip_file.
- # Contents of file must be 1 encoded repository id per line.
- lines = open( skip_file, 'rb' ).readlines()
- for line in lines:
- if line.startswith( '#' ):
- # Skip comments.
- continue
- encoded_ids_to_skip.append( line.rstrip( '\n' ) )
- if trans.user_is_admin():
- my_writable = util.asbool( payload.get( 'my_writable', False ) )
- else:
- my_writable = True
- query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
- # First reset metadata on all repositories of type repository_dependency_definition.
- for repository in query:
- encoded_id = trans.security.encode_id( repository.id )
- if encoded_id in encoded_ids_to_skip:
- log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
- ( str( repository.id ), str( encoded_ids_to_skip ) ) )
- elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- results = handle_repository( trans, repository, results )
- # Now reset metadata on all remaining repositories.
- for repository in query:
- encoded_id = trans.security.encode_id( repository.id )
- if encoded_id in encoded_ids_to_skip:
- log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
- ( str( repository.id ), str( encoded_ids_to_skip ) ) )
- elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
- results = handle_repository( trans, repository, results )
- stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results[ 'stop_time' ] = stop_time
- return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
- except Exception, e:
- message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results = dict( start_time=start_time,
+ repository_status=[],
+ successful_count=0,
+ unsuccessful_count=0 )
+ handled_repository_ids = []
+ encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] )
+ skip_file = payload.get( 'skip_file', None )
+ if skip_file and os.path.exists( skip_file ) and not encoded_ids_to_skip:
+ # Load the list of encoded_ids_to_skip from the skip_file.
+ # Contents of file must be 1 encoded repository id per line.
+ lines = open( skip_file, 'rb' ).readlines()
+ for line in lines:
+ if line.startswith( '#' ):
+ # Skip comments.
+ continue
+ encoded_ids_to_skip.append( line.rstrip( '\n' ) )
+ if trans.user_is_admin():
+ my_writable = util.asbool( payload.get( 'my_writable', False ) )
+ else:
+ my_writable = True
+ query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False )
+ # First reset metadata on all repositories of type repository_dependency_definition.
+ for repository in query:
+ encoded_id = trans.security.encode_id( repository.id )
+ if encoded_id in encoded_ids_to_skip:
+ log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
+ ( str( repository.id ), str( encoded_ids_to_skip ) ) )
+ elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ results = handle_repository( trans, repository, results )
+ # Now reset metadata on all remaining repositories.
+ for repository in query:
+ encoded_id = trans.security.encode_id( repository.id )
+ if encoded_id in encoded_ids_to_skip:
+ log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \
+ ( str( repository.id ), str( encoded_ids_to_skip ) ) )
+ elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+ results = handle_repository( trans, repository, results )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
@web.expose_api
def reset_metadata_on_repository( self, trans, payload, **kwd ):
@@ -395,6 +397,7 @@
The following parameters must be included in the payload.
:param repository_id: the encoded id of the repository on which metadata is to be reset.
"""
+
def handle_repository( trans, start_time, repository ):
results = dict( start_time=start_time,
repository_status=[] )
@@ -410,21 +413,16 @@
status = '%s : %s' % ( str( repository.name ), message )
results[ 'repository_status' ].append( status )
return results
- try:
- repository_id = payload.get( 'repository_id', None )
- if repository_id is not None:
- repository = suc.get_repository_in_tool_shed( trans, repository_id )
- start_time = strftime( "%Y-%m-%d %H:%M:%S" )
- log.debug( "%s...resetting metadata on repository %s" % ( start_time, str( repository.name ) ) )
- results = handle_repository( trans, start_time, repository )
- stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
- results[ 'stop_time' ] = stop_time
- return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
- except Exception, e:
- message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+
+ repository_id = payload.get( 'repository_id', None )
+ if repository_id is not None:
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
+ start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ log.debug( "%s...resetting metadata on repository %s" % ( start_time, str( repository.name ) ) )
+ results = handle_repository( trans, start_time, repository )
+ stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+ results[ 'stop_time' ] = stop_time
+ return json.to_json_string( results, sort_keys=True, indent=4 * ' ' )
@web.expose_api_anonymous
def show( self, trans, id, **kwd ):
@@ -434,27 +432,14 @@
:param id: the encoded id of the Repository object
"""
- value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
# Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135
- try:
- repository = suc.get_repository_in_tool_shed( trans, id )
- repository_dict = repository.to_dict( view='element', value_mapper=value_mapper )
- repository_dict[ 'url' ] = web.url_for( controller='repositories',
- action='show',
- id=trans.security.encode_id( repository.id ) )
- return repository_dict
- except Exception, e:
- message = "Error in the Tool Shed repositories API in show: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
-
- def __get_value_mapper( self, trans, repository_metadata ):
- value_mapper = { 'id' : trans.security.encode_id,
- 'repository_id' : trans.security.encode_id }
- if repository_metadata.time_last_tested is not None:
- # For some reason the Dictifiable.to_dict() method in ~/galaxy/model/item_attrs.py requires
- # a function rather than a mapped value, so just pass the time_ago function here.
- value_mapper[ 'time_last_tested' ] = time_ago
- return value_mapper
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ if repository is None:
+ log.debug( "Unable to locate repository record for id %s." % ( str( id ) ) )
+ return {}
+ repository_dict = repository.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=trans.security.encode_id( repository.id ) )
+ return repository_dict
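
The show(), index() and install-info methods above now share the private
__get_value_mapper() helper. As a rough illustration of what a value_mapper
does (this is an assumption about how Dictifiable.to_dict() applies mapped
callables; apply_value_mapper and fake_encode below are hypothetical
stand-ins, not part of the commit):

    # Stand-in for trans.security.encode_id.
    fake_encode = lambda obj_id: 'encoded-%s' % obj_id

    def apply_value_mapper( row, value_mapper ):
        # Apply each mapped callable to the matching column value.
        return dict( ( key, value_mapper[ key ]( val ) if key in value_mapper else val )
                     for key, val in row.items() )

    row = { 'id': 42, 'repository_id': 7, 'name': 'add_column' }
    print apply_value_mapper( row, { 'id': fake_encode, 'repository_id': fake_encode } )
    # Prints the row with both id columns in their encoded form.
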
diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 lib/galaxy/webapps/tool_shed/api/repository_revisions.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
+++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
@@ -47,65 +47,29 @@
if not changeset_revision:
raise HTTPBadRequest( detail="Missing required parameter 'changeset_revision'." )
export_repository_dependencies = payload.get( 'export_repository_dependencies', False )
- try:
- # We'll currently support only gzip-compressed tar archives.
- file_type = 'gz'
- export_repository_dependencies = util.string_as_bool( export_repository_dependencies )
- # Get the repository information.
- repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
- repository_id = trans.security.encode_id( repository.id )
- response = export_util.export_repository( trans,
- tool_shed_url,
- repository_id,
- str( repository.name ),
- changeset_revision,
- file_type,
- export_repository_dependencies,
- api=True )
- return response
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in export: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ # We'll currently support only gzip-compressed tar archives.
+ file_type = 'gz'
+ export_repository_dependencies = util.asbool( export_repository_dependencies )
+ # Get the repository information.
+ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository is None:
+            error_message = 'Cannot locate repository with name %s and owner %s.' % ( str( name ), str( owner ) )
+ log.debug( error_message )
+ return None, error_message
+ repository_id = trans.security.encode_id( repository.id )
+ return export_util.export_repository( trans,
+ tool_shed_url,
+ repository_id,
+ str( repository.name ),
+ changeset_revision,
+ file_type,
+ export_repository_dependencies,
+ api=True )
- @web.expose_api_anonymous
- def repository_dependencies( self, trans, id, **kwd ):
- """
- GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies
- Displays information about a repository_metadata record in the Tool Shed.
-
- :param id: the encoded id of the `RepositoryMetadata` object
- """
- # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb12…
+ def __get_value_mapper( self, trans ):
value_mapper = { 'id' : trans.security.encode_id,
- 'user_id' : trans.security.encode_id }
- repository_dependencies_dicts = []
- try:
- repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
- metadata = repository_metadata.metadata
- if metadata and 'repository_dependencies' in metadata:
- rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
- for rd_tup in rd_tups:
- tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ]
- repository_dependency = suc.get_repository_by_name_and_owner( trans.app, name, owner )
- repository_dependency_id = trans.security.encode_id( repository_dependency.id )
- repository_dependency_repository_metadata = \
- suc.get_repository_metadata_by_changeset_revision( trans, repository_dependency_id, changeset_revision )
- repository_dependency_repository_metadata_id = trans.security.encode_id( repository_dependency_repository_metadata.id )
- repository_dependency_dict = repository_dependency.to_dict( view='element', value_mapper=value_mapper )
- # We have to add the changeset_revision of of the repository dependency.
- repository_dependency_dict[ 'changeset_revision' ] = changeset_revision
- repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories',
- action='show',
- id=repository_dependency_repository_metadata_id )
- repository_dependencies_dicts.append( repository_dependency_dict )
- return repository_dependencies_dicts
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in repository_dependencies: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ 'repository_id' : trans.security.encode_id }
+ return value_mapper
@web.expose_api_anonymous
def index( self, trans, **kwd ):
@@ -120,59 +84,94 @@
# Filter by downloadable if received.
downloadable = kwd.get( 'downloadable', None )
if downloadable is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.string_as_bool( downloadable ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.asbool( downloadable ) )
# Filter by malicious if received.
malicious = kwd.get( 'malicious', None )
if malicious is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.string_as_bool( malicious ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.asbool( malicious ) )
# Filter by tools_functionally_correct if received.
tools_functionally_correct = kwd.get( 'tools_functionally_correct', None )
if tools_functionally_correct is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.string_as_bool( tools_functionally_correct ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.asbool( tools_functionally_correct ) )
# Filter by missing_test_components if received.
missing_test_components = kwd.get( 'missing_test_components', None )
if missing_test_components is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.string_as_bool( missing_test_components ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.asbool( missing_test_components ) )
# Filter by do_not_test if received.
do_not_test = kwd.get( 'do_not_test', None )
if do_not_test is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.string_as_bool( do_not_test ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.asbool( do_not_test ) )
# Filter by includes_tools if received.
includes_tools = kwd.get( 'includes_tools', None )
if includes_tools is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.string_as_bool( includes_tools ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.asbool( includes_tools ) )
# Filter by test_install_error if received.
test_install_error = kwd.get( 'test_install_error', None )
if test_install_error is not None:
- clause_list.append( trans.model.RepositoryMetadata.table.c.test_install_error == util.string_as_bool( test_install_error ) )
+ clause_list.append( trans.model.RepositoryMetadata.table.c.test_install_error == util.asbool( test_install_error ) )
# Filter by skip_tool_test if received.
skip_tool_test = kwd.get( 'skip_tool_test', None )
if skip_tool_test is not None:
- skip_tool_test = util.string_as_bool( skip_tool_test )
+ skip_tool_test = util.asbool( skip_tool_test )
skipped_metadata_ids_subquery = select( [ trans.app.model.SkipToolTest.table.c.repository_metadata_id ] )
if skip_tool_test:
clause_list.append( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) )
else:
clause_list.append( not_( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) ) )
- # Generate and execute the query.
- try:
- query = trans.sa_session.query( trans.app.model.RepositoryMetadata ) \
- .filter( and_( *clause_list ) ) \
- .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ) \
- .all()
- for repository_metadata in query:
- repository_metadata_dict = repository_metadata.to_dict( view='collection',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
- repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
- action='show',
- id=trans.security.encode_id( repository_metadata.id ) )
- repository_metadata_dicts.append( repository_metadata_dict )
- return repository_metadata_dicts
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in index: " + str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ for repository_metadata in trans.sa_session.query( trans.app.model.RepositoryMetadata ) \
+ .filter( and_( *clause_list ) ) \
+ .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ):
+ repository_metadata_dict = repository_metadata.to_dict( view='collection',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+ action='show',
+ id=trans.security.encode_id( repository_metadata.id ) )
+ repository_metadata_dicts.append( repository_metadata_dict )
+ return repository_metadata_dicts
+
+ @web.expose_api_anonymous
+ def repository_dependencies( self, trans, id, **kwd ):
+ """
+ GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies
+ Displays information about a repository_metadata record in the Tool Shed.
+
+ :param id: the encoded id of the `RepositoryMetadata` object
+ """
+ # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb12…
+ repository_dependencies_dicts = []
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
+ if repository_metadata is None:
+ log.debug( 'Invalid repository_metadata id received: %s' % str( id ) )
+ return repository_dependencies_dicts
+ metadata = repository_metadata.metadata
+ if metadata is None:
+            log.debug( 'The repository_metadata record with id %s has no metadata.' % str( id ) )
+ return repository_dependencies_dicts
+ if 'repository_dependencies' in metadata:
+ rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
+ for rd_tup in rd_tups:
+ tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ]
+ repository_dependency = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository_dependency is None:
+                    log.debug( 'Cannot locate repository dependency %s owned by %s.' % ( name, owner ) )
+ continue
+ repository_dependency_id = trans.security.encode_id( repository_dependency.id )
+ repository_dependency_repository_metadata = \
+ suc.get_repository_metadata_by_changeset_revision( trans, repository_dependency_id, changeset_revision )
+ if repository_dependency_repository_metadata is None:
+ log.debug( 'Cannot locate repository_metadata with id %s for repository dependency %s owned by %s.' % \
+ ( str( repository_dependency_id ), str( name ), str( owner ) ) )
+ continue
+ repository_dependency_repository_metadata_id = trans.security.encode_id( repository_dependency_repository_metadata.id )
+ repository_dependency_dict = repository_dependency.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
+            # We have to add the changeset_revision of the repository dependency.
+ repository_dependency_dict[ 'changeset_revision' ] = changeset_revision
+ repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=repository_dependency_repository_metadata_id )
+ repository_dependencies_dicts.append( repository_dependency_dict )
+ return repository_dependencies_dicts
@web.expose_api_anonymous
def show( self, trans, id, **kwd ):
@@ -183,19 +182,16 @@
:param id: the encoded id of the `RepositoryMetadata` object
"""
# Example URL: http://localhost:9009/api/repository_revisions/bb125606ff9ea620
- try:
- repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
- repository_metadata_dict = repository_metadata.to_dict( view='element',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
- repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
- action='show',
- id=trans.security.encode_id( repository_metadata.id ) )
- return repository_metadata_dict
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in show: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
+ if repository_metadata is None:
+ log.debug( 'Cannot locate repository_metadata with id %s' % str( id ) )
+ return {}
+ repository_metadata_dict = repository_metadata.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
+ repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+ action='show',
+ id=trans.security.encode_id( repository_metadata.id ) )
+ return repository_metadata_dict
@web.expose_api
def update( self, trans, payload, **kwd ):
@@ -204,37 +200,32 @@
Updates the value of specified columns of the repository_metadata table based on the key / value pairs in payload.
"""
repository_metadata_id = kwd.get( 'id', None )
- try:
- repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
- flush_needed = False
- for key, new_value in payload.items():
- if key == 'time_last_tested':
- repository_metadata.time_last_tested = datetime.datetime.utcnow()
- flush_needed = True
- elif hasattr( repository_metadata, key ):
- # log information when setting attributes associated with the Tool Shed's install and test framework.
- if key in [ 'do_not_test', 'includes_tools', 'missing_test_components', 'test_install_error',
- 'tools_functionally_correct' ]:
- log.debug( 'Setting repository_metadata table column %s to value %s for changeset_revision %s via the Tool Shed API.' % \
- ( str( key ), str( new_value ), str( repository_metadata.changeset_revision ) ) )
- setattr( repository_metadata, key, new_value )
- flush_needed = True
- if flush_needed:
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- except Exception, e:
- message = "Error in the Tool Shed repository_revisions API in update: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
+ if repository_metadata_id is None:
+ raise HTTPBadRequest( detail="Missing required parameter 'id'." )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
+ if repository_metadata is None:
+ log.debug( 'Cannot locate repository_metadata with id %s' % str( repository_metadata_id ) )
+ return {}
+ flush_needed = False
+ for key, new_value in payload.items():
+ if key == 'time_last_tested':
+ repository_metadata.time_last_tested = datetime.datetime.utcnow()
+ flush_needed = True
+ elif hasattr( repository_metadata, key ):
+ # log information when setting attributes associated with the Tool Shed's install and test framework.
+ if key in [ 'do_not_test', 'includes_tools', 'missing_test_components', 'test_install_error',
+ 'tools_functionally_correct' ]:
+ log.debug( 'Setting repository_metadata column %s to value %s for changeset_revision %s via the Tool Shed API.' % \
+ ( str( key ), str( new_value ), str( repository_metadata.changeset_revision ) ) )
+ setattr( repository_metadata, key, new_value )
+ flush_needed = True
+ if flush_needed:
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ trans.sa_session.refresh( repository_metadata )
repository_metadata_dict = repository_metadata.to_dict( view='element',
- value_mapper=self.__get_value_mapper( trans, repository_metadata ) )
+ value_mapper=self.__get_value_mapper( trans ) )
repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
action='show',
id=trans.security.encode_id( repository_metadata.id ) )
return repository_metadata_dict
-
- def __get_value_mapper( self, trans, repository_metadata ):
- value_mapper = { 'id' : trans.security.encode_id,
- 'repository_id' : trans.security.encode_id }
- return value_mapper
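
The rewritten update() method sets only those attributes that already exist
on the repository_metadata record and flushes only when something actually
changed. A self-contained sketch of that loop (Record is a stand-in for the
SQLAlchemy model, not part of the commit):

    class Record( object ):
        do_not_test = False
        tools_functionally_correct = False

    record = Record()
    flush_needed = False
    payload = { 'do_not_test': True, 'bogus_key': 'ignored' }
    for key, new_value in payload.items():
        # Unknown keys are skipped rather than raising an error.
        if hasattr( record, key ):
            setattr( record, key, new_value )
            flush_needed = True
    print record.do_not_test, flush_needed   # True True
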
diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 lib/tool_shed/scripts/api/common.py
--- a/lib/tool_shed/scripts/api/common.py
+++ b/lib/tool_shed/scripts/api/common.py
@@ -1,34 +1,27 @@
-import os, sys, urllib, urllib2
+import os
+import sys
+import urllib
+import urllib2
new_path = [ os.path.join( os.path.dirname( __file__ ), '..', '..', '..', '..', 'lib' ) ]
new_path.extend( sys.path[ 1: ] )
sys.path = new_path
+import tool_shed.util.shed_util_common as suc
+
from galaxy import eggs
import pkg_resources
pkg_resources.require( "simplejson" )
import simplejson
-pkg_resources.require( "pycrypto" )
-from Crypto.Cipher import Blowfish
-from Crypto.Util.randpool import RandomPool
-from Crypto.Util import number
-
-def encode_id( config_id_secret, obj_id ):
- # Utility method to encode ID's
- id_cipher = Blowfish.new( config_id_secret )
- # Convert to string
- s = str( obj_id )
- # Pad to a multiple of 8 with leading "!"
- s = ( "!" * ( 8 - len(s) % 8 ) ) + s
- # Encrypt
- return id_cipher.encrypt( s ).encode( 'hex' )
-
def delete( api_key, url, data, return_formatted=True ):
- # Sends an API DELETE request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ """
+ Sends an API DELETE request and acts as a generic formatter for the JSON response. The
+ 'data' will become the JSON payload read by the Tool Shed.
+ """
try:
- url = make_url( api_key, url )
+ url = make_url( url, api_key=api_key, args=None )
req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ))
req.get_method = lambda: 'DELETE'
r = simplejson.loads( urllib2.urlopen( req ).read() )
@@ -46,12 +39,13 @@
print r
def display( url, api_key=None, return_formatted=True ):
- # Sends an API GET request and acts as a generic formatter for the JSON response.
+ """Sends an API GET request and acts as a generic formatter for the JSON response."""
try:
r = get( url, api_key=api_key )
except urllib2.HTTPError, e:
print e
- print e.read( 1024 ) # Only return the first 1K of errors.
+ # Only return the first 1K of errors.
+ print e.read( 1024 )
sys.exit( 1 )
if type( r ) == unicode:
print 'error: %s' % r
@@ -83,16 +77,94 @@
print 'response is unknown type: %s' % type( r )
def get( url, api_key=None ):
- # Do the actual GET.
- url = make_url( url, api_key=api_key )
+ """Do the GET."""
+ url = make_url( url, api_key=api_key, args=None )
try:
return simplejson.loads( urllib2.urlopen( url ).read() )
except simplejson.decoder.JSONDecodeError, e:
print "URL did not return JSON data"
- sys.exit(1)
+ sys.exit( 1 )
+
+def get_api_url( base, parts=[], params=None ):
+ """Compose and return a URL for the Tool Shed API."""
+ if 'api' in parts and parts.index( 'api' ) != 0:
+ parts.pop( parts.index( 'api' ) )
+ parts.insert( 0, 'api' )
+ elif 'api' not in parts:
+ parts.insert( 0, 'api' )
+ url = suc.url_join( base, *parts )
+ if params is not None:
+ try:
+ query_string = urllib.urlencode( params )
+ except Exception, e:
+ # The value of params must be a string.
+ query_string = params
+ url += '?%s' % query_string
+ return url
+
+def get_latest_downloadable_changeset_revision_via_api( url, name, owner ):
+ """
+ Return the latest downloadable changeset revision for the repository defined by the received
+ name and owner.
+ """
+ error_message = ''
+ parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ]
+ params = dict( name=name, owner=owner )
+ api_url = get_api_url( base=url, parts=parts, params=params )
+ changeset_revisions, error_message = json_from_url( api_url )
+ if changeset_revisions is None or error_message:
+ return None, error_message
+ if len( changeset_revisions ) >= 1:
+ return changeset_revisions[ -1 ], error_message
+ return suc.INITIAL_CHANGELOG_HASH, error_message
+
+def get_repository_dict( url, repository_dict ):
+ """
+ Send a request to the Tool Shed to get additional information about the repository defined
+ by the received repository_dict. Add the information to the repository_dict and return it.
+ """
+ error_message = ''
+ if not isinstance( repository_dict, dict ):
+ error_message = 'Invalid repository_dict received: %s' % str( repository_dict )
+ return None, error_message
+ repository_id = repository_dict.get( 'repository_id', None )
+ if repository_id is None:
+ error_message = 'Invalid repository_dict does not contain a repository_id entry: %s' % str( repository_dict )
+ return None, error_message
+ parts = [ 'api', 'repositories', repository_id ]
+ api_url = get_api_url( base=url, parts=parts )
+ extended_dict, error_message = json_from_url( api_url )
+ if extended_dict is None or error_message:
+ return None, error_message
+ name = extended_dict.get( 'name', None )
+ owner = extended_dict.get( 'owner', None )
+ if name is not None and owner is not None:
+ name = str( name )
+ owner = str( owner )
+ latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision_via_api( url, name, owner )
+ if latest_changeset_revision is None or error_message:
+ return None, error_message
+ extended_dict[ 'latest_revision' ] = str( latest_changeset_revision )
+ return extended_dict, error_message
+ else:
+        error_message = 'Invalid extended_dict does not contain name or owner entries: %s' % str( extended_dict )
+ return None, error_message
+
+def json_from_url( url ):
+ """Send a request to the Tool Shed via the Tool Shed API and handle the response."""
+ error_message = ''
+ url_handle = urllib.urlopen( url )
+ url_contents = url_handle.read()
+ try:
+ parsed_json = simplejson.loads( url_contents )
+ except Exception, e:
+ error_message = str( url_contents )
+ print 'Error parsing JSON data in json_from_url(): ', str( e )
+ return None, error_message
+ return parsed_json, error_message
def make_url( url, api_key=None, args=None ):
- # Adds the API Key to the URL if it's not already there.
+ """Adds the API Key to the URL if it's not already there."""
if args is None:
args = []
argsep = '&'
@@ -104,20 +176,23 @@
return url + argsep + '&'.join( [ '='.join( t ) for t in args ] )
def post( url, data, api_key=None ):
- # Do the actual POST.
- url = make_url( url, api_key=api_key )
+ """Do the POST."""
+ url = make_url( url, api_key=api_key, args=None )
req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ) )
return simplejson.loads( urllib2.urlopen( req ).read() )
def put( url, data, api_key=None ):
- # Do the actual PUT.
- url = make_url( url, api_key=api_key )
+ """Do the PUT."""
+ url = make_url( url, api_key=api_key, args=None )
req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ))
req.get_method = lambda: 'PUT'
return simplejson.loads( urllib2.urlopen( req ).read() )
def submit( url, data, api_key=None, return_formatted=True ):
- # Sends an API POST request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ """
+ Sends an API POST request and acts as a generic formatter for the JSON response. The
+ 'data' will become the JSON payload read by the Tool Shed.
+ """
try:
r = post( url, data, api_key=api_key )
except urllib2.HTTPError, e:
@@ -132,7 +207,8 @@
print 'Response'
print '--------'
if type( r ) == list:
- # Currently the only implemented responses are lists of dicts, because submission creates some number of collection elements.
+ # Currently the only implemented responses are lists of dicts, because submission creates
+ # some number of collection elements.
for i in r:
if type( i ) == dict:
if 'url' in i:
@@ -149,9 +225,12 @@
print r
def update( api_key, url, data, return_formatted=True ):
- # Sends an API PUT request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ """
+ Sends an API PUT request and acts as a generic formatter for the JSON response. The
+ 'data' will become the JSON payload read by the Tool Shed.
+ """
try:
- r = put( api_key, url, data )
+ r = put( url, data, api_key=api_key )
except urllib2.HTTPError, e:
if return_formatted:
print e
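
The helpers consolidated into common.py can be driven directly from a script
run inside a Galaxy checkout. A hedged usage sketch, assuming a Tool Shed
listening locally on port 9009 and a hypothetical repository add_column owned
by devteam:

    from common import get_api_url
    from common import get_latest_downloadable_changeset_revision_via_api

    base = 'http://localhost:9009'
    # get_api_url() prepends 'api' to the parts, yielding something like:
    # http://localhost:9009/api/repositories/get_ordered_installable_revisions?name=add_column&owner=devteam
    url = get_api_url( base=base,
                       parts=[ 'repositories', 'get_ordered_installable_revisions' ],
                       params=dict( name='add_column', owner='devteam' ) )
    revision, error_message = get_latest_downloadable_changeset_revision_via_api( base, 'add_column', 'devteam' )
    if error_message:
        print error_message
    else:
        print 'Latest installable revision: %s' % revision
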
diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 lib/tool_shed/scripts/api/get_filtered_repository_revisions.py
--- a/lib/tool_shed/scripts/api/get_filtered_repository_revisions.py
+++ b/lib/tool_shed/scripts/api/get_filtered_repository_revisions.py
@@ -26,69 +26,19 @@
import urllib
sys.path.insert( 0, os.path.dirname( __file__ ) )
+
from common import get
+from common import get_api_url
+from common import get_repository_dict
+from common import json_from_url
+
+from galaxy.util import asbool
from galaxy.util.json import from_json_string
import tool_shed.util.shed_util_common as suc
-def get_api_url( base, parts=[], params=None ):
- if 'api' in parts and parts.index( 'api' ) != 0:
- parts.pop( parts.index( 'api' ) )
- parts.insert( 0, 'api' )
- elif 'api' not in parts:
- parts.insert( 0, 'api' )
- url = suc.url_join( base, *parts )
- if params:
- url += '?%s' % params
- return url
-
-def get_latest_downloadable_changeset_revision( url, name, owner ):
- error_message = ''
- parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ]
- params = urllib.urlencode( dict( name=name, owner=owner ) )
- api_url = get_api_url( base=url, parts=parts, params=params )
- changeset_revisions, error_message = json_from_url( api_url )
- if error_message:
- return None, error_message
- if changeset_revisions:
- return changeset_revisions[ -1 ], error_message
- else:
- return suc.INITIAL_CHANGELOG_HASH, error_message
-
-def get_repository_dict( url, repository_dict ):
- error_message = ''
- parts = [ 'api', 'repositories', repository_dict[ 'repository_id' ] ]
- api_url = get_api_url( base=url, parts=parts )
- extended_dict, error_message = json_from_url( api_url )
- if error_message:
- return None, error_message
- name = str( extended_dict[ 'name' ] )
- owner = str( extended_dict[ 'owner' ] )
- latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision( url, name, owner )
- if error_message:
- print error_message
- extended_dict[ 'latest_revision' ] = str( latest_changeset_revision )
- return extended_dict, error_message
-
-def json_from_url( url ):
- error_message = ''
- url_handle = urllib.urlopen( url )
- url_contents = url_handle.read()
- try:
- parsed_json = from_json_string( url_contents )
- except Exception, e:
- error_message = str( url_contents )
- return None, error_message
- return parsed_json, error_message
-
-def string_as_bool( string ):
- if str( string ).lower() in [ 'true' ]:
- return True
- else:
- return False
-
def main( options ):
base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
- latest_revision_only = string_as_bool( options.latest_revision_only )
+ latest_revision_only = asbool( options.latest_revision_only )
do_not_test = str( options.do_not_test )
downloadable = str( options.downloadable )
includes_tools = str( options.includes_tools )
@@ -108,30 +58,31 @@
tools_functionally_correct=tools_functionally_correct ) )
api_url = get_api_url( base=base_tool_shed_url, parts=parts, params=params )
baseline_repository_dicts, error_message = json_from_url( api_url )
- if error_message:
+ if baseline_repository_dicts is None or error_message:
print error_message
- repository_dicts = []
- for baseline_repository_dict in baseline_repository_dicts:
- # We need to get some details from the tool shed API, such as repository name and owner, to pass on to the
- # module that will generate the install methods.
- repository_dict, error_message = get_repository_dict( base_tool_shed_url, baseline_repository_dict )
- if error_message:
- print 'Error getting additional details from the API: ', error_message
- repository_dicts.append( baseline_repository_dict )
- else:
- # Don't test empty repositories.
- changeset_revision = baseline_repository_dict[ 'changeset_revision' ]
- if changeset_revision != suc.INITIAL_CHANGELOG_HASH:
- # Merge the dictionary returned from /api/repository_revisions with the detailed repository_dict and
- # append it to the list of repository_dicts to install and test.
- if latest_revision_only:
- latest_revision = repository_dict[ 'latest_revision' ]
- if changeset_revision == latest_revision:
+ else:
+ repository_dicts = []
+ for baseline_repository_dict in baseline_repository_dicts:
+ # We need to get additional details from the tool shed API to pass on to the
+ # module that will generate the install methods.
+ repository_dict, error_message = get_repository_dict( base_tool_shed_url, baseline_repository_dict )
+ if error_message:
+ print 'Error getting additional details from the API: ', error_message
+ repository_dicts.append( baseline_repository_dict )
+ else:
+ # Don't test empty repositories.
+ changeset_revision = baseline_repository_dict.get( 'changeset_revision', suc.INITIAL_CHANGELOG_HASH )
+ if changeset_revision != suc.INITIAL_CHANGELOG_HASH:
+ # Merge the dictionary returned from /api/repository_revisions with the detailed repository_dict and
+ # append it to the list of repository_dicts to install and test.
+ if latest_revision_only:
+ latest_revision = repository_dict.get( 'latest_revision', suc.INITIAL_CHANGELOG_HASH )
+ if changeset_revision == latest_revision:
+ repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) )
+ else:
repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) )
- else:
- repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) )
- print '\n\n', repository_dicts
- print '\nThe url:\n\n', api_url, '\n\nreturned ', len( repository_dicts ), ' repository dictionaries...'
+ print '\n\n', repository_dicts
+ print '\nThe url:\n\n', api_url, '\n\nreturned ', len( repository_dicts ), ' repository dictionaries...'
if __name__ == '__main__':
parser = argparse.ArgumentParser( description='Get a filtered list of repository dictionaries.' )
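
One subtlety in the merge above: dict( repository_dict.items() +
baseline_repository_dict.items() ) lets the baseline dictionary win on key
collisions, because later key/value pairs overwrite earlier ones. A quick
Python 2 illustration with example values only:

    detailed = { 'changeset_revision': 'aaa', 'latest_revision': 'bbb' }
    baseline = { 'changeset_revision': 'bbb' }
    print dict( detailed.items() + baseline.items() )
    # {'latest_revision': 'bbb', 'changeset_revision': 'bbb'}
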
diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 test/install_and_test_tool_shed_repositories/base/util.py
--- a/test/install_and_test_tool_shed_repositories/base/util.py
+++ b/test/install_and_test_tool_shed_repositories/base/util.py
@@ -3,10 +3,9 @@
cwd = os.getcwd()
sys.path.append( cwd )
-new_path = [ os.path.join( cwd, "scripts" ),
- os.path.join( cwd, "lib" ),
+new_path = [ os.path.join( cwd, "lib" ),
os.path.join( cwd, 'test' ),
- os.path.join( cwd, 'scripts', 'api' ) ]
+ os.path.join( cwd, 'lib', 'tool_shed', 'scripts', 'api' ) ]
new_path.extend( sys.path )
sys.path = new_path
@@ -27,6 +26,10 @@
from datetime import datetime
from datetime import timedelta
+from common import get_api_url
+from common import get_latest_downloadable_changeset_revision_via_api
+from common import get_repository_dict
+from common import json_from_url
from common import update
from galaxy.util import asbool
@@ -267,18 +270,6 @@
version = str( tool_dependency_dict[ 'version' ] )
print "# %s %s version %s" % ( type, name, version )
-def get_api_url( base, parts=[], params=None ):
- if 'api' in parts and parts.index( 'api' ) != 0:
- parts.pop( parts.index( 'api' ) )
- parts.insert( 0, 'api' )
- elif 'api' not in parts:
- parts.insert( 0, 'api' )
- url = suc.url_join( base, *parts )
- if params is not None:
- query_string = urllib.urlencode( params )
- url += '?%s' % query_string
- return url
-
def get_database_version( app ):
'''
This method returns the value of the version column from the migrate_version table, using the provided app's SQLAlchemy session to determine
@@ -296,19 +287,6 @@
break
return version
-def get_latest_downloadable_changeset_revision( url, name, owner ):
- error_message = ''
- parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ]
- params = dict( name=name, owner=owner )
- api_url = get_api_url( base=url, parts=parts, params=params )
- changeset_revisions, error_message = json_from_url( api_url )
- if error_message:
- return None, error_message
- if changeset_revisions:
- return changeset_revisions[ -1 ], error_message
- else:
- return suc.INITIAL_CHANGELOG_HASH, error_message
-
def get_missing_repository_dependencies( repository ):
"""
Return the entire list of missing repository dependencies for the received repository. The entire
@@ -467,28 +445,6 @@
return None, error_message
return repository_dependency_dicts, error_message
-def get_repository_dict( url, repository_dict ):
- error_message = ''
- if not isinstance( repository_dict, dict ):
- error_message = 'Invalid repository_dict received: %s' % str( repository_dict )
- return None, error_message
- repository_id = repository_dict.get( 'repository_id', None )
- if repository_id is None:
- error_message = 'Invalid repository_dict does not contain a repository_id entry: %s' % str( repository_dict )
- return None, error_message
- parts = [ 'api', 'repositories', repository_id ]
- api_url = get_api_url( base=url, parts=parts )
- extended_dict, error_message = json_from_url( api_url )
- if error_message:
- return None, error_message
- name = str( extended_dict[ 'name' ] )
- owner = str( extended_dict[ 'owner' ] )
- latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision( url, name, owner )
- if error_message:
- return None, error_message
- extended_dict[ 'latest_revision' ] = str( latest_changeset_revision )
- return extended_dict, error_message
-
def get_repository_dependencies_dicts( url, encoded_repository_metadata_id ):
"""
    Return a list of dictionaries that define the repository dependencies of the repository defined by the
@@ -709,23 +665,23 @@
return False, None
def is_latest_downloadable_revision( url, repository_dict ):
- name = str( repository_dict[ 'name' ] )
- owner = str( repository_dict[ 'owner' ] )
- changeset_revision = str( repository_dict[ 'changeset_revision' ] )
- latest_revision = get_latest_downloadable_changeset_revision( url, name=name, owner=owner )
- return changeset_revision == str( latest_revision )
-
-def json_from_url( url ):
+ """
+ Return True if the changeset_revision defined in the received repository_dict is the latest
+ installable revision for the repository.
+ """
error_message = ''
- url_handle = urllib.urlopen( url )
- url_contents = url_handle.read()
- try:
- parsed_json = from_json_string( url_contents )
- except Exception, e:
- error_message = str( url_contents )
- log.exception( 'Error parsing JSON data in json_from_url(): %s.' % str( e ) )
- return None, error_message
- return parsed_json, error_message
+ name = repository_dict.get( 'name', None )
+ owner = repository_dict.get( 'owner', None )
+ changeset_revision = repository_dict.get( 'changeset_revision', None )
+ if name is not None and owner is not None and changeset_revision is not None:
+ name = str( name )
+ owner = str( owner )
+ changeset_revision = str( changeset_revision )
+ latest_revision, error_message = get_latest_downloadable_changeset_revision_via_api( url, name=name, owner=owner )
+ if latest_revision is None or error_message:
+ return None, error_message
+ is_latest_downloadable = changeset_revision == str( latest_revision )
+ return is_latest_downloadable, error_message
def parse_exclude_list( xml_filename ):
"""Return a list of repositories to exclude from testing."""
@@ -1024,9 +980,15 @@
if can_update_tool_shed:
metadata_revision_id = repository_dict.get( 'id', None )
if metadata_revision_id is not None:
- name = str( repository_dict[ 'name' ] )
- owner = str( repository_dict[ 'owner' ] )
- changeset_revision = str( repository_dict[ 'changeset_revision' ] )
+ name = repository_dict.get( 'name', None )
+ owner = repository_dict.get( 'owner', None )
+ changeset_revision = repository_dict.get( 'changeset_revision', None )
+ if name is None or owner is None or changeset_revision is None:
+ log.debug( 'Entries for name, owner or changeset_revision missing from repository_dict %s' % str( repository_dict ) )
+ return {}
+ name = str( name )
+ owner = str( owner )
+ changeset_revision = str( changeset_revision )
log.debug('\n=============================================================\n' )
log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
( changeset_revision, name, owner, str( tool_test_results_dict ) ) )
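
Note that is_latest_downloadable_revision() now returns a
( value, error_message ) tuple rather than a bare boolean, so truthiness-style
callers silently break. A sketch of the required call shape (the URL and dict
contents are placeholders); the functional_tests.py change below updates the
one caller in exactly this way:

    repository_dict = dict( name='add_column', owner='devteam', changeset_revision='abc123' )
    is_latest, error_message = is_latest_downloadable_revision( 'http://localhost:9009', repository_dict )
    if is_latest is None or error_message:
        # Could not determine; fall back to a safe default.
        set_do_not_test = False
    else:
        set_do_not_test = not is_latest
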
diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
@@ -685,9 +685,16 @@
tool_test_results_dict[ 'failed_tests' ] = failed_test_dicts
failed_repository_dict = repository_identifier_dict
install_and_test_statistics_dict[ 'at_least_one_test_failed' ].append( failed_repository_dict )
- set_do_not_test = \
- not install_and_test_base_util.is_latest_downloadable_revision( install_and_test_base_util.galaxy_tool_shed_url,
- repository_dict )
+ is_latest_downloadable_revision, error_message = \
+ install_and_test_base_util.is_latest_downloadable_revision( install_and_test_base_util.galaxy_tool_shed_url,
+ repository_dict )
+ if is_latest_downloadable_revision is None or error_message:
+ log.debug( 'Error attempting to determine if revision %s of repository %s owned by %s is the latest downloadable revision: %s' % \
+ ( changeset_revision, name, owner, str( error_message ) ) )
+ set_do_not_test = False
+ else:
+ set_do_not_test = not is_latest_downloadable_revision
params = dict( tools_functionally_correct=False,
test_install_error=False,
do_not_test=str( set_do_not_test ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: Remove extra includes in index.mako; Functional browser tests: create 'is loaded' fn for home page to simplify, improve loading in user module, misc. fixes
by commits-noreply@bitbucket.org 09 Jan '14
09 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/17e9e3ffb571/
Changeset: 17e9e3ffb571
User: carlfeberhard
Date: 2014-01-09 20:34:25
Summary: Remove extra includes in index.mako; Functional browser tests: create 'is loaded' fn for home page to simplify, improve loading in user module, misc. fixes
Affected #: 13 files
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b templates/webapps/galaxy/root/index.mako
--- a/templates/webapps/galaxy/root/index.mako
+++ b/templates/webapps/galaxy/root/index.mako
@@ -1,9 +1,7 @@
<%inherit file="/webapps/galaxy/base_panels.mako"/>
<%namespace file="/root/tool_menu.mako" import="*" />
-<%namespace file="/history/history_panel.mako" import="history_panel_javascripts" />
-<%namespace file="/history/history_panel.mako" import="current_history_panel" />
-<%namespace file="/history/history_panel.mako" import="history_panel" />
<%def name="stylesheets()">
${parent.stylesheets()}
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/anon-history-tests.js
--- a/test/casperjs/anon-history-tests.js
+++ b/test/casperjs/anon-history-tests.js
@@ -60,16 +60,8 @@
// =================================================================== TESTS
-// ------------------------------------------------------------------- anonymous new, history
-// open galaxy - ensure not logged in
-spaceghost.thenOpen( spaceghost.baseUrl, function(){
- var loggedInAs = spaceghost.user.loggedInAs();
- this.debug( 'loggedInAs: ' + loggedInAs );
- if( loggedInAs ){ this.logout(); }
-});
-
-// ------------------------------------------------------------------- check the empty history for well formedness
-spaceghost.historypanel.waitForHdas( function testPanelStructure(){
+// ------------------------------------------------------------------- check the anonymous user's new history for well-formedness
+spaceghost.openHomePage().historypanel.waitForHdas( function testPanelStructure(){
this.test.comment( 'history panel for anonymous user, new history' );
this.test.comment( "history name should exist, be visible, and have text " + unnamedName );
@@ -121,8 +113,7 @@
// ------------------------------------------------------------------- anon user can run tool on file
// ------------------------------------------------------------------- anon user registers/logs in -> same history
-spaceghost.user.loginOrRegisterUser( email, password );
-spaceghost.thenOpen( spaceghost.baseUrl, function(){
+spaceghost.user.loginOrRegisterUser( email, password ).openHomePage( function(){
this.test.comment( 'anon-user should login and be associated with previous history' );
var loggedInAs = spaceghost.user.loggedInAs();
@@ -139,8 +130,7 @@
});
// ------------------------------------------------------------------- logs out -> new history
-spaceghost.user.logout();
-spaceghost.thenOpen( spaceghost.baseUrl, function(){
+spaceghost.user.logout().openHomePage( function(){
this.test.comment( 'logging out should create a new, anonymous history' );
this.historypanel.waitForHdas( function(){
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/api-history-tests.js
--- a/test/casperjs/api-history-tests.js
+++ b/test/casperjs/api-history-tests.js
@@ -304,13 +304,15 @@
//this.debug( 'returned:\n' + this.jsonStr( returned ) );
historyShow = this.api.histories.show( newFirstHistory.id );
this.test.assert( historyShow.genome_build === unicodeBuild,
- "Update accepted unicode genome_build: " + historyShow.name );
+ "Update accepted unicode genome_build: " + historyShow.genome_build );
} catch( err ){
//this.debug( this.jsonStr( err ) );
if( ( err instanceof this.api.APIError )
&& ( err.status === 500 )
&& ( err.message.indexOf( '(ProgrammingError) You must not use 8-bit bytestrings' ) !== -1 ) ){
this.skipTest( 'Unicode update failed. Are you using sqlite3 as the db?' );
+ } else {
+ throw err;
}
}
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/hda-state-tests.js
--- a/test/casperjs/hda-state-tests.js
+++ b/test/casperjs/hda-state-tests.js
@@ -62,17 +62,14 @@
peekShouldBeArray = [];
// ------------------------------------------------------------------- set up
-// start a new user
+// start a new user and upload a file
spaceghost.user.loginOrRegisterUser( email, password );
-
-// upload a file
spaceghost.then( function upload(){
spaceghost.tools.uploadFile( filepathToUpload, function uploadCallback( _uploadInfo ){
testUploadInfo = _uploadInfo;
});
});
-
// =================================================================== TEST HELPERS
//NOTE: to be called with fn.call( spaceghost, ... )
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/history-options-tests.js
--- a/test/casperjs/history-options-tests.js
+++ b/test/casperjs/history-options-tests.js
@@ -47,9 +47,8 @@
// =================================================================== TESTS
// ------------------------------------------------------------------- set up
-// start a new user
+// start a new user and upload a file
spaceghost.user.loginOrRegisterUser( email, password );
-
spaceghost.tools.uploadFile( filepathToUpload, function uploadCallback( _uploadInfo ){
testUploadInfo = _uploadInfo;
});
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/history-panel-tests.js
--- a/test/casperjs/history-panel-tests.js
+++ b/test/casperjs/history-panel-tests.js
@@ -73,7 +73,7 @@
spaceghost.user.loginOrRegisterUser( email, password );
// ------------------------------------------------------------------- check structure of empty history
-spaceghost.thenOpen( spaceghost.baseUrl ).historypanel.waitForHdas( function(){
+spaceghost.openHomePage().historypanel.waitForHdas( function(){
this.test.comment( 'history panel with a new, empty history should be well formed' );
this.test.comment( "history name should exist, be visible, and have text " + unnamedName );
@@ -173,9 +173,9 @@
this.test.assertVisible( nameSelector, 'History name is visible' );
this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is ' + newHistoryName );
- this.test.comment( "history subtitle should display size and size should be " + onetxtFilesize + " bytes" );
var onetxtFilesize = require( 'fs' ).size( this.options.scriptDir + filepathToUpload ),
expectedSubtitle = onetxtFilesize + ' bytes';
+ this.test.comment( "history subtitle should display size and size should be " + onetxtFilesize + " bytes" );
this.test.assertExists( subtitleSelector, 'Found ' + subtitleSelector );
this.test.assertVisible( subtitleSelector, 'History subtitle is visible' );
this.test.assertSelectorHasText( subtitleSelector, expectedSubtitle,
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/login-tests.js
--- a/test/casperjs/login-tests.js
+++ b/test/casperjs/login-tests.js
@@ -1,5 +1,3 @@
-// have to handle errors here - or phantom/casper won't bail but _HANG_
-//TODO: global error handler?
try {
var utils = require( 'utils' ),
xpath = require( 'casper' ).selectXPath,
@@ -25,12 +23,6 @@
phantom.exit( 1 );
}
-// ===================================================================
-/* TODO:
- move selectors and assertText strings into global object for easier editing
-
-
-*/
// =================================================================== globals and helpers
var email = spaceghost.user.getRandomEmail(),
password = '123456';
@@ -43,21 +35,14 @@
// =================================================================== TESTS
// register a user (again...)
-spaceghost.thenOpen( spaceghost.baseUrl, function(){
- this.test.comment( 'registering: ' + email );
- spaceghost.user.registerUser( email, password );
-});
+spaceghost.openHomePage()
+ .user.registerUser( email, password )
+ .user.logout();
-// log them out - check for empty logged in text
-spaceghost.then( function(){
- this.test.comment( 'logging out: ' + email );
- spaceghost.user.logout();
-});
-spaceghost.thenOpen( spaceghost.baseUrl, function(){
- spaceghost.waitForMasthead( function() {
- this.test.assertDoesntExist( xpath( userEmailSelector ) );
- this.test.assert( spaceghost.user.loggedInAs() === '', 'loggedInAs() is empty string' );
- });
+spaceghost.openHomePage( function(){
+ this.test.comment( 'log out should be reflected in user menu' );
+ this.test.assertDoesntExist( xpath( userEmailSelector ) );
+ this.test.assert( spaceghost.user.loggedInAs() === '', 'loggedInAs() is empty string' );
});
// log them back in - check for email in logged in text
@@ -65,16 +50,13 @@
this.test.comment( 'logging back in: ' + email );
spaceghost.user._submitLogin( email, password ); //No such user
});
-spaceghost.thenOpen( spaceghost.baseUrl, function(){
+spaceghost.openHomePage( function(){
this.test.assertSelectorHasText( xpath( userEmailSelector ), email );
this.test.assert( spaceghost.user.loggedInAs() === email, 'loggedInAs() matches email' );
});
// finally log back out for next tests
-spaceghost.then( function(){
- this.test.comment( 'logging out: ' + email );
- spaceghost.user.logout();
-});
+spaceghost.user.logout();
// ------------------------------------------------------------------- shouldn't work
// can't log in: users that don't exist, bad emails, sql injection (hurhur)
@@ -120,10 +102,6 @@
});
});
});
-
-spaceghost.then( function(){
- this.user.logout();
-});
/*
*/
// ===================================================================
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/modules/api.js
--- a/test/casperjs/modules/api.js
+++ b/test/casperjs/modules/api.js
@@ -1,12 +1,4 @@
// =================================================================== module object, exports
-/** Creates a new api module object.
- * @param {SpaceGhost} spaceghost a spaceghost instance
- * @exported
- */
-exports.create = function createAPI( spaceghost, apikey ){
- return new API( spaceghost );
-};
-
/** API object constructor.
* @param {SpaceGhost} spaceghost a spaceghost instance
* @param {String} apikey apikey for use when not using session authentication
@@ -28,14 +20,21 @@
};
exports.API = API;
+/** Creates a new api module object.
+ * @param {SpaceGhost} spaceghost a spaceghost instance
+ * @exported
+ */
+exports.create = function createAPI( spaceghost, apikey ){
+ return new API( spaceghost );
+};
+
+
API.prototype.toString = function toString(){
return ( this.spaceghost + '.API:'
+ (( this.apikey )?( this.apikey ):( '(session)' )) );
};
// ------------------------------------------------------------------- APIError
-APIError.prototype = new Error();
-APIError.prototype.constructor = Error;
/** @class Thrown when the Galaxy API returns an error from a request */
function APIError( msg, status ){
Error.apply( this, arguments );
@@ -43,6 +42,8 @@
this.message = msg;
this.status = status;
}
+APIError.prototype = new Error();
+APIError.prototype.constructor = Error;
API.prototype.APIError = APIError;
exports.APIError = APIError;
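
For readers skimming the hunk above: it only reorders declarations so the prototype wiring follows the constructor definition. A minimal, self-contained sketch of the same ES5 custom-error pattern (standalone names for illustration, not the Galaxy module itself):

    // Define the constructor first, then wire the prototype chain so that
    // instances inherit from Error and `instanceof` checks pass.
    function APIError( msg, status ){
        Error.apply( this, arguments );
        this.name = 'APIError';
        this.message = msg;
        this.status = status;
    }
    APIError.prototype = new Error();
    APIError.prototype.constructor = APIError;

    try {
        throw new APIError( 'bad response', 500 );
    } catch( err ){
        console.log( err instanceof APIError, err instanceof Error, err.status );
        // prints: true true 500
    }

Note the changeset itself keeps `APIError.prototype.constructor = Error;`; pointing `constructor` back at the subclass, as in the sketch, is the more conventional wiring, though nothing in these tests depends on it.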
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/modules/tools.js
--- a/test/casperjs/modules/tools.js
+++ b/test/casperjs/modules/tools.js
@@ -86,14 +86,12 @@
uploadInfo = {};
uploadInfo[ spaceghost.data.selectors.tools.upload.fileInput ] = filepath;
- // click the upload tool
- spaceghost.thenOpen( spaceghost.baseUrl, function(){
- // we can apparently click a tool label without expanding the tool container for it
- this.waitForSelector( '.toolMenu', function(){
- this.click( xpath( '//a[contains(text(),"Upload File")]' ) );
- this.jumpToMain( function(){
- this.waitForSelector( 'body' );
- });
+ spaceghost.openHomePage( function(){
+ // load the upload tool form
+ // (we can apparently click a tool label without expanding the tool container for it)
+ this.click( xpath( '//a[contains(text(),"Upload File")]' ) );
+ this.jumpToMain( function(){
+ this.waitForSelector( 'body' );
});
});
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/modules/user.js
--- a/test/casperjs/modules/user.js
+++ b/test/casperjs/modules/user.js
@@ -43,11 +43,11 @@
confirm : ( confirm !== undefined )?( confirm ):( password )
};
- spaceghost.thenOpen( spaceghost.baseUrl, function(){
- this.waitForMasthead( function() {
- this.clickLabel( spaceghost.data.labels.masthead.menus.user );
- this.clickLabel( spaceghost.data.labels.masthead.userMenu.register );
+ spaceghost.openHomePage( function(){
+ this.clickLabel( spaceghost.data.labels.masthead.menus.user );
+ this.clickLabel( spaceghost.data.labels.masthead.userMenu.register );
+ this.waitForNavigation( 'user/create', function beforeRegister(){
this.withMainPanel( function mainBeforeRegister(){
spaceghost.debug( '(' + spaceghost.getCurrentUrl() + ') registering user:\n'
+ spaceghost.jsonStr( userInfo ) );
@@ -56,11 +56,13 @@
this.click( xpath( spaceghost.data.selectors.registrationPage.submit_xpath ) );
});
});
- //// debugging
- //spaceghost.withFrame( spaceghost.data.selectors.frames.main, function mainAfterRegister(){
- // var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
- // spaceghost.debug( 'post registration message:\n' + spaceghost.jsonStr( messageInfo ) );
- //});
+
+ this.waitForNavigation( 'user/create', function afterRegister(){
+ // this.withMainPanel( function mainAfterRegister(){
+ // var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
+ // spaceghost.debug( 'post registration message:\n' + spaceghost.jsonStr( messageInfo ) );
+ // });
+ });
});
};
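
The registration flow above now synchronizes on waitForNavigation instead of waiting for the masthead text. In outline, the pattern the module adopts looks like this (the url fragment and step body are illustrative, not from the changeset):

    // Wait until the browser has navigated to a url containing the given
    // fragment, then run the callback as the next step.
    spaceghost.waitForNavigation( 'user/create', function afterSubmit(){
        this.withMainPanel( function(){
            // inspect the rendered page here
        });
    });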
@@ -79,24 +81,23 @@
password: password
};
- spaceghost.thenOpen( spaceghost.baseUrl, function(){
- spaceghost.waitForMasthead( function() {
- spaceghost.clickLabel( spaceghost.data.labels.masthead.userMenu.login );
+ spaceghost.openHomePage( function(){
+ this.clickLabel( spaceghost.data.labels.masthead.menus.user );
+ this.clickLabel( spaceghost.data.labels.masthead.userMenu.login );
- spaceghost.withMainPanel( function mainBeforeLogin(){
+ this.waitForNavigation( 'user/login', function beforeLogin(){
+ this.withMainPanel( function mainBeforeLogin(){
spaceghost.debug( '(' + spaceghost.getCurrentUrl() + ') logging in user:\n'
+ spaceghost.jsonStr( loginInfo ) );
spaceghost.fill( spaceghost.data.selectors.loginPage.form, loginInfo, false );
spaceghost.click( xpath( spaceghost.data.selectors.loginPage.submit_xpath ) );
});
+ });
- //// debugging
- //spaceghost.withFrame( spaceghost.data.selectors.frames.main, function mainAfterLogin(){
- // //TODO: prob. could use a more generalized form of this for url breakdown/checking
- // if( spaceghost.getCurrentUrl().search( spaceghost.data.selectors.loginPage.url_regex ) != -1 ){
- // var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
- // spaceghost.debug( 'post login message:\n' + spaceghost.jsonStr( messageInfo ) );
- // }
+ this.waitForNavigation( 'user/login', function afterLogin(){
+ //this.withMainPanel( function mainAfterLogin(){
+ // var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
+ // spaceghost.debug( 'post login message:\n' + spaceghost.jsonStr( messageInfo ) );
//});
});
});
@@ -122,6 +123,9 @@
this.warning( 'Registration failed: ' + messageInfo.text );
throw new spaceghost.GalaxyError( 'RegistrationError: ' + messageInfo.text );
}
+
+ this.clickLabel( 'Return to the home page.' );
+ this.waitForNavigation( '' );
});
return spaceghost;
};
@@ -135,19 +139,21 @@
var spaceghost = this.spaceghost;
this._submitLogin( email, password );
- spaceghost.waitForMasthead( function() {
- spaceghost.withMainPanel( function mainAfterLogin(){
- if( spaceghost.getCurrentUrl().search( spaceghost.data.selectors.loginPage.url_regex ) !== -1 ){
- var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
- if( messageInfo && messageInfo.attributes[ 'class' ] === 'errormessage' ){
- this.warning( 'Login failed: ' + messageInfo.text );
- throw new spaceghost.GalaxyError( 'LoginError: ' + messageInfo.text );
- }
- }
- if( spaceghost.user.loggedInAs() === email ){
- spaceghost.info( 'logged in as ' + email );
- }
- });
+ //spaceghost.withMainPanel( function mainAfterLogin(){
+ // if( spaceghost.getCurrentUrl().search( spaceghost.data.selectors.loginPage.url_regex ) !== -1 ){
+ // var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
+ // if( messageInfo && messageInfo.attributes[ 'class' ] === 'errormessage' ){
+ // this.warning( 'Login failed: ' + messageInfo.text );
+ // throw new spaceghost.GalaxyError( 'LoginError: ' + messageInfo.text );
+ // }
+ // }
+ //});
+ this.spaceghost.then( function checkLogin(){
+ if( spaceghost.user.loggedInAs() !== email ){
+ throw new spaceghost.GalaxyError( 'LoginError' );
+ } else {
+ spaceghost.info( 'logged in as ' + email );
+ }
});
return spaceghost;
};
@@ -156,19 +162,19 @@
* @returns {String} email of currently logged in user or '' if no one logged in
*/
User.prototype.loggedInAs = function loggedInAs(){
- var spaceghost = this.spaceghost,
- userEmail = '';
- try {
- var emailSelector = xpath( spaceghost.data.selectors.masthead.userMenu.userEmail_xpath ),
- loggedInInfo = spaceghost.elementInfoOrNull( emailSelector );
- if( loggedInInfo !== null ){
- userEmail = loggedInInfo.text.replace( 'Logged in as ', '' );
+ return this.spaceghost.jumpToTop( function(){
+ var userEmail = '';
+ try {
+ var emailSelector = xpath( this.data.selectors.masthead.userMenu.userEmail_xpath ),
+ loggedInInfo = this.elementInfoOrNull( emailSelector );
+ if( loggedInInfo !== null ){
+ userEmail = loggedInInfo.text.replace( 'Logged in as ', '' );
+ }
+ } catch( err ){
+ this.warn( err );
}
- } catch( err ){
- spaceghost.warn( err );
- }
- //console.debug( 'loggedInInfo:', spaceghost.jsonStr( loggedInInfo ) );
- return userEmail;
+ return userEmail;
+ });
};
/** Log out the current user
@@ -176,14 +182,15 @@
*/
User.prototype.logout = function logout(){
var spaceghost = this.spaceghost;
- spaceghost.thenOpen( spaceghost.baseUrl, function(){
- this.info( 'user logging out' );
- spaceghost.waitForMasthead( function _logout() {
- if( spaceghost.user.loggedInAs() ){
- spaceghost.clickLabel( spaceghost.data.labels.masthead.menus.user );
- spaceghost.clickLabel( spaceghost.data.labels.masthead.userMenu.logout );
- }
- });
+ this.spaceghost.openHomePage( function(){
+ if( spaceghost.user.loggedInAs() ){
+ spaceghost.clickLabel( spaceghost.data.labels.masthead.menus.user );
+ spaceghost.clickLabel( spaceghost.data.labels.masthead.userMenu.logout );
+ spaceghost.waitForNavigation( 'user/logout', function _toLogoutPage() {
+ spaceghost.clickLabel( 'go to the home page' );
+ spaceghost.waitForNavigation( '' );
+ });
+ }
});
return spaceghost;
};
@@ -198,10 +205,10 @@
var spaceghost = this.spaceghost;
// attempt a login, if that fails - register
spaceghost.tryStepsCatch( function tryToLogin(){
- spaceghost.open( spaceghost.baseUrl ).user.login( email, password );
+ spaceghost.openHomePage().user.login( email, password );
}, function failedLoginRegister(){
- spaceghost.open( spaceghost.baseUrl ).user.registerUser( email, password, username );
+ spaceghost.openHomePage().user.registerUser( email, password, username );
});
return spaceghost;
};
@@ -216,7 +223,8 @@
// check for the setting in sg and the universe_wsgi.ini file
var adminData = this.spaceghost.options.adminUser,
iniAdminEmails = this.spaceghost.getUniverseSetting( 'admin_users' );
- iniAdminEmails = ( iniAdminEmails )?( iniAdminEmails.split( ',' ).map( function( email ) { return email.trim(); } ) ):( null );
+ iniAdminEmails = ( iniAdminEmails )?
+ ( iniAdminEmails.split( ',' ).map( function( email ) { return email.trim(); } ) ):( null );
//TODO: seems like we only need the wsgi setting - that's the only thing we can't change
if( adminData ){
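
One consequence of these user-module helpers each returning the spaceghost instance is the chained style the updated tests use throughout; for example (email and password values are illustrative):

    var email = spaceghost.user.getRandomEmail(),
        password = '123456';

    // log in (or register on first run), then assert from the home page
    spaceghost.user.loginOrRegisterUser( email, password )
        .openHomePage( function(){
            this.test.assert( spaceghost.user.loggedInAs() === email,
                'loggedInAs() matches email: "' + email + '"' );
        });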
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/registration-tests.js
--- a/test/casperjs/registration-tests.js
+++ b/test/casperjs/registration-tests.js
@@ -1,4 +1,3 @@
-// have to handle errors here - or phantom/casper won't bail but _HANG_
try {
var utils = require( 'utils' ),
xpath = require( 'casper' ).selectXPath,
@@ -24,13 +23,6 @@
phantom.exit( 1 );
}
-// ===================================================================
-/* TODO:
- move selectors and assertText strings into global object for easier editing
- pass email, etc. for first (successful) registration (for use with other tests)
-
-
-*/
// =================================================================== globals and helpers
var email = spaceghost.user.getRandomEmail(),
password = '123456',
@@ -38,50 +30,27 @@
username = 'test' + Date.now();
// =================================================================== TESTS
-spaceghost.thenOpen( spaceghost.baseUrl, function(){
+spaceghost.openHomePage( function(){
this.test.comment( 'loading galaxy homepage' );
- // can we load galaxy?
this.test.assertTitle( 'Galaxy' );
- // xpath selector use:
this.test.assertExists( xpath( "//div[@id='masthead']" ), 'found masthead' );
});
-// failing tests for...testing...the tests
-//spaceghost.thenOpen( spaceghost.baseUrl, function(){
-// this.test.comment( 'loading galaxy homepage' );
-// // can we load galaxy?
-// this.test.assertTitle( 'Blorgo' );
-// // xpath selector use:
-// this.test.assertExists( xpath( "//div[@id='facebook']" ), 'found facebook' );
-//});
-
-
// ------------------------------------------------------------------- register a new user
spaceghost.then( function(){
this.test.comment( 'registering user: ' + email );
this.user._submitRegistration( email, password, username, confirm );
});
-spaceghost.thenOpen( spaceghost.baseUrl, function(){
- this.waitForMasthead( function() {
- this.clickLabel( 'User' );
- this.test.assertSelectorHasText( xpath( spaceghost.data.selectors.masthead.userMenu.userEmail_xpath ),
- email, '#user-email === ' + email );
- });
+spaceghost.openHomePage( function(){
+ this.clickLabel( 'User' );
+ var loggedInAs = this.fetchText( xpath( spaceghost.data.selectors.masthead.userMenu.userEmail_xpath ) );
+ this.test.assert( loggedInAs.indexOf( email ) !== -1, 'found proper email in user menu: ' + loggedInAs );
});
// ------------------------------------------------------------------- log out that user
-spaceghost.then( function(){
- this.waitForMasthead( function() {
- this.test.comment( 'logging out user: ' + email );
- this.user.logout();
- });
-});
-spaceghost.then( function(){
- this.waitForMasthead( function() {
- var emailSelector = xpath( this.data.selectors.masthead.userMenu.userEmail_xpath );
- this.debug( 'email:' + this.elementInfoOrNull( emailSelector ) );
- this.test.assert( !this.elementInfoOrNull( emailSelector ), 'user email not found' );
- });
+spaceghost.user.logout().openHomePage( function(){
+ var emailSelector = xpath( this.data.selectors.masthead.userMenu.userEmail_xpath );
+ this.test.assert( !this.elementInfoOrNull( emailSelector ), 'user email not found' );
});
// ------------------------------------------------------------------- bad user registrations
@@ -165,7 +134,6 @@
this.assertErrorMessage( 'Public name is taken; please choose another' );
});
-
// ------------------------------------------------------------------- test the convenience fns
// these versions are for conv. use in other tests, they should throw errors if used improperly
spaceghost.then( function(){
@@ -177,11 +145,6 @@
});
});
-spaceghost.then( function(){
- //??: necessary?
- this.user.logout();
-});
-
// ===================================================================
spaceghost.run( function(){
this.test.done();
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/spaceghost.js
--- a/test/casperjs/spaceghost.js
+++ b/test/casperjs/spaceghost.js
@@ -507,6 +507,38 @@
Casper.prototype.run.call( this, new_onComplete, time );
};
+// ------------------------------------------------------------------- home page
+/** Wait for the homepage/index/Analyze Data to load fully.
+ */
+SpaceGhost.prototype.openHomePage = function openHomePage( then, delay ){
+//TODO: delay doesn't seem to work
+ this.thenOpen( this.baseUrl, function _openHomePage(){
+ this.waitFor(
+ function waitForCheck(){
+ return this.homePageIsLoaded();
+ },
+ then,
+ function openHomePageTimeout(){
+ throw new GalaxyError( 'Homepage timed out' );
+ },
+ delay
+ );
+ });
+ return this;
+};
+
+/** Check for visibility of main home page elements: masthead, tool menu, history panel.
+ */
+SpaceGhost.prototype.homePageIsLoaded = function homePageIsLoaded(){
+ //this.debug( 'homePageIsLoaded: ' + [
+ // this.visible( '#masthead' ),
+ // this.visible( this.data.selectors.toolMenu.container ),
+ // this.visible( '#current-history-panel' )].join( ', ' ) );
+ return ( this.visible( '#masthead' )
+ && this.visible( this.data.selectors.toolMenu.container )
+ && this.visible( '#current-history-panel' ) );
+};
+
// ------------------------------------------------------------------- try step
/** Install a function as an error handler temporarily, run a function with steps, then remove the handler.
* A rough stand-in for try catch with steps.
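
Taken together, the openHomePage/homePageIsLoaded pair added above replaces the old thenOpen-plus-waitForMasthead dance with a single call. A minimal usage sketch (the comment and assertion text are illustrative):

    // Open the index page, wait until masthead, tool menu, and history
    // panel are all visible, then run the callback as a step.
    spaceghost.openHomePage( function(){
        this.test.comment( 'home page loaded fully' );
        this.test.assertVisible( '#masthead' );
    });
    spaceghost.run( function(){
        this.test.done();
    });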
@@ -549,6 +581,7 @@
});
};
+
// ------------------------------------------------------------------- misc
/** Hover over an element.
* NOTE: not for use with iframes (main, tool, history) - they need to re-calc
@@ -662,15 +695,6 @@
return returned;
};
-///** Jumps into history frame, executes fn, and jumps back to original frame.
-// * @param {Selector} frame the selector for the frame to jump to
-// * @param {Function} fn function called when in the frame
-// * @returns {Any} the return value of fn
-// */
-//SpaceGhost.prototype.jumpToHistory = function jumpToHistory( fn ){
-// return this.jumpToFrame( this.data.selectors.frames.history, fn );
-//};
-
/** Jumps into main frame, executes fn, and jumps back to original frame.
* @param {Selector} frame the selector for the frame to jump to
* @param {Function} fn function called when in the frame
@@ -862,6 +886,7 @@
this.test.assert( classes.indexOf( className ) === -1, msg );
};
+
// =================================================================== CONVENIENCE
/** Wraps casper.getElementInfo in try, returning null if element not found instead of erroring.
* @param {String} selector css or xpath selector for the element to find
@@ -1096,7 +1121,7 @@
SpaceGhost.prototype.waitForMasthead = function wait( then ) {
return this.waitForText( this.data.labels.masthead.menus.user, then );
-}
+};
// =================================================================== TEST DATA
@@ -1118,16 +1143,23 @@
},
frames : {
- main : 'galaxy_main',
- history : 'galaxy_history'
+ main : 'galaxy_main'
},
masthead : {
- adminLink : '#masthead a[href="/admin/index"]',
- userMenu : {
+ id : '#masthead',
+ adminLink : '#masthead a[href="/admin/index"]',
+ userMenu : {
userEmail_xpath : '//a[contains(text(),"Logged in as")]'
}
},
+ toolMenu : {
+ container : '.toolMenuContainer'
+ },
+ historyPanel : {
+ current : '#current-history-panel'
+ },
+
loginPage : {
form : 'form#login',
submit_xpath : "//input[@value='Login']",
@@ -1135,7 +1167,8 @@
},
registrationPage : {
form : 'form#registration',
- submit_xpath : "//input[@value='Submit']"
+ submit_xpath : "//input[@value='Submit']",
+ returnLink : '//a[contains(text(),"Return to the home page")]'
},
tools : {
general : {
diff -r 4e6b003daaa44a06ac13c74c5ce8c75fa7bbb7ee -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b test/casperjs/upload-tests.js
--- a/test/casperjs/upload-tests.js
+++ b/test/casperjs/upload-tests.js
@@ -43,16 +43,12 @@
// =================================================================== TESTS
// ------------------------------------------------------------------- start a new user
-spaceghost.user.loginOrRegisterUser( email, password );
-//??: why is a reload needed here? If we don't, loggedInAs === '' ...
-spaceghost.thenOpen( spaceghost.baseUrl, function(){
+spaceghost.user.loginOrRegisterUser( email, password ).openHomePage( function(){
var loggedInAs = spaceghost.user.loggedInAs();
this.test.assert( loggedInAs === email, 'loggedInAs() matches email: "' + loggedInAs + '"' );
});
-
// ------------------------------------------------------------------- long form
-
// upload a file...
spaceghost.then( function(){
this.test.comment( 'Test uploading a file' );
Repository URL: https://bitbucket.org/galaxy/galaxy-central/