galaxy-commits
Threads by month
- ----- 2025 -----
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
May 2010
- 2 participants
- 158 discussions
details: http://www.bx.psu.edu/hg/galaxy/rev/3445ca17a4c5
changeset: 3680:3445ca17a4c5
user: rc
date: Thu Apr 22 09:35:44 2010 -0400
description:
lims:
dataset rename issue fixed
diffstat:
scripts/galaxy_messaging/server/data_transfer.py | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r 155f2e89a02b -r 3445ca17a4c5 scripts/galaxy_messaging/server/data_transfer.py
--- a/scripts/galaxy_messaging/server/data_transfer.py Wed Apr 21 18:35:59 2010 -0400
+++ b/scripts/galaxy_messaging/server/data_transfer.py Thu Apr 22 09:35:44 2010 -0400
@@ -161,7 +161,7 @@
pexpect.TIMEOUT:print_ticks},
timeout=10)
log.debug(output)
- path = os.path.join(self.server_dir, os.path.basename(df['file']))
+ path = os.path.join(self.server_dir, os.path.basename(df['name']))
if not os.path.exists(path):
msg = 'Could not find the local file after transfer (%s)' % path
log.error(msg)
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/155f2e89a02b
changeset: 3679:155f2e89a02b
user: Kanwei Li <kanwei(a)gmail.com>
date: Wed Apr 21 18:35:59 2010 -0400
description:
Fix various trackster issues
diffstat:
lib/galaxy/datatypes/converters/bam_to_summary_tree_converter.py | 2 +-
lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.py | 2 +-
lib/galaxy/model/__init__.py | 1 -
lib/galaxy/visualization/tracks/data/interval_index.py | 3 +-
lib/galaxy/visualization/tracks/data/summary_tree.py | 10 ++-
static/scripts/galaxy.base.js | 2 +-
static/scripts/packed/galaxy.base.js | 2 +-
static/scripts/packed/trackster.js | 2 +-
static/scripts/trackster.js | 27 +++++++--
static/trackster.css | 2 +-
templates/tracks/browser.mako | 6 +-
templates/tracks/new_browser.mako | 2 +-
templates/user/dbkeys.mako | 2 +-
13 files changed, 40 insertions(+), 23 deletions(-)
diffs (236 lines):
diff -r fe14a58568ad -r 155f2e89a02b lib/galaxy/datatypes/converters/bam_to_summary_tree_converter.py
--- a/lib/galaxy/datatypes/converters/bam_to_summary_tree_converter.py Wed Apr 21 16:42:09 2010 -0400
+++ b/lib/galaxy/datatypes/converters/bam_to_summary_tree_converter.py Wed Apr 21 18:35:59 2010 -0400
@@ -19,7 +19,7 @@
st = SummaryTree(block_size=100, levels=4, draw_cutoff=100, detail_cutoff=20)
for read in bamfile.fetch():
- st.insert_range(read.rname, read.mpos, read.pos + read.rlen)
+ st.insert_range(bamfile.getrname(read.rname), read.pos, read.pos + read.rlen)
st.write(out_fname)
diff -r fe14a58568ad -r 155f2e89a02b lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.py
--- a/lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.py Wed Apr 21 16:42:09 2010 -0400
+++ b/lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.py Wed Apr 21 18:35:59 2010 -0400
@@ -34,7 +34,7 @@
chrom = feature[0]
chrom_start = int(feature[1])
chrom_end = int(feature[2])
- score = int(feature[3])
+ score = float(feature[3])
return chrom, chrom_start, chrom_end, None, score
def main():
diff -r fe14a58568ad -r 155f2e89a02b lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Wed Apr 21 16:42:09 2010 -0400
+++ b/lib/galaxy/model/__init__.py Wed Apr 21 18:35:59 2010 -0400
@@ -611,7 +611,6 @@
if fail_dependencies:
return None
except ValueError:
- log.debug("WTF")
raise ValueError("A dependency could not be converted.")
except KeyError:
pass # No deps
diff -r fe14a58568ad -r 155f2e89a02b lib/galaxy/visualization/tracks/data/interval_index.py
--- a/lib/galaxy/visualization/tracks/data/interval_index.py Wed Apr 21 16:42:09 2010 -0400
+++ b/lib/galaxy/visualization/tracks/data/interval_index.py Wed Apr 21 18:35:59 2010 -0400
@@ -26,7 +26,8 @@
payload = [ offset, start, end ]
if "no_detail" not in kwargs:
length = len(feature)
- payload.append(feature[3]) # name
+ if length >= 4:
+ payload.append(feature[3]) # name
if length >= 6: # strand
payload.append(feature[5])
diff -r fe14a58568ad -r 155f2e89a02b lib/galaxy/visualization/tracks/data/summary_tree.py
--- a/lib/galaxy/visualization/tracks/data/summary_tree.py Wed Apr 21 16:42:09 2010 -0400
+++ b/lib/galaxy/visualization/tracks/data/summary_tree.py Wed Apr 21 18:35:59 2010 -0400
@@ -19,17 +19,23 @@
if st is None:
st = summary_tree_from_file( self.dataset.file_name )
CACHE[filename] = st
- if chrom not in st.chrom_blocks:
+ if chrom in st.chrom_blocks:
+ pass
+ elif chrom[3:] in st.chrom_blocks:
+ chrom = chrom[3:]
+ else:
return None
resolution = max(1, ceil(float(kwargs['resolution'])))
level = ceil( log( resolution, st.block_size ) )
level = int(max( level, 0 ))
+ if level <= 0:
+ return None
stats = st.chrom_stats[chrom]
results = st.query(chrom, int(start), int(end), level)
- if results == "detail" or level <= 0:
+ if results == "detail":
return None
elif results == "draw":
return "no_detail", None, None
diff -r fe14a58568ad -r 155f2e89a02b static/scripts/galaxy.base.js
--- a/static/scripts/galaxy.base.js Wed Apr 21 16:42:09 2010 -0400
+++ b/static/scripts/galaxy.base.js Wed Apr 21 18:35:59 2010 -0400
@@ -341,7 +341,7 @@
var id = this.id;
var body = $(this).children( "div.historyItemBody" );
var peek = body.find( "pre.peek" )
- $(this).find( ".historyItemTitleBar > .historyItemTitle" ).wrap( "<a href='javascript:void();'></a>" ).click( function() {
+ $(this).find( ".historyItemTitleBar > .historyItemTitle" ).wrap( "<a href='javascript:void(0);'></a>" ).click( function() {
if ( body.is(":visible") ) {
// Hiding stuff here
if ( $.browser.mozilla ) { peek.css( "overflow", "hidden" ); }
diff -r fe14a58568ad -r 155f2e89a02b static/scripts/packed/galaxy.base.js
--- a/static/scripts/packed/galaxy.base.js Wed Apr 21 16:42:09 2010 -0400
+++ b/static/scripts/packed/galaxy.base.js Wed Apr 21 18:35:59 2010 -0400
@@ -1,1 +1,1 @@
-$(document).ready(function(){replace_big_select_inputs()});$.fn.makeAbsolute=function(a){return this.each(function(){var b=$(this);var c=b.position();b.css({position:"absolute",marginLeft:0,marginTop:0,top:c.top,left:c.left,right:$(window).width()-(c.left+b.width())});if(a){b.remove().appendTo("body")}})};function ensure_popup_helper(){if($("#popup-helper").length===0){$("<div id='popup-helper'/>").css({background:"white",opacity:0,zIndex:15000,position:"absolute",top:0,left:0,width:"100%",height:"100%"}).appendTo("body").hide()}}function attach_popupmenu(b,d){var a=function(){d.unbind().hide();$("#popup-helper").unbind("click.popupmenu").hide()};var c=function(g){$("#popup-helper").bind("click.popupmenu",a).show();d.click(a).css({left:0,top:-1000}).show();var f=g.pageX-d.width()/2;f=Math.min(f,$(document).scrollLeft()+$(window).width()-$(d).width()-20);f=Math.max(f,$(document).scrollLeft()+20);d.css({top:g.pageY-5,left:f});return false};$(b).click(c)}function make_popupmen!
u(c,b){ensure_popup_helper();var a=$("<ul id='"+c.attr("id")+"-menu'></ul>");$.each(b,function(f,e){if(e){$("<li/>").html(f).click(e).appendTo(a)}else{$("<li class='head'/>").html(f).appendTo(a)}});var d=$("<div class='popmenu-wrapper'>");d.append(a).append("<div class='overlay-border'>").css("position","absolute").appendTo("body").hide();attach_popupmenu(c,d)}function make_popup_menus(){jQuery("div[popupmenu]").each(function(){var c={};$(this).find("a").each(function(){var b=$(this).attr("confirm"),d=$(this).attr("href"),e=$(this).attr("target");c[$(this).text()]=function(){if(!b||confirm(b)){var g=window;if(e=="_parent"){g=window.parent}else{if(e=="_top"){g=window.top}}g.location=d}}});var a=$("#"+$(this).attr("popupmenu"));make_popupmenu(a,c);$(this).remove();a.addClass("popup").show()})}function array_length(b){if(b.length){return b.length}var c=0;for(var a in b){c++}return c}function naturalSort(i,g){var n=/(-?[0-9\.]+)/g,j=i.toString().toLowerCase()||"",f=g.toString()!
.toLowerCase()||"",k=String.fromCharCode(0),l=j.replace(n,k+"$1"+k).sp
lit(k),e=f.replace(n,k+"$1"+k).split(k),d=(new Date(j)).getTime(),m=d?(new Date(f)).getTime():null;if(m){if(d<m){return -1}else{if(d>m){return 1}}}for(var h=0,c=Math.max(l.length,e.length);h<c;h++){oFxNcL=parseFloat(l[h])||l[h];oFyNcL=parseFloat(e[h])||e[h];if(oFxNcL<oFyNcL){return -1}else{if(oFxNcL>oFyNcL){return 1}}}return 0}function replace_big_select_inputs(a){$("select[name=dbkey]").each(function(){var b=$(this);if(a!==undefined&&b.find("option").length<a){return}var c=b.attr("value");var d=$("<input type='text' class='text-and-autocomplete-select'></input>");d.attr("size",40);d.attr("name",b.attr("name"));d.attr("id",b.attr("id"));d.click(function(){var i=$(this).attr("value");$(this).attr("value","Loading...");$(this).showAllInCache();$(this).attr("value",i);$(this).select()});var h=[];var g={};b.children("option").each(function(){var j=$(this).text();var i=$(this).attr("value");if(i=="?"){return}h.push(j);g[j]=i;g[i]=i;if(i==c){d.attr("value",j)}});h.push("unspecifie!
d (?)");g["unspecified (?)"]="?";g["?"]="?";if(d.attr("value")==""){d.attr("value","Click to Search or Select")}h=h.sort(naturalSort);var f={selectFirst:false,autoFill:false,mustMatch:false,matchContains:true,max:1000,minChars:0,hideForLessThanMinChars:false};d.autocomplete(h,f);b.replaceWith(d);var e=function(){var j=d.attr("value");var i=g[j];if(i!==null&&i!==undefined){d.attr("value",i)}else{if(c!=""){d.attr("value",c)}else{d.attr("value","?")}}};d.parents("form").submit(function(){e()});$(document).bind("convert_dbkeys",function(){e()})})}function async_save_text(d,f,e,a,c,h,i,g,b){if(c===undefined){c=30}if(i===undefined){i=4}$("#"+d).live("click",function(){if($("#renaming-active").length>0){return}var l=$("#"+f),k=l.text(),j;if(h){j=$("<textarea></textarea>").attr({rows:i,cols:c}).text(k)}else{j=$("<input type='text'></input>").attr({value:k,size:c})}j.attr("id","renaming-active");j.blur(function(){$(this).remove();l.show();if(b){b(j)}});j.keyup(function(n){if(n.keyCo!
de===27){$(this).trigger("blur")}else{if(n.keyCode===13){var m={};m[a]
=$(this).val();$(this).trigger("blur");$.ajax({url:e,data:m,error:function(){alert("Text editing for elt "+f+" failed")},success:function(o){l.text(o);if(b){b(j)}}})}}});if(g){g(j)}l.hide();j.insertAfter(l);j.focus();j.select();return})}function init_history_items(d,a,c){var b=function(){try{var e=$.jStore.store("history_expand_state");if(e){for(var g in e){$("#"+g+" div.historyItemBody").show()}}}catch(f){$.jStore.remove("history_expand_state")}if($.browser.mozilla){$("div.historyItemBody").each(function(){if(!$(this).is(":visible")){$(this).find("pre.peek").css("overflow","hidden")}})}d.each(function(){var j=this.id;var h=$(this).children("div.historyItemBody");var i=h.find("pre.peek");$(this).find(".historyItemTitleBar > .historyItemTitle").wrap("<a href='javascript:void();'></a>").click(function(){if(h.is(":visible")){if($.browser.mozilla){i.css("overflow","hidden")}h.slideUp("fast");if(!c){var k=$.jStore.store("history_expand_state");if(k){delete k[j];$.jStore.store("hi!
story_expand_state",k)}}}else{h.slideDown("fast",function(){if($.browser.mozilla){i.css("overflow","auto")}});if(!c){var k=$.jStore.store("history_expand_state");if(k===undefined){k={}}k[j]=true;$.jStore.store("history_expand_state",k)}}return false})});$("#top-links > a.toggle").click(function(){var h=$.jStore.store("history_expand_state");if(h===undefined){h={}}$("div.historyItemBody:visible").each(function(){if($.browser.mozilla){$(this).find("pre.peek").css("overflow","hidden")}$(this).slideUp("fast");if(h){delete h[$(this).parent().attr("id")]}});$.jStore.store("history_expand_state",h)}).show()};if(a){b()}else{$.jStore.init("galaxy");$.jStore.engineReady(function(){b()})}}$(document).ready(function(){$("a[confirm]").click(function(){return confirm($(this).attr("confirm"))});if($.fn.tipsy){$(".tooltip").tipsy({gravity:"s"})}make_popup_menus()});
\ No newline at end of file
+$(document).ready(function(){replace_big_select_inputs()});$.fn.makeAbsolute=function(a){return this.each(function(){var b=$(this);var c=b.position();b.css({position:"absolute",marginLeft:0,marginTop:0,top:c.top,left:c.left,right:$(window).width()-(c.left+b.width())});if(a){b.remove().appendTo("body")}})};function ensure_popup_helper(){if($("#popup-helper").length===0){$("<div id='popup-helper'/>").css({background:"white",opacity:0,zIndex:15000,position:"absolute",top:0,left:0,width:"100%",height:"100%"}).appendTo("body").hide()}}function attach_popupmenu(b,d){var a=function(){d.unbind().hide();$("#popup-helper").unbind("click.popupmenu").hide()};var c=function(g){$("#popup-helper").bind("click.popupmenu",a).show();d.click(a).css({left:0,top:-1000}).show();var f=g.pageX-d.width()/2;f=Math.min(f,$(document).scrollLeft()+$(window).width()-$(d).width()-20);f=Math.max(f,$(document).scrollLeft()+20);d.css({top:g.pageY-5,left:f});return false};$(b).click(c)}function make_popupmen!
u(c,b){ensure_popup_helper();var a=$("<ul id='"+c.attr("id")+"-menu'></ul>");$.each(b,function(f,e){if(e){$("<li/>").html(f).click(e).appendTo(a)}else{$("<li class='head'/>").html(f).appendTo(a)}});var d=$("<div class='popmenu-wrapper'>");d.append(a).append("<div class='overlay-border'>").css("position","absolute").appendTo("body").hide();attach_popupmenu(c,d)}function make_popup_menus(){jQuery("div[popupmenu]").each(function(){var c={};$(this).find("a").each(function(){var b=$(this).attr("confirm"),d=$(this).attr("href"),e=$(this).attr("target");c[$(this).text()]=function(){if(!b||confirm(b)){var g=window;if(e=="_parent"){g=window.parent}else{if(e=="_top"){g=window.top}}g.location=d}}});var a=$("#"+$(this).attr("popupmenu"));make_popupmenu(a,c);$(this).remove();a.addClass("popup").show()})}function array_length(b){if(b.length){return b.length}var c=0;for(var a in b){c++}return c}function naturalSort(i,g){var n=/(-?[0-9\.]+)/g,j=i.toString().toLowerCase()||"",f=g.toString()!
.toLowerCase()||"",k=String.fromCharCode(0),l=j.replace(n,k+"$1"+k).sp
lit(k),e=f.replace(n,k+"$1"+k).split(k),d=(new Date(j)).getTime(),m=d?(new Date(f)).getTime():null;if(m){if(d<m){return -1}else{if(d>m){return 1}}}for(var h=0,c=Math.max(l.length,e.length);h<c;h++){oFxNcL=parseFloat(l[h])||l[h];oFyNcL=parseFloat(e[h])||e[h];if(oFxNcL<oFyNcL){return -1}else{if(oFxNcL>oFyNcL){return 1}}}return 0}function replace_big_select_inputs(a){$("select[name=dbkey]").each(function(){var b=$(this);if(a!==undefined&&b.find("option").length<a){return}var c=b.attr("value");var d=$("<input type='text' class='text-and-autocomplete-select'></input>");d.attr("size",40);d.attr("name",b.attr("name"));d.attr("id",b.attr("id"));d.click(function(){var i=$(this).attr("value");$(this).attr("value","Loading...");$(this).showAllInCache();$(this).attr("value",i);$(this).select()});var h=[];var g={};b.children("option").each(function(){var j=$(this).text();var i=$(this).attr("value");if(i=="?"){return}h.push(j);g[j]=i;g[i]=i;if(i==c){d.attr("value",j)}});h.push("unspecifie!
d (?)");g["unspecified (?)"]="?";g["?"]="?";if(d.attr("value")==""){d.attr("value","Click to Search or Select")}h=h.sort(naturalSort);var f={selectFirst:false,autoFill:false,mustMatch:false,matchContains:true,max:1000,minChars:0,hideForLessThanMinChars:false};d.autocomplete(h,f);b.replaceWith(d);var e=function(){var j=d.attr("value");var i=g[j];if(i!==null&&i!==undefined){d.attr("value",i)}else{if(c!=""){d.attr("value",c)}else{d.attr("value","?")}}};d.parents("form").submit(function(){e()});$(document).bind("convert_dbkeys",function(){e()})})}function async_save_text(d,f,e,a,c,h,i,g,b){if(c===undefined){c=30}if(i===undefined){i=4}$("#"+d).live("click",function(){if($("#renaming-active").length>0){return}var l=$("#"+f),k=l.text(),j;if(h){j=$("<textarea></textarea>").attr({rows:i,cols:c}).text(k)}else{j=$("<input type='text'></input>").attr({value:k,size:c})}j.attr("id","renaming-active");j.blur(function(){$(this).remove();l.show();if(b){b(j)}});j.keyup(function(n){if(n.keyCo!
de===27){$(this).trigger("blur")}else{if(n.keyCode===13){var m={};m[a]
=$(this).val();$(this).trigger("blur");$.ajax({url:e,data:m,error:function(){alert("Text editing for elt "+f+" failed")},success:function(o){l.text(o);if(b){b(j)}}})}}});if(g){g(j)}l.hide();j.insertAfter(l);j.focus();j.select();return})}function init_history_items(d,a,c){var b=function(){try{var e=$.jStore.store("history_expand_state");if(e){for(var g in e){$("#"+g+" div.historyItemBody").show()}}}catch(f){$.jStore.remove("history_expand_state")}if($.browser.mozilla){$("div.historyItemBody").each(function(){if(!$(this).is(":visible")){$(this).find("pre.peek").css("overflow","hidden")}})}d.each(function(){var j=this.id;var h=$(this).children("div.historyItemBody");var i=h.find("pre.peek");$(this).find(".historyItemTitleBar > .historyItemTitle").wrap("<a href='javascript:void(0);'></a>").click(function(){if(h.is(":visible")){if($.browser.mozilla){i.css("overflow","hidden")}h.slideUp("fast");if(!c){var k=$.jStore.store("history_expand_state");if(k){delete k[j];$.jStore.store("h!
istory_expand_state",k)}}}else{h.slideDown("fast",function(){if($.browser.mozilla){i.css("overflow","auto")}});if(!c){var k=$.jStore.store("history_expand_state");if(k===undefined){k={}}k[j]=true;$.jStore.store("history_expand_state",k)}}return false})});$("#top-links > a.toggle").click(function(){var h=$.jStore.store("history_expand_state");if(h===undefined){h={}}$("div.historyItemBody:visible").each(function(){if($.browser.mozilla){$(this).find("pre.peek").css("overflow","hidden")}$(this).slideUp("fast");if(h){delete h[$(this).parent().attr("id")]}});$.jStore.store("history_expand_state",h)}).show()};if(a){b()}else{$.jStore.init("galaxy");$.jStore.engineReady(function(){b()})}}$(document).ready(function(){$("a[confirm]").click(function(){return confirm($(this).attr("confirm"))});if($.fn.tipsy){$(".tooltip").tipsy({gravity:"s"})}make_popup_menus()});
\ No newline at end of file
diff -r fe14a58568ad -r 155f2e89a02b static/scripts/packed/trackster.js
--- a/static/scripts/packed/trackster.js Wed Apr 21 16:42:09 2010 -0400
+++ b/static/scripts/packed/trackster.js Wed Apr 21 18:35:59 2010 -0400
@@ -1,1 +1,1 @@
-var DEBUG=false;var DENSITY=1000,FEATURE_LEVELS=10,DATA_ERROR="There was an error in indexing this dataset.",DATA_NOCONVERTER="A converter for this dataset is not installed. Please check your datatypes_conf.xml file.",DATA_NONE="No data for this chrom/contig.",DATA_PENDING="Currently indexing... please wait",DATA_LOADING="Loading data...",CACHED_TILES_FEATURE=10,CACHED_TILES_LINE=30,CACHED_DATA=20,CONTEXT=$("<canvas></canvas>").get(0).getContext("2d"),RIGHT_STRAND,LEFT_STRAND;var right_img=new Image();right_img.src="../images/visualization/strand_right.png";right_img.onload=function(){RIGHT_STRAND=CONTEXT.createPattern(right_img,"repeat")};var left_img=new Image();left_img.src="../images/visualization/strand_left.png";left_img.onload=function(){LEFT_STRAND=CONTEXT.createPattern(left_img,"repeat")};var right_img_inv=new Image();right_img_inv.src="../images/visualization/strand_right_inv.png";right_img_inv.onload=function(){RIGHT_STRAND_INV=CONTEXT.createPattern(right_img_inv!
,"repeat")};var left_img_inv=new Image();left_img_inv.src="../images/visualization/strand_left_inv.png";left_img_inv.onload=function(){LEFT_STRAND_INV=CONTEXT.createPattern(left_img_inv,"repeat")};function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}var Cache=function(a){this.num_elements=a;this.clear()};$.extend(Cache.prototype,{get:function(b){var a=this.key_ary.indexOf(b);if(a!=-1){this.key_ary.splice(a,1);this.key_ary.push(b)}return this.obj_cache[b]},set:function(b,c){if(!this.obj_cache[b]){if(this.key_ary.length>=this.num_elements){var a=this.key_ary.shift();delete this.obj_cache[a]}this.key_ary.push(b)}this.obj_cache[b]=c;return c},clear:function(){this.obj_cache={};this.key_ary=[]}});var Drawer=function(){};$.extend(Drawer.prototype,{intensity:function(b,a,c){},});drawer=new Drawer();var View=function(b,d,c,a){this.vis_id=c;this.dbkey=a;this.title=d;this.chrom=b;this.tracks=[];this.label_tracks=[];this.max_low=0;this.max_!
high=0;this.center=(this.max_high-this.max_low)/2;this.zoom_factor=3;t
his.zoom_level=0;this.track_id_counter=0};$.extend(View.prototype,{add_track:function(a){a.view=this;a.track_id=this.track_id_counter;this.tracks.push(a);if(a.init){a.init()}a.container_div.attr("id","track_"+a.track_id);this.track_id_counter+=1},add_label_track:function(a){a.view=this;this.label_tracks.push(a)},remove_track:function(a){a.container_div.fadeOut("slow",function(){$(this).remove()});delete this.tracks[a]},update_options:function(){var b=$("ul#sortable-ul").sortable("toArray");var d=[];var c=$("#viewport > div").sort(function(g,f){return b.indexOf($(g).attr("id"))>b.indexOf($(f).attr("id"))});$("#viewport > div").remove();$("#viewport").html(c);for(var e in view.tracks){var a=view.tracks[e];if(a.update_options){a.update_options(e)}}},reset:function(){this.low=this.max_low;this.high=this.max_high;this.center=this.center=(this.max_high-this.max_low)/2;this.zoom_level=0;$(".yaxislabel").remove()},redraw:function(f){this.span=this.max_high-this.max_low;var d=this.sp!
an/Math.pow(this.zoom_factor,this.zoom_level),b=this.center-(d/2),e=b+d;if(b<0){b=0;e=b+d}else{if(e>this.max_high){e=this.max_high;b=e-d}}this.low=Math.floor(b);this.high=Math.ceil(e);this.center=Math.round(this.low+(this.high-this.low)/2);this.resolution=Math.pow(10,Math.ceil(Math.log((this.high-this.low)/200)/Math.LN10));this.zoom_res=Math.pow(FEATURE_LEVELS,Math.max(0,Math.ceil(Math.log(this.resolution,FEATURE_LEVELS)/Math.log(FEATURE_LEVELS))));$("#overview-box").css({left:(this.low/this.span)*$("#overview-viewport").width(),width:Math.max(12,((this.high-this.low)/this.span)*$("#overview-viewport").width())}).show();$("#low").val(commatize(this.low));$("#high").val(commatize(this.high));if(!f){for(var c=0,a=this.tracks.length;c<a;c++){if(this.tracks[c].enabled){this.tracks[c].draw()}}for(var c=0,a=this.label_tracks.length;c<a;c++){this.label_tracks[c].draw()}}},zoom_in:function(a,b){if(this.max_high===0||this.high-this.low<30){return}if(a){this.center=a/b.width()*(this.!
high-this.low)+this.low}this.zoom_level+=1;this.redraw()},zoom_out:fun
ction(){if(this.max_high===0){return}if(this.zoom_level<=0){this.zoom_level=0;return}this.zoom_level-=1;this.redraw()}});var Track=function(a,b){this.name=a;this.parent_element=b;this.init_global()};$.extend(Track.prototype,{init_global:function(){this.header_div=$("<div class='track-header'>").text(this.name);this.content_div=$("<div class='track-content'>");this.container_div=$("<div></div>").addClass("track").append(this.header_div).append(this.content_div);this.parent_element.append(this.container_div)},init_each:function(c,b){var a=this;a.enabled=false;a.data_queue={};a.tile_cache.clear();a.data_cache.clear();a.content_div.css("height","30px");a.content_div.text(DATA_LOADING);a.container_div.removeClass("nodata error pending");if(a.view.chrom){$.getJSON(data_url,c,function(d){if(!d||d==="error"){a.container_div.addClass("error");a.content_div.text(DATA_ERROR)}else{if(d==="no converter"){a.container_div.addClass("error");a.content_div.text(DATA_NOCONVERTER)}else{if(d.dat!
a&&d.data.length===0||d.data===null){a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}else{if(d==="pending"){a.container_div.addClass("pending");a.content_div.text(DATA_PENDING);setTimeout(function(){a.init()},5000)}else{a.content_div.text("");a.content_div.css("height",a.height_px+"px");a.enabled=true;b(d);a.draw()}}}}})}else{a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}}});var TiledTrack=function(){};$.extend(TiledTrack.prototype,Track.prototype,{draw:function(){var i=this.view.low,e=this.view.high,f=e-i,d=this.view.resolution;if(DEBUG){$("#debug").text(d+" "+this.view.zoom_res)}var k=$("<div style='position: relative;'></div>");this.content_div.children(":first").remove();this.content_div.append(k);var l=this.content_div.width()/f;var h;var a=Math.floor(i/d/DENSITY);while((a*DENSITY*d)<e){var j=this.content_div.width()+"_"+this.view.zoom_level+"_"+a;var c=this.tile_cache.get(j);if(c){var g=a*DENSITY*d;var b=(g-i)*l;if(this.left_offset)!
{b-=this.left_offset}c.css({left:b});k.append(c);this.max_height=Math.
max(this.max_height,c.height())}else{this.delayed_draw(this,j,i,e,a,d,k,l)}a+=1}},delayed_draw:function(c,e,a,f,b,d,g,h){setTimeout(function(){if(!(a>c.view.high||f<c.view.low)){tile_element=c.draw_tile(d,b,g,h);if(tile_element){c.tile_cache.set(e,tile_element);c.max_height=Math.max(c.max_height,tile_element.height());c.content_div.css("height",c.max_height+"px")}}},50)}});var LabelTrack=function(a){Track.call(this,null,a);this.track_type="LabelTrack";this.hidden=true;this.container_div.addClass("label-track")};$.extend(LabelTrack.prototype,Track.prototype,{draw:function(){var c=this.view,d=c.high-c.low,g=Math.floor(Math.pow(10,Math.floor(Math.log(d)/Math.log(10)))),a=Math.floor(c.low/g)*g,e=this.content_div.width(),b=$("<div style='position: relative; height: 1.3em;'></div>");while(a<c.high){var f=(a-c.low)/d*e;b.append($("<div class='label'>"+commatize(a)+"</div>").css({position:"absolute",left:f-1}));a+=g}this.content_div.children(":first").remove();this.content_div.appen!
d(b)}});var LineTrack=function(c,a,b){this.track_type="LineTrack";Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.height_px=100;this.container_div.addClass("line-track");this.dataset_id=a;this.data_cache=new Cache(CACHED_DATA);this.tile_cache=new Cache(CACHED_TILES_LINE);this.prefs={min_value:undefined,max_value:undefined,mode:"Line"};if(b.min_value!==undefined){this.prefs.min_value=b.min_value}if(b.max_value!==undefined){this.prefs.max_value=b.max_value}if(b.mode!==undefined){this.prefs.mode=b.mode}};$.extend(LineTrack.prototype,TiledTrack.prototype,{init:function(){var a=this,b=a.view.tracks.indexOf(a);a.vertical_range=undefined;this.init_each({stats:true,chrom:a.view.chrom,low:null,high:null,dataset_id:a.dataset_id},function(c){data=c.data;if(isNaN(parseFloat(a.prefs.min_value))||isNaN(parseFloat(a.prefs.max_value))){a.prefs.min_value=data.min;a.prefs.max_value=data.max;$("#track_"+b+"_minval").val(a.prefs.min_value);$("#track_"+b+"_maxval").val(a.prefs.max_!
value)}a.vertical_range=a.prefs.max_value-a.prefs.min_value;a.total_fr
equency=data.total_frequency;$("#linetrack_"+b+"_minval").remove();$("#linetrack_"+b+"_maxval").remove();var e=$("<div></div>").addClass("yaxislabel").attr("id","linetrack_"+b+"_minval").text(a.prefs.min_value);var d=$("<div></div>").addClass("yaxislabel").attr("id","linetrack_"+b+"_maxval").text(a.prefs.max_value);d.css({position:"relative",top:"25px"});d.prependTo(a.container_div);e.css({position:"relative",top:a.height_px+55+"px"});e.prependTo(a.container_div)})},get_data:function(d,b){var c=this,a=b*DENSITY*d,f=(b+1)*DENSITY*d,e=d+"_"+b;if(!c.data_queue[e]){c.data_queue[e]=true;$.ajax({url:data_url,dataType:"json",data:{chrom:this.view.chrom,low:a,high:f,dataset_id:this.dataset_id,resolution:this.view.resolution},success:function(g){data=g.data;c.data_cache.set(e,data);delete c.data_queue[e];c.draw()},error:function(h,g,i){console.log(h,g,i)}})}},draw_tile:function(o,q,c,e){if(this.vertical_range===undefined){return}var r=q*DENSITY*o,a=DENSITY*o,b=$("<canvas class='tile'!
></canvas>"),u=o+"_"+q;if(this.data_cache.get(u)===undefined){this.get_data(o,q);return}var t=this.data_cache.get(u);if(t===null){return}b.css({position:"absolute",top:0,left:(r-this.view.low)*e});b.get(0).width=Math.ceil(a*e);b.get(0).height=this.height_px;var n=b.get(0).getContext("2d"),k=false,l=this.prefs.min_value,g=this.prefs.max_value,m=this.vertical_range,s=this.total_frequency,d=this.height_px;n.beginPath();if(t.length>1){var f=Math.ceil((t[1][0]-t[0][0])*e)}else{var f=10}for(var p=0;p<t.length;p++){var j=t[p][0]-r;var h=t[p][1];if(this.prefs.mode=="Intensity"){if(h===null){continue}j=j*e;if(h<=l){h=l}else{if(h>=g){h=g}}h=255-Math.floor((h-l)/m*255);n.fillStyle="rgb("+h+","+h+","+h+")";n.fillRect(j,0,f,this.height_px)}else{if(h===null){k=false;continue}else{j=j*e;if(h<=l){h=l}else{if(h>=g){h=g}}h=Math.round(d-(h-l)/m*d);if(k){n.lineTo(j,h)}else{n.moveTo(j,h);k=true}}}}n.stroke();c.append(b);return b},gen_options:function(n){var a=$("<div></div>").addClass("form-row!
");var h="track_"+n+"_minval",k="track_"+n+"_maxval",e="track_"+n+"_mo
de",l=$("<label></label>").attr("for",h).text("Min value:"),b=(this.prefs.min_value===undefined?"":this.prefs.min_value),m=$("<input></input>").attr("id",h).val(b),g=$("<label></label>").attr("for",k).text("Max value:"),j=(this.prefs.max_value===undefined?"":this.prefs.max_value),f=$("<input></input>").attr("id",k).val(j),d=$("<label></label>").attr("for",e).text("Display mode:"),i=(this.prefs.mode===undefined?"Line":this.prefs.mode),c=$('<select id="'+e+'"><option value="Line" id="mode_Line">Line</option><option value="Intensity" id="mode_Intensity">Intensity</option></select>');c.children("#mode_"+i).attr("selected","selected");return a.append(l).append(m).append(g).append(f).append(d).append(c)},update_options:function(d){var a=$("#track_"+d+"_minval").val(),c=$("#track_"+d+"_maxval").val(),b=$("#track_"+d+"_mode option:selected").val();if(a!==this.prefs.min_value||c!==this.prefs.max_value||b!=this.prefs.mode){this.prefs.min_value=parseFloat(a);this.prefs.max_value=parseF!
loat(c);this.prefs.mode=b;this.vertical_range=this.prefs.max_value-this.prefs.min_value;$("#linetrack_"+d+"_minval").text(this.prefs.min_value);$("#linetrack_"+d+"_maxval").text(this.prefs.max_value);this.tile_cache.clear();this.draw()}}});var FeatureTrack=function(c,a,b){this.track_type="FeatureTrack";Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.height_px=100;this.container_div.addClass("feature-track");this.dataset_id=a;this.zo_slots={};this.show_labels_scale=0.001;this.showing_details=false;this.vertical_detail_px=10;this.vertical_nodetail_px=3;this.default_font="9px Monaco, Lucida Console, monospace";this.left_offset=200;this.inc_slots={};this.data_queue={};this.s_e_by_tile={};this.tile_cache=new Cache(CACHED_TILES_FEATURE);this.data_cache=new Cache(20);this.prefs={block_color:"black",label_color:"black",show_counts:false};if(b.block_color!==undefined){this.prefs.block_color=b.block_color}if(b.label_color!==undefined){this.prefs.label_color=b.label_color!
}if(b.show_counts!==undefined){this.prefs.show_counts=b.show_counts}};
$.extend(FeatureTrack.prototype,TiledTrack.prototype,{init:function(){var a=this,b=a.view.max_low+"_"+a.view.max_high;this.init_each({low:a.view.max_low,high:a.view.max_high,dataset_id:a.dataset_id,chrom:a.view.chrom,resolution:this.view.resolution},function(c){a.data_cache.set(b,c);a.draw()})},get_data:function(a,d){var b=this,c=a+"_"+d;if(!b.data_queue[c]){b.data_queue[c]=true;$.getJSON(data_url,{chrom:b.view.chrom,low:a,high:d,dataset_id:b.dataset_id,resolution:this.view.resolution},function(e){b.data_cache.set(c,e);delete b.data_queue[c];b.draw()})}},incremental_slots:function(a,g,c){if(!this.inc_slots[a]){this.inc_slots[a]={};this.inc_slots[a].w_scale=1/a;this.s_e_by_tile[a]={}}var m=this.inc_slots[a].w_scale,u=[],h=0,b=$("<canvas></canvas>").get(0).getContext("2d"),n=this.view.max_low;var d,f,w=[];for(var r=0,s=g.length;r<s;r++){var e=g[r],l=e[0];if(this.inc_slots[a][l]!==undefined){h=Math.max(h,this.inc_slots[a][l]);w.push(this.inc_slots[a][l])}else{u.push(r)}}for(var!
r=0,s=u.length;r<s;r++){var e=g[u[r]];l=e[0],feature_start=e[1],feature_end=e[2],feature_name=e[3];d=Math.floor((feature_start-n)*m);if(!c){d-=b.measureText(feature_name).width}f=Math.ceil((feature_end-n)*m);var q=0;while(true){var o=true;if(this.s_e_by_tile[a][q]!==undefined){for(var p=0,v=this.s_e_by_tile[a][q].length;p<v;p++){var t=this.s_e_by_tile[a][q][p];if(f>t[0]&&d<t[1]){o=false;break}}}if(o){if(this.s_e_by_tile[a][q]===undefined){this.s_e_by_tile[a][q]=[]}this.s_e_by_tile[a][q].push([d,f]);this.inc_slots[a][l]=q;h=Math.max(h,q);break}q++}}return h},draw_tile:function(R,h,m,ae){var z=h*DENSITY*R,X=(h+1)*DENSITY*R,w=DENSITY*R;var ac,ad,p;var Y=z+"_"+X;var ac=this.data_cache.get(Y);if(ac===undefined){this.data_queue[[z,X]]=true;this.get_data(z,X);return}if(ac.dataset_type=="array_tree"){p=30}else{var P=(ac.extra_info==="no_detail");var af=(P?this.vertical_nodetail_px:this.vertical_detail_px);p=this.incremental_slots(this.view.zoom_res,ac.data,P)*af+15;m.parent().css(!
"height",Math.max(this.height_px,p)+"px");ad=this.inc_slots[this.view.
zoom_res]}var a=Math.ceil(w*ae),F=$("<canvas class='tile'></canvas>"),T=this.prefs.label_color,f=this.prefs.block_color,J=this.left_offset;F.css({position:"absolute",top:0,left:(z-this.view.low)*ae-J});F.get(0).width=a+J;F.get(0).height=p;var t=F.get(0).getContext("2d");t.fillStyle=this.prefs.block_color;t.font=this.default_font;t.textAlign="right";var C=55,W=255-C,g=W*2/3;if(ac.dataset_type=="summary_tree"){var L=ac.data;var v=ac.max;var l=ac.avg;if(ac.data.length>2){var b=Math.ceil((L[1][0]-L[0][0])*ae)}else{var b=50}for(var aa=0,s=L.length;aa<s;aa++){var N=Math.ceil((L[aa][0]-z)*ae);var M=L[aa][1];if(!M){continue}var E=Math.floor(W-(M/v)*W);t.fillStyle="rgb("+E+","+E+","+E+")";t.fillRect(N+J,0,b,20);if(this.prefs.show_counts){if(E>g){t.fillStyle="black"}else{t.fillStyle="#ddd"}t.textAlign="center";t.fillText(L[aa][1],N+J+(b/2),12)}}m.append(F);return F}var ac=ac.data;var Z=0;for(var aa=0,s=ac.length;aa<s;aa++){var G=ac[aa],D=G[0],ab=G[1],O=G[2],A=G[3];if(ab<=X&&O>=z){var !
Q=Math.floor(Math.max(0,(ab-z)*ae)),u=Math.ceil(Math.min(a,(O-z)*ae)),K=ad[D]*af;if(P){t.fillRect(Q+J,K+5,u-Q,1)}else{var r=G[4],I=G[5],S=G[6],e=G[7];var q,U,B=null,ag=null;if(I&&S){B=Math.floor(Math.max(0,(I-z)*ae));ag=Math.ceil(Math.min(a,(S-z)*ae))}if(ab>z){t.fillStyle=T;t.fillText(A,Q-1+J,K+8);t.fillStyle=f}if(e){if(r){if(r=="+"){t.fillStyle=RIGHT_STRAND}else{if(r=="-"){t.fillStyle=LEFT_STRAND}}t.fillRect(Q+J,K,u-Q,10);t.fillStyle=f}for(var Y=0,d=e.length;Y<d;Y++){var n=e[Y],c=Math.floor(Math.max(0,(n[0]-z)*ae)),H=Math.ceil(Math.min(a,(n[1]-z)*ae));if(c>H){continue}q=5;U=3;t.fillRect(c+J,K+U,H-c,q);if(B!==undefined&&!(c>ag||H<B)){q=9;U=1;var V=Math.max(c,B),o=Math.min(H,ag);t.fillRect(V+J,K+U,o-V,q)}}}else{q=9;U=1;t.fillRect(Q+J,K+U,u-Q,q);if(G.strand){if(G.strand=="+"){t.fillStyle=RIGHT_STRAND_INV}else{if(G.strand=="-"){t.fillStyle=LEFT_STRAND_INV}}t.fillRect(Q+J,K,u-Q,10);t.fillStyle=prefs.block_color}}}Z++}}m.append(F);return F},gen_options:function(i){var a=$("<div>!
</div>").addClass("form-row");var e="track_"+i+"_block_color",k=$("<la
bel></label>").attr("for",e).text("Block color:"),l=$("<input></input>").attr("id",e).attr("name",e).val(this.prefs.block_color),j="track_"+i+"_label_color",g=$("<label></label>").attr("for",j).text("Text color:"),h=$("<input></input>").attr("id",j).attr("name",j).val(this.prefs.label_color),f="track_"+i+"_show_count",c=$("<label></label>").attr("for",f).text("Show summary counts"),b=$('<input type="checkbox" style="float:left;"></input>').attr("id",f).attr("name",f).attr("checked",this.prefs.show_counts),d=$("<div></div>").append(b).append(c);return a.append(k).append(l).append(g).append(h).append(d)},update_options:function(d){var b=$("#track_"+d+"_block_color").val(),c=$("#track_"+d+"_label_color").val(),a=$("#track_"+d+"_show_count").attr("checked");if(b!==this.prefs.block_color||c!==this.prefs.label_color||a!=this.prefs.show_counts){this.prefs.block_color=b;this.prefs.label_color=c;this.prefs.show_counts=a;this.tile_cache.clear();this.draw()}}});var ReadTrack=function(c!
,a,b){FeatureTrack.call(this,c,a,b);this.track_type="ReadTrack"};$.extend(ReadTrack.prototype,TiledTrack.prototype,FeatureTrack.prototype,{});
\ No newline at end of file
+var DEBUG=false;var DENSITY=1000,FEATURE_LEVELS=10,DATA_ERROR="There was an error in indexing this dataset.",DATA_NOCONVERTER="A converter for this dataset is not installed. Please check your datatypes_conf.xml file.",DATA_NONE="No data for this chrom/contig.",DATA_PENDING="Currently indexing... please wait",DATA_LOADING="Loading data...",CACHED_TILES_FEATURE=10,CACHED_TILES_LINE=30,CACHED_DATA=5,CONTEXT=$("<canvas></canvas>").get(0).getContext("2d"),RIGHT_STRAND,LEFT_STRAND;var right_img=new Image();right_img.src="../images/visualization/strand_right.png";right_img.onload=function(){RIGHT_STRAND=CONTEXT.createPattern(right_img,"repeat")};var left_img=new Image();left_img.src="../images/visualization/strand_left.png";left_img.onload=function(){LEFT_STRAND=CONTEXT.createPattern(left_img,"repeat")};var right_img_inv=new Image();right_img_inv.src="../images/visualization/strand_right_inv.png";right_img_inv.onload=function(){RIGHT_STRAND_INV=CONTEXT.createPattern(right_img_inv,!
"repeat")};var left_img_inv=new Image();left_img_inv.src="../images/visualization/strand_left_inv.png";left_img_inv.onload=function(){LEFT_STRAND_INV=CONTEXT.createPattern(left_img_inv,"repeat")};function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}var Cache=function(a){this.num_elements=a;this.clear()};$.extend(Cache.prototype,{get:function(b){var a=this.key_ary.indexOf(b);if(a!=-1){this.key_ary.splice(a,1);this.key_ary.push(b)}return this.obj_cache[b]},set:function(b,c){if(!this.obj_cache[b]){if(this.key_ary.length>=this.num_elements){var a=this.key_ary.shift();delete this.obj_cache[a]}this.key_ary.push(b)}this.obj_cache[b]=c;return c},clear:function(){this.obj_cache={};this.key_ary=[]}});var Drawer=function(){};$.extend(Drawer.prototype,{intensity:function(b,a,c){},});drawer=new Drawer();var View=function(b,d,c,a){this.vis_id=c;this.dbkey=a;this.title=d;this.chrom=b;this.tracks=[];this.label_tracks=[];this.max_low=0;this.max_h!
igh=0;this.center=(this.max_high-this.max_low)/2;this.zoom_factor=3;th
is.zoom_level=0;this.track_id_counter=0};$.extend(View.prototype,{add_track:function(a){a.view=this;a.track_id=this.track_id_counter;this.tracks.push(a);if(a.init){a.init()}a.container_div.attr("id","track_"+a.track_id);this.track_id_counter+=1},add_label_track:function(a){a.view=this;this.label_tracks.push(a)},remove_track:function(a){a.container_div.fadeOut("slow",function(){$(this).remove()});delete this.tracks[a]},update_options:function(){var b=$("ul#sortable-ul").sortable("toArray");var d=[];var c=$("#viewport > div").sort(function(g,f){return b.indexOf($(g).attr("id"))>b.indexOf($(f).attr("id"))});$("#viewport > div").remove();$("#viewport").html(c);for(var e in view.tracks){var a=view.tracks[e];if(a.update_options){a.update_options(e)}}},reset:function(){this.low=this.max_low;this.high=this.max_high;this.center=this.center=(this.max_high-this.max_low)/2;this.zoom_level=0;$(".yaxislabel").remove()},redraw:function(f){this.span=this.max_high-this.max_low;var d=this.spa!
n/Math.pow(this.zoom_factor,this.zoom_level),b=this.center-(d/2),e=b+d;if(b<0){b=0;e=b+d}else{if(e>this.max_high){e=this.max_high;b=e-d}}this.low=Math.floor(b);this.high=Math.ceil(e);this.center=Math.round(this.low+(this.high-this.low)/2);this.resolution=Math.pow(10,Math.ceil(Math.log((this.high-this.low)/200)/Math.LN10));this.zoom_res=Math.pow(FEATURE_LEVELS,Math.max(0,Math.ceil(Math.log(this.resolution,FEATURE_LEVELS)/Math.log(FEATURE_LEVELS))));$("#overview-box").css({left:(this.low/this.span)*$("#overview-viewport").width(),width:Math.max(12,((this.high-this.low)/this.span)*$("#overview-viewport").width())}).show();$("#low").val(commatize(this.low));$("#high").val(commatize(this.high));if(!f){for(var c=0,a=this.tracks.length;c<a;c++){if(this.tracks[c].enabled){this.tracks[c].draw()}}for(var c=0,a=this.label_tracks.length;c<a;c++){this.label_tracks[c].draw()}}},zoom_in:function(a,b){if(this.max_high===0||this.high-this.low<30){return}if(a){this.center=a/b.width()*(this.h!
igh-this.low)+this.low}this.zoom_level+=1;this.redraw()},zoom_out:func
tion(){if(this.max_high===0){return}if(this.zoom_level<=0){this.zoom_level=0;return}this.zoom_level-=1;this.redraw()}});var Track=function(a,b){this.name=a;this.parent_element=b;this.init_global()};$.extend(Track.prototype,{init_global:function(){this.header_div=$("<div class='track-header'>").text(this.name);this.content_div=$("<div class='track-content'>");this.container_div=$("<div></div>").addClass("track").append(this.header_div).append(this.content_div);this.parent_element.append(this.container_div)},init_each:function(c,b){var a=this;a.enabled=false;a.data_queue={};a.tile_cache.clear();a.data_cache.clear();a.content_div.css("height","30px");if(!a.content_div.text()){a.content_div.text(DATA_LOADING)}a.container_div.removeClass("nodata error pending");if(a.view.chrom){$.getJSON(data_url,c,function(d){if(!d||d==="error"){a.container_div.addClass("error");a.content_div.text(DATA_ERROR)}else{if(d==="no converter"){a.container_div.addClass("error");a.content_div.text(DATA_N!
OCONVERTER)}else{if(d.data&&d.data.length===0||d.data===null){a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}else{if(d==="pending"){a.container_div.addClass("pending");a.content_div.text(DATA_PENDING);setTimeout(function(){a.init()},5000)}else{a.content_div.text("");a.content_div.css("height",a.height_px+"px");a.enabled=true;b(d);a.draw()}}}}})}else{a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}}});var TiledTrack=function(){};$.extend(TiledTrack.prototype,Track.prototype,{draw:function(){var i=this.view.low,e=this.view.high,f=e-i,d=this.view.resolution;if(DEBUG){$("#debug").text(d+" "+this.view.zoom_res)}var k=$("<div style='position: relative;'></div>");this.content_div.children(":first").remove();this.content_div.append(k);var l=this.content_div.width()/f;var h;var a=Math.floor(i/d/DENSITY);while((a*DENSITY*d)<e){var j=this.content_div.width()+"_"+this.view.zoom_level+"_"+a;var c=this.tile_cache.get(j);if(c){var g=a*DENSITY*d;var b=(g-!
i)*l;if(this.left_offset){b-=this.left_offset}c.css({left:b});k.append
(c);this.max_height=Math.max(this.max_height,c.height())}else{this.delayed_draw(this,j,i,e,a,d,k,l)}a+=1}},delayed_draw:function(c,e,a,f,b,d,g,h){setTimeout(function(){if(!(a>c.view.high||f<c.view.low)){tile_element=c.draw_tile(d,b,g,h);if(tile_element){c.tile_cache.set(e,tile_element);c.max_height=Math.max(c.max_height,tile_element.height());c.content_div.css("height",c.max_height+"px")}}},50)}});var LabelTrack=function(a){Track.call(this,null,a);this.track_type="LabelTrack";this.hidden=true;this.container_div.addClass("label-track")};$.extend(LabelTrack.prototype,Track.prototype,{draw:function(){var c=this.view,d=c.high-c.low,g=Math.floor(Math.pow(10,Math.floor(Math.log(d)/Math.log(10)))),a=Math.floor(c.low/g)*g,e=this.content_div.width(),b=$("<div style='position: relative; height: 1.3em;'></div>");while(a<c.high){var f=(a-c.low)/d*e;b.append($("<div class='label'>"+commatize(a)+"</div>").css({position:"absolute",left:f-1}));a+=g}this.content_div.children(":first").remove!
();this.content_div.append(b)}});var LineTrack=function(c,a,b){this.track_type="LineTrack";Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.height_px=100;this.container_div.addClass("line-track");this.dataset_id=a;this.data_cache=new Cache(CACHED_DATA);this.tile_cache=new Cache(CACHED_TILES_LINE);this.prefs={min_value:undefined,max_value:undefined,mode:"Line"};if(b.min_value!==undefined){this.prefs.min_value=b.min_value}if(b.max_value!==undefined){this.prefs.max_value=b.max_value}if(b.mode!==undefined){this.prefs.mode=b.mode}};$.extend(LineTrack.prototype,TiledTrack.prototype,{init:function(){var a=this,b=a.view.tracks.indexOf(a);a.vertical_range=undefined;this.init_each({stats:true,chrom:a.view.chrom,low:null,high:null,dataset_id:a.dataset_id},function(c){data=c.data;if(isNaN(parseFloat(a.prefs.min_value))||isNaN(parseFloat(a.prefs.max_value))){a.prefs.min_value=data.min;a.prefs.max_value=data.max;$("#track_"+b+"_minval").val(a.prefs.min_value);$("#track_"+b+"_!
maxval").val(a.prefs.max_value)}a.vertical_range=a.prefs.max_value-a.p
refs.min_value;a.total_frequency=data.total_frequency;$("#linetrack_"+b+"_minval").remove();$("#linetrack_"+b+"_maxval").remove();var e=$("<div></div>").addClass("yaxislabel").attr("id","linetrack_"+b+"_minval").text(a.prefs.min_value);var d=$("<div></div>").addClass("yaxislabel").attr("id","linetrack_"+b+"_maxval").text(a.prefs.max_value);d.css({position:"relative",top:"25px"});d.prependTo(a.container_div);e.css({position:"relative",top:a.height_px+55+"px"});e.prependTo(a.container_div)})},get_data:function(d,b){var c=this,a=b*DENSITY*d,f=(b+1)*DENSITY*d,e=d+"_"+b;if(!c.data_queue[e]){c.data_queue[e]=true;$.ajax({url:data_url,dataType:"json",data:{chrom:this.view.chrom,low:a,high:f,dataset_id:this.dataset_id,resolution:this.view.resolution},success:function(g){data=g.data;c.data_cache.set(e,data);delete c.data_queue[e];c.draw()},error:function(h,g,i){console.log(h,g,i)}})}},draw_tile:function(o,q,c,e){if(this.vertical_range===undefined){return}var r=q*DENSITY*o,a=DENSITY*o,!
b=$("<canvas class='tile'></canvas>"),u=o+"_"+q;if(this.data_cache.get(u)===undefined){this.get_data(o,q);return}var t=this.data_cache.get(u);if(t===null){return}b.css({position:"absolute",top:0,left:(r-this.view.low)*e});b.get(0).width=Math.ceil(a*e);b.get(0).height=this.height_px;var n=b.get(0).getContext("2d"),k=false,l=this.prefs.min_value,g=this.prefs.max_value,m=this.vertical_range,s=this.total_frequency,d=this.height_px;n.beginPath();if(t.length>1){var f=Math.ceil((t[1][0]-t[0][0])*e)}else{var f=10}for(var p=0;p<t.length;p++){var j=t[p][0]-r;var h=t[p][1];if(this.prefs.mode=="Intensity"){if(h===null){continue}j=j*e;if(h<=l){h=l}else{if(h>=g){h=g}}h=255-Math.floor((h-l)/m*255);n.fillStyle="rgb("+h+","+h+","+h+")";n.fillRect(j,0,f,this.height_px)}else{if(h===null){k=false;continue}else{j=j*e;if(h<=l){h=l}else{if(h>=g){h=g}}h=Math.round(d-(h-l)/m*d);if(k){n.lineTo(j,h)}else{n.moveTo(j,h);k=true}}}}n.stroke();c.append(b);return b},gen_options:function(n){var a=$("<div></!
div>").addClass("form-row");var h="track_"+n+"_minval",k="track_"+n+"_
maxval",e="track_"+n+"_mode",l=$("<label></label>").attr("for",h).text("Min value:"),b=(this.prefs.min_value===undefined?"":this.prefs.min_value),m=$("<input></input>").attr("id",h).val(b),g=$("<label></label>").attr("for",k).text("Max value:"),j=(this.prefs.max_value===undefined?"":this.prefs.max_value),f=$("<input></input>").attr("id",k).val(j),d=$("<label></label>").attr("for",e).text("Display mode:"),i=(this.prefs.mode===undefined?"Line":this.prefs.mode),c=$('<select id="'+e+'"><option value="Line" id="mode_Line">Line</option><option value="Intensity" id="mode_Intensity">Intensity</option></select>');c.children("#mode_"+i).attr("selected","selected");return a.append(l).append(m).append(g).append(f).append(d).append(c)},update_options:function(d){var a=$("#track_"+d+"_minval").val(),c=$("#track_"+d+"_maxval").val(),b=$("#track_"+d+"_mode option:selected").val();if(a!==this.prefs.min_value||c!==this.prefs.max_value||b!=this.prefs.mode){this.prefs.min_value=parseFloat(a);th!
is.prefs.max_value=parseFloat(c);this.prefs.mode=b;this.vertical_range=this.prefs.max_value-this.prefs.min_value;$("#linetrack_"+d+"_minval").text(this.prefs.min_value);$("#linetrack_"+d+"_maxval").text(this.prefs.max_value);this.tile_cache.clear();this.draw()}}});var FeatureTrack=function(c,a,b){this.track_type="FeatureTrack";Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.height_px=100;this.container_div.addClass("feature-track");this.dataset_id=a;this.zo_slots={};this.show_labels_scale=0.001;this.showing_details=false;this.vertical_detail_px=10;this.vertical_nodetail_px=3;this.default_font="9px Monaco, Lucida Console, monospace";this.left_offset=200;this.inc_slots={};this.data_queue={};this.s_e_by_tile={};this.tile_cache=new Cache(CACHED_TILES_FEATURE);this.data_cache=new Cache(20);this.prefs={block_color:"black",label_color:"black",show_counts:false};if(b.block_color!==undefined){this.prefs.block_color=b.block_color}if(b.label_color!==undefined){this.prefs.!
label_color=b.label_color}if(b.show_counts!==undefined){this.prefs.sho
w_counts=b.show_counts}};$.extend(FeatureTrack.prototype,TiledTrack.prototype,{init:function(){var a=this,b=a.view.max_low+"_"+a.view.max_high;this.init_each({low:a.view.max_low,high:a.view.max_high,dataset_id:a.dataset_id,chrom:a.view.chrom,resolution:this.view.resolution},function(c){a.data_cache.set(b,c);a.draw()})},get_data:function(a,d){var b=this,c=a+"_"+d;if(!b.data_queue[c]){b.data_queue[c]=true;$.getJSON(data_url,{chrom:b.view.chrom,low:a,high:d,dataset_id:b.dataset_id,resolution:this.view.resolution},function(e){b.data_cache.set(c,e);delete b.data_queue[c];b.draw()})}},incremental_slots:function(a,g,c){if(!this.inc_slots[a]){this.inc_slots[a]={};this.inc_slots[a].w_scale=1/a;this.s_e_by_tile[a]={}}var m=this.inc_slots[a].w_scale,v=[],h=0,b=$("<canvas></canvas>").get(0).getContext("2d"),n=this.view.max_low;var d,f,x=[];for(var s=0,t=g.length;s<t;s++){var e=g[s],l=e[0];if(this.inc_slots[a][l]!==undefined){h=Math.max(h,this.inc_slots[a][l]);x.push(this.inc_slots[a][l]!
)}else{v.push(s)}}for(var s=0,t=v.length;s<t;s++){var e=g[v[s]];l=e[0],feature_start=e[1],feature_end=e[2],feature_name=e[3];d=Math.floor((feature_start-n)*m);f=Math.ceil((feature_end-n)*m);if(!c){var p=b.measureText(feature_name).width;if(d-p<0){f+=p}else{d-=p}}var r=0;while(true){var o=true;if(this.s_e_by_tile[a][r]!==undefined){for(var q=0,w=this.s_e_by_tile[a][r].length;q<w;q++){var u=this.s_e_by_tile[a][r][q];if(f>u[0]&&d<u[1]){o=false;break}}}if(o){if(this.s_e_by_tile[a][r]===undefined){this.s_e_by_tile[a][r]=[]}this.s_e_by_tile[a][r].push([d,f]);this.inc_slots[a][l]=r;h=Math.max(h,r);break}r++}}return h},draw_tile:function(R,h,m,ae){var z=h*DENSITY*R,X=(h+1)*DENSITY*R,w=DENSITY*R;var ac,ad,p;var Y=z+"_"+X;var ac=this.data_cache.get(Y);if(ac===undefined){this.data_queue[[z,X]]=true;this.get_data(z,X);return}if(ac.dataset_type=="array_tree"){p=30}else{var P=(ac.extra_info==="no_detail");var af=(P?this.vertical_nodetail_px:this.vertical_detail_px);p=this.incremental_slo!
ts(this.view.zoom_res,ac.data,P)*af+15;m.parent().css("height",Math.ma
x(this.height_px,p)+"px");ad=this.inc_slots[this.view.zoom_res]}var a=Math.ceil(w*ae),F=$("<canvas class='tile'></canvas>"),T=this.prefs.label_color,f=this.prefs.block_color,J=this.left_offset;F.css({position:"absolute",top:0,left:(z-this.view.low)*ae-J});F.get(0).width=a+J;F.get(0).height=p;var t=F.get(0).getContext("2d");t.fillStyle=this.prefs.block_color;t.font=this.default_font;t.textAlign="right";var C=55,W=255-C,g=W*2/3;if(ac.dataset_type=="summary_tree"){var L=ac.data;var v=ac.max;var l=ac.avg;if(ac.data.length>2){var b=Math.ceil((L[1][0]-L[0][0])*ae)}else{var b=50}for(var aa=0,s=L.length;aa<s;aa++){var N=Math.ceil((L[aa][0]-z)*ae);var M=L[aa][1];if(!M){continue}var E=Math.floor(W-(M/v)*W);t.fillStyle="rgb("+E+","+E+","+E+")";t.fillRect(N+J,0,b,20);if(this.prefs.show_counts){if(E>g){t.fillStyle="black"}else{t.fillStyle="#ddd"}t.textAlign="center";t.fillText(L[aa][1],N+J+(b/2),12)}}m.append(F);return F}var ac=ac.data;var Z=0;for(var aa=0,s=ac.length;aa<s;aa++){var G=ac!
[aa],D=G[0],ab=G[1],O=G[2],A=G[3];if(ab<=X&&O>=z){var Q=Math.floor(Math.max(0,(ab-z)*ae)),u=Math.ceil(Math.min(a,(O-z)*ae)),K=ad[D]*af;if(P){t.fillRect(Q+J,K+5,u-Q,1)}else{var r=G[4],I=G[5],S=G[6],e=G[7];var q,U,B=null,ag=null;if(I&&S){B=Math.floor(Math.max(0,(I-z)*ae));ag=Math.ceil(Math.min(a,(S-z)*ae))}if(A!==undefined&&ab>z){t.fillStyle=T;if(h===0&&Q-t.measureText(A).width<0){t.textAlign="left";t.fillText(A,u+2+J,K+8)}else{t.textAlign="right";t.fillText(A,Q-2+J,K+8)}t.fillStyle=f}if(e){if(r){if(r=="+"){t.fillStyle=RIGHT_STRAND}else{if(r=="-"){t.fillStyle=LEFT_STRAND}}t.fillRect(Q+J,K,u-Q,10);t.fillStyle=f}for(var Y=0,d=e.length;Y<d;Y++){var n=e[Y],c=Math.floor(Math.max(0,(n[0]-z)*ae)),H=Math.ceil(Math.min(a,(n[1]-z)*ae));if(c>H){continue}q=5;U=3;t.fillRect(c+J,K+U,H-c,q);if(B!==undefined&&!(c>ag||H<B)){q=9;U=1;var V=Math.max(c,B),o=Math.min(H,ag);t.fillRect(V+J,K+U,o-V,q)}}}else{q=9;U=1;t.fillRect(Q+J,K+U,u-Q,q);if(G.strand){if(G.strand=="+"){t.fillStyle=RIGHT_STRAND_INV!
}else{if(G.strand=="-"){t.fillStyle=LEFT_STRAND_INV}}t.fillRect(Q+J,K,
u-Q,10);t.fillStyle=prefs.block_color}}}Z++}}m.append(F);return F},gen_options:function(i){var a=$("<div></div>").addClass("form-row");var e="track_"+i+"_block_color",k=$("<label></label>").attr("for",e).text("Block color:"),l=$("<input></input>").attr("id",e).attr("name",e).val(this.prefs.block_color),j="track_"+i+"_label_color",g=$("<label></label>").attr("for",j).text("Text color:"),h=$("<input></input>").attr("id",j).attr("name",j).val(this.prefs.label_color),f="track_"+i+"_show_count",c=$("<label></label>").attr("for",f).text("Show summary counts"),b=$('<input type="checkbox" style="float:left;"></input>').attr("id",f).attr("name",f).attr("checked",this.prefs.show_counts),d=$("<div></div>").append(b).append(c);return a.append(k).append(l).append(g).append(h).append(d)},update_options:function(d){var b=$("#track_"+d+"_block_color").val(),c=$("#track_"+d+"_label_color").val(),a=$("#track_"+d+"_show_count").attr("checked");if(b!==this.prefs.block_color||c!==this.prefs.labe!
l_color||a!=this.prefs.show_counts){this.prefs.block_color=b;this.prefs.label_color=c;this.prefs.show_counts=a;this.tile_cache.clear();this.draw()}}});var ReadTrack=function(c,a,b){FeatureTrack.call(this,c,a,b);this.track_type="ReadTrack"};$.extend(ReadTrack.prototype,TiledTrack.prototype,FeatureTrack.prototype,{});
\ No newline at end of file
diff -r fe14a58568ad -r 155f2e89a02b static/scripts/trackster.js
--- a/static/scripts/trackster.js Wed Apr 21 16:42:09 2010 -0400
+++ b/static/scripts/trackster.js Wed Apr 21 18:35:59 2010 -0400
@@ -12,7 +12,7 @@
DATA_LOADING = "Loading data...",
CACHED_TILES_FEATURE = 10,
CACHED_TILES_LINE = 30,
- CACHED_DATA = 20,
+ CACHED_DATA = 5,
CONTEXT = $("<canvas></canvas>").get(0).getContext("2d"),
RIGHT_STRAND, LEFT_STRAND;
@@ -227,7 +227,9 @@
track.tile_cache.clear();
track.data_cache.clear();
track.content_div.css( "height", "30px" );
- track.content_div.text(DATA_LOADING);
+ if (!track.content_div.text()) {
+ track.content_div.text(DATA_LOADING);
+ }
track.container_div.removeClass("nodata error pending");
if (track.view.chrom) {
@@ -640,10 +642,16 @@
feature_end = feature[2],
feature_name = feature[3];
f_start = Math.floor( (feature_start - max_low) * w_scale );
+ f_end = Math.ceil( (feature_end - max_low) * w_scale );
+
if (!no_detail) {
- f_start -= dummy_canvas.measureText(feature_name).width;
+ var text_len = dummy_canvas.measureText(feature_name).width;
+ if (f_start - text_len < 0) {
+ f_end += text_len;
+ } else {
+ f_start -= text_len;
+ }
}
- f_end = Math.ceil( (feature_end - max_low) * w_scale );
var j = 0;
// Try to fit the feature to the first slot that doesn't overlap any other features in that slot
@@ -795,9 +803,15 @@
thick_start = Math.floor( Math.max(0, (feature_ts - tile_low) * w_scale) );
thick_end = Math.ceil( Math.min(width, (feature_te - tile_low) * w_scale) );
}
- if (feature_start > tile_low) {
+ if (feature_name !== undefined && feature_start > tile_low) {
ctx.fillStyle = label_color;
- ctx.fillText(feature_name, f_start - 1 + left_offset, y_center + 8);
+ if (tile_index === 0 && f_start - ctx.measureText(feature_name).width < 0) {
+ ctx.textAlign = "left";
+ ctx.fillText(feature_name, f_end + 2 + left_offset, y_center + 8);
+ } else {
+ ctx.textAlign = "right";
+ ctx.fillText(feature_name, f_start - 2 + left_offset, y_center + 8);
+ }
ctx.fillStyle = block_color;
}
if (feature_blocks) {
@@ -851,7 +865,6 @@
j++;
}
}
-
parent_element.append( new_canvas );
return new_canvas;
}, gen_options: function(track_id) {
diff -r fe14a58568ad -r 155f2e89a02b static/trackster.css
--- a/static/trackster.css Wed Apr 21 16:42:09 2010 -0400
+++ b/static/trackster.css Wed Apr 21 18:35:59 2010 -0400
@@ -38,7 +38,7 @@
margin: 0px;
color: white;
margin-top: -6px;
- margin-bottom: -3px;
+ margin-bottom: -4px;
}
#overview-viewport {
diff -r fe14a58568ad -r 155f2e89a02b templates/tracks/browser.mako
--- a/templates/tracks/browser.mako Wed Apr 21 16:42:09 2010 -0400
+++ b/templates/tracks/browser.mako Wed Apr 21 18:35:59 2010 -0400
@@ -34,7 +34,6 @@
<div id="viewport-container" style="overflow-x: hidden; overflow-y: auto;">
<div id="viewport"></div>
</div>
-
</div>
<div id="nav-container" style="width:100%;">
<div id="nav-labeltrack"></div>
@@ -161,8 +160,7 @@
// To adjust the size of the viewport to fit the fixed-height footer
var refresh = function( e ) {
- $("#content").height( $(window).height() - $("#nav-container").height() - $("#masthead").height());
- $("#viewport-container").height( $("#content").height() - $("#top-labeltrack").height() - $("#nav-labeltrack").height() );
+ $("#viewport-container").height( $(window).height() - 120 );
$("#nav-container").width( $("#center").width() );
view.redraw();
};
@@ -176,7 +174,7 @@
this.current_height = e.clientY;
this.current_x = e.offsetX;
}).bind( "drag", function( e ) {
- var container = $(this).parent();
+ var container = $(this);
var delta = e.offsetX - this.current_x;
var new_scroll = container.scrollTop() - (e.clientY - this.current_height);
if ( new_scroll < container.get(0).scrollHeight - container.height() ) {
diff -r fe14a58568ad -r 155f2e89a02b templates/tracks/new_browser.mako
--- a/templates/tracks/new_browser.mako Wed Apr 21 16:42:09 2010 -0400
+++ b/templates/tracks/new_browser.mako Wed Apr 21 18:35:59 2010 -0400
@@ -1,4 +1,4 @@
-<form id="new-browser-form" action="javascript:void();" method="post" onsubmit="return false;">
+<form id="new-browser-form" action="javascript:void(0);" method="post" onsubmit="return false;">
<div class="form-row">
<label for="new-title">Browser name:</label>
<div class="form-row-input">
diff -r fe14a58568ad -r 155f2e89a02b templates/user/dbkeys.mako
--- a/templates/user/dbkeys.mako Wed Apr 21 16:42:09 2010 -0400
+++ b/templates/user/dbkeys.mako Wed Apr 21 18:35:59 2010 -0400
@@ -18,7 +18,7 @@
$(".db_hide").each(function() {
var pre = $(this);
pre.hide();
- pre.siblings("span").wrap( "<a href='javascript:void();'></a>" ).click( function() {
+ pre.siblings("span").wrap( "<a href='javascript:void(0);'></a>" ).click( function() {
pre.toggle();
});
});
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/fe14a58568ad
changeset: 3678:fe14a58568ad
user: rc
date: Wed Apr 21 16:42:09 2010 -0400
description:
lims: fixed data transfer bugs
diffstat:
lib/galaxy/web/controllers/requests_admin.py | 2 +-
scripts/galaxy_messaging/server/amqp_consumer.py | 15 +-
scripts/galaxy_messaging/server/data_transfer.py | 29 ++-
scripts/galaxy_messaging/server/galaxyweb_interface.py | 140 +++++++---------
4 files changed, 90 insertions(+), 96 deletions(-)
diffs (314 lines):
diff -r e600ab3fadc1 -r fe14a58568ad lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Wed Apr 21 11:44:19 2010 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Wed Apr 21 16:42:09 2010 -0400
@@ -1680,7 +1680,7 @@
virtual_host=trans.app.config.amqp['virtual_host'],
insist=False)
chan = conn.channel()
- msg = amqp.Message(data,
+ msg = amqp.Message(data.replace('\n', '').replace('\r', ''),
content_type='text/plain',
application_headers={'msg_type': 'data_transfer'})
msg.properties["delivery_mode"] = 2
diff -r e600ab3fadc1 -r fe14a58568ad scripts/galaxy_messaging/server/amqp_consumer.py
--- a/scripts/galaxy_messaging/server/amqp_consumer.py Wed Apr 21 11:44:19 2010 -0400
+++ b/scripts/galaxy_messaging/server/amqp_consumer.py Wed Apr 21 16:42:09 2010 -0400
@@ -37,6 +37,7 @@
log.addHandler(fh)
global dbconnstr
+global config
def get_value(dom, tag_name):
'''
@@ -64,17 +65,20 @@
return rc
def recv_callback(msg):
+ global config
# check the meesage type.
msg_type = msg.properties['application_headers'].get('msg_type')
log.debug('\nMESSAGE RECVD: '+str(msg_type))
if msg_type == 'data_transfer':
log.debug('DATA TRANSFER')
# fork a new process to transfer datasets
- transfer_script = "scripts/galaxy_messaging/server/data_transfer.py"
- cmd = ( "python",
- transfer_script,
- msg.body )
- pid = subprocess.Popen(cmd).pid
+ transfer_script = os.path.join(os.getcwd(),
+ "scripts/galaxy_messaging/server/data_transfer.py")
+ cmd = '%s "%s" "%s" "%s"' % ("python",
+ transfer_script,
+ msg.body,
+ config.get("app:main", "id_secret") )
+ pid = subprocess.Popen(cmd, shell=True).pid
log.debug('Started process (%i): %s' % (pid, str(cmd)))
elif msg_type == 'sample_state_update':
log.debug('SAMPLE STATE UPDATE')
@@ -95,6 +99,7 @@
if len(sys.argv) < 2:
print 'Usage: python amqp_consumer.py <Galaxy config file>'
return
+ global config
config = ConfigParser.ConfigParser()
config.read(sys.argv[1])
global dbconnstr
diff -r e600ab3fadc1 -r fe14a58568ad scripts/galaxy_messaging/server/data_transfer.py
--- a/scripts/galaxy_messaging/server/data_transfer.py Wed Apr 21 11:44:19 2010 -0400
+++ b/scripts/galaxy_messaging/server/data_transfer.py Wed Apr 21 16:42:09 2010 -0400
@@ -8,7 +8,7 @@
Usage:
-python data_transfer.py <data_transfer_xml>
+python data_transfer.py <data_transfer_xml> <config_id_secret>
"""
@@ -57,7 +57,7 @@
class DataTransfer(object):
- def __init__(self, msg):
+ def __init__(self, msg, config_id_secret):
log.info(msg)
self.dom = xml.dom.minidom.parseString(msg)
self.host = self.get_value(self.dom, 'data_host')
@@ -67,6 +67,7 @@
self.library_id = self.get_value(self.dom, 'library_id')
self.folder_id = self.get_value(self.dom, 'folder_id')
self.dataset_files = []
+ self.config_id_secret = config_id_secret
count=0
while True:
index = self.get_value_index(self.dom, 'index', count)
@@ -137,7 +138,7 @@
'''
log.error(traceback.format_exc())
log.error('FATAL ERROR.'+msg)
- self.update_status('Error.', 'All', msg)
+ self.update_status('Error', 'All', msg+"\n"+traceback.format_exc())
sys.exit(1)
def transfer_files(self):
@@ -175,18 +176,24 @@
This method adds the dataset file to the target data library & folder
by opening the corresponding url in Galaxy server running.
'''
- self.update_status(Sample.transfer_status.ADD_TO_LIBRARY)
- galaxyweb = GalaxyWebInterface(self.server_host, self.server_port,
- self.datatx_email, self.datatx_password)
- galaxyweb.add_to_library(self.server_dir, self.library_id, self.folder_id)
- galaxyweb.logout()
-
+ try:
+ self.update_status(Sample.transfer_status.ADD_TO_LIBRARY)
+ log.debug("dir:%s, lib:%s, folder:%s" % (self.server_dir, str(self.library_id), str(self.folder_id)))
+ galaxyweb = GalaxyWebInterface(self.server_host, self.server_port,
+ self.datatx_email, self.datatx_password,
+ self.config_id_secret)
+ galaxyweb.add_to_library(self.server_dir, self.library_id, self.folder_id)
+ galaxyweb.logout()
+ except Exception, e:
+ log.debug(e)
+ self.error_and_exit(str(e))
+
def update_status(self, status, dataset_index='All', msg=''):
'''
Update the data transfer status for this dataset in the database
'''
try:
- log.debug('Setting status "%s" for sample "%s"' % ( status, str(dataset_index) ) )
+ log.debug('Setting status "%s" for dataset "%s"' % ( status, str(dataset_index) ) )
df = from_json_string(self.galaxydb.get_sample_dataset_files(self.sample_id))
if dataset_index == 'All':
for dataset in self.dataset_files:
@@ -240,7 +247,7 @@
#
# Start the daemon
#
- dt = DataTransfer(sys.argv[1])
+ dt = DataTransfer(sys.argv[1], sys.argv[2])
dt.start()
sys.exit(0)
diff -r e600ab3fadc1 -r fe14a58568ad scripts/galaxy_messaging/server/galaxyweb_interface.py
--- a/scripts/galaxy_messaging/server/galaxyweb_interface.py Wed Apr 21 11:44:19 2010 -0400
+++ b/scripts/galaxy_messaging/server/galaxyweb_interface.py Wed Apr 21 16:42:09 2010 -0400
@@ -1,6 +1,5 @@
import ConfigParser
import sys, os
-import serial
import array
import time
import optparse,array
@@ -24,97 +23,81 @@
class GalaxyWebInterface(object):
- def __init__(self, server_host, server_port, datatx_email, datatx_password):
- self.server_host = server_host#config.get("main", "server_host")
- self.server_port = server_port#config.get("main", "server_port")
- self.datatx_email = datatx_email#config.get("main", "datatx_email")
- self.datatx_password = datatx_password#config.get("main", "datatx_password")
- try:
- # create url
- self.base_url = "http://%s:%s" % (self.server_host, self.server_port)
- # login
- url = "%s/user/login?email=%s&password=%s&login_button=Login" % (self.base_url, self.datatx_email, self.datatx_password)
- cj = cookielib.CookieJar()
- self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
- #print url
+ def __init__(self, server_host, server_port, datatx_email, datatx_password, config_id_secret):
+ self.server_host = server_host
+ self.server_port = server_port
+ self.datatx_email = datatx_email
+ self.datatx_password = datatx_password
+ self.config_id_secret = config_id_secret
+ # create url
+ self.base_url = "http://%s:%s" % (self.server_host, self.server_port)
+ # login
+ url = "%s/user/login?email=%s&password=%s&login_button=Login" % (self.base_url, self.datatx_email, self.datatx_password)
+ cj = cookielib.CookieJar()
+ self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
+ #print url
+ f = self.opener.open(url)
+ if f.read().find("ogged in as "+self.datatx_email) == -1:
+ # if the user doesnt exist, create the user
+ url = "%s/user/create?email=%s&username=%s&password=%s&confirm=%s&create_user_button=Submit" % ( self.base_url, self.datatx_email, self.datatx_email, self.datatx_password, self.datatx_password )
f = self.opener.open(url)
if f.read().find("ogged in as "+self.datatx_email) == -1:
- # if the user doesnt exist, create the user
- url = "%s/user/create?email=%s&username=%s&password=%s&confirm=%s&create_user_button=Submit" % ( self.base_url, self.datatx_email, self.datatx_email, self.datatx_password, self.datatx_password )
- f = self.opener.open(url)
- if f.read().find("ogged in as "+self.datatx_email) == -1:
- raise "The "+self.datatx_email+" user could not login to Galaxy"
- except:
- print traceback.format_exc()
- sys.exit(1)
+ raise Exception("The "+self.datatx_email+" user could not login to Galaxy")
def add_to_library(self, server_dir, library_id, folder_id, dbkey=''):
'''
This method adds the dataset file to the target data library & folder
by opening the corresponding url in Galaxy server running.
'''
- try:
- params = urllib.urlencode(dict( cntrller='library_admin',
- tool_id='upload1',
- tool_state='None',
- library_id=self.encode_id(library_id),
- folder_id=self.encode_id(folder_id),
- upload_option='upload_directory',
- file_type='auto',
- server_dir=os.path.basename(server_dir),
- dbkey=dbkey,
- show_dataset_id='True',
- runtool_btn='Upload to library'))
- #url = "http://localhost:8080/library_common/upload_library_dataset?cntrller=librar…"
- #url = base_url+"/library_common/upload_library_dataset?library_id=adb5f5c93f827949&tool_id=upload1&file_type=auto&server_dir=datatx_22858&dbkey=%3F&upload_option=upload_directory&folder_id=529fd61ab1c6cc36&cntrller=library_admin&tool_state=None&runtool_btn=Upload+to+library"
- url = self.base_url+"/library_common/upload_library_dataset"
- #print url
- #print params
- f = self.opener.open(url, params)
- if f.read().find("Data Library") == -1:
- raise "Dataset could not be uploaded to the data library"
- except:
- print traceback.format_exc()
- sys.exit(1)
+ params = urllib.urlencode(dict( cntrller='library_admin',
+ tool_id='upload1',
+ tool_state='None',
+ library_id=self.encode_id(library_id),
+ folder_id=self.encode_id(folder_id),
+ upload_option='upload_directory',
+ file_type='auto',
+ server_dir=os.path.basename(server_dir),
+ dbkey=dbkey,
+ show_dataset_id='True',
+ runtool_btn='Upload to library'))
+ #url = "http://localhost:8080/library_common/upload_library_dataset?cntrller=librar…"
+ #url = base_url+"/library_common/upload_library_dataset?library_id=adb5f5c93f827949&tool_id=upload1&file_type=auto&server_dir=datatx_22858&dbkey=%3F&upload_option=upload_directory&folder_id=529fd61ab1c6cc36&cntrller=library_admin&tool_state=None&runtool_btn=Upload+to+library"
+ url = self.base_url+"/library_common/upload_library_dataset"
+ #print url
+ #print params
+ f = self.opener.open(url, params)
+ if f.read().find("Data Library") == -1:
+ raise Exception("Dataset could not be uploaded to the data library. URL: %s, PARAMS=%s" % (url, params))
def import_to_history(self, ldda_id, library_id, folder_id):
- try:
- params = urllib.urlencode(dict( cntrller='library_admin',
- show_deleted='False',
- library_id=self.encode_id(library_id),
- folder_id=self.encode_id(folder_id),
- ldda_ids=self.encode_id(ldda_id),
- do_action='import_to_history',
- use_panels='False'))
- #url = "http://lion.bx.psu.edu:8080/library_common/act_on_multiple_datasets?library…"
- #url = base_url+"/library_common/upload_library_dataset?library_id=adb5f5c93f827949&tool_id=upload1&file_type=auto&server_dir=datatx_22858&dbkey=%3F&upload_option=upload_directory&folder_id=529fd61ab1c6cc36&cntrller=library_admin&tool_state=None&runtool_btn=Upload+to+library"
- url = self.base_url+"/library_common/act_on_multiple_datasets"
- #print url
- #print params
- f = self.opener.open(url, params)
- x = f.read()
- if x.find("1 dataset(s) have been imported into your history.") == -1:
- #print x
- raise Exception("Dataset could not be imported into history")
- except:
- print traceback.format_exc()
- sys.exit(1)
-
+ params = urllib.urlencode(dict( cntrller='library_admin',
+ show_deleted='False',
+ library_id=self.encode_id(library_id),
+ folder_id=self.encode_id(folder_id),
+ ldda_ids=self.encode_id(ldda_id),
+ do_action='import_to_history',
+ use_panels='False'))
+ #url = "http://lion.bx.psu.edu:8080/library_common/act_on_multiple_datasets?library…"
+ #url = base_url+"/library_common/upload_library_dataset?library_id=adb5f5c93f827949&tool_id=upload1&file_type=auto&server_dir=datatx_22858&dbkey=%3F&upload_option=upload_directory&folder_id=529fd61ab1c6cc36&cntrller=library_admin&tool_state=None&runtool_btn=Upload+to+library"
+ url = self.base_url+"/library_common/act_on_multiple_datasets"
+ #print url
+ #print params
+ f = self.opener.open(url, params)
+ x = f.read()
+ if x.find("1 dataset(s) have been imported into your history.") == -1:
+ #print x
+ raise Exception("Dataset could not be imported into history")
def run_workflow(self, workflow_id, hid, workflow_step):
input = str(workflow_step)+'|input'
- try:
- params = urllib.urlencode({'id':self.encode_id(workflow_id),
- 'run_workflow': 'Run workflow',
- input: hid})
- url = self.base_url+"/workflow/run"
- #print url+'?'+params
- f = self.opener.open(url, params)
+ params = urllib.urlencode({'id':self.encode_id(workflow_id),
+ 'run_workflow': 'Run workflow',
+ input: hid})
+ url = self.base_url+"/workflow/run"
+ #print url+'?'+params
+ f = self.opener.open(url, params)
# if f.read().find("1 dataset(s) have been imported into your history.") == -1:
# raise Exception("Error in running the workflow")
- except:
- print traceback.format_exc()
- sys.exit(1)
def logout(self):
@@ -122,8 +105,7 @@
f = self.opener.open(self.base_url+'/user/logout')
def encode_id(self, obj_id ):
- id_secret = 'changethisinproductiontoo'
- id_cipher = Blowfish.new( id_secret )
+ id_cipher = Blowfish.new( self.config_id_secret )
# Convert to string
s = str( obj_id )
# Pad to a multiple of 8 with leading "!"
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/e600ab3fadc1
changeset: 3677:e600ab3fadc1
user: rc
date: Wed Apr 21 11:44:19 2010 -0400
description:
fixed the amqp config bug
diffstat:
lib/galaxy/config.py | 6 +++++-
1 files changed, 5 insertions(+), 1 deletions(-)
diffs (16 lines):
diff -r afbdedd0e758 -r e600ab3fadc1 lib/galaxy/config.py
--- a/lib/galaxy/config.py Wed Apr 21 11:42:50 2010 -0400
+++ b/lib/galaxy/config.py Wed Apr 21 11:44:19 2010 -0400
@@ -125,7 +125,11 @@
self.enable_cloud_execution = string_as_bool( kwargs.get( 'enable_cloud_execution', 'False' ) )
# Galaxy messaging (AMQP) configuration options
self.amqp = {}
- for k, v in global_conf_parser.items("galaxy_amqp"):
+ try:
+ amqp_config = global_conf_parser.items("galaxy_amqp")
+ except ConfigParser.NoSectionError:
+ amqp_config = {}
+ for k, v in amqp_config:
self.amqp[k] = v
def get( self, key, default ):
return self.config_dict.get( key, default )
1
0

10 May '10
details: http://www.bx.psu.edu/hg/galaxy/rev/afbdedd0e758
changeset: 3676:afbdedd0e758
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Wed Apr 21 11:42:50 2010 -0400
description:
Cufflinks tools update; added cuffdiff wrapper.
diffstat:
tool_conf.xml.sample | 2 +
tools/ngs_rna/cuffcompare_wrapper.xml | 14 +-
tools/ngs_rna/cuffdiff_wrapper.py | 129 ++++++++++++++++++++++++++++++++++
tools/ngs_rna/cuffdiff_wrapper.xml | 117 ++++++++++++++++++++++++++++++
tools/ngs_rna/cufflinks_wrapper.py | 2 +-
tools/ngs_rna/cufflinks_wrapper.xml | 22 +++--
6 files changed, 270 insertions(+), 16 deletions(-)
diffs (367 lines):
diff -r d6fddb034db7 -r afbdedd0e758 tool_conf.xml.sample
--- a/tool_conf.xml.sample Wed Apr 21 11:35:21 2010 -0400
+++ b/tool_conf.xml.sample Wed Apr 21 11:42:50 2010 -0400
@@ -228,6 +228,8 @@
<section name="NGS: Expression Analysis" id="ngs-rna-tools">
<tool file="ngs_rna/tophat_wrapper.xml" />
<tool file="ngs_rna/cufflinks_wrapper.xml" />
+ <tool file="ngs_rna/cuffcompare_wrapper.xml" />
+ <tool file="ngs_rna/cuffdiff_wrapper.xml" />
</section>
<section name="NGS: SAM Tools" id="samtools">
<tool file="samtools/sam_bitwise_flag_filter.xml" />
diff -r d6fddb034db7 -r afbdedd0e758 tools/ngs_rna/cuffcompare_wrapper.xml
--- a/tools/ngs_rna/cuffcompare_wrapper.xml Wed Apr 21 11:35:21 2010 -0400
+++ b/tools/ngs_rna/cuffcompare_wrapper.xml Wed Apr 21 11:42:50 2010 -0400
@@ -15,15 +15,15 @@
$input2
</command>
<inputs>
- <param format="gtf" name="input1" type="data" label="SAM file of aligned RNA-Seq reads" help=""/>
- <param format="gtf" name="input2" type="data" label="SAM file of aligned RNA-Seq reads" help=""/>
+ <param format="gtf" name="input1" type="data" label="GTF file produced by Cufflinks" help=""/>
+ <param format="gtf" name="input2" type="data" label="GTF file produced by Cufflinks" help=""/>
<conditional name="annotation">
- <param name="use_ref_annotation" type="select" label="Use Reference Annotation?">
+ <param name="use_ref_annotation" type="select" label="Use Reference Annotation">
<option value="No">No</option>
<option value="Yes">Yes</option>
</param>
<when value="Yes">
- <param format="gtf" name="reference_annotation" type="data" label="Reference Annotation" help=""/>
+ <param format="gtf" name="reference_annotation" type="data" label="Reference Annotation" help="Make sure your annotation file is in GTF format and that Galaxy knows that your file is GTF--not GFF."/>
<param name="ignore_nonoverlapping_reference" type="boolean" label="Ignore reference transcripts that are not overlapped by any transcript in input files"/>
</when>
<when value="No">
@@ -32,9 +32,9 @@
</inputs>
<outputs>
- <data format="gtf" name="transcripts_combined" />
- <data format="tracking" name="transcripts_tracking" />
- <data format="gtf" name="transcripts_accuracy" />
+ <data format="gtf" name="transcripts_combined" label="Cuffcompare on data ${input1.hid} and data ${input2.hid}: combined transcripts"/>
+ <data format="tracking" name="transcripts_tracking" label="Cuffcompare on data ${input1.hid} and data ${input2.hid}: transcript tracking"/>
+ <data format="gtf" name="transcripts_accuracy" label="Cuffcompare on data ${input1.hid} and data ${input2.hid}: transcript accuracy"/>
</outputs>
<tests>
diff -r d6fddb034db7 -r afbdedd0e758 tools/ngs_rna/cuffdiff_wrapper.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/ngs_rna/cuffdiff_wrapper.py Wed Apr 21 11:42:50 2010 -0400
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+
+import optparse, os, shutil, subprocess, sys, tempfile
+
+def stop_err( msg ):
+ sys.stderr.write( "%s\n" % msg )
+ sys.exit()
+
+def __main__():
+ #Parse Command Line
+ parser = optparse.OptionParser()
+
+ # Cuffdiff options.
+ parser.add_option( '-s', '--inner-dist-std-dev', dest='inner_dist_std_dev', help='The standard deviation for the distribution on inner distances between mate pairs. The default is 20bp.' )
+ parser.add_option( '-p', '--num-threads', dest='num_threads', help='Use this many threads to align reads. The default is 1.' )
+ parser.add_option( '-m', '--inner-mean-dist', dest='inner_mean_dist', help='This is the expected (mean) inner distance between mate pairs. \
+ For, example, for paired end runs with fragments selected at 300bp, \
+ where each end is 50bp, you should set -r to be 200. The default is 45bp.')
+ parser.add_option( '-Q', '--min-mapqual', dest='min_mapqual', help='Instructs Cufflinks to ignore alignments with a SAM mapping quality lower than this number. The default is 0.' )
+ parser.add_option( '-c', '--min-alignment-count', dest='min_alignment_count', help='The minimum number of alignments in a locus for needed to conduct significance testing on changes in that locus observed between samples. If no testing is performed, changes in the locus are deemed not signficant, and the locus\' observed changes don\'t contribute to correction for multiple testing. The default is 1,000 fragment alignments (up to 2,000 paired reads).' )
+ parser.add_option( '--FDR', dest='FDR', help='The allowed false discovery rate. The default is 0.05.' )
+
+ # Advanced Options:
+ parser.add_option( '--num-importance-samples', dest='num_importance_samples', help='Sets the number of importance samples generated for each locus during abundance estimation. Default: 1000' )
+ parser.add_option( '--max-mle-iterations', dest='max_mle_iterations', help='Sets the number of iterations allowed during maximum likelihood estimation of abundances. Default: 5000' )
+
+ # Wrapper / Galaxy options.
+ parser.add_option( '-A', '--inputA', dest='inputA', help='A transcript GTF file produced by cufflinks, cuffcompare, or other source.')
+ parser.add_option( '-1', '--input1', dest='input1', help='File of RNA-Seq read alignments in the SAM format. SAM is a standard short read alignment, that allows aligners to attach custom tags to individual alignments, and Cufflinks requires that the alignments you supply have some of these tags. Please see Input formats for more details.' )
+ parser.add_option( '-2', '--input2', dest='input2', help='File of RNA-Seq read alignments in the SAM format. SAM is a standard short read alignment, that allows aligners to attach custom tags to individual alignments, and Cufflinks requires that the alignments you supply have some of these tags. Please see Input formats for more details.' )
+
+ parser.add_option( "--isoforms_fpkm_tracking_output", dest="isoforms_fpkm_tracking_output" )
+ parser.add_option( "--genes_fpkm_tracking_output", dest="genes_fpkm_tracking_output" )
+ parser.add_option( "--cds_fpkm_tracking_output", dest="cds_fpkm_tracking_output" )
+ parser.add_option( "--tss_groups_fpkm_tracking_output", dest="tss_groups_fpkm_tracking_output" )
+ parser.add_option( "--isoforms_exp_output", dest="isoforms_exp_output" )
+ parser.add_option( "--genes_exp_output", dest="genes_exp_output" )
+ parser.add_option( "--tss_groups_exp_output", dest="tss_groups_exp_output" )
+ parser.add_option( "--cds_exp_fpkm_tracking_output", dest="cds_exp_fpkm_tracking_output" )
+ parser.add_option( "--splicing_diff_output", dest="splicing_diff_output" )
+ parser.add_option( "--cds_diff_output", dest="cds_diff_output" )
+ parser.add_option( "--promoters_diff_output", dest="promoters_diff_output" )
+
+ (options, args) = parser.parse_args()
+
+ # Make temp directory for output.
+ tmp_output_dir = tempfile.mkdtemp()
+
+ # Build command.
+
+ # Base.
+ cmd = "cuffdiff"
+
+ # Add options.
+ if options.inner_dist_std_dev:
+ cmd += ( " -s %i" % int ( options.inner_dist_std_dev ) )
+ if options.num_threads:
+ cmd += ( " -p %i" % int ( options.num_threads ) )
+ if options.inner_mean_dist:
+ cmd += ( " -m %i" % int ( options.inner_mean_dist ) )
+ if options.min_mapqual:
+ cmd += ( " -Q %i" % int ( options.min_mapqual ) )
+ if options.min_alignment_count:
+ cmd += ( " -c %i" % int ( options.min_alignment_count ) )
+ if options.FDR:
+ cmd += ( " --FDR %f" % float( options.FDR ) )
+ if options.num_importance_samples:
+ cmd += ( " --num-importance-samples %i" % int ( options.num_importance_samples ) )
+ if options.max_mle_iterations:
+ cmd += ( " --max-mle-iterations %i" % int ( options.max_mle_iterations ) )
+
+ # Add inputs.
+ cmd += " " + options.inputA + " " + options.input1 + " " + options.input2
+ print cmd
+
+ # Run command.
+ try:
+ tmp_name = tempfile.NamedTemporaryFile( dir=tmp_output_dir ).name
+ tmp_stderr = open( tmp_name, 'wb' )
+ proc = subprocess.Popen( args=cmd, shell=True, cwd=tmp_output_dir, stderr=tmp_stderr.fileno() )
+ returncode = proc.wait()
+ tmp_stderr.close()
+
+ # Get stderr, allowing for case where it's very large.
+ tmp_stderr = open( tmp_name, 'rb' )
+ stderr = ''
+ buffsize = 1048576
+ try:
+ while True:
+ stderr += tmp_stderr.read( buffsize )
+ if not stderr or len( stderr ) % buffsize != 0:
+ break
+ except OverflowError:
+ pass
+ tmp_stderr.close()
+
+ # Error checking.
+ if returncode != 0:
+ raise Exception, stderr
+
+ # check that there are results in the output file
+ if len( open( tmp_output_dir + "/isoforms.fpkm_tracking", 'rb' ).read().strip() ) == 0:
+ raise Exception, 'The main output file is empty, there may be an error with your input file or settings.'
+ except Exception, e:
+ stop_err( 'Error running cuffdiff. ' + str( e ) )
+
+
+ # Copy output files from tmp directory to specified files.
+ try:
+ try:
+ shutil.copyfile( tmp_output_dir + "/isoforms.fpkm_tracking", options.isoforms_fpkm_tracking_output )
+ shutil.copyfile( tmp_output_dir + "/genes.fpkm_tracking", options.genes_fpkm_tracking_output )
+ shutil.copyfile( tmp_output_dir + "/cds.fpkm_tracking", options.cds_fpkm_tracking_output )
+ shutil.copyfile( tmp_output_dir + "/tss_groups.fpkm_tracking", options.tss_groups_fpkm_tracking_output )
+ shutil.copyfile( tmp_output_dir + "/0_1_isoform_exp.diff", options.isoforms_exp_output )
+ shutil.copyfile( tmp_output_dir + "/0_1_gene_exp.diff", options.genes_exp_output )
+ shutil.copyfile( tmp_output_dir + "/0_1_tss_group_exp.diff", options.tss_groups_exp_output )
+ shutil.copyfile( tmp_output_dir + "/0_1_splicing.diff", options.splicing_diff_output )
+ shutil.copyfile( tmp_output_dir + "/0_1_cds.diff", options.cds_diff_output )
+ shutil.copyfile( tmp_output_dir + "/0_1_cds_exp.diff", options.cds_diff_output )
+ shutil.copyfile( tmp_output_dir + "/0_1_promoters.diff", options.promoters_diff_output )
+ except Exception, e:
+ stop_err( 'Error in cuffdiff:\n' + str( e ) )
+ finally:
+ # Clean up temp dirs
+ if os.path.exists( tmp_output_dir ):
+ shutil.rmtree( tmp_output_dir )
+
+if __name__=="__main__": __main__()
\ No newline at end of file
diff -r d6fddb034db7 -r afbdedd0e758 tools/ngs_rna/cuffdiff_wrapper.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/ngs_rna/cuffdiff_wrapper.xml Wed Apr 21 11:42:50 2010 -0400
@@ -0,0 +1,117 @@
+<tool id="cuffdiff" name="Cuffdiff" version="0.8.2">
+ <description>find significant changes in transcript expression, splicing, and promoter use</description>
+ <command interpreter="python">
+ cuffdiff_wrapper.py
+ --FDR=$fdr
+ --num-threads="4"
+ --min-mapqual=$min_mapqual
+ --min-alignment-count=$min_alignment_count
+
+ --isoforms_fpkm_tracking_output=$isoforms_fpkm_tracking
+ --genes_fpkm_tracking_output=$genes_fpkm_tracking
+ --cds_fpkm_tracking_output=$cds_fpkm_tracking
+ --tss_groups_fpkm_tracking_output=$tss_groups_fpkm_tracking
+ --isoforms_exp_output=$isoforms_exp
+ --genes_exp_output=$genes_exp
+ --tss_groups_exp_output=$tss_groups_exp
+ --cds_exp_fpkm_tracking_output=$cds_exp_fpkm_tracking
+ --splicing_diff_output=$splicing_diff
+ --cds_diff_output=$cds_diff
+ --promoters_diff_output=$promoters_diff
+
+ --inputA=$gtf_input
+ --input1=$aligned_reads1
+ --input2=$aligned_reads2
+ </command>
+ <inputs>
+ <param format="gtf" name="gtf_input" type="data" label="Transcripts" help="A transcript GTF file produced by cufflinks, cuffcompare, or other source."/>
+ <param format="sam" name="aligned_reads1" type="data" label="SAM file of aligned RNA-Seq reads" help=""/>
+ <param format="sam" name="aligned_reads2" type="data" label="SAM file of aligned RNA-Seq reads" help=""/>
+ <param name="fdr" type="float" value="0.05" label="False Discovery Rate" help="The allowed false discovery rate."/>
+ <param name="min_mapqual" type="integer" value="0" label="Min SAM Mapping Quality" help="Instructs Cufflinks to ignore alignments with a SAM mapping quality lower than this number."/>
+ <param name="min_alignment_count" type="integer" value="0" label="Min Alignment Count" help="The minimum number of alignments in a locus needed to conduct significance testing on changes in that locus observed between samples."/>
+ <conditional name="singlePaired">
+ <param name="sPaired" type="select" label="Is this library mate-paired?">
+ <option value="single">Single-end</option>
+ <option value="paired">Paired-end</option>
+ </param>
+ <when value="single"></when>
+ <when value="paired">
+ <param name="mean_inner_distance" type="integer" value="20" label="Mean Inner Distance between Mate Pairs"/>
+ <param name="inner_distance_std_dev" type="integer" value="20" label="Standard Deviation for Inner Distance between Mate Pairs"/>
+ </when>
+ </conditional>
+ </inputs>
+
+ <outputs>
+ <data format="tabular" name="isoforms_exp" label="Cuffdiff on data ${gtf_input.hid}, data ${aligned_reads1.hid}, and data ${aligned_reads2.hid}: isoform expression"/>
+ <data format="tabular" name="genes_exp" label="Cuffdiff on data ${gtf_input.hid}, data ${aligned_reads1.hid}, and data ${aligned_reads2.hid}: gene expression"/>
+ <data format="tabular" name="tss_groups_exp" label="Cuffdiff on data ${gtf_input.hid}, data ${aligned_reads1.hid}, and data ${aligned_reads2.hid}: TSS groups expression"/>
+ <data format="tabular" name="cds_exp_fpkm_tracking" label="Cuffdiff on data ${gtf_input.hid}, data ${aligned_reads1.hid}, and data ${aligned_reads2.hid}: CDS Expression FPKM Tracking"/>
+ <data format="tabular" name="splicing_diff" label="Cuffdiff on data ${gtf_input.hid}, data ${aligned_reads1.hid}, and data ${aligned_reads2.hid}: splicing diff"/>
+ <data format="tabular" name="cds_diff" label="Cuffdiff on data ${gtf_input.hid}, data ${aligned_reads1.hid}, and data ${aligned_reads2.hid}: CDS diff"/>
+ <data format="tabular" name="promoters_diff" label="Cuffdiff on data ${gtf_input.hid}, data ${aligned_reads1.hid}, and data ${aligned_reads2.hid}: promoters diff"/>
+ <data format="tabular" name="tss_groups_fpkm_tracking" label="Cuffdiff on data ${gtf_input.hid}, data ${aligned_reads1.hid}, and data ${aligned_reads2.hid}: TSS groups FPKM tracking" />
+ <data format="tabular" name="cds_fpkm_tracking" label="Cuffdiff on data ${gtf_input.hid}, data ${aligned_reads1.hid}, and data ${aligned_reads2.hid}: CDS FPKM tracking"/>
+ <data format="tabular" name="genes_fpkm_tracking" label="Cuffdiff on data ${gtf_input.hid}, data ${aligned_reads1.hid}, and data ${aligned_reads2.hid}: gene FPKM tracking"/>
+ <data format="tabular" name="isoforms_fpkm_tracking" label="Cuffdiff on data ${gtf_input.hid}, data ${aligned_reads1.hid}, and data ${aligned_reads2.hid}: isoform FPKM tracking"/>
+ </outputs>
+
+ <tests>
+ <test>
+ </test>
+ </tests>
+
+ <help>
+**Cuffdiff Overview**
+
+Cuffdiff is part of Cufflinks_. Cuffdiff finds significant changes in transcript expression, splicing, and promoter use. Please cite: Trapnell C, Williams BA, Pertea G, Mortazavi AM, Kwan G, van Baren MJ, Salzberg SL, Wold B, Pachter L. Transcript assembly and abundance estimation from RNA-Seq reveals thousands of new transcripts and switching among isoforms. (manuscript in press)
+
+.. _Cufflinks: http://cufflinks.cbcb.umd.edu/
+
+------
+
+**Know what you are doing**
+
+.. class:: warningmark
+
+There is no such thing (yet) as an automated gearshift in expression analysis. It is all like stick-shift driving in San Francisco. In other words, running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy.
+
+.. __: http://cufflinks.cbcb.umd.edu/manual.html#cuffdiff
+
+------
+
+**Input format**
+
+Cuffdiff takes Cufflinks or Cuffcompare GTF files as input along with two SAM files containing the fragment alignments for two or more samples.
+
+.. ___: http://www.todo.org
+
+------
+
+**Outputs**
+
+TODO
+
+-------
+
+**Settings**
+
+All of the options have a default value. You can change any of them. Most of the options in Cuffdiff have been implemented here.
+
+------
+
+**Cuffdiff parameter list**
+
+This is a list of implemented Cuffdiff options::
+
+ -m INT This is the expected (mean) inner distance between mate pairs. For example, for paired end runs with fragments selected at 300bp, where each end is 50bp, you should set -m to be 200. The default is 45bp.
+ -s INT The standard deviation for the distribution on inner distances between mate pairs. The default is 20bp.
+ -Q Instructs Cufflinks to ignore alignments with a SAM mapping quality lower than this number. The default is 0.
+ -c INT The minimum number of alignments in a locus needed to conduct significance testing on changes in that locus observed between samples. If no testing is performed, changes in the locus are deemed not significant, and the locus' observed changes don't contribute to correction for multiple testing. The default is 1,000 fragment alignments (up to 2,000 paired reads).
+ --FDR FLOAT The allowed false discovery rate. The default is 0.05.
+ --num-importance-samples INT Sets the number of importance samples generated for each locus during abundance estimation. Default: 1000
+ --max-mle-iterations INT Sets the number of iterations allowed during maximum likelihood estimation of abundances. Default: 5000
+
+ </help>
+</tool>
diff -r d6fddb034db7 -r afbdedd0e758 tools/ngs_rna/cufflinks_wrapper.py
--- a/tools/ngs_rna/cufflinks_wrapper.py Wed Apr 21 11:35:21 2010 -0400
+++ b/tools/ngs_rna/cufflinks_wrapper.py Wed Apr 21 11:42:50 2010 -0400
@@ -56,7 +56,7 @@
if options.min_mapqual:
cmd += ( " -Q %i" % int ( options.min_mapqual ) )
if options.GTF:
- cmd += ( " -G %i" % options.GTF )
+ cmd += ( " -G %s" % options.GTF )
if options.num_importance_samples:
cmd += ( " --num-importance-samples %i" % int ( options.num_importance_samples ) )
if options.max_mle_iterations:
diff -r d6fddb034db7 -r afbdedd0e758 tools/ngs_rna/cufflinks_wrapper.xml
--- a/tools/ngs_rna/cufflinks_wrapper.xml Wed Apr 21 11:35:21 2010 -0400
+++ b/tools/ngs_rna/cufflinks_wrapper.xml Wed Apr 21 11:42:50 2010 -0400
@@ -1,5 +1,5 @@
<tool id="cufflinks" name="Cufflinks" version="0.8.2">
- <description>transcript assembly, differential expression, and differential regulation for RNA-Seq</description>
+ <description>transcript assembly and FPKM (RPKM) estimates for RNA-Seq data</description>
<command interpreter="python">
cufflinks_wrapper.py
--input=$input
@@ -32,7 +32,7 @@
</param>
<when value="No"></when>
<when value="Yes">
- <param format="gtf" name="reference_annotation_file" type="data" label="Reference Annotation" help=""/>
+ <param format="gtf" name="reference_annotation_file" type="data" label="Reference Annotation" help="Make sure your annotation file is in GTF format and that Galaxy knows that your file is GTF--not GFF."/>
</when>
</conditional>
<conditional name="singlePaired">
@@ -50,9 +50,9 @@
</inputs>
<outputs>
- <data format="expr" name="genes_expression" />
- <data format="expr" name="transcripts_expression" />
- <data format="gtf" name="assembled_isoforms" />
+ <data format="expr" name="genes_expression" label="Cufflinks on data ${input.hid}: gene expression"/>
+ <data format="expr" name="transcripts_expression" label="Cufflinks on data ${input.hid}: transcript expression"/>
+ <data format="gtf" name="assembled_isoforms" label="Cufflinks on data ${input.hid}: assembled transcripts"/>
</outputs>
<tests>
@@ -60,10 +60,16 @@
<param name="sPaired" value="single"/>
<param name="input" value="cufflinks_in.sam"/>
<param name="mean_inner_distance" value="20"/>
+ <param name="max_intron_len" value="300000"/>
+ <param name="min_isoform_fraction" value="0.05"/>
+ <param name="pre_mrna_fraction" value="0.05"/>
+ <param name="min_map_quality" value="0"/>
+ <param name="use_ref" value="No"/>
<output name="assembled_isoforms" file="cufflinks_out1.gtf"/>
- <!-- Can't test these right now because .expr files aren't recognized.
- <output name="genes_expression" file="cufflinks_out3.expr"/>
- <output name="transcripts_expression" file="cufflinks_out2.expr"/>
+ <!--
+ Can't test these right now b/c .expr files aren't recognized. Need to add them?
+ <output name="genes_expression" format="tabular" file="cufflinks_out3.expr"/>
+ <output name="transcripts_expression" format="tabular" file="cufflinks_out2.expr"/>
-->
</test>
</tests>
1
0

10 May '10
details: http://www.bx.psu.edu/hg/galaxy/rev/d6fddb034db7
changeset: 3675:d6fddb034db7
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Wed Apr 21 11:35:21 2010 -0400
description:
Enable the grid helper and base grid templates to be used across webapps. Decouple the base controller from the model, controllers that subclass from the base controller must now import a model. Eliminate the base controller from the community now that the base controller can be used across webapps. Add a new admin controller grid to the community space. Move the Admin controller to ~/base/controller.py and subclass the 2 admin controller grids ( galaxy and community ) from it. Add the group components to the community.
diffstat:
community_wsgi.ini.sample | 3 +
lib/galaxy/web/base/controller.py | 1195 +++++++++-
lib/galaxy/web/controllers/admin.py | 900 +-------
lib/galaxy/web/controllers/dataset.py | 6 +-
lib/galaxy/web/controllers/forms.py | 2 +-
lib/galaxy/web/controllers/history.py | 10 +-
lib/galaxy/web/controllers/library_admin.py | 2 +-
lib/galaxy/web/controllers/page.py | 15 +-
lib/galaxy/web/controllers/requests.py | 2 +-
lib/galaxy/web/controllers/requests_admin.py | 2 +-
lib/galaxy/web/controllers/root.py | 6 +-
lib/galaxy/web/controllers/tag.py | 2 +-
lib/galaxy/web/controllers/tracks.py | 1 +
lib/galaxy/web/controllers/user.py | 5 +-
lib/galaxy/web/controllers/visualization.py | 3 +-
lib/galaxy/web/controllers/workflow.py | 15 +-
lib/galaxy/web/framework/helpers/grids.py | 21 +-
lib/galaxy/webapps/community/base/controller.py | 24 -
lib/galaxy/webapps/community/buildapp.py | 6 +-
lib/galaxy/webapps/community/config.py | 7 +-
lib/galaxy/webapps/community/controllers/admin.py | 285 ++
lib/galaxy/webapps/community/controllers/tool_browser.py | 2 +-
lib/galaxy/webapps/community/model/__init__.py | 23 +-
lib/galaxy/webapps/community/model/mapping.py | 48 +-
lib/galaxy/webapps/community/model/migrate/versions/0001_initial_tables.py | 29 +-
lib/galaxy/webapps/community/security/__init__.py | 50 +-
templates/admin/dataset_security/group/group.mako | 1 +
templates/admin/dataset_security/group/group_create.mako | 1 +
templates/admin/dataset_security/group/group_rename.mako | 1 +
templates/admin/dataset_security/role/role.mako | 1 +
templates/admin/dataset_security/role/role_create.mako | 1 +
templates/admin/dataset_security/role/role_rename.mako | 1 +
templates/admin/index.mako | 145 -
templates/admin/user/reset_password.mako | 1 +
templates/admin/user/user.mako | 1 +
templates/grid_base.mako | 26 +-
templates/user/info.mako | 4 +-
templates/webapps/community/admin/index.mako | 93 +
templates/webapps/community/base_panels.mako | 2 +-
templates/webapps/galaxy/admin/index.mako | 139 +
test/base/twilltestcase.py | 12 +-
41 files changed, 1930 insertions(+), 1163 deletions(-)
diffs (truncated from 4111 to 3000 lines):
diff -r 076f572d7c9d -r d6fddb034db7 community_wsgi.ini.sample
--- a/community_wsgi.ini.sample Wed Apr 21 10:41:30 2010 -0400
+++ b/community_wsgi.ini.sample Wed Apr 21 11:35:21 2010 -0400
@@ -46,6 +46,9 @@
# NEVER enable this on a public site (even test or QA)
# use_interactive = true
+# this should be a comma-separated list of valid Galaxy users
+#admin_users = user1@example.org,user2@example.org
+
# Force everyone to log in (disable anonymous access)
require_login = False
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/base/controller.py Wed Apr 21 11:35:21 2010 -0400
@@ -1,11 +1,9 @@
"""
Contains functionality needed in every web interface
"""
-
-import os, time, logging, re
-
-# Pieces of Galaxy to make global in every controller
-from galaxy import config, tools, web, model, util
+import os, time, logging, re, string, sys
+from datetime import datetime, timedelta
+from galaxy import config, tools, web, util
from galaxy.web import error, form, url_for
from galaxy.model.orm import *
from galaxy.workflow.modules import *
@@ -24,27 +22,28 @@
"""
Base class for Galaxy web application controllers.
"""
-
def __init__( self, app ):
"""Initialize an interface for application 'app'"""
self.app = app
-
def get_toolbox(self):
"""Returns the application toolbox"""
return self.app.toolbox
-
- def get_class( self, class_name ):
+ def get_class( self, trans, class_name ):
""" Returns the class object that a string denotes. Without this method, we'd have to do eval(<class_name>). """
if class_name == 'History':
- item_class = model.History
+ item_class = trans.model.History
elif class_name == 'HistoryDatasetAssociation':
- item_class = model.HistoryDatasetAssociation
+ item_class = trans.model.HistoryDatasetAssociation
elif class_name == 'Page':
- item_class = model.Page
+ item_class = trans.model.Page
elif class_name == 'StoredWorkflow':
- item_class = model.StoredWorkflow
+ item_class = trans.model.StoredWorkflow
elif class_name == 'Visualization':
- item_class = model.Visualization
+ item_class = trans.model.Visualization
+ elif class_name == 'Tool':
+ # TODO: Nate, this one should be changed to whatever you end up calling
+ # the pointer to the tool archive.
+ item_class = trans.model.Tool
else:
item_class = None
return item_class
@@ -53,62 +52,56 @@
class UsesAnnotations:
""" Mixin for getting and setting item annotations. """
- def get_item_annotation_str( self, db_session, user, item ):
+ def get_item_annotation_str( self, trans, user, item ):
""" Returns a user's annotation string for an item. """
- annotation_obj = self.get_item_annotation_obj( db_session, user, item )
+ annotation_obj = self.get_item_annotation_obj( trans, user, item )
if annotation_obj:
return annotation_obj.annotation
return None
-
- def get_item_annotation_obj( self, db_session, user, item ):
+ def get_item_annotation_obj( self, trans, user, item ):
""" Returns a user's annotation object for an item. """
# Get annotation association.
try:
- annotation_assoc_class = eval( "model.%sAnnotationAssociation" % item.__class__.__name__ )
+ annotation_assoc_class = eval( "trans.model.%sAnnotationAssociation" % item.__class__.__name__ )
except:
# Item doesn't have an annotation association class and cannot be annotated.
return False
-
# Get annotation association object.
- annotation_assoc = db_session.query( annotation_assoc_class ).filter_by( user=user )
- if item.__class__ == model.History:
+ annotation_assoc = trans.sa_session.query( annotation_assoc_class ).filter_by( user=user )
+ if item.__class__ == trans.model.History:
annotation_assoc = annotation_assoc.filter_by( history=item )
- elif item.__class__ == model.HistoryDatasetAssociation:
+ elif item.__class__ == trans.model.HistoryDatasetAssociation:
annotation_assoc = annotation_assoc.filter_by( hda=item )
- elif item.__class__ == model.StoredWorkflow:
+ elif item.__class__ == trans.model.StoredWorkflow:
annotation_assoc = annotation_assoc.filter_by( stored_workflow=item )
- elif item.__class__ == model.WorkflowStep:
+ elif item.__class__ == trans.model.WorkflowStep:
annotation_assoc = annotation_assoc.filter_by( workflow_step=item )
- elif item.__class__ == model.Page:
+ elif item.__class__ == trans.model.Page:
annotation_assoc = annotation_assoc.filter_by( page=item )
- elif item.__class__ == model.Visualization:
+ elif item.__class__ == trans.model.Visualization:
annotation_assoc = annotation_assoc.filter_by( visualization=item )
return annotation_assoc.first()
-
def add_item_annotation( self, trans, item, annotation ):
""" Add or update an item's annotation; a user can only have a single annotation for an item. """
-
# Get/create annotation association object.
- annotation_assoc = self.get_item_annotation_obj( trans.sa_session, trans.get_user(), item )
+ annotation_assoc = self.get_item_annotation_obj( trans, trans.user, item )
if not annotation_assoc:
# Create association.
# TODO: we could replace this eval() with a long if/else stmt, but this is more general without sacrificing
try:
- annotation_assoc_class = eval( "model.%sAnnotationAssociation" % item.__class__.__name__ )
+ annotation_assoc_class = eval( "trans.model.%sAnnotationAssociation" % item.__class__.__name__ )
except:
# Item doesn't have an annotation association class and cannot be annotated.
return False
annotation_assoc = annotation_assoc_class()
item.annotations.append( annotation_assoc )
annotation_assoc.user = trans.get_user()
-
# Set annotation.
annotation_assoc.annotation = annotation
return True
class SharableItemSecurity:
""" Mixin for handling security for sharable items. """
-
def security_check( self, user, item, check_ownership=False, check_accessible=False ):
""" Security checks for an item: checks if (a) user owns item or (b) item is accessible to user. """
if check_ownership:
@@ -125,7 +118,6 @@
class UsesHistoryDatasetAssociation:
""" Mixin for controllers that use HistoryDatasetAssociation objects. """
-
def get_dataset( self, trans, dataset_id, check_ownership=True, check_accessible=False ):
""" Get an HDA object by id. """
# DEPRECATION: We still support unencoded ids for backward compatibility
@@ -133,7 +125,7 @@
dataset_id = int( dataset_id )
except ValueError:
dataset_id = trans.security.decode_id( dataset_id )
- data = trans.sa_session.query( model.HistoryDatasetAssociation ).get( dataset_id )
+ data = trans.sa_session.query( trans.model.HistoryDatasetAssociation ).get( dataset_id )
if not data:
raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid dataset id: %s." % str( dataset_id ) )
if check_ownership:
@@ -151,7 +143,6 @@
else:
error( "You are not allowed to access this dataset" )
return data
-
def get_data( self, dataset, preview=True ):
""" Gets a dataset's data. """
# Get data from file, truncating if necessary.
@@ -169,12 +160,11 @@
class UsesVisualization( SharableItemSecurity ):
""" Mixin for controllers that use Visualization objects. """
-
def get_visualization( self, trans, id, check_ownership=True, check_accessible=False ):
""" Get a Visualization from the database by id, verifying ownership. """
# Load workflow from database
id = trans.security.decode_id( id )
- visualization = trans.sa_session.query( model.Visualization ).get( id )
+ visualization = trans.sa_session.query( trans.model.Visualization ).get( id )
if not visualization:
error( "Visualization not found" )
else:
@@ -182,17 +172,15 @@
class UsesStoredWorkflow( SharableItemSecurity ):
""" Mixin for controllers that use StoredWorkflow objects. """
-
def get_stored_workflow( self, trans, id, check_ownership=True, check_accessible=False ):
""" Get a StoredWorkflow from the database by id, verifying ownership. """
# Load workflow from database
id = trans.security.decode_id( id )
- stored = trans.sa_session.query( model.StoredWorkflow ).get( id )
+ stored = trans.sa_session.query( trans.model.StoredWorkflow ).get( id )
if not stored:
error( "Workflow not found" )
else:
return self.security_check( trans.get_user(), stored, check_ownership, check_accessible )
-
def get_stored_workflow_steps( self, trans, stored_workflow ):
""" Restores states for a stored workflow's steps. """
for step in stored_workflow.latest_workflow.steps:
@@ -217,32 +205,29 @@
class UsesHistory( SharableItemSecurity ):
""" Mixin for controllers that use History objects. """
-
def get_history( self, trans, id, check_ownership=True, check_accessible=False ):
"""Get a History from the database by id, verifying ownership."""
# Load history from database
id = trans.security.decode_id( id )
- history = trans.sa_session.query( model.History ).get( id )
+ history = trans.sa_session.query( trans.model.History ).get( id )
if not history:
error( "History not found" )
else:
return self.security_check( trans.get_user(), history, check_ownership, check_accessible )
-
def get_history_datasets( self, trans, history, show_deleted=False ):
""" Returns history's datasets. """
- query = trans.sa_session.query( model.HistoryDatasetAssociation ) \
- .filter( model.HistoryDatasetAssociation.history == history ) \
+ query = trans.sa_session.query( trans.model.HistoryDatasetAssociation ) \
+ .filter( trans.model.HistoryDatasetAssociation.history == history ) \
.options( eagerload( "children" ) ) \
- .join( "dataset" ).filter( model.Dataset.purged == False ) \
+ .join( "dataset" ).filter( trans.model.Dataset.purged == False ) \
.options( eagerload_all( "dataset.actions" ) ) \
- .order_by( model.HistoryDatasetAssociation.hid )
+ .order_by( trans.model.HistoryDatasetAssociation.hid )
if not show_deleted:
- query = query.filter( model.HistoryDatasetAssociation.deleted == False )
+ query = query.filter( trans.model.HistoryDatasetAssociation.deleted == False )
return query.all()
class Sharable:
""" Mixin for a controller that manages an item that can be shared. """
-
# Implemented methods.
@web.expose
@web.require_login( "share Galaxy items" )
@@ -251,52 +236,42 @@
trans.get_user().username = username
trans.sa_session.flush
return self.sharing( trans, id, **kwargs )
-
# Abstract methods.
-
@web.expose
@web.require_login( "modify Galaxy items" )
def set_slug_async( self, trans, id, new_slug ):
""" Set item slug asynchronously. """
- pass
-
+ pass
@web.expose
@web.require_login( "share Galaxy items" )
def sharing( self, trans, id, **kwargs ):
""" Handle item sharing. """
pass
-
@web.expose
@web.require_login( "share Galaxy items" )
def share( self, trans, id=None, email="", **kwd ):
""" Handle sharing an item with a particular user. """
pass
-
@web.expose
def display_by_username_and_slug( self, trans, username, slug ):
""" Display item by username and slug. """
pass
-
@web.expose
@web.json
@web.require_login( "get item name and link" )
def get_name_and_link_async( self, trans, id=None ):
""" Returns item's name and link. """
pass
-
@web.expose
@web.require_login("get item content asynchronously")
def get_item_content_async( self, trans, id ):
""" Returns item content in HTML format. """
pass
-
# Helper methods.
-
def _make_item_accessible( self, sa_session, item ):
""" Makes item accessible--viewable and importable--and sets item's slug. Does not flush/commit changes, however. Item must have name, user, importable, and slug attributes. """
item.importable = True
self.create_item_slug( sa_session, item )
-
def create_item_slug( self, sa_session, item ):
""" Create item slug. Slug is unique among user's importable items for item's class. Returns true if item's slug was set; false otherwise. """
if item.slug is None or item.slug == "":
@@ -312,7 +287,6 @@
# Remove trailing '-'.
if slug_base.endswith('-'):
slug_base = slug_base[:-1]
-
# Make sure that slug is not taken; if it is, add a number to it.
slug = slug_base
count = 1
@@ -322,12 +296,1103 @@
count += 1
item.slug = slug
return True
-
return False
"""
Deprecated: `BaseController` used to be available under the name `Root`
"""
+class ControllerUnavailable( Exception ):
+ pass
-class ControllerUnavailable( Exception ):
- pass
\ No newline at end of file
+class Admin():
+ # Override these
+ user_list_grid = None
+ role_list_grid = None
+ group_list_grid = None
+
+ @web.expose
+ @web.require_admin
+ def index( self, trans, **kwd ):
+ webapp = kwd.get( 'webapp', 'galaxy' )
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ if webapp == 'galaxy':
+ return trans.fill_template( '/webapps/galaxy/admin/index.mako',
+ webapp=webapp,
+ message=message,
+ status=status )
+ else:
+ return trans.fill_template( '/webapps/community/admin/index.mako',
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def center( self, trans, **kwd ):
+ return trans.fill_template( '/admin/center.mako' )
+ @web.expose
+ @web.require_admin
+ def reload_tool( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ return trans.fill_template( '/admin/reload_tool.mako',
+ toolbox=self.app.toolbox,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def tool_reload( self, trans, tool_version=None, **kwd ):
+ params = util.Params( kwd )
+ tool_id = params.tool_id
+ self.app.toolbox.reload( tool_id )
+ message = 'Reloaded tool: ' + tool_id
+ return trans.fill_template( '/admin/reload_tool.mako',
+ toolbox=self.app.toolbox,
+ message=message,
+ status='done' )
+
+ # Galaxy Role Stuff
+ @web.expose
+ @web.require_admin
+ def roles( self, trans, **kwargs ):
+ if 'operation' in kwargs:
+ operation = kwargs['operation'].lower()
+ if operation == "roles":
+ return self.role( trans, **kwargs )
+ if operation == "create":
+ return self.create_role( trans, **kwargs )
+ if operation == "delete":
+ return self.mark_role_deleted( trans, **kwargs )
+ if operation == "undelete":
+ return self.undelete_role( trans, **kwargs )
+ if operation == "purge":
+ return self.purge_role( trans, **kwargs )
+ if operation == "manage users and groups":
+ return self.manage_users_and_groups_for_role( trans, **kwargs )
+ if operation == "rename":
+ return self.rename_role( trans, **kwargs )
+ # Render the list view
+ return self.role_list_grid( trans, **kwargs )
+ @web.expose
+ @web.require_admin
+ def create_role( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ if params.get( 'create_role_button', False ):
+ name = util.restore_text( params.name )
+ description = util.restore_text( params.description )
+ in_users = util.listify( params.get( 'in_users', [] ) )
+ in_groups = util.listify( params.get( 'in_groups', [] ) )
+ create_group_for_role = params.get( 'create_group_for_role', 'no' )
+ if not name or not description:
+ message = "Enter a valid name and a description"
+ elif trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.name==name ).first():
+ message = "A role with that name already exists"
+ else:
+ # Create the role
+ role = trans.app.model.Role( name=name, description=description, type=trans.app.model.Role.types.ADMIN )
+ trans.sa_session.add( role )
+ # Create the UserRoleAssociations
+ for user in [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in in_users ]:
+ ura = trans.app.model.UserRoleAssociation( user, role )
+ trans.sa_session.add( ura )
+ # Create the GroupRoleAssociations
+ for group in [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in in_groups ]:
+ gra = trans.app.model.GroupRoleAssociation( group, role )
+ trans.sa_session.add( gra )
+ if create_group_for_role == 'yes':
+ # Create the group
+ group = trans.app.model.Group( name=name )
+ trans.sa_session.add( group )
+ message = "Group '%s' has been created, and role '%s' has been created with %d associated users and %d associated groups" % \
+ ( group.name, role.name, len( in_users ), len( in_groups ) )
+ else:
+ message = "Role '%s' has been created with %d associated users and %d associated groups" % ( role.name, len( in_users ), len( in_groups ) )
+ trans.sa_session.flush()
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='create_role',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ out_users = []
+ for user in trans.sa_session.query( trans.app.model.User ) \
+ .filter( trans.app.model.User.table.c.deleted==False ) \
+ .order_by( trans.app.model.User.table.c.email ):
+ out_users.append( ( user.id, user.email ) )
+ out_groups = []
+ for group in trans.sa_session.query( trans.app.model.Group ) \
+ .filter( trans.app.model.Group.table.c.deleted==False ) \
+ .order_by( trans.app.model.Group.table.c.name ):
+ out_groups.append( ( group.id, group.name ) )
+ return trans.fill_template( '/admin/dataset_security/role/role_create.mako',
+ in_users=[],
+ out_users=out_users,
+ in_groups=[],
+ out_groups=out_groups,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def rename_role( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = params.get( 'id', None )
+ if not id:
+ message = "No role ids received for renaming"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ role = get_role( trans, id )
+ if params.get( 'rename_role_button', False ):
+ old_name = role.name
+ new_name = util.restore_text( params.name )
+ new_description = util.restore_text( params.description )
+ if not new_name:
+ message = 'Enter a valid name'
+ status='error'
+ elif trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.name==new_name ).first():
+ message = 'A role with that name already exists'
+ status = 'error'
+ else:
+ role.name = new_name
+ role.description = new_description
+ trans.sa_session.add( role )
+ trans.sa_session.flush()
+ message = "Role '%s' has been renamed to '%s'" % ( old_name, new_name )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ return trans.fill_template( '/admin/dataset_security/role/role_rename.mako',
+ role=role,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def manage_users_and_groups_for_role( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = params.get( 'id', None )
+ if not id:
+ message = "No role ids received for managing users and groups"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ role = get_role( trans, id )
+ if params.get( 'role_members_edit_button', False ):
+ in_users = [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in util.listify( params.in_users ) ]
+ for ura in role.users:
+ user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id )
+ if user not in in_users:
+ # Delete DefaultUserPermissions for previously associated users that have been removed from the role
+ for dup in user.default_permissions:
+ if role == dup.role:
+ trans.sa_session.delete( dup )
+ # Delete DefaultHistoryPermissions for previously associated users that have been removed from the role
+ for history in user.histories:
+ for dhp in history.default_permissions:
+ if role == dhp.role:
+ trans.sa_session.delete( dhp )
+ trans.sa_session.flush()
+ in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( params.in_groups ) ]
+ trans.app.security_agent.set_entity_role_associations( roles=[ role ], users=in_users, groups=in_groups )
+ trans.sa_session.refresh( role )
+ message = "Role '%s' has been updated with %d associated users and %d associated groups" % ( role.name, len( in_users ), len( in_groups ) )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status=status ) )
+ in_users = []
+ out_users = []
+ in_groups = []
+ out_groups = []
+ for user in trans.sa_session.query( trans.app.model.User ) \
+ .filter( trans.app.model.User.table.c.deleted==False ) \
+ .order_by( trans.app.model.User.table.c.email ):
+ if user in [ x.user for x in role.users ]:
+ in_users.append( ( user.id, user.email ) )
+ else:
+ out_users.append( ( user.id, user.email ) )
+ for group in trans.sa_session.query( trans.app.model.Group ) \
+ .filter( trans.app.model.Group.table.c.deleted==False ) \
+ .order_by( trans.app.model.Group.table.c.name ):
+ if group in [ x.group for x in role.groups ]:
+ in_groups.append( ( group.id, group.name ) )
+ else:
+ out_groups.append( ( group.id, group.name ) )
+ library_dataset_actions = {}
+ if webapp == 'galaxy':
+ # Build a list of tuples that are LibraryDatasetDatasetAssociations followed by a list of actions
+ # whose DatasetPermissions is associated with the Role
+ # [ ( LibraryDatasetDatasetAssociation [ action, action ] ) ]
+ for dp in role.dataset_actions:
+ for ldda in trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ) \
+ .filter( trans.app.model.LibraryDatasetDatasetAssociation.dataset_id==dp.dataset_id ):
+ root_found = False
+ folder_path = ''
+ folder = ldda.library_dataset.folder
+ while not root_found:
+ folder_path = '%s / %s' % ( folder.name, folder_path )
+ if not folder.parent:
+ root_found = True
+ else:
+ folder = folder.parent
+ folder_path = '%s %s' % ( folder_path, ldda.name )
+ library = trans.sa_session.query( trans.app.model.Library ) \
+ .filter( trans.app.model.Library.table.c.root_folder_id == folder.id ) \
+ .first()
+ if library not in library_dataset_actions:
+ library_dataset_actions[ library ] = {}
+ try:
+ library_dataset_actions[ library ][ folder_path ].append( dp.action )
+ except:
+ library_dataset_actions[ library ][ folder_path ] = [ dp.action ]
+ return trans.fill_template( '/admin/dataset_security/role/role.mako',
+ role=role,
+ in_users=in_users,
+ out_users=out_users,
+ in_groups=in_groups,
+ out_groups=out_groups,
+ library_dataset_actions=library_dataset_actions,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def mark_role_deleted( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No role ids received for deleting"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ ids = util.listify( id )
+ message = "Deleted %d roles: " % len( ids )
+ for role_id in ids:
+ role = get_role( trans, role_id )
+ role.deleted = True
+ trans.sa_session.add( role )
+ trans.sa_session.flush()
+ message += " %s " % role.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
+ def undelete_role( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No role ids received for undeleting"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ ids = util.listify( id )
+ count = 0
+ undeleted_roles = ""
+ for role_id in ids:
+ role = get_role( trans, role_id )
+ if not role.deleted:
+ message = "Role '%s' has not been deleted, so it cannot be undeleted." % role.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ role.deleted = False
+ trans.sa_session.add( role )
+ trans.sa_session.flush()
+ count += 1
+ undeleted_roles += " %s" % role.name
+ message = "Undeleted %d roles: %s" % ( count, undeleted_roles )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
+ def purge_role( self, trans, **kwd ):
+ # This method should only be called for a Role that has previously been deleted.
+ # Purging a deleted Role deletes all of the following from the database:
+ # - UserRoleAssociations where role_id == Role.id
+ # - DefaultUserPermissions where role_id == Role.id
+ # - DefaultHistoryPermissions where role_id == Role.id
+ # - GroupRoleAssociations where role_id == Role.id
+ # - DatasetPermissions where role_id == Role.id
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No role ids received for purging"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ ids = util.listify( id )
+ message = "Purged %d roles: " % len( ids )
+ for role_id in ids:
+ role = get_role( trans, role_id )
+ if not role.deleted:
+ message = "Role '%s' has not been deleted, so it cannot be purged." % role.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ # Delete UserRoleAssociations
+ for ura in role.users:
+ user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id )
+ # Delete DefaultUserPermissions for associated users
+ for dup in user.default_permissions:
+ if role == dup.role:
+ trans.sa_session.delete( dup )
+ # Delete DefaultHistoryPermissions for associated users
+ for history in user.histories:
+ for dhp in history.default_permissions:
+ if role == dhp.role:
+ trans.sa_session.delete( dhp )
+ trans.sa_session.delete( ura )
+ # Delete GroupRoleAssociations
+ for gra in role.groups:
+ trans.sa_session.delete( gra )
+ # Delete DatasetPermissions
+ for dp in role.dataset_actions:
+ trans.sa_session.delete( dp )
+ trans.sa_session.flush()
+ message += " %s " % role.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='roles',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+
+ # Galaxy Group Stuff
+ @web.expose
+ @web.require_admin
+ def groups( self, trans, **kwargs ):
+ if 'operation' in kwargs:
+ operation = kwargs['operation'].lower()
+ if operation == "groups":
+ return self.group( trans, **kwargs )
+ if operation == "create":
+ return self.create_group( trans, **kwargs )
+ if operation == "delete":
+ return self.mark_group_deleted( trans, **kwargs )
+ if operation == "undelete":
+ return self.undelete_group( trans, **kwargs )
+ if operation == "purge":
+ return self.purge_group( trans, **kwargs )
+ if operation == "manage users and roles":
+ return self.manage_users_and_roles_for_group( trans, **kwargs )
+ if operation == "rename":
+ return self.rename_group( trans, **kwargs )
+ # Render the list view
+ return self.group_list_grid( trans, **kwargs )
+ @web.expose
+ @web.require_admin
+ def rename_group( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = params.get( 'id', None )
+ if not id:
+ message = "No group ids received for renaming"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ group = get_group( trans, id )
+ if params.get( 'rename_group_button', False ):
+ old_name = group.name
+ new_name = util.restore_text( params.name )
+ if not new_name:
+ message = 'Enter a valid name'
+ status = 'error'
+ elif trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name==new_name ).first():
+ message = 'A group with that name already exists'
+ status = 'error'
+ else:
+ group.name = new_name
+ trans.sa_session.add( group )
+ trans.sa_session.flush()
+ message = "Group '%s' has been renamed to '%s'" % ( old_name, new_name )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ return trans.fill_template( '/admin/dataset_security/group/group_rename.mako',
+ group=group,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def manage_users_and_roles_for_group( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ group = get_group( trans, params.id )
+ if params.get( 'group_roles_users_edit_button', False ):
+ in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( params.in_roles ) ]
+ in_users = [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in util.listify( params.in_users ) ]
+ trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=in_roles, users=in_users )
+ trans.sa_session.refresh( group )
+ message += "Group '%s' has been updated with %d associated roles and %d associated users" % ( group.name, len( in_roles ), len( in_users ) )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status=status ) )
+ in_roles = []
+ out_roles = []
+ in_users = []
+ out_users = []
+ for role in trans.sa_session.query(trans.app.model.Role ) \
+ .filter( trans.app.model.Role.table.c.deleted==False ) \
+ .order_by( trans.app.model.Role.table.c.name ):
+ if role in [ x.role for x in group.roles ]:
+ in_roles.append( ( role.id, role.name ) )
+ else:
+ out_roles.append( ( role.id, role.name ) )
+ for user in trans.sa_session.query( trans.app.model.User ) \
+ .filter( trans.app.model.User.table.c.deleted==False ) \
+ .order_by( trans.app.model.User.table.c.email ):
+ if user in [ x.user for x in group.users ]:
+ in_users.append( ( user.id, user.email ) )
+ else:
+ out_users.append( ( user.id, user.email ) )
+ message += 'Group %s is currently associated with %d roles and %d users' % ( group.name, len( in_roles ), len( in_users ) )
+ return trans.fill_template( '/admin/dataset_security/group/group.mako',
+ group=group,
+ in_roles=in_roles,
+ out_roles=out_roles,
+ in_users=in_users,
+ out_users=out_users,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def create_group( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ if params.get( 'create_group_button', False ):
+ name = util.restore_text( params.name )
+ in_users = util.listify( params.get( 'in_users', [] ) )
+ in_roles = util.listify( params.get( 'in_roles', [] ) )
+ if not name:
+ message = "Enter a valid name"
+ elif trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name==name ).first():
+ message = "A group with that name already exists"
+ else:
+ # Create the group
+ group = trans.app.model.Group( name=name )
+ trans.sa_session.add( group )
+ trans.sa_session.flush()
+ # Create the UserGroupAssociations
+ for user in [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in in_users ]:
+ uga = trans.app.model.UserGroupAssociation( user, group )
+ trans.sa_session.add( uga )
+ trans.sa_session.flush()
+ # Create the GroupRoleAssociations
+ for role in [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in in_roles ]:
+ gra = trans.app.model.GroupRoleAssociation( group, role )
+ trans.sa_session.add( gra )
+ trans.sa_session.flush()
+ message = "Group '%s' has been created with %d associated users and %d associated roles" % ( name, len( in_users ), len( in_roles ) )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='create_group',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ out_users = []
+ for user in trans.sa_session.query( trans.app.model.User ) \
+ .filter( trans.app.model.User.table.c.deleted==False ) \
+ .order_by( trans.app.model.User.table.c.email ):
+ out_users.append( ( user.id, user.email ) )
+ out_roles = []
+ for role in trans.sa_session.query( trans.app.model.Role ) \
+ .filter( trans.app.model.Role.table.c.deleted==False ) \
+ .order_by( trans.app.model.Role.table.c.name ):
+ out_roles.append( ( role.id, role.name ) )
+ return trans.fill_template( '/admin/dataset_security/group/group_create.mako',
+ in_users=[],
+ out_users=out_users,
+ in_roles=[],
+ out_roles=out_roles,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def mark_group_deleted( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = params.get( 'id', None )
+ if not id:
+ message = "No group ids received for marking deleted"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ ids = util.listify( id )
+ message = "Deleted %d groups: " % len( ids )
+ for group_id in ids:
+ group = get_group( trans, group_id )
+ group.deleted = True
+ trans.sa_session.add( group )
+ trans.sa_session.flush()
+ message += " %s " % group.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
+ def undelete_group( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No group ids received for undeleting"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ ids = util.listify( id )
+ count = 0
+ undeleted_groups = ""
+ for group_id in ids:
+ group = get_group( trans, group_id )
+ if not group.deleted:
+ message = "Group '%s' has not been deleted, so it cannot be undeleted." % group.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ group.deleted = False
+ trans.sa_session.add( group )
+ trans.sa_session.flush()
+ count += 1
+ undeleted_groups += " %s" % group.name
+ message = "Undeleted %d groups: %s" % ( count, undeleted_groups )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
+ def purge_group( self, trans, **kwd ):
+ # This method should only be called for a Group that has previously been deleted.
+ # Purging a deleted Group simply deletes all UserGroupAssociations and GroupRoleAssociations.
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No group ids received for purging"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ ids = util.listify( id )
+ message = "Purged %d groups: " % len( ids )
+ for group_id in ids:
+ group = get_group( trans, group_id )
+ if not group.deleted:
+ # We should never reach here, but just in case there is a bug somewhere...
+ message = "Group '%s' has not been deleted, so it cannot be purged." % group.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ # Delete UserGroupAssociations
+ for uga in group.users:
+ trans.sa_session.delete( uga )
+ # Delete GroupRoleAssociations
+ for gra in group.roles:
+ trans.sa_session.delete( gra )
+ trans.sa_session.flush()
+ message += " %s " % group.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='groups',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+
+ # Galaxy User Stuff
+ @web.expose
+ @web.require_admin
+ def create_new_user( self, trans, **kwargs ):
+ webapp = kwargs.get( 'webapp', 'galaxy' )
+ return trans.response.send_redirect( web.url_for( controller='user',
+ action='create',
+ webapp=webapp,
+ admin_view=True ) )
+ @web.expose
+ @web.require_admin
+ def reset_user_password( self, trans, **kwd ):
+ webapp = kwd.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No user ids received for resetting passwords"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ ids = util.listify( id )
+ if 'reset_user_password_button' in kwd:
+ message = ''
+ status = ''
+ for user_id in ids:
+ user = get_user( trans, user_id )
+ password = kwd.get( 'password', None )
+ confirm = kwd.get( 'confirm' , None )
+ if len( password ) < 6:
+ message = "Please use a password of at least 6 characters"
+ status = 'error'
+ break
+ elif password != confirm:
+ message = "Passwords do not match"
+ status = 'error'
+ break
+ else:
+ user.set_password_cleartext( password )
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
+ if not message and not status:
+ message = "Passwords reset for %d users" % len( ids )
+ status = 'done'
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status=status ) )
+ users = [ get_user( trans, user_id ) for user_id in ids ]
+ if len( ids ) > 1:
+ id=','.join( id )
+ return trans.fill_template( '/admin/user/reset_password.mako',
+ id=id,
+ users=users,
+ password='',
+ confirm='',
+ webapp=webapp )
+ @web.expose
+ @web.require_admin
+ def mark_user_deleted( self, trans, **kwd ):
+ webapp = kwd.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No user ids received for deleting"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ ids = util.listify( id )
+ message = "Deleted %d users: " % len( ids )
+ for user_id in ids:
+ user = get_user( trans, user_id )
+ user.deleted = True
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
+ message += " %s " % user.email
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
+ def undelete_user( self, trans, **kwd ):
+ webapp = kwd.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No user ids received for undeleting"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ ids = util.listify( id )
+ count = 0
+ undeleted_users = ""
+ for user_id in ids:
+ user = get_user( trans, user_id )
+ if not user.deleted:
+ message = "User '%s' has not been deleted, so it cannot be undeleted." % user.email
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ user.deleted = False
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
+ count += 1
+ undeleted_users += " %s" % user.email
+ message = "Undeleted %d users: %s" % ( count, undeleted_users )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
+ def purge_user( self, trans, **kwd ):
+ # This method should only be called for a User that has previously been deleted.
+ # We keep the User in the database ( marked as purged ), and stuff associated
+ # with the user's private role in case we want the ability to unpurge the user
+ # some time in the future.
+ # Purging a deleted User deletes all of the following:
+ # - History where user_id = User.id
+ # - HistoryDatasetAssociation where history_id = History.id
+ # - Dataset where HistoryDatasetAssociation.dataset_id = Dataset.id
+ # - UserGroupAssociation where user_id == User.id
+ # - UserRoleAssociation where user_id == User.id EXCEPT FOR THE PRIVATE ROLE
+ # Purging Histories and Datasets must be handled via the cleanup_datasets.py script
+ webapp = kwd.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No user ids received for purging"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ ids = util.listify( id )
+ message = "Purged %d users: " % len( ids )
+ for user_id in ids:
+ user = get_user( trans, user_id )
+ if not user.deleted:
+ # We should never reach here, but just in case there is a bug somewhere...
+ message = "User '%s' has not been deleted, so it cannot be purged." % user.email
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ private_role = trans.app.security_agent.get_private_user_role( user )
+ # Delete History
+ for h in user.active_histories:
+ trans.sa_session.refresh( h )
+ for hda in h.active_datasets:
+ # Delete HistoryDatasetAssociation
+ d = trans.sa_session.query( trans.app.model.Dataset ).get( hda.dataset_id )
+ # Delete Dataset
+ if not d.deleted:
+ d.deleted = True
+ trans.sa_session.add( d )
+ hda.deleted = True
+ trans.sa_session.add( hda )
+ h.deleted = True
+ trans.sa_session.add( h )
+ # Delete UserGroupAssociations
+ for uga in user.groups:
+ trans.sa_session.delete( uga )
+ # Delete UserRoleAssociations EXCEPT FOR THE PRIVATE ROLE
+ for ura in user.roles:
+ if ura.role_id != private_role.id:
+ trans.sa_session.delete( ura )
+ # Purge the user
+ user.purged = True
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
+ message += "%s " % user.email
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
+ def users( self, trans, **kwargs ):
+ if 'operation' in kwargs:
+ operation = kwargs['operation'].lower()
+ if operation == "roles":
+ return self.user( trans, **kwargs )
+ if operation == "reset password":
+ return self.reset_user_password( trans, **kwargs )
+ if operation == "delete":
+ return self.mark_user_deleted( trans, **kwargs )
+ if operation == "undelete":
+ return self.undelete_user( trans, **kwargs )
+ if operation == "purge":
+ return self.purge_user( trans, **kwargs )
+ if operation == "create":
+ return self.create_new_user( trans, **kwargs )
+ if operation == "information":
+ return self.user_info( trans, **kwargs )
+ if operation == "manage roles and groups":
+ return self.manage_roles_and_groups_for_user( trans, **kwargs )
+ # Render the list view
+ return self.user_list_grid( trans, **kwargs )
+ @web.expose
+ @web.require_admin
+ def user_info( self, trans, **kwd ):
+ '''
+ This method displays the user information page which consists of login
+ information, public username, reset password & other user information
+ obtained during registration
+ '''
+ webapp = kwd.get( 'webapp', 'galaxy' )
+ user_id = kwd.get( 'id', None )
+ if not user_id:
+ message += "Invalid user id (%s) received" % str( user_id )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ user = get_user( trans, user_id )
+ return trans.response.send_redirect( web.url_for( controller='user',
+ action='show_info',
+ user_id=user.id,
+ admin_view=True,
+ **kwd ) )
+ @web.expose
+ @web.require_admin
+ def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
+ """Return autocomplete data for user emails"""
+ ac_data = ""
+ for user in trans.sa_session.query( User ).filter_by( deleted=False ).filter( func.lower( User.email ).like( q.lower() + "%" ) ):
+ ac_data = ac_data + user.email + "\n"
+ return ac_data
+ @web.expose
+ @web.require_admin
+ def manage_roles_and_groups_for_user( self, trans, **kwd ):
+ webapp = kwd.get( 'webapp', 'galaxy' )
+ user_id = kwd.get( 'id', None )
+ message = ''
+ status = ''
+ if not user_id:
+ message += "Invalid user id (%s) received" % str( user_id )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ user = get_user( trans, user_id )
+ private_role = trans.app.security_agent.get_private_user_role( user )
+ if kwd.get( 'user_roles_groups_edit_button', False ):
+ # Make sure the user is not dis-associating himself from his private role
+ out_roles = kwd.get( 'out_roles', [] )
+ if out_roles:
+ out_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( out_roles ) ]
+ if private_role in out_roles:
+ message += "You cannot eliminate a user's private role association. "
+ status = 'error'
+ in_roles = kwd.get( 'in_roles', [] )
+ if in_roles:
+ in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( in_roles ) ]
+ out_groups = kwd.get( 'out_groups', [] )
+ if out_groups:
+ out_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( out_groups ) ]
+ in_groups = kwd.get( 'in_groups', [] )
+ if in_groups:
+ in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( in_groups ) ]
+ if in_roles:
+ trans.app.security_agent.set_entity_user_associations( users=[ user ], roles=in_roles, groups=in_groups )
+ trans.sa_session.refresh( user )
+ message += "User '%s' has been updated with %d associated roles and %d associated groups (private roles are not displayed)" % \
+ ( user.email, len( in_roles ), len( in_groups ) )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='users',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ in_roles = []
+ out_roles = []
+ in_groups = []
+ out_groups = []
+ for role in trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.deleted==False ) \
+ .order_by( trans.app.model.Role.table.c.name ):
+ if role in [ x.role for x in user.roles ]:
+ in_roles.append( ( role.id, role.name ) )
+ elif role.type != trans.app.model.Role.types.PRIVATE:
+ # There is a 1 to 1 mapping between a user and a PRIVATE role, so private roles should
+ # not be listed in the roles form fields, except for the currently selected user's private
+ # role, which should always be in in_roles. The check above is added as an additional
+ # precaution, since for a period of time we were including private roles in the form fields.
+ out_roles.append( ( role.id, role.name ) )
+ for group in trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.deleted==False ) \
+ .order_by( trans.app.model.Group.table.c.name ):
+ if group in [ x.group for x in user.groups ]:
+ in_groups.append( ( group.id, group.name ) )
+ else:
+ out_groups.append( ( group.id, group.name ) )
+ message += "User '%s' is currently associated with %d roles and is a member of %d groups" % \
+ ( user.email, len( in_roles ), len( in_groups ) )
+ if not status:
+ status = 'done'
+ return trans.fill_template( '/admin/user/user.mako',
+ user=user,
+ in_roles=in_roles,
+ out_roles=out_roles,
+ in_groups=in_groups,
+ out_groups=out_groups,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def memdump( self, trans, ids = 'None', sorts = 'None', pages = 'None', new_id = None, new_sort = None, **kwd ):
+ if self.app.memdump is None:
+ return trans.show_error_message( "Memdump is not enabled (set <code>use_memdump = True</code> in universe_wsgi.ini)" )
+ heap = self.app.memdump.get()
+ p = util.Params( kwd )
+ msg = None
+ if p.dump:
+ heap = self.app.memdump.get( update = True )
+ msg = "Heap dump complete"
+ elif p.setref:
+ self.app.memdump.setref()
+ msg = "Reference point set (dump to see delta from this point)"
+ ids = ids.split( ',' )
+ sorts = sorts.split( ',' )
+ if new_id is not None:
+ ids.append( new_id )
+ sorts.append( 'None' )
+ elif new_sort is not None:
+ sorts[-1] = new_sort
+ breadcrumb = "<a href='%s' class='breadcrumb'>heap</a>" % web.url_for()
+ # new lists so we can assemble breadcrumb links
+ new_ids = []
+ new_sorts = []
+ for id, sort in zip( ids, sorts ):
+ new_ids.append( id )
+ if id != 'None':
+ breadcrumb += "<a href='%s' class='breadcrumb'>[%s]</a>" % ( web.url_for( ids=','.join( new_ids ), sorts=','.join( new_sorts ) ), id )
+ heap = heap[int(id)]
+ new_sorts.append( sort )
+ if sort != 'None':
+ breadcrumb += "<a href='%s' class='breadcrumb'>.by('%s')</a>" % ( web.url_for( ids=','.join( new_ids ), sorts=','.join( new_sorts ) ), sort )
+ heap = heap.by( sort )
+ ids = ','.join( new_ids )
+ sorts = ','.join( new_sorts )
+ if p.theone:
+ breadcrumb += ".theone"
+ heap = heap.theone
+ return trans.fill_template( '/admin/memdump.mako', heap = heap, ids = ids, sorts = sorts, breadcrumb = breadcrumb, msg = msg )
+
+ @web.expose
+ @web.require_admin
+ def jobs( self, trans, stop = [], stop_msg = None, cutoff = 180, **kwd ):
+ deleted = []
+ msg = None
+ status = None
+ job_ids = util.listify( stop )
+ if job_ids and stop_msg in [ None, '' ]:
+ msg = 'Please enter an error message to display to the user describing why the job was terminated'
+ status = 'error'
+ elif job_ids:
+ if stop_msg[-1] not in string.punctuation:
+ stop_msg += '.'
+ for job_id in job_ids:
+ trans.app.job_manager.job_stop_queue.put( job_id, error_msg="This job was stopped by an administrator: %s For more information or help" % stop_msg )
+ deleted.append( str( job_id ) )
+ if deleted:
+ msg = 'Queued job'
+ if len( deleted ) > 1:
+ msg += 's'
+ msg += ' for deletion: '
+ msg += ', '.join( deleted )
+ status = 'done'
+ cutoff_time = datetime.utcnow() - timedelta( seconds=int( cutoff ) )
+ jobs = trans.sa_session.query( trans.app.model.Job ) \
+ .filter( and_( trans.app.model.Job.table.c.update_time < cutoff_time,
+ or_( trans.app.model.Job.state == trans.app.model.Job.states.NEW,
+ trans.app.model.Job.state == trans.app.model.Job.states.QUEUED,
+ trans.app.model.Job.state == trans.app.model.Job.states.RUNNING,
+ trans.app.model.Job.state == trans.app.model.Job.states.UPLOAD ) ) ) \
+ .order_by( trans.app.model.Job.table.c.update_time.desc() )
+ last_updated = {}
+ for job in jobs:
+ delta = datetime.utcnow() - job.update_time
+ if delta > timedelta( minutes=60 ):
+ last_updated[job.id] = '%s hours' % int( delta.seconds / 60 / 60 )
+ else:
+ last_updated[job.id] = '%s minutes' % int( delta.seconds / 60 )
+ return trans.fill_template( '/admin/jobs.mako',
+ jobs = jobs,
+ last_updated = last_updated,
+ cutoff = cutoff,
+ msg = msg,
+ status = status )
+
+## ---- Utility methods -------------------------------------------------------
+
+def get_user( trans, id ):
+ """Get a User from the database by id."""
+ # Load user from database
+ id = trans.security.decode_id( id )
+ user = trans.sa_session.query( trans.model.User ).get( id )
+ if not user:
+ return trans.show_error_message( "User not found for id (%s)" % str( id ) )
+ return user
+def get_role( trans, id ):
+ """Get a Role from the database by id."""
+ # Load role from database
+ id = trans.security.decode_id( id )
+ role = trans.sa_session.query( trans.model.Role ).get( id )
+ if not role:
+ return trans.show_error_message( "Role not found for id (%s)" % str( id ) )
+ return role
+def get_group( trans, id ):
+ """Get a Group from the database by id."""
+ # Load group from database
+ id = trans.security.decode_id( id )
+ group = trans.sa_session.query( trans.model.Group ).get( id )
+ if not group:
+ return trans.show_error_message( "Group not found for id (%s)" % str( id ) )
+ return group
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/admin.py Wed Apr 21 11:35:21 2010 -0400
@@ -1,16 +1,10 @@
-import string, sys
-from datetime import datetime, timedelta
-from galaxy import util, datatypes
from galaxy.web.base.controller import *
-from galaxy.util.odict import odict
+from galaxy import model
from galaxy.model.orm import *
from galaxy.web.framework.helpers import time_ago, iff, grids
import logging
log = logging.getLogger( __name__ )
-# States for passing messages
-SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
-
class UserListGrid( grids.Grid ):
class EmailColumn( grids.TextColumn ):
def get_value( self, trans, grid, user ):
@@ -49,6 +43,7 @@
return 'never'
# Grid definition
+ webapp = "galaxy"
title = "Users"
model_class = model.User
template='/admin/user/grid.mako'
@@ -57,7 +52,7 @@
EmailColumn( "Email",
key="email",
model_class=model.User,
- link=( lambda item: dict( operation="information", id=item.id ) ),
+ link=( lambda item: dict( operation="information", id=item.id, webapp="galaxy" ) ),
attach_popup=True,
filterable="advanced" ),
UserNameColumn( "User Name",
@@ -79,11 +74,18 @@
visible=False,
filterable="standard" ) )
global_actions = [
- grids.GridAction( "Create new user", dict( controller='admin', action='users', operation='create' ) )
+ grids.GridAction( "Create new user", dict( controller='admin', action='users', operation='create', webapp="galaxy" ) )
]
operations = [
- grids.GridOperation( "Manage Roles and Groups", condition=( lambda item: not item.deleted ), allow_multiple=False ),
- grids.GridOperation( "Reset Password", condition=( lambda item: not item.deleted ), allow_multiple=True, allow_popup=False )
+ grids.GridOperation( "Manage Roles and Groups",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="manage_roles_and_groups_for_user" ) ),
+ grids.GridOperation( "Reset Password",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=True,
+ allow_popup=False,
+ url_args=dict( webapp="galaxy", action="reset_user_password" ) )
]
#TODO: enhance to account for trans.app.config.allow_user_deletion here so that we can eliminate these operations if
# the setting is False
@@ -96,7 +98,6 @@
grids.GridColumnFilter( "Purged", args=dict( purged=True ) ),
grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
]
- default_filter = dict( email="All", username="All", deleted="False", purged="False" )
num_rows_per_page = 50
preserve_state = False
use_paging = True
@@ -134,6 +135,7 @@
return 0
# Grid definition
+ webapp = "galaxy"
title = "Roles"
model_class = model.Role
template='/admin/dataset_security/role/grid.mako'
@@ -141,7 +143,7 @@
columns = [
NameColumn( "Name",
key="name",
- link=( lambda item: dict( operation="Manage users and groups", id=item.id ) ),
+ link=( lambda item: dict( operation="Manage users and groups", id=item.id, webapp="galaxy" ) ),
model_class=model.Role,
attach_popup=True,
filterable="advanced" ),
@@ -169,16 +171,27 @@
global_actions = [
grids.GridAction( "Add new role", dict( controller='admin', action='roles', operation='create' ) )
]
- operations = [ grids.GridOperation( "Rename", condition=( lambda item: not item.deleted ), allow_multiple=False ),
- grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ), allow_multiple=True ),
- grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ), allow_multiple=True ),
- grids.GridOperation( "Purge", condition=( lambda item: item.deleted ), allow_multiple=True ) ]
+ operations = [ grids.GridOperation( "Rename",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="rename_role" ) ),
+ grids.GridOperation( "Delete",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=True,
+ url_args=dict( webapp="galaxy", action="mark_role_deleted" ) ),
+ grids.GridOperation( "Undelete",
+ condition=( lambda item: item.deleted ),
+ allow_multiple=True,
+ url_args=dict( webapp="galaxy", action="undelete_role" ) ),
+ grids.GridOperation( "Purge",
+ condition=( lambda item: item.deleted ),
+ allow_multiple=True,
+ url_args=dict( webapp="galaxy", action="purge_role" ) ) ]
standard_filters = [
grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
]
- default_filter = dict( name="All", deleted="False", description="All", type="All" )
num_rows_per_page = 50
preserve_state = False
use_paging = True
@@ -210,6 +223,7 @@
return 0
# Grid definition
+ webapp = "galaxy"
title = "Groups"
model_class = model.Group
template='/admin/dataset_security/group/grid.mako'
@@ -217,7 +231,7 @@
columns = [
NameColumn( "Name",
key="name",
- link=( lambda item: dict( operation="Manage users and roles", id=item.id ) ),
+ link=( lambda item: dict( operation="Manage users and roles", id=item.id, webapp="galaxy" ) ),
model_class=model.Group,
attach_popup=True,
filterable="advanced" ),
@@ -233,18 +247,29 @@
visible=False,
filterable="standard" ) )
global_actions = [
- grids.GridAction( "Add new group", dict( controller='admin', action='groups', operation='create' ) )
+ grids.GridAction( "Add new group", dict( controller='admin', action='groups', operation='create', webapp="galaxy" ) )
]
- operations = [ grids.GridOperation( "Rename", condition=( lambda item: not item.deleted ), allow_multiple=False ),
- grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ), allow_multiple=True ),
- grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ), allow_multiple=True ),
- grids.GridOperation( "Purge", condition=( lambda item: item.deleted ), allow_multiple=True ) ]
+ operations = [ grids.GridOperation( "Rename",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="rename_group" ) ),
+ grids.GridOperation( "Delete",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=True,
+ url_args=dict( webapp="galaxy", action="mark_group_deleted" ) ),
+ grids.GridOperation( "Undelete",
+ condition=( lambda item: item.deleted ),
+ allow_multiple=True,
+ url_args=dict( webapp="galaxy", action="undelete_group" ) ),
+ grids.GridOperation( "Purge",
+ condition=( lambda item: item.deleted ),
+ allow_multiple=True,
+ url_args=dict( webapp="galaxy", action="purge_group" ) ) ]
standard_filters = [
grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
]
- default_filter = dict( name="All", deleted="False" )
num_rows_per_page = 50
preserve_state = False
use_paging = True
@@ -253,831 +278,8 @@
def build_initial_query( self, session ):
return session.query( self.model_class )
-class Admin( BaseController ):
+class AdminGalaxy( BaseController, Admin ):
user_list_grid = UserListGrid()
role_list_grid = RoleListGrid()
group_list_grid = GroupListGrid()
-
- @web.expose
- @web.require_admin
- def index( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- return trans.fill_template( '/admin/index.mako', message=message, status=status )
- @web.expose
- @web.require_admin
- def center( self, trans, **kwd ):
- return trans.fill_template( '/admin/center.mako' )
- @web.expose
- @web.require_admin
- def reload_tool( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- return trans.fill_template( '/admin/reload_tool.mako', toolbox=self.app.toolbox, message=message, status=status )
- @web.expose
- @web.require_admin
- def tool_reload( self, trans, tool_version=None, **kwd ):
- params = util.Params( kwd )
- tool_id = params.tool_id
- self.app.toolbox.reload( tool_id )
- message = 'Reloaded tool: ' + tool_id
- return trans.fill_template( '/admin/reload_tool.mako', toolbox=self.app.toolbox, message=message, status='done' )
-
- # Galaxy Role Stuff
- @web.expose
- @web.require_admin
- def roles( self, trans, **kwargs ):
- if 'operation' in kwargs:
- operation = kwargs['operation'].lower()
- if operation == "roles":
- return self.role( trans, **kwargs )
- if operation == "create":
- return self.create_role( trans, **kwargs )
- if operation == "delete":
- return self.mark_role_deleted( trans, **kwargs )
- if operation == "undelete":
- return self.undelete_role( trans, **kwargs )
- if operation == "purge":
- return self.purge_role( trans, **kwargs )
- if operation == "manage users and groups":
- return self.manage_users_and_groups_for_role( trans, **kwargs )
- if operation == "rename":
- return self.rename_role( trans, **kwargs )
- # Render the list view
- return self.role_list_grid( trans, **kwargs )
- @web.expose
- @web.require_admin
- def create_role( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- if params.get( 'create_role_button', False ):
- name = util.restore_text( params.name )
- description = util.restore_text( params.description )
- in_users = util.listify( params.get( 'in_users', [] ) )
- in_groups = util.listify( params.get( 'in_groups', [] ) )
- create_group_for_role = params.get( 'create_group_for_role', 'no' )
- if not name or not description:
- message = "Enter a valid name and a description"
- elif trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.name==name ).first():
- message = "A role with that name already exists"
- else:
- # Create the role
- role = trans.app.model.Role( name=name, description=description, type=trans.app.model.Role.types.ADMIN )
- trans.sa_session.add( role )
- # Create the UserRoleAssociations
- for user in [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in in_users ]:
- ura = trans.app.model.UserRoleAssociation( user, role )
- trans.sa_session.add( ura )
- # Create the GroupRoleAssociations
- for group in [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in in_groups ]:
- gra = trans.app.model.GroupRoleAssociation( group, role )
- trans.sa_session.add( gra )
- if create_group_for_role == 'yes':
- # Create the group
- group = trans.app.model.Group( name=name )
- trans.sa_session.add( group )
- message = "Group '%s' has been created, and role '%s' has been created with %d associated users and %d associated groups" % \
- ( group.name, role.name, len( in_users ), len( in_groups ) )
- else:
- message = "Role '%s' has been created with %d associated users and %d associated groups" % ( role.name, len( in_users ), len( in_groups ) )
- trans.sa_session.flush()
- trans.response.send_redirect( web.url_for( controller='admin', action='roles', message=util.sanitize_text( message ), status='done' ) )
- trans.response.send_redirect( web.url_for( controller='admin', action='create_role', message=util.sanitize_text( message ), status='error' ) )
- out_users = []
- for user in trans.sa_session.query( trans.app.model.User ) \
- .filter( trans.app.model.User.table.c.deleted==False ) \
- .order_by( trans.app.model.User.table.c.email ):
- out_users.append( ( user.id, user.email ) )
- out_groups = []
- for group in trans.sa_session.query( trans.app.model.Group ) \
- .filter( trans.app.model.Group.table.c.deleted==False ) \
- .order_by( trans.app.model.Group.table.c.name ):
- out_groups.append( ( group.id, group.name ) )
- return trans.fill_template( '/admin/dataset_security/role/role_create.mako',
- in_users=[],
- out_users=out_users,
- in_groups=[],
- out_groups=out_groups,
- message=message,
- status=status )
- @web.expose
- @web.require_admin
- def rename_role( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- role = get_role( trans, params.id )
- if params.get( 'rename_role_button', False ):
- old_name = role.name
- new_name = util.restore_text( params.name )
- new_description = util.restore_text( params.description )
- if not new_name:
- message = 'Enter a valid name'
- return trans.fill_template( '/admin/dataset_security/role/role_rename.mako', role=role, message=message, status='error' )
- elif trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.name==new_name ).first():
- message = 'A role with that name already exists'
- return trans.fill_template( '/admin/dataset_security/role/role_rename.mako', role=role, message=message, status='error' )
- else:
- role.name = new_name
- role.description = new_description
- trans.sa_session.add( role )
- trans.sa_session.flush()
- message = "Role '%s' has been renamed to '%s'" % ( old_name, new_name )
- return trans.response.send_redirect( web.url_for( action='roles', message=util.sanitize_text( message ), status='done' ) )
- return trans.fill_template( '/admin/dataset_security/role/role_rename.mako', role=role, message=message, status=status )
- @web.expose
- @web.require_admin
- def manage_users_and_groups_for_role( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- role = get_role( trans, params.id )
- if params.get( 'role_members_edit_button', False ):
- in_users = [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in util.listify( params.in_users ) ]
- for ura in role.users:
- user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id )
- if user not in in_users:
- # Delete DefaultUserPermissions for previously associated users that have been removed from the role
- for dup in user.default_permissions:
- if role == dup.role:
- trans.sa_session.delete( dup )
- # Delete DefaultHistoryPermissions for previously associated users that have been removed from the role
- for history in user.histories:
- for dhp in history.default_permissions:
- if role == dhp.role:
- trans.sa_session.delete( dhp )
- trans.sa_session.flush()
- in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( params.in_groups ) ]
- trans.app.security_agent.set_entity_role_associations( roles=[ role ], users=in_users, groups=in_groups )
- trans.sa_session.refresh( role )
- message = "Role '%s' has been updated with %d associated users and %d associated groups" % ( role.name, len( in_users ), len( in_groups ) )
- trans.response.send_redirect( web.url_for( action='roles', message=util.sanitize_text( message ), status=status ) )
- in_users = []
- out_users = []
- in_groups = []
- out_groups = []
- for user in trans.sa_session.query( trans.app.model.User ) \
- .filter( trans.app.model.User.table.c.deleted==False ) \
- .order_by( trans.app.model.User.table.c.email ):
- if user in [ x.user for x in role.users ]:
- in_users.append( ( user.id, user.email ) )
- else:
- out_users.append( ( user.id, user.email ) )
- for group in trans.sa_session.query( trans.app.model.Group ) \
- .filter( trans.app.model.Group.table.c.deleted==False ) \
- .order_by( trans.app.model.Group.table.c.name ):
- if group in [ x.group for x in role.groups ]:
- in_groups.append( ( group.id, group.name ) )
- else:
- out_groups.append( ( group.id, group.name ) )
- # Build a list of tuples that are LibraryDatasetDatasetAssociationss followed by a list of actions
- # whose DatasetPermissions is associated with the Role
- # [ ( LibraryDatasetDatasetAssociation [ action, action ] ) ]
- library_dataset_actions = {}
- for dp in role.dataset_actions:
- for ldda in trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ) \
- .filter( trans.app.model.LibraryDatasetDatasetAssociation.dataset_id==dp.dataset_id ):
- root_found = False
- folder_path = ''
- folder = ldda.library_dataset.folder
- while not root_found:
- folder_path = '%s / %s' % ( folder.name, folder_path )
- if not folder.parent:
- root_found = True
- else:
- folder = folder.parent
- folder_path = '%s %s' % ( folder_path, ldda.name )
- library = trans.sa_session.query( trans.app.model.Library ) \
- .filter( trans.app.model.Library.table.c.root_folder_id == folder.id ) \
- .first()
- if library not in library_dataset_actions:
- library_dataset_actions[ library ] = {}
- try:
- library_dataset_actions[ library ][ folder_path ].append( dp.action )
- except:
- library_dataset_actions[ library ][ folder_path ] = [ dp.action ]
- return trans.fill_template( '/admin/dataset_security/role/role.mako',
- role=role,
- in_users=in_users,
- out_users=out_users,
- in_groups=in_groups,
- out_groups=out_groups,
- library_dataset_actions=library_dataset_actions,
- message=message,
- status=status )
- @web.expose
- @web.require_admin
- def mark_role_deleted( self, trans, **kwd ):
- params = util.Params( kwd )
- role = get_role( trans, params.id )
- role.deleted = True
- trans.sa_session.add( role )
- trans.sa_session.flush()
- message = "Role '%s' has been marked as deleted." % role.name
- trans.response.send_redirect( web.url_for( action='roles', message=util.sanitize_text( message ), status='done' ) )
- @web.expose
- @web.require_admin
- def undelete_role( self, trans, **kwd ):
- params = util.Params( kwd )
- role = get_role( trans, params.id )
- role.deleted = False
- trans.sa_session.add( role )
- trans.sa_session.flush()
- message = "Role '%s' has been marked as not deleted." % role.name
- trans.response.send_redirect( web.url_for( action='roles', message=util.sanitize_text( message ), status='done' ) )
- @web.expose
- @web.require_admin
- def purge_role( self, trans, **kwd ):
- # This method should only be called for a Role that has previously been deleted.
- # Purging a deleted Role deletes all of the following from the database:
- # - UserRoleAssociations where role_id == Role.id
- # - DefaultUserPermissions where role_id == Role.id
- # - DefaultHistoryPermissions where role_id == Role.id
- # - GroupRoleAssociations where role_id == Role.id
- # - DatasetPermissionss where role_id == Role.id
- params = util.Params( kwd )
- role = get_role( trans, params.id )
- if not role.deleted:
- message = "Role '%s' has not been deleted, so it cannot be purged." % role.name
- trans.response.send_redirect( web.url_for( action='roles', message=util.sanitize_text( message ), status='error' ) )
- # Delete UserRoleAssociations
- for ura in role.users:
- user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id )
- # Delete DefaultUserPermissions for associated users
- for dup in user.default_permissions:
- if role == dup.role:
- trans.sa_session.delete( dup )
- # Delete DefaultHistoryPermissions for associated users
- for history in user.histories:
- for dhp in history.default_permissions:
- if role == dhp.role:
- trans.sa_session.delete( dhp )
- trans.sa_session.delete( ura )
- # Delete GroupRoleAssociations
- for gra in role.groups:
- trans.sa_session.delete( gra )
- # Delete DatasetPermissionss
- for dp in role.dataset_actions:
- trans.sa_session.delete( dp )
- trans.sa_session.flush()
- message = "The following have been purged from the database for role '%s': " % role.name
- message += "DefaultUserPermissions, DefaultHistoryPermissions, UserRoleAssociations, GroupRoleAssociations, DatasetPermissionss."
- trans.response.send_redirect( web.url_for( action='roles', message=util.sanitize_text( message ), status='done' ) )
-
- # Galaxy Group Stuff
- @web.expose
- @web.require_admin
- def groups( self, trans, **kwargs ):
- if 'operation' in kwargs:
- operation = kwargs['operation'].lower()
- if operation == "groups":
- return self.group( trans, **kwargs )
- if operation == "create":
- return self.create_group( trans, **kwargs )
- if operation == "delete":
- return self.mark_group_deleted( trans, **kwargs )
- if operation == "undelete":
- return self.undelete_group( trans, **kwargs )
- if operation == "purge":
- return self.purge_group( trans, **kwargs )
- if operation == "manage users and roles":
- return self.manage_users_and_roles_for_group( trans, **kwargs )
- if operation == "rename":
- return self.rename_group( trans, **kwargs )
- # Render the list view
- return self.group_list_grid( trans, **kwargs )
- @web.expose
- @web.require_admin
- def rename_group( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- group = get_group( trans, params.id )
- if params.get( 'rename_group_button', False ):
- old_name = group.name
- new_name = util.restore_text( params.name )
- if not new_name:
- message = 'Enter a valid name'
- return trans.fill_template( '/admin/dataset_security/group/group_rename.mako', group=group, message=message, status='error' )
- elif trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name==new_name ).first():
- message = 'A group with that name already exists'
- return trans.fill_template( '/admin/dataset_security/group/group_rename.mako', group=group, message=message, status='error' )
- else:
- group.name = new_name
- trans.sa_session.add( group )
- trans.sa_session.flush()
- message = "Group '%s' has been renamed to '%s'" % ( old_name, new_name )
- return trans.response.send_redirect( web.url_for( action='groups', message=util.sanitize_text( message ), status='done' ) )
- return trans.fill_template( '/admin/dataset_security/group/group_rename.mako', group=group, message=message, status=status )
- @web.expose
- @web.require_admin
- def manage_users_and_roles_for_group( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- group = get_group( trans, params.id )
- if params.get( 'group_roles_users_edit_button', False ):
- in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( params.in_roles ) ]
- in_users = [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in util.listify( params.in_users ) ]
- trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=in_roles, users=in_users )
- trans.sa_session.refresh( group )
- message += "Group '%s' has been updated with %d associated roles and %d associated users" % ( group.name, len( in_roles ), len( in_users ) )
- trans.response.send_redirect( web.url_for( action='groups', message=util.sanitize_text( message ), status=status ) )
- in_roles = []
- out_roles = []
- in_users = []
- out_users = []
- for role in trans.sa_session.query(trans.app.model.Role ) \
- .filter( trans.app.model.Role.table.c.deleted==False ) \
- .order_by( trans.app.model.Role.table.c.name ):
- if role in [ x.role for x in group.roles ]:
- in_roles.append( ( role.id, role.name ) )
- else:
- out_roles.append( ( role.id, role.name ) )
- for user in trans.sa_session.query( trans.app.model.User ) \
- .filter( trans.app.model.User.table.c.deleted==False ) \
- .order_by( trans.app.model.User.table.c.email ):
- if user in [ x.user for x in group.users ]:
- in_users.append( ( user.id, user.email ) )
- else:
- out_users.append( ( user.id, user.email ) )
- message += 'Group %s is currently associated with %d roles and %d users' % ( group.name, len( in_roles ), len( in_users ) )
- return trans.fill_template( '/admin/dataset_security/group/group.mako',
- group=group,
- in_roles=in_roles,
- out_roles=out_roles,
- in_users=in_users,
- out_users=out_users,
- message=message,
- status=status )
- @web.expose
- @web.require_admin
- def create_group( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- if params.get( 'create_group_button', False ):
- name = util.restore_text( params.name )
- in_users = util.listify( params.get( 'in_users', [] ) )
- in_roles = util.listify( params.get( 'in_roles', [] ) )
- if not name:
- message = "Enter a valid name"
- elif trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name==name ).first():
- message = "A group with that name already exists"
- else:
- # Create the group
- group = trans.app.model.Group( name=name )
- trans.sa_session.add( group )
- trans.sa_session.flush()
- # Create the UserRoleAssociations
- for user in [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in in_users ]:
- uga = trans.app.model.UserGroupAssociation( user, group )
- trans.sa_session.add( uga )
- trans.sa_session.flush()
- # Create the GroupRoleAssociations
- for role in [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in in_roles ]:
- gra = trans.app.model.GroupRoleAssociation( group, role )
- trans.sa_session.add( gra )
- trans.sa_session.flush()
- message = "Group '%s' has been created with %d associated users and %d associated roles" % ( name, len( in_users ), len( in_roles ) )
- trans.response.send_redirect( web.url_for( controller='admin', action='groups', message=util.sanitize_text( message ), status='done' ) )
- trans.response.send_redirect( web.url_for( controller='admin', action='create_group', message=util.sanitize_text( message ), status='error' ) )
- out_users = []
- for user in trans.sa_session.query( trans.app.model.User ) \
- .filter( trans.app.model.User.table.c.deleted==False ) \
- .order_by( trans.app.model.User.table.c.email ):
- out_users.append( ( user.id, user.email ) )
- out_roles = []
- for role in trans.sa_session.query( trans.app.model.Role ) \
- .filter( trans.app.model.Role.table.c.deleted==False ) \
- .order_by( trans.app.model.Role.table.c.name ):
- out_roles.append( ( role.id, role.name ) )
- return trans.fill_template( '/admin/dataset_security/group/group_create.mako',
- in_users=[],
- out_users=out_users,
- in_roles=[],
- out_roles=out_roles,
- message=message,
- status=status )
- @web.expose
- @web.require_admin
- def mark_group_deleted( self, trans, **kwd ):
- params = util.Params( kwd )
- group = get_group( trans, params.id )
- group.deleted = True
- trans.sa_session.add( group )
- trans.sa_session.flush()
- message = "Group '%s' has been marked as deleted." % group.name
- trans.response.send_redirect( web.url_for( action='groups', message=util.sanitize_text( message ), status='done' ) )
- @web.expose
- @web.require_admin
- def undelete_group( self, trans, **kwd ):
- params = util.Params( kwd )
- group = get_group( trans, params.id )
- group.deleted = False
- trans.sa_session.add( group )
- trans.sa_session.flush()
- message = "Group '%s' has been marked as not deleted." % group.name
- trans.response.send_redirect( web.url_for( action='groups', message=util.sanitize_text( message ), status='done' ) )
- @web.expose
- @web.require_admin
- def purge_group( self, trans, **kwd ):
- # This method should only be called for a Group that has previously been deleted.
- # Purging a deleted Group simply deletes all UserGroupAssociations and GroupRoleAssociations.
- params = util.Params( kwd )
- group = get_group( trans, params.id )
- if not group.deleted:
- # We should never reach here, but just in case there is a bug somewhere...
- message = "Group '%s' has not been deleted, so it cannot be purged." % group.name
- trans.response.send_redirect( web.url_for( action='groups', message=util.sanitize_text( message ), status='error' ) )
- # Delete UserGroupAssociations
- for uga in group.users:
- trans.sa_session.delete( uga )
- # Delete GroupRoleAssociations
- for gra in group.roles:
- trans.sa_session.delete( gra )
- trans.sa_session.flush()
- message = "The following have been purged from the database for group '%s': UserGroupAssociations, GroupRoleAssociations." % group.name
- trans.response.send_redirect( web.url_for( action='groups', message=util.sanitize_text( message ), status='done' ) )
-
- # Galaxy User Stuff
- @web.expose
- @web.require_admin
- def create_new_user( self, trans, **kwargs ):
- return trans.response.send_redirect( web.url_for( controller='user',
- action='create',
- admin_view=True ) )
- @web.expose
- @web.require_admin
- def reset_user_password( self, trans, **kwd ):
- id = kwd.get( 'id', None )
- if not id:
- message = "No user ids received for resetting passwords"
- trans.response.send_redirect( web.url_for( action='users', message=message, status='error' ) )
- ids = util.listify( id )
- if 'reset_user_password_button' in kwd:
- message = ''
- status = ''
- for user_id in ids:
- user = get_user( trans, user_id )
- password = kwd.get( 'password', None )
- confirm = kwd.get( 'confirm' , None )
- if len( password ) < 6:
- message = "Please use a password of at least 6 characters"
- status = 'error'
- break
- elif password != confirm:
- message = "Passwords do not match"
- status = 'error'
- break
- else:
- user.set_password_cleartext( password )
- trans.sa_session.add( user )
- trans.sa_session.flush()
- if not message and not status:
- message = "Passwords reset for %d users" % len( ids )
- status = 'done'
- trans.response.send_redirect( web.url_for( action='users',
- message=util.sanitize_text( message ),
- status=status ) )
- users = [ get_user( trans, user_id ) for user_id in ids ]
- if len( ids ) > 1:
- id=','.join( id )
- return trans.fill_template( '/admin/user/reset_password.mako',
- id=id,
- users=users,
- password='',
- confirm='' )
- @web.expose
- @web.require_admin
- def mark_user_deleted( self, trans, **kwd ):
- id = kwd.get( 'id', None )
- if not id:
- message = "No user ids received for deleting"
- trans.response.send_redirect( web.url_for( action='users', message=message, status='error' ) )
- ids = util.listify( id )
- message = "Deleted %d users: " % len( ids )
- for user_id in ids:
- user = get_user( trans, user_id )
- user.deleted = True
- trans.sa_session.add( user )
- trans.sa_session.flush()
- message += " %s " % user.email
- trans.response.send_redirect( web.url_for( action='users', message=util.sanitize_text( message ), status='done' ) )
- @web.expose
- @web.require_admin
- def undelete_user( self, trans, **kwd ):
- id = kwd.get( 'id', None )
- if not id:
- message = "No user ids received for undeleting"
- trans.response.send_redirect( web.url_for( action='users', message=message, status='error' ) )
- ids = util.listify( id )
- count = 0
- undeleted_users = ""
- for user_id in ids:
- user = get_user( trans, user_id )
- if user.deleted:
- user.deleted = False
- trans.sa_session.add( user )
- trans.sa_session.flush()
- count += 1
- undeleted_users += " %s" % user.email
- message = "Undeleted %d users: %s" % ( count, undeleted_users )
- trans.response.send_redirect( web.url_for( action='users',
- message=util.sanitize_text( message ),
- status='done' ) )
- @web.expose
- @web.require_admin
- def purge_user( self, trans, **kwd ):
- # This method should only be called for a User that has previously been deleted.
- # We keep the User in the database ( marked as purged ), and stuff associated
- # with the user's private role in case we want the ability to unpurge the user
- # some time in the future.
- # Purging a deleted User deletes all of the following:
- # - History where user_id = User.id
- # - HistoryDatasetAssociation where history_id = History.id
- # - Dataset where HistoryDatasetAssociation.dataset_id = Dataset.id
- # - UserGroupAssociation where user_id == User.id
- # - UserRoleAssociation where user_id == User.id EXCEPT FOR THE PRIVATE ROLE
- # Purging Histories and Datasets must be handled via the cleanup_datasets.py script
- id = kwd.get( 'id', None )
- if not id:
- message = "No user ids received for purging"
- trans.response.send_redirect( web.url_for( action='users',
- message=util.sanitize_text( message ),
- status='error' ) )
- ids = util.listify( id )
- message = "Purged %d users: " % len( ids )
- for user_id in ids:
- user = get_user( trans, user_id )
- if not user.deleted:
- # We should never reach here, but just in case there is a bug somewhere...
- message = "User '%s' has not been deleted, so it cannot be purged." % user.email
- trans.response.send_redirect( web.url_for( action='users',
- message=util.sanitize_text( message ),
- status='error' ) )
- private_role = trans.app.security_agent.get_private_user_role( user )
- # Delete History
- for h in user.active_histories:
- trans.sa_session.refresh( h )
- for hda in h.active_datasets:
- # Delete HistoryDatasetAssociation
- d = trans.sa_session.query( trans.app.model.Dataset ).get( hda.dataset_id )
- # Delete Dataset
- if not d.deleted:
- d.deleted = True
- trans.sa_session.add( d )
- hda.deleted = True
- trans.sa_session.add( hda )
- h.deleted = True
- trans.sa_session.add( h )
- # Delete UserGroupAssociations
- for uga in user.groups:
- trans.sa_session.delete( uga )
- # Delete UserRoleAssociations EXCEPT FOR THE PRIVATE ROLE
- for ura in user.roles:
- if ura.role_id != private_role.id:
- trans.sa_session.delete( ura )
- # Purge the user
- user.purged = True
- trans.sa_session.add( user )
- trans.sa_session.flush()
- message += "%s " % user.email
- trans.response.send_redirect( web.url_for( controller='admin',
- action='users',
- message=util.sanitize_text( message ),
- status='done' ) )
- @web.expose
- @web.require_admin
- def users( self, trans, **kwargs ):
- if 'operation' in kwargs:
- operation = kwargs['operation'].lower()
- if operation == "roles":
- return self.user( trans, **kwargs )
- if operation == "reset password":
- return self.reset_user_password( trans, **kwargs )
- if operation == "delete":
- return self.mark_user_deleted( trans, **kwargs )
- if operation == "undelete":
- return self.undelete_user( trans, **kwargs )
- if operation == "purge":
- return self.purge_user( trans, **kwargs )
- if operation == "create":
- return self.create_new_user( trans, **kwargs )
- if operation == "information":
- return self.user_info( trans, **kwargs )
- if operation == "manage roles and groups":
- return self.manage_roles_and_groups_for_user( trans, **kwargs )
- # Render the list view
- return self.user_list_grid( trans, **kwargs )
- @web.expose
- @web.require_admin
- def user_info( self, trans, **kwd ):
- '''
- This method displays the user information page which consists of login
- information, public username, reset password & other user information
- obtained during registration
- '''
- user_id = kwd.get( 'id', None )
- if not user_id:
- message += "Invalid user id (%s) received" % str( user_id )
- trans.response.send_redirect( web.url_for( controller='admin',
- action='users',
- message=util.sanitize_text( message ),
- status='error' ) )
- user = get_user( trans, user_id )
- return trans.response.send_redirect( web.url_for( controller='user',
- action='show_info',
- user_id=user.id,
- admin_view=True,
- **kwd ) )
- @web.expose
- @web.require_admin
- def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
- """Return autocomplete data for user emails"""
- ac_data = ""
- for user in trans.sa_session.query( User ).filter_by( deleted=False ).filter( func.lower( User.email ).like( q.lower() + "%" ) ):
- ac_data = ac_data + user.email + "\n"
- return ac_data
- @web.expose
- @web.require_admin
- def manage_roles_and_groups_for_user( self, trans, **kwd ):
- user_id = kwd.get( 'id', None )
- message = ''
- status = ''
- if not user_id:
- message += "Invalid user id (%s) received" % str( user_id )
- trans.response.send_redirect( web.url_for( controller='admin',
- action='users',
- message=util.sanitize_text( message ),
- status='error' ) )
- user = get_user( trans, user_id )
- private_role = trans.app.security_agent.get_private_user_role( user )
- if kwd.get( 'user_roles_groups_edit_button', False ):
- # Make sure the user is not dis-associating himself from his private role
- out_roles = kwd.get( 'out_roles', [] )
- if out_roles:
- out_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( out_roles ) ]
- if private_role in out_roles:
- message += "You cannot eliminate a user's private role association. "
- status = 'error'
- in_roles = kwd.get( 'in_roles', [] )
- if in_roles:
- in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( in_roles ) ]
- out_groups = kwd.get( 'out_groups', [] )
- if out_groups:
- out_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( out_groups ) ]
- in_groups = kwd.get( 'in_groups', [] )
- if in_groups:
- in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( in_groups ) ]
- if in_roles:
- trans.app.security_agent.set_entity_user_associations( users=[ user ], roles=in_roles, groups=in_groups )
- trans.sa_session.refresh( user )
- message += "User '%s' has been updated with %d associated roles and %d associated groups (private roles are not displayed)" % \
- ( user.email, len( in_roles ), len( in_groups ) )
- trans.response.send_redirect( web.url_for( action='users',
- message=util.sanitize_text( message ),
- status='done' ) )
- in_roles = []
- out_roles = []
- in_groups = []
- out_groups = []
- for role in trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.deleted==False ) \
- .order_by( trans.app.model.Role.table.c.name ):
- if role in [ x.role for x in user.roles ]:
- in_roles.append( ( role.id, role.name ) )
- elif role.type != trans.app.model.Role.types.PRIVATE:
- # There is a 1 to 1 mapping between a user and a PRIVATE role, so private roles should
- # not be listed in the roles form fields, except for the currently selected user's private
- # role, which should always be in in_roles. The check above is added as an additional
- # precaution, since for a period of time we were including private roles in the form fields.
- out_roles.append( ( role.id, role.name ) )
- for group in trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.deleted==False ) \
- .order_by( trans.app.model.Group.table.c.name ):
- if group in [ x.group for x in user.groups ]:
- in_groups.append( ( group.id, group.name ) )
- else:
- out_groups.append( ( group.id, group.name ) )
- message += "User '%s' is currently associated with %d roles and is a member of %d groups" % \
- ( user.email, len( in_roles ), len( in_groups ) )
- if not status:
- status = 'done'
- return trans.fill_template( '/admin/user/user.mako',
- user=user,
- in_roles=in_roles,
- out_roles=out_roles,
- in_groups=in_groups,
- out_groups=out_groups,
- message=message,
- status=status )
- @web.expose
- @web.require_admin
- def memdump( self, trans, ids = 'None', sorts = 'None', pages = 'None', new_id = None, new_sort = None, **kwd ):
- if self.app.memdump is None:
- return trans.show_error_message( "Memdump is not enabled (set <code>use_memdump = True</code> in universe_wsgi.ini)" )
- heap = self.app.memdump.get()
- p = util.Params( kwd )
- msg = None
- if p.dump:
- heap = self.app.memdump.get( update = True )
- msg = "Heap dump complete"
- elif p.setref:
- self.app.memdump.setref()
- msg = "Reference point set (dump to see delta from this point)"
- ids = ids.split( ',' )
- sorts = sorts.split( ',' )
- if new_id is not None:
- ids.append( new_id )
- sorts.append( 'None' )
- elif new_sort is not None:
- sorts[-1] = new_sort
- breadcrumb = "<a href='%s' class='breadcrumb'>heap</a>" % web.url_for()
- # new lists so we can assemble breadcrumb links
- new_ids = []
- new_sorts = []
- for id, sort in zip( ids, sorts ):
- new_ids.append( id )
- if id != 'None':
- breadcrumb += "<a href='%s' class='breadcrumb'>[%s]</a>" % ( web.url_for( ids=','.join( new_ids ), sorts=','.join( new_sorts ) ), id )
- heap = heap[int(id)]
- new_sorts.append( sort )
- if sort != 'None':
- breadcrumb += "<a href='%s' class='breadcrumb'>.by('%s')</a>" % ( web.url_for( ids=','.join( new_ids ), sorts=','.join( new_sorts ) ), sort )
- heap = heap.by( sort )
- ids = ','.join( new_ids )
- sorts = ','.join( new_sorts )
- if p.theone:
- breadcrumb += ".theone"
- heap = heap.theone
- return trans.fill_template( '/admin/memdump.mako', heap = heap, ids = ids, sorts = sorts, breadcrumb = breadcrumb, msg = msg )
-
- @web.expose
- @web.require_admin
- def jobs( self, trans, stop = [], stop_msg = None, cutoff = 180, **kwd ):
- deleted = []
- msg = None
- status = None
- job_ids = util.listify( stop )
- if job_ids and stop_msg in [ None, '' ]:
- msg = 'Please enter an error message to display to the user describing why the job was terminated'
- status = 'error'
- elif job_ids:
- if stop_msg[-1] not in string.punctuation:
- stop_msg += '.'
- for job_id in job_ids:
- trans.app.job_manager.job_stop_queue.put( job_id, error_msg="This job was stopped by an administrator: %s For more information or help" % stop_msg )
- deleted.append( str( job_id ) )
- if deleted:
- msg = 'Queued job'
- if len( deleted ) > 1:
- msg += 's'
- msg += ' for deletion: '
- msg += ', '.join( deleted )
- status = 'done'
- cutoff_time = datetime.utcnow() - timedelta( seconds=int( cutoff ) )
- jobs = trans.sa_session.query( trans.app.model.Job ) \
- .filter( and_( trans.app.model.Job.table.c.update_time < cutoff_time,
- or_( trans.app.model.Job.state == trans.app.model.Job.states.NEW,
- trans.app.model.Job.state == trans.app.model.Job.states.QUEUED,
- trans.app.model.Job.state == trans.app.model.Job.states.RUNNING,
- trans.app.model.Job.state == trans.app.model.Job.states.UPLOAD ) ) ) \
- .order_by( trans.app.model.Job.table.c.update_time.desc() )
- last_updated = {}
- for job in jobs:
- delta = datetime.utcnow() - job.update_time
- if delta > timedelta( minutes=60 ):
- last_updated[job.id] = '%s hours' % int( delta.seconds / 60 / 60 )
- else:
- last_updated[job.id] = '%s minutes' % int( delta.seconds / 60 )
- return trans.fill_template( '/admin/jobs.mako', jobs = jobs, last_updated = last_updated, cutoff = cutoff, msg = msg, status = status )
-
-## ---- Utility methods -------------------------------------------------------
-
-def get_user( trans, id ):
- """Get a User from the database by id."""
- # Load user from database
- id = trans.security.decode_id( id )
- user = trans.sa_session.query( model.User ).get( id )
- if not user:
- return trans.show_error_message( "User not found for id (%s)" % str( id ) )
- return user
-def get_role( trans, id ):
- """Get a Role from the database by id."""
- # Load user from database
- id = trans.security.decode_id( id )
- role = trans.sa_session.query( model.Role ).get( id )
- if not role:
- return trans.show_error_message( "Role not found for id (%s)" % str( id ) )
- return role
-def get_group( trans, id ):
- """Get a Group from the database by id."""
- # Load user from database
- id = trans.security.decode_id( id )
- group = trans.sa_session.query( model.Group ).get( id )
- if not group:
- return trans.show_error_message( "Group not found for id (%s)" % str( id ) )
- return group
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Wed Apr 21 11:35:21 2010 -0400
@@ -468,7 +468,7 @@
dataset = self.get_dataset( trans, slug, False, True )
if dataset:
truncated, dataset_data = self.get_data( dataset, preview )
- dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )
+ dataset.annotation = self.get_item_annotation_str( trans, dataset.history.user, dataset )
return trans.fill_template_mako( "/dataset/display.mako", item=dataset, item_data=dataset_data, truncated=truncated )
else:
raise web.httpexceptions.HTTPNotFound()
@@ -482,7 +482,7 @@
raise web.httpexceptions.HTTPNotFound()
truncated, dataset_data = self.get_data( dataset, preview=True )
# Get annotation.
- dataset.annotation = self.get_item_annotation_str( trans.sa_session, trans.get_user(), dataset )
+ dataset.annotation = self.get_item_annotation_str( trans, trans.user, dataset )
return trans.stream_template_mako( "/dataset/item_content.mako", item=dataset, item_data=dataset_data, truncated=truncated )
@web.expose
@@ -502,7 +502,7 @@
dataset = self.get_dataset( trans, id, False, True )
if not dataset:
web.httpexceptions.HTTPNotFound()
- return self.get_item_annotation_str( trans.sa_session, trans.get_user(), dataset )
+ return self.get_item_annotation_str( trans, trans.user, dataset )
@web.expose
def display_at( self, trans, dataset_id, filename=None, **kwd ):
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/forms.py
--- a/lib/galaxy/web/controllers/forms.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/forms.py Wed Apr 21 11:35:21 2010 -0400
@@ -1,7 +1,7 @@
from galaxy.web.base.controller import *
from galaxy.model.orm import *
from galaxy.datatypes import sniff
-from galaxy import util
+from galaxy import model, util
import logging, os, sys
from galaxy.web.form_builder import *
from galaxy.tools.parameters.basic import parameter_types
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/history.py Wed Apr 21 11:35:21 2010 -0400
@@ -1,6 +1,6 @@
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, iff, grids
-from galaxy import util
+from galaxy import model, util
from galaxy.util.odict import odict
from galaxy.model.mapping import desc
from galaxy.model.orm import *
@@ -490,9 +490,9 @@
# Get datasets.
datasets = self.get_history_datasets( trans, history )
# Get annotations.
- history.annotation = self.get_item_annotation_str( trans.sa_session, history.user, history )
+ history.annotation = self.get_item_annotation_str( trans, history.user, history )
for dataset in datasets:
- dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
+ dataset.annotation = self.get_item_annotation_str( trans, history.user, dataset )
return trans.stream_template_mako( "/history/item_content.mako", item = history, item_data = datasets )
@web.expose
@@ -613,9 +613,9 @@
# Get datasets.
datasets = self.get_history_datasets( trans, history )
# Get annotations.
- history.annotation = self.get_item_annotation_str( trans.sa_session, history.user, history )
+ history.annotation = self.get_item_annotation_str( trans, history.user, history )
for dataset in datasets:
- dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
+ dataset.annotation = self.get_item_annotation_str( trans, history.user, dataset )
return trans.stream_template_mako( "history/display.mako",
item = history, item_data = datasets )
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/library_admin.py Wed Apr 21 11:35:21 2010 -0400
@@ -1,5 +1,5 @@
import sys
-from galaxy import util
+from galaxy import model, util
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.model.orm import *
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/page.py
--- a/lib/galaxy/web/controllers/page.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/page.py Wed Apr 21 11:35:21 2010 -0400
@@ -1,3 +1,4 @@
+from galaxy import model
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, grids
from galaxy.util.sanitize_html import sanitize_html, _BaseHTMLProcessor
@@ -406,7 +407,7 @@
else:
page_title = page.title
page_slug = page.slug
- page_annotation = self.get_item_annotation_str( trans.sa_session, trans.get_user(), page )
+ page_annotation = self.get_item_annotation_str( trans, trans.user, page )
if not page_annotation:
page_annotation = ""
return trans.show_form(
@@ -527,7 +528,7 @@
annotations = from_json_string( annotations )
for annotation_dict in annotations:
item_id = trans.security.decode_id( annotation_dict[ 'item_id' ] )
- item_class = self.get_class( annotation_dict[ 'item_class' ] )
+ item_class = self.get_class( trans, annotation_dict[ 'item_class' ] )
item = trans.sa_session.query( item_class ).filter_by( id=item_id ).first()
if not item:
raise RuntimeError( "cannot find annotated item" )
@@ -693,28 +694,28 @@
def _get_embed_html( self, trans, item_class, item_id ):
""" Returns HTML for embedding an item in a page. """
- item_class = self.get_class( item_class )
+ item_class = self.get_class( trans, item_class )
if item_class == model.History:
history = self.get_history( trans, item_id, False, True )
- history.annotation = self.get_item_annotation_str( trans.sa_session, history.user, history )
+ history.annotation = self.get_item_annotation_str( trans, history.user, history )
if history:
datasets = self.get_history_datasets( trans, history )
return trans.fill_template( "history/embed.mako", item=history, item_data=datasets )
elif item_class == model.HistoryDatasetAssociation:
dataset = self.get_dataset( trans, item_id, False, True )
- dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )
+ dataset.annotation = self.get_item_annotation_str( trans, dataset.history.user, dataset )
if dataset:
data = self.get_data( dataset )
return trans.fill_template( "dataset/embed.mako", item=dataset, item_data=data )
elif item_class == model.StoredWorkflow:
workflow = self.get_stored_workflow( trans, item_id, False, True )
- workflow.annotation = self.get_item_annotation_str( trans.sa_session, workflow.user, workflow )
+ workflow.annotation = self.get_item_annotation_str( trans, workflow.user, workflow )
if workflow:
self.get_stored_workflow_steps( trans, workflow )
return trans.fill_template( "workflow/embed.mako", item=workflow, item_data=workflow.latest_workflow.steps )
elif item_class == model.Visualization:
visualization = self.get_visualization( trans, item_id, False, True )
- visualization.annotation = self.get_item_annotation_str( trans.sa_session, visualization.user, visualization )
+ visualization.annotation = self.get_item_annotation_str( trans, visualization.user, visualization )
if visualization:
return trans.fill_template( "visualization/embed.mako", item=visualization, item_data=None )
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/requests.py Wed Apr 21 11:35:21 2010 -0400
@@ -2,7 +2,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.model.orm import *
from galaxy.datatypes import sniff
-from galaxy import util
+from galaxy import model, util
from galaxy.util.streamball import StreamBall
from galaxy.util.odict import odict
import logging, tempfile, zipfile, tarfile, os, sys
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Wed Apr 21 11:35:21 2010 -0400
@@ -2,7 +2,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.model.orm import *
from galaxy.datatypes import sniff
-from galaxy import util
+from galaxy import model, util
from galaxy.util.streamball import StreamBall
import logging, tempfile, zipfile, tarfile, os, sys, subprocess
from galaxy.web.form_builder import *
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/root.py Wed Apr 21 11:35:21 2010 -0400
@@ -72,7 +72,7 @@
datasets = self.get_history_datasets( trans, history, show_deleted )
return trans.stream_template_mako( "root/history.mako",
history = history,
- annotation = self.get_item_annotation_str( trans.sa_session, trans.get_user(), history ),
+ annotation = self.get_item_annotation_str( trans, trans.user, history ),
datasets = datasets,
hda_id = hda_id,
show_deleted = show_deleted )
@@ -368,7 +368,7 @@
status = 'done'
return trans.fill_template( "/dataset/edit_attributes.mako",
data=data,
- data_annotation=self.get_item_annotation_str( trans.sa_session, trans.get_user(), data ),
+ data_annotation=self.get_item_annotation_str( trans, trans.user, data ),
datatypes=ldatatypes,
current_user_roles=current_user_roles,
all_roles=all_roles,
@@ -392,7 +392,7 @@
if data.parent_id is None and len( data.creating_job_associations ) > 0:
# Mark associated job for deletion
job = data.creating_job_associations[0].job
- if job.state in [ model.Job.states.QUEUED, model.Job.states.RUNNING, model.Job.states.NEW ]:
+ if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]:
# Are *all* of the job's other output datasets deleted?
if job.check_if_output_datasets_deleted():
job.mark_deleted()
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/tag.py
--- a/lib/galaxy/web/controllers/tag.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/tag.py Wed Apr 21 11:35:21 2010 -0400
@@ -72,7 +72,7 @@
if item_id is not None:
item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
user = trans.user
- item_class = self.get_class( item_class )
+ item_class = self.get_class( trans, item_class )
q = q.encode( 'utf-8' )
if q.find( ":" ) == -1:
return self._get_tag_autocomplete_names( trans, q, limit, timestamp, user, item, item_class )
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Wed Apr 21 11:35:21 2010 -0400
@@ -16,6 +16,7 @@
import math, re, logging, glob
log = logging.getLogger(__name__)
+from galaxy import model
from galaxy.util.json import to_json_string, from_json_string
from galaxy.web.base.controller import *
from galaxy.web.framework import simplejson
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/user.py Wed Apr 21 11:35:21 2010 -0400
@@ -103,8 +103,9 @@
status='done',
active_view="user" )
@web.expose
- def create( self, trans, webapp='galaxy', redirect_url='', refresh_frames=[], **kwd ):
+ def create( self, trans, redirect_url='', refresh_frames=[], **kwd ):
params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
use_panels = util.string_as_bool( kwd.get( 'use_panels', True ) )
email = util.restore_text( params.get( 'email', '' ) )
# Do not sanitize passwords, so take from kwd
@@ -165,7 +166,7 @@
action='users',
message='Created new user account (%s)' % user.email,
status='done' ) )
- else:
+ elif not admin_view:
# Must be logging into the community space webapp
trans.handle_user_login( user, webapp )
if not error:
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/visualization.py Wed Apr 21 11:35:21 2010 -0400
@@ -1,3 +1,4 @@
+from galaxy import model
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, grids, iff
from galaxy.util.sanitize_html import sanitize_html
@@ -366,7 +367,7 @@
if visualization.slug is None:
self.create_item_slug( trans.sa_session, visualization )
visualization_slug = visualization.slug
- visualization_annotation = self.get_item_annotation_str( trans.sa_session, trans.get_user(), visualization )
+ visualization_annotation = self.get_item_annotation_str( trans, trans.user, visualization )
if not visualization_annotation:
visualization_annotation = ""
return trans.show_form(
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/controllers/workflow.py Wed Apr 21 11:35:21 2010 -0400
@@ -14,6 +14,7 @@
from galaxy.util.sanitize_html import sanitize_html
from galaxy.util.topsort import topsort, topsort_levels, CycleError
from galaxy.workflow.modules import *
+from galaxy import model
from galaxy.model.mapping import desc
from galaxy.model.orm import *
@@ -176,9 +177,9 @@
# Get data for workflow's steps.
self.get_stored_workflow_steps( trans, stored_workflow )
# Get annotations.
- stored_workflow.annotation = self.get_item_annotation_str( trans.sa_session, stored_workflow.user, stored_workflow )
+ stored_workflow.annotation = self.get_item_annotation_str( trans, stored_workflow.user, stored_workflow )
for step in stored_workflow.latest_workflow.steps:
- step.annotation = self.get_item_annotation_str( trans.sa_session, stored_workflow.user, step )
+ step.annotation = self.get_item_annotation_str( trans, stored_workflow.user, step )
return trans.fill_template_mako( "workflow/display.mako", item=stored_workflow, item_data=stored_workflow.latest_workflow.steps )
@web.expose
@@ -192,9 +193,9 @@
# Get data for workflow's steps.
self.get_stored_workflow_steps( trans, stored )
# Get annotations.
- stored.annotation = self.get_item_annotation_str( trans.sa_session, stored.user, stored )
+ stored.annotation = self.get_item_annotation_str( trans, stored.user, stored )
for step in stored.latest_workflow.steps:
- step.annotation = self.get_item_annotation_str( trans.sa_session, stored.user, step )
+ step.annotation = self.get_item_annotation_str( trans, stored.user, step )
return trans.stream_template_mako( "/workflow/item_content.mako", item = stored, item_data = stored.latest_workflow.steps )
@web.expose
@@ -330,7 +331,7 @@
return trans.fill_template( 'workflow/edit_attributes.mako',
stored=stored,
- annotation=self.get_item_annotation_str( trans.sa_session, trans.get_user(), stored )
+ annotation=self.get_item_annotation_str( trans, trans.user, stored )
)
@web.expose
@@ -501,7 +502,7 @@
if not id:
error( "Invalid workflow id" )
stored = self.get_stored_workflow( trans, id )
- return trans.fill_template( "workflow/editor.mako", stored=stored, annotation=self.get_item_annotation_str( trans.sa_session, trans.get_user(), stored ) )
+ return trans.fill_template( "workflow/editor.mako", stored=stored, annotation=self.get_item_annotation_str( trans, trans.user, stored ) )
@web.json
def editor_form_post( self, trans, type='tool', tool_id=None, annotation=None, **incoming ):
@@ -580,7 +581,7 @@
# as a dictionary not just the values
data['upgrade_messages'][step.order_index] = upgrade_message.values()
# Get user annotation.
- step_annotation = self.get_item_annotation_obj ( trans.sa_session, trans.get_user(), step )
+ step_annotation = self.get_item_annotation_obj ( trans, trans.user, step )
annotation_str = ""
if step_annotation:
annotation_str = step_annotation.annotation
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/web/framework/helpers/grids.py Wed Apr 21 11:35:21 2010 -0400
@@ -1,7 +1,5 @@
-from galaxy.model import *
from galaxy.model.orm import *
-
-from galaxy.web.base import controller
+from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import iff
from galaxy.web import url_for
from galaxy.util.json import from_json_string, to_json_string
@@ -15,6 +13,7 @@
"""
Specifies the content and format of a grid (data table).
"""
+ webapp = None
title = ""
exposed = True
model_class = None
@@ -43,6 +42,7 @@
self.has_multiple_item_operations = True
break
def __call__( self, trans, **kwargs ):
+ webapp = kwargs.get( 'webapp', 'galaxy' )
status = kwargs.get( 'status', None )
message = kwargs.get( 'message', None )
session = trans.sa_session
@@ -193,7 +193,8 @@
params = cur_filter_dict.copy()
params['sort'] = sort_key
params['async'] = ( 'async' in kwargs )
- trans.log_action( trans.get_user(), unicode( "grid.view"), context, params )
+ params['webapp'] = webapp
+ trans.log_action( trans.get_user(), unicode( "grid.view" ), context, params )
# Render grid.
def url( *args, **kwargs ):
# Only include sort/filter arguments if not linking to another
@@ -214,8 +215,8 @@
else:
new_kwargs[ 'id' ] = trans.security.encode_id( id )
return url_for( **new_kwargs )
- use_panels = ( 'use_panels' in kwargs ) and ( kwargs['use_panels'] == True )
- async_request = ( ( self.use_async ) and ( 'async' in kwargs ) and ( kwargs['async'] in [ 'True', 'true'] ) )
+ use_panels = ( 'use_panels' in kwargs ) and ( kwargs['use_panels'] in [ True, 'True', 'true' ] )
+ async_request = ( ( self.use_async ) and ( 'async' in kwargs ) and ( kwargs['async'] in [ True, 'True', 'true'] ) )
return trans.fill_template( iff( async_request, self.async_template, self.template),
grid=self,
query=query,
@@ -232,6 +233,7 @@
message_type = status,
message = message,
use_panels=use_panels,
+ webapp=self.webapp,
# Pass back kwargs so that grid template can set and use args without
# grid explicitly having to pass them.
kwargs=kwargs )
@@ -333,7 +335,7 @@
model_class_key_field = getattr( self.model_class, self.key )
return func.lower( model_class_key_field ).like( "%" + a_filter.lower() + "%" )
-class OwnerAnnotationColumn( TextColumn, controller.UsesAnnotations ):
+class OwnerAnnotationColumn( TextColumn, UsesAnnotations ):
""" Column that displays and filters item owner's annotations. """
def __init__( self, col_name, key, model_class, model_annotation_association_class, filterable ):
GridColumn.__init__( self, col_name, key=key, model_class=model_class, filterable=filterable )
@@ -341,7 +343,7 @@
self.model_annotation_association_class = model_annotation_association_class
def get_value( self, trans, grid, item ):
""" Returns item annotation. """
- annotation = self.get_item_annotation_str( trans.sa_session, item.user, item )
+ annotation = self.get_item_annotation_str( trans, item.user, item )
return iff( annotation, annotation, "" )
def get_single_filter( self, user, a_filter ):
""" Filter by annotation and annotation owner. """
@@ -515,7 +517,8 @@
return accepted_filters
class GridOperation( object ):
- def __init__( self, label, key=None, condition=None, allow_multiple=True, allow_popup=True, target=None, url_args=None, async_compatible=False, confirm=None ):
+ def __init__( self, label, key=None, condition=None, allow_multiple=True, allow_popup=True,
+ target=None, url_args=None, async_compatible=False, confirm=None ):
self.label = label
self.key = key
self.allow_multiple = allow_multiple
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/webapps/community/base/controller.py
--- a/lib/galaxy/webapps/community/base/controller.py Wed Apr 21 10:41:30 2010 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,24 +0,0 @@
-"""Contains functionality needed in every webapp interface"""
-import os, time, logging
-# Pieces of Galaxy to make global in every controller
-from galaxy import config, tools, web, util
-from galaxy.web import error, form, url_for
-from galaxy.webapps.community import model
-from galaxy.model.orm import *
-
-from Cheetah.Template import Template
-
-log = logging.getLogger( __name__ )
-
-class BaseController( object ):
- """Base class for Galaxy webapp application controllers."""
- def __init__( self, app ):
- """Initialize an interface for application 'app'"""
- self.app = app
- def get_class( self, class_name ):
- """ Returns the class object that a string denotes. Without this method, we'd have to do eval(<class_name>). """
- if class_name == 'Tool':
- item_class = model.Tool
- else:
- item_class = None
- return item_class
\ No newline at end of file
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/webapps/community/buildapp.py
--- a/lib/galaxy/webapps/community/buildapp.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/webapps/community/buildapp.py Wed Apr 21 11:35:21 2010 -0400
@@ -25,7 +25,8 @@
Search for controllers in the 'galaxy.webapps.controllers' module and add
them to the webapp.
"""
- from galaxy.webapps.community.base.controller import BaseController
+ from galaxy.web.base.controller import BaseController
+ from galaxy.web.base.controller import ControllerUnavailable
import galaxy.webapps.community.controllers
controller_dir = galaxy.webapps.community.controllers.__path__[0]
for fname in os.listdir( controller_dir ):
@@ -40,12 +41,11 @@
T = getattr( module, key )
if isclass( T ) and T is not BaseController and issubclass( T, BaseController ):
webapp.add_controller( name, T( app ) )
- from galaxy.web.base.controller import BaseController
import galaxy.web.controllers
controller_dir = galaxy.web.controllers.__path__[0]
for fname in os.listdir( controller_dir ):
# TODO: fix this if we decide to use, we don't need to inspect all controllers...
- if fname.startswith( 'user' ) and fname.endswith( ".py" ):
+ if fname.startswith( 'user' ) and fname.endswith( ".py" ):
name = fname[:-3]
module_name = "galaxy.web.controllers." + name
module = __import__( module_name )
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py Wed Apr 21 10:41:30 2010 -0400
+++ b/lib/galaxy/webapps/community/config.py Wed Apr 21 11:35:21 2010 -0400
@@ -79,15 +79,12 @@
for path in self.root, self.file_path, self.template_path:
if not os.path.isdir( path ):
raise ConfigurationError("Directory does not exist: %s" % path )
- def is_admin_user( self,user ):
+ def is_admin_user( self, user ):
"""
Determine if the provided user is listed in `admin_users`.
-
- NOTE: This is temporary, admin users will likely be specified in the
- database in the future.
"""
admin_users = self.get( "admin_users", "" ).split( "," )
- return ( user is not None and user.email in admin_users )
+ return user is not None and user.email in admin_users
def get_database_engine_options( kwargs ):
"""
diff -r 076f572d7c9d -r d6fddb034db7 lib/galaxy/webapps/community/controllers/admin.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/webapps/community/controllers/admin.py Wed Apr 21 11:35:21 2010 -0400
@@ -0,0 +1,285 @@
+from galaxy.web.base.controller import *
+#from galaxy.web.controllers.admin import get_user, get_group, get_role
+from galaxy.webapps.community import model
+from galaxy.model.orm import *
+from galaxy.web.framework.helpers import time_ago, iff, grids
+import logging
+log = logging.getLogger( __name__ )
+
+class UserListGrid( grids.Grid ):
+ class EmailColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, user ):
+ return user.email
+ class UserNameColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, user ):
+ if user.username:
+ return user.username
+ return 'not set'
+ class StatusColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, user ):
+ if user.purged:
+ return "purged"
+ elif user.deleted:
+ return "deleted"
+ return ""
+ class GroupsColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, user ):
+ if user.groups:
+ return len( user.groups )
+ return 0
+ class RolesColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, user ):
+ if user.roles:
+ return len( user.roles )
+ return 0
+ class ExternalColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, user ):
+ if user.external:
+ return 'yes'
+ return 'no'
+ class LastLoginColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, user ):
+ if user.galaxy_sessions:
+ return self.format( user.galaxy_sessions[ 0 ].update_time )
+ return 'never'
+
+ log.debug("####In UserListGrid, in community" )
+ # Grid definition
+ webapp = "community"
+ title = "Users"
+ model_class = model.User
+ template='/admin/user/grid.mako'
+ default_sort_key = "email"
+ columns = [
+ EmailColumn( "Email",
+ key="email",
+ model_class=model.User,
+ link=( lambda item: dict( operation="information", id=item.id, webapp="community" ) ),
+ attach_popup=True,
+ filterable="advanced" ),
+ UserNameColumn( "User Name",
+ key="username",
+ model_class=model.User,
+ attach_popup=False,
+ filterable="advanced" ),
+ GroupsColumn( "Groups", attach_popup=False ),
+ RolesColumn( "Roles", attach_popup=False ),
+ ExternalColumn( "External", attach_popup=False ),
+ LastLoginColumn( "Last Login", format=time_ago ),
+ StatusColumn( "Status", attach_popup=False ),
+ # Columns that are valid for filtering but are not visible.
+ grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+ ]
+ columns.append( grids.MulticolFilterColumn( "Search",
+ cols_to_filter=[ columns[0], columns[1] ],
+ key="free-text-search",
+ visible=False,
+ filterable="standard" ) )
+ global_actions = [
+ grids.GridAction( "Create new user",
+ dict( controller='admin', action='users', operation='create', webapp="community" ) )
+ ]
+ operations = [
+ grids.GridOperation( "Manage Roles and Groups",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=False,
+ url_args=dict( webapp="community", action="manage_roles_and_groups_for_user" ) ),
+ grids.GridOperation( "Reset Password",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=True,
+ allow_popup=False,
+ url_args=dict( webapp="community", action="reset_user_password" ) )
+ ]
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/076f572d7c9d
changeset: 3674:076f572d7c9d
user: rc
date: Wed Apr 21 10:41:30 2010 -0400
description:
lims:
- data transfer now uses rabbitmq
- datasets can now be renamed before transfering from the sequencer
- data transfer code refactored
diffstat:
lib/galaxy/config.py | 4 +
lib/galaxy/model/__init__.py | 28 +-
lib/galaxy/web/controllers/requests_admin.py | 121 +++++++-
lib/galaxy/web/framework/__init__.py | 1 +
run_galaxy_listener.sh | 2 +-
scripts/galaxy_messaging/client/amqp_publisher.py | 4 +-
scripts/galaxy_messaging/server/amqp_consumer.py | 66 +++-
scripts/galaxy_messaging/server/data_transfer.py | 241 +++++++++-------
scripts/galaxy_messaging/server/galaxydb_interface.py | 17 +-
scripts/galaxy_messaging/server/galaxyweb_interface.py | 132 +++++++++
templates/admin/requests/dataset.mako | 71 +++++
templates/admin/requests/get_data.mako | 67 ++-
universe_wsgi.ini.sample | 2 +-
13 files changed, 577 insertions(+), 179 deletions(-)
diffs (1118 lines):
diff -r 207d0d70483b -r 076f572d7c9d lib/galaxy/config.py
--- a/lib/galaxy/config.py Tue Apr 20 15:36:03 2010 -0400
+++ b/lib/galaxy/config.py Wed Apr 21 10:41:30 2010 -0400
@@ -123,6 +123,10 @@
self.enable_cloud_execution = string_as_bool( kwargs.get( 'enable_cloud_execution', 'True' ) )
else:
self.enable_cloud_execution = string_as_bool( kwargs.get( 'enable_cloud_execution', 'False' ) )
+ # Galaxy messaging (AMQP) configuration options
+ self.amqp = {}
+ for k, v in global_conf_parser.items("galaxy_amqp"):
+ self.amqp[k] = v
def get( self, key, default ):
return self.config_dict.get( key, default )
def get_bool( self, key, default ):
diff -r 207d0d70483b -r 076f572d7c9d lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Tue Apr 20 15:36:03 2010 -0400
+++ b/lib/galaxy/model/__init__.py Wed Apr 21 10:41:30 2010 -0400
@@ -18,6 +18,7 @@
import logging
log = logging.getLogger( __name__ )
from sqlalchemy.orm import object_session
+import pexpect
datatypes_registry = galaxy.datatypes.registry.Registry() #Default Value Required for unit tests
@@ -1455,7 +1456,9 @@
class Sample( object ):
transfer_status = Bunch( NOT_STARTED = 'Not started',
- IN_PROGRESS = 'In progress',
+ IN_QUEUE = 'In queue',
+ TRANSFERRING = 'Transferring dataset',
+ ADD_TO_LIBRARY = 'Adding to data library',
COMPLETE = 'Complete',
ERROR = 'Error')
def __init__(self, name=None, desc=None, request=None, form_values=None,
@@ -1474,22 +1477,33 @@
return None
def untransferred_dataset_files(self):
count = 0
- for df, status in self.dataset_files:
- if status == self.transfer_status.NOT_STARTED:
+ for df in self.dataset_files:
+ if df['status'] == self.transfer_status.NOT_STARTED:
count = count + 1
return count
def inprogress_dataset_files(self):
count = 0
- for df, status in self.dataset_files:
- if status == self.transfer_status.IN_PROGRESS:
+ for df in self.dataset_files:
+ if df['status'] not in [self.transfer_status.NOT_STARTED, self.transfer_status.COMPLETE]:
count = count + 1
return count
def transferred_dataset_files(self):
count = 0
- for df, status in self.dataset_files:
- if status == self.transfer_status.COMPLETE:
+ for df in self.dataset_files:
+ if df['status'] == self.transfer_status.COMPLETE:
count = count + 1
return count
+ def dataset_size(self, filepath):
+ def print_ticks(d):
+ pass
+ datatx_info = self.request.type.datatx_info
+ cmd = 'ssh %s@%s "du -sh %s"' % ( datatx_info['username'],
+ datatx_info['host'],
+ filepath)
+ output = pexpect.run(cmd, events={'.ssword:*': datatx_info['password']+'\r\n',
+ pexpect.TIMEOUT:print_ticks},
+ timeout=10)
+ return output.split('\t')[0]
class SampleState( object ):
def __init__(self, name=None, desc=None, request_type=None):
diff -r 207d0d70483b -r 076f572d7c9d lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Tue Apr 20 15:36:03 2010 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Wed Apr 21 10:41:30 2010 -0400
@@ -12,6 +12,7 @@
from sqlalchemy.sql import select
import pexpect
import ConfigParser, threading, time
+from amqplib import client_0_8 as amqp
log = logging.getLogger( __name__ )
@@ -64,7 +65,6 @@
.filter( self.event_class.table.c.id.in_(select(columns=[func.max(self.event_class.table.c.id)],
from_obj=self.event_class.table,
group_by=self.event_class.table.c.request_id)))
- #print column_filter, q
return q
def get_accepted_filters( self ):
""" Returns a list of accepted filters for this column. """
@@ -1509,8 +1509,11 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- folder_path = util.restore_text( params.get( 'folder_path', '' ) )
+ folder_path = util.restore_text( params.get( 'folder_path',
+ sample.request.type.datatx_info['data_dir'] ) )
files_list = util.listify( params.get( 'files_list', '' ) )
+ if params.get( 'start_transfer_button', False ) == 'True':
+ return self.__start_datatx(trans, sample)
if not folder_path:
return trans.fill_template( '/admin/requests/get_data.mako',
sample=sample, files=[],
@@ -1544,32 +1547,43 @@
dataset_files=sample.dataset_files,
folder_path=folder_path )
elif params.get( 'remove_dataset_button', False ):
+ # get the filenames from the remote host
+ files = self.__get_files(trans, sample, folder_path)
dataset_index = int(params.get( 'dataset_index', 0 ))
del sample.dataset_files[dataset_index]
trans.sa_session.add( sample )
trans.sa_session.flush()
return trans.fill_template( '/admin/requests/get_data.mako',
- sample=sample,
- dataset_files=sample.dataset_files)
- elif params.get( 'start_transfer_button', False ):
+ sample=sample, files=files,
+ dataset_files=sample.dataset_files,
+ folder_path=folder_path)
+ elif params.get( 'select_files_button', False ):
folder_files = []
if len(files_list):
for f in files_list:
+ filepath = os.path.join(folder_path, f)
if f[-1] == os.sep:
# the selected item is a folder so transfer all the
# folder contents
- self.__get_files_in_dir(trans, sample, os.path.join(folder_path, f))
+ self.__get_files_in_dir(trans, sample, filepath)
else:
- sample.dataset_files.append([os.path.join(folder_path, f),
- sample.transfer_status.NOT_STARTED])
+ sample.dataset_files.append(dict(filepath=filepath,
+ status=sample.transfer_status.NOT_STARTED,
+ name=filepath.split('/')[-1],
+ error_msg='',
+ size=sample.dataset_size(filepath)))
trans.sa_session.add( sample )
trans.sa_session.flush()
- return self.__start_datatx(trans, sample)
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='show_datatx_page',
sample_id=trans.security.encode_id(sample.id),
folder_path=folder_path))
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='show_datatx_page',
+ sample_id=trans.security.encode_id(sample.id),
+ folder_path=folder_path))
+
def __setup_datatx_user(self, trans, library, folder):
'''
This method sets up the datatx user:
@@ -1620,7 +1634,62 @@
trans.sa_session.add( dp )
trans.sa_session.flush()
return datatx_user
-
+
+ def __send_message(self, trans, datatx_info, sample):
+ '''
+ This method creates the xml message and sends it to the rabbitmq server
+ '''
+ # first create the xml message based on the following template
+ xml = \
+ ''' <data_transfer>
+ <data_host>%(DATA_HOST)s</data_host>
+ <data_user>%(DATA_USER)s</data_user>
+ <data_password>%(DATA_PASSWORD)s</data_password>
+ <sample_id>%(SAMPLE_ID)s</sample_id>
+ <library_id>%(LIBRARY_ID)s</library_id>
+ <folder_id>%(FOLDER_ID)s</folder_id>
+ %(DATASETS)s
+ </data_transfer>'''
+ dataset_xml = \
+ '''<dataset>
+ <index>%(INDEX)s</index>
+ <name>%(NAME)s</name>
+ <file>%(FILE)s</file>
+ </dataset>'''
+ datasets = ''
+ for index, dataset in enumerate(sample.dataset_files):
+ if dataset['status'] == sample.transfer_status.NOT_STARTED:
+ datasets = datasets + dataset_xml % dict(INDEX=str(index),
+ NAME=dataset['name'],
+ FILE=dataset['filepath'])
+ sample.dataset_files[index]['status'] = sample.transfer_status.IN_QUEUE
+
+ trans.sa_session.add( sample )
+ trans.sa_session.flush()
+ data = xml % dict(DATA_HOST=datatx_info['host'],
+ DATA_USER=datatx_info['username'],
+ DATA_PASSWORD=datatx_info['password'],
+ SAMPLE_ID=str(sample.id),
+ LIBRARY_ID=str(sample.library.id),
+ FOLDER_ID=str(sample.folder.id),
+ DATASETS=datasets)
+ # now send this message
+ conn = amqp.Connection(host=trans.app.config.amqp['host']+":"+trans.app.config.amqp['port'],
+ userid=trans.app.config.amqp['userid'],
+ password=trans.app.config.amqp['password'],
+ virtual_host=trans.app.config.amqp['virtual_host'],
+ insist=False)
+ chan = conn.channel()
+ msg = amqp.Message(data,
+ content_type='text/plain',
+ application_headers={'msg_type': 'data_transfer'})
+ msg.properties["delivery_mode"] = 2
+ chan.basic_publish(msg,
+ exchange=trans.app.config.amqp['exchange'],
+ routing_key=trans.app.config.amqp['routing_key'])
+ chan.close()
+ conn.close()
+
def __start_datatx(self, trans, sample):
# data transfer user
datatx_user = self.__setup_datatx_user(trans, sample.library, sample.folder)
@@ -1635,6 +1704,11 @@
sample_id=trans.security.encode_id(sample.id),
status='error',
message=message))
+ self.__send_message(trans, datatx_info, sample)
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='show_datatx_page',
+ sample_id=trans.security.encode_id(sample.id),
+ folder_path=datatx_info['data_dir']))
error_message = ''
transfer_script = "scripts/galaxy_messaging/server/data_transfer.py"
for index, dataset in enumerate(sample.dataset_files):
@@ -1670,6 +1744,33 @@
action='show_datatx_page',
sample_id=trans.security.encode_id(sample.id),
folder_path=os.path.dirname(dfile)))
+
+ @web.expose
+ @web.require_admin
+ def dataset_details( self, trans, **kwd ):
+ try:
+ sample = trans.sa_session.query( trans.app.model.Sample ).get( trans.security.decode_id(kwd['sample_id']) )
+ except:
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ status='error',
+ message="Invalid sample ID" ) )
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ dataset_index = int( params.get( 'dataset_index', '' ) )
+ if params.get('save', '') == 'Save':
+ sample.dataset_files[dataset_index]['name'] = util.restore_text( params.get( 'name',
+ sample.dataset_files[dataset_index]['name'] ) )
+ trans.sa_session.add( sample )
+ trans.sa_session.flush()
+ status = 'done'
+ message = 'Saved the changes made to the dataset.'
+ return trans.fill_template( '/admin/requests/dataset.mako',
+ sample=sample,
+ dataset_index=dataset_index,
+ message=message,
+ status=status)
##
#### Request Type Stuff ###################################################
##
diff -r 207d0d70483b -r 076f572d7c9d lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Tue Apr 20 15:36:03 2010 -0400
+++ b/lib/galaxy/web/framework/__init__.py Wed Apr 21 10:41:30 2010 -0400
@@ -32,6 +32,7 @@
from sqlalchemy import and_
pkg_resources.require( "pexpect" )
+pkg_resources.require( "amqplib" )
import logging
log = logging.getLogger( __name__ )
diff -r 207d0d70483b -r 076f572d7c9d run_galaxy_listener.sh
--- a/run_galaxy_listener.sh Tue Apr 20 15:36:03 2010 -0400
+++ b/run_galaxy_listener.sh Wed Apr 21 10:41:30 2010 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`
-python scripts/galaxy_messaging/server/amqp_consumer.py universe_wsgi.ini >> galaxy_listener.log 2>&1
\ No newline at end of file
+python scripts/galaxy_messaging/server/amqp_consumer.py universe_wsgi.ini 2>&1
\ No newline at end of file
diff -r 207d0d70483b -r 076f572d7c9d scripts/galaxy_messaging/client/amqp_publisher.py
--- a/scripts/galaxy_messaging/client/amqp_publisher.py Tue Apr 20 15:36:03 2010 -0400
+++ b/scripts/galaxy_messaging/client/amqp_publisher.py Wed Apr 21 10:41:30 2010 -0400
@@ -35,7 +35,9 @@
virtual_host=amqp_config['virtual_host'],
insist=False)
chan = conn.channel()
- msg = amqp.Message(data)
+ msg = amqp.Message(data,
+ content_type='text/plain',
+ application_headers={'msg_type': 'sample_state_update'})
msg.properties["delivery_mode"] = 2
chan.basic_publish(msg,
exchange=amqp_config['exchange'],
diff -r 207d0d70483b -r 076f572d7c9d scripts/galaxy_messaging/server/amqp_consumer.py
--- a/scripts/galaxy_messaging/server/amqp_consumer.py Tue Apr 20 15:36:03 2010 -0400
+++ b/scripts/galaxy_messaging/server/amqp_consumer.py Wed Apr 21 10:41:30 2010 -0400
@@ -13,6 +13,7 @@
import sys, os
import optparse
import xml.dom.minidom
+import subprocess
from galaxydb_interface import GalaxyDbInterface
assert sys.version_info[:2] >= ( 2, 4 )
@@ -27,8 +28,13 @@
from amqplib import client_0_8 as amqp
import logging
-logging.basicConfig(level=logging.DEBUG)
-log = logging.getLogger( 'GalaxyAMQP' )
+log = logging.getLogger("GalaxyAMQP")
+log.setLevel(logging.DEBUG)
+fh = logging.FileHandler("galaxy_listener.log")
+fh.setLevel(logging.DEBUG)
+formatter = logging.Formatter("%(asctime)s - %(name)s - %(message)s")
+fh.setFormatter(formatter)
+log.addHandler(fh)
global dbconnstr
@@ -43,19 +49,47 @@
rc = rc + node.data
return rc
+def get_value_index(dom, tag_name, index):
+ '''
+ This method extracts the tag value from the xml message
+ '''
+ try:
+ nodelist = dom.getElementsByTagName(tag_name)[index].childNodes
+ except:
+ return None
+ rc = ""
+ for node in nodelist:
+ if node.nodeType == node.TEXT_NODE:
+ rc = rc + node.data
+ return rc
+
def recv_callback(msg):
- dom = xml.dom.minidom.parseString(msg.body)
- barcode = get_value(dom, 'barcode')
- state = get_value(dom, 'state')
- log.debug('Barcode: '+barcode)
- log.debug('State: '+state)
- # update the galaxy db
- galaxy = GalaxyDbInterface(dbconnstr)
- sample_id = galaxy.get_sample_id(field_name='bar_code', value=barcode)
- if sample_id == -1:
- log.debug('Invalid barcode.')
- return
- galaxy.change_state(sample_id, state)
+ # check the message type.
+ msg_type = msg.properties['application_headers'].get('msg_type')
+ log.debug('\nMESSAGE RECVD: '+str(msg_type))
+ if msg_type == 'data_transfer':
+ log.debug('DATA TRANSFER')
+ # fork a new process to transfer datasets
+ transfer_script = "scripts/galaxy_messaging/server/data_transfer.py"
+ cmd = ( "python",
+ transfer_script,
+ msg.body )
+ pid = subprocess.Popen(cmd).pid
+ log.debug('Started process (%i): %s' % (pid, str(cmd)))
+ elif msg_type == 'sample_state_update':
+ log.debug('SAMPLE STATE UPDATE')
+ dom = xml.dom.minidom.parseString(msg.body)
+ barcode = get_value(dom, 'barcode')
+ state = get_value(dom, 'state')
+ log.debug('Barcode: '+barcode)
+ log.debug('State: '+state)
+ # update the galaxy db
+ galaxy = GalaxyDbInterface(dbconnstr)
+ sample_id = galaxy.get_sample_id(field_name='bar_code', value=barcode)
+ if sample_id == -1:
+ log.debug('Invalid barcode.')
+ return
+ galaxy.change_state(sample_id, state)
def main():
if len(sys.argv) < 2:
@@ -66,8 +100,8 @@
global dbconnstr
dbconnstr = config.get("app:main", "database_connection")
amqp_config = {}
- for option in config.options("galaxy:amqp"):
- amqp_config[option] = config.get("galaxy:amqp", option)
+ for option in config.options("galaxy_amqp"):
+ amqp_config[option] = config.get("galaxy_amqp", option)
log.debug(str(amqp_config))
conn = amqp.Connection(host=amqp_config['host']+":"+amqp_config['port'],
userid=amqp_config['userid'],
diff -r 207d0d70483b -r 076f572d7c9d scripts/galaxy_messaging/server/data_transfer.py
--- a/scripts/galaxy_messaging/server/data_transfer.py Tue Apr 20 15:36:03 2010 -0400
+++ b/scripts/galaxy_messaging/server/data_transfer.py Wed Apr 21 10:41:30 2010 -0400
@@ -8,28 +8,36 @@
Usage:
-python data_transfer.py <sequencer_host>
- <username>
- <password>
- <source_file>
- <sample_id>
- <dataset_index>
- <library_id>
- <folder_id>
+python data_transfer.py <data_transfer_xml>
+
+
"""
import ConfigParser
import sys, os, time, traceback
import optparse
import urllib,urllib2, cookielib, shutil
import logging, time
+import xml.dom.minidom
+
+sp = sys.path[0]
+
from galaxydb_interface import GalaxyDbInterface
assert sys.version_info[:2] >= ( 2, 4 )
+new_path = [ sp ]
+new_path.extend( sys.path )
+sys.path = new_path
+
+from galaxyweb_interface import GalaxyWebInterface
+
+assert sys.version_info[:2] >= ( 2, 4 )
new_path = [ os.path.join( os.getcwd(), "lib" ) ]
new_path.extend( sys.path[1:] ) # remove scripts/ from the path
sys.path = new_path
+
from galaxy.util.json import from_json_string, to_json_string
+from galaxy.model import Sample
from galaxy import eggs
import pkg_resources
pkg_resources.require( "pexpect" )
@@ -38,28 +46,39 @@
pkg_resources.require( "simplejson" )
import simplejson
-logging.basicConfig(filename=sys.stderr, level=logging.DEBUG,
- format="%(asctime)s [%(levelname)s] %(message)s")
-
-class DataTransferException(Exception):
- def __init__(self, value):
- self.msg = value
- def __str__(self):
- return repr(self.msg)
+log = logging.getLogger("datatx_"+str(os.getpid()))
+log.setLevel(logging.DEBUG)
+fh = logging.FileHandler("data_transfer.log")
+fh.setLevel(logging.DEBUG)
+formatter = logging.Formatter("%(asctime)s - %(name)s - %(message)s")
+fh.setFormatter(formatter)
+log.addHandler(fh)
class DataTransfer(object):
- def __init__(self, host, username, password, remote_file, sample_id,
- dataset_index, library_id, folder_id):
- self.host = host
- self.username = username
- self.password = password
- self.remote_file = remote_file
- self.sample_id = sample_id
- self.dataset_index = dataset_index
- self.library_id = library_id
- self.folder_id = folder_id
+ def __init__(self, msg):
+ log.info(msg)
+ self.dom = xml.dom.minidom.parseString(msg)
+ self.host = self.get_value(self.dom, 'data_host')
+ self.username = self.get_value(self.dom, 'data_user')
+ self.password = self.get_value(self.dom, 'data_password')
+ self.sample_id = self.get_value(self.dom, 'sample_id')
+ self.library_id = self.get_value(self.dom, 'library_id')
+ self.folder_id = self.get_value(self.dom, 'folder_id')
+ self.dataset_files = []
+ count=0
+ while True:
+ index = self.get_value_index(self.dom, 'index', count)
+ file = self.get_value_index(self.dom, 'file', count)
+ name = self.get_value_index(self.dom, 'name', count)
+ if file:
+ self.dataset_files.append(dict(name=name,
+ index=int(index),
+ file=file))
+ else:
+ break
+ count=count+1
try:
# Retrieve the upload user login information from the config file
config = ConfigParser.ConfigParser()
@@ -75,11 +94,13 @@
os.mkdir(self.server_dir)
if not os.path.exists(self.server_dir):
raise Exception
+ # connect to db
+ self.galaxydb = GalaxyDbInterface(self.database_connection)
except:
- logging.error(traceback.format_exc())
- logging.error('FATAL ERROR')
+ log.error(traceback.format_exc())
+ log.error('FATAL ERROR')
if self.database_connection:
- self.update_status('Error')
+ self.error_and_exit('Error')
sys.exit(1)
def start(self):
@@ -88,13 +109,13 @@
to the data library & finally updates the data transfer status in the db
'''
# datatx
- self.transfer_file()
+ self.transfer_files()
# add the dataset to the given library
self.add_to_library()
# update the data transfer status in the db
- self.update_status('Complete')
+ self.update_status(Sample.transfer_status.COMPLETE)
# cleanup
- self.cleanup()
+ #self.cleanup()
sys.exit(0)
def cleanup(self):
@@ -114,34 +135,39 @@
 This method is called when any exception is raised. This prints the traceback
and terminates this script
'''
- logging.error(traceback.format_exc())
- logging.error('FATAL ERROR.'+msg)
- self.update_status('Error.'+msg)
+ log.error(traceback.format_exc())
+ log.error('FATAL ERROR.'+msg)
+ self.update_status('Error.', 'All', msg)
sys.exit(1)
- def transfer_file(self):
+ def transfer_files(self):
'''
This method executes a scp process using pexpect library to transfer
the dataset file from the remote sequencer to the Galaxy server
'''
def print_ticks(d):
pass
- try:
- cmd = "scp %s@%s:%s %s" % ( self.username,
- self.host,
- self.remote_file,
- self.server_dir)
- logging.debug(cmd)
- output = pexpect.run(cmd, events={'.ssword:*': self.password+'\r\n',
- pexpect.TIMEOUT:print_ticks},
- timeout=10)
- logging.debug(output)
- if not os.path.exists(os.path.join(self.server_dir, os.path.basename(self.remote_file))):
- raise DataTransferException('Could not find the local file after transfer (%s)' % os.path.join(self.server_dir, os.path.basename(self.remote_file)))
- except DataTransferException, (e):
- self.error_and_exit(e.msg)
- except:
- self.error_and_exit()
+ for i, df in enumerate(self.dataset_files):
+ self.update_status(Sample.transfer_status.TRANSFERRING, df['index'])
+ try:
+ cmd = "scp %s@%s:%s %s/%s" % ( self.username,
+ self.host,
+ df['file'],
+ self.server_dir,
+ df['name'])
+ log.debug(cmd)
+ output = pexpect.run(cmd, events={'.ssword:*': self.password+'\r\n',
+ pexpect.TIMEOUT:print_ticks},
+ timeout=10)
+ log.debug(output)
+ path = os.path.join(self.server_dir, os.path.basename(df['file']))
+ if not os.path.exists(path):
+ msg = 'Could not find the local file after transfer (%s)' % path
+ log.error(msg)
+ raise Exception(msg)
+ except Exception, e:
+ msg = traceback.format_exc()
+ self.update_status('Error', df['index'], msg)
def add_to_library(self):
@@ -149,73 +175,72 @@
This method adds the dataset file to the target data library & folder
by opening the corresponding url in Galaxy server running.
'''
- try:
- logging.debug('Adding %s to library...' % os.path.basename(self.remote_file))
- # create url
- base_url = "http://%s:%s" % (self.server_host, self.server_port)
- # login
- url = "%s/user/login?email=%s&password=%s" % (base_url, self.datatx_email, self.datatx_password)
- cj = cookielib.CookieJar()
- opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
- f = opener.open(url)
- if f.read().find("ogged in as "+self.datatx_email) == -1:
- # if the user doesnt exist, create the user
- url = "%s/user/create?email=%s&username=%s&password=%s&confirm=%s&create_user_button=Submit" % ( base_url, self.datatx_email, self.datatx_email, self.datatx_password, self.datatx_password )
- f = opener.open(url)
- if f.read().find("ogged in as "+self.datatx_email) == -1:
- raise DataTransferException("The "+self.datatx_email+" user could not login to Galaxy")
- # after login, add dataset to the library
- params = urllib.urlencode(dict( cntrller='library_admin',
- tool_id='upload1',
- tool_state='None',
- library_id=self.library_id,
- folder_id=self.folder_id,
- upload_option='upload_directory',
- file_type='auto',
- server_dir=os.path.basename(self.server_dir),
- dbkey='',
- runtool_btn='Upload to library'))
- #url = "http://localhost:8080/library_common/upload_library_dataset?cntrller=librar…"
- #url = base_url+"/library_common/upload_library_dataset?library_id=adb5f5c93f827949&tool_id=upload1&file_type=auto&server_dir=datatx_22858&dbkey=%3F&upload_option=upload_directory&folder_id=529fd61ab1c6cc36&cntrller=library_admin&tool_state=None&runtool_btn=Upload+to+library"
- url = base_url+"/library_common/upload_library_dataset"
- logging.debug(url)
- logging.debug(params)
- f = opener.open(url, params)
- if f.read().find("Data Library") == -1:
- raise DataTransferException("Dataset could not be uploaded to the data library")
- # finally logout
- f = opener.open(base_url+'/user/logout')
- if f.read().find("You have been logged out.") == -1:
- raise DataTransferException("The "+self.datatx_email+" user could not logout of Galaxy")
- except DataTransferException, (e):
- self.error_and_exit(e.msg)
- except:
- self.error_and_exit()
+ self.update_status(Sample.transfer_status.ADD_TO_LIBRARY)
+ galaxyweb = GalaxyWebInterface(self.server_host, self.server_port,
+ self.datatx_email, self.datatx_password)
+ galaxyweb.add_to_library(self.server_dir, self.library_id, self.folder_id)
+ galaxyweb.logout()
- def update_status(self, status):
+ def update_status(self, status, dataset_index='All', msg=''):
'''
Update the data transfer status for this dataset in the database
'''
try:
- galaxy = GalaxyDbInterface(self.database_connection)
- df = from_json_string(galaxy.get_sample_dataset_files(self.sample_id))
- logging.debug(df)
- df[self.dataset_index][1] = status
- galaxy.set_sample_dataset_files(self.sample_id, to_json_string(df))
- logging.debug("######################\n"+str(from_json_string(galaxy.get_sample_dataset_files(self.sample_id))[self.dataset_index]))
+ log.debug('Setting status "%s" for sample "%s"' % ( status, str(dataset_index) ) )
+ df = from_json_string(self.galaxydb.get_sample_dataset_files(self.sample_id))
+ if dataset_index == 'All':
+ for dataset in self.dataset_files:
+ df[dataset['index']]['status'] = status
+ if status == 'Error':
+ df[dataset['index']]['error_msg'] = msg
+ else:
+ df[dataset['index']]['error_msg'] = ''
+
+ else:
+ df[dataset_index]['status'] = status
+ if status == 'Error':
+ df[dataset_index]['error_msg'] = msg
+ else:
+ df[dataset_index]['error_msg'] = ''
+
+ self.galaxydb.set_sample_dataset_files(self.sample_id, to_json_string(df))
+ log.debug('done.')
except:
- logging.error(traceback.format_exc())
- logging.error('FATAL ERROR')
+ log.error(traceback.format_exc())
+ log.error('FATAL ERROR')
sys.exit(1)
+
+ def get_value(self, dom, tag_name):
+ '''
+ This method extracts the tag value from the xml message
+ '''
+ nodelist = dom.getElementsByTagName(tag_name)[0].childNodes
+ rc = ""
+ for node in nodelist:
+ if node.nodeType == node.TEXT_NODE:
+ rc = rc + node.data
+ return rc
+
+ def get_value_index(self, dom, tag_name, index):
+ '''
+ This method extracts the tag value from the xml message
+ '''
+ try:
+ nodelist = dom.getElementsByTagName(tag_name)[index].childNodes
+ except:
+ return None
+ rc = ""
+ for node in nodelist:
+ if node.nodeType == node.TEXT_NODE:
+ rc = rc + node.data
+ return rc
if __name__ == '__main__':
- logging.info('STARTING %i %s' % (os.getpid(), str(sys.argv)))
- logging.info('daemonized %i' % os.getpid())
+ log.info('STARTING %i %s' % (os.getpid(), str(sys.argv)))
#
# Start the daemon
- #
- dt = DataTransfer(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4],
- int(sys.argv[5]), int(sys.argv[6]), sys.argv[7], sys.argv[8])
+ #
+ dt = DataTransfer(sys.argv[1])
dt.start()
sys.exit(0)
diff -r 207d0d70483b -r 076f572d7c9d scripts/galaxy_messaging/server/galaxydb_interface.py
--- a/scripts/galaxy_messaging/server/galaxydb_interface.py Tue Apr 20 15:36:03 2010 -0400
+++ b/scripts/galaxy_messaging/server/galaxydb_interface.py Wed Apr 21 10:41:30 2010 -0400
@@ -20,8 +20,8 @@
from sqlalchemy import *
from sqlalchemy.orm import sessionmaker
-logging.basicConfig(level=logging.DEBUG)
-log = logging.getLogger( 'GalaxyDbInterface' )
+#logging.basicConfig(level=logging.DEBUG)
+#log = logging.getLogger( 'GalaxyDbInterface' )
class GalaxyDbInterface(object):
@@ -53,9 +53,8 @@
x = result.fetchone()
if x:
sample_id = x[0]
- log.debug('Sample ID: %i' % sample_id)
+ #log.debug('Sample ID: %i' % sample_id)
return sample_id
- log.warning('This sample %s %s does not belong to any sample in the database.' % (field_name, value))
return -1
def current_state(self, sample_id):
@@ -74,16 +73,16 @@
subsubquery = select(columns=[self.sample_table.c.request_id],
whereclause=self.sample_table.c.id==sample_id)
self.request_id = subsubquery.execute().fetchall()[0][0]
- log.debug('REQUESTID: %i' % self.request_id)
+ #log.debug('REQUESTID: %i' % self.request_id)
subquery = select(columns=[self.request_table.c.request_type_id],
whereclause=self.request_table.c.id==self.request_id)
request_type_id = subquery.execute().fetchall()[0][0]
- log.debug('REQUESTTYPEID: %i' % request_type_id)
+ #log.debug('REQUESTTYPEID: %i' % request_type_id)
query = select(columns=[self.state_table.c.id, self.state_table.c.name],
whereclause=self.state_table.c.request_type_id==request_type_id,
order_by=self.state_table.c.id.asc())
states = query.execute().fetchall()
- log.debug('POSSIBLESTATES: '+ str(states))
+ #log.debug('POSSIBLESTATES: '+ str(states))
return states
def change_state(self, sample_id, new_state=None):
@@ -100,7 +99,7 @@
new_state_id = state_id
if new_state_id == -1:
return
- log.debug('Updating sample_id %i state to %s' % (sample_id, new_state))
+ #log.debug('Updating sample_id %i state to %s' % (sample_id, new_state))
i = self.event_table.insert()
i.execute(update_time=datetime.utcnow(),
create_time=datetime.utcnow(),
@@ -120,7 +119,7 @@
break
if request_complete:
request_state = 'Complete'
- log.debug('Updating request_id %i state to "%s"' % (self.request_id, request_state))
+ #log.debug('Updating request_id %i state to "%s"' % (self.request_id, request_state))
i = self.request_event_table.insert()
i.execute(update_time=datetime.utcnow(),
create_time=datetime.utcnow(),
diff -r 207d0d70483b -r 076f572d7c9d scripts/galaxy_messaging/server/galaxyweb_interface.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/server/galaxyweb_interface.py Wed Apr 21 10:41:30 2010 -0400
@@ -0,0 +1,132 @@
+import ConfigParser
+import sys, os
+import serial
+import array
+import time
+import optparse,array
+import shutil, traceback
+import urllib,urllib2, cookielib
+
+assert sys.version_info[:2] >= ( 2, 4 )
+new_path = [ os.path.join( os.getcwd(), "lib" ) ]
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+sys.path = new_path
+
+from galaxy import eggs
+import pkg_resources
+
+import pkg_resources
+pkg_resources.require( "pycrypto" )
+
+from Crypto.Cipher import Blowfish
+from Crypto.Util.randpool import RandomPool
+from Crypto.Util import number
+
+
+class GalaxyWebInterface(object):
+ def __init__(self, server_host, server_port, datatx_email, datatx_password):
+ self.server_host = server_host#config.get("main", "server_host")
+ self.server_port = server_port#config.get("main", "server_port")
+ self.datatx_email = datatx_email#config.get("main", "datatx_email")
+ self.datatx_password = datatx_password#config.get("main", "datatx_password")
+ try:
+ # create url
+ self.base_url = "http://%s:%s" % (self.server_host, self.server_port)
+ # login
+ url = "%s/user/login?email=%s&password=%s&login_button=Login" % (self.base_url, self.datatx_email, self.datatx_password)
+ cj = cookielib.CookieJar()
+ self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
+ #print url
+ f = self.opener.open(url)
+ if f.read().find("ogged in as "+self.datatx_email) == -1:
+ # if the user doesnt exist, create the user
+ url = "%s/user/create?email=%s&username=%s&password=%s&confirm=%s&create_user_button=Submit" % ( self.base_url, self.datatx_email, self.datatx_email, self.datatx_password, self.datatx_password )
+ f = self.opener.open(url)
+ if f.read().find("ogged in as "+self.datatx_email) == -1:
+ raise "The "+self.datatx_email+" user could not login to Galaxy"
+ except:
+ print traceback.format_exc()
+ sys.exit(1)
+
+ def add_to_library(self, server_dir, library_id, folder_id, dbkey=''):
+ '''
+ This method adds the dataset file to the target data library & folder
+ by opening the corresponding url in Galaxy server running.
+ '''
+ try:
+ params = urllib.urlencode(dict( cntrller='library_admin',
+ tool_id='upload1',
+ tool_state='None',
+ library_id=self.encode_id(library_id),
+ folder_id=self.encode_id(folder_id),
+ upload_option='upload_directory',
+ file_type='auto',
+ server_dir=os.path.basename(server_dir),
+ dbkey=dbkey,
+ show_dataset_id='True',
+ runtool_btn='Upload to library'))
+ #url = "http://localhost:8080/library_common/upload_library_dataset?cntrller=librar…"
+ #url = base_url+"/library_common/upload_library_dataset?library_id=adb5f5c93f827949&tool_id=upload1&file_type=auto&server_dir=datatx_22858&dbkey=%3F&upload_option=upload_directory&folder_id=529fd61ab1c6cc36&cntrller=library_admin&tool_state=None&runtool_btn=Upload+to+library"
+ url = self.base_url+"/library_common/upload_library_dataset"
+ #print url
+ #print params
+ f = self.opener.open(url, params)
+ if f.read().find("Data Library") == -1:
+ raise "Dataset could not be uploaded to the data library"
+ except:
+ print traceback.format_exc()
+ sys.exit(1)
+
+ def import_to_history(self, ldda_id, library_id, folder_id):
+ try:
+ params = urllib.urlencode(dict( cntrller='library_admin',
+ show_deleted='False',
+ library_id=self.encode_id(library_id),
+ folder_id=self.encode_id(folder_id),
+ ldda_ids=self.encode_id(ldda_id),
+ do_action='import_to_history',
+ use_panels='False'))
+ #url = "http://lion.bx.psu.edu:8080/library_common/act_on_multiple_datasets?library…"
+ #url = base_url+"/library_common/upload_library_dataset?library_id=adb5f5c93f827949&tool_id=upload1&file_type=auto&server_dir=datatx_22858&dbkey=%3F&upload_option=upload_directory&folder_id=529fd61ab1c6cc36&cntrller=library_admin&tool_state=None&runtool_btn=Upload+to+library"
+ url = self.base_url+"/library_common/act_on_multiple_datasets"
+ #print url
+ #print params
+ f = self.opener.open(url, params)
+ x = f.read()
+ if x.find("1 dataset(s) have been imported into your history.") == -1:
+ #print x
+ raise Exception("Dataset could not be imported into history")
+ except:
+ print traceback.format_exc()
+ sys.exit(1)
+
+
+ def run_workflow(self, workflow_id, hid, workflow_step):
+ input = str(workflow_step)+'|input'
+ try:
+ params = urllib.urlencode({'id':self.encode_id(workflow_id),
+ 'run_workflow': 'Run workflow',
+ input: hid})
+ url = self.base_url+"/workflow/run"
+ #print url+'?'+params
+ f = self.opener.open(url, params)
+# if f.read().find("1 dataset(s) have been imported into your history.") == -1:
+# raise Exception("Error in running the workflow")
+ except:
+ print traceback.format_exc()
+ sys.exit(1)
+
+
+ def logout(self):
+ # finally logout
+ f = self.opener.open(self.base_url+'/user/logout')
+
+ def encode_id(self, obj_id ):
+ id_secret = 'changethisinproductiontoo'
+ id_cipher = Blowfish.new( id_secret )
+ # Convert to string
+ s = str( obj_id )
+ # Pad to a multiple of 8 with leading "!"
+ s = ( "!" * ( 8 - len(s) % 8 ) ) + s
+ # Encrypt
+ return id_cipher.encrypt( s ).encode( 'hex' )
diff -r 207d0d70483b -r 076f572d7c9d templates/admin/requests/dataset.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/admin/requests/dataset.mako Wed Apr 21 10:41:30 2010 -0400
@@ -0,0 +1,71 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<br/>
+<br/>
+
+<ul class="manage-table-actions">
+ <li>
+ <a class="action-button" href="${h.url_for( controller='requests_admin', action='show_datatx_page', sample_id=trans.security.encode_id(sample.id) )}">
+ <span>Dataset transfer page</span></a>
+ </li>
+</ul>
+
+<div class="toolForm">
+ <div class="toolFormTitle">Dataset details</div>
+ <div class="toolFormBody">
+ <form name="dataset_details" action="${h.url_for( controller='requests_admin', action='dataset_details', save_changes=True, sample_id=trans.security.encode_id(sample.id), dataset_index=dataset_index )}" method="post" >
+ <%
+ dataset = sample.dataset_files[dataset_index]
+ %>
+ <div class="form-row">
+ <label>Name:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ %if dataset['status'] in [sample.transfer_status.IN_QUEUE, sample.transfer_status.NOT_STARTED]:
+ <input type="text" name="name" value="${dataset['name']}" size="60"/>
+ %else:
+ ${dataset['name']}
+ %endif
+
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>File on the Sequencer:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ ${dataset['filepath']}
+ ##<input type="text" name="filepath" value="${dataset['filepath']}" size="100" readonly/>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Size:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ ${dataset.get('size', 'Unknown')}
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Transfer status:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ ${dataset['status']}
+ <br/>
+ %if dataset['status'] == sample.transfer_status.ERROR:
+ ${dataset['error_msg']}
+ %endif
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ %if dataset['status'] in [sample.transfer_status.IN_QUEUE, sample.transfer_status.NOT_STARTED]:
+ <div class="form-row">
+ <input type="submit" name="save" value="Save"/>
+ </div>
+ %endif
+ </form>
+ </div>
+</div>
\ No newline at end of file
diff -r 207d0d70483b -r 076f572d7c9d templates/admin/requests/get_data.mako
--- a/templates/admin/requests/get_data.mako Tue Apr 20 15:36:03 2010 -0400
+++ b/templates/admin/requests/get_data.mako Wed Apr 21 10:41:30 2010 -0400
@@ -53,29 +53,44 @@
<div class="toolForm">
%if len(dataset_files):
## <form name="get_data" action="${h.url_for( controller='requests_admin', action='get_data', sample_id=sample.id)}" method="post" >
+ <div class="form-row">
+ <h4>Sample Dataset(s)</h4>
+ %if sample.untransferred_dataset_files():
<div class="form-row">
- <h4>Sample Dataset(s)</h4>
- <div class="form-row">
- <table class="grid">
- <thead>
- <tr>
- <th>Dataset File</th>
- <th>Transfer Status</th>
- <th></th>
- </tr>
- <thead>
- <tbody>
- %for dataset_index, dataset_file in enumerate(dataset_files):
- ${sample_dataset_files( dataset_index, dataset_file[0], dataset_file[1] )}
- %endfor
- </tbody>
- </table>
- </div>
- </div>
+ <ul class="manage-table-actions">
+ <li>
+ <a class="action-button" href="${h.url_for( controller='requests_admin', action='get_data', start_transfer_button=True, sample_id=sample.id )}">
+ <span>Start transfer</span></a>
+ </li>
+ </ul>
+ </div>
+ %endif
+ <div class="form-row">
+ <table class="grid">
+ <thead>
+ <tr>
+ <th>Dataset File</th>
+ <th>Transfer Status</th>
+ <th></th>
+ </tr>
+ <thead>
+ <tbody>
+ %for dataset_index, dataset_file in enumerate(dataset_files):
+ ${sample_dataset_files( dataset_index, dataset_file['name'], dataset_file['status'] )}
+ %endfor
+ </tbody>
+ </table>
+ </div>
+ </div>
+
## </form>
##</div>
+
+
+<br/>
<br/>
%endif
+
##<div class="toolForm">
<form name="get_data" action="${h.url_for( controller='requests_admin', action='get_data', sample_id=sample.id)}" method="post" >
<div class="form-row">
@@ -102,24 +117,24 @@
navigate away from this page. Once the transfer is complete
the dataset(s) will show up on this page.
</div>
- <input type="submit" name="start_transfer_button" value="Transfer"/>
+ <input type="submit" name="select_files_button" value="Select"/>
</div>
</div>
</div>
</form>
</div>
-<%def name="sample_dataset_files( dataset_index, dataset_file, status )">
+<%def name="sample_dataset_files( dataset_index, dataset_name, status )">
<tr>
<td>
-## <label class="msg_head"><a href="${h.url_for( controller='requests_admin', action='show_dataset_file', sample_id=trans.security.encode_id(sample.id), dataset_index=dataset_index )}">${dataset_file.split('/')[-1]}</a></label>
- <div class="msg_head"><u>${dataset_file.split('/')[-1]}</u></div>
- <div class="msg_body">
- ${dataset_file}
- </div>
+ <label class="msg_head"><a href="${h.url_for( controller='requests_admin', action='dataset_details', sample_id=trans.security.encode_id(sample.id), dataset_index=dataset_index )}">${dataset_name}</a></label>
+## <div class="msg_head"><u>${dataset_file.split('/')[-1]}</u></div>
+## <div class="msg_body">
+## ${dataset_file}
+## </div>
</td>
<td>
- %if status == sample.transfer_status.IN_PROGRESS:
+ %if status not in [sample.transfer_status.NOT_STARTED, sample.transfer_status.COMPLETE]:
<i>${status}</i>
%else:
${status}
diff -r 207d0d70483b -r 076f572d7c9d universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample Tue Apr 20 15:36:03 2010 -0400
+++ b/universe_wsgi.ini.sample Wed Apr 21 10:41:30 2010 -0400
@@ -287,7 +287,7 @@
# to be set up with a user account and other parameters listed below. The 'host'
# and 'port' fields should point to where the RabbitMQ server is running.
-#[galaxy:amqp]
+[galaxy_amqp]
#host = 127.0.0.1
#port = 5672
#userid = galaxy
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/207d0d70483b
changeset: 3673:207d0d70483b
user: Kanwei Li <kanwei(a)gmail.com>
date: Tue Apr 20 15:36:03 2010 -0400
description:
Fix history renaming on Saved Histories grid
diffstat:
lib/galaxy/web/controllers/history.py | 3 ++-
1 files changed, 2 insertions(+), 1 deletions(-)
diffs (13 lines):
diff -r 18d0d7fd543a -r 207d0d70483b lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Apr 20 13:19:44 2010 -0400
+++ b/lib/galaxy/web/controllers/history.py Tue Apr 20 15:36:03 2010 -0400
@@ -177,7 +177,8 @@
operation = kwargs['operation'].lower()
if operation == "share or publish":
return self.sharing( trans, **kwargs )
- if operation == "rename":
+ if operation == "rename" and kwargs.get('id', None): # Don't call rename if no ids
+ del kwargs['name'] # Remove ajax name param that rename method uses
return self.rename( trans, **kwargs )
history_ids = util.listify( kwargs.get( 'id', [] ) )
# Display no message by default
1
0
10 May '10
details: http://www.bx.psu.edu/hg/galaxy/rev/18d0d7fd543a
changeset: 3672:18d0d7fd543a
user: Kanwei Li <kanwei(a)gmail.com>
date: Tue Apr 20 13:19:44 2010 -0400
description:
GFF to Bed converter now converts the spaces to underscores to avoid UCSC problem [Brad Chapman] Closes #323
diffstat:
lib/galaxy/datatypes/converters/gff_to_bed_converter.py | 5 ++++-
1 files changed, 4 insertions(+), 1 deletions(-)
diffs (15 lines):
diff -r 7cb131814770 -r 18d0d7fd543a lib/galaxy/datatypes/converters/gff_to_bed_converter.py
--- a/lib/galaxy/datatypes/converters/gff_to_bed_converter.py Mon Apr 19 17:43:39 2010 -0400
+++ b/lib/galaxy/datatypes/converters/gff_to_bed_converter.py Tue Apr 20 13:19:44 2010 -0400
@@ -21,7 +21,10 @@
strand = '+'
# GFF format: chrom source, name, chromStart, chromEnd, score, strand
# Bed format: chrom, chromStart, chromEnd, name, score, strand
- out.write( "%s\t%s\t%s\t%s\t0\t%s\n" %( elems[0], start, elems[4], elems[2], strand ) )
+ #
+ # Replace any spaces in the name with underscores so UCSC will not complain
+ name = elems[2].replace(" ", "_")
+ out.write( "%s\t%s\t%s\t%s\t0\t%s\n" %( elems[0], start, elems[4], name, strand ) )
except:
skipped_lines += 1
if not first_skipped_line:
1
0
10 May '10
details: http://www.bx.psu.edu/hg/galaxy/rev/7cb131814770
changeset: 3671:7cb131814770
user: James Taylor <james(a)jamestaylor.org>
date: Mon Apr 19 17:43:39 2010 -0400
description:
Fix ordering in display_structured, also use insane eagerloading to make it massively faster
diffstat:
lib/galaxy/util/odict.py | 10 +++++++---
lib/galaxy/web/controllers/history.py | 25 ++++++++++++++++---------
templates/history/display_structured.mako | 13 ++++++++++---
3 files changed, 33 insertions(+), 15 deletions(-)
diffs (128 lines):
diff -r b8d25aabb98d -r 7cb131814770 lib/galaxy/util/odict.py
--- a/lib/galaxy/util/odict.py Tue Apr 20 11:47:51 2010 -0400
+++ b/lib/galaxy/util/odict.py Mon Apr 19 17:43:39 2010 -0400
@@ -31,9 +31,9 @@
self._keys = []
def copy(self):
- new = odict()
- new.update( self )
- return new
+ new = odict()
+ new.update( self )
+ return new
def items(self):
return zip(self._keys, self.values())
@@ -82,3 +82,7 @@
def __iter__( self ):
for key in self._keys:
yield key
+
+ def reverse( self ):
+ self._keys.reverse()
+
diff -r b8d25aabb98d -r 7cb131814770 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Apr 20 11:47:51 2010 -0400
+++ b/lib/galaxy/web/controllers/history.py Mon Apr 19 17:43:39 2010 -0400
@@ -1,6 +1,7 @@
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy import util
+from galaxy.util.odict import odict
from galaxy.model.mapping import desc
from galaxy.model.orm import *
from galaxy.util.json import *
@@ -336,25 +337,31 @@
"""
# Get history
if id is None:
- history = trans.history
+ id = trans.history.id
else:
id = trans.security.decode_id( id )
- history = trans.sa_session.query( model.History ).get( id )
- assert history
- assert history.user and ( history.user == trans.user ) or ( history == trans.history )
+ # Expunge history from the session to allow us to force a reload
+ # with a bunch of eager loaded joins
+ trans.sa_session.expunge( trans.history )
+ history = trans.sa_session.query( model.History ).options(
+ eagerload_all( 'active_datasets.creating_job_associations.job.workflow_invocation_step.workflow_invocation.workflow' ),
+ eagerload_all( 'active_datasets.children' )
+ ).get( id )
+ assert history
+ assert history.user and ( history.user.id == trans.user.id ) or ( history.id == trans.history.id )
# Resolve jobs and workflow invocations for the datasets in the history
# items is filled with items (hdas, jobs, or workflows) that go at the
# top level
items = []
# First go through and group hdas by job, if there is no job they get
# added directly to items
- jobs = dict()
+ jobs = odict()
for hda in history.active_datasets:
# Follow "copied from ..." association until we get to the original
# instance of the dataset
original_hda = hda
- while original_hda.copied_from_history_dataset_association:
- original_hda = original_hda.copied_from_history_dataset_association
+ ## while original_hda.copied_from_history_dataset_association:
+ ## original_hda = original_hda.copied_from_history_dataset_association
# Check if the job has a creating job, most should, datasets from
# before jobs were tracked, or from the upload tool before it
# created a job, may not
@@ -370,7 +377,7 @@
else:
jobs[ job ] = [ ( hda, None ) ]
# Second, go through the jobs and connect to workflows
- wf_invocations = dict()
+ wf_invocations = odict()
for job, hdas in jobs.iteritems():
# Job is attached to a workflow step, follow it to the
# workflow_invocation and group
@@ -1025,4 +1032,4 @@
msg = 'Clone with name "%s" is now included in your previously stored histories.' % new_history.name
else:
msg = '%d cloned histories are now included in your previously stored histories.' % len( histories )
- return trans.show_ok_message( msg )
\ No newline at end of file
+ return trans.show_ok_message( msg )
diff -r b8d25aabb98d -r 7cb131814770 templates/history/display_structured.mako
--- a/templates/history/display_structured.mako Tue Apr 20 11:47:51 2010 -0400
+++ b/templates/history/display_structured.mako Mon Apr 19 17:43:39 2010 -0400
@@ -16,6 +16,7 @@
.workflow {
border: solid gray 1px;
+ margin: 5px 0;
border-left-width: 5px;
}
@@ -96,9 +97,15 @@
<%def name="render_item_job( job, children )">
<div class="tool toolForm">
- <div class="header toolFormTitle">Tool: ${trans.app.toolbox.tools_by_id[job.tool_id].name}</div>
+ <%
+ if job.tool_id in trans.app.toolbox.tools_by_id:
+ tool_name = trans.app.toolbox.tools_by_id[job.tool_id].name
+ else:
+ tool_name = "Unknown tool with id '%s'" % job.tool_id
+ %>
+ <div class="header toolFormTitle">Tool: ${tool_name}</div>
<div class="body toolFormBody">
- %for e, c in children:
+ %for e, c in reversed( children ):
${render_item( e, c )}
%endfor
</div>
@@ -111,7 +118,7 @@
<div class="workflow">
<div class="header">Workflow: ${wf.workflow.name}</div>
<div class="body">
- %for e, c in children:
+ %for e, c in reversed( children ):
${render_item( e, c )}
%endfor
</div>
1
0