galaxy-commits

commit/galaxy-central: greg: Rewrite Pac Bio assembly_stats tool - it now takes 1 input fasta dataset and produces 1 output tabular dataset.
by Bitbucket 28 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/33fb9df64172/
changeset: 33fb9df64172
user: greg
date: 2011-07-28 17:41:01
summary: Rewrite Pac Bio assembly_stats tool - it now takes 1 input fasta dataset and produces 1 output tabular dataset. The "wiki" output format has been eliminated, and the dependency on the pbpy module has been eliminated.
affected #: 3 files (289 bytes)
--- a/tools/ilmn_pacbio/assembly_stats.py Thu Jul 28 10:00:02 2011 -0400
+++ b/tools/ilmn_pacbio/assembly_stats.py Thu Jul 28 11:41:01 2011 -0400
@@ -1,107 +1,83 @@
#!/usr/bin/env python
-import sys
-import os
-import random
+#
+#Copyright (c) 2011, Pacific Biosciences of California, Inc.
+#
+#All rights reserved.
+#
+#Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+# * Neither the name of Pacific Biosciences nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+#
+#THIS SOFTWARE IS PROVIDED BY PACIFIC BIOSCIENCES AND ITS CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+#WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL PACIFIC BIOSCIENCES OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+#DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+#LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+#(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import sys, os
+from optparse import OptionParser
+from galaxy import eggs
+import pkg_resources
+pkg_resources.require( 'bx-python' )
+from bx.seq.fasta import FastaReader
-from optparse import OptionParser
-from pbpy.io.FastaIO import FastaEntry, SimpleFastaReader
-
-class FastaStats:
- def __init__(self, argv):
- self.__parseOptions( argv )
-
- def __parseOptions(self, argv):
- usage = 'Usage: %prog [--help] [options] [fastaFileList]'
- parser = OptionParser( usage=usage )
- parser.add_option("--minContigLength", help="Minimum length of contigs to analyze")
- parser.add_option("--genomeLength", help="Length of genome to calculate N50s for.")
- parser.add_option("--outputFormat", help="Format of output [wiki]")
- parser.add_option("--noHeader", action="store_true",
- help="Don't output a header line" )
- parser.set_defaults( noHeader=False,
- minContigLength=0, genomeLength=0, outputFormat="wiki")
-
- self.options, args = parser.parse_args(argv)
-
- if len(args) < 2:
- parser.error( 'Expected 1 arguments' )
-
- self.fastaFiles = args[1:]
- self.outputFormat = self.options.outputFormat
- self.genomeLength = int(self.options.genomeLength)
- self.minContigLength = int(self.options.minContigLength)
- self.statKeys = "File Num Sum Max Avg N50 99%".split(" ")
-
- def getStats(self, fastaFile):
- lengths = []
- for entry in SimpleFastaReader(fastaFile):
- if len(entry.sequence) < self.minContigLength: continue
- lengths.append( len(entry.sequence) )
-
- stats = {"File":fastaFile,
- "Num":len(lengths),
- "Sum":sum(lengths),
- "Max":max(lengths),
- # "MinLenSum": sum( filter(lambda x: x > self.minContigLength, lengths)),
- "Avg":int(sum(lengths)/float(len(lengths))),
- "N50":0,
- "99%":0}
-
- if self.genomeLength == 0: self.genomeLength = sum(lengths)
-
+def getStats( fastaFile, genomeLength, minContigLength ):
+ lengths = []
+ stats = { "Num" : 0,
+ "Sum" : 0,
+ "Max" : 0,
+ "Avg" : 0,
+ "N50" : 0,
+ "99%" : 0 }
+ fasta_reader = FastaReader( open( fastaFile, 'rb' ) )
+ while True:
+ seq = fasta_reader.next()
+ if not seq:
+ break
+ if seq.length < minContigLength:
+ continue
+ lengths.append( seq.length )
+ if lengths:
+ stats[ 'Num' ] = len( lengths )
+ stats[ 'Sum' ] = sum( lengths )
+ stats[ 'Max' ] = max( lengths )
+ stats[ 'Avg' ] = int( sum( lengths ) / float( len( lengths ) ) )
+ stats[ 'N50' ] = 0
+ stats[ '99%' ] = 0
+ if genomeLength == 0:
+ genomeLength = sum( lengths )
lengths.sort()
lengths.reverse()
lenSum = 0
- stats["99%"] = len(lengths)
- for idx, length in enumerate(lengths):
+ stats[ "99%" ] = len( lengths )
+ for idx, length in enumerate( lengths ):
lenSum += length
- if (lenSum > self.genomeLength/2):
- stats["N50"] = length
+ if ( lenSum > genomeLength / 2 ):
+ stats[ "N50" ] = length
break
lenSum = 0
- for idx, length in enumerate(lengths):
+ for idx, length in enumerate( lengths ):
lenSum += length
- if (lenSum > self.genomeLength*0.99):
- stats["99%"] = idx + 1
+ if lenSum > genomeLength * 0.99:
+ stats[ "99%" ] = idx + 1
break
+ return stats
- return stats
+def __main__():
+ #Parse Command Line
+ usage = 'Usage: %prog input output --minContigLength'
+ parser = OptionParser( usage=usage )
+ parser.add_option( "--minContigLength", dest="minContigLength", help="Minimum length of contigs to analyze" )
+ parser.add_option( "--genomeLength", dest="genomeLength", help="Length of genome for which to calculate N50s" )
+ parser.set_defaults( minContigLength=0, genomeLength=0 )
+ options, args = parser.parse_args()
+ input_fasta_file = args[ 0 ]
+ output_tabular_file = args[ 1 ]
+ statKeys = "Num Sum Max Avg N50 99%".split( " " )
+ stats = getStats( input_fasta_file, int( options.genomeLength ), int( options.minContigLength ) )
+ fout = open( output_tabular_file, "w" )
+ fout.write( "%s\n" % "\t".join( map( lambda key: str( stats[ key ] ), statKeys ) ) )
+ fout.close()
- def header(self):
- if self.outputFormat == "wiki":
- buffer = '{| width="200" cellspacing="1" cellpadding="1" border="1"\n'
- buffer += '|-\n'
- for key in self.statKeys:
- buffer += '| %s\n' % key
- return buffer
- elif self.outputFormat == "tsv":
- return "%s\n" % "\t".join(self.statKeys)
- else:
- sys.exit("Unsupported format %s" % self.outputFormat)
-
- def footer(self):
- if self.outputFormat == "wiki":
- return "|}\n"
- else:
- return ""
-
- def format(self, stats):
- if self.outputFormat == "wiki":
- buffer = "|-\n"
- for key in self.statKeys:
- buffer += "| %s\n" % stats[key]
- return buffer
- elif self.outputFormat == "tsv":
- return "%s\n" % "\t".join(map(lambda key: str(stats[key]), self.statKeys))
- else:
- sys.exit("Unsupported format %s" % self.outputFormat)
-
- def run(self):
- if not self.options.noHeader:
- print self.header(),
- for file in self.fastaFiles: print self.format(self.getStats(file)),
- print self.footer()
-
-if __name__=='__main__':
- app = FastaStats(sys.argv)
- app.run()
+if __name__=="__main__": __main__()
--- a/tools/ilmn_pacbio/assembly_stats.xml Thu Jul 28 10:00:02 2011 -0400
+++ b/tools/ilmn_pacbio/assembly_stats.xml Thu Jul 28 11:41:01 2011 -0400
@@ -1,35 +1,33 @@
<tool id="assembly_stats" name="Assembly Statistics" version="1.0.0">
- <description>Calculate common measures of assembly quality</description>
- <command interpreter="python">
- assembly_stats.py ${wiki} --minContigLength=${minLength} $input1 > $output1
- </command>
- <inputs>
- <param name="input1" format="fasta" type="data" label="Select FASTA file containing contigs"/>
- <param name="minLength" type="integer" value="0" label="Minimum length of contigs to consider"/>
- <param name="wiki" type="boolean"
- checked="true" value="True"
- truevalue="--outputFormat=wiki"
- falsevalue="--noHeader --outputFormat=tsv"
- label="Human-readable?" />
- </inputs>
- <outputs>
- <data format="tabular" name="output1" label="Assembly statistics for ${on_string}"/>
- </outputs>
- <help>
+ <description>Calculate common measures of assembly quality</description>
+ <command interpreter="python">
+ assembly_stats.py $input1 $output1 --minContigLength=${minLength}
+ </command>
+ <inputs>
+ <param name="input1" format="fasta" type="data" label="Select FASTA file containing contigs"/>
+ <param name="minLength" type="integer" value="0" label="Minimum length of contigs to consider"/>
+ </inputs>
+ <outputs>
+ <data name="output1" format="tabular" label="Assembly statistics for ${on_string}"/>
+ </outputs>
+ <tests>
+ <test>
+ <param name="input1" value="3.fasta" ftype="fasta"/>
+ <param name="minLength" value="100"/>
+ <output name="output1" ftype="tabular" file="assembly_stats.tabular" />
+ </test>
+ </tests>
+ <help>
**What it does**
-Reports standard measures of *de novo* assembly quality such as
-number of contigs, sum of contigs, mean contig length, and N50.
+Reports standard measures of *de novo* assembly quality such as number of contigs, sum of contigs, mean contig length, and N50.
**Parameter list**
Minimum length
Only include contigs of this size or greater for calculating statistics.
-Human-readable?
- If true, output the statistics in a wiki format which can be read by humans. If false, output the metrics in a tab-delimited row.
-
**Output**
Num contigs
@@ -50,7 +48,7 @@
99%
Number of contigs accounting for 99% of the observed assembly.
- </help>
+ </help></tool>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
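
The rewritten wrapper now runs the script as "assembly_stats.py $input1 $output1 --minContigLength=${minLength}" (see the XML diff above) and writes a single tab-separated row with the columns Num, Sum, Max, Avg, N50 and 99%. A minimal sketch of how the last two statistics fall out of a list of contig lengths, mirroring the getStats() logic in the diff (the lengths below are illustrative only):

    # Worked example of the N50 / 99% statistics computed by getStats() above.
    lengths = [500, 400, 300, 200, 100]       # illustrative contig lengths, Sum = 1500
    genome_length = sum(lengths)              # the tool's default when --genomeLength=0

    lengths.sort(reverse=True)
    running, n50 = 0, 0
    for length in lengths:
        running += length
        if running > genome_length / 2:       # first contig that pushes past 50% of the genome
            n50 = length                      # -> 400 for this example
            break

    running, n99 = 0, len(lengths)
    for idx, length in enumerate(lengths):
        running += length
        if running > genome_length * 0.99:    # contigs covering 99% of the observed assembly
            n99 = idx + 1                     # -> 5 for this example
            break

    print("N50 = %d, contigs for 99%% = %d" % (n50, n99))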

commit/galaxy-central: jgoecks: Add necessary import statement for e7214c69ed7d.
by Bitbucket 28 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/d99894825096/
changeset: d99894825096
user: jgoecks
date: 2011-07-28 16:00:02
summary: Add necessary import statement for e7214c69ed7d.
affected #: 1 file (8 bytes)
--- a/lib/galaxy/datatypes/checkers.py Thu Jul 28 08:22:22 2011 -0400
+++ b/lib/galaxy/datatypes/checkers.py Thu Jul 28 10:00:02 2011 -0400
@@ -1,4 +1,4 @@
-import os, gzip, re, gzip, zipfile, binascii, bz2
+import os, gzip, re, gzip, zipfile, binascii, bz2, imghdr
from galaxy import util
try:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
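
The change itself is just the added imghdr import in lib/galaxy/datatypes/checkers.py; the code that needs it was introduced in the referenced changeset e7214c69ed7d, which is not shown in this message. imghdr is the Python standard-library module for sniffing image types from file contents, so the checker presumably does something along these lines (a hypothetical sketch, not the actual Galaxy code):

    import imghdr

    def check_image(file_path):
        # Hypothetical sketch: return the detected image type ('gif', 'png',
        # 'jpeg', ...) based on the file's magic bytes, or None if the file
        # does not look like a known image format.
        return imghdr.what(file_path)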
5 new changesets in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/7507b7be224c/
changeset: 7507b7be224c
user: dannon
date: 2011-07-21 20:22:23
summary: Trailing whitespace cleanup workflow editor.
affected #: 1 file (612 bytes)
--- a/templates/workflow/editor.mako Thu Jul 21 13:55:24 2011 -0400
+++ b/templates/workflow/editor.mako Thu Jul 21 14:22:23 2011 -0400
@@ -18,17 +18,17 @@
</%def><%def name="javascripts()">
-
+
${parent.javascripts()}
-
+
<!--[if lt IE 9]><script type='text/javascript' src="${h.url_for('/static/scripts/excanvas.js')}"></script><![endif]-->
${h.js( "jquery",
"jquery.tipsy",
- "jquery.event.drag",
- "jquery.event.drop",
+ "jquery.event.drag",
+ "jquery.event.drop",
"jquery.event.hover",
"jquery.form",
"json2",
@@ -43,7 +43,7 @@
window.lt_ie_7 = true;
</script><![endif]-->
-
+
<script type='text/javascript'>
// Globals
workflow = null;
@@ -51,10 +51,10 @@
active_ajax_call = false;
var galaxy_async = new GalaxyAsync();
galaxy_async.set_func_url(galaxy_async.set_user_pref, "${h.url_for( controller='user', action='set_user_pref_async' )}");
-
+
// jQuery onReady
$( function() {
-
+
if ( window.lt_ie_7 ) {
show_modal(
"Browser not supported",
@@ -62,7 +62,7 @@
);
return;
}
-
+
// Init tool options.
%if trans.app.toolbox_search.enabled:
make_popupmenu( $("#tools-options-button"), {
@@ -71,7 +71,7 @@
show_tool_search = False
if trans.user:
show_tool_search = trans.user.preferences.get( "workflow.show_tool_search", "True" )
-
+
if show_tool_search == "True":
initial_text = "Hide Search"
else:
@@ -85,7 +85,6 @@
pref_value = "False";
menu_option_text = "Search Tools";
menu.toggle();
-
// Reset search.
reset_tool_search(true);
} else {
@@ -94,14 +93,12 @@
menu_option_text = "Hide Search";
menu.toggle();
}
-
// Update menu option.
$("#tools-options-button-menu").find("li").eq(0).text(menu_option_text);
-
galaxy_async.set_user_pref("workflow.show_tool_search", pref_value);
}
});
-
+
// Init searching.
$("#tool-search-query").click( function (){
$(this).focus();
@@ -110,7 +107,6 @@
.keyup( function () {
// Remove italics.
$(this).css("font-style", "normal");
-
// Don't update if same value as last time
if ( this.value.length < 3 ) {
reset_tool_search(false);
@@ -127,7 +123,6 @@
// Start a new ajax-request in X ms
$("#search-spinner").show();
this.timer = setTimeout(function () {
-
$.get("${h.url_for( controller='root', action='tool_search' )}", { query: q }, function (data) {
// input.removeClass(config.loadingClass);
// Show live-search if results and search-term aren't empty
@@ -139,17 +134,15 @@
if ( data.length != 0 ) {
// Map tool ids to element ids and join them.
var s = $.map( data, function( n, i ) { return "#link-" + n; } ).join( ", " );
-
// First pass to show matching tools and their parents.
$(s).each( function() {
// Add class to denote match.
$(this).parent().addClass("search_match");
$(this).parent().show().parent().parent().show().parent().show();
});
-
// Hide labels that have no visible children.
$(".toolPanelLabel").each( function() {
- var this_label = $(this);
+ var this_label = $(this);
var next = this_label.next();
var no_visible_tools = true;
// Look through tools following label and, if none are visible, hide label.
@@ -174,11 +167,11 @@
}
this.lastValue = this.value;
});
- %endif
-
+ %endif
+
// Canvas overview management
canvas_manager = new CanvasManager( $("#canvas-viewport"), $("#overview") );
-
+
// Initialize workflow state
reset();
// Load the datatype info
@@ -225,7 +218,7 @@
});
}
});
-
+
// For autosave purposes
$(document).ajaxStart( function() {
active_ajax_call = true;
@@ -233,14 +226,14 @@
active_ajax_call = false;
});
});
-
+
$(document).ajaxError( function ( e, x ) {
// console.log( e, x );
var message = x.responseText || x.statusText || "Could not connect to server";
show_modal( "Server error", message, { "Ignore error" : hide_modal } );
return false;
});
-
+
make_popupmenu( $("#workflow-options-button"), {
"Save" : save_current_workflow,
##"Create New" : create_new_workflow_dialog,
@@ -250,7 +243,7 @@
##"Load a Workflow" : load_workflow,
"Close": close_editor
});
-
+
function edit_workflow_outputs(){
workflow.clear_active_node();
$('.right-content').hide();
@@ -297,14 +290,14 @@
scroll_to_nodes();
canvas_manager.draw_overview();
}
-
+
function edit_workflow_attributes() {
workflow.clear_active_node();
$('.right-content').hide();
$('#edit-attributes').show();
}
-
+
// On load, set the size to the pref stored in local storage if it exists
overview_size = $.jStorage.get("overview-size");
if (overview_size !== undefined) {
@@ -313,14 +306,14 @@
height: overview_size
});
}
-
+
// Show viewport on load unless pref says it's off
if ($.jStorage.get("overview-off")) {
hide_overview();
} else {
show_overview();
}
-
+
// Stores the size of the overview into local storage when it's resized
$("#overview-border").bind( "dragend", function( e, d ) {
var op = $(this).offsetParent();
@@ -329,19 +322,19 @@
op.height() - ( d.offsetY - opo.top ) );
$.jStorage.set("overview-size", new_size + "px");
});
-
+
function show_overview() {
$.jStorage.set("overview-off", false);
$("#overview-border").css("right", "0px");
$("#close-viewport").css("background-position", "0px 0px");
}
-
+
function hide_overview() {
$.jStorage.set("overview-off", true);
$("#overview-border").css("right", "20000px");
$("#close-viewport").css("background-position", "12px 0px");
}
-
+
// Lets the overview be toggled visible and invisible, adjusting the arrows accordingly
$("#close-viewport").click( function() {
if ( $("#overview-border").css("right") === "0px" ) {
@@ -350,19 +343,19 @@
show_overview();
}
});
-
+
// Unload handler
window.onbeforeunload = function() {
if ( workflow && workflow.has_changes ) {
return "There are unsaved changes to your workflow which will be lost.";
}
};
-
+
// Tool menu
$( "div.toolSectionBody" ).hide();
$( "div.toolSectionTitle > span" ).wrap( "<a href='#'></a>" );
var last_expanded = null;
- $( "div.toolSectionTitle" ).each( function() {
+ $( "div.toolSectionTitle" ).each( function() {
var body = $(this).next( "div.toolSectionBody" );
$(this).click( function() {
if ( body.is( ":hidden" ) ) {
@@ -379,7 +372,7 @@
// Rename async.
async_save_text("workflow-name", "workflow-name", "${h.url_for( action='rename_async', id=trans.security.encode_id(stored.id) )}", "new_name");
-
+
// Tag async. Simply have the workflow edit element generate a click on the tag element to activate tagging.
$('#workflow-tag').click( function() {
$('.tag-area').click();
@@ -396,7 +389,7 @@
}
workflow = new Workflow( $("#canvas-container") );
}
-
+
function scroll_to_nodes() {
var cv = $("#canvas-viewport");
var cc = $("#canvas-container");
@@ -413,7 +406,7 @@
}
cc.css( { left: left, top: top } );
}
-
+
// Add a new step to the workflow by tool id
function add_node_for_tool( id, title ) {
var node = prebuild_node( 'tool', title, id );
@@ -422,7 +415,7 @@
canvas_manager.draw_overview();
workflow.activate_node( node );
$.ajax( {
- url: "${h.url_for( action='get_new_module_info' )}",
+ url: "${h.url_for( action='get_new_module_info' )}",
data: { type: "tool", tool_id: id, "_": "true" },
global: false,
dataType: "json",
@@ -438,7 +431,7 @@
}
});
}
-
+
function add_node_for_module( type, title ) {
node = prebuild_node( type, title );
workflow.add_node( node );
@@ -446,8 +439,8 @@
canvas_manager.draw_overview();
workflow.activate_node( node );
$.ajax( {
- url: "${h.url_for( action='get_new_module_info' )}",
- data: { type: type, "_": "true" },
+ url: "${h.url_for( action='get_new_module_info' )}",
+ data: { type: type, "_": "true" },
dataType: "json",
success: function( data ) {
node.init_field_data( data );
@@ -479,11 +472,11 @@
workflow.active_form_has_changes = true;
});
}
-
+
function display_pja_list(){
return "${ActionBox.get_add_list()}";
}
-
+
function display_file_list(node){
addlist = "<select id='node_data_list' name='node_data_list'>";
for (var out_terminal in node.output_terminals){
@@ -492,7 +485,7 @@
addlist += "</select>";
return addlist;
}
-
+
function new_pja(action_type, target, node){
if (node.post_job_actions === undefined){
//New tool node, set up dict.
@@ -511,7 +504,7 @@
return false;
}
}
-
+
function show_workflow_parameters(){
var parameter_re = /\$\{.+?\}/g;
var workflow_parameters = [];
@@ -532,7 +525,7 @@
if (arg_matches){
matches = matches.concat(arg_matches);
}
- });
+ });
}
});
if (matches){
@@ -541,7 +534,7 @@
workflow_parameters.push(element);
}
});
- }
+ }
}
});
if (workflow_parameters && workflow_parameters.length !== 0){
@@ -555,7 +548,7 @@
wf_parm_box.hide();
}
}
-
+
function show_form_for_tool( text, node ) {
$('.right-content').hide();
$("#right-content").show().html( text );
@@ -632,7 +625,7 @@
});
});
}
-
+
var close_editor = function() {
<% next_url = h.url_for( controller='workflow', action='index' ) %>
workflow.check_changes_in_active_form();
@@ -655,7 +648,7 @@
window.document.location = "${next_url}";
}
};
-
+
var save_current_workflow = function ( eventObj, success_callback ) {
show_modal( "Saving workflow", "progress" );
workflow.check_changes_in_active_form();
@@ -677,7 +670,7 @@
"_": "true"
},
dataType: 'json',
- success: function( data ) {
+ success: function( data ) {
var body = $("<div></div>").text( data.message );
if ( data.errors ) {
body.addClass( "warningmark" );
@@ -704,7 +697,7 @@
}
});
};
-
+
// We bind to ajaxStop because of auto-saving, since the form submission ajax
// call needs to be completed so that the new data is saved
if (active_ajax_call) {
@@ -718,7 +711,7 @@
savefn(success_callback);
}
};
-
+
</script></%def>
@@ -732,7 +725,7 @@
<style type="text/css">
body { margin: 0; padding: 0; overflow: hidden; }
-
+
/* Wider right panel */
#center { right: 309px; }
#right-border { right: 300px; }
@@ -744,11 +737,11 @@
## top: 2.5em;
## margin-top: 7px;
## }
-
+
#left {
background: #C1C9E5 url(${h.url_for('/static/style/menu_bg.png')}) top repeat-x;
}
-
+
div.toolMenu {
margin: 5px;
margin-left: 10px;
@@ -785,8 +778,8 @@
.right-content {
margin: 5px;
}
-
- canvas { position: absolute; z-index: 10; }
+
+ canvas { position: absolute; z-index: 10; }
canvas.dragging { position: absolute; z-index: 1000; }
.input-terminal { width: 12px; height: 12px; background: url(${h.url_for('/static/style/workflow_circle_open.png')}); position: absolute; top: 50%; margin-top: -6px; left: -6px; z-index: 1500; }
.output-terminal { width: 12px; height: 12px; background: url(${h.url_for('/static/style/workflow_circle_open.png')}); position: absolute; top: 50%; margin-top: -6px; right: -6px; z-index: 1500; }
@@ -795,12 +788,12 @@
## .input-terminal-hover { background: yellow; border: solid black 1px; }
.unselectable { -moz-user-select: none; -khtml-user-select: none; user-select: none; }
img { border: 0; }
-
+
div.buttons img {
width: 16px; height: 16px;
cursor: pointer;
}
-
+
## Extra styles for the representation of a tool on the canvas (looks like
## a tiny tool form)
div.toolFormInCanvas {
@@ -809,18 +802,18 @@
## min-width: 130px;
margin: 6px;
}
-
+
div.toolForm-active {
z-index: 1001;
border: solid #8080FF 4px;
margin: 3px;
}
-
+
div.toolFormTitle {
cursor: move;
min-height: 16px;
}
-
+
div.titleRow {
font-weight: bold;
border-bottom: dotted gray 1px;
@@ -830,7 +823,7 @@
div.form-row {
position: relative;
}
-
+
div.tool-node-error div.toolFormTitle {
background: #FFCCCC;
border-color: #AA6666;
@@ -838,14 +831,14 @@
div.tool-node-error {
border-color: #AA6666;
}
-
+
#canvas-area {
position: absolute;
top: 0; left: 305px; bottom: 0; right: 0;
border: solid red 1px;
overflow: none;
}
-
+
.form-row {
}
@@ -855,14 +848,14 @@
.form-row-clear {
clear: both;
}
-
+
div.rule {
height: 0;
border: none;
border-bottom: dotted black 1px;
margin: 0 5px;
}
-
+
.callout {
position: absolute;
z-index: 10000;
@@ -871,21 +864,21 @@
.pjaForm {
margin-bottom:10px;
}
-
+
.pjaForm .toolFormBody{
padding:10px;
}
-
+
.pjaForm .toolParamHelp{
padding:5px;
}
-
+
.panel-header-button-group {
margin-right: 5px;
padding-right: 5px;
border-right: solid gray 1px;
}
-
+
</style></%def>
@@ -945,7 +938,7 @@
${n_('Tools')}
</div></div>
-
+
<div class="unified-panel-body" style="overflow: auto;"><div class="toolMenu">
## Tool search.
@@ -953,7 +946,6 @@
show_tool_search = False
if trans.user:
show_tool_search = trans.user.preferences.get( "workflow.show_tool_search", "True" )
-
if show_tool_search == "True":
display = "block"
else:
@@ -963,7 +955,6 @@
<input type="text" name="query" value="search tools" id="tool-search-query" style="width: 100%; font-style:italic; font-size: inherit" autocomplete="off"/><img src="${h.url_for('/static/images/loading_small_white_bg.gif')}" id="search-spinner" style="display: none; position: absolute; right: 0; top: 5px;"/></div>
-
<div class="toolSectionList">
%for key, val in app.toolbox.tool_panel.items():
<div class="toolSectionWrapper">
@@ -1007,10 +998,10 @@
<a href="#" onclick="add_node_for_module( 'data_input', 'Input Dataset' )">Input dataset</a></div></div>
- </div>
+ </div></div></div>
-
+
</%def><%def name="center_panel()">
@@ -1023,7 +1014,6 @@
Workflow Canvas | ${h.to_unicode( stored.name ) | h}
</div></div>
-
<div class="unified-panel-body"><div id="canvas-viewport" style="width: 100%; height: 100%; position: absolute; overflow: hidden; background: #EEEEEE; background: white url(${h.url_for('/static/images/light_gray_grid.gif')}) repeat;"><div id="canvas-container" style="position: absolute; width: 100%; height: 100%;"></div>
@@ -1079,7 +1069,7 @@
<div class="toolParamHelp">Apply tags to make it easy to search for and find items with the same tag.</div></div>
## Workflow annotation.
- ## Annotation elt.
+ ## Annotation elt.
<div id="workflow-annotation-area" class="form-row"><label>Annotation / Notes:</label><div id="workflow-annotation" class="tooltip editable-text" original-title="Click to edit annotation">
http://bitbucket.org/galaxy/galaxy-central/changeset/2687588bca04/
changeset: 2687588bca04
user: dannon
date: 2011-07-26 19:18:22
summary: merge
affected #: 17 files (15.8 KB)
--- a/datatypes_conf.xml.sample Thu Jul 21 14:22:23 2011 -0400
+++ b/datatypes_conf.xml.sample Tue Jul 26 13:18:22 2011 -0400
@@ -123,7 +123,7 @@
<datatype extension="vcf" type="galaxy.datatypes.tabular:Vcf" display_in_upload="true"><converter file="vcf_to_bgzip_converter.xml" target_datatype="bgzip"/><converter file="vcf_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
- <converter file="vcf_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ <converter file="vcf_to_summary_tree_converter.xml" target_datatype="summary_tree"/></datatype><datatype extension="wsf" type="galaxy.datatypes.wsf:SnpFile" display_in_upload="true"/><datatype extension="velvet" type="galaxy.datatypes.assembly:Velvet" display_in_upload="false"/>
@@ -274,10 +274,10 @@
</registration><sniffers><!--
- The order in which Galaxy attempts to determine data types is
- important because some formats are much more loosely defined
- than others. The following list should be the most rigidly
- defined format first, followed by next-most rigidly defined,
+ The order in which Galaxy attempts to determine data types is
+ important because some formats are much more loosely defined
+ than others. The following list should be the most rigidly
+ defined format first, followed by next-most rigidly defined,
and so on.
--><sniffer type="galaxy.datatypes.tabular:Vcf"/>
--- a/lib/galaxy/web/controllers/tracks.py Thu Jul 21 14:22:23 2011 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Tue Jul 26 13:18:22 2011 -0400
@@ -90,10 +90,14 @@
class DbKeyColumn( grids.GridColumn ):
""" Column for filtering by and displaying dataset dbkey. """
def filter( self, trans, user, query, dbkey ):
- """ Filter by dbkey. """
+ """ Filter by dbkey; datasets without a dbkey are returned as well. """
# use raw SQL b/c metadata is a BLOB
dbkey = dbkey.replace("'", "\\'")
- return query.filter( or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ) )
+ return query.filter( or_( \
+ or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ), \
+ or_( "metadata like '%%\"dbkey\": [\"?\"]%%'", "metadata like '%%\"dbkey\": \"?\"%%'" ) \
+ )
+ )
class HistoryColumn( grids.GridColumn ):
""" Column for filtering by history id. """
--- a/lib/galaxy/webapps/community/controllers/repository.py Thu Jul 21 14:22:23 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py Tue Jul 26 13:18:22 2011 -0400
@@ -935,14 +935,22 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, repository_id )
- tool = load_tool( trans, os.path.abspath( tool_config ) )
- tool_state = self.__new_state( trans )
- return trans.fill_template( "/webapps/community/repository/tool_form.mako",
- repository=repository,
- tool=tool,
- tool_state=tool_state,
- message=message,
- status=status )
+ try:
+ tool = load_tool( trans, os.path.abspath( tool_config ) )
+ tool_state = self.__new_state( trans )
+ return trans.fill_template( "/webapps/community/repository/tool_form.mako",
+ repository=repository,
+ tool=tool,
+ tool_state=tool_state,
+ message=message,
+ status=status )
+ except Exception, e:
+ message = 'Error loading tool: %s. Click <b>Reset metadata</b> to correct this error.' % str( e )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=repository_id,
+ message=message,
+ status='error' ) )
def __new_state( self, trans, all_pages=False ):
"""
Create a new `DefaultToolState` for this tool. It will not be initialized
@@ -955,6 +963,27 @@
state.inputs = {}
return state
@web.expose
+ def view_tool_metadata( self, trans, repository_id, changeset_revision, tool_id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ repository = get_repository( trans, repository_id )
+ metadata = {}
+ tool = None
+ repository_metadata = get_repository_metadata( trans, repository_id, changeset_revision ).metadata
+ if 'tools' in repository_metadata:
+ for tool_metadata_dict in repository_metadata[ 'tools' ]:
+ if tool_metadata_dict[ 'id' ] == tool_id:
+ metadata = tool_metadata_dict
+ tool = load_tool( trans, os.path.abspath( metadata[ 'tool_config' ] ) )
+ break
+ return trans.fill_template( "/webapps/community/repository/view_tool_metadata.mako",
+ repository=repository,
+ tool=tool,
+ metadata=metadata,
+ message=message,
+ status=status )
+ @web.expose
def download( self, trans, repository_id, file_type, **kwd ):
# Download an archive of the repository files compressed as zip, gz or bz2.
params = util.Params( kwd )
--- a/static/june_2007_style/blue/panel_layout.css Thu Jul 21 14:22:23 2011 -0400
+++ b/static/june_2007_style/blue/panel_layout.css Tue Jul 26 13:18:22 2011 -0400
@@ -1,4 +1,3 @@
-body,html{overflow:hidden;margin:0;padding:0;width:100%;height:100%;}
body{font:75% "Lucida Grande",verdana,arial,helvetica,sans-serif;background:#eee;}
.unselectable{user-select:none;-moz-user-select:none;-webkit-user-select:none;}
#background{position:absolute;background:#eee;z-index:-1;top:0;left:0;margin:0;padding:0;width:100%;height:100%;}
--- a/static/june_2007_style/blue/trackster.css Thu Jul 21 14:22:23 2011 -0400
+++ b/static/june_2007_style/blue/trackster.css Tue Jul 26 13:18:22 2011 -0400
@@ -55,4 +55,4 @@
.icon.more-down{background:url('../images/fugue/arrow-transition-270-bw.png') no-repeat 0px 0px;}
.icon.more-across{background:url('../images/fugue/arrow-transition-bw.png') no-repeat 0px 0px;}
.intro{padding:1em;}
-.intro>.action-button{background-color:#CCC;padding:1em;}
\ No newline at end of file
+.intro > .action-button{background-color:#CCC;padding:1em;}
--- a/static/june_2007_style/panel_layout.css.tmpl Thu Jul 21 14:22:23 2011 -0400
+++ b/static/june_2007_style/panel_layout.css.tmpl Tue Jul 26 13:18:22 2011 -0400
@@ -1,11 +1,3 @@
-body, html {
- overflow: hidden;
- margin: 0;
- padding: 0;
- width: 100%;
- height: 100%;
-}
-
body {
font: 75% "Lucida Grande",verdana,arial,helvetica,sans-serif;
background: ${layout_bg};
--- a/static/scripts/trackster.js Thu Jul 21 14:22:23 2011 -0400
+++ b/static/scripts/trackster.js Tue Jul 26 13:18:22 2011 -0400
@@ -807,6 +807,9 @@
}
view.redraw();
},
+ /**
+ * Add a track to the view.
+ */
add_track: function(track) {
track.view = this;
track.track_id = this.track_id_counter;
@@ -822,6 +825,9 @@
label_track.view = this;
this.label_tracks.push(label_track);
},
+ /**
+ * Remove a track from the view.
+ */
remove_track: function(track) {
this.has_changes = true;
delete this.tracks[this.tracks.indexOf(track)];
@@ -1588,23 +1594,24 @@
/**
* Tiles drawn by tracks.
*/
-var Tile = function(index, resolution, canvas) {
+var Tile = function(index, resolution, canvas, data) {
this.index = index;
this.low = index * DENSITY * resolution;
this.high = (index + 1) * DENSITY * resolution;
this.resolution = resolution;
// Wrap element in div for background.
this.canvas = $("<div class='track-tile'/>").append(canvas);
+ this.data = data;
this.stale = false;
};
-var SummaryTreeTile = function(index, resolution, canvas, max_val) {
- Tile.call(this, index, resolution, canvas);
+var SummaryTreeTile = function(index, resolution, canvas, data, max_val) {
+ Tile.call(this, index, resolution, canvas, data);
this.max_val = max_val;
};
-var FeatureTrackTile = function(index, resolution, canvas, message) {
- Tile.call(this, index, resolution, canvas);
+var FeatureTrackTile = function(index, resolution, canvas, data, message) {
+ Tile.call(this, index, resolution, canvas, data);
this.message = message;
};
@@ -2096,13 +2103,17 @@
filters[f].update_ui_elt();
}
- // Determine if filters are available; this is based on the example feature.
- var filters_available = false;
- if (track.example_feature) {
- for (var f = 0; f < filters.length; f++) {
- if (filters[f].applies_to(track.example_feature)) {
- filters_available = true;
- break;
+ // Determine if filters are available; this is based on the tiles' data.
+ var filters_available = false,
+ example_feature;
+ for (var i = 0; i < drawn_tiles.length; i++) {
+ if (drawn_tiles[i].data.length) {
+ example_feature = drawn_tiles[i].data[0];
+ for (var f = 0; f < filters.length; f++) {
+ if (filters[f].applies_to(example_feature)) {
+ filters_available = true;
+ break;
+ }
}
}
}
@@ -2385,7 +2396,7 @@
var c_start = Math.round(c * w_scale);
ctx.fillText(seq[c], c_start + track.left_offset, 10);
}
- return new Tile(tile_index, resolution, canvas);
+ return new Tile(tile_index, resolution, canvas, seq);
}
this.content_div.css("height", "0px");
}
@@ -2523,7 +2534,7 @@
var painter = new painters.LinePainter(result.data, tile_low, tile_low + tile_length, this.prefs, this.mode);
painter.draw(ctx, width, height);
- return new Tile(tile_index, resolution, canvas);
+ return new Tile(tile_index, resolution, canvas, result.data);
}
});
@@ -2737,7 +2748,7 @@
// TODO: this shouldn't be done at the tile level
this.container_div.find(".yaxislabel").remove();
var max_label = $("<div />").addClass('yaxislabel');
- max_label.text( result.max );
+ max_label.text(result.max);
max_label.css({ position: "absolute", top: "24px", left: "10px", color: this.prefs.label_color });
max_label.prependTo(this.container_div);
// Create canvas
@@ -2760,7 +2771,7 @@
// Deal with left_offset by translating
ctx.translate(left_offset, SUMMARY_TREE_TOP_PADDING);
painter.draw(ctx, width, required_height);
- return new SummaryTreeTile(tile_index, resolution, canvas, result.max);
+ return new SummaryTreeTile(tile_index, resolution, canvas, result.data, result.max);
}
// Start dealing with row-by-row tracks
@@ -2811,16 +2822,12 @@
this.container_div.find(".yaxislabel").remove();
if (result.data) {
- // Set example feature. This is needed so that track can update its UI based on feature attributes.
- // TODO: use tile data rather than example feature?
- this.example_feature = (result.data.length ? result.data[0] : undefined);
-
// Draw features.
ctx.translate(left_offset, 0);
painter.draw(ctx, width, required_height, slots);
}
- return new FeatureTrackTile(tile_index, resolution, canvas, result.message);
+ return new FeatureTrackTile(tile_index, resolution, canvas, result.data, result.message);
}
});
--- a/templates/base_panels.mako Thu Jul 21 14:22:23 2011 -0400
+++ b/templates/base_panels.mako Tue Jul 26 13:18:22 2011 -0400
@@ -19,6 +19,13 @@
<%def name="stylesheets()">
${h.css('base','panel_layout','jquery.rating')}
<style type="text/css">
+ body, html {
+ overflow: hidden;
+ margin: 0;
+ padding: 0;
+ width: 100%;
+ height: 100%;
+ }
#center {
%if not self.has_left_panel:
left: 0;
--- a/templates/tool_form.mako Thu Jul 21 14:22:23 2011 -0400
+++ b/templates/tool_form.mako Tue Jul 26 13:18:22 2011 -0400
@@ -2,7 +2,12 @@
<%namespace file="/base_panels.mako" import="overlay" /><%def name="stylesheets()">
- ${h.css( "autocomplete_tagging", "panel_layout", "base", "library" )}
+ ${h.css( "autocomplete_tagging", "base", "panel_layout", "library" )}
+ <style type="text/css">
+ html, body {
+ background-color: #fff;
+ }
+ </style></%def><%def name="javascripts()">
--- a/templates/tracks/browser.mako Thu Jul 21 14:22:23 2011 -0400
+++ b/templates/tracks/browser.mako Tue Jul 26 13:18:22 2011 -0400
@@ -273,7 +273,7 @@
}
// Add track.
- tracks.push( {
+ tracks.push({
"track_type": track.get_type(),
"name": track.name,
"hda_ldda": track.hda_ldda,
--- a/templates/visualization/display.mako Thu Jul 21 14:22:23 2011 -0400
+++ b/templates/visualization/display.mako Tue Jul 26 13:18:22 2011 -0400
@@ -122,6 +122,11 @@
// Keyboard navigation. Scroll ~7% of height when scrolling up/down.
//
$(document).keydown(function(e) {
+ // Do not navigate if arrow keys used in input element.
+ if ($(e.srcElement).is(':input')) {
+ return;
+ }
+
// Key codes: left == 37, up == 38, right == 39, down == 40
switch(e.which) {
case 37:
--- a/templates/webapps/community/repository/manage_repository.mako Thu Jul 21 14:22:23 2011 -0400
+++ b/templates/webapps/community/repository/manage_repository.mako Tue Jul 26 13:18:22 2011 -0400
@@ -120,7 +120,7 @@
<div style="clear: both"></div></div><div class="form-row">
- <label>Version:</label>
+ <label>Revision:</label>
%if can_view_change_log:
<a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${repository.revision}</a>
%else:
@@ -151,34 +151,6 @@
</form></div></div>
-<p/>
-<div class="toolForm">
- <div class="toolFormTitle">Manage categories</div>
- <div class="toolFormBody">
- <form name="categories" id="categories" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
- <div class="form-row">
- <label>Categories</label>
- <select name="category_id" multiple>
- %for category in categories:
- %if category.id in selected_categories:
- <option value="${trans.security.encode_id( category.id )}" selected>${category.name}</option>
- %else:
- <option value="${trans.security.encode_id( category.id )}">${category.name}</option>
- %endif
- %endfor
- </select>
- <div class="toolParamHelp" style="clear: both;">
- Multi-select list - hold the appropriate key while clicking to select multiple categories.
- </div>
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <input type="submit" name="manage_categories_button" value="Save"/>
- </div>
- </form>
- </div>
-</div>
-<p/>
%if can_set_metadata:
<p/><div class="toolForm">
@@ -204,7 +176,16 @@
</tr>
%for tool_dict in tool_dicts:
<tr>
- <td><a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">${tool_dict[ 'name' ]}</a></td>
+ <td>
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${tool_dict[ 'id' ]}-popup">
+ <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">
+ ${tool_dict[ 'name' ]}
+ </a>
+ </div>
+ <div popupmenu="tool-${tool_dict[ 'id' ]}-popup">
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), changeset_revision=repository.tip, tool_id=tool_dict[ 'id' ] )}">View all metadata for this tool</a>
+ </div>
+ </td><td>${tool_dict[ 'description' ]}</td><td>${tool_dict[ 'version' ]}</td><td>
@@ -274,6 +255,33 @@
</div></div>
%endif
+<p/>
+<div class="toolForm">
+ <div class="toolFormTitle">Manage categories</div>
+ <div class="toolFormBody">
+ <form name="categories" id="categories" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+ <div class="form-row">
+ <label>Categories</label>
+ <select name="category_id" multiple>
+ %for category in categories:
+ %if category.id in selected_categories:
+ <option value="${trans.security.encode_id( category.id )}" selected>${category.name}</option>
+ %else:
+ <option value="${trans.security.encode_id( category.id )}">${category.name}</option>
+ %endif
+ %endfor
+ </select>
+ <div class="toolParamHelp" style="clear: both;">
+ Multi-select list - hold the appropriate key while clicking to select multiple categories.
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="manage_categories_button" value="Save"/>
+ </div>
+ </form>
+ </div>
+</div>
%if trans.app.config.smtp_server:
<p/><div class="toolForm">
@@ -330,8 +338,8 @@
</form></div></div>
-<p/>
%if repository.ratings:
+ <p/><div class="toolForm"><div class="toolFormTitle">Rating</div><div class="toolFormBody">
--- a/templates/webapps/community/repository/view_repository.mako Thu Jul 21 14:22:23 2011 -0400
+++ b/templates/webapps/community/repository/view_repository.mako Tue Jul 26 13:18:22 2011 -0400
@@ -118,7 +118,7 @@
</div>
%endif
<div class="form-row">
- <label>Version:</label>
+ <label>Revision:</label>
%if can_view_change_log:
<a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${repository.revision}</a>
%else:
@@ -145,20 +145,6 @@
%endif
</div></div>
-%if repository.categories:
- <p/>
- <div class="toolForm">
- <div class="toolFormTitle">Categories</div>
- <div class="toolFormBody">
- %for rca in repository.categories:
- <div class="form-row">
- ${rca.category.name}
- </div>
- %endfor
- <div style="clear: both"></div>
- </div>
- </div>
-%endif
%if metadata:
<p/><div class="toolForm">
@@ -183,9 +169,18 @@
</tr>
%for tool_dict in tool_dicts:
<tr>
- <td><a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">${tool_dict[ 'name' ]}</a></td>
+ <td>
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${repository.id}-popup">
+ <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">
+ ${tool_dict[ 'name' ]}
+ </a>
+ </div>
+ <div popupmenu="tool-${repository.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), changeset_revision=repository.tip, tool_id=tool_dict[ 'id' ] )}">View all metadata for this tool</a>
+ </div>
+ </td><td>${tool_dict[ 'description' ]}</td>
- <td>version: ${tool_dict[ 'version' ]}</td>
+ <td>${tool_dict[ 'version' ]}</td><td><%
if 'requirements' in tool_dict:
@@ -242,6 +237,20 @@
</div></div>
%endif
+%if repository.categories:
+ <p/>
+ <div class="toolForm">
+ <div class="toolFormTitle">Categories</div>
+ <div class="toolFormBody">
+ %for rca in repository.categories:
+ <div class="form-row">
+ ${rca.category.name}
+ </div>
+ %endfor
+ <div style="clear: both"></div>
+ </div>
+ </div>
+%endif
%if trans.user and trans.app.config.smtp_server:
<p/><div class="toolForm">
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/webapps/community/repository/view_tool_metadata.mako Tue Jul 26 13:18:22 2011 -0400
@@ -0,0 +1,202 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/community/common/common.mako" import="*" />
+<%namespace file="/webapps/community/repository/common.mako" import="*" />
+
+<%
+ from galaxy.web.framework.helpers import time_ago
+ from urllib import quote_plus
+ is_admin = trans.user_is_admin()
+ is_new = repository.is_new
+ can_push = trans.app.security_agent.can_push( trans.user, repository )
+ can_upload = can_push
+ can_browse_contents = not is_new
+ can_rate = repository.user != trans.user
+ can_manage = is_admin or repository.user == trans.user
+ can_view_change_log = not is_new
+ if can_push:
+ browse_label = 'Browse or delete repository files'
+ else:
+ browse_label = 'Browse repository files'
+%>
+
+<%!
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/webapps/community/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<br/><br/>
+<ul class="manage-table-actions">
+ %if is_new:
+ <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ), webapp='community' )}">Upload files to repository</a>
+ %else:
+ <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
+ <div popupmenu="repository-${repository.id}-popup">
+ %if can_manage:
+ <a class="action-button" href="${h.url_for( controller='repository', action='manage_repository', id=trans.app.security.encode_id( repository.id ) )}">Manage repository</a>
+ %else:
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_repository', id=trans.app.security.encode_id( repository.id ) )}">View repository</a>
+ %endif
+ %if can_upload:
+ <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ), webapp='community' )}">Upload files to repository</a>
+ %endif
+ %if can_view_change_log:
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">View change log</a>
+ %endif
+ %if can_browse_contents:
+ <a class="action-button" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${browse_label}</a>
+ %endif
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='gz' )}">Download as a .tar.gz file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='bz2' )}">Download as a .tar.bz2 file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='zip' )}">Download as a zip file</a>
+ </div>
+ %endif
+</ul>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">${repository.name}</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <label>Clone this repository:</label>
+ ${render_clone_str( repository )}
+ </div>
+ </div>
+</div>
+%if metadata:
+## "{"tools":
+## [{"description": "data on any column using simple expressions",
+## "id": "Filter1",
+## "name": "Filter",
+## "requirements": [],
+## "tests": [{
+## "inputs": [["input", "1.bed", {"children": [], "value": "1.bed"}], ["cond", "c1=='chr22'", {"children": [], "value": "c1=='chr22'"}]], "name": "Test-1",
+## "outputs": [["out_file1", "filter1_test1.bed", {"compare": "diff", "delta": 10000, "extra_files": [], "lines_diff": 0, "sort": false}]],
+## "required_files": [["1.bed", {"children": [], "value": "1.bed"}]]}, {"inputs": [["input", "7.bed", {"children": [], "value": "7.bed"}], ["cond", "c1=='chr1' and c3-c2>=2000 and c6=='+'", {"children": [], "value": "c1=='chr1' and c3-c2>=2000 and c6=='+'"}]], "name": "Test-2", "outputs": [["out_file1", "filter1_test2.bed", {"compare": "diff", "delta": 10000, "extra_files": [], "lines_diff": 0, "sort": false}]], "required_files": [["7.bed", {"children": [], "value": "7.bed"}]]}], "tool_config": "database/community_files/000/repo_1/filtering.xml", "version": "1.0.1", "version_string_cmd": null}], "workflows": [{"a_galaxy_workflow": "true", "annotation": "", "format-version": "0.1", "name": "Workflow constructed from history 'Unnamed history'", "steps": {"0": {"annotation": "", "id": 0, "input_connections": {}, "inputs": [{"description": "", "name": "Input Dataset"}], "name": "Input dataset", "outputs": [], "position": {"left": 10, "top": 10}, "tool_errors": null, "tool_id": null, "tool_state": "{\\"name\\": \\"Input Dataset\\"}", "tool_version": null, "type": "data_input", "user_outputs": []}, "1": {"annotation": "", "id": 1, "input_connections": {"input": {"id": 0, "output_name": "output"}}, "inputs": [], "name": "Filter", "outputs": [{"name": "out_file1", "type": "input"}], "position": {"left": 230, "top": 10}, "post_job_actions": {}, "tool_errors": null, "tool_id": "Filter1", "tool_state": "{\\"__page__\\": 0, \\"cond\\": \\"\\\\\\"c1=='chr1'\\\\\\"\\", \\"chromInfo\\": \\"\\\\\\"/Users/gvk/workspaces_2008/central_051111/tool-data/shared/ucsc/chrom/?.len\\\\\\"\\", \\"input\\": \\"null\\"}", "tool_version": null, "type": "tool", "user_outputs": []}, "2": {"annotation": "", "id": 2, "input_connections": {"input1": {"id": 0, "output_name": "output"}, "input2": {"id": 1, "output_name": "out_file1"}}, "inputs": [], "name": "Subtract Whole Dataset", "outputs": [{"name": "output", "type": "input"}], "position": {"left": 450, "top": 10}, "post_job_actions": {}, "tool_errors": null, "tool_id": "subtract_query1", "tool_state": "{\\"input2\\": \\"null\\", \\"__page__\\": 0, \\"end_col\\": \\"{\\\\\\"__class__\\\\\\": \\\\\\"UnvalidatedValue\\\\\\", \\\\\\"value\\\\\\": \\\\\\"None\\\\\\"}\\", \\"begin_col\\": \\"{\\\\\\"__class__\\\\\\": \\\\\\"UnvalidatedValue\\\\\\", \\\\\\"value\\\\\\": \\\\\\"None\\\\\\"}\\", \\"input1\\": \\"null\\", \\"chromInfo\\": \\"\\\\\\"/Users/gvk/workspaces_2008/central_051111/tool-data/shared/ucsc/chrom/?.len\\\\\\"\\"}", "tool_version": null, "type": "tool", "user_outputs": []}}}]}"
+ <p/>
+ <div class="toolForm">
+ <div class="toolFormTitle">${metadata[ 'name' ]} tool metadata</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <label>Name:</label>
+ <a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=metadata[ 'tool_config' ] )}">${metadata[ 'name' ]}</a>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Description:</label>
+ ${metadata[ 'description' ]}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Id:</label>
+ ${metadata[ 'id' ]}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Version:</label>
+ ${metadata[ 'version' ]}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Version command string:</label>
+ ${metadata[ 'version_string_cmd' ]}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Command:</label>
+ ${tool.command}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Interpreter:</label>
+ ${tool.interpreter}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Is multi-byte:</label>
+ ${tool.is_multi_byte}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Forces a history refresh:</label>
+ ${tool.force_history_refresh}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Parallelism:</label>
+ ${tool.parallelism}
+ <div style="clear: both"></div>
+ </div>
+ <%
+ if 'requirements' in metadata:
+ requirements = metadata[ 'requirements' ]
+ else:
+ requirements = None
+ %>
+ %if requirements:
+ <%
+ requirements_str = ''
+ for requirement_dict in metadata[ 'requirements' ]:
+ requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
+ requirements_str = requirements_str.rstrip( ', ' )
+ %>
+ <div class="form-row">
+ <label>Requirements:</label>
+ ${requirements_str}
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ <%
+ if 'tests' in metadata:
+ tests = metadata[ 'tests' ]
+ else:
+ tests = None
+ %>
+ %if tests:
+ <div class="form-row">
+ <label>Functional tests:</label></td>
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>inputs</b></td>
+ <td><b>outputs</b></td>
+ <td><b>required files</b></td>
+ </tr>
+ %for test_dict in tests:
+ <%
+ inputs = test_dict[ 'inputs' ]
+ outputs = test_dict[ 'outputs' ]
+ required_files = test_dict[ 'required_files' ]
+ %>
+ <tr>
+ <td>${test_dict[ 'name' ]}</td>
+ <td>
+ %for input in inputs:
+ <b>${input[0]}:</b> ${input[1]}<br/>
+ %endfor
+ </td>
+ <td>
+ %for output in outputs:
+ <b>${output[0]}:</b> ${output[1]}<br/>
+ %endfor
+ </td>
+ <td>
+ %for required_file in required_files:
+ ${required_file[0]}<br/>
+ %endfor
+ </td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ %endif
+ </div>
+ </div>
+%endif
--- a/tool_conf.xml.main Thu Jul 21 14:22:23 2011 -0400
+++ b/tool_conf.xml.main Tue Jul 26 13:18:22 2011 -0400
@@ -51,7 +51,7 @@
<tool file="fasta_tools/fasta_to_tabular.xml" /><tool file="filters/gff2bed.xml" /><tool file="maf/maf_to_bed.xml" />
- <tool file="maf/maf_to_interval.xml" />
+ <tool file="maf/maf_to_interval.xml" /><tool file="maf/maf_to_fasta.xml" /><tool file="fasta_tools/tabular_to_fasta.xml" /><tool file="fastq/fastq_to_fasta.xml" />
@@ -78,13 +78,13 @@
<tool file="filters/gff/extract_GFF_Features.xml" /><tool file="filters/gff/gff_filter_by_attribute.xml" /><tool file="filters/gff/gff_filter_by_feature_count.xml" />
- <tool file="filters/gff/gtf_filter_by_attribute_values_list.xml" />
+ <tool file="filters/gff/gtf_filter_by_attribute_values_list.xml" /></section><section name="Join, Subtract and Group" id="group"><tool file="filters/joiner.xml" /><tool file="filters/compare.xml"/><tool file="new_operations/subtract_query.xml"/>
- <tool file="stats/grouping.xml" />
+ <tool file="stats/grouping.xml" /><tool file="new_operations/column_join.xml"/></section><section name="Extract Features" id="features">
@@ -112,7 +112,7 @@
<tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section name="Operate on Genomic Intervals" id="bxops">
- <tool file="new_operations/intersect.xml" />
+ <tool file="new_operations/intersect.xml" /><tool file="new_operations/subtract.xml" /><tool file="new_operations/merge.xml" /><tool file="new_operations/concat.xml" />
@@ -127,7 +127,7 @@
</section><section name="Statistics" id="stats"><tool file="stats/gsummary.xml" />
- <tool file="filters/uniq.xml" />
+ <tool file="filters/uniq.xml" /><tool file="stats/cor.xml" /><tool file="stats/generate_matrix_for_pca_lda.xml" /><tool file="stats/lda_analy.xml" />
@@ -223,13 +223,13 @@
<tool file="emboss_5/emboss_chips.xml" /><tool file="emboss_5/emboss_cirdna.xml" /><tool file="emboss_5/emboss_codcmp.xml" />
- <tool file="emboss_5/emboss_coderet.xml" />
+ <tool file="emboss_5/emboss_coderet.xml" /><tool file="emboss_5/emboss_compseq.xml" />
- <tool file="emboss_5/emboss_cpgplot.xml" />
+ <tool file="emboss_5/emboss_cpgplot.xml" /><tool file="emboss_5/emboss_cpgreport.xml" /><tool file="emboss_5/emboss_cusp.xml" /><tool file="emboss_5/emboss_cutseq.xml" />
- <tool file="emboss_5/emboss_dan.xml" />
+ <tool file="emboss_5/emboss_dan.xml" /><tool file="emboss_5/emboss_degapseq.xml" /><tool file="emboss_5/emboss_descseq.xml" /><tool file="emboss_5/emboss_diffseq.xml" />
@@ -245,7 +245,7 @@
<tool file="emboss_5/emboss_etandem.xml" /><tool file="emboss_5/emboss_extractfeat.xml" /><tool file="emboss_5/emboss_extractseq.xml" />
- <tool file="emboss_5/emboss_freak.xml" />
+ <tool file="emboss_5/emboss_freak.xml" /><tool file="emboss_5/emboss_fuzznuc.xml" /><tool file="emboss_5/emboss_fuzzpro.xml" /><tool file="emboss_5/emboss_fuzztran.xml" />
@@ -266,7 +266,7 @@
<tool file="emboss_5/emboss_merger.xml" /><tool file="emboss_5/emboss_msbar.xml" /><tool file="emboss_5/emboss_needle.xml" />
- <tool file="emboss_5/emboss_newcpgreport.xml" />
+ <tool file="emboss_5/emboss_newcpgreport.xml" /><tool file="emboss_5/emboss_newcpgseek.xml" /><tool file="emboss_5/emboss_newseq.xml" /><tool file="emboss_5/emboss_noreturn.xml" />
@@ -294,7 +294,7 @@
<tool file="emboss_5/emboss_revseq.xml" /><tool file="emboss_5/emboss_seqmatchall.xml" /><tool file="emboss_5/emboss_seqret.xml" />
- <tool file="emboss_5/emboss_showfeat.xml" />
+ <tool file="emboss_5/emboss_showfeat.xml" /><tool file="emboss_5/emboss_shuffleseq.xml" /><tool file="emboss_5/emboss_sigcleave.xml" /><tool file="emboss_5/emboss_sirna.xml" />
@@ -316,7 +316,7 @@
<tool file="emboss_5/emboss_water.xml" /><tool file="emboss_5/emboss_wobble.xml" /><tool file="emboss_5/emboss_wordcount.xml" />
- <tool file="emboss_5/emboss_wordmatch.xml" />
+ <tool file="emboss_5/emboss_wordmatch.xml" /></section><label text="NGS Toolbox Beta" id="ngs" /><section name="NGS: QC and manipulation" id="cshl_library_information">
--- a/tool_conf.xml.sample Thu Jul 21 14:22:23 2011 -0400
+++ b/tool_conf.xml.sample Tue Jul 26 13:18:22 2011 -0400
@@ -144,14 +144,14 @@
<tool file="regVariation/t_test_two_samples.xml" /><tool file="regVariation/compute_q_values.xml" /><label text="GFF" id="gff" />
- <tool file="stats/count_gff_features.xml" />
+ <tool file="stats/count_gff_features.xml" /></section><!--
Keep this section commented until all of the tools have functional tests
<section name="Wavelet Analysis" id="dwt"><tool file="discreteWavelet/execute_dwt_IvC_all.xml" /><tool file="discreteWavelet/execute_dwt_cor_aVa_perClass.xml" />
- <tool file="discreteWavelet/execute_dwt_cor_aVb_all.xml" />
+ <tool file="discreteWavelet/execute_dwt_cor_aVb_all.xml" /><tool file="discreteWavelet/execute_dwt_var_perClass.xml" /></section>
-->
@@ -184,8 +184,8 @@
<tool file="regVariation/compute_motif_frequencies_for_all_motifs.xml" /><tool file="regVariation/categorize_elements_satisfying_criteria.xml" />s
<tool file="regVariation/draw_stacked_barplots.xml" />
- <tool file="regVariation/multispecies_MicrosatDataGenerator_interrupted_GALAXY.xml" />
- <tool file="regVariation/microsatellite_birthdeath.xml" />
+ <tool file="regVariation/multispecies_MicrosatDataGenerator_interrupted_GALAXY.xml" />
+ <tool file="regVariation/microsatellite_birthdeath.xml" /></section><section name="Multiple regression" id="multReg"><tool file="regVariation/linear_regression.xml" />
@@ -241,7 +241,7 @@
</section><section name="NGS: QC and manipulation" id="NGS_QC"><label text="FastQC: fastq/sam/bam" id="fastqcsambam" />
- <tool file="rgenetics/rgFastQC.xml" />
+ <tool file="rgenetics/rgFastQC.xml" /><label text="Illumina fastq" id="illumina" /><tool file="fastq/fastq_groomer.xml" /><tool file="fastq/fastq_paired_end_splitter.xml" />
@@ -280,21 +280,21 @@
<tool file="fastx_toolkit/fastx_collapser.xml" /><tool file="fastx_toolkit/fastx_renamer.xml" /><tool file="fastx_toolkit/fastx_reverse_complement.xml" />
- <tool file="fastx_toolkit/fastx_trimmer.xml" />
+ <tool file="fastx_toolkit/fastx_trimmer.xml" /></section><section name="NGS: Picard (beta)" id="picard_beta"><label text="QC/Metrics for sam/bam" id="qcsambam"/><tool file="picard/picard_BamIndexStats.xml" />
- <tool file="picard/rgPicardASMetrics.xml" />
- <tool file="picard/rgPicardGCBiasMetrics.xml" />
- <tool file="picard/rgPicardLibComplexity.xml" />
+ <tool file="picard/rgPicardASMetrics.xml" />
+ <tool file="picard/rgPicardGCBiasMetrics.xml" />
+ <tool file="picard/rgPicardLibComplexity.xml" /><tool file="picard/rgPicardInsertSize.xml" /><tool file="picard/rgPicardHsMetrics.xml" /><label text="bam/sam Cleaning" id="picard-clean" /><tool file="picard/picard_AddOrReplaceReadGroups.xml" /><tool file="picard/picard_ReorderSam.xml" /><tool file="picard/picard_ReplaceSamHeader.xml" />
- <tool file="picard/rgPicardFixMate.xml" />
+ <tool file="picard/rgPicardFixMate.xml" /><tool file="picard/rgPicardMarkDups.xml" /></section><!--
--- a/tool_list.py Thu Jul 21 14:22:23 2011 -0400
+++ b/tool_list.py Tue Jul 26 13:18:22 2011 -0400
@@ -4,19 +4,19 @@
onoff = 1
tool_list = []
for line in open("tool_conf.xml.sample", "r"):
- if line.find("<!--") != -1:
+ if line.find("<!--") != -1:
onoff = 0
- if line.find("file") != -1 and onoff==1:
- strs = line.split('\"')
+ if line.find("file") != -1 and onoff==1:
+ strs = line.split('\"')
tool_list.append(strs[1])
- if line.find("<section") != -1 and onoff==1:
+ if line.find("<section") != -1 and onoff==1:
keys = line.strip().split('\"')
n = 0
strtmp = "section::"
- while n < len(keys) :
- if keys[n].find("id") != -1 : strtmp = strtmp + keys[n+1]
- if keys[n].find("name") != -1 : strtmp = strtmp + keys[n+1] + "-"
- n = n + 1
+ while n < len(keys) :
+ if keys[n].find("id") != -1 : strtmp = strtmp + keys[n+1]
+ if keys[n].find("name") != -1 : strtmp = strtmp + keys[n+1] + "-"
+ n = n + 1
tool_list.append(strtmp.replace(' ', '_'))
if line.find("-->") != -1:
onoff =1
@@ -26,42 +26,42 @@
id = []
desc = []
tool_infos = []
-for tool in tool_list :
- if tool.find("section")!=-1 :
+for tool in tool_list :
+ if tool.find("section")!=-1 :
tool_info = dict()
tool_info["id"] = tool
tool_infos.append(tool_info)
- if os.path.exists("tools/"+tool) :
- for line in open("tools/"+tool) :
- if line.find("<tool ") != -1 and line.find("id") != -1 :
- keys = line.strip().split('\"')
- n = 0
- tool_info = dict()
- tool_info["desc"] = ''
- while n < len(keys) :
- if keys[n].find("id") != -1 : tool_info["id"] = keys[n+1].replace(' ', '_')
- if keys[n].find("name") != -1 : tool_info["name"] = keys[n+1]
- if keys[n].find("description") != -1 : tool_info["desc"] = keys[n+1]
- n = n + 1
- tool_infos.append(tool_info)
- break
+ if os.path.exists("tools/"+tool) :
+ for line in open("tools/"+tool) :
+ if line.find("<tool ") != -1 and line.find("id") != -1 :
+ keys = line.strip().split('\"')
+ n = 0
+ tool_info = dict()
+ tool_info["desc"] = ''
+ while n < len(keys) :
+ if keys[n].find("id") != -1 : tool_info["id"] = keys[n+1].replace(' ', '_')
+ if keys[n].find("name") != -1 : tool_info["name"] = keys[n+1]
+ if keys[n].find("description") != -1 : tool_info["desc"] = keys[n+1]
+ n = n + 1
+ tool_infos.append(tool_info)
+ break
flag=0
-if len(sys.argv) == 1 :
- for tool_info in tool_infos:
- if tool_info["id"].find("section") != -1 :
+if len(sys.argv) == 1 :
+ for tool_info in tool_infos:
+ if tool_info["id"].find("section") != -1 :
print "==========================================================================================================================================="
print "%-45s\t%-40s\t%s" % ("id", "name", tool_info["id"])
print "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"
else :
print "%-45s\t%-40s" % (tool_info["id"], tool_info["name"])
-else:
- for tool_info in tool_infos:
+else:
+ for tool_info in tool_infos:
if tool_info["id"].find("section") != -1 :
flag=0
elif flag==1:
print " functional.test_toolbox:TestForTool_%s" % tool_info["id"],
- if tool_info["id"].replace('section::', '')==sys.argv[1]:
+ if tool_info["id"].replace('section::', '')==sys.argv[1]:
flag=1
#for key in tool_infos.keys():
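
The reindented tool_list.py keeps its two modes of operation, and the onoff flag still skips anything inside an XML comment (which is why the commented-out Wavelet Analysis section never shows up). A usage sketch, assuming the script is run from the Galaxy root directory:

    python tool_list.py            # table of section/tool ids and names from tool_conf.xml.sample
    python tool_list.py group      # nose specifiers functional.test_toolbox:TestForTool_<id> for the "Join, Subtract and Group" section shown above
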
http://bitbucket.org/galaxy/galaxy-central/changeset/e055ca3efb5c/
changeset: e055ca3efb5c
user: dannon
date: 2011-07-26 19:56:16
summary: merge
affected #: 13 files (41.1 KB)
--- a/lib/galaxy/jobs/__init__.py Tue Jul 26 13:18:22 2011 -0400
+++ b/lib/galaxy/jobs/__init__.py Tue Jul 26 13:56:16 2011 -0400
@@ -646,9 +646,14 @@
tool=self.tool, stdout=stdout, stderr=stderr )
job.command_line = self.command_line
+ bytes = 0
# Once datasets are collected, set the total dataset size (includes extra files)
for dataset_assoc in job.output_datasets + job.output_library_datasets:
dataset_assoc.dataset.dataset.set_total_size()
+ bytes += dataset_assoc.dataset.dataset.get_total_size()
+
+ if job.user:
+ job.user.total_disk_usage += bytes
# fix permissions
for path in [ dp.real_path for dp in self.get_output_fnames() ]:
--- a/lib/galaxy/model/__init__.py Tue Jul 26 13:18:22 2011 -0400
+++ b/lib/galaxy/model/__init__.py Tue Jul 26 13:56:16 2011 -0400
@@ -70,6 +70,27 @@
if role not in roles:
roles.append( role )
return roles
+ def get_disk_usage( self, nice_size=False ):
+ rval = 0
+ if self.disk_usage is not None:
+ rval = self.disk_usage
+ if nice_size:
+ rval = galaxy.datatypes.data.nice_size( rval )
+ return rval
+ def set_disk_usage( self, bytes ):
+ self.disk_usage = bytes
+ total_disk_usage = property( get_disk_usage, set_disk_usage )
+ def calculate_disk_usage( self ):
+ dataset_ids = []
+ total = 0
+ # this can be a huge number and can run out of memory, so we avoid the mappers
+ db_session = object_session( self )
+ for history in db_session.query( History ).enable_eagerloads( False ).filter_by( user_id=self.id ).yield_per( 1000 ):
+ for hda in db_session.query( HistoryDatasetAssociation ).enable_eagerloads( False ).filter_by( history_id=history.id, purged=False ).yield_per( 1000 ):
+ if not hda.dataset.id in dataset_ids and not hda.dataset.purged and not hda.dataset.library_associations:
+ dataset_ids.append( hda.dataset.id )
+ total += hda.dataset.get_total_size()
+ return total
class Job( object ):
"""
@@ -349,7 +370,7 @@
self.galaxy_sessions.append( GalaxySessionToHistoryAssociation( galaxy_session, self ) )
else:
self.galaxy_sessions.append( association )
- def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid = True ):
+ def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid=True, quota=True ):
if isinstance( dataset, Dataset ):
dataset = HistoryDatasetAssociation(dataset=dataset)
object_session( self ).add( dataset )
@@ -367,6 +388,8 @@
else:
if set_hid:
dataset.hid = self._next_hid()
+ if quota and self.user:
+ self.user.total_disk_usage += dataset.quota_amount( self.user )
dataset.history = self
if genome_build not in [None, '?']:
self.genome_build = genome_build
@@ -378,6 +401,9 @@
name = self.name
if not target_user:
target_user = self.user
+ quota = True
+ if target_user == self.user:
+ quota = False
new_history = History( name=name, user=target_user )
db_session = object_session( self )
db_session.add( new_history )
@@ -393,8 +419,8 @@
hdas = self.active_datasets
for hda in hdas:
# Copy HDA.
- new_hda = hda.copy( copy_children=True, target_history=new_history )
- new_history.add_dataset( new_hda, set_hid = False )
+ new_hda = hda.copy( copy_children=True )
+ new_history.add_dataset( new_hda, set_hid = False, quota=quota )
db_session.add( new_hda )
db_session.flush()
# Copy annotation.
@@ -741,6 +767,10 @@
def set_size( self ):
"""Returns the size of the data on disk"""
return self.dataset.set_size()
+ def get_total_size( self ):
+ return self.dataset.get_total_size()
+ def set_total_size( self ):
+ return self.dataset.set_total_size()
def has_data( self ):
"""Detects whether there is any data"""
return self.dataset.has_data()
@@ -922,7 +952,7 @@
self.history = history
self.copied_from_history_dataset_association = copied_from_history_dataset_association
self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association
- def copy( self, copy_children = False, parent_id = None, target_history = None ):
+ def copy( self, copy_children = False, parent_id = None ):
hda = HistoryDatasetAssociation( hid=self.hid,
name=self.name,
info=self.info,
@@ -934,8 +964,7 @@
visible=self.visible,
deleted=self.deleted,
parent_id=parent_id,
- copied_from_history_dataset_association=self,
- history = target_history )
+ copied_from_history_dataset_association=self )
object_session( self ).add( hda )
object_session( self ).flush()
hda.set_size()
@@ -1017,6 +1046,26 @@
return hda_name
def get_access_roles( self, trans ):
return self.dataset.get_access_roles( trans )
+ def quota_amount( self, user ):
+ """
+ If the user has multiple instances of this dataset, it will not affect their disk usage statistic.
+ """
+ rval = 0
+ # Anon users are handled just by their single history size.
+ if not user:
+ return rval
+ # Gets an HDA and its children's disk usage, if the user does not already have an association of the same dataset
+ if not self.dataset.library_associations and not self.purged and not self.dataset.purged:
+ for hda in self.dataset.history_associations:
+ if hda.id == self.id:
+ continue
+ if not hda.purged and hda.history and hda.history.user and hda.history.user == user:
+ break
+ else:
+ rval += self.get_total_size()
+ for child in self.children:
+ rval += child.get_disk_usage( user )
+ return rval
class HistoryDatasetAssociationDisplayAtAuthorization( object ):
def __init__( self, hda=None, user=None, site=None ):
@@ -1467,6 +1516,13 @@
self.histories.append( GalaxySessionToHistoryAssociation( self, history ) )
else:
self.histories.append( association )
+ def get_disk_usage( self ):
+ if self.disk_usage is None:
+ return 0
+ return self.disk_usage
+ def set_disk_usage( self, bytes ):
+ self.disk_usage = bytes
+ total_disk_usage = property( get_disk_usage, set_disk_usage )
class GalaxySessionToHistoryAssociation( object ):
def __init__( self, galaxy_session, history ):
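
Taken together, the model additions above encode a single charging rule: an HDA counts against a user's disk usage only once per underlying dataset, never when it is purged, and never when the dataset also lives in a data library; child datasets are then added on top. A compressed, illustrative sketch of that rule (not a drop-in replacement, but using only attributes that appear in the diff):

    def counts_toward_quota( hda, user ):
        # library copies and purged data are never charged
        if hda.dataset.library_associations or hda.purged or hda.dataset.purged:
            return False
        # charge only if this user owns no other live HDA backed by the same dataset
        for other in hda.dataset.history_associations:
            if other.id != hda.id and not other.purged and other.history and other.history.user == user:
                return False
        return True

User.calculate_disk_usage() applies essentially the same test while streaming over the user's histories with enable_eagerloads( False ) and yield_per( 1000 ), so a full recount never has to hold every row in memory, and the new total_disk_usage property gives job completion, history copying and purging a single place to adjust the running total.
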
--- a/lib/galaxy/web/controllers/dataset.py Tue Jul 26 13:18:22 2011 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Tue Jul 26 13:56:16 2011 -0400
@@ -9,6 +9,7 @@
from galaxy.util import inflector
from galaxy.model.item_attrs import *
from galaxy.model import LibraryDatasetDatasetAssociation, HistoryDatasetAssociation
+from galaxy.web.framework.helpers import to_unicode
import pkg_resources;
pkg_resources.require( "Paste" )
@@ -383,6 +384,188 @@
return trans.stream_template_mako( "/dataset/large_file.mako",
truncated_data = open( data.file_name ).read(max_peek_size),
data = data )
+
+ @web.expose
+ def edit(self, trans, dataset_id=None, filename=None, hid=None, **kwd):
+ """Allows user to modify parameters of an HDA."""
+ message = None
+ status = 'done'
+ refresh_frames = []
+ error = False
+ def __ok_to_edit_metadata( dataset_id ):
+ #prevent modifying metadata when dataset is queued or running as input/output
+ #This code could be more efficient, i.e. by using mappers, but to prevent slowing down loading a History panel, we'll leave the code here for now
+ for job_to_dataset_association in trans.sa_session.query( self.app.model.JobToInputDatasetAssociation ) \
+ .filter_by( dataset_id=dataset_id ) \
+ .all() \
+ + trans.sa_session.query( self.app.model.JobToOutputDatasetAssociation ) \
+ .filter_by( dataset_id=dataset_id ) \
+ .all():
+ if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]:
+ return False
+ return True
+ if hid is not None:
+ history = trans.get_history()
+ # TODO: hid handling
+ data = history.datasets[ int( hid ) - 1 ]
+ id = None
+ elif dataset_id is not None:
+ id = trans.app.security.decode_id( dataset_id )
+ data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ else:
+ trans.log_event( "dataset_id and hid are both None, cannot load a dataset to edit" )
+ return trans.show_error_message( "You must provide a history dataset id to edit" )
+ if data is None:
+ trans.log_event( "Problem retrieving dataset (encoded: %s, decoded: %s) with history id %s." % ( str( dataset_id ), str( id ), str( hid ) ) )
+ return trans.show_error_message( "History dataset id is invalid" )
+ if dataset_id is not None and data.history.user is not None and data.history.user != trans.user:
+ trans.log_event( "User attempted to edit an HDA they do not own (encoded: %s, decoded: %s)" % ( dataset_id, id ) )
+ # Do not reveal the dataset's existence
+ return trans.show_error_message( "History dataset id is invalid" )
+ current_user_roles = trans.get_current_user_roles()
+ if data.history.user and not data.dataset.has_manage_permissions_roles( trans ):
+ # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time,
+ # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS
+ # permission. In this case, we'll reset this permission to the hda user's private role.
+ manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
+ permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] }
+ trans.app.security_agent.set_dataset_permission( data.dataset, permissions )
+ if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
+ if data.state == trans.model.Dataset.states.UPLOAD:
+ return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." )
+ params = util.Params( kwd, sanitize=False )
+ if params.change:
+ # The user clicked the Save button on the 'Change data type' form
+ if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
+ #prevent modifying datatype when dataset is queued or running as input/output
+ if not __ok_to_edit_metadata( data.id ):
+ message = "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them."
+ error = True
+ else:
+ trans.app.datatypes_registry.change_datatype( data, params.datatype, set_meta = not trans.app.config.set_metadata_externally )
+ trans.sa_session.flush()
+ if trans.app.config.set_metadata_externally:
+ trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior
+ message = "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype )
+ refresh_frames=['history']
+ else:
+ message = "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype )
+ error = True
+ elif params.save:
+ # The user clicked the Save button on the 'Edit Attributes' form
+ data.name = params.name
+ data.info = params.info
+ message = ''
+ if __ok_to_edit_metadata( data.id ):
+ # The following for loop will save all metadata_spec items
+ for name, spec in data.datatype.metadata_spec.items():
+ if spec.get("readonly"):
+ continue
+ optional = params.get("is_"+name, None)
+ other = params.get("or_"+name, None)
+ if optional and optional == 'true':
+ # optional element... == 'true' actually means it is NOT checked (and therefore omitted)
+ setattr(data.metadata, name, None)
+ else:
+ if other:
+ setattr( data.metadata, name, other )
+ else:
+ setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
+ data.datatype.after_setting_metadata( data )
+ # Sanitize annotation before adding it.
+ if params.annotation:
+ annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
+ self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
+ # If setting metadata previously failed and all required elements have now been set, clear the failed state.
+ if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
+ data._state = None
+ trans.sa_session.flush()
+ message = "Attributes updated%s" % message
+ refresh_frames=['history']
+ else:
+ trans.sa_session.flush()
+ message = "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata."
+ status = "warning"
+ refresh_frames=['history']
+ elif params.detect:
+ # The user clicked the Auto-detect button on the 'Edit Attributes' form
+ #prevent modifying metadata when dataset is queued or running as input/output
+ if not __ok_to_edit_metadata( data.id ):
+ message = "This dataset is currently being used as input or output. You cannot change metadata until the jobs have completed or you have canceled them."
+ error = True
+ else:
+ for name, spec in data.metadata.spec.items():
+ # We need to be careful about the attributes we are resetting
+ if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
+ if spec.get( 'default' ):
+ setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
+ if trans.app.config.set_metadata_externally:
+ message = 'Attributes have been queued to be updated'
+ trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
+ else:
+ message = 'Attributes updated'
+ data.set_meta()
+ data.datatype.after_setting_metadata( data )
+ trans.sa_session.flush()
+ refresh_frames=['history']
+ elif params.convert_data:
+ target_type = kwd.get("target_type", None)
+ if target_type:
+ message = data.datatype.convert_dataset(trans, data, target_type)
+ refresh_frames=['history']
+ elif params.update_roles_button:
+ if not trans.user:
+ return trans.show_error_message( "You must be logged in if you want to change permissions." )
+ if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
+ access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action )
+ manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
+ # The user associated the DATASET_ACCESS permission on the dataset with 1 or more roles. We
+ # need to ensure that they did not associate roles that would cause accessibility problems.
+ permissions, in_roles, error, message = \
+ trans.app.security_agent.derive_roles_from_access( trans, data.dataset.id, 'root', **kwd )
+ if error:
+ # Keep the original role associations for the DATASET_ACCESS permission on the dataset.
+ permissions[ access_action ] = data.dataset.get_access_roles( trans )
+ status = 'error'
+ else:
+ error = trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
+ if error:
+ message += error
+ status = 'error'
+ else:
+ message = 'Your changes completed successfully.'
+ trans.sa_session.refresh( data.dataset )
+ else:
+ message = "You are not authorized to change this dataset's permissions"
+ error = True
+ else:
+ if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey:
+ # Copy dbkey into metadata, for backwards compatability
+ # This looks like it does nothing, but getting the dbkey
+ # returns the metadata dbkey unless it is None, in which
+ # case it resorts to the old dbkey. Setting the dbkey
+ # sets it properly in the metadata
+ #### This is likely no longer required, since the dbkey exists entirely within metadata (the old_dbkey field is gone): REMOVE ME?
+ data.metadata.dbkey = data.dbkey
+ # let's not overwrite the imported datatypes module with the variable datatypes?
+ # the built-in 'id' is overwritten in lots of places as well
+ ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
+ ldatatypes.sort()
+ all_roles = trans.app.security_agent.get_legitimate_roles( trans, data.dataset, 'root' )
+ if error:
+ status = 'error'
+ return trans.fill_template( "/dataset/edit_attributes.mako",
+ data=data,
+ data_annotation=self.get_item_annotation_str( trans.sa_session, trans.user, data ),
+ datatypes=ldatatypes,
+ current_user_roles=current_user_roles,
+ all_roles=all_roles,
+ message=message,
+ status=status,
+ dataset_id=dataset_id,
+ refresh_frames=refresh_frames )
+ else:
+ return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( dataset_id ) )
@web.expose
@web.require_login( "see all available datasets" )
@@ -654,111 +837,190 @@
return trans.fill_template_mako( "dataset/display_application/display.mako", msg = msg, display_app = display_app, display_link = display_link, refresh = refresh )
return trans.show_error_message( 'You do not have permission to view this dataset at an external display application.' )
- def _undelete( self, trans, id ):
+ def _delete( self, trans, dataset_id ):
+ message = None
+ status = 'done'
+ id = None
try:
- id = int( id )
- except ValueError, e:
- return False
- history = trans.get_history()
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- if data and data.undeletable:
+ id = trans.app.security.decode_id( dataset_id )
+ history = trans.get_history()
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ assert hda, 'Invalid HDA: %s' % id
# Walk up parent datasets to find the containing history
- topmost_parent = data
+ topmost_parent = hda
+ while topmost_parent.parent:
+ topmost_parent = topmost_parent.parent
+ assert topmost_parent in trans.history.datasets, "Data does not belong to current history"
+ # Mark deleted and cleanup
+ hda.mark_deleted()
+ hda.clear_associated_files()
+ trans.log_event( "Dataset id %s marked as deleted" % str(id) )
+ if hda.parent_id is None and len( hda.creating_job_associations ) > 0:
+ # Mark associated job for deletion
+ job = hda.creating_job_associations[0].job
+ if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]:
+ # Are *all* of the job's other output datasets deleted?
+ if job.check_if_output_datasets_deleted():
+ job.mark_deleted( self.app.config.get_bool( 'enable_job_running', True ),
+ self.app.config.get_bool( 'track_jobs_in_database', False ) )
+ self.app.job_manager.job_stop_queue.put( job.id )
+ trans.sa_session.flush()
+ except Exception, e:
+ msg = 'HDA deletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
+ log.exception( msg )
+ trans.log_event( msg )
+ message = 'Dataset deletion failed'
+ status = 'error'
+ return ( message, status )
+
+ def _undelete( self, trans, dataset_id ):
+ message = None
+ status = 'done'
+ id = None
+ try:
+ id = trans.app.security.decode_id( dataset_id )
+ history = trans.get_history()
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ assert hda and hda.undeletable, 'Invalid HDA: %s' % id
+ # Walk up parent datasets to find the containing history
+ topmost_parent = hda
while topmost_parent.parent:
topmost_parent = topmost_parent.parent
assert topmost_parent in history.datasets, "Data does not belong to current history"
# Mark undeleted
- data.mark_undeleted()
+ hda.mark_undeleted()
trans.sa_session.flush()
trans.log_event( "Dataset id %s has been undeleted" % str(id) )
- return True
- return False
+ except Exception, e:
+ msg = 'HDA undeletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
+ log.exception( msg )
+ trans.log_event( msg )
+ message = 'Dataset undeletion failed'
+ status = 'error'
+ return ( message, status )
- def _unhide( self, trans, id ):
+ def _unhide( self, trans, dataset_id ):
try:
- id = int( id )
- except ValueError, e:
+ id = trans.app.security.decode_id( dataset_id )
+ except:
return False
history = trans.get_history()
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- if data:
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ if hda:
# Walk up parent datasets to find the containing history
- topmost_parent = data
+ topmost_parent = hda
while topmost_parent.parent:
topmost_parent = topmost_parent.parent
assert topmost_parent in history.datasets, "Data does not belong to current history"
# Mark undeleted
- data.mark_unhidden()
+ hda.mark_unhidden()
trans.sa_session.flush()
trans.log_event( "Dataset id %s has been unhidden" % str(id) )
return True
return False
- def _purge( self, trans, id ):
+ def _purge( self, trans, dataset_id ):
+ message = None
+ status = 'done'
try:
- id = int( id )
- except ValueError, e:
- return False
- hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- # Invalid HDA or not deleted
- if not hda or not hda.history or not hda.deleted:
- return False
- # If the user is anonymous, make sure the HDA is owned by the current session.
- if not hda.history.user and trans.galaxy_session.id not in [ s.id for s in hda.history.galaxy_sessions ]:
- return False
- # If the user is known, make sure the HDA is owned by the current user.
- if hda.history.user and hda.history.user != trans.user:
- return False
- # HDA is purgeable
- hda.purged = True
- trans.sa_session.add( hda )
- trans.log_event( "HDA id %s has been purged" % hda.id )
- # Don't delete anything if there are active HDAs or any LDDAs, even if
- # the LDDAs are deleted. Let the cleanup scripts get it in the latter
- # case.
- if hda.dataset.user_can_purge:
- try:
- hda.dataset.full_delete()
- trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) )
- trans.sa_session.add( hda.dataset )
- except:
- log.exception( 'Unable to purge dataset (%s) on purge of hda (%s):' % ( hda.dataset.id, hda.id ) )
- trans.sa_session.flush()
- return True
+ id = trans.app.security.decode_id( dataset_id )
+ history = trans.get_history()
+ user = trans.get_user()
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ # Invalid HDA
+ assert hda, 'Invalid history dataset ID'
+ # Walk up parent datasets to find the containing history
+ topmost_parent = hda
+ while topmost_parent.parent:
+ topmost_parent = topmost_parent.parent
+ assert topmost_parent in history.datasets, "Data does not belong to current history"
+ # If the user is anonymous, make sure the HDA is owned by the current session.
+ if not user:
+ assert trans.galaxy_session.id in [ s.id for s in hda.history.galaxy_sessions ], 'Invalid history dataset ID'
+ # If the user is known, make sure the HDA is owned by the current user.
+ else:
+ assert topmost_parent.history.user == trans.user, 'Invalid history dataset ID'
+ # HDA is not deleted
+ assert hda.deleted, 'History dataset is not marked as deleted'
+ # HDA is purgeable
+ # Decrease disk usage first
+ if user:
+ user.total_disk_usage -= hda.quota_amount( user )
+ # Mark purged
+ hda.purged = True
+ trans.sa_session.add( hda )
+ trans.log_event( "HDA id %s has been purged" % hda.id )
+ # Don't delete anything if there are active HDAs or any LDDAs, even if
+ # the LDDAs are deleted. Let the cleanup scripts get it in the latter
+ # case.
+ if hda.dataset.user_can_purge:
+ try:
+ hda.dataset.full_delete()
+ trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) )
+ trans.sa_session.add( hda.dataset )
+ except:
+ log.exception( 'Unable to purge dataset (%s) on purge of HDA (%s):' % ( hda.dataset.id, hda.id ) )
+ trans.sa_session.flush()
+ except Exception, e:
+ msg = 'HDA purge failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
+ log.exception( msg )
+ trans.log_event( msg )
+ message = 'Dataset removal from disk failed'
+ status = 'error'
+ return ( message, status )
@web.expose
- def undelete( self, trans, id ):
- if self._undelete( trans, id ):
- return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True ) )
- raise Exception( "Error undeleting" )
+ def delete( self, trans, dataset_id, filename, show_deleted_on_refresh = False ):
+ message, status = self._delete( trans, dataset_id )
+ return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
@web.expose
- def unhide( self, trans, id ):
- if self._unhide( trans, id ):
+ def delete_async( self, trans, dataset_id, filename ):
+ message, status = self._delete( trans, dataset_id )
+ if status == 'done':
+ return "OK"
+ else:
+ raise Exception( message )
+
+ @web.expose
+ def undelete( self, trans, dataset_id, filename ):
+ message, status = self._undelete( trans, dataset_id )
+ return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True, message=message, status=status ) )
+
+ @web.expose
+ def undelete_async( self, trans, dataset_id, filename ):
+ message, status = self._undelete( trans, dataset_id )
+ if status == 'done':
+ return "OK"
+ else:
+ raise Exception( message )
+
+ @web.expose
+ def unhide( self, trans, dataset_id, filename ):
+ if self._unhide( trans, dataset_id ):
return trans.response.send_redirect( web.url_for( controller='root', action='history', show_hidden = True ) )
raise Exception( "Error unhiding" )
@web.expose
- def undelete_async( self, trans, id ):
- if self._undelete( trans, id ):
- return "OK"
- raise Exception( "Error undeleting" )
-
- @web.expose
- def purge( self, trans, id ):
- if not trans.app.config.allow_user_dataset_purge:
- raise Exception( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." )
- if self._purge( trans, id ):
- return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True ) )
- raise Exception( "Error removing disk file" )
+ def purge( self, trans, dataset_id, filename, show_deleted_on_refresh = False ):
+ if trans.app.config.allow_user_dataset_purge:
+ message, status = self._purge( trans, dataset_id )
+ else:
+ message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator."
+ status = 'error'
+ return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
@web.expose
- def purge_async( self, trans, id ):
- if not trans.app.config.allow_user_dataset_purge:
- raise Exception( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." )
- if self._purge( trans, id ):
+ def purge_async( self, trans, dataset_id, filename ):
+ if trans.app.config.allow_user_dataset_purge:
+ message, status = self._purge( trans, dataset_id )
+ else:
+ message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator."
+ status = 'error'
+ if status == 'done':
return "OK"
- raise Exception( "Error removing disk file" )
+ else:
+ raise Exception( message )
@web.expose
def show_params( self, trans, dataset_id=None, from_noframe=None, **kwd ):
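
A recurring theme in the controller changes above is that delete, undelete, purge and their _async variants now take the encoded dataset id that templates put into URLs, rather than a raw integer key, and hand back a ( message, status ) pair that the history page renders instead of raising on failure. The id round trip, using only calls visible in this changeset (variable names illustrative):

    encoded = trans.app.security.encode_id( hda.id )           # done where links are built, e.g. in history.mako
    decoded = int( trans.app.security.decode_id( encoded ) )   # done at the top of _delete/_undelete/_purge
    assert decoded == hda.id

Because only the encoded form appears in URLs, the endpoints can no longer be driven by guessing small integer ids, and ownership is still verified by walking the HDA up to its containing history.
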
--- a/lib/galaxy/web/controllers/root.py Tue Jul 26 13:18:22 2011 -0400
+++ b/lib/galaxy/web/controllers/root.py Tue Jul 26 13:56:16 2011 -0400
@@ -8,7 +8,6 @@
from galaxy.util.sanitize_html import sanitize_html
from galaxy.model.orm import *
from galaxy.model.item_attrs import UsesAnnotations
-from galaxy.web.framework.helpers import to_unicode
log = logging.getLogger( __name__ )
@@ -99,11 +98,14 @@
return trans.fill_template_mako( "/my_data.mako" )
@web.expose
- def history( self, trans, as_xml=False, show_deleted=False, show_hidden=False, hda_id=None ):
+ def history( self, trans, as_xml=False, show_deleted=False, show_hidden=False, hda_id=None, **kwd ):
"""
Display the current history, creating a new history if necessary.
NOTE: No longer accepts "id" or "template" options for security reasons.
"""
+ params = util.Params( kwd )
+ message = params.get( 'message', None )
+ status = params.get( 'status', 'done' )
if trans.app.config.require_login and not trans.user:
return trans.fill_template( '/no_access.mako', message = 'Please log in to access Galaxy histories.' )
history = trans.get_history( create=True )
@@ -123,7 +125,9 @@
datasets = datasets,
hda_id = hda_id,
show_deleted = show_deleted,
- show_hidden=show_hidden )
+ show_hidden=show_hidden,
+ message=message,
+ status=status )
@web.expose
def dataset_state ( self, trans, id=None, stamp=None ):
@@ -160,9 +164,13 @@
# Create new HTML for any that have changed
rval = {}
if ids is not None and states is not None:
- ids = map( int, ids.split( "," ) )
+ ids = ids.split( "," )
states = states.split( "," )
- for id, state in zip( ids, states ):
+ for encoded_id, state in zip( ids, states ):
+ try:
+ id = int( trans.app.security.decode_id( encoded_id ) )
+ except:
+ id = int( encoded_id )
data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
if data.state != state:
job_hda = data
@@ -175,7 +183,7 @@
force_history_refresh = tool.force_history_refresh
if not job_hda.visible:
force_history_refresh = True
- rval[id] = {
+ rval[encoded_id] = {
"state": data.state,
"html": unicode( trans.fill_template( "root/history_item.mako", data=data, hid=data.hid ), 'utf-8' ),
"force_history_refresh": force_history_refresh
@@ -288,237 +296,6 @@
else:
yield "No data with id=%d" % id
- @web.expose
- def edit(self, trans, id=None, hid=None, **kwd):
- """Allows user to modify parameters of an HDA."""
- message = ''
- error = False
- def __ok_to_edit_metadata( dataset_id ):
- #prevent modifying metadata when dataset is queued or running as input/output
- #This code could be more efficient, i.e. by using mappers, but to prevent slowing down loading a History panel, we'll leave the code here for now
- for job_to_dataset_association in trans.sa_session.query( self.app.model.JobToInputDatasetAssociation ) \
- .filter_by( dataset_id=dataset_id ) \
- .all() \
- + trans.sa_session.query( self.app.model.JobToOutputDatasetAssociation ) \
- .filter_by( dataset_id=dataset_id ) \
- .all():
- if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]:
- return False
- return True
- if hid is not None:
- history = trans.get_history()
- # TODO: hid handling
- data = history.datasets[ int( hid ) - 1 ]
- elif id is not None:
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- else:
- trans.log_event( "Problem loading dataset id %s with history id %s." % ( str( id ), str( hid ) ) )
- return trans.show_error_message( "Problem loading dataset." )
- if data is None:
- trans.log_event( "Problem retrieving dataset id %s with history id." % ( str( id ), str( hid ) ) )
- return trans.show_error_message( "Problem retrieving dataset." )
- if id is not None and data.history.user is not None and data.history.user != trans.user:
- return trans.show_error_message( "This instance of a dataset (%s) in a history does not belong to you." % ( data.id ) )
- current_user_roles = trans.get_current_user_roles()
- if data.history.user and not data.dataset.has_manage_permissions_roles( trans ):
- # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time,
- # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS
- # permission. In this case, we'll reset this permission to the hda user's private role.
- manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
- permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] }
- trans.app.security_agent.set_dataset_permission( data.dataset, permissions )
- if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
- if data.state == trans.model.Dataset.states.UPLOAD:
- return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." )
- params = util.Params( kwd, sanitize=False )
- if params.change:
- # The user clicked the Save button on the 'Change data type' form
- if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
- #prevent modifying datatype when dataset is queued or running as input/output
- if not __ok_to_edit_metadata( data.id ):
- return trans.show_error_message( "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them." )
- trans.app.datatypes_registry.change_datatype( data, params.datatype, set_meta = not trans.app.config.set_metadata_externally )
- trans.sa_session.flush()
- if trans.app.config.set_metadata_externally:
- trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior
- return trans.show_ok_message( "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype ), refresh_frames=['history'] )
- else:
- return trans.show_error_message( "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype ) )
- elif params.save:
- # The user clicked the Save button on the 'Edit Attributes' form
- data.name = params.name
- data.info = params.info
- message = ''
- if __ok_to_edit_metadata( data.id ):
- # The following for loop will save all metadata_spec items
- for name, spec in data.datatype.metadata_spec.items():
- if spec.get("readonly"):
- continue
- optional = params.get("is_"+name, None)
- other = params.get("or_"+name, None)
- if optional and optional == 'true':
- # optional element... == 'true' actually means it is NOT checked (and therefore omitted)
- setattr(data.metadata, name, None)
- else:
- if other:
- setattr( data.metadata, name, other )
- else:
- setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
- data.datatype.after_setting_metadata( data )
- # Sanitize annotation before adding it.
- if params.annotation:
- annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
- self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
- # If setting metadata previously failed and all required elements have now been set, clear the failed state.
- if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
- data._state = None
- trans.sa_session.flush()
- return trans.show_ok_message( "Attributes updated%s" % message, refresh_frames=['history'] )
- else:
- trans.sa_session.flush()
- return trans.show_warn_message( "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata.", refresh_frames=['history'] )
- elif params.detect:
- # The user clicked the Auto-detect button on the 'Edit Attributes' form
- #prevent modifying metadata when dataset is queued or running as input/output
- if not __ok_to_edit_metadata( data.id ):
- return trans.show_error_message( "This dataset is currently being used as input or output. You cannot change metadata until the jobs have completed or you have canceled them." )
- for name, spec in data.metadata.spec.items():
- # We need to be careful about the attributes we are resetting
- if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
- if spec.get( 'default' ):
- setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
- if trans.app.config.set_metadata_externally:
- message = 'Attributes have been queued to be updated'
- trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
- else:
- message = 'Attributes updated'
- data.set_meta()
- data.datatype.after_setting_metadata( data )
- trans.sa_session.flush()
- return trans.show_ok_message( message, refresh_frames=['history'] )
- elif params.convert_data:
- target_type = kwd.get("target_type", None)
- if target_type:
- message = data.datatype.convert_dataset(trans, data, target_type)
- return trans.show_ok_message( message, refresh_frames=['history'] )
- elif params.update_roles_button:
- if not trans.user:
- return trans.show_error_message( "You must be logged in if you want to change permissions." )
- if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
- access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action )
- manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
- # The user associated the DATASET_ACCESS permission on the dataset with 1 or more roles. We
- # need to ensure that they did not associate roles that would cause accessibility problems.
- permissions, in_roles, error, message = \
- trans.app.security_agent.derive_roles_from_access( trans, data.dataset.id, 'root', **kwd )
- if error:
- # Keep the original role associations for the DATASET_ACCESS permission on the dataset.
- permissions[ access_action ] = data.dataset.get_access_roles( trans )
- status = 'error'
- else:
- error = trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
- if error:
- message += error
- status = 'error'
- else:
- message = 'Your changes completed successfully.'
- trans.sa_session.refresh( data.dataset )
- else:
- return trans.show_error_message( "You are not authorized to change this dataset's permissions" )
- if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey:
- # Copy dbkey into metadata, for backwards compatability
- # This looks like it does nothing, but getting the dbkey
- # returns the metadata dbkey unless it is None, in which
- # case it resorts to the old dbkey. Setting the dbkey
- # sets it properly in the metadata
- #### This is likely no longer required, since the dbkey exists entirely within metadata (the old_dbkey field is gone): REMOVE ME?
- data.metadata.dbkey = data.dbkey
- # let's not overwrite the imported datatypes module with the variable datatypes?
- # the built-in 'id' is overwritten in lots of places as well
- ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
- ldatatypes.sort()
- all_roles = trans.app.security_agent.get_legitimate_roles( trans, data.dataset, 'root' )
- if error:
- status = 'error'
- else:
- status = 'done'
- return trans.fill_template( "/dataset/edit_attributes.mako",
- data=data,
- data_annotation=self.get_item_annotation_str( trans.sa_session, trans.user, data ),
- datatypes=ldatatypes,
- current_user_roles=current_user_roles,
- all_roles=all_roles,
- message=message,
- status=status )
- else:
- return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( id ) )
-
- def __delete_dataset( self, trans, id ):
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- if data:
- # Walk up parent datasets to find the containing history
- topmost_parent = data
- while topmost_parent.parent:
- topmost_parent = topmost_parent.parent
- assert topmost_parent in trans.history.datasets, "Data does not belong to current history"
- # Mark deleted and cleanup
- data.mark_deleted()
- data.clear_associated_files()
- trans.log_event( "Dataset id %s marked as deleted" % str(id) )
- if data.parent_id is None and len( data.creating_job_associations ) > 0:
- # Mark associated job for deletion
- job = data.creating_job_associations[0].job
- if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]:
- # Are *all* of the job's other output datasets deleted?
- if job.check_if_output_datasets_deleted():
- job.mark_deleted( self.app.config.get_bool( 'enable_job_running', True ),
- self.app.config.get_bool( 'track_jobs_in_database', False ) )
- self.app.job_manager.job_stop_queue.put( job.id )
- trans.sa_session.flush()
-
- @web.expose
- def delete( self, trans, id = None, show_deleted_on_refresh = False, **kwd):
- if id:
- if isinstance( id, list ):
- dataset_ids = id
- else:
- dataset_ids = [ id ]
- history = trans.get_history()
- for id in dataset_ids:
- try:
- id = int( id )
- except:
- continue
- self.__delete_dataset( trans, id )
- return self.history( trans, show_deleted = show_deleted_on_refresh )
-
- @web.expose
- def delete_async( self, trans, id = None, **kwd):
- if id:
- try:
- id = int( id )
- except:
- return "Dataset id '%s' is invalid" %str( id )
- self.__delete_dataset( trans, id )
- return "OK"
-
- @web.expose
- def purge( self, trans, id = None, show_deleted_on_refresh = False, **kwd ):
- if not trans.app.config.allow_user_dataset_purge:
- return trans.show_error_message( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." )
- hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( int( id ) )
- if bool( hda.dataset.active_history_associations or hda.dataset.library_associations ):
- return trans.show_error_message( "Unable to purge: LDDA(s) or active HDA(s) exist" )
- elif hda.dataset.purged:
- return trans.show_error_message( "Unable to purge: dataset is already purged" )
- os.unlink( hda.dataset.file_name )
- if os.path.exists( hda.extra_files_path ):
- shutil.rmtree( hda.extra_files_path )
- hda.dataset.purged = True
- trans.sa_session.add( hda.dataset )
- trans.sa_session.flush()
- return self.history( trans, show_deleted = show_deleted_on_refresh )
-
## ---- History management -----------------------------------------------
@web.expose
--- a/lib/galaxy/web/framework/__init__.py Tue Jul 26 13:18:22 2011 -0400
+++ b/lib/galaxy/web/framework/__init__.py Tue Jul 26 13:56:16 2011 -0400
@@ -471,6 +471,7 @@
- associate new session with user
- if old session had a history and it was not associated with a user, associate it with the new session,
otherwise associate the current session's history with the user
+ - add the disk usage of the current session to the user's total disk usage
"""
# Set the previous session
prev_galaxy_session = self.galaxy_session
@@ -494,6 +495,10 @@
# If the previous galaxy session had a history, associate it with the new
# session, but only if it didn't belong to a different user.
history = prev_galaxy_session.current_history
+ if prev_galaxy_session.user is None:
+ # Increase the user's disk usage by the amount of the previous history's datasets if they didn't already own it.
+ for hda in history.datasets:
+ user.total_disk_usage += hda.quota_amount( user )
elif self.galaxy_session.current_history:
history = self.galaxy_session.current_history
if not history and \
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/set_user_disk_usage.py Tue Jul 26 13:56:16 2011 -0400
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+
+import os, sys
+from ConfigParser import ConfigParser
+from optparse import OptionParser
+
+parser = OptionParser()
+parser.add_option( '-c', '--config', dest='config', help='Path to Galaxy config file (universe_wsgi.ini)', default='universe_wsgi.ini' )
+parser.add_option( '-u', '--username', dest='username', help='Username of user to update', default='all' )
+parser.add_option( '-e', '--email', dest='email', help='Email address of user to update', default='all' )
+parser.add_option( '--dry-run', dest='dryrun', help='Dry run (show changes but do not save to database)', action='store_true', default=False )
+( options, args ) = parser.parse_args()
+
+def init():
+
+ options.config = os.path.abspath( options.config )
+ if options.username == 'all':
+ options.username = None
+ if options.email == 'all':
+ options.email = None
+
+ os.chdir( os.path.dirname( options.config ) )
+ sys.path.append( 'lib' )
+
+ from galaxy import eggs
+ import pkg_resources
+
+ config = ConfigParser( dict( file_path = 'database/files',
+ database_connection = 'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
+ config.read( os.path.basename( options.config ) )
+
+ from galaxy.model import mapping
+
+ return mapping.init( config.get( 'app:main', 'file_path' ), config.get( 'app:main', 'database_connection' ), create_tables = False )
+
+def quotacheck( sa_session, user ):
+ sa_session.refresh( user )
+ current = user.get_disk_usage()
+ print user.username, '<' + user.email + '> current usage:', str( current ) + ',',
+ new = user.calculate_disk_usage()
+ sa_session.refresh( user )
+ # usage changed while calculating, do it again
+ if user.get_disk_usage() != current:
+ print 'usage changed while calculating, trying again...'
+ return quotacheck( sa_session, user )
+ # yes, still a small race condition between here and the flush
+ if new == current:
+ print 'no change'
+ else:
+ print 'new:', new
+ if not options.dryrun:
+ user.set_disk_usage( new )
+ sa_session.add( user )
+ sa_session.flush()
+
+if __name__ == '__main__':
+ print 'Loading Galaxy model...'
+ model = init()
+ sa_session = model.context.current
+
+ if not options.username and not options.email:
+ user_count = sa_session.query( model.User ).count()
+ print 'Processing %i users...' % user_count
+ for i, user in enumerate( sa_session.query( model.User ).enable_eagerloads( False ).yield_per( 1000 ) ):
+ print '%3i%%' % int( float(i) / user_count * 100 ),
+ quotacheck( sa_session, user )
+ print '100% complete'
+ sys.exit( 0 )
+ elif options.username:
+ user = sa_session.query( model.User ).enable_eagerloads( False ).filter_by( username=options.username ).first()
+ elif options.email:
+ user = sa_session.query( model.User ).enable_eagerloads( False ).filter_by( email=options.email ).first()
+ if not user:
+ print 'User not found'
+ sys.exit( 1 )
+ quotacheck( sa_session, user )
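
The new maintenance script recalculates the persisted disk_usage outside the request path. A usage sketch, assuming it is run from the Galaxy root directory (the email address is hypothetical):

    python scripts/set_user_disk_usage.py -c universe_wsgi.ini --dry-run
    python scripts/set_user_disk_usage.py -c universe_wsgi.ini -e someone@example.org

With neither -u nor -e it walks every user; --dry-run prints the current and recalculated totals without saving. Note that quotacheck() refreshes the user row after recomputing and starts over if the stored usage changed in the meantime, so it tolerates, though it does not fully eliminate, races with a live server.
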
--- a/templates/dataset/edit_attributes.mako Tue Jul 26 13:18:22 2011 -0400
+++ b/templates/dataset/edit_attributes.mako Tue Jul 26 13:56:16 2011 -0400
@@ -1,5 +1,6 @@
<%inherit file="/base.mako"/><%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/message.mako" name="message_ns" import="javascripts" /><%def name="title()">${_('Edit Dataset Attributes')}</%def>
@@ -10,6 +11,7 @@
<%def name="javascripts()">
${parent.javascripts()}
${h.js( "galaxy.base", "jquery.autocomplete", "autocomplete_tagging" )}
+ ${message_ns.javascripts()}
</%def><%def name="datatype( dataset, datatypes )">
@@ -31,8 +33,7 @@
<div class="toolForm"><div class="toolFormTitle">${_('Edit Attributes')}</div><div class="toolFormBody">
- <form name="edit_attributes" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="edit_attributes" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><label>
Name:
@@ -80,8 +81,7 @@
<input type="submit" name="save" value="${_('Save')}"/></div></form>
- <form name="auto_detect" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="auto_detect" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><div style="float: left; width: 250px; margin-right: 10px;"><input type="submit" name="detect" value="${_('Auto-detect')}"/>
@@ -104,8 +104,7 @@
<div class="toolForm"><div class="toolFormTitle">${_('Convert to new format')}</div><div class="toolFormBody">
- <form name="convert_data" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="convert_data" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><div style="float: left; width: 250px; margin-right: 10px;"><select name="target_type">
@@ -132,8 +131,7 @@
<div class="toolFormTitle">${_('Change data type')}</div><div class="toolFormBody">
%if data.datatype.allow_datatype_change:
- <form name="change_datatype" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="change_datatype" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><label>
${_('New Type')}:
@@ -161,7 +159,7 @@
%if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
- ${render_permission_form( data.dataset, data.get_display_name(), h.url_for( controller='root', action='edit', id=data.id ), all_roles )}
+ ${render_permission_form( data.dataset, data.get_display_name(), h.url_for( controller='dataset', action='edit', dataset_id=dataset_id ), all_roles )}
%elif trans.user:
<div class="toolForm"><div class="toolFormTitle">View Permissions</div>
--- a/templates/root/history.mako Tue Jul 26 13:18:22 2011 -0400
+++ b/templates/root/history.mako Tue Jul 26 13:56:16 2011 -0400
@@ -1,3 +1,5 @@
+<%namespace file="/message.mako" import="render_msg" />
+
<% _=n_ %><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
@@ -105,6 +107,11 @@
});
};
+// Update the message for async operations
+function render_message(message, status) {
+ $("div#message-container").html( "<div class=\"" + status + "message\">" + message + "</div><br/>" );
+}
+
$(function() {
var historywrapper = $("div.historyItemWrapper");
init_history_items(historywrapper);
@@ -115,8 +122,8 @@
$(this).click( function() {
$( '#historyItem-' + data_id + "> div.historyItemTitleBar" ).addClass( "spinner" );
$.ajax({
- url: "${h.url_for( action='delete_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Delete failed" ); },
+ url: "${h.url_for( controller='dataset', action='delete_async', dataset_id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { render_message( "Dataset deletion failed", "error" ); },
success: function(msg) {
if (msg === "OK") {
%if show_deleted:
@@ -133,7 +140,7 @@
%endif
$(".tipsy").remove();
} else {
- alert( "Delete failed" );
+ render_message( "Dataset deletion failed", "error" );
}
}
});
@@ -147,8 +154,8 @@
$(this).click( function() {
$( '#historyItem-' + data_id + " > div.historyItemTitleBar" ).addClass( "spinner" );
$.ajax({
- url: "${h.url_for( controller='dataset', action='undelete_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Undelete failed" ) },
+ url: "${h.url_for( controller='dataset', action='undelete_async', dataset_id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { render_message( "Dataset undeletion failed", "error" ); },
success: function() {
var to_update = {};
to_update[data_id] = "none";
@@ -165,8 +172,8 @@
$(this).click( function() {
$( '#historyItem-' + data_id + " > div.historyItemTitleBar" ).addClass( "spinner" );
$.ajax({
- url: "${h.url_for( controller='dataset', action='purge_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Removal from disk failed" ) },
+ url: "${h.url_for( controller='dataset', action='purge_async', dataset_id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { render_message( "Dataset removal from disk failed", "error" ) },
success: function() {
var to_update = {};
to_update[data_id] = "none";
@@ -258,7 +265,7 @@
// Updater
updater(
- ${ h.to_json_string( dict([(data.id, data.state) for data in reversed( datasets ) if data.visible and data.state not in TERMINAL_STATES]) ) }
+ ${ h.to_json_string( dict([(trans.app.security.encode_id(data.id), data.state) for data in reversed( datasets ) if data.visible and data.state not in TERMINAL_STATES]) ) }
);
// Navigate to a dataset.
@@ -311,11 +318,11 @@
if ( val.force_history_refresh ){
force_history_refresh = true;
}
- delete tracked_datasets[ parseInt(id) ];
+ delete tracked_datasets[id];
// When a dataset becomes terminal, check for changes in history disk size
check_history_size = true;
} else {
- tracked_datasets[ parseInt(id) ] = val.state;
+ tracked_datasets[id] = val.state;
}
});
if ( force_history_refresh ) {
@@ -458,6 +465,12 @@
</div>
%endif
+<div id="message-container">
+ %if message:
+ ${render_msg( message, status )}
+ %endif
+</div>
+
%if not datasets:
<div class="infomessagesmall" id="emptyHistoryMessage">
@@ -467,7 +480,7 @@
## Render requested datasets, ordered from newest to oldest
%for data in reversed( datasets ):
%if data.visible or show_hidden:
- <div class="historyItemContainer" id="historyItemContainer-${data.id}">
+ <div class="historyItemContainer" id="historyItemContainer-${trans.app.security.encode_id(data.id)}">
${render_dataset( data, data.hid, show_deleted_on_refresh = show_deleted, for_editing = True )}
</div>
%endif
--- a/templates/root/history_common.mako Tue Jul 26 13:18:22 2011 -0400
+++ b/templates/root/history_common.mako Tue Jul 26 13:56:16 2011 -0400
@@ -39,9 +39,9 @@
can_edit = not ( data.deleted or data.purged )
%>
%if not trans.user_is_admin() and not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
- <div class="historyItemWrapper historyItem historyItem-${data_state} historyItem-noPermission" id="historyItem-${data.id}">
+ <div class="historyItemWrapper historyItem historyItem-${data_state} historyItem-noPermission" id="historyItem-${dataset_id}">
%else:
- <div class="historyItemWrapper historyItem historyItem-${data_state}" id="historyItem-${data.id}">
+ <div class="historyItemWrapper historyItem historyItem-${data_state}" id="historyItem-${dataset_id}">
%endif
%if data.deleted or data.purged or data.dataset.purged:
@@ -51,9 +51,9 @@
%else:
This dataset has been deleted.
%if for_editing:
- Click <a href="${h.url_for( controller='dataset', action='undelete', id=data.id )}" class="historyItemUndelete" id="historyItemUndeleter-${data.id}" target="galaxy_history">here</a> to undelete
+ Click <a href="${h.url_for( controller='dataset', action='undelete', dataset_id=dataset_id )}" class="historyItemUndelete" id="historyItemUndeleter-${dataset_id}" target="galaxy_history">here</a> to undelete
%if trans.app.config.allow_user_dataset_purge:
- or <a href="${h.url_for( controller='dataset', action='purge', id=data.id )}" class="historyItemPurge" id="historyItemPurger-${data.id}" target="galaxy_history">here</a> to immediately remove it from disk.
+ or <a href="${h.url_for( controller='dataset', action='purge', dataset_id=dataset_id )}" class="historyItemPurge" id="historyItemPurger-${dataset_id}" target="galaxy_history">here</a> to immediately remove it from disk.
%else:
it.
%endif
@@ -64,7 +64,7 @@
%if data.visible is False:
<div class="warningmessagesmall">
- <strong>This dataset has been hidden. Click <a href="${h.url_for( controller='dataset', action='unhide', id=data.id )}" class="historyItemUnhide" id="historyItemUnhider-${data.id}" target="galaxy_history">here</a> to unhide.</strong>
+ <strong>This dataset has been hidden. Click <a href="${h.url_for( controller='dataset', action='unhide', dataset_id=dataset_id )}" class="historyItemUnhide" id="historyItemUnhider-${dataset_id}" target="galaxy_history">here</a> to unhide.</strong></div>
%endif
@@ -110,13 +110,13 @@
%elif data.purged:
<span title="Cannot edit attributes of datasets removed from disk" class="icon-button edit_disabled tooltip"></span>
%else:
- <a class="icon-button edit tooltip" title="Edit attributes" href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main"></a>
+ <a class="icon-button edit tooltip" title="Edit attributes" href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main"></a>
%endif
%endif
%endif
%if for_editing:
%if can_edit:
- <a class="icon-button delete tooltip" title="Delete" href="${h.url_for( action='delete', id=data.id, show_deleted_on_refresh=show_deleted_on_refresh )}" id="historyItemDeleter-${data.id}"></a>
+ <a class="icon-button delete tooltip" title="Delete" href="${h.url_for( controller='dataset', action='delete', dataset_id=dataset_id, show_deleted_on_refresh=show_deleted_on_refresh )}" id="historyItemDeleter-${dataset_id}"></a>
%else:
<span title="Dataset is already deleted" class="icon-button delete_disabled tooltip"></span>
%endif
@@ -184,7 +184,7 @@
<div class="warningmessagesmall" style="margin: 4px 0 4px 0">
An error occurred setting the metadata for this dataset.
%if can_edit:
- You may be able to <a href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main">set it manually or retry auto-detection</a>.
+ You may be able to <a href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main">set it manually or retry auto-detection</a>.
%endif
</div>
%endif
@@ -193,7 +193,7 @@
format: <span class="${data.ext}">${data.ext}</span>,
database:
%if data.dbkey == '?' and can_edit:
- <a href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main">${_(data.dbkey)}</a>
+ <a href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main">${_(data.dbkey)}</a>
%else:
<span class="${data.dbkey}">${_(data.dbkey)}</span>
%endif
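
The template and URL changes above replace raw integer dataset ids with encoded ids throughout. A minimal sketch of the encoding round trip, assuming the SecurityHelper interface that trans.security / trans.app.security expose; the id_secret below is a placeholder and would have to match the server's configured value:

    # Minimal sketch; galaxy.web.security.SecurityHelper is assumed, and the
    # id_secret is a placeholder that must match the value in universe_wsgi.ini.
    from galaxy.web import security

    helper = security.SecurityHelper( id_secret='placeholder-id-secret' )

    encoded = helper.encode_id( 42 )            # e.g. used to build /datasets/<id>/edit
    assert helper.decode_id( encoded ) == 42
    print '/datasets/%s/edit' % encoded
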
--- a/templates/user/index.mako Tue Jul 26 13:18:22 2011 -0400
+++ b/templates/user/index.mako Tue Jul 26 13:56:16 2011 -0400
@@ -22,6 +22,7 @@
<li><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller, webapp='community' )}">${_('Manage your information')}</a></li>
%endif
</ul>
+ <p>You are currently using <strong>${trans.user.get_disk_usage( nice_size=True )}</strong> of disk space in this Galaxy instance.</p>
%else:
%if not message:
<p>${n_('You are currently not logged in.')}</p>
--- a/templates/webapps/community/repository/common.mako Tue Jul 26 13:18:22 2011 -0400
+++ b/templates/webapps/community/repository/common.mako Tue Jul 26 13:56:16 2011 -0400
@@ -50,39 +50,4 @@
onActivate: function(dtnode) {
var cell = $("#file_contents");
var selected_value;
- if (dtnode.data.key == 'root') {
- selected_value = "${repository.repo_path}/";
- } else {
- selected_value = dtnode.data.key;
- };
- if (selected_value.charAt(selected_value.length-1) != '/') {
- // Make ajax call
- $.ajax( {
- type: "POST",
- url: "${h.url_for( controller='repository', action='get_file_contents' )}",
- dataType: "json",
- data: { file_path: selected_value },
- success : function ( data ) {
- cell.html( '<label>'+data+'</label>' )
- }
- });
- } else {
- cell.html( '' );
- };
- },
- });
- });
- </script>
-</%def>
-
-<%def name="render_clone_str( repository )">
- <%
- protocol, base = trans.request.base.split( '://' )
- if trans.user:
- username = '%s@' % trans.user.username
- else:
- username = ''
- clone_str = '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
- %>
- hg clone <a href="${clone_str}">${clone_str}</a>
-</%def>
\ No newline at end of file
+
\ No newline at end of file
--- a/test/base/twilltestcase.py Tue Jul 26 13:18:22 2011 -0400
+++ b/test/base/twilltestcase.py Tue Jul 26 13:56:16 2011 -0400
@@ -474,7 +474,7 @@
elem = data_list[-1]
hid = int( elem.get('hid') )
self.assertTrue( hid )
- self.visit_page( "edit?hid=%s" % hid )
+ self.visit_page( "dataset/edit?hid=%s" % hid )
for subpatt in patt.split():
tc.find(subpatt)
def delete_history_item( self, hda_id, strings_displayed=[] ):
@@ -483,7 +483,7 @@
hda_id = int( hda_id )
except:
raise AssertionError, "Invalid hda_id '%s' - must be int" % hda_id
- self.visit_url( "%s/root/delete?show_deleted_on_refresh=False&id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/delete?show_deleted_on_refresh=False" % ( self.url, self.security.encode_id( hda_id ) ) )
for check_str in strings_displayed:
self.check_page_for_string( check_str )
def undelete_history_item( self, hda_id, strings_displayed=[] ):
@@ -492,7 +492,7 @@
hda_id = int( hda_id )
except:
raise AssertionError, "Invalid hda_id '%s' - must be int" % hda_id
- self.visit_url( "%s/dataset/undelete?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/undelete" % ( self.url, self.security.encode_id( hda_id ) ) )
for check_str in strings_displayed:
self.check_page_for_string( check_str )
def display_history_item( self, hda_id, strings_displayed=[] ):
@@ -511,7 +511,7 @@
strings_displayed=[], strings_not_displayed=[] ):
"""Edit history_dataset_association attribute information"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
submit_required = False
self.check_page_for_string( 'Edit Attributes' )
if new_name:
@@ -545,9 +545,9 @@
def auto_detect_metadata( self, hda_id ):
"""Auto-detect history_dataset_association metadata"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
self.check_page_for_string( 'This will inspect the dataset and attempt' )
- tc.fv( 'auto_detect', 'id', hda_id )
+ tc.fv( 'auto_detect', 'detect', 'Auto-detect' )
tc.submit( 'detect' )
try:
self.check_page_for_string( 'Attributes have been queued to be updated' )
@@ -559,7 +559,7 @@
def convert_format( self, hda_id, target_type ):
"""Convert format of history_dataset_association"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
self.check_page_for_string( 'This will inspect the dataset and attempt' )
tc.fv( 'convert_data', 'target_type', target_type )
tc.submit( 'convert_data' )
@@ -569,7 +569,7 @@
def change_datatype( self, hda_id, datatype ):
"""Change format of history_dataset_association"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
self.check_page_for_string( 'This will change the datatype of the existing dataset but' )
tc.fv( 'change_datatype', 'datatype', datatype )
tc.submit( 'change' )
--- a/test/functional/test_history_functions.py Tue Jul 26 13:18:22 2011 -0400
+++ b/test/functional/test_history_functions.py Tue Jul 26 13:56:16 2011 -0400
@@ -664,7 +664,7 @@
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
.first()
self.home()
- self.visit_url( "%s/root/delete?show_deleted_on_refresh=False&id=%s" % ( self.url, str( latest_hda.id ) ) )
+ self.delete_history_item( str( latest_hda.id ) )
self.check_history_for_string( 'Your history is empty' )
self.home()
self.visit_url( "%s/history/?show_deleted=True" % self.url )
http://bitbucket.org/galaxy/galaxy-central/changeset/57c8053675b6/
changeset: 57c8053675b6
user: dannon
date: 2011-07-28 14:22:07
summary: Expose workflows shared with the user in /workflows list of the API.
affected #: 1 file (597 bytes)
--- a/lib/galaxy/web/api/workflows.py Tue Jul 26 13:56:16 2011 -0400
+++ b/lib/galaxy/web/api/workflows.py Thu Jul 28 08:22:07 2011 -0400
@@ -29,6 +29,14 @@
encoded_id = trans.security.encode_id(wf.id)
item['url'] = url_for('workflow', id=encoded_id)
rval.append(item)
+ for wf_sa in trans.sa_session.query( trans.app.model.StoredWorkflowUserShareAssociation ).filter_by(
+ user=trans.user ).join( 'stored_workflow' ).filter(
+ trans.app.model.StoredWorkflow.deleted == False ).order_by(
+ desc( trans.app.model.StoredWorkflow.update_time ) ).all():
+ item = wf_sa.stored_workflow.get_api_value(value_mapper={'id':trans.security.encode_id})
+ encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
+ item['url'] = url_for('workflow', id=encoded_id)
+ rval.append(item)
return rval
@web.expose_api
def show(self, trans, id, **kwd):
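
A hedged client-side sketch of what the expanded index now returns: workflows shared with the user are appended to the same JSON list as the user's own, each carrying an encoded id and a url. GALAXY_URL and API_KEY below are placeholders for a reachable instance and a valid user API key:

    # Client-side sketch (placeholders: GALAXY_URL, API_KEY); requires a Galaxy
    # instance with the API enabled.
    import json, urllib2

    GALAXY_URL = 'http://localhost:8080'
    API_KEY = 'replace-with-your-api-key'

    response = urllib2.urlopen( '%s/api/workflows?key=%s' % ( GALAXY_URL, API_KEY ) )
    for workflow in json.loads( response.read() ):
        # Owned and shared workflows now appear side by side in this list.
        print workflow[ 'id' ], workflow.get( 'name' ), workflow[ 'url' ]
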
http://bitbucket.org/galaxy/galaxy-central/changeset/f0c4e3efcb99/
changeset: f0c4e3efcb99
user: dannon
date: 2011-07-28 14:22:22
summary: Merge.
affected #: 15 files (61.1 KB)
--- a/datatypes_conf.xml.sample Thu Jul 28 08:22:07 2011 -0400
+++ b/datatypes_conf.xml.sample Thu Jul 28 08:22:22 2011 -0400
@@ -72,9 +72,10 @@
<!-- <display file="gbrowse/gbrowse_gff.xml" inherit="True" /> --></datatype><datatype extension="gff3" type="galaxy.datatypes.interval:Gff3" display_in_upload="true"/>
- <datatype extension="gif" type="galaxy.datatypes.images:Image" mimetype="image/gif"/>
+ <datatype extension="gif" type="galaxy.datatypes.images:Gif" mimetype="image/gif"/><datatype extension="gmaj.zip" type="galaxy.datatypes.images:Gmaj" mimetype="application/zip"/><datatype extension="gtf" type="galaxy.datatypes.interval:Gtf" display_in_upload="true"/>
+ <datatype extension="h5" type="galaxy.datatypes.data:Data" mimetype="application/octet-stream"/><datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/><datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true"><converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
@@ -90,7 +91,21 @@
<datatype extension="picard_interval_list" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/><datatype extension="gatk_interval" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/><datatype extension="gatk_dbsnp" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/>
- <datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/>
+ <datatype extension="jpg" type="galaxy.datatypes.images:Jpg" mimetype="image/jpeg"/>
+ <datatype extension="tiff" type="galaxy.datatypes.images:Tiff" mimetype="image/tiff"/>
+ <datatype extension="bmp" type="galaxy.datatypes.images:Bmp" mimetype="image/bmp"/>
+ <datatype extension="im" type="galaxy.datatypes.images:Im" mimetype="image/im"/>
+ <datatype extension="pcd" type="galaxy.datatypes.images:Pcd" mimetype="image/pcd"/>
+ <datatype extension="pcx" type="galaxy.datatypes.images:Pcx" mimetype="image/pcx"/>
+ <datatype extension="ppm" type="galaxy.datatypes.images:Ppm" mimetype="image/ppm"/>
+ <datatype extension="psd" type="galaxy.datatypes.images:Psd" mimetype="image/psd"/>
+ <datatype extension="xbm" type="galaxy.datatypes.images:Xbm" mimetype="image/xbm"/>
+ <datatype extension="xpm" type="galaxy.datatypes.images:Xpm" mimetype="image/xpm"/>
+ <datatype extension="rgb" type="galaxy.datatypes.images:Rgb" mimetype="image/rgb"/>
+ <datatype extension="pbm" type="galaxy.datatypes.images:Pbm" mimetype="image/pbm"/>
+ <datatype extension="pgm" type="galaxy.datatypes.images:Pgm" mimetype="image/pgm"/>
+ <datatype extension="eps" type="galaxy.datatypes.images:Eps" mimetype="image/eps"/>
+ <datatype extension="rast" type="galaxy.datatypes.images:Rast" mimetype="image/rast"/><datatype extension="laj" type="galaxy.datatypes.images:Laj"/><datatype extension="lav" type="galaxy.datatypes.sequence:Lav" display_in_upload="true"/><datatype extension="maf" type="galaxy.datatypes.sequence:Maf" display_in_upload="true">
@@ -102,7 +117,7 @@
</datatype><datatype extension="pdf" type="galaxy.datatypes.images:Pdf" mimetype="application/pdf"/><datatype extension="pileup" type="galaxy.datatypes.tabular:Pileup" display_in_upload="true" />
- <datatype extension="png" type="galaxy.datatypes.images:Image" mimetype="image/png"/>
+ <datatype extension="png" type="galaxy.datatypes.images:Png" mimetype="image/png"/><datatype extension="qual" type="galaxy.datatypes.qualityscore:QualityScore" /><datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/><datatype extension="qualillumina" type="galaxy.datatypes.qualityscore:QualityScoreIllumina" display_in_upload="true"/>
@@ -116,7 +131,7 @@
<datatype extension="svg" type="galaxy.datatypes.images:Image" mimetype="image/svg+xml"/><datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/><datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/>
- <datatype extension="twobit" type="galaxy.datatypes.binary:TwoBit" mimetype="application/octet-stream" display_in_upload="true"/>
+ <datatype extension="twobit" type="galaxy.datatypes.binary:TwoBit" mimetype="application/octet-stream" display_in_upload="true"/><datatype extension="txt" type="galaxy.datatypes.data:Text" display_in_upload="true"/><datatype extension="memexml" type="galaxy.datatypes.xml:MEMEXml" mimetype="application/xml" display_in_upload="true"/><datatype extension="blastxml" type="galaxy.datatypes.xml:BlastXml" mimetype="application/xml" display_in_upload="true"/>
@@ -304,6 +319,24 @@
<sniffer type="galaxy.datatypes.tabular:Pileup"/><sniffer type="galaxy.datatypes.interval:Interval"/><sniffer type="galaxy.datatypes.tabular:Sam"/>
+ <sniffer type="galaxy.datatypes.images:Jpg"/>
+ <sniffer type="galaxy.datatypes.images:Png"/>
+ <sniffer type="galaxy.datatypes.images:Tiff"/>
+ <sniffer type="galaxy.datatypes.images:Bmp"/>
+ <sniffer type="galaxy.datatypes.images:Gif"/>
+ <sniffer type="galaxy.datatypes.images:Im"/>
+ <sniffer type="galaxy.datatypes.images:Pcd"/>
+ <sniffer type="galaxy.datatypes.images:Pcx"/>
+ <sniffer type="galaxy.datatypes.images:Ppm"/>
+ <sniffer type="galaxy.datatypes.images:Psd"/>
+ <sniffer type="galaxy.datatypes.images:Xbm"/>
+ <sniffer type="galaxy.datatypes.images:Xpm"/>
+ <sniffer type="galaxy.datatypes.images:Rgb"/>
+ <sniffer type="galaxy.datatypes.images:Pbm"/>
+ <sniffer type="galaxy.datatypes.images:Pgm"/>
+ <sniffer type="galaxy.datatypes.images:Xpm"/>
+ <sniffer type="galaxy.datatypes.images:Eps"/>
+ <sniffer type="galaxy.datatypes.images:Rast"/><!--
Keep this commented until the sniff method in the assembly.py
module is fixed to not read the entire file.
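
For context on the new <sniffer> entries: at upload time the registered sniffers are tried in order, and the first datatype whose sniff() returns True supplies the extension, so more specific formats should be listed before more permissive ones. A simplified sketch of that dispatch, assuming a list of instantiated datatype objects:

    # Simplified sketch of sniffer dispatch; assumes each datatype instance has a
    # file_ext attribute and a sniff( filename ) method, like the image classes below.
    def guess_extension( filename, sniff_order ):
        for datatype in sniff_order:
            try:
                if datatype.sniff( filename ):
                    return datatype.file_ext
            except Exception:
                # A sniffer that cannot parse the file simply does not match.
                continue
        return 'data'  # generic fallback when nothing matches
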
--- a/lib/galaxy/datatypes/binary.py Thu Jul 28 08:22:07 2011 -0400
+++ b/lib/galaxy/datatypes/binary.py Thu Jul 28 08:22:22 2011 -0400
@@ -18,7 +18,7 @@
log = logging.getLogger(__name__)
# Currently these supported binary data types must be manually set on upload
-unsniffable_binary_formats = [ 'ab1', 'scf' ]
+unsniffable_binary_formats = [ 'ab1', 'scf', 'h5' ]
class Binary( data.Data ):
"""Binary data"""
@@ -206,7 +206,24 @@
return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
def get_track_type( self ):
return "ReadTrack", {"data": "bai", "index": "summary_tree"}
-
+
+class H5( Binary ):
+ """Class describing an HDF5 file"""
+ file_ext = "h5"
+
+ def set_peek( self, dataset, is_multi_byte=False ):
+ if not dataset.dataset.purged:
+ dataset.peek = "Binary h5 file"
+ dataset.blurb = data.nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+ def display_peek( self, dataset ):
+ try:
+ return dataset.peek
+ except:
+ return "Binary h5 sequence file (%s)" % ( data.nice_size( dataset.get_size() ) )
+
class Scf( Binary ):
"""Class describing an scf binary sequence file"""
file_ext = "scf"
@@ -292,7 +309,6 @@
Binary.__init__( self, **kwd )
self._magic = 0x8789F2EB
self._name = "BigBed"
-
def get_track_type( self ):
return "LineTrack", {"data_standalone": "bigbed"}
@@ -309,14 +325,12 @@
return True
except IOError:
return False
-
def set_peek(self, dataset, is_multi_byte=False):
if not dataset.dataset.purged:
dataset.peek = "Binary TwoBit format nucleotide file"
dataset.blurb = data.nice_size(dataset.get_size())
else:
return super(TwoBit, self).set_peek(dataset, is_multi_byte)
-
def display_peek(self, dataset):
try:
return dataset.peek
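
Because 'h5' is added to unsniffable_binary_formats, HDF5 datasets are expected to have their format set explicitly at upload rather than auto-detected. If detection were ever wanted, HDF5 files carry a fixed 8-byte signature; a hypothetical check (not part of this changeset) might look like:

    # Hypothetical helper, not in this changeset: HDF5 files start with the
    # 8-byte signature \x89 H D F \r \n \x1a \n.
    def looks_like_hdf5( filename ):
        try:
            header = open( filename, 'rb' ).read( 8 )
        except IOError:
            return False
        return header == '\x89HDF\r\n\x1a\n'
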
--- a/lib/galaxy/datatypes/checkers.py Thu Jul 28 08:22:07 2011 -0400
+++ b/lib/galaxy/datatypes/checkers.py Thu Jul 28 08:22:22 2011 -0400
@@ -1,6 +1,28 @@
import os, gzip, re, zipfile, binascii, bz2, imghdr
from galaxy import util
+try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
+
+def check_image( file_path ):
+ if PIL != None:
+ try:
+ im = PIL.open( file_path )
+ except:
+ return False
+ if im:
+ return im
+ return False
+ else:
+ if imghdr.what( file_path ) != None:
+ return True
+ return False
+
def check_html( file_path, chunk=None ):
if chunk is None:
temp = open( file_path, "U" )
--- a/lib/galaxy/datatypes/images.py Thu Jul 28 08:22:07 2011 -0400
+++ b/lib/galaxy/datatypes/images.py Thu Jul 28 08:22:22 2011 -0400
@@ -7,12 +7,31 @@
from galaxy.datatypes.metadata import MetadataElement
from galaxy.datatypes import metadata
from galaxy.datatypes.sniff import *
+from galaxy.datatypes.util.image_util import *
from urllib import urlencode, quote_plus
import zipfile
-import os, subprocess, tempfile
+import os, subprocess, tempfile, imghdr
+
+try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
log = logging.getLogger(__name__)
+# TODO: Uploading image files of various types is supported in Galaxy, but on
+# the main public instance, the display_in_upload is not set for these data
+# types in datatypes_conf.xml because we do not allow image files to be uploaded
+# there. There is currently no API feature that allows uploading files outside
+# of a data library ( where it requires either the upload_paths or upload_directory
+# option to be enabled, which is not the case on the main public instance ). Because
+# of this, we're currently safe, but when the api is enhanced to allow other uploads,
+# we need to ensure that the implementation is such that image files cannot be uploaded
+# to our main public instance.
+
class Image( data.Data ):
"""Class describing an image"""
def set_peek( self, dataset, is_multi_byte=False ):
@@ -22,11 +41,110 @@
else:
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
+ def sniff( self, filename ):
+ # First check if we can use PIL
+ if PIL is not None:
+ try:
+ im = PIL.open( filename )
+ im.close()
+ return True
+ except:
+ return False
+ else:
+ if imghdr.what( filename ) is not None:
+ return True
+ else:
+ return False
+
+class Jpg( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in jpg format."""
+ return check_image_type( filename, ['JPEG'], image )
+
+class Png( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in png format."""
+ return check_image_type( filename, ['PNG'], image )
+
+class Tiff( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in tiff format."""
+ return check_image_type( filename, ['TIFF'], image )
+
+class Bmp( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in bmp format."""
+ return check_image_type( filename, ['BMP'], image )
+
+class Gif( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in gif format."""
+ return check_image_type( filename, ['GIF'], image )
+
+class Im( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in im format."""
+ return check_image_type( filename, ['IM'], image )
+
+class Pcd( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in pcd format."""
+ return check_image_type( filename, ['PCD'], image )
+
+class Pcx( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in pcx format."""
+ return check_image_type( filename, ['PCX'], image )
+
+class Ppm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in ppm format."""
+ return check_image_type( filename, ['PPM'], image )
+
+class Psd( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in psd format."""
+ return check_image_type( filename, ['PSD'], image )
+
+class Xbm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in XBM format."""
+ return check_image_type( filename, ['XBM'], image )
+
+class Xpm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in XPM format."""
+ return check_image_type( filename, ['XPM'], image )
+
+class Rgb( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in RGB format."""
+ return check_image_type( filename, ['RGB'], image )
+
+class Pbm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in PBM format"""
+ return check_image_type( filename, ['PBM'], image )
+
+class Pgm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in PGM format"""
+ return check_image_type( filename, ['PGM'], image )
+
+class Eps( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in eps format."""
+ return check_image_type( filename, ['EPS'], image )
+
+
+class Rast( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in rast format"""
+ return check_image_type( filename, ['RAST'], image )
class Pdf( Image ):
def sniff(self, filename):
- """Determine if the file is in pdf format.
- """
+ """Determine if the file is in pdf format."""
headers = get_headers(filename, None, 1)
try:
if headers[0][0].startswith("%PDF"):
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/util/image_util.py Thu Jul 28 08:22:22 2011 -0400
@@ -0,0 +1,76 @@
+"""
+Provides utilities for working with image files.
+"""
+import logging, imghdr
+
+try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
+
+log = logging.getLogger(__name__)
+
+def image_type( filename, image=None ):
+ format = ''
+ if PIL is not None:
+ if image is not None:
+ format = image.format
+ else:
+ try:
+ im = PIL.open( filename )
+ format = im.format
+ im.close()
+ except:
+ return False
+ else:
+ format = imghdr.what( filename )
+ if format is not None:
+ format = format.upper()
+ else:
+ return False
+ return format
+def check_image_type( filename, types, image=None ):
+ format = image_type( filename, image )
+ # First check if we can use PIL
+ if format in types:
+ return True
+ return False
+def get_image_ext ( file_path, image ):
+ #determine ext
+ format = image_type( file_path, image )
+ if format in [ 'JPG','JPEG' ]:
+ return 'jpg'
+ if format == 'PNG':
+ return 'png'
+ if format == 'TIFF':
+ return 'tiff'
+ if format == 'BMP':
+ return 'bmp'
+ if format == 'GIF':
+ return 'gif'
+ if format == 'IM':
+ return 'im'
+ if format == 'PCD':
+ return 'pcd'
+ if format == 'PCX':
+ return 'pcx'
+ if format == 'PPM':
+ return 'ppm'
+ if format == 'PSD':
+ return 'psd'
+ if format == 'XBM':
+ return 'xbm'
+ if format == 'XPM':
+ return 'xpm'
+ if format == 'RGB':
+ return 'rgb'
+ if format == 'PBM':
+ return 'pbm'
+ if format == 'PGM':
+ return 'pgm'
+ if format == 'EPS':
+ return 'eps'
+ return None
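
Taken together, check_image() and the image_util helpers let upload code open a candidate file once (with PIL when it is installed, imghdr otherwise) and map the detected format to a Galaxy extension. A rough usage sketch under those assumptions; the path below is a placeholder:

    # Rough usage sketch; 'uploaded_file.dat' is a placeholder path.
    from galaxy.datatypes.checkers import check_image
    from galaxy.datatypes.util.image_util import get_image_ext

    file_path = 'uploaded_file.dat'
    image = check_image( file_path )      # PIL Image object, True (imghdr hit), or False
    if image:
        # Reuse the already opened PIL image when we have one so the file is
        # only parsed once; under the imghdr fallback there is no image object.
        pil_image = image if image is not True else None
        ext = get_image_ext( file_path, pil_image )
        print 'detected image, extension: %s' % ext
    else:
        print 'not a recognized image'
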
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/5_mult_liftover_mapped.bed Thu Jul 28 08:22:22 2011 -0400
@@ -0,0 +1,132 @@
+chr7 116197893 116197920 CCDS5763.1_cds_0_0_chr7_115444713_f 1 +
+chr7 116222929 116223015 CCDS5763.1_cds_1_0_chr7_115468539_f 1 +
+chr7 116237938 116238188 CCDS5763.1_cds_2_0_chr7_115483025_f 1 +
+chr7 116239076 116239412 CCDS5763.1_cds_3_0_chr7_115484166_f 1 +
+chr7 116240675 116240891 CCDS5763.1_cds_4_0_chr7_115485765_f 1 +
+chr7 116241233 116241396 CCDS5763.1_cds_5_0_chr7_115486323_f 1 +
+chr7 116246193 116246382 CCDS5763.1_cds_6_0_chr7_115491299_f 1 +
+chr7 116222929 116223015 CCDS5764.1_cds_0_0_chr7_115468539_f 1 +
+chr7 116237938 116238188 CCDS5764.1_cds_1_0_chr7_115483025_f 1 +
+chr7 116239076 116239412 CCDS5764.1_cds_2_0_chr7_115484166_f 1 +
+chr7 116240675 116240891 CCDS5764.1_cds_3_0_chr7_115485765_f 1 +
+chr7 116241233 116241396 CCDS5764.1_cds_4_0_chr7_115486323_f 1 +
+chr7 116246193 116246382 CCDS5764.1_cds_5_0_chr7_115491299_f 1 +
+chr7 116495075 116495225 CCDS5766.1_cds_0_0_chr7_115733787_f 1 +
+chr7 116495546 116495734 CCDS5766.1_cds_1_0_chr7_115734265_f 1 +
+chr7 116501260 116501411 CCDS5766.1_cds_2_0_chr7_115739976_f 1 +
+chr7 116495075 116495225 CCDS5765.1_cds_0_0_chr7_115733787_f 1 +
+chr7 116501260 116501449 CCDS5765.1_cds_1_0_chr7_115739976_f 1 +
+chr7 116522088 116522118 CCDS5767.1_cds_0_0_chr7_115759068_f 1 +
+chr7 116523550 116523715 CCDS5767.1_cds_1_0_chr7_115760530_f 1 +
+chr7 116562770 116563112 CCDS5767.1_cds_2_0_chr7_115792951_f 1 +
+chr7 116872904 116872943 CCDS5768.1_cds_0_0_chr7_116096617_f 1 +
+chr7 116899050 116899114 CCDS5768.1_cds_1_0_chr7_116122132_f 1 +
+chr7 116903933 116903985 CCDS5768.1_cds_2_0_chr7_116126999_f 1 +
+chr7 116909709 116909773 CCDS5768.1_cds_3_0_chr7_116132777_f 1 +
+chr7 116915110 116915317 CCDS5768.1_cds_4_0_chr7_116138182_f 1 +
+chr7 116917198 116917278 CCDS5768.1_cds_5_0_chr7_116140268_f 1 +
+chr7 116921242 116921321 CCDS5768.1_cds_6_0_chr7_116144238_f 1 +
+chr7 116923078 116923150 CCDS5768.1_cds_7_0_chr7_116146074_f 1 +
+chr7 116927214 116927277 CCDS5768.1_cds_8_0_chr7_116150065_f 1 +
+chr7 116928880 116929021 CCDS5768.1_cds_9_0_chr7_116151732_f 1 +
+chr7 116964784 116964932 CCDS5770.1_cds_0_0_chr7_116187546_f 1 +
+chr7 117108894 117108977 CCDS5770.1_cds_1_0_chr7_116333767_f 1 +
+chr7 117128694 117128854 CCDS5770.1_cds_2_0_chr7_116353566_f 1 +
+chr7 117138899 117138954 CCDS5770.1_cds_3_0_chr7_116363798_f 1 +
+chr7 117139597 117139713 CCDS5770.1_cds_4_0_chr7_116364496_f 1 +
+chr7 117140988 117141064 CCDS5770.1_cds_5_0_chr7_116365890_f 1 +
+chr7 117143259 117143328 CCDS5770.1_cds_6_0_chr7_116368129_f 1 +
+chr7 117145226 117145381 CCDS5770.1_cds_7_0_chr7_116370086_f 1 +
+chr7 117147574 117147672 CCDS5770.1_cds_8_0_chr7_116372440_f 1 +
+chr7_random 2679423 2679538 CCDS5770.1_cds_9_0_chr7_116404867_f 1 +
+chr7 117201671 117201751 CCDS5770.1_cds_10_0_chr7_116423326_f 1 +
+chr7 117203227 117203330 CCDS5770.1_cds_11_0_chr7_116424839_f 1 +
+chr7 117222109 117222260 CCDS5770.1_cds_12_0_chr7_116443792_f 1 +
+chr7 117231432 117231525 CCDS5770.1_cds_13_0_chr7_116453089_f 1 +
+chr7 117234203 117234343 CCDS5770.1_cds_14_0_chr7_116455928_f 1 +
+chr7 117235141 117235261 CCDS5770.1_cds_15_0_chr7_116456866_f 1 +
+chr7 116964784 116964932 CCDS5769.1_cds_0_0_chr7_116187546_f 1 +
+chr7 117108894 117108977 CCDS5769.1_cds_1_0_chr7_116333767_f 1 +
+chr7 117128694 117128854 CCDS5769.1_cds_2_0_chr7_116353566_f 1 +
+chr7 117138899 117138954 CCDS5769.1_cds_3_0_chr7_116363798_f 1 +
+chr7 117139597 117139713 CCDS5769.1_cds_4_0_chr7_116364496_f 1 +
+chr7 117140988 117141064 CCDS5769.1_cds_5_0_chr7_116365890_f 1 +
+chr7 117145226 117145381 CCDS5769.1_cds_6_0_chr7_116370086_f 1 +
+chr7 117147574 117147672 CCDS5769.1_cds_7_0_chr7_116372440_f 1 +
+chr7_random 2679423 2679538 CCDS5769.1_cds_8_0_chr7_116404867_f 1 +
+chr7 117201671 117201751 CCDS5769.1_cds_9_0_chr7_116423326_f 1 +
+chr7 117203227 117203330 CCDS5769.1_cds_10_0_chr7_116424839_f 1 +
+chr7 117222109 117222260 CCDS5769.1_cds_11_0_chr7_116443792_f 1 +
+chr7 117231432 117231525 CCDS5769.1_cds_12_0_chr7_116453089_f 1 +
+chr7 117234203 117234343 CCDS5769.1_cds_13_0_chr7_116455928_f 1 +
+chr7 117241962 117242058 CCDS5769.1_cds_14_0_chr7_116463767_f 1 +
+chr7 117291331 117291561 CCDS5771.1_cds_0_0_chr7_116512160_r 1 -
+chr7 117310742 117311007 CCDS5771.1_cds_1_0_chr7_116531617_r 1 -
+chr7 117328536 117328629 CCDS5771.1_cds_2_0_chr7_116549076_r 1 -
+chr7 117333743 117333970 CCDS5771.1_cds_3_0_chr7_116554572_r 1 -
+chr7 117336084 117336167 CCDS5771.1_cds_4_0_chr7_116556912_r 1 -
+chr7 117382797 117382950 CCDS5772.1_cds_0_0_chr7_116597601_r 1 -
+chr7 117386552 117386666 CCDS5772.1_cds_1_0_chr7_116601357_r 1 -
+chr7 117387812 117387919 CCDS5772.1_cds_2_0_chr7_116602617_r 1 -
+chr7 117397672 117397782 CCDS5772.1_cds_3_0_chr7_116613943_r 1 -
+chr7 117398745 117398802 CCDS5772.1_cds_4_0_chr7_116615016_r 1 -
+chr7 117399808 117399884 CCDS5772.1_cds_5_0_chr7_116616074_r 1 -
+chr7 117400724 117400849 CCDS5772.1_cds_6_0_chr7_116616991_r 1 -
+chr7 117402466 117402602 CCDS5772.1_cds_7_0_chr7_116618731_r 1 -
+chr7 117403442 117403554 CCDS5772.1_cds_8_0_chr7_116619703_r 1 -
+chr7 117438281 117438393 CCDS5772.1_cds_9_0_chr7_116654168_r 1 -
+chr7 117440357 117440480 CCDS5772.1_cds_10_0_chr7_116656242_r 1 -
+chr7 117444948 117445048 CCDS5772.1_cds_11_0_chr7_116660841_r 1 -
+chr7 117445468 117445573 CCDS5772.1_cds_12_0_chr7_116661361_r 1 -
+chr7 117499706 117499759 CCDS5773.1_cds_0_0_chr7_116714100_f 1 +
+chr7 117523820 117523931 CCDS5773.1_cds_1_0_chr7_116738258_f 1 +
+chr7 117528597 117528706 CCDS5773.1_cds_2_0_chr7_116743039_f 1 +
+chr7 117550464 117550680 CCDS5773.1_cds_3_0_chr7_116764904_f 1 +
+chr7 117553829 117553919 CCDS5773.1_cds_4_0_chr7_116768281_f 1 +
+chr7 117554806 117554970 CCDS5773.1_cds_5_0_chr7_116769253_f 1 +
+chr7 117556111 117556237 CCDS5773.1_cds_6_0_chr7_116770553_f 1 +
+chr7 117559659 117559905 CCDS5773.1_cds_7_0_chr7_116774105_f 1 +
+chr7 117561568 117561660 CCDS5773.1_cds_8_0_chr7_116776021_f 1 +
+chr7 117568199 117568382 CCDS5773.1_cds_9_0_chr7_116782646_f 1 +
+chr7 117579005 117579197 CCDS5773.1_cds_10_0_chr7_116793469_f 1 +
+chr7 117609945 117610041 CCDS5773.1_cds_11_0_chr7_116821744_f 1 +
+chr7 117612558 117612645 CCDS5773.1_cds_12_0_chr7_116824358_f 1 +
+chr7 117614292 117615016 CCDS5773.1_cds_13_0_chr7_116825939_f 1 +
+chr7 117617279 117617408 CCDS5773.1_cds_14_0_chr7_116828935_f 1 +
+chr7 117625173 117625211 CCDS5773.1_cds_15_0_chr7_116836831_f 1 +
+chr7 117625879 117626130 CCDS5773.1_cds_16_0_chr7_116837537_f 1 +
+chr7 117628986 117629066 CCDS5773.1_cds_17_0_chr7_116840679_f 1 +
+chr7 117632825 117632976 CCDS5773.1_cds_18_0_chr7_116844524_f 1 +
+chr7 117633887 117634115 CCDS5773.1_cds_19_0_chr7_116845586_f 1 +
+chr7 117636923 117637024 CCDS5773.1_cds_20_0_chr7_116848618_f 1 +
+chr7 117649505 117649753 CCDS5773.1_cds_21_0_chr7_116861527_f 1 +
+chr7 117664146 117664302 CCDS5773.1_cds_22_0_chr7_116876443_f 1 +
+chr7 117674548 117674638 CCDS5773.1_cds_23_0_chr7_116886847_f 1 +
+chr7 117686685 117686858 CCDS5773.1_cds_24_0_chr7_116898693_f 1 +
+chr7 117687456 117687562 CCDS5773.1_cds_25_0_chr7_116899464_f 1 +
+chr7 117688902 117689103 CCDS5773.1_cds_26_0_chr7_116900913_f 1 +
+chr7 117734744 117734996 CCDS5774.1_cds_0_0_chr7_116945542_r 1 -
+chr7 117741224 117741326 CCDS5774.1_cds_1_0_chr7_116952023_r 1 -
+chr7 117743450 117743638 CCDS5774.1_cds_4_0_chr7_116958552_r 1 -
+chr7 117743957 117744164 CCDS5774.1_cds_5_0_chr7_116959057_r 1 -
+chr7 117746996 117747175 CCDS5774.1_cds_6_0_chr7_116962094_r 1 -
+chr7 117753794 117753981 CCDS5774.1_cds_7_0_chr7_116968918_r 1 -
+chr7 117754149 117754302 CCDS5774.1_cds_8_0_chr7_116969274_r 1 -
+chr7 117764699 117764799 CCDS5774.1_cds_9_0_chr7_116979836_r 1 -
+chr7 117764881 117764968 CCDS5774.1_cds_10_0_chr7_116980018_r 1 -
+chr7 117775103 117775183 CCDS5774.1_cds_11_0_chr7_116990560_r 1 -
+chr7 117776423 117776519 CCDS5774.1_cds_12_0_chr7_116991880_r 1 -
+chr7 117779436 117779712 CCDS5774.1_cds_13_0_chr7_116994440_r 1 -
+chr7 117786062 117786180 CCDS5774.1_cds_14_0_chr7_117001064_r 1 -
+chr7 117796458 117796713 CCDS5774.1_cds_15_0_chr7_117011516_r 1 -
+chr7 117799369 117799520 CCDS5774.1_cds_16_0_chr7_117014446_r 1 -
+chr7 117801790 117801890 CCDS5774.1_cds_17_0_chr7_117016867_r 1 -
+chr7 117803186 117803390 CCDS5774.1_cds_18_0_chr7_117018256_r 1 -
+chr7 117810065 117811719 CCDS5774.1_cds_19_0_chr7_117025133_r 1 -
+chr7 117829639 117829865 CCDS5774.1_cds_20_0_chr7_117044770_r 1 -
+chr7 117880732 117880840 CCDS5774.1_cds_21_0_chr7_117095214_r 1 -
+chr7 117893163 117893244 CCDS5774.1_cds_22_0_chr7_117107340_r 1 -
+chr5 133682646 133682808 CCDS4149.1_cds_0_0_chr5_131424299_f 1 +
+chr5 133682906 133682948 CCDS4149.1_cds_1_0_chr5_131424559_f 1 +
+chr5 133684249 133684339 CCDS4149.1_cds_2_0_chr5_131425904_f 1 +
+chr5 133684463 133684505 CCDS4149.1_cds_3_0_chr5_131426118_f 1 +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/5_mult_liftover_unmapped.bed Thu Jul 28 08:22:22 2011 -0400
@@ -0,0 +1,4 @@
+#Deleted in new
+chr7 116953508 116953641 CCDS5774.1_cds_2_0_chr7_116953509_r 0 -
+#Deleted in new
+chr7 116955071 116955135 CCDS5774.1_cds_3_0_chr7_116955072_r 0 -
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cuffcompare_in1_liftover_mapped.bed Thu Jul 28 08:22:22 2011 -0400
@@ -0,0 +1,86 @@
+chr1 Cufflinks transcript 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+chr1 Cufflinks exon 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+chr1 Cufflinks transcript 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053";
+chr1 Cufflinks exon 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; exon_number "1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053";
+chr1 Cufflinks transcript 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139";
+chr1 Cufflinks exon 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; exon_number "1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139";
+chr1 Cufflinks transcript 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701";
+chr1 Cufflinks exon 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; exon_number "1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701";
+chr1 Cufflinks transcript 3128657 3128728 1000 . . gene_id "CUFF.19"; transcript_id "CUFF.19.1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000";
+chr1 Cufflinks exon 3128657 3128728 1000 . . gene_id "CUFF.19"; transcript_id "CUFF.19.1"; exon_number "1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000";
+chr1 Cufflinks transcript 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000";
+chr1 Cufflinks exon 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; exon_number "1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000";
+chr1 Cufflinks transcript 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr1 Cufflinks exon 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; exon_number "1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr1 Cufflinks transcript 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070";
+chr1 Cufflinks exon 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; exon_number "1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070";
+chr1 Cufflinks transcript 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710";
+chr1 Cufflinks exon 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; exon_number "1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710";
+chr1 Cufflinks transcript 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000";
+chr1 Cufflinks exon 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; exon_number "1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000";
+chr1 Cufflinks transcript 3109111 3109241 1000 . . gene_id "CUFF.35"; transcript_id "CUFF.35.1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206";
+chr1 Cufflinks exon 3109111 3109241 1000 . . gene_id "CUFF.35"; transcript_id "CUFF.35.1"; exon_number "1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206";
+chr1 Cufflinks transcript 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302";
+chr1 Cufflinks exon 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; exon_number "1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302";
+chr1 Cufflinks transcript 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3110280 3110358 1000 . . gene_id "CUFF.43"; transcript_id "CUFF.43.1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000";
+chr1 Cufflinks exon 3110280 3110358 1000 . . gene_id "CUFF.43"; transcript_id "CUFF.43.1"; exon_number "1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000";
+chr1 Cufflinks transcript 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529";
+chr1 Cufflinks exon 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; exon_number "1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529";
+chr1 Cufflinks transcript 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3114116 3114142 1000 . . gene_id "CUFF.55"; transcript_id "CUFF.55.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3114116 3114142 1000 . . gene_id "CUFF.55"; transcript_id "CUFF.55.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3201794 3201848 1000 . . gene_id "CUFF.65"; transcript_id "CUFF.65.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks exon 3201794 3201848 1000 . . gene_id "CUFF.65"; transcript_id "CUFF.65.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks transcript 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769";
+chr1 Cufflinks exon 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; exon_number "1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769";
+chr1 Cufflinks transcript 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857";
+chr1 Cufflinks exon 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; exon_number "1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857";
+chr1 Cufflinks transcript 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020";
+chr1 Cufflinks exon 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; exon_number "1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020";
+chr1 Cufflinks transcript 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks exon 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks transcript 3220641 3220667 1000 . . gene_id "CUFF.75"; transcript_id "CUFF.75.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3220641 3220667 1000 . . gene_id "CUFF.75"; transcript_id "CUFF.75.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462";
+chr1 Cufflinks exon 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; exon_number "1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462";
+chr1 Cufflinks transcript 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr1 Cufflinks exon 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr1 Cufflinks transcript 3285858 3285953 1000 . . gene_id "CUFF.83"; transcript_id "CUFF.83.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750";
+chr1 Cufflinks exon 3285858 3285953 1000 . . gene_id "CUFF.83"; transcript_id "CUFF.83.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750";
+chr1 Cufflinks transcript 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041";
+chr1 Cufflinks exon 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; exon_number "1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041";
+chr1 Cufflinks transcript 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824";
+chr1 Cufflinks exon 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; exon_number "1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824";
+chr1 Cufflinks transcript 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3365246 3365284 1000 . . gene_id "CUFF.93"; transcript_id "CUFF.93.1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr1 Cufflinks exon 3365246 3365284 1000 . . gene_id "CUFF.93"; transcript_id "CUFF.93.1"; exon_number "1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr1 Cufflinks transcript 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr1 Cufflinks exon 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; exon_number "1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr1 Cufflinks transcript 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881";
+chr1 Cufflinks exon 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; exon_number "1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881";
+chr1 Cufflinks transcript 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
+chr1 Cufflinks exon 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cuffcompare_in1_liftover_unmapped.bed Thu Jul 28 08:22:22 2011 -0400
@@ -0,0 +1,28 @@
+# Deleted in new
+chr1 Cufflinks transcript 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+# Deleted in new
+chr1 Cufflinks exon 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; exon_number "1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+# Partially deleted in new
+chr1 Cufflinks transcript 3243019 3243079 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+# Partially deleted in new
+chr1 Cufflinks exon 3243019 3243079 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; exon_number "1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+# Partially deleted in new
+chr1 Cufflinks transcript 3242634 3242923 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034";
+# Partially deleted in new
+chr1 Cufflinks exon 3242634 3242923 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034";
+# Partially deleted in new
+chr1 Cufflinks transcript 3191877 3191945 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174";
+# Partially deleted in new
+chr1 Cufflinks exon 3191877 3191945 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; exon_number "1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174";
+# Partially deleted in new
+chr1 Cufflinks transcript 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+# Partially deleted in new
+chr1 Cufflinks exon 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+# Deleted in new
+chr1 Cufflinks transcript 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857";
+# Deleted in new
+chr1 Cufflinks exon 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; exon_number "1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857";
+# Deleted in new
+chr1 Cufflinks transcript 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+# Deleted in new
+chr1 Cufflinks exon 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cuffcompare_in1_mult_liftover_mapped.bed Thu Jul 28 08:22:22 2011 -0400
@@ -0,0 +1,92 @@
+chr1 Cufflinks transcript 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+chr1 Cufflinks exon 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+chr1 Cufflinks transcript 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053";
+chr1 Cufflinks exon 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; exon_number "1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053";
+chr1 Cufflinks transcript 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139";
+chr1 Cufflinks exon 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; exon_number "1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139";
+chr1 Cufflinks transcript 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701";
+chr1 Cufflinks exon 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; exon_number "1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701";
+chr1 Cufflinks transcript 3128657 3128728 1000 . . gene_id "CUFF.19"; transcript_id "CUFF.19.1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000";
+chr1 Cufflinks exon 3128657 3128728 1000 . . gene_id "CUFF.19"; transcript_id "CUFF.19.1"; exon_number "1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000";
+chr1 Cufflinks transcript 3162123 3162179 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+chr1 Cufflinks exon 3162123 3162179 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; exon_number "1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+chr1 Cufflinks transcript 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000";
+chr1 Cufflinks exon 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; exon_number "1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000";
+chr1 Cufflinks transcript 3161752 3162025 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034";
+chr1 Cufflinks exon 3161752 3162025 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034";
+chr1 Cufflinks transcript 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr1 Cufflinks exon 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; exon_number "1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr1 Cufflinks transcript 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070";
+chr1 Cufflinks exon 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; exon_number "1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070";
+chr1 Cufflinks transcript 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710";
+chr1 Cufflinks exon 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; exon_number "1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710";
+chr1 Cufflinks transcript 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000";
+chr1 Cufflinks exon 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; exon_number "1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000";
+chr1 Cufflinks transcript 3109111 3109241 1000 . . gene_id "CUFF.35"; transcript_id "CUFF.35.1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206";
+chr1 Cufflinks exon 3109111 3109241 1000 . . gene_id "CUFF.35"; transcript_id "CUFF.35.1"; exon_number "1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206";
+chr1 Cufflinks transcript 3109449 3109512 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174";
+chr1 Cufflinks exon 3109449 3109512 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; exon_number "1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174";
+chr1 Cufflinks transcript 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302";
+chr1 Cufflinks exon 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; exon_number "1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302";
+chr1 Cufflinks transcript 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3110280 3110358 1000 . . gene_id "CUFF.43"; transcript_id "CUFF.43.1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000";
+chr1 Cufflinks exon 3110280 3110358 1000 . . gene_id "CUFF.43"; transcript_id "CUFF.43.1"; exon_number "1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000";
+chr1 Cufflinks transcript 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529";
+chr1 Cufflinks exon 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; exon_number "1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529";
+chr1 Cufflinks transcript 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3114116 3114142 1000 . . gene_id "CUFF.55"; transcript_id "CUFF.55.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3114116 3114142 1000 . . gene_id "CUFF.55"; transcript_id "CUFF.55.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3201794 3201848 1000 . . gene_id "CUFF.65"; transcript_id "CUFF.65.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks exon 3201794 3201848 1000 . . gene_id "CUFF.65"; transcript_id "CUFF.65.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks transcript 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769";
+chr1 Cufflinks exon 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; exon_number "1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769";
+chr1 Cufflinks transcript 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857";
+chr1 Cufflinks exon 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; exon_number "1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857";
+chr1 Cufflinks transcript 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020";
+chr1 Cufflinks exon 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; exon_number "1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020";
+chr1 Cufflinks transcript 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks exon 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks transcript 3220641 3220667 1000 . . gene_id "CUFF.75"; transcript_id "CUFF.75.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3220641 3220667 1000 . . gene_id "CUFF.75"; transcript_id "CUFF.75.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462";
+chr1 Cufflinks exon 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; exon_number "1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462";
+chr1 Cufflinks transcript 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr1 Cufflinks exon 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr1 Cufflinks transcript 3285858 3285953 1000 . . gene_id "CUFF.83"; transcript_id "CUFF.83.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750";
+chr1 Cufflinks exon 3285858 3285953 1000 . . gene_id "CUFF.83"; transcript_id "CUFF.83.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750";
+chr1 Cufflinks transcript 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041";
+chr1 Cufflinks exon 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; exon_number "1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041";
+chr1 Cufflinks transcript 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824";
+chr1 Cufflinks exon 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; exon_number "1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824";
+chr1 Cufflinks transcript 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3365246 3365284 1000 . . gene_id "CUFF.93"; transcript_id "CUFF.93.1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr1 Cufflinks exon 3365246 3365284 1000 . . gene_id "CUFF.93"; transcript_id "CUFF.93.1"; exon_number "1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr1 Cufflinks transcript 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr1 Cufflinks exon 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; exon_number "1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr1 Cufflinks transcript 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881";
+chr1 Cufflinks exon 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; exon_number "1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881";
+chr1 Cufflinks transcript 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
+chr1 Cufflinks exon 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cuffcompare_in1_mult_liftover_unmapped.bed Thu Jul 28 08:22:22 2011 -0400
@@ -0,0 +1,16 @@
+# Deleted in new
+chr1 Cufflinks transcript 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+# Deleted in new
+chr1 Cufflinks exon 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; exon_number "1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+# Partially deleted in new
+chr1 Cufflinks transcript 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+# Partially deleted in new
+chr1 Cufflinks exon 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+# Deleted in new
+chr1 Cufflinks transcript 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857";
+# Deleted in new
+chr1 Cufflinks exon 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; exon_number "1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857";
+# Deleted in new
+chr1 Cufflinks transcript 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+# Deleted in new
+chr1 Cufflinks exon 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
--- a/test/functional/test_get_data.py Thu Jul 28 08:22:07 2011 -0400
+++ b/test/functional/test_get_data.py Thu Jul 28 08:22:22 2011 -0400
@@ -124,7 +124,7 @@
self.upload_file( '454Score.png' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
- self.check_history_for_string( "The uploaded file contains inappropriate content" )
+ self.check_history_for_string( "454Score.png" )
def test_0055_upload_file( self ):
"""Test uploading lped composite datatype file, manually setting the file format"""
# Logged in as admin_user
--- a/tools/data_source/upload.py Thu Jul 28 08:22:07 2011 -0400
+++ b/tools/data_source/upload.py Thu Jul 28 08:22:22 2011 -0400
@@ -14,9 +14,18 @@
from galaxy.datatypes.images import Pdf
from galaxy.datatypes.registry import Registry
from galaxy import util
+from galaxy.datatypes.util.image_util import *
from galaxy.util.json import *
try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
+
+try:
import bz2
except:
bz2 = None
@@ -51,16 +60,12 @@
return d
def check_bam( file_path ):
return Bam().sniff( file_path )
-
def check_sff( file_path ):
return Sff().sniff( file_path )
-
def check_pdf( file_path ):
return Pdf().sniff( file_path )
-
def check_bigwig( file_path ):
return BigWig().sniff( file_path )
-
def check_bigbed( file_path ):
return BigBed().sniff( file_path )
def parse_outputs( args ):
@@ -102,8 +107,16 @@
dataset.is_multi_byte = util.is_multi_byte( codecs.open( dataset.path, 'r', 'utf-8' ).read( 100 ) )
except UnicodeDecodeError, e:
dataset.is_multi_byte = False
+ # Is dataset an image?
+ image = check_image( dataset.path )
+ if image:
+ if not PIL:
+ image = None
+ # get_image_ext() returns None if not a supported Image type
+ ext = get_image_ext( dataset.path, image )
+ data_type = ext
# Is dataset content multi-byte?
- if dataset.is_multi_byte:
+ elif dataset.is_multi_byte:
data_type = 'multi-byte char'
ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
# Is dataset content supported sniffable binary?
@@ -122,7 +135,7 @@
elif check_bigbed( dataset.path ):
ext = 'bigbed'
data_type = 'bigbed'
- else:
+ if not data_type:
# See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
is_gzipped, is_valid = check_gzip( dataset.path )
if is_gzipped and not is_valid:
@@ -314,7 +327,6 @@
if datatype.dataset_content_needs_grooming( output_path ):
# Groom the dataset content if necessary
datatype.groom_dataset_content( output_path )
-
def add_composite_file( dataset, registry, json_file, output_path, files_path ):
if dataset.composite_files:
os.mkdir( files_path )
--- a/tools/extract/liftOver_wrapper.py Thu Jul 28 08:22:07 2011 -0400
+++ b/tools/extract/liftOver_wrapper.py Thu Jul 28 08:22:22 2011 -0400
@@ -34,15 +34,27 @@
out_handle.close()
return fname
-if len( sys.argv ) != 7:
- stop_err( "USAGE: prog input out_file1 out_file2 input_dbkey output_dbkey minMatch" )
+if len( sys.argv ) < 9:
+ stop_err( "USAGE: prog input out_file1 out_file2 input_dbkey output_dbkey infile_type minMatch multiple <minChainT><minChainQ><minSizeQ>" )
infile = sys.argv[1]
outfile1 = sys.argv[2]
outfile2 = sys.argv[3]
in_dbkey = sys.argv[4]
mapfilepath = sys.argv[5]
-minMatch = sys.argv[6]
+infile_type = sys.argv[6]
+gff_option = ""
+if infile_type == "gff":
+ gff_option = "-gff "
+minMatch = sys.argv[7]
+multiple = int(sys.argv[8])
+multiple_option = ""
+if multiple:
+ minChainT = sys.argv[9]
+ minChainQ = sys.argv[10]
+ minSizeQ = sys.argv[11]
+ multiple_option = " -multiple -minChainT=%s -minChainQ=%s -minSizeQ=%s " %(minChainT,minChainQ,minSizeQ)
+
try:
assert float(minMatch)
except:
@@ -55,7 +67,8 @@
stop_err( "%s mapping is not currently available." % ( mapfilepath.split('/')[-1].split('.')[0] ) )
safe_infile = safe_bed_file(infile)
-cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null"
+cmd_line = "liftOver " + gff_option + "-minMatch=" + str(minMatch) + multiple_option + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null"
+
try:
# have to nest try-except in try-finally to handle 2.4
try:
--- a/tools/extract/liftOver_wrapper.xml Thu Jul 28 08:22:07 2011 -0400
+++ b/tools/extract/liftOver_wrapper.xml Thu Jul 28 08:22:22 2011 -0400
@@ -1,8 +1,21 @@
-<tool id="liftOver1" name="Convert genome coordinates" version="1.0.2">
+<tool id="liftOver1" name="Convert genome coordinates" version="1.0.3"><description> between assemblies and genomes</description>
- <command interpreter="python">liftOver_wrapper.py $input "$out_file1" "$out_file2" $dbkey $to_dbkey $minMatch</command>
+ <command interpreter="python">
+ liftOver_wrapper.py
+ $input
+ "$out_file1"
+ "$out_file2"
+ $dbkey
+ $to_dbkey
+ #if isinstance( $input.datatype, $__app__.datatypes_registry.get_datatype_by_extension('gff').__class__) or isinstance( $input.datatype, $__app__.datatypes_registry.get_datatype_by_extension('gtf').__class__):
+ "gff"
+ #else:
+ "interval"
+ #end if
+ $minMatch ${multiple.choice} ${multiple.minChainT} ${multiple.minChainQ} ${multiple.minSizeQ}
+ </command><inputs>
- <param format="interval" name="input" type="data" label="Convert coordinates of">
+ <param format="interval,gff,gtf" name="input" type="data" label="Convert coordinates of"><validator type="unspecified_build" /><validator type="dataset_metadata_in_file" filename="liftOver.loc" metadata_name="dbkey" metadata_column="0" message="Liftover mappings are currently not available for the specified build." /></param>
@@ -14,7 +27,23 @@
<filter type="data_meta" ref="input" key="dbkey" column="0" /></options></param>
- <param name="minMatch" size="10" type="float" value="0.95" label="Minimum ratio of bases that must remap" />
+ <param name="minMatch" size="10" type="float" value="0.95" label="Minimum ratio of bases that must remap" help="Recommended values: same species = 0.95, different species = 0.10" />
+ <conditional name="multiple">
+ <param name="choice" type="select" label="Allow multiple output regions?" help="Recommended values: same species = No, different species = Yes">
+ <option value="0" selected="true">No</option>
+ <option value="1">Yes</option>
+ </param>
+ <when value="0">
+ <param name="minSizeQ" type="hidden" value="0" />
+ <param name="minChainQ" type="hidden" value="0" />
+ <param name="minChainT" type="hidden" value="0" />
+ </when>
+ <when value="1">
+ <param name="minSizeQ" size="10" type="integer" value="0" label="Minimum matching region size in query" help="Recommended value: set to >= 300 bases for complete transcripts"/>
+ <param name="minChainQ" size="10" type="integer" value="500" label="Minimum chain size in query"/>
+ <param name="minChainT" size="10" type="integer" value="500" label="Minimum chain size in target"/>
+ </when>
+ </conditional></inputs><outputs><data format="input" name="out_file1" label="${tool.name} on ${on_string} [ MAPPED COORDINATES ]">
@@ -37,9 +66,40 @@
<param name="input" value="5.bed" dbkey="hg18" ftype="bed" /><param name="to_dbkey" value="panTro2" /><param name="minMatch" value="0.95" />
+ <param name="choice" value="0" /><output name="out_file1" file="5_liftover_mapped.bed"/><output name="out_file2" file="5_liftover_unmapped.bed"/></test>
+ <test>
+ <param name="input" value="5.bed" dbkey="hg18" ftype="bed" />
+ <param name="to_dbkey" value="panTro2" />
+ <param name="minMatch" value="0.10" />
+ <param name="choice" value="1" />
+ <param name="minSizeQ" value="0" />
+ <param name="minChainQ" value="500" />
+ <param name="minChainT" value="500" />
+ <output name="out_file1" file="5_mult_liftover_mapped.bed"/>
+ <output name="out_file2" file="5_mult_liftover_unmapped.bed"/>
+ </test>
+ <test>
+ <param name="input" value="cuffcompare_in1.gtf" dbkey="hg18" ftype="gtf" />
+ <param name="to_dbkey" value="panTro2" />
+ <param name="minMatch" value="0.95" />
+ <param name="choice" value="0" />
+ <output name="out_file1" file="cuffcompare_in1_liftover_mapped.bed"/>
+ <output name="out_file2" file="cuffcompare_in1_liftover_unmapped.bed"/>
+ </test>
+ <test>
+ <param name="input" value="cuffcompare_in1.gtf" dbkey="hg18" ftype="gtf" />
+ <param name="to_dbkey" value="panTro2" />
+ <param name="minMatch" value="0.10" />
+ <param name="choice" value="1" />
+ <param name="minSizeQ" value="0" />
+ <param name="minChainQ" value="500" />
+ <param name="minChainT" value="500" />
+ <output name="out_file1" file="cuffcompare_in1_mult_liftover_mapped.bed"/>
+ <output name="out_file2" file="cuffcompare_in1_mult_liftover_unmapped.bed"/>
+ </test></tests><help>
.. class:: warningmark
@@ -48,7 +108,7 @@
.. class:: warningmark
-This tool will only work on interval datasets with chromosome in column 1,
+This tool can work with interval, GFF, and GTF datasets. It requires the interval datasets to have chromosome in column 1,
start co-ordinate in column 2 and end co-ordinate in column 3. BED comments
and track and browser lines will be ignored, but if other non-interval lines
are present the tool will return empty output datasets.
@@ -59,7 +119,11 @@
**What it does**
-This tool converts coordinates and annotations between assemblies and genomes. It produces 2 files, one containing all the mapped coordinates and the other containing the unmapped coordinates, if any.
+This tool is based on the LiftOver utility and Chain track from `the UC Santa Cruz Genome Browser`__.
+
+It converts coordinates and annotations between assemblies and genomes. It produces 2 files, one containing all the mapped coordinates and the other containing the unmapped coordinates, if any.
+
+ .. __: http://genome.ucsc.edu/
-----
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/e7214c69ed7d/
changeset: e7214c69ed7d
user: greg
date: 2011-07-27 22:14:15
summary: Add an altered version of Jelle Scholtalbers' enhancement code to support uploading various image data types. I've moved some of the code components around from Jelle's version and added some fixes. The code in the new image_util.py file enables detection of image types without the need to create a new Image() class.
I've also added baseline support for the HDF5 data type in this changeset, but it currently must be included in the unsniffable_binary_formats list (i.e., the h5 format has to be selected manually on upload).
affected #: 7 files (9.8 KB)
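For orientation before the diff, here is a condensed, hedged sketch of how the new pieces fit together at upload time (the file name is hypothetical, and Galaxy's lib directory is assumed to be on the Python path):

    # Minimal sketch mirroring the upload.py hunks in this changeset
    from galaxy.datatypes.checkers import check_image
    from galaxy.datatypes.util.image_util import get_image_ext

    image = check_image( 'uploaded.png' )   # a PIL Image object, or True/False when PIL is absent
    if image:
        if image is True:
            # imghdr fallback: there is no Image object to pass along
            image = None
        ext = get_image_ext( 'uploaded.png', image )   # e.g. 'png' or 'jpg'; None if unsupported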
--- a/datatypes_conf.xml.sample Tue Jul 26 16:14:31 2011 -0400
+++ b/datatypes_conf.xml.sample Wed Jul 27 16:14:15 2011 -0400
@@ -72,9 +72,10 @@
<!-- <display file="gbrowse/gbrowse_gff.xml" inherit="True" /> --></datatype><datatype extension="gff3" type="galaxy.datatypes.interval:Gff3" display_in_upload="true"/>
- <datatype extension="gif" type="galaxy.datatypes.images:Image" mimetype="image/gif"/>
+ <datatype extension="gif" type="galaxy.datatypes.images:Gif" mimetype="image/gif"/><datatype extension="gmaj.zip" type="galaxy.datatypes.images:Gmaj" mimetype="application/zip"/><datatype extension="gtf" type="galaxy.datatypes.interval:Gtf" display_in_upload="true"/>
+ <datatype extension="h5" type="galaxy.datatypes.data:Data" mimetype="application/octet-stream"/><datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/><datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true"><converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
@@ -90,7 +91,21 @@
<datatype extension="picard_interval_list" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/><datatype extension="gatk_interval" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/><datatype extension="gatk_dbsnp" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/>
- <datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/>
+ <datatype extension="jpg" type="galaxy.datatypes.images:Jpg" mimetype="image/jpeg"/>
+ <datatype extension="tiff" type="galaxy.datatypes.images:Tiff" mimetype="image/tiff"/>
+ <datatype extension="bmp" type="galaxy.datatypes.images:Bmp" mimetype="image/bmp"/>
+ <datatype extension="im" type="galaxy.datatypes.images:Im" mimetype="image/im"/>
+ <datatype extension="pcd" type="galaxy.datatypes.images:Pcd" mimetype="image/pcd"/>
+ <datatype extension="pcx" type="galaxy.datatypes.images:Pcx" mimetype="image/pcx"/>
+ <datatype extension="ppm" type="galaxy.datatypes.images:Ppm" mimetype="image/ppm"/>
+ <datatype extension="psd" type="galaxy.datatypes.images:Psd" mimetype="image/psd"/>
+ <datatype extension="xbm" type="galaxy.datatypes.images:Xbm" mimetype="image/xbm"/>
+ <datatype extension="xpm" type="galaxy.datatypes.images:Xpm" mimetype="image/xpm"/>
+ <datatype extension="rgb" type="galaxy.datatypes.images:Rgb" mimetype="image/rgb"/>
+ <datatype extension="pbm" type="galaxy.datatypes.images:Pbm" mimetype="image/pbm"/>
+ <datatype extension="pgm" type="galaxy.datatypes.images:Pgm" mimetype="image/pgm"/>
+ <datatype extension="eps" type="galaxy.datatypes.images:Eps" mimetype="image/eps"/>
+ <datatype extension="rast" type="galaxy.datatypes.images:Rast" mimetype="image/rast"/><datatype extension="laj" type="galaxy.datatypes.images:Laj"/><datatype extension="lav" type="galaxy.datatypes.sequence:Lav" display_in_upload="true"/><datatype extension="maf" type="galaxy.datatypes.sequence:Maf" display_in_upload="true">
@@ -102,7 +117,7 @@
</datatype><datatype extension="pdf" type="galaxy.datatypes.images:Pdf" mimetype="application/pdf"/><datatype extension="pileup" type="galaxy.datatypes.tabular:Pileup" display_in_upload="true" />
- <datatype extension="png" type="galaxy.datatypes.images:Image" mimetype="image/png"/>
+ <datatype extension="png" type="galaxy.datatypes.images:Png" mimetype="image/png"/><datatype extension="qual" type="galaxy.datatypes.qualityscore:QualityScore" /><datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/><datatype extension="qualillumina" type="galaxy.datatypes.qualityscore:QualityScoreIllumina" display_in_upload="true"/>
@@ -116,7 +131,7 @@
<datatype extension="svg" type="galaxy.datatypes.images:Image" mimetype="image/svg+xml"/><datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/><datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/>
- <datatype extension="twobit" type="galaxy.datatypes.binary:TwoBit" mimetype="application/octet-stream" display_in_upload="true"/>
+ <datatype extension="twobit" type="galaxy.datatypes.binary:TwoBit" mimetype="application/octet-stream" display_in_upload="true"/><datatype extension="txt" type="galaxy.datatypes.data:Text" display_in_upload="true"/><datatype extension="memexml" type="galaxy.datatypes.xml:MEMEXml" mimetype="application/xml" display_in_upload="true"/><datatype extension="blastxml" type="galaxy.datatypes.xml:BlastXml" mimetype="application/xml" display_in_upload="true"/>
@@ -304,6 +319,24 @@
<sniffer type="galaxy.datatypes.tabular:Pileup"/><sniffer type="galaxy.datatypes.interval:Interval"/><sniffer type="galaxy.datatypes.tabular:Sam"/>
+ <sniffer type="galaxy.datatypes.images:Jpg"/>
+ <sniffer type="galaxy.datatypes.images:Png"/>
+ <sniffer type="galaxy.datatypes.images:Tiff"/>
+ <sniffer type="galaxy.datatypes.images:Bmp"/>
+ <sniffer type="galaxy.datatypes.images:Gif"/>
+ <sniffer type="galaxy.datatypes.images:Im"/>
+ <sniffer type="galaxy.datatypes.images:Pcd"/>
+ <sniffer type="galaxy.datatypes.images:Pcx"/>
+ <sniffer type="galaxy.datatypes.images:Ppm"/>
+ <sniffer type="galaxy.datatypes.images:Psd"/>
+ <sniffer type="galaxy.datatypes.images:Xbm"/>
+ <sniffer type="galaxy.datatypes.images:Xpm"/>
+ <sniffer type="galaxy.datatypes.images:Rgb"/>
+ <sniffer type="galaxy.datatypes.images:Pbm"/>
+ <sniffer type="galaxy.datatypes.images:Pgm"/>
+ <sniffer type="galaxy.datatypes.images:Xpm"/>
+ <sniffer type="galaxy.datatypes.images:Eps"/>
+ <sniffer type="galaxy.datatypes.images:Rast"/><!--
Keep this commented until the sniff method in the assembly.py
module is fixed to not read the entire file.
--- a/lib/galaxy/datatypes/binary.py Tue Jul 26 16:14:31 2011 -0400
+++ b/lib/galaxy/datatypes/binary.py Wed Jul 27 16:14:15 2011 -0400
@@ -18,7 +18,7 @@
log = logging.getLogger(__name__)
# Currently these supported binary data types must be manually set on upload
-unsniffable_binary_formats = [ 'ab1', 'scf' ]
+unsniffable_binary_formats = [ 'ab1', 'scf', 'h5' ]
class Binary( data.Data ):
"""Binary data"""
@@ -206,7 +206,24 @@
return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
def get_track_type( self ):
return "ReadTrack", {"data": "bai", "index": "summary_tree"}
-
+
+class H5( Binary ):
+ """Class describing an HDF5 file"""
+ file_ext = "h5"
+
+ def set_peek( self, dataset, is_multi_byte=False ):
+ if not dataset.dataset.purged:
+ dataset.peek = "Binary h5 file"
+ dataset.blurb = data.nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+ def display_peek( self, dataset ):
+ try:
+ return dataset.peek
+ except:
+ return "Binary h5 sequence file (%s)" % ( data.nice_size( dataset.get_size() ) )
+
class Scf( Binary ):
"""Class describing an scf binary sequence file"""
file_ext = "scf"
@@ -292,7 +309,6 @@
Binary.__init__( self, **kwd )
self._magic = 0x8789F2EB
self._name = "BigBed"
-
def get_track_type( self ):
return "LineTrack", {"data_standalone": "bigbed"}
@@ -309,14 +325,12 @@
return True
except IOError:
return False
-
def set_peek(self, dataset, is_multi_byte=False):
if not dataset.dataset.purged:
dataset.peek = "Binary TwoBit format nucleotide file"
dataset.blurb = data.nice_size(dataset.get_size())
else:
return super(TwoBit, self).set_peek(dataset, is_multi_byte)
-
def display_peek(self, dataset):
try:
return dataset.peek
--- a/lib/galaxy/datatypes/checkers.py Tue Jul 26 16:14:31 2011 -0400
+++ b/lib/galaxy/datatypes/checkers.py Wed Jul 27 16:14:15 2011 -0400
@@ -1,6 +1,28 @@
import os, gzip, re, gzip, zipfile, binascii, bz2
from galaxy import util
+try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
+
+def check_image( file_path ):
+ if PIL != None:
+ try:
+ im = PIL.open( file_path )
+ except:
+ return False
+ if im:
+ return im
+ return False
+ else:
+ if imghdr.what( file_path ) != None:
+ return True
+ return False
+
def check_html( file_path, chunk=None ):
if chunk is None:
temp = open( file_path, "U" )
--- a/lib/galaxy/datatypes/images.py Tue Jul 26 16:14:31 2011 -0400
+++ b/lib/galaxy/datatypes/images.py Wed Jul 27 16:14:15 2011 -0400
@@ -7,12 +7,31 @@
from galaxy.datatypes.metadata import MetadataElement
from galaxy.datatypes import metadata
from galaxy.datatypes.sniff import *
+from galaxy.datatypes.util.image_util import *
from urllib import urlencode, quote_plus
import zipfile
-import os, subprocess, tempfile
+import os, subprocess, tempfile, imghdr
+
+try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
log = logging.getLogger(__name__)
+# TODO: Uploading image files of various types is supported in Galaxy, but on
+# the main public instance, the display_in_upload is not set for these data
+# types in datatypes_conf.xml because we do not allow image files to be uploaded
+# there. There is currently no API feature that allows uploading files outside
+# of a data library ( where it requires either the upload_paths or upload_directory
+# option to be enabled, which is not the case on the main public instance ). Because
+# of this, we're currently safe, but when the api is enhanced to allow other uploads,
+# we need to ensure that the implementation is such that image files cannot be uploaded
+# to our main public instance.
+
class Image( data.Data ):
"""Class describing an image"""
def set_peek( self, dataset, is_multi_byte=False ):
@@ -22,11 +41,110 @@
else:
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
+ def sniff( self, filename ):
+ # First check if we can use PIL
+ if PIL is not None:
+ try:
+ im = PIL.open( filename )
+ im.close()
+ return True
+ except:
+ return False
+ else:
+ if imghdr.what( filename ) is not None:
+ return True
+ else:
+ return False
+
+class Jpg( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in jpg format."""
+ return check_image_type( filename, ['JPEG'], image )
+
+class Png( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in png format."""
+ return check_image_type( filename, ['PNG'], image )
+
+class Tiff( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in tiff format."""
+ return check_image_type( filename, ['TIFF'], image )
+
+class Bmp( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in bmp format."""
+ return check_image_type( filename, ['BMP'], image )
+
+class Gif( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in gif format."""
+ return check_image_type( filename, ['GIF'], image )
+
+class Im( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in im format."""
+ return check_image_type( filename, ['IM'], image )
+
+class Pcd( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in pcd format."""
+ return check_image_type( filename, ['PCD'], image )
+
+class Pcx( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in pcx format."""
+ return check_image_type( filename, ['PCX'], image )
+
+class Ppm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in ppm format."""
+ return check_image_type( filename, ['PPM'], image )
+
+class Psd( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in psd format."""
+ return check_image_type( filename, ['PSD'], image )
+
+class Xbm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in XBM format."""
+ return check_image_type( filename, ['XBM'], image )
+
+class Xpm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in XPM format."""
+ return check_image_type( filename, ['XPM'], image )
+
+class Rgb( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in RGB format."""
+ return check_image_type( filename, ['RGB'], image )
+
+class Pbm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in PBM format"""
+ return check_image_type( filename, ['PBM'], image )
+
+class Pgm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in PGM format"""
+ return check_image_type( filename, ['PGM'], image )
+
+class Eps( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in eps format."""
+ return check_image_type( filename, ['EPS'], image )
+
+
+class Rast( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in rast format"""
+ return check_image_type( filename, ['RAST'], image )
class Pdf( Image ):
def sniff(self, filename):
- """Determine if the file is in pdf format.
- """
+ """Determine if the file is in pdf format."""
headers = get_headers(filename, None, 1)
try:
if headers[0][0].startswith("%PDF"):
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/util/image_util.py Wed Jul 27 16:14:15 2011 -0400
@@ -0,0 +1,76 @@
+"""
+Provides utilities for working with image files.
+"""
+import logging, imghdr
+
+try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
+
+log = logging.getLogger(__name__)
+
+def image_type( filename, image=None ):
+ format = ''
+ if PIL is not None:
+ if image is not None:
+ format = image.format
+ else:
+ try:
+ im = PIL.open( filename )
+ format = im.format
+ im.close()
+ except:
+ return False
+ else:
+ format = imghdr.what( filename )
+ if format is not None:
+ format = format.upper()
+ else:
+ return False
+ return format
+def check_image_type( filename, types, image=None ):
+ format = image_type( filename, image )
+ # First check if we can use PIL
+ if format in types:
+ return True
+ return False
+def get_image_ext ( file_path, image ):
+ #determine ext
+ format = image_type( file_path, image )
+ if format in [ 'JPG','JPEG' ]:
+ return 'jpg'
+ if format == 'PNG':
+ return 'png'
+ if format == 'TIFF':
+ return 'tiff'
+ if format == 'BMP':
+ return 'bmp'
+ if format == 'GIF':
+ return 'gif'
+ if format == 'IM':
+ return 'im'
+ if format == 'PCD':
+ return 'pcd'
+ if format == 'PCX':
+ return 'pcx'
+ if format == 'PPM':
+ return 'ppm'
+ if format == 'PSD':
+ return 'psd'
+ if format == 'XBM':
+ return 'xbm'
+ if format == 'XPM':
+ return 'xpm'
+ if format == 'RGB':
+ return 'rgb'
+ if format == 'PBM':
+ return 'pbm'
+ if format == 'PGM':
+ return 'pgm'
+ if format == 'EPS':
+ return 'eps'
+ return None
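A quick, hedged usage sketch of the helpers defined above (454Score.png is the image fixture referenced by test_get_data.py in this changeset, assumed here to be a PNG; Galaxy's lib directory is assumed importable):

    from galaxy.datatypes.util.image_util import image_type, check_image_type, get_image_ext

    print image_type( '454Score.png' )                  # 'PNG', or False if the file cannot be read
    print check_image_type( '454Score.png', ['PNG'] )   # True for a PNG file
    print get_image_ext( '454Score.png', None )         # 'png'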
--- a/test/functional/test_get_data.py Tue Jul 26 16:14:31 2011 -0400
+++ b/test/functional/test_get_data.py Wed Jul 27 16:14:15 2011 -0400
@@ -124,7 +124,7 @@
self.upload_file( '454Score.png' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
- self.check_history_for_string( "The uploaded file contains inappropriate content" )
+ self.check_history_for_string( "454Score.png" )
def test_0055_upload_file( self ):
"""Test uploading lped composite datatype file, manually setting the file format"""
# Logged in as admin_user
--- a/tools/data_source/upload.py Tue Jul 26 16:14:31 2011 -0400
+++ b/tools/data_source/upload.py Wed Jul 27 16:14:15 2011 -0400
@@ -14,9 +14,18 @@
from galaxy.datatypes.images import Pdf
from galaxy.datatypes.registry import Registry
from galaxy import util
+from galaxy.datatypes.util.image_util import *
from galaxy.util.json import *
try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
+
+try:
import bz2
except:
bz2 = None
@@ -51,16 +60,12 @@
return d
def check_bam( file_path ):
return Bam().sniff( file_path )
-
def check_sff( file_path ):
return Sff().sniff( file_path )
-
def check_pdf( file_path ):
return Pdf().sniff( file_path )
-
def check_bigwig( file_path ):
return BigWig().sniff( file_path )
-
def check_bigbed( file_path ):
return BigBed().sniff( file_path )
def parse_outputs( args ):
@@ -102,8 +107,16 @@
dataset.is_multi_byte = util.is_multi_byte( codecs.open( dataset.path, 'r', 'utf-8' ).read( 100 ) )
except UnicodeDecodeError, e:
dataset.is_multi_byte = False
+ # Is dataset an image?
+ image = check_image( dataset.path )
+ if image:
+ if not PIL:
+ image = None
+ # get_image_ext() returns None if not a supported Image type
+ ext = get_image_ext( dataset.path, image )
+ data_type = ext
# Is dataset content multi-byte?
- if dataset.is_multi_byte:
+ elif dataset.is_multi_byte:
data_type = 'multi-byte char'
ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
# Is dataset content supported sniffable binary?
@@ -122,7 +135,7 @@
elif check_bigbed( dataset.path ):
ext = 'bigbed'
data_type = 'bigbed'
- else:
+ if not data_type:
# See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
is_gzipped, is_valid = check_gzip( dataset.path )
if is_gzipped and not is_valid:
@@ -314,7 +327,6 @@
if datatype.dataset_content_needs_grooming( output_path ):
# Groom the dataset content if necessary
datatype.groom_dataset_content( output_path )
-
def add_composite_file( dataset, registry, json_file, output_path, files_path ):
if dataset.composite_files:
os.mkdir( files_path )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: natefoo: Closed the feature/ws branch, changes from this branch were merged in 5827:f3a1086fac91.
by Bitbucket 27 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/70b8039a2eef/
changeset: 70b8039a2eef
branch: feature/ws
user: natefoo
date: 2011-07-27 16:48:03
summary: Closed the feature/ws branch, changes from this branch were merged in 5827:f3a1086fac91.
affected #: 0 files (0 bytes)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: guru: Added additional options to the liftOver tool. It now supports GFF and GTF formats and allows multiple output regions and related options.
by Bitbucket 26 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/d306dfb77ea3/
changeset: d306dfb77ea3
user: guru
date: 2011-07-26 22:14:31
summary: Added additional options to the liftOver tool. It now supports GFF and GTF formats and allows multiple output regions and related options.
affected #: 8 files (3.4 KB)
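As a hedged illustration of what the new options amount to, the command line the updated wrapper assembles for a GTF input with multiple output regions enabled looks roughly like this (file and chain names are hypothetical):

    gff_option = "-gff "
    multiple_option = " -multiple -minChainT=500 -minChainQ=500 -minSizeQ=0 "
    cmd_line = "liftOver " + gff_option + "-minMatch=0.10" + multiple_option + \
               "input.gtf hg18ToPanTro2.over.chain mapped.bed unmapped.bed > /dev/null"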
--- a/tools/extract/liftOver_wrapper.py Tue Jul 26 13:30:07 2011 -0400
+++ b/tools/extract/liftOver_wrapper.py Tue Jul 26 16:14:31 2011 -0400
@@ -34,15 +34,27 @@
out_handle.close()
return fname
-if len( sys.argv ) != 7:
- stop_err( "USAGE: prog input out_file1 out_file2 input_dbkey output_dbkey minMatch" )
+if len( sys.argv ) < 9:
+ stop_err( "USAGE: prog input out_file1 out_file2 input_dbkey output_dbkey infile_type minMatch multiple <minChainT><minChainQ><minSizeQ>" )
infile = sys.argv[1]
outfile1 = sys.argv[2]
outfile2 = sys.argv[3]
in_dbkey = sys.argv[4]
mapfilepath = sys.argv[5]
-minMatch = sys.argv[6]
+infile_type = sys.argv[6]
+gff_option = ""
+if infile_type == "gff":
+ gff_option = "-gff "
+minMatch = sys.argv[7]
+multiple = int(sys.argv[8])
+multiple_option = ""
+if multiple:
+ minChainT = sys.argv[9]
+ minChainQ = sys.argv[10]
+ minSizeQ = sys.argv[11]
+ multiple_option = " -multiple -minChainT=%s -minChainQ=%s -minSizeQ=%s " %(minChainT,minChainQ,minSizeQ)
+
try:
assert float(minMatch)
except:
@@ -55,7 +67,8 @@
stop_err( "%s mapping is not currently available." % ( mapfilepath.split('/')[-1].split('.')[0] ) )
safe_infile = safe_bed_file(infile)
-cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null"
+cmd_line = "liftOver " + gff_option + "-minMatch=" + str(minMatch) + multiple_option + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null"
+
try:
# have to nest try-except in try-finally to handle 2.4
try:
--- a/tools/extract/liftOver_wrapper.xml Tue Jul 26 13:30:07 2011 -0400
+++ b/tools/extract/liftOver_wrapper.xml Tue Jul 26 16:14:31 2011 -0400
@@ -1,8 +1,21 @@
-<tool id="liftOver1" name="Convert genome coordinates" version="1.0.2">
+<tool id="liftOver1" name="Convert genome coordinates" version="1.0.3"><description> between assemblies and genomes</description>
- <command interpreter="python">liftOver_wrapper.py $input "$out_file1" "$out_file2" $dbkey $to_dbkey $minMatch</command>
+ <command interpreter="python">
+ liftOver_wrapper.py
+ $input
+ "$out_file1"
+ "$out_file2"
+ $dbkey
+ $to_dbkey
+ #if isinstance( $input.datatype, $__app__.datatypes_registry.get_datatype_by_extension('gff').__class__) or isinstance( $input.datatype, $__app__.datatypes_registry.get_datatype_by_extension('gtf').__class__):
+ "gff"
+ #else:
+ "interval"
+ #end if
+ $minMatch ${multiple.choice} ${multiple.minChainT} ${multiple.minChainQ} ${multiple.minSizeQ}
+ </command><inputs>
- <param format="interval" name="input" type="data" label="Convert coordinates of">
+ <param format="interval,gff,gtf" name="input" type="data" label="Convert coordinates of"><validator type="unspecified_build" /><validator type="dataset_metadata_in_file" filename="liftOver.loc" metadata_name="dbkey" metadata_column="0" message="Liftover mappings are currently not available for the specified build." /></param>
@@ -14,7 +27,23 @@
<filter type="data_meta" ref="input" key="dbkey" column="0" /></options></param>
- <param name="minMatch" size="10" type="float" value="0.95" label="Minimum ratio of bases that must remap" />
+ <param name="minMatch" size="10" type="float" value="0.95" label="Minimum ratio of bases that must remap" help="Recommended values: same species = 0.95, different species = 0.10" />
+ <conditional name="multiple">
+ <param name="choice" type="select" label="Allow multiple output regions?" help="Recommended values: same species = No, different species = Yes">
+ <option value="0" selected="true">No</option>
+ <option value="1">Yes</option>
+ </param>
+ <when value="0">
+ <param name="minSizeQ" type="hidden" value="0" />
+ <param name="minChainQ" type="hidden" value="0" />
+ <param name="minChainT" type="hidden" value="0" />
+ </when>
+ <when value="1">
+ <param name="minSizeQ" size="10" type="integer" value="0" label="Minimum matching region size in query" help="Recommended value: set to >= 300 bases for complete transcripts"/>
+ <param name="minChainQ" size="10" type="integer" value="500" label="Minimum chain size in query"/>
+ <param name="minChainT" size="10" type="integer" value="500" label="Minimum chain size in target"/>
+ </when>
+ </conditional></inputs><outputs><data format="input" name="out_file1" label="${tool.name} on ${on_string} [ MAPPED COORDINATES ]">
@@ -37,9 +66,40 @@
<param name="input" value="5.bed" dbkey="hg18" ftype="bed" /><param name="to_dbkey" value="panTro2" /><param name="minMatch" value="0.95" />
+ <param name="choice" value="0" /><output name="out_file1" file="5_liftover_mapped.bed"/><output name="out_file2" file="5_liftover_unmapped.bed"/></test>
+ <test>
+ <param name="input" value="5.bed" dbkey="hg18" ftype="bed" />
+ <param name="to_dbkey" value="panTro2" />
+ <param name="minMatch" value="0.10" />
+ <param name="choice" value="1" />
+ <param name="minSizeQ" value="0" />
+ <param name="minChainQ" value="500" />
+ <param name="minChainT" value="500" />
+ <output name="out_file1" file="5_mult_liftover_mapped.bed"/>
+ <output name="out_file2" file="5_mult_liftover_unmapped.bed"/>
+ </test>
+ <test>
+ <param name="input" value="cuffcompare_in1.gtf" dbkey="hg18" ftype="gtf" />
+ <param name="to_dbkey" value="panTro2" />
+ <param name="minMatch" value="0.95" />
+ <param name="choice" value="0" />
+ <output name="out_file1" file="cuffcompare_in1_liftover_mapped.bed"/>
+ <output name="out_file2" file="cuffcompare_in1_liftover_unmapped.bed"/>
+ </test>
+ <test>
+ <param name="input" value="cuffcompare_in1.gtf" dbkey="hg18" ftype="gtf" />
+ <param name="to_dbkey" value="panTro2" />
+ <param name="minMatch" value="0.10" />
+ <param name="choice" value="1" />
+ <param name="minSizeQ" value="0" />
+ <param name="minChainQ" value="500" />
+ <param name="minChainT" value="500" />
+ <output name="out_file1" file="cuffcompare_in1_mult_liftover_mapped.bed"/>
+ <output name="out_file2" file="cuffcompare_in1_mult_liftover_unmapped.bed"/>
+ </test></tests><help>
.. class:: warningmark
@@ -48,7 +108,7 @@
.. class:: warningmark
-This tool will only work on interval datasets with chromosome in column 1,
+This tool can work with interval, GFF, and GTF datasets. It requires the interval datasets to have chromosome in column 1,
start co-ordinate in column 2 and end co-ordinate in column 3. BED comments
and track and browser lines will be ignored, but if other non-interval lines
are present the tool will return empty output datasets.
@@ -59,7 +119,11 @@
**What it does**
-This tool converts coordinates and annotations between assemblies and genomes. It produces 2 files, one containing all the mapped coordinates and the other containing the unmapped coordinates, if any.
+This tool is based on the LiftOver utility and Chain track from `the UC Santa Cruz Genome Browser`__.
+
+It converts coordinates and annotations between assemblies and genomes. It produces 2 files, one containing all the mapped coordinates and the other containing the unmapped coordinates, if any.
+
+ .. __: http://genome.ucsc.edu/
-----
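As context for the "What it does" help above: the wrapper shells out to the UCSC liftOver binary. A minimal sketch of the equivalent direct invocation follows; it is not part of the changeset, the helper function and file names are hypothetical, and only the options visible in the wrapper's cmd_line are used.

import os
import subprocess

def run_liftover(infile, chain_file, mapped_out, unmapped_out,
                 min_match=0.95, multiple=False,
                 min_chain_t=0, min_chain_q=0, min_size_q=0):
    # Mirror the cmd_line assembled in liftOver_wrapper.py above.
    cmd = ["liftOver", "-minMatch=%s" % min_match]
    if multiple:
        # The "multiple output regions" options added in this changeset.
        cmd += ["-multiple",
                "-minChainT=%s" % min_chain_t,
                "-minChainQ=%s" % min_chain_q,
                "-minSizeQ=%s" % min_size_q]
    cmd += [infile, chain_file, mapped_out, unmapped_out]
    # Discard stdout, as the wrapper's "> /dev/null" redirection does.
    subprocess.check_call(cmd, stdout=open(os.devnull, "w"))

# Hypothetical usage, converting hg18 intervals to panTro2:
# run_liftover("regions_hg18.bed", "hg18ToPanTro2.over.chain",
#              "mapped.bed", "unmapped.bed", min_match=0.10, multiple=True)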
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Move a number of dataset-related methods from the root controller to the dataset controller and encode their ids. Also add user disk usage accounting.
by Bitbucket 26 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/3d2fd67bc0ca/
changeset: 3d2fd67bc0ca
user: natefoo
date: 2011-07-26 19:30:07
summary: Move a number of dataset-related methods from the root controller to the dataset controller and encode their ids. Also add user disk usage accounting.
affected #: 13 files (38.1 KB)
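The disk usage accounting added in this changeset amounts to: when a job's outputs are collected, their total on-disk size is added to the owning user's total_disk_usage (a property introduced on the User model below). A minimal sketch of that bookkeeping, with the function name chosen here purely for illustration:

def account_job_outputs(job):
    # Sum the size of every output dataset; set_total_size() includes extra files.
    total_bytes = 0
    for dataset_assoc in job.output_datasets + job.output_library_datasets:
        dataset_assoc.dataset.dataset.set_total_size()
        total_bytes += dataset_assoc.dataset.dataset.get_total_size()
    # Anonymous jobs have no user; their usage is tracked on the session instead.
    if job.user:
        job.user.total_disk_usage += total_bytes
    return total_bytes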
--- a/lib/galaxy/jobs/__init__.py Mon Jul 25 20:55:42 2011 -0400
+++ b/lib/galaxy/jobs/__init__.py Tue Jul 26 13:30:07 2011 -0400
@@ -646,9 +646,14 @@
tool=self.tool, stdout=stdout, stderr=stderr )
job.command_line = self.command_line
+ bytes = 0
# Once datasets are collected, set the total dataset size (includes extra files)
for dataset_assoc in job.output_datasets + job.output_library_datasets:
dataset_assoc.dataset.dataset.set_total_size()
+ bytes += dataset_assoc.dataset.dataset.get_total_size()
+
+ if job.user:
+ job.user.total_disk_usage += bytes
# fix permissions
for path in [ dp.real_path for dp in self.get_output_fnames() ]:
--- a/lib/galaxy/model/__init__.py Mon Jul 25 20:55:42 2011 -0400
+++ b/lib/galaxy/model/__init__.py Tue Jul 26 13:30:07 2011 -0400
@@ -70,6 +70,27 @@
if role not in roles:
roles.append( role )
return roles
+ def get_disk_usage( self, nice_size=False ):
+ rval = 0
+ if self.disk_usage is not None:
+ rval = self.disk_usage
+ if nice_size:
+ rval = galaxy.datatypes.data.nice_size( rval )
+ return rval
+ def set_disk_usage( self, bytes ):
+ self.disk_usage = bytes
+ total_disk_usage = property( get_disk_usage, set_disk_usage )
+ def calculate_disk_usage( self ):
+ dataset_ids = []
+ total = 0
+ # this can be a huge number and can run out of memory, so we avoid the mappers
+ db_session = object_session( self )
+ for history in db_session.query( History ).enable_eagerloads( False ).filter_by( user_id=self.id ).yield_per( 1000 ):
+ for hda in db_session.query( HistoryDatasetAssociation ).enable_eagerloads( False ).filter_by( history_id=history.id, purged=False ).yield_per( 1000 ):
+ if not hda.dataset.id in dataset_ids and not hda.dataset.purged and not hda.dataset.library_associations:
+ dataset_ids.append( hda.dataset.id )
+ total += hda.dataset.get_total_size()
+ return total
class Job( object ):
"""
@@ -349,7 +370,7 @@
self.galaxy_sessions.append( GalaxySessionToHistoryAssociation( galaxy_session, self ) )
else:
self.galaxy_sessions.append( association )
- def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid = True ):
+ def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid=True, quota=True ):
if isinstance( dataset, Dataset ):
dataset = HistoryDatasetAssociation(dataset=dataset)
object_session( self ).add( dataset )
@@ -367,6 +388,8 @@
else:
if set_hid:
dataset.hid = self._next_hid()
+ if quota and self.user:
+ self.user.total_disk_usage += dataset.quota_amount( self.user )
dataset.history = self
if genome_build not in [None, '?']:
self.genome_build = genome_build
@@ -378,6 +401,9 @@
name = self.name
if not target_user:
target_user = self.user
+ quota = True
+ if target_user == self.user:
+ quota = False
new_history = History( name=name, user=target_user )
db_session = object_session( self )
db_session.add( new_history )
@@ -393,8 +419,8 @@
hdas = self.active_datasets
for hda in hdas:
# Copy HDA.
- new_hda = hda.copy( copy_children=True, target_history=new_history )
- new_history.add_dataset( new_hda, set_hid = False )
+ new_hda = hda.copy( copy_children=True )
+ new_history.add_dataset( new_hda, set_hid = False, quota=quota )
db_session.add( new_hda )
db_session.flush()
# Copy annotation.
@@ -741,6 +767,10 @@
def set_size( self ):
"""Returns the size of the data on disk"""
return self.dataset.set_size()
+ def get_total_size( self ):
+ return self.dataset.get_total_size()
+ def set_total_size( self ):
+ return self.dataset.set_total_size()
def has_data( self ):
"""Detects whether there is any data"""
return self.dataset.has_data()
@@ -922,7 +952,7 @@
self.history = history
self.copied_from_history_dataset_association = copied_from_history_dataset_association
self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association
- def copy( self, copy_children = False, parent_id = None, target_history = None ):
+ def copy( self, copy_children = False, parent_id = None ):
hda = HistoryDatasetAssociation( hid=self.hid,
name=self.name,
info=self.info,
@@ -934,8 +964,7 @@
visible=self.visible,
deleted=self.deleted,
parent_id=parent_id,
- copied_from_history_dataset_association=self,
- history = target_history )
+ copied_from_history_dataset_association=self )
object_session( self ).add( hda )
object_session( self ).flush()
hda.set_size()
@@ -1017,6 +1046,26 @@
return hda_name
def get_access_roles( self, trans ):
return self.dataset.get_access_roles( trans )
+ def quota_amount( self, user ):
+ """
+ If the user has multiple instances of this dataset, it will not affect their disk usage statistic.
+ """
+ rval = 0
+ # Anon users are handled just by their single history size.
+ if not user:
+ return rval
+ # Gets an HDA and its children's disk usage, if the user does not already have an association of the same dataset
+ if not self.dataset.library_associations and not self.purged and not self.dataset.purged:
+ for hda in self.dataset.history_associations:
+ if hda.id == self.id:
+ continue
+ if not hda.purged and hda.history and hda.history.user and hda.history.user == user:
+ break
+ else:
+ rval += self.get_total_size()
+ for child in self.children:
+ rval += child.get_disk_usage( user )
+ return rval
class HistoryDatasetAssociationDisplayAtAuthorization( object ):
def __init__( self, hda=None, user=None, site=None ):
@@ -1467,6 +1516,13 @@
self.histories.append( GalaxySessionToHistoryAssociation( self, history ) )
else:
self.histories.append( association )
+ def get_disk_usage( self ):
+ if self.disk_usage is None:
+ return 0
+ return self.disk_usage
+ def set_disk_usage( self, bytes ):
+ self.disk_usage = bytes
+ total_disk_usage = property( get_disk_usage, set_disk_usage )
class GalaxySessionToHistoryAssociation( object ):
def __init__( self, galaxy_session, history ):
--- a/lib/galaxy/web/controllers/dataset.py Mon Jul 25 20:55:42 2011 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Tue Jul 26 13:30:07 2011 -0400
@@ -9,6 +9,7 @@
from galaxy.util import inflector
from galaxy.model.item_attrs import *
from galaxy.model import LibraryDatasetDatasetAssociation, HistoryDatasetAssociation
+from galaxy.web.framework.helpers import to_unicode
import pkg_resources;
pkg_resources.require( "Paste" )
@@ -383,6 +384,188 @@
return trans.stream_template_mako( "/dataset/large_file.mako",
truncated_data = open( data.file_name ).read(max_peek_size),
data = data )
+
+ @web.expose
+ def edit(self, trans, dataset_id=None, filename=None, hid=None, **kwd):
+ """Allows user to modify parameters of an HDA."""
+ message = None
+ status = 'done'
+ refresh_frames = []
+ error = False
+ def __ok_to_edit_metadata( dataset_id ):
+ #prevent modifying metadata when dataset is queued or running as input/output
+ #This code could be more efficient, i.e. by using mappers, but to prevent slowing down loading a History panel, we'll leave the code here for now
+ for job_to_dataset_association in trans.sa_session.query( self.app.model.JobToInputDatasetAssociation ) \
+ .filter_by( dataset_id=dataset_id ) \
+ .all() \
+ + trans.sa_session.query( self.app.model.JobToOutputDatasetAssociation ) \
+ .filter_by( dataset_id=dataset_id ) \
+ .all():
+ if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]:
+ return False
+ return True
+ if hid is not None:
+ history = trans.get_history()
+ # TODO: hid handling
+ data = history.datasets[ int( hid ) - 1 ]
+ id = None
+ elif dataset_id is not None:
+ id = trans.app.security.decode_id( dataset_id )
+ data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ else:
+ trans.log_event( "dataset_id and hid are both None, cannot load a dataset to edit" )
+ return trans.show_error_message( "You must provide a history dataset id to edit" )
+ if data is None:
+ trans.log_event( "Problem retrieving dataset (encoded: %s, decoded: %s) with history id %s." % ( str( dataset_id ), str( id ), str( hid ) ) )
+ return trans.show_error_message( "History dataset id is invalid" )
+ if dataset_id is not None and data.history.user is not None and data.history.user != trans.user:
+ trans.log_event( "User attempted to edit an HDA they do not own (encoded: %s, decoded: %s)" % ( dataset_id, id ) )
+ # Do not reveal the dataset's existence
+ return trans.show_error_message( "History dataset id is invalid" )
+ current_user_roles = trans.get_current_user_roles()
+ if data.history.user and not data.dataset.has_manage_permissions_roles( trans ):
+ # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time,
+ # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS
+ # permission. In this case, we'll reset this permission to the hda user's private role.
+ manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
+ permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] }
+ trans.app.security_agent.set_dataset_permission( data.dataset, permissions )
+ if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
+ if data.state == trans.model.Dataset.states.UPLOAD:
+ return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." )
+ params = util.Params( kwd, sanitize=False )
+ if params.change:
+ # The user clicked the Save button on the 'Change data type' form
+ if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
+ #prevent modifying datatype when dataset is queued or running as input/output
+ if not __ok_to_edit_metadata( data.id ):
+ message = "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them."
+ error = True
+ else:
+ trans.app.datatypes_registry.change_datatype( data, params.datatype, set_meta = not trans.app.config.set_metadata_externally )
+ trans.sa_session.flush()
+ if trans.app.config.set_metadata_externally:
+ trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior
+ message = "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype )
+ refresh_frames=['history']
+ else:
+ message = "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype )
+ error = True
+ elif params.save:
+ # The user clicked the Save button on the 'Edit Attributes' form
+ data.name = params.name
+ data.info = params.info
+ message = ''
+ if __ok_to_edit_metadata( data.id ):
+ # The following for loop will save all metadata_spec items
+ for name, spec in data.datatype.metadata_spec.items():
+ if spec.get("readonly"):
+ continue
+ optional = params.get("is_"+name, None)
+ other = params.get("or_"+name, None)
+ if optional and optional == 'true':
+ # optional element... == 'true' actually means it is NOT checked (and therefore omitted)
+ setattr(data.metadata, name, None)
+ else:
+ if other:
+ setattr( data.metadata, name, other )
+ else:
+ setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
+ data.datatype.after_setting_metadata( data )
+ # Sanitize annotation before adding it.
+ if params.annotation:
+ annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
+ self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
+ # If setting metadata previously failed and all required elements have now been set, clear the failed state.
+ if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
+ data._state = None
+ trans.sa_session.flush()
+ message = "Attributes updated%s" % message
+ refresh_frames=['history']
+ else:
+ trans.sa_session.flush()
+ message = "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata."
+ status = "warning"
+ refresh_frames=['history']
+ elif params.detect:
+ # The user clicked the Auto-detect button on the 'Edit Attributes' form
+ #prevent modifying metadata when dataset is queued or running as input/output
+ if not __ok_to_edit_metadata( data.id ):
+ message = "This dataset is currently being used as input or output. You cannot change metadata until the jobs have completed or you have canceled them."
+ error = True
+ else:
+ for name, spec in data.metadata.spec.items():
+ # We need to be careful about the attributes we are resetting
+ if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
+ if spec.get( 'default' ):
+ setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
+ if trans.app.config.set_metadata_externally:
+ message = 'Attributes have been queued to be updated'
+ trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
+ else:
+ message = 'Attributes updated'
+ data.set_meta()
+ data.datatype.after_setting_metadata( data )
+ trans.sa_session.flush()
+ refresh_frames=['history']
+ elif params.convert_data:
+ target_type = kwd.get("target_type", None)
+ if target_type:
+ message = data.datatype.convert_dataset(trans, data, target_type)
+ refresh_frames=['history']
+ elif params.update_roles_button:
+ if not trans.user:
+ return trans.show_error_message( "You must be logged in if you want to change permissions." )
+ if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
+ access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action )
+ manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
+ # The user associated the DATASET_ACCESS permission on the dataset with 1 or more roles. We
+ # need to ensure that they did not associate roles that would cause accessibility problems.
+ permissions, in_roles, error, message = \
+ trans.app.security_agent.derive_roles_from_access( trans, data.dataset.id, 'root', **kwd )
+ if error:
+ # Keep the original role associations for the DATASET_ACCESS permission on the dataset.
+ permissions[ access_action ] = data.dataset.get_access_roles( trans )
+ status = 'error'
+ else:
+ error = trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
+ if error:
+ message += error
+ status = 'error'
+ else:
+ message = 'Your changes completed successfully.'
+ trans.sa_session.refresh( data.dataset )
+ else:
+ message = "You are not authorized to change this dataset's permissions"
+ error = True
+ else:
+ if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey:
+ # Copy dbkey into metadata, for backwards compatibility
+ # This looks like it does nothing, but getting the dbkey
+ # returns the metadata dbkey unless it is None, in which
+ # case it resorts to the old dbkey. Setting the dbkey
+ # sets it properly in the metadata
+ #### This is likely no longer required, since the dbkey exists entirely within metadata (the old_dbkey field is gone): REMOVE ME?
+ data.metadata.dbkey = data.dbkey
+ # let's not overwrite the imported datatypes module with the variable datatypes?
+ # the built-in 'id' is overwritten in lots of places as well
+ ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
+ ldatatypes.sort()
+ all_roles = trans.app.security_agent.get_legitimate_roles( trans, data.dataset, 'root' )
+ if error:
+ status = 'error'
+ return trans.fill_template( "/dataset/edit_attributes.mako",
+ data=data,
+ data_annotation=self.get_item_annotation_str( trans.sa_session, trans.user, data ),
+ datatypes=ldatatypes,
+ current_user_roles=current_user_roles,
+ all_roles=all_roles,
+ message=message,
+ status=status,
+ dataset_id=dataset_id,
+ refresh_frames=refresh_frames )
+ else:
+ return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( dataset_id ) )
@web.expose
@web.require_login( "see all available datasets" )
@@ -654,111 +837,190 @@
return trans.fill_template_mako( "dataset/display_application/display.mako", msg = msg, display_app = display_app, display_link = display_link, refresh = refresh )
return trans.show_error_message( 'You do not have permission to view this dataset at an external display application.' )
- def _undelete( self, trans, id ):
+ def _delete( self, trans, dataset_id ):
+ message = None
+ status = 'done'
+ id = None
try:
- id = int( id )
- except ValueError, e:
- return False
- history = trans.get_history()
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- if data and data.undeletable:
+ id = trans.app.security.decode_id( dataset_id )
+ history = trans.get_history()
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ assert hda, 'Invalid HDA: %s' % id
# Walk up parent datasets to find the containing history
- topmost_parent = data
+ topmost_parent = hda
+ while topmost_parent.parent:
+ topmost_parent = topmost_parent.parent
+ assert topmost_parent in trans.history.datasets, "Data does not belong to current history"
+ # Mark deleted and cleanup
+ hda.mark_deleted()
+ hda.clear_associated_files()
+ trans.log_event( "Dataset id %s marked as deleted" % str(id) )
+ if hda.parent_id is None and len( hda.creating_job_associations ) > 0:
+ # Mark associated job for deletion
+ job = hda.creating_job_associations[0].job
+ if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]:
+ # Are *all* of the job's other output datasets deleted?
+ if job.check_if_output_datasets_deleted():
+ job.mark_deleted( self.app.config.get_bool( 'enable_job_running', True ),
+ self.app.config.get_bool( 'track_jobs_in_database', False ) )
+ self.app.job_manager.job_stop_queue.put( job.id )
+ trans.sa_session.flush()
+ except Exception, e:
+ msg = 'HDA deletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
+ log.exception( msg )
+ trans.log_event( msg )
+ message = 'Dataset deletion failed'
+ status = 'error'
+ return ( message, status )
+
+ def _undelete( self, trans, dataset_id ):
+ message = None
+ status = 'done'
+ id = None
+ try:
+ id = trans.app.security.decode_id( dataset_id )
+ history = trans.get_history()
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ assert hda and hda.undeletable, 'Invalid HDA: %s' % id
+ # Walk up parent datasets to find the containing history
+ topmost_parent = hda
while topmost_parent.parent:
topmost_parent = topmost_parent.parent
assert topmost_parent in history.datasets, "Data does not belong to current history"
# Mark undeleted
- data.mark_undeleted()
+ hda.mark_undeleted()
trans.sa_session.flush()
trans.log_event( "Dataset id %s has been undeleted" % str(id) )
- return True
- return False
+ except Exception, e:
+ msg = 'HDA undeletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
+ log.exception( msg )
+ trans.log_event( msg )
+ message = 'Dataset undeletion failed'
+ status = 'error'
+ return ( message, status )
- def _unhide( self, trans, id ):
+ def _unhide( self, trans, dataset_id ):
try:
- id = int( id )
- except ValueError, e:
+ id = trans.app.security.decode_id( dataset_id )
+ except:
return False
history = trans.get_history()
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- if data:
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ if hda:
# Walk up parent datasets to find the containing history
- topmost_parent = data
+ topmost_parent = hda
while topmost_parent.parent:
topmost_parent = topmost_parent.parent
assert topmost_parent in history.datasets, "Data does not belong to current history"
# Mark undeleted
- data.mark_unhidden()
+ hda.mark_unhidden()
trans.sa_session.flush()
trans.log_event( "Dataset id %s has been unhidden" % str(id) )
return True
return False
- def _purge( self, trans, id ):
+ def _purge( self, trans, dataset_id ):
+ message = None
+ status = 'done'
try:
- id = int( id )
- except ValueError, e:
- return False
- hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- # Invalid HDA or not deleted
- if not hda or not hda.history or not hda.deleted:
- return False
- # If the user is anonymous, make sure the HDA is owned by the current session.
- if not hda.history.user and trans.galaxy_session.id not in [ s.id for s in hda.history.galaxy_sessions ]:
- return False
- # If the user is known, make sure the HDA is owned by the current user.
- if hda.history.user and hda.history.user != trans.user:
- return False
- # HDA is purgeable
- hda.purged = True
- trans.sa_session.add( hda )
- trans.log_event( "HDA id %s has been purged" % hda.id )
- # Don't delete anything if there are active HDAs or any LDDAs, even if
- # the LDDAs are deleted. Let the cleanup scripts get it in the latter
- # case.
- if hda.dataset.user_can_purge:
- try:
- hda.dataset.full_delete()
- trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) )
- trans.sa_session.add( hda.dataset )
- except:
- log.exception( 'Unable to purge dataset (%s) on purge of hda (%s):' % ( hda.dataset.id, hda.id ) )
- trans.sa_session.flush()
- return True
+ id = trans.app.security.decode_id( dataset_id )
+ history = trans.get_history()
+ user = trans.get_user()
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ # Invalid HDA
+ assert hda, 'Invalid history dataset ID'
+ # Walk up parent datasets to find the containing history
+ topmost_parent = hda
+ while topmost_parent.parent:
+ topmost_parent = topmost_parent.parent
+ assert topmost_parent in history.datasets, "Data does not belong to current history"
+ # If the user is anonymous, make sure the HDA is owned by the current session.
+ if not user:
+ assert trans.galaxy_session.id in [ s.id for s in hda.history.galaxy_sessions ], 'Invalid history dataset ID'
+ # If the user is known, make sure the HDA is owned by the current user.
+ else:
+ assert topmost_parent.history.user == trans.user, 'Invalid history dataset ID'
+ # HDA is not deleted
+ assert hda.deleted, 'History dataset is not marked as deleted'
+ # HDA is purgeable
+ # Decrease disk usage first
+ if user:
+ user.total_disk_usage -= hda.quota_amount( user )
+ # Mark purged
+ hda.purged = True
+ trans.sa_session.add( hda )
+ trans.log_event( "HDA id %s has been purged" % hda.id )
+ # Don't delete anything if there are active HDAs or any LDDAs, even if
+ # the LDDAs are deleted. Let the cleanup scripts get it in the latter
+ # case.
+ if hda.dataset.user_can_purge:
+ try:
+ hda.dataset.full_delete()
+ trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) )
+ trans.sa_session.add( hda.dataset )
+ except:
+ log.exception( 'Unable to purge dataset (%s) on purge of HDA (%s):' % ( hda.dataset.id, hda.id ) )
+ trans.sa_session.flush()
+ except Exception, e:
+ msg = 'HDA purge failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
+ log.exception( msg )
+ trans.log_event( msg )
+ message = 'Dataset removal from disk failed'
+ status = 'error'
+ return ( message, status )
@web.expose
- def undelete( self, trans, id ):
- if self._undelete( trans, id ):
- return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True ) )
- raise Exception( "Error undeleting" )
+ def delete( self, trans, dataset_id, filename, show_deleted_on_refresh = False ):
+ message, status = self._delete( trans, dataset_id )
+ return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
@web.expose
- def unhide( self, trans, id ):
- if self._unhide( trans, id ):
+ def delete_async( self, trans, dataset_id, filename ):
+ message, status = self._delete( trans, dataset_id )
+ if status == 'done':
+ return "OK"
+ else:
+ raise Exception( message )
+
+ @web.expose
+ def undelete( self, trans, dataset_id, filename ):
+ message, status = self._undelete( trans, dataset_id )
+ return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True, message=message, status=status ) )
+
+ @web.expose
+ def undelete_async( self, trans, dataset_id, filename ):
+ message, status = self._undelete( trans, dataset_id )
+ if status == 'done':
+ return "OK"
+ else:
+ raise Exception( message )
+
+ @web.expose
+ def unhide( self, trans, dataset_id, filename ):
+ if self._unhide( trans, dataset_id ):
return trans.response.send_redirect( web.url_for( controller='root', action='history', show_hidden = True ) )
raise Exception( "Error unhiding" )
@web.expose
- def undelete_async( self, trans, id ):
- if self._undelete( trans, id ):
- return "OK"
- raise Exception( "Error undeleting" )
-
- @web.expose
- def purge( self, trans, id ):
- if not trans.app.config.allow_user_dataset_purge:
- raise Exception( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." )
- if self._purge( trans, id ):
- return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True ) )
- raise Exception( "Error removing disk file" )
+ def purge( self, trans, dataset_id, filename, show_deleted_on_refresh = False ):
+ if trans.app.config.allow_user_dataset_purge:
+ message, status = self._purge( trans, dataset_id )
+ else:
+ message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator."
+ status = 'error'
+ return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
@web.expose
- def purge_async( self, trans, id ):
- if not trans.app.config.allow_user_dataset_purge:
- raise Exception( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." )
- if self._purge( trans, id ):
+ def purge_async( self, trans, dataset_id, filename ):
+ if trans.app.config.allow_user_dataset_purge:
+ message, status = self._purge( trans, dataset_id )
+ else:
+ message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator."
+ status = 'error'
+ if status == 'done':
return "OK"
- raise Exception( "Error removing disk file" )
+ else:
+ raise Exception( message )
@web.expose
def show_params( self, trans, dataset_id=None, from_noframe=None, **kwd ):
--- a/lib/galaxy/web/controllers/root.py Mon Jul 25 20:55:42 2011 -0400
+++ b/lib/galaxy/web/controllers/root.py Tue Jul 26 13:30:07 2011 -0400
@@ -8,7 +8,6 @@
from galaxy.util.sanitize_html import sanitize_html
from galaxy.model.orm import *
from galaxy.model.item_attrs import UsesAnnotations
-from galaxy.web.framework.helpers import to_unicode
log = logging.getLogger( __name__ )
@@ -99,11 +98,14 @@
return trans.fill_template_mako( "/my_data.mako" )
@web.expose
- def history( self, trans, as_xml=False, show_deleted=False, show_hidden=False, hda_id=None ):
+ def history( self, trans, as_xml=False, show_deleted=False, show_hidden=False, hda_id=None, **kwd ):
"""
Display the current history, creating a new history if necessary.
NOTE: No longer accepts "id" or "template" options for security reasons.
"""
+ params = util.Params( kwd )
+ message = params.get( 'message', None )
+ status = params.get( 'status', 'done' )
if trans.app.config.require_login and not trans.user:
return trans.fill_template( '/no_access.mako', message = 'Please log in to access Galaxy histories.' )
history = trans.get_history( create=True )
@@ -123,7 +125,9 @@
datasets = datasets,
hda_id = hda_id,
show_deleted = show_deleted,
- show_hidden=show_hidden )
+ show_hidden=show_hidden,
+ message=message,
+ status=status )
@web.expose
def dataset_state ( self, trans, id=None, stamp=None ):
@@ -160,9 +164,13 @@
# Create new HTML for any that have changed
rval = {}
if ids is not None and states is not None:
- ids = map( int, ids.split( "," ) )
+ ids = ids.split( "," )
states = states.split( "," )
- for id, state in zip( ids, states ):
+ for encoded_id, state in zip( ids, states ):
+ try:
+ id = int( trans.app.security.decode_id( encoded_id ) )
+ except:
+ id = int( encoded_id )
data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
if data.state != state:
job_hda = data
@@ -175,7 +183,7 @@
force_history_refresh = tool.force_history_refresh
if not job_hda.visible:
force_history_refresh = True
- rval[id] = {
+ rval[encoded_id] = {
"state": data.state,
"html": unicode( trans.fill_template( "root/history_item.mako", data=data, hid=data.hid ), 'utf-8' ),
"force_history_refresh": force_history_refresh
@@ -288,237 +296,6 @@
else:
yield "No data with id=%d" % id
- @web.expose
- def edit(self, trans, id=None, hid=None, **kwd):
- """Allows user to modify parameters of an HDA."""
- message = ''
- error = False
- def __ok_to_edit_metadata( dataset_id ):
- #prevent modifying metadata when dataset is queued or running as input/output
- #This code could be more efficient, i.e. by using mappers, but to prevent slowing down loading a History panel, we'll leave the code here for now
- for job_to_dataset_association in trans.sa_session.query( self.app.model.JobToInputDatasetAssociation ) \
- .filter_by( dataset_id=dataset_id ) \
- .all() \
- + trans.sa_session.query( self.app.model.JobToOutputDatasetAssociation ) \
- .filter_by( dataset_id=dataset_id ) \
- .all():
- if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]:
- return False
- return True
- if hid is not None:
- history = trans.get_history()
- # TODO: hid handling
- data = history.datasets[ int( hid ) - 1 ]
- elif id is not None:
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- else:
- trans.log_event( "Problem loading dataset id %s with history id %s." % ( str( id ), str( hid ) ) )
- return trans.show_error_message( "Problem loading dataset." )
- if data is None:
- trans.log_event( "Problem retrieving dataset id %s with history id." % ( str( id ), str( hid ) ) )
- return trans.show_error_message( "Problem retrieving dataset." )
- if id is not None and data.history.user is not None and data.history.user != trans.user:
- return trans.show_error_message( "This instance of a dataset (%s) in a history does not belong to you." % ( data.id ) )
- current_user_roles = trans.get_current_user_roles()
- if data.history.user and not data.dataset.has_manage_permissions_roles( trans ):
- # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time,
- # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS
- # permission. In this case, we'll reset this permission to the hda user's private role.
- manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
- permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] }
- trans.app.security_agent.set_dataset_permission( data.dataset, permissions )
- if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
- if data.state == trans.model.Dataset.states.UPLOAD:
- return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." )
- params = util.Params( kwd, sanitize=False )
- if params.change:
- # The user clicked the Save button on the 'Change data type' form
- if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
- #prevent modifying datatype when dataset is queued or running as input/output
- if not __ok_to_edit_metadata( data.id ):
- return trans.show_error_message( "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them." )
- trans.app.datatypes_registry.change_datatype( data, params.datatype, set_meta = not trans.app.config.set_metadata_externally )
- trans.sa_session.flush()
- if trans.app.config.set_metadata_externally:
- trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior
- return trans.show_ok_message( "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype ), refresh_frames=['history'] )
- else:
- return trans.show_error_message( "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype ) )
- elif params.save:
- # The user clicked the Save button on the 'Edit Attributes' form
- data.name = params.name
- data.info = params.info
- message = ''
- if __ok_to_edit_metadata( data.id ):
- # The following for loop will save all metadata_spec items
- for name, spec in data.datatype.metadata_spec.items():
- if spec.get("readonly"):
- continue
- optional = params.get("is_"+name, None)
- other = params.get("or_"+name, None)
- if optional and optional == 'true':
- # optional element... == 'true' actually means it is NOT checked (and therefore omitted)
- setattr(data.metadata, name, None)
- else:
- if other:
- setattr( data.metadata, name, other )
- else:
- setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
- data.datatype.after_setting_metadata( data )
- # Sanitize annotation before adding it.
- if params.annotation:
- annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
- self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
- # If setting metadata previously failed and all required elements have now been set, clear the failed state.
- if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
- data._state = None
- trans.sa_session.flush()
- return trans.show_ok_message( "Attributes updated%s" % message, refresh_frames=['history'] )
- else:
- trans.sa_session.flush()
- return trans.show_warn_message( "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata.", refresh_frames=['history'] )
- elif params.detect:
- # The user clicked the Auto-detect button on the 'Edit Attributes' form
- #prevent modifying metadata when dataset is queued or running as input/output
- if not __ok_to_edit_metadata( data.id ):
- return trans.show_error_message( "This dataset is currently being used as input or output. You cannot change metadata until the jobs have completed or you have canceled them." )
- for name, spec in data.metadata.spec.items():
- # We need to be careful about the attributes we are resetting
- if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
- if spec.get( 'default' ):
- setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
- if trans.app.config.set_metadata_externally:
- message = 'Attributes have been queued to be updated'
- trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
- else:
- message = 'Attributes updated'
- data.set_meta()
- data.datatype.after_setting_metadata( data )
- trans.sa_session.flush()
- return trans.show_ok_message( message, refresh_frames=['history'] )
- elif params.convert_data:
- target_type = kwd.get("target_type", None)
- if target_type:
- message = data.datatype.convert_dataset(trans, data, target_type)
- return trans.show_ok_message( message, refresh_frames=['history'] )
- elif params.update_roles_button:
- if not trans.user:
- return trans.show_error_message( "You must be logged in if you want to change permissions." )
- if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
- access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action )
- manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
- # The user associated the DATASET_ACCESS permission on the dataset with 1 or more roles. We
- # need to ensure that they did not associate roles that would cause accessibility problems.
- permissions, in_roles, error, message = \
- trans.app.security_agent.derive_roles_from_access( trans, data.dataset.id, 'root', **kwd )
- if error:
- # Keep the original role associations for the DATASET_ACCESS permission on the dataset.
- permissions[ access_action ] = data.dataset.get_access_roles( trans )
- status = 'error'
- else:
- error = trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
- if error:
- message += error
- status = 'error'
- else:
- message = 'Your changes completed successfully.'
- trans.sa_session.refresh( data.dataset )
- else:
- return trans.show_error_message( "You are not authorized to change this dataset's permissions" )
- if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey:
- # Copy dbkey into metadata, for backwards compatability
- # This looks like it does nothing, but getting the dbkey
- # returns the metadata dbkey unless it is None, in which
- # case it resorts to the old dbkey. Setting the dbkey
- # sets it properly in the metadata
- #### This is likely no longer required, since the dbkey exists entirely within metadata (the old_dbkey field is gone): REMOVE ME?
- data.metadata.dbkey = data.dbkey
- # let's not overwrite the imported datatypes module with the variable datatypes?
- # the built-in 'id' is overwritten in lots of places as well
- ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
- ldatatypes.sort()
- all_roles = trans.app.security_agent.get_legitimate_roles( trans, data.dataset, 'root' )
- if error:
- status = 'error'
- else:
- status = 'done'
- return trans.fill_template( "/dataset/edit_attributes.mako",
- data=data,
- data_annotation=self.get_item_annotation_str( trans.sa_session, trans.user, data ),
- datatypes=ldatatypes,
- current_user_roles=current_user_roles,
- all_roles=all_roles,
- message=message,
- status=status )
- else:
- return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( id ) )
-
- def __delete_dataset( self, trans, id ):
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- if data:
- # Walk up parent datasets to find the containing history
- topmost_parent = data
- while topmost_parent.parent:
- topmost_parent = topmost_parent.parent
- assert topmost_parent in trans.history.datasets, "Data does not belong to current history"
- # Mark deleted and cleanup
- data.mark_deleted()
- data.clear_associated_files()
- trans.log_event( "Dataset id %s marked as deleted" % str(id) )
- if data.parent_id is None and len( data.creating_job_associations ) > 0:
- # Mark associated job for deletion
- job = data.creating_job_associations[0].job
- if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]:
- # Are *all* of the job's other output datasets deleted?
- if job.check_if_output_datasets_deleted():
- job.mark_deleted( self.app.config.get_bool( 'enable_job_running', True ),
- self.app.config.get_bool( 'track_jobs_in_database', False ) )
- self.app.job_manager.job_stop_queue.put( job.id )
- trans.sa_session.flush()
-
- @web.expose
- def delete( self, trans, id = None, show_deleted_on_refresh = False, **kwd):
- if id:
- if isinstance( id, list ):
- dataset_ids = id
- else:
- dataset_ids = [ id ]
- history = trans.get_history()
- for id in dataset_ids:
- try:
- id = int( id )
- except:
- continue
- self.__delete_dataset( trans, id )
- return self.history( trans, show_deleted = show_deleted_on_refresh )
-
- @web.expose
- def delete_async( self, trans, id = None, **kwd):
- if id:
- try:
- id = int( id )
- except:
- return "Dataset id '%s' is invalid" %str( id )
- self.__delete_dataset( trans, id )
- return "OK"
-
- @web.expose
- def purge( self, trans, id = None, show_deleted_on_refresh = False, **kwd ):
- if not trans.app.config.allow_user_dataset_purge:
- return trans.show_error_message( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." )
- hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( int( id ) )
- if bool( hda.dataset.active_history_associations or hda.dataset.library_associations ):
- return trans.show_error_message( "Unable to purge: LDDA(s) or active HDA(s) exist" )
- elif hda.dataset.purged:
- return trans.show_error_message( "Unable to purge: dataset is already purged" )
- os.unlink( hda.dataset.file_name )
- if os.path.exists( hda.extra_files_path ):
- shutil.rmtree( hda.extra_files_path )
- hda.dataset.purged = True
- trans.sa_session.add( hda.dataset )
- trans.sa_session.flush()
- return self.history( trans, show_deleted = show_deleted_on_refresh )
-
## ---- History management -----------------------------------------------
@web.expose
--- a/lib/galaxy/web/framework/__init__.py Mon Jul 25 20:55:42 2011 -0400
+++ b/lib/galaxy/web/framework/__init__.py Tue Jul 26 13:30:07 2011 -0400
@@ -471,6 +471,7 @@
- associate new session with user
- if old session had a history and it was not associated with a user, associate it with the new session,
otherwise associate the current session's history with the user
+ - add the disk usage of the current session to the user's total disk usage
"""
# Set the previous session
prev_galaxy_session = self.galaxy_session
@@ -494,6 +495,10 @@
# If the previous galaxy session had a history, associate it with the new
# session, but only if it didn't belong to a different user.
history = prev_galaxy_session.current_history
+ if prev_galaxy_session.user is None:
+ # Increase the user's disk usage by the amount of the previous history's datasets if they didn't already own it.
+ for hda in history.datasets:
+ user.total_disk_usage += hda.quota_amount( user )
elif self.galaxy_session.current_history:
history = self.galaxy_session.current_history
if not history and \
--- a/templates/dataset/edit_attributes.mako Mon Jul 25 20:55:42 2011 -0400
+++ b/templates/dataset/edit_attributes.mako Tue Jul 26 13:30:07 2011 -0400
@@ -1,5 +1,6 @@
<%inherit file="/base.mako"/><%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/message.mako" name="message_ns" import="javascripts" /><%def name="title()">${_('Edit Dataset Attributes')}</%def>
@@ -10,6 +11,7 @@
<%def name="javascripts()">
${parent.javascripts()}
${h.js( "galaxy.base", "jquery.autocomplete", "autocomplete_tagging" )}
+ ${message_ns.javascripts()}
</%def><%def name="datatype( dataset, datatypes )">
@@ -31,8 +33,7 @@
<div class="toolForm"><div class="toolFormTitle">${_('Edit Attributes')}</div><div class="toolFormBody">
- <form name="edit_attributes" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="edit_attributes" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><label>
Name:
@@ -80,8 +81,7 @@
<input type="submit" name="save" value="${_('Save')}"/></div></form>
- <form name="auto_detect" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="auto_detect" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><div style="float: left; width: 250px; margin-right: 10px;"><input type="submit" name="detect" value="${_('Auto-detect')}"/>
@@ -104,8 +104,7 @@
<div class="toolForm"><div class="toolFormTitle">${_('Convert to new format')}</div><div class="toolFormBody">
- <form name="convert_data" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="convert_data" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><div style="float: left; width: 250px; margin-right: 10px;"><select name="target_type">
@@ -132,8 +131,7 @@
<div class="toolFormTitle">${_('Change data type')}</div><div class="toolFormBody">
%if data.datatype.allow_datatype_change:
- <form name="change_datatype" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="change_datatype" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><label>
${_('New Type')}:
@@ -161,7 +159,7 @@
%if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
- ${render_permission_form( data.dataset, data.get_display_name(), h.url_for( controller='root', action='edit', id=data.id ), all_roles )}
+ ${render_permission_form( data.dataset, data.get_display_name(), h.url_for( controller='dataset', action='edit', dataset_id=dataset_id ), all_roles )}
%elif trans.user:
<div class="toolForm"><div class="toolFormTitle">View Permissions</div>
--- a/templates/root/history.mako Mon Jul 25 20:55:42 2011 -0400
+++ b/templates/root/history.mako Tue Jul 26 13:30:07 2011 -0400
@@ -1,3 +1,5 @@
+<%namespace file="/message.mako" import="render_msg" />
+
<% _=n_ %><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
@@ -105,6 +107,11 @@
});
};
+// Update the message for async operations
+function render_message(message, status) {
+ $("div#message-container").html( "<div class=\"" + status + "message\">" + message + "</div><br/>" );
+}
+
$(function() {
var historywrapper = $("div.historyItemWrapper");
init_history_items(historywrapper);
@@ -115,8 +122,8 @@
$(this).click( function() {
$( '#historyItem-' + data_id + "> div.historyItemTitleBar" ).addClass( "spinner" );
$.ajax({
- url: "${h.url_for( action='delete_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Delete failed" ); },
+ url: "${h.url_for( controller='dataset', action='delete_async', dataset_id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { render_message( "Dataset deletion failed", "error" ); },
success: function(msg) {
if (msg === "OK") {
%if show_deleted:
@@ -133,7 +140,7 @@
%endif
$(".tipsy").remove();
} else {
- alert( "Delete failed" );
+ render_message( "Dataset deletion failed", "error" );
}
}
});
@@ -147,8 +154,8 @@
$(this).click( function() {
$( '#historyItem-' + data_id + " > div.historyItemTitleBar" ).addClass( "spinner" );
$.ajax({
- url: "${h.url_for( controller='dataset', action='undelete_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Undelete failed" ) },
+ url: "${h.url_for( controller='dataset', action='undelete_async', dataset_id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { render_message( "Dataset undeletion failed", "error" ); },
success: function() {
var to_update = {};
to_update[data_id] = "none";
@@ -165,8 +172,8 @@
$(this).click( function() {
$( '#historyItem-' + data_id + " > div.historyItemTitleBar" ).addClass( "spinner" );
$.ajax({
- url: "${h.url_for( controller='dataset', action='purge_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Removal from disk failed" ) },
+ url: "${h.url_for( controller='dataset', action='purge_async', dataset_id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { render_message( "Dataset removal from disk failed", "error" ) },
success: function() {
var to_update = {};
to_update[data_id] = "none";
@@ -258,7 +265,7 @@
// Updater
updater(
- ${ h.to_json_string( dict([(data.id, data.state) for data in reversed( datasets ) if data.visible and data.state not in TERMINAL_STATES]) ) }
+ ${ h.to_json_string( dict([(trans.app.security.encode_id(data.id), data.state) for data in reversed( datasets ) if data.visible and data.state not in TERMINAL_STATES]) ) }
);
// Navigate to a dataset.
@@ -311,11 +318,11 @@
if ( val.force_history_refresh ){
force_history_refresh = true;
}
- delete tracked_datasets[ parseInt(id) ];
+ delete tracked_datasets[id];
// When a dataset becomes terminal, check for changes in history disk size
check_history_size = true;
} else {
- tracked_datasets[ parseInt(id) ] = val.state;
+ tracked_datasets[id] = val.state;
}
});
if ( force_history_refresh ) {
@@ -458,6 +465,12 @@
</div>
%endif
+<div id="message-container">
+ %if message:
+ ${render_msg( message, status )}
+ %endif
+</div>
+
%if not datasets:
<div class="infomessagesmall" id="emptyHistoryMessage">
@@ -467,7 +480,7 @@
## Render requested datasets, ordered from newest to oldest
%for data in reversed( datasets ):
%if data.visible or show_hidden:
- <div class="historyItemContainer" id="historyItemContainer-${data.id}">
+ <div class="historyItemContainer" id="historyItemContainer-${trans.app.security.encode_id(data.id)}">
${render_dataset( data, data.hid, show_deleted_on_refresh = show_deleted, for_editing = True )}
</div>
%endif
--- a/templates/root/history_common.mako Mon Jul 25 20:55:42 2011 -0400
+++ b/templates/root/history_common.mako Tue Jul 26 13:30:07 2011 -0400
@@ -39,9 +39,9 @@
can_edit = not ( data.deleted or data.purged )
%>
%if not trans.user_is_admin() and not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
- <div class="historyItemWrapper historyItem historyItem-${data_state} historyItem-noPermission" id="historyItem-${data.id}">
+ <div class="historyItemWrapper historyItem historyItem-${data_state} historyItem-noPermission" id="historyItem-${dataset_id}">
%else:
- <div class="historyItemWrapper historyItem historyItem-${data_state}" id="historyItem-${data.id}">
+ <div class="historyItemWrapper historyItem historyItem-${data_state}" id="historyItem-${dataset_id}">
%endif
%if data.deleted or data.purged or data.dataset.purged:
@@ -51,9 +51,9 @@
%else:
This dataset has been deleted.
%if for_editing:
- Click <a href="${h.url_for( controller='dataset', action='undelete', id=data.id )}" class="historyItemUndelete" id="historyItemUndeleter-${data.id}" target="galaxy_history">here</a> to undelete
+ Click <a href="${h.url_for( controller='dataset', action='undelete', dataset_id=dataset_id )}" class="historyItemUndelete" id="historyItemUndeleter-${dataset_id}" target="galaxy_history">here</a> to undelete
%if trans.app.config.allow_user_dataset_purge:
- or <a href="${h.url_for( controller='dataset', action='purge', id=data.id )}" class="historyItemPurge" id="historyItemPurger-${data.id}" target="galaxy_history">here</a> to immediately remove it from disk.
+ or <a href="${h.url_for( controller='dataset', action='purge', dataset_id=dataset_id )}" class="historyItemPurge" id="historyItemPurger-${dataset_id}" target="galaxy_history">here</a> to immediately remove it from disk.
%else:
it.
%endif
@@ -64,7 +64,7 @@
%if data.visible is False:
<div class="warningmessagesmall">
- <strong>This dataset has been hidden. Click <a href="${h.url_for( controller='dataset', action='unhide', id=data.id )}" class="historyItemUnhide" id="historyItemUnhider-${data.id}" target="galaxy_history">here</a> to unhide.</strong>
+ <strong>This dataset has been hidden. Click <a href="${h.url_for( controller='dataset', action='unhide', dataset_id=dataset_id )}" class="historyItemUnhide" id="historyItemUnhider-${dataset_id}" target="galaxy_history">here</a> to unhide.</strong></div>
%endif
@@ -110,13 +110,13 @@
%elif data.purged:
<span title="Cannot edit attributes of datasets removed from disk" class="icon-button edit_disabled tooltip"></span>
%else:
- <a class="icon-button edit tooltip" title="Edit attributes" href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main"></a>
+ <a class="icon-button edit tooltip" title="Edit attributes" href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main"></a>
%endif
%endif
%endif
%if for_editing:
%if can_edit:
- <a class="icon-button delete tooltip" title="Delete" href="${h.url_for( action='delete', id=data.id, show_deleted_on_refresh=show_deleted_on_refresh )}" id="historyItemDeleter-${data.id}"></a>
+ <a class="icon-button delete tooltip" title="Delete" href="${h.url_for( controller='dataset', action='delete', dataset_id=dataset_id, show_deleted_on_refresh=show_deleted_on_refresh )}" id="historyItemDeleter-${dataset_id}"></a>
%else:
<span title="Dataset is already deleted" class="icon-button delete_disabled tooltip"></span>
%endif
@@ -184,7 +184,7 @@
<div class="warningmessagesmall" style="margin: 4px 0 4px 0">
An error occurred setting the metadata for this dataset.
%if can_edit:
- You may be able to <a href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main">set it manually or retry auto-detection</a>.
+ You may be able to <a href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main">set it manually or retry auto-detection</a>.
%endif
</div>
%endif
@@ -193,7 +193,7 @@
format: <span class="${data.ext}">${data.ext}</span>,
database:
%if data.dbkey == '?' and can_edit:
- <a href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main">${_(data.dbkey)}</a>
+ <a href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main">${_(data.dbkey)}</a>
%else:
<span class="${data.dbkey}">${_(data.dbkey)}</span>
%endif
--- a/templates/user/index.mako Mon Jul 25 20:55:42 2011 -0400
+++ b/templates/user/index.mako Tue Jul 26 13:30:07 2011 -0400
@@ -22,6 +22,7 @@
<li><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller, webapp='community' )}">${_('Manage your information')}</a></li>
%endif
</ul>
+ <p>You are currently using <strong>${trans.user.get_disk_usage( nice_size=True )}</strong> of disk space in this Galaxy instance.</p>
%else:
%if not message:
<p>${n_('You are currently not logged in.')}</p>
--- a/templates/webapps/community/repository/common.mako Mon Jul 25 20:55:42 2011 -0400
+++ b/templates/webapps/community/repository/common.mako Tue Jul 26 13:30:07 2011 -0400
@@ -50,39 +50,4 @@
onActivate: function(dtnode) {
var cell = $("#file_contents");
var selected_value;
- if (dtnode.data.key == 'root') {
- selected_value = "${repository.repo_path}/";
- } else {
- selected_value = dtnode.data.key;
- };
- if (selected_value.charAt(selected_value.length-1) != '/') {
- // Make ajax call
- $.ajax( {
- type: "POST",
- url: "${h.url_for( controller='repository', action='get_file_contents' )}",
- dataType: "json",
- data: { file_path: selected_value },
- success : function ( data ) {
- cell.html( '<label>'+data+'</label>' )
- }
- });
- } else {
- cell.html( '' );
- };
- },
- });
- });
- </script>
-</%def>
-
-<%def name="render_clone_str( repository )">
- <%
- protocol, base = trans.request.base.split( '://' )
- if trans.user:
- username = '%s@' % trans.user.username
- else:
- username = ''
- clone_str = '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
- %>
- hg clone <a href="${clone_str}">${clone_str}</a>
-</%def>
\ No newline at end of file
+
\ No newline at end of file
--- a/test/base/twilltestcase.py Mon Jul 25 20:55:42 2011 -0400
+++ b/test/base/twilltestcase.py Tue Jul 26 13:30:07 2011 -0400
@@ -474,7 +474,7 @@
elem = data_list[-1]
hid = int( elem.get('hid') )
self.assertTrue( hid )
- self.visit_page( "edit?hid=%s" % hid )
+ self.visit_page( "dataset/edit?hid=%s" % hid )
for subpatt in patt.split():
tc.find(subpatt)
def delete_history_item( self, hda_id, strings_displayed=[] ):
@@ -483,7 +483,7 @@
hda_id = int( hda_id )
except:
raise AssertionError, "Invalid hda_id '%s' - must be int" % hda_id
- self.visit_url( "%s/root/delete?show_deleted_on_refresh=False&id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/delete?show_deleted_on_refresh=False" % ( self.url, self.security.encode_id( hda_id ) ) )
for check_str in strings_displayed:
self.check_page_for_string( check_str )
def undelete_history_item( self, hda_id, strings_displayed=[] ):
@@ -492,7 +492,7 @@
hda_id = int( hda_id )
except:
raise AssertionError, "Invalid hda_id '%s' - must be int" % hda_id
- self.visit_url( "%s/dataset/undelete?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/undelete" % ( self.url, self.security.encode_id( hda_id ) ) )
for check_str in strings_displayed:
self.check_page_for_string( check_str )
def display_history_item( self, hda_id, strings_displayed=[] ):
@@ -511,7 +511,7 @@
strings_displayed=[], strings_not_displayed=[] ):
"""Edit history_dataset_association attribute information"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
submit_required = False
self.check_page_for_string( 'Edit Attributes' )
if new_name:
@@ -545,9 +545,9 @@
def auto_detect_metadata( self, hda_id ):
"""Auto-detect history_dataset_association metadata"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
self.check_page_for_string( 'This will inspect the dataset and attempt' )
- tc.fv( 'auto_detect', 'id', hda_id )
+ tc.fv( 'auto_detect', 'detect', 'Auto-detect' )
tc.submit( 'detect' )
try:
self.check_page_for_string( 'Attributes have been queued to be updated' )
@@ -559,7 +559,7 @@
def convert_format( self, hda_id, target_type ):
"""Convert format of history_dataset_association"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
self.check_page_for_string( 'This will inspect the dataset and attempt' )
tc.fv( 'convert_data', 'target_type', target_type )
tc.submit( 'convert_data' )
@@ -569,7 +569,7 @@
def change_datatype( self, hda_id, datatype ):
"""Change format of history_dataset_association"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
self.check_page_for_string( 'This will change the datatype of the existing dataset but' )
tc.fv( 'change_datatype', 'datatype', datatype )
tc.submit( 'change' )
--- a/test/functional/test_history_functions.py Mon Jul 25 20:55:42 2011 -0400
+++ b/test/functional/test_history_functions.py Tue Jul 26 13:30:07 2011 -0400
@@ -664,7 +664,7 @@
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
.first()
self.home()
- self.visit_url( "%s/root/delete?show_deleted_on_refresh=False&id=%s" % ( self.url, str( latest_hda.id ) ) )
+ self.delete_history_item( str( latest_hda.id ) )
self.check_history_for_string( 'Your history is empty' )
self.home()
self.visit_url( "%s/history/?show_deleted=True" % self.url )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
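
The twill test changes above replace the old root-controller URLs (raw integer ids in a query string) with dataset-controller routes that use encoded ids. A minimal sketch of the mapping, with a purely illustrative stand-in encoder in place of the tests' self.security.encode_id():

def dataset_edit_url(base_url, hda_id, encode_id):
    # Old style: "%s/root/edit?id=%s" % (base_url, hda_id)
    # New style routes through the dataset controller and hides the raw
    # integer id behind an encoded token.
    return "%s/datasets/%s/edit" % (base_url, encode_id(hda_id))

# Stand-in encoder for illustration; real code uses Galaxy's security helper.
print(dataset_edit_url("http://localhost:8080", 42, lambda n: "f2db41e1fa331b3e"))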
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/9bfff04b94c9/
changeset: 9bfff04b94c9
user: kanwei
date: 2011-07-26 02:55:42
summary: Cleaner fix for 5828:e34b1108f7f8
affected #: 5 files (351 bytes)
--- a/static/june_2007_style/blue/panel_layout.css Mon Jul 25 02:02:07 2011 -0400
+++ b/static/june_2007_style/blue/panel_layout.css Mon Jul 25 20:55:42 2011 -0400
@@ -1,4 +1,3 @@
-body,html{overflow:hidden;margin:0;padding:0;width:100%;height:100%;}
body{font:75% "Lucida Grande",verdana,arial,helvetica,sans-serif;background:#eee;}
.unselectable{user-select:none;-moz-user-select:none;-webkit-user-select:none;}
#background{position:absolute;background:#eee;z-index:-1;top:0;left:0;margin:0;padding:0;width:100%;height:100%;}
--- a/static/june_2007_style/blue/trackster.css Mon Jul 25 02:02:07 2011 -0400
+++ b/static/june_2007_style/blue/trackster.css Mon Jul 25 20:55:42 2011 -0400
@@ -55,4 +55,4 @@
.icon.more-down{background:url('../images/fugue/arrow-transition-270-bw.png') no-repeat 0px 0px;}
.icon.more-across{background:url('../images/fugue/arrow-transition-bw.png') no-repeat 0px 0px;}
.intro{padding:1em;}
-.intro>.action-button{background-color:#CCC;padding:1em;}
\ No newline at end of file
+.intro > .action-button{background-color:#CCC;padding:1em;}
--- a/static/june_2007_style/panel_layout.css.tmpl Mon Jul 25 02:02:07 2011 -0400
+++ b/static/june_2007_style/panel_layout.css.tmpl Mon Jul 25 20:55:42 2011 -0400
@@ -1,11 +1,3 @@
-body, html {
- overflow: hidden;
- margin: 0;
- padding: 0;
- width: 100%;
- height: 100%;
-}
-
body {
font: 75% "Lucida Grande",verdana,arial,helvetica,sans-serif;
background: ${layout_bg};
--- a/templates/base_panels.mako Mon Jul 25 02:02:07 2011 -0400
+++ b/templates/base_panels.mako Mon Jul 25 20:55:42 2011 -0400
@@ -19,6 +19,13 @@
<%def name="stylesheets()">
${h.css('base','panel_layout','jquery.rating')}
<style type="text/css">
+ body, html {
+ overflow: hidden;
+ margin: 0;
+ padding: 0;
+ width: 100%;
+ height: 100%;
+ }
#center {
%if not self.has_left_panel:
left: 0;
--- a/templates/tool_form.mako Mon Jul 25 02:02:07 2011 -0400
+++ b/templates/tool_form.mako Mon Jul 25 20:55:42 2011 -0400
@@ -2,12 +2,10 @@
<%namespace file="/base_panels.mako" import="overlay" /><%def name="stylesheets()">
- ${h.css( "autocomplete_tagging", "panel_layout", "base", "library" )}
+ ${h.css( "autocomplete_tagging", "base", "panel_layout", "library" )}
<style type="text/css">
- body, html {
- overflow: auto;
- width: auto;
- height: auto;
+ html, body {
+ background-color: #fff;
}
</style></%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

25 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/e34b1108f7f8/
changeset: e34b1108f7f8
user: kanwei
date: 2011-07-25 08:02:07
summary: tool_form should overflow correctly
affected #: 1 file (151 bytes)
--- a/templates/tool_form.mako Sat Jul 23 21:05:55 2011 -0400
+++ b/templates/tool_form.mako Mon Jul 25 02:02:07 2011 -0400
@@ -3,6 +3,13 @@
<%def name="stylesheets()">
${h.css( "autocomplete_tagging", "panel_layout", "base", "library" )}
+ <style type="text/css">
+ body, html {
+ overflow: auto;
+ width: auto;
+ height: auto;
+ }
+ </style></%def><%def name="javascripts()">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: trevorw: minor whitespace updates, tab/space consistency
by Bitbucket 23 Jul '11
23 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/6206a27bd2ae/
changeset: 6206a27bd2ae
branch: feature/ws
user: trevorw
date: 2011-07-22 22:17:26
summary: minor whitespace updates, tab/space consistency
affected #: 4 files (122 bytes)
--- a/datatypes_conf.xml.sample Fri Jul 22 10:15:37 2011 -0400
+++ b/datatypes_conf.xml.sample Fri Jul 22 15:17:26 2011 -0500
@@ -123,7 +123,7 @@
<datatype extension="vcf" type="galaxy.datatypes.tabular:Vcf" display_in_upload="true"><converter file="vcf_to_bgzip_converter.xml" target_datatype="bgzip"/><converter file="vcf_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
- <converter file="vcf_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ <converter file="vcf_to_summary_tree_converter.xml" target_datatype="summary_tree"/></datatype><datatype extension="wsf" type="galaxy.datatypes.wsf:SnpFile" display_in_upload="true"/><datatype extension="velvet" type="galaxy.datatypes.assembly:Velvet" display_in_upload="false"/>
@@ -274,10 +274,10 @@
</registration><sniffers><!--
- The order in which Galaxy attempts to determine data types is
- important because some formats are much more loosely defined
- than others. The following list should be the most rigidly
- defined format first, followed by next-most rigidly defined,
+ The order in which Galaxy attempts to determine data types is
+ important because some formats are much more loosely defined
+ than others. The following list should be the most rigidly
+ defined format first, followed by next-most rigidly defined,
and so on.
--><sniffer type="galaxy.datatypes.tabular:Vcf"/>
--- a/tool_conf.xml.main Fri Jul 22 10:15:37 2011 -0400
+++ b/tool_conf.xml.main Fri Jul 22 15:17:26 2011 -0500
@@ -51,7 +51,7 @@
<tool file="fasta_tools/fasta_to_tabular.xml" /><tool file="filters/gff2bed.xml" /><tool file="maf/maf_to_bed.xml" />
- <tool file="maf/maf_to_interval.xml" />
+ <tool file="maf/maf_to_interval.xml" /><tool file="maf/maf_to_fasta.xml" /><tool file="fasta_tools/tabular_to_fasta.xml" /><tool file="fastq/fastq_to_fasta.xml" />
@@ -78,13 +78,13 @@
<tool file="filters/gff/extract_GFF_Features.xml" /><tool file="filters/gff/gff_filter_by_attribute.xml" /><tool file="filters/gff/gff_filter_by_feature_count.xml" />
- <tool file="filters/gff/gtf_filter_by_attribute_values_list.xml" />
+ <tool file="filters/gff/gtf_filter_by_attribute_values_list.xml" /></section><section name="Join, Subtract and Group" id="group"><tool file="filters/joiner.xml" /><tool file="filters/compare.xml"/><tool file="new_operations/subtract_query.xml"/>
- <tool file="stats/grouping.xml" />
+ <tool file="stats/grouping.xml" /><tool file="new_operations/column_join.xml"/></section><section name="Extract Features" id="features">
@@ -112,7 +112,7 @@
<tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section name="Operate on Genomic Intervals" id="bxops">
- <tool file="new_operations/intersect.xml" />
+ <tool file="new_operations/intersect.xml" /><tool file="new_operations/subtract.xml" /><tool file="new_operations/merge.xml" /><tool file="new_operations/concat.xml" />
@@ -127,7 +127,7 @@
</section><section name="Statistics" id="stats"><tool file="stats/gsummary.xml" />
- <tool file="filters/uniq.xml" />
+ <tool file="filters/uniq.xml" /><tool file="stats/cor.xml" /><tool file="stats/generate_matrix_for_pca_lda.xml" /><tool file="stats/lda_analy.xml" />
@@ -223,13 +223,13 @@
<tool file="emboss_5/emboss_chips.xml" /><tool file="emboss_5/emboss_cirdna.xml" /><tool file="emboss_5/emboss_codcmp.xml" />
- <tool file="emboss_5/emboss_coderet.xml" />
+ <tool file="emboss_5/emboss_coderet.xml" /><tool file="emboss_5/emboss_compseq.xml" />
- <tool file="emboss_5/emboss_cpgplot.xml" />
+ <tool file="emboss_5/emboss_cpgplot.xml" /><tool file="emboss_5/emboss_cpgreport.xml" /><tool file="emboss_5/emboss_cusp.xml" /><tool file="emboss_5/emboss_cutseq.xml" />
- <tool file="emboss_5/emboss_dan.xml" />
+ <tool file="emboss_5/emboss_dan.xml" /><tool file="emboss_5/emboss_degapseq.xml" /><tool file="emboss_5/emboss_descseq.xml" /><tool file="emboss_5/emboss_diffseq.xml" />
@@ -245,7 +245,7 @@
<tool file="emboss_5/emboss_etandem.xml" /><tool file="emboss_5/emboss_extractfeat.xml" /><tool file="emboss_5/emboss_extractseq.xml" />
- <tool file="emboss_5/emboss_freak.xml" />
+ <tool file="emboss_5/emboss_freak.xml" /><tool file="emboss_5/emboss_fuzznuc.xml" /><tool file="emboss_5/emboss_fuzzpro.xml" /><tool file="emboss_5/emboss_fuzztran.xml" />
@@ -266,7 +266,7 @@
<tool file="emboss_5/emboss_merger.xml" /><tool file="emboss_5/emboss_msbar.xml" /><tool file="emboss_5/emboss_needle.xml" />
- <tool file="emboss_5/emboss_newcpgreport.xml" />
+ <tool file="emboss_5/emboss_newcpgreport.xml" /><tool file="emboss_5/emboss_newcpgseek.xml" /><tool file="emboss_5/emboss_newseq.xml" /><tool file="emboss_5/emboss_noreturn.xml" />
@@ -294,7 +294,7 @@
<tool file="emboss_5/emboss_revseq.xml" /><tool file="emboss_5/emboss_seqmatchall.xml" /><tool file="emboss_5/emboss_seqret.xml" />
- <tool file="emboss_5/emboss_showfeat.xml" />
+ <tool file="emboss_5/emboss_showfeat.xml" /><tool file="emboss_5/emboss_shuffleseq.xml" /><tool file="emboss_5/emboss_sigcleave.xml" /><tool file="emboss_5/emboss_sirna.xml" />
@@ -316,7 +316,7 @@
<tool file="emboss_5/emboss_water.xml" /><tool file="emboss_5/emboss_wobble.xml" /><tool file="emboss_5/emboss_wordcount.xml" />
- <tool file="emboss_5/emboss_wordmatch.xml" />
+ <tool file="emboss_5/emboss_wordmatch.xml" /></section><label text="NGS Toolbox Beta" id="ngs" /><section name="NGS: QC and manipulation" id="cshl_library_information">
--- a/tool_conf.xml.sample Fri Jul 22 10:15:37 2011 -0400
+++ b/tool_conf.xml.sample Fri Jul 22 15:17:26 2011 -0500
@@ -144,14 +144,14 @@
<tool file="regVariation/t_test_two_samples.xml" /><tool file="regVariation/compute_q_values.xml" /><label text="GFF" id="gff" />
- <tool file="stats/count_gff_features.xml" />
+ <tool file="stats/count_gff_features.xml" /></section><!--
Keep this section commented until all of the tools have functional tests
<section name="Wavelet Analysis" id="dwt"><tool file="discreteWavelet/execute_dwt_IvC_all.xml" /><tool file="discreteWavelet/execute_dwt_cor_aVa_perClass.xml" />
- <tool file="discreteWavelet/execute_dwt_cor_aVb_all.xml" />
+ <tool file="discreteWavelet/execute_dwt_cor_aVb_all.xml" /><tool file="discreteWavelet/execute_dwt_var_perClass.xml" /></section>
-->
@@ -184,8 +184,8 @@
<tool file="regVariation/compute_motif_frequencies_for_all_motifs.xml" /><tool file="regVariation/categorize_elements_satisfying_criteria.xml" />s
<tool file="regVariation/draw_stacked_barplots.xml" />
- <tool file="regVariation/multispecies_MicrosatDataGenerator_interrupted_GALAXY.xml" />
- <tool file="regVariation/microsatellite_birthdeath.xml" />
+ <tool file="regVariation/multispecies_MicrosatDataGenerator_interrupted_GALAXY.xml" />
+ <tool file="regVariation/microsatellite_birthdeath.xml" /></section><section name="Multiple regression" id="multReg"><tool file="regVariation/linear_regression.xml" />
@@ -241,7 +241,7 @@
</section><section name="NGS: QC and manipulation" id="NGS_QC"><label text="FastQC: fastq/sam/bam" id="fastqcsambam" />
- <tool file="rgenetics/rgFastQC.xml" />
+ <tool file="rgenetics/rgFastQC.xml" /><label text="Illumina fastq" id="illumina" /><tool file="fastq/fastq_groomer.xml" /><tool file="fastq/fastq_paired_end_splitter.xml" />
@@ -280,21 +280,21 @@
<tool file="fastx_toolkit/fastx_collapser.xml" /><tool file="fastx_toolkit/fastx_renamer.xml" /><tool file="fastx_toolkit/fastx_reverse_complement.xml" />
- <tool file="fastx_toolkit/fastx_trimmer.xml" />
+ <tool file="fastx_toolkit/fastx_trimmer.xml" /></section><section name="NGS: Picard (beta)" id="picard_beta"><label text="QC/Metrics for sam/bam" id="qcsambam"/><tool file="picard/picard_BamIndexStats.xml" />
- <tool file="picard/rgPicardASMetrics.xml" />
- <tool file="picard/rgPicardGCBiasMetrics.xml" />
- <tool file="picard/rgPicardLibComplexity.xml" />
+ <tool file="picard/rgPicardASMetrics.xml" />
+ <tool file="picard/rgPicardGCBiasMetrics.xml" />
+ <tool file="picard/rgPicardLibComplexity.xml" /><tool file="picard/rgPicardInsertSize.xml" /><tool file="picard/rgPicardHsMetrics.xml" /><label text="bam/sam Cleaning" id="picard-clean" /><tool file="picard/picard_AddOrReplaceReadGroups.xml" /><tool file="picard/picard_ReorderSam.xml" /><tool file="picard/picard_ReplaceSamHeader.xml" />
- <tool file="picard/rgPicardFixMate.xml" />
+ <tool file="picard/rgPicardFixMate.xml" /><tool file="picard/rgPicardMarkDups.xml" /></section><!--
--- a/tool_list.py Fri Jul 22 10:15:37 2011 -0400
+++ b/tool_list.py Fri Jul 22 15:17:26 2011 -0500
@@ -4,19 +4,19 @@
onoff = 1
tool_list = []
for line in open("tool_conf.xml.sample", "r"):
- if line.find("<!--") != -1:
+ if line.find("<!--") != -1:
onoff = 0
- if line.find("file") != -1 and onoff==1:
- strs = line.split('\"')
+ if line.find("file") != -1 and onoff==1:
+ strs = line.split('\"')
tool_list.append(strs[1])
- if line.find("<section") != -1 and onoff==1:
+ if line.find("<section") != -1 and onoff==1:
keys = line.strip().split('\"')
n = 0
strtmp = "section::"
- while n < len(keys) :
- if keys[n].find("id") != -1 : strtmp = strtmp + keys[n+1]
- if keys[n].find("name") != -1 : strtmp = strtmp + keys[n+1] + "-"
- n = n + 1
+ while n < len(keys) :
+ if keys[n].find("id") != -1 : strtmp = strtmp + keys[n+1]
+ if keys[n].find("name") != -1 : strtmp = strtmp + keys[n+1] + "-"
+ n = n + 1
tool_list.append(strtmp.replace(' ', '_'))
if line.find("-->") != -1:
onoff =1
@@ -26,42 +26,42 @@
id = []
desc = []
tool_infos = []
-for tool in tool_list :
- if tool.find("section")!=-1 :
+for tool in tool_list :
+ if tool.find("section")!=-1 :
tool_info = dict()
tool_info["id"] = tool
tool_infos.append(tool_info)
- if os.path.exists("tools/"+tool) :
- for line in open("tools/"+tool) :
- if line.find("<tool ") != -1 and line.find("id") != -1 :
- keys = line.strip().split('\"')
- n = 0
- tool_info = dict()
- tool_info["desc"] = ''
- while n < len(keys) :
- if keys[n].find("id") != -1 : tool_info["id"] = keys[n+1].replace(' ', '_')
- if keys[n].find("name") != -1 : tool_info["name"] = keys[n+1]
- if keys[n].find("description") != -1 : tool_info["desc"] = keys[n+1]
- n = n + 1
- tool_infos.append(tool_info)
- break
+ if os.path.exists("tools/"+tool) :
+ for line in open("tools/"+tool) :
+ if line.find("<tool ") != -1 and line.find("id") != -1 :
+ keys = line.strip().split('\"')
+ n = 0
+ tool_info = dict()
+ tool_info["desc"] = ''
+ while n < len(keys) :
+ if keys[n].find("id") != -1 : tool_info["id"] = keys[n+1].replace(' ', '_')
+ if keys[n].find("name") != -1 : tool_info["name"] = keys[n+1]
+ if keys[n].find("description") != -1 : tool_info["desc"] = keys[n+1]
+ n = n + 1
+ tool_infos.append(tool_info)
+ break
flag=0
-if len(sys.argv) == 1 :
- for tool_info in tool_infos:
- if tool_info["id"].find("section") != -1 :
+if len(sys.argv) == 1 :
+ for tool_info in tool_infos:
+ if tool_info["id"].find("section") != -1 :
print "==========================================================================================================================================="
print "%-45s\t%-40s\t%s" % ("id", "name", tool_info["id"])
print "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"
else :
print "%-45s\t%-40s" % (tool_info["id"], tool_info["name"])
-else:
- for tool_info in tool_infos:
+else:
+ for tool_info in tool_infos:
if tool_info["id"].find("section") != -1 :
flag=0
elif flag==1:
print " functional.test_toolbox:TestForTool_%s" % tool_info["id"],
- if tool_info["id"].replace('section::', '')==sys.argv[1]:
+ if tool_info["id"].replace('section::', '')==sys.argv[1]:
flag=1
#for key in tool_infos.keys():
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/4e6d4f51bff9/
changeset: 4e6d4f51bff9
user: jgoecks
date: 2011-07-22 16:15:37
summary: Trackster documentation.
affected #: 2 files (100 bytes)
--- a/lib/galaxy/web/controllers/tracks.py Fri Jul 22 10:03:41 2011 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Fri Jul 22 10:15:37 2011 -0400
@@ -90,7 +90,7 @@
class DbKeyColumn( grids.GridColumn ):
""" Column for filtering by and displaying dataset dbkey. """
def filter( self, trans, user, query, dbkey ):
- """ Filter by dbkey. """
+ """ Filter by dbkey; datasets without a dbkey are returned as well. """
# use raw SQL b/c metadata is a BLOB
dbkey = dbkey.replace("'", "\\'")
return query.filter( or_( \
--- a/static/scripts/trackster.js Fri Jul 22 10:03:41 2011 -0400
+++ b/static/scripts/trackster.js Fri Jul 22 10:15:37 2011 -0400
@@ -825,6 +825,9 @@
label_track.view = this;
this.label_tracks.push(label_track);
},
+ /**
+ * Remove a track from the view.
+ */
remove_track: function(track) {
this.has_changes = true;
delete this.tracks[this.tracks.indexOf(track)];
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Trackster: enable datasets without a set dbkey to be added to a visualization.
by Bitbucket 22 Jul '11
22 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/e35c7bb3f7ed/
changeset: e35c7bb3f7ed
user: jgoecks
date: 2011-07-22 16:03:41
summary: Trackster: enable datasets without a set dbkey to be added to a visualization.
affected #: 3 files (275 bytes)
--- a/lib/galaxy/web/controllers/tracks.py Thu Jul 21 16:19:52 2011 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Fri Jul 22 10:03:41 2011 -0400
@@ -93,7 +93,11 @@
""" Filter by dbkey. """
# use raw SQL b/c metadata is a BLOB
dbkey = dbkey.replace("'", "\\'")
- return query.filter( or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ) )
+ return query.filter( or_( \
+ or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ), \
+ or_( "metadata like '%%\"dbkey\": [\"?\"]%%'", "metadata like '%%\"dbkey\": \"?\"%%'" ) \
+ )
+ )
class HistoryColumn( grids.GridColumn ):
""" Column for filtering by history id. """
--- a/static/scripts/trackster.js Thu Jul 21 16:19:52 2011 -0400
+++ b/static/scripts/trackster.js Fri Jul 22 10:03:41 2011 -0400
@@ -807,6 +807,9 @@
}
view.redraw();
},
+ /**
+ * Add a track to the view.
+ */
add_track: function(track) {
track.view = this;
track.track_id = this.track_id_counter;
--- a/templates/tracks/browser.mako Thu Jul 21 16:19:52 2011 -0400
+++ b/templates/tracks/browser.mako Fri Jul 22 10:03:41 2011 -0400
@@ -273,7 +273,7 @@
}
// Add track.
- tracks.push( {
+ tracks.push({
"track_type": track.get_type(),
"name": track.name,
"hda_ldda": track.hda_ldda,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
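
Since the HDA metadata column is a BLOB of JSON, the filter above matches the dbkey with raw SQL LIKE clauses, and each key needs two patterns because dbkey may be stored either as a one-element list or as a scalar. A minimal sketch of how those patterns are assembled (the function name is illustrative only):

def dbkey_like_patterns(dbkey):
    # One pattern for the list form ('"dbkey": ["hg18"]') and one for the
    # scalar form ('"dbkey": "hg18"'); '?' is included so that datasets
    # without an assigned dbkey also match, per the change above.
    patterns = []
    for key in (dbkey, "?"):
        patterns.append('%%"dbkey": ["%s"]%%' % key)
        patterns.append('%%"dbkey": "%s"%%' % key)
    return patterns

print(dbkey_like_patterns("hg18"))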

commit/galaxy-central: greg: Add the ability to view all stored metadata for each tool in a tool shed repository, as well as some tool attributes that are not stored as metadata (e.g., tool.command, etc.).
by Bitbucket 21 Jul '11
21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/7702f72354f2/
changeset: 7702f72354f2
user: greg
date: 2011-07-21 22:19:52
summary: Add the ability to view all stored metadata for each tool in a tool shed repository, as well as some tool attributes that are not stored as metadata (e.g., tool.command, etc.).
affected #: 4 files (3.3 KB)
--- a/lib/galaxy/webapps/community/controllers/repository.py Thu Jul 21 14:37:25 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py Thu Jul 21 16:19:52 2011 -0400
@@ -935,14 +935,22 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, repository_id )
- tool = load_tool( trans, os.path.abspath( tool_config ) )
- tool_state = self.__new_state( trans )
- return trans.fill_template( "/webapps/community/repository/tool_form.mako",
- repository=repository,
- tool=tool,
- tool_state=tool_state,
- message=message,
- status=status )
+ try:
+ tool = load_tool( trans, os.path.abspath( tool_config ) )
+ tool_state = self.__new_state( trans )
+ return trans.fill_template( "/webapps/community/repository/tool_form.mako",
+ repository=repository,
+ tool=tool,
+ tool_state=tool_state,
+ message=message,
+ status=status )
+ except Exception, e:
+ message = 'Error loading tool: %s. Click <b>Reset metadata</b> to correct this error.' % str( e )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=repository_id,
+ message=message,
+ status='error' ) )
def __new_state( self, trans, all_pages=False ):
"""
Create a new `DefaultToolState` for this tool. It will not be initialized
@@ -955,6 +963,27 @@
state.inputs = {}
return state
@web.expose
+ def view_tool_metadata( self, trans, repository_id, changeset_revision, tool_id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ repository = get_repository( trans, repository_id )
+ metadata = {}
+ tool = None
+ repository_metadata = get_repository_metadata( trans, repository_id, changeset_revision ).metadata
+ if 'tools' in repository_metadata:
+ for tool_metadata_dict in repository_metadata[ 'tools' ]:
+ if tool_metadata_dict[ 'id' ] == tool_id:
+ metadata = tool_metadata_dict
+ tool = load_tool( trans, os.path.abspath( metadata[ 'tool_config' ] ) )
+ break
+ return trans.fill_template( "/webapps/community/repository/view_tool_metadata.mako",
+ repository=repository,
+ tool=tool,
+ metadata=metadata,
+ message=message,
+ status=status )
+ @web.expose
def download( self, trans, repository_id, file_type, **kwd ):
# Download an archive of the repository files compressed as zip, gz or bz2.
params = util.Params( kwd )
--- a/templates/webapps/community/repository/manage_repository.mako Thu Jul 21 14:37:25 2011 -0400
+++ b/templates/webapps/community/repository/manage_repository.mako Thu Jul 21 16:19:52 2011 -0400
@@ -120,7 +120,7 @@
<div style="clear: both"></div></div><div class="form-row">
- <label>Version:</label>
+ <label>Revision:</label>
%if can_view_change_log:
<a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${repository.revision}</a>
%else:
@@ -151,34 +151,6 @@
</form></div></div>
-<p/>
-<div class="toolForm">
- <div class="toolFormTitle">Manage categories</div>
- <div class="toolFormBody">
- <form name="categories" id="categories" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
- <div class="form-row">
- <label>Categories</label>
- <select name="category_id" multiple>
- %for category in categories:
- %if category.id in selected_categories:
- <option value="${trans.security.encode_id( category.id )}" selected>${category.name}</option>
- %else:
- <option value="${trans.security.encode_id( category.id )}">${category.name}</option>
- %endif
- %endfor
- </select>
- <div class="toolParamHelp" style="clear: both;">
- Multi-select list - hold the appropriate key while clicking to select multiple categories.
- </div>
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <input type="submit" name="manage_categories_button" value="Save"/>
- </div>
- </form>
- </div>
-</div>
-<p/>
%if can_set_metadata:
<p/><div class="toolForm">
@@ -204,7 +176,16 @@
</tr>
%for tool_dict in tool_dicts:
<tr>
- <td><a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">${tool_dict[ 'name' ]}</a></td>
+ <td>
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${tool_dict[ 'id' ]}-popup">
+ <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">
+ ${tool_dict[ 'name' ]}
+ </a>
+ </div>
+ <div popupmenu="tool-${tool_dict[ 'id' ]}-popup">
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), changeset_revision=repository.tip, tool_id=tool_dict[ 'id' ] )}">View all metadata for this tool</a>
+ </div>
+ </td><td>${tool_dict[ 'description' ]}</td><td>${tool_dict[ 'version' ]}</td><td>
@@ -274,6 +255,33 @@
</div></div>
%endif
+<p/>
+<div class="toolForm">
+ <div class="toolFormTitle">Manage categories</div>
+ <div class="toolFormBody">
+ <form name="categories" id="categories" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+ <div class="form-row">
+ <label>Categories</label>
+ <select name="category_id" multiple>
+ %for category in categories:
+ %if category.id in selected_categories:
+ <option value="${trans.security.encode_id( category.id )}" selected>${category.name}</option>
+ %else:
+ <option value="${trans.security.encode_id( category.id )}">${category.name}</option>
+ %endif
+ %endfor
+ </select>
+ <div class="toolParamHelp" style="clear: both;">
+ Multi-select list - hold the appropriate key while clicking to select multiple categories.
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="manage_categories_button" value="Save"/>
+ </div>
+ </form>
+ </div>
+</div>
%if trans.app.config.smtp_server:
<p/><div class="toolForm">
@@ -330,8 +338,8 @@
</form></div></div>
-<p/>
%if repository.ratings:
+ <p/><div class="toolForm"><div class="toolFormTitle">Rating</div><div class="toolFormBody">
--- a/templates/webapps/community/repository/view_repository.mako Thu Jul 21 14:37:25 2011 -0400
+++ b/templates/webapps/community/repository/view_repository.mako Thu Jul 21 16:19:52 2011 -0400
@@ -118,7 +118,7 @@
</div>
%endif
<div class="form-row">
- <label>Version:</label>
+ <label>Revision:</label>
%if can_view_change_log:
<a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${repository.revision}</a>
%else:
@@ -145,20 +145,6 @@
%endif
</div></div>
-%if repository.categories:
- <p/>
- <div class="toolForm">
- <div class="toolFormTitle">Categories</div>
- <div class="toolFormBody">
- %for rca in repository.categories:
- <div class="form-row">
- ${rca.category.name}
- </div>
- %endfor
- <div style="clear: both"></div>
- </div>
- </div>
-%endif
%if metadata:
<p/><div class="toolForm">
@@ -183,9 +169,18 @@
</tr>
%for tool_dict in tool_dicts:
<tr>
- <td><a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">${tool_dict[ 'name' ]}</a></td>
+ <td>
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${repository.id}-popup">
+ <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">
+ ${tool_dict[ 'name' ]}
+ </a>
+ </div>
+ <div popupmenu="tool-${repository.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), changeset_revision=repository.tip, tool_id=tool_dict[ 'id' ] )}">View all metadata for this tool</a>
+ </div>
+ </td><td>${tool_dict[ 'description' ]}</td>
- <td>version: ${tool_dict[ 'version' ]}</td>
+ <td>${tool_dict[ 'version' ]}</td><td><%
if 'requirements' in tool_dict:
@@ -242,6 +237,20 @@
</div></div>
%endif
+%if repository.categories:
+ <p/>
+ <div class="toolForm">
+ <div class="toolFormTitle">Categories</div>
+ <div class="toolFormBody">
+ %for rca in repository.categories:
+ <div class="form-row">
+ ${rca.category.name}
+ </div>
+ %endfor
+ <div style="clear: both"></div>
+ </div>
+ </div>
+%endif
%if trans.user and trans.app.config.smtp_server:
<p/><div class="toolForm">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
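
The new view_tool_metadata action above walks the revision's stored metadata to find the entry for one tool. A minimal standalone sketch of that lookup; the sample dictionary is invented for illustration:

def find_tool_metadata(repository_metadata, tool_id):
    # repository_metadata is the per-changeset dict; its 'tools' entry is a
    # list of per-tool dicts keyed by 'id', 'name', 'version', 'tool_config', etc.
    for tool_dict in repository_metadata.get("tools", []):
        if tool_dict["id"] == tool_id:
            return tool_dict
    return None

example_metadata = {"tools": [{"id": "fasta_stats", "name": "FASTA statistics", "version": "1.0.0"}]}
print(find_tool_metadata(example_metadata, "fasta_stats"))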

commit/galaxy-central: jgoecks: Prevent keyboard navigation from within input elements in shared visualizations.
by Bitbucket 21 Jul '11
21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/c360836d2c56/
changeset: c360836d2c56
user: jgoecks
date: 2011-07-21 20:37:25
summary: Prevent keyboard navigation from within input elements in shared visualizations.
affected #: 1 file (187 bytes)
--- a/templates/visualization/display.mako Thu Jul 21 14:22:33 2011 -0400
+++ b/templates/visualization/display.mako Thu Jul 21 14:37:25 2011 -0400
@@ -122,6 +122,11 @@
// Keyboard navigation. Scroll ~7% of height when scrolling up/down.
//
$(document).keydown(function(e) {
+ // Do not navigate if arrow keys used in input element.
+ if ($(e.srcElement).is(':input')) {
+ return;
+ }
+
// Key codes: left == 37, up == 38, right == 39, down == 40
switch(e.which) {
case 37:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Trackster: save data in Tile object and use to determine if filters are available.
by Bitbucket 21 Jul '11
21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/a7b8b0f49b48/
changeset: a7b8b0f49b48
user: jgoecks
date: 2011-07-21 20:22:33
summary: Trackster: save data in Tile object and use to determine if filters are available.
affected #: 1 file (58 bytes)
--- a/static/scripts/trackster.js Thu Jul 21 13:55:24 2011 -0400
+++ b/static/scripts/trackster.js Thu Jul 21 14:22:33 2011 -0400
@@ -1588,23 +1588,24 @@
/**
* Tiles drawn by tracks.
*/
-var Tile = function(index, resolution, canvas) {
+var Tile = function(index, resolution, canvas, data) {
this.index = index;
this.low = index * DENSITY * resolution;
this.high = (index + 1) * DENSITY * resolution;
this.resolution = resolution;
// Wrap element in div for background.
this.canvas = $("<div class='track-tile'/>").append(canvas);
+ this.data = data;
this.stale = false;
};
-var SummaryTreeTile = function(index, resolution, canvas, max_val) {
- Tile.call(this, index, resolution, canvas);
+var SummaryTreeTile = function(index, resolution, canvas, data, max_val) {
+ Tile.call(this, index, resolution, canvas, data);
this.max_val = max_val;
};
-var FeatureTrackTile = function(index, resolution, canvas, message) {
- Tile.call(this, index, resolution, canvas);
+var FeatureTrackTile = function(index, resolution, canvas, data, message) {
+ Tile.call(this, index, resolution, canvas, data);
this.message = message;
};
@@ -2096,13 +2097,17 @@
filters[f].update_ui_elt();
}
- // Determine if filters are available; this is based on the example feature.
- var filters_available = false;
- if (track.example_feature) {
- for (var f = 0; f < filters.length; f++) {
- if (filters[f].applies_to(track.example_feature)) {
- filters_available = true;
- break;
+ // Determine if filters are available; this is based on the tiles' data.
+ var filters_available = false,
+ example_feature;
+ for (var i = 0; i < drawn_tiles.length; i++) {
+ if (drawn_tiles[i].data.length) {
+ example_feature = drawn_tiles[i].data[0];
+ for (var f = 0; f < filters.length; f++) {
+ if (filters[f].applies_to(example_feature)) {
+ filters_available = true;
+ break;
+ }
}
}
}
@@ -2385,7 +2390,7 @@
var c_start = Math.round(c * w_scale);
ctx.fillText(seq[c], c_start + track.left_offset, 10);
}
- return new Tile(tile_index, resolution, canvas);
+ return new Tile(tile_index, resolution, canvas, seq);
}
this.content_div.css("height", "0px");
}
@@ -2523,7 +2528,7 @@
var painter = new painters.LinePainter(result.data, tile_low, tile_low + tile_length, this.prefs, this.mode);
painter.draw(ctx, width, height);
- return new Tile(tile_index, resolution, canvas);
+ return new Tile(tile_index, resolution, canvas, result.data);
}
});
@@ -2737,7 +2742,7 @@
// TODO: this shouldn't be done at the tile level
this.container_div.find(".yaxislabel").remove();
var max_label = $("<div />").addClass('yaxislabel');
- max_label.text( result.max );
+ max_label.text(result.max);
max_label.css({ position: "absolute", top: "24px", left: "10px", color: this.prefs.label_color });
max_label.prependTo(this.container_div);
// Create canvas
@@ -2760,7 +2765,7 @@
// Deal with left_offset by translating
ctx.translate(left_offset, SUMMARY_TREE_TOP_PADDING);
painter.draw(ctx, width, required_height);
- return new SummaryTreeTile(tile_index, resolution, canvas, result.max);
+ return new SummaryTreeTile(tile_index, resolution, canvas, result.data, result.max);
}
// Start dealing with row-by-row tracks
@@ -2811,16 +2816,12 @@
this.container_div.find(".yaxislabel").remove();
if (result.data) {
- // Set example feature. This is needed so that track can update its UI based on feature attributes.
- // TODO: use tile data rather than example feature?
- this.example_feature = (result.data.length ? result.data[0] : undefined);
-
// Draw features.
ctx.translate(left_offset, 0);
painter.draw(ctx, width, required_height, slots);
}
- return new FeatureTrackTile(tile_index, resolution, canvas, result.message);
+ return new FeatureTrackTile(tile_index, resolution, canvas, result.data, result.message);
}
});
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: greg: Add Assaf Gordon's collect_job_timings.sh script.
by Bitbucket 21 Jul '11
21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/c609c2574c66/
changeset: c609c2574c66
user: greg
date: 2011-07-21 19:55:24
summary: Add Assaf Gordon's collect_job_timings.sh script.
This shell script produces a report combining information from SGE's QACCT with the Galaxy job/dataset information.
The output contains:
job, user, tool name, dbkey, total input size in bytes, waiting time in the SGE queue, actual SGE execution running time, and some other tidbits.
This allows finding how much running time each user had on the cluster,
how much time each tool/user spent idly waiting,
and some possible correlations between tools, dbkeys, input size and running time.
The script is tightly coupled with SGE and PostgreSQL, but can probably be adapted to PBS/MySQL.
affected #: 1 file (0 bytes)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
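
Not part of the commit, just an illustration of the report's shape: joining per-job Galaxy information with per-job SGE accounting. All field names here are invented; the actual script reads them from PostgreSQL and qacct.

def combine_timings(galaxy_jobs, qacct_records):
    # galaxy_jobs: {job_id: {"user", "tool", "dbkey", "input_bytes"}}
    # qacct_records: {job_id: {"submit", "start", "end"}} as epoch seconds
    report = []
    for job_id, job in galaxy_jobs.items():
        acct = qacct_records.get(job_id)
        if acct is None:
            continue  # job never reached the cluster or is still queued
        report.append({
            "job": job_id,
            "user": job["user"],
            "tool": job["tool"],
            "dbkey": job["dbkey"],
            "input_bytes": job["input_bytes"],
            "queue_wait_s": acct["start"] - acct["submit"],
            "run_time_s": acct["end"] - acct["start"],
        })
    return report

# Illustrative call with made-up values:
print(combine_timings(
    {101: {"user": "alice", "tool": "bowtie_wrapper", "dbkey": "hg18", "input_bytes": 123456}},
    {101: {"submit": 1000, "start": 1060, "end": 1300}},
))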

commit/galaxy-central: greg: Fix for tool configs that do not include a command tag.
by Bitbucket 21 Jul '11
21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/0f15591f2acd/
changeset: 0f15591f2acd
user: greg
date: 2011-07-21 19:44:45
summary: Fix for tool configs that do not include a command tag.
affected #: 1 file (35 bytes)
--- a/lib/galaxy/tools/__init__.py Thu Jul 21 13:14:13 2011 -0400
+++ b/lib/galaxy/tools/__init__.py Thu Jul 21 13:44:45 2011 -0400
@@ -374,15 +374,15 @@
self.input_translator = root.find( "request_param_translation" )
if self.input_translator:
self.input_translator = ToolInputTranslator.from_element( self.input_translator )
- # Command line (template). Optional for tools that do not invoke a
- # local program
+ # Command line (template). Optional for tools that do not invoke a local program
command = root.find("command")
if command is not None and command.text is not None:
self.command = command.text.lstrip() # get rid of leading whitespace
+ # Must pre-pend this AFTER processing the cheetah command template
+ self.interpreter = command.get( "interpreter", None )
else:
self.command = ''
- # Must pre-pend this AFTER processing the cheetah command template
- self.interpreter = command.get("interpreter", None)
+ self.interpreter = None
# Parameters used to build URL for redirection to external app
redirect_url_params = root.find( "redirect_url_params" )
if redirect_url_params is not None and redirect_url_params.text is not None:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
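
A minimal standalone sketch of the corrected control flow above: the optional interpreter attribute is only read when a command tag actually exists, so tool configs without one no longer raise an AttributeError on None.

import xml.etree.ElementTree as ET

def parse_command(tool_xml):
    # Mirrors the fix above: command text and interpreter default to
    # ('', None) when the tool config has no <command> tag.
    root = ET.fromstring(tool_xml)
    command = root.find("command")
    if command is not None and command.text is not None:
        return command.text.lstrip(), command.get("interpreter", None)
    return "", None

print(parse_command('<tool id="t"><command interpreter="python">run.py $input</command></tool>'))
print(parse_command('<tool id="t"><inputs/></tool>'))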
2 new changesets in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/0ff5ad458dea/
changeset: 0ff5ad458dea
user: kanwei
date: 2011-07-21 19:13:48
summary: Return JSON for LibraryDatasetToolParam metadata
affected #: 1 file (75 bytes)
--- a/lib/galaxy/tools/__init__.py Thu Jul 21 13:03:04 2011 -0400
+++ b/lib/galaxy/tools/__init__.py Thu Jul 21 13:13:48 2011 -0400
@@ -2029,6 +2029,7 @@
def __str__( self ):
return self.value.name
def templates( self ):
+ """ Returns JSON dict of templates => data """
if not self.value:
return None
template_data = {}
@@ -2039,7 +2040,7 @@
for field in template.fields:
tmp_dict[field['label']] = content[field['name']]
template_data[template.name] = tmp_dict
- return template_data
+ return simplejson.dumps( template_data )
def __getattr__( self, key ):
return getattr( self.value, key )
http://bitbucket.org/galaxy/galaxy-central/changeset/448490d701fa/
changeset: 448490d701fa
user: kanwei
date: 2011-07-21 19:14:13
summary: Fix workflow overlay error introduced in recent commit
affected #: 1 file (35 bytes)
--- a/templates/workflow/editor.mako Thu Jul 21 13:13:48 2011 -0400
+++ b/templates/workflow/editor.mako Thu Jul 21 13:14:13 2011 -0400
@@ -931,9 +931,9 @@
</div></%def>
-<%def name="overlay()">
+<%def name="overlay(visible=False)">
${parent.overlay( "Loading workflow editor...",
- "<img src='" + h.url_for('/static/images/yui/rel_interstitial_loading.gif') + "'/>" )}
+ "<img src='" + h.url_for('/static/images/yui/rel_interstitial_loading.gif') + "'/>", self.overlay_visible )}
</%def><%def name="left_panel()">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
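
For the first changeset above, a minimal sketch of the return-value change: the templates dict is serialized before being handed back, so callers get JSON text instead of a Python repr. The commit uses simplejson; the stdlib json module is used here and behaves the same for this case.

import json

def templates_as_json(template_fields):
    # template_fields: {template name: {field label: field value}}, the same
    # shape the method builds from each template's fields and contents.
    template_data = {}
    for name, fields in template_fields.items():
        template_data[name] = dict(fields)
    return json.dumps(template_data)

print(templates_as_json({"Sample info": {"Organism": "E. coli", "Source": "lab A"}}))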

21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/bdb3436ff22f/
changeset: bdb3436ff22f
user: greg
date: 2011-07-21 19:03:04
summary: Remove tool shed repository files from disk that become untracked because they were removed from the repository within an exception block (only occurs in the tool shed test / production environments).
affected #: 2 files (1.6 KB)
--- a/lib/galaxy/webapps/community/controllers/repository.py Thu Jul 21 12:46:27 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py Thu Jul 21 13:03:04 2011 -0400
@@ -459,6 +459,21 @@
relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
repo.dirstate.remove( relative_selected_file )
repo.dirstate.write()
+ absolute_selected_file = os.path.abspath( selected_file )
+ if os.path.isdir( absolute_selected_file ):
+ try:
+ os.rmdir( absolute_selected_file )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ elif os.path.isfile( absolute_selected_file ):
+ os.remove( absolute_selected_file )
+ dir = os.path.split( absolute_selected_file )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty
+ pass
# Commit the change set.
if not commit_message:
commit_message = 'Deleted selected files'
--- a/lib/galaxy/webapps/community/controllers/upload.py Thu Jul 21 12:46:27 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py Thu Jul 21 13:03:04 2011 -0400
@@ -191,6 +191,21 @@
relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
repo.dirstate.remove( relative_selected_file )
repo.dirstate.write()
+ absolute_selected_file = os.path.abspath( selected_file )
+ if os.path.isdir( absolute_selected_file ):
+ try:
+ os.rmdir( absolute_selected_file )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ elif os.path.isfile( absolute_selected_file ):
+ os.remove( absolute_selected_file )
+ dir = os.path.split( absolute_selected_file )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty
+ pass
for filename_in_archive in filenames_in_archive:
commands.add( repo.ui, repo, filename_in_archive )
if filename_in_archive.endswith( '.loc.sample' ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
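
A minimal standalone sketch of the disk cleanup added above: remove the now-untracked path, then try to prune its parent directory and ignore the OSError raised when the directory still has other contents.

import os

def remove_and_prune(selected_file):
    path = os.path.abspath(selected_file)
    if os.path.isdir(path):
        try:
            os.rmdir(path)
        except OSError:
            pass  # directory not empty
    elif os.path.isfile(path):
        os.remove(path)
        parent = os.path.split(path)[0]
        try:
            os.rmdir(parent)
        except OSError:
            pass  # parent still holds other tracked files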

commit/galaxy-central: greg: Add exception handling for mercurial commands that break in the test / production tool shed environment, but not elsewhere.
by Bitbucket 21 Jul '11
21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/86bdb4a9db48/
changeset: 86bdb4a9db48
user: greg
date: 2011-07-21 18:46:27
summary: Add exception handling for mercurial commands that break in the test / production tool shed environment, but not elsewhere.
affected #: 2 files (1.1 KB)
--- a/lib/galaxy/webapps/community/controllers/repository.py Thu Jul 21 10:43:16 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py Thu Jul 21 12:46:27 2011 -0400
@@ -450,8 +450,15 @@
# Get the current repository tip.
tip = repository.tip
for selected_file in selected_files_to_delete:
- repo_file = os.path.abspath( selected_file )
- commands.remove( repo.ui, repo, repo_file, force=True )
+ try:
+ commands.remove( repo.ui, repo, repo_file, force=True )
+ except Exception, e:
+ # I never have a problem with commands.remove on a Mac, but in the test/production
+ # tool shed environment, it throws an exception whenever I delete all files from a
+ # repository. If this happens, we'll try the following.
+ relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+ repo.dirstate.remove( relative_selected_file )
+ repo.dirstate.write()
# Commit the change set.
if not commit_message:
commit_message = 'Deleted selected files'
--- a/lib/galaxy/webapps/community/controllers/upload.py Thu Jul 21 10:43:16 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py Thu Jul 21 12:46:27 2011 -0400
@@ -182,7 +182,15 @@
for repo_file in files_to_remove:
# Remove files in the repository (relative to the upload point)
# that are not in the uploaded archive.
- commands.remove( repo.ui, repo, repo_file, force=True )
+ try:
+ commands.remove( repo.ui, repo, repo_file, force=True )
+ except Exception, e:
+ # I never have a problem with commands.remove on a Mac, but in the test/production
+ # tool shed environment, it throws an exception whenever I delete all files from a
+ # repository. If this happens, we'll try the following.
+ relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+ repo.dirstate.remove( relative_selected_file )
+ repo.dirstate.write()
for filename_in_archive in filenames_in_archive:
commands.add( repo.ui, repo, filename_in_archive )
if filename_in_archive.endswith( '.loc.sample' ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
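
One note on the repository.py hunk above: it drops the assignment repo_file = os.path.abspath( selected_file ) while the try block still references repo_file, so the sketch below folds that call back in. Otherwise it only uses the calls already shown in the diff (commands.remove, repo.dirstate.remove/write); treat it as an illustration of the fallback, not the committed code.

import os
from mercurial import commands

def remove_repo_file(repo, repository_id, selected_file):
    # Try the normal `hg remove`; on the hosted tool shed this sometimes
    # raises, in which case the dirstate is edited directly with a path
    # made relative to the repository root.
    try:
        commands.remove(repo.ui, repo, os.path.abspath(selected_file), force=True)
    except Exception:
        relative_selected_file = selected_file.split('repo_%d' % repository_id)[1].lstrip('/')
        repo.dirstate.remove(relative_selected_file)
        repo.dirstate.write()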

commit/galaxy-central: dannon: Fix bitbucket issue #612 regarding administrative job lock toggling inappropriately. Split logical forms into actual forms.
by Bitbucket 21 Jul '11
21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/ebb1014b237d/
changeset: ebb1014b237d
user: dannon
date: 2011-07-21 16:43:16
summary: Fix bitbucket issue #612 regarding administrative job lock toggling inappropriately. Split logical forms into actual forms.
affected #: 2 files (459 bytes)
--- a/lib/galaxy/web/base/controller.py Thu Jul 21 10:26:41 2011 -0400
+++ b/lib/galaxy/web/base/controller.py Thu Jul 21 10:43:16 2011 -0400
@@ -21,7 +21,7 @@
# RE that tests for valid slug.
VALID_SLUG_RE = re.compile( "^[a-z0-9\-]+$" )
-
+
class BaseController( object ):
"""
Base class for Galaxy web application controllers.
@@ -51,7 +51,7 @@
else:
item_class = None
return item_class
-
+
Root = BaseController
class SharableItemSecurity:
@@ -72,7 +72,7 @@
#
# TODO: need to move UsesHistory, etc. mixins to better location - perhaps lib/galaxy/model/XXX ?
-#
+#
class UsesHistoryDatasetAssociation:
""" Mixin for controllers that use HistoryDatasetAssociation objects. """
@@ -119,15 +119,15 @@
dataset_data = open( dataset.file_name ).read(max_peek_size)
truncated = False
return truncated, dataset_data
-
+
class UsesVisualization( SharableItemSecurity ):
""" Mixin for controllers that use Visualization objects. """
len_files = None
-
+
def _get_dbkeys( self, trans ):
""" Returns all valid dbkeys that a user can use in a visualization. """
-
+
# Read len files.
if not self.len_files:
len_files = glob.glob( os.path.join(trans.app.config.len_file_path, "*.len") )
@@ -137,10 +137,10 @@
user = trans.get_user()
if 'dbkeys' in user.preferences:
user_keys = from_json_string( user.preferences['dbkeys'] )
-
+
dbkeys = [ (v, k) for k, v in trans.db_builds if k in self.len_files or k in user_keys ]
return dbkeys
-
+
def get_visualization( self, trans, id, check_ownership=True, check_accessible=False ):
""" Get a Visualization from the database by id, verifying ownership. """
# Load workflow from database
@@ -152,7 +152,7 @@
error( "Visualization not found" )
else:
return self.security_check( trans.get_user(), visualization, check_ownership, check_accessible )
-
+
def get_visualization_config( self, trans, visualization ):
""" Returns a visualization's configuration. Only works for trackster visualizations right now. """
@@ -172,16 +172,16 @@
dataset = self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
else:
dataset = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id(dataset_id) )
-
+
try:
prefs = t['prefs']
except KeyError:
prefs = {}
-
+
track_type, _ = dataset.datatype.get_track_type()
track_data_provider_class = get_data_provider( original_dataset=dataset )
track_data_provider = track_data_provider_class( original_dataset=dataset )
-
+
tracks.append( {
"track_type": track_type,
"name": t['name'],
@@ -192,15 +192,15 @@
"tool": get_tool_def( trans, dataset ),
"is_child": t.get('is_child', False)
} )
-
- config = { "title": visualization.title, "vis_id": trans.security.encode_id( visualization.id ),
+
+ config = { "title": visualization.title, "vis_id": trans.security.encode_id( visualization.id ),
"tracks": tracks, "bookmarks": bookmarks, "chrom": "", "dbkey": visualization.dbkey }
if 'viewport' in latest_revision.config:
config['viewport'] = latest_revision.config['viewport']
-
+
return config
-
+
class UsesStoredWorkflow( SharableItemSecurity ):
""" Mixin for controllers that use StoredWorkflow objects. """
def get_stored_workflow( self, trans, id, check_ownership=True, check_accessible=False ):
@@ -225,7 +225,7 @@
step.upgrade_messages = module.check_and_update_state()
# Any connected input needs to have value DummyDataset (these
# are not persisted so we need to do it every time)
- module.add_dummy_datasets( connections=step.input_connections )
+ module.add_dummy_datasets( connections=step.input_connections )
# Store state with the step
step.module = module
step.state = module.state
@@ -270,7 +270,7 @@
"""Mixin for controllers that use Galaxy form objects."""
def get_all_forms( self, trans, all_versions=False, filter=None, form_type='All' ):
"""
- Return all the latest forms from the form_definition_current table
+ Return all the latest forms from the form_definition_current table
if all_versions is set to True. Otherwise return all the versions
of all the forms from the form_definition table.
"""
@@ -684,7 +684,7 @@
trans.sa_session.flush()
info_association = sra.run
else:
- info_association = assoc.run
+ info_association = assoc.run
else:
info_association = None
if info_association:
@@ -912,7 +912,7 @@
else:
field_value = int( input_text_value )
elif field_type == CheckboxField.__name__:
- field_value = CheckboxField.is_checked( input_value )
+ field_value = CheckboxField.is_checked( input_value )
elif field_type == PasswordField.__name__:
field_value = kwd.get( field_name, '' )
else:
@@ -1043,7 +1043,7 @@
@web.require_login( "modify Galaxy items" )
def set_slug_async( self, trans, id, new_slug ):
""" Set item slug asynchronously. """
- pass
+ pass
@web.expose
@web.require_login( "share Galaxy items" )
def sharing( self, trans, id, **kwargs ):
@@ -1099,7 +1099,7 @@
item.slug = slug
return True
return False
-
+
"""
Deprecated: `BaseController` used to be available under the name `Root`
"""
@@ -1111,7 +1111,7 @@
user_list_grid = None
role_list_grid = None
group_list_grid = None
-
+
@web.expose
@web.require_admin
def index( self, trans, **kwd ):
@@ -1158,7 +1158,7 @@
toolbox=self.app.toolbox,
message=message,
status='done' )
-
+
# Galaxy Role Stuff
@web.expose
@web.require_admin
@@ -1342,7 +1342,7 @@
action='roles',
webapp=webapp,
message=util.sanitize_text( message ),
- status=status ) )
+ status=status ) )
in_users = []
out_users = []
in_groups = []
@@ -1934,7 +1934,7 @@
def purge_user( self, trans, **kwd ):
# This method should only be called for a User that has previously been deleted.
# We keep the User in the database ( marked as purged ), and stuff associated
- # with the user's private role in case we want the ability to unpurge the user
+ # with the user's private role in case we want the ability to unpurge the user
# some time in the future.
# Purging a deleted User deletes all of the following:
# - History where user_id = User.id
@@ -2158,7 +2158,7 @@
@web.expose
@web.require_admin
- def jobs( self, trans, stop = [], stop_msg = None, cutoff = 180, job_lock = None, **kwd ):
+ def jobs( self, trans, stop = [], stop_msg = None, cutoff = 180, job_lock = None, ajl_submit = None, **kwd ):
deleted = []
msg = None
status = None
@@ -2181,10 +2181,11 @@
msg += ' for deletion: '
msg += ', '.join( deleted )
status = 'done'
- if job_lock == 'lock':
- trans.app.job_manager.job_queue.job_lock = True
- elif job_lock == 'unlock':
- trans.app.job_manager.job_queue.job_lock = False
+ if ajl_submit:
+ if job_lock == 'on':
+ trans.app.job_manager.job_queue.job_lock = True
+ else:
+ trans.app.job_manager.job_queue.job_lock = False
cutoff_time = datetime.utcnow() - timedelta( seconds=int( cutoff ) )
jobs = trans.sa_session.query( trans.app.model.Job ) \
.filter( and_( trans.app.model.Job.table.c.update_time < cutoff_time,
@@ -2209,7 +2210,7 @@
job_lock = trans.app.job_manager.job_queue.job_lock )
## ---- Utility methods -------------------------------------------------------
-
+
def get_user( trans, id ):
"""Get a User from the database by id."""
# Load user from database
--- a/templates/admin/jobs.mako Thu Jul 21 10:26:41 2011 -0400
+++ b/templates/admin/jobs.mako Thu Jul 21 10:43:16 2011 -0400
@@ -21,11 +21,11 @@
report this error".
</p>
-<form name="jobs" action="${h.url_for()}" method="POST"><p/>
%if jobs:
+<form name="jobs" action="${h.url_for()}" method="POST"><table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%"><tr class="header"><td> </td>
@@ -84,10 +84,12 @@
</div></div><p/>
+</form>
%else:
<div class="infomessage">There are no unfinished jobs to show with current cutoff time.</div><p/>
%endif
+<form name="jobs" action="${h.url_for()}" method="POST"><div class="toolForm"><div class="toolFormTitle">
Update Jobs
@@ -110,30 +112,33 @@
</div></div></div>
- <p/>
+</form>
+<form name="jobs" action="${h.url_for()}" method="POST">
+ <p/><div class="toolForm"><div class="toolFormTitle">
Administrative Job Lock
</div><div class="toolFormBody">
- %if job_lock==True:
<div class="form-row">
- <p>All job execution is currently locked. Click here to unlock.</p>
- <input type='hidden' name='job_lock' value='unlock'/>
+ <input type="hidden" name="ajl_submit" value="True"/>
+ %if job_lock==True:
+ <p>Job dispatching is currently <strong>locked</strong>.</p>
+ <label>
+ <input type='checkbox' name='job_lock' checked='checked' />
+ Prevent jobs from dispatching.
+ </label>
+ %else:
+ <p>Job dispatching is currently <strong>unlocked</strong>.</p>
+ <label>
+ <input type='checkbox' name='job_lock' />
+ Prevent jobs from dispatching.
+ </label>
+ %endif
</div><div class="form-row">
- <input type="submit" class="primary-button" name="submit" value="Unlock">
+ <input type="submit" class="primary-button" name="submit" value="Update"></div>
- %else:
- <div class="form-row">
- <p>To prevent new jobs from dispatching, you can lock the job queue here.</p>
- <input type='hidden' name='job_lock' value='lock'/>
- </div>
- <div class="form-row">
- <input type="submit" class="primary-button" name="submit" value="Lock">
- </div>
- %endif
</div></div>
-
</form>
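
A note on the job-lock change above (the field names ajl_submit and job_lock come from the diff; the helper below is an illustrative sketch, not Galaxy's code): an unchecked HTML checkbox is simply absent from the POST data, so the controller cannot tell "checkbox left unchecked" apart from "lock form never submitted". The hidden ajl_submit field marks that the job-lock form was the one posted, and only then is the presence or absence of job_lock interpreted as lock or unlock.

def handle_job_lock_form( params, job_queue ):
    # Hypothetical helper mirroring the logic added in the diff.
    # 'ajl_submit' is a hidden field always sent with the job-lock form,
    # so its presence means the lock form was actually submitted.
    if params.get( 'ajl_submit' ):
        # An unchecked checkbox is omitted from the POST entirely, so any
        # value at all for 'job_lock' means "lock" and absence means "unlock".
        job_queue.job_lock = bool( params.get( 'job_lock' ) )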
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Fix filtering of deleted objects in grid framework when using sqlite. Also make history grid's label for deleted filter clearer. Fixes #596
by Bitbucket 21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/c176363cb7b2/
changeset: c176363cb7b2
user: jgoecks
date: 2011-07-21 16:04:48
summary: Fix filtering of deleted objects in grid framework when using sqlite. Also make history grid's label for deleted filter clearer. Fixes #596
affected #: 2 files (15 bytes)
--- a/lib/galaxy/web/controllers/history.py Thu Jul 21 09:43:49 2011 -0400
+++ b/lib/galaxy/web/controllers/history.py Thu Jul 21 10:04:48 2011 -0400
@@ -56,7 +56,7 @@
grids.GridColumn( "Created", key="create_time", format=time_ago ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
# Columns that are valid for filtering but are not visible.
- grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+ grids.DeletedColumn( "Status", key="deleted", visible=False, filterable="advanced" )
]
columns.append(
grids.MulticolFilterColumn(
--- a/lib/galaxy/web/framework/helpers/grids.py Thu Jul 21 09:43:49 2011 -0400
+++ b/lib/galaxy/web/framework/helpers/grids.py Thu Jul 21 10:04:48 2011 -0400
@@ -631,7 +631,7 @@
if column_filter == "All":
pass
elif column_filter in [ "True", "False" ]:
- query = query.filter( self.model_class.deleted == column_filter )
+ query = query.filter( self.model_class.deleted == ( column_filter == "True" ) )
return query
class StateColumn( GridColumn ):
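
For context on the one-line fix above (a sketch assuming a SQLAlchemy model with a boolean deleted column, not the grid framework itself): back ends such as postgres happen to coerce the literal string "True" to a boolean, but sqlite stores booleans as 0/1 and a string comparison silently matches nothing. Coercing the incoming filter string to a Python boolean first lets SQLAlchemy emit a genuine boolean comparison that behaves the same on every back end.

def filter_deleted( query, model_class, column_filter ):
    # Sketch of the corrected filter; column_filter arrives from the grid
    # as the string "All", "True" or "False".
    if column_filter == "All":
        return query
    if column_filter in ( "True", "False" ):
        # Compare against a real Python boolean so the generated SQL is a
        # boolean comparison rather than a string comparison, which sqlite
        # (storing booleans as 0/1) would never match.
        return query.filter( model_class.deleted == ( column_filter == "True" ) )
    return query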
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
21 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/b68bbdc8dd14/
changeset: b68bbdc8dd14
user: greg
date: 2011-07-21 15:43:49
summary: Several tool shed enhancements and fixes:
1) Add the ability to create an entry for a tool in the tool_data_table_conf.xml file in real time. This allows metadata to be generated for tools that use this feature; an example of the generated entry is sketched below.
2) Add a method that configures settings for the mercurial ui, and use it to suppress all message output from mercurial (quiet mode).
3) Make sure that mercurial commits performed within exception blocks account for the user performing the commit.
affected #: 6 files (7.5 KB)
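
To illustrate item 1, the new add_tool_data_table_entry action (shown in repository.py below) appends a block like the following to tool_data_table_conf.xml just before the closing </tables> tag and then reloads the ToolDataTableManager. The table name here is taken from the 'tmap_indexes' example mentioned in the diff; the comment character, column names and .loc filename are hypothetical values a user might enter in the form:

  <!-- Location of tmap_indexes files -->
  <table name="tmap_indexes" comment_char="#">
    <columns>value, dbkey, name, path</columns>
    <file path="tool-data/tmap_indexes.loc" />
  </table>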
--- a/lib/galaxy/util/__init__.py Wed Jul 20 09:01:37 2011 -0400
+++ b/lib/galaxy/util/__init__.py Thu Jul 21 09:43:49 2011 -0400
@@ -127,7 +127,8 @@
'@' : '__at__',
'\n' : '__cn__',
'\r' : '__cr__',
- '\t' : '__tc__'
+ '\t' : '__tc__',
+ '#' : '__pd__'
}
def restore_text(text):
--- a/lib/galaxy/webapps/community/controllers/common.py Wed Jul 20 09:01:37 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/common.py Thu Jul 21 09:43:49 2011 -0400
@@ -81,7 +81,7 @@
status = 'done'
repository = get_repository( trans, id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
change_set = get_change_set( trans, repo, change_set_revision )
invalid_files = []
flush_needed = False
@@ -214,10 +214,6 @@
if invalid_files:
message = "Metadata cannot be defined for change set revision '%s'. Correct the following problems and reset metadata.<br/>" % str( change_set_revision )
for itc_tup in invalid_files:
- # Handle the special case where a tool depends on a missing xxx.loc file by telling
- # the user to upload xxx.loc.sample to the repository so that it can be copied to
- # ~/tool-data/xxx.loc. In this case, itc_tup[1] will be a message looking something like:
- # [Errno 2] No such file or directory: '/Users/gvk/central/tool-data/blast2go.loc'
tool_file = itc_tup[0]
exception_msg = itc_tup[1]
if exception_msg.find( 'No such file or directory' ) >= 0:
@@ -226,10 +222,28 @@
missing_file = missing_file_items[-1].rstrip( '\'' )
correction_msg = "This file refers to a missing file <b>%s</b>. " % str( missing_file )
if exception_msg.find( '.loc' ) >= 0:
+ # Handle the special case where a tool depends on a missing xxx.loc file by telling
+ # the user to upload xxx.loc.sample to the repository so that it can be copied to
+ # ~/tool-data/xxx.loc. In this case, exception_msg will look something like:
+ # [Errno 2] No such file or directory: '/Users/gvk/central/tool-data/blast2go.loc'
sample_loc_file = '%s.sample' % str( missing_file )
correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_loc_file
else:
correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % missing_file
+ elif exception_msg.find( 'Data table named' ) >= 0:
+ # Handle the special case where the tool requires an entry in the tool_data_table.conf file.
+ # In this case, exception_msg will look something like:
+ # Data table named 'tmap_indexes' is required by tool but not configured
+ exception_items = exception_msg.split()
+ name_attr = exception_items[3].lstrip( '\'' ).rstrip( '\'' )
+ message += "<b>%s</b> - This tool requires an entry in the tool_data_table_conf.xml file. " % tool_file
+ message += "Complete and <b>Save</b> the form below to resolve this issue.<br/>"
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='add_tool_data_table_entry',
+ name_attr=name_attr,
+ repository_id=id,
+ message=message,
+ status='error' ) )
else:
correction_msg = exception_msg
message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
@@ -257,12 +271,21 @@
if not ( os.path.exists( os.path.join( tool_data_path, loc_file ) ) or os.path.exists( os.path.join( tool_data_path, sample_loc_file ) ) ):
shutil.copy( os.path.abspath( filename ), os.path.join( tool_data_path, sample_loc_file ) )
shutil.copy( os.path.abspath( filename ), os.path.join( tool_data_path, loc_file ) )
+def get_configured_ui():
+ # Configure any desired ui settings.
+ _ui = ui.ui()
+ # The following will suppress all messages. This is
+ # the same as adding the following setting to the repo
+ # hgrc file's [ui] section:
+ # quiet = True
+ _ui.setconfig( 'ui', 'quiet', True )
+ return _ui
def get_user( trans, id ):
"""Get a user from the database"""
return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
def handle_email_alerts( trans, repository ):
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
smtp_server = trans.app.config.smtp_server
if smtp_server and repository.email_alerts:
# Send email alert to users that want them.
@@ -299,17 +322,19 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def update_for_browsing( repository, current_working_dir, commit_message='' ):
- # Make a copy of a repository's files for browsing.
+def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
+ # Make a copy of a repository's files for browsing, remove from disk all files that
+ # are not tracked, and commit all added, modified or removed files that have not yet
+ # been committed.
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
# The following will delete the disk copy of only the files in the repository.
#os.system( 'hg update -r null > /dev/null 2>&1' )
repo.ui.pushbuffer()
commands.status( repo.ui, repo, all=True )
status_and_file_names = repo.ui.popbuffer().strip().split( "\n" )
# status_and_file_names looks something like:
- # ['? MY_README_AGAIN', '? galaxy_tmap_tool/tmap-0.0.9.tar.gz', '? dna_filtering.py', 'C filtering.py', 'C filtering.xml']
+ # ['? README', '? tmap_tool/tmap-0.0.9.tar.gz', '? dna_filtering.py', 'C filtering.py', 'C filtering.xml']
# The codes used to show the status of files are:
# M = modified
# A = added
@@ -345,7 +370,7 @@
if not commit_message:
commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
repo.dirstate.write()
- repo.commit( text=commit_message )
+ repo.commit( user=trans.user.username, text=commit_message )
os.chdir( repo_dir )
os.system( 'hg update > /dev/null 2>&1' )
os.chdir( current_working_dir )
--- a/lib/galaxy/webapps/community/controllers/repository.py Wed Jul 20 09:01:37 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py Thu Jul 21 09:43:49 2011 -0400
@@ -312,7 +312,7 @@
if not os.path.exists( repository_path ):
os.makedirs( repository_path )
# Create the local repository
- repo = hg.repository( ui.ui(), repository_path, create=True )
+ repo = hg.repository( get_configured_ui(), repository_path, create=True )
# Add an entry in the hgweb.config file for the local repository
# This enables calls to repository.repo_path
self.__add_hgweb_config_entry( trans, repository, repository_path )
@@ -406,7 +406,7 @@
# push_ssl = False
# Since we support both http and https, we set push_ssl to False to override
# the default (which is True) in the mercurial api.
- repo = hg.repository( ui.ui(), path=repository.repo_path )
+ repo = hg.repository( get_configured_ui(), path=repository.repo_path )
fp = repo.opener( 'hgrc', 'wb' )
fp.write( '[paths]\n' )
fp.write( 'default = .\n' )
@@ -423,9 +423,10 @@
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
current_working_dir = os.getcwd()
- update_for_browsing( repository, current_working_dir, commit_message=commit_message )
+ # Update repository files for browsing.
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
repository=repository,
@@ -440,7 +441,7 @@
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
repository = get_repository( trans, id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) )
if params.get( 'select_files_to_delete_button', False ):
if selected_files_to_delete:
@@ -450,7 +451,7 @@
tip = repository.tip
for selected_file in selected_files_to_delete:
repo_file = os.path.abspath( selected_file )
- commands.remove( repo.ui, repo, repo_file )
+ commands.remove( repo.ui, repo, repo_file, force=True )
# Commit the change set.
if not commit_message:
commit_message = 'Deleted selected files'
@@ -461,12 +462,12 @@
# tool shed environment, it occasionally throws a "TypeError: array item must be char"
# exception. If this happens, we'll try the following.
repo.dirstate.write()
- repo.commit( text=commit_message )
+ repo.commit( user=trans.user.username, text=commit_message )
handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- update_for_browsing( repository, current_working_dir, commit_message=commit_message )
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
if tip != repository.tip:
message = "The selected files were deleted from the repository."
else:
@@ -495,7 +496,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
alerts = params.get( 'alerts', '' )
@@ -545,7 +546,7 @@
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
repo_name = util.restore_text( params.get( 'repo_name', repository.name ) )
description = util.restore_text( params.get( 'description', repository.description ) )
long_description = util.restore_text( params.get( 'long_description', repository.long_description ) )
@@ -673,7 +674,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
changesets = []
for changeset in repo.changelog:
ctx = repo.changectx( changeset )
@@ -701,7 +702,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
ctx = get_change_set( trans, repo, ctx_str )
if ctx is None:
message = "Repository does not include changeset revision '%s'." % str( ctx_str )
@@ -745,7 +746,7 @@
message='Select a repository to rate',
status='error' ) )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
if repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -815,6 +816,98 @@
message=message,
status=status ) )
@web.expose
+ def add_tool_data_table_entry( self, trans, name_attr, repository_id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ comment_char = util.restore_text( params.get( 'comment_char', '#' ) )
+ loc_filename = util.restore_text( params.get( 'loc_filename', '' ) )
+ repository = get_repository( trans, repository_id )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
+ column_fields = self.__get_column_fields( **kwd )
+ if params.get( 'add_field_button', False ):
+ # Add a field
+ field_index = len( column_fields ) + 1
+ field_tup = ( '%i_field_name' % field_index, '' )
+ column_fields.append( field_tup )
+ elif params.get( 'remove_button', False ):
+ # Delete a field - find the index of the field to be removed from the remove button label
+ index = int( kwd[ 'remove_button' ].split( ' ' )[2] ) - 1
+ tup_to_remove = column_fields[ index ]
+ column_fields.remove( tup_to_remove )
+ # Re-number field tups
+ new_column_fields = []
+ for field_index, old_field_tup in enumerate( column_fields ):
+ name = '%i_field_name' % ( field_index + 1 )
+ value = old_field_tup[1]
+ new_column_fields.append( ( name, value ) )
+ column_fields = new_column_fields
+ elif params.get( 'add_tool_data_table_entry_button', False ):
+ # Add an entry to the end of the tool_data_table_conf.xml file
+ tdt_config = "%s/tool_data_table_conf.xml" % trans.app.config.root
+ if os.path.exists( tdt_config ):
+ # Make a backup of the file since we're going to be changing it.
+ today = date.today()
+ backup_date = today.strftime( "%Y_%m_%d" )
+ tdt_config_copy = '%s/tool_data_table_conf.xml_%s_backup' % ( trans.app.config.root, backup_date )
+ shutil.copy( os.path.abspath( tdt_config ), os.path.abspath( tdt_config_copy ) )
+ # Generate the string of column names
+ column_names = ', '.join( [ column_tup[1] for column_tup in column_fields ] )
+ # Write each line of the tool_data_table_conf.xml file, except the last line to a temp file.
+ fh = tempfile.NamedTemporaryFile( 'wb' )
+ tmp_filename = fh.name
+ fh.close()
+ new_tdt_config = open( tmp_filename, 'wb' )
+ for i, line in enumerate( open( tdt_config, 'rb' ) ):
+ if line.startswith( '</tables>' ):
+ break
+ new_tdt_config.write( line )
+ new_tdt_config.write( ' <!-- Location of %s files -->\n' % name_attr )
+ new_tdt_config.write( ' <table name="%s" comment_char="%s">\n' % ( name_attr, comment_char ) )
+ new_tdt_config.write( ' <columns>%s</columns>\n' % column_names )
+ new_tdt_config.write( ' <file path="tool-data/%s" />\n' % loc_filename )
+ new_tdt_config.write( ' </table>\n' )
+ # Now write the last line of the file
+ new_tdt_config.write( '</tables>\n' )
+ new_tdt_config.close()
+ shutil.move( tmp_filename, os.path.abspath( tdt_config ) )
+ # Reload the tool_data_table_conf entries
+ trans.app.tool_data_tables = galaxy.tools.data.ToolDataTableManager( trans.app.config.tool_data_table_config_path )
+ message = "The new entry has been added to the tool_data_table_conf.xml file, so click the <b>Reset metadata</b> button below."
+ # TODO: what if ~/tool-data/<loc_filename> doesn't exist? We need to figure out how to
+ # force the user to upload its sample to the repository in order to generate metadata.
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=repository_id,
+ message=message,
+ status=status ) )
+ return trans.fill_template( '/webapps/community/repository/add_tool_data_table_entry.mako',
+ name_attr=name_attr,
+ repository=repository,
+ comment_char=comment_char,
+ loc_filename=loc_filename,
+ column_fields=column_fields,
+ message=message,
+ status=status )
+ def __get_column_fields( self, **kwd ):
+ '''
+ Return a list of ( name, value ) tuples for the user-entered form fields representing columns
+ in the location file.
+ '''
+ params = util.Params( kwd )
+ column_fields = []
+ index = 0
+ while True:
+ name = '%i_field_name' % ( index + 1 )
+ if kwd.has_key( name ):
+ value = util.restore_text( params.get( name, '' ) )
+ field_tup = ( name, value )
+ index += 1
+ column_fields.append( field_tup )
+ else:
+ break
+ return column_fields
+ @web.expose
def display_tool( self, trans, repository_id, tool_config, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
--- a/lib/galaxy/webapps/community/controllers/upload.py Wed Jul 20 09:01:37 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py Thu Jul 21 09:43:49 2011 -0400
@@ -27,7 +27,7 @@
repository_id = params.get( 'repository_id', '' )
repository = get_repository( trans, repository_id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) )
remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
@@ -87,7 +87,6 @@
# Move the uploaded file to the load_point within the repository hierarchy.
shutil.move( uploaded_file_name, full_path )
commands.add( repo.ui, repo, full_path )
- """
try:
commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
except Exception, e:
@@ -95,17 +94,15 @@
# tool shed environment, it occasionally throws a "TypeError: array item must be char"
# exception. If this happens, we'll try the following.
repo.dirstate.write()
- repo.commit( text=commit_message )
- """
+ repo.commit( user=trans.user.username, text=commit_message )
if full_path.endswith( '.loc.sample' ):
# Handle the special case where a xxx.loc.sample file is
# being uploaded by copying it to ~/tool-data/xxx.loc.
copy_sample_loc_file( trans, full_path )
handle_email_alerts( trans, repository )
if ok:
- # Update the repository files for browsing, a by-product of doing this
- # is eliminating unwanted files from the repository directory.
- update_for_browsing( repository, current_working_dir, commit_message=commit_message )
+ # Update the repository files for browsing.
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
if tip != repository.tip:
if ( isgzip or isbz2 ) and uncompress_file:
@@ -148,7 +145,7 @@
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message ):
# Upload a tar archive of files.
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
files_to_remove = []
ok, message = self.__check_archive( tar )
if not ok:
@@ -185,7 +182,7 @@
for repo_file in files_to_remove:
# Remove files in the repository (relative to the upload point)
# that are not in the uploaded archive.
- commands.remove( repo.ui, repo, repo_file )
+ commands.remove( repo.ui, repo, repo_file, force=True )
for filename_in_archive in filenames_in_archive:
commands.add( repo.ui, repo, filename_in_archive )
if filename_in_archive.endswith( '.loc.sample' ):
@@ -199,7 +196,7 @@
# tool shed environment, it occasionally throws a "TypeError: array item must be char"
# exception. If this happens, we'll try the following.
repo.dirstate.write()
- repo.commit( text=commit_message )
+ repo.commit( user=trans.user.username, text=commit_message )
handle_email_alerts( trans, repository )
return True, '', files_to_remove
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
--- a/templates/webapps/community/repository/create_repository.mako Wed Jul 20 09:01:37 2011 -0400
+++ b/templates/webapps/community/repository/create_repository.mako Thu Jul 21 09:43:49 2011 -0400
@@ -17,15 +17,15 @@
<div class="toolForm"><div class="toolFormTitle">Create Repository</div><div class="toolFormBody">
- <form name="create_repository_form" id="create_repository_form" action="${h.url_for( action='create_repository' )}" method="post" >
+ <form name="create_repository_form" id="create_repository_form" action="${h.url_for( controller='repository', action='create_repository' )}" method="post" ><div class="form-row"><label>Name:</label>
- <input name="name" type="textfield" value="${name}" size=40"/>
+ <input name="name" type="textfield" value="${name}" size="40"/><div style="clear: both"></div></div><div class="form-row"><label>Synopsis:</label>
- <input name="description" type="textfield" value="${description}" size=80"/>
+ <input name="description" type="textfield" value="${description}" size="80"/><div style="clear: both"></div></div><div class="form-row">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new changesets in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/ce8cc8feb6e0/
changeset: ce8cc8feb6e0
user: dannon
date: 2011-07-19 19:40:24
summary: Code cleanup.
affected #: 1 file (886 bytes)
--- a/lib/galaxy/web/controllers/history.py Tue Jul 19 11:27:16 2011 -0400
+++ b/lib/galaxy/web/controllers/history.py Tue Jul 19 13:40:24 2011 -0400
@@ -58,13 +58,12 @@
# Columns that are valid for filtering but are not visible.
grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
]
- columns.append(
- grids.MulticolFilterColumn(
- "search history names and tags",
- cols_to_filter=[ columns[0], columns[2] ],
+ columns.append(
+ grids.MulticolFilterColumn(
+ "search history names and tags",
+ cols_to_filter=[ columns[0], columns[2] ],
key="free-text-search", visible=False, filterable="standard" )
)
-
operations = [
grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
@@ -125,11 +124,11 @@
return trans.sa_session.query( self.model_class ).join( 'users_shared_with' )
def apply_query_filter( self, trans, query, **kwargs ):
return query.filter( model.HistoryUserShareAssociation.user == trans.user )
-
+
class HistoryAllPublishedGrid( grids.Grid ):
class NameURLColumn( grids.PublicURLColumn, NameColumn ):
pass
-
+
title = "Published Histories"
model_class = model.History
default_sort_key = "update_time"
@@ -138,15 +137,15 @@
columns = [
NameURLColumn( "Name", key="name", filterable="advanced" ),
grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.HistoryAnnotationAssociation, filterable="advanced" ),
- grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
+ grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
grids.CommunityRatingColumn( "Community Rating", key="rating" ),
grids.CommunityTagsColumn( "Community Tags", key="tags", model_tag_association_class=model.HistoryTagAssociation, filterable="advanced", grid_name="PublicHistoryListGrid" ),
grids.ReverseSortColumn( "Last Updated", key="update_time", format=time_ago )
]
- columns.append(
- grids.MulticolFilterColumn(
- "Search name, annotation, owner, and tags",
- cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
+ columns.append(
+ grids.MulticolFilterColumn(
+ "Search name, annotation, owner, and tags",
+ cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
key="free-text-search", visible=False, filterable="standard" )
)
operations = []
@@ -156,7 +155,7 @@
def apply_query_filter( self, trans, query, **kwargs ):
# A public history is published, has a slug, and is not deleted.
return query.filter( self.model_class.published == True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )
-
+
class HistoryController( BaseController, Sharable, UsesAnnotations, UsesItemRatings, UsesHistory ):
@web.expose
def index( self, trans ):
@@ -166,11 +165,11 @@
"""XML history list for functional tests"""
trans.response.set_content_type( 'text/xml' )
return trans.fill_template( "/history/list_as_xml.mako" )
-
+
stored_list_grid = HistoryListGrid()
shared_list_grid = SharedHistoryListGrid()
published_list_grid = HistoryAllPublishedGrid()
-
+
@web.expose
def list_published( self, trans, **kwargs ):
grid = self.published_list_grid( trans, **kwargs )
@@ -179,7 +178,7 @@
else:
# Render grid wrapped in panels
return trans.fill_template( "history/list_published.mako", grid=grid )
-
+
@web.expose
@web.require_login( "work with multiple histories" )
def list( self, trans, **kwargs ):
@@ -200,7 +199,7 @@
refresh_history = False
# Load the histories and ensure they all belong to the current user
histories = []
- for history_id in history_ids:
+ for history_id in history_ids:
history = self.get_history( trans, history_id )
if history:
# Ensure history is owned by current user
@@ -209,18 +208,18 @@
histories.append( history )
else:
log.warn( "Invalid history id '%r' passed to list", history_id )
- if histories:
+ if histories:
if operation == "switch":
status, message = self._list_switch( trans, histories )
- # Take action to update UI to reflect history switch. If
+ # Take action to update UI to reflect history switch. If
# grid is using panels, it is standalone and hence a redirect
# to root is needed; if grid is not using panels, it is nested
- # in the main Galaxy UI and refreshing the history frame
+ # in the main Galaxy UI and refreshing the history frame
# is sufficient.
use_panels = kwargs.get('use_panels', False) == 'True'
if use_panels:
return trans.response.send_redirect( url_for( "/" ) )
- else:
+ else:
trans.template_context['refresh_frames'] = ['history']
elif operation in ( "delete", "delete and remove datasets from disk" ):
if operation == "delete and remove datasets from disk":
@@ -338,7 +337,7 @@
trans.set_history( new_history )
# No message
return None, None
-
+
@web.expose
@web.require_login( "work with shared histories" )
def list_shared( self, trans, **kwargs ):
@@ -373,7 +372,7 @@
status = 'done'
# Render the list view
return self.shared_list_grid( trans, status=status, message=message, **kwargs )
-
+
@web.expose
def display_structured( self, trans, id=None ):
"""
@@ -444,7 +443,7 @@
items.sort( key=( lambda x: x[0].create_time ), reverse=True )
#
return trans.fill_template( "history/display_structured.mako", items=items )
-
+
@web.expose
def delete_current( self, trans ):
"""Delete just the active history -- this does not require a logged in user."""
@@ -456,25 +455,22 @@
trans.sa_session.add( history )
trans.sa_session.flush()
trans.log_event( "History id %d marked as deleted" % history.id )
- # Regardless of whether it was previously deleted, we make a new history active
+ # Regardless of whether it was previously deleted, we make a new history active
trans.new_history()
- return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
-
+ return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
+
@web.expose
@web.require_login( "rate items" )
@web.json
def rate_async( self, trans, id, rating ):
""" Rate a history asynchronously and return updated community data. """
-
history = self.get_history( trans, id, check_ownership=False, check_accessible=True )
if not history:
return trans.show_error_message( "The specified history does not exist." )
-
# Rate history.
history_rating = self.rate_item( trans.sa_session, trans.get_user(), history, rating )
-
return self.get_ave_item_rating_data( trans.sa_session, history )
-
+
@web.expose
def rename_async( self, trans, id=None, new_name=None ):
history = self.get_history( trans, id )
@@ -490,7 +486,7 @@
trans.sa_session.add( history )
trans.sa_session.flush()
return history.name
-
+
@web.expose
@web.require_login( "use Galaxy histories" )
def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
@@ -503,12 +499,11 @@
return new_annotation
@web.expose
- # TODO: Remove require_login when users are warned that, if they are not
+ # TODO: Remove require_login when users are warned that, if they are not
# logged in, this will remove their current history.
@web.require_login( "use Galaxy histories" )
def import_archive( self, trans, **kwargs ):
""" Import a history from a file archive. """
-
# Set archive source and type.
archive_file = kwargs.get( 'archive_file', None )
archive_url = kwargs.get( 'archive_url', None )
@@ -519,37 +514,34 @@
elif archive_url:
archive_source = archive_url
archive_type = 'url'
-
# If no source to create archive from, show form to upload archive or specify URL.
if not archive_source:
- return trans.show_form(
+ return trans.show_form(
web.FormBuilder( web.url_for(), "Import a History from an Archive", submit_text="Submit" ) \
.add_input( "text", "Archived History URL", "archive_url", value="", error=None )
# TODO: add support for importing via a file.
- #.add_input( "file", "Archived History File", "archive_file", value=None, error=None )
+ #.add_input( "file", "Archived History File", "archive_file", value=None, error=None )
)
-
# Run job to do import.
history_imp_tool = trans.app.toolbox.tools_by_id[ '__IMPORT_HISTORY__' ]
incoming = { '__ARCHIVE_SOURCE__' : archive_source, '__ARCHIVE_TYPE__' : archive_type }
history_imp_tool.execute( trans, incoming=incoming )
return trans.show_message( "Importing history from '%s'. \
This history will be visible when the import is complete" % archive_source )
-
- @web.expose
+
+ @web.expose
def export_archive( self, trans, id=None, gzip=True, include_hidden=False, include_deleted=False ):
""" Export a history to an archive. """
-
- #
+ #
# Convert options to booleans.
#
if isinstance( gzip, basestring ):
- gzip = ( gzip in [ 'True', 'true', 'T', 't' ] )
+ gzip = ( gzip in [ 'True', 'true', 'T', 't' ] )
if isinstance( include_hidden, basestring ):
include_hidden = ( include_hidden in [ 'True', 'true', 'T', 't' ] )
if isinstance( include_deleted, basestring ):
- include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] )
-
+ include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] )
+
#
# Get history to export.
#
@@ -559,10 +551,10 @@
# Use current history.
history = trans.history
id = trans.security.encode_id( history.id )
-
+
if not history:
return trans.show_error_message( "This history does not exist or you cannot export this history." )
-
+
#
# If history has already been exported and it has not changed since export, stream it.
#
@@ -585,40 +577,38 @@
elif jeha.job.state in [ model.Job.states.RUNNING, model.Job.states.QUEUED, model.Job.states.WAITING ]:
return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>" \
% ( { 'n' : history.name, 's' : url_for( action="export_archive", id=id, qualified=True ) } ) )
-
+
# Run job to do export.
history_exp_tool = trans.app.toolbox.tools_by_id[ '__EXPORT_HISTORY__' ]
- params = {
- 'history_to_export' : history,
- 'compress' : gzip,
- 'include_hidden' : include_hidden,
+ params = {
+ 'history_to_export' : history,
+ 'compress' : gzip,
+ 'include_hidden' : include_hidden,
'include_deleted' : include_deleted }
history_exp_tool.execute( trans, incoming = params, set_output_hid = True )
return trans.show_message( "Exporting History '%(n)s'. Use this link to download \
the archive or import it to another Galaxy server: \
<a href='%(u)s'>%(u)s</a>" \
% ( { 'n' : history.name, 'u' : url_for( action="export_archive", id=id, qualified=True ) } ) )
-
+
@web.expose
@web.json
@web.require_login( "get history name and link" )
def get_name_and_link_async( self, trans, id=None ):
""" Returns history's name and link. """
history = self.get_history( trans, id, False )
-
if self.create_item_slug( trans.sa_session, history ):
trans.sa_session.flush()
- return_dict = {
- "name" : history.name,
+ return_dict = {
+ "name" : history.name,
"link" : url_for( action="display_by_username_and_slug", username=history.user.username, slug=history.slug ) }
return return_dict
-
+
@web.expose
@web.require_login( "set history's accessible flag" )
def set_accessible_async( self, trans, id=None, accessible=False ):
""" Set history's importable attribute and slug. """
history = self.get_history( trans, id, True )
-
# Only set if importable value would change; this prevents a change in the update_time unless attribute really changed.
importable = accessible in ['True', 'true', 't', 'T'];
if history and history.importable != importable:
@@ -627,7 +617,6 @@
else:
history.importable = importable
trans.sa_session.flush()
-
return
@web.expose
@@ -638,7 +627,7 @@
history.slug = new_slug
trans.sa_session.flush()
return history.slug
-
+
@web.expose
def get_item_content_async( self, trans, id ):
""" Returns item content in HTML format. """
@@ -646,7 +635,7 @@
history = self.get_history( trans, id, False, True )
if history is None:
raise web.httpexceptions.HTTPNotFound()
-
+
# Get datasets.
datasets = self.get_history_datasets( trans, history )
# Get annotations.
@@ -654,7 +643,7 @@
for dataset in datasets:
dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
return trans.stream_template_mako( "/history/item_content.mako", item = history, item_data = datasets )
-
+
@web.expose
def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
"""Return autocomplete data for history names"""
@@ -666,7 +655,7 @@
for history in trans.sa_session.query( model.History ).filter_by( user=user ).filter( func.lower( model.History.name ) .like(q.lower() + "%") ):
ac_data = ac_data + history.name + "\n"
return ac_data
-
+
@web.expose
def imp( self, trans, id=None, confirm=False, **kwd ):
"""Import another user's history via a shared URL"""
@@ -682,7 +671,7 @@
referer_message = "<a href='%s'>return to the previous page</a>" % referer
else:
referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
-
+
# Do import.
if not id:
return trans.show_error_message( "You must specify a history you want to import.<br>You can %s." % referer_message, use_panels=True )
@@ -712,7 +701,7 @@
# Set imported history to be user's current history.
trans.set_history( new_history )
return trans.show_ok_message(
- message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
+ message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
% ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
elif not user_history or not user_history.datasets or confirm:
new_history = import_history.copy()
@@ -730,13 +719,13 @@
trans.sa_session.flush()
trans.set_history( new_history )
return trans.show_ok_message(
- message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
+ message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
% ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
return trans.show_warn_message( """
Warning! If you import this history, you will lose your current
history. <br>You can <a href="%s">continue and import this history</a> or %s.
""" % ( web.url_for( id=id, confirm=True, referer=trans.request.referer ), referer_message ), use_panels=True )
-
+
@web.expose
def view( self, trans, id=None, show_deleted=False ):
"""View a history. If a history is importable, then it is viewable by any user."""
@@ -757,11 +746,11 @@
history = history_to_view,
datasets = datasets,
show_deleted = show_deleted )
-
+
@web.expose
def display_by_username_and_slug( self, trans, username, slug ):
- """ Display history based on a username and slug. """
-
+ """ Display history based on a username and slug. """
+
# Get history.
session = trans.sa_session
user = session.query( model.User ).filter_by( username=username ).first()
@@ -770,14 +759,14 @@
raise web.httpexceptions.HTTPNotFound()
# Security check raises error if user cannot access history.
self.security_check( trans.get_user(), history, False, True)
-
+
# Get datasets.
datasets = self.get_history_datasets( trans, history )
# Get annotations.
history.annotation = self.get_item_annotation_str( trans.sa_session, history.user, history )
for dataset in datasets:
dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
-
+
# Get rating data.
user_item_rating = 0
if trans.get_user():
@@ -787,9 +776,9 @@
else:
user_item_rating = 0
ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, history )
- return trans.stream_template_mako( "history/display.mako", item = history, item_data = datasets,
+ return trans.stream_template_mako( "history/display.mako", item = history, item_data = datasets,
user_item_rating = user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )
-
+
@web.expose
@web.require_login( "share Galaxy histories" )
def sharing( self, trans, id=None, histories=[], **kwargs ):
@@ -804,7 +793,7 @@
histories = [ self.get_history( trans, history_id ) for history_id in ids ]
elif not histories:
histories = [ trans.history ]
-
+
# Do operation on histories.
for history in histories:
if 'make_accessible_via_link' in kwargs:
@@ -837,17 +826,17 @@
message = "History '%s' does not seem to be shared with user '%s'" % ( history.name, user.email )
return trans.fill_template( '/sharing_base.mako', item=history,
message=message, status='error' )
-
-
+
+
# Legacy issue: histories made accessible before recent updates may not have a slug. Create slug for any histories that need them.
for history in histories:
if history.importable and not history.slug:
self._make_item_accessible( trans.sa_session, history )
-
+
session.flush()
-
+
return trans.fill_template( "/sharing_base.mako", item=history )
-
+
@web.expose
@web.require_login( "share histories with other users" )
def share( self, trans, id=None, email="", **kwd ):
@@ -890,11 +879,11 @@
send_to_err = "The histories you are sharing do not contain any datasets that can be accessed by the users with which you are sharing."
return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
if can_change or cannot_change:
- return trans.fill_template( "/history/share.mako",
- histories=histories,
- email=email,
- send_to_err=send_to_err,
- can_change=can_change,
+ return trans.fill_template( "/history/share.mako",
+ histories=histories,
+ email=email,
+ send_to_err=send_to_err,
+ can_change=can_change,
cannot_change=cannot_change,
no_change_needed=unique_no_change_needed )
if no_change_needed:
@@ -903,11 +892,11 @@
# User seems to be sharing an empty history
send_to_err = "You cannot share an empty history. "
return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
-
+
@web.expose
@web.require_login( "share restricted histories with other users" )
def share_restricted( self, trans, id=None, email="", **kwd ):
- if 'action' in kwd:
+ if 'action' in kwd:
action = kwd[ 'action' ]
else:
err_msg = "Select an action. "
@@ -938,10 +927,10 @@
# The action here is either 'public' or 'private', so we'll continue to populate the
# histories_for_sharing dictionary from the can_change dictionary.
for send_to_user, history_dict in can_change.items():
- for history in history_dict:
+ for history in history_dict:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -954,7 +943,7 @@
# The user with which we are sharing the history does not have access permission on the current dataset
if trans.app.security_agent.can_manage_dataset( user_roles, hda.dataset ) and not hda.dataset.library_associations:
# The current user has authority to change permissions on the current dataset because
- # they have permission to manage permissions on the dataset and the dataset is not associated
+ # they have permission to manage permissions on the dataset and the dataset is not associated
# with a library.
if action == "private":
trans.app.security_agent.privately_share_dataset( hda.dataset, users=[ user, send_to_user ] )
@@ -986,7 +975,7 @@
send_to_user = trans.sa_session.query( trans.app.model.User ) \
.filter( and_( trans.app.model.User.table.c.email==email_address,
trans.app.model.User.table.c.deleted==False ) ) \
- .first()
+ .first()
if send_to_user:
send_to_users.append( send_to_user )
else:
@@ -1004,7 +993,7 @@
for history in history_dict:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -1019,7 +1008,7 @@
# The user may be attempting to share histories whose datasets cannot all be accessed by other users.
# If this is the case, the user sharing the histories can:
# 1) action=='public': choose to make the datasets public if he is permitted to do so
- # 2) action=='private': automatically create a new "sharing role" allowing protected
+ # 2) action=='private': automatically create a new "sharing role" allowing protected
# datasets to be accessed only by the desired users
# This method will populate the can_change, cannot_change and no_change_needed dictionaries, which
# are used for either displaying to the user, letting them make 1 of the choices above, or sharing
@@ -1036,7 +1025,7 @@
for send_to_user in send_to_users:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -1125,7 +1114,7 @@
if send_to_err:
msg += send_to_err
return self.sharing( trans, histories=shared_histories, msg=msg )
-
+
@web.expose
@web.require_login( "rename histories" )
def rename( self, trans, id=None, name=None, **kwd ):
@@ -1164,7 +1153,7 @@
else:
change_msg = change_msg + "<p>History: "+cur_names[i]+" does not appear to belong to you.</p>"
return trans.show_message( "<p>%s" % change_msg, refresh_frames=['history'] )
-
+
@web.expose
@web.require_login( "clone shared Galaxy history" )
def clone( self, trans, id=None, **kwd ):
@@ -1207,13 +1196,11 @@
else:
msg = '%d cloned histories are now included in your previously stored histories.' % len( histories )
return trans.show_ok_message( msg )
-
+
@web.expose
@web.require_login( "switch to a history" )
def switch_to_history( self, trans, hist_id=None ):
decoded_id = trans.security.decode_id(hist_id)
hist = trans.sa_session.query( trans.app.model.History ).get( decoded_id )
-
trans.set_history( hist )
return trans.response.send_redirect( url_for( "/" ) )
-
http://bitbucket.org/galaxy/galaxy-central/changeset/c875100ea5ed/
changeset: c875100ea5ed
user: dannon
date: 2011-07-20 15:01:37
summary: Merge
affected #: 30 files (3.4 KB)
--- a/README.txt Tue Jul 19 13:40:24 2011 -0400
+++ b/README.txt Wed Jul 20 09:01:37 2011 -0400
@@ -28,4 +28,4 @@
Not all dependencies are included for the tools provided in the sample
tool_conf.xml. A full list of external dependencies is available at:
-http://bitbucket.org/galaxy/galaxy-central/wiki/ToolDependencies
+http://wiki.g2.bx.psu.edu/Admin/Tools/Tool%20Dependencies
--- a/dist-eggs.ini Tue Jul 19 13:40:24 2011 -0400
+++ b/dist-eggs.ini Wed Jul 20 09:01:37 2011 -0400
@@ -3,7 +3,7 @@
; eggs.g2.bx.psu.edu) Probably only useful to Galaxy developers at
; Penn State. This file is used by scripts/dist-scramble.py
;
-; More information: http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Eggs
+; More information: http://wiki.g2.bx.psu.edu/Admin/Config/Eggs
;
[hosts]
--- a/eggs.ini Tue Jul 19 13:40:24 2011 -0400
+++ b/eggs.ini Wed Jul 20 09:01:37 2011 -0400
@@ -3,7 +3,7 @@
;
; This file is version controlled and should not be edited by hand!
; For more information, see:
-; http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Eggs
+; http://wiki.g2.bx.psu.edu/Admin/Config/Eggs
;
[general]
--- a/lib/galaxy/jobs/runners/pbs.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/jobs/runners/pbs.py Wed Jul 20 09:01:37 2011 -0400
@@ -17,7 +17,7 @@
configured properly. Galaxy's "scramble" system should make this installation
simple, please follow the instructions found at:
- http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster
+ http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster
Additional errors may follow:
%s
--- a/lib/galaxy/jobs/runners/sge.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/jobs/runners/sge.py Wed Jul 20 09:01:37 2011 -0400
@@ -14,7 +14,7 @@
"scramble" system should make this installation simple, please follow the
instructions found at:
- http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster
+ http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster
Additional errors may follow:
%s
--- a/lib/galaxy/web/form_builder.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/web/form_builder.py Wed Jul 20 09:01:37 2011 -0400
@@ -658,7 +658,7 @@
self.name = name
self.ldda = value
self.trans = trans
- def get_html( self, disabled=False ):
+ def get_html( self, prefix="", disabled=False ):
if not self.ldda:
ldda = ""
text = "Choose a library dataset"
@@ -666,7 +666,7 @@
ldda = self.trans.security.encode_id(self.ldda.id)
text = self.ldda.name
return '<a href="javascript:void(0);" class="add-librarydataset">%s</a> \
- <input type="hidden" name="%s" value="%s">' % ( text, self.name, escape( str(ldda), quote=True ) )
+ <input type="hidden" name="%s%s" value="%s">' % ( text, prefix, self.name, escape( str(ldda), quote=True ) )
def get_display_text(self):
if self.ldda:
--- a/lib/galaxy/webapps/community/config.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/community/config.py Wed Jul 20 09:01:37 2011 -0400
@@ -64,7 +64,7 @@
self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
--- a/lib/galaxy/webapps/community/controllers/common.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/common.py Wed Jul 20 09:01:37 2011 -0400
@@ -230,6 +230,8 @@
correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_loc_file
else:
correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % missing_file
+ else:
+ correction_msg = exception_msg
message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
status = 'error'
elif flush_needed:
@@ -297,7 +299,7 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def update_for_browsing( repository, current_working_dir ):
+def update_for_browsing( repository, current_working_dir, commit_message='' ):
# Make a copy of a repository's files for browsing.
repo_dir = repository.repo_path
repo = hg.repository( ui.ui(), repo_dir )
@@ -316,12 +318,15 @@
# ! = deleted, but still tracked
# ? = not tracked
# I = ignored
- # We'll remove all files that are not tracked or ignored.
files_to_remove_from_disk = []
+ files_to_commit = []
for status_and_file_name in status_and_file_names:
if status_and_file_name.startswith( '?' ) or status_and_file_name.startswith( 'I' ):
files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
+ files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
for full_path in files_to_remove_from_disk:
+ # We'll remove all files that are not tracked or ignored.
if os.path.isdir( full_path ):
try:
os.rmdir( full_path )
@@ -336,6 +341,11 @@
except OSError, e:
# The directory is not empty
pass
+ if files_to_commit:
+ if not commit_message:
+ commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
os.chdir( repo_dir )
os.system( 'hg update > /dev/null 2>&1' )
os.chdir( current_working_dir )
--- a/lib/galaxy/webapps/community/controllers/repository.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py Wed Jul 20 09:01:37 2011 -0400
@@ -425,7 +425,7 @@
repository = get_repository( trans, id )
repo = hg.repository( ui.ui(), repository.repo_path )
current_working_dir = os.getcwd()
- update_for_browsing( repository, current_working_dir )
+ update_for_browsing( repository, current_working_dir, commit_message=commit_message )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
repository=repository,
@@ -454,11 +454,17 @@
# Commit the change set.
if not commit_message:
commit_message = 'Deleted selected files'
- # Commit the changes.
- commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+ try:
+ commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- update_for_browsing( repository, current_working_dir )
+ update_for_browsing( repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
repo = hg.repository( ui.ui(), repo_dir )
if tip != repository.tip:
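
The repository controller now guards commands.commit() with the fallback described in the inline comment (the occasional "TypeError: array item must be char" seen only on the test/production tool shed). Pulled out into a standalone helper, the pattern might look like the sketch below; the function name is invented for illustration, while the Mercurial calls themselves are exactly the ones appearing in the diff.

from mercurial import commands

def commit_with_fallback(repo, repo_dir, username, commit_message):
    """Hypothetical wrapper around the commit-with-fallback pattern in the diff.

    commands.commit() usually works, but when it raises the TypeError noted
    above, the changeset is committed through the lower-level repository API.
    """
    try:
        commands.commit(repo.ui, repo, repo_dir, user=username, message=commit_message)
    except Exception:
        # Fall back to the lower-level API, as the changeset above does.
        repo.dirstate.write()
        repo.commit(text=commit_message)

# Example usage (mirrors select_files_to_delete):
#   from mercurial import hg, ui
#   repo = hg.repository(ui.ui(), repository.repo_path)
#   commit_with_fallback(repo, repository.repo_path, trans.user.username, 'Deleted selected files')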
--- a/lib/galaxy/webapps/community/controllers/upload.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py Wed Jul 20 09:01:37 2011 -0400
@@ -87,7 +87,16 @@
# Move the uploaded file to the load_point within the repository hierarchy.
shutil.move( uploaded_file_name, full_path )
commands.add( repo.ui, repo, full_path )
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ """
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
+ """
if full_path.endswith( '.loc.sample' ):
# Handle the special case where a xxx.loc.sample file is
# being uploaded by copying it to ~/tool-data/xxx.loc.
@@ -96,7 +105,7 @@
if ok:
# Update the repository files for browsing, a by-product of doing this
# is eliminating unwanted files from the repository directory.
- update_for_browsing( repository, current_working_dir )
+ update_for_browsing( repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
if tip != repository.tip:
if ( isgzip or isbz2 ) and uncompress_file:
@@ -183,8 +192,14 @@
# Handle the special case where a xxx.loc.sample file is
# being uploaded by copying it to ~/tool-data/xxx.loc.
copy_sample_loc_file( trans, filename_in_archive )
- # Commit the changes.
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( text=commit_message )
handle_email_alerts( trans, repository )
return True, '', files_to_remove
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
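
Uploads of xxx.loc.sample files get the special handling mentioned in the comments: the sample is copied into the server's tool-data directory under the corresponding xxx.loc name. The copy_sample_loc_file() helper itself is not part of this diff, so the sketch below is only a guess at the essential step; the target directory and the do-not-overwrite behaviour are assumptions.

import os
import shutil

def copy_sample_loc_to_tool_data(sample_path, tool_data_path='tool-data'):
    """Hypothetical sketch: copy .../foo.loc.sample to tool-data/foo.loc.

    Assumes an existing, possibly hand-edited, .loc file should not be
    clobbered; the real helper may behave differently.
    """
    if not sample_path.endswith('.loc.sample'):
        return None
    loc_name = os.path.basename(sample_path)[:-len('.sample')]  # foo.loc
    target = os.path.join(tool_data_path, loc_name)
    if not os.path.exists(target):
        shutil.copy(sample_path, target)
    return target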
--- a/lib/galaxy/webapps/demo_sequencer/config.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/demo_sequencer/config.py Wed Jul 20 09:01:37 2011 -0400
@@ -49,7 +49,7 @@
self.smtp_server = kwargs.get( 'smtp_server', None )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
--- a/lib/galaxy/webapps/reports/config.py Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/reports/config.py Wed Jul 20 09:01:37 2011 -0400
@@ -33,7 +33,7 @@
self.sendmail_path = kwargs.get('sendmail_path',"/usr/sbin/sendmail")
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
--- a/static/welcome.rst Tue Jul 19 13:40:24 2011 -0400
+++ b/static/welcome.rst Wed Jul 20 09:01:37 2011 -0400
@@ -30,7 +30,7 @@
.. __: /static/help.html
.. __: http://www.bx.psu.edu/cgi-bin/trac.cgi
-.. __: http://bitbucket.org/galaxy/galaxy-central/wiki/GalaxyTeam
+.. __: http://wiki.g2.bx.psu.edu/Galaxy%20Team
.. __: mailto:galaxy@bx.psu.edu
Version: <b>2.1</b> Revision: <b>$Rev$</b>
--- a/templates/webapps/community/base_panels.mako Tue Jul 19 13:40:24 2011 -0400
+++ b/templates/webapps/community/base_panels.mako Wed Jul 20 09:01:37 2011 -0400
@@ -34,9 +34,9 @@
<div class="submenu"><ul><li><a href="${app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" )}">Email comments, bug reports, or suggestions</a></li>
- <li><a target="_blank" href="${app.config.get( "wiki_url", "http://bitbucket.org/galaxy/galaxy-central/wiki" )}">Galaxy Wiki</a></li>
+ <li><a target="_blank" href="${app.config.get( "wiki_url", "http://wiki.g2.bx.psu.edu/" )}">Galaxy Wiki</a></li><li><a target="_blank" href="${app.config.get( "screencasts_url", "http://galaxycast.org" )}">Video tutorials (screencasts)</a></li>
- <li><a target="_blank" href="${app.config.get( "citation_url", "http://bitbucket.org/galaxy/galaxy-central/wiki/Citations" )}">How to Cite Galaxy</a></li>
+ <li><a target="_blank" href="${app.config.get( "citation_url", "http://wiki.g2.bx.psu.edu/Citing%20Galaxy" )}">How to Cite Galaxy</a></li></ul></div></td>
--- a/templates/webapps/galaxy/base_panels.mako Tue Jul 19 13:40:24 2011 -0400
+++ b/templates/webapps/galaxy/base_panels.mako Wed Jul 20 09:01:37 2011 -0400
@@ -107,9 +107,9 @@
<%
menu_options = [
['Email comments, bug reports, or suggestions', app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" ) ],
- ['Galaxy Wiki', app.config.get( "wiki_url", "http://bitbucket.org/galaxy/galaxy-central/wiki" ), "_blank" ],
+ ['Galaxy Wiki', app.config.get( "wiki_url", "http://wiki.g2.bx.psu.edu/" ), "_blank" ],
['Video tutorials (screencasts)', app.config.get( "screencasts_url", "http://galaxycast.org" ), "_blank" ],
- ['How to Cite Galaxy', app.config.get( "screencasts_url", "http://bitbucket.org/galaxy/galaxy-central/wiki/Citations" ), "_blank" ]
+ ['How to Cite Galaxy', app.config.get( "screencasts_url", "http://wiki.g2.bx.psu.edu/Citing%20Galaxy" ), "_blank" ]
]
tab( "help", "Help", None, menu_options=menu_options)
%>
--- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Tue Jul 19 13:40:24 2011 -0400
+++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Wed Jul 20 09:01:37 2011 -0400
@@ -13,8 +13,8 @@
<h1>rgManQQtest1</h1><table>
-<tr><td><a href="Allelep_manhattan.png"><img src="Allelep_manhattan.png" alt="Allelep_manhattan.png hspace="10" width="400"><br>(Click to download image Allelep_manhattan.png)</a></td></tr>
-<tr><td><a href="Allelep_qqplot.png"><img src="Allelep_qqplot.png" alt="Allelep_qqplot.png hspace="10" width="400"><br>(Click to download image Allelep_qqplot.png)</a></td></tr>
+<tr><td><a href="Allelep_manhattan.png"><img src="Allelep_manhattan.png" title="Allelep_manhattan.png hspace="10" width="400"><br>(Click to download image Allelep_manhattan.png)</a></td></tr>
+<tr><td><a href="Allelep_qqplot.png"><img src="Allelep_qqplot.png" title="Allelep_qqplot.png hspace="10" width="400"><br>(Click to download image Allelep_qqplot.png)</a></td></tr><tr><td><a href="rgManQQtest1.R">rgManQQtest1.R</a></td></tr><tr><td><a href="rgManQQtest1.R.log">rgManQQtest1.R.log</a></td></tr></table>
@@ -35,7 +35,7 @@
- round_any
+ rename, round_any
@@ -43,11 +43,11 @@
Loading required package: proto
-[1] "### 101 values read from /tmp/rgManQQtemplYC5wa read - now running plots"
+[1] "### 101 values read from /data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat read - now running plots"
[1] "## qqplot on Allelep done"
-[1] "## manhattan on Allelep starting 1 2 3"
+[1] "## manhattan on Allelep starting 2 3 8"
[1] "## manhattan plot on Allelep done"
@@ -62,7 +62,7 @@
# http://StephenTurner.us/
# http://GettingGeneticsDone.blogspot.com/
-# Last updated: Tuesday, December 22, 2009
+# Last updated: 19 July 2011 by Ross Lazarus
# R code for making manhattan plots and QQ plots from plink output files.
# With GWAS data this can take a lot of memory. Recommended for use on
# 64bit machines only, for now.
@@ -72,30 +72,30 @@
library(ggplot2)
coloursTouse = c('firebrick','darkblue','goldenrod','darkgreen')
-# not too fugly but need a colour expert please...
+# not too ugly but need a colour expert please...
-manhattan = function(chrom=NULL,offset=NULL,pvals=NULL, title=NULL, max.y="max",
- suggestiveline=0, genomewide=T, size.x.labels=9, size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
-
+DrawManhattan = function(pvals=Null,chrom=Null,offset=Null,title=NULL, max.y="max",suggestiveline=0, genomewide=T, size.x.labels=9,
+ size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!")
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- d=data.frame(CHR=chrom,BP=offset,P=pvals)
-
- #limit to only chrs 1-23?
- d=d[d$CHR %in% 1:23, ]
-
+ offset = as.integer(offset)
+ pvals = as.double(pvals)
+ chro = as.integer(chrom) # already dealt with X and friends?
+ d=data.frame(CHR=chro,BP=offset,P=pvals)
+ #limit to only chrs 1-22, x=23,y=24,Mt=25?
+ d=d[d$CHR %in% 1:25, ]
if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) {
- d=na.omit(d)
+ #d=na.omit(d)
d=d[d$P>0 & d$P<=1, ]
- d$logp = -log10(d$P)
-
+ d$logp = as.double(-log10(d$P))
d$pos=NA
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = sort(chrlist) # returns lexical ordering
nchr = length(chrlist) # may be any number?
if (nchr >= 2) {
for (x in c(1:nchr)) {
@@ -107,7 +107,11 @@
lastchr = chrlist[x-1] # previous whatever the list
lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1)
d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase
+ if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
+ cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
+ }
tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+
}
ticklim=c(min(d$pos),max(d$pos))
xlabs = chrlist
@@ -129,8 +133,6 @@
if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y
maxy = max(maxy,1.1*genomewideline)
- # if (maxy<8) maxy=8
- # only makes sense if genome wide is assumed - we could have a fine mapping region?
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ]
if (nchr >= 2) {
manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
@@ -149,9 +151,6 @@
axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
axis.ticks=theme_segment(colour=NA)
)
- #manplot = manplot + opts(panel.grid.y.minor=theme_blank(),panel.grid.y.major=theme_blank())
- #manplot = manplot + opts(panel.grid.major=theme_blank())
-
if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3))
if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red")
manplot
@@ -178,16 +177,24 @@
if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank())
qq
}
-rgqqMan = function(infile="/tmp/rgManQQtemplYC5wa",chromcolumn=1, offsetcolumn=2, pvalscolumns=c(3),
+
+rgqqMan = function(infile="/data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
title="rgManQQtest1",grey=0) {
rawd = read.table(infile,head=T,sep='\t')
dn = names(rawd)
cc = dn[chromcolumn]
oc = dn[offsetcolumn]
-nams = c(cc,oc)
+rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case
+rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh
+rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T)
+nams = c(cc,oc) # for sorting
plen = length(rawd[,1])
-doreorder=1
print(paste('###',plen,'values read from',infile,'read - now running plots',sep=' '))
+rawd = rawd[do.call(order,rawd[nams]),]
+# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
+# in case not yet ordered
if (plen > 0) {
for (pvalscolumn in pvalscolumns) {
if (pvalscolumn > 0)
@@ -199,14 +206,8 @@
ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
- if (doreorder) {
- rawd = rawd[do.call(order,rawd[nams]),]
- # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
- # in case not yet ordered
- doreorder = 0
- }
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
- mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
@@ -227,6 +228,6 @@
</pre>
-<h3><a href="http://rgenetics.org">Rgenetics</a> tool rgManQQ.py run at 07/11/2010 20:04:20</h3>
+<b><a href="http://rgenetics.org">Galaxy Rgenetics</a> tool output rgManQQ.py run at 20/07/2011 13:29:43</b><br/></div></body></html>
--- a/tools/data_source/microbial_import.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/data_source/microbial_import.xml Wed Jul 20 09:01:37 2011 -0400
@@ -109,7 +109,7 @@
**Note:** Having trouble locating your organism? Click here_ for a list of available species and their location.
-.. _here: http://bitbucket.org/galaxy/galaxy-central/wiki/Microbes
+.. _here: http://wiki.g2.bx.psu.edu/Main/Data%20Libraries/Microbes
</help></tool>
--- a/tools/new_operations/basecoverage.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/basecoverage.xml Wed Jul 20 09:01:37 2011 -0400
@@ -34,7 +34,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
</help>
--- a/tools/new_operations/cluster.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/cluster.xml Wed Jul 20 09:01:37 2011 -0400
@@ -67,7 +67,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/complement.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/complement.xml Wed Jul 20 09:01:37 2011 -0400
@@ -43,7 +43,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/concat.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/concat.xml Wed Jul 20 09:01:37 2011 -0400
@@ -41,7 +41,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/coverage.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/coverage.xml Wed Jul 20 09:01:37 2011 -0400
@@ -44,7 +44,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/intersect.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/intersect.xml Wed Jul 20 09:01:37 2011 -0400
@@ -117,7 +117,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/join.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/join.xml Wed Jul 20 09:01:37 2011 -0400
@@ -78,7 +78,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/merge.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/merge.xml Wed Jul 20 09:01:37 2011 -0400
@@ -44,7 +44,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/new_operations/subtract.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/new_operations/subtract.xml Wed Jul 20 09:01:37 2011 -0400
@@ -98,7 +98,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
--- a/tools/next_gen_conversion/fastq_gen_conv.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/next_gen_conversion/fastq_gen_conv.xml Wed Jul 20 09:01:37 2011 -0400
@@ -75,7 +75,7 @@
A good description of fastq datasets can be found `here`__, while a description of Galaxy's fastq "logic" can be found `here`__. Because ranges of quality values within different types of fastq datasets overlap, it is very difficult to detect them automatically. This tool supports conversion of two commonly found types (Solexa/Illumina 1.0 and Illumina 1.3+) into fastq Sanger.
.. __: http://en.wikipedia.org/wiki/FASTQ_format
- .. __: http://bitbucket.org/galaxy/galaxy-central/wiki/NGS
+ .. __: http://wiki.g2.bx.psu.edu/Admin/NGS%20Local%20Setup
.. class:: warningmark
--- a/tools/rgenetics/rgManQQ.py Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/rgenetics/rgManQQ.py Wed Jul 20 09:01:37 2011 -0400
@@ -1,5 +1,9 @@
#!/usr/local/bin/python
-
+# updated july 20 to fix sort order - R unique() sorts into strict collating order
+# so need to sort after unique to revert to lexicographic order for x axis on Manhattan
+# rgmanqq updated july 19 to deal with x,y and mt
+# lots of fixes
+# ross lazarus
import sys,math,shutil,subprocess,os,time,tempfile,string
from os.path import abspath
from rgutils import timenow, RRun, galhtmlprefix, galhtmlpostfix, galhtmlattr
@@ -18,7 +22,7 @@
# http://StephenTurner.us/
# http://GettingGeneticsDone.blogspot.com/
-# Last updated: Tuesday, December 22, 2009
+# Last updated: 19 July 2011 by Ross Lazarus
# R code for making manhattan plots and QQ plots from plink output files.
# With GWAS data this can take a lot of memory. Recommended for use on
# 64bit machines only, for now.
@@ -28,30 +32,30 @@
library(ggplot2)
coloursTouse = c('firebrick','darkblue','goldenrod','darkgreen')
-# not too fugly but need a colour expert please...
+# not too ugly but need a colour expert please...
-manhattan = function(chrom=NULL,offset=NULL,pvals=NULL, title=NULL, max.y="max",
- suggestiveline=0, genomewide=T, size.x.labels=9, size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
-
+DrawManhattan = function(pvals=Null,chrom=Null,offset=Null,title=NULL, max.y="max",suggestiveline=0, genomewide=T, size.x.labels=9,
+ size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!")
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- d=data.frame(CHR=chrom,BP=offset,P=pvals)
-
- #limit to only chrs 1-23?
- d=d[d$CHR %in% 1:23, ]
-
+ offset = as.integer(offset)
+ pvals = as.double(pvals)
+ chro = as.integer(chrom) # already dealt with X and friends?
+ d=data.frame(CHR=chro,BP=offset,P=pvals)
+ #limit to only chrs 1-22, x=23,y=24,Mt=25?
+ d=d[d$CHR %in% 1:25, ]
if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) {
- d=na.omit(d)
+ #d=na.omit(d)
d=d[d$P>0 & d$P<=1, ]
- d$logp = -log10(d$P)
-
+ d$logp = as.double(-log10(d$P))
d$pos=NA
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = sort(chrlist) # returns lexical ordering
nchr = length(chrlist) # may be any number?
if (nchr >= 2) {
for (x in c(1:nchr)) {
@@ -63,7 +67,11 @@
lastchr = chrlist[x-1] # previous whatever the list
lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1)
d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase
+ if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
+ cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
+ }
tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+
}
ticklim=c(min(d$pos),max(d$pos))
xlabs = chrlist
@@ -85,8 +93,6 @@
if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y
maxy = max(maxy,1.1*genomewideline)
- # if (maxy<8) maxy=8
- # only makes sense if genome wide is assumed - we could have a fine mapping region?
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ]
if (nchr >= 2) {
manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
@@ -105,9 +111,6 @@
axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
axis.ticks=theme_segment(colour=NA)
)
- #manplot = manplot + opts(panel.grid.y.minor=theme_blank(),panel.grid.y.major=theme_blank())
- #manplot = manplot + opts(panel.grid.major=theme_blank())
-
if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3))
if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red")
manplot
@@ -134,21 +137,29 @@
if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank())
qq
}
+
"""
# we need another string to avoid confusion over string substitutions with %in%
# instantiate rcode2 string with infile,chromcol,offsetcol,pvalscols,title before saving and running
-rcode2 = """rgqqMan = function(infile="%s",chromcolumn=%s, offsetcolumn=%s, pvalscolumns=%s,
+rcode2 = """rgqqMan = function(infile="%s",chromcolumn=%d, offsetcolumn=%d, pvalscolumns=c(%s),
title="%s",grey=%d) {
rawd = read.table(infile,head=T,sep='\\t')
dn = names(rawd)
cc = dn[chromcolumn]
oc = dn[offsetcolumn]
-nams = c(cc,oc)
+rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case
+rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh
+rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T)
+nams = c(cc,oc) # for sorting
plen = length(rawd[,1])
-doreorder=1
print(paste('###',plen,'values read from',infile,'read - now running plots',sep=' '))
+rawd = rawd[do.call(order,rawd[nams]),]
+# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
+# in case not yet ordered
if (plen > 0) {
for (pvalscolumn in pvalscolumns) {
if (pvalscolumn > 0)
@@ -160,14 +171,8 @@
ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
- if (doreorder) {
- rawd = rawd[do.call(order,rawd[nams]),]
- # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
- # in case not yet ordered
- doreorder = 0
- }
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
- mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
@@ -198,50 +203,13 @@
this can be called externally, I guess...for QC eg?
"""
if debug:
- print 'doManQQ',input_fname,chrom_col,offset_col,pval_cols,title,grey,ctitle,outdir
- ffd,filtered_fname = tempfile.mkstemp(prefix='rgManQQtemp')
- f = open(filtered_fname,'w')
- inf = open(input_fname,'r')
- ohead = inf.readline().strip().split('\t') # see if we have a header
- inf.seek(0) # rewind
- newhead = ['pval%d' % (x+1) for x in pval_cols]
- newhead.insert(0,'Offset')
- newhead.insert(0,'Chrom')
- havehead = 0
- wewant = [chrom_col,offset_col]
- wewant += pval_cols
- try:
- allnums = ['%d' % x for x in ohead] # this should barf if non numerics == header row?
- f.write('\t'.join(newhead)) # for R to read
- f.write('\n')
- except:
- havehead = 1
- newhead = [ohead[chrom_col],ohead[offset_col]]
- newhead += [ohead[x] for x in pval_cols]
- f.write('\t'.join(newhead)) # use the original head
- f.write('\n')
- for i,row in enumerate(inf):
- if i == 0 and havehead:
- continue # ignore header
- sr = row.strip().split('\t')
- if len(sr) > 1:
- if sr[chrom_col].lower().find('chr') <> -1:
- sr[chrom_col] = sr[chrom_col][3:]
- newr = [sr[x] for x in wewant] # grab cols we need
- s = '\t'.join(newr)
- f.write(s)
- f.write('\n')
- f.close()
- pvc = [x+3 for x in range(len(pval_cols))] # 2 for offset and chrom, 1 for r offset start
- pvc = 'c(%s)' % (','.join(map(str,pvc)))
- rcmd = '%s%s' % (rcode,rcode2 % (filtered_fname,'1','2',pvc,title,grey))
+ print 'doManQQ',input_fname,chrom_col,offset_col,pval_cols,title,grey,ctitle,outdir
+ rcmd = '%s%s' % (rcode,rcode2 % (input_fname,chrom_col,offset_col,pval_cols,title,grey))
if debug:
- print 'running\n%s\n' % rcmd
+ print 'running\n%s\n' % rcmd
rlog,flist = RRun(rcmd=rcmd,title=ctitle,outdir=outdir)
rlog.append('## R script=')
rlog.append(rcmd)
- if beTidy:
- os.unlink(filtered_fname)
return rlog,flist
@@ -272,19 +240,20 @@
offset_col = -1
p = sys.argv[7].strip().split(',')
try:
- p = [int(x) for x in p]
+ q = [int(x) for x in p]
except:
- p = [-1]
+ p = -1
if chrom_col == -1 or offset_col == -1: # was passed as zero - do not do manhattan plots
chrom_col = -1
offset_col = -1
grey = 0
if (sys.argv[8].lower() in ['1','true']):
grey = 1
- if p == [-1]:
+ if p == -1:
print >> sys.stderr,'## Cannot run rgManQQ - missing pval column'
sys.exit(1)
- rlog,flist = doManQQ(input_fname,chrom_col,offset_col,p,title,grey,ctitle,outdir)
+ p = ['%d' % (int(x) + 1) for x in p]
+ rlog,flist = doManQQ(input_fname,chrom_col+1,offset_col+1,','.join(p),title,grey,ctitle,outdir)
flist.sort()
html = [galhtmlprefix % progname,]
html.append('<h1>%s</h1>' % title)
@@ -294,7 +263,7 @@
fname,expl = row # RRun returns pairs of filenames fiddled for the log and R script
e = os.path.splitext(fname)[-1]
if e in ['.png','.jpg']:
- s= '<tr><td><a href="%s"><img src="%s" alt="%s hspace="10" width="400"><br>(Click to download image %s)</a></td></tr>' \
+ s= '<tr><td><a href="%s"><img src="%s" title="%s hspace="10" width="400"><br>(Click to download image %s)</a></td></tr>' \
% (fname,fname,expl,expl )
html.append(s)
else:
@@ -317,3 +286,4 @@
if __name__ == "__main__":
main()
+
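
The rewritten main() above no longer pre-filters the input into a temporary file; it hands the original dataset straight to R and passes 1-based column indices (Galaxy's column arguments are 0-based, R's are 1-based), with the p-value columns joined into the c(...) vector that rcode2 now expects. A small, purely illustrative rendering of that conversion, with an invented helper name:

def to_r_columns(chrom_col, offset_col, pval_cols):
    """Convert Galaxy's 0-based column indices to the 1-based values R expects.

    pval_cols is the comma-separated string from the command line, e.g. '7'
    or '7,9'; the return values match what doManQQ() is now called with.
    """
    pvals = ['%d' % (int(x) + 1) for x in pval_cols.split(',')]
    return chrom_col + 1, offset_col + 1, ','.join(pvals)

# e.g. to_r_columns(1, 2, '7') -> (2, 3, '8'), giving pvalscolumns=c(8) in rcode2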
--- a/tools/rgenetics/rgManQQ.xml Tue Jul 19 13:40:24 2011 -0400
+++ b/tools/rgenetics/rgManQQ.xml Wed Jul 20 09:01:37 2011 -0400
@@ -1,4 +1,4 @@
-<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.1">
+<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.2"><code file="rgManQQ_code.py"/><description>Plots for WGA P values</description>
--- a/universe_wsgi.ini.sample Tue Jul 19 13:40:24 2011 -0400
+++ b/universe_wsgi.ini.sample Wed Jul 20 09:01:37 2011 -0400
@@ -3,7 +3,7 @@
# environment. To tune the application for a multi-user production
# environment, see the documentation at:
#
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/ProductionServer
+# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Production%20Server
#
# Throughout this sample configuration file, except where stated otherwise,
@@ -129,7 +129,7 @@
# Directory where data used by tools is located, see the samples in that
# directory and the wiki for help:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/DataIntegration
+# http://wiki.g2.bx.psu.edu/Admin/Data%20Integration
#tool_data_path = tool-data
# Directory where chrom len files are kept, currently mainly used by trackster
@@ -208,13 +208,13 @@
#logo_url = /
# The URL linked by the "Galaxy Wiki" link in the "Help" menu.
-#wiki_url = http://bitbucket.org/galaxy/galaxy-central/wiki
+#wiki_url = http://wiki.g2.bx.psu.edu/
# The URL linked by the "Email comments..." link in the "Help" menu.
#bugs_email = mailto:galaxy-bugs@bx.psu.edu
# The URL linked by the "How to Cite..." link in the "Help" menu.
-#citation_url = http://bitbucket.org/galaxy/galaxy-central/wiki/Citations
+#citation_url = http://wiki.g2.bx.psu.edu/Citing%20Galaxy
# Serve static content, which must be enabled if you're not serving it via a
# proxy server. These options should be self explanatory and so are not
@@ -314,7 +314,7 @@
# -- Data Libraries
# These library upload options are described in much more detail in the wiki:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/DataLibraries/UploadingFiles
+# http://wiki.g2.bx.psu.edu/Admin/Data%20Libraries/Uploading%20Library%20Files
# Add an option to the library upload form which allows administrators to
# upload a directory of files.
@@ -372,7 +372,7 @@
# User authentication can be delegated to an upstream proxy server (usually
# Apache). The upstream proxy should set a REMOTE_USER header in the request.
# Enabling remote user disables regular logins. For more information, see:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/ApacheProxy
+# http://wiki.g2.bx.psu.edu/Admin/Config/Apache%20Proxy
#use_remote_user = False
# If use_remote_user is enabled and your external authentication
@@ -388,7 +388,7 @@
# users (email addresses). These users will have access to the Admin section
# of the server, and will have access to create users, groups, roles,
# libraries, and more. For more information, see:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Admin/AdminInterface
+# http://wiki.g2.bx.psu.edu/Admin/Interface
#admin_users = None
# Force everyone to log in (disable anonymous access).
@@ -454,7 +454,7 @@
# If running multiple Galaxy processes, one can be designated as the job
# runner. For more information, see:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/WebApplicationScaling
+# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Web%20Application%20Scal…
#enable_job_running = True
# Should jobs be tracked through the database, rather than in memory.
@@ -505,7 +505,7 @@
# Clustering Galaxy is not a straightforward process and requires some
# pre-configuration. See the the wiki before attempting to set any of these
# options:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster
+# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster
# Comma-separated list of job runners to start. local is always started. If
# left commented, no jobs will be run on the cluster, even if a cluster URL is
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
19 Jul '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/dacf97a6c663/
changeset: dacf97a6c663
user: fubar
date: 2011-07-20 05:34:27
summary: Minor tweak to the x axis sorting of Manhattan plots: R's unique() does not guarantee numerically sorted output, so sort() is now applied to its result to get the correct ordering (e.g. chromosome 10 after 9) on the X axis.
affected #: 2 files (181 bytes)
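
The problem being fixed: chromosome labels such as chr10, X, Y and Mt must be mapped to integers before plotting, and the list of chromosomes used for the X axis must be sorted numerically; relying on unique() alone can leave the axis in whatever order the values appeared, or in string order (where '10' sorts before '9') if the labels are still characters. The R code in the diff below does this with sub() and sort(); a Python paraphrase, purely for illustration since the shipped code is R:

def normalize_chrom_labels(labels):
    """Python paraphrase of the R fix: map chromosome labels to integers and
    return them in numeric order for the Manhattan plot's X axis.

    Mirrors the sub() calls in the diff: strip a leading 'chr', drop ':',
    and recode X=23, Y=24, Mt=25. Labels that match none of these are skipped.
    """
    special = {'X': 23, 'Y': 24, 'MT': 25}
    chroms = []
    for label in labels:
        s = str(label).upper().replace('CHR', '').replace(':', '')
        if s.isdigit():
            chroms.append(int(s))
        elif s in special:
            chroms.append(special[s])
    return sorted(set(chroms))

# Sorting the integers gives 1, 2, ..., 9, 10, ... rather than the string
# order '1', '10', '11', ..., '2' you get while the labels are still characters.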
--- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Tue Jul 19 22:18:40 2011 -0400
+++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Wed Jul 20 13:34:27 2011 +1000
@@ -43,7 +43,7 @@
Loading required package: proto
-[1] "### 101 values read from /data/tmp/tmpTPXdE1/database/files/000/dataset_1.dat read - now running plots"
+[1] "### 101 values read from /data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat read - now running plots"
[1] "## qqplot on Allelep done"
@@ -62,7 +62,7 @@
# http://StephenTurner.us/
# http://GettingGeneticsDone.blogspot.com/
-# Last updated: Tuesday, December 22, 2009
+# Last updated: 19 July 2011 by Ross Lazarus
# R code for making manhattan plots and QQ plots from plink output files.
# With GWAS data this can take a lot of memory. Recommended for use on
# 64bit machines only, for now.
@@ -81,14 +81,9 @@
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- chro = sub('chr','',chrom, ignore.case = T) # just in case
- chro = sub(':','',chro, ignore.case = T) # ugh
- chro = sub('X',23,chro, ignore.case = T)
- chro = sub('Y',24,chro, ignore.case = T)
- chro = sub('Mt',25,chro, ignore.case = T)
offset = as.integer(offset)
pvals = as.double(pvals)
- chro = as.integer(chro)
+ chro = as.integer(chrom) # already dealt with X and friends?
d=data.frame(CHR=chro,BP=offset,P=pvals)
#limit to only chrs 1-22, x=23,y=24,Mt=25?
d=d[d$CHR %in% 1:25, ]
@@ -100,6 +95,7 @@
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = sort(chrlist) # returns lexical ordering
nchr = length(chrlist) # may be any number?
if (nchr >= 2) {
for (x in c(1:nchr)) {
@@ -182,16 +178,23 @@
qq
}
-rgqqMan = function(infile="/data/tmp/tmpTPXdE1/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
+rgqqMan = function(infile="/data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
title="rgManQQtest1",grey=0) {
rawd = read.table(infile,head=T,sep='\t')
dn = names(rawd)
cc = dn[chromcolumn]
oc = dn[offsetcolumn]
-nams = c(cc,oc)
+rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case
+rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh
+rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T)
+nams = c(cc,oc) # for sorting
plen = length(rawd[,1])
-doreorder=1
print(paste('###',plen,'values read from',infile,'read - now running plots',sep=' '))
+rawd = rawd[do.call(order,rawd[nams]),]
+# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
+# in case not yet ordered
if (plen > 0) {
for (pvalscolumn in pvalscolumns) {
if (pvalscolumn > 0)
@@ -203,12 +206,6 @@
ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
- if (doreorder) {
- rawd = rawd[do.call(order,rawd[nams]),]
- # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
- # in case not yet ordered
- doreorder = 0
- }
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
@@ -231,6 +228,6 @@
</pre>
-<b><a href="http://rgenetics.org">Galaxy Rgenetics</a> tool output rgManQQ.py run at 20/07/2011 12:08:46</b><br/>
+<b><a href="http://rgenetics.org">Galaxy Rgenetics</a> tool output rgManQQ.py run at 20/07/2011 13:29:43</b><br/></div></body></html>
--- a/tools/rgenetics/rgManQQ.py Tue Jul 19 22:18:40 2011 -0400
+++ b/tools/rgenetics/rgManQQ.py Wed Jul 20 13:34:27 2011 +1000
@@ -1,4 +1,6 @@
#!/usr/local/bin/python
+# updated july 20 to fix sort order - R unique() sorts into strict collating order
+# so need to sort after unique to revert to lexicographic order for x axis on Manhattan
# rgmanqq updated july 19 to deal with x,y and mt
# lots of fixes
# ross lazarus
@@ -39,14 +41,9 @@
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- chro = sub('chr','',chrom, ignore.case = T) # just in case
- chro = sub(':','',chro, ignore.case = T) # ugh
- chro = sub('X',23,chro, ignore.case = T)
- chro = sub('Y',24,chro, ignore.case = T)
- chro = sub('Mt',25,chro, ignore.case = T)
offset = as.integer(offset)
pvals = as.double(pvals)
- chro = as.integer(chro)
+ chro = as.integer(chrom) # already dealt with X and friends?
d=data.frame(CHR=chro,BP=offset,P=pvals)
#limit to only chrs 1-22, x=23,y=24,Mt=25?
d=d[d$CHR %in% 1:25, ]
@@ -58,6 +55,7 @@
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = sort(chrlist) # returns lexical ordering
nchr = length(chrlist) # may be any number?
if (nchr >= 2) {
for (x in c(1:nchr)) {
@@ -151,10 +149,17 @@
dn = names(rawd)
cc = dn[chromcolumn]
oc = dn[offsetcolumn]
-nams = c(cc,oc)
+rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case
+rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh
+rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T)
+nams = c(cc,oc) # for sorting
plen = length(rawd[,1])
-doreorder=1
print(paste('###',plen,'values read from',infile,'read - now running plots',sep=' '))
+rawd = rawd[do.call(order,rawd[nams]),]
+# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
+# in case not yet ordered
if (plen > 0) {
for (pvalscolumn in pvalscolumns) {
if (pvalscolumn > 0)
@@ -166,12 +171,6 @@
ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
- if (doreorder) {
- rawd = rawd[do.call(order,rawd[nams]),]
- # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
- # in case not yet ordered
- doreorder = 0
- }
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.