galaxy-dev
November 2009
- 26 participants
- 233 discussions
16 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/a5bdbae15600
changeset: 3030:a5bdbae15600
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Fri Nov 13 16:34:32 2009 -0500
description:
Initial AJAX support for grids. AJAX support is not integrated into grids yet, but there are mako templates for AJAXed grids. AJAXed grids can be used for grids that do not use operations.
diffstat:
lib/galaxy/web/controllers/page.py | 60 ++-
lib/galaxy/web/framework/helpers/grids.py | 5 +-
static/scripts/galaxy.base.js | 20 +-
templates/grid_base.mako | 4 +-
templates/grid_base_async.mako | 665 +++++++++++++++++++++++++++++++++++
templates/grid_body_async.mako | 5 +
templates/grid_common_async.mako | 155 ++++++++
templates/tagging_common.mako | 13 -
8 files changed, 905 insertions(+), 22 deletions(-)
diffs (1055 lines):
diff -r d98c52439f53 -r a5bdbae15600 lib/galaxy/web/controllers/page.py
--- a/lib/galaxy/web/controllers/page.py Fri Nov 13 16:32:36 2009 -0500
+++ b/lib/galaxy/web/controllers/page.py Fri Nov 13 16:34:32 2009 -0500
@@ -1,6 +1,7 @@
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, grids
from galaxy.util.sanitize_html import sanitize_html
+from galaxy.util.odict import odict
import re
@@ -69,21 +70,66 @@
]
def apply_default_filter( self, trans, query, **kwargs ):
return query.filter_by( deleted=False, published=True )
-
-
-class NameColumn( grids.TextColumn ):
- def get_value(self, trans, grid, history):
- return history.get_display_name()
class HistorySelectionGrid( grids.Grid ):
+ # Custom columns.
+ class NameColumn( grids.TextColumn ):
+ def get_value(self, trans, grid, history):
+ return history.get_display_name()
+
+ class DeletedColumn( grids.GridColumn ):
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = { "active" : "False", "deleted" : "True", "all": "All" }
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.items():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
+
+ class SharingColumn( grids.GridColumn ):
+ def filter( self, db_session, query, column_filter ):
+ """ Modify query to filter histories by sharing status. """
+ if column_filter == "All":
+ pass
+ elif column_filter:
+ if column_filter == "private":
+ query = query.filter( model.History.users_shared_with == None )
+ query = query.filter( model.History.importable == False )
+ elif column_filter == "shared":
+ query = query.filter( model.History.users_shared_with != None )
+ elif column_filter == "importable":
+ query = query.filter( model.History.importable == True )
+ return query
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = odict()
+ accepted_filter_labels_and_vals["private"] = "private"
+ accepted_filter_labels_and_vals["shared"] = "shared"
+ accepted_filter_labels_and_vals["importable"] = "importable"
+ accepted_filter_labels_and_vals["all"] = "All"
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.items():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
+
# Grid definition.
title = "Saved Histories"
+ template = "grid_base_async.mako"
+ async_template = "grid_body_async.mako"
model_class = model.History
+ default_filter = { "deleted" : "False" , "shared" : "All" }
default_sort_key = "-update_time"
+ use_paging = True
+ num_rows_per_page = 5
columns = [
NameColumn( "Name", key="name", model_class=model.History, filterable="advanced" ),
grids.TagsColumn( "Tags", "tags", model.History, model.HistoryTagAssociation, filterable="advanced"),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+ # Columns that are valid for filtering but are not visible.
+ DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" ),
+ SharingColumn( "Shared", key="shared", visible=False, filterable="advanced" ),
]
columns.append(
grids.MulticolFilterColumn(
@@ -91,6 +137,8 @@
cols_to_filter=[ columns[0], columns[1] ],
key="free-text-search", visible=False, filterable="standard" )
)
+ def apply_default_filter( self, trans, query, **kwargs ):
+ return query.filter_by( user=trans.user, purged=False )
class PageController( BaseController ):
@@ -268,4 +316,4 @@
@web.require_login("select a history from saved histories")
def list_histories_for_selection( self, trans, **kwargs ):
# Render the list view
- return self._history_selection_grid( trans, status=status, message=message, **kwargs )
\ No newline at end of file
+ return self._history_selection_grid( trans, **kwargs )
\ No newline at end of file
diff -r d98c52439f53 -r a5bdbae15600 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py Fri Nov 13 16:32:36 2009 -0500
+++ b/lib/galaxy/web/framework/helpers/grids.py Fri Nov 13 16:34:32 2009 -0500
@@ -1,6 +1,7 @@
from galaxy.model import *
from galaxy.model.orm import *
+from galaxy.web.framework.helpers import iff
from galaxy.tags.tag_handler import TagHandler
from galaxy.web import url_for
from galaxy.util.json import from_json_string, to_json_string
@@ -16,7 +17,9 @@
title = ""
exposed = True
model_class = None
+ # To use grid's async features, set template="grid_base_async.mako"
template = "grid_base.mako"
+ async_template = "grid_body_async.mako"
global_actions = []
columns = []
operations = []
@@ -213,7 +216,7 @@
return url_for( **new_kwargs )
- return trans.fill_template( self.template,
+ return trans.fill_template( iff( 'async' not in kwargs, self.template, self.async_template),
grid=self,
query=query,
cur_page_num = page_num,
diff -r d98c52439f53 -r a5bdbae15600 static/scripts/galaxy.base.js
--- a/static/scripts/galaxy.base.js Fri Nov 13 16:32:36 2009 -0500
+++ b/static/scripts/galaxy.base.js Fri Nov 13 16:34:32 2009 -0500
@@ -19,6 +19,12 @@
jQuery( "a[confirm]" ).click( function() {
return confirm( jQuery(this).attr( "confirm" ) )
});
+ // Make popup menus.
+ make_popup_menus();
+});
+
+function make_popup_menus()
+{
jQuery( "div[popupmenu]" ).each( function() {
var options = {};
$(this).find( "a" ).each( function() {
@@ -40,7 +46,7 @@
$(this).remove();
b.show();
});
-});
+}
function ensure_popup_helper() {
// And the helper below the popup menus
@@ -103,3 +109,15 @@
};
$( button_element ).click( click );
};
+
+// Returns the number of keys (elements) in an array/dictionary.
+var array_length = function(an_array)
+{
+ if (an_array.length)
+ return an_array.length;
+
+ var count = 0;
+ for (element in an_array)
+ count++;
+ return count;
+};
diff -r d98c52439f53 -r a5bdbae15600 templates/grid_base.mako
--- a/templates/grid_base.mako Fri Nov 13 16:32:36 2009 -0500
+++ b/templates/grid_base.mako Fri Nov 13 16:34:32 2009 -0500
@@ -347,8 +347,9 @@
</td>
</tr>
%endif
+ ## Grid operations.
+ %if grid.operations:
<tr>
- ## Grid operations.
<td></td>
<td colspan="100">
For <span class="grid-selected-count"></span> selected ${items_plural}:
@@ -359,6 +360,7 @@
%endfor
</td>
</tr>
+ %endif
</tfoot>
</table>
</form>
diff -r d98c52439f53 -r a5bdbae15600 templates/grid_base_async.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/grid_base_async.mako Fri Nov 13 16:34:32 2009 -0500
@@ -0,0 +1,665 @@
+<%!
+ from galaxy.web.framework.helpers.grids import TextColumn
+ from galaxy.model import History, HistoryDatasetAssociation, User, Role, Group
+ import galaxy.util
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+## Render the grid's basic elements. Each of these elements can be subclassed.
+%if message:
+ <p>
+ <div class="${message_type}message transient-message">${util.restore_text( message )}</div>
+ <div style="clear: both"></div>
+ </p>
+%endif
+
+${self.render_grid_header()}
+${self.render_grid_table()}
+
+## Function definitions.
+
+<%def name="title()">${grid.title}</%def>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.js("jquery.autocomplete", "autocomplete_tagging" )}
+ <script type="text/javascript">
+ ## TODO: generalize and move into galaxy.base.js
+ $(document).ready(function() {
+ // Initialize grid elements.
+ init_grid_elements();
+
+ // Initialize autocomplete for text inputs in search UI.
+ var t = $("#input-tags-filter");
+ if (t.length)
+ {
+
+ var autocomplete_options =
+ { selectFirst: false, autoFill: false, highlight: false, mustMatch: false };
+
+ t.autocomplete("${h.url_for( controller='tag', action='tag_autocomplete_data', item_class='History' )}", autocomplete_options);
+ }
+
+ var t2 = $("#input-name-filter");
+ if (t2.length)
+ {
+ var autocomplete_options =
+ { selectFirst: false, autoFill: false, highlight: false, mustMatch: false };
+
+ t2.autocomplete("${h.url_for( controller='history', action='name_autocomplete_data' )}", autocomplete_options);
+ }
+ });
+ ## Can this be moved into base.mako?
+ %if refresh_frames:
+ %if 'masthead' in refresh_frames:
+ ## Refresh masthead == user changes (backward compatibility)
+ if ( parent.user_changed ) {
+ %if trans.user:
+ parent.user_changed( "${trans.user.email}", ${int( app.config.is_admin_user( trans.user ) )} );
+ %else:
+ parent.user_changed( null, false );
+ %endif
+ }
+ %endif
+ %if 'history' in refresh_frames:
+ if ( parent.frames && parent.frames.galaxy_history ) {
+ parent.frames.galaxy_history.location.href="${h.url_for( controller='root', action='history')}";
+ if ( parent.force_right_panel ) {
+ parent.force_right_panel( 'show' );
+ }
+ }
+ %endif
+ %if 'tools' in refresh_frames:
+ if ( parent.frames && parent.frames.galaxy_tools ) {
+ parent.frames.galaxy_tools.location.href="${h.url_for( controller='root', action='tool_menu')}";
+ if ( parent.force_left_panel ) {
+ parent.force_left_panel( 'show' );
+ }
+ }
+ %endif
+ %endif
+
+ //
+ // Code to handle grid operations: filtering, sorting, paging, and operations.
+ //
+
+ // Initialize grid elements.
+ function init_grid_elements()
+ {
+ $(".grid").each( function() {
+ var grid = this;
+ var checkboxes = $(this).find("input.grid-row-select-checkbox");
+ var update = $(this).find( "span.grid-selected-count" );
+ $(checkboxes).each( function() {
+ $(this).change( function() {
+ var n = $(checkboxes).filter("[checked]").size();
+ update.text( n );
+ });
+ })
+ });
+ }
+
+ // Filter values for categorical filters.
+ var categorical_filters = new Object();
+ %for column in grid.columns:
+ %if column.filterable is not None and not isinstance( column, TextColumn ):
+ var ${column.key}_filters =
+ {
+ %for i, filter in enumerate( column.get_accepted_filters() ):
+ %if i > 0:
+ ,
+ %endif
+ ${filter.label} : ${h.to_json_string( filter.args )}
+ %endfor
+ };
+ categorical_filters['${column.key}'] = ${column.key}_filters;
+ %endif
+ %endfor
+
+ // Initialize URL args with filter arguments.
+ var url_args = ${h.to_json_string( cur_filter_dict )};
+
+ // Place "f-" in front of all filter arguments.
+ var arg;
+ for (arg in url_args)
+ {
+ value = url_args[arg];
+ delete url_args[arg];
+ url_args["f-" + arg] = value;
+ }
+
+ // Add sort argument to URL args.
+ url_args['sort'] = "${encoded_sort_key}";
+
+ // Add async keyword to URL args.
+ url_args['async'] = true;
+
+ // Add tag to grid filter.
+ function add_tag_to_grid_filter(tag_name, tag_value)
+ {
+ // Put tag name and value together.
+ var tag = tag_name + (tag_value != null && tag_value != "" ? ":" + tag_value : "");
+ add_filter_condition("tags", tag, true);
+ }
+
+ // Add a condition to the grid filter; this adds the condition and refreshes the grid.
+ function add_filter_condition(name, value, append)
+ {
+ // Update URL arg with new condition.
+ if (append)
+ {
+ // Update or append value.
+ var cur_val = url_args["f-" + name];
+ var new_val;
+ if (cur_val == null || cur_val == undefined)
+ {
+ new_val = value;
+ }
+ else if (typeof(cur_val) == "string")
+ {
+ if (cur_val == "All")
+ new_val = value;
+ else
+ {
+ // Replace string with array.
+ var values = new Array();
+ values[0] = cur_val;
+ values[1] = value;
+ new_val = values;
+ }
+ }
+ else {
+ // Current value is an array.
+ new_val = cur_val;
+ new_val[new_val.length] = value;
+ }
+ url_args["f-" + name] = new_val;
+ }
+ else
+ {
+ // Replace value.
+ url_args["f-" + name] = value;
+ }
+
+ // Add button that displays filter and provides a button to delete it.
+ var t = $("<span>" + value +
+ " <a href='#'><img src='${h.url_for('/static/images/delete_tag_icon_gray.png')}'/></a></span>");
+ t.addClass('text-filter-val');
+ t.click(function() {
+ //
+ // Remove filter condition.
+ //
+
+ // TODO: remove element.
+ //var tag_button = $(this).parent();
+ $(this).remove();
+
+ // Remove condition from URL args.
+ var cur_val = url_args["f-" + name];
+ if (cur_val == null || cur_val == undefined)
+ {
+ // Unexpected. Throw error?
+ }
+ else if (typeof(cur_val) == "string")
+ {
+ if (cur_val == "All")
+ {
+ // Unexpected. Throw error?
+ }
+ else
+ // Remove condition.
+ delete url_args["f-" + name];
+ }
+ else {
+ // Current value is an array.
+ var conditions = cur_val;
+ var index;
+ for (index = 0; index < conditions.length; index++)
+ if (conditions[index] == value)
+ {
+ conditions.splice(index, 1);
+ break;
+ }
+ }
+
+ update_grid(true);
+ });
+
+ var container = $('#' + name + "-filtering-criteria");
+ container.append(t);
+
+ update_grid(true);
+ }
+
+ // Set sort condition for grid.
+ function set_sort_condition(col_key)
+ {
+ // Set new sort condition. New sort is col_key if sorting new column; if reversing sort on
+ // currently sorted column, sort is reversed.
+ var cur_sort = url_args['sort'];
+ var new_sort = col_key;
+ if ( cur_sort.indexOf( col_key ) != -1)
+ {
+ // Reverse sort.
+ if ( cur_sort.substring(0,1) != '-' )
+ new_sort = '-' + col_key;
+ else
+ {
+ // Sort reversed by using just col_key.
+ }
+ }
+
+ // Remove sort arrows elements.
+ $('.sort-arrow').remove()
+
+ // Add sort arrow element to new sort column.
+ var sort_arrow = "↑";
+ if (new_sort.substring(0,1) != '-')
+ sort_arrow = "↓";
+ var t = $("<span>" + sort_arrow + "</span>").addClass('sort-arrow');
+ var th = $("#" + col_key + '-header');
+ th.append(t);
+
+ // Update grid.
+ url_args['sort'] = new_sort;
+ update_grid();
+ }
+
+ // Set new value for categorical filter.
+ function set_categorical_filter(this_obj, name, new_value)
+ {
+ // Update filter hyperlinks to reflect new filter value.
+ var category_filter = categorical_filters[name];
+ var cur_value = url_args["f-" + name];
+ $("." + name + "-filter").each( function() {
+ var text = $(this).text().trim();
+ var filter = category_filter[text];
+ var filter_value = filter[name];
+ if (filter_value == new_value)
+ {
+ // Remove filter link since grid will be using this filter. It is assumed that
+ // this element has a single child, a hyperlink/anchor with text.
+ $(this).empty();
+ $(this).append("<span style='font-style: italic'>" + text + "</span>");
+ }
+ else if (filter_value == cur_value)
+ {
+ // Add hyperlink for this filter since grid will no longer be using this filter. It is assumed that
+ // this element has a single child, a hyperlink/anchor.
+ $(this).empty();
+ var t = $("<a href='#'>" + text + "</a>");
+ t.click(function() {
+ set_categorical_filter( $(this), name, filter_value );
+ });
+ $(this).append(t);
+ }
+ });
+
+ // Need to go back to page 1 if not showing all.
+ var cur_page = url_args['page'];
+ if (cur_page != null && cur_page != undefined && cur_page != 'all')
+ url_args['page'] = 1;
+
+ // Update grid.
+ url_args["f-" + name] = new_value;
+ update_grid(true);
+ }
+
+ var num_pages = ${num_pages};
+ url_args['page'] = 1;
+ // Set page to view.
+ function set_page(new_page)
+ {
+ // Update page hyperlink to reflect new page.
+ $(".page-link").each( function() {
+ var id = $(this).attr('id');
+ var page_num = parseInt( id.split("-")[2] ); // Id has form 'page-link-<page_num>
+ var cur_page = url_args['page'];
+ if (page_num == new_page)
+ {
+ // Remove link to page since grid will be on this page. It is assumed that
+ // this element has a single child, a hyperlink/anchor with text.
+ var text = $(this).children().text();
+ $(this).empty();
+ $(this).addClass("inactive-link");
+ $(this).text(text);
+ }
+ else if (page_num == cur_page)
+ {
+ // Add hyperlink to this page since grid will no longer be on this page. It is assumed that
+ // this element has a single child, a hyperlink/anchor.
+ var text = $(this).text();
+ $(this).empty();
+ $(this).removeClass("inactive-link");
+ var t = $("<a href='#'>" + text + "</a>");
+ t.click(function() {
+ set_page(page_num);
+ });
+ $(this).append(t);
+ }
+ });
+
+
+ if (new_page == "all")
+ {
+ url_args['page'] = new_page;
+ $('#page-links-row').hide('slow');
+ }
+ else
+ {
+ url_args['page'] = parseInt(new_page);
+ }
+ update_grid();
+ }
+
+ // Update grid.
+ function update_grid()
+ {
+ $.ajax({
+ url: "${h.url_for()}",
+ data: url_args,
+ error: function() { alert( "Grid refresh failed" ) },
+ success: function(response_text) {
+ // HACK: use a simple string to separate the two elements in the
+ // response: (1) table body and (2) number of pages in table.
+ var parsed_response_text = response_text.split("*****");
+
+ // Update grid body.
+ var table_body = parsed_response_text[0];
+ $('#grid-table-body').html(table_body);
+
+ // Process grid body.
+ init_grid_elements();
+ make_popup_menus();
+
+ // Update pages.
+ var num_pages = parseInt( parsed_response_text[1] );
+
+ // Rebuild page links.
+ var page_link_container = $('#page-link-container');
+ page_link_container.children().remove();
+ if (num_pages > 1)
+ {
+ // Show page link row.
+ $('#page-links-row').show();
+
+ // First page is the current page.
+ var t = $("<span>1</span>");
+ t.addClass('page-link');
+ t.addClass('inactive-link');
+ t.attr('id', 'page-link-1');
+ page_link_container.append(t);
+
+ // Subsequent pages are navigable.
+ for (var i = 2; i <= num_pages; i++)
+ {
+ var span = $("<span></span>");
+ span.addClass('page-link');
+ span.attr('id', 'page-link-' + i);
+ var t = $("<a href='#'>" + i + "</a>");
+ var page_num = i
+ t.click(function() {
+ set_page(page_num);
+ });
+ span.append(t)
+ page_link_container.append(span);
+ }
+ }
+ else
+ {
+ // Hide page link row.
+ $('#page-links-row').hide('slow');
+ }
+ }
+ });
+ }
+
+ // Perform a grid operation. TODO: this is not complete.
+ function do_operation()
+ {
+ // Get grid form.
+ var form = $('#grid-form');
+ var operation = $('input[name=operation]:submit').val();
+ var item_ids = $('input[name=id]:checked').val();
+
+ // Update URL args.
+ url_args['operation'] = operation;
+ url_args['id'] = item_ids;
+
+ //update_grid();
+
+ //document.location = ${h.url_for()} + "?"
+ }
+
+ </script>
+</%def>
+
+<%def name="stylesheets()">
+ ${h.css( "base", "autocomplete_tagging" )}
+ <style>
+ ## Not generic to all grids -- move to base?
+ .count-box {
+ min-width: 1.1em;
+ padding: 5px;
+ border-width: 1px;
+ border-style: solid;
+ text-align: center;
+ display: inline-block;
+ }
+ .text-filter-val {
+ border: solid 1px #AAAAAA;
+ padding: 1px 3px 1px 3px;
+ margin-right: 5px;
+ -moz-border-radius: .5em;
+ -webkit-border-radius: .5em;
+ font-style: italic;
+ }
+ .page-link a, .inactive-link {
+ padding: 0px 7px 0px 7px;
+ }
+ .inactive-link {
+ font-style: italic;
+ }
+ </style>
+</%def>
+
+<%namespace file="./grid_common_async.mako" import="*" />
+
+## Print grid header.
+<%def name="render_grid_header()">
+ <div class="grid-header">
+ <h2>${grid.title}</h2>
+
+ %if grid.global_actions:
+ <ul class="manage-table-actions">
+ %for action in grid.global_actions:
+ <li>
+ <a class="action-button" href="${h.url_for( **action.url_args )}">${action.label}</a>
+ </li>
+ %endfor
+ </ul>
+ %endif
+
+ ${render_grid_filters()}
+ </div>
+</%def>
+
+## Print grid.
+<%def name="render_grid_table()">
+ <form action="${url()}" method="post" onsubmit="do_operation();return false;">
+ <table class="grid">
+ <thead id="grid-table-header">
+ <tr>
+ <th></th>
+ %for column in grid.columns:
+ %if column.visible:
+ <%
+ href = ""
+ extra = ""
+ if column.sortable:
+ if sort_key == column.key:
+ if sort_order == "asc":
+ href = url( sort=( "-" + column.key ) )
+ extra = "↓"
+ else:
+ href = url( sort=( column.key ) )
+ extra = "↑"
+ else:
+ href = url( sort=column.key )
+ %>
+ <th\
+ id="${column.key}-header"
+ %if column.ncells > 1:
+ colspan="${column.ncells}"
+ %endif
+ >
+ %if href:
+ <a href="${href}" onclick="set_sort_condition('${column.key}');return false;">${column.label}</a>
+ %else:
+ ${column.label}
+ %endif
+ <span class="sort-arrow">${extra}</span>
+ </th>
+ %endif
+ %endfor
+ <th></th>
+ </tr>
+ </thead>
+ <tbody id="grid-table-body">
+ ${render_grid_table_body_contents()}
+ </tbody>
+ <tfoot id="grid-table-footer">
+ ${render_grid_table_footer_contents()}
+ </tfoot>
+ </table>
+ </form>
+</%def>
+
+<%def name="render_grid_table_body_contents()">
+ %if query.count() == 0:
+ ## No results.
+ <tr><td></td><td><em>No Items</em></td></tr>
+ %endif
+ %for i, item in enumerate( query ):
+ <tr \
+ %if current_item == item:
+ class="current" \
+ %endif
+ >
+ ## Item selection column
+ <td style="width: 1.5em;">
+ <input type="checkbox" name="id" value=${trans.security.encode_id( item.id )} class="grid-row-select-checkbox" />
+ </td>
+ ## Data columns
+ %for column in grid.columns:
+ %if column.visible:
+ <%
+ # Link
+ link = column.get_link( trans, grid, item )
+ if link:
+ href = url( **link )
+ else:
+ href = None
+ # Value (coerced to list so we can loop)
+ value = column.get_value( trans, grid, item )
+ if column.ncells == 1:
+ value = [ value ]
+ %>
+ %for cellnum, v in enumerate( value ):
+ <%
+ # Handle non-ascii chars.
+ if isinstance(v, str):
+ v = unicode(v, 'utf-8')
+ # Attach popup menu?
+ if column.attach_popup and cellnum == 0:
+ extra = '<a id="grid-%d-popup" class="arrow" style="display: none;"><span>▼</span></a>' % i
+ else:
+ extra = ""
+ %>
+ %if href:
+ <td><div class="menubutton split" style="float: left;"><a class="label" href="${href}">${v}</a>${extra}</td>
+ %else:
+ <td >${v}${extra}</td>
+ %endif
+ %endfor
+ %endif
+ %endfor
+ ## Actions column
+ <td>
+ <div popupmenu="grid-${i}-popup">
+ %for operation in grid.operations:
+ %if operation.allowed( item ):
+ <%
+ target = ""
+ if operation.target:
+ target = "target='" + operation.target + "'"
+ %>
+ <a class="action-button" ${target} href="${ url( **operation.get_url_args( item ) ) }">${operation.label}</a>
+ %endif
+ %endfor
+ </div>
+ </td>
+ </tr>
+ %endfor
+</%def>
+
+<%def name="render_grid_table_footer_contents()">
+ ## Row for navigating among pages.
+ <%
+ # Mapping between item class and plural term for item.
+ items_plural = "items"
+ if grid.model_class == History:
+ items_plural = "histories"
+ elif grid.model_class == HistoryDatasetAssociation:
+ items_plural = "datasets"
+ elif grid.model_class == User:
+ items_plural = "users"
+ elif grid.model_class == Role:
+ items_plural = "roles"
+ elif grid.model_class == Group:
+ items_plural = "groups"
+ %>
+ %if num_pages > 1:
+ <tr id="page-links-row">
+ <td></td>
+ <td colspan="100">
+ <span id='page-link-container'>
+ ## Page links.
+ Page:
+ %for page_index in range(1, num_pages + 1):
+ %if page_index == cur_page_num:
+ <span class='page-link inactive-link' id="page-link-${page_index}">${page_index}</span>
+ %else:
+ <% args = { 'page' : page_index } %>
+ <span class='page-link' id="page-link-${page_index}"><a href="${url( args )}" onclick="set_page('${page_index}'); return false;">${page_index}</a></span>
+ %endif
+ %endfor
+ </span>
+
+ ## Show all link.
+ <% args = { "page" : "all" } %>
+ <span id='show-all-link'>| <a href="${url( args )}" onclick="set_page('all');return false;">Show all ${items_plural} on one page</a></span>
+ </td>
+ </tr>
+ %endif
+ ## Grid operations.
+ %if grid.operations:
+ <tr>
+ <td></td>
+ <td colspan="100">
+ For <span class="grid-selected-count"></span> selected ${items_plural}:
+ %for operation in grid.operations:
+ %if operation.allow_multiple:
+ <input type="submit" name="operation" value="${operation.label}" class="action-button">
+ %endif
+ %endfor
+ </td>
+ </tr>
+ %endif
+</%def>
+
diff -r d98c52439f53 -r a5bdbae15600 templates/grid_body_async.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/grid_body_async.mako Fri Nov 13 16:34:32 2009 -0500
@@ -0,0 +1,5 @@
+<%namespace file="./grid_base_async.mako" import="*" />
+
+${render_grid_table_body_contents()}
+*****
+${num_pages}
\ No newline at end of file
diff -r d98c52439f53 -r a5bdbae15600 templates/grid_common_async.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/grid_common_async.mako Fri Nov 13 16:34:32 2009 -0500
@@ -0,0 +1,155 @@
+<%! from galaxy.web.framework.helpers.grids import TextColumn, GridColumnFilter %>
+
+## Render an AJAX filter UI for a grid column. Filter is rendered as a table row.
+<%def name="render_ajax_grid_column_filter(column)">
+ <tr>
+ <%
+ column_label = column.label
+ if column.filterable == "advanced":
+ column_label = column_label.lower()
+ %>
+ <td align="left" style="padding-left: 10px">${column_label}:</td>
+ <td>
+ %if isinstance(column, TextColumn):
+ <form action="${url( dict() )}" id="form-filter-${column.key}"
+ ## Move this to doc.ready()
+ ##onsubmit="var text_input=$('#input-${column.key}-filter').val();$('#input-${column.key}-filter').val('');add_filter_condition('${column.key}',text_input,true);return false;"
+ onsubmit="var text_input=$('#input-${column.key}-filter').val();$('#input-${column.key}-filter').val('');add_filter_condition('${column.key}',text_input,true);return false;"
+ method="get" >
+ ## Carry forward filtering criteria with hidden inputs.
+ %for temp_column in grid.columns:
+ %if temp_column.key in cur_filter_dict:
+ <% value = cur_filter_dict[ temp_column.key ] %>
+ %if value != "All":
+ <%
+ if isinstance( temp_column, TextColumn ):
+ value = h.to_json_string( value )
+ %>
+ <input type="hidden" id="${temp_column.key}" name="f-${temp_column.key}" value='${value}'/>
+ %endif
+ %endif
+ %endfor
+
+ ## Print current filtering criteria and links to delete.
+ <span id="${column.key}-filtering-criteria">
+ %if column.key in cur_filter_dict:
+ <% column_filter = cur_filter_dict[column.key] %>
+ %if isinstance( column_filter, basestring ):
+ %if column_filter != "All":
+ <span style="font-style: italic">${cur_filter_dict[column.key]}</span>
+ <% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
+ <a href="${url( filter_all.get_url_args() )}"><img src="${h.url_for('/static/images/delete_tag_icon_gray.png')}"/></a>
+ |
+ %endif
+ %elif isinstance( column_filter, list ):
+ %for i, filter in enumerate( column_filter ):
+ %if i > 0:
+ ,
+ %endif
+ <span style="font-style: italic">${filter}</span>
+ <%
+ new_filter = list( column_filter )
+ del new_filter[ i ]
+ new_column_filter = GridColumnFilter( "", { column.key : h.to_json_string( new_filter ) } )
+ %>
+ <a href="${url( new_column_filter.get_url_args() )}"><img src="${h.url_for('/static/images/delete_tag_icon_gray.png')}"/></a>
+ %endfor
+
+ %endif
+ %endif
+ </span>
+
+ ## Print input field for column.
+ <span><input id="input-${column.key}-filter" name="f-${column.key}" type="text" value="" size="15"/></span>
+ </form>
+ %else:
+ <span id="${column.key}-filtering-criteria">
+ %for i, filter in enumerate( column.get_accepted_filters() ):
+ <%
+ # HACK: we know that each filter will have only a single argument, so get that single argument.
+ for key, arg in filter.args.items():
+ filter_key = key
+ filter_arg = arg
+ %>
+ %if i > 0:
+ |
+ %endif
+ %if column.key in cur_filter_dict and column.key in filter.args and cur_filter_dict[column.key] == filter.args[column.key]:
+ <span class="${column.key}-filter">${filter.label}</span>
+ %else:
+ <span class="${column.key}-filter">
+ <a href="${url( filter.get_url_args() )}"
+ onclick="set_categorical_filter($(this), '${column.key}','${filter_arg}'); return false;">${filter.label}</a>
+ </span>
+ %endif
+ %endfor
+ </span>
+ %endif
+ </td>
+ </tr>
+</%def>
+
+## Print grid search/filtering UI.
+<%def name="render_grid_filters()">
+ ## Standard search.
+ <div>
+ <table><tr>
+ <td>
+ <table>
+ %for column in grid.columns:
+ %if column.filterable == "standard":
+ ${render_ajax_grid_column_filter(column)}
+ %endif
+ %endfor
+ </table>
+ </td>
+ <td>
+ ## Clear the standard search.
+ ##|
+ ##<% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
+ ##<a href="${url( filter_all.get_url_args() )}">Clear All</a>
+
+ ## Only show advanced search if there are filterable columns.
+ <%
+ show_advanced_search = False
+ for column in grid.columns:
+ if column.filterable == "advanced":
+ show_advanced_search = True
+ break
+ endif
+ %>
+ %if show_advanced_search:
+ | <a href="" onclick="javascript:$('#more-search-options').slideToggle('fast');return false;">Advanced Search</a>
+ %endif
+ </td>
+ </tr></table>
+ </div>
+
+ ## Advanced search.
+ <div id="more-search-options" style="display: none; padding-top: 5px">
+ <table style="border: 1px solid gray;">
+ <tr><td style="text-align: left" colspan="100">
+ Advanced Search |
+ <a href=""# onclick="javascript:$('#more-search-options').slideToggle('fast');return false;">Close</a> |
+ ## Link to clear all filters.
+ <%
+ no_filter = GridColumnFilter("Clear All", default_filter_dict)
+ %>
+ <a href="${url( no_filter.get_url_args() )}">${no_filter.label}</a>
+ </td></tr>
+ %for column in grid.columns:
+ %if column.filterable == "advanced":
+ ## Show div if current filter has value that is different from the default filter.
+ %if column.key in cur_filter_dict and column.key in default_filter_dict and \
+ cur_filter_dict[column.key] != default_filter_dict[column.key]:
+ <script type="text/javascript">
+ $('#more-search-options').css("display", "block");
+ </script>
+ %endif
+
+ ${render_ajax_grid_column_filter(column)}
+ %endif
+ %endfor
+ </table>
+ </div>
+</%def>
\ No newline at end of file
diff -r d98c52439f53 -r a5bdbae15600 templates/tagging_common.mako
--- a/templates/tagging_common.mako Fri Nov 13 16:32:36 2009 -0500
+++ b/templates/tagging_common.mako Fri Nov 13 16:34:32 2009 -0500
@@ -23,19 +23,6 @@
else: ## isInstance( tag_name, unicode ):
tag_names_and_values[tag_name] = tag_value
%>
- //
- // Returns the number of keys (elements) in an array/dictionary.
- //
- var array_length = function(an_array)
- {
- if (an_array.length)
- return an_array.length;
-
- var count = 0;
- for (element in an_array)
- count++;
- return count;
- };
//
// Default function get text to display on the toggle link.
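The async flow in this changeset rests on two small conventions: the controller swaps templates when the request carries an 'async' argument (the iff(...) call in grids.py above), and grid_body_async.mako answers with the table-body HTML and the page count joined by a '*****' sentinel, which update_grid() splits apart. A minimal standalone sketch of both conventions, with the iff signature assumed from its call sites in this diff:

def iff(condition, when_true, when_false):
    # Assumed shape of galaxy.web.framework.helpers.iff, inferred from
    # its usage in grids.py above; both branches are evaluated eagerly.
    return when_true if condition else when_false

def pick_template(kwargs, template="grid_base.mako",
                  async_template="grid_body_async.mako"):
    # Full-page render by default; a request carrying 'async' gets only
    # the body template so the client can swap the table in place.
    return iff("async" not in kwargs, template, async_template)

def parse_async_response(response_text):
    # grid_body_async.mako emits: <table body HTML>*****<num_pages>
    table_body, num_pages = response_text.split("*****")
    return table_body, int(num_pages)

assert pick_template({}) == "grid_base.mako"
assert pick_template({"async": True}) == "grid_body_async.mako"
assert parse_async_response("<tr>...</tr>*****3") == ("<tr>...</tr>", 3)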
16 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/d98c52439f53
changeset: 3029:d98c52439f53
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Fri Nov 13 16:32:36 2009 -0500
description:
Cleaned up messages for delete/undelete history/ies.
diffstat:
lib/galaxy/web/controllers/history.py | 4 ++--
test/base/twilltestcase.py | 3 ++-
2 files changed, 4 insertions(+), 3 deletions(-)
diffs (41 lines):
diff -r a0880f1b4297 -r d98c52439f53 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Fri Nov 13 15:28:22 2009 -0500
+++ b/lib/galaxy/web/controllers/history.py Fri Nov 13 16:32:36 2009 -0500
@@ -262,7 +262,7 @@
n_deleted += 1
status = SUCCESS
if n_deleted:
- message_parts.append( "Deleted %d histories. " % n_deleted )
+ message_parts.append( "Deleted %d %s. " % ( n_deleted, iff( n_deleted != 1, "histories", "history" ) ) )
if deleted_current:
message_parts.append( "Your active history was deleted, a new empty history is now active. " )
status = INFO
@@ -290,7 +290,7 @@
status = SUCCESS
message_parts = []
if n_undeleted:
- message_parts.append( "Undeleted %d histories." % n_undeleted )
+ message_parts.append( "Undeleted %d %s. " % ( n_undeleted, iff( n_undeleted != 1, "histories", "history" ) ) )
if n_already_purged:
message_parts.append( "%d histories have already been purged and cannot be undeleted." % n_already_purged )
status = WARNING
diff -r a0880f1b4297 -r d98c52439f53 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Fri Nov 13 15:28:22 2009 -0500
+++ b/test/base/twilltestcase.py Fri Nov 13 16:32:36 2009 -0500
@@ -10,6 +10,7 @@
pkg_resources.require( "elementtree" )
from elementtree import ElementTree
from galaxy.web import security
+from galaxy.web.framework.helpers import iff
buffer = StringIO.StringIO()
@@ -180,7 +181,7 @@
num_deleted = len( id.split( ',' ) )
self.home()
self.visit_page( "history/list?operation=delete&id=%s" % ( id ) )
- check_str = 'Deleted %d histories' % num_deleted
+ check_str = 'Deleted %d %s' % ( num_deleted, iff( num_deleted != 1, "histories","history") )
self.check_page_for_string( check_str )
self.home()
def delete_current_history( self, check_str='' ):
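The cleanup leans on the same iff helper to make the noun agree with the count. A tiny runnable sketch, assuming iff is a plain conditional-expression helper as its usage here suggests:

def iff(condition, when_true, when_false):
    # Assumed helper shape, inferred from its usage in this changeset.
    return when_true if condition else when_false

def deleted_message(n_deleted):
    # Mirrors the cleaned-up controller message: singular for one
    # history, plural otherwise.
    return "Deleted %d %s. " % (n_deleted,
                                iff(n_deleted != 1, "histories", "history"))

assert deleted_message(1) == "Deleted 1 history. "
assert deleted_message(3) == "Deleted 3 histories. "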
16 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/5be1e8df69b5
changeset: 3026:5be1e8df69b5
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Nov 13 14:20:03 2009 -0500
description:
Change the version of the liftOver tool due to the application of Brad Chapman's patch in changeset 3021.
diffstat:
tools/extract/liftOver_wrapper.xml | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (9 lines):
diff -r 23c1b9799bce -r 5be1e8df69b5 tools/extract/liftOver_wrapper.xml
--- a/tools/extract/liftOver_wrapper.xml Fri Nov 13 14:13:03 2009 -0500
+++ b/tools/extract/liftOver_wrapper.xml Fri Nov 13 14:20:03 2009 -0500
@@ -1,4 +1,4 @@
-<tool id="liftOver1" name="Convert genome coordinates" version="1.0.1">
+<tool id="liftOver1" name="Convert genome coordinates" version="1.0.2">
<description> between assemblies and genomes</description>
<command interpreter="python">liftOver_wrapper.py $input "$out_file1" "$out_file2" $dbkey $to_dbkey $minMatch</command>
<inputs>
details: http://www.bx.psu.edu/hg/galaxy/rev/a0880f1b4297
changeset: 3027:a0880f1b4297
user: Nate Coraor <nate(a)bx.psu.edu>
date: Fri Nov 13 15:28:22 2009 -0500
description:
get_display_links fix for GeneTrack
diffstat:
lib/galaxy/datatypes/tracks.py | 3 ++-
1 files changed, 2 insertions(+), 1 deletions(-)
diffs (20 lines):
diff -r 5be1e8df69b5 -r a0880f1b4297 lib/galaxy/datatypes/tracks.py
--- a/lib/galaxy/datatypes/tracks.py Fri Nov 13 14:20:03 2009 -0500
+++ b/lib/galaxy/datatypes/tracks.py Fri Nov 13 15:28:22 2009 -0500
@@ -2,6 +2,7 @@
Datatype classes for tracks/track views within galaxy.
"""
+import data
import tabular, binascii, logging
from galaxy.datatypes.metadata import MetadataElement
from galaxy.datatypes import metadata
@@ -23,7 +24,7 @@
super( GeneTrack, self ).__init__( **kwargs )
self.add_display_app( 'genetrack', 'View in', '', 'genetrack_link' )
def get_display_links( self, dataset, type, app, base_url, target_frame='galaxy_main', **kwd ):
- return data.Binary.get_display_links( self, dataset, type, app, base_url, target_frame=target_frame, **kwd )
+ return data.Data.get_display_links( self, dataset, type, app, base_url, target_frame=target_frame, **kwd )
def genetrack_link( self, hda, type, app, base_url ):
ret_val = []
if hda.has_data:
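The one-line fix swaps data.Binary.get_display_links for data.Data.get_display_links: changeset 3025 below moves Binary out of lib/galaxy/datatypes/data.py (and reparents GeneTrack under Text), so the old reference would no longer resolve. A minimal sketch of delegating to a specific base class by explicit reference; the class bodies here are illustrative stubs, not Galaxy's real implementations:

class Data(object):
    def get_display_links(self, dataset, app_type, base_url):
        # Stand-in for the generic implementation in galaxy.datatypes.data.
        return [(app_type, "%s/display/%s" % (base_url, dataset))]

class Text(Data):
    pass

class GeneTrack(Text):
    def get_display_links(self, dataset, app_type, base_url):
        # Name Data explicitly, as the fix does, rather than a class that
        # no longer lives in the data module.
        return Data.get_display_links(self, dataset, app_type, base_url)

links = GeneTrack().get_display_links("dataset1", "genetrack", "http://example.org")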
16 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/23c1b9799bce
changeset: 3025:23c1b9799bce
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Nov 13 14:13:03 2009 -0500
description:
Fixes, cleanup and new functional tests for data types and upload:
- moved all supported binary data types to the new binary.py
- changed GeneTrack data type to subclass from Text rather than Binary
- added Sff data type to datatypes_conf.xml.sample
- merged the test_sniffing_and_metadata_settings.py test script into test_get_data.py
- added several additional functional tests for data types to test_get_data.py
- fixed some bugs in upload.py when uploading binary data types
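The new binary.Sff type in this changeset identifies the format from its magic bytes: every SFF file begins with '.sff'. A minimal sketch of that check; the diff targets Python 2, so this version uses bytes literals to keep the comparison valid on Python 3 as well:

import binascii

def sniff_sff(filename):
    # The first 4 bytes of any SFF file are '.sff'; compare hex-encoded
    # bytes the same way binary.Sff.sniff() does in the diff below.
    try:
        with open(filename, "rb") as f:
            header = f.read(4)
        return binascii.b2a_hex(header) == binascii.hexlify(b".sff")
    except Exception:
        return False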
diffstat:
datatypes_conf.xml.sample | 12 +-
lib/galaxy/datatypes/binary.py | 156 +++++
lib/galaxy/datatypes/data.py | 88 +--
lib/galaxy/datatypes/genetics.py | 69 +--
lib/galaxy/datatypes/images.py | 120 ----
lib/galaxy/datatypes/registry.py | 16 +-
lib/galaxy/datatypes/tracks.py | 8 +-
test/functional/test_get_data.py | 620 +++++++++++++++++++---
test/functional/test_sniffing_and_metadata_settings.py | 262 ---------
tools/data_source/upload.py | 45 +-
10 files changed, 748 insertions(+), 648 deletions(-)
diffs (1914 lines):
diff -r 4f9b630f9976 -r 23c1b9799bce datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Fri Nov 13 10:39:15 2009 -0500
+++ b/datatypes_conf.xml.sample Fri Nov 13 14:13:03 2009 -0500
@@ -1,15 +1,15 @@
<?xml version="1.0"?>
<datatypes>
<registration converters_path="lib/galaxy/datatypes/converters">
- <datatype extension="ab1" type="galaxy.datatypes.images:Ab1" mimetype="application/octet-stream" display_in_upload="true"/>
+ <datatype extension="ab1" type="galaxy.datatypes.binary:Ab1" mimetype="application/octet-stream" display_in_upload="true"/>
<datatype extension="axt" type="galaxy.datatypes.sequence:Axt" display_in_upload="true"/>
- <datatype extension="bam" type="galaxy.datatypes.images:Bam" mimetype="application/octet-stream"/>
+ <datatype extension="bam" type="galaxy.datatypes.binary:Bam" mimetype="application/octet-stream"/>
<datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true">
<converter file="bed_to_gff_converter.xml" target_datatype="gff"/>
<converter file="interval_to_coverage.xml" target_datatype="coverage"/>
<converter file="bed_to_interval_index_converter.xml" target_datatype="interval_index"/>
</datatype>
- <datatype extension="binseq.zip" type="galaxy.datatypes.images:Binseq" mimetype="application/zip" display_in_upload="true"/>
+ <datatype extension="binseq.zip" type="galaxy.datatypes.binary:Binseq" mimetype="application/zip" display_in_upload="true"/>
<datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true">
<!-- no converters yet -->
</datatype>
@@ -49,12 +49,13 @@
<datatype extension="qualsolid" type="galaxy.datatypes.qualityscore:QualityScoreSOLiD" display_in_upload="true"/>
<datatype extension="qual454" type="galaxy.datatypes.qualityscore:QualityScore454" display_in_upload="true"/>
<datatype extension="sam" type="galaxy.datatypes.tabular:Sam" display_in_upload="true"/>
- <datatype extension="scf" type="galaxy.datatypes.images:Scf" mimetype="application/octet-stream" display_in_upload="true"/>
+ <datatype extension="scf" type="galaxy.datatypes.binary:Scf" mimetype="application/octet-stream" display_in_upload="true"/>
+ <datatype extension="sff" type="galaxy.datatypes.binary:Sff" mimetype="application/octet-stream" display_in_upload="true"/>
<datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/>
<datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/>
<datatype extension="txt" type="galaxy.datatypes.data:Text" display_in_upload="true"/>
<datatype extension="blastxml" type="galaxy.datatypes.xml:BlastXml" display_in_upload="true"/>
- <datatype extension="txtseq.zip" type="galaxy.datatypes.images:Txtseq" mimetype="application/zip" display_in_upload="true"/>
+ <datatype extension="txtseq.zip" type="galaxy.datatypes.data:Txtseq" mimetype="application/zip" display_in_upload="true"/>
<datatype extension="wig" type="galaxy.datatypes.interval:Wiggle" display_in_upload="true">
<converter file="wiggle_to_array_tree_converter.xml" target_datatype="array_tree"/>
</datatype>
@@ -190,6 +191,7 @@
defined format first, followed by next-most rigidly defined,
and so on.
-->
+ <sniffer type="galaxy.datatypes.binary:Sff"/>
<sniffer type="galaxy.datatypes.xml:BlastXml"/>
<sniffer type="galaxy.datatypes.sequence:Maf"/>
<sniffer type="galaxy.datatypes.sequence:Lav"/>
diff -r 4f9b630f9976 -r 23c1b9799bce lib/galaxy/datatypes/binary.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/binary.py Fri Nov 13 14:13:03 2009 -0500
@@ -0,0 +1,156 @@
+"""
+Binary classes
+"""
+
+import data, logging, binascii
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes import metadata
+from galaxy.datatypes.sniff import *
+from urllib import urlencode, quote_plus
+import zipfile
+import os, subprocess, tempfile
+
+log = logging.getLogger(__name__)
+
+sniffable_binary_formats = [ 'sff' ]
+# Currently these supported binary data types must be manually set on upload
+unsniffable_binary_formats = [ 'ab1', 'scf' ]
+
+class Binary( data.Data ):
+ """Binary data"""
+ def set_peek( self, dataset ):
+ """Set the peek and blurb text"""
+ if not dataset.dataset.purged:
+ dataset.peek = 'binary data'
+ dataset.blurb = 'data'
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+
+class Ab1( Binary ):
+ """Class describing an ab1 binary sequence file"""
+ file_ext = "ab1"
+ def set_peek( self, dataset ):
+ if not dataset.dataset.purged:
+ export_url = "/history_add_to?" + urlencode( {'history_id':dataset.history_id,'ext':'ab1','name':'ab1 sequence','info':'Sequence file','dbkey':dataset.dbkey} )
+ dataset.peek = "Binary ab1 sequence file"
+ dataset.blurb = data.nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+ def display_peek( self, dataset ):
+ try:
+ return dataset.peek
+ except:
+ return "Binary ab1 sequence file (%s)" % ( data.nice_size( dataset.get_size() ) )
+
+class Bam( Binary ):
+ """Class describing a BAM binary file"""
+ file_ext = "bam"
+ MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
+ def init_meta( self, dataset, copy_from=None ):
+ Binary.init_meta( self, dataset, copy_from=copy_from )
+ def set_meta( self, dataset, overwrite = True, **kwd ):
+ """
+ Sets index for BAM file.
+ """
+ index_file = dataset.metadata.bam_index
+ if not index_file:
+ index_file = dataset.metadata.spec['bam_index'].param.new_file( dataset = dataset )
+ tmp_dir = tempfile.gettempdir()
+ tmpf1 = tempfile.NamedTemporaryFile( dir=tmp_dir )
+ tmpf1bai = '%s.bai' % tmpf1.name
+ try:
+ os.system( 'cd %s' % tmp_dir )
+ os.system( 'cp %s %s' % ( dataset.file_name, tmpf1.name ) )
+ os.system( 'samtools index %s' % tmpf1.name )
+ os.system( 'cp %s %s' % ( tmpf1bai, index_file.file_name ) )
+ except Exception, ex:
+ sys.stderr.write( 'There was a problem creating the index for the BAM file\n%s\n' + str( ex ) )
+ tmpf1.close()
+ if os.path.exists( tmpf1bai ):
+ os.remove( tmpf1bai )
+ dataset.metadata.bam_index = index_file
+ def set_peek( self, dataset ):
+ if not dataset.dataset.purged:
+ export_url = "/history_add_to?" + urlencode( {'history_id':dataset.history_id,'ext':'bam','name':'bam alignments','info':'Alignments file','dbkey':dataset.dbkey} )
+ dataset.peek = "Binary bam alignments file"
+ dataset.blurb = data.nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+ def display_peek( self, dataset ):
+ try:
+ return dataset.peek
+ except:
+ return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
+ def get_mime( self ):
+ """Returns the mime type of the datatype"""
+ return 'application/octet-stream'
+
+class Binseq( Binary ):
+ """Class describing a zip archive of binary sequence files"""
+ file_ext = "binseq.zip"
+ def set_peek( self, dataset ):
+ if not dataset.dataset.purged:
+ zip_file = zipfile.ZipFile( dataset.file_name, "r" )
+ num_files = len( zip_file.namelist() )
+ dataset.peek = "Archive of %s binary sequence files" % ( str( num_files ) )
+ dataset.blurb = data.nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+ def display_peek( self, dataset ):
+ try:
+ return dataset.peek
+ except:
+ return "Binary sequence file archive (%s)" % ( data.nice_size( dataset.get_size() ) )
+ def get_mime( self ):
+ """Returns the mime type of the datatype"""
+ return 'application/zip'
+
+class Scf( Binary ):
+ """Class describing an scf binary sequence file"""
+ file_ext = "scf"
+ def set_peek( self, dataset ):
+ if not dataset.dataset.purged:
+ export_url = "/history_add_to?" + urlencode({'history_id':dataset.history_id,'ext':'scf','name':'scf sequence','info':'Sequence file','dbkey':dataset.dbkey})
+ dataset.peek = "Binary scf sequence file"
+ dataset.blurb = data.nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+ def display_peek( self, dataset ):
+ try:
+ return dataset.peek
+ except:
+ return "Binary scf sequence file (%s)" % ( data.nice_size( dataset.get_size() ) )
+
+class Sff( Binary ):
+ """ Standard Flowgram Format (SFF) """
+ file_ext = "sff"
+ def __init__( self, **kwd ):
+ Binary.__init__( self, **kwd )
+ def sniff( self, filename ):
+ # The first 4 bytes of any sff file is '.sff', and the file is binary. For details
+ # about the format, see http://www.ncbi.nlm.nih.gov/Traces/trace.cgi?cmd=show&f=formats&m=doc&s=for…
+ try:
+ header = open( filename ).read(4)
+ if binascii.b2a_hex( header ) == binascii.hexlify( '.sff' ):
+ return True
+ return False
+ except Exception, e:
+ return False
+ def set_peek( self, dataset ):
+ if not dataset.dataset.purged:
+ export_url = "/history_add_to?" + urlencode( {'history_id':dataset.history_id,'ext':'sff','name':'sff file','info':'sff file','dbkey':dataset.dbkey} )
+ dataset.peek = "Binary sff file"
+ dataset.blurb = data.nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+ def display_peek( self, dataset ):
+ try:
+ return dataset.peek
+ except:
+ return "Binary sff file (%s)" % ( data.nice_size( dataset.get_size() ) )
diff -r 4f9b630f9976 -r 23c1b9799bce lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Fri Nov 13 10:39:15 2009 -0500
+++ b/lib/galaxy/datatypes/data.py Fri Nov 13 14:13:03 2009 -0500
@@ -1,4 +1,4 @@
-import logging, os, sys, time, tempfile, binascii
+import logging, os, sys, time, tempfile
from galaxy import util
from galaxy.util.odict import odict
from galaxy.util.bunch import Bunch
@@ -40,20 +40,18 @@
"""
__metaclass__ = DataMeta
-
- """Add metadata elements"""
+ # Add metadata elements
MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.DBKeyParameter, multiple=False, no_value="?" )
-
- """Stores the set of display applications, and viewing methods, supported by this datatype """
+ # Stores the set of display applications, and viewing methods, supported by this datatype
supported_display_apps = {}
-
- """If False, the peek is regenerated whenever a dataset of this type is copied"""
+ # If False, the peek is regenerated whenever a dataset of this type is copied
copy_safe_peek = True
-
- is_binary = True #The dataset contains binary data --> do not space_to_tab or convert newlines, etc. Allow binary file uploads of this type when True.
-
- allow_datatype_change = True #Allow user to change between this datatype and others. If False, this datatype cannot be changed from or into.
-
+ # The dataset contains binary data --> do not space_to_tab or convert newlines, etc.
+ # Allow binary file uploads of this type when True.
+ is_binary = True
+ # Allow user to change between this datatype and others. If False, this datatype
+ # cannot be changed from or into.
+ allow_datatype_change = True
#Composite datatypes
composite_type = None
composite_files = odict()
@@ -270,8 +268,6 @@
def add_composite_file( self, name, **kwds ):
#self.composite_files = self.composite_files.copy()
self.composite_files[ name ] = self.__new_composite_file( name, **kwds )
-
-
def __substitute_composite_key( self, key, composite_file, dataset = None ):
if composite_file.substitute_name_with_metadata:
if dataset:
@@ -303,7 +299,6 @@
return files
def generate_auto_primary_file( self, dataset = None ):
raise Exception( "generate_auto_primary_file is not implemented for this datatype." )
-
@property
def has_resolution(self):
return False
@@ -364,23 +359,37 @@
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
-class Binary( Data ):
- """Binary data"""
+class Txtseq( Data ):
+ """Class describing a zip archive of text sequence files"""
+ file_ext = "txtseq.zip"
def set_peek( self, dataset ):
- """Set the peek and blurb text"""
if not dataset.dataset.purged:
- dataset.peek = 'binary data'
- dataset.blurb = 'data'
+ zip_file = zipfile.ZipFile( dataset.file_name, "r" )
+ num_files = len( zip_file.namelist() )
+ dataset.peek = "Archive of %s text sequence files" % ( str( num_files ) )
+ dataset.blurb = data.nice_size( dataset.get_size() )
else:
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
+ def display_peek(self, dataset):
+ try:
+ return dataset.peek
+ except:
+ return "Text sequence file archive (%s)" % ( data.nice_size( dataset.get_size() ) )
+ def get_mime(self):
+ """Returns the mime type of the datatype"""
+ return 'application/zip'
+
+class Newick( Text ):
+ pass
+
+# ------------- Utility methods --------------
def get_test_fname( fname ):
"""Returns test data filename"""
path, name = os.path.split(__file__)
full_path = os.path.join( path, 'test', fname )
return full_path
-
def nice_size(size):
"""
Returns a readably formatted string with the size
@@ -406,7 +415,6 @@
out = "%.1f %s" % (size, word)
return out
return '??? bytes'
-
def get_file_peek( file_name, is_multi_byte=False, WIDTH=256, LINE_COUNT=5 ):
"""
Returns the first LINE_COUNT lines wrapped to WIDTH
@@ -443,7 +451,6 @@
else:
text = unicode( '\n'.join( lines ), 'utf-8' )
return text
-
def get_line_count(file_name):
"""Returns the number of lines in a file that are neither null nor comments"""
count = 0
@@ -452,38 +459,3 @@
if line and line[0] != '#':
count += 1
return count
-
-class Newick( Text ):
- pass
-
-class Sff( Binary ):
- """ Standard Flowgram Format (SFF) """
- file_ext = "sff"
- def __init__( self, **kwd ):
- Binary.__init__(self, **kwd)
- def init_meta( self, dataset, copy_from=None ):
- Binary.init_meta( self, dataset, copy_from=copy_from )
- def sniff( self, filename ):
- '''
- The first 4 bytes of any sff file is '.sff'
-
- >>> fname = get_test_fname( '1.sff' )
- >>> Sff().sniff( fname )
- True
- '''
- header = open( filename ).read(4)
- if binascii.b2a_hex( header ) == binascii.hexlify( '.sff' ):
- return True
- return False
- def set_peek( self, dataset ):
- if not dataset.dataset.purged:
- dataset.peek = "Binary sff file"
- dataset.blurb = nice_size( dataset.get_size() )
- else:
- dataset.peek = 'file does not exist'
- dataset.blurb = 'file purged from disk'
- def display_peek(self, dataset):
- try:
- return dataset.peek
- except:
- return "sff file (%s)" % ( nice_size( dataset.get_size() ) )
diff -r 4f9b630f9976 -r 23c1b9799bce lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Fri Nov 13 10:39:15 2009 -0500
+++ b/lib/galaxy/datatypes/genetics.py Fri Nov 13 14:13:03 2009 -0500
@@ -48,10 +48,8 @@
"""Initialize datatype, by adding GBrowse display app"""
Interval.__init__(self, **kwd)
self.add_display_app ( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
-
def as_ucsc_display_file( self, dataset, **kwd ):
return open( dataset.file_name )
-
def set_meta( self, dataset, overwrite = True, **kwd ):
i = 0
for i, line in enumerate( file ( dataset.file_name ) ):
@@ -66,7 +64,6 @@
except:
pass
Interval.set_meta( self, dataset, overwrite = overwrite, skip = i )
-
def make_html_table( self, dataset, skipchars=[] ):
"""Create HTML table, used for displaying peek"""
out = ['<table cellspacing="0" cellpadding="3">']
@@ -82,7 +79,6 @@
except Exception, exc:
out = "Can't create peek %s" % exc
return out
-
def get_estimated_display_viewport( self, dataset ):
"""
Return a chrom, start, stop tuple for viewing a file. There are slight differences between gff 2 and gff 3
@@ -118,7 +114,6 @@
return ( seqid, str( start ), str( stop ) )
else:
return ( '', '', '' )
-
def gbrowse_links( self, dataset, type, app, base_url ):
ret_val = []
if dataset.has_data:
@@ -132,7 +127,6 @@
link = "%s?start=%s&stop=%s&ref=%s&dbkey=%s" % ( site_url, start, stop, seqid, dataset.dbkey )
ret_val.append( ( site_name, link ) )
return ret_val
-
def ucsc_links( self, dataset, type, app, base_url ):
ret_val = []
if dataset.has_data:
@@ -162,8 +156,6 @@
else:
gal_Log.debug('@@@ gg ucsc_links - no viewport_tuple')
return ret_val
-
-
def sniff( self, filename ):
"""
Determines whether the file is in gff format
@@ -202,20 +194,16 @@
except:
return False
-
-
class rgTabList(Tabular):
""" for sampleid and for featureid lists of exclusions or inclusions in the clean tool
featureid subsets on statistical criteria -> specialized display such as gg
"""
file_ext = "rgTList"
-
def __init__(self, **kwd):
"""Initialize featurelistt datatype"""
Tabular.__init__( self, **kwd )
self.column_names = []
-
def make_html_table( self, dataset, skipchars=[] ):
"""Create HTML table, used for displaying peek"""
out = ['<table cellspacing="0" cellpadding="3">']
@@ -236,7 +224,6 @@
out = "Can't create peek %s" % exc
return out
-
class rgSampleList(rgTabList):
""" for sampleid exclusions or inclusions in the clean tool
output from QC eg excess het, gender error, ibd pair member,eigen outlier,excess mendel errors,...
@@ -252,7 +239,6 @@
self.column_names[0] = 'FID'
self.column_names[1] = 'IID'
# this is what Plink wants as at 2009
-
def sniff(self,filename):
"""
"""
@@ -276,26 +262,22 @@
rgTabList.__init__( self, **kwd )
for i,s in enumerate(['#FeatureId', 'Chr', 'Genpos', 'Mappos']):
self.column_names[i] = s
-
class Rgenetics(Html):
"""class to use for rgenetics"""
- MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="rgenetics",
- readonly=True, set_in_upload=True)
+ MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="rgenetics", readonly=True, set_in_upload=True)
composite_type = 'auto_primary_file'
allow_datatype_change = False
file_ext = 'rgenetics'
-
def missing_meta( self, dataset=None, **kwargs):
"""Checks for empty meta values"""
for key, value in dataset.metadata.items():
if not value:
return True
return False
-
def generate_primary_file( self, dataset = None ):
rval = ['<html><head><title>Rgenetics Galaxy Composite Dataset </title></head><p/>']
rval.append('<div>This composite dataset is composed of the following files:<p/><ul>')
@@ -306,7 +288,6 @@
rval.append( '<li><a href="%s" type="application/binary">%s</a>%s' % ( composite_name, composite_name, opt_text ) )
rval.append( '</ul></div></html>' )
return "\n".join( rval )
-
def regenerate_primary_file(self,dataset):
"""cannot do this until we are setting metadata
"""
@@ -332,12 +313,8 @@
f.write("\n".join( rval ))
f.write('\n')
f.close()
-
def set_meta( self, dataset, **kwd ):
-
- """for lped/pbed eg
-
- """
+ """for lped/pbed eg"""
if kwd.get('overwrite') == False:
if verbose:
gal_Log.debug('@@@ rgenetics set_meta called with overwrite = False')
@@ -349,9 +326,10 @@
gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0], dataset.name))
return False
try:
- flist = os.listdir(efp)
- except:
- if verbose: gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0],dataset.name))
+ flist = os.listdir(efp)
+ except:
+ if verbose:
+ gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0],dataset.name))
return False
if len(flist) == 0:
if verbose:
@@ -372,7 +350,6 @@
dataset.blurb = 'Composite file - Rgenetics Galaxy toolkit'
return True
-
class SNPMatrix(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
"""
@@ -385,7 +362,6 @@
else:
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
-
def sniff(self,filename):
""" need to check the file header hex code
"""
@@ -397,7 +373,6 @@
else:
return True
-
class Lped(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
"""
@@ -408,7 +383,6 @@
self.add_composite_file( '%s.ped', description = 'Pedigree File', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.map', description = 'Map File', substitute_name_with_metadata = 'base_name', is_binary = True )
-
class Pphe(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
"""
@@ -418,7 +392,6 @@
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.pphe', description = 'Plink Phenotype File', substitute_name_with_metadata = 'base_name' )
-
class Lmap(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
"""
@@ -442,8 +415,6 @@
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.phe', description = 'Phenotype File', substitute_name_with_metadata = 'base_name' )
-
-
class Fped(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
"""
@@ -453,7 +424,6 @@
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.fped', description = 'FBAT format pedfile', substitute_name_with_metadata = 'base_name' )
-
class Pbed(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
"""
@@ -475,8 +445,6 @@
self.add_composite_file( '%s.eigenstratgeno', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.ind', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name', is_binary = True )
-
-
class Eigenstratpca(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
@@ -487,20 +455,17 @@
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.eigenstratpca', description = 'Eigenstrat PCA file', substitute_name_with_metadata = 'base_name' )
-
class Snptest(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
"""
file_ext="snptest"
-
class Pheno(Tabular):
"""
base class for pheno files
"""
file_ext = 'pheno'
-
class RexpBase( Html ):
"""base class for BioC data structures in Galaxy
must be constructed with the pheno data in place since that
@@ -518,18 +483,15 @@
composite_type = 'auto_primary_file'
allow_datatype_change = False
-
def __init__( self, **kwd ):
Html.__init__(self,**kwd)
self.add_composite_file( '%s.pheno', description = 'Phenodata tab text file',
substitute_name_with_metadata = 'base_name', is_binary=True)
-
def generate_primary_file( self, dataset = None ):
""" This is called only at upload to write the html file
cannot rename the datasets here - they come with the default unfortunately
"""
return '<html><head></head><body>AutoGenerated Primary File for Composite Dataset</body></html>'
-
def get_phecols(self, phenolist=[], maxConc=20):
"""
sept 2009: cannot use whitespace to split - make a more complex structure here
@@ -601,8 +563,6 @@
res = [('no usable phenotype columns found',[('?',0),]),]
return res
-
-
def get_pheno(self,dataset):
"""expects a .pheno file in the extra_files_dir - ugh
note that R is weird and adds the row.name in
@@ -620,7 +580,6 @@
else:
p = []
return '\n'.join(p)
-
def set_peek( self, dataset ):
"""expects a .pheno file in the extra_files_dir - ugh
note that R is weird and does not include the row.name in
@@ -636,7 +595,6 @@
else:
dataset.peek = 'file does not exist\n'
dataset.blurb = 'file purged from disk'
-
def get_peek( self, dataset ):
"""expects a .pheno file in the extra_files_dir - ugh
"""
@@ -646,7 +604,6 @@
except:
p = ['##failed to find %s' % pp]
return ''.join(p[:5])
-
def get_file_peek(self,filename):
"""
can't really peek at a filename - need the extra_files_path and such?
@@ -657,7 +614,6 @@
except:
pass
return ''.join(h[:5])
-
def regenerate_primary_file(self,dataset):
"""cannot do this until we are setting metadata
"""
@@ -672,18 +628,14 @@
f.write("\n".join( rval ))
f.write('\n')
f.close()
-
- """Add metadata elements"""
def init_meta( self, dataset, copy_from=None ):
+ """Add metadata elements"""
if copy_from:
dataset.metadata = copy_from.metadata
-
def set_meta( self, dataset, **kwd ):
-
"""
NOTE we apply the tabular machinery to the phenodata extracted
from a BioC eSet or affybatch.
-
"""
try:
flist = os.listdir(dataset.extra_files_path)
@@ -727,7 +679,6 @@
if not dataset.blurb:
dataset.blurb = 'R loadable BioC expression object for the Rexpression Galaxy toolkit'
return True
-
def make_html_table( self, pp='nothing supplied from peek\n'):
"""Create HTML table, used for displaying peek"""
out = ['<table cellspacing="0" cellpadding="3">',]
@@ -750,16 +701,13 @@
except Exception, exc:
out = "Can't create html table %s" % str( exc )
return out
-
def display_peek( self, dataset ):
"""Returns formatted html of peek"""
out=self.make_html_table(dataset.peek)
return out
-
def get_mime(self):
"""Returns the mime type of the datatype"""
return 'text/html'
-
class Affybatch( RexpBase ):
"""derived class for BioC data structures in Galaxy """
@@ -790,9 +738,6 @@
self.add_composite_file( '%s.malist', description = 'MAlist R object saved to file',
substitute_name_with_metadata = 'base_name', is_binary = True )
-
if __name__ == '__main__':
import doctest, sys
doctest.testmod(sys.modules[__name__])
-
-
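The Rgenetics and RexpBase classes above all follow the same composite-datatype recipe: a base_name metadata element set at upload, composite_type = 'auto_primary_file', and one add_composite_file() call per component whose '%s' placeholder is substituted from that metadata. A minimal hypothetical subclass pulls the whole pattern together (class, extension, and file names are invented for illustration; the Html import path assumes the same location genetics.py uses):

from galaxy.datatypes.images import Html
from galaxy.datatypes.metadata import MetadataElement

class TwoFileComposite( Html ):
    """Hypothetical composite datatype with a data file and an index."""
    MetadataElement( name="base_name", desc="base name for all transformed versions of this dataset",
                     default="twofile", readonly=True, set_in_upload=True )
    composite_type = 'auto_primary_file'
    allow_datatype_change = False
    file_ext = 'twofile'
    def __init__( self, **kwd ):
        Html.__init__( self, **kwd )
        # '%s' is replaced with dataset.metadata.base_name at access time
        self.add_composite_file( '%s.dat', description='Data file',
                                 substitute_name_with_metadata='base_name', is_binary=True )
        self.add_composite_file( '%s.idx', description='Index file',
                                 substitute_name_with_metadata='base_name' )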
diff -r 4f9b630f9976 -r 23c1b9799bce lib/galaxy/datatypes/images.py
--- a/lib/galaxy/datatypes/images.py Fri Nov 13 10:39:15 2009 -0500
+++ b/lib/galaxy/datatypes/images.py Fri Nov 13 14:13:03 2009 -0500
@@ -13,82 +13,6 @@
log = logging.getLogger(__name__)
-class Ab1( data.Data ):
- """Class describing an ab1 binary sequence file"""
- file_ext = "ab1"
- def set_peek( self, dataset ):
- if not dataset.dataset.purged:
- export_url = "/history_add_to?" + urlencode({'history_id':dataset.history_id,'ext':'ab1','name':'ab1 sequence','info':'Sequence file','dbkey':dataset.dbkey})
- dataset.peek = "Binary ab1 sequence file"
- dataset.blurb = data.nice_size( dataset.get_size() )
- else:
- dataset.peek = 'file does not exist'
- dataset.blurb = 'file purged from disk'
- def display_peek(self, dataset):
- try:
- return dataset.peek
- except:
- return "Binary ab1 sequence file (%s)" % ( data.nice_size( dataset.get_size() ) )
-
-class Scf( data.Data ):
- """Class describing an scf binary sequence file"""
- file_ext = "scf"
- def set_peek( self, dataset ):
- if not dataset.dataset.purged:
- export_url = "/history_add_to?" + urlencode({'history_id':dataset.history_id,'ext':'scf','name':'scf sequence','info':'Sequence file','dbkey':dataset.dbkey})
- dataset.peek = "Binary scf sequence file"
- dataset.blurb = data.nice_size( dataset.get_size() )
- else:
- dataset.peek = 'file does not exist'
- dataset.blurb = 'file purged from disk'
- def display_peek(self, dataset):
- try:
- return dataset.peek
- except:
- return "Binary scf sequence file (%s)" % ( data.nice_size( dataset.get_size() ) )
-
-class Binseq( data.Data ):
- """Class describing a zip archive of binary sequence files"""
- file_ext = "binseq.zip"
- def set_peek( self, dataset ):
- if not dataset.dataset.purged:
- zip_file = zipfile.ZipFile( dataset.file_name, "r" )
- num_files = len( zip_file.namelist() )
- dataset.peek = "Archive of %s binary sequence files" % ( str( num_files ) )
- dataset.blurb = data.nice_size( dataset.get_size() )
- else:
- dataset.peek = 'file does not exist'
- dataset.blurb = 'file purged from disk'
- def display_peek(self, dataset):
- try:
- return dataset.peek
- except:
- return "Binary sequence file archive (%s)" % ( data.nice_size( dataset.get_size() ) )
- def get_mime(self):
- """Returns the mime type of the datatype"""
- return 'application/zip'
-
-class Txtseq( data.Data ):
- """Class describing a zip archive of text sequence files"""
- file_ext = "txtseq.zip"
- def set_peek( self, dataset ):
- if not dataset.dataset.purged:
- zip_file = zipfile.ZipFile( dataset.file_name, "r" )
- num_files = len( zip_file.namelist() )
- dataset.peek = "Archive of %s text sequence files" % ( str( num_files ) )
- dataset.blurb = data.nice_size( dataset.get_size() )
- else:
- dataset.peek = 'file does not exist'
- dataset.blurb = 'file purged from disk'
- def display_peek(self, dataset):
- try:
- return dataset.peek
- except:
- return "Text sequence file archive (%s)" % ( data.nice_size( dataset.get_size() ) )
- def get_mime(self):
- """Returns the mime type of the datatype"""
- return 'application/zip'
-
class Image( data.Data ):
"""Class describing an image"""
def set_peek( self, dataset ):
@@ -236,47 +160,3 @@
return dataset.peek
except:
return "peek unavailable"
-
-class Bam( data.Binary ):
- """Class describing a BAM binary file"""
- file_ext = "bam"
- MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
- def init_meta( self, dataset, copy_from=None ):
- data.Binary.init_meta( self, dataset, copy_from=copy_from )
- def set_meta( self, dataset, overwrite = True, **kwd ):
- """
- Sets index for BAM file.
- """
- index_file = dataset.metadata.bam_index
- if not index_file:
- index_file = dataset.metadata.spec['bam_index'].param.new_file( dataset = dataset )
- tmp_dir = tempfile.gettempdir()
- tmpf1 = tempfile.NamedTemporaryFile(dir=tmp_dir)
- tmpf1bai = '%s.bai' % tmpf1.name
- try:
- os.system('cd %s' % tmp_dir)
- os.system('cp %s %s' % (dataset.file_name, tmpf1.name))
- os.system('samtools index %s' % tmpf1.name)
- os.system('cp %s %s' % (tmpf1bai, index_file.file_name))
- except Exception, ex:
- sys.stderr.write('There was a problem creating the index for the BAM file\n%s\n' + str(ex))
- tmpf1.close()
- if os.path.exists(tmpf1bai):
- os.remove(tmpf1bai)
- dataset.metadata.bam_index = index_file
- def set_peek( self, dataset ):
- if not dataset.dataset.purged:
- export_url = "/history_add_to?" + urlencode({'history_id':dataset.history_id,'ext':'bam','name':'bam alignments','info':'Alignments file','dbkey':dataset.dbkey})
- dataset.peek = "Binary bam alignments file"
- dataset.blurb = data.nice_size( dataset.get_size() )
- else:
- dataset.peek = 'file does not exist'
- dataset.blurb = 'file purged from disk'
- def display_peek(self, dataset):
- try:
- return dataset.peek
- except:
- return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
- def get_mime(self):
- """Returns the mime type of the datatype"""
- return 'application/octet-stream'
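The removed Bam.set_meta (which resurfaces in the new binary module, per the registry changes below) built the index through four independent os.system() calls; note that its os.system('cd %s' % tmp_dir) ran in a throwaway shell and never affected the later commands, and its error message concatenated a format string whose %s was never filled. A hedged sketch of the same indexing step using subprocess, assuming samtools is on PATH (the helper name is invented):

import os, shutil, subprocess, tempfile

def build_bam_index( bam_path, index_dest ):
    """Copy the BAM to a scratch name, run 'samtools index' on the copy,
    and move the resulting .bai file to index_dest."""
    fd, tmp_name = tempfile.mkstemp()
    os.close( fd )
    try:
        shutil.copy( bam_path, tmp_name )
        # samtools writes the index next to its input as <input>.bai
        subprocess.check_call( [ 'samtools', 'index', tmp_name ] )
        shutil.move( '%s.bai' % tmp_name, index_dest )
    finally:
        if os.path.exists( tmp_name ):
            os.remove( tmp_name )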
diff -r 4f9b630f9976 -r 23c1b9799bce lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Fri Nov 13 10:39:15 2009 -0500
+++ b/lib/galaxy/datatypes/registry.py Fri Nov 13 14:13:03 2009 -0500
@@ -3,7 +3,7 @@
"""
import os, tempfile
import logging
-import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo
+import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo, binary
import galaxy.util
from galaxy.util.odict import odict
@@ -109,11 +109,11 @@
#default values
if len(self.datatypes_by_extension) < 1:
self.datatypes_by_extension = {
- 'ab1' : images.Ab1(),
+ 'ab1' : binary.Ab1(),
'axt' : sequence.Axt(),
- 'bam' : images.Bam(),
+ 'bam' : binary.Bam(),
'bed' : interval.Bed(),
- 'binseq.zip' : images.Binseq(),
+ 'binseq.zip' : binary.Binseq(),
'blastxml' : xml.BlastXml(),
'coverage' : coverage.LastzCoverage(),
'customtrack' : interval.CustomTrack(),
@@ -132,12 +132,12 @@
'qualsolexa' : qualityscore.QualityScoreSolexa(),
'qual454' : qualityscore.QualityScore454(),
'sam' : tabular.Sam(),
- 'scf' : images.Scf(),
- 'sff' : data.Sff(),
+ 'scf' : binary.Scf(),
+ 'sff' : binary.Sff(),
'tabular' : tabular.Tabular(),
'taxonomy' : tabular.Taxonomy(),
'txt' : data.Text(),
- 'txtseq.zip' : images.Txtseq(),
+ 'txtseq.zip' : data.Txtseq(),
'wig' : interval.Wiggle()
}
self.mimetypes_by_extension = {
@@ -174,7 +174,7 @@
# because some formats are much more flexibly defined than others.
if len(self.sniff_order) < 1:
self.sniff_order = [
- data.Sff(),
+ binary.Sff(),
xml.BlastXml(),
sequence.Maf(),
sequence.Lav(),
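The comment preserved in the hunk above explains why sniff_order matters: loosely defined formats would happily claim almost any file, so strict binary sniffers like binary.Sff() must run before permissive text formats. The resolution itself is first-match-wins; a hedged sketch of that loop (not the registry's actual method):

def guess_ext( filename, sniff_order, default='txt' ):
    """Return the file_ext of the first datatype whose sniff() accepts
    the file; fall back to a permissive default when nothing matches."""
    for datatype in sniff_order:
        try:
            if datatype.sniff( filename ):
                return datatype.file_ext
        except Exception:
            # a sniffer that chokes on foreign data simply does not match
            pass
    return default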
diff -r 4f9b630f9976 -r 23c1b9799bce lib/galaxy/datatypes/tracks.py
--- a/lib/galaxy/datatypes/tracks.py Fri Nov 13 10:39:15 2009 -0500
+++ b/lib/galaxy/datatypes/tracks.py Fri Nov 13 14:13:03 2009 -0500
@@ -2,11 +2,7 @@
Datatype classes for tracks/track views within galaxy.
"""
-import data
-import logging
-import re
-import binascii
-from cgi import escape
+import tabular, binascii, logging
from galaxy.datatypes.metadata import MetadataElement
from galaxy.datatypes import metadata
import galaxy.model
@@ -17,7 +13,7 @@
log = logging.getLogger(__name__)
-class GeneTrack( data.Binary ):
+class GeneTrack( tabular.Tabular ):
file_ext = "genetrack"
MetadataElement( name="genetrack", default="data.genetrack", desc="HDF index", readonly=True, visible=True, no_value=0 )
diff -r 4f9b630f9976 -r 23c1b9799bce test/functional/test_get_data.py
--- a/test/functional/test_get_data.py Fri Nov 13 10:39:15 2009 -0500
+++ b/test/functional/test_get_data.py Fri Nov 13 14:13:03 2009 -0500
@@ -4,128 +4,538 @@
from base.twilltestcase import TwillTestCase
class UploadData( TwillTestCase ):
- def test_000_upload_files_from_disk( self ):
- """Test uploading data files from disk"""
+ def test_0005_upload_file( self ):
+ """Test uploading 1.bed, NOT setting the file format"""
self.logout()
self.login( email='test@bx.psu.edu' )
global admin_user
admin_user = sa_session.query( galaxy.model.User ) \
.filter( galaxy.model.User.table.c.email=='test@bx.psu.edu' ) \
.one()
- history1 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
self.upload_file( '1.bed' )
- hda1 = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert hda1 is not None, "Problem retrieving hda1 from database"
- self.verify_dataset_correctness( '1.bed', hid=str( hda1.hid ) )
- self.upload_file( '2.bed', dbkey='hg17' )
- hda2 = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert hda2 is not None, "Problem retrieving hda2 from database"
- self.verify_dataset_correctness( '2.bed', hid=str( hda2.hid ) )
- self.upload_file( '3.bed', dbkey='hg17', ftype='bed' )
- hda3 = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert hda3 is not None, "Problem retrieving hda3 from database"
- self.verify_dataset_correctness( '3.bed', hid=str( hda3.hid ) )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.bed', hid=str( hda.hid ) )
+ self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0010_upload_file( self ):
+ """Test uploading 4.bed.gz, manually setting the file format"""
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
self.upload_file( '4.bed.gz', dbkey='hg17', ftype='bed' )
- hda4 = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert hda4 is not None, "Problem retrieving hda4 from database"
- self.verify_dataset_correctness( '4.bed', hid=str( hda4.hid ) )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '4.bed', hid=str( hda.hid ) )
+ self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0015_upload_file( self ):
+ """Test uploading 1.scf, manually setting the file format"""
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
self.upload_file( '1.scf', ftype='scf' )
- hda5 = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert hda5 is not None, "Problem retrieving hda5 from database"
- self.verify_dataset_correctness( '1.scf', hid=str( hda5.hid ) )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.scf', hid=str( hda.hid ) )
+ self.check_history_for_string( "Binary scf sequence file</pre>" )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0020_upload_file( self ):
+ """Test uploading 1.scf, NOT setting the file format"""
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '1.scf' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.check_history_for_string( "File Format' to 'Scf' when uploading scf files" )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0025_upload_file( self ):
+ """Test uploading 1.scf.zip, manually setting the file format"""
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
self.upload_file( '1.scf.zip', ftype='binseq.zip' )
- hda6 = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert hda6 is not None, "Problem retrieving hda6 from database"
- self.verify_dataset_correctness( '1.scf.zip', hid=str( hda6.hid ) )
- self.delete_history( id=self.security.encode_id( history1.id ) )
- def test_005_url_paste( self ):
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.scf.zip', hid=str( hda.hid ) )
+ self.check_history_for_string( "Archive of 1 binary sequence files</pre>" )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0030_upload_file( self ):
+ """Test uploading 1.scf.zip, NOT setting the file format"""
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '1.scf.zip' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.check_history_for_string( "'File Format' for archive consisting of binary files - use 'Binseq.zip'" )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0035_upload_file( self ):
+ """Test uploading 1.sam NOT setting the file format"""
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '1.sam' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.sam', hid=str( hda.hid ) )
+ self.check_history_for_string( "<th>1.QNAME</th><th>2.FLAG</th><th>3.RNAME</th><th>4.POS</th>" )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0040_upload_file( self ):
+ """Test uploading 1.sff, NOT setting the file format"""
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '1.sff' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.sff', hid=str( hda.hid ) )
+ self.check_history_for_string( 'format: <span class="sff">sff' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0045_upload_file( self ):
+ """Test uploading 454Score.pdf, NOT setting the file format"""
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '454Score.pdf' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.check_history_for_string( "The uploaded file contains inappropriate content" )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0050_upload_file( self ):
+ """Test uploading 454Score.png, NOT setting the file format"""
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '454Score.png' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.check_history_for_string( "The uploaded file contains inappropriate content" )
+ def test_0055_upload_file( self ):
+ """Test uploading lped composite datatype file, manually setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ # lped data types include a ped_file and a map_file ( which is binary )
+ self.upload_composite_datatype_file( 'lped', ped_file='tinywga.ped', map_file='tinywga.map', base_name='rgenetics' )
+ # Get the latest hid for testing
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ # We'll test against the resulting ped file and map file for correctness
+ self.verify_composite_datatype_file_content( 'rgenetics.ped', str( hda.id ) )
+ self.verify_composite_datatype_file_content( 'rgenetics.map', str( hda.id ) )
+ self.check_history_for_string( "Uploaded Composite Dataset (lped)" )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0060_upload_file( self ):
+ """Test uploading pbed composite datatype file, manually setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ # pbed data types include a bim_file, a bed_file and a fam_file
+ self.upload_composite_datatype_file( 'pbed', bim_file='tinywga.bim', bed_file='tinywga.bed', fam_file='tinywga.fam', base_name='rgenetics' )
+ # Get the latest hid for testing
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ # We'll test against the resulting ped file and map file for correctness
+ self.verify_composite_datatype_file_content( 'rgenetics.bim', str( hda.id ) )
+ self.verify_composite_datatype_file_content( 'rgenetics.bed', str( hda.id ) )
+ self.verify_composite_datatype_file_content( 'rgenetics.fam', str( hda.id ) )
+ self.check_history_for_string( "Uploaded Composite Dataset (pbed)" )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0065_upload_file( self ):
+ """Test uploading asian_chars_1.txt, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( 'asian_chars_1.txt' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( 'asian_chars_1.txt', hid=str( hda.hid ) )
+ self.check_history_for_string( 'uploaded multi-byte char file' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0070_upload_file( self ):
+ """Test uploading 2gen.fastq, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '2gen.fastq' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '2gen.fastq', hid=str( hda.hid ) )
+ self.check_history_for_string( '2gen.fastq format: <span class="fastq">fastq</span>, database: \? Info: uploaded fastq file' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0075_upload_file( self ):
+ """Test uploading 1.wig, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '1.wig' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.wig', hid=str( hda.hid ) )
+ self.check_history_for_string( '1.wig format: <span class="wig">wig</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.wig" value="\?"' )
+ self.check_metadata_for_string( 'Change data type selected value="wig" selected="yes"' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0080_upload_file( self ):
+ """Test uploading 1.tabular, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '1.tabular' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.tabular', hid=str( hda.hid ) )
+ self.check_history_for_string( '1.tabular format: <span class="tabular">tabular</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.tabular" value="\?"' )
+ self.check_metadata_for_string( 'Change data type selected value="tabular" selected="yes"' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0085_upload_file( self ):
+ """Test uploading qualscores.qualsolid, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( 'qualscores.qualsolid' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( 'qualscores.qualsolid', hid=str( hda.hid ) )
+ self.check_history_for_string( '2.5 Kb, format: <span class="qualsolid">qualsolid</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'Change data type value="qualsolid" selected="yes">qualsolid' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0090_upload_file( self ):
+ """Test uploading qualscores.qual454, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( 'qualscores.qual454' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( 'qualscores.qual454', hid=str( hda.hid ) )
+ self.check_history_for_string( '5.6 Kb, format: <span class="qual454">qual454</span>, database: \?' )
+ self.check_metadata_for_string( 'Change data type value="qual454" selected="yes">qual454' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0095_upload_file( self ):
+ """Test uploading 3.maf, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '3.maf' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '3.maf', hid=str( hda.hid ) )
+ self.check_history_for_string( '3.maf format: <span class="maf">maf</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="3.maf" value="\?"' )
+ self.check_metadata_for_string( 'Convert to new format <option value="interval">Convert MAF to Genomic Intervals <option value="fasta">Convert MAF to Fasta' )
+ self.check_metadata_for_string( 'Change data type selected value="maf" selected="yes"' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0100_upload_file( self ):
+ """Test uploading 1.lav, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '1.lav' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.lav', hid=str( hda.hid ) )
+ self.check_history_for_string( '1.lav format: <span class="lav">lav</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.lav" value="\?"' )
+ self.check_metadata_for_string( 'Change data type selected value="lav" selected="yes"' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0105_upload_file( self ):
+ """Test uploading 1.interval, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '1.interval' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.interval', hid=str( hda.hid ) )
+ self.check_history_for_string( '1.interval format: <span class="interval">interval</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.interval" value="\?"' )
+ self.check_metadata_for_string( 'Chrom column: <option value="1" selected> Start column: <option value="2" selected>' )
+ self.check_metadata_for_string( 'End column: <option value="3" selected> Strand column <option value="6" selected>' )
+ self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert Genomic Intervals To BED' )
+ self.check_metadata_for_string( 'Change data type selected value="interval" selected="yes"' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0110_upload_file( self ):
+ """Test uploading 5.gff3, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '5.gff3' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '5.gff3', hid=str( hda.hid ) )
+ self.check_history_for_string( '5.gff3 format: <span class="gff3">gff3</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="5.gff3" value="\?"' )
+ self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert GFF to BED' )
+ self.check_metadata_for_string( 'Change data type selected value="gff3" selected="yes"' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0115_upload_file( self ):
+ """Test uploading html_file.txt, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( 'html_file.txt' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.check_history_for_string( 'The uploaded file contains inappropriate content' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0120_upload_file( self ):
+ """Test uploading 5.gff, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '5.gff' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '5.gff', hid=str( hda.hid ) )
+ self.check_history_for_string( '5.gff format: <span class="gff">gff</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="5.gff" value="\?"' )
+ self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert GFF to BED' )
+ self.check_metadata_for_string( 'Change data type selected value="gff" selected="yes"' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0125_upload_file( self ):
+ """Test uploading 1.fasta, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '1.fasta' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.fasta', hid=str( hda.hid ) )
+ self.check_history_for_string( '1.fasta format: <span class="fasta">fasta</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.fasta" value="\?" Change data type selected value="fasta" selected="yes"' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0130_upload_file( self ):
+ """Test uploading 1.customtrack, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '1.customtrack' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.customtrack', hid=str( hda.hid ) )
+ self.check_history_for_string( '1.customtrack format: <span class="customtrack">customtrack</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.customtrack" value="\?" Change data type selected value="customtrack" selected="yes"' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0135_upload_file( self ):
+ """Test uploading shrimp_cs_test1.csfasta, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( 'shrimp_cs_test1.csfasta' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( 'shrimp_cs_test1.csfasta', hid=str( hda.hid ) )
+ self.check_history_for_string( '162.6 Kb, format: <span class="csfasta">csfasta</span>, <td>>2_14_26_F3,-1282216.0</td>' )
+ self.check_metadata_for_string( 'value="shrimp_cs_test1.csfasta" value="\?" Change data type value="csfasta" selected="yes"' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0140_upload_file( self ):
+ """Test uploading megablast_xml_parser_test1.gz, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( 'megablast_xml_parser_test1.gz' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.check_history_for_string( 'NCBI Blast XML data format: <span class="blastxml">blastxml</span>' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0145_upload_file( self ):
+ """Test uploading 1.axt, NOT setting the file format"""
+ # Logged in as admin_user
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '1.axt' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ self.verify_dataset_correctness( '1.axt', hid=str( hda.hid ) )
+ self.check_history_for_string( '1.axt format: <span class="axt">axt</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.axt" value="\?" Change data type selected value="axt" selected="yes"' )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0150_url_paste( self ):
"""Test url paste behavior"""
# Logged in as admin_user
# Deleting the current history should have created a new history
self.check_history_for_string( 'Your history is empty' )
- history2 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
self.upload_url_paste( 'hello world' )
self.check_history_for_string( 'Pasted Entry' )
self.check_history_for_string( 'hello world' )
self.upload_url_paste( u'hello world' )
self.check_history_for_string( 'Pasted Entry' )
self.check_history_for_string( 'hello world' )
- self.delete_history( id=self.security.encode_id( history2.id ) )
- def test_010_upload_lped_composite_datatype_files( self ):
- """Test uploading lped composite datatype files"""
- # Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history3 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
- # lped data types include a ped_file and a map_file ( which is binary )
- self.upload_composite_datatype_file( 'lped', ped_file='tinywga.ped', map_file='tinywga.map', base_name='rgenetics' )
- # Get the latest hid for testing
- hda1 = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert hda1 is not None, "Problem retrieving hda1 from database"
- # We'll test against the resulting ped file and map file for correctness
- self.verify_composite_datatype_file_content( 'rgenetics.ped', str( hda1.id ) )
- self.verify_composite_datatype_file_content( 'rgenetics.map', str( hda1.id ) )
- self.delete_history( id=self.security.encode_id( history3.id ) )
- def test_015_upload_pbed_composite_datatype_files( self ):
- """Test uploading pbed composite datatype files"""
- # Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history4 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
- # pbed data types include a bim_file, a bed_file and a fam_file
- self.upload_composite_datatype_file( 'pbed', bim_file='tinywga.bim', bed_file='tinywga.bed', fam_file='tinywga.fam', base_name='rgenetics' )
- # Get the latest hid for testing
- hda1 = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert hda1 is not None, "Problem retrieving hda1 from database"
- # We'll test against the resulting ped file and map file for correctness
- self.verify_composite_datatype_file_content( 'rgenetics.bim', str( hda1.id ) )
- self.verify_composite_datatype_file_content( 'rgenetics.bed', str( hda1.id ) )
- self.verify_composite_datatype_file_content( 'rgenetics.fam', str( hda1.id ) )
- self.delete_history( id=self.security.encode_id( history4.id ) )
- def test_020_upload_multibyte_character_file( self ):
- """Test uploading multi-byte character file"""
- # Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history5 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
- self.upload_file( 'asian_chars_1.txt' )
- hda1 = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert hda1 is not None, "Problem retrieving hda1 from database"
- self.verify_dataset_correctness( 'asian_chars_1.txt', hid=str( hda1.hid ) )
- self.check_history_for_string( 'uploaded multi-byte char file' )
- self.delete_history( id=self.security.encode_id( history5.id ) )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_9999_clean_up( self ):
+ self.logout()
\ No newline at end of file
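Each of the twenty-odd new upload tests above repeats the same two queries for the latest history and the latest HistoryDatasetAssociation. A pair of module-level helpers would shrink each test to its upload and assertions; a hypothetical sketch, not part of this patch, reusing the test module's own imports:

import galaxy.model
from galaxy.model.orm import and_, desc
from galaxy.model.mapping import context as sa_session

def get_latest_history( user ):
    """Newest undeleted history owned by user."""
    return sa_session.query( galaxy.model.History ) \
                     .filter( and_( galaxy.model.History.table.c.deleted==False,
                                    galaxy.model.History.table.c.user_id==user.id ) ) \
                     .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
                     .first()

def get_latest_hda():
    """Most recently created HistoryDatasetAssociation."""
    return sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
                     .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
                     .first()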
diff -r 4f9b630f9976 -r 23c1b9799bce test/functional/test_sniffing_and_metadata_settings.py
--- a/test/functional/test_sniffing_and_metadata_settings.py Fri Nov 13 10:39:15 2009 -0500
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,262 +0,0 @@
-import galaxy.model
-from galaxy.model.orm import *
-from galaxy.model.mapping import context as sa_session
-from base.twilltestcase import TwillTestCase
-
-class SniffingAndMetaDataSettings( TwillTestCase ):
- def test_000_axt_datatype( self ):
- """Testing correctly sniffing axt data type upon upload"""
- self.logout()
- self.login( email='test@bx.psu.edu' )
- global admin_user
- admin_user = sa_session.query( galaxy.model.User ).filter( galaxy.model.User.table.c.email=='test@bx.psu.edu' ).one()
- self.new_history( name='history1' )
- global history1
- history1 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
- assert history1 is not None, "Problem retrieving history1 from database"
- self.upload_file( '1.axt' )
- self.verify_dataset_correctness( '1.axt' )
- self.check_history_for_string( '1.axt format: <span class="axt">axt</span>, database: \? Info: uploaded file' )
- self.check_metadata_for_string( 'value="1.axt" value="\?" Change data type selected value="axt" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving axt hda from the database"
- if not latest_hda.name == '1.axt' and not latest_hda.extension == 'axt':
- raise AssertionError, "axt data type was not correctly sniffed."
- def test_005_bed_datatype( self ):
- """Testing correctly sniffing bed data type upon upload"""
- self.upload_file( '1.bed' )
- self.verify_dataset_correctness( '1.bed' )
- self.check_history_for_string( '1.bed format: <span class="bed">bed</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string( 'value="1.bed" value="\?"' )
- self.check_metadata_for_string( 'Chrom column: <option value="1" selected> Start column: <option value="2" selected>' )
- self.check_metadata_for_string( 'End column: <option value="3" selected> Strand column <option value="6" selected>' )
- self.check_metadata_for_string( 'Convert to new format value="bed">Convert Genomic Intervals To BED <option value="gff">Convert BED to GFF' )
- self.check_metadata_for_string( 'Change data type selected value="bed" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving bed hda from the database"
- if not latest_hda.name == '1.bed' and not latest_hda.extension == 'bed':
- raise AssertionError, "bed data type was not correctly sniffed."
- def test_010_blastxml_datatype( self ):
- """Testing correctly sniffing blastxml data type upon upload"""
- self.upload_file( 'megablast_xml_parser_test1.gz' )
- self.check_history_for_string( 'NCBI Blast XML data format: <span class="blastxml">blastxml</span>' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving blastxml hda from the database"
- if not latest_hda.name == 'megablast_xml_parser_test1' and not latest_hda.extension == 'blastxml':
- raise AssertionError, "blastxml data type was not correctly sniffed."
- def test_015_csfasta_datatype( self ):
- """Testing correctly sniffing csfasta data type upon upload"""
- self.upload_file( 'shrimp_cs_test1.csfasta' )
- self.verify_dataset_correctness( 'shrimp_cs_test1.csfasta' )
- self.check_history_for_string( '162.6 Kb, format: <span class="csfasta">csfasta</span>, <td>>2_14_26_F3,-1282216.0</td>' )
- self.check_metadata_for_string( 'value="shrimp_cs_test1.csfasta" value="\?" Change data type value="csfasta" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving csfasta hda from the database"
- if not latest_hda.name == 'shrimp_cs_test1.csfasta' and not latest_hda.extension == 'csfasta':
- raise AssertionError, "csfasta data type was not correctly sniffed."
- def test_020_customtrack_datatype( self ):
- """Testing correctly sniffing customtrack data type upon upload"""
- self.upload_file( '1.customtrack' )
- self.verify_dataset_correctness( '1.customtrack' )
- self.check_history_for_string( '1.customtrack format: <span class="customtrack">customtrack</span>, database: \? Info: uploaded file' )
- self.check_metadata_for_string( 'value="1.customtrack" value="\?" Change data type selected value="customtrack" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving customtrack hda from the database"
- if not latest_hda.name == '1.customtrack' and not latest_hda.extension == 'customtrack':
- raise AssertionError, "customtrack data type was not correctly sniffed."
- def test_025_fasta_datatype( self ):
- """Testing correctly sniffing fasta data type upon upload"""
- self.upload_file( '1.fasta' )
- self.verify_dataset_correctness( '1.fasta' )
- self.check_history_for_string( '1.fasta format: <span class="fasta">fasta</span>, database: \? Info: uploaded file' )
- self.check_metadata_for_string( 'value="1.fasta" value="\?" Change data type selected value="fasta" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving fasta hda from the database"
- if not latest_hda.name == '1.fasta' and not latest_hda.extension == 'fasta':
- raise AssertionError, "fasta data type was not correctly sniffed."
- def test_035_gff_datatype( self ):
- """Testing correctly sniffing gff data type upon upload"""
- self.upload_file( '5.gff' )
- self.verify_dataset_correctness( '5.gff' )
- self.check_history_for_string( '5.gff format: <span class="gff">gff</span>, database: \? Info: uploaded file' )
- self.check_metadata_for_string( 'value="5.gff" value="\?"' )
- self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert GFF to BED' )
- self.check_metadata_for_string( 'Change data type selected value="gff" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving gff hda from the database"
- if not latest_hda.name == '5.gff' and not latest_hda.extension == 'gff':
- raise AssertionError, "gff data type was not correctly sniffed."
- def test_040_gff3_datatype( self ):
- """Testing correctly sniffing gff3 data type upon upload"""
- self.upload_file( '5.gff3' )
- self.verify_dataset_correctness( '5.gff3' )
- self.check_history_for_string( '5.gff3 format: <span class="gff3">gff3</span>, database: \? Info: uploaded file' )
- self.check_metadata_for_string( 'value="5.gff3" value="\?"' )
- self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert GFF to BED' )
- self.check_metadata_for_string( 'Change data type selected value="gff3" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving gff3 hda from the database"
- if not latest_hda.name == '5.gff3' and not latest_hda.extension == 'gff3':
- raise AssertionError, "gff3 data type was not correctly sniffed."
- # TODO: the following test generates a data.hid == None, breaking this and all following tests
- # I am not currently able to track down why, and uploading inappropriate files outside of the
- # functional test framework seems to generate valid hids, so this needs to be tracked down and fixed
- # ASAP, at which point this test can be un-commented.
- #def test_045_html_datatype( self ):
- #"""Testing correctly sniffing html data type upon upload"""
- #self.upload_file( 'html_file.txt' )
- #self.check_history_for_string( 'An error occurred running this job: No data: you attempted to upload an inappropriate file.' )
- #latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
- # .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
- #assert latest_hda is not None, "Problem retrieving html hda from the database"
- #if not latest_hda.name == 'html_file.txt' and not latest_hda.extension == 'data':
- # raise AssertionError, "html data type was not correctly sniffed."
- def test_050_interval_datatype( self ):
- """Testing correctly sniffing interval data type upon upload"""
- self.upload_file( '1.interval' )
- self.verify_dataset_correctness( '1.interval' )
- self.check_history_for_string( '1.interval format: <span class="interval">interval</span>, database: \? Info: uploaded file' )
- self.check_metadata_for_string( 'value="1.interval" value="\?"' )
- self.check_metadata_for_string( 'Chrom column: <option value="1" selected> Start column: <option value="2" selected>' )
- self.check_metadata_for_string( 'End column: <option value="3" selected> Strand column <option value="6" selected>' )
- self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert Genomic Intervals To BED' )
- self.check_metadata_for_string( 'Change data type selected value="interval" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving interval hda from the database"
- if not latest_hda.name == '1.interval' and not latest_hda.extension == 'interval':
- raise AssertionError, "interval data type was not correctly sniffed."
- def test_055_lav_datatype( self ):
- """Testing correctly sniffing lav data type upon upload"""
- self.upload_file( '1.lav' )
- self.verify_dataset_correctness( '1.lav' )
- self.check_history_for_string( '1.lav format: <span class="lav">lav</span>, database: \? Info: uploaded file' )
- self.check_metadata_for_string( 'value="1.lav" value="\?"' )
- self.check_metadata_for_string( 'Change data type selected value="lav" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving lav hda from the database"
- if not latest_hda.name == '1.lav' and not latest_hda.extension == 'lav':
- raise AssertionError, "lav data type was not correctly sniffed."
- def test_060_maf_datatype( self ):
- """Testing correctly sniffing maf data type upon upload"""
- self.upload_file( '3.maf' )
- self.verify_dataset_correctness( '3.maf' )
- self.check_history_for_string( '3.maf format: <span class="maf">maf</span>, database: \? Info: uploaded file' )
- self.check_metadata_for_string( 'value="3.maf" value="\?"' )
- self.check_metadata_for_string( 'Convert to new format <option value="interval">Convert MAF to Genomic Intervals <option value="fasta">Convert MAF to Fasta' )
- self.check_metadata_for_string( 'Change data type selected value="maf" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving maf hda from the database"
- if not latest_hda.name == '3.maf' and not latest_hda.extension == 'maf':
- raise AssertionError, "maf data type was not correctly sniffed."
- def test_065_qual454_datatype( self ):
- """Testing correctly sniffing qual454 data type upon upload"""
- self.upload_file( 'qualscores.qual454' )
- self.verify_dataset_correctness( 'qualscores.qual454' )
- self.check_history_for_string( '5.6 Kb, format: <span class="qual454">qual454</span>, database: \?' )
- self.check_metadata_for_string( 'Change data type value="qual454" selected="yes">qual454' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving qual454 hda from the database"
- if not latest_hda.name == 'qualscores.qual454' and not latest_hda.extension == 'qual454':
- raise AssertionError, "qual454 data type was not correctly sniffed."
- def test_070_qualsolid_datatype( self ):
- """Testing correctly sniffing qualsolid data type upon upload"""
- self.upload_file( 'qualscores.qualsolid' )
- self.verify_dataset_correctness('qualscores.qualsolid' )
- self.check_history_for_string('2.5 Kb, format: <span class="qualsolid">qualsolid</span>, database: \? Info: uploaded file' )
- self.check_metadata_for_string( 'Change data type value="qualsolid" selected="yes">qualsolid' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving qualsolid hda from the database"
- if not latest_hda.name == 'qualscores.qualsolid' and not latest_hda.extension == 'qualsolid':
- raise AssertionError, "qualsolid data type was not correctly sniffed."
- def test_075_tabular_datatype( self ):
- """Testing correctly sniffing tabular data type upon upload"""
- self.upload_file( '1.tabular' )
- self.verify_dataset_correctness( '1.tabular' )
- self.check_history_for_string( '1.tabular format: <span class="tabular">tabular</span>, database: \? Info: uploaded file' )
- self.check_metadata_for_string( 'value="1.tabular" value="\?"' )
- self.check_metadata_for_string( 'Change data type selected value="tabular" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving tabular hda from the database"
- if not latest_hda.name == '1.tabular' and not latest_hda.extension == 'tabular':
- raise AssertionError, "tabular data type was not correctly sniffed."
- def test_080_wig_datatype( self ):
- """Testing correctly sniffing wig data type upon upload"""
- self.upload_file( '1.wig' )
- self.verify_dataset_correctness( '1.wig' )
- self.check_history_for_string( '1.wig format: <span class="wig">wig</span>, database: \? Info: uploaded file' )
- self.check_metadata_for_string( 'value="1.wig" value="\?"' )
- self.check_metadata_for_string( 'Change data type selected value="wig" selected="yes"' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving wig hda from the database"
- if not latest_hda.name == '1.wig' and not latest_hda.extension == 'wig':
- raise AssertionError, "wig data type was not correctly sniffed."
- def test_090_sam_datatype( self ):
- """Testing correctly sniffing sam format upon upload"""
- self.upload_file( '1.sam' )
- self.verify_dataset_correctness( '1.sam' )
- self.check_history_for_string( '1.sam format: <span class="sam">sam</span>, database: \? Info: uploaded sam file' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving sam hda from the database"
- if not latest_hda.name == '1.sam' and not latest_hda.extension == 'sam':
- raise AssertionError, "sam data type was not correctly sniffed."
- def test_095_fastq_datatype( self ):
- """Testing correctly sniffing fastq ( generic ) data type upon upload"""
- self.upload_file( '2gen.fastq' )
- self.verify_dataset_correctness( '2gen.fastq' )
- self.check_history_for_string( '2gen.fastq format: <span class="fastq">fastq</span>, database: \? Info: uploaded fastq file' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving fastq hda from the database"
- if not latest_hda.name == '2gen.fastq' and not latest_hda.extension == 'fastq':
- raise AssertionError, "fastq data type was not correctly sniffed."
- def test_0100_sff_datatype( self ):
- """Testing correctly sniffing sff format upon upload"""
- self.upload_file( '1.sff' )
- self.verify_dataset_correctness( '1.sff' )
- self.check_history_for_string( 'format: <span class="sff">sff' )
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
- assert latest_hda is not None, "Problem retrieving sff hda from the database"
- if not latest_hda.name == '1.sff' and not latest_hda.extension == 'sff':
- raise AssertionError, "sff data type was not correctly sniffed."
- def test_9999_clean_up( self ):
- self.delete_history( id=self.security.encode_id( history1.id ) )
- self.logout()
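
Every deleted test above repeats the same retrieve-and-assert pattern. As a sketch (a hypothetical helper, not part of the changeset), the shared logic looks like the following; note that the original guards used 'and', which only raised when both the name and the extension were wrong, so 'or' is used here to express the intended assertion:

from sqlalchemy import desc

def assert_latest_hda( sa_session, model, expected_name, expected_ext ):
    # Fetch the most recently created HistoryDatasetAssociation.
    latest_hda = sa_session.query( model.HistoryDatasetAssociation ) \
                           .order_by( desc( model.HistoryDatasetAssociation.table.c.create_time ) ) \
                           .first()
    assert latest_hda is not None, "Problem retrieving hda from the database"
    if latest_hda.name != expected_name or latest_hda.extension != expected_ext:
        raise AssertionError( "%s data type was not correctly sniffed." % expected_ext )

# e.g.: assert_latest_hda( sa_session, galaxy.model, '1.bed', 'bed' )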
diff -r 4f9b630f9976 -r 23c1b9799bce tools/data_source/upload.py
--- a/tools/data_source/upload.py Fri Nov 13 10:39:15 2009 -0500
+++ b/tools/data_source/upload.py Fri Nov 13 14:13:03 2009 -0500
@@ -9,6 +9,7 @@
# need to import model before sniff to resolve a circular import dependency
import galaxy.model
from galaxy.datatypes import sniff
+from galaxy.datatypes.binary import sniffable_binary_formats, unsniffable_binary_formats
from galaxy import util
from galaxy.util.json import *
@@ -200,25 +201,29 @@
ext = dataset.file_type
if not data_type:
if check_binary( dataset.path ):
- if dataset.is_binary is not None:
- data_type = 'binary'
- ext = dataset.file_type
- else:
- parts = dataset.name.split( "." )
- if len( parts ) > 1:
- ext = parts[1].strip().lower()
- if not( ext == 'ab1' or ext == 'scf' ):
- file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ data_type = 'binary'
+ binary_ok = False
+ parts = dataset.name.split( "." )
+ if len( parts ) > 1:
+ ext = parts[1].strip().lower()
+ if ext in unsniffable_binary_formats and dataset.file_type == ext:
+ binary_ok = True
+ elif ext in unsniffable_binary_formats and dataset.file_type != ext:
+ err_msg = "You must manually set the 'File Format' to '%s' when uploading %s files." % ( ext.capitalize(), ext )
+ file_err( err_msg, dataset, json_file )
+ return
+ if not binary_ok and ext in sniffable_binary_formats:
+ # Sniff the file to confirm its data type
+ tmp_ext = sniff.guess_ext( dataset.path )
+ if tmp_ext == ext:
+ binary_ok = True
+ else:
+ err_msg = "The content of the file does not match its type (%s)." % ext.capitalize()
+ file_err( err_msg, dataset, json_file )
return
- if ext == 'ab1' and dataset.file_type != 'ab1':
- file_err( "You must manually set the 'File Format' to 'Ab1' when uploading ab1 files.", dataset, json_file )
- return
- elif ext == 'scf' and dataset.file_type != 'scf':
- file_err( "You must manually set the 'File Format' to 'Scf' when uploading scf files.", dataset, json_file )
- return
- else:
- ext = 'binary'
- data_type = 'binary'
+ if not binary_ok:
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
if not data_type:
# We must have a text file
if check_html( dataset.path ):
@@ -234,10 +239,6 @@
else:
ext = dataset.file_type
data_type = ext
- elif data_type == 'binary' and ext == 'auto':
- # currently we are only sniffing sff binary files
- ext = sniff.guess_ext( dataset.path )
- data_type = ext
# Save job info for the framework
if ext == 'auto' and dataset.ext:
ext = dataset.ext
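
In short, the new upload.py logic above treats any binary upload as data_type 'binary' and accepts it only when the filename extension names a known binary format: unsniffable formats (ab1, scf) must match the user-selected file type, while sniffable ones (currently sff) are confirmed by content. A minimal sketch of that decision flow, with stand-in names for the format lists and for galaxy.datatypes.sniff.guess_ext:

unsniffable_binary_formats = [ 'ab1', 'scf' ]  # user must select the format
sniffable_binary_formats = [ 'sff' ]           # content can confirm the format

def resolve_binary_upload( name, declared_type, guess_ext ):
    """Return ( ok, ext_or_error_message ) for an uploaded binary file."""
    parts = name.split( '.' )
    ext = parts[1].strip().lower() if len( parts ) > 1 else ''
    if ext in unsniffable_binary_formats:
        if declared_type == ext:
            return True, ext
        return False, "You must manually set the 'File Format' to '%s' when uploading %s files." % ( ext.capitalize(), ext )
    if ext in sniffable_binary_formats:
        # Sniff the content to confirm the claimed type.
        if guess_ext( name ) == ext:
            return True, ext
        return False, "The content of the file does not match its type (%s)." % ext.capitalize()
    return False, 'The uploaded file contains inappropriate content'

# e.g.: resolve_binary_upload( '1.sff', 'auto', lambda path: 'sff' ) -> ( True, 'sff' )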
16 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/0ef5adc72242
changeset: 3028:0ef5adc72242
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Nov 13 16:24:00 2009 -0500
description:
Clean up a bit of code in genetics.py, and add all rgenetics data types to datatypes_conf.xml.sample. Also include the new Sff data type in the upload tool's config help section.
diffstat:
datatypes_conf.xml.sample | 431 ++++++++++++++++++++++---------------------
lib/galaxy/datatypes/genetics.py | 119 ++++++-----
tools/data_source/upload.xml | 6 +
3 files changed, 294 insertions(+), 262 deletions(-)
diffs (805 lines):
diff -r a0880f1b4297 -r 0ef5adc72242 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Fri Nov 13 15:28:22 2009 -0500
+++ b/datatypes_conf.xml.sample Fri Nov 13 16:24:00 2009 -0500
@@ -1,213 +1,224 @@
<?xml version="1.0"?>
<datatypes>
- <registration converters_path="lib/galaxy/datatypes/converters">
- <datatype extension="ab1" type="galaxy.datatypes.binary:Ab1" mimetype="application/octet-stream" display_in_upload="true"/>
- <datatype extension="axt" type="galaxy.datatypes.sequence:Axt" display_in_upload="true"/>
- <datatype extension="bam" type="galaxy.datatypes.binary:Bam" mimetype="application/octet-stream"/>
- <datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true">
- <converter file="bed_to_gff_converter.xml" target_datatype="gff"/>
- <converter file="interval_to_coverage.xml" target_datatype="coverage"/>
- <converter file="bed_to_interval_index_converter.xml" target_datatype="interval_index"/>
- </datatype>
- <datatype extension="binseq.zip" type="galaxy.datatypes.binary:Binseq" mimetype="application/zip" display_in_upload="true"/>
- <datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true">
- <!-- no converters yet -->
- </datatype>
- <datatype extension="coverage" type="galaxy.datatypes.coverage:LastzCoverage" display_in_upload="true">
- <indexer file="coverage.xml" />
- </datatype>
- <datatype extension="customtrack" type="galaxy.datatypes.interval:CustomTrack"/>
- <datatype extension="csfasta" type="galaxy.datatypes.sequence:csFasta" display_in_upload="true"/>
- <datatype extension="data" type="galaxy.datatypes.data:Data" mimetype="application/octet-stream"/>
- <datatype extension="fasta" type="galaxy.datatypes.sequence:Fasta" display_in_upload="true">
- <converter file="fasta_to_tabular_converter.xml" target_datatype="tabular"/>
- </datatype>
- <datatype extension="fastq" type="galaxy.datatypes.sequence:Fastq" display_in_upload="true"/>
- <datatype extension="fastqsanger" type="galaxy.datatypes.sequence:FastqSanger" display_in_upload="true"/>
- <datatype extension="genetrack" type="galaxy.datatypes.tracks:GeneTrack"/>
- <datatype extension="gff" type="galaxy.datatypes.interval:Gff" display_in_upload="true">
- <converter file="gff_to_bed_converter.xml" target_datatype="bed"/>
- </datatype>
- <datatype extension="gff3" type="galaxy.datatypes.interval:Gff3" display_in_upload="true"/>
- <datatype extension="gif" type="galaxy.datatypes.images:Image" mimetype="image/gif"/>
- <datatype extension="gmaj.zip" type="galaxy.datatypes.images:Gmaj" mimetype="application/zip"/>
- <datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/>
- <datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true">
- <converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
- <indexer file="interval_awk.xml" />
- </datatype>
- <datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/>
- <datatype extension="laj" type="galaxy.datatypes.images:Laj"/>
- <datatype extension="lav" type="galaxy.datatypes.sequence:Lav" display_in_upload="true"/>
- <datatype extension="maf" type="galaxy.datatypes.sequence:Maf" display_in_upload="true">
- <converter file="maf_to_fasta_converter.xml" target_datatype="fasta"/>
- <converter file="maf_to_interval_converter.xml" target_datatype="interval"/>
- </datatype>
- <datatype extension="pdf" type="galaxy.datatypes.images:Image" mimetype="application/pdf"/>
- <datatype extension="png" type="galaxy.datatypes.images:Image" mimetype="image/png"/>
- <datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/>
- <datatype extension="qualsolid" type="galaxy.datatypes.qualityscore:QualityScoreSOLiD" display_in_upload="true"/>
- <datatype extension="qual454" type="galaxy.datatypes.qualityscore:QualityScore454" display_in_upload="true"/>
- <datatype extension="sam" type="galaxy.datatypes.tabular:Sam" display_in_upload="true"/>
- <datatype extension="scf" type="galaxy.datatypes.binary:Scf" mimetype="application/octet-stream" display_in_upload="true"/>
- <datatype extension="sff" type="galaxy.datatypes.binary:Sff" mimetype="application/octet-stream" display_in_upload="true"/>
- <datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/>
- <datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/>
- <datatype extension="txt" type="galaxy.datatypes.data:Text" display_in_upload="true"/>
- <datatype extension="blastxml" type="galaxy.datatypes.xml:BlastXml" display_in_upload="true"/>
- <datatype extension="txtseq.zip" type="galaxy.datatypes.data:Txtseq" mimetype="application/zip" display_in_upload="true"/>
- <datatype extension="wig" type="galaxy.datatypes.interval:Wiggle" display_in_upload="true">
- <converter file="wiggle_to_array_tree_converter.xml" target_datatype="array_tree"/>
- </datatype>
- <datatype extension="array_tree" type="galaxy.datatypes.data:Data" />
- <datatype extension="interval_index" type="galaxy.datatypes.data:Data" />
- <!-- EMBOSS TOOLS -->
- <datatype extension="acedb" type="galaxy.datatypes.data:Text"/>
- <datatype extension="asn1" type="galaxy.datatypes.data:Text"/>
- <datatype extension="btwisted" type="galaxy.datatypes.data:Text"/>
- <datatype extension="cai" type="galaxy.datatypes.data:Text"/>
- <datatype extension="charge" type="galaxy.datatypes.data:Text"/>
- <datatype extension="checktrans" type="galaxy.datatypes.data:Text"/>
- <datatype extension="chips" type="galaxy.datatypes.data:Text"/>
- <datatype extension="clustal" type="galaxy.datatypes.data:Text"/>
- <datatype extension="codata" type="galaxy.datatypes.data:Text"/>
- <datatype extension="codcmp" type="galaxy.datatypes.data:Text"/>
- <datatype extension="coderet" type="galaxy.datatypes.data:Text"/>
- <datatype extension="compseq" type="galaxy.datatypes.data:Text"/>
- <datatype extension="cpgplot" type="galaxy.datatypes.data:Text"/>
- <datatype extension="cpgreport" type="galaxy.datatypes.data:Text"/>
- <datatype extension="cusp" type="galaxy.datatypes.data:Text"/>
- <datatype extension="cut" type="galaxy.datatypes.data:Text"/>
- <datatype extension="dan" type="galaxy.datatypes.data:Text"/>
- <datatype extension="dbmotif" type="galaxy.datatypes.data:Text"/>
- <datatype extension="diffseq" type="galaxy.datatypes.data:Text"/>
- <datatype extension="digest" type="galaxy.datatypes.data:Text"/>
- <datatype extension="dreg" type="galaxy.datatypes.data:Text"/>
- <datatype extension="einverted" type="galaxy.datatypes.data:Text"/>
- <datatype extension="embl" type="galaxy.datatypes.data:Text"/>
- <datatype extension="epestfind" type="galaxy.datatypes.data:Text"/>
- <datatype extension="equicktandem" type="galaxy.datatypes.data:Text"/>
- <datatype extension="est2genome" type="galaxy.datatypes.data:Text"/>
- <datatype extension="etandem" type="galaxy.datatypes.data:Text"/>
- <datatype extension="excel" type="galaxy.datatypes.data:Text"/>
- <datatype extension="feattable" type="galaxy.datatypes.data:Text"/>
- <datatype extension="fitch" type="galaxy.datatypes.data:Text"/>
- <datatype extension="freak" type="galaxy.datatypes.data:Text"/>
- <datatype extension="fuzznuc" type="galaxy.datatypes.data:Text"/>
- <datatype extension="fuzzpro" type="galaxy.datatypes.data:Text"/>
- <datatype extension="fuzztran" type="galaxy.datatypes.data:Text"/>
- <datatype extension="garnier" type="galaxy.datatypes.data:Text"/>
- <datatype extension="gcg" type="galaxy.datatypes.data:Text"/>
- <datatype extension="geecee" type="galaxy.datatypes.data:Text"/>
- <datatype extension="genbank" type="galaxy.datatypes.data:Text"/>
- <datatype extension="helixturnhelix" type="galaxy.datatypes.data:Text"/>
- <datatype extension="hennig86" type="galaxy.datatypes.data:Text"/>
- <datatype extension="hmoment" type="galaxy.datatypes.data:Text"/>
- <datatype extension="ig" type="galaxy.datatypes.data:Text"/>
- <datatype extension="isochore" type="galaxy.datatypes.data:Text"/>
- <datatype extension="jackknifer" type="galaxy.datatypes.data:Text"/>
- <datatype extension="jackknifernon" type="galaxy.datatypes.data:Text"/>
- <datatype extension="markx10" type="galaxy.datatypes.data:Text"/>
- <datatype extension="markx1" type="galaxy.datatypes.data:Text"/>
- <datatype extension="markx0" type="galaxy.datatypes.data:Text"/>
- <datatype extension="markx3" type="galaxy.datatypes.data:Text"/>
- <datatype extension="markx2" type="galaxy.datatypes.data:Text"/>
- <datatype extension="match" type="galaxy.datatypes.data:Text"/>
- <datatype extension="mega" type="galaxy.datatypes.data:Text"/>
- <datatype extension="meganon" type="galaxy.datatypes.data:Text"/>
- <datatype extension="motif" type="galaxy.datatypes.data:Text"/>
- <datatype extension="msf" type="galaxy.datatypes.data:Text"/>
- <datatype extension="nametable" type="galaxy.datatypes.data:Text"/>
- <datatype extension="ncbi" type="galaxy.datatypes.data:Text"/>
- <datatype extension="needle" type="galaxy.datatypes.data:Text"/>
- <datatype extension="newcpgreport" type="galaxy.datatypes.data:Text"/>
- <datatype extension="newcpgseek" type="galaxy.datatypes.data:Text"/>
- <datatype extension="nexus" type="galaxy.datatypes.data:Text"/>
- <datatype extension="nexusnon" type="galaxy.datatypes.data:Text"/>
- <datatype extension="noreturn" type="galaxy.datatypes.data:Text"/>
- <datatype extension="pair" type="galaxy.datatypes.data:Text"/>
- <datatype extension="palindrome" type="galaxy.datatypes.data:Text"/>
- <datatype extension="pepcoil" type="galaxy.datatypes.data:Text"/>
- <datatype extension="pepinfo" type="galaxy.datatypes.data:Text"/>
- <datatype extension="pepstats" type="galaxy.datatypes.data:Text"/>
- <datatype extension="phylip" type="galaxy.datatypes.data:Text"/>
- <datatype extension="phylipnon" type="galaxy.datatypes.data:Text"/>
- <datatype extension="pir" type="galaxy.datatypes.data:Text"/>
- <datatype extension="polydot" type="galaxy.datatypes.data:Text"/>
- <datatype extension="preg" type="galaxy.datatypes.data:Text"/>
- <datatype extension="prettyseq" type="galaxy.datatypes.data:Text"/>
- <datatype extension="primersearch" type="galaxy.datatypes.data:Text"/>
- <datatype extension="regions" type="galaxy.datatypes.data:Text"/>
- <datatype extension="score" type="galaxy.datatypes.data:Text"/>
- <datatype extension="selex" type="galaxy.datatypes.data:Text"/>
- <datatype extension="seqtable" type="galaxy.datatypes.data:Text"/>
- <datatype extension="showfeat" type="galaxy.datatypes.data:Text"/>
- <datatype extension="showorf" type="galaxy.datatypes.data:Text"/>
- <datatype extension="simple" type="galaxy.datatypes.data:Text"/>
- <datatype extension="sixpack" type="galaxy.datatypes.data:Text"/>
- <datatype extension="srs" type="galaxy.datatypes.data:Text"/>
- <datatype extension="srspair" type="galaxy.datatypes.data:Text"/>
- <datatype extension="staden" type="galaxy.datatypes.data:Text"/>
- <datatype extension="strider" type="galaxy.datatypes.data:Text"/>
- <datatype extension="supermatcher" type="galaxy.datatypes.data:Text"/>
- <datatype extension="swiss" type="galaxy.datatypes.data:Text"/>
- <datatype extension="syco" type="galaxy.datatypes.data:Text"/>
- <datatype extension="table" type="galaxy.datatypes.data:Text"/>
- <datatype extension="textsearch" type="galaxy.datatypes.data:Text"/>
- <datatype extension="vectorstrip" type="galaxy.datatypes.data:Text"/>
- <datatype extension="wobble" type="galaxy.datatypes.data:Text"/>
- <datatype extension="wordcount" type="galaxy.datatypes.data:Text"/>
- <datatype extension="tagseq" type="galaxy.datatypes.data:Text"/>
- <!-- Start RGenetics Datatypes -->
- <!-- genome graphs ucsc file - first col is always marker then numeric values to plot -->
- <datatype extension="gg" type="galaxy.datatypes.genetics:GenomeGraphs"/>
- <datatype extension="rgenetics" type="galaxy.datatypes.genetics:Rgenetics"/>
- <!-- linkage format pedigree (separate .map file) -->
- <datatype extension="lped" type="galaxy.datatypes.genetics:Lped" display_in_upload="true"/>
- <!-- plink compressed file - has bed extension unfortunately -->
- <datatype extension="pbed" type="galaxy.datatypes.genetics:Pbed" display_in_upload="true"/>
- <!-- eigenstrat pedigree input file -->
- <datatype extension="eigenstratgeno" type="galaxy.datatypes.genetics:Eigenstratgeno"/>
- <!-- eigenstrat pca output file for adjusted eigenQTL eg -->
- <datatype extension="eigenstratpca" type="galaxy.datatypes.genetics:Eigenstratpca"/>
- <!-- fbat/pbat format pedigree (header row of marker names) -->
- <datatype extension="fped" type="galaxy.datatypes.genetics:Fped"/>
- <!-- part of linkage format pedigree -->
- <datatype extension="lmap" type="galaxy.datatypes.genetics:Lmap"/>
- <!-- phenotype file - fbat format -->
- <datatype extension="fphe" type="galaxy.datatypes.genetics:Fphe"/>
- <!-- phenotype file - plink format -->
- <datatype extension="pphe" type="galaxy.datatypes.genetics:Pphe"/>
- <datatype extension="snptest" type="galaxy.datatypes.genetics:Snptest"/>
- <datatype extension="snpmatrix" type="galaxy.datatypes.genetics:SNPMatrix"/>
- <datatype extension="xls" type="galaxy.datatypes.tabular:Tabular"/>
- <!-- End RGenetics Datatypes -->
- </registration>
- <sniffers>
- <!--
- The order in which Galaxy attempts to determine data types is
- important because some formats are much more loosely defined
- than others. The following list should be the most rigidly
- defined format first, followed by next-most rigidly defined,
- and so on.
- -->
- <sniffer type="galaxy.datatypes.binary:Sff"/>
- <sniffer type="galaxy.datatypes.xml:BlastXml"/>
- <sniffer type="galaxy.datatypes.sequence:Maf"/>
- <sniffer type="galaxy.datatypes.sequence:Lav"/>
- <sniffer type="galaxy.datatypes.sequence:csFasta"/>
- <sniffer type="galaxy.datatypes.qualityscore:QualityScoreSOLiD"/>
- <sniffer type="galaxy.datatypes.qualityscore:QualityScore454"/>
- <sniffer type="galaxy.datatypes.sequence:Fasta"/>
- <sniffer type="galaxy.datatypes.sequence:Fastq"/>
- <sniffer type="galaxy.datatypes.interval:Wiggle"/>
- <sniffer type="galaxy.datatypes.images:Html"/>
- <sniffer type="galaxy.datatypes.sequence:Axt"/>
- <sniffer type="galaxy.datatypes.interval:Bed"/>
- <sniffer type="galaxy.datatypes.interval:CustomTrack"/>
- <sniffer type="galaxy.datatypes.interval:Gff"/>
- <sniffer type="galaxy.datatypes.interval:Gff3"/>
- <sniffer type="galaxy.datatypes.interval:Interval"/>
- <sniffer type="galaxy.datatypes.tabular:Sam"/>
- </sniffers>
+ <registration converters_path="lib/galaxy/datatypes/converters">
+ <datatype extension="ab1" type="galaxy.datatypes.binary:Ab1" mimetype="application/octet-stream" display_in_upload="true"/>
+ <datatype extension="axt" type="galaxy.datatypes.sequence:Axt" display_in_upload="true"/>
+ <datatype extension="bam" type="galaxy.datatypes.binary:Bam" mimetype="application/octet-stream"/>
+ <datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true">
+ <converter file="bed_to_gff_converter.xml" target_datatype="gff"/>
+ <converter file="interval_to_coverage.xml" target_datatype="coverage"/>
+ <converter file="bed_to_interval_index_converter.xml" target_datatype="interval_index"/>
+ </datatype>
+ <datatype extension="binseq.zip" type="galaxy.datatypes.binary:Binseq" mimetype="application/zip" display_in_upload="true"/>
+ <datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true">
+ <!-- no converters yet -->
+ </datatype>
+ <datatype extension="coverage" type="galaxy.datatypes.coverage:LastzCoverage" display_in_upload="true">
+ <indexer file="coverage.xml" />
+ </datatype>
+ <datatype extension="customtrack" type="galaxy.datatypes.interval:CustomTrack"/>
+ <datatype extension="csfasta" type="galaxy.datatypes.sequence:csFasta" display_in_upload="true"/>
+ <datatype extension="data" type="galaxy.datatypes.data:Data" mimetype="application/octet-stream"/>
+ <datatype extension="fasta" type="galaxy.datatypes.sequence:Fasta" display_in_upload="true">
+ <converter file="fasta_to_tabular_converter.xml" target_datatype="tabular"/>
+ </datatype>
+ <datatype extension="fastq" type="galaxy.datatypes.sequence:Fastq" display_in_upload="true"/>
+ <datatype extension="fastqsanger" type="galaxy.datatypes.sequence:FastqSanger" display_in_upload="true"/>
+ <datatype extension="genetrack" type="galaxy.datatypes.tracks:GeneTrack"/>
+ <datatype extension="gff" type="galaxy.datatypes.interval:Gff" display_in_upload="true">
+ <converter file="gff_to_bed_converter.xml" target_datatype="bed"/>
+ </datatype>
+ <datatype extension="gff3" type="galaxy.datatypes.interval:Gff3" display_in_upload="true"/>
+ <datatype extension="gif" type="galaxy.datatypes.images:Image" mimetype="image/gif"/>
+ <datatype extension="gmaj.zip" type="galaxy.datatypes.images:Gmaj" mimetype="application/zip"/>
+ <datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/>
+ <datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true">
+ <converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
+ <indexer file="interval_awk.xml" />
+ </datatype>
+ <datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/>
+ <datatype extension="laj" type="galaxy.datatypes.images:Laj"/>
+ <datatype extension="lav" type="galaxy.datatypes.sequence:Lav" display_in_upload="true"/>
+ <datatype extension="maf" type="galaxy.datatypes.sequence:Maf" display_in_upload="true">
+ <converter file="maf_to_fasta_converter.xml" target_datatype="fasta"/>
+ <converter file="maf_to_interval_converter.xml" target_datatype="interval"/>
+ </datatype>
+ <datatype extension="pdf" type="galaxy.datatypes.images:Image" mimetype="application/pdf"/>
+ <datatype extension="png" type="galaxy.datatypes.images:Image" mimetype="image/png"/>
+ <datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/>
+ <datatype extension="qualsolid" type="galaxy.datatypes.qualityscore:QualityScoreSOLiD" display_in_upload="true"/>
+ <datatype extension="qual454" type="galaxy.datatypes.qualityscore:QualityScore454" display_in_upload="true"/>
+ <datatype extension="sam" type="galaxy.datatypes.tabular:Sam" display_in_upload="true"/>
+ <datatype extension="scf" type="galaxy.datatypes.binary:Scf" mimetype="application/octet-stream" display_in_upload="true"/>
+ <datatype extension="sff" type="galaxy.datatypes.binary:Sff" mimetype="application/octet-stream" display_in_upload="true"/>
+ <datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/>
+ <datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/>
+ <datatype extension="txt" type="galaxy.datatypes.data:Text" display_in_upload="true"/>
+ <datatype extension="blastxml" type="galaxy.datatypes.xml:BlastXml" display_in_upload="true"/>
+ <datatype extension="txtseq.zip" type="galaxy.datatypes.data:Txtseq" mimetype="application/zip" display_in_upload="true"/>
+ <datatype extension="wig" type="galaxy.datatypes.interval:Wiggle" display_in_upload="true">
+ <converter file="wiggle_to_array_tree_converter.xml" target_datatype="array_tree"/>
+ </datatype>
+ <datatype extension="array_tree" type="galaxy.datatypes.data:Data" />
+ <datatype extension="interval_index" type="galaxy.datatypes.data:Data" />
+ <!-- Start EMBOSS tools -->
+ <datatype extension="acedb" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="asn1" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="btwisted" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="cai" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="charge" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="checktrans" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="chips" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="clustal" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="codata" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="codcmp" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="coderet" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="compseq" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="cpgplot" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="cpgreport" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="cusp" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="cut" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="dan" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="dbmotif" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="diffseq" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="digest" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="dreg" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="einverted" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="embl" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="epestfind" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="equicktandem" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="est2genome" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="etandem" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="excel" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="feattable" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="fitch" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="freak" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="fuzznuc" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="fuzzpro" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="fuzztran" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="garnier" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="gcg" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="geecee" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="genbank" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="helixturnhelix" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="hennig86" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="hmoment" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="ig" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="isochore" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="jackknifer" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="jackknifernon" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="markx10" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="markx1" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="markx0" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="markx3" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="markx2" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="match" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="mega" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="meganon" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="motif" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="msf" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="nametable" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="ncbi" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="needle" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="newcpgreport" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="newcpgseek" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="nexus" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="nexusnon" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="noreturn" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="pair" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="palindrome" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="pepcoil" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="pepinfo" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="pepstats" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="phylip" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="phylipnon" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="pir" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="polydot" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="preg" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="prettyseq" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="primersearch" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="regions" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="score" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="selex" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="seqtable" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="showfeat" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="showorf" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="simple" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="sixpack" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="srs" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="srspair" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="staden" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="strider" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="supermatcher" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="swiss" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="syco" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="table" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="textsearch" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="vectorstrip" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="wobble" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="wordcount" type="galaxy.datatypes.data:Text"/>
+ <datatype extension="tagseq" type="galaxy.datatypes.data:Text"/>
+ <!-- End EMBOSS tools -->
+ <!-- Start RGenetics Datatypes -->
+ <datatype extension="affybatch" type="galaxy.datatypes.genetics:Affybatch" display_in_upload="true"/>
+ <!-- eigenstrat pedigree input file -->
+ <datatype extension="eigenstratgeno" type="galaxy.datatypes.genetics:Eigenstratgeno"/>
+ <!-- eigenstrat pca output file for adjusted eigenQTL eg -->
+ <datatype extension="eigenstratpca" type="galaxy.datatypes.genetics:Eigenstratpca"/>
+ <datatype extension="eset" type="galaxy.datatypes.genetics:Eset" display_in_upload="true" />
+ <!-- fbat/pbat format pedigree (header row of marker names) -->
+ <datatype extension="fped" type="galaxy.datatypes.genetics:Fped" display_in_upload="true"/>
+ <!-- phenotype file - fbat format -->
+ <datatype extension="fphe" type="galaxy.datatypes.genetics:Fphe" display_in_upload="true" mimetype="text/html"/>
+ <!-- genome graphs ucsc file - first col is always marker then numeric values to plot -->
+ <datatype extension="gg" type="galaxy.datatypes.genetics:GenomeGraphs"/>
+ <!-- part of linkage format pedigree -->
+ <datatype extension="lmap" type="galaxy.datatypes.genetics:Lmap" display_in_upload="true"/>
+ <datatype extension="malist" type="galaxy.datatypes.genetics:MAlist" display_in_upload="true"/>
+ <!-- linkage format pedigree (separate .map file) -->
+ <datatype extension="lped" type="galaxy.datatypes.genetics:Lped" display_in_upload="true">
+ <converter file="lped_to_fped_converter.xml" target_datatype="fped"/>
+ <converter file="lped_to_pbed_converter.xml" target_datatype="pbed"/>
+ </datatype>
+ <!-- plink compressed file - has bed extension unfortunately -->
+ <datatype extension="pbed" type="galaxy.datatypes.genetics:Pbed" display_in_upload="true">
+ <converter file="pbed_to_lped_converter.xml" target_datatype="lped"/>
+ </datatype>
+ <datatype extension="pheno" type="galaxy.datatypes.genetics:Pheno"/>
+ <!-- phenotype file - plink format -->
+ <datatype extension="pphe" type="galaxy.datatypes.genetics:Pphe" display_in_upload="true" mimetype="text/html"/>
+ <datatype extension="rexpbase" type="galaxy.datatypes.genetics:RexpBase"/>
+ <datatype extension="rgenetics" type="galaxy.datatypes.genetics:Rgenetics"/>
+ <datatype extension="snptest" type="galaxy.datatypes.genetics:Snptest" display_in_upload="true"/>
+ <datatype extension="snpmatrix" type="galaxy.datatypes.genetics:SNPMatrix" display_in_upload="true"/>
+ <datatype extension="xls" type="galaxy.datatypes.tabular:Tabular"/>
+ <!-- End RGenetics Datatypes -->
+ </registration>
+ <sniffers>
+ <!--
+ The order in which Galaxy attempts to determine data types is
+ important because some formats are much more loosely defined
+ than others. The following list should be the most rigidly
+ defined format first, followed by next-most rigidly defined,
+ and so on.
+ -->
+ <sniffer type="galaxy.datatypes.binary:Sff"/>
+ <sniffer type="galaxy.datatypes.xml:BlastXml"/>
+ <sniffer type="galaxy.datatypes.sequence:Maf"/>
+ <sniffer type="galaxy.datatypes.sequence:Lav"/>
+ <sniffer type="galaxy.datatypes.sequence:csFasta"/>
+ <sniffer type="galaxy.datatypes.qualityscore:QualityScoreSOLiD"/>
+ <sniffer type="galaxy.datatypes.qualityscore:QualityScore454"/>
+ <sniffer type="galaxy.datatypes.sequence:Fasta"/>
+ <sniffer type="galaxy.datatypes.sequence:Fastq"/>
+ <sniffer type="galaxy.datatypes.interval:Wiggle"/>
+ <sniffer type="galaxy.datatypes.images:Html"/>
+ <sniffer type="galaxy.datatypes.sequence:Axt"/>
+ <sniffer type="galaxy.datatypes.interval:Bed"/>
+ <sniffer type="galaxy.datatypes.interval:CustomTrack"/>
+ <sniffer type="galaxy.datatypes.interval:Gff"/>
+ <sniffer type="galaxy.datatypes.interval:Gff3"/>
+ <sniffer type="galaxy.datatypes.interval:Interval"/>
+ <sniffer type="galaxy.datatypes.tabular:Sam"/>
+ </sniffers>
</datatypes>
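
The comment in the <sniffers> section is worth making concrete: Galaxy walks the sniffers in the order listed, and the first sniff() that returns True decides the extension, which is why loosely defined formats (Interval, Sam) sit at the bottom and rigid ones (Sff, BlastXml) at the top. A rough illustration of that dispatch, not the exact code in galaxy.datatypes.sniff:

def guess_ext_by_sniffing( filename, sniff_order ):
    # sniff_order: datatype instances from datatypes_conf.xml, most
    # rigidly defined format first.
    for datatype in sniff_order:
        try:
            if datatype.sniff( filename ):
                return datatype.file_ext
        except Exception:
            pass  # a sniffer that chokes on foreign data just loses its turn
    return 'txt'  # nothing matched; fall back to plain text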
diff -r a0880f1b4297 -r 0ef5adc72242 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Fri Nov 13 15:28:22 2009 -0500
+++ b/lib/galaxy/datatypes/genetics.py Fri Nov 13 16:24:00 2009 -0500
@@ -1,6 +1,5 @@
"""
rgenetics datatypes
-Use at your peril
Ross Lazarus
for the rgenetics and galaxy projects
@@ -11,7 +10,6 @@
ross lazarus for rgenetics
august 20 2007
"""
-
import logging, os, sys, time, tempfile, shutil, string, glob
import data
from galaxy import util
@@ -26,8 +24,7 @@
from galaxy.datatypes.interval import Interval
from galaxy.util.hash_util import *
-gal_Log = logging.getLogger(__name__)
-verbose = False
+log = logging.getLogger(__name__)
class GenomeGraphs(Interval):
@@ -154,7 +151,7 @@
link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
ret_val.append( (site_name, link) )
else:
- gal_Log.debug('@@@ gg ucsc_links - no viewport_tuple')
+ log.debug('@@@ gg ucsc_links - no viewport_tuple')
return ret_val
def sniff( self, filename ):
"""
@@ -195,7 +192,8 @@
return False
class rgTabList(Tabular):
- """ for sampleid and for featureid lists of exclusions or inclusions in the clean tool
+ """
+ for sampleid and for featureid lists of exclusions or inclusions in the clean tool
featureid subsets on statistical criteria -> specialized display such as gg
"""
file_ext = "rgTList"
@@ -225,16 +223,19 @@
return out
class rgSampleList(rgTabList):
- """ for sampleid exclusions or inclusions in the clean tool
- output from QC eg excess het, gender error, ibd pair member,eigen outlier,excess mendel errors,...
- since they can be uploaded, should be flexible
- but they are persistent at least
- same infrastructure for expression?
+ """
+ for sampleid exclusions or inclusions in the clean tool
+ output from QC eg excess het, gender error, ibd pair member,eigen outlier,excess mendel errors,...
+ since they can be uploaded, should be flexible
+ but they are persistent at least
+ same infrastructure for expression?
"""
file_ext = "rgSList"
def __init__(self, **kwd):
- """Initialize samplelist datatype"""
+ """
+ Initialize samplelist datatype
+ """
rgTabList.__init__( self, **kwd )
self.column_names[0] = 'FID'
self.column_names[1] = 'IID'
@@ -250,10 +251,11 @@
return False
class rgFeatureList( rgTabList ):
- """ for featureid lists of exclusions or inclusions in the clean tool
- output from QC eg low maf, high missingness, bad hwe in controls, excess mendel errors,...
- featureid subsets on statistical criteria -> specialized display such as gg
- same infrastructure for expression?
+ """
+ for featureid lists of exclusions or inclusions in the clean tool
+ output from QC eg low maf, high missingness, bad hwe in controls, excess mendel errors,...
+ featureid subsets on statistical criteria -> specialized display such as gg
+ same infrastructure for expression?
"""
file_ext = "rgFList"
@@ -264,8 +266,9 @@
self.column_names[i] = s
class Rgenetics(Html):
- """class to use for rgenetics"""
-
+ """
+ class to use for rgenetics
+ """
MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="rgenetics", readonly=True, set_in_upload=True)
composite_type = 'auto_primary_file'
@@ -289,7 +292,8 @@
rval.append( '</ul></div></html>' )
return "\n".join( rval )
def regenerate_primary_file(self,dataset):
- """cannot do this until we are setting metadata
+ """
+ cannot do this until we are setting metadata
"""
def fix(oldpath,newbase):
old,e = os.path.splitext(oldpath)
@@ -314,26 +318,24 @@
f.write('\n')
f.close()
def set_meta( self, dataset, **kwd ):
- """for lped/pbed eg"""
+ """
+ for lped/pbed eg
+ """
if kwd.get('overwrite') == False:
- if verbose:
- gal_Log.debug('@@@ rgenetics set_meta called with overwrite = False')
+ #log.debug('@@@ rgenetics set_meta called with overwrite = False')
return True
try:
efp = dataset.extra_files_path
except:
- if verbose:
- gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0], dataset.name))
+ #log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0], dataset.name))
return False
try:
flist = os.listdir(efp)
except:
- if verbose:
- gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0],dataset.name))
+ #log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0],dataset.name))
return False
if len(flist) == 0:
- if verbose:
- gal_Log.debug('@@@rgenetics set_meta failed - %s efp %s is empty?' % (dataset.name,efp))
+ #log.debug('@@@rgenetics set_meta failed - %s efp %s is empty?' % (dataset.name,efp))
return False
bn = None
for f in flist:
@@ -351,7 +353,8 @@
return True
class SNPMatrix(Rgenetics):
- """fake class to distinguish different species of Rgenetics data collections
+ """
+ fake class to distinguish different species of Rgenetics data collections
"""
file_ext="snpmatrix"
@@ -363,7 +366,8 @@
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
def sniff(self,filename):
- """ need to check the file header hex code
+ """
+ need to check the file header hex code
"""
infile = open(filename, "rb")
head = infile.read(16)
@@ -374,7 +378,8 @@
return True
class Lped(Rgenetics):
- """fake class to distinguish different species of Rgenetics data collections
+ """
+ fake class to distinguish different species of Rgenetics data collections
"""
file_ext="lped"
@@ -384,7 +389,8 @@
self.add_composite_file( '%s.map', description = 'Map File', substitute_name_with_metadata = 'base_name', is_binary = True )
class Pphe(Rgenetics):
- """fake class to distinguish different species of Rgenetics data collections
+ """
+ fake class to distinguish different species of Rgenetics data collections
"""
file_ext="pphe"
@@ -393,12 +399,14 @@
self.add_composite_file( '%s.pphe', description = 'Plink Phenotype File', substitute_name_with_metadata = 'base_name' )
class Lmap(Rgenetics):
- """fake class to distinguish different species of Rgenetics data collections
+ """
+ fake class to distinguish different species of Rgenetics data collections
"""
file_ext="lmap"
class Fphe(Rgenetics):
- """fake class to distinguish different species of Rgenetics data collections
+ """
+ fake class to distinguish different species of Rgenetics data collections
"""
file_ext="fphe"
@@ -407,7 +415,8 @@
self.add_composite_file( '%s.fphe', description = 'FBAT Phenotype File', substitute_name_with_metadata = 'base_name' )
class Phe(Rgenetics):
- """fake class to distinguish different species of Rgenetics data collections
+ """
+ fake class to distinguish different species of Rgenetics data collections
"""
file_ext="phe"
@@ -416,7 +425,8 @@
self.add_composite_file( '%s.phe', description = 'Phenotype File', substitute_name_with_metadata = 'base_name' )
class Fped(Rgenetics):
- """fake class to distinguish different species of Rgenetics data collections
+ """
+ fake class to distinguish different species of Rgenetics data collections
"""
file_ext="fped"
@@ -425,7 +435,8 @@
self.add_composite_file( '%s.fped', description = 'FBAT format pedfile', substitute_name_with_metadata = 'base_name' )
class Pbed(Rgenetics):
- """fake class to distinguish different species of Rgenetics data collections
+ """
+ fake class to distinguish different species of Rgenetics data collections
"""
file_ext="pbed"
@@ -436,7 +447,8 @@
self.add_composite_file( '%s.fam', substitute_name_with_metadata = 'base_name', is_binary = True )
class Eigenstratgeno(Rgenetics):
- """fake class to distinguish different species of Rgenetics data collections
+ """
+ fake class to distinguish different species of Rgenetics data collections
"""
file_ext="eigenstratgeno"
@@ -447,7 +459,8 @@
self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name', is_binary = True )
class Eigenstratpca(Rgenetics):
- """fake class to distinguish different species of Rgenetics data collections
+ """
+ fake class to distinguish different species of Rgenetics data collections
"""
file_ext="eigenstratpca"
@@ -456,7 +469,8 @@
self.add_composite_file( '%s.eigenstratpca', description = 'Eigenstrat PCA file', substitute_name_with_metadata = 'base_name' )
class Snptest(Rgenetics):
- """fake class to distinguish different species of Rgenetics data collections
+ """
+ fake class to distinguish different species of Rgenetics data collections
"""
file_ext="snptest"
@@ -467,7 +481,8 @@
file_ext = 'pheno'
class RexpBase( Html ):
- """base class for BioC data structures in Galaxy
+ """
+ base class for BioC data structures in Galaxy
must be constructed with the pheno data in place since that
goes into the metadata for each instance
"""
@@ -488,7 +503,8 @@
self.add_composite_file( '%s.pheno', description = 'Phenodata tab text file',
substitute_name_with_metadata = 'base_name', is_binary=True)
def generate_primary_file( self, dataset = None ):
- """ This is called only at upload to write the html file
+ """
+ This is called only at upload to write the html file
cannot rename the datasets here - they come with the default unfortunately
"""
return '<html><head></head><body>AutoGenerated Primary File for Composite Dataset</body></html>'
@@ -517,7 +533,7 @@
else:
for col,code in enumerate(row): # keep column order correct
if col >= totcols:
- gal_Log.warning('### get_phecols error in pheno file - row %d col %d (%s) longer than header %s' % (nrows, col, row, head))
+ log.warning('### get_phecols error in pheno file - row %d col %d (%s) longer than header %s' % (nrows, col, row, head))
else:
concordance[col].setdefault(code,0) # first one is zero
concordance[col][code] += 1
@@ -564,7 +580,8 @@
return res
def get_pheno(self,dataset):
- """expects a .pheno file in the extra_files_dir - ugh
+ """
+ expects a .pheno file in the extra_files_dir - ugh
note that R is weird and adds the row.name in
the header so the columns are all wrong - unless you tell it not to.
A file can be written as
@@ -581,9 +598,11 @@
p = []
return '\n'.join(p)
def set_peek( self, dataset ):
- """expects a .pheno file in the extra_files_dir - ugh
+ """
+ expects a .pheno file in the extra_files_dir - ugh
+ note that R is weird and does not include the row.name in
- the header. why?"""
+ the header. why?
+ """
if not dataset.dataset.purged:
pp = os.path.join(dataset.extra_files_path,'%s.pheno' % dataset.metadata.base_name)
try:
@@ -596,8 +615,7 @@
dataset.peek = 'file does not exist\n'
dataset.blurb = 'file purged from disk'
def get_peek( self, dataset ):
- """expects a .pheno file in the extra_files_dir - ugh
- """
+ """expects a .pheno file in the extra_files_dir - ugh"""
pp = os.path.join(dataset.extra_files_path,'%s.pheno' % dataset.metadata.base_name)
try:
p = file(pp,'r').readlines()
@@ -640,8 +658,7 @@
try:
flist = os.listdir(dataset.extra_files_path)
except:
- if verbose:
- gal_Log.debug('@@@rexpression set_meta failed - no dataset?')
+ #log.debug('@@@rexpression set_meta failed - no dataset?')
return False
bn = None
for f in flist:
@@ -711,7 +728,6 @@
class Affybatch( RexpBase ):
"""derived class for BioC data structures in Galaxy """
-
file_ext = "affybatch"
def __init__( self, **kwd ):
@@ -728,7 +744,6 @@
self.add_composite_file( '%s.eset', description = 'ESet R object saved to file',
substitute_name_with_metadata = 'base_name', is_binary = True )
-
class MAlist( RexpBase ):
"""derived class for BioC data structures in Galaxy """
file_ext = "malist"
diff -r a0880f1b4297 -r 0ef5adc72242 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Fri Nov 13 15:28:22 2009 -0500
+++ b/tools/data_source/upload.xml Fri Nov 13 16:24:00 2009 -0500
@@ -181,6 +181,12 @@
-----
+**Sff**
+
+A binary file in 'Standard Flowgram Format' with a '.sff' file extension.
+
+-----
+
**Tabular (tab delimited)**
Any data in tab delimited format (tabular)
16 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/0e9cd7cfbc04
changeset: 3023:0e9cd7cfbc04
user: Nate Coraor <nate(a)bx.psu.edu>
date: Fri Nov 13 10:09:42 2009 -0500
description:
Upgrade to an SVN checkout of SQLAlchemy 0.5.x to resolve a race condition in 0.5.6 proper
diffstat:
eggs.ini | 3 ++-
1 files changed, 2 insertions(+), 1 deletions(-)
diffs (20 lines):
diff -r 0984c3800775 -r 0e9cd7cfbc04 eggs.ini
--- a/eggs.ini Thu Nov 12 15:25:48 2009 -0500
+++ b/eggs.ini Fri Nov 13 10:09:42 2009 -0500
@@ -61,6 +61,7 @@
python_lzo = _static
bx_python = _dev_r4bf1f32e6b76
GeneTrack = _dev_raa786e9fc131d998e532a1aef39d108850c9e93d
+SQLAlchemy = _dev_r6498
; nose = .dev_r7156749efc58
; source location, necessary for scrambling
@@ -92,7 +93,7 @@
PSI = http://pypi.python.org/packages/source/P/PSI/PSI-0.3b1.1.tar.gz
Routes = http://pypi.python.org/packages/source/R/Routes/Routes-1.11.tar.gz
simplejson = http://cheeseshop.python.org/packages/source/s/simplejson/simplejson-1.5.ta…
-SQLAlchemy = http://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-0.5.6.tar.gz
+SQLAlchemy = http://dist.g2.bx.psu.edu/SQLAlchemy-0.5.6_r6498.tar.bz2
sqlalchemy_migrate = http://pypi.python.org/packages/source/s/sqlalchemy-migrate/sqlalchemy-migr…
Tempita = http://pypi.python.org/packages/source/T/Tempita/Tempita-0.1.tar.gz
twill = http://darcs.idyll.org/~t/projects/twill-0.9.tar.gz
details: http://www.bx.psu.edu/hg/galaxy/rev/4f9b630f9976
changeset: 3024:4f9b630f9976
user: Nate Coraor <nate(a)bx.psu.edu>
date: Fri Nov 13 10:39:15 2009 -0500
description:
Officially support Python 2.6.
diffstat:
scripts/check_python.py | 19 ++++++++-----------
1 files changed, 8 insertions(+), 11 deletions(-)
diffs (31 lines):
diff -r 0e9cd7cfbc04 -r 4f9b630f9976 scripts/check_python.py
--- a/scripts/check_python.py Fri Nov 13 10:09:42 2009 -0500
+++ b/scripts/check_python.py Fri Nov 13 10:39:15 2009 -0500
@@ -1,19 +1,16 @@
import os, sys
+msg = """ERROR: Your Python version is: %s
+Galaxy is currently supported on Python 2.4, 2.5 and 2.6. To run Galaxy,
+please download and install a supported version from python.org. If a
+supported version is installed but is not your default, getgalaxy.org
+contains instructions on how to force Galaxy to use a different version.""" % sys.version[:3]
+
def check_python():
- return
try:
- assert sys.version_info[:2] >= ( 2, 4 ) and sys.version_info[:2] <= ( 2, 5 )
+ assert sys.version_info[:2] >= ( 2, 4 ) and sys.version_info[:2] <= ( 2, 6 )
except AssertionError:
- print >>sys.stderr, "ERROR: Your Python version is:", sys.version.split( ' ', 1 )[0]
- print >>sys.stderr, "Galaxy is currently only supported on Python 2.4 and Python 2.5."
- if sys.version_info[:2] < ( 2, 4 ):
- print >>sys.stderr, "To run Galaxy, please download and install Python 2.5 from http://python.org"
- else:
- print >>sys.stderr, "To track the progress of Python 2.6 support, please see:"
- print >>sys.stderr, " http://bitbucket.org/galaxy/galaxy-central/issue/76/support-python-26"
- print >>sys.stderr, "For hints on how to direct Galaxy to use a different python installation, see:"
- print >>sys.stderr, " http://bitbucket.org/galaxy/galaxy-central/wiki/GetGalaxy"
+ print >>sys.stderr, msg
raise
if __name__ == '__main__':
16 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/0984c3800775
changeset: 3022:0984c3800775
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Thu Nov 12 15:25:48 2009 -0500
description:
Eliminate the _monkeypatch_session_method from assignmapper by cleaning up the remaining object flushes. The _monkeypatch_query_method remains due to a single object query in ~/datatypes/metadata.py in the FileParameter.wrap() method. The sqlalchemy session is now also passed into the __init__ methods of both HistoryDatasetAssociation and LibraryDatasetDatasetAssociation when the create_dataset param is True, so that methods in the DatasetInstance class can correctly add the created dataset to the sqlalchemy session and flush it.
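For reference, a minimal self-contained sketch of the session-handling pattern this changeset applies throughout the model code; the Item class and the in-memory sqlite engine are illustrative stand-ins, not from Galaxy:

    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker, object_session

    Base = declarative_base()

    class Item( Base ):
        # stand-in for a mapped Galaxy model class
        __tablename__ = 'item'
        id = Column( Integer, primary_key=True )
        name = Column( String )

    engine = create_engine( 'sqlite://' )
    Base.metadata.create_all( engine )
    session = sessionmaker( bind=engine )()

    item = Item( name='example' )
    session.add( item )
    session.flush()

    # The pattern this changeset adopts: instead of a monkeypatched
    # item.flush(), recover the session that owns the instance and
    # flush through it explicitly.
    object_session( item ).add( item )   # what the old monkeypatch did implicitly
    object_session( item ).flush()       # write pending changes to the database

The single remaining monkeypatch is the get() query method, kept for the query in FileParameter.wrap() as noted above.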
diffstat:
lib/galaxy/datatypes/metadata.py | 25 +++++++-----
lib/galaxy/model/__init__.py | 65 +++++++++++++++++++++-----------
lib/galaxy/model/mapping_tests.py | 5 +-
lib/galaxy/model/orm/ext/assignmapper.py | 18 +--------
lib/galaxy/tools/__init__.py | 15 ++++++-
lib/galaxy/tools/actions/__init__.py | 2 +-
lib/galaxy/tools/actions/upload_common.py | 8 ++-
lib/galaxy/tools/parameters/basic.py | 2 +-
lib/galaxy/web/controllers/async.py | 2 +-
lib/galaxy/web/controllers/requests.py | 3 +-
lib/galaxy/web/controllers/root.py | 7 +++-
tools/data_source/microbial_import_code.py | 2 +-
tools/maf/maf_to_bed_code.py | 2 +-
13 files changed, 94 insertions(+), 62 deletions(-)
diffs (505 lines):
diff -r 8bc85721cbce -r 0984c3800775 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py Thu Nov 12 13:20:19 2009 -0500
+++ b/lib/galaxy/datatypes/metadata.py Thu Nov 12 15:25:48 2009 -0500
@@ -4,6 +4,7 @@
from galaxy.util.odict import odict
from galaxy.web import form_builder
import galaxy.model
+from sqlalchemy.orm import object_session
import pkg_resources
pkg_resources.require("simplejson")
@@ -298,7 +299,6 @@
if not isinstance( value, list ): return [value]
return value
-
class DBKeyParameter( SelectParameter ):
def get_html_field( self, value=None, context={}, other_values={}, values=None, **kwd):
try:
@@ -387,26 +387,28 @@
return "<div>No display available for Metadata Files</div>"
def wrap( self, value ):
+ if value is None:
+ return None
if isinstance( value, galaxy.model.MetadataFile ) or isinstance( value, MetadataTempFile ):
return value
if DATABASE_CONNECTION_AVAILABLE:
try:
- # FIXME: GVK ( 11/11/09 ) had to add the monkey patch back into assignmapper for the get
- # method for this since Metadata has no hook into mapping.context ( the salalchemy session ).
+ # FIXME: this query requires a monkey patch in assignmapper.py since
+ # MetadataParameters do not have a handle to the sqlalchemy session
return galaxy.model.MetadataFile.get( value )
except:
#value was not a valid id
return None
- elif value is not None:
+ else:
mf = galaxy.model.MetadataFile()
mf.id = value #we assume this is a valid id, since we cannot check it
return mf
- return None
- def make_copy( self, value, target_context = None, source_context = None ):
+ def make_copy( self, value, target_context, source_context ):
value = self.wrap( value )
if value:
new_value = galaxy.model.MetadataFile( dataset = target_context.parent, name = self.spec.name )
- new_value.flush()
+ object_session( target_context.parent ).add( new_value )
+ object_session( target_context.parent ).flush()
shutil.copy( value.file_name, new_value.file_name )
return self.unwrap( new_value )
return None
@@ -441,7 +443,8 @@
def new_file( self, dataset = None, **kwds ):
if DATABASE_CONNECTION_AVAILABLE:
mf = galaxy.model.MetadataFile( name = self.spec.name, dataset = dataset, **kwds )
- mf.flush() #flush to assign id
+ object_session( dataset ).add( mf )
+ object_session( dataset ).flush() #flush to assign id
return mf
else:
#we need to make a tmp file that is accessable to the head node,
@@ -557,7 +560,8 @@
#file to store kwds passed to set_meta()
metadata_files.filename_kwds = relpath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_kwds_%s_" % key ).name )
simplejson.dump( kwds, open( metadata_files.filename_kwds, 'wb+' ), ensure_ascii=True )
- metadata_files.flush()
+ sa_session.add( metadata_files )
+ sa_session.flush()
metadata_files_list.append( metadata_files )
#return command required to build
return "%s %s %s %s %s %s" % ( os.path.join( exec_dir, 'set_metadata.sh' ), dataset_files_path, tmp_dir, config_root, datatypes_config, " ".join( map( __metadata_files_list_to_cmd_line, metadata_files_list ) ) )
@@ -586,4 +590,5 @@
def set_job_runner_external_pid( self, pid, sa_session ):
for metadata_files in sa_session.query( galaxy.model.Job ).get( self.job_id ).external_output_metadata:
metadata_files.job_runner_external_pid = pid
- metadata_files.flush()
+ sa_session.add( metadata_files )
+ sa_session.flush()
diff -r 8bc85721cbce -r 0984c3800775 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Nov 12 13:20:19 2009 -0500
+++ b/lib/galaxy/model/__init__.py Thu Nov 12 15:25:48 2009 -0500
@@ -17,6 +17,7 @@
from galaxy.web.form_builder import *
import logging
log = logging.getLogger( __name__ )
+from sqlalchemy.orm import object_session
datatypes_registry = galaxy.datatypes.registry.Registry() #Default Value Required for unit tests
@@ -205,7 +206,8 @@
def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid = True ):
if isinstance( dataset, Dataset ):
dataset = HistoryDatasetAssociation( dataset = dataset, copied_from = dataset )
- dataset.flush()
+ object_session( self ).add( dataset )
+ object_session( self ).flush()
elif not isinstance( dataset, HistoryDatasetAssociation ):
raise TypeError, "You can only add Dataset and HistoryDatasetAssociation instances to a history ( you tried to add %s )." % str( dataset )
if parent_id:
@@ -229,7 +231,8 @@
if not target_user:
target_user = self.user
new_history = History( name=name, user=target_user )
- new_history.flush()
+ object_session( self ).add( new_history )
+ object_session( self ).flush()
if activatable:
hdas = self.activatable_datasets
else:
@@ -237,9 +240,11 @@
for hda in hdas:
new_hda = hda.copy( copy_children=True, target_history=new_history )
new_history.add_dataset( new_hda, set_hid = False )
- new_hda.flush()
+ object_session( self ).add( new_hda )
+ object_session( self ).flush()
new_history.hid_counter = self.hid_counter
- new_history.flush()
+ object_session( self ).add( new_history )
+ object_session( self ).flush()
return new_history
@property
def activatable_datasets( self ):
@@ -439,7 +444,7 @@
permitted_actions = Dataset.permitted_actions
def __init__( self, id=None, hid=None, name=None, info=None, blurb=None, peek=None, extension=None,
dbkey=None, metadata=None, history=None, dataset=None, deleted=False, designation=None,
- parent_id=None, validation_errors=None, visible=True, create_dataset=False ):
+ parent_id=None, validation_errors=None, visible=True, create_dataset=False, sa_session=None ):
self.name = name or "Unnamed dataset"
self.id = id
self.info = info
@@ -454,8 +459,10 @@
self.visible = visible
# Relationships
if not dataset and create_dataset:
+ # Had to pass the sqlalchemy session in order to create a new dataset
dataset = Dataset( state=Dataset.states.NEW )
- dataset.flush()
+ sa_session.add( dataset )
+ sa_session.flush()
self.dataset = dataset
self.parent_id = parent_id
self.validation_errors = validation_errors
@@ -466,7 +473,8 @@
return self.dataset.state
def set_dataset_state ( self, state ):
self.dataset.state = state
- self.dataset.flush() #flush here, because hda.flush() won't flush the Dataset object
+ object_session( self ).add( self.dataset )
+ object_session( self ).flush() #flush here, because hda.flush() won't flush the Dataset object
state = property( get_dataset_state, set_dataset_state )
def get_file_name( self ):
return self.dataset.get_file_name()
@@ -616,8 +624,11 @@
history = None,
copied_from_history_dataset_association = None,
copied_from_library_dataset_dataset_association = None,
+ sa_session = None,
**kwd ):
- DatasetInstance.__init__( self, **kwd )
+ # FIXME: sa_session must be passed to DatasetInstance if the create_dataset
+ # parameter is True so that the new object can be flushed. Is there a better way?
+ DatasetInstance.__init__( self, sa_session=sa_session, **kwd )
self.hid = hid
# Relationships
self.history = history
@@ -637,7 +648,8 @@
parent_id=parent_id,
copied_from_history_dataset_association=self,
history = target_history )
- hda.flush()
+ object_session( self ).add( hda )
+ object_session( self ).flush()
hda.set_size()
# Need to set after flushed, as MetadataFiles require dataset.id
hda.metadata = self.metadata
@@ -647,7 +659,7 @@
if not self.datatype.copy_safe_peek:
# In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
hda.set_peek()
- hda.flush()
+ object_session( self ).flush()
return hda
def to_library_dataset_dataset_association( self, target_folder, replace_dataset=None, parent_id=None, user=None ):
if replace_dataset:
@@ -657,7 +669,8 @@
# If replace_dataset is None, the Library level permissions will be taken from the folder and applied to the new
# LibraryDataset, and the current user's DefaultUserPermissions will be applied to the associated Dataset.
library_dataset = LibraryDataset( folder=target_folder, name=self.name, info=self.info )
- library_dataset.flush()
+ object_session( self ).add( library_dataset )
+ object_session( self ).flush()
if not user:
user = self.history.user
ldda = LibraryDatasetDatasetAssociation( name=self.name,
@@ -673,15 +686,18 @@
parent_id=parent_id,
copied_from_history_dataset_association=self,
user=user )
- ldda.flush()
+ object_session( self ).add( ldda )
+ object_session( self ).flush()
# Permissions must be the same on the LibraryDatasetDatasetAssociation and the associated LibraryDataset
# Must set metadata after ldda flushed, as MetadataFiles require ldda.id
ldda.metadata = self.metadata
if not replace_dataset:
target_folder.add_library_dataset( library_dataset, genome_build=ldda.dbkey )
- target_folder.flush()
+ object_session( self ).add( target_folder )
+ object_session( self ).flush()
library_dataset.library_dataset_dataset_association_id = ldda.id
- library_dataset.flush()
+ object_session( self ).add( library_dataset )
+ object_session( self ).flush()
for child in self.children:
child_copy = child.to_library_dataset_dataset_association( target_folder=target_folder,
replace_dataset=replace_dataset,
@@ -690,7 +706,7 @@
if not self.datatype.copy_safe_peek:
# In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
ldda.set_peek()
- ldda.flush()
+ object_session( self ).flush()
return ldda
def clear_associated_files( self, metadata_safe = False, purge = False ):
# metadata_safe = True means to only clear when assoc.metadata_safe == False
@@ -814,8 +830,8 @@
def set_library_dataset_dataset_association( self, ldda ):
self.library_dataset_dataset_association = ldda
ldda.library_dataset = self
- ldda.flush()
- self.flush()
+ object_session( self ).add_all( ( ldda, self ) )
+ object_session( self ).flush()
def get_info( self ):
if self.library_dataset_dataset_association:
return self.library_dataset_dataset_association.info
@@ -853,8 +869,11 @@
copied_from_library_dataset_dataset_association=None,
library_dataset=None,
user=None,
+ sa_session=None,
**kwd ):
- DatasetInstance.__init__( self, **kwd )
+ # FIXME: sa_session must be passed to DatasetInstance if the create_dataset
+ # parameter in kwd is True so that the new object can be flushed. Is there a better way?
+ DatasetInstance.__init__( self, sa_session=sa_session, **kwd )
self.copied_from_history_dataset_association = copied_from_history_dataset_association
self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association
self.library_dataset = library_dataset
@@ -872,7 +891,8 @@
parent_id=parent_id,
copied_from_library_dataset_dataset_association=self,
history=target_history )
- hda.flush()
+ object_session( self ).add( hda )
+ object_session( self ).flush()
hda.metadata = self.metadata #need to set after flushed, as MetadataFiles require dataset.id
if add_to_history and target_history:
target_history.add_dataset( hda )
@@ -880,7 +900,7 @@
child_copy = child.to_history_dataset_association( target_history = target_history, parent_id = hda.id, add_to_history = False )
if not self.datatype.copy_safe_peek:
hda.set_peek() #in some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
- hda.flush()
+ object_session( self ).flush()
return hda
def copy( self, copy_children = False, parent_id = None, target_folder = None ):
ldda = LibraryDatasetDatasetAssociation( name=self.name,
@@ -895,7 +915,8 @@
parent_id=parent_id,
copied_from_library_dataset_dataset_association=self,
folder=target_folder )
- ldda.flush()
+ object_session( self ).add( ldda )
+ object_session( self ).flush()
# Need to set after flushed, as MetadataFiles require dataset.id
ldda.metadata = self.metadata
if copy_children:
@@ -904,7 +925,7 @@
if not self.datatype.copy_safe_peek:
# In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
ldda.set_peek()
- ldda.flush()
+ object_session( self ).flush()
return ldda
def clear_associated_files( self, metadata_safe = False, purge = False ):
return
diff -r 8bc85721cbce -r 0984c3800775 lib/galaxy/model/mapping_tests.py
--- a/lib/galaxy/model/mapping_tests.py Thu Nov 12 13:20:19 2009 -0500
+++ b/lib/galaxy/model/mapping_tests.py Thu Nov 12 15:25:48 2009 -0500
@@ -15,11 +15,12 @@
#h1.queries.append( model.Query( "h1->q1" ) )
#h1.queries.append( model.Query( "h1->q2" ) )
h2 = model.History( name=( "H" * 1024 ) )
+ model.session.add_all( ( u, h1, h2 ) )
#q1 = model.Query( "h2->q1" )
- d1 = model.HistoryDatasetAssociation( extension="interval", metadata=dict(chromCol=1,startCol=2,endCol=3 ), history=h2, create_dataset=True )
+ d1 = model.HistoryDatasetAssociation( extension="interval", metadata=dict(chromCol=1,startCol=2,endCol=3 ), history=h2, create_dataset=True, sa_session=model.session )
#h2.queries.append( q1 )
#h2.queries.append( model.Query( "h2->q2" ) )
- model.session.add_all( ( u, h1, h2, d1 ) )
+ model.session.add( ( d1 ) )
model.session.flush()
model.session.expunge_all()
# Check
diff -r 8bc85721cbce -r 0984c3800775 lib/galaxy/model/orm/ext/assignmapper.py
--- a/lib/galaxy/model/orm/ext/assignmapper.py Thu Nov 12 13:20:19 2009 -0500
+++ b/lib/galaxy/model/orm/ext/assignmapper.py Thu Nov 12 15:25:48 2009 -0500
@@ -19,7 +19,6 @@
from sqlalchemy.orm import mapper as sqla_mapper
def _monkeypatch_query_method( name, session, class_ ):
- # TODO: eliminate this method by fixing the single query in ~/datatypes/metadata.py ( line 396 )
def do(self, *args, **kwargs):
return getattr( class_.query, name)(*args, **kwargs)
try:
@@ -28,20 +27,6 @@
pass
if not hasattr(class_, name):
setattr(class_, name, classmethod(do))
-def _monkeypatch_session_method( name, session, class_ ):
- # TODO: eliminate this method by fixing the session flushes in ~/model/__init__.py ( 20 of them )
- # and ~/datatypes/metadata.py ( 4 of them ). The affected objects have no known hook into mapping.context
- # ( i.e., sqlalchemy session ).
- def do( self, *args, **kwargs ):
- if self not in session.deleted:
- session.add( self )
- return session.flush()
- try:
- do.__name__ = name
- except:
- pass
- if not hasattr( class_, name ):
- setattr( class_, name, do )
def session_mapper( scoped_session, class_, *args, **kwargs ):
def mapper( cls, *arg, **kw ):
validate = kw.pop( 'validate', False )
@@ -54,8 +39,9 @@
setattr( self, key, value )
cls.__init__ = __init__
cls.query = scoped_session.query_property()
+ # FIXME: eliminate the need for the following monkey patch by fixing the single
+ # query in ~/datatypes/metadata.py in the FileParameter.wrap() method
_monkeypatch_query_method( 'get', scoped_session, cls )
- _monkeypatch_session_method( 'flush', scoped_session, cls )
return sqla_mapper( cls, *arg, **kw )
return mapper( class_, *args, **kwargs )
def assign_mapper( session, class_, *args, **kwargs ):
diff -r 8bc85721cbce -r 0984c3800775 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Thu Nov 12 13:20:19 2009 -0500
+++ b/lib/galaxy/tools/__init__.py Thu Nov 12 15:25:48 2009 -0500
@@ -1495,7 +1495,13 @@
if visible == "visible": visible = True
else: visible = False
ext = fields.pop(0).lower()
- child_dataset = self.app.model.HistoryDatasetAssociation( extension=ext, parent_id=outdata.id, designation=designation, visible=visible, dbkey=outdata.dbkey, create_dataset=True )
+ child_dataset = self.app.model.HistoryDatasetAssociation( extension=ext,
+ parent_id=outdata.id,
+ designation=designation,
+ visible=visible,
+ dbkey=outdata.dbkey,
+ create_dataset=True,
+ sa_session=self.sa_session )
self.app.security_agent.copy_dataset_permissions( outdata.dataset, child_dataset.dataset )
# Move data from temp location to dataset location
shutil.move( filename, child_dataset.file_name )
@@ -1548,7 +1554,12 @@
if fields:
dbkey = fields[ 0 ]
# Create new primary dataset
- primary_data = self.app.model.HistoryDatasetAssociation( extension=ext, designation=designation, visible=visible, dbkey=dbkey, create_dataset=True )
+ primary_data = self.app.model.HistoryDatasetAssociation( extension=ext,
+ designation=designation,
+ visible=visible,
+ dbkey=dbkey,
+ create_dataset=True,
+ sa_session=self.sa_session )
self.app.security_agent.copy_dataset_permissions( outdata.dataset, primary_data.dataset )
self.sa_session.add( primary_data )
self.sa_session.flush()
diff -r 8bc85721cbce -r 0984c3800775 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py Thu Nov 12 13:20:19 2009 -0500
+++ b/lib/galaxy/tools/actions/__init__.py Thu Nov 12 15:25:48 2009 -0500
@@ -198,7 +198,7 @@
if check is not None:
if str( getattr( check, when_elem.get( 'attribute' ) ) ) == when_elem.get( 'value', None ):
ext = when_elem.get( 'format', ext )
- data = trans.app.model.HistoryDatasetAssociation( extension=ext, create_dataset=True )
+ data = trans.app.model.HistoryDatasetAssociation( extension=ext, create_dataset=True, sa_session=trans.sa_session )
# Commit the dataset immediately so it gets database assigned unique id
trans.sa_session.add( data )
trans.sa_session.flush()
diff -r 8bc85721cbce -r 0984c3800775 lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py Thu Nov 12 13:20:19 2009 -0500
+++ b/lib/galaxy/tools/actions/upload_common.py Thu Nov 12 15:25:48 2009 -0500
@@ -112,7 +112,8 @@
extension = uploaded_dataset.file_type,
dbkey = uploaded_dataset.dbkey,
history = trans.history,
- create_dataset = True )
+ create_dataset = True,
+ sa_session = trans.sa_session )
if state:
hda.state = state
else:
@@ -159,13 +160,14 @@
dbkey = uploaded_dataset.dbkey,
library_dataset = ld,
user = trans.user,
- create_dataset = True )
+ create_dataset = True,
+ sa_session = trans.sa_session )
+ trans.sa_session.add( ldda )
if state:
ldda.state = state
else:
ldda.state = ldda.states.QUEUED
ldda.message = library_bunch.message
- trans.sa_session.add( ldda )
trans.sa_session.flush()
# Permissions must be the same on the LibraryDatasetDatasetAssociation and the associated LibraryDataset
trans.app.security_agent.copy_library_permissions( ld, ldda )
diff -r 8bc85721cbce -r 0984c3800775 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Thu Nov 12 13:20:19 2009 -0500
+++ b/lib/galaxy/tools/parameters/basic.py Thu Nov 12 15:25:48 2009 -0500
@@ -730,7 +730,7 @@
>>> hist = History()
>>> sa_session.add( hist )
>>> sa_session.flush()
- >>> hist.add_dataset( HistoryDatasetAssociation( id=1, extension='interval', create_dataset=True ) )
+ >>> hist.add_dataset( HistoryDatasetAssociation( id=1, extension='interval', create_dataset=True, sa_session=sa_session ) )
>>> dtp = DataToolParameter( None, XML( '<param name="blah" type="data" format="interval"/>' ) )
>>> print dtp.name
blah
diff -r 8bc85721cbce -r 0984c3800775 lib/galaxy/web/controllers/async.py
--- a/lib/galaxy/web/controllers/async.py Thu Nov 12 13:20:19 2009 -0500
+++ b/lib/galaxy/web/controllers/async.py Thu Nov 12 15:25:48 2009 -0500
@@ -103,7 +103,7 @@
#data.state = jobs.JOB_OK
#history.datasets.add_dataset( data )
- data = trans.app.model.HistoryDatasetAssociation( create_dataset = True, extension = GALAXY_TYPE )
+ data = trans.app.model.HistoryDatasetAssociation( create_dataset=True, sa_session=trans.sa_session, extension=GALAXY_TYPE )
trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
data.name = GALAXY_NAME
data.dbkey = GALAXY_BUILD
diff -r 8bc85721cbce -r 0984c3800775 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Thu Nov 12 13:20:19 2009 -0500
+++ b/lib/galaxy/web/controllers/requests.py Thu Nov 12 15:25:48 2009 -0500
@@ -667,7 +667,8 @@
request.library = library
request.folder = folder
request.state = trans.app.model.Request.states.UNSUBMITTED
- request.flush()
+ trans.sa_session.add( request )
+ trans.sa_session.flush()
return request
@web.expose
@web.require_login( "create/submit sequencing requests" )
diff -r 8bc85721cbce -r 0984c3800775 lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Thu Nov 12 13:20:19 2009 -0500
+++ b/lib/galaxy/web/controllers/root.py Thu Nov 12 15:25:48 2009 -0500
@@ -501,7 +501,12 @@
"""Adds a POSTed file to a History"""
try:
history = trans.sa_session.query( trans.app.model.History ).get( history_id )
- data = trans.app.model.HistoryDatasetAssociation( name = name, info = info, extension = ext, dbkey = dbkey, create_dataset = True )
+ data = trans.app.model.HistoryDatasetAssociation( name = name,
+ info = info,
+ extension = ext,
+ dbkey = dbkey,
+ create_dataset = True,
+ sa_session = trans.sa_session )
if copy_access_from:
copy_access_from = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( copy_access_from )
trans.app.security_agent.copy_dataset_permissions( copy_access_from.dataset, data.dataset )
diff -r 8bc85721cbce -r 0984c3800775 tools/data_source/microbial_import_code.py
--- a/tools/data_source/microbial_import_code.py Thu Nov 12 13:20:19 2009 -0500
+++ b/tools/data_source/microbial_import_code.py Thu Nov 12 15:25:48 2009 -0500
@@ -131,7 +131,7 @@
dbkey = fields[3]
filepath = fields[4]
file_type = fields[5]
- newdata = app.model.HistoryDatasetAssociation( create_dataset = True ) #This import should become a library
+ newdata = app.model.HistoryDatasetAssociation( create_dataset = True, sa_session = app.model.context ) #This import should become a library
newdata.set_size()
newdata.extension = file_type
newdata.name = basic_name + " (" + microbe_info[kingdom][org]['chrs'][chr]['data'][description]['feature'] +" for "+microbe_info[kingdom][org]['name']+":"+chr + ")"
diff -r 8bc85721cbce -r 0984c3800775 tools/maf/maf_to_bed_code.py
--- a/tools/maf/maf_to_bed_code.py Thu Nov 12 13:20:19 2009 -0500
+++ b/tools/maf/maf_to_bed_code.py Thu Nov 12 15:25:48 2009 -0500
@@ -27,7 +27,7 @@
fields = line.split("\t")
dbkey = fields[1]
filepath = fields[2]
- newdata = app.model.HistoryDatasetAssociation( create_dataset = True )
+ newdata = app.model.HistoryDatasetAssociation( create_dataset = True, sa_session = app.model.context )
newdata.set_size()
newdata.extension = "bed"
newdata.name = basic_name + " (" + dbkey + ")"
16 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/24f0d1e7f39f
changeset: 3018:24f0d1e7f39f
user: Nate Coraor <nate(a)bx.psu.edu>
date: Thu Nov 12 11:07:05 2009 -0500
description:
Add a "maxseconds" attribute to the "test" tag in tool configs. Allows the tool writer to decide how long Galaxy should wait for tool execution to complete. Resolves issue #219.
diffstat:
lib/galaxy/tools/__init__.py | 3 ++-
lib/galaxy/tools/test.py | 3 ++-
test/base/twilltestcase.py | 16 +++++++++-------
test/functional/test_toolbox.py | 2 +-
4 files changed, 14 insertions(+), 10 deletions(-)
diffs (85 lines):
diff -r 18586d1194f9 -r 24f0d1e7f39f lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Wed Nov 11 17:55:08 2009 -0500
+++ b/lib/galaxy/tools/__init__.py Thu Nov 12 11:07:05 2009 -0500
@@ -547,7 +547,8 @@
self.tests = []
for i, test_elem in enumerate( tests_elem.findall( 'test' ) ):
name = test_elem.get( 'name', 'Test-%d' % (i+1) )
- test = ToolTestBuilder( self, name )
+ maxseconds = int( test_elem.get( 'maxseconds', '120' ) )
+ test = ToolTestBuilder( self, name, maxseconds )
try:
for param_elem in test_elem.findall( "param" ):
attrib = dict( param_elem.attrib )
diff -r 18586d1194f9 -r 24f0d1e7f39f lib/galaxy/tools/test.py
--- a/lib/galaxy/tools/test.py Wed Nov 11 17:55:08 2009 -0500
+++ b/lib/galaxy/tools/test.py Thu Nov 12 11:07:05 2009 -0500
@@ -11,9 +11,10 @@
dynamic TestCase class (the unittest framework is very class oriented,
doing dynamic tests in this way allows better integration)
"""
- def __init__( self, tool, name ):
+ def __init__( self, tool, name, maxseconds ):
self.tool = tool
self.name = name
+ self.maxseconds = maxseconds
self.required_files = []
self.inputs = []
self.outputs = []
diff -r 18586d1194f9 -r 24f0d1e7f39f test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Nov 11 17:55:08 2009 -0500
+++ b/test/base/twilltestcase.py Thu Nov 12 11:07:05 2009 -0500
@@ -513,10 +513,10 @@
hid = elem.get('hid')
hids.append(hid)
return hids
- def verify_dataset_correctness( self, filename, hid=None, wait=True ):
+ def verify_dataset_correctness( self, filename, hid=None, wait=True, maxseconds=120 ):
"""Verifies that the attributes and contents of a history item meet expectations"""
if wait:
- self.wait() #wait for job to finish
+ self.wait( maxseconds=maxseconds ) #wait for job to finish
data_list = self.get_history_as_data_list()
self.assertTrue( data_list )
if hid is None: # take last hid
@@ -906,21 +906,23 @@
tc.fv( "1","hgta_doGalaxyQuery", "Send query to Galaxy" )
self.submit_form( button="Send query to Galaxy" )#, **output_params ) #AssertionError: Attempting to set field 'fbQual' to value '['whole']' in form 'None' threw exception: no matching forms! control: <RadioControl(fbQual=[whole, upstreamAll, endAll])>
- def wait( self, maxiter=20 ):
+ def wait( self, maxseconds=120 ):
"""Waits for the tools to finish"""
- count = 0
sleep_amount = 0.1
+ slept = 0
self.home()
- while count < maxiter:
- count += 1
+ while slept <= maxseconds:
self.visit_page( "history" )
page = tc.browser.get_html()
if page.find( '<!-- running: do not change this comment, used by TwillTestCase.wait -->' ) > -1:
time.sleep( sleep_amount )
+ slept += sleep_amount
sleep_amount *= 2
+ if slept + sleep_amount > maxseconds:
+ sleep_amount = maxseconds - slept # don't overshoot maxseconds
else:
break
- self.assertNotEqual(count, maxiter)
+ assert slept < maxseconds
# Dataset Security stuff
# Tests associated with users
diff -r 18586d1194f9 -r 24f0d1e7f39f test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py Wed Nov 11 17:55:08 2009 -0500
+++ b/test/functional/test_toolbox.py Thu Nov 12 11:07:05 2009 -0500
@@ -63,7 +63,7 @@
# Check the result
assert len( testdef.outputs ) == 1, "ToolTestCase does not deal with multiple outputs properly yet."
for name, file in testdef.outputs:
- self.verify_dataset_correctness( file )
+ self.verify_dataset_correctness( file, maxseconds=testdef.maxseconds )
self.delete_history( id=self.security.encode_id( latest_history.id ) )
def __expand_grouping( self, tool_inputs, declared_inputs, prefix='' ):