5 new changesets in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/7507b7be224c/ changeset: 7507b7be224c user: dannon date: 2011-07-21 20:22:23 summary: Trailing whitespace cleanup in the workflow editor. affected #: 1 file (612 bytes)
--- a/templates/workflow/editor.mako Thu Jul 21 13:55:24 2011 -0400 +++ b/templates/workflow/editor.mako Thu Jul 21 14:22:23 2011 -0400 @@ -18,17 +18,17 @@ </%def><%def name="javascripts()"> - + ${parent.javascripts()} - + <!--[if lt IE 9]><script type='text/javascript' src="${h.url_for('/static/scripts/excanvas.js')}"></script><![endif]-->
${h.js( "jquery", "jquery.tipsy", - "jquery.event.drag", - "jquery.event.drop", + "jquery.event.drag", + "jquery.event.drop", "jquery.event.hover", "jquery.form", "json2", @@ -43,7 +43,7 @@ window.lt_ie_7 = true; </script><![endif]--> - + <script type='text/javascript'> // Globals workflow = null; @@ -51,10 +51,10 @@ active_ajax_call = false; var galaxy_async = new GalaxyAsync(); galaxy_async.set_func_url(galaxy_async.set_user_pref, "${h.url_for( controller='user', action='set_user_pref_async' )}"); - + // jQuery onReady $( function() { - + if ( window.lt_ie_7 ) { show_modal( "Browser not supported", @@ -62,7 +62,7 @@ ); return; } - + // Init tool options. %if trans.app.toolbox_search.enabled: make_popupmenu( $("#tools-options-button"), { @@ -71,7 +71,7 @@ show_tool_search = False if trans.user: show_tool_search = trans.user.preferences.get( "workflow.show_tool_search", "True" ) - + if show_tool_search == "True": initial_text = "Hide Search" else: @@ -85,7 +85,6 @@ pref_value = "False"; menu_option_text = "Search Tools"; menu.toggle(); - // Reset search. reset_tool_search(true); } else { @@ -94,14 +93,12 @@ menu_option_text = "Hide Search"; menu.toggle(); } - // Update menu option. $("#tools-options-button-menu").find("li").eq(0).text(menu_option_text); - galaxy_async.set_user_pref("workflow.show_tool_search", pref_value); } }); - + // Init searching. $("#tool-search-query").click( function (){ $(this).focus(); @@ -110,7 +107,6 @@ .keyup( function () { // Remove italics. $(this).css("font-style", "normal"); - // Don't update if same value as last time if ( this.value.length < 3 ) { reset_tool_search(false); @@ -127,7 +123,6 @@ // Start a new ajax-request in X ms $("#search-spinner").show(); this.timer = setTimeout(function () { - $.get("${h.url_for( controller='root', action='tool_search' )}", { query: q }, function (data) { // input.removeClass(config.loadingClass); // Show live-search if results and search-term aren't empty @@ -139,17 +134,15 @@ if ( data.length != 0 ) { // Map tool ids to element ids and join them. var s = $.map( data, function( n, i ) { return "#link-" + n; } ).join( ", " ); - // First pass to show matching tools and their parents. $(s).each( function() { // Add class to denote match. $(this).parent().addClass("search_match"); $(this).parent().show().parent().parent().show().parent().show(); }); - // Hide labels that have no visible children. $(".toolPanelLabel").each( function() { - var this_label = $(this); + var this_label = $(this); var next = this_label.next(); var no_visible_tools = true; // Look through tools following label and, if none are visible, hide label. 
@@ -174,11 +167,11 @@ } this.lastValue = this.value; }); - %endif - + %endif + // Canvas overview management canvas_manager = new CanvasManager( $("#canvas-viewport"), $("#overview") ); - + // Initialize workflow state reset(); // Load the datatype info @@ -225,7 +218,7 @@ }); } }); - + // For autosave purposes $(document).ajaxStart( function() { active_ajax_call = true; @@ -233,14 +226,14 @@ active_ajax_call = false; }); }); - + $(document).ajaxError( function ( e, x ) { // console.log( e, x ); var message = x.responseText || x.statusText || "Could not connect to server"; show_modal( "Server error", message, { "Ignore error" : hide_modal } ); return false; }); - + make_popupmenu( $("#workflow-options-button"), { "Save" : save_current_workflow, ##"Create New" : create_new_workflow_dialog, @@ -250,7 +243,7 @@ ##"Load a Workflow" : load_workflow, "Close": close_editor }); - + function edit_workflow_outputs(){ workflow.clear_active_node(); $('.right-content').hide(); @@ -297,14 +290,14 @@ scroll_to_nodes(); canvas_manager.draw_overview(); } - + function edit_workflow_attributes() { workflow.clear_active_node(); $('.right-content').hide(); $('#edit-attributes').show();
} - + // On load, set the size to the pref stored in local storage if it exists overview_size = $.jStorage.get("overview-size"); if (overview_size !== undefined) { @@ -313,14 +306,14 @@ height: overview_size }); } - + // Show viewport on load unless pref says it's off if ($.jStorage.get("overview-off")) { hide_overview(); } else { show_overview(); } - + // Stores the size of the overview into local storage when it's resized $("#overview-border").bind( "dragend", function( e, d ) { var op = $(this).offsetParent(); @@ -329,19 +322,19 @@ op.height() - ( d.offsetY - opo.top ) ); $.jStorage.set("overview-size", new_size + "px"); }); - + function show_overview() { $.jStorage.set("overview-off", false); $("#overview-border").css("right", "0px"); $("#close-viewport").css("background-position", "0px 0px"); } - + function hide_overview() { $.jStorage.set("overview-off", true); $("#overview-border").css("right", "20000px"); $("#close-viewport").css("background-position", "12px 0px"); } - + // Lets the overview be toggled visible and invisible, adjusting the arrows accordingly $("#close-viewport").click( function() { if ( $("#overview-border").css("right") === "0px" ) { @@ -350,19 +343,19 @@ show_overview(); } }); - + // Unload handler window.onbeforeunload = function() { if ( workflow && workflow.has_changes ) { return "There are unsaved changes to your workflow which will be lost."; } }; - + // Tool menu $( "div.toolSectionBody" ).hide(); $( "div.toolSectionTitle > span" ).wrap( "<a href='#'></a>" ); var last_expanded = null; - $( "div.toolSectionTitle" ).each( function() { + $( "div.toolSectionTitle" ).each( function() { var body = $(this).next( "div.toolSectionBody" ); $(this).click( function() { if ( body.is( ":hidden" ) ) { @@ -379,7 +372,7 @@
// Rename async. async_save_text("workflow-name", "workflow-name", "${h.url_for( action='rename_async', id=trans.security.encode_id(stored.id) )}", "new_name"); - + // Tag async. Simply have the workflow edit element generate a click on the tag element to activate tagging. $('#workflow-tag').click( function() { $('.tag-area').click(); @@ -396,7 +389,7 @@ } workflow = new Workflow( $("#canvas-container") ); } - + function scroll_to_nodes() { var cv = $("#canvas-viewport"); var cc = $("#canvas-container"); @@ -413,7 +406,7 @@ } cc.css( { left: left, top: top } ); } - + // Add a new step to the workflow by tool id function add_node_for_tool( id, title ) { var node = prebuild_node( 'tool', title, id ); @@ -422,7 +415,7 @@ canvas_manager.draw_overview(); workflow.activate_node( node ); $.ajax( { - url: "${h.url_for( action='get_new_module_info' )}", + url: "${h.url_for( action='get_new_module_info' )}", data: { type: "tool", tool_id: id, "_": "true" }, global: false, dataType: "json", @@ -438,7 +431,7 @@ } }); } - + function add_node_for_module( type, title ) { node = prebuild_node( type, title ); workflow.add_node( node ); @@ -446,8 +439,8 @@ canvas_manager.draw_overview(); workflow.activate_node( node ); $.ajax( { - url: "${h.url_for( action='get_new_module_info' )}", - data: { type: type, "_": "true" }, + url: "${h.url_for( action='get_new_module_info' )}", + data: { type: type, "_": "true" }, dataType: "json", success: function( data ) { node.init_field_data( data ); @@ -479,11 +472,11 @@ workflow.active_form_has_changes = true; }); } - + function display_pja_list(){ return "${ActionBox.get_add_list()}"; } - + function display_file_list(node){ addlist = "<select id='node_data_list' name='node_data_list'>"; for (var out_terminal in node.output_terminals){ @@ -492,7 +485,7 @@ addlist += "</select>"; return addlist; } - + function new_pja(action_type, target, node){ if (node.post_job_actions === undefined){ //New tool node, set up dict. @@ -511,7 +504,7 @@ return false; } } - + function show_workflow_parameters(){ var parameter_re = /${.+?}/g; var workflow_parameters = []; @@ -532,7 +525,7 @@ if (arg_matches){ matches = matches.concat(arg_matches); } - }); + }); } }); if (matches){ @@ -541,7 +534,7 @@ workflow_parameters.push(element); } }); - } + } } }); if (workflow_parameters && workflow_parameters.length !== 0){ @@ -555,7 +548,7 @@ wf_parm_box.hide(); } } - + function show_form_for_tool( text, node ) { $('.right-content').hide(); $("#right-content").show().html( text ); @@ -632,7 +625,7 @@ }); }); } - + var close_editor = function() { <% next_url = h.url_for( controller='workflow', action='index' ) %> workflow.check_changes_in_active_form(); @@ -655,7 +648,7 @@ window.document.location = "${next_url}"; } }; - + var save_current_workflow = function ( eventObj, success_callback ) { show_modal( "Saving workflow", "progress" ); workflow.check_changes_in_active_form(); @@ -677,7 +670,7 @@ "_": "true" }, dataType: 'json', - success: function( data ) { + success: function( data ) { var body = $("<div></div>").text( data.message ); if ( data.errors ) { body.addClass( "warningmark" ); @@ -704,7 +697,7 @@ } }); }; - + // We bind to ajaxStop because of auto-saving, since the form submission ajax // call needs to be completed so that the new data is saved if (active_ajax_call) { @@ -718,7 +711,7 @@ savefn(success_callback); } }; - + </script></%def>
@@ -732,7 +725,7 @@
<style type="text/css"> body { margin: 0; padding: 0; overflow: hidden; } - + /* Wider right panel */ #center { right: 309px; } #right-border { right: 300px; } @@ -744,11 +737,11 @@ ## top: 2.5em; ## margin-top: 7px; ## } - + #left { background: #C1C9E5 url(${h.url_for('/static/style/menu_bg.png')}) top repeat-x; } - + div.toolMenu { margin: 5px; margin-left: 10px; @@ -785,8 +778,8 @@ .right-content { margin: 5px; } - - canvas { position: absolute; z-index: 10; } + + canvas { position: absolute; z-index: 10; } canvas.dragging { position: absolute; z-index: 1000; } .input-terminal { width: 12px; height: 12px; background: url(${h.url_for('/static/style/workflow_circle_open.png')}); position: absolute; top: 50%; margin-top: -6px; left: -6px; z-index: 1500; } .output-terminal { width: 12px; height: 12px; background: url(${h.url_for('/static/style/workflow_circle_open.png')}); position: absolute; top: 50%; margin-top: -6px; right: -6px; z-index: 1500; } @@ -795,12 +788,12 @@ ## .input-terminal-hover { background: yellow; border: solid black 1px; } .unselectable { -moz-user-select: none; -khtml-user-select: none; user-select: none; } img { border: 0; } - + div.buttons img { width: 16px; height: 16px; cursor: pointer; } - + ## Extra styles for the representation of a tool on the canvas (looks like ## a tiny tool form) div.toolFormInCanvas { @@ -809,18 +802,18 @@ ## min-width: 130px; margin: 6px; } - + div.toolForm-active { z-index: 1001; border: solid #8080FF 4px; margin: 3px; } - + div.toolFormTitle { cursor: move; min-height: 16px; } - + div.titleRow { font-weight: bold; border-bottom: dotted gray 1px; @@ -830,7 +823,7 @@ div.form-row { position: relative; } - + div.tool-node-error div.toolFormTitle { background: #FFCCCC; border-color: #AA6666; @@ -838,14 +831,14 @@ div.tool-node-error { border-color: #AA6666; } - + #canvas-area { position: absolute; top: 0; left: 305px; bottom: 0; right: 0; border: solid red 1px; overflow: none; } - + .form-row { }
@@ -855,14 +848,14 @@ .form-row-clear { clear: both; } - + div.rule { height: 0; border: none; border-bottom: dotted black 1px; margin: 0 5px; } - + .callout { position: absolute; z-index: 10000; @@ -871,21 +864,21 @@ .pjaForm { margin-bottom:10px; } - + .pjaForm .toolFormBody{ padding:10px; } - + .pjaForm .toolParamHelp{ padding:5px; } - + .panel-header-button-group { margin-right: 5px; padding-right: 5px; border-right: solid gray 1px; } - + </style></%def>
@@ -945,7 +938,7 @@ ${n_('Tools')} </div></div> - + <div class="unified-panel-body" style="overflow: auto;"><div class="toolMenu"> ## Tool search. @@ -953,7 +946,6 @@ show_tool_search = False if trans.user: show_tool_search = trans.user.preferences.get( "workflow.show_tool_search", "True" ) - if show_tool_search == "True": display = "block" else: @@ -963,7 +955,6 @@ <input type="text" name="query" value="search tools" id="tool-search-query" style="width: 100%; font-style:italic; font-size: inherit" autocomplete="off"/><img src="${h.url_for('/static/images/loading_small_white_bg.gif')}" id="search-spinner" style="display: none; position: absolute; right: 0; top: 5px;"/></div> - <div class="toolSectionList"> %for key, val in app.toolbox.tool_panel.items(): <div class="toolSectionWrapper"> @@ -1007,10 +998,10 @@ <a href="#" onclick="add_node_for_module( 'data_input', 'Input Dataset' )">Input dataset</a></div></div> - </div> + </div></div></div> - + </%def><%def name="center_panel()"> @@ -1023,7 +1014,6 @@ Workflow Canvas | ${h.to_unicode( stored.name ) | h} </div></div> - <div class="unified-panel-body"><div id="canvas-viewport" style="width: 100%; height: 100%; position: absolute; overflow: hidden; background: #EEEEEE; background: white url(${h.url_for('/static/images/light_gray_grid.gif')}) repeat;"><div id="canvas-container" style="position: absolute; width: 100%; height: 100%;"></div> @@ -1079,7 +1069,7 @@ <div class="toolParamHelp">Apply tags to make it easy to search for and find items with the same tag.</div></div> ## Workflow annotation. - ## Annotation elt. + ## Annotation elt. <div id="workflow-annotation-area" class="form-row"><label>Annotation / Notes:</label><div id="workflow-annotation" class="tooltip editable-text" original-title="Click to edit annotation">
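The cleanup above is purely mechanical (trailing spaces and whitespace-only lines). For reference, a change like this can be scripted; a minimal sketch of such a helper (hypothetical, not part of the changeset):

    import sys

    def strip_trailing_whitespace(path):
        # Rewrite a file with trailing whitespace removed from every line.
        with open(path) as f:
            lines = f.readlines()
        with open(path, "w") as f:
            for line in lines:
                f.write(line.rstrip() + "\n")

    if __name__ == "__main__":
        for path in sys.argv[1:]:
            strip_trailing_whitespace(path)

Run over templates/workflow/editor.mako, this would produce a diff much like the one above (it also normalizes line endings, which this changeset does not claim to do).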
http://bitbucket.org/galaxy/galaxy-central/changeset/2687588bca04/ changeset: 2687588bca04 user: dannon date: 2011-07-26 19:18:22 summary: merge affected #: 17 files (15.8 KB)
--- a/datatypes_conf.xml.sample Thu Jul 21 14:22:23 2011 -0400 +++ b/datatypes_conf.xml.sample Tue Jul 26 13:18:22 2011 -0400 @@ -123,7 +123,7 @@ <datatype extension="vcf" type="galaxy.datatypes.tabular:Vcf" display_in_upload="true"><converter file="vcf_to_bgzip_converter.xml" target_datatype="bgzip"/><converter file="vcf_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/> - <converter file="vcf_to_summary_tree_converter.xml" target_datatype="summary_tree"/> + <converter file="vcf_to_summary_tree_converter.xml" target_datatype="summary_tree"/></datatype><datatype extension="wsf" type="galaxy.datatypes.wsf:SnpFile" display_in_upload="true"/><datatype extension="velvet" type="galaxy.datatypes.assembly:Velvet" display_in_upload="false"/> @@ -274,10 +274,10 @@ </registration><sniffers><!-- - The order in which Galaxy attempts to determine data types is - important because some formats are much more loosely defined - than others. The following list should be the most rigidly - defined format first, followed by next-most rigidly defined, + The order in which Galaxy attempts to determine data types is + important because some formats are much more loosely defined + than others. The following list should be the most rigidly + defined format first, followed by next-most rigidly defined, and so on. --><sniffer type="galaxy.datatypes.tabular:Vcf"/>
--- a/lib/galaxy/web/controllers/tracks.py Thu Jul 21 14:22:23 2011 -0400 +++ b/lib/galaxy/web/controllers/tracks.py Tue Jul 26 13:18:22 2011 -0400 @@ -90,10 +90,14 @@ class DbKeyColumn( grids.GridColumn ): """ Column for filtering by and displaying dataset dbkey. """ def filter( self, trans, user, query, dbkey ): - """ Filter by dbkey. """ + """ Filter by dbkey; datasets without a dbkey are returned as well. """ # use raw SQL b/c metadata is a BLOB dbkey = dbkey.replace("'", "\\'") - return query.filter( or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ) ) + return query.filter( or_( \ + or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ), \ + or_( "metadata like '%%\"dbkey\": [\"?\"]%%'", "metadata like '%%\"dbkey\": \"?\"%%'" ) \ + ) + )
class HistoryColumn( grids.GridColumn ): """ Column for filtering by history id. """
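A note on the tracks.py change above: metadata is stored as a JSON blob, so dbkey filtering falls back to raw SQL LIKE patterns, and the fix adds patterns for the unknown-build marker "?" so datasets without a dbkey are returned too. A standalone sketch of the pattern construction (hypothetical helper, not Galaxy code):

    def dbkey_like_patterns(dbkey):
        # Build the LIKE clauses used to match a dbkey inside a JSON blob.
        dbkey = dbkey.replace("'", "\\'")  # guard stray quotes in raw SQL
        patterns = []
        for value in (dbkey, "?"):  # the requested build, plus "unknown"
            # list form: "dbkey": ["hg18"]   scalar form: "dbkey": "hg18"
            patterns.append('metadata like \'%%"dbkey": ["%s"]%%\'' % value)
            patterns.append('metadata like \'%%"dbkey": "%s"%%\'' % value)
        return patterns

Each pair covers the two serializations used for the dbkey field; the real code OR-joins all four clauses.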
--- a/lib/galaxy/webapps/community/controllers/repository.py Thu Jul 21 14:22:23 2011 -0400 +++ b/lib/galaxy/webapps/community/controllers/repository.py Tue Jul 26 13:18:22 2011 -0400 @@ -935,14 +935,22 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) repository = get_repository( trans, repository_id ) - tool = load_tool( trans, os.path.abspath( tool_config ) ) - tool_state = self.__new_state( trans ) - return trans.fill_template( "/webapps/community/repository/tool_form.mako", - repository=repository, - tool=tool, - tool_state=tool_state, - message=message, - status=status ) + try: + tool = load_tool( trans, os.path.abspath( tool_config ) ) + tool_state = self.__new_state( trans ) + return trans.fill_template( "/webapps/community/repository/tool_form.mako", + repository=repository, + tool=tool, + tool_state=tool_state, + message=message, + status=status ) + except Exception, e: + message = 'Error loading tool: %s. Click <b>Reset metadata</b> to correct this error.' % str( e ) + return trans.response.send_redirect( web.url_for( controller='repository', + action='manage_repository', + id=repository_id, + message=message, + status='error' ) ) def __new_state( self, trans, all_pages=False ): """ Create a new `DefaultToolState` for this tool. It will not be initialized @@ -955,6 +963,27 @@ state.inputs = {} return state @web.expose + def view_tool_metadata( self, trans, repository_id, changeset_revision, tool_id, **kwd ): + params = util.Params( kwd ) + message = util.restore_text( params.get( 'message', '' ) ) + status = params.get( 'status', 'done' ) + repository = get_repository( trans, repository_id ) + metadata = {} + tool = None + repository_metadata = get_repository_metadata( trans, repository_id, changeset_revision ).metadata + if 'tools' in repository_metadata: + for tool_metadata_dict in repository_metadata[ 'tools' ]: + if tool_metadata_dict[ 'id' ] == tool_id: + metadata = tool_metadata_dict + tool = load_tool( trans, os.path.abspath( metadata[ 'tool_config' ] ) ) + break + return trans.fill_template( "/webapps/community/repository/view_tool_metadata.mako", + repository=repository, + tool=tool, + metadata=metadata, + message=message, + status=status ) + @web.expose def download( self, trans, repository_id, file_type, **kwd ): # Download an archive of the repository files compressed as zip, gz or bz2. params = util.Params( kwd )
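The repository.py change wraps tool loading in a guard so a malformed tool config redirects with an actionable message instead of raising a server error. A minimal sketch of that controller idiom under the same assumption (load_tool, render, and redirect are stand-ins for the Galaxy internals):

    def display_tool(repository_id, tool_config, load_tool, render, redirect):
        # Render a tool form, falling back to a redirect on load errors.
        try:
            tool = load_tool(tool_config)
            return render(tool=tool)
        except Exception as e:
            message = ("Error loading tool: %s. "
                       "Click <b>Reset metadata</b> to correct this error." % e)
            return redirect(controller="repository", action="manage_repository",
                            id=repository_id, message=message, status="error")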
--- a/static/june_2007_style/blue/panel_layout.css Thu Jul 21 14:22:23 2011 -0400 +++ b/static/june_2007_style/blue/panel_layout.css Tue Jul 26 13:18:22 2011 -0400 @@ -1,4 +1,3 @@ -body,html{overflow:hidden;margin:0;padding:0;width:100%;height:100%;} body{font:75% "Lucida Grande",verdana,arial,helvetica,sans-serif;background:#eee;} .unselectable{user-select:none;-moz-user-select:none;-webkit-user-select:none;} #background{position:absolute;background:#eee;z-index:-1;top:0;left:0;margin:0;padding:0;width:100%;height:100%;}
--- a/static/june_2007_style/blue/trackster.css Thu Jul 21 14:22:23 2011 -0400 +++ b/static/june_2007_style/blue/trackster.css Tue Jul 26 13:18:22 2011 -0400 @@ -55,4 +55,4 @@ .icon.more-down{background:url('../images/fugue/arrow-transition-270-bw.png') no-repeat 0px 0px;} .icon.more-across{background:url('../images/fugue/arrow-transition-bw.png') no-repeat 0px 0px;} .intro{padding:1em;} -.intro>.action-button{background-color:#CCC;padding:1em;} \ No newline at end of file +.intro > .action-button{background-color:#CCC;padding:1em;}
--- a/static/june_2007_style/panel_layout.css.tmpl Thu Jul 21 14:22:23 2011 -0400 +++ b/static/june_2007_style/panel_layout.css.tmpl Tue Jul 26 13:18:22 2011 -0400 @@ -1,11 +1,3 @@ -body, html { - overflow: hidden; - margin: 0; - padding: 0; - width: 100%; - height: 100%; -} - body { font: 75% "Lucida Grande",verdana,arial,helvetica,sans-serif; background: ${layout_bg};
--- a/static/scripts/trackster.js Thu Jul 21 14:22:23 2011 -0400 +++ b/static/scripts/trackster.js Tue Jul 26 13:18:22 2011 -0400 @@ -807,6 +807,9 @@ } view.redraw(); }, + /** + * Add a track to the view. + */ add_track: function(track) { track.view = this; track.track_id = this.track_id_counter; @@ -822,6 +825,9 @@ label_track.view = this; this.label_tracks.push(label_track); }, + /** + * Remove a track from the view. + */ remove_track: function(track) { this.has_changes = true; delete this.tracks[this.tracks.indexOf(track)]; @@ -1588,23 +1594,24 @@ /** * Tiles drawn by tracks. */ -var Tile = function(index, resolution, canvas) { +var Tile = function(index, resolution, canvas, data) { this.index = index; this.low = index * DENSITY * resolution; this.high = (index + 1) * DENSITY * resolution; this.resolution = resolution; // Wrap element in div for background. this.canvas = $("<div class='track-tile'/>").append(canvas); + this.data = data; this.stale = false; };
-var SummaryTreeTile = function(index, resolution, canvas, max_val) { - Tile.call(this, index, resolution, canvas); +var SummaryTreeTile = function(index, resolution, canvas, data, max_val) { + Tile.call(this, index, resolution, canvas, data); this.max_val = max_val; };
-var FeatureTrackTile = function(index, resolution, canvas, message) { - Tile.call(this, index, resolution, canvas); +var FeatureTrackTile = function(index, resolution, canvas, data, message) { + Tile.call(this, index, resolution, canvas, data); this.message = message; };
@@ -2096,13 +2103,17 @@ filters[f].update_ui_elt(); }
- // Determine if filters are available; this is based on the example feature. - var filters_available = false; - if (track.example_feature) { - for (var f = 0; f < filters.length; f++) { - if (filters[f].applies_to(track.example_feature)) { - filters_available = true; - break; + // Determine if filters are available; this is based on the tiles' data. + var filters_available = false, + example_feature; + for (var i = 0; i < drawn_tiles.length; i++) { + if (drawn_tiles[i].data.length) { + example_feature = drawn_tiles[i].data[0]; + for (var f = 0; f < filters.length; f++) { + if (filters[f].applies_to(example_feature)) { + filters_available = true; + break; + } } } } @@ -2385,7 +2396,7 @@ var c_start = Math.round(c * w_scale); ctx.fillText(seq[c], c_start + track.left_offset, 10); } - return new Tile(tile_index, resolution, canvas); + return new Tile(tile_index, resolution, canvas, seq); } this.content_div.css("height", "0px"); } @@ -2523,7 +2534,7 @@ var painter = new painters.LinePainter(result.data, tile_low, tile_low + tile_length, this.prefs, this.mode); painter.draw(ctx, width, height);
- return new Tile(tile_index, resolution, canvas); + return new Tile(tile_index, resolution, canvas, result.data); } });
@@ -2737,7 +2748,7 @@ // TODO: this shouldn't be done at the tile level this.container_div.find(".yaxislabel").remove(); var max_label = $("<div />").addClass('yaxislabel'); - max_label.text( result.max ); + max_label.text(result.max); max_label.css({ position: "absolute", top: "24px", left: "10px", color: this.prefs.label_color }); max_label.prependTo(this.container_div); // Create canvas @@ -2760,7 +2771,7 @@ // Deal with left_offset by translating ctx.translate(left_offset, SUMMARY_TREE_TOP_PADDING); painter.draw(ctx, width, required_height); - return new SummaryTreeTile(tile_index, resolution, canvas, result.max); + return new SummaryTreeTile(tile_index, resolution, canvas, result.data, result.max); }
// Start dealing with row-by-row tracks @@ -2811,16 +2822,12 @@ this.container_div.find(".yaxislabel").remove();
if (result.data) { - // Set example feature. This is needed so that track can update its UI based on feature attributes. - // TODO: use tile data rather than example feature? - this.example_feature = (result.data.length ? result.data[0] : undefined); - // Draw features. ctx.translate(left_offset, 0); painter.draw(ctx, width, required_height, slots); }
- return new FeatureTrackTile(tile_index, resolution, canvas, result.message); + return new FeatureTrackTile(tile_index, resolution, canvas, result.data, result.message); } });
--- a/templates/base_panels.mako Thu Jul 21 14:22:23 2011 -0400 +++ b/templates/base_panels.mako Tue Jul 26 13:18:22 2011 -0400 @@ -19,6 +19,13 @@ <%def name="stylesheets()"> ${h.css('base','panel_layout','jquery.rating')} <style type="text/css"> + body, html { + overflow: hidden; + margin: 0; + padding: 0; + width: 100%; + height: 100%; + } #center { %if not self.has_left_panel: left: 0;
--- a/templates/tool_form.mako Thu Jul 21 14:22:23 2011 -0400 +++ b/templates/tool_form.mako Tue Jul 26 13:18:22 2011 -0400 @@ -2,7 +2,12 @@ <%namespace file="/base_panels.mako" import="overlay" /><%def name="stylesheets()"> - ${h.css( "autocomplete_tagging", "panel_layout", "base", "library" )} + ${h.css( "autocomplete_tagging", "base", "panel_layout", "library" )} + <style type="text/css"> + html, body { + background-color: #fff; + } + </style></%def><%def name="javascripts()">
--- a/templates/tracks/browser.mako Thu Jul 21 14:22:23 2011 -0400 +++ b/templates/tracks/browser.mako Tue Jul 26 13:18:22 2011 -0400 @@ -273,7 +273,7 @@ }
// Add track. - tracks.push( { + tracks.push({ "track_type": track.get_type(), "name": track.name, "hda_ldda": track.hda_ldda,
--- a/templates/visualization/display.mako Thu Jul 21 14:22:23 2011 -0400 +++ b/templates/visualization/display.mako Tue Jul 26 13:18:22 2011 -0400 @@ -122,6 +122,11 @@ // Keyboard navigation. Scroll ~7% of height when scrolling up/down. // $(document).keydown(function(e) { + // Do not navigate if arrow keys used in input element. + if ($(e.srcElement).is(':input')) { + return; + } + // Key codes: left == 37, up == 38, right == 39, down == 40 switch(e.which) { case 37:
--- a/templates/webapps/community/repository/manage_repository.mako Thu Jul 21 14:22:23 2011 -0400 +++ b/templates/webapps/community/repository/manage_repository.mako Tue Jul 26 13:18:22 2011 -0400 @@ -120,7 +120,7 @@ <div style="clear: both"></div></div><div class="form-row"> - <label>Version:</label> + <label>Revision:</label> %if can_view_change_log: <a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${repository.revision}</a> %else: @@ -151,34 +151,6 @@ </form></div></div> -<p/> -<div class="toolForm"> - <div class="toolFormTitle">Manage categories</div> - <div class="toolFormBody"> - <form name="categories" id="categories" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" > - <div class="form-row"> - <label>Categories</label> - <select name="category_id" multiple> - %for category in categories: - %if category.id in selected_categories: - <option value="${trans.security.encode_id( category.id )}" selected>${category.name}</option> - %else: - <option value="${trans.security.encode_id( category.id )}">${category.name}</option> - %endif - %endfor - </select> - <div class="toolParamHelp" style="clear: both;"> - Multi-select list - hold the appropriate key while clicking to select multiple categories. - </div> - <div style="clear: both"></div> - </div> - <div class="form-row"> - <input type="submit" name="manage_categories_button" value="Save"/> - </div> - </form> - </div> -</div> -<p/> %if can_set_metadata: <p/><div class="toolForm"> @@ -204,7 +176,16 @@ </tr> %for tool_dict in tool_dicts: <tr> - <td><a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">${tool_dict[ 'name' ]}</a></td> + <td> + <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${tool_dict[ 'id' ]}-popup"> + <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}"> + ${tool_dict[ 'name' ]} + </a> + </div> + <div popupmenu="tool-${tool_dict[ 'id' ]}-popup"> + <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), changeset_revision=repository.tip, tool_id=tool_dict[ 'id' ] )}">View all metadata for this tool</a> + </div> + </td><td>${tool_dict[ 'description' ]}</td><td>${tool_dict[ 'version' ]}</td><td> @@ -274,6 +255,33 @@ </div></div> %endif +<p/> +<div class="toolForm"> + <div class="toolFormTitle">Manage categories</div> + <div class="toolFormBody"> + <form name="categories" id="categories" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" > + <div class="form-row"> + <label>Categories</label> + <select name="category_id" multiple> + %for category in categories: + %if category.id in selected_categories: + <option value="${trans.security.encode_id( category.id )}" selected>${category.name}</option> + %else: + <option value="${trans.security.encode_id( category.id )}">${category.name}</option> + %endif + %endfor + </select> + <div class="toolParamHelp" style="clear: both;"> + Multi-select list - hold the appropriate key while clicking to select multiple categories. 
+ </div> + <div style="clear: both"></div> + </div> + <div class="form-row"> + <input type="submit" name="manage_categories_button" value="Save"/> + </div> + </form> + </div> +</div> %if trans.app.config.smtp_server: <p/><div class="toolForm"> @@ -330,8 +338,8 @@ </form></div></div> -<p/> %if repository.ratings: + <p/><div class="toolForm"><div class="toolFormTitle">Rating</div><div class="toolFormBody">
--- a/templates/webapps/community/repository/view_repository.mako Thu Jul 21 14:22:23 2011 -0400 +++ b/templates/webapps/community/repository/view_repository.mako Tue Jul 26 13:18:22 2011 -0400 @@ -118,7 +118,7 @@ </div> %endif <div class="form-row"> - <label>Version:</label> + <label>Revision:</label> %if can_view_change_log: <a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${repository.revision}</a> %else: @@ -145,20 +145,6 @@ %endif </div></div> -%if repository.categories: - <p/> - <div class="toolForm"> - <div class="toolFormTitle">Categories</div> - <div class="toolFormBody"> - %for rca in repository.categories: - <div class="form-row"> - ${rca.category.name} - </div> - %endfor - <div style="clear: both"></div> - </div> - </div> -%endif %if metadata: <p/><div class="toolForm"> @@ -183,9 +169,18 @@ </tr> %for tool_dict in tool_dicts: <tr> - <td><a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">${tool_dict[ 'name' ]}</a></td> + <td> + <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${repository.id}-popup"> + <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}"> + ${tool_dict[ 'name' ]} + </a> + </div> + <div popupmenu="tool-${repository.id}-popup"> + <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), changeset_revision=repository.tip, tool_id=tool_dict[ 'id' ] )}">View all metadata for this tool</a> + </div> + </td><td>${tool_dict[ 'description' ]}</td> - <td>version: ${tool_dict[ 'version' ]}</td> + <td>${tool_dict[ 'version' ]}</td><td><% if 'requirements' in tool_dict: @@ -242,6 +237,20 @@ </div></div> %endif +%if repository.categories: + <p/> + <div class="toolForm"> + <div class="toolFormTitle">Categories</div> + <div class="toolFormBody"> + %for rca in repository.categories: + <div class="form-row"> + ${rca.category.name} + </div> + %endfor + <div style="clear: both"></div> + </div> + </div> +%endif %if trans.user and trans.app.config.smtp_server: <p/><div class="toolForm">
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/templates/webapps/community/repository/view_tool_metadata.mako Tue Jul 26 13:18:22 2011 -0400 @@ -0,0 +1,202 @@ +<%inherit file="/base.mako"/> +<%namespace file="/message.mako" import="render_msg" /> +<%namespace file="/webapps/community/common/common.mako" import="*" /> +<%namespace file="/webapps/community/repository/common.mako" import="*" /> + +<% + from galaxy.web.framework.helpers import time_ago + from urllib import quote_plus + is_admin = trans.user_is_admin() + is_new = repository.is_new + can_push = trans.app.security_agent.can_push( trans.user, repository ) + can_upload = can_push + can_browse_contents = not is_new + can_rate = repository.user != trans.user + can_manage = is_admin or repository.user == trans.user + can_view_change_log = not is_new + if can_push: + browse_label = 'Browse or delete repository files' + else: + browse_label = 'Browse repository files' +%> + +<%! + def inherit(context): + if context.get('use_panels'): + return '/webapps/community/base_panels.mako' + else: + return '/base.mako' +%> +<%inherit file="${inherit(context)}"/> + +<br/><br/> +<ul class="manage-table-actions"> + %if is_new: + <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ), webapp='community' )}">Upload files to repository</a> + %else: + <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li> + <div popupmenu="repository-${repository.id}-popup"> + %if can_manage: + <a class="action-button" href="${h.url_for( controller='repository', action='manage_repository', id=trans.app.security.encode_id( repository.id ) )}">Manage repository</a> + %else: + <a class="action-button" href="${h.url_for( controller='repository', action='view_repository', id=trans.app.security.encode_id( repository.id ) )}">View repository</a> + %endif + %if can_upload: + <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ), webapp='community' )}">Upload files to repository</a> + %endif + %if can_view_change_log: + <a class="action-button" href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">View change log</a> + %endif + %if can_browse_contents: + <a class="action-button" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${browse_label}</a> + %endif + <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='gz' )}">Download as a .tar.gz file</a> + <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='bz2' )}">Download as a .tar.bz2 file</a> + <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='zip' )}">Download as a zip file</a> + </div> + %endif +</ul> + +%if message: + ${render_msg( message, status )} +%endif + +<div class="toolForm"> + <div class="toolFormTitle">${repository.name}</div> + <div class="toolFormBody"> + <div class="form-row"> + <label>Clone this repository:</label> + ${render_clone_str( repository )} + </div> + </div> +</div> +%if metadata: +## "{"tools": +## [{"description": 
"data on any column using simple expressions", +## "id": "Filter1", +## "name": "Filter", +## "requirements": [], +## "tests": [{ +## "inputs": [["input", "1.bed", {"children": [], "value": "1.bed"}], ["cond", "c1=='chr22'", {"children": [], "value": "c1=='chr22'"}]], "name": "Test-1", +## "outputs": [["out_file1", "filter1_test1.bed", {"compare": "diff", "delta": 10000, "extra_files": [], "lines_diff": 0, "sort": false}]], +## "required_files": [["1.bed", {"children": [], "value": "1.bed"}]]}, {"inputs": [["input", "7.bed", {"children": [], "value": "7.bed"}], ["cond", "c1=='chr1' and c3-c2>=2000 and c6=='+'", {"children": [], "value": "c1=='chr1' and c3-c2>=2000 and c6=='+'"}]], "name": "Test-2", "outputs": [["out_file1", "filter1_test2.bed", {"compare": "diff", "delta": 10000, "extra_files": [], "lines_diff": 0, "sort": false}]], "required_files": [["7.bed", {"children": [], "value": "7.bed"}]]}], "tool_config": "database/community_files/000/repo_1/filtering.xml", "version": "1.0.1", "version_string_cmd": null}], "workflows": [{"a_galaxy_workflow": "true", "annotation": "", "format-version": "0.1", "name": "Workflow constructed from history 'Unnamed history'", "steps": {"0": {"annotation": "", "id": 0, "input_connections": {}, "inputs": [{"description": "", "name": "Input Dataset"}], "name": "Input dataset", "outputs": [], "position": {"left": 10, "top": 10}, "tool_errors": null, "tool_id": null, "tool_state": "{\"name\": \"Input Dataset\"}", "tool_version": null, "type": "data_input", "user_outputs": []}, "1": {"annotation": "", "id": 1, "input_connections": {"input": {"id": 0, "output_name": "output"}}, "inputs": [], "name": "Filter", "outputs": [{"name": "out_file1", "type": "input"}], "position": {"left": 230, "top": 10}, "post_job_actions": {}, "tool_errors": null, "tool_id": "Filter1", "tool_state": "{\"__page__\": 0, \"cond\": \"\\\"c1=='chr1'\\\"\", \"chromInfo\": \"\\\"/Users/gvk/workspaces_2008/central_051111/tool-data/shared/ucsc/chrom/?.len\\\"\", \"input\": \"null\"}", "tool_version": null, "type": "tool", "user_outputs": []}, "2": {"annotation": "", "id": 2, "input_connections": {"input1": {"id": 0, "output_name": "output"}, "input2": {"id": 1, "output_name": "out_file1"}}, "inputs": [], "name": "Subtract Whole Dataset", "outputs": [{"name": "output", "type": "input"}], "position": {"left": 450, "top": 10}, "post_job_actions": {}, "tool_errors": null, "tool_id": "subtract_query1", "tool_state": "{\"input2\": \"null\", \"__page__\": 0, \"end_col\": \"{\\\"__class__\\\": \\\"UnvalidatedValue\\\", \\\"value\\\": \\\"None\\\"}\", \"begin_col\": \"{\\\"__class__\\\": \\\"UnvalidatedValue\\\", \\\"value\\\": \\\"None\\\"}\", \"input1\": \"null\", \"chromInfo\": \"\\\"/Users/gvk/workspaces_2008/central_051111/tool-data/shared/ucsc/chrom/?.len\\\"\"}", "tool_version": null, "type": "tool", "user_outputs": []}}}]}" + <p/> + <div class="toolForm"> + <div class="toolFormTitle">${metadata[ 'name' ]} tool metadata</div> + <div class="toolFormBody"> + <div class="form-row"> + <label>Name:</label> + <a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=metadata[ 'tool_config' ] )}">${metadata[ 'name' ]}</a> + <div style="clear: both"></div> + </div> + <div class="form-row"> + <label>Description:</label> + ${metadata[ 'description' ]} + <div style="clear: both"></div> + </div> + <div class="form-row"> + <label>Id:</label> + ${metadata[ 'id' ]} + <div style="clear: both"></div> + </div> + <div 
class="form-row"> + <label>Version:</label> + ${metadata[ 'version' ]} + <div style="clear: both"></div> + </div> + <div class="form-row"> + <label>Version command string:</label> + ${metadata[ 'version_string_cmd' ]} + <div style="clear: both"></div> + </div> + <div class="form-row"> + <label>Command:</label> + ${tool.command} + <div style="clear: both"></div> + </div> + <div class="form-row"> + <label>Interpreter:</label> + ${tool.interpreter} + <div style="clear: both"></div> + </div> + <div class="form-row"> + <label>Is multi-byte:</label> + ${tool.is_multi_byte} + <div style="clear: both"></div> + </div> + <div class="form-row"> + <label>Forces a history refresh:</label> + ${tool.force_history_refresh} + <div style="clear: both"></div> + </div> + <div class="form-row"> + <label>Parallelism:</label> + ${tool.parallelism} + <div style="clear: both"></div> + </div> + <% + if 'requirements' in metadata: + requirements = metadata[ 'requirements' ] + else: + requirements = None + %> + %if requirements: + <% + requirements_str = '' + for requirement_dict in metadata[ 'requirements' ]: + requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] ) + requirements_str = requirements_str.rstrip( ', ' ) + %> + <div class="form-row"> + <label>Requirements:</label> + ${requirements_str} + <div style="clear: both"></div> + </div> + %endif + <% + if 'tests' in metadata: + tests = metadata[ 'tests' ] + else: + tests = None + %> + %if tests: + <div class="form-row"> + <label>Functional tests:</label></td> + <table class="grid"> + <tr> + <td><b>name</b></td> + <td><b>inputs</b></td> + <td><b>outputs</b></td> + <td><b>required files</b></td> + </tr> + %for test_dict in tests: + <% + inputs = test_dict[ 'inputs' ] + outputs = test_dict[ 'outputs' ] + required_files = test_dict[ 'required_files' ] + %> + <tr> + <td>${test_dict[ 'name' ]}</td> + <td> + %for input in inputs: + <b>${input[0]}:</b> ${input[1]}<br/> + %endfor + </td> + <td> + %for output in outputs: + <b>${output[0]}:</b> ${output[1]}<br/> + %endfor + </td> + <td> + %for required_file in required_files: + ${required_file[0]}<br/> + %endfor + </td> + </tr> + %endfor + </table> + </div> + %endif + </div> + </div> +%endif
--- a/tool_conf.xml.main Thu Jul 21 14:22:23 2011 -0400 +++ b/tool_conf.xml.main Tue Jul 26 13:18:22 2011 -0400 @@ -51,7 +51,7 @@ <tool file="fasta_tools/fasta_to_tabular.xml" /><tool file="filters/gff2bed.xml" /><tool file="maf/maf_to_bed.xml" /> - <tool file="maf/maf_to_interval.xml" /> + <tool file="maf/maf_to_interval.xml" /><tool file="maf/maf_to_fasta.xml" /><tool file="fasta_tools/tabular_to_fasta.xml" /><tool file="fastq/fastq_to_fasta.xml" /> @@ -78,13 +78,13 @@ <tool file="filters/gff/extract_GFF_Features.xml" /><tool file="filters/gff/gff_filter_by_attribute.xml" /><tool file="filters/gff/gff_filter_by_feature_count.xml" /> - <tool file="filters/gff/gtf_filter_by_attribute_values_list.xml" /> + <tool file="filters/gff/gtf_filter_by_attribute_values_list.xml" /></section><section name="Join, Subtract and Group" id="group"><tool file="filters/joiner.xml" /><tool file="filters/compare.xml"/><tool file="new_operations/subtract_query.xml"/> - <tool file="stats/grouping.xml" /> + <tool file="stats/grouping.xml" /><tool file="new_operations/column_join.xml"/></section><section name="Extract Features" id="features"> @@ -112,7 +112,7 @@ <tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section name="Operate on Genomic Intervals" id="bxops"> - <tool file="new_operations/intersect.xml" /> + <tool file="new_operations/intersect.xml" /><tool file="new_operations/subtract.xml" /><tool file="new_operations/merge.xml" /><tool file="new_operations/concat.xml" /> @@ -127,7 +127,7 @@ </section><section name="Statistics" id="stats"><tool file="stats/gsummary.xml" /> - <tool file="filters/uniq.xml" /> + <tool file="filters/uniq.xml" /><tool file="stats/cor.xml" /><tool file="stats/generate_matrix_for_pca_lda.xml" /><tool file="stats/lda_analy.xml" /> @@ -223,13 +223,13 @@ <tool file="emboss_5/emboss_chips.xml" /><tool file="emboss_5/emboss_cirdna.xml" /><tool file="emboss_5/emboss_codcmp.xml" /> - <tool file="emboss_5/emboss_coderet.xml" /> + <tool file="emboss_5/emboss_coderet.xml" /><tool file="emboss_5/emboss_compseq.xml" /> - <tool file="emboss_5/emboss_cpgplot.xml" /> + <tool file="emboss_5/emboss_cpgplot.xml" /><tool file="emboss_5/emboss_cpgreport.xml" /><tool file="emboss_5/emboss_cusp.xml" /><tool file="emboss_5/emboss_cutseq.xml" /> - <tool file="emboss_5/emboss_dan.xml" /> + <tool file="emboss_5/emboss_dan.xml" /><tool file="emboss_5/emboss_degapseq.xml" /><tool file="emboss_5/emboss_descseq.xml" /><tool file="emboss_5/emboss_diffseq.xml" /> @@ -245,7 +245,7 @@ <tool file="emboss_5/emboss_etandem.xml" /><tool file="emboss_5/emboss_extractfeat.xml" /><tool file="emboss_5/emboss_extractseq.xml" /> - <tool file="emboss_5/emboss_freak.xml" /> + <tool file="emboss_5/emboss_freak.xml" /><tool file="emboss_5/emboss_fuzznuc.xml" /><tool file="emboss_5/emboss_fuzzpro.xml" /><tool file="emboss_5/emboss_fuzztran.xml" /> @@ -266,7 +266,7 @@ <tool file="emboss_5/emboss_merger.xml" /><tool file="emboss_5/emboss_msbar.xml" /><tool file="emboss_5/emboss_needle.xml" /> - <tool file="emboss_5/emboss_newcpgreport.xml" /> + <tool file="emboss_5/emboss_newcpgreport.xml" /><tool file="emboss_5/emboss_newcpgseek.xml" /><tool file="emboss_5/emboss_newseq.xml" /><tool file="emboss_5/emboss_noreturn.xml" /> @@ -294,7 +294,7 @@ <tool file="emboss_5/emboss_revseq.xml" /><tool file="emboss_5/emboss_seqmatchall.xml" /><tool file="emboss_5/emboss_seqret.xml" /> - <tool file="emboss_5/emboss_showfeat.xml" /> + <tool file="emboss_5/emboss_showfeat.xml" /><tool file="emboss_5/emboss_shuffleseq.xml" /><tool 
file="emboss_5/emboss_sigcleave.xml" /><tool file="emboss_5/emboss_sirna.xml" /> @@ -316,7 +316,7 @@ <tool file="emboss_5/emboss_water.xml" /><tool file="emboss_5/emboss_wobble.xml" /><tool file="emboss_5/emboss_wordcount.xml" /> - <tool file="emboss_5/emboss_wordmatch.xml" /> + <tool file="emboss_5/emboss_wordmatch.xml" /></section><label text="NGS Toolbox Beta" id="ngs" /><section name="NGS: QC and manipulation" id="cshl_library_information">
--- a/tool_conf.xml.sample Thu Jul 21 14:22:23 2011 -0400 +++ b/tool_conf.xml.sample Tue Jul 26 13:18:22 2011 -0400 @@ -144,14 +144,14 @@ <tool file="regVariation/t_test_two_samples.xml" /><tool file="regVariation/compute_q_values.xml" /><label text="GFF" id="gff" /> - <tool file="stats/count_gff_features.xml" /> + <tool file="stats/count_gff_features.xml" /></section><!-- Keep this section commented until all of the tools have functional tests <section name="Wavelet Analysis" id="dwt"><tool file="discreteWavelet/execute_dwt_IvC_all.xml" /><tool file="discreteWavelet/execute_dwt_cor_aVa_perClass.xml" /> - <tool file="discreteWavelet/execute_dwt_cor_aVb_all.xml" /> + <tool file="discreteWavelet/execute_dwt_cor_aVb_all.xml" /><tool file="discreteWavelet/execute_dwt_var_perClass.xml" /></section> --> @@ -184,8 +184,8 @@ <tool file="regVariation/compute_motif_frequencies_for_all_motifs.xml" /><tool file="regVariation/categorize_elements_satisfying_criteria.xml" />s <tool file="regVariation/draw_stacked_barplots.xml" /> - <tool file="regVariation/multispecies_MicrosatDataGenerator_interrupted_GALAXY.xml" /> - <tool file="regVariation/microsatellite_birthdeath.xml" /> + <tool file="regVariation/multispecies_MicrosatDataGenerator_interrupted_GALAXY.xml" /> + <tool file="regVariation/microsatellite_birthdeath.xml" /></section><section name="Multiple regression" id="multReg"><tool file="regVariation/linear_regression.xml" /> @@ -241,7 +241,7 @@ </section><section name="NGS: QC and manipulation" id="NGS_QC"><label text="FastQC: fastq/sam/bam" id="fastqcsambam" /> - <tool file="rgenetics/rgFastQC.xml" /> + <tool file="rgenetics/rgFastQC.xml" /><label text="Illumina fastq" id="illumina" /><tool file="fastq/fastq_groomer.xml" /><tool file="fastq/fastq_paired_end_splitter.xml" /> @@ -280,21 +280,21 @@ <tool file="fastx_toolkit/fastx_collapser.xml" /><tool file="fastx_toolkit/fastx_renamer.xml" /><tool file="fastx_toolkit/fastx_reverse_complement.xml" /> - <tool file="fastx_toolkit/fastx_trimmer.xml" /> + <tool file="fastx_toolkit/fastx_trimmer.xml" /></section><section name="NGS: Picard (beta)" id="picard_beta"><label text="QC/Metrics for sam/bam" id="qcsambam"/><tool file="picard/picard_BamIndexStats.xml" /> - <tool file="picard/rgPicardASMetrics.xml" /> - <tool file="picard/rgPicardGCBiasMetrics.xml" /> - <tool file="picard/rgPicardLibComplexity.xml" /> + <tool file="picard/rgPicardASMetrics.xml" /> + <tool file="picard/rgPicardGCBiasMetrics.xml" /> + <tool file="picard/rgPicardLibComplexity.xml" /><tool file="picard/rgPicardInsertSize.xml" /><tool file="picard/rgPicardHsMetrics.xml" /><label text="bam/sam Cleaning" id="picard-clean" /><tool file="picard/picard_AddOrReplaceReadGroups.xml" /><tool file="picard/picard_ReorderSam.xml" /><tool file="picard/picard_ReplaceSamHeader.xml" /> - <tool file="picard/rgPicardFixMate.xml" /> + <tool file="picard/rgPicardFixMate.xml" /><tool file="picard/rgPicardMarkDups.xml" /></section><!--
--- a/tool_list.py Thu Jul 21 14:22:23 2011 -0400 +++ b/tool_list.py Tue Jul 26 13:18:22 2011 -0400 @@ -4,19 +4,19 @@ onoff = 1 tool_list = [] for line in open("tool_conf.xml.sample", "r"): - if line.find("<!--") != -1: + if line.find("<!--") != -1: onoff = 0 - if line.find("file") != -1 and onoff==1: - strs = line.split('\"') + if line.find("file") != -1 and onoff==1: + strs = line.split('\"') tool_list.append(strs[1]) - if line.find("<section") != -1 and onoff==1: + if line.find("<section") != -1 and onoff==1: keys = line.strip().split('\"') n = 0 strtmp = "section::" - while n < len(keys) : - if keys[n].find("id") != -1 : strtmp = strtmp + keys[n+1] - if keys[n].find("name") != -1 : strtmp = strtmp + keys[n+1] + "-" - n = n + 1 + while n < len(keys) : + if keys[n].find("id") != -1 : strtmp = strtmp + keys[n+1] + if keys[n].find("name") != -1 : strtmp = strtmp + keys[n+1] + "-" + n = n + 1 tool_list.append(strtmp.replace(' ', '_')) if line.find("-->") != -1: onoff =1 @@ -26,42 +26,42 @@ id = [] desc = [] tool_infos = [] -for tool in tool_list : - if tool.find("section")!=-1 : +for tool in tool_list : + if tool.find("section")!=-1 : tool_info = dict() tool_info["id"] = tool tool_infos.append(tool_info) - if os.path.exists("tools/"+tool) : - for line in open("tools/"+tool) : - if line.find("<tool ") != -1 and line.find("id") != -1 : - keys = line.strip().split('"') - n = 0 - tool_info = dict() - tool_info["desc"] = '' - while n < len(keys) : - if keys[n].find("id") != -1 : tool_info["id"] = keys[n+1].replace(' ', '_') - if keys[n].find("name") != -1 : tool_info["name"] = keys[n+1] - if keys[n].find("description") != -1 : tool_info["desc"] = keys[n+1] - n = n + 1 - tool_infos.append(tool_info) - break + if os.path.exists("tools/"+tool) : + for line in open("tools/"+tool) : + if line.find("<tool ") != -1 and line.find("id") != -1 : + keys = line.strip().split('"') + n = 0 + tool_info = dict() + tool_info["desc"] = '' + while n < len(keys) : + if keys[n].find("id") != -1 : tool_info["id"] = keys[n+1].replace(' ', '_') + if keys[n].find("name") != -1 : tool_info["name"] = keys[n+1] + if keys[n].find("description") != -1 : tool_info["desc"] = keys[n+1] + n = n + 1 + tool_infos.append(tool_info) + break
flag=0 -if len(sys.argv) == 1 : - for tool_info in tool_infos: - if tool_info["id"].find("section") != -1 : +if len(sys.argv) == 1 : + for tool_info in tool_infos: + if tool_info["id"].find("section") != -1 : print "===========================================================================================================================================" print "%-45s\t%-40s\t%s" % ("id", "name", tool_info["id"]) print "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -" else : print "%-45s\t%-40s" % (tool_info["id"], tool_info["name"]) -else: - for tool_info in tool_infos: +else: + for tool_info in tool_infos: if tool_info["id"].find("section") != -1 : flag=0 elif flag==1: print " functional.test_toolbox:TestForTool_%s" % tool_info["id"], - if tool_info["id"].replace('section::', '')==sys.argv[1]: + if tool_info["id"].replace('section::', '')==sys.argv[1]: flag=1
#for key in tool_infos.keys():
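For context: tool_list.py walks tool_conf.xml.sample with string splitting, using the onoff flag to skip commented-out sections. An equivalent effect can be had with a real XML parser, which drops comments automatically — offered as an alternative sketch, not what this changeset does (the diff above is whitespace-only):

    import xml.etree.ElementTree as ET

    def list_tools(conf_path="tool_conf.xml.sample"):
        # Yield section markers and tool file paths from a tool conf.
        root = ET.parse(conf_path).getroot()
        entries = []
        for section in root.iter("section"):
            name = section.get("name", "").replace(" ", "_")
            entries.append("section::%s-%s" % (name, section.get("id", "")))
            for tool in section.iter("tool"):
                if tool.get("file"):
                    entries.append(tool.get("file"))
        return entries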
http://bitbucket.org/galaxy/galaxy-central/changeset/e055ca3efb5c/ changeset: e055ca3efb5c user: dannon date: 2011-07-26 19:56:16 summary: merge affected #: 13 files (41.1 KB)
--- a/lib/galaxy/jobs/__init__.py Tue Jul 26 13:18:22 2011 -0400 +++ b/lib/galaxy/jobs/__init__.py Tue Jul 26 13:56:16 2011 -0400 @@ -646,9 +646,14 @@ tool=self.tool, stdout=stdout, stderr=stderr ) job.command_line = self.command_line
+ bytes = 0 # Once datasets are collected, set the total dataset size (includes extra files) for dataset_assoc in job.output_datasets + job.output_library_datasets: dataset_assoc.dataset.dataset.set_total_size() + bytes += dataset_assoc.dataset.dataset.get_total_size() + + if job.user: + job.user.total_disk_usage += bytes
# fix permissions for path in [ dp.real_path for dp in self.get_output_fnames() ]:
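The jobs/__init__.py change starts charging finished jobs' outputs against the owner's disk-usage total. A condensed, standalone restatement (object names are simplified stand-ins for Galaxy's models):

    def charge_outputs_to_user(job):
        # Sum a job's output dataset sizes and charge them to its user.
        total_bytes = 0
        for assoc in job.output_datasets + job.output_library_datasets:
            dataset = assoc.dataset.dataset   # association -> HDA -> Dataset
            dataset.set_total_size()          # includes extra files on disk
            total_bytes += dataset.get_total_size()
        if job.user:                          # anonymous jobs are not charged
            job.user.total_disk_usage += total_bytes
        return total_bytes

The += on the last line works because total_disk_usage is a property on the user model, defined in the next file.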
--- a/lib/galaxy/model/__init__.py Tue Jul 26 13:18:22 2011 -0400 +++ b/lib/galaxy/model/__init__.py Tue Jul 26 13:56:16 2011 -0400 @@ -70,6 +70,27 @@ if role not in roles: roles.append( role ) return roles + def get_disk_usage( self, nice_size=False ): + rval = 0 + if self.disk_usage is not None: + rval = self.disk_usage + if nice_size: + rval = galaxy.datatypes.data.nice_size( rval ) + return rval + def set_disk_usage( self, bytes ): + self.disk_usage = bytes + total_disk_usage = property( get_disk_usage, set_disk_usage ) + def calculate_disk_usage( self ): + dataset_ids = [] + total = 0 + # this can be a huge number and can run out of memory, so we avoid the mappers + db_session = object_session( self ) + for history in db_session.query( History ).enable_eagerloads( False ).filter_by( user_id=self.id ).yield_per( 1000 ): + for hda in db_session.query( HistoryDatasetAssociation ).enable_eagerloads( False ).filter_by( history_id=history.id, purged=False ).yield_per( 1000 ): + if not hda.dataset.id in dataset_ids and not hda.dataset.purged and not hda.dataset.library_associations: + dataset_ids.append( hda.dataset.id ) + total += hda.dataset.get_total_size() + return total
class Job( object ): """ @@ -349,7 +370,7 @@ self.galaxy_sessions.append( GalaxySessionToHistoryAssociation( galaxy_session, self ) ) else: self.galaxy_sessions.append( association ) - def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid = True ): + def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid=True, quota=True ): if isinstance( dataset, Dataset ): dataset = HistoryDatasetAssociation(dataset=dataset) object_session( self ).add( dataset ) @@ -367,6 +388,8 @@ else: if set_hid: dataset.hid = self._next_hid() + if quota and self.user: + self.user.total_disk_usage += dataset.quota_amount( self.user ) dataset.history = self if genome_build not in [None, '?']: self.genome_build = genome_build @@ -378,6 +401,9 @@ name = self.name if not target_user: target_user = self.user + quota = True + if target_user == self.user: + quota = False new_history = History( name=name, user=target_user ) db_session = object_session( self ) db_session.add( new_history ) @@ -393,8 +419,8 @@ hdas = self.active_datasets for hda in hdas: # Copy HDA. - new_hda = hda.copy( copy_children=True, target_history=new_history ) - new_history.add_dataset( new_hda, set_hid = False ) + new_hda = hda.copy( copy_children=True ) + new_history.add_dataset( new_hda, set_hid = False, quota=quota ) db_session.add( new_hda ) db_session.flush() # Copy annotation. @@ -741,6 +767,10 @@ def set_size( self ): """Returns the size of the data on disk""" return self.dataset.set_size() + def get_total_size( self ): + return self.dataset.get_total_size() + def set_total_size( self ): + return self.dataset.set_total_size() def has_data( self ): """Detects whether there is any data""" return self.dataset.has_data() @@ -922,7 +952,7 @@ self.history = history self.copied_from_history_dataset_association = copied_from_history_dataset_association self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association - def copy( self, copy_children = False, parent_id = None, target_history = None ): + def copy( self, copy_children = False, parent_id = None ): hda = HistoryDatasetAssociation( hid=self.hid, name=self.name, info=self.info, @@ -934,8 +964,7 @@ visible=self.visible, deleted=self.deleted, parent_id=parent_id, - copied_from_history_dataset_association=self, - history = target_history ) + copied_from_history_dataset_association=self ) object_session( self ).add( hda ) object_session( self ).flush() hda.set_size() @@ -1017,6 +1046,26 @@ return hda_name def get_access_roles( self, trans ): return self.dataset.get_access_roles( trans ) + def quota_amount( self, user ): + """ + If the user has multiple instances of this dataset, it will not affect their disk usage statistic. + """ + rval = 0 + # Anon users are handled just by their single history size. + if not user: + return rval + # Gets an HDA and its children's disk usage, if the user does not already have an association of the same dataset + if not self.dataset.library_associations and not self.purged and not self.dataset.purged: + for hda in self.dataset.history_associations: + if hda.id == self.id: + continue + if not hda.purged and hda.history and hda.history.user and hda.history.user == user: + break + else: + rval += self.get_total_size() + for child in self.children: + rval += child.get_disk_usage( user ) + return rval
class HistoryDatasetAssociationDisplayAtAuthorization( object ): def __init__( self, hda=None, user=None, site=None ): @@ -1467,6 +1516,13 @@ self.histories.append( GalaxySessionToHistoryAssociation( self, history ) ) else: self.histories.append( association ) + def get_disk_usage( self ): + if self.disk_usage is None: + return 0 + return self.disk_usage + def set_disk_usage( self, bytes ): + self.disk_usage = bytes + total_disk_usage = property( get_disk_usage, set_disk_usage )
class GalaxySessionToHistoryAssociation( object ): def __init__( self, galaxy_session, history ):
--- a/lib/galaxy/web/controllers/dataset.py Tue Jul 26 13:18:22 2011 -0400 +++ b/lib/galaxy/web/controllers/dataset.py Tue Jul 26 13:56:16 2011 -0400 @@ -9,6 +9,7 @@ from galaxy.util import inflector from galaxy.model.item_attrs import * from galaxy.model import LibraryDatasetDatasetAssociation, HistoryDatasetAssociation +from galaxy.web.framework.helpers import to_unicode
import pkg_resources; pkg_resources.require( "Paste" ) @@ -383,6 +384,188 @@ return trans.stream_template_mako( "/dataset/large_file.mako", truncated_data = open( data.file_name ).read(max_peek_size), data = data ) + + @web.expose + def edit(self, trans, dataset_id=None, filename=None, hid=None, **kwd): + """Allows user to modify parameters of an HDA.""" + message = None + status = 'done' + refresh_frames = [] + error = False + def __ok_to_edit_metadata( dataset_id ): + #prevent modifying metadata when dataset is queued or running as input/output + #This code could be more efficient, i.e. by using mappers, but to prevent slowing down loading a History panel, we'll leave the code here for now + for job_to_dataset_association in trans.sa_session.query( self.app.model.JobToInputDatasetAssociation ) \ + .filter_by( dataset_id=dataset_id ) \ + .all() \ + + trans.sa_session.query( self.app.model.JobToOutputDatasetAssociation ) \ + .filter_by( dataset_id=dataset_id ) \ + .all(): + if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]: + return False + return True + if hid is not None: + history = trans.get_history() + # TODO: hid handling + data = history.datasets[ int( hid ) - 1 ] + id = None + elif dataset_id is not None: + id = trans.app.security.decode_id( dataset_id ) + data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) + else: + trans.log_event( "dataset_id and hid are both None, cannot load a dataset to edit" ) + return trans.show_error_message( "You must provide a history dataset id to edit" ) + if data is None: + trans.log_event( "Problem retrieving dataset (encoded: %s, decoded: %s) with history id %s." % ( str( dataset_id ), str( id ), str( hid ) ) ) + return trans.show_error_message( "History dataset id is invalid" ) + if dataset_id is not None and data.history.user is not None and data.history.user != trans.user: + trans.log_event( "User attempted to edit an HDA they do not own (encoded: %s, decoded: %s)" % ( dataset_id, id ) ) + # Do not reveal the dataset's existence + return trans.show_error_message( "History dataset id is invalid" ) + current_user_roles = trans.get_current_user_roles() + if data.history.user and not data.dataset.has_manage_permissions_roles( trans ): + # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time, + # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS + # permission. In this case, we'll reset this permission to the hda user's private role. + manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) + permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] } + trans.app.security_agent.set_dataset_permission( data.dataset, permissions ) + if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ): + if data.state == trans.model.Dataset.states.UPLOAD: + return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." 
) + params = util.Params( kwd, sanitize=False ) + if params.change: + # The user clicked the Save button on the 'Change data type' form + if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change: + #prevent modifying datatype when dataset is queued or running as input/output + if not __ok_to_edit_metadata( data.id ): + message = "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them." + error = True + else: + trans.app.datatypes_registry.change_datatype( data, params.datatype, set_meta = not trans.app.config.set_metadata_externally ) + trans.sa_session.flush() + if trans.app.config.set_metadata_externally: + trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior + message = "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype ) + refresh_frames=['history'] + else: + message = "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype ) + error = True + elif params.save: + # The user clicked the Save button on the 'Edit Attributes' form + data.name = params.name + data.info = params.info + message = '' + if __ok_to_edit_metadata( data.id ): + # The following for loop will save all metadata_spec items + for name, spec in data.datatype.metadata_spec.items(): + if spec.get("readonly"): + continue + optional = params.get("is_"+name, None) + other = params.get("or_"+name, None) + if optional and optional == 'true': + # optional element... == 'true' actually means it is NOT checked (and therefore omitted) + setattr(data.metadata, name, None) + else: + if other: + setattr( data.metadata, name, other ) + else: + setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) ) + data.datatype.after_setting_metadata( data ) + # Sanitize annotation before adding it. + if params.annotation: + annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' ) + self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation ) + # If setting metadata previously failed and all required elements have now been set, clear the failed state. + if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta(): + data._state = None + trans.sa_session.flush() + message = "Attributes updated%s" % message + refresh_frames=['history'] + else: + trans.sa_session.flush() + message = "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata." + status = "warning" + refresh_frames=['history'] + elif params.detect: + # The user clicked the Auto-detect button on the 'Edit Attributes' form + #prevent modifying metadata when dataset is queued or running as input/output + if not __ok_to_edit_metadata( data.id ): + message = "This dataset is currently being used as input or output. You cannot change metadata until the jobs have completed or you have canceled them." 
+ error = True + else: + for name, spec in data.metadata.spec.items(): + # We need to be careful about the attributes we are resetting + if name not in [ 'name', 'info', 'dbkey', 'base_name' ]: + if spec.get( 'default' ): + setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) ) + if trans.app.config.set_metadata_externally: + message = 'Attributes have been queued to be updated' + trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } ) + else: + message = 'Attributes updated' + data.set_meta() + data.datatype.after_setting_metadata( data ) + trans.sa_session.flush() + refresh_frames=['history'] + elif params.convert_data: + target_type = kwd.get("target_type", None) + if target_type: + message = data.datatype.convert_dataset(trans, data, target_type) + refresh_frames=['history'] + elif params.update_roles_button: + if not trans.user: + return trans.show_error_message( "You must be logged in if you want to change permissions." ) + if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ): + access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action ) + manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) + # The user associated the DATASET_ACCESS permission on the dataset with 1 or more roles. We + # need to ensure that they did not associate roles that would cause accessibility problems. + permissions, in_roles, error, message = \ + trans.app.security_agent.derive_roles_from_access( trans, data.dataset.id, 'root', **kwd ) + if error: + # Keep the original role associations for the DATASET_ACCESS permission on the dataset. + permissions[ access_action ] = data.dataset.get_access_roles( trans ) + status = 'error' + else: + error = trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions ) + if error: + message += error + status = 'error' + else: + message = 'Your changes completed successfully.' + trans.sa_session.refresh( data.dataset ) + else: + message = "You are not authorized to change this dataset's permissions" + error = True + else: + if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey: + # Copy dbkey into metadata, for backwards compatability + # This looks like it does nothing, but getting the dbkey + # returns the metadata dbkey unless it is None, in which + # case it resorts to the old dbkey. Setting the dbkey + # sets it properly in the metadata + #### This is likely no longer required, since the dbkey exists entirely within metadata (the old_dbkey field is gone): REMOVE ME? + data.metadata.dbkey = data.dbkey + # let's not overwrite the imported datatypes module with the variable datatypes? 
+ # the built-in 'id' is overwritten in lots of places as well + ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ] + ldatatypes.sort() + all_roles = trans.app.security_agent.get_legitimate_roles( trans, data.dataset, 'root' ) + if error: + status = 'error' + return trans.fill_template( "/dataset/edit_attributes.mako", + data=data, + data_annotation=self.get_item_annotation_str( trans.sa_session, trans.user, data ), + datatypes=ldatatypes, + current_user_roles=current_user_roles, + all_roles=all_roles, + message=message, + status=status, + dataset_id=dataset_id, + refresh_frames=refresh_frames ) + else: + return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( dataset_id ) )
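The relocated edit() method (like the other new dataset controller actions) takes an encoded dataset_id and deliberately answers "History dataset id is invalid" both when the id fails to resolve and when the HDA belongs to someone else, so a probing user cannot tell the two cases apart. The ownership gate, reduced to a testable function with the decoder and lookup injected (names hypothetical):

    def load_owned_hda(decode, lookup, current_user, encoded_id):
        try:
            raw_id = decode(encoded_id)
        except Exception:
            return None
        hda = lookup(raw_id)
        if hda is None:
            return None
        owner = hda.history.user
        if owner is not None and owner != current_user:
            return None  # same answer as "not found": do not reveal existence
        return hda

    class NS(object):
        def __init__(self, **kw):
            self.__dict__.update(kw)

    table = {7: NS(history=NS(user='alice'))}
    decode = lambda s: int(s, 16)
    assert load_owned_hda(decode, table.get, 'alice', '7') is not None
    assert load_owned_hda(decode, table.get, 'mallory', '7') is None  # foreign HDA
    assert load_owned_hda(decode, table.get, 'alice', 'zz') is None   # bad id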
@web.expose @web.require_login( "see all available datasets" ) @@ -654,111 +837,190 @@ return trans.fill_template_mako( "dataset/display_application/display.mako", msg = msg, display_app = display_app, display_link = display_link, refresh = refresh ) return trans.show_error_message( 'You do not have permission to view this dataset at an external display application.' )
- def _undelete( self, trans, id ): + def _delete( self, trans, dataset_id ): + message = None + status = 'done' + id = None try: - id = int( id ) - except ValueError, e: - return False - history = trans.get_history() - data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) - if data and data.undeletable: + id = trans.app.security.decode_id( dataset_id ) + history = trans.get_history() + hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) + assert hda, 'Invalid HDA: %s' % id # Walk up parent datasets to find the containing history - topmost_parent = data + topmost_parent = hda + while topmost_parent.parent: + topmost_parent = topmost_parent.parent + assert topmost_parent in trans.history.datasets, "Data does not belong to current history" + # Mark deleted and cleanup + hda.mark_deleted() + hda.clear_associated_files() + trans.log_event( "Dataset id %s marked as deleted" % str(id) ) + if hda.parent_id is None and len( hda.creating_job_associations ) > 0: + # Mark associated job for deletion + job = hda.creating_job_associations[0].job + if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]: + # Are *all* of the job's other output datasets deleted? + if job.check_if_output_datasets_deleted(): + job.mark_deleted( self.app.config.get_bool( 'enable_job_running', True ), + self.app.config.get_bool( 'track_jobs_in_database', False ) ) + self.app.job_manager.job_stop_queue.put( job.id ) + trans.sa_session.flush() + except Exception, e: + msg = 'HDA deletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id ) + log.exception( msg ) + trans.log_event( msg ) + message = 'Dataset deletion failed' + status = 'error' + return ( message, status ) + + def _undelete( self, trans, dataset_id ): + message = None + status = 'done' + id = None + try: + id = trans.app.security.decode_id( dataset_id ) + history = trans.get_history() + hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) + assert hda and hda.undeletable, 'Invalid HDA: %s' % id + # Walk up parent datasets to find the containing history + topmost_parent = hda while topmost_parent.parent: topmost_parent = topmost_parent.parent assert topmost_parent in history.datasets, "Data does not belong to current history" # Mark undeleted - data.mark_undeleted() + hda.mark_undeleted() trans.sa_session.flush() trans.log_event( "Dataset id %s has been undeleted" % str(id) ) - return True - return False + except Exception, e: + msg = 'HDA undeletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id ) + log.exception( msg ) + trans.log_event( msg ) + message = 'Dataset undeletion failed' + status = 'error' + return ( message, status )
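_delete and _undelete now validate with assert inside a blanket try/except, so every failure mode (bad encoded id, missing HDA, foreign history) collapses into one logged ( message, status ) pair instead of a bare False. The shape of that contract, as a runnable skeleton:

    def guarded(action, failure_message):
        # An assert or raise inside action() becomes a uniform ('...', 'error') result.
        try:
            action()
            return (None, 'done')
        except Exception:
            return (failure_message, 'error')

    def bad_hda():
        assert False, 'Invalid HDA: 42'

    assert guarded(lambda: None, 'Dataset deletion failed') == (None, 'done')
    assert guarded(bad_hda, 'Dataset deletion failed') == ('Dataset deletion failed', 'error')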
- def _unhide( self, trans, id ): + def _unhide( self, trans, dataset_id ): try: - id = int( id ) - except ValueError, e: + id = trans.app.security.decode_id( dataset_id ) + except: return False history = trans.get_history() - data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) - if data: + hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) + if hda: # Walk up parent datasets to find the containing history - topmost_parent = data + topmost_parent = hda while topmost_parent.parent: topmost_parent = topmost_parent.parent assert topmost_parent in history.datasets, "Data does not belong to current history" # Mark undeleted - data.mark_unhidden() + hda.mark_unhidden() trans.sa_session.flush() trans.log_event( "Dataset id %s has been unhidden" % str(id) ) return True return False
- def _purge( self, trans, id ): + def _purge( self, trans, dataset_id ): + message = None + status = 'done' try: - id = int( id ) - except ValueError, e: - return False - hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) - # Invalid HDA or not deleted - if not hda or not hda.history or not hda.deleted: - return False - # If the user is anonymous, make sure the HDA is owned by the current session. - if not hda.history.user and trans.galaxy_session.id not in [ s.id for s in hda.history.galaxy_sessions ]: - return False - # If the user is known, make sure the HDA is owned by the current user. - if hda.history.user and hda.history.user != trans.user: - return False - # HDA is purgeable - hda.purged = True - trans.sa_session.add( hda ) - trans.log_event( "HDA id %s has been purged" % hda.id ) - # Don't delete anything if there are active HDAs or any LDDAs, even if - # the LDDAs are deleted. Let the cleanup scripts get it in the latter - # case. - if hda.dataset.user_can_purge: - try: - hda.dataset.full_delete() - trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) ) - trans.sa_session.add( hda.dataset ) - except: - log.exception( 'Unable to purge dataset (%s) on purge of hda (%s):' % ( hda.dataset.id, hda.id ) ) - trans.sa_session.flush() - return True + id = trans.app.security.decode_id( dataset_id ) + history = trans.get_history() + user = trans.get_user() + hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) + # Invalid HDA + assert hda, 'Invalid history dataset ID' + # Walk up parent datasets to find the containing history + topmost_parent = hda + while topmost_parent.parent: + topmost_parent = topmost_parent.parent + assert topmost_parent in history.datasets, "Data does not belong to current history" + # If the user is anonymous, make sure the HDA is owned by the current session. + if not user: + assert trans.galaxy_session.id in [ s.id for s in hda.history.galaxy_sessions ], 'Invalid history dataset ID' + # If the user is known, make sure the HDA is owned by the current user. + else: + assert topmost_parent.history.user == trans.user, 'Invalid history dataset ID' + # HDA is not deleted + assert hda.deleted, 'History dataset is not marked as deleted' + # HDA is purgeable + # Decrease disk usage first + if user: + user.total_disk_usage -= hda.quota_amount( user ) + # Mark purged + hda.purged = True + trans.sa_session.add( hda ) + trans.log_event( "HDA id %s has been purged" % hda.id ) + # Don't delete anything if there are active HDAs or any LDDAs, even if + # the LDDAs are deleted. Let the cleanup scripts get it in the latter + # case. + if hda.dataset.user_can_purge: + try: + hda.dataset.full_delete() + trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) ) + trans.sa_session.add( hda.dataset ) + except: + log.exception( 'Unable to purge dataset (%s) on purge of HDA (%s):' % ( hda.dataset.id, hda.id ) ) + trans.sa_session.flush() + except Exception, e: + msg = 'HDA purge failed (encoded: %s, decoded: %s)' % ( dataset_id, id ) + log.exception( msg ) + trans.log_event( msg ) + message = 'Dataset removal from disk failed' + status = 'error' + return ( message, status )
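Note the ordering inside _purge(): the user's total_disk_usage is decremented before hda.purged is set, because quota_amount() charges nothing for an HDA that is already purged. A sketch of why the two statements cannot be swapped (toy objects, quota_amount simplified):

    class NS(object):
        def __init__(self, **kw):
            self.__dict__.update(kw)

    def quota_amount(hda):
        return 0 if hda.purged else hda.size

    user = NS(total_disk_usage=500)
    hda = NS(purged=False, size=200)

    # Correct order: give the quota back first, then mark purged.
    user.total_disk_usage -= quota_amount(hda)
    hda.purged = True
    assert user.total_disk_usage == 300  # swapped order would leave it at 500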
@web.expose - def undelete( self, trans, id ): - if self._undelete( trans, id ): - return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True ) ) - raise Exception( "Error undeleting" ) + def delete( self, trans, dataset_id, filename, show_deleted_on_refresh = False ): + message, status = self._delete( trans, dataset_id ) + return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
@web.expose - def unhide( self, trans, id ): - if self._unhide( trans, id ): + def delete_async( self, trans, dataset_id, filename ): + message, status = self._delete( trans, dataset_id ) + if status == 'done': + return "OK" + else: + raise Exception( message ) + + @web.expose + def undelete( self, trans, dataset_id, filename ): + message, status = self._undelete( trans, dataset_id ) + return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True, message=message, status=status ) ) + + @web.expose + def undelete_async( self, trans, dataset_id, filename ): + message, status = self._undelete( trans, dataset_id ) + if status == 'done': + return "OK" + else: + raise Exception( message ) + + @web.expose + def unhide( self, trans, dataset_id, filename ): + if self._unhide( trans, dataset_id ): return trans.response.send_redirect( web.url_for( controller='root', action='history', show_hidden = True ) ) raise Exception( "Error unhiding" )
@web.expose - def undelete_async( self, trans, id ): - if self._undelete( trans, id ): - return "OK" - raise Exception( "Error undeleting" ) - - @web.expose - def purge( self, trans, id ): - if not trans.app.config.allow_user_dataset_purge: - raise Exception( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." ) - if self._purge( trans, id ): - return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True ) ) - raise Exception( "Error removing disk file" ) + def purge( self, trans, dataset_id, filename, show_deleted_on_refresh = False ): + if trans.app.config.allow_user_dataset_purge: + message, status = self._purge( trans, dataset_id ) + else: + message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." + status = 'error' + return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
@web.expose - def purge_async( self, trans, id ): - if not trans.app.config.allow_user_dataset_purge: - raise Exception( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." ) - if self._purge( trans, id ): + def purge_async( self, trans, dataset_id, filename ): + if trans.app.config.allow_user_dataset_purge: + message, status = self._purge( trans, dataset_id ) + else: + message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." + status = 'error' + if status == 'done': return "OK" - raise Exception( "Error removing disk file" ) + else: + raise Exception( message )
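Each action now comes in a redirecting form and an *_async form that share the same helper; the async variant returns the literal string "OK" (which the history template's success callback checks) and raises otherwise. The adapter pattern, written once here for illustration (the controller spells each pair out by hand):

    def as_async_response(op):
        # Adapt a (message, status) helper to the *_async contract.
        def responder(*args, **kwargs):
            message, status = op(*args, **kwargs)
            if status == 'done':
                return "OK"
            raise Exception(message)
        return responder

    ok = as_async_response(lambda: (None, 'done'))
    assert ok() == "OK"
    failing = as_async_response(lambda: ('Dataset deletion failed', 'error'))
    try:
        failing()
    except Exception:
        pass
    else:
        raise AssertionError('expected an exception')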
@web.expose def show_params( self, trans, dataset_id=None, from_noframe=None, **kwd ):
--- a/lib/galaxy/web/controllers/root.py Tue Jul 26 13:18:22 2011 -0400 +++ b/lib/galaxy/web/controllers/root.py Tue Jul 26 13:56:16 2011 -0400 @@ -8,7 +8,6 @@ from galaxy.util.sanitize_html import sanitize_html from galaxy.model.orm import * from galaxy.model.item_attrs import UsesAnnotations -from galaxy.web.framework.helpers import to_unicode
log = logging.getLogger( __name__ )
@@ -99,11 +98,14 @@ return trans.fill_template_mako( "/my_data.mako" )
@web.expose - def history( self, trans, as_xml=False, show_deleted=False, show_hidden=False, hda_id=None ): + def history( self, trans, as_xml=False, show_deleted=False, show_hidden=False, hda_id=None, **kwd ): """ Display the current history, creating a new history if necessary. NOTE: No longer accepts "id" or "template" options for security reasons. """ + params = util.Params( kwd ) + message = params.get( 'message', None ) + status = params.get( 'status', 'done' ) if trans.app.config.require_login and not trans.user: return trans.fill_template( '/no_access.mako', message = 'Please log in to access Galaxy histories.' ) history = trans.get_history( create=True ) @@ -123,7 +125,9 @@ datasets = datasets, hda_id = hda_id, show_deleted = show_deleted, - show_hidden=show_hidden ) + show_hidden=show_hidden, + message=message, + status=status )
@web.expose def dataset_state ( self, trans, id=None, stamp=None ): @@ -160,9 +164,13 @@ # Create new HTML for any that have changed rval = {} if ids is not None and states is not None: - ids = map( int, ids.split( "," ) ) + ids = ids.split( "," ) states = states.split( "," ) - for id, state in zip( ids, states ): + for encoded_id, state in zip( ids, states ): + try: + id = int( trans.app.security.decode_id( encoded_id ) ) + except: + id = int( encoded_id ) data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) if data.state != state: job_hda = data @@ -175,7 +183,7 @@ force_history_refresh = tool.force_history_refresh if not job_hda.visible: force_history_refresh = True - rval[id] = { + rval[encoded_id] = { "state": data.state, "html": unicode( trans.fill_template( "root/history_item.mako", data=data, hid=data.hid ), 'utf-8' ), "force_history_refresh": force_history_refresh @@ -288,237 +296,6 @@ else: yield "No data with id=%d" % id
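history_item_updater now round-trips encoded ids, but keeps an int() fallback so history pages rendered before this change continue to poll correctly. The resolution step as a small function (decoder injected; the 'enc' prefix is purely illustrative):

    def resolve_id(decode, value):
        # Prefer the encoded form; tolerate legacy integer ids during rollout.
        try:
            return int(decode(value))
        except Exception:
            return int(value)

    def demo_decode(value):
        if value.startswith('enc'):
            return value[3:]
        raise ValueError(value)

    assert resolve_id(demo_decode, 'enc42') == 42
    assert resolve_id(demo_decode, '42') == 42  # pre-change page still works

The response dict is keyed by encoded_id, matching the tracked_datasets keys in the updated history.mako below.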
- @web.expose - def edit(self, trans, id=None, hid=None, **kwd): - """Allows user to modify parameters of an HDA.""" - message = '' - error = False - def __ok_to_edit_metadata( dataset_id ): - #prevent modifying metadata when dataset is queued or running as input/output - #This code could be more efficient, i.e. by using mappers, but to prevent slowing down loading a History panel, we'll leave the code here for now - for job_to_dataset_association in trans.sa_session.query( self.app.model.JobToInputDatasetAssociation ) \ - .filter_by( dataset_id=dataset_id ) \ - .all() \ - + trans.sa_session.query( self.app.model.JobToOutputDatasetAssociation ) \ - .filter_by( dataset_id=dataset_id ) \ - .all(): - if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]: - return False - return True - if hid is not None: - history = trans.get_history() - # TODO: hid handling - data = history.datasets[ int( hid ) - 1 ] - elif id is not None: - data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) - else: - trans.log_event( "Problem loading dataset id %s with history id %s." % ( str( id ), str( hid ) ) ) - return trans.show_error_message( "Problem loading dataset." ) - if data is None: - trans.log_event( "Problem retrieving dataset id %s with history id." % ( str( id ), str( hid ) ) ) - return trans.show_error_message( "Problem retrieving dataset." ) - if id is not None and data.history.user is not None and data.history.user != trans.user: - return trans.show_error_message( "This instance of a dataset (%s) in a history does not belong to you." % ( data.id ) ) - current_user_roles = trans.get_current_user_roles() - if data.history.user and not data.dataset.has_manage_permissions_roles( trans ): - # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time, - # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS - # permission. In this case, we'll reset this permission to the hda user's private role. - manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) - permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] } - trans.app.security_agent.set_dataset_permission( data.dataset, permissions ) - if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ): - if data.state == trans.model.Dataset.states.UPLOAD: - return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." ) - params = util.Params( kwd, sanitize=False ) - if params.change: - # The user clicked the Save button on the 'Change data type' form - if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change: - #prevent modifying datatype when dataset is queued or running as input/output - if not __ok_to_edit_metadata( data.id ): - return trans.show_error_message( "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them." 
) - trans.app.datatypes_registry.change_datatype( data, params.datatype, set_meta = not trans.app.config.set_metadata_externally ) - trans.sa_session.flush() - if trans.app.config.set_metadata_externally: - trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior - return trans.show_ok_message( "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype ), refresh_frames=['history'] ) - else: - return trans.show_error_message( "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype ) ) - elif params.save: - # The user clicked the Save button on the 'Edit Attributes' form - data.name = params.name - data.info = params.info - message = '' - if __ok_to_edit_metadata( data.id ): - # The following for loop will save all metadata_spec items - for name, spec in data.datatype.metadata_spec.items(): - if spec.get("readonly"): - continue - optional = params.get("is_"+name, None) - other = params.get("or_"+name, None) - if optional and optional == 'true': - # optional element... == 'true' actually means it is NOT checked (and therefore omitted) - setattr(data.metadata, name, None) - else: - if other: - setattr( data.metadata, name, other ) - else: - setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) ) - data.datatype.after_setting_metadata( data ) - # Sanitize annotation before adding it. - if params.annotation: - annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' ) - self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation ) - # If setting metadata previously failed and all required elements have now been set, clear the failed state. - if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta(): - data._state = None - trans.sa_session.flush() - return trans.show_ok_message( "Attributes updated%s" % message, refresh_frames=['history'] ) - else: - trans.sa_session.flush() - return trans.show_warn_message( "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata.", refresh_frames=['history'] ) - elif params.detect: - # The user clicked the Auto-detect button on the 'Edit Attributes' form - #prevent modifying metadata when dataset is queued or running as input/output - if not __ok_to_edit_metadata( data.id ): - return trans.show_error_message( "This dataset is currently being used as input or output. You cannot change metadata until the jobs have completed or you have canceled them." 
) - for name, spec in data.metadata.spec.items(): - # We need to be careful about the attributes we are resetting - if name not in [ 'name', 'info', 'dbkey', 'base_name' ]: - if spec.get( 'default' ): - setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) ) - if trans.app.config.set_metadata_externally: - message = 'Attributes have been queued to be updated' - trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } ) - else: - message = 'Attributes updated' - data.set_meta() - data.datatype.after_setting_metadata( data ) - trans.sa_session.flush() - return trans.show_ok_message( message, refresh_frames=['history'] ) - elif params.convert_data: - target_type = kwd.get("target_type", None) - if target_type: - message = data.datatype.convert_dataset(trans, data, target_type) - return trans.show_ok_message( message, refresh_frames=['history'] ) - elif params.update_roles_button: - if not trans.user: - return trans.show_error_message( "You must be logged in if you want to change permissions." ) - if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ): - access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action ) - manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) - # The user associated the DATASET_ACCESS permission on the dataset with 1 or more roles. We - # need to ensure that they did not associate roles that would cause accessibility problems. - permissions, in_roles, error, message = \ - trans.app.security_agent.derive_roles_from_access( trans, data.dataset.id, 'root', **kwd ) - if error: - # Keep the original role associations for the DATASET_ACCESS permission on the dataset. - permissions[ access_action ] = data.dataset.get_access_roles( trans ) - status = 'error' - else: - error = trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions ) - if error: - message += error - status = 'error' - else: - message = 'Your changes completed successfully.' - trans.sa_session.refresh( data.dataset ) - else: - return trans.show_error_message( "You are not authorized to change this dataset's permissions" ) - if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey: - # Copy dbkey into metadata, for backwards compatability - # This looks like it does nothing, but getting the dbkey - # returns the metadata dbkey unless it is None, in which - # case it resorts to the old dbkey. Setting the dbkey - # sets it properly in the metadata - #### This is likely no longer required, since the dbkey exists entirely within metadata (the old_dbkey field is gone): REMOVE ME? - data.metadata.dbkey = data.dbkey - # let's not overwrite the imported datatypes module with the variable datatypes? 
- # the built-in 'id' is overwritten in lots of places as well - ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ] - ldatatypes.sort() - all_roles = trans.app.security_agent.get_legitimate_roles( trans, data.dataset, 'root' ) - if error: - status = 'error' - else: - status = 'done' - return trans.fill_template( "/dataset/edit_attributes.mako", - data=data, - data_annotation=self.get_item_annotation_str( trans.sa_session, trans.user, data ), - datatypes=ldatatypes, - current_user_roles=current_user_roles, - all_roles=all_roles, - message=message, - status=status ) - else: - return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( id ) ) - - def __delete_dataset( self, trans, id ): - data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) - if data: - # Walk up parent datasets to find the containing history - topmost_parent = data - while topmost_parent.parent: - topmost_parent = topmost_parent.parent - assert topmost_parent in trans.history.datasets, "Data does not belong to current history" - # Mark deleted and cleanup - data.mark_deleted() - data.clear_associated_files() - trans.log_event( "Dataset id %s marked as deleted" % str(id) ) - if data.parent_id is None and len( data.creating_job_associations ) > 0: - # Mark associated job for deletion - job = data.creating_job_associations[0].job - if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]: - # Are *all* of the job's other output datasets deleted? - if job.check_if_output_datasets_deleted(): - job.mark_deleted( self.app.config.get_bool( 'enable_job_running', True ), - self.app.config.get_bool( 'track_jobs_in_database', False ) ) - self.app.job_manager.job_stop_queue.put( job.id ) - trans.sa_session.flush() - - @web.expose - def delete( self, trans, id = None, show_deleted_on_refresh = False, **kwd): - if id: - if isinstance( id, list ): - dataset_ids = id - else: - dataset_ids = [ id ] - history = trans.get_history() - for id in dataset_ids: - try: - id = int( id ) - except: - continue - self.__delete_dataset( trans, id ) - return self.history( trans, show_deleted = show_deleted_on_refresh ) - - @web.expose - def delete_async( self, trans, id = None, **kwd): - if id: - try: - id = int( id ) - except: - return "Dataset id '%s' is invalid" %str( id ) - self.__delete_dataset( trans, id ) - return "OK" - - @web.expose - def purge( self, trans, id = None, show_deleted_on_refresh = False, **kwd ): - if not trans.app.config.allow_user_dataset_purge: - return trans.show_error_message( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." 
) - hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( int( id ) ) - if bool( hda.dataset.active_history_associations or hda.dataset.library_associations ): - return trans.show_error_message( "Unable to purge: LDDA(s) or active HDA(s) exist" ) - elif hda.dataset.purged: - return trans.show_error_message( "Unable to purge: dataset is already purged" ) - os.unlink( hda.dataset.file_name ) - if os.path.exists( hda.extra_files_path ): - shutil.rmtree( hda.extra_files_path ) - hda.dataset.purged = True - trans.sa_session.add( hda.dataset ) - trans.sa_session.flush() - return self.history( trans, show_deleted = show_deleted_on_refresh ) - ## ---- History management -----------------------------------------------
@web.expose
--- a/lib/galaxy/web/framework/__init__.py Tue Jul 26 13:18:22 2011 -0400 +++ b/lib/galaxy/web/framework/__init__.py Tue Jul 26 13:56:16 2011 -0400 @@ -471,6 +471,7 @@ - associate new session with user - if old session had a history and it was not associated with a user, associate it with the new session, otherwise associate the current session's history with the user + - add the disk usage of the current session to the user's total disk usage """ # Set the previous session prev_galaxy_session = self.galaxy_session @@ -494,6 +495,10 @@ # If the previous galaxy session had a history, associate it with the new # session, but only if it didn't belong to a different user. history = prev_galaxy_session.current_history + if prev_galaxy_session.user is None: + # Increase the user's disk usage by the amount of the previous history's datasets if they didn't already own it. + for hda in history.datasets: + user.total_disk_usage += hda.quota_amount( user ) elif self.galaxy_session.current_history: history = self.galaxy_session.current_history if not history and \
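When an anonymous session is associated with a just-authenticated user, the session's history datasets start counting against that user's quota; quota_amount() (added above) keeps this idempotent by returning 0 for datasets the user already pays for. Reduced to a runnable stub:

    class StubHDA(object):
        def __init__(self, size):
            self.size = size
        def quota_amount(self, user):
            return self.size  # the real version dedups against existing HDAs

    class StubUser(object):
        def __init__(self):
            self.total_disk_usage = 0

    def charge_anonymous_history(user, datasets):
        for hda in datasets:
            user.total_disk_usage += hda.quota_amount(user)

    u = StubUser()
    charge_anonymous_history(u, [StubHDA(100), StubHDA(250)])
    assert u.total_disk_usage == 350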
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/scripts/set_user_disk_usage.py Tue Jul 26 13:56:16 2011 -0400 @@ -0,0 +1,76 @@ +#!/usr/bin/env python + +import os, sys +from ConfigParser import ConfigParser +from optparse import OptionParser + +parser = OptionParser() +parser.add_option( '-c', '--config', dest='config', help='Path to Galaxy config file (universe_wsgi.ini)', default='universe_wsgi.ini' ) +parser.add_option( '-u', '--username', dest='username', help='Username of user to update', default='all' ) +parser.add_option( '-e', '--email', dest='email', help='Email address of user to update', default='all' ) +parser.add_option( '--dry-run', dest='dryrun', help='Dry run (show changes but do not save to database)', action='store_true', default=False ) +( options, args ) = parser.parse_args() + +def init(): + + options.config = os.path.abspath( options.config ) + if options.username == 'all': + options.username = None + if options.email == 'all': + options.email = None + + os.chdir( os.path.dirname( options.config ) ) + sys.path.append( 'lib' ) + + from galaxy import eggs + import pkg_resources + + config = ConfigParser( dict( file_path = 'database/files', + database_connection = 'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) ) + config.read( os.path.basename( options.config ) ) + + from galaxy.model import mapping + + return mapping.init( config.get( 'app:main', 'file_path' ), config.get( 'app:main', 'database_connection' ), create_tables = False ) + +def quotacheck( sa_session, user ): + sa_session.refresh( user ) + current = user.get_disk_usage() + print user.username, '<' + user.email + '> current usage:', str( current ) + ',', + new = user.calculate_disk_usage() + sa_session.refresh( user ) + # usage changed while calculating, do it again + if user.get_disk_usage() != current: + print 'usage changed while calculating, trying again...' + return quotacheck( sa_session, user ) + # yes, still a small race condition between here and the flush + if new == current: + print 'no change' + else: + print 'new:', new + if not options.dryrun: + user.set_disk_usage( new ) + sa_session.add( user ) + sa_session.flush() + +if __name__ == '__main__': + print 'Loading Galaxy model...' + model = init() + sa_session = model.context.current + + if not options.username and not options.email: + user_count = sa_session.query( model.User ).count() + print 'Processing %i users...' % user_count + for i, user in enumerate( sa_session.query( model.User ).enable_eagerloads( False ).yield_per( 1000 ) ): + print '%3i%%' % int( float(i) / user_count * 100 ), + quotacheck( sa_session, user ) + print '100% complete' + sys.exit( 0 ) + elif options.username: + user = sa_session.query( model.User ).enable_eagerloads( False ).filter_by( username=options.username ).first() + elif options.email: + user = sa_session.query( model.User ).enable_eagerloads( False ).filter_by( email=options.email ).first() + if not user: + print 'User not found' + sys.exit( 1 ) + quotacheck( sa_session, user )
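Typical invocations of the new script, run from the Galaxy root directory (flags as defined by the OptionParser above; --dry-run reports the recomputed totals without writing them, and the example config path assumes the stock layout):

    python scripts/set_user_disk_usage.py -c universe_wsgi.ini --dry-run
    python scripts/set_user_disk_usage.py -c universe_wsgi.ini -e someone@example.org

With neither -u nor -e given, every user is processed; the recursion inside quotacheck() guards against usage changing under a long-running pass.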
--- a/templates/dataset/edit_attributes.mako Tue Jul 26 13:18:22 2011 -0400 +++ b/templates/dataset/edit_attributes.mako Tue Jul 26 13:56:16 2011 -0400 @@ -1,5 +1,6 @@ <%inherit file="/base.mako"/><%namespace file="/message.mako" import="render_msg" /> +<%namespace file="/message.mako" name="message_ns" import="javascripts" /><%def name="title()">${_('Edit Dataset Attributes')}</%def>
@@ -10,6 +11,7 @@ <%def name="javascripts()"> ${parent.javascripts()} ${h.js( "galaxy.base", "jquery.autocomplete", "autocomplete_tagging" )} + ${message_ns.javascripts()} </%def><%def name="datatype( dataset, datatypes )"> @@ -31,8 +33,7 @@ <div class="toolForm"><div class="toolFormTitle">${_('Edit Attributes')}</div><div class="toolFormBody"> - <form name="edit_attributes" action="${h.url_for( controller='root', action='edit' )}" method="post"> - <input type="hidden" name="id" value="${data.id}"/> + <form name="edit_attributes" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><label> Name: @@ -80,8 +81,7 @@ <input type="submit" name="save" value="${_('Save')}"/></div></form> - <form name="auto_detect" action="${h.url_for( controller='root', action='edit' )}" method="post"> - <input type="hidden" name="id" value="${data.id}"/> + <form name="auto_detect" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><div style="float: left; width: 250px; margin-right: 10px;"><input type="submit" name="detect" value="${_('Auto-detect')}"/> @@ -104,8 +104,7 @@ <div class="toolForm"><div class="toolFormTitle">${_('Convert to new format')}</div><div class="toolFormBody"> - <form name="convert_data" action="${h.url_for( controller='root', action='edit' )}" method="post"> - <input type="hidden" name="id" value="${data.id}"/> + <form name="convert_data" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><div style="float: left; width: 250px; margin-right: 10px;"><select name="target_type"> @@ -132,8 +131,7 @@ <div class="toolFormTitle">${_('Change data type')}</div><div class="toolFormBody"> %if data.datatype.allow_datatype_change: - <form name="change_datatype" action="${h.url_for( controller='root', action='edit' )}" method="post"> - <input type="hidden" name="id" value="${data.id}"/> + <form name="change_datatype" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><label> ${_('New Type')}: @@ -161,7 +159,7 @@
%if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ): <%namespace file="/dataset/security_common.mako" import="render_permission_form" /> - ${render_permission_form( data.dataset, data.get_display_name(), h.url_for( controller='root', action='edit', id=data.id ), all_roles )} + ${render_permission_form( data.dataset, data.get_display_name(), h.url_for( controller='dataset', action='edit', dataset_id=dataset_id ), all_roles )} %elif trans.user: <div class="toolForm"><div class="toolFormTitle">View Permissions</div>
--- a/templates/root/history.mako Tue Jul 26 13:18:22 2011 -0400 +++ b/templates/root/history.mako Tue Jul 26 13:56:16 2011 -0400 @@ -1,3 +1,5 @@ +<%namespace file="/message.mako" import="render_msg" /> + <% _=n_ %><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
@@ -105,6 +107,11 @@ }); };
+// Update the message for async operations +function render_message(message, status) { + $("div#message-container").html( "<div class=\"" + status + "message\">" + message + "</div><br/>" ); } + $(function() { var historywrapper = $("div.historyItemWrapper"); init_history_items(historywrapper); @@ -115,8 +122,8 @@ $(this).click( function() { $( '#historyItem-' + data_id + "> div.historyItemTitleBar" ).addClass( "spinner" ); $.ajax({ - url: "${h.url_for( action='delete_async', id='XXX' )}".replace( 'XXX', data_id ), - error: function() { alert( "Delete failed" ); }, + url: "${h.url_for( controller='dataset', action='delete_async', dataset_id='XXX' )}".replace( 'XXX', data_id ), + error: function() { render_message( "Dataset deletion failed", "error" ); }, success: function(msg) { if (msg === "OK") { %if show_deleted: @@ -133,7 +140,7 @@ %endif $(".tipsy").remove(); } else { - alert( "Delete failed" ); + render_message( "Dataset deletion failed", "error" ); } } }); @@ -147,8 +154,8 @@ $(this).click( function() { $( '#historyItem-' + data_id + " > div.historyItemTitleBar" ).addClass( "spinner" ); $.ajax({ - url: "${h.url_for( controller='dataset', action='undelete_async', id='XXX' )}".replace( 'XXX', data_id ), - error: function() { alert( "Undelete failed" ) }, + url: "${h.url_for( controller='dataset', action='undelete_async', dataset_id='XXX' )}".replace( 'XXX', data_id ), + error: function() { render_message( "Dataset undeletion failed", "error" ); }, success: function() { var to_update = {}; to_update[data_id] = "none"; @@ -165,8 +172,8 @@ $(this).click( function() { $( '#historyItem-' + data_id + " > div.historyItemTitleBar" ).addClass( "spinner" ); $.ajax({ - url: "${h.url_for( controller='dataset', action='purge_async', id='XXX' )}".replace( 'XXX', data_id ), - error: function() { alert( "Removal from disk failed" ) }, + url: "${h.url_for( controller='dataset', action='purge_async', dataset_id='XXX' )}".replace( 'XXX', data_id ), + error: function() { render_message( "Dataset removal from disk failed", "error" ) }, success: function() { var to_update = {}; to_update[data_id] = "none"; @@ -258,7 +265,7 @@
// Updater updater( - ${ h.to_json_string( dict([(data.id, data.state) for data in reversed( datasets ) if data.visible and data.state not in TERMINAL_STATES]) ) } + ${ h.to_json_string( dict([(trans.app.security.encode_id(data.id), data.state) for data in reversed( datasets ) if data.visible and data.state not in TERMINAL_STATES]) ) } );
// Navigate to a dataset. @@ -311,11 +318,11 @@ if ( val.force_history_refresh ){ force_history_refresh = true; } - delete tracked_datasets[ parseInt(id) ]; + delete tracked_datasets[id]; // When a dataset becomes terminal, check for changes in history disk size check_history_size = true; } else { - tracked_datasets[ parseInt(id) ] = val.state; + tracked_datasets[id] = val.state; } }); if ( force_history_refresh ) { @@ -458,6 +465,12 @@ </div> %endif
+<div id="message-container"> + %if message: + ${render_msg( message, status )} + %endif +</div> + %if not datasets:
<div class="infomessagesmall" id="emptyHistoryMessage"> @@ -467,7 +480,7 @@ ## Render requested datasets, ordered from newest to oldest %for data in reversed( datasets ): %if data.visible or show_hidden: - <div class="historyItemContainer" id="historyItemContainer-${data.id}"> + <div class="historyItemContainer" id="historyItemContainer-${trans.app.security.encode_id(data.id)}"> ${render_dataset( data, data.hid, show_deleted_on_refresh = show_deleted, for_editing = True )} </div> %endif
--- a/templates/root/history_common.mako Tue Jul 26 13:18:22 2011 -0400 +++ b/templates/root/history_common.mako Tue Jul 26 13:56:16 2011 -0400 @@ -39,9 +39,9 @@ can_edit = not ( data.deleted or data.purged ) %> %if not trans.user_is_admin() and not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ): - <div class="historyItemWrapper historyItem historyItem-${data_state} historyItem-noPermission" id="historyItem-${data.id}"> + <div class="historyItemWrapper historyItem historyItem-${data_state} historyItem-noPermission" id="historyItem-${dataset_id}"> %else: - <div class="historyItemWrapper historyItem historyItem-${data_state}" id="historyItem-${data.id}"> + <div class="historyItemWrapper historyItem historyItem-${data_state}" id="historyItem-${dataset_id}"> %endif
%if data.deleted or data.purged or data.dataset.purged: @@ -51,9 +51,9 @@ %else: This dataset has been deleted. %if for_editing: - Click <a href="${h.url_for( controller='dataset', action='undelete', id=data.id )}" class="historyItemUndelete" id="historyItemUndeleter-${data.id}" target="galaxy_history">here</a> to undelete + Click <a href="${h.url_for( controller='dataset', action='undelete', dataset_id=dataset_id )}" class="historyItemUndelete" id="historyItemUndeleter-${dataset_id}" target="galaxy_history">here</a> to undelete %if trans.app.config.allow_user_dataset_purge: - or <a href="${h.url_for( controller='dataset', action='purge', id=data.id )}" class="historyItemPurge" id="historyItemPurger-${data.id}" target="galaxy_history">here</a> to immediately remove it from disk. + or <a href="${h.url_for( controller='dataset', action='purge', dataset_id=dataset_id )}" class="historyItemPurge" id="historyItemPurger-${dataset_id}" target="galaxy_history">here</a> to immediately remove it from disk. %else: it. %endif @@ -64,7 +64,7 @@
%if data.visible is False: <div class="warningmessagesmall"> - <strong>This dataset has been hidden. Click <a href="${h.url_for( controller='dataset', action='unhide', id=data.id )}" class="historyItemUnhide" id="historyItemUnhider-${data.id}" target="galaxy_history">here</a> to unhide.</strong> + <strong>This dataset has been hidden. Click <a href="${h.url_for( controller='dataset', action='unhide', dataset_id=dataset_id )}" class="historyItemUnhide" id="historyItemUnhider-${dataset_id}" target="galaxy_history">here</a> to unhide.</strong></div> %endif
@@ -110,13 +110,13 @@ %elif data.purged: <span title="Cannot edit attributes of datasets removed from disk" class="icon-button edit_disabled tooltip"></span> %else: - <a class="icon-button edit tooltip" title="Edit attributes" href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main"></a> + <a class="icon-button edit tooltip" title="Edit attributes" href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main"></a> %endif %endif %endif %if for_editing: %if can_edit: - <a class="icon-button delete tooltip" title="Delete" href="${h.url_for( action='delete', id=data.id, show_deleted_on_refresh=show_deleted_on_refresh )}" id="historyItemDeleter-${data.id}"></a> + <a class="icon-button delete tooltip" title="Delete" href="${h.url_for( controller='dataset', action='delete', dataset_id=dataset_id, show_deleted_on_refresh=show_deleted_on_refresh )}" id="historyItemDeleter-${dataset_id}"></a> %else: <span title="Dataset is already deleted" class="icon-button delete_disabled tooltip"></span> %endif @@ -184,7 +184,7 @@ <div class="warningmessagesmall" style="margin: 4px 0 4px 0"> An error occurred setting the metadata for this dataset. %if can_edit: - You may be able to <a href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main">set it manually or retry auto-detection</a>. + You may be able to <a href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main">set it manually or retry auto-detection</a>. %endif </div> %endif @@ -193,7 +193,7 @@ format: <span class="${data.ext}">${data.ext}</span>, database: %if data.dbkey == '?' and can_edit: - <a href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main">${_(data.dbkey)}</a> + <a href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main">${_(data.dbkey)}</a> %else: <span class="${data.dbkey}">${_(data.dbkey)}</span> %endif
--- a/templates/user/index.mako Tue Jul 26 13:18:22 2011 -0400 +++ b/templates/user/index.mako Tue Jul 26 13:56:16 2011 -0400 @@ -22,6 +22,7 @@ <li><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller, webapp='community' )}">${_('Manage your information')}</a></li> %endif </ul> + <p>You are currently using <strong>${trans.user.get_disk_usage( nice_size=True )}</strong> of disk space in this Galaxy instance.</p> %else: %if not message: <p>${n_('You are currently not logged in.')}</p>
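get_disk_usage( nice_size=True ) runs the raw byte count through galaxy.datatypes.data.nice_size before display. Roughly what that formatting does (an approximation for illustration only; the real helper may differ in rounding and wording):

    def nice_size_approx(size):
        # Scale bytes to the largest unit that keeps the value readable.
        size = float(size)
        for unit in ('bytes', 'KB', 'MB', 'GB'):
            if size < 1024.0 or unit == 'GB':
                return '%.1f %s' % (size, unit)
            size /= 1024.0

    assert nice_size_approx(2048) == '2.0 KB'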
--- a/templates/webapps/community/repository/common.mako Tue Jul 26 13:18:22 2011 -0400 +++ b/templates/webapps/community/repository/common.mako Tue Jul 26 13:56:16 2011 -0400 @@ -50,39 +50,4 @@ onActivate: function(dtnode) { var cell = $("#file_contents"); var selected_value; - if (dtnode.data.key == 'root') { - selected_value = "${repository.repo_path}/"; - } else { - selected_value = dtnode.data.key; - }; - if (selected_value.charAt(selected_value.length-1) != '/') { - // Make ajax call - $.ajax( { - type: "POST", - url: "${h.url_for( controller='repository', action='get_file_contents' )}", - dataType: "json", - data: { file_path: selected_value }, - success : function ( data ) { - cell.html( '<label>'+data+'</label>' ) - } - }); - } else { - cell.html( '' ); - }; - }, - }); - }); - </script> -</%def> - -<%def name="render_clone_str( repository )"> - <% - protocol, base = trans.request.base.split( '://' ) - if trans.user: - username = '%s@' % trans.user.username - else: - username = '' - clone_str = '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name ) - %> - hg clone <a href="${clone_str}">${clone_str}</a> -</%def> \ No newline at end of file + \ No newline at end of file
--- a/test/base/twilltestcase.py Tue Jul 26 13:18:22 2011 -0400 +++ b/test/base/twilltestcase.py Tue Jul 26 13:56:16 2011 -0400 @@ -474,7 +474,7 @@ elem = data_list[-1] hid = int( elem.get('hid') ) self.assertTrue( hid ) - self.visit_page( "edit?hid=%s" % hid ) + self.visit_page( "dataset/edit?hid=%s" % hid ) for subpatt in patt.split(): tc.find(subpatt) def delete_history_item( self, hda_id, strings_displayed=[] ): @@ -483,7 +483,7 @@ hda_id = int( hda_id ) except: raise AssertionError, "Invalid hda_id '%s' - must be int" % hda_id - self.visit_url( "%s/root/delete?show_deleted_on_refresh=False&id=%s" % ( self.url, hda_id ) ) + self.visit_url( "%s/datasets/%s/delete?show_deleted_on_refresh=False" % ( self.url, self.security.encode_id( hda_id ) ) ) for check_str in strings_displayed: self.check_page_for_string( check_str ) def undelete_history_item( self, hda_id, strings_displayed=[] ): @@ -492,7 +492,7 @@ hda_id = int( hda_id ) except: raise AssertionError, "Invalid hda_id '%s' - must be int" % hda_id - self.visit_url( "%s/dataset/undelete?id=%s" % ( self.url, hda_id ) ) + self.visit_url( "%s/datasets/%s/undelete" % ( self.url, self.security.encode_id( hda_id ) ) ) for check_str in strings_displayed: self.check_page_for_string( check_str ) def display_history_item( self, hda_id, strings_displayed=[] ): @@ -511,7 +511,7 @@ strings_displayed=[], strings_not_displayed=[] ): """Edit history_dataset_association attribute information""" self.home() - self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) ) + self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) ) submit_required = False self.check_page_for_string( 'Edit Attributes' ) if new_name: @@ -545,9 +545,9 @@ def auto_detect_metadata( self, hda_id ): """Auto-detect history_dataset_association metadata""" self.home() - self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) ) + self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) ) self.check_page_for_string( 'This will inspect the dataset and attempt' ) - tc.fv( 'auto_detect', 'id', hda_id ) + tc.fv( 'auto_detect', 'detect', 'Auto-detect' ) tc.submit( 'detect' ) try: self.check_page_for_string( 'Attributes have been queued to be updated' ) @@ -559,7 +559,7 @@ def convert_format( self, hda_id, target_type ): """Convert format of history_dataset_association""" self.home() - self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) ) + self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) ) self.check_page_for_string( 'This will inspect the dataset and attempt' ) tc.fv( 'convert_data', 'target_type', target_type ) tc.submit( 'convert_data' ) @@ -569,7 +569,7 @@ def change_datatype( self, hda_id, datatype ): """Change format of history_dataset_association""" self.home() - self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) ) + self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) ) self.check_page_for_string( 'This will change the datatype of the existing dataset but' ) tc.fv( 'change_datatype', 'datatype', datatype ) tc.submit( 'change' )
--- a/test/functional/test_history_functions.py Tue Jul 26 13:18:22 2011 -0400 +++ b/test/functional/test_history_functions.py Tue Jul 26 13:56:16 2011 -0400 @@ -664,7 +664,7 @@ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \ .first() self.home() - self.visit_url( "%s/root/delete?show_deleted_on_refresh=False&id=%s" % ( self.url, str( latest_hda.id ) ) ) + self.delete_history_item( str( latest_hda.id ) ) self.check_history_for_string( 'Your history is empty' ) self.home() self.visit_url( "%s/history/?show_deleted=True" % self.url )
http://bitbucket.org/galaxy/galaxy-central/changeset/57c8053675b6/ changeset: 57c8053675b6 user: dannon date: 2011-07-28 14:22:07 summary: Expose workflows shared with the user in /workflows list of the API. affected #: 1 file (597 bytes)
--- a/lib/galaxy/web/api/workflows.py Tue Jul 26 13:56:16 2011 -0400 +++ b/lib/galaxy/web/api/workflows.py Thu Jul 28 08:22:07 2011 -0400 @@ -29,6 +29,14 @@ encoded_id = trans.security.encode_id(wf.id) item['url'] = url_for('workflow', id=encoded_id) rval.append(item) + for wf_sa in trans.sa_session.query( trans.app.model.StoredWorkflowUserShareAssociation ).filter_by( + user=trans.user ).join( 'stored_workflow' ).filter( + trans.app.model.StoredWorkflow.deleted == False ).order_by( + desc( trans.app.model.StoredWorkflow.update_time ) ).all(): + item = wf_sa.stored_workflow.get_api_value(value_mapper={'id':trans.security.encode_id}) + encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id) + item['url'] = url_for('workflow', id=encoded_id) + rval.append(item) return rval @web.expose_api def show(self, trans, id, **kwd):
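With this change a GET on the workflows API returns both the workflows the user owns and those shared with them through StoredWorkflowUserShareAssociation, each carrying an encoded id and a url. A hedged usage sketch (the URL and key are placeholders, and the exact fields come from StoredWorkflow.get_api_value()):

    # Sketch: list workflows via the API after this change (Python 2 era code).
    import urllib2, json
    galaxy_url = "http://localhost:8080"   # placeholder
    api_key = "YOUR_API_KEY"               # placeholder
    workflows = json.loads( urllib2.urlopen( "%s/api/workflows?key=%s" % ( galaxy_url, api_key ) ).read() )
    for wf in workflows:
        # workflows shared with the user now appear alongside their own
        print wf[ 'id' ], wf[ 'url' ]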
http://bitbucket.org/galaxy/galaxy-central/changeset/f0c4e3efcb99/ changeset: f0c4e3efcb99 user: dannon date: 2011-07-28 14:22:22 summary: Merge. affected #: 15 files (61.1 KB)
--- a/datatypes_conf.xml.sample Thu Jul 28 08:22:07 2011 -0400 +++ b/datatypes_conf.xml.sample Thu Jul 28 08:22:22 2011 -0400 @@ -72,9 +72,10 @@ <!-- <display file="gbrowse/gbrowse_gff.xml" inherit="True" /> --></datatype><datatype extension="gff3" type="galaxy.datatypes.interval:Gff3" display_in_upload="true"/> - <datatype extension="gif" type="galaxy.datatypes.images:Image" mimetype="image/gif"/> + <datatype extension="gif" type="galaxy.datatypes.images:Gif" mimetype="image/gif"/><datatype extension="gmaj.zip" type="galaxy.datatypes.images:Gmaj" mimetype="application/zip"/><datatype extension="gtf" type="galaxy.datatypes.interval:Gtf" display_in_upload="true"/> + <datatype extension="h5" type="galaxy.datatypes.data:Data" mimetype="application/octet-stream"/><datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/><datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true"><converter file="interval_to_bed_converter.xml" target_datatype="bed"/> @@ -90,7 +91,21 @@ <datatype extension="picard_interval_list" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/><datatype extension="gatk_interval" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/><datatype extension="gatk_dbsnp" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/> - <datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/> + <datatype extension="jpg" type="galaxy.datatypes.images:Jpg" mimetype="image/jpeg"/> + <datatype extension="tiff" type="galaxy.datatypes.images:Tiff" mimetype="image/tiff"/> + <datatype extension="bmp" type="galaxy.datatypes.images:Bmp" mimetype="image/bmp"/> + <datatype extension="im" type="galaxy.datatypes.images:Im" mimetype="image/im"/> + <datatype extension="pcd" type="galaxy.datatypes.images:Pcd" mimetype="image/pcd"/> + <datatype extension="pcx" type="galaxy.datatypes.images:Pcx" mimetype="image/pcx"/> + <datatype extension="ppm" type="galaxy.datatypes.images:Ppm" mimetype="image/ppm"/> + <datatype extension="psd" type="galaxy.datatypes.images:Psd" mimetype="image/psd"/> + <datatype extension="xbm" type="galaxy.datatypes.images:Xbm" mimetype="image/xbm"/> + <datatype extension="xpm" type="galaxy.datatypes.images:Xpm" mimetype="image/xpm"/> + <datatype extension="rgb" type="galaxy.datatypes.images:Rgb" mimetype="image/rgb"/> + <datatype extension="pbm" type="galaxy.datatypes.images:Pbm" mimetype="image/pbm"/> + <datatype extension="pgm" type="galaxy.datatypes.images:Pgm" mimetype="image/pgm"/> + <datatype extension="eps" type="galaxy.datatypes.images:Eps" mimetype="image/eps"/> + <datatype extension="rast" type="galaxy.datatypes.images:Rast" mimetype="image/rast"/><datatype extension="laj" type="galaxy.datatypes.images:Laj"/><datatype extension="lav" type="galaxy.datatypes.sequence:Lav" display_in_upload="true"/><datatype extension="maf" type="galaxy.datatypes.sequence:Maf" display_in_upload="true"> @@ -102,7 +117,7 @@ </datatype><datatype extension="pdf" type="galaxy.datatypes.images:Pdf" mimetype="application/pdf"/><datatype extension="pileup" type="galaxy.datatypes.tabular:Pileup" display_in_upload="true" /> - <datatype extension="png" type="galaxy.datatypes.images:Image" mimetype="image/png"/> + <datatype extension="png" type="galaxy.datatypes.images:Png" mimetype="image/png"/><datatype extension="qual" type="galaxy.datatypes.qualityscore:QualityScore" /><datatype extension="qualsolexa" 
type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/><datatype extension="qualillumina" type="galaxy.datatypes.qualityscore:QualityScoreIllumina" display_in_upload="true"/> @@ -116,7 +131,7 @@ <datatype extension="svg" type="galaxy.datatypes.images:Image" mimetype="image/svg+xml"/><datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/><datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/> - <datatype extension="twobit" type="galaxy.datatypes.binary:TwoBit" mimetype="application/octet-stream" display_in_upload="true"/> + <datatype extension="twobit" type="galaxy.datatypes.binary:TwoBit" mimetype="application/octet-stream" display_in_upload="true"/><datatype extension="txt" type="galaxy.datatypes.data:Text" display_in_upload="true"/><datatype extension="memexml" type="galaxy.datatypes.xml:MEMEXml" mimetype="application/xml" display_in_upload="true"/><datatype extension="blastxml" type="galaxy.datatypes.xml:BlastXml" mimetype="application/xml" display_in_upload="true"/> @@ -304,6 +319,24 @@ <sniffer type="galaxy.datatypes.tabular:Pileup"/><sniffer type="galaxy.datatypes.interval:Interval"/><sniffer type="galaxy.datatypes.tabular:Sam"/> + <sniffer type="galaxy.datatypes.images:Jpg"/> + <sniffer type="galaxy.datatypes.images:Png"/> + <sniffer type="galaxy.datatypes.images:Tiff"/> + <sniffer type="galaxy.datatypes.images:Bmp"/> + <sniffer type="galaxy.datatypes.images:Gif"/> + <sniffer type="galaxy.datatypes.images:Im"/> + <sniffer type="galaxy.datatypes.images:Pcd"/> + <sniffer type="galaxy.datatypes.images:Pcx"/> + <sniffer type="galaxy.datatypes.images:Ppm"/> + <sniffer type="galaxy.datatypes.images:Psd"/> + <sniffer type="galaxy.datatypes.images:Xbm"/> + <sniffer type="galaxy.datatypes.images:Xpm"/> + <sniffer type="galaxy.datatypes.images:Rgb"/> + <sniffer type="galaxy.datatypes.images:Pbm"/> + <sniffer type="galaxy.datatypes.images:Pgm"/> + <sniffer type="galaxy.datatypes.images:Xpm"/> + <sniffer type="galaxy.datatypes.images:Eps"/> + <sniffer type="galaxy.datatypes.images:Rast"/><!-- Keep this commented until the sniff method in the assembly.py module is fixed to not read the entire file.
--- a/lib/galaxy/datatypes/binary.py Thu Jul 28 08:22:07 2011 -0400 +++ b/lib/galaxy/datatypes/binary.py Thu Jul 28 08:22:22 2011 -0400 @@ -18,7 +18,7 @@ log = logging.getLogger(__name__)
# Currently these supported binary data types must be manually set on upload -unsniffable_binary_formats = [ 'ab1', 'scf' ] +unsniffable_binary_formats = [ 'ab1', 'scf', 'h5' ]
class Binary( data.Data ): """Binary data""" @@ -206,7 +206,24 @@ return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) ) def get_track_type( self ): return "ReadTrack", {"data": "bai", "index": "summary_tree"} - + +class H5( Binary ): + """Class describing an HDF5 file""" + file_ext = "h5" + + def set_peek( self, dataset, is_multi_byte=False ): + if not dataset.dataset.purged: + dataset.peek = "Binary h5 file" + dataset.blurb = data.nice_size( dataset.get_size() ) + else: + dataset.peek = 'file does not exist' + dataset.blurb = 'file purged from disk' + def display_peek( self, dataset ): + try: + return dataset.peek + except: + return "Binary h5 file (%s)" % ( data.nice_size( dataset.get_size() ) ) + class Scf( Binary ): """Class describing an scf binary sequence file""" file_ext = "scf" @@ -292,7 +309,6 @@ Binary.__init__( self, **kwd ) self._magic = 0x8789F2EB self._name = "BigBed" - def get_track_type( self ): return "LineTrack", {"data_standalone": "bigbed"}
@@ -309,14 +325,12 @@ return True except IOError: return False - def set_peek(self, dataset, is_multi_byte=False): if not dataset.dataset.purged: dataset.peek = "Binary TwoBit format nucleotide file" dataset.blurb = data.nice_size(dataset.get_size()) else: return super(TwoBit, self).set_peek(dataset, is_multi_byte) - def display_peek(self, dataset): try: return dataset.peek
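Because 'h5' joins unsniffable_binary_formats, HDF5 uploads only work when the user selects the format explicitly. As an aside, HDF5 files do begin with a fixed 8-byte signature, so a magic-byte sniffer would be possible in principle; this sketch is illustrative, not part of the changeset:

    # Aside (not committed code): HDF5 files start with the signature
    # \x89HDF\r\n\x1a\n, so one could detect them by magic bytes.
    def looks_like_hdf5( filename ):
        return open( filename, 'rb' ).read( 8 ) == '\x89HDF\r\n\x1a\n'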
--- a/lib/galaxy/datatypes/checkers.py Thu Jul 28 08:22:07 2011 -0400 +++ b/lib/galaxy/datatypes/checkers.py Thu Jul 28 08:22:22 2011 -0400 @@ -1,6 +1,30 @@ import os, gzip, re, gzip, zipfile, binascii, bz2 from galaxy import util
+import imghdr + +try: + import Image as PIL +except ImportError: + try: + from PIL import Image as PIL + except: + PIL = None + +def check_image( file_path ): + if PIL is not None: + try: + im = PIL.open( file_path ) + except: + return False + if im: + return im + return False + else: + if imghdr.what( file_path ) is not None: + return True + return False + def check_html( file_path, chunk=None ): if chunk is None: temp = open( file_path, "U" )
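check_image() prefers PIL and falls back to the standard-library imghdr module, and its return type is deliberately loose: an open PIL Image object when PIL is installed, a bare True from the imghdr path, or False when the file is not a recognized image. Callers should therefore treat the result as truthy rather than compare it against True. A small usage sketch (the path is a placeholder):

    # Sketch: consuming check_image's mixed return value.
    from galaxy.datatypes.checkers import check_image
    image = check_image( '/tmp/uploaded_file' )   # placeholder path
    if image:
        # 'image' is a PIL Image when PIL is available, else just True
        print 'recognized as an image'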
--- a/lib/galaxy/datatypes/images.py Thu Jul 28 08:22:07 2011 -0400 +++ b/lib/galaxy/datatypes/images.py Thu Jul 28 08:22:22 2011 -0400 @@ -7,12 +7,31 @@ from galaxy.datatypes.metadata import MetadataElement from galaxy.datatypes import metadata from galaxy.datatypes.sniff import * +from galaxy.datatypes.util.image_util import * from urllib import urlencode, quote_plus import zipfile -import os, subprocess, tempfile +import os, subprocess, tempfile, imghdr + +try: + import Image as PIL +except ImportError: + try: + from PIL import Image as PIL + except: + PIL = None
log = logging.getLogger(__name__)
+# TODO: Uploading image files of various types is supported in Galaxy, but on +# the main public instance, the display_in_upload is not set for these data +# types in datatypes_conf.xml because we do not allow image files to be uploaded +# there. There is currently no API feature that allows uploading files outside +# of a data library ( where it requires either the upload_paths or upload_directory +# option to be enabled, which is not the case on the main public instance ). Because +# of this, we're currently safe, but when the api is enhanced to allow other uploads, +# we need to ensure that the implementation is such that image files cannot be uploaded +# to our main public instance. + class Image( data.Data ): """Class describing an image""" def set_peek( self, dataset, is_multi_byte=False ): @@ -22,11 +41,110 @@ else: dataset.peek = 'file does not exist' dataset.blurb = 'file purged from disk' + def sniff( self, filename ): + # First check if we can use PIL + if PIL is not None: + try: + im = PIL.open( filename ) + im.close() + return True + except: + return False + else: + if imghdr.what( filename ) is not None: + return True + else: + return False + +class Jpg( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in jpg format.""" + return check_image_type( filename, ['JPEG'], image ) + +class Png( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in png format.""" + return check_image_type( filename, ['PNG'], image ) + +class Tiff( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in tiff format.""" + return check_image_type( filename, ['TIFF'], image ) + +class Bmp( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in bmp format.""" + return check_image_type( filename, ['BMP'], image ) + +class Gif( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in gif format.""" + return check_image_type( filename, ['GIF'], image ) + +class Im( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in im format.""" + return check_image_type( filename, ['IM'], image ) + +class Pcd( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in pcd format.""" + return check_image_type( filename, ['PCD'], image ) + +class Pcx( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in pcx format.""" + return check_image_type( filename, ['PCX'], image ) + +class Ppm( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in ppm format.""" + return check_image_type( filename, ['PPM'], image ) + +class Psd( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in psd format.""" + return check_image_type( filename, ['PSD'], image ) + +class Xbm( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in XBM format.""" + return check_image_type( filename, ['XBM'], image ) + +class Xpm( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in XPM format.""" + return check_image_type( filename, ['XPM'], image ) + +class Rgb( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in RGB format.""" + return check_image_type( filename, ['RGB'], image ) + +class Pbm( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in PBM format""" + return check_image_type( filename, ['PBM'], image ) + +class Pgm( Image ): + def sniff(self, filename, 
image=None): + """Determine if the file is in PGM format""" + return check_image_type( filename, ['PGM'], image ) + +class Eps( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in eps format.""" + return check_image_type( filename, ['EPS'], image ) + + +class Rast( Image ): + def sniff(self, filename, image=None): + """Determine if the file is in rast format""" + return check_image_type( filename, ['RAST'], image )
class Pdf( Image ): def sniff(self, filename): - """Determine if the file is in pdf format. - """ + """Determine if the file is in pdf format.""" headers = get_headers(filename, None, 1) try: if headers[0][0].startswith("%PDF"):
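Each of these subclasses reduces to a single check_image_type() call with its PIL format name; the optional image argument lets a caller that has already opened the file with PIL reuse that Image object across candidate types instead of reopening the file once per sniffer. A sketch of that contract (the file name is a placeholder):

    # Sketch: reuse one PIL Image across several format checks.
    from galaxy.datatypes.util.image_util import check_image_type
    try:
        import Image as PIL
    except ImportError:
        from PIL import Image as PIL
    im = PIL.open( 'sample.png' )                          # placeholder file
    print check_image_type( 'sample.png', [ 'PNG' ], im )  # True for a PNG
    print check_image_type( 'sample.png', [ 'JPEG' ], im ) # False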
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/lib/galaxy/datatypes/util/image_util.py Thu Jul 28 08:22:22 2011 -0400 @@ -0,0 +1,76 @@ +""" +Provides utilities for working with image files. +""" +import logging, imghdr + +try: + import Image as PIL +except ImportError: + try: + from PIL import Image as PIL + except: + PIL = None + +log = logging.getLogger(__name__) + +def image_type( filename, image=None ): + format = '' + if PIL is not None: + if image is not None: + format = image.format + else: + try: + im = PIL.open( filename ) + format = im.format + im.close() + except: + return False + else: + format = imghdr.what( filename ) + if format is not None: + format = format.upper() + else: + return False + return format +def check_image_type( filename, types, image=None ): + # image_type() returns the format name, or False if the file could not be read + format = image_type( filename, image ) + return format in types +def get_image_ext( file_path, image ): + # Determine the Galaxy extension for the image's detected format + format = image_type( file_path, image ) + if format in [ 'JPG','JPEG' ]: + return 'jpg' + if format == 'PNG': + return 'png' + if format == 'TIFF': + return 'tiff' + if format == 'BMP': + return 'bmp' + if format == 'GIF': + return 'gif' + if format == 'IM': + return 'im' + if format == 'PCD': + return 'pcd' + if format == 'PCX': + return 'pcx' + if format == 'PPM': + return 'ppm' + if format == 'PSD': + return 'psd' + if format == 'XBM': + return 'xbm' + if format == 'XPM': + return 'xpm' + if format == 'RGB': + return 'rgb' + if format == 'PBM': + return 'pbm' + if format == 'PGM': + return 'pgm' + if format == 'EPS': + return 'eps' + if format == 'RAST': + return 'rast' + return None
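The extension lookup above is a long if-chain over image_type()'s format string; an equivalent table-driven form would be more compact. A sketch of that alternative (not the committed code; assumes image_type from this module):

    # Alternative sketch: map PIL/imghdr format names straight to extensions.
    PIL_FORMAT_TO_EXT = {
        'JPG': 'jpg', 'JPEG': 'jpg', 'PNG': 'png', 'TIFF': 'tiff',
        'BMP': 'bmp', 'GIF': 'gif', 'IM': 'im', 'PCD': 'pcd', 'PCX': 'pcx',
        'PPM': 'ppm', 'PSD': 'psd', 'XBM': 'xbm', 'XPM': 'xpm', 'RGB': 'rgb',
        'PBM': 'pbm', 'PGM': 'pgm', 'EPS': 'eps', 'RAST': 'rast',
    }
    def get_image_ext( file_path, image ):
        format = image_type( file_path, image )   # format name, or False
        return PIL_FORMAT_TO_EXT.get( format or '', None )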
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/5_mult_liftover_mapped.bed Thu Jul 28 08:22:22 2011 -0400 @@ -0,0 +1,132 @@ +chr7 116197893 116197920 CCDS5763.1_cds_0_0_chr7_115444713_f 1 + +chr7 116222929 116223015 CCDS5763.1_cds_1_0_chr7_115468539_f 1 + +chr7 116237938 116238188 CCDS5763.1_cds_2_0_chr7_115483025_f 1 + +chr7 116239076 116239412 CCDS5763.1_cds_3_0_chr7_115484166_f 1 + +chr7 116240675 116240891 CCDS5763.1_cds_4_0_chr7_115485765_f 1 + +chr7 116241233 116241396 CCDS5763.1_cds_5_0_chr7_115486323_f 1 + +chr7 116246193 116246382 CCDS5763.1_cds_6_0_chr7_115491299_f 1 + +chr7 116222929 116223015 CCDS5764.1_cds_0_0_chr7_115468539_f 1 + +chr7 116237938 116238188 CCDS5764.1_cds_1_0_chr7_115483025_f 1 + +chr7 116239076 116239412 CCDS5764.1_cds_2_0_chr7_115484166_f 1 + +chr7 116240675 116240891 CCDS5764.1_cds_3_0_chr7_115485765_f 1 + +chr7 116241233 116241396 CCDS5764.1_cds_4_0_chr7_115486323_f 1 + +chr7 116246193 116246382 CCDS5764.1_cds_5_0_chr7_115491299_f 1 + +chr7 116495075 116495225 CCDS5766.1_cds_0_0_chr7_115733787_f 1 + +chr7 116495546 116495734 CCDS5766.1_cds_1_0_chr7_115734265_f 1 + +chr7 116501260 116501411 CCDS5766.1_cds_2_0_chr7_115739976_f 1 + +chr7 116495075 116495225 CCDS5765.1_cds_0_0_chr7_115733787_f 1 + +chr7 116501260 116501449 CCDS5765.1_cds_1_0_chr7_115739976_f 1 + +chr7 116522088 116522118 CCDS5767.1_cds_0_0_chr7_115759068_f 1 + +chr7 116523550 116523715 CCDS5767.1_cds_1_0_chr7_115760530_f 1 + +chr7 116562770 116563112 CCDS5767.1_cds_2_0_chr7_115792951_f 1 + +chr7 116872904 116872943 CCDS5768.1_cds_0_0_chr7_116096617_f 1 + +chr7 116899050 116899114 CCDS5768.1_cds_1_0_chr7_116122132_f 1 + +chr7 116903933 116903985 CCDS5768.1_cds_2_0_chr7_116126999_f 1 + +chr7 116909709 116909773 CCDS5768.1_cds_3_0_chr7_116132777_f 1 + +chr7 116915110 116915317 CCDS5768.1_cds_4_0_chr7_116138182_f 1 + +chr7 116917198 116917278 CCDS5768.1_cds_5_0_chr7_116140268_f 1 + +chr7 116921242 116921321 CCDS5768.1_cds_6_0_chr7_116144238_f 1 + +chr7 116923078 116923150 CCDS5768.1_cds_7_0_chr7_116146074_f 1 + +chr7 116927214 116927277 CCDS5768.1_cds_8_0_chr7_116150065_f 1 + +chr7 116928880 116929021 CCDS5768.1_cds_9_0_chr7_116151732_f 1 + +chr7 116964784 116964932 CCDS5770.1_cds_0_0_chr7_116187546_f 1 + +chr7 117108894 117108977 CCDS5770.1_cds_1_0_chr7_116333767_f 1 + +chr7 117128694 117128854 CCDS5770.1_cds_2_0_chr7_116353566_f 1 + +chr7 117138899 117138954 CCDS5770.1_cds_3_0_chr7_116363798_f 1 + +chr7 117139597 117139713 CCDS5770.1_cds_4_0_chr7_116364496_f 1 + +chr7 117140988 117141064 CCDS5770.1_cds_5_0_chr7_116365890_f 1 + +chr7 117143259 117143328 CCDS5770.1_cds_6_0_chr7_116368129_f 1 + +chr7 117145226 117145381 CCDS5770.1_cds_7_0_chr7_116370086_f 1 + +chr7 117147574 117147672 CCDS5770.1_cds_8_0_chr7_116372440_f 1 + +chr7_random 2679423 2679538 CCDS5770.1_cds_9_0_chr7_116404867_f 1 + +chr7 117201671 117201751 CCDS5770.1_cds_10_0_chr7_116423326_f 1 + +chr7 117203227 117203330 CCDS5770.1_cds_11_0_chr7_116424839_f 1 + +chr7 117222109 117222260 CCDS5770.1_cds_12_0_chr7_116443792_f 1 + +chr7 117231432 117231525 CCDS5770.1_cds_13_0_chr7_116453089_f 1 + +chr7 117234203 117234343 CCDS5770.1_cds_14_0_chr7_116455928_f 1 + +chr7 117235141 117235261 CCDS5770.1_cds_15_0_chr7_116456866_f 1 + +chr7 116964784 116964932 CCDS5769.1_cds_0_0_chr7_116187546_f 1 + +chr7 117108894 117108977 CCDS5769.1_cds_1_0_chr7_116333767_f 1 + +chr7 117128694 117128854 CCDS5769.1_cds_2_0_chr7_116353566_f 1 + +chr7 117138899 117138954 CCDS5769.1_cds_3_0_chr7_116363798_f 1 + +chr7 117139597 117139713 
CCDS5769.1_cds_4_0_chr7_116364496_f 1 + +chr7 117140988 117141064 CCDS5769.1_cds_5_0_chr7_116365890_f 1 + +chr7 117145226 117145381 CCDS5769.1_cds_6_0_chr7_116370086_f 1 + +chr7 117147574 117147672 CCDS5769.1_cds_7_0_chr7_116372440_f 1 + +chr7_random 2679423 2679538 CCDS5769.1_cds_8_0_chr7_116404867_f 1 + +chr7 117201671 117201751 CCDS5769.1_cds_9_0_chr7_116423326_f 1 + +chr7 117203227 117203330 CCDS5769.1_cds_10_0_chr7_116424839_f 1 + +chr7 117222109 117222260 CCDS5769.1_cds_11_0_chr7_116443792_f 1 + +chr7 117231432 117231525 CCDS5769.1_cds_12_0_chr7_116453089_f 1 + +chr7 117234203 117234343 CCDS5769.1_cds_13_0_chr7_116455928_f 1 + +chr7 117241962 117242058 CCDS5769.1_cds_14_0_chr7_116463767_f 1 + +chr7 117291331 117291561 CCDS5771.1_cds_0_0_chr7_116512160_r 1 - +chr7 117310742 117311007 CCDS5771.1_cds_1_0_chr7_116531617_r 1 - +chr7 117328536 117328629 CCDS5771.1_cds_2_0_chr7_116549076_r 1 - +chr7 117333743 117333970 CCDS5771.1_cds_3_0_chr7_116554572_r 1 - +chr7 117336084 117336167 CCDS5771.1_cds_4_0_chr7_116556912_r 1 - +chr7 117382797 117382950 CCDS5772.1_cds_0_0_chr7_116597601_r 1 - +chr7 117386552 117386666 CCDS5772.1_cds_1_0_chr7_116601357_r 1 - +chr7 117387812 117387919 CCDS5772.1_cds_2_0_chr7_116602617_r 1 - +chr7 117397672 117397782 CCDS5772.1_cds_3_0_chr7_116613943_r 1 - +chr7 117398745 117398802 CCDS5772.1_cds_4_0_chr7_116615016_r 1 - +chr7 117399808 117399884 CCDS5772.1_cds_5_0_chr7_116616074_r 1 - +chr7 117400724 117400849 CCDS5772.1_cds_6_0_chr7_116616991_r 1 - +chr7 117402466 117402602 CCDS5772.1_cds_7_0_chr7_116618731_r 1 - +chr7 117403442 117403554 CCDS5772.1_cds_8_0_chr7_116619703_r 1 - +chr7 117438281 117438393 CCDS5772.1_cds_9_0_chr7_116654168_r 1 - +chr7 117440357 117440480 CCDS5772.1_cds_10_0_chr7_116656242_r 1 - +chr7 117444948 117445048 CCDS5772.1_cds_11_0_chr7_116660841_r 1 - +chr7 117445468 117445573 CCDS5772.1_cds_12_0_chr7_116661361_r 1 - +chr7 117499706 117499759 CCDS5773.1_cds_0_0_chr7_116714100_f 1 + +chr7 117523820 117523931 CCDS5773.1_cds_1_0_chr7_116738258_f 1 + +chr7 117528597 117528706 CCDS5773.1_cds_2_0_chr7_116743039_f 1 + +chr7 117550464 117550680 CCDS5773.1_cds_3_0_chr7_116764904_f 1 + +chr7 117553829 117553919 CCDS5773.1_cds_4_0_chr7_116768281_f 1 + +chr7 117554806 117554970 CCDS5773.1_cds_5_0_chr7_116769253_f 1 + +chr7 117556111 117556237 CCDS5773.1_cds_6_0_chr7_116770553_f 1 + +chr7 117559659 117559905 CCDS5773.1_cds_7_0_chr7_116774105_f 1 + +chr7 117561568 117561660 CCDS5773.1_cds_8_0_chr7_116776021_f 1 + +chr7 117568199 117568382 CCDS5773.1_cds_9_0_chr7_116782646_f 1 + +chr7 117579005 117579197 CCDS5773.1_cds_10_0_chr7_116793469_f 1 + +chr7 117609945 117610041 CCDS5773.1_cds_11_0_chr7_116821744_f 1 + +chr7 117612558 117612645 CCDS5773.1_cds_12_0_chr7_116824358_f 1 + +chr7 117614292 117615016 CCDS5773.1_cds_13_0_chr7_116825939_f 1 + +chr7 117617279 117617408 CCDS5773.1_cds_14_0_chr7_116828935_f 1 + +chr7 117625173 117625211 CCDS5773.1_cds_15_0_chr7_116836831_f 1 + +chr7 117625879 117626130 CCDS5773.1_cds_16_0_chr7_116837537_f 1 + +chr7 117628986 117629066 CCDS5773.1_cds_17_0_chr7_116840679_f 1 + +chr7 117632825 117632976 CCDS5773.1_cds_18_0_chr7_116844524_f 1 + +chr7 117633887 117634115 CCDS5773.1_cds_19_0_chr7_116845586_f 1 + +chr7 117636923 117637024 CCDS5773.1_cds_20_0_chr7_116848618_f 1 + +chr7 117649505 117649753 CCDS5773.1_cds_21_0_chr7_116861527_f 1 + +chr7 117664146 117664302 CCDS5773.1_cds_22_0_chr7_116876443_f 1 + +chr7 117674548 117674638 CCDS5773.1_cds_23_0_chr7_116886847_f 1 + +chr7 117686685 117686858 
CCDS5773.1_cds_24_0_chr7_116898693_f 1 + +chr7 117687456 117687562 CCDS5773.1_cds_25_0_chr7_116899464_f 1 + +chr7 117688902 117689103 CCDS5773.1_cds_26_0_chr7_116900913_f 1 + +chr7 117734744 117734996 CCDS5774.1_cds_0_0_chr7_116945542_r 1 - +chr7 117741224 117741326 CCDS5774.1_cds_1_0_chr7_116952023_r 1 - +chr7 117743450 117743638 CCDS5774.1_cds_4_0_chr7_116958552_r 1 - +chr7 117743957 117744164 CCDS5774.1_cds_5_0_chr7_116959057_r 1 - +chr7 117746996 117747175 CCDS5774.1_cds_6_0_chr7_116962094_r 1 - +chr7 117753794 117753981 CCDS5774.1_cds_7_0_chr7_116968918_r 1 - +chr7 117754149 117754302 CCDS5774.1_cds_8_0_chr7_116969274_r 1 - +chr7 117764699 117764799 CCDS5774.1_cds_9_0_chr7_116979836_r 1 - +chr7 117764881 117764968 CCDS5774.1_cds_10_0_chr7_116980018_r 1 - +chr7 117775103 117775183 CCDS5774.1_cds_11_0_chr7_116990560_r 1 - +chr7 117776423 117776519 CCDS5774.1_cds_12_0_chr7_116991880_r 1 - +chr7 117779436 117779712 CCDS5774.1_cds_13_0_chr7_116994440_r 1 - +chr7 117786062 117786180 CCDS5774.1_cds_14_0_chr7_117001064_r 1 - +chr7 117796458 117796713 CCDS5774.1_cds_15_0_chr7_117011516_r 1 - +chr7 117799369 117799520 CCDS5774.1_cds_16_0_chr7_117014446_r 1 - +chr7 117801790 117801890 CCDS5774.1_cds_17_0_chr7_117016867_r 1 - +chr7 117803186 117803390 CCDS5774.1_cds_18_0_chr7_117018256_r 1 - +chr7 117810065 117811719 CCDS5774.1_cds_19_0_chr7_117025133_r 1 - +chr7 117829639 117829865 CCDS5774.1_cds_20_0_chr7_117044770_r 1 - +chr7 117880732 117880840 CCDS5774.1_cds_21_0_chr7_117095214_r 1 - +chr7 117893163 117893244 CCDS5774.1_cds_22_0_chr7_117107340_r 1 - +chr5 133682646 133682808 CCDS4149.1_cds_0_0_chr5_131424299_f 1 + +chr5 133682906 133682948 CCDS4149.1_cds_1_0_chr5_131424559_f 1 + +chr5 133684249 133684339 CCDS4149.1_cds_2_0_chr5_131425904_f 1 + +chr5 133684463 133684505 CCDS4149.1_cds_3_0_chr5_131426118_f 1 +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/5_mult_liftover_unmapped.bed Thu Jul 28 08:22:22 2011 -0400 @@ -0,0 +1,4 @@ +#Deleted in new +chr7 116953508 116953641 CCDS5774.1_cds_2_0_chr7_116953509_r 0 - +#Deleted in new +chr7 116955071 116955135 CCDS5774.1_cds_3_0_chr7_116955072_r 0 -
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/cuffcompare_in1_liftover_mapped.bed Thu Jul 28 08:22:22 2011 -0400 @@ -0,0 +1,86 @@ +chr1 Cufflinks transcript 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073"; +chr1 Cufflinks exon 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073"; +chr1 Cufflinks transcript 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053"; +chr1 Cufflinks exon 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; exon_number "1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053"; +chr1 Cufflinks transcript 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139"; +chr1 Cufflinks exon 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; exon_number "1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139"; +chr1 Cufflinks transcript 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks exon 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks transcript 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks exon 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks transcript 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701"; +chr1 Cufflinks exon 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; exon_number "1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701"; +chr1 Cufflinks transcript 3128657 3128728 1000 . . gene_id "CUFF.19"; transcript_id "CUFF.19.1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000"; +chr1 Cufflinks exon 3128657 3128728 1000 . 
. gene_id "CUFF.19"; transcript_id "CUFF.19.1"; exon_number "1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000"; +chr1 Cufflinks transcript 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000"; +chr1 Cufflinks exon 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; exon_number "1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000"; +chr1 Cufflinks transcript 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189"; +chr1 Cufflinks exon 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; exon_number "1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189"; +chr1 Cufflinks transcript 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070"; +chr1 Cufflinks exon 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; exon_number "1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070"; +chr1 Cufflinks transcript 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710"; +chr1 Cufflinks exon 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; exon_number "1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710"; +chr1 Cufflinks transcript 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000"; +chr1 Cufflinks exon 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; exon_number "1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000"; +chr1 Cufflinks transcript 3109111 3109241 1000 . . gene_id "CUFF.35"; transcript_id "CUFF.35.1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206"; +chr1 Cufflinks exon 3109111 3109241 1000 . . gene_id "CUFF.35"; transcript_id "CUFF.35.1"; exon_number "1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206"; +chr1 Cufflinks transcript 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302"; +chr1 Cufflinks exon 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; exon_number "1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302"; +chr1 Cufflinks transcript 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks exon 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks transcript 3110280 3110358 1000 . . 
gene_id "CUFF.43"; transcript_id "CUFF.43.1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000"; +chr1 Cufflinks exon 3110280 3110358 1000 . . gene_id "CUFF.43"; transcript_id "CUFF.43.1"; exon_number "1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000"; +chr1 Cufflinks transcript 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529"; +chr1 Cufflinks exon 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; exon_number "1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529"; +chr1 Cufflinks transcript 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks exon 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks transcript 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3114116 3114142 1000 . . gene_id "CUFF.55"; transcript_id "CUFF.55.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3114116 3114142 1000 . . gene_id "CUFF.55"; transcript_id "CUFF.55.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks exon 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks transcript 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks exon 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks transcript 3201794 3201848 1000 . . gene_id "CUFF.65"; transcript_id "CUFF.65.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818"; +chr1 Cufflinks exon 3201794 3201848 1000 . . 
gene_id "CUFF.65"; transcript_id "CUFF.65.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818"; +chr1 Cufflinks transcript 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769"; +chr1 Cufflinks exon 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; exon_number "1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769"; +chr1 Cufflinks transcript 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857"; +chr1 Cufflinks exon 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; exon_number "1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857"; +chr1 Cufflinks transcript 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020"; +chr1 Cufflinks exon 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; exon_number "1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020"; +chr1 Cufflinks transcript 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818"; +chr1 Cufflinks exon 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818"; +chr1 Cufflinks transcript 3220641 3220667 1000 . . gene_id "CUFF.75"; transcript_id "CUFF.75.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3220641 3220667 1000 . . gene_id "CUFF.75"; transcript_id "CUFF.75.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462"; +chr1 Cufflinks exon 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; exon_number "1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462"; +chr1 Cufflinks transcript 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750"; +chr1 Cufflinks exon 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750"; +chr1 Cufflinks transcript 3285858 3285953 1000 . . 
gene_id "CUFF.83"; transcript_id "CUFF.83.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750"; +chr1 Cufflinks exon 3285858 3285953 1000 . . gene_id "CUFF.83"; transcript_id "CUFF.83.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750"; +chr1 Cufflinks transcript 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041"; +chr1 Cufflinks exon 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; exon_number "1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041"; +chr1 Cufflinks transcript 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824"; +chr1 Cufflinks exon 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; exon_number "1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824"; +chr1 Cufflinks transcript 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3365246 3365284 1000 . . gene_id "CUFF.93"; transcript_id "CUFF.93.1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615"; +chr1 Cufflinks exon 3365246 3365284 1000 . . gene_id "CUFF.93"; transcript_id "CUFF.93.1"; exon_number "1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615"; +chr1 Cufflinks transcript 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000"; +chr1 Cufflinks exon 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; exon_number "1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000"; +chr1 Cufflinks transcript 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881"; +chr1 Cufflinks exon 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; exon_number "1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881"; +chr1 Cufflinks transcript 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034"; +chr1 Cufflinks exon 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/cuffcompare_in1_liftover_unmapped.bed Thu Jul 28 08:22:22 2011 -0400 @@ -0,0 +1,28 @@ +# Deleted in new +chr1 Cufflinks transcript 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935"; +# Deleted in new +chr1 Cufflinks exon 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; exon_number "1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935"; +# Partially deleted in new +chr1 Cufflinks transcript 3243019 3243079 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246"; +# Partially deleted in new +chr1 Cufflinks exon 3243019 3243079 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; exon_number "1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246"; +# Partially deleted in new +chr1 Cufflinks transcript 3242634 3242923 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034"; +# Partially deleted in new +chr1 Cufflinks exon 3242634 3242923 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034"; +# Partially deleted in new +chr1 Cufflinks transcript 3191877 3191945 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174"; +# Partially deleted in new +chr1 Cufflinks exon 3191877 3191945 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; exon_number "1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174"; +# Partially deleted in new +chr1 Cufflinks transcript 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073"; +# Partially deleted in new +chr1 Cufflinks exon 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073"; +# Deleted in new +chr1 Cufflinks transcript 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857"; +# Deleted in new +chr1 Cufflinks exon 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; exon_number "1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857"; +# Deleted in new +chr1 Cufflinks transcript 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +# Deleted in new +chr1 Cufflinks exon 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/cuffcompare_in1_mult_liftover_mapped.bed Thu Jul 28 08:22:22 2011 -0400 @@ -0,0 +1,92 @@ +chr1 Cufflinks transcript 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073"; +chr1 Cufflinks exon 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073"; +chr1 Cufflinks transcript 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053"; +chr1 Cufflinks exon 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; exon_number "1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053"; +chr1 Cufflinks transcript 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139"; +chr1 Cufflinks exon 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; exon_number "1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139"; +chr1 Cufflinks transcript 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks exon 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks transcript 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks exon 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks transcript 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701"; +chr1 Cufflinks exon 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; exon_number "1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701"; +chr1 Cufflinks transcript 3128657 3128728 1000 . . 
gene_id "CUFF.19"; transcript_id "CUFF.19.1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000"; +chr1 Cufflinks exon 3128657 3128728 1000 . . gene_id "CUFF.19"; transcript_id "CUFF.19.1"; exon_number "1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000"; +chr1 Cufflinks transcript 3162123 3162179 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246"; +chr1 Cufflinks exon 3162123 3162179 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; exon_number "1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246"; +chr1 Cufflinks transcript 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000"; +chr1 Cufflinks exon 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; exon_number "1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000"; +chr1 Cufflinks transcript 3161752 3162025 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034"; +chr1 Cufflinks exon 3161752 3162025 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034"; +chr1 Cufflinks transcript 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189"; +chr1 Cufflinks exon 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; exon_number "1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189"; +chr1 Cufflinks transcript 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070"; +chr1 Cufflinks exon 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; exon_number "1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070"; +chr1 Cufflinks transcript 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710"; +chr1 Cufflinks exon 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; exon_number "1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710"; +chr1 Cufflinks transcript 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000"; +chr1 Cufflinks exon 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; exon_number "1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000"; +chr1 Cufflinks transcript 3109111 3109241 1000 . . gene_id "CUFF.35"; transcript_id "CUFF.35.1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206"; +chr1 Cufflinks exon 3109111 3109241 1000 . . 
gene_id "CUFF.35"; transcript_id "CUFF.35.1"; exon_number "1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206"; +chr1 Cufflinks transcript 3109449 3109512 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174"; +chr1 Cufflinks exon 3109449 3109512 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; exon_number "1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174"; +chr1 Cufflinks transcript 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302"; +chr1 Cufflinks exon 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; exon_number "1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302"; +chr1 Cufflinks transcript 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks exon 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544"; +chr1 Cufflinks transcript 3110280 3110358 1000 . . gene_id "CUFF.43"; transcript_id "CUFF.43.1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000"; +chr1 Cufflinks exon 3110280 3110358 1000 . . gene_id "CUFF.43"; transcript_id "CUFF.43.1"; exon_number "1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000"; +chr1 Cufflinks transcript 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529"; +chr1 Cufflinks exon 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; exon_number "1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529"; +chr1 Cufflinks transcript 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks exon 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks transcript 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3114116 3114142 1000 . . 
gene_id "CUFF.55"; transcript_id "CUFF.55.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3114116 3114142 1000 . . gene_id "CUFF.55"; transcript_id "CUFF.55.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks exon 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks transcript 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks exon 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000"; +chr1 Cufflinks transcript 3201794 3201848 1000 . . gene_id "CUFF.65"; transcript_id "CUFF.65.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818"; +chr1 Cufflinks exon 3201794 3201848 1000 . . gene_id "CUFF.65"; transcript_id "CUFF.65.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818"; +chr1 Cufflinks transcript 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769"; +chr1 Cufflinks exon 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; exon_number "1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769"; +chr1 Cufflinks transcript 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857"; +chr1 Cufflinks exon 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; exon_number "1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857"; +chr1 Cufflinks transcript 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020"; +chr1 Cufflinks exon 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; exon_number "1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020"; +chr1 Cufflinks transcript 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818"; +chr1 Cufflinks exon 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818"; +chr1 Cufflinks transcript 3220641 3220667 1000 . . gene_id "CUFF.75"; transcript_id "CUFF.75.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3220641 3220667 1000 . . 
gene_id "CUFF.75"; transcript_id "CUFF.75.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462"; +chr1 Cufflinks exon 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; exon_number "1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462"; +chr1 Cufflinks transcript 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750"; +chr1 Cufflinks exon 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750"; +chr1 Cufflinks transcript 3285858 3285953 1000 . . gene_id "CUFF.83"; transcript_id "CUFF.83.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750"; +chr1 Cufflinks exon 3285858 3285953 1000 . . gene_id "CUFF.83"; transcript_id "CUFF.83.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750"; +chr1 Cufflinks transcript 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041"; +chr1 Cufflinks exon 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; exon_number "1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041"; +chr1 Cufflinks transcript 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824"; +chr1 Cufflinks exon 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; exon_number "1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824"; +chr1 Cufflinks transcript 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks exon 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000"; +chr1 Cufflinks transcript 3365246 3365284 1000 . . 
gene_id "CUFF.93"; transcript_id "CUFF.93.1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615"; +chr1 Cufflinks exon 3365246 3365284 1000 . . gene_id "CUFF.93"; transcript_id "CUFF.93.1"; exon_number "1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615"; +chr1 Cufflinks transcript 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000"; +chr1 Cufflinks exon 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; exon_number "1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000"; +chr1 Cufflinks transcript 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881"; +chr1 Cufflinks exon 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; exon_number "1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881"; +chr1 Cufflinks transcript 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034"; +chr1 Cufflinks exon 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cuffcompare_in1_mult_liftover_unmapped.bed Thu Jul 28 08:22:22 2011 -0400
@@ -0,0 +1,16 @@
+# Deleted in new
+chr1 Cufflinks transcript 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+# Deleted in new
+chr1 Cufflinks exon 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; exon_number "1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+# Partially deleted in new
+chr1 Cufflinks transcript 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+# Partially deleted in new
+chr1 Cufflinks exon 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+# Deleted in new
+chr1 Cufflinks transcript 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857";
+# Deleted in new
+chr1 Cufflinks exon 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; exon_number "1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857";
+# Deleted in new
+chr1 Cufflinks transcript 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+# Deleted in new
+chr1 Cufflinks exon 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
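A note for readers unfamiliar with the format: despite the .bed suffix, the test-data records above are nine-column GTF lines whose last column is a key/value attribute string. A minimal sketch of pulling those attributes apart (parse_gtf_attributes is a hypothetical helper, not part of this changeset; it only illustrates the layout):

def parse_gtf_attributes( attr_column ):
    # e.g. attr_column = 'gene_id "CUFF.3"; transcript_id "CUFF.3.1"; FPKM "27.2556579354";'
    attributes = {}
    for pair in attr_column.strip().rstrip( ';' ).split( ';' ):
        pair = pair.strip()
        if pair:
            # Each pair is: key, a space, then a double-quoted value.
            key, _, value = pair.partition( ' ' )
            attributes[ key ] = value.strip( '"' )
    return attributes

assert parse_gtf_attributes( 'gene_id "CUFF.3"; transcript_id "CUFF.3.1";' )[ 'gene_id' ] == 'CUFF.3'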
--- a/test/functional/test_get_data.py Thu Jul 28 08:22:07 2011 -0400
+++ b/test/functional/test_get_data.py Thu Jul 28 08:22:22 2011 -0400
@@ -124,7 +124,7 @@
        self.upload_file( '454Score.png' )
        hda = get_latest_hda()
        assert hda is not None, "Problem retrieving hda from database"
-        self.check_history_for_string( "The uploaded file contains inappropriate content" )
+        self.check_history_for_string( "454Score.png" )
    def test_0055_upload_file( self ):
        """Test uploading lped composite datatype file, manually setting the file format"""
        # Logged in as admin_user
--- a/tools/data_source/upload.py Thu Jul 28 08:22:07 2011 -0400
+++ b/tools/data_source/upload.py Thu Jul 28 08:22:22 2011 -0400
@@ -14,9 +14,18 @@
from galaxy.datatypes.images import Pdf
from galaxy.datatypes.registry import Registry
from galaxy import util
+from galaxy.datatypes.util.image_util import *
from galaxy.util.json import *

try:
+    import Image as PIL
+except ImportError:
+    try:
+        from PIL import Image as PIL
+    except:
+        PIL = None
+
+try:
    import bz2
except:
    bz2 = None
@@ -51,16 +60,12 @@
        return d
def check_bam( file_path ):
    return Bam().sniff( file_path )
-
def check_sff( file_path ):
    return Sff().sniff( file_path )
-
def check_pdf( file_path ):
    return Pdf().sniff( file_path )
-
def check_bigwig( file_path ):
    return BigWig().sniff( file_path )
-
def check_bigbed( file_path ):
    return BigBed().sniff( file_path )
def parse_outputs( args ):
@@ -102,8 +107,16 @@
        dataset.is_multi_byte = util.is_multi_byte( codecs.open( dataset.path, 'r', 'utf-8' ).read( 100 ) )
    except UnicodeDecodeError, e:
        dataset.is_multi_byte = False
+    # Is dataset an image?
+    image = check_image( dataset.path )
+    if image:
+        if not PIL:
+            image = None
+        # get_image_ext() returns None if not a supported Image type
+        ext = get_image_ext( dataset.path, image )
+        data_type = ext
    # Is dataset content multi-byte?
-    if dataset.is_multi_byte:
+    elif dataset.is_multi_byte:
        data_type = 'multi-byte char'
        ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
    # Is dataset content supported sniffable binary?
@@ -122,7 +135,7 @@
            elif check_bigbed( dataset.path ):
                ext = 'bigbed'
                data_type = 'bigbed'
-    else:
+    if not data_type:
        # See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
        is_gzipped, is_valid = check_gzip( dataset.path )
        if is_gzipped and not is_valid:
@@ -314,7 +327,6 @@
        if datatype.dataset_content_needs_grooming( output_path ):
            # Groom the dataset content if necessary
            datatype.groom_dataset_content( output_path )
-
def add_composite_file( dataset, registry, json_file, output_path, files_path ):
    if dataset.composite_files:
        os.mkdir( files_path )
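The net effect of this hunk: uploads are now sniffed as images before the multi-byte and binary checks, so a PNG such as 454Score.png lands in the history as an image dataset instead of being rejected, which is why the functional test above now checks for the filename. A standalone sketch of the guarded-import pattern, assuming classic-PIL or Pillow semantics for Image.open (detect_image_format is a hypothetical helper, not part of this changeset):

try:
    import Image as PIL                  # classic PIL installs a top-level Image module
except ImportError:
    try:
        from PIL import Image as PIL     # package-style import; also works with Pillow
    except ImportError:
        PIL = None                       # no imaging library available: skip image sniffing

def detect_image_format( path ):
    if PIL is None:
        return None
    try:
        return PIL.open( path ).format   # lazy open reads only the header; raises IOError if not an image
    except IOError:
        return None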
--- a/tools/extract/liftOver_wrapper.py Thu Jul 28 08:22:07 2011 -0400
+++ b/tools/extract/liftOver_wrapper.py Thu Jul 28 08:22:22 2011 -0400
@@ -34,15 +34,27 @@
    out_handle.close()
    return fname

-if len( sys.argv ) != 7:
-    stop_err( "USAGE: prog input out_file1 out_file2 input_dbkey output_dbkey minMatch" )
+if len( sys.argv ) < 9:
+    stop_err( "USAGE: prog input out_file1 out_file2 input_dbkey output_dbkey infile_type minMatch multiple <minChainT><minChainQ><minSizeQ>" )

infile = sys.argv[1]
outfile1 = sys.argv[2]
outfile2 = sys.argv[3]
in_dbkey = sys.argv[4]
mapfilepath = sys.argv[5]
-minMatch = sys.argv[6]
+infile_type = sys.argv[6]
+gff_option = ""
+if infile_type == "gff":
+    gff_option = "-gff "
+minMatch = sys.argv[7]
+multiple = int(sys.argv[8])
+multiple_option = ""
+if multiple:
+    minChainT = sys.argv[9]
+    minChainQ = sys.argv[10]
+    minSizeQ = sys.argv[11]
+    multiple_option = " -multiple -minChainT=%s -minChainQ=%s -minSizeQ=%s " %(minChainT,minChainQ,minSizeQ)
+
try:
    assert float(minMatch)
except:
@@ -55,7 +67,8 @@
    stop_err( "%s mapping is not currently available." % ( mapfilepath.split('/')[-1].split('.')[0] ) )

safe_infile = safe_bed_file(infile)
-cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null"
+cmd_line = "liftOver " + gff_option + "-minMatch=" + str(minMatch) + multiple_option + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null"
+
try:
    # have to nest try-except in try-finally to handle 2.4
    try:
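For concreteness, here is what cmd_line evaluates to when a GTF input is lifted with multiple-region mapping enabled. The values below are illustrative stand-ins for what the wrapper reads from sys.argv; the concatenation itself mirrors the code above:

# Illustrative values; in the wrapper these come from sys.argv.
gff_option = "-gff "
minMatch = "0.10"
multiple_option = " -multiple -minChainT=500 -minChainQ=500 -minSizeQ=0 "
safe_infile, mapfilepath = "input.gtf", "hg18ToPanTro2.over.chain"
outfile1, outfile2 = "mapped.out", "unmapped.out"
cmd_line = "liftOver " + gff_option + "-minMatch=" + str(minMatch) + multiple_option + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null"
# cmd_line is now (note the doubled space left by multiple_option's trailing blank):
# liftOver -gff -minMatch=0.10 -multiple -minChainT=500 -minChainQ=500 -minSizeQ=0  input.gtf hg18ToPanTro2.over.chain mapped.out unmapped.out > /dev/null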
--- a/tools/extract/liftOver_wrapper.xml Thu Jul 28 08:22:07 2011 -0400
+++ b/tools/extract/liftOver_wrapper.xml Thu Jul 28 08:22:22 2011 -0400
@@ -1,8 +1,21 @@
-<tool id="liftOver1" name="Convert genome coordinates" version="1.0.2">
+<tool id="liftOver1" name="Convert genome coordinates" version="1.0.3">
  <description> between assemblies and genomes</description>
-  <command interpreter="python">liftOver_wrapper.py $input "$out_file1" "$out_file2" $dbkey $to_dbkey $minMatch</command>
+  <command interpreter="python">
+    liftOver_wrapper.py
+    $input
+    "$out_file1"
+    "$out_file2"
+    $dbkey
+    $to_dbkey
+    #if isinstance( $input.datatype, $__app__.datatypes_registry.get_datatype_by_extension('gff').__class__) or isinstance( $input.datatype, $__app__.datatypes_registry.get_datatype_by_extension('gtf').__class__):
+    "gff"
+    #else:
+    "interval"
+    #end if
+    $minMatch ${multiple.choice} ${multiple.minChainT} ${multiple.minChainQ} ${multiple.minSizeQ}
+  </command>
  <inputs>
-    <param format="interval" name="input" type="data" label="Convert coordinates of">
+    <param format="interval,gff,gtf" name="input" type="data" label="Convert coordinates of">
      <validator type="unspecified_build" />
      <validator type="dataset_metadata_in_file" filename="liftOver.loc" metadata_name="dbkey" metadata_column="0" message="Liftover mappings are currently not available for the specified build." />
    </param>
@@ -14,7 +27,23 @@
        <filter type="data_meta" ref="input" key="dbkey" column="0" />
      </options>
    </param>
-    <param name="minMatch" size="10" type="float" value="0.95" label="Minimum ratio of bases that must remap" />
+    <param name="minMatch" size="10" type="float" value="0.95" label="Minimum ratio of bases that must remap" help="Recommended values: same species = 0.95, different species = 0.10" />
+    <conditional name="multiple">
+      <param name="choice" type="select" label="Allow multiple output regions?" help="Recommended values: same species = No, different species = Yes">
+        <option value="0" selected="true">No</option>
+        <option value="1">Yes</option>
+      </param>
+      <when value="0">
+        <param name="minSizeQ" type="hidden" value="0" />
+        <param name="minChainQ" type="hidden" value="0" />
+        <param name="minChainT" type="hidden" value="0" />
+      </when>
+      <when value="1">
+        <param name="minSizeQ" size="10" type="integer" value="0" label="Minimum matching region size in query" help="Recommended value: set to >= 300 bases for complete transcripts"/>
+        <param name="minChainQ" size="10" type="integer" value="500" label="Minimum chain size in query"/>
+        <param name="minChainT" size="10" type="integer" value="500" label="Minimum chain size in target"/>
+      </when>
+    </conditional>
  </inputs>
  <outputs>
    <data format="input" name="out_file1" label="${tool.name} on ${on_string} [ MAPPED COORDINATES ]">
@@ -37,9 +66,40 @@
      <param name="input" value="5.bed" dbkey="hg18" ftype="bed" />
      <param name="to_dbkey" value="panTro2" />
      <param name="minMatch" value="0.95" />
+      <param name="choice" value="0" />
      <output name="out_file1" file="5_liftover_mapped.bed"/>
      <output name="out_file2" file="5_liftover_unmapped.bed"/>
    </test>
+    <test>
+      <param name="input" value="5.bed" dbkey="hg18" ftype="bed" />
+      <param name="to_dbkey" value="panTro2" />
+      <param name="minMatch" value="0.10" />
+      <param name="choice" value="1" />
+      <param name="minSizeQ" value="0" />
+      <param name="minChainQ" value="500" />
+      <param name="minChainT" value="500" />
+      <output name="out_file1" file="5_mult_liftover_mapped.bed"/>
+      <output name="out_file2" file="5_mult_liftover_unmapped.bed"/>
+    </test>
+    <test>
+      <param name="input" value="cuffcompare_in1.gtf" dbkey="hg18" ftype="gtf" />
+      <param name="to_dbkey" value="panTro2" />
+      <param name="minMatch" value="0.95" />
+      <param name="choice" value="0" />
+      <output name="out_file1" file="cuffcompare_in1_liftover_mapped.bed"/>
+      <output name="out_file2" file="cuffcompare_in1_liftover_unmapped.bed"/>
+    </test>
+    <test>
+      <param name="input" value="cuffcompare_in1.gtf" dbkey="hg18" ftype="gtf" />
+      <param name="to_dbkey" value="panTro2" />
+      <param name="minMatch" value="0.10" />
+      <param name="choice" value="1" />
+      <param name="minSizeQ" value="0" />
+      <param name="minChainQ" value="500" />
+      <param name="minChainT" value="500" />
+      <output name="out_file1" file="cuffcompare_in1_mult_liftover_mapped.bed"/>
+      <output name="out_file2" file="cuffcompare_in1_mult_liftover_unmapped.bed"/>
+    </test>
  </tests>
  <help>
.. class:: warningmark
@@ -48,7 +108,7 @@
.. class:: warningmark
-This tool will only work on interval datasets with chromosome in column 1,
+This tool can work with interval, GFF, and GTF datasets. It requires the interval datasets to have chromosome in column 1,
start co-ordinate in column 2 and end co-ordinate in column 3. BED comments
and track and browser lines will be ignored, but if other non-interval lines
are present the tool will return empty output datasets.

@@ -59,7 +119,11 @@
**What it does**
-This tool converts coordinates and annotations between assemblies and genomes. It produces 2 files, one containing all the mapped coordinates and the other containing the unmapped coordinates, if any.
+This tool is based on the LiftOver utility and Chain track from `the UC Santa Cruz Genome Browser`__.
+
+It converts coordinates and annotations between assemblies and genomes. It produces 2 files, one containing all the mapped coordinates and the other containing the unmapped coordinates, if any.
+
+.. __: http://genome.ucsc.edu/
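A note on the <command> template in the XML above: because the conditional's inactive branch supplies hidden minSizeQ/minChainQ/minChainT params with value "0", the wrapper always receives the same number of positional arguments whichever branch is active. A hypothetical sketch of the two argv shapes (file names and the chain path are illustrative; $to_dbkey resolves to a chain file via liftOver.loc):

# Multiple-output regions disabled: choice and the three chain params all emit "0".
argv_single = [ "liftOver_wrapper.py", "input.bed", "mapped.out", "unmapped.out",
                "hg18", "/path/to/hg18ToPanTro2.over.chain",
                "interval", "0.95", "0", "0", "0", "0" ]
# Multiple-output regions enabled for a GTF input: minMatch, choice, minChainT, minChainQ, minSizeQ.
argv_multiple = [ "liftOver_wrapper.py", "input.gtf", "mapped.out", "unmapped.out",
                  "hg18", "/path/to/hg18ToPanTro2.over.chain",
                  "gff", "0.10", "1", "500", "500", "0" ]
assert len( argv_single ) == len( argv_multiple ) >= 9   # satisfies the wrapper's usage check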
-----
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
galaxy-commits@lists.galaxyproject.org