galaxy-dev

15 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/032478337b82
changeset: 2683:032478337b82
user: jeremy goecks <jeremy.goecks@emory.edu>
date: Fri Sep 11 14:57:02 2009 -0400
description:
(1) Unicode support for tagging and (2) added a tagging_common.mako file to support creation and standardization of tagging elements across all pages.
9 file(s) affected in this change:
lib/galaxy/web/controllers/history.py
lib/galaxy/web/controllers/tag.py
static/scripts/autocomplete_tagging.js
static/scripts/packed/autocomplete_tagging.js
templates/dataset/edit_attributes.mako
templates/history/grid.mako
templates/root/history.mako
templates/tagging_common.mako
tool_conf.xml.main
diffs (781 lines):
diff -r ed4cbaf23c88 -r 032478337b82 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Fri Sep 11 09:00:36 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Fri Sep 11 14:57:02 2009 -0400
@@ -38,6 +38,17 @@
if item.users_shared_with or item.importable:
return dict( operation="sharing" )
return None
+ class TagsColumn( grids.GridColumn ):
+ def __init__(self, col_name):
+ grids.GridColumn.__init__(self, col_name)
+ self.tag_elt_id_gen = 0
+
+ def get_value( self, trans, grid, history ):
+ self.tag_elt_id_gen += 1
+ return trans.fill_template( "/tagging_common.mako", trans=trans,
+ tagged_item=history,
+ elt_id="tagging-elt" + str(self.tag_elt_id_gen) )
+
# Grid definition
title = "Stored histories"
model_class = model.History
@@ -48,6 +59,7 @@
link=( lambda item: iff( item.deleted, None, dict( operation="switch", id=item.id ) ) ),
attach_popup=True ),
DatasetsByStateColumn( "Datasets (by state)", ncells=4 ),
+ #TagsColumn( "Tags" ),
StatusColumn( "Status", attach_popup=False ),
grids.GridColumn( "Created", key="create_time", format=time_ago ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
diff -r ed4cbaf23c88 -r 032478337b82 lib/galaxy/web/controllers/tag.py
--- a/lib/galaxy/web/controllers/tag.py Fri Sep 11 09:00:36 2009 -0400
+++ b/lib/galaxy/web/controllers/tag.py Fri Sep 11 14:57:02 2009 -0400
@@ -4,7 +4,6 @@
from galaxy.model import History, HistoryTagAssociation, Dataset, DatasetTagAssociation, \
HistoryDatasetAssociation, HistoryDatasetAssociationTagAssociation, Page, PageTagAssociation
-
from galaxy.web.base.controller import *
from galaxy.tags.tag_handler import *
from sqlalchemy.sql.expression import func, and_
@@ -15,65 +14,68 @@
def __init__(self, app):
BaseController.__init__(self, app)
- # Set up dict for mapping from short-hand to full item class.
- self.shorthand_to_item_class_dict = dict()
- self.shorthand_to_item_class_dict["history"] = History
- self.shorthand_to_item_class_dict["hda"] = HistoryDatasetAssociation
+ # Keep a list of taggable classes.
+ self.taggable_classes = dict()
+ self.taggable_classes[History.__name__] = History
+ self.taggable_classes[HistoryDatasetAssociation.__name__] = HistoryDatasetAssociation
+ self.taggable_classes[Page.__name__] = Page
- # Set up tag handler to recognize the following items: History, HistoryDatasetAssociation, ...
+ # Set up tag handler to recognize the following items: History, HistoryDatasetAssociation, Page, ...
self.tag_handler = TagHandler()
self.tag_handler.add_tag_assoc_class(History, HistoryTagAssociation)
self.tag_handler.add_tag_assoc_class(HistoryDatasetAssociation, HistoryDatasetAssociationTagAssociation)
-
+ self.tag_handler.add_tag_assoc_class(Page, PageTagAssociation)
+
@web.expose
- def add_tag_async( self, trans, id=None, item_type=None, new_tag=None ):
+ @web.require_login( "Add tag to an item." )
+ def add_tag_async( self, trans, id=None, item_class=None, new_tag=None ):
""" Add tag to an item. """
- item = self._get_item(trans, item_type, trans.security.decode_id(id))
+ item = self._get_item(trans, item_class, trans.security.decode_id(id))
self._do_security_check(trans, item)
- self.tag_handler.apply_item_tags(trans.sa_session, item, new_tag)
+ self.tag_handler.apply_item_tags( trans.sa_session, item, unicode(new_tag).encode('utf-8') )
trans.sa_session.flush()
@web.expose
- def remove_tag_async( self, trans, id=None, item_type=None, tag_name=None ):
+ @web.require_login( "Remove tag from an item." )
+ def remove_tag_async( self, trans, id=None, item_class=None, tag_name=None ):
""" Remove tag from an item. """
- item = self._get_item(trans, item_type, trans.security.decode_id(id))
+ item = self._get_item(trans, item_class, trans.security.decode_id(id))
self._do_security_check(trans, item)
- self.tag_handler.remove_item_tag(item, tag_name)
+ self.tag_handler.remove_item_tag( item, unicode(tag_name).encode('utf-8') )
+ #print tag_name
+ #print unicode(tag_name)
trans.sa_session.flush()
# Retag an item. All previous tags are deleted and new tags are applied.
@web.expose
- def retag_async( self, trans, id=None, item_type=None, new_tags=None ):
+ @web.require_login( "Apply a new set of tags to an item; previous tags are deleted." )
+ def retag_async( self, trans, id=None, item_class=None, new_tags=None ):
""" Apply a new set of tags to an item; previous tags are deleted. """
- item = self._get_item(trans, item_type, trans.security.decode_id(id))
+ item = self._get_item(trans, item_class, trans.security.decode_id(id))
self._do_security_check(trans, item)
tag_handler.delete_item_tags(item)
- self.tag_handler.apply_item_tags(trans.sa_session, item, new_tag)
+ self.tag_handler.apply_item_tags( trans.sa_session, item, unicode(new_tags).encode('utf-8') )
trans.sa_session.flush()
-
- tag_handler.delete_item_tags(history)
- tag_handler.apply_item_tags(trans.sa_session, history, new_tags)
- # Flush to complete changes.
- trans.sa_session.flush()
-
+
@web.expose
@web.require_login( "get autocomplete data for an item's tags" )
- def tag_autocomplete_data(self, trans, id=None, item_type=None, q=None, limit=None, timestamp=None):
+ def tag_autocomplete_data(self, trans, id=None, item_class=None, q=None, limit=None, timestamp=None):
""" Get autocomplete data for an item's tags. """
#
# Get item, do security check, and get autocomplete data.
#
- item = self._get_item(trans, item_type, trans.security.decode_id(id))
+ item = self._get_item(trans, item_class, trans.security.decode_id(id))
self._do_security_check(trans, item)
+ q = unicode(q).encode('utf-8')
if q.find(":") == -1:
return self._get_tag_autocomplete_names(trans, item, q, limit, timestamp)
else:
@@ -184,9 +186,9 @@
# Use the user_id associated with the HDA's history.
return History.table.c.user_id
- def _get_item(self, trans, item_type, id):
+ def _get_item(self, trans, item_class_name, id):
""" Get an item based on type and id. """
- item_class = self.shorthand_to_item_class_dict[item_type]
+ item_class = self.taggable_classes[item_class_name]
item = trans.sa_session.query(item_class).filter("id=" + str(id))[0]
return item;
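The repeated unicode(...).encode('utf-8') expressions above are the Python 2 idiom behind this commit's Unicode support: request parameters may arrive as either str or unicode, and the tag handler wants consistent UTF-8 byte strings. A hedged sketch of the same normalization, factored into a hypothetical helper (the changeset itself inlines the expression at each call site):

    # Python 2 sketch: normalize a request parameter to UTF-8 bytes.
    # Note: a non-ASCII byte string would need an explicit .decode()
    # first; calling unicode() on such a str raises UnicodeDecodeError.
    def to_utf8( value ):
        if value is None:
            return None
        return unicode( value ).encode( 'utf-8' )

    # Usage, mirroring add_tag_async above:
    # self.tag_handler.apply_item_tags( trans.sa_session, item, to_utf8( new_tag ) )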
diff -r ed4cbaf23c88 -r 032478337b82 static/scripts/autocomplete_tagging.js
--- a/static/scripts/autocomplete_tagging.js Fri Sep 11 09:00:36 2009 -0400
+++ b/static/scripts/autocomplete_tagging.js Fri Sep 11 14:57:02 2009 -0400
@@ -187,13 +187,14 @@
// Tag button is image's parent.
var tag_button = $(this).parent();
- // Get tag name.
+ // Get tag name, value.
var tag_name_elt = tag_button.find(".tag-name").eq(0);
var tag_str = tag_name_elt.text();
- var tag_name = get_tag_name_and_value(tag_str)[0];
+ var tag_name_and_value = get_tag_name_and_value(tag_str);
+ var tag_name = tag_name_and_value[0];
+ var tag_value = tag_name_and_value[1];
- // TODO: should remove succeed if tag is not already applied to
- // history?
+ var prev_button = tag_button.prev();
tag_button.remove();
// Remove tag from local list for consistency.
@@ -209,12 +210,28 @@
data: { tag_name: tag_name },
error: function()
{
- // Failed.
- alert( "Remove tag failed" );
+ // Failed. Roll back changes and show alert.
+ settings.tags[tag_name] = tag_value;
+ if (prev_button.hasClass("tag-button"))
+ prev_button.after(tag_button);
+ else
+ tag_area.prepend(tag_button);
+ var new_text = settings.get_toggle_link_text_fn(settings.tags);
+ alert( "Remove tag failed" );
+
+ toggle_link.text(new_text);
+
+ // TODO: no idea why it's necessary to set this up again.
+ delete_img.mouseenter( function ()
+ {
+ $(this).attr("src", settings.delete_tag_img_rollover);
+ });
+ delete_img.mouseleave( function ()
+ {
+ $(this).attr("src", settings.delete_tag_img);
+ });
},
- success: function()
- {
- }
+ success: function() {}
});
return true;
@@ -323,8 +340,9 @@
data: { new_tag: new_value },
error: function()
{
- // Remove tag and show alert.
+ // Failed. Roll back changes and show alert.
new_tag_button.remove();
+ delete settings.tags[tag_name_and_value[0]];
var new_text = settings.get_toggle_link_text_fn(settings.tags);
toggle_link.text(new_text);
alert( "Add tag failed" );
diff -r ed4cbaf23c88 -r 032478337b82 static/scripts/packed/autocomplete_tagging.js
--- a/static/scripts/packed/autocomplete_tagging.js Fri Sep 11 09:00:36 2009 -0400
+++ b/static/scripts/packed/autocomplete_tagging.js Fri Sep 11 14:57:02 2009 -0400
@@ -1,1 +1,1 @@
-var ac_tag_area_id_gen=1;jQuery.fn.autocomplete_tagging=function(c){var e={get_toggle_link_text_fn:function(u){var w="";var v=o(u);if(v!=0){w=v+(v!=0?" Tags":" Tag")}else{w="Add tags"}return w},tag_click_fn:function(u){},input_size:20,in_form:false,tags:{},use_toggle_link:true,item_id:"",add_tag_img:"",add_tag_img_rollover:"",delete_tag_img:"",ajax_autocomplete_tag_url:"",ajax_retag_url:"",ajax_delete_tag_url:"",ajax_add_tag_url:""};var p=jQuery.extend(e,c);var k="tag-area-"+(ac_tag_area_id_gen)++;var m=$("<div></div>").attr("id",k).addClass("tag-area");this.append(m);var o=function(u){if(u.length){return u.length}var v=0;for(element in u){v++}return v};var b=function(){var u=p.get_toggle_link_text_fn(p.tags);var v=$("<a href='/history/tags'>"+u+"</a>").addClass("toggle-link");v.click(function(){var w=(m.css("display")=="none");var x;if(w){x=function(){var y=o(p.tags);if(y==0){m.click()}}}else{x=function(){m.blur()}}m.slideToggle("fast",x);return false});return v};var s=b();
if(p.use_toggle_link){this.prepend(s)}var t=function(u){var v=new Array();for(key in u){v[v.length]=key+"-->"+u[key]}return"{"+v.join(",")+"}"};var a=function(v,u){return v+((u!=""&&u)?":"+u:"")};var h=function(u){return u.split(":")};var i=function(u){var v=$("<img src='"+p.add_tag_img+"' rollover='"+p.add_tag_img_rollover+"'/>").addClass("add-tag-button");v.click(function(){$(this).hide();m.click();return false});return v};var j=function(u){var v=$("<img src='"+p.delete_tag_img+"'/>").addClass("delete-tag-img");v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)});v.click(function(){var B=$(this).parent();var A=B.find(".tag-name").eq(0);var z=A.text();var C=h(z)[0];B.remove();delete p.tags[C];var y=p.get_toggle_link_text_fn(p.tags);s.text(y);$.ajax({url:p.ajax_delete_tag_url,data:{tag_name:C},error:function(){alert("Remove tag failed")},success:function(){}});return true});var w=$("<span>"+u+"
</span>").addClass("tag-name");w.click(function(){p.tag_click_fn(u);return true});var x=$("<span></span>").addClass("tag-button");x.append(w);x.append(v);return x};var d=function(v){var u;if(p.in_form){u=$("<textarea id='history-tag-input' rows='1' cols='"+p.input_size+"' value='"+v+"'></textarea>")}else{u=$("<input id='history-tag-input' type='text' size='"+p.input_size+"' value='"+v+"'></input>")}u.keyup(function(D){if(D.keyCode==27){$(this).trigger("blur")}else{if((D.keyCode==13)||(D.keyCode==188)||(D.keyCode==32)){new_value=this.value;if(return_key_pressed_for_autocomplete==true){return_key_pressed_for_autocomplete=false;return false}if(new_value.indexOf(": ",new_value.length-2)!=-1){this.value=new_value.substring(0,new_value.length-1);return false}if((D.keyCode==188)||(D.keyCode==32)){new_value=new_value.substring(0,new_value.length-1)}new_value=new_value.replace(/^\s+|\s+$/g,"");if(new_value.length<3){return false}this.value="";var A=j(new_value);var z=m.children(".tag
-button");if(z.length!=0){var E=z.slice(z.length-1);E.after(A)}else{m.prepend(A)}var y=new_value.split(":");p.tags[y[0]]=y[1];var B=p.get_toggle_link_text_fn(p.tags);s.text(B);var C=$(this);$.ajax({url:p.ajax_add_tag_url,data:{new_tag:new_value},error:function(){A.remove();var F=p.get_toggle_link_text_fn(p.tags);s.text(F);alert("Add tag failed")},success:function(){C.flushCache()}});return false}}});var w=function(A,z,y,C,B){tag_name_and_value=C.split(":");return(tag_name_and_value.length==1?tag_name_and_value[0]:tag_name_and_value[1])};var x={selectFirst:false,formatItem:w,autoFill:false,highlight:false};u.autocomplete(p.ajax_autocomplete_tag_url,x);u.addClass("tag-input");return u};for(tag_name in p.tags){var q=p.tags[tag_name];var l=a(tag_name,q);var g=j(l,s,p.tags);m.append(g)}var n=d("");var f=i(n);m.blur(function(u){r=o(p.tags);if(r!=0){f.show();n.hide();m.removeClass("active-tag-area")}else{}});m.append(f);m.append(n);n.hide();m.click(function(w){var v=$(this).hasClas
s("active-tag-area");if($(w.target).hasClass("delete-tag-img")&&!v){return false}if($(w.target).hasClass("tag-name")&&!v){return false}$(this).addClass("active-tag-area");f.hide();n.show();n.focus();var u=function(y){var x=m.attr("id");if(($(y.target).attr("id")!=x)&&($(y.target).parents().filter(x).length==0)){m.blur();$(document).unbind("click",u)}};$(window).click(u);return false});if(p.use_toggle_link){m.hide()}else{var r=o(p.tags);if(r==0){f.hide();n.show()}}return this.addClass("tag-element")};
\ No newline at end of file
+var ac_tag_area_id_gen=1;jQuery.fn.autocomplete_tagging=function(c){var e={get_toggle_link_text_fn:function(u){var w="";var v=o(u);if(v!=0){w=v+(v!=0?" Tags":" Tag")}else{w="Add tags"}return w},tag_click_fn:function(u){},input_size:20,in_form:false,tags:{},use_toggle_link:true,item_id:"",add_tag_img:"",add_tag_img_rollover:"",delete_tag_img:"",ajax_autocomplete_tag_url:"",ajax_retag_url:"",ajax_delete_tag_url:"",ajax_add_tag_url:""};var p=jQuery.extend(e,c);var k="tag-area-"+(ac_tag_area_id_gen)++;var m=$("<div></div>").attr("id",k).addClass("tag-area");this.append(m);var o=function(u){if(u.length){return u.length}var v=0;for(element in u){v++}return v};var b=function(){var u=p.get_toggle_link_text_fn(p.tags);var v=$("<a href='/history/tags'>"+u+"</a>").addClass("toggle-link");v.click(function(){var w=(m.css("display")=="none");var x;if(w){x=function(){var y=o(p.tags);if(y==0){m.click()}}}else{x=function(){m.blur()}}m.slideToggle("fast",x);return false});return v};var s=b();
if(p.use_toggle_link){this.prepend(s)}var t=function(u){var v=new Array();for(key in u){v[v.length]=key+"-->"+u[key]}return"{"+v.join(",")+"}"};var a=function(v,u){return v+((u!=""&&u)?":"+u:"")};var h=function(u){return u.split(":")};var i=function(u){var v=$("<img src='"+p.add_tag_img+"' rollover='"+p.add_tag_img_rollover+"'/>").addClass("add-tag-button");v.click(function(){$(this).hide();m.click();return false});return v};var j=function(u){var v=$("<img src='"+p.delete_tag_img+"'/>").addClass("delete-tag-img");v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)});v.click(function(){var D=$(this).parent();var C=D.find(".tag-name").eq(0);var B=C.text();var z=h(B);var F=z[0];var y=z[1];var E=D.prev();D.remove();delete p.tags[F];var A=p.get_toggle_link_text_fn(p.tags);s.text(A);$.ajax({url:p.ajax_delete_tag_url,data:{tag_name:F},error:function(){p.tags[F]=y;if(E.hasClass("tag-button")){E.after(D)
}else{m.prepend(D)}var G=p.get_toggle_link_text_fn(p.tags);alert("Remove tag failed");s.text(G);v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)})},success:function(){}});return true});var w=$("<span>"+u+"</span>").addClass("tag-name");w.click(function(){p.tag_click_fn(u);return true});var x=$("<span></span>").addClass("tag-button");x.append(w);x.append(v);return x};var d=function(v){var u;if(p.in_form){u=$("<textarea id='history-tag-input' rows='1' cols='"+p.input_size+"' value='"+v+"'></textarea>")}else{u=$("<input id='history-tag-input' type='text' size='"+p.input_size+"' value='"+v+"'></input>")}u.keyup(function(D){if(D.keyCode==27){$(this).trigger("blur")}else{if((D.keyCode==13)||(D.keyCode==188)||(D.keyCode==32)){new_value=this.value;if(return_key_pressed_for_autocomplete==true){return_key_pressed_for_autocomplete=false;return false}if(new_value.indexOf(": ",new_value.length-2)!=-1){thi
s.value=new_value.substring(0,new_value.length-1);return false}if((D.keyCode==188)||(D.keyCode==32)){new_value=new_value.substring(0,new_value.length-1)}new_value=new_value.replace(/^\s+|\s+$/g,"");if(new_value.length<3){return false}this.value="";var A=j(new_value);var z=m.children(".tag-button");if(z.length!=0){var E=z.slice(z.length-1);E.after(A)}else{m.prepend(A)}var y=new_value.split(":");p.tags[y[0]]=y[1];var B=p.get_toggle_link_text_fn(p.tags);s.text(B);var C=$(this);$.ajax({url:p.ajax_add_tag_url,data:{new_tag:new_value},error:function(){A.remove();delete p.tags[y[0]];var F=p.get_toggle_link_text_fn(p.tags);s.text(F);alert("Add tag failed")},success:function(){C.flushCache()}});return false}}});var w=function(A,z,y,C,B){tag_name_and_value=C.split(":");return(tag_name_and_value.length==1?tag_name_and_value[0]:tag_name_and_value[1])};var x={selectFirst:false,formatItem:w,autoFill:false,highlight:false};u.autocomplete(p.ajax_autocomplete_tag_url,x);u.addClass("tag-input
");return u};for(tag_name in p.tags){var q=p.tags[tag_name];var l=a(tag_name,q);var g=j(l,s,p.tags);m.append(g)}var n=d("");var f=i(n);m.blur(function(u){r=o(p.tags);if(r!=0){f.show();n.hide();m.removeClass("active-tag-area")}else{}});m.append(f);m.append(n);n.hide();m.click(function(w){var v=$(this).hasClass("active-tag-area");if($(w.target).hasClass("delete-tag-img")&&!v){return false}if($(w.target).hasClass("tag-name")&&!v){return false}$(this).addClass("active-tag-area");f.hide();n.show();n.focus();var u=function(y){var x=m.attr("id");if(($(y.target).attr("id")!=x)&&($(y.target).parents().filter(x).length==0)){m.blur();$(document).unbind("click",u)}};$(window).click(u);return false});if(p.use_toggle_link){m.hide()}else{var r=o(p.tags);if(r==0){f.hide();n.show()}}return this.addClass("tag-element")};
\ No newline at end of file
diff -r ed4cbaf23c88 -r 032478337b82 templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Fri Sep 11 09:00:36 2009 -0400
+++ b/templates/dataset/edit_attributes.mako Fri Sep 11 14:57:02 2009 -0400
@@ -9,43 +9,8 @@
<% user, user_roles = trans.get_user_and_roles() %>
<%def name="javascripts()">
- ## <!--[if lt IE 7]>
- ## <script type='text/javascript' src="/static/scripts/IE7.js"> </script>
- ## <![endif]-->
- ${h.js( "jquery", "galaxy.base", "jquery.autocomplete", "autocomplete_tagging" )}
- <script type="text/javascript">
- $( document ).ready( function() {
- // Set up autocomplete tagger.
-<%
- ## Build string of tag name, values.
- tag_names_and_values = list()
- for tag in data.tags:
- tag_name = tag.user_tname
- tag_value = ""
- if tag.value is not None:
- tag_value = tag.user_value
- tag_names_and_values.append("\"" + tag_name + "\" : \"" + tag_value + "\"")
-%>
- var options = {
- tags : {${", ".join(tag_names_and_values)}},
- tag_click_fn: function(tag) { /* Do nothing. */ },
- use_toggle_link: false,
- input_size: 30,
- in_form: true,
- <% encoded_data_id = trans.security.encode_id(data.id) %>
- ajax_autocomplete_tag_url: "${h.url_for( controller='tag', action='tag_autocomplete_data', id=encoded_data_id, item_type="hda" )}",
- ajax_add_tag_url: "${h.url_for( controller='tag', action='add_tag_async', id=encoded_data_id, item_type="hda" )}",
- ajax_delete_tag_url: "${h.url_for( controller='tag', action='remove_tag_async', id=encoded_data_id, item_type="hda" )}",
- delete_tag_img: "${h.url_for('/static/images/delete_tag_icon_gray.png')}",
- delete_tag_img_rollover: "${h.url_for('/static/images/delete_tag_icon_white.png')}",
- add_tag_img: "${h.url_for('/static/images/add_icon.png')}",
- add_tag_img_rollover: "${h.url_for('/static/images/add_icon_dark.png')}",
- };
-% if trans.get_user() is not None:
- $("#dataset-tag-area").autocomplete_tagging(options);
-% endif
-});
- </script>
+ ${parent.javascripts()}
+ ${h.js( "jquery.autocomplete", "autocomplete_tagging" )}
</%def>
<%def name="datatype( dataset, datatypes )">
@@ -84,16 +49,18 @@
<div style="clear: both"></div>
</div>
%if trans.get_user() is not None:
- <div class="form-row">
- <label>
- Tags:
- </label>
- <div id="dataset-tag-area"
+ <%namespace file="../tagging_common.mako" import="render_tagging_element" />
+ <div class="form-row">
+ <label>
+ Tags:
+ </label>
+ <div id="dataset-tag-area"
style="float: left; margin-left: 1px; width: 295px; margin-right: 10px; border-style: inset; border-color: #ddd; border-width: 1px">
- </div>
- <div style="clear: both"></div>
- </div>
- %endif
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ ${render_tagging_element(data, "dataset-tag-area", use_toggle_link="false", in_form="true", input_size="30")}
+ %endif
%for name, spec in data.metadata.spec.items():
%if spec.visible:
<div class="form-row">
diff -r ed4cbaf23c88 -r 032478337b82 templates/history/grid.mako
--- a/templates/history/grid.mako Fri Sep 11 09:00:36 2009 -0400
+++ b/templates/history/grid.mako Fri Sep 11 14:57:02 2009 -0400
@@ -10,6 +10,7 @@
<%def name="javascripts()">
${parent.javascripts()}
+ ${h.js("jquery.autocomplete", "autocomplete_tagging" )}
<script type="text/javascript">
## TODO: generalize and move into galaxy.base.js
$(document).ready(function() {
@@ -58,7 +59,7 @@
</%def>
<%def name="stylesheets()">
- <link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" />
+ ${h.css( "base", "autocomplete_tagging" )}
<style>
## Not generic to all grids -- move to base?
.count-box {
diff -r ed4cbaf23c88 -r 032478337b82 templates/root/history.mako
--- a/templates/root/history.mako Fri Sep 11 09:00:36 2009 -0400
+++ b/templates/root/history.mako Fri Sep 11 14:57:02 2009 -0400
@@ -77,83 +77,6 @@
<% updateable = [data for data in reversed( datasets ) if data.visible and data.state not in [ "deleted", "empty", "error", "ok" ]] %>
${ ",".join( map(lambda data: "\"%s\" : \"%s\"" % (data.id, data.state), updateable) ) }
});
-
- // Set up autocomplete tagger.
-<%
- ## Build string of tag name, values.
- tag_names_and_values = list()
- for tag in history.tags:
- tag_name = tag.user_tname
- tag_value = ""
- if tag.value is not None:
- tag_value = tag.user_value
- tag_names_and_values.append("\"" + tag_name + "\" : \"" + tag_value + "\"")
-%>
- // Returns the number of keys (elements) in an array/dictionary.
- var array_length = function(an_array)
- {
- if (an_array.length)
- return an_array.length;
-
- var count = 0;
- for (element in an_array)
- count++;
- return count;
- };
-
- // Function get text to display on the toggle link.
- var get_toggle_link_text = function(tags)
- {
- var text = "";
- var num_tags = array_length(tags);
- if (num_tags != 0) {
- text = num_tags + (num_tags != 1 ? " Tags" : " Tag");
- /*
- // Show first N tags; hide the rest.
- var max_to_show = 1;
-
- // Build tag string.
- var tag_strs = new Array();
- var count = 0;
- for (tag_name in tags)
- {
- tag_value = tags[tag_name];
- tag_strs[tag_strs.length] = build_tag_str(tag_name, tag_value);
- if (++count == max_to_show)
- break;
- }
- tag_str = tag_strs.join(", ");
-
- // Finalize text.
- var num_tags_hiding = num_tags - max_to_show;
- text = "Tags: " + tag_str +
- (num_tags_hiding != 0 ? " and " + num_tags_hiding + " more" : "");
- */
- } else {
- // No tags.
- text = "Add tags to this history";
- }
- return text;
- };
-
- var options = {
- tags : {${", ".join(tag_names_and_values)}},
- get_toggle_link_text_fn: get_toggle_link_text,
- input_size: 15,
- tag_click_fn: function(tag) { /* Do nothing. */ },
- <% encoded_history_id = trans.security.encode_id(history.id) %>
- ajax_autocomplete_tag_url: "${h.url_for( controller='tag', action='tag_autocomplete_data', id=encoded_history_id, item_type="history" )}",
- ajax_add_tag_url: "${h.url_for( controller='tag', action='add_tag_async', id=encoded_history_id, item_type="history" )}",
- ajax_delete_tag_url: "${h.url_for( controller='tag', action='remove_tag_async', id=encoded_history_id, item_type="history" )}",
- delete_tag_img: "${h.url_for('/static/images/delete_tag_icon_gray.png')}",
- delete_tag_img_rollover: "${h.url_for('/static/images/delete_tag_icon_white.png')}",
- add_tag_img: "${h.url_for('/static/images/add_icon.png')}",
- add_tag_img_rollover: "${h.url_for('/static/images/add_icon_dark.png')}",
- };
-% if trans.get_user() is not None:
- $("#history-tag-area").autocomplete_tagging(options);
-% endif
-
});
// Functionized so AJAX'd datasets can call them
function initShowHide() {
@@ -361,7 +284,13 @@
<div id="history-tag-area" style="margin-bottom: 1em">
</div>
+<%namespace file="../tagging_common.mako" import="render_tagging_element" />
<%namespace file="history_common.mako" import="render_dataset" />
+
+%if trans.get_user() is not None:
+ <div id='history-tag-area' class="tag-element"></div>
+ ${render_tagging_element(history, "history-tag-area")}
+%endif
%if not datasets:
diff -r ed4cbaf23c88 -r 032478337b82 templates/tagging_common.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/tagging_common.mako Fri Sep 11 14:57:02 2009 -0400
@@ -0,0 +1,92 @@
+## Render the tags 'tags' as an autocomplete element.
+<%def name="render_tagging_element(tagged_item, elt_id, use_toggle_link='true', in_form='false', input_size='15')">
+ <script type="text/javascript">
+
+ //
+ // Set up autocomplete tagger.
+ //
+ <%
+ ## Build string of tag name, values.
+ tag_names_and_values = list()
+ for tag in tagged_item.tags:
+ tag_name = tag.user_tname
+ tag_value = ""
+ if tag.value is not None:
+ tag_value = tag.user_value
+ tag_names_and_values.append( ("\"" + tag_name + "\" : \"" + tag_value + "\"") )
+ %>
+ //
+ // Returns the number of keys (elements) in an array/dictionary.
+ //
+ var array_length = function(an_array)
+ {
+ if (an_array.length)
+ return an_array.length;
+
+ var count = 0;
+ for (element in an_array)
+ count++;
+ return count;
+ };
+
+ //
+ // Function get text to display on the toggle link.
+ //
+ var get_toggle_link_text = function(tags)
+ {
+ var text = "";
+ var num_tags = array_length(tags);
+ if (num_tags != 0)
+ {
+ text = num_tags + (num_tags != 1 ? " Tags" : " Tag");
+ /*
+ // Show first N tags; hide the rest.
+ var max_to_show = 1;
+
+ // Build tag string.
+ var tag_strs = new Array();
+ var count = 0;
+ for (tag_name in tags)
+ {
+ tag_value = tags[tag_name];
+ tag_strs[tag_strs.length] = build_tag_str(tag_name, tag_value);
+ if (++count == max_to_show)
+ break;
+ }
+ tag_str = tag_strs.join(", ");
+
+ // Finalize text.
+ var num_tags_hiding = num_tags - max_to_show;
+ text = "Tags: " + tag_str +
+ (num_tags_hiding != 0 ? " and " + num_tags_hiding + " more" : "");
+ */
+ }
+ else
+ {
+ // No tags.
+ text = "Add tags to history";
+ }
+ return text;
+ };
+
+ var options =
+ {
+ tags : {${unicode(", ".join(tag_names_and_values), 'utf-8')}},
+ get_toggle_link_text_fn: get_toggle_link_text,
+ tag_click_fn: function(tag) { /* Do nothing. */ },
+ <% tagged_item_id = trans.security.encode_id(tagged_item.id) %>
+ ajax_autocomplete_tag_url: "${h.url_for( controller='tag', action='tag_autocomplete_data', id=tagged_item_id, item_class=tagged_item.__class__.__name__ )}",
+ ajax_add_tag_url: "${h.url_for( controller='tag', action='add_tag_async', id=tagged_item_id, item_class=tagged_item.__class__.__name__ )}",
+ ajax_delete_tag_url: "${h.url_for( controller='tag', action='remove_tag_async', id=tagged_item_id, item_class=tagged_item.__class__.__name__ )}",
+ delete_tag_img: "${h.url_for('/static/images/delete_tag_icon_gray.png')}",
+ delete_tag_img_rollover: "${h.url_for('/static/images/delete_tag_icon_white.png')}",
+ add_tag_img: "${h.url_for('/static/images/add_icon.png')}",
+ add_tag_img_rollover: "${h.url_for('/static/images/add_icon_dark.png')}",
+ input_size: ${input_size},
+ in_form: ${in_form},
+ use_toggle_link: ${use_toggle_link}
+ };
+
+ $("#${elt_id}").autocomplete_tagging(options)
+ </script>
+</%def>
\ No newline at end of file
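The <% %> block in the new template is plain Python: it serializes the item's tags into the quoted name/value pairs that the template joins into the JavaScript options.tags object literal. A standalone sketch of that serialization, assuming tag objects expose user_tname, value, and user_value as in the template above:

    # Sketch: build the '"name" : "value"' pairs that are joined into a
    # JS object literal. Attributes match those in tagging_common.mako.
    def tags_to_js_pairs( tags ):
        pairs = []
        for tag in tags:
            tag_value = ""
            if tag.value is not None:
                tag_value = tag.user_value
            pairs.append( '"%s" : "%s"' % ( tag.user_tname, tag_value ) )
        return ", ".join( pairs )

Note that neither this sketch nor the template escapes quotes in tag text, so a tag value containing a double quote would break the generated JavaScript.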
diff -r ed4cbaf23c88 -r 032478337b82 tool_conf.xml.main
--- a/tool_conf.xml.main Fri Sep 11 09:00:36 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,263 +0,0 @@
-<?xml version="1.0"?>
-<toolbox>
- <section name="Get Data" id="getext">
- <tool file="data_source/upload.xml"/>
- <tool file="data_source/ucsc_tablebrowser.xml" />
- <tool file="data_source/ucsc_tablebrowser_archaea.xml" />
- <tool file="data_source/microbial_import.xml" />
- <tool file="data_source/biomart.xml" />
- <tool file="data_source/gramene_mart.xml" />
- <tool file="data_source/flymine.xml" />
- <tool file="data_source/encode_db.xml" />
- <tool file="data_source/epigraph_import.xml" />
- </section>
- <section name="Send Data" id="send">
- <tool file="data_destination/epigraph.xml" />
- </section>
- <section name="ENCODE Tools" id="EncodeTools">
- <tool file="encode/gencode_partition.xml" />
- <tool file="encode/random_intervals.xml" />
- </section>
- <section name="Lift-Over" id="liftOver">
- <tool file="extract/liftOver_wrapper.xml" />
- </section>
- <section name="Text Manipulation" id="textutil">
- <tool file="filters/fixedValueColumn.xml" />
- <tool file="stats/column_maker.xml" />
- <tool file="filters/catWrapper.xml" />
- <tool file="filters/condense_characters.xml" />
- <tool file="filters/convert_characters.xml" />
- <tool file="filters/CreateInterval.xml" />
- <tool file="filters/cutWrapper.xml" />
- <tool file="filters/changeCase.xml" />
- <tool file="filters/pasteWrapper.xml" />
- <tool file="filters/remove_beginning.xml" />
- <tool file="filters/headWrapper.xml" />
- <tool file="filters/tailWrapper.xml" />
- </section>
- <section name="Convert Formats" id="convert">
- <tool file="filters/bed2gff.xml" />
- <tool file="fasta_tools/fasta_to_tabular.xml" />
- <tool file="filters/gff2bed.xml" />
- <tool file="maf/maf_to_bed.xml" />
- <tool file="maf/maf_to_fasta.xml" />
- <tool file="fasta_tools/tabular_to_fasta.xml" />
- </section>
- <section name="FASTA manipulation" id="fasta_manipulation">
- <tool file="fasta_tools/fasta_compute_length.xml" />
- <tool file="fasta_tools/fasta_filter_by_length.xml" />
- <tool file="fasta_tools/fasta_concatenate_by_species.xml" />
- <tool file="fasta_tools/fasta_to_tabular.xml" />
- <tool file="fasta_tools/tabular_to_fasta.xml" />
- </section>
- <section name="Filter and Sort" id="filter">
- <tool file="stats/filtering.xml" />
- <tool file="filters/sorter.xml" />
- <tool file="filters/grep.xml" />
- </section>
- <section name="Join, Subtract and Group" id="group">
- <tool file="filters/joiner.xml" />
- <tool file="filters/compare.xml"/>
- <tool file="new_operations/subtract_query.xml"/>
- <tool file="stats/grouping.xml" />
- </section>
- <section name="Extract Features" id="features">
- <tool file="filters/ucsc_gene_bed_to_exon_bed.xml" />
- <tool file="extract/extract_GFF_Features.xml" />
- </section>
- <section name="Fetch Sequences" id="fetchSeq">
- <tool file="extract/extract_genomic_dna.xml" />
- </section>
- <section name="Fetch Alignments" id="fetchAlign">
- <tool file="maf/interval2maf_pairwise.xml" />
- <tool file="maf/interval2maf.xml" />
- <tool file="maf/interval_maf_to_merged_fasta.xml" />
- <tool file="maf/genebed_maf_to_fasta.xml"/>
- <tool file="maf/maf_stats.xml"/>
- <tool file="maf/maf_thread_for_species.xml"/>
- <tool file="maf/maf_limit_to_species.xml"/>
- <tool file="maf/maf_limit_size.xml"/>
- <tool file="maf/maf_by_block_number.xml"/>
- <tool file="maf/maf_filter.xml"/>
- <!--
- <tool file="maf/maf_reverse_complement.xml"/>
- -->
- </section>
- <section name="Get Genomic Scores" id="scores">
- <tool file="stats/wiggle_to_simple.xml" />
- <tool file="stats/aggregate_binned_scores_in_intervals.xml" />
- <tool file="extract/phastOdds/phastOdds_tool.xml" />
- </section>
- <section name="Operate on Genomic Intervals" id="bxops">
- <tool file="new_operations/intersect.xml" />
- <tool file="new_operations/subtract.xml" />
- <tool file="new_operations/merge.xml" />
- <tool file="new_operations/concat.xml" />
- <tool file="new_operations/basecoverage.xml" />
- <tool file="new_operations/coverage.xml" />
- <tool file="new_operations/complement.xml" />
- <tool file="new_operations/cluster.xml" id="cluster" />
- <tool file="new_operations/join.xml" />
- <tool file="new_operations/get_flanks.xml" />
- <tool file="new_operations/flanking_features.xml" />
- <tool file="annotation_profiler/annotation_profiler.xml" />
- </section>
- <section name="Statistics" id="stats">
- <tool file="stats/gsummary.xml" />
- <tool file="filters/uniq.xml" />
- <tool file="stats/cor.xml" />
- </section>
- <section name="Graph/Display Data" id="plots">
- <tool file="plotting/histogram2.xml" />
- <tool file="plotting/scatterplot.xml" />
- <tool file="plotting/xy_plot.xml" />
- <tool file="visualization/GMAJ.xml" />
- <tool file="visualization/build_ucsc_custom_track.xml" />
- </section>
- <section name="Regional Variation" id="regVar">
- <tool file="regVariation/windowSplitter.xml" />
- <tool file="regVariation/featureCounter.xml" />
- <tool file="regVariation/quality_filter.xml" />
- <tool file="regVariation/maf_cpg_filter.xml" />
- <tool file="regVariation/getIndels_2way.xml" />
- <tool file="regVariation/getIndels_3way.xml" />
- <tool file="regVariation/getIndelRates_3way.xml" />
- <tool file="regVariation/substitutions.xml" />
- <tool file="regVariation/substitution_rates.xml" />
- <tool file="regVariation/microsats_alignment_level.xml" />
- <tool file="regVariation/microsats_mutability.xml" />
- </section>
- <section name="Multiple regression" id="multReg">
- <tool file="regVariation/linear_regression.xml" />
- <tool file="regVariation/best_regression_subsets.xml" />
- <tool file="regVariation/rcve.xml" />
- </section>
- <section name="Evolution: HyPhy" id="hyphy">
- <tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
- <tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
- <tool file="hyphy/hyphy_dnds_wrapper.xml" />
- </section>
- <section name="Metagenomic analyses" id="tax_manipulation">
- <tool file="taxonomy/gi2taxonomy.xml" />
- <tool file="taxonomy/t2t_report.xml" />
- <tool file="taxonomy/t2ps_wrapper.xml" />
- <tool file="taxonomy/find_diag_hits.xml" />
- <tool file="taxonomy/lca.xml" />
- <tool file="taxonomy/poisson2test.xml" />
- </section>
- <section name="Short Read Analysis" id="short_read_analysis">
- <tool file="metag_tools/short_reads_figure_score.xml" />
- <tool file="metag_tools/short_reads_trim_seq.xml" />
- <tool file="metag_tools/megablast_wrapper.xml" />
- <tool file="metag_tools/megablast_xml_parser.xml" />
- </section>
- <section name="EMBOSS" id="EMBOSSLite">
- <tool file="emboss_5/emboss_antigenic.xml" />
- <tool file="emboss_5/emboss_backtranseq.xml" />
- <tool file="emboss_5/emboss_banana.xml" />
- <tool file="emboss_5/emboss_biosed.xml" />
- <tool file="emboss_5/emboss_btwisted.xml" />
- <tool file="emboss_5/emboss_cai_custom.xml" />
- <tool file="emboss_5/emboss_cai.xml" />
- <tool file="emboss_5/emboss_chaos.xml" />
- <tool file="emboss_5/emboss_charge.xml" />
- <tool file="emboss_5/emboss_checktrans.xml" />
- <tool file="emboss_5/emboss_chips.xml" />
- <tool file="emboss_5/emboss_cirdna.xml" />
- <tool file="emboss_5/emboss_codcmp.xml" />
- <tool file="emboss_5/emboss_coderet.xml" />
- <tool file="emboss_5/emboss_compseq.xml" />
- <tool file="emboss_5/emboss_cpgplot.xml" />
- <tool file="emboss_5/emboss_cpgreport.xml" />
- <tool file="emboss_5/emboss_cusp.xml" />
- <tool file="emboss_5/emboss_cutseq.xml" />
- <tool file="emboss_5/emboss_dan.xml" />
- <tool file="emboss_5/emboss_degapseq.xml" />
- <tool file="emboss_5/emboss_descseq.xml" />
- <tool file="emboss_5/emboss_diffseq.xml" />
- <tool file="emboss_5/emboss_digest.xml" />
- <tool file="emboss_5/emboss_dotmatcher.xml" />
- <tool file="emboss_5/emboss_dotpath.xml" />
- <tool file="emboss_5/emboss_dottup.xml" />
- <tool file="emboss_5/emboss_dreg.xml" />
- <tool file="emboss_5/emboss_einverted.xml" />
- <tool file="emboss_5/emboss_epestfind.xml" />
- <tool file="emboss_5/emboss_equicktandem.xml" />
- <tool file="emboss_5/emboss_est2genome.xml" />
- <tool file="emboss_5/emboss_etandem.xml" />
- <tool file="emboss_5/emboss_extractfeat.xml" />
- <tool file="emboss_5/emboss_extractseq.xml" />
- <tool file="emboss_5/emboss_freak.xml" />
- <tool file="emboss_5/emboss_fuzznuc.xml" />
- <tool file="emboss_5/emboss_fuzzpro.xml" />
- <tool file="emboss_5/emboss_fuzztran.xml" />
- <tool file="emboss_5/emboss_garnier.xml" />
- <tool file="emboss_5/emboss_geecee.xml" />
- <tool file="emboss_5/emboss_getorf.xml" />
- <tool file="emboss_5/emboss_helixturnhelix.xml" />
- <tool file="emboss_5/emboss_hmoment.xml" />
- <tool file="emboss_5/emboss_iep.xml" />
- <tool file="emboss_5/emboss_infoseq.xml" />
- <tool file="emboss_5/emboss_isochore.xml" />
- <tool file="emboss_5/emboss_lindna.xml" />
- <tool file="emboss_5/emboss_marscan.xml" />
- <tool file="emboss_5/emboss_maskfeat.xml" />
- <tool file="emboss_5/emboss_maskseq.xml" />
- <tool file="emboss_5/emboss_matcher.xml" />
- <tool file="emboss_5/emboss_megamerger.xml" />
- <tool file="emboss_5/emboss_merger.xml" />
- <tool file="emboss_5/emboss_msbar.xml" />
- <tool file="emboss_5/emboss_needle.xml" />
- <tool file="emboss_5/emboss_newcpgreport.xml" />
- <tool file="emboss_5/emboss_newcpgseek.xml" />
- <tool file="emboss_5/emboss_newseq.xml" />
- <tool file="emboss_5/emboss_noreturn.xml" />
- <tool file="emboss_5/emboss_notseq.xml" />
- <tool file="emboss_5/emboss_nthseq.xml" />
- <tool file="emboss_5/emboss_octanol.xml" />
- <tool file="emboss_5/emboss_oddcomp.xml" />
- <tool file="emboss_5/emboss_palindrome.xml" />
- <tool file="emboss_5/emboss_pasteseq.xml" />
- <tool file="emboss_5/emboss_patmatdb.xml" />
- <tool file="emboss_5/emboss_pepcoil.xml" />
- <tool file="emboss_5/emboss_pepinfo.xml" />
- <tool file="emboss_5/emboss_pepnet.xml" />
- <tool file="emboss_5/emboss_pepstats.xml" />
- <tool file="emboss_5/emboss_pepwheel.xml" />
- <tool file="emboss_5/emboss_pepwindow.xml" />
- <tool file="emboss_5/emboss_pepwindowall.xml" />
- <tool file="emboss_5/emboss_plotcon.xml" />
- <tool file="emboss_5/emboss_plotorf.xml" />
- <tool file="emboss_5/emboss_polydot.xml" />
- <tool file="emboss_5/emboss_preg.xml" />
- <tool file="emboss_5/emboss_prettyplot.xml" />
- <tool file="emboss_5/emboss_prettyseq.xml" />
- <tool file="emboss_5/emboss_primersearch.xml" />
- <tool file="emboss_5/emboss_revseq.xml" />
- <tool file="emboss_5/emboss_seqmatchall.xml" />
- <tool file="emboss_5/emboss_seqret.xml" />
- <tool file="emboss_5/emboss_showfeat.xml" />
- <tool file="emboss_5/emboss_shuffleseq.xml" />
- <tool file="emboss_5/emboss_sigcleave.xml" />
- <tool file="emboss_5/emboss_sirna.xml" />
- <tool file="emboss_5/emboss_sixpack.xml" />
- <tool file="emboss_5/emboss_skipseq.xml" />
- <tool file="emboss_5/emboss_splitter.xml" />
- <tool file="emboss_5/emboss_supermatcher.xml" />
- <tool file="emboss_5/emboss_syco.xml" />
- <tool file="emboss_5/emboss_tcode.xml" />
- <tool file="emboss_5/emboss_textsearch.xml" />
- <tool file="emboss_5/emboss_tmap.xml" />
- <tool file="emboss_5/emboss_tranalign.xml" />
- <tool file="emboss_5/emboss_transeq.xml" />
- <tool file="emboss_5/emboss_trimest.xml" />
- <tool file="emboss_5/emboss_trimseq.xml" />
- <tool file="emboss_5/emboss_twofeat.xml" />
- <tool file="emboss_5/emboss_union.xml" />
- <tool file="emboss_5/emboss_vectorstrip.xml" />
- <tool file="emboss_5/emboss_water.xml" />
- <tool file="emboss_5/emboss_wobble.xml" />
- <tool file="emboss_5/emboss_wordcount.xml" />
- <tool file="emboss_5/emboss_wordmatch.xml" />
- </section>
-</toolbox>

11 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/f0adb6152df9
changeset: 2678:f0adb6152df9
user: Greg Von Kuster <greg@bx.psu.edu>
date: Thu Sep 10 21:24:06 2009 -0400
description:
Streamline history sharing and add a "Manage shared histories" section to the History options menu. Also use a better approach to setting peek on datasets.
5 file(s) affected in this change:
lib/galaxy/datatypes/data.py
lib/galaxy/web/controllers/history.py
templates/history/sharing.mako
templates/root/index.mako
test/functional/test_history_functions.py
diffs (482 lines):
diff -r 96ccd29277be -r f0adb6152df9 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Thu Sep 10 17:48:52 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Thu Sep 10 21:24:06 2009 -0400
@@ -416,8 +416,9 @@
count = 0
file_type = None
data_checked = False
- for line in file( file_name ):
- line = line[:WIDTH]
+ temp = open( file_name, "U" )
+ while count <= LINE_COUNT:
+ line = temp.readline( WIDTH )
if line and not is_multi_byte and not data_checked:
# See if we have a compressed or binary file
if line[0:2] == util.gzip_magic:
@@ -432,9 +433,8 @@
if file_type in [ 'gzipped', 'binary' ]:
break
lines.append( line )
- if count == LINE_COUNT:
- break
count += 1
+ temp.close()
if file_type in [ 'gzipped', 'binary' ]:
text = "%s file" % file_type
else:
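The rewritten loop bounds the peek in both dimensions: open(file_name, "U") gives universal-newline reading, readline(WIDTH) caps how many bytes a single line may consume, and the count guard caps how many lines are read, so a file with one enormous line can no longer be slurped whole. A minimal sketch of the same bounded-peek idea (WIDTH and LINE_COUNT values are illustrative; the real module defines its own):

    # Bounded peek: at most LINE_COUNT lines of at most WIDTH bytes each.
    WIDTH = 1024
    LINE_COUNT = 5

    def peek_lines( file_name ):
        lines = []
        temp = open( file_name, "U" )      # universal newlines (Python 2)
        count = 0
        while count < LINE_COUNT:
            line = temp.readline( WIDTH )  # reads at most WIDTH bytes
            if not line:                   # EOF
                break
            lines.append( line )
            count += 1
        temp.close()
        return lines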
diff -r 96ccd29277be -r f0adb6152df9 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Thu Sep 10 17:48:52 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Thu Sep 10 21:24:06 2009 -0400
@@ -4,7 +4,7 @@
from galaxy.model.mapping import desc
from galaxy.model.orm import *
from galaxy.util.json import *
-import webhelpers, logging
+import webhelpers, logging, operator
from datetime import datetime
from cgi import escape
@@ -31,10 +31,12 @@
return "deleted"
elif history.users_shared_with:
return "shared"
+ elif history.importable:
+ return "importable"
return ""
def get_link( self, trans, grid, item ):
- if item.users_shared_with:
- return dict( operation="sharing", id=item.id )
+ if item.users_shared_with or item.importable:
+ return dict( operation="sharing" )
return None
# Grid definition
title = "Stored histories"
@@ -55,9 +57,12 @@
operations = [
grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
grids.GridOperation( "Share", condition=( lambda item: not item.deleted ) ),
+ grids.GridOperation( "Unshare", condition=( lambda item: not item.deleted ) ),
grids.GridOperation( "Rename", condition=( lambda item: not item.deleted ) ),
grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ) ),
- grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) )
+ grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ),
+ grids.GridOperation( "Enable import via link", condition=( lambda item: item.deleted ) ),
+ grids.GridOperation( "Disable import via link", condition=( lambda item: item.deleted ) )
]
standard_filters = [
grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
@@ -99,7 +104,9 @@
]
operations = [
grids.GridOperation( "Clone" ),
- grids.GridOperation( "Unshare" )
+ grids.GridOperation( "Unshare" ),
+ grids.GridOperation( "Enable import via link", condition=( lambda item: item.deleted ) ),
+ grids.GridOperation( "Disable import via link", condition=( lambda item: item.deleted ) )
]
standard_filters = []
def build_initial_query( self, session ):
@@ -126,19 +133,19 @@
current_history = trans.get_history()
status = message = None
if 'operation' in kwargs:
- history_ids = util.listify( kwargs.get( 'id', [] ) )
- histories = []
operation = kwargs['operation'].lower()
if operation == "share":
return self.share( trans, **kwargs )
- elif operation == "rename":
+ if operation == "rename":
return self.rename( trans, **kwargs )
- elif operation == 'sharing':
- return self.sharing( trans, id=kwargs['id'] )
+ history_ids = util.listify( kwargs.get( 'id', [] ) )
+ if operation == "sharing":
+ return self.sharing( trans, id=history_ids )
# Display no message by default
status, message = None, None
refresh_history = False
# Load the histories and ensure they all belong to the current user
+ histories = []
for history_id in history_ids:
history = get_history( trans, history_id )
if history:
@@ -161,6 +168,21 @@
trans.template_context['refresh_frames'] = ['history']
elif operation == "undelete":
status, message = self._list_undelete( trans, histories )
+ elif operation == "unshare":
+ for history in histories:
+ husas = trans.app.model.HistoryUserShareAssociation.filter_by( history=history ).all()
+ for husa in husas:
+ husa.delete()
+ elif operation == "enable import via link":
+ for history in histories:
+ if not history.importable:
+ history.importable = True
+ elif operation == "disable import via link":
+ if history_ids:
+ histories = [ get_history( trans, history_id ) for history_id in history_ids ]
+ for history in histories:
+ if history.importable:
+ history.importable = False
trans.sa_session.flush()
# Render the list view
return self.stored_list_grid( trans, status=status, message=message, **kwargs )
@@ -237,24 +259,20 @@
msg = util.restore_text( kwargs.get( 'msg', '' ) )
status = message = None
if 'operation' in kwargs:
- id = kwargs.get( 'id', None )
+ ids = util.listify( kwargs.get( 'id', [] ) )
operation = kwargs['operation'].lower()
if operation == "clone":
- if not id:
+ if not ids:
message = "Select a history to clone"
return self.shared_list_grid( trans, status='error', message=message, **kwargs )
# When cloning shared histories, only copy active datasets
new_kwargs = { 'clone_choice' : 'active' }
return self.clone( trans, id, **new_kwargs )
elif operation == 'unshare':
- if not id:
+ if not ids:
message = "Select a history to unshare"
return self.shared_list_grid( trans, status='error', message=message, **kwargs )
- ids = util.listify( id )
- histories = []
- for history_id in ids:
- history = get_history( trans, history_id, check_ownership=False )
- histories.append( history )
+ histories = [ get_history( trans, history_id ) for history_id in ids ]
for history in histories:
# Current user is the user with which the histories were shared
association = trans.app.model.HistoryUserShareAssociation.filter_by( user=trans.user, history=history ).one()
@@ -262,6 +280,20 @@
association.flush()
message = "Unshared %d shared histories" % len( ids )
status = 'done'
+ elif operation == "enable import via link":
+ if ids:
+ histories = [ get_history( trans, id ) for id in ids ]
+ for history in histories:
+ if not history.importable:
+ history.importable = True
+ history.flush()
+ elif operation == "disable import via link":
+ if ids:
+ histories = [ get_history( trans, id ) for id in ids ]
+ for history in histories:
+ if history.importable:
+ history.importable = False
+ history.flush()
# Render the list view
return self.shared_list_grid( trans, status=status, message=message, **kwargs )
@web.expose
@@ -622,7 +654,9 @@
params = util.Params( kwd )
msg = util.restore_text ( params.get( 'msg', '' ) )
if id:
- histories = [ get_history( trans, id ) ]
+ ids = util.listify( id )
+ if ids:
+ histories = [ get_history( trans, history_id ) for history_id in ids ]
for history in histories:
if params.get( 'enable_import_via_link', False ):
history.importable = True
@@ -635,14 +669,34 @@
if not user:
msg = 'History (%s) does not seem to be shared with user (%s)' % ( history.name, user.email )
return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='error' )
- association = trans.app.model.HistoryUserShareAssociation.filter_by( user=user, history=history ).one()
- association.delete()
- association.flush()
- if not id:
- shared_msg = "History (%s) now shared with: %d users. " % ( history.name, len( history.users_shared_with ) )
- msg = '%s%s' % ( shared_msg, msg )
+ husas = trans.app.model.HistoryUserShareAssociation.filter_by( user=user, history=history ).all()
+ if husas:
+ for husa in husas:
+ husa.delete()
+ husa.flush()
+ histories = []
+ # Get all histories that have been shared with others
+ husas = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
+ .join( "history" ) \
+ .filter( and_( trans.app.model.History.user == trans.user,
+ trans.app.model.History.deleted == False ) ) \
+ .order_by( trans.app.model.History.table.c.name ) \
+ .all()
+ for husa in husas:
+ history = husa.history
+ if history not in histories:
+ histories.append( history )
+ # Get all histories that are importable
+ importables = trans.sa_session.query( trans.app.model.History ) \
+ .filter_by( user=trans.user, importable=True, deleted=False ) \
+ .order_by( trans.app.model.History.table.c.name ) \
+ .all()
+ for importable in importables:
+ if importable not in histories:
+ histories.append( importable )
+ # Sort the list of histories by history.name
+ histories.sort( key=operator.attrgetter( 'name') )
return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='done' )
-
@web.expose
@web.require_login( "rename histories" )
def rename( self, trans, id=None, name=None, **kwd ):
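The new sharing() code builds its history list from two queries and merges them: histories with at least one HistoryUserShareAssociation (found via a join) and histories flagged importable, de-duplicated and finally sorted by name with operator.attrgetter. A condensed sketch of that merge, assuming the model classes and session exactly as used in the hunk above:

    # Sketch of the merge in sharing(): shared plus importable histories,
    # de-duplicated and sorted by name.
    import operator
    from sqlalchemy import and_

    def histories_for_sharing_page( trans ):
        model = trans.app.model
        histories = []
        shared = trans.sa_session.query( model.HistoryUserShareAssociation ) \
                                 .join( "history" ) \
                                 .filter( and_( model.History.user == trans.user,
                                                model.History.deleted == False ) ) \
                                 .all()
        for husa in shared:
            if husa.history not in histories:
                histories.append( husa.history )
        importables = trans.sa_session.query( model.History ) \
                                      .filter_by( user=trans.user,
                                                  importable=True, deleted=False ) \
                                      .all()
        for history in importables:
            if history not in histories:
                histories.append( history )
        histories.sort( key=operator.attrgetter( 'name' ) )
        return histories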
diff -r 96ccd29277be -r f0adb6152df9 templates/history/sharing.mako
--- a/templates/history/sharing.mako Thu Sep 10 17:48:52 2009 -0400
+++ b/templates/history/sharing.mako Thu Sep 10 21:24:06 2009 -0400
@@ -1,75 +1,63 @@
<%inherit file="/base.mako"/>
<%namespace file="/message.mako" import="render_msg" />
-<h2>Public access via link</h2>
+##<h2>Import via link</h2>
%if msg:
${render_msg( msg, messagetype )}
%endif
-%for history in histories:
- <p>
- %if history.importable:
- Send the following URL to users as an easy way for them to import the history, making a copy of their own:
- <% url = h.url_for( controller='history', action='imp', id=trans.security.encode_id(history.id), qualified=True ) %>
- <blockquote>
- <a href="${url}">${url}</a>
- </blockquote>
- <br/>
- <form action="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ) )}" method="POST">
- <input class="action-button" type="submit" name="disable_import_via_link" value="Disable import via link">
- </form>
- %else:
- This history is currently restricted (only you and the users listed below
- can access it). Enabling the following option will generate a URL that you
- can give to a user to allow them to import this history.
- <br/>
- <form action="${h.url_for( action='sharing', id=trans.security.encode_id(history.id) )}" method="POST">
- <input class="action-button" type="submit" name="enable_import_via_link" value="Enable import via link">
- </form>
- %endif
- </p>
- <h2>Sharing with specific users</h2>
- %if history.users_shared_with:
- <ul class="manage-table-actions">
- <li>
- <a class="action-button" href="${h.url_for( controller='history', action='share', id=trans.security.encode_id( history.id ) )}">
- <span>Share with another user</span>
- </a>
- </li>
- </ul>
- <p>
- The following users will see this history in their list of histories
- shared with them by others, and they will be able to create their own copy of it:
- </p>
- <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
- <tr class="header">
- <th>History '${history.name}' currently shared with</th>
- <th></th>
- </tr>
- %for i, association in enumerate( history.users_shared_with ):
- <% user = association.user %>
- <tr>
- <td>
- ${user.email}
- <a id="user-${i}-popup" class="popup-arrow" style="display: none;">▼</a>
- </td>
- <td>
- %if len( histories ) == 1:
- ## Only allow unsharing if we're dealing with 1 history, otherwise
- ## page refreshes screw things up
- <div popupmenu="user-${i}-popup">
- <a class="action-button" href="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ), unshare_user=trans.security.encode_id( user.id ) )}">Unshare</a>
+<h2>Histories that you've shared with others or enabled to be imported</h2>
+
+%if not histories:
+ You have no histories that you've shared with others or enabled to be imported
+%else:
+ %for history in histories:
+ <div class="toolForm">
+ <div class="toolFormTitle">History '${history.name}' shared with</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <div style="float: right;">
+ <a class="action-button" href="${h.url_for( controller='history', action='share', id=trans.security.encode_id( history.id ) )}">
+ <span>Share with another user</span>
+ </a>
+ </div>
+ </div>
+ %if history.users_shared_with:
+ %for i, association in enumerate( history.users_shared_with ):
+ <% user = association.user %>
+ <div class="form-row">
+ <a class="action-button" href="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ), unshare_user=trans.security.encode_id( user.id ) )}">Unshare</a>
+ ${user.email}
+ </div>
+ %endfor
+ %endif
+ %if history.importable:
+ <div class="form-row">
+ <% url = h.url_for( controller='history', action='imp', id=trans.security.encode_id(history.id), qualified=True ) %>
+ <a href="${url}">${url}</a>
+ <div class="toolParamHelp" style="clear: both;">
+ Send the above URL to users as an easy way for them to import the history, making a copy of their own
+ </div>
+ </div>
+ <div class="form-row">
+ <form action="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ) )}" method="POST">
+ <div class="form-row">
+ <input class="action-button" type="submit" name="disable_import_via_link" value="Disable import via link">
</div>
- %endif
- </td>
- </tr>
- %endfor
- </table>
- %else:
- <p>You have not shared this history with any users.</p>
- <a class="action-button" href="${h.url_for( controller='history', action='share', id=trans.security.encode_id(history.id) )}">
- <span>Share with another user</span>
- </a>
- %endif
-%endfor
+ </form>
+ </div>
+ %else:
+ <form action="${h.url_for( action='sharing', id=trans.security.encode_id(history.id) )}" method="POST">
+ <div class="form-row">
+ <input class="action-button" type="submit" name="enable_import_via_link" value="Enable import via link">
+ <div class="toolParamHelp" style="clear: both;">
+ Click to generate a URL that you can give to a user to allow them to import this history, making a copy of their own
+ </div>
+ </div>
+ </form>
+ %endif
+ </div>
+ </div>
+ %endfor
+%endif
diff -r 96ccd29277be -r f0adb6152df9 templates/root/index.mako
--- a/templates/root/index.mako Thu Sep 10 17:48:52 2009 -0400
+++ b/templates/root/index.mako Thu Sep 10 21:24:06 2009 -0400
@@ -9,9 +9,6 @@
"List your histories": null,
"Stored by you": function() {
galaxy_main.location = "${h.url_for( controller='history', action='list')}";
- },
- "Shared with you": function() {
- galaxy_main.location = "${h.url_for( controller='history', action='list_shared')}";
},
"Current History": null,
"Create new": function() {
@@ -32,10 +29,19 @@
"Show deleted datasets": function() {
galaxy_history.location = "${h.url_for( controller='root', action='history', show_deleted=True)}";
},
- "Delete": function() {
- if ( confirm( "Really delete the current history?" ) ) {
+ "Delete": function()
+ {
+ if ( confirm( "Really delete the current history?" ) )
+ {
galaxy_main.location = "${h.url_for( controller='history', action='delete_current' )}";
}
+ },
+ "Manage shared histories": null,
+ "Shared by you": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='list', operation='sharing' )}";
+ },
+ "Shared with you": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='list_shared')}";
}
});
});
diff -r 96ccd29277be -r f0adb6152df9 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Thu Sep 10 17:48:52 2009 -0400
+++ b/test/functional/test_history_functions.py Thu Sep 10 21:24:06 2009 -0400
@@ -141,14 +141,13 @@
check_str_after_submit='You cannot send histories to yourself.' )
# Share history3 with 1 valid user
self.share_current_history( regular_user1.email,
- check_str=history3.name,
- check_str_after_submit='History (%s) now shared with: 1 users' % history3.name )
+ check_str=history3.name )
# Check out list of histories to make sure history3 was shared
- self.view_stored_active_histories( check_str='operation=sharing&id=%s">shared' % self.security.encode_id( history3.id ) )
+ self.view_stored_active_histories( check_str='operation=sharing">shared' )
# Enable importing history3 via a URL
self.enable_import_via_link( self.security.encode_id( history3.id ),
check_str='Unshare',
- check_str_after_submit='Send the following URL to users' )
+ check_str_after_submit='Send the above URL to users' )
# Make sure history3 is now import-able
history3.refresh()
if not history3.importable:
@@ -159,7 +158,7 @@
check_str_after_submit='You cannot import your own history.' )
# Disable the import link for history3
self.disable_import_via_link( self.security.encode_id( history3.id ),
- check_str='Send the following URL to users',
+ check_str='Send the above URL to users',
check_str_after_submit='Enable import via link' )
# Try importing history3 after disabling the URL
self.import_history_via_url( self.security.encode_id( history3.id ),
@@ -274,12 +273,10 @@
self.upload_file( '2.bed', dbkey='hg18' )
ids = '%s,%s' % ( self.security.encode_id( history3.id ), self.security.encode_id( history4.id ) )
emails = '%s,%s' % ( regular_user2.email, regular_user3.email )
- check_str_after_submit = 'History (%s) now shared with: 3 users.' % history3.name
self.share_histories_with_users( ids,
emails,
check_str1='Share 2 histories',
- check_str2=history4.name,
- check_str_after_submit=check_str_after_submit )
+ check_str2=history4.name )
self.logout()
self.login( email=regular_user2.email )
# Shared history3 should be in regular_user2's list of shared histories
@@ -342,12 +339,10 @@
"""Testing sharing a restricted history by making the datasets public"""
# Logged in as admin_user
action_check_str = 'The following datasets can be shared with %s by updating their permissions' % regular_user1.email
- action_check_str_after_submit = 'History (%s) now shared with: 1 users.' % history5.name
# Current history is history5
self.share_current_history( regular_user1.email,
action='public',
- action_check_str=action_check_str,
- action_check_str_after_submit=action_check_str_after_submit )
+ action_check_str=action_check_str )
self.logout()
self.login( email=regular_user1.email )
# Shared history5 should be in regular_user1's list of shared histories
@@ -375,12 +370,10 @@
self.upload_file( '2.bed', dbkey='hg18' )
check_str_after_submit = 'The following datasets can be shared with %s with no changes' % regular_user2.email
check_str_after_submit2 = 'The following datasets can be shared with %s by updating their permissions' % regular_user2.email
- action_check_str_after_submit = 'History (%s) now shared with: 2 users.' % history5.name
self.share_current_history( regular_user2.email,
check_str_after_submit=check_str_after_submit,
check_str_after_submit2=check_str_after_submit2,
- action='private',
- action_check_str_after_submit=action_check_str_after_submit )
+ action='private' )
# We should now have a new sharing role
global sharing_role
role_name = 'Sharing role for: %s, %s' % ( admin_user.email, regular_user2.email )
@@ -470,12 +463,10 @@
check_str_after_submit = 'The following datasets can be shared with %s with no changes' % email
check_str_after_submit2 = 'The following datasets can be shared with %s by updating their permissions' % email
# history5 will be shared with regular_user1, regular_user2 and regular_user3
- action_check_str_after_submit = 'History (%s) now shared with: 3 users.' % history5.name
self.share_current_history( email,
check_str_after_submit=check_str_after_submit,
check_str_after_submit2=check_str_after_submit2,
- action='share_anyway',
- action_check_str_after_submit=action_check_str_after_submit )
+ action='share_anyway' )
# Check security on clone of history5 for regular_user2
self.logout()
self.login( email=regular_user2.email )
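
The enable/disable forms in the sharing template above post their submit
buttons back to the history controller's sharing action. A minimal sketch of
how such a toggle could be handled -- the class and function here are
stand-ins for illustration; only the form field names and the importable
flag come from the diffs:

    # Hedged sketch of the import-via-link toggle the forms above post to.
    # 'History' is a stand-in, not Galaxy's model class.
    class History( object ):
        def __init__( self ):
            self.importable = False

    def handle_sharing_post( history, params ):
        if 'enable_import_via_link' in params:
            history.importable = True    # template then renders the import URL
        elif 'disable_import_via_link' in params:
            history.importable = False   # template falls back to the enable form
        return history

    h = handle_sharing_post( History(), { 'enable_import_via_link': 'Enable import via link' } )
    assert h.importable
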
details: http://www.bx.psu.edu/hg/galaxy/rev/96ccd29277be
changeset: 2677:96ccd29277be
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Sep 10 17:48:52 2009 -0400
description:
Merge
0 file(s) affected in this change:
diffs (69 lines):
diff -r f2e4673d784b -r 96ccd29277be cron/updateucsc.sh.sample
--- a/cron/updateucsc.sh.sample Thu Sep 10 17:48:37 2009 -0400
+++ b/cron/updateucsc.sh.sample Thu Sep 10 17:48:52 2009 -0400
@@ -6,7 +6,8 @@
# Edit this line to refer to galaxy's path:
GALAXY=/galaxy/path
-export PYTHONPATH=${GALAXY}/lib
+PYTHONPATH=${GALAXY}/lib
+export PYTHONPATH
# setup directories
echo "Creating required directories."
@@ -32,7 +33,11 @@
python ${GALAXY}/cron/parse_builds.py > ${GALAXY}/tool-data/shared/ucsc/new/builds.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt
+ diff ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt
+ fi
else
echo "Failed to update builds.txt" >&2
fi
@@ -42,7 +47,11 @@
python ${GALAXY}/cron/parse_builds_3_sites.py > ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt
+ diff ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt
+ fi
else
echo "Failed to update builds.txt" >&2
fi
@@ -52,7 +61,16 @@
python ${GALAXY}/cron/build_chrom_db.py ${GALAXY}/tool-data/shared/ucsc/chrom/new/ ${GALAXY}/tool-data/shared/ucsc/builds.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/chrom/new/*.len ${GALAXY}/tool-data/shared/ucsc/chrom/
+ for src in ${GALAXY}/tool-data/shared/ucsc/chrom/new/*.len
+ do
+ dst=${GALAXY}/tool-data/shared/ucsc/chrom/`basename $src`
+ diff $src $dst > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ echo "cp -f $src $dst"
+ cp -f $src $dst
+ fi
+ done
else
echo "Failed to update chromInfo tables." >&2
fi
diff -r f2e4673d784b -r 96ccd29277be tools/new_operations/flanking_features.py
--- a/tools/new_operations/flanking_features.py Thu Sep 10 17:48:37 2009 -0400
+++ b/tools/new_operations/flanking_features.py Thu Sep 10 17:48:52 2009 -0400
@@ -129,7 +129,7 @@
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
elif result_up:
map(outfields.append, result_up[res_ind].other)
- else:
+ elif result_down:
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
yield outfields
11 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/2a15e0eca0b9
changeset: 2675:2a15e0eca0b9
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Sep 10 16:48:11 2009 -0400
description:
Support for (1) ordering tags and (2) page tags.
5 file(s) affected in this change:
lib/galaxy/model/__init__.py
lib/galaxy/model/mapping.py
lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
lib/galaxy/tags/tag_handler.py
lib/galaxy/web/controllers/tag.py
diffs (249 lines):
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/model/__init__.py Thu Sep 10 16:48:11 2009 -0400
@@ -1145,7 +1145,8 @@
return "Tag(id=%s, type=%i, parent_id=%s, name=%s)" % ( self.id, self.type, self.parent_id, self.name )
class ItemTagAssociation ( object ):
- def __init__( self, item_id=None, tag_id=None, user_tname=None, value=None ):
+ def __init__( self, id=None, item_id=None, tag_id=None, user_tname=None, value=None ):
+ self.id = id
self.item_id = item_id
self.tag_id = tag_id
self.user_tname = user_tname
@@ -1165,6 +1166,8 @@
class HistoryDatasetAssociationTagAssociation ( ItemTagAssociation ):
pass
+class PageTagAssociation ( ItemTagAssociation ):
+ pass
## ---- Utility methods -------------------------------------------------------
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/model/mapping.py Thu Sep 10 16:48:11 2009 -0400
@@ -552,6 +552,7 @@
UniqueConstraint( "name" ) )
HistoryTagAssociation.table = Table( "history_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
@@ -559,6 +560,7 @@
Column( "user_value", TrimmedString(255), index=True) )
DatasetTagAssociation.table = Table( "dataset_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
@@ -566,7 +568,16 @@
Column( "user_value", TrimmedString(255), index=True) )
HistoryDatasetAssociationTagAssociation.table = Table( "history_dataset_association_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+PageTagAssociation.table = Table( "page_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
@@ -672,7 +683,7 @@
visible_children=relation(
HistoryDatasetAssociation,
primaryjoin=( ( HistoryDatasetAssociation.table.c.parent_id == HistoryDatasetAssociation.table.c.id ) & ( HistoryDatasetAssociation.table.c.visible == True ) ) ),
- tags=relation(HistoryDatasetAssociationTagAssociation, backref='history_tag_associations')
+ tags=relation(HistoryDatasetAssociationTagAssociation, order_by=HistoryDatasetAssociationTagAssociation.table.c.id, backref='history_tag_associations')
) )
assign_mapper( context, Dataset, Dataset.table,
@@ -689,7 +700,7 @@
active_library_associations=relation(
LibraryDatasetDatasetAssociation,
primaryjoin=( ( Dataset.table.c.id == LibraryDatasetDatasetAssociation.table.c.dataset_id ) & ( LibraryDatasetDatasetAssociation.table.c.deleted == False ) ) ),
- tags=relation(DatasetTagAssociation, backref='datasets')
+ tags=relation(DatasetTagAssociation, order_by=DatasetTagAssociation.table.c.id, backref='datasets')
) )
assign_mapper( context, HistoryDatasetAssociationDisplayAtAuthorization, HistoryDatasetAssociationDisplayAtAuthorization.table,
@@ -709,7 +720,7 @@
properties=dict( galaxy_sessions=relation( GalaxySessionToHistoryAssociation ),
datasets=relation( HistoryDatasetAssociation, backref="history", order_by=asc(HistoryDatasetAssociation.table.c.hid) ),
active_datasets=relation( HistoryDatasetAssociation, primaryjoin=( ( HistoryDatasetAssociation.table.c.history_id == History.table.c.id ) & ( not_( HistoryDatasetAssociation.table.c.deleted ) ) ), order_by=asc( HistoryDatasetAssociation.table.c.hid ), viewonly=True ),
- tags=relation(HistoryTagAssociation, backref="histories")
+ tags=relation(HistoryTagAssociation, order_by=HistoryTagAssociation.table.c.id, backref="histories")
) )
assign_mapper( context, HistoryUserShareAssociation, HistoryUserShareAssociation.table,
@@ -967,7 +978,8 @@
primaryjoin=( Page.table.c.id == PageRevision.table.c.page_id ) ),
latest_revision=relation( PageRevision, post_update=True,
primaryjoin=( Page.table.c.latest_revision_id == PageRevision.table.c.id ),
- lazy=False )
+ lazy=False ),
+ tags=relation(PageTagAssociation, order_by=PageTagAssociation.table.c.id, backref="pages")
) )
assign_mapper( context, Tag, Tag.table,
@@ -988,6 +1000,11 @@
properties=dict( tag=relation(Tag, backref="tagged_history_dataset_associations") ),
primary_key=[HistoryDatasetAssociationTagAssociation.table.c.history_dataset_association_id, HistoryDatasetAssociationTagAssociation.table.c.tag_id]
)
+
+assign_mapper( context, PageTagAssociation, PageTagAssociation.table,
+ properties=dict( tag=relation(Tag, backref="tagged_pages") ),
+ primary_key=[PageTagAssociation.table.c.page_id, PageTagAssociation.table.c.tag_id]
+ )
def db_next_hid( self ):
"""
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py Thu Sep 10 16:48:11 2009 -0400
@@ -0,0 +1,116 @@
+"""
+This migration script provides support for (a) ordering tags by recency and
+(b) tagging pages. This script deletes all existing tags.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.exceptions import *
+from migrate import *
+import migrate.changeset
+
+import datetime
+now = datetime.datetime.utcnow
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData( migrate_engine )
+
+def display_migration_details():
+ print ""
+ print "This migration script provides support for (a) ordering tags by recency and"
+ print "(b) tagging pages. This script deletes all existing tags."
+
+HistoryTagAssociation_table = Table( "history_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+DatasetTagAssociation_table = Table( "dataset_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+HistoryDatasetAssociationTagAssociation_table = Table( "history_dataset_association_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+PageTagAssociation_table = Table( "page_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+def upgrade():
+ display_migration_details()
+ metadata.reflect()
+
+ #
+ # Recreate tables.
+ #
+ try:
+ HistoryTagAssociation_table.drop()
+ HistoryTagAssociation_table.create()
+ except Exception, e:
+ print "Recreating history_tag_association table failed: %s" % str( e )
+ log.debug( "Recreating history_tag_association table failed: %s" % str( e ) )
+
+ try:
+ DatasetTagAssociation_table.drop()
+ DatasetTagAssociation_table.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Recreating dataset_tag_association table failed: %s" % str( e ) )
+
+ try:
+ HistoryDatasetAssociationTagAssociation_table.drop()
+ HistoryDatasetAssociationTagAssociation_table.create()
+ except OperationalError, e:
+ # Handle error that results from an index name that is too long; this occurs
+ # in MySQL.
+ if str(e).find("CREATE INDEX") != -1:
+ # Manually create index.
+ i = Index( "ix_hda_ta_history_dataset_association_id", HistoryDatasetAssociationTagAssociation_table.c.history_dataset_association_id )
+ try:
+ i.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Adding index 'ix_hda_ta_history_dataset_association_id' to table 'history_dataset_association_tag_association' table failed: %s" % str( e ) )
+ except Exception, e:
+ print str(e)
+ log.debug( "Recreating history_dataset_association_tag_association table failed: %s" % str( e ) )
+
+ # Create page_tag_association table.
+ try:
+ PageTagAssociation_table.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Creating page_tag_association table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+
+ # No need to downgrade other tagging tables. They work fine with version 16 code.
+
+ # Drop page_tag_association table.
+ try:
+ PageTagAssociation_table.drop()
+ except Exception, e:
+ print str(e)
+ log.debug( "Dropping page_tag_association table failed: %s" % str( e ) )
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/tags/tag_handler.py
--- a/lib/galaxy/tags/tag_handler.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/tags/tag_handler.py Thu Sep 10 16:48:11 2009 -0400
@@ -1,4 +1,4 @@
-from galaxy.model import Tag, History, HistoryTagAssociation, Dataset, DatasetTagAssociation, HistoryDatasetAssociation, HistoryDatasetAssociationTagAssociation
+from galaxy.model import Tag
import re
class TagHandler( object ):
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/web/controllers/tag.py
--- a/lib/galaxy/web/controllers/tag.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/web/controllers/tag.py Thu Sep 10 16:48:11 2009 -0400
@@ -1,6 +1,9 @@
"""
Tags Controller: handles tagging/untagging of entities and provides autocomplete support.
"""
+
+from galaxy.model import History, HistoryTagAssociation, Dataset, DatasetTagAssociation, \
+ HistoryDatasetAssociation, HistoryDatasetAssociationTagAssociation, Page, PageTagAssociation
from galaxy.web.base.controller import *
from galaxy.tags.tag_handler import *
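
The ordering in (1) comes from the new surrogate "id" primary key on each
association table plus an order_by on the mapped relation, so tags come back
in insertion (recency) order. A standalone sketch of the same idiom, using
the SQLAlchemy 0.5-era API that matches the relation/assign_mapper style in
the diffs (table and class names are illustrative):

    # Illustrative sketch: ordering a tag relation by the association's
    # surrogate id, i.e. by insertion order.  Uses 0.5-era SQLAlchemy API.
    from sqlalchemy import Table, Column, Integer, String, ForeignKey, MetaData
    from sqlalchemy.orm import mapper, relation

    metadata = MetaData()
    item_table = Table( "item", metadata,
        Column( "id", Integer, primary_key=True ) )
    item_tag_table = Table( "item_tag_association", metadata,
        Column( "id", Integer, primary_key=True ),   # surrogate key gives a stable order
        Column( "item_id", Integer, ForeignKey( "item.id" ), index=True ),
        Column( "user_tname", String( 255 ) ) )

    class Item( object ): pass
    class ItemTagAssociation( object ): pass

    mapper( ItemTagAssociation, item_tag_table )
    mapper( Item, item_table, properties=dict(
        # order_by on the association's id => tags returned oldest-first
        tags=relation( ItemTagAssociation, order_by=item_tag_table.c.id ) ) )
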
details: http://www.bx.psu.edu/hg/galaxy/rev/f2e4673d784b
changeset: 2676:f2e4673d784b
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Sep 10 17:48:37 2009 -0400
description:
merge
0 file(s) affected in this change:
diffs (151 lines):
diff -r 2a15e0eca0b9 -r f2e4673d784b lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Thu Sep 10 16:48:11 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Sep 10 17:48:37 2009 -0400
@@ -502,13 +502,6 @@
context = self.get_dataset_finish_context( job_context, dataset_assoc.dataset.dataset )
#should this also be checking library associations? - can a library item be added from a history before the job has ended? - lets not allow this to occur
for dataset in dataset_assoc.dataset.dataset.history_associations: #need to update all associated output hdas, i.e. history was shared with job running
- if context.get( 'path', None ):
- # The tool can set an alternate output path for the dataset.
- try:
- shutil.move( context['path'], dataset.file_name )
- except ( IOError, OSError ):
- if not context['stderr']:
- context['stderr'] = 'This dataset could not be processed'
dataset.blurb = 'done'
dataset.peek = 'no peek'
dataset.info = context['stdout'] + context['stderr']
@@ -707,6 +700,13 @@
sizes.append( ( outfile, os.stat( outfile ).st_size ) )
return sizes
def setup_external_metadata( self, exec_dir = None, tmp_dir = None, dataset_files_path = None, config_root = None, datatypes_config = None, **kwds ):
+ # extension could still be 'auto' if this is the upload tool.
+ job = model.Job.get( self.job_id )
+ for output_dataset_assoc in job.output_datasets:
+ if output_dataset_assoc.dataset.ext == 'auto':
+ context = self.get_dataset_finish_context( dict(), output_dataset_assoc.dataset.dataset )
+ output_dataset_assoc.dataset.extension = context.get( 'ext', 'data' )
+ mapping.context.current.flush()
if tmp_dir is None:
#this dir should be relative to the exec_dir
tmp_dir = self.app.config.new_file_path
@@ -716,7 +716,6 @@
config_root = self.app.config.root
if datatypes_config is None:
datatypes_config = self.app.config.datatypes_config
- job = model.Job.get( self.job_id )
return self.external_output_metadata.setup_external_metadata( [ output_dataset_assoc.dataset for output_dataset_assoc in job.output_datasets ], exec_dir = exec_dir, tmp_dir = tmp_dir, dataset_files_path = dataset_files_path, config_root = config_root, datatypes_config = datatypes_config, **kwds )
class DefaultJobDispatcher( object ):
diff -r 2a15e0eca0b9 -r f2e4673d784b lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Sep 10 16:48:11 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Sep 10 17:48:37 2009 -0400
@@ -144,7 +144,7 @@
job.add_parameter( name, value )
job.add_parameter( 'paramfile', to_json_string( json_file_path ) )
for i, dataset in enumerate( data_list ):
- job.add_output_dataset( i, dataset )
+ job.add_output_dataset( 'output%i' % i, dataset )
job.state = trans.app.model.Job.states.NEW
trans.app.model.flush()
diff -r 2a15e0eca0b9 -r f2e4673d784b tools/data_source/upload.py
--- a/tools/data_source/upload.py Thu Sep 10 16:48:11 2009 -0400
+++ b/tools/data_source/upload.py Thu Sep 10 17:48:37 2009 -0400
@@ -115,7 +115,14 @@
return ( True, False, test_ext )
return ( True, True, test_ext )
-def add_file( dataset, json_file ):
+def parse_outputs( args ):
+ rval = {}
+ for arg in args:
+ id, path = arg.split( ':', 1 )
+ rval[int( id )] = path
+ return rval
+
+def add_file( dataset, json_file, output_path ):
data_type = None
line_count = None
@@ -229,16 +236,18 @@
ext = dataset.ext
if ext == 'auto':
ext = 'data'
+ # Move the dataset to its "real" path
+ shutil.move( dataset.path, output_path )
+ # Write the job info
info = dict( type = 'dataset',
dataset_id = dataset.dataset_id,
- path = dataset.path,
ext = ext,
stdout = 'uploaded %s file' % data_type,
name = dataset.name,
line_count = line_count )
json_file.write( to_json_string( info ) + "\n" )
-def add_composite_file( dataset, json_file ):
+def add_composite_file( dataset, json_file, output_path ):
if dataset.composite_files:
os.mkdir( dataset.extra_files_path )
for name, value in dataset.composite_files.iteritems():
@@ -253,17 +262,21 @@
else:
sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( dataset.extra_files_path, name ) )
+ # Move the dataset to its "real" path
+ shutil.move( dataset.primary_file, output_path )
+ # Write the job info
info = dict( type = 'dataset',
dataset_id = dataset.dataset_id,
- path = dataset.primary_file,
stdout = 'uploaded %s file' % dataset.file_type )
json_file.write( to_json_string( info ) + "\n" )
def __main__():
- if len( sys.argv ) != 2:
- print >>sys.stderr, 'usage: upload.py <json paramfile>'
+ if len( sys.argv ) < 2:
+ print >>sys.stderr, 'usage: upload.py <json paramfile> <output spec> ...'
sys.exit( 1 )
+
+ output_paths = parse_outputs( sys.argv[2:] )
json_file = open( 'galaxy.json', 'w' )
@@ -271,10 +284,16 @@
dataset = from_json_string( line )
dataset = util.bunch.Bunch( **safe_dict( dataset ) )
+ try:
+ output_path = output_paths[int( dataset.dataset_id )]
+ except:
+ print >>sys.stderr, 'Output path for dataset %s not found on command line' % dataset.dataset_id
+ sys.exit( 1 )
+
if dataset.type == 'composite':
- add_composite_file( dataset, json_file )
+ add_composite_file( dataset, json_file, output_path )
else:
- add_file( dataset, json_file )
+ add_file( dataset, json_file, output_path )
# clean up paramfile
try:
diff -r 2a15e0eca0b9 -r f2e4673d784b tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Sep 10 16:48:11 2009 -0400
+++ b/tools/data_source/upload.xml Thu Sep 10 17:48:37 2009 -0400
@@ -7,6 +7,12 @@
<action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
<command interpreter="python">
upload.py $paramfile
+ #set $outnum = 0
+ #while $varExists('output%i' % $outnum):
+ #set $output = $getVar('output%i' % $outnum)
+ #set $outnum += 1
+ ${output.dataset.dataset.id}:${output}
+ #end while
</command>
<inputs>
<param name="file_type" type="select" label="File Format" help="Which format? See help below">
11 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/0a41293e679a
changeset: 2674:0a41293e679a
user: guru
date: Thu Sep 10 17:35:21 2009 -0400
description:
Bug fix for 'Fetch closest feature' tool.
1 file(s) affected in this change:
tools/new_operations/flanking_features.py
diffs (12 lines):
diff -r ce8c57840343 -r 0a41293e679a tools/new_operations/flanking_features.py
--- a/tools/new_operations/flanking_features.py Thu Sep 10 17:31:05 2009 -0400
+++ b/tools/new_operations/flanking_features.py Thu Sep 10 17:35:21 2009 -0400
@@ -129,7 +129,7 @@
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
elif result_up:
map(outfields.append, result_up[res_ind].other)
- else:
+ elif result_down:
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
yield outfields
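
The fix matters when both result_up and result_down are empty: the old bare
"else:" branch still ran and indexing result_down[-1] on an empty list
raised an IndexError. With "elif result_down:" the row is skipped instead.
A toy illustration (not the tool's real data structures):

    # With both lists empty, neither branch fires and closest stays None;
    # the old bare "else:" would have raised IndexError on result_down[-1].
    result_up, result_down = [], []
    closest = None
    if result_up and result_down:
        closest = min( result_up[-1], result_down[-1] )
    elif result_up:
        closest = result_up[-1]
    elif result_down:
        closest = result_down[-1]
    assert closest is None
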
details: http://www.bx.psu.edu/hg/galaxy/rev/ce8c57840343
changeset: 2673:ce8c57840343
user: Nate Coraor <nate(a)bx.psu.edu>
date: Thu Sep 10 17:31:05 2009 -0400
description:
POSIXize/Bournize updateucsc.sh.sample
1 file(s) affected in this change:
cron/updateucsc.sh.sample
diffs (57 lines):
diff -r d3fe789e3931 -r ce8c57840343 cron/updateucsc.sh.sample
--- a/cron/updateucsc.sh.sample Thu Sep 10 14:52:38 2009 -0400
+++ b/cron/updateucsc.sh.sample Thu Sep 10 17:31:05 2009 -0400
@@ -6,7 +6,8 @@
# Edit this line to refer to galaxy's path:
GALAXY=/galaxy/path
-export PYTHONPATH=${GALAXY}/lib
+PYTHONPATH=${GALAXY}/lib
+export PYTHONPATH
# setup directories
echo "Creating required directories."
@@ -32,7 +33,11 @@
python ${GALAXY}/cron/parse_builds.py > ${GALAXY}/tool-data/shared/ucsc/new/builds.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt
+ diff ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt
+ fi
else
echo "Failed to update builds.txt" >&2
fi
@@ -42,7 +47,11 @@
python ${GALAXY}/cron/parse_builds_3_sites.py > ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt
+ diff ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt
+ fi
else
echo "Failed to update builds.txt" >&2
fi
@@ -52,7 +61,16 @@
python ${GALAXY}/cron/build_chrom_db.py ${GALAXY}/tool-data/shared/ucsc/chrom/new/ ${GALAXY}/tool-data/shared/ucsc/builds.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/chrom/new/*.len ${GALAXY}/tool-data/shared/ucsc/chrom/
+ for src in ${GALAXY}/tool-data/shared/ucsc/chrom/new/*.len
+ do
+ dst=${GALAXY}/tool-data/shared/ucsc/chrom/`basename $src`
+ diff $src $dst > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ echo "cp -f $src $dst"
+ cp -f $src $dst
+ fi
+ done
else
echo "Failed to update chromInfo tables." >&2
fi
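
The change replaces the timestamp-based "cp -uf" with an explicit content
comparison, so a destination file is only rewritten when it actually differs
from the freshly generated one. A rough Python equivalent of the same
diff-then-copy idiom, for illustration only (the real change is in the shell
script above):

    # Copy src over dst only when the contents differ, leaving unchanged
    # files (and their timestamps) alone.
    import filecmp
    import os
    import shutil

    def copy_if_changed( src, dst ):
        if not os.path.exists( dst ) or not filecmp.cmp( src, dst, shallow=False ):
            shutil.copyfile( src, dst )
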
10 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/d3fe789e3931
changeset: 2672:d3fe789e3931
user: Nate Coraor <nate(a)bx.psu.edu>
date: Thu Sep 10 14:52:38 2009 -0400
description:
Get rid of the hacky "alternate path" stuff used by the upload tool and fix setting metadata when using autodetect and set_metadata_externally
4 file(s) affected in this change:
lib/galaxy/jobs/__init__.py
lib/galaxy/tools/actions/upload.py
tools/data_source/upload.py
tools/data_source/upload.xml
diffs (151 lines):
diff -r dbbc63c0630a -r d3fe789e3931 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Sep 10 14:52:38 2009 -0400
@@ -502,13 +502,6 @@
context = self.get_dataset_finish_context( job_context, dataset_assoc.dataset.dataset )
#should this also be checking library associations? - can a library item be added from a history before the job has ended? - lets not allow this to occur
for dataset in dataset_assoc.dataset.dataset.history_associations: #need to update all associated output hdas, i.e. history was shared with job running
- if context.get( 'path', None ):
- # The tool can set an alternate output path for the dataset.
- try:
- shutil.move( context['path'], dataset.file_name )
- except ( IOError, OSError ):
- if not context['stderr']:
- context['stderr'] = 'This dataset could not be processed'
dataset.blurb = 'done'
dataset.peek = 'no peek'
dataset.info = context['stdout'] + context['stderr']
@@ -707,6 +700,13 @@
sizes.append( ( outfile, os.stat( outfile ).st_size ) )
return sizes
def setup_external_metadata( self, exec_dir = None, tmp_dir = None, dataset_files_path = None, config_root = None, datatypes_config = None, **kwds ):
+ # extension could still be 'auto' if this is the upload tool.
+ job = model.Job.get( self.job_id )
+ for output_dataset_assoc in job.output_datasets:
+ if output_dataset_assoc.dataset.ext == 'auto':
+ context = self.get_dataset_finish_context( dict(), output_dataset_assoc.dataset.dataset )
+ output_dataset_assoc.dataset.extension = context.get( 'ext', 'data' )
+ mapping.context.current.flush()
if tmp_dir is None:
#this dir should be relative to the exec_dir
tmp_dir = self.app.config.new_file_path
@@ -716,7 +716,6 @@
config_root = self.app.config.root
if datatypes_config is None:
datatypes_config = self.app.config.datatypes_config
- job = model.Job.get( self.job_id )
return self.external_output_metadata.setup_external_metadata( [ output_dataset_assoc.dataset for output_dataset_assoc in job.output_datasets ], exec_dir = exec_dir, tmp_dir = tmp_dir, dataset_files_path = dataset_files_path, config_root = config_root, datatypes_config = datatypes_config, **kwds )
class DefaultJobDispatcher( object ):
diff -r dbbc63c0630a -r d3fe789e3931 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Sep 10 14:52:38 2009 -0400
@@ -144,7 +144,7 @@
job.add_parameter( name, value )
job.add_parameter( 'paramfile', to_json_string( json_file_path ) )
for i, dataset in enumerate( data_list ):
- job.add_output_dataset( i, dataset )
+ job.add_output_dataset( 'output%i' % i, dataset )
job.state = trans.app.model.Job.states.NEW
trans.app.model.flush()
diff -r dbbc63c0630a -r d3fe789e3931 tools/data_source/upload.py
--- a/tools/data_source/upload.py Thu Sep 10 10:42:50 2009 -0400
+++ b/tools/data_source/upload.py Thu Sep 10 14:52:38 2009 -0400
@@ -115,7 +115,14 @@
return ( True, False, test_ext )
return ( True, True, test_ext )
-def add_file( dataset, json_file ):
+def parse_outputs( args ):
+ rval = {}
+ for arg in args:
+ id, path = arg.split( ':', 1 )
+ rval[int( id )] = path
+ return rval
+
+def add_file( dataset, json_file, output_path ):
data_type = None
line_count = None
@@ -229,16 +236,18 @@
ext = dataset.ext
if ext == 'auto':
ext = 'data'
+ # Move the dataset to its "real" path
+ shutil.move( dataset.path, output_path )
+ # Write the job info
info = dict( type = 'dataset',
dataset_id = dataset.dataset_id,
- path = dataset.path,
ext = ext,
stdout = 'uploaded %s file' % data_type,
name = dataset.name,
line_count = line_count )
json_file.write( to_json_string( info ) + "\n" )
-def add_composite_file( dataset, json_file ):
+def add_composite_file( dataset, json_file, output_path ):
if dataset.composite_files:
os.mkdir( dataset.extra_files_path )
for name, value in dataset.composite_files.iteritems():
@@ -253,17 +262,21 @@
else:
sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( dataset.extra_files_path, name ) )
+ # Move the dataset to its "real" path
+ shutil.move( dataset.primary_file, output_path )
+ # Write the job info
info = dict( type = 'dataset',
dataset_id = dataset.dataset_id,
- path = dataset.primary_file,
stdout = 'uploaded %s file' % dataset.file_type )
json_file.write( to_json_string( info ) + "\n" )
def __main__():
- if len( sys.argv ) != 2:
- print >>sys.stderr, 'usage: upload.py <json paramfile>'
+ if len( sys.argv ) < 2:
+ print >>sys.stderr, 'usage: upload.py <json paramfile> <output spec> ...'
sys.exit( 1 )
+
+ output_paths = parse_outputs( sys.argv[2:] )
json_file = open( 'galaxy.json', 'w' )
@@ -271,10 +284,16 @@
dataset = from_json_string( line )
dataset = util.bunch.Bunch( **safe_dict( dataset ) )
+ try:
+ output_path = output_paths[int( dataset.dataset_id )]
+ except:
+ print >>sys.stderr, 'Output path for dataset %s not found on command line' % dataset.dataset_id
+ sys.exit( 1 )
+
if dataset.type == 'composite':
- add_composite_file( dataset, json_file )
+ add_composite_file( dataset, json_file, output_path )
else:
- add_file( dataset, json_file )
+ add_file( dataset, json_file, output_path )
# clean up paramfile
try:
diff -r dbbc63c0630a -r d3fe789e3931 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Sep 10 10:42:50 2009 -0400
+++ b/tools/data_source/upload.xml Thu Sep 10 14:52:38 2009 -0400
@@ -7,6 +7,12 @@
<action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
<command interpreter="python">
upload.py $paramfile
+ #set $outnum = 0
+ #while $varExists('output%i' % $outnum):
+ #set $output = $getVar('output%i' % $outnum)
+ #set $outnum += 1
+ ${output.dataset.dataset.id}:${output}
+ #end while
</command>
<inputs>
<param name="file_type" type="select" label="File Format" help="Which format? See help below">
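
The Cheetah loop added to upload.xml above builds one "<dataset_id>:<path>"
spec per output on the command line, and parse_outputs in upload.py splits
each spec on the first ':' only, so paths that themselves contain ':'
survive intact. A small usage sketch (parse_outputs is copied from the diff;
the example path is illustrative):

    # parse_outputs as added in the diff above: maps dataset id -> output path.
    def parse_outputs( args ):
        rval = {}
        for arg in args:
            id, path = arg.split( ':', 1 )   # split on the first ':' only
            rval[int( id )] = path
        return rval

    # e.g. invoked as: upload.py <json paramfile> 7:/depot/dataset_7.dat ...
    assert parse_outputs( [ '7:/depot/dataset_7.dat' ] ) == { 7: '/depot/dataset_7.dat' }
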
10 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/dbbc63c0630a
changeset: 2671:dbbc63c0630a
user: guru
date: Thu Sep 10 10:42:50 2009 -0400
description:
Updated manual builds with L. major (2005) genome.
1 file(s) affected in this change:
tool-data/shared/ucsc/manual_builds.txt
diffs (8 lines):
diff -r c3b40f23a0e0 -r dbbc63c0630a tool-data/shared/ucsc/manual_builds.txt
--- a/tool-data/shared/ucsc/manual_builds.txt Wed Sep 09 14:24:11 2009 -0400
+++ b/tool-data/shared/ucsc/manual_builds.txt Thu Sep 10 10:42:50 2009 -0400
@@ -664,3 +664,4 @@
baciAnth_AMES Bacillus anthracis str. Ames chr=5227293
shewOnei Shewanella oneidensis MR-1 plasmid_pMR-1=161613,chr=4969803
15217 Human herpesvirus 1 NC_001806=152261
+lMaj5 Leishmania major 2005 chr1=268984,chr2=355714,chr3=384518,chr4=441313,chr5=465823,chr6=516874,chr7=596348,chr8=574972,chr9=573441,chr10=570864,chr11=582575,chr12=675347,chr13=654604,chr14=622648,chr15=629514,chr16=714659,chr17=684831,chr18=739751,chr19=702212,chr20=742551,chr21=772974,chr22=716608,chr23=772567,chr24=840950,chr25=912849,chr26=1091579,chr27=1130447,chr28=1160128,chr29=1212674,chr30=1403454,chr31=1484336,chr32=1604650,chr33=1583673,chr34=1866754,chr35=2090491,chr36=2682183
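
Each manual_builds.txt line carries a build key, a display name, and a
comma-separated list of chrom=length pairs. A small sketch of reading one
such line; that the three fields are tab-separated is an assumption inferred
from, not stated in, the sample line above:

    # Parse one manual_builds.txt line into (key, name, {chrom: length}).
    # Assumes tab-separated fields, which is an inference from the sample.
    def parse_build_line( line ):
        key, name, chrom_field = line.rstrip( '\n' ).split( '\t' )
        chrom_lens = {}
        for pair in chrom_field.split( ',' ):
            chrom, length = pair.split( '=' )
            chrom_lens[ chrom ] = int( length )
        return key, name, chrom_lens
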
Hi,
I am looking for more information on the options in the Logging and
Debugging section of the universe_wsgi.ini file, in particular the
specific values that the first option, log_level ("verbosity of log
messages"), accepts.
I wish to turn off the debugging messages in the log file.
Thanks
Shaun Webb
--
The University of Edinburgh is a charitable body, registered in
Scotland, with registration number SC005336.
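
For reference, the value is a standard Python logging level name, assuming
Galaxy hands the ini setting straight to the logging module; setting
log_level = INFO (or higher) in universe_wsgi.ini should silence the DEBUG
lines. The names, lowest to highest severity:

    # Standard Python logging level names; a log_level of INFO or above
    # suppresses DEBUG output.  That Galaxy maps the ini value onto these
    # names is an assumption here.
    import logging
    for name in ( 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL' ):
        print name, getattr( logging, name )   # e.g. DEBUG 10 ... CRITICAL 50
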