galaxy-dev
 
March 2010
 - 36 participants
 - 171 discussions
18 Mar '10

details: http://www.bx.psu.edu/hg/galaxy/rev/6d079d53f9db
changeset: 3530:6d079d53f9db
user:      jeremy goecks <jeremy.goecks(a)emory.edu>
date:      Mon Mar 15 11:31:14 2010 -0400
description:
Add functionality for inline editing of dataset tags and annotations in the history panel (the new icons currently sit opposite the save/rerun icons). Various style and template fixes as well.
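
The access-control pattern underlying these changes: get_dataset() grows check_ownership/check_accessible flags, and the new async tag/annotation endpoints call it with check_ownership=False, check_accessible=True (written positionally as "False, True" throughout the diff below). A minimal, self-contained sketch of that pattern, using hypothetical stand-ins for Galaxy's model and security objects rather than the committed code:

# Sketch only: "datasets", "owner", and "can_access" are illustrative stand-ins;
# the real checks live on the controller mixin in lib/galaxy/web/base/controller.py.
class AccessError(Exception):
    pass

def get_dataset(datasets, dataset_id, user,
                check_ownership=True, check_accessible=False,
                can_access=lambda user, dataset: False):
    """Fetch a dataset by id, enforcing ownership and/or view access."""
    data = datasets.get(dataset_id)
    if data is None:
        raise AccessError("Invalid dataset id: %s" % dataset_id)
    if check_ownership:
        # Mutating operations require the current user to own the dataset.
        if user is None:
            raise AccessError("Must be logged in to manage Galaxy items")
        if data.owner != user:
            raise AccessError("%s is not owned by current user" % data.__class__.__name__)
    if check_accessible:
        # Read-style async endpoints (annotations, tags, embeds) only need view access.
        if not can_access(user, data):
            raise AccessError("You are not allowed to access this dataset")
    return data

The annotate_async/get_annotation_async endpoints added below use the read-only form, get_dataset(..., check_ownership=False, check_accessible=True), and sanitize any user-supplied annotation HTML with sanitize_html() before persisting it.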
diffstat:
 lib/galaxy/web/base/controller.py                    |   11 +-
 lib/galaxy/web/controllers/dataset.py                |   31 ++++-
 lib/galaxy/web/controllers/page.py                   |    2 +-
 lib/galaxy/web/controllers/tag.py                    |   19 ++-
 lib/galaxy/web/framework/helpers/grids.py            |    4 +-
 static/june_2007_style/autocomplete_tagging.css.tmpl |    5 +
 static/june_2007_style/base.css.tmpl                 |    9 +-
 static/june_2007_style/blue/autocomplete_tagging.css |    3 +-
 static/june_2007_style/blue/base.css                 |    3 +-
 templates/dataset/display.mako                       |    2 +-
 templates/dataset/embed.mako                         |    6 +-
 templates/display_base.mako                          |    2 +-
 templates/embed_base.mako                            |   20 ++-
 templates/grid_base_async.mako                       |    2 +-
 templates/root/history.mako                          |  110 ++++++++++++++++--
 templates/root/history_common.mako                   |   18 ++-
 templates/tagging_common.mako                        |    6 +-
 17 files changed, 203 insertions(+), 50 deletions(-)
diffs (553 lines):
diff -r 96ec861b4b6e -r 6d079d53f9db lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py	Sun Mar 14 11:49:44 2010 -0400
+++ b/lib/galaxy/web/base/controller.py	Mon Mar 15 11:31:14 2010 -0400
@@ -62,7 +62,7 @@
         
     def get_item_annotation_obj( self, db_session, user, item ):
         """ Returns a user's annotation object for an item. """
-        # Get annotation association. TODO: we could replace this eval() with a long if/else stmt, but this is more general without sacrificing
+        # Get annotation association.
         try:
             annotation_assoc_class = eval( "model.%sAnnotationAssociation" % item.__class__.__name__ )
         except:
@@ -126,7 +126,7 @@
 class UsesHistoryDatasetAssociation:
     """ Mixin for controllers that use HistoryDatasetAssociation objects. """
     
-    def get_dataset( self, trans, dataset_id, check_accessible=True ):
+    def get_dataset( self, trans, dataset_id, check_ownership=True, check_accessible=False ):
         """ Get an HDA object by id. """
         # DEPRECATION: We still support unencoded ids for backward compatibility
         try:
@@ -136,6 +136,13 @@
         data = trans.sa_session.query( model.HistoryDatasetAssociation ).get( dataset_id )
         if not data:
             raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid dataset id: %s." % str( dataset_id ) )
+        if check_ownership:
+            # Verify ownership.
+            user = trans.get_user()
+            if not user:
+                error( "Must be logged in to manage Galaxy items" )
+            if data.history.user != user:
+                error( "%s is not owned by current user" % data.__class__.__name__ )
         if check_accessible:
             current_user_roles = trans.get_current_user_roles()
             if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
diff -r 96ec861b4b6e -r 6d079d53f9db lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py	Sun Mar 14 11:49:44 2010 -0400
+++ b/lib/galaxy/web/controllers/dataset.py	Mon Mar 15 11:31:14 2010 -0400
@@ -5,6 +5,7 @@
 from galaxy import util, datatypes, jobs, web, model
 from cgi import escape, FieldStorage
 from galaxy.datatypes.display_applications.util import encode_dataset_user, decode_dataset_user
+from galaxy.util.sanitize_html import sanitize_html
 
 from email.MIMEText import MIMEText
 import pkg_resources; 
@@ -444,16 +445,14 @@
     @web.require_login( "use Galaxy datasets" )
     def get_name_and_link_async( self, trans, id=None ):
         """ Returns dataset's name and link. """
-        dataset = self.get_dataset( trans, id )
+        dataset = self.get_dataset( trans, id, False, True )
         return_dict = { "name" : dataset.name, "link" : url_for( action="display_by_username_and_slug", username=dataset.history.user.username, slug=trans.security.encode_id( dataset.id ) ) }
         return return_dict
                 
     @web.expose
     def get_embed_html_async( self, trans, id ):
         """ Returns HTML for embedding a dataset in a page. """
-
-        # TODO: user should be able to embed any item he has access to. see display_by_username_and_slug for security code.
-        dataset = self.get_dataset( trans, id )
+        dataset = self.get_dataset( trans, id, False, True )
         if dataset:
             return "Embedded Dataset '%s'" % dataset.name
 
@@ -466,7 +465,7 @@
     @web.expose
     def display_by_username_and_slug( self, trans, username, slug, preview=True ):
         """ Display dataset by username and slug; because datasets do not yet have slugs, the slug is the dataset's id. """
-        dataset = self.get_dataset( trans, slug )
+        dataset = self.get_dataset( trans, slug, False, True )
         if dataset:
             truncated, dataset_data = self.get_data( dataset, preview )
             dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )
@@ -478,7 +477,7 @@
     def get_item_content_async( self, trans, id ):
         """ Returns item content in HTML format. """
 
-        dataset = self.get_dataset( trans, id )
+        dataset = self.get_dataset( trans, id, False, True )
         if dataset is None:
             raise web.httpexceptions.HTTPNotFound()
         truncated, dataset_data = self.get_data( dataset, preview=True )
@@ -486,6 +485,24 @@
         dataset.annotation = self.get_item_annotation_str( trans.sa_session, trans.get_user(), dataset )
         return trans.stream_template_mako( "/dataset/item_content.mako", item=dataset, item_data=dataset_data, truncated=truncated )
         
+    @web.expose
+    def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
+        dataset = self.get_dataset( trans, id, False, True )
+        if not dataset:
+            web.httpexceptions.HTTPNotFound()
+        if dataset and new_annotation:
+            # Sanitize annotation before adding it.
+            new_annotation = sanitize_html( new_annotation, 'utf-8', 'text/html' )
+            self.add_item_annotation( trans, dataset, new_annotation )
+            trans.sa_session.flush()
+            return new_annotation
+    
+    @web.expose
+    def get_annotation_async( self, trans, id ):
+        dataset = self.get_dataset( trans, id, False, True )
+        if not dataset:
+            web.httpexceptions.HTTPNotFound()
+        return self.get_item_annotation_str( trans.sa_session, trans.get_user(), dataset )
 
     @web.expose
     def display_at( self, trans, dataset_id, filename=None, **kwd ):
@@ -704,7 +721,7 @@
                 if user != history.user:
                     error_msg = error_msg + "You do not have permission to add datasets to %i requested histories.  " % ( len( target_histories ) )
             for dataset_id in dataset_ids:
-                data = self.get_dataset( trans, dataset_id )
+                data = self.get_dataset( trans, dataset_id, False, True )
                 if data is None:
                     error_msg = error_msg + "You tried to copy a dataset that does not exist or that you do not have access to.  "
                     invalid_datasets += 1
diff -r 96ec861b4b6e -r 6d079d53f9db lib/galaxy/web/controllers/page.py
--- a/lib/galaxy/web/controllers/page.py	Sun Mar 14 11:49:44 2010 -0400
+++ b/lib/galaxy/web/controllers/page.py	Mon Mar 15 11:31:14 2010 -0400
@@ -674,7 +674,7 @@
                 datasets = self.get_history_datasets( trans, history )
                 return trans.fill_template( "history/embed.mako", item=history, item_data=datasets )
         elif item_class == model.HistoryDatasetAssociation:
-            dataset = self.get_dataset( trans, item_id )
+            dataset = self.get_dataset( trans, item_id, False, True )
             dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )
             if dataset:
                 data = self.get_data( dataset )
diff -r 96ec861b4b6e -r 6d079d53f9db lib/galaxy/web/controllers/tag.py
--- a/lib/galaxy/web/controllers/tag.py	Sun Mar 14 11:49:44 2010 -0400
+++ b/lib/galaxy/web/controllers/tag.py	Mon Mar 15 11:31:14 2010 -0400
@@ -11,12 +11,21 @@
 
     def __init__(self, app):
         BaseController.__init__(self, app)
-
-        # Set up tag handler to recognize the following items: History, HistoryDatasetAssociation, Page, ...
         self.tag_handler = TagHandler()
+        
+    @web.expose
+    @web.require_login( "edit item tags" )
+    def get_tagging_elt_async( self, trans, item_id, item_class, elt_context="" ):
+        """ Returns HTML for editing an item's tags. """
+        item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
+        if not item:
+            return trans.show_error_message( "No item of class %s with id % " % ( item_class, item_id ) )
+        user = trans.get_user()
+        return trans.fill_template( "/tagging_common.mako", tag_type="individual", user=trans.get_user(), tagged_item=item, elt_context=elt_context,
+                                    in_form=False, input_size="22", tag_click_fn="default_tag_click_fn", use_toggle_link=False )
 
     @web.expose
-    @web.require_login( "Add tag to an item." )
+    @web.require_login( "add tag to an item" )
     def add_tag_async( self, trans, item_id=None, item_class=None, new_tag=None, context=None ):
         """ Add tag to an item. """
                 
@@ -28,10 +37,10 @@
         
         # Log.
         params = dict( item_id=item.id, item_class=item_class, tag=new_tag)
-        trans.log_action( user, unicode( "tag"), context, params )
+        trans.log_action( user, unicode( "tag" ), context, params )
         
     @web.expose
-    @web.require_login( "Remove tag from an item." )
+    @web.require_login( "remove tag from an item" )
     def remove_tag_async( self, trans, item_id=None, item_class=None, tag_name=None, context=None ):
         """ Remove tag from an item. """
         
diff -r 96ec861b4b6e -r 6d079d53f9db lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py	Sun Mar 14 11:49:44 2010 -0400
+++ b/lib/galaxy/web/framework/helpers/grids.py	Mon Mar 15 11:31:14 2010 -0400
@@ -389,7 +389,7 @@
         self.grid_name = grid_name
     def get_value( self, trans, grid, item ):
         return trans.fill_template( "/tagging_common.mako", tag_type="community", trans=trans, user=trans.get_user(), tagged_item=item, elt_context=self.grid_name,
-                                    in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter" )
+                                    in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter", use_toggle_link=True )
     def filter( self, db_session, user, query, column_filter ):
         """ Modify query to filter model_class by tag. Multiple filters are ANDed. """
         if column_filter == "All":
@@ -418,7 +418,7 @@
     """ Column that supports individual tags. """
     def get_value( self, trans, grid, item ):
         return trans.fill_template( "/tagging_common.mako", tag_type="individual", trans=trans, user=trans.get_user(), tagged_item=item, elt_context=self.grid_name,
-                                    in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter" )
+                                    in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter", use_toggle_link=True )
     def get_filter( self, user, column_filter ):
             # Parse filter to extract multiple tags.
             tag_handler = TagHandler()
diff -r 96ec861b4b6e -r 6d079d53f9db static/june_2007_style/autocomplete_tagging.css.tmpl
--- a/static/june_2007_style/autocomplete_tagging.css.tmpl	Sun Mar 14 11:49:44 2010 -0400
+++ b/static/june_2007_style/autocomplete_tagging.css.tmpl	Mon Mar 15 11:31:14 2010 -0400
@@ -74,6 +74,11 @@
     cursor: pointer;
 }
 
+.individual-tag-area:hover 
+{
+    border:dotted #999999 1px;
+}
+
 .active-tag-area {
     background-color: white;
 }
diff -r 96ec861b4b6e -r 6d079d53f9db static/june_2007_style/base.css.tmpl
--- a/static/june_2007_style/base.css.tmpl	Sun Mar 14 11:49:44 2010 -0400
+++ b/static/june_2007_style/base.css.tmpl	Mon Mar 15 11:31:14 2010 -0400
@@ -835,8 +835,7 @@
     cursor:pointer;
 }
 
-.editable-text:hover{
-    background-image:url();
-    background-repeat:no-repeat;
-    background-position:right;
-}
+.editable-text:hover {
+    cursor: text;
+    border: dotted #999999 1px;
+}
\ No newline at end of file
diff -r 96ec861b4b6e -r 6d079d53f9db static/june_2007_style/blue/autocomplete_tagging.css
--- a/static/june_2007_style/blue/autocomplete_tagging.css	Sun Mar 14 11:49:44 2010 -0400
+++ b/static/june_2007_style/blue/autocomplete_tagging.css	Mon Mar 15 11:31:14 2010 -0400
@@ -7,7 +7,8 @@
 .ac_over{background-color:#0A246A;color:white;}
 .ac_header{font-style:normal;color:gray;border-bottom:0.1em solid gray;}
 .tag-area{width:100%;}
-.individual-tag-area{border:solid 1px #eee;cursor:pointer;}
+.individual-tag-area{cursor:pointer;}
+.individual-tag-area:hover{border:dotted #999999 1px;}
 .active-tag-area{background-color:white;}
 .toggle-link{font-weight:normal;padding:0.3em;margin-bottom:1em;width:100%;padding:0.2em 0em 0.2em 0em;}
 .tag-button{width:auto;color:#444;text-decoration:none;display:inline-block;cursor:pointer;margin:0.2em;border:solid #bbb 1px;padding:0.1em 0.5em 0.1em 0.5em;-moz-border-radius:.5em;-webkit-border-radius:.5em;border-radius:.5em;background:#eee;}
diff -r 96ec861b4b6e -r 6d079d53f9db static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css	Sun Mar 14 11:49:44 2010 -0400
+++ b/static/june_2007_style/blue/base.css	Mon Mar 15 11:31:14 2010 -0400
@@ -145,4 +145,5 @@
 .tipsy-east{background-position:right center;}
 .tipsy-west{background-position:left center;}
 .editable-text{cursor:pointer;}
-.editable-text:hover{background-image:url();background-repeat:no-repeat;background-position:right;}
+.editable-text:hover{cursor: text;border: dotted #999999 1px;}
+
diff -r 96ec861b4b6e -r 6d079d53f9db templates/dataset/display.mako
--- a/templates/dataset/display.mako	Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/dataset/display.mako	Mon Mar 15 11:31:14 2010 -0400
@@ -22,7 +22,7 @@
 </%def>
 
 <%def name="render_item_links( data )">
-    ## Provide links to save data and TODO: import dataset.
+    ## Provide links to save data and import dataset.
     <a href="${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( data.id ), to_ext=data.ext )}" class="icon-button disk tooltip" title="Save dataset"></a>
         <a href="${h.url_for( controller='/dataset', action='imp', dataset_id=trans.security.encode_id( data.id ) )}" class="icon-button import tooltip" title="Import dataset"></a>
 </%def>
diff -r 96ec861b4b6e -r 6d079d53f9db templates/dataset/embed.mako
--- a/templates/dataset/embed.mako	Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/dataset/embed.mako	Mon Mar 15 11:31:14 2010 -0400
@@ -3,8 +3,12 @@
 	from galaxy.web.framework.helpers import iff
 %>
 
-<%def name="render_item_specific_title_links( dataset )">
+<%def name="render_item_links( dataset )">
     <a href="${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( dataset.id ), to_ext=dataset.ext )}" title="Save dataset" class="icon-button disk tooltip"></a>
+    ## Links for importing and viewing an item.
+    <a href="${h.url_for( controller='/dataset', action='imp', dataset_id=trans.security.encode_id( item.id ) )}" title="Import dataset" class="icon-button import tooltip"></a>
+    <a class="icon-button go-to-full-screen tooltip" href="${h.url_for( controller='/dataset', action='display_by_username_and_slug', username=dataset.history.user.username, slug=trans.security.encode_id( dataset.id ) )}" title="Go to dataset"></a>
+    
 </%def>
 
 <%def name="render_summary_content( dataset, data )">
diff -r 96ec861b4b6e -r 6d079d53f9db templates/display_base.mako
--- a/templates/display_base.mako	Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/display_base.mako	Mon Mar 15 11:31:14 2010 -0400
@@ -25,7 +25,7 @@
 	self.has_left_panel=False
 	self.has_right_panel=True
 	self.message_box_visible=False
-	self.active_view="user"
+	self.active_view=""
 	self.overlay_visible=False
 %>
 </%def>
diff -r 96ec861b4b6e -r 6d079d53f9db templates/embed_base.mako
--- a/templates/embed_base.mako	Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/embed_base.mako	Mon Mar 15 11:31:14 2010 -0400
@@ -18,8 +18,19 @@
     </div>
 </div>
 
-## Render item-specific title links.
-<%def name="render_item_specific_title_links( item )">
+## Render item links.
+<%def name="render_item_links( item )">
+    <%
+        item_display_name = get_class_display_name( item.__class__ ).lower()
+        item_controller = "/%s" % get_controller_name( item )
+        item_user = get_item_user( item )
+        item_slug = get_item_slug( item )
+        display_href = h.url_for( controller=item_controller, action='display_by_username_and_slug', username=item_user.username, slug=item_slug )
+    %>
+    
+    ## Links for importing and viewing an item.
+    <a href="${h.url_for( controller=item_controller, action='imp', id=trans.security.encode_id( item.id ) )}" title="Import ${item_display_name}" class="icon-button import tooltip"></a>
+    <a class="icon-button go-to-full-screen tooltip" href="${display_href}" title="Go to ${item_display_name}"></a>
 </%def>
 
 <%def name="render_title( item )">
@@ -36,10 +47,7 @@
         <a class="toggle-contract icon-button tooltip" href="${display_href}" title="Hide ${item_display_name} content"></a>
     </div>
     <div style="float: right">
-        ${self.render_item_specific_title_links( item )}
-        ## Links applicable for all items.
-        <a href="${h.url_for( controller=item_controller, action='imp', id=trans.security.encode_id( item.id ) )}" title="Import ${item_display_name}" class="icon-button import tooltip"></a>
-        <a class="icon-button go-to-full-screen tooltip" href="${display_href}" title="Go to ${item_display_name}"></a>
+        ${self.render_item_links( item )}
     </div>
     <h4><a class="toggle-embed tooltip" href="${display_href}" title="Show or hide ${item_display_name} content">Galaxy ${get_class_display_name( item.__class__ )} | ${get_item_name( item )}</a></h4>
     %if hasattr( item, "annotation") and item.annotation:
diff -r 96ec861b4b6e -r 6d079d53f9db templates/grid_base_async.mako
--- a/templates/grid_base_async.mako	Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/grid_base_async.mako	Mon Mar 15 11:31:14 2010 -0400
@@ -13,4 +13,4 @@
 *****
 ${num_pages}
 *****
-${render_message( grid )}
\ No newline at end of file
+${render_message( message, message_type )}
\ No newline at end of file
diff -r 96ec861b4b6e -r 6d079d53f9db templates/root/history.mako
--- a/templates/root/history.mako	Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/root/history.mako	Mon Mar 15 11:31:14 2010 -0400
@@ -42,10 +42,10 @@
         $.jStore.remove("history_expand_state");
     }).show();
     
-    // Rename management.
+    // History rename functionality.
     async_save_text("history-name-container", "history-name", "${h.url_for( controller="/history", action="rename_async", id=trans.security.encode_id(history.id) )}", "new_name", 18);
     
-    // Tag management.
+    // History tagging functionality.
     var historyTagArea = $('#history-tag-area');
     $('#history-tag').click( function() 
     {
@@ -57,7 +57,7 @@
         return false;
     });
     
-    // Annotation management.
+    // History annotation functionality.
     var historyAnnotationArea = $('#history-annotation-area');
     $('#history-annotate').click( function() {
         if ( historyAnnotationArea.is( ":hidden" ) ) {
@@ -104,7 +104,8 @@
         })
     }
 }
-// Add show/hide link and delete link to a history item
+// (a) Add show/hide link and delete link to a history item;
+// (b) handle tagging and annotation using jquery.
 function setupHistoryItem( query ) {
     query.each( function() {
         var id = this.id;
@@ -180,6 +181,99 @@
                 return false;
             });
         });
+        
+        // Tag handling.
+        $(this).find( "a.icon-button.tags").each( function() 
+        {
+            // Use links parameters but custom URL as ajax URL.
+            $(this).click( function() {
+                // Get tag area, tag element.
+                var history_item = $(this).parents(".historyItem");
+                var tag_area = history_item.find(".tag-area");
+                var tag_elt = history_item.find(".tag-elt");
+                
+                // Show or hide tag area; if showing tag area and it's empty, fill it.
+                if ( tag_area.is( ":hidden" ) ) 
+                {
+                    if (tag_elt.html() == "" )
+                    {
+                        // Need to fill tag element.
+                        var href_parms = $(this).attr("href").split("?")[1];
+                        var ajax_url = "${h.url_for( controller='tag', action='get_tagging_elt_async' )}?" + href_parms;
+                        $.ajax({
+                            url: ajax_url,
+                            error: function() { alert( "Tagging failed" ) },
+                            success: function(tag_elt_html) {
+                                tag_elt.html(tag_elt_html);
+                                tag_elt.find(".tooltip").tipsy( { gravity: 's' } );
+                                tag_area.slideDown("fast");
+                            }
+                        });
+                    }
+                    else
+                    {
+                        // Tag element is filled; show.
+                        tag_area.slideDown("fast");
+                    }
+                } 
+                else 
+                {
+                    // Hide.
+                    tag_area.slideUp("fast");
+                }
+                return false;        
+            });
+        });
+        
+        // Annotation handling.
+        $(this).find( "a.icon-button.annotate").each( function() 
+        {
+            // Use links parameters but custom URL as ajax URL.
+            $(this).click( function() {
+                // Get tag area, tag element.
+                var history_item = $(this).parents(".historyItem");
+                var annotation_area = history_item.find(".annotation-area");
+                var annotation_elt = history_item.find(".annotation-elt");
+                
+                // Show or hide annotation area; if showing annotation area and it's empty, fill it.
+                if ( annotation_area.is( ":hidden" ) ) 
+                {
+                    if (annotation_elt.html() == "" )
+                    {
+                        // Need to fill annotation element.
+                        var href_parms = $(this).attr("href").split("?")[1];
+                        var ajax_url = "${h.url_for( controller='dataset', action='get_annotation_async' )}?" + href_parms;
+                        $.ajax({
+                            url: ajax_url,
+                            error: function() { alert( "Annotations failed" ) },
+                            success: function(annotation) {
+                                if (annotation == "")
+                                    annotation = "<i>Describe or add notes to dataset</i>";
+                                annotation_elt.html(annotation);
+                                annotation_area.find(".tooltip").tipsy( { gravity: 's' } );
+                                async_save_text(
+                                    annotation_elt.attr("id"), annotation_elt.attr("id"),
+                                    "${h.url_for( controller="/dataset", action="annotate_async")}?" + href_parms,
+                                    "new_annotation", 18, true, 4);
+                                annotation_area.slideDown("fast");
+                            }
+                        });
+                    }
+                    else
+                    {
+                        // Annotation element is filled; show.
+                        annotation_area.slideDown("fast");
+                    }
+                } 
+                else 
+                {
+                    // Hide.
+                    annotation_area.slideUp("fast");
+                }
+                return false;        
+            });
+        });
+        
     });
 };
 // Looks for changes in dataset state using an async request. Keeps
@@ -279,13 +373,6 @@
     padding: 3px;
     margin: -4px;
 }
-.editable-text:hover {
-    cursor: text;
-    border: dotted #999999 1px;
-}
-.tag-area {
-    border: none;
-}
 </style>
 
 <noscript>
@@ -299,7 +386,6 @@
 </head>
 
 <body class="historyPage">
-    
 <div id="top-links" class="historyLinks">
     
     <a title="${_('refresh')}" class="icon-button arrow-circle tooltip" href="${h.url_for('history', show_deleted=show_deleted)}"></a>
diff -r 96ec861b4b6e -r 6d079d53f9db templates/root/history_common.mako
--- a/templates/root/history_common.mako	Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/root/history_common.mako	Mon Mar 15 11:31:14 2010 -0400
@@ -86,10 +86,26 @@
             </div>
             <div class="info">${_('Info: ')}${data.display_info()}</div>
             <div> 
+                <% dataset_id=trans.security.encode_id( data.id ) %>
                 %if data.has_data:
-                    <a href="${h.url_for( controller='dataset', action='display', dataset_id=trans.security.encode_id( data.id ), to_ext=data.ext )}" title="Save" class="icon-button disk tooltip"></a>
+                    <a href="${h.url_for( controller='dataset', action='display', dataset_id=dataset_id, to_ext=data.ext )}" title="Save" class="icon-button disk tooltip"></a>
                     %if user_owns_dataset:
                         <a href="${h.url_for( controller='tool_runner', action='rerun', id=data.id )}" target="galaxy_main" title="Run this job again" class="icon-button arrow-circle tooltip"></a>
+                        %if trans.user:
+                            <div style="float: right">
+                                <a href="${h.url_for( controller='tag', action='retag', item_class=data.__class__.__name__, item_id=dataset_id )}" target="galaxy_main" title="Edit dataset tags" class="icon-button tags tooltip"></a>
+                                <a href="${h.url_for( controller='dataset', action='annotate', id=dataset_id )}" target="galaxy_main" title="Edit dataset annotation" class="icon-button annotate tooltip"></a>
+                            </div>
+                            <div style="clear: both"></div>
+                            <div class="tag-area" style="display: none">
+                                <strong>Tags:</strong>
+                                <div class="tag-elt"></div>
+                            </div>
+                            <div id="${dataset_id}-annotation-area" class="annotation-area" style="display: none">
+                                <strong>Annotation:</strong>
+                                <div id="${dataset_id}-annotation-elt" style="margin: 1px 0px 1px 0px" class="annotation-elt tooltip editable-text" title="Edit dataset annotation"></div>
+                            </div>
+                        %endif
                     %endif
 		    <div style="clear: both"></div>
                     %for display_app in data.datatype.get_display_types():
diff -r 96ec861b4b6e -r 6d079d53f9db templates/tagging_common.mako
--- a/templates/tagging_common.mako	Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/tagging_common.mako	Mon Mar 15 11:31:14 2010 -0400
@@ -13,7 +13,7 @@
 ## Render a tagging element if there is a tagged_item.
 %if tagged_item is not None:
     %if tag_type == "individual":
-        ${render_individual_tagging_element(user=user, tagged_item=tagged_item, elt_context=elt_context, in_form=in_form, input_size=input_size, tag_click_fn=tag_click_fn)}
+        ${render_individual_tagging_element( user=user, tagged_item=tagged_item, elt_context=elt_context, in_form=in_form, input_size=input_size, tag_click_fn=tag_click_fn, use_toggle_link=use_toggle_link )}
     %elif tag_type == "community":
         ${render_community_tagging_element(tagged_item=tagged_item, elt_context=elt_context, tag_click_fn=tag_click_fn)}
     %endif
@@ -123,7 +123,7 @@
         //
         // Set up autocomplete tagger.
         //
-    
+
         //
         // Default function get text to display on the toggle link.
         //
@@ -193,7 +193,7 @@
             ajax_delete_tag_url: "${h.url_for( controller='/tag', action='remove_tag_async', item_id=tagged_item_id, item_class=tagged_item.__class__.__name__, context=elt_context )}",
             delete_tag_img: "${h.url_for('/static/images/delete_tag_icon_gray.png')}",
             delete_tag_img_rollover: "${h.url_for('/static/images/delete_tag_icon_white.png')}",
-            use_toggle_link: ${iff( use_toggle_link, 'true', 'false' )},
+            use_toggle_link: ${iff( use_toggle_link, 'true', 'false' )}
          };
          
         $('#${elt_id}').autocomplete_tagging(options);
18 Mar '10

details: http://www.bx.psu.edu/hg/galaxy/rev/96ec861b4b6e
changeset: 3529:96ec861b4b6e
user:      jeremy goecks <jeremy.goecks(a)emory.edu>
date:      Sun Mar 14 11:49:44 2010 -0400
description:
Make the login/register/logout sequence more user-friendly. Specific changes: (a) use panels so that the Galaxy masthead is always available; (b) provide links to guide users past login; (c) enable 'user' to be an active view; (d) update functional tests.
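
The framework-level piece of this change is that require_login() and the show_*_message()/show_form() helpers gain use_panels and active_view arguments, so the login, logout, and registration pages render inside the full Galaxy panel layout with the User tab highlighted. A simplified, standalone sketch of the decorator as it reads after this patch (the real version wraps the verb in a link to the login controller via url_for):

def require_login(verb="perform this action", use_panels=False):
    def argcatcher(func):
        def decorator(self, trans, *args, **kwargs):
            if trans.get_user():
                return func(self, trans, *args, **kwargs)
            # Not logged in: render the error message, optionally inside the panel layout.
            return trans.show_error_message(
                "You must be logged in to %s." % verb, use_panels=use_panels)
        return decorator
    return argcatcher

Usage, as in the workflow and visualization controllers touched by this patch:

    @web.expose
    @web.require_login("use Galaxy workflows", use_panels=True)
    def list(self, trans):
        ...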
diffstat:
 lib/galaxy/web/controllers/dataset.py       |    4 +-
 lib/galaxy/web/controllers/user.py          |   28 ++-
 lib/galaxy/web/controllers/visualization.py |    2 +-
 lib/galaxy/web/controllers/workflow.py      |    8 +-
 lib/galaxy/web/framework/__init__.py        |   26 +-
 templates/base_panels.mako                  |   14 +-
 templates/display_base.mako                 |   12 +-
 templates/form.mako                         |  115 ++++++++++------
 templates/history/list_published.mako       |    2 +-
 templates/message.mako                      |    3 +-
 templates/page/list_published.mako          |    2 +-
 templates/user/register.mako                |  162 ++++++++++++----------
 templates/visualization/list_published.mako |    2 +-
 templates/workflow/list.mako                |  193 +++++++++++++++------------
 templates/workflow/list_published.mako      |    2 +-
 test/base/twilltestcase.py                  |   11 +-
 16 files changed, 329 insertions(+), 257 deletions(-)
diffs (900 lines):
diff -r 48e83411aa91 -r 96ec861b4b6e lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py	Fri Mar 12 16:11:26 2010 -0500
+++ b/lib/galaxy/web/controllers/dataset.py	Sun Mar 14 11:49:44 2010 -0400
@@ -436,8 +436,8 @@
         # Do import.
         cur_history = trans.get_history( create=True )
         status, message = self._copy_datasets( trans, [ dataset_id ], [ cur_history ] )
-        message = message + "<br>You can <a href='%s'>start using the dataset</a> or %s." % ( url_for('/'),  referer_message )
-        return trans.show_message( message, type=status )
+        message = "Dataset imported. <br>You can <a href='%s'>start using the dataset</a> or %s." % ( url_for('/'),  referer_message )
+        return trans.show_message( message, type=status, use_panels=True )
         
     @web.expose
     @web.json
diff -r 48e83411aa91 -r 96ec861b4b6e lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py	Fri Mar 12 16:11:26 2010 -0500
+++ b/lib/galaxy/web/controllers/user.py	Sun Mar 14 11:49:44 2010 -0400
@@ -115,9 +115,14 @@
                                 you share publicly. Usernames must be at least
                                 four characters in length and contain only lowercase
                                 letters, numbers, and the '-' character.""" ) )
+                                
     @web.expose
-    def login( self, trans, email='', password='' ):
+    def login( self, trans, email='', password='', referer='', use_panels='True' ):
         email_error = password_error = None
+        
+        # Convert use_panels to Boolean.
+        use_panels = use_panels in [ 'True', 'true', 't', 'T' ]
+        
         # Attempt login
         if trans.app.config.require_login:
             refresh_frames = [ 'masthead', 'history', 'tools' ]
@@ -136,21 +141,23 @@
             else:
                 trans.handle_user_login( user )
                 trans.log_event( "User logged in" )
-                msg = "Now logged in as " + user.email + "."
+                msg = "You are now logged in as %s.<br>You can <a href='%s'>go back to the page you were visiting</a> or <a href='%s'>go to the Galaxy homepage</a>." % ( user.email, referer, url_for( '/' ) )
                 if trans.app.config.require_login:
                     msg += '  <a href="%s">Click here</a> to continue to the front page.' % web.url_for( '/static/welcome.html' )
-                return trans.show_ok_message( msg, refresh_frames=refresh_frames )
+                return trans.show_ok_message( msg, refresh_frames=refresh_frames, use_panels=use_panels, active_view="user" )
         form = web.FormBuilder( web.url_for(), "Login", submit_text="Login" ) \
                 .add_text( "email", "Email address", value=email, error=email_error ) \
                 .add_password( "password", "Password", value='', error=password_error, 
-                                help="<a href='%s'>Forgot password? Reset here</a>" % web.url_for( action='reset_password' ) )
+                                help="<a href='%s'>Forgot password? Reset here</a>" % web.url_for( action='reset_password' ) ) \
+                .add_input( "hidden", "referer", "referer", value=trans.request.referer, use_label=False )
         if trans.app.config.require_login:
             if trans.app.config.allow_user_creation:
-                return trans.show_form( form, header = require_login_creation_template % web.url_for( action = 'create' ) )
+                return trans.show_form( form, header = require_login_creation_template % web.url_for( action = 'create' ), use_panels=use_panels, active_view="user" )
             else:
-                return trans.show_form( form, header = require_login_nocreation_template )
+                return trans.show_form( form, header = require_login_nocreation_template, use_panels=use_panels, active_view="user" )
         else:
-            return trans.show_form( form )
+            return trans.show_form( form, use_panels=use_panels, active_view="user" )
+            
     @web.expose
     def logout( self, trans ):
         if trans.app.config.require_login:
@@ -160,10 +167,11 @@
         # Since logging an event requires a session, we'll log prior to ending the session
         trans.log_event( "User logged out" )
         trans.handle_user_logout()
-        msg = "You are no longer logged in."
+        msg = "You have been logged out.<br>You can <a href='%s'>go back to the page you were visiting</a> or <a href='%s'>go to the Galaxy homepage</a>." % ( trans.request.referer, url_for( '/' ) )
         if trans.app.config.require_login:
             msg += '  <a href="%s">Click here</a> to return to the login page.' % web.url_for( controller='user', action='login' )
-        return trans.show_ok_message( msg, refresh_frames=refresh_frames )
+        return trans.show_ok_message( msg, refresh_frames=refresh_frames, use_panels=True, active_view="user" )
+        
     @web.expose
     def create( self, trans, **kwd ):
         params = util.Params( kwd )
@@ -217,7 +225,7 @@
                 trans.log_event( "User created a new account" )
                 trans.log_event( "User logged in" )
                 # subscribe user to email list
-                return trans.show_ok_message( "Now logged in as " + user.email, refresh_frames=refresh_frames )
+                return trans.show_ok_message( "Now logged in as %s.<br><a href='%s'>Return to the Galaxy start page.</a>" % ( user.email, url_for( '/' ) ), refresh_frames=refresh_frames, use_panels=True )
             else:
                 trans.response.send_redirect( web.url_for( controller='admin',
                                                            action='users',
diff -r 48e83411aa91 -r 96ec861b4b6e lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py	Fri Mar 12 16:11:26 2010 -0500
+++ b/lib/galaxy/web/controllers/visualization.py	Sun Mar 14 11:49:44 2010 -0400
@@ -75,7 +75,7 @@
             return trans.fill_template( "visualization/list_published.mako", grid=grid )
     
     @web.expose
-    @web.require_login("use Galaxy visualizations")
+    @web.require_login( "use Galaxy visualizations", use_panels=True )
     def list( self, trans, *args, **kwargs ):
         # Handle operation
         if 'operation' in kwargs and 'id' in kwargs:
diff -r 48e83411aa91 -r 96ec861b4b6e lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py	Fri Mar 12 16:11:26 2010 -0500
+++ b/lib/galaxy/web/controllers/workflow.py	Sun Mar 14 11:49:44 2010 -0400
@@ -85,7 +85,7 @@
     
     @web.expose
     def index( self, trans ):
-        return trans.fill_template( "workflow/index.mako" )
+        return self.list( trans )
         
     @web.expose
     @web.require_login( "use Galaxy workflows" )
@@ -102,7 +102,7 @@
         return self.stored_list_grid( trans, **kwargs )
                                    
     @web.expose
-    @web.require_login( "use Galaxy workflows" )
+    @web.require_login( "use Galaxy workflows", use_panels=True )
     def list( self, trans ):
         """
         Render workflow main page (management of existing workflows)
@@ -276,7 +276,7 @@
                                     item=stored )
 
     @web.expose
-    @web.require_login( "use Galaxy workflows" )
+    @web.require_login( "to import a workflow", use_panels=True )
     def imp( self, trans, id, **kwargs ):
         # Set referer message.
         referer = trans.request.referer
@@ -284,7 +284,7 @@
             referer_message = "<a href='%s'>return to the previous page</a>" % referer
         else:
             referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
-        
+                    
         # Do import.
         session = trans.sa_session
         stored = self.get_stored_workflow( trans, id, check_ownership=False )
diff -r 48e83411aa91 -r 96ec861b4b6e lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py	Fri Mar 12 16:11:26 2010 -0500
+++ b/lib/galaxy/web/framework/__init__.py	Sun Mar 14 11:49:44 2010 -0400
@@ -65,15 +65,15 @@
     decorator.exposed = True
     return decorator
 
-def require_login( verb="perform this action" ):
+def require_login( verb="perform this action", use_panels=False ):
     def argcatcher( func ):
         def decorator( self, trans, *args, **kwargs ):
             if trans.get_user():
                 return func( self, trans, *args, **kwargs )
             else:
                 return trans.show_error_message(
-                    "You must be <a target='galaxy_main' href='%s'>logged in</a> to %s</div>"
-                    % ( url_for( controller='user', action='login' ), verb ) )      
+                    "You must be <a target='_top' href='%s'>logged in</a> to %s</div>."
+                    % ( url_for( controller='user', action='login' ), verb ), use_panels=use_panels )      
         return decorator
     return argcatcher
     
@@ -561,7 +561,7 @@
         context.
         """
         return self.template_context['message']
-    def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False ):
+    def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False, active_view="" ):
         """
         Convenience method for displaying a simple page with a single message.
         
@@ -571,28 +571,28 @@
         `refresh_frames`: names of frames in the interface that should be 
                           refreshed when the message is displayed
         """
-        return self.fill_template( "message.mako", message_type=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels )
-    def show_error_message( self, message, refresh_frames=[], use_panels=False ):
+        return self.fill_template( "message.mako", message_type=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels, active_view=active_view )
+    def show_error_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
         """
         Convenience method for displaying an error message. See `show_message`.
         """
-        return self.show_message( message, 'error', refresh_frames, use_panels=use_panels )
-    def show_ok_message( self, message, refresh_frames=[], use_panels=False ):
+        return self.show_message( message, 'error', refresh_frames, use_panels=use_panels, active_view=active_view )
+    def show_ok_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
         """
         Convenience method for displaying an ok message. See `show_message`.
         """
-        return self.show_message( message, 'done', refresh_frames, use_panels=use_panels )
-    def show_warn_message( self, message, refresh_frames=[], use_panels=False ):
+        return self.show_message( message, 'done', refresh_frames, use_panels=use_panels, active_view=active_view )
+    def show_warn_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
         """
         Convenience method for displaying an warn message. See `show_message`.
         """
-        return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels )
-    def show_form( self, form, header=None, template="form.mako" ):
+        return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels, active_view=active_view )
+    def show_form( self, form, header=None, template="form.mako", use_panels=False, active_view="" ):
         """
         Convenience method for displaying a simple page with a single HTML
         form.
         """    
-        return self.fill_template( template, form=form, header=header )
+        return self.fill_template( template, form=form, header=header, use_panels=use_panels, active_view=active_view )
     def fill_template(self, filename, **kwargs):
         """
         Fill in a template, putting any keyword arguments on the context.
diff -r 48e83411aa91 -r 96ec861b4b6e templates/base_panels.mako
--- a/templates/base_panels.mako	Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/base_panels.mako	Sun Mar 14 11:49:44 2010 -0400
@@ -227,7 +227,13 @@
         </div>
     </td>
     
-    <td class="tab">
+    ## User tab.
+    <%
+        cls = "tab"
+        if self.active_view == 'user':
+	        cls += " active"
+    %>
+    <td class="${cls}">
         <a>User</a>
         <%
         if trans.user:
@@ -241,9 +247,9 @@
         %>
         <div class="submenu">
         <ul class="loggedout-only" style="${style1}">
-            <li><a target="galaxy_main" href="${h.url_for( controller='/user', action='login' )}">Login</a></li>
+            <li><a href="${h.url_for( controller='/user', action='login' )}">Login</a></li>
             %if app.config.allow_user_creation:
-            <li><a target="galaxy_main" href="${h.url_for( controller='/user', action='create' )}">Register</a></li>
+            <li><a href="${h.url_for( controller='/user', action='create' )}">Register</a></li>
             %endif
         </ul>
         <ul class="loggedin-only" style="${style2}">
@@ -259,7 +265,7 @@
                         logout_target = ""
                         logout_url = h.url_for( controller='/root', action='index', m_c='user', m_a='logout' )
                     else:
-                        logout_target = "galaxy_main"
+                        logout_target = ""
                         logout_url = h.url_for( controller='/user', action='logout' )
                 %>
                 <li><a target="${logout_target}" href="${logout_url}">Logout</a></li>
diff -r 48e83411aa91 -r 96ec861b4b6e templates/display_base.mako
--- a/templates/display_base.mako	Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/display_base.mako	Sun Mar 14 11:49:44 2010 -0400
@@ -216,11 +216,13 @@
                     %endif
                 </div>
                 ## Individual tags.
-                <p>
-                <div>
-                    Yours:
-                    ${render_individual_tagging_element( user=trans.get_user(), tagged_item=item, elt_context='view.mako', use_toggle_link=False, tag_click_fn='community_tag_click' )}
-                </div>
+                %if trans.get_user():
+                    <p>
+                    <div>
+                        Yours:
+                        ${render_individual_tagging_element( user=trans.get_user(), tagged_item=item, elt_context='view.mako', use_toggle_link=False, tag_click_fn='community_tag_click' )}
+                    </div>
+                %endif
             </div>    
         </div>
     </div>
diff -r 48e83411aa91 -r 96ec861b4b6e templates/form.mako
--- a/templates/form.mako	Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/form.mako	Sun Mar 14 11:49:44 2010 -0400
@@ -1,54 +1,83 @@
+<%!
+    def inherit(context):
+        if context.get('use_panels') is True:
+            print "here"
+            return '/base_panels.mako'
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
 <% _=n_ %>
-<%inherit file="/base.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view=active_view
+    self.message_box_visible=False
+%>
+</%def>
+
+
 <%def name="title()">${form.title}</%def>
 
 <%def name="javascripts()">
-${parent.javascripts()}
-<script type="text/javascript">
-$(function(){
-    $("input:text:first").focus();
-})
-</script>
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
 </%def>
 
-%if header:
-    ${header}
-%endif
+<%def name="center_panel()">
+    ${render_form( )}
+</%def>
 
-<div class="form">
-    <div class="form-title">${form.title}</div>
-    <div class="form-body">
-    <form name="${form.name}" action="${form.action}" method="post" >
-        %for input in form.inputs:
-            <%
-            cls = "form-row"
-            if input.error:
-                cls += " form-row-error"
-            %>
-            <div class="${cls}">
-            %if input.use_label:
-              <label>
-                  ${_(input.label)}:
-              </label>
-            %endif
-              <div class="form-row-input">
-                  <input type="${input.type}" name="${input.name}" value="${input.value}" size="40">
-              </div>
-              %if input.error:
-              <div class="form-row-error-message">${input.error}</div>
-              %endif
-              %if input.help:
-              <div class="toolParamHelp" style="clear: both;">
-                  ${input.help}
-              </div>
-              %endif
+<%def name="body()">
+    ${render_form( )}
+</%def>
+
+<%def name="render_form()">
+    %if header:
+        ${header}
+    %endif
     
-                  <div style="clear: both"></div>
+    <div class="form" style="margin: 1em">
+        <div class="form-title">${form.title}</div>
+        <div class="form-body">
+        <form name="${form.name}" action="${form.action}" method="post" >
+            %for input in form.inputs:
+                <%
+                cls = "form-row"
+                if input.error:
+                    cls += " form-row-error"
+                %>
+                <div class="${cls}">
+                %if input.use_label:
+                  <label>
+                      ${_(input.label)}:
+                  </label>
+                %endif
+                  <div class="form-row-input">
+                      <input type="${input.type}" name="${input.name}" value="${input.value}" size="40">
+                  </div>
+                  %if input.error:
+                  <div class="form-row-error-message">${input.error}</div>
+                  %endif
+                  %if input.help:
+                  <div class="toolParamHelp" style="clear: both;">
+                      ${input.help}
+                  </div>
+                  %endif
     
-                </div>
-            %endfor
-            <div class="form-row"><input type="submit" value="${form.submit_text}"></div>
+                      <div style="clear: both"></div>
     
-        </form>
+                    </div>
+                %endfor
+                <div class="form-row"><input type="submit" value="${form.submit_text}"></div>
+    
+            </form>
+        </div>
     </div>
-</div>
+</%def>
\ No newline at end of file
diff -r 48e83411aa91 -r 96ec861b4b6e templates/history/list_published.mako
--- a/templates/history/list_published.mako	Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/history/list_published.mako	Sun Mar 14 11:49:44 2010 -0400
@@ -10,7 +10,7 @@
 </%def>
 
 <%def name="title()">
-    Galaxy :: Published Histories
+    Galaxy | Published Histories
 </%def>
 
 <%def name="stylesheets()">
diff -r 48e83411aa91 -r 96ec861b4b6e templates/message.mako
--- a/templates/message.mako	Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/message.mako	Sun Mar 14 11:49:44 2010 -0400
@@ -1,6 +1,6 @@
 <%!
     def inherit(context):
-        if context.get('use_panels'):
+        if context.get('use_panels') is True:
             return '/base_panels.mako'
         else:
             return '/base.mako'
@@ -69,7 +69,6 @@
     ${render_large_message( message, message_type )}
 </%def>
 
-## Render the grid's basic elements. Each of these elements can be subclassed.
 <%def name="body()">
     ${render_large_message( message, message_type )}
 </%def>
diff -r 48e83411aa91 -r 96ec861b4b6e templates/page/list_published.mako
--- a/templates/page/list_published.mako	Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/page/list_published.mako	Sun Mar 14 11:49:44 2010 -0400
@@ -10,7 +10,7 @@
 </%def>
 
 <%def name="title()">
-    Galaxy :: Published Pages
+    Galaxy | Published Pages
 </%def>
 
 <%def name="stylesheets()">
diff -r 48e83411aa91 -r 96ec861b4b6e templates/user/register.mako
--- a/templates/user/register.mako	Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/user/register.mako	Sun Mar 14 11:49:44 2010 -0400
@@ -1,87 +1,99 @@
-<%inherit file="/base.mako"/>
+<%inherit file="/base_panels.mako"/>
 <%namespace file="/message.mako" import="render_msg" />
 
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="user"
+    self.message_box_visible=False
+%>
+</%def>
 
-%if msg:
-    ${render_msg( msg, messagetype )}
-%endif
-
-
-
-<script type="text/javascript">
-$( function() {
-    $( "select[refresh_on_change='true']").change( function() {
-        var refresh = false;
-        var refresh_on_change_values = $( this )[0].attributes.getNamedItem( 'refresh_on_change_values' )
-        if ( refresh_on_change_values ) {
-            refresh_on_change_values = refresh_on_change_values.value.split( ',' );
-            var last_selected_value = $( this )[0].attributes.getNamedItem( 'last_selected_value' );
-            for( i= 0; i < refresh_on_change_values.length; i++ ) {
-                if ( $( this )[0].value == refresh_on_change_values[i] || ( last_selected_value && last_selected_value.value == refresh_on_change_values[i] ) ){
-                    refresh = true;
-                    break;
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+    $( function() {
+        $( "select[refresh_on_change='true']").change( function() {
+            var refresh = false;
+            var refresh_on_change_values = $( this )[0].attributes.getNamedItem( 'refresh_on_change_values' )
+            if ( refresh_on_change_values ) {
+                refresh_on_change_values = refresh_on_change_values.value.split( ',' );
+                var last_selected_value = $( this )[0].attributes.getNamedItem( 'last_selected_value' );
+                for( i= 0; i < refresh_on_change_values.length; i++ ) {
+                    if ( $( this )[0].value == refresh_on_change_values[i] || ( last_selected_value && last_selected_value.value == refresh_on_change_values[i] ) ){
+                        refresh = true;
+                        break;
+                    }
                 }
             }
-        }
-        else {
-            refresh = true;
-        }
-        if ( refresh ){
-            $( "#registration" ).submit();
-        }
+            else {
+                refresh = true;
+            }
+            if ( refresh ){
+                $( "#registration" ).submit();
+            }
+        });
     });
-});
-</script>
+    </script>
 
-<div class="toolForm">
-    <form name="registration" id="registration" action="${h.url_for( controller='user', action='create', admin_view=admin_view )}" method="post" >
-        <div class="toolFormTitle">Create account</div>
-        <div class="form-row">
-            <label>Email</label>
-            ${login_info[ 'Email' ].get_html()}
-        </div>
-        <div class="form-row">
-            <label>Password</label>
-            ${login_info[ 'Password' ].get_html()}
-        </div>
-        <div class="form-row">
-            <label>Confirm</label>
-            ${login_info[ 'Confirm' ].get_html()}
-        </div>
-        <div class="form-row">
-            <label>Public Username</label>
-            ${login_info[ 'Public Username' ].get_html()}
-            <div class="toolParamHelp" style="clear: both;">
-                Optional
+</%def>
+
+<%def name="center_panel()">
+    %if msg:
+        ${render_msg( msg, messagetype )}
+    %endif
+
+    <div class="toolForm" style="margin: 1em">
+        <form name="registration" id="registration" action="${h.url_for( controller='user', action='create', admin_view=admin_view )}" method="post" >
+            <div class="toolFormTitle">Create account</div>
+            <div class="form-row">
+                <label>Email</label>
+                ${login_info[ 'Email' ].get_html()}
             </div>
-        </div>
-        <div class="form-row">
-            <label>Subscribe To Mailing List</label>
-            ${login_info[ 'Subscribe To Mailing List' ].get_html()}
-        </div>
-        %if user_info_select:
             <div class="form-row">
-                <label>User type</label>
-                ${user_info_select.get_html()}
+                <label>Password</label>
+                ${login_info[ 'Password' ].get_html()}
             </div>
-        %endif
-        %if user_info_form:
-            %for field in widgets:
+            <div class="form-row">
+                <label>Confirm Password</label>
+                ${login_info[ 'Confirm' ].get_html()}
+            </div>
+            <div class="form-row">
+                <label>Public Username</label>
+                ${login_info[ 'Public Username' ].get_html()}
+                <div class="toolParamHelp" style="clear: both;">
+                    When you share or publish items, this name is shown as the author.
+                </div>
+            </div>
+            <div class="form-row">
+                <label>Subscribe To Mailing List</label>
+                ${login_info[ 'Subscribe To Mailing List' ].get_html()}
+            </div>
+            %if user_info_select:
                 <div class="form-row">
-                    <label>${field['label']}</label>
-                    ${field['widget'].get_html()}
-                    <div class="toolParamHelp" style="clear: both;">
-                        ${field['helptext']}
+                    <label>User type</label>
+                    ${user_info_select.get_html()}
+                </div>
+            %endif
+            %if user_info_form:
+                %for field in widgets:
+                    <div class="form-row">
+                        <label>${field['label']}</label>
+                        ${field['widget'].get_html()}
+                        <div class="toolParamHelp" style="clear: both;">
+                            ${field['helptext']}
+                        </div>
+                        <div style="clear: both"></div>
                     </div>
-                    <div style="clear: both"></div>
-                </div>
-            %endfor
-            %if not user_info_select:
-                <input type="hidden" name="user_info_select" value="${user_info_form.id}"/>
-            %endif   
-        %endif
-        <div class="form-row">
-            <input type="submit" name="create_user_button" value="Submit">
-        </div>
-    </form>
-</div>
\ No newline at end of file
+                %endfor
+                %if not user_info_select:
+                    <input type="hidden" name="user_info_select" value="${user_info_form.id}"/>
+                %endif   
+            %endif
+            <div class="form-row">
+                <input type="submit" name="create_user_button" value="Submit">
+            </div>
+        </form>
+    </div>
+</%def>
\ No newline at end of file
diff -r 48e83411aa91 -r 96ec861b4b6e templates/visualization/list_published.mako
--- a/templates/visualization/list_published.mako	Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/visualization/list_published.mako	Sun Mar 14 11:49:44 2010 -0400
@@ -10,7 +10,7 @@
 </%def>
 
 <%def name="title()">
-    Galaxy :: Published Visualizations
+    Galaxy | Published Visualizations
 </%def>
 
 <%def name="stylesheets()">
diff -r 48e83411aa91 -r 96ec861b4b6e templates/workflow/list.mako
--- a/templates/workflow/list.mako	Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/workflow/list.mako	Sun Mar 14 11:49:44 2010 -0400
@@ -1,103 +1,118 @@
-<%inherit file="/base.mako"/>
+<%inherit file="/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="workflow"
+    self.message_box_visible=False
+%>
+</%def>
 
 <%def name="title()">Workflow home</%def>
 
-%if message:
-<%
-    try:
-        messagetype
-    except:
-        messagetype = "done"
-%>
-<p />
-<div class="${messagetype}message">
-    ${message}
-</div>
-%endif
+<%def name="center_panel()">
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            %if message:
+            <%
+                try:
+                    messagetype
+                except:
+                    messagetype = "done"
+            %>
+            <p />
+            <div class="${messagetype}message">
+                ${message}
+            </div>
+            %endif
 
-<h2>Your workflows</h2>
+            <h2>Your workflows</h2>
 
-<ul class="manage-table-actions">
-    <li>
-        <a class="action-button" href="${h.url_for( action='create' )}">
-            <img src="${h.url_for('/static/images/silk/add.png')}" />
-            <span>Create new workflow</span>
-        </a>
-    </li>
-</ul>
+            <ul class="manage-table-actions">
+                <li>
+                    <a class="action-button" href="${h.url_for( action='create' )}">
+                        <img src="${h.url_for('/static/images/silk/add.png')}" />
+                        <span>Create new workflow</span>
+                    </a>
+                </li>
+            </ul>
   
-%if workflows:
-    <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" style="width:100%;">
-        <tr class="header">
-            <th>Name</th>
-            <th># of Steps</th>
-            ## <th>Last Updated</th>
-            <th></th>
-        </tr>
-        %for i, workflow in enumerate( workflows ):
-            <tr>
-                <td>
-                    <div class="menubutton" style="float: left;" id="wf-${i}-popup">
-                    ${workflow.name | h}
-                    </div>
-                </td>
-                <td>${len(workflow.latest_workflow.steps)}</td>
-                ## <td>${str(workflow.update_time)[:19]}</td>
-                <td>
-                    <div popupmenu="wf-${i}-popup">
-                    <a class="action-button" href="${h.url_for( action='editor', id=trans.security.encode_id(workflow.id) )}" target="_parent">Edit</a>
-                    <a class="action-button" href="${h.url_for( controller='root', action='index', workflow_id=trans.security.encode_id(workflow.id) )}" target="_parent">Run</a>
-                    <a class="action-button" href="${h.url_for( action='sharing', id=trans.security.encode_id(workflow.id) )}">Share or Publish</a>
-                    <a class="action-button" href="${h.url_for( action='clone', id=trans.security.encode_id(workflow.id) )}">Clone</a>
-                    <a class="action-button" href="${h.url_for( action='rename', id=trans.security.encode_id(workflow.id) )}">Rename</a>
-                    <a class="action-button" confirm="Are you sure you want to delete workflow '${workflow.name}'?" href="${h.url_for( action='delete', id=trans.security.encode_id(workflow.id) )}">Delete</a>
-                    </div>
-                </td>
-            </tr>    
-        %endfor
-    </table>
-%else:
+            %if workflows:
+                <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" style="width:100%;">
+                    <tr class="header">
+                        <th>Name</th>
+                        <th># of Steps</th>
+                        ## <th>Last Updated</th>
+                        <th></th>
+                    </tr>
+                    %for i, workflow in enumerate( workflows ):
+                        <tr>
+                            <td>
+                                <div class="menubutton" style="float: left;" id="wf-${i}-popup">
+                                ${workflow.name | h}
+                                </div>
+                            </td>
+                            <td>${len(workflow.latest_workflow.steps)}</td>
+                            ## <td>${str(workflow.update_time)[:19]}</td>
+                            <td>
+                                <div popupmenu="wf-${i}-popup">
+                                <a class="action-button" href="${h.url_for( action='editor', id=trans.security.encode_id(workflow.id) )}" target="_parent">Edit</a>
+                                <a class="action-button" href="${h.url_for( controller='root', action='index', workflow_id=trans.security.encode_id(workflow.id) )}" target="_parent">Run</a>
+                                <a class="action-button" href="${h.url_for( action='sharing', id=trans.security.encode_id(workflow.id) )}">Share or Publish</a>
+                                <a class="action-button" href="${h.url_for( action='clone', id=trans.security.encode_id(workflow.id) )}">Clone</a>
+                                <a class="action-button" href="${h.url_for( action='rename', id=trans.security.encode_id(workflow.id) )}">Rename</a>
+                                <a class="action-button" confirm="Are you sure you want to delete workflow '${workflow.name}'?" href="${h.url_for( action='delete', id=trans.security.encode_id(workflow.id) )}">Delete</a>
+                                </div>
+                            </td>
+                        </tr>    
+                    %endfor
+                </table>
+            %else:
 
-    You have no workflows.
+                You have no workflows.
 
-%endif
+            %endif
 
-<h2>Workflows shared with you by others</h2>
+            <h2>Workflows shared with you by others</h2>
 
-%if shared_by_others:
-    <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
-        <tr class="header">
-            <th>Name</th>
-            <th>Owner</th>
-            <th># of Steps</th>
-            <th></th>
-        </tr>
-        %for i, association in enumerate( shared_by_others ):
-            <% workflow = association.stored_workflow %>
-            <tr>
-                <td>
-                    <a class="menubutton" id="shared-${i}-popup" href="${h.url_for( action='run', id=trans.security.encode_id(workflow.id) )}">${workflow.name | h}</a>
-                </td>
-                <td>${workflow.user.email}</td>
-                <td>${len(workflow.latest_workflow.steps)}</td>
-                <td>
-                    <div popupmenu="shared-${i}-popup">
-						<a class="action-button" href="${h.url_for( action='display_by_username_and_slug', username=workflow.user.username, slug=workflow.slug)}" target="_top">View</a>
-	                    <a class="action-button" href="${h.url_for( action='run', id=trans.security.encode_id(workflow.id) )}">Run</a>
-	                    <a class="action-button" href="${h.url_for( action='clone', id=trans.security.encode_id(workflow.id) )}">Clone</a>
-                    </div>
-                </td>
-            </tr>    
-        %endfor
-    </table>
-%else:
+            %if shared_by_others:
+                <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+                    <tr class="header">
+                        <th>Name</th>
+                        <th>Owner</th>
+                        <th># of Steps</th>
+                        <th></th>
+                    </tr>
+                    %for i, association in enumerate( shared_by_others ):
+                        <% workflow = association.stored_workflow %>
+                        <tr>
+                            <td>
+                                <a class="menubutton" id="shared-${i}-popup" href="${h.url_for( action='run', id=trans.security.encode_id(workflow.id) )}">${workflow.name | h}</a>
+                            </td>
+                            <td>${workflow.user.email}</td>
+                            <td>${len(workflow.latest_workflow.steps)}</td>
+                            <td>
+                                <div popupmenu="shared-${i}-popup">
+            						<a class="action-button" href="${h.url_for( action='display_by_username_and_slug', username=workflow.user.username, slug=workflow.slug)}" target="_top">View</a>
+            	                    <a class="action-button" href="${h.url_for( action='run', id=trans.security.encode_id(workflow.id) )}">Run</a>
+            	                    <a class="action-button" href="${h.url_for( action='clone', id=trans.security.encode_id(workflow.id) )}">Clone</a>
+                                </div>
+                            </td>
+                        </tr>    
+                    %endfor
+                </table>
+            %else:
 
-    No workflows have been shared with you.
+                No workflows have been shared with you.
 
-%endif
+            %endif
 
-<h2>Other options</h2>
+            <h2>Other options</h2>
 
-<a class="action-button" href="${h.url_for( action='configure_menu' )}">
-    <span>Configure your workflow menu</span>
-</a>
\ No newline at end of file
+            <a class="action-button" href="${h.url_for( action='configure_menu' )}">
+                <span>Configure your workflow menu</span>
+            </a>
+        </div>
+    </div>    
+</%def>
\ No newline at end of file
diff -r 48e83411aa91 -r 96ec861b4b6e templates/workflow/list_published.mako
--- a/templates/workflow/list_published.mako	Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/workflow/list_published.mako	Sun Mar 14 11:49:44 2010 -0400
@@ -10,7 +10,7 @@
 </%def>
 
 <%def name="title()">
-    Galaxy :: Published Workflows
+    Galaxy | Published Workflows
 </%def>
 
 <%def name="stylesheets()">
diff -r 48e83411aa91 -r 96ec861b4b6e test/base/twilltestcase.py
--- a/test/base/twilltestcase.py	Fri Mar 12 16:11:26 2010 -0500
+++ b/test/base/twilltestcase.py	Sun Mar 14 11:49:44 2010 -0400
@@ -791,7 +791,7 @@
         self.home()
         # Create user, setting username to email.
         self.visit_page( "user/create?email=%s&username=%s&password=%s&confirm=%s&create_user_button=Submit" % ( email, email, password, password ) )
-        self.check_page_for_string( "Now logged in as %s" %email )
+        self.check_page_for_string( "now logged in as %s" %email )
         self.home()
         # Make sure a new private role was created for the user
         self.visit_page( "user/set_default_permissions" )
@@ -816,7 +816,7 @@
         for index, info_value in enumerate(user_info_values):
             tc.fv( "1", "field_%i" % index, info_value )
         tc.submit( "create_user_button" )
-        self.check_page_for_string( "Now logged in as %s" % email )
+        self.check_page_for_string( "now logged in as %s" % email )
     def create_user_with_info_as_admin( self, email, password, username, user_info_forms, user_info_form_id, user_info_values ):
         '''
         This method registers a new user and also provides use info as an admin
@@ -906,16 +906,17 @@
             self.create( email=email, password=password )
         except:
             self.home()
-            self.visit_url( "%s/user/login" % self.url )
+            # HACK: don't use panels because late_javascripts() messes up the twill browser and it can't find form fields (and hence user can't be logged in).
+            self.visit_url( "%s/user/login?use_panels=False" % self.url )
             tc.fv( '1', 'email', email )
             tc.fv( '1', 'password', password )
             tc.submit( 'Login' )
-            self.check_page_for_string( "Now logged in as %s" %email )
+            self.check_page_for_string( "now logged in as %s" %email )
             self.home()
     def logout( self ):
         self.home()
         self.visit_page( "user/logout" )
-        self.check_page_for_string( "You are no longer logged in" )
+        self.check_page_for_string( "You have been logged out" )
         self.home()
     
     # Functions associated with browsers, cookies, HTML forms and page visits
                    
                  
                  
                          
                            
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/48e83411aa91
changeset: 3528:48e83411aa91
user:      Greg Von Kuster <greg(a)bx.psu.edu>
date:      Fri Mar 12 16:11:26 2010 -0500
description:
New separate functional test scripts for testing admin features, library features, library security and data security.  These use a new test_db_util module for all db interaction ( other functional test scripts can be enhanced to use this ).  A lot of code cleanup in the functional tests.  In twilltestcase, cleaned up library and security related methods to mirror names in recently merged library code.
diffstat:
 lib/galaxy/web/controllers/library_common.py   |     7 +-
 test/base/test_db_util.py                      |   123 +
 test/base/twilltestcase.py                     |   342 ++-
 test/functional/test_admin_features.py         |   422 ++++
 test/functional/test_data_security.py          |   196 ++
 test/functional/test_library_features.py       |   606 ++++++
 test/functional/test_library_security.py       |   603 ++++++
 test/functional/test_security_and_libraries.py |  2141 ------------------------
 8 files changed, 2167 insertions(+), 2273 deletions(-)
diffs (truncated from 4641 to 3000 lines):
diff -r e39c9a2a0b4c -r 48e83411aa91 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py	Fri Mar 12 14:27:04 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py	Fri Mar 12 16:11:26 2010 -0500
@@ -155,7 +155,7 @@
                 library.root_folder.description = new_description
                 trans.sa_session.add_all( ( library, library.root_folder ) )
                 trans.sa_session.flush()
-                msg = "Library '%s' has been renamed to '%s'" % ( old_name, new_name )
+                msg = "The information has been updated."
                 return trans.response.send_redirect( web.url_for( controller='library_common',
                                                                   action='library_info',
                                                                   cntrller=cntrller,
@@ -313,7 +313,7 @@
                     folder.description = new_description
                     trans.sa_session.add( folder )
                     trans.sa_session.flush()
-                    msg = "Folder '%s' has been renamed to '%s'" % ( old_name, new_name )
+                    msg = "The information has been updated."
                     messagetype='done'
             else:
                 msg = "You are not authorized to edit this folder"
@@ -698,7 +698,6 @@
                         trans.app.security_agent.derive_roles_from_access( trans, trans.app.security.decode_id( library_id ), cntrller, library=True, **vars )
                 if error:
                     messagetype = 'error'
-
                     trans.response.send_redirect( web.url_for( controller='library_common',
                                                                            action='upload_library_dataset',
                                                                            cntrller=cntrller,
@@ -1171,7 +1170,7 @@
                         library_dataset.info = new_info
                         trans.sa_session.add( library_dataset )
                         trans.sa_session.flush()
-                        msg = "Dataset '%s' has been renamed to '%s'" % ( old_name, new_name )
+                        msg = "The information has been updated."
                         messagetype = 'done'
             else:
                 msg = "You are not authorized to change the attributes of this dataset"
diff -r e39c9a2a0b4c -r 48e83411aa91 test/base/test_db_util.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/base/test_db_util.py	Fri Mar 12 16:11:26 2010 -0500
@@ -0,0 +1,123 @@
+import galaxy.model
+from galaxy.model.orm import *
+from galaxy.model.mapping import context as sa_session
+from base.twilltestcase import *
+import sys
+
+def flush( obj ):
+    sa_session.add( obj )
+    sa_session.flush()
+def get_all_histories_for_user( user ):
+    return sa_session.query( galaxy.model.History ) \
+                     .filter( and_( galaxy.model.History.table.c.user_id==user.id,
+                                    galaxy.model.History.table.c.deleted==False ) ) \
+                     .all()
+def get_dataset_permissions_by_dataset( dataset ):
+    return sa_session.query( galaxy.model.DatasetPermissions ) \
+                     .filter( galaxy.model.DatasetPermissions.table.c.dataset_id==dataset.id ) \
+                     .all()
+def get_dataset_permissions_by_role( role ):
+    return sa_session.query( galaxy.model.DatasetPermissions ) \
+                     .filter( galaxy.model.DatasetPermissions.table.c.role_id == role.id ) \
+                     .first()
+def get_default_history_permissions_by_history( history ):
+    return sa_session.query( galaxy.model.DefaultHistoryPermissions ) \
+                     .filter( galaxy.model.DefaultHistoryPermissions.table.c.history_id==history.id ) \
+                     .all()
+def get_default_history_permissions_by_role( role ):
+    return sa_session.query( galaxy.model.DefaultHistoryPermissions ) \
+                     .filter( galaxy.model.DefaultHistoryPermissions.table.c.role_id == role.id ) \
+                     .all()
+def get_default_user_permissions_by_role( role ):
+    return sa_session.query( galaxy.model.DefaultUserPermissions ) \
+                     .filter( galaxy.model.DefaultUserPermissions.table.c.role_id == role.id ) \
+                     .all()
+def get_default_user_permissions_by_user( user ):
+    return sa_session.query( galaxy.model.DefaultUserPermissions ) \
+                     .filter( galaxy.model.DefaultUserPermissions.table.c.user_id==user.id ) \
+                     .all()
+def get_form( name ):
+    fdc_list = sa_session.query( galaxy.model.FormDefinitionCurrent ) \
+                         .filter( galaxy.model.FormDefinitionCurrent.table.c.deleted == False ) \
+                         .order_by( galaxy.model.FormDefinitionCurrent.table.c.create_time.desc() )
+    for fdc in fdc_list:
+        sa_session.refresh( fdc )
+        sa_session.refresh( fdc.latest_form )
+        if fdc.latest_form.name == name:
+            return fdc.latest_form
+    return None
+def get_folder( parent_id, name, description ):
+    return sa_session.query( galaxy.model.LibraryFolder ) \
+                     .filter( and_( galaxy.model.LibraryFolder.table.c.parent_id==parent_id,
+                                    galaxy.model.LibraryFolder.table.c.name==name,
+                                    galaxy.model.LibraryFolder.table.c.description==description ) ) \
+                     .first()
+def get_group_by_name( name ):
+    return sa_session.query( galaxy.model.Group ).filter( galaxy.model.Group.table.c.name==name ).first()
+def get_group_role_associations_by_group( group ):
+    return sa_session.query( galaxy.model.GroupRoleAssociation ) \
+                     .filter( galaxy.model.GroupRoleAssociation.table.c.group_id == group.id ) \
+                     .all()
+def get_group_role_associations_by_role( role ):
+    return sa_session.query( galaxy.model.GroupRoleAssociation ) \
+                     .filter( galaxy.model.GroupRoleAssociation.table.c.role_id == role.id ) \
+                     .all()
+def get_latest_dataset():
+    return sa_session.query( galaxy.model.Dataset ) \
+                     .order_by( desc( galaxy.model.Dataset.table.c.create_time ) ) \
+                     .first()
+def get_latest_hda():
+    return sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+                     .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+                     .first()
+def get_latest_history_for_user( user ):
+    return sa_session.query( galaxy.model.History ) \
+                     .filter( and_( galaxy.model.History.table.c.deleted==False,
+                                    galaxy.model.History.table.c.user_id==user.id ) ) \
+                     .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+                     .first()
+def get_latest_ldda():
+    return sa_session.query( galaxy.model.LibraryDatasetDatasetAssociation ) \
+                     .order_by( desc( galaxy.model.LibraryDatasetDatasetAssociation.table.c.create_time ) ) \
+                     .first()
+def get_latest_lddas( limit ):
+    return sa_session.query( galaxy.model.LibraryDatasetDatasetAssociation ) \
+                     .order_by( desc( galaxy.model.LibraryDatasetDatasetAssociation.table.c.update_time ) ) \
+                     .limit( limit )
+def get_library( name, description, synopsis ):
+    return sa_session.query( galaxy.model.Library ) \
+                     .filter( and_( galaxy.model.Library.table.c.name==name,
+                                    galaxy.model.Library.table.c.description==description,
+                                    galaxy.model.Library.table.c.synopsis==synopsis,
+                                    galaxy.model.Library.table.c.deleted==False ) ) \
+                     .first()
+def get_private_role( user ):
+    for role in user.all_roles():
+        if role.name == user.email and role.description == 'Private Role for %s' % user.email:
+            return role
+    raise AssertionError( "Private role not found for user '%s'" % user.email )
+def get_role_by_name( name ):
+    return sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name==name ).first()
+def get_user( email ):
+    return sa_session.query( galaxy.model.User ) \
+                     .filter( galaxy.model.User.table.c.email==email ) \
+                     .first()
+def get_user_group_associations_by_group( group ):
+    return sa_session.query( galaxy.model.UserGroupAssociation ) \
+                     .filter( galaxy.model.UserGroupAssociation.table.c.group_id == group.id ) \
+                     .all()
+def get_user_role_associations_by_role( role ):
+    return sa_session.query( galaxy.model.UserRoleAssociation ) \
+                     .filter( galaxy.model.UserRoleAssociation.table.c.role_id == role.id ) \
+                     .all()
+def refresh( obj ):
+    sa_session.refresh( obj )
+def set_library_permissions( in_list ):
+    permissions_in = []
+    permissions_out = []
+    for k, v in galaxy.model.Library.permitted_actions.items():
+        if k in in_list:
+            permissions_in.append( k )
+        else:
+            permissions_out.append( k )
+    return permissions_in, permissions_out
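
For orientation, a minimal sketch (not part of this changeset) of how a functional test might call the test_db_util helpers added above; the email address, class name, and assertion strings are placeholders:

    from base.twilltestcase import TwillTestCase
    from base.test_db_util import get_user, get_private_role, get_latest_history_for_user

    class TestDbUtilSketch( TwillTestCase ):
        def test_000_lookup_user( self ):
            """Sketch: fetch a user and related rows directly from the database"""
            email = 'user@example.org'  # placeholder account
            self.login( email=email )   # TwillTestCase.login creates the account if it does not yet exist
            user = get_user( email )
            assert user is not None, 'Problem retrieving user with email "%s" from the database' % email
            # get_private_role raises AssertionError itself if no private role exists
            role = get_private_role( user )
            history = get_latest_history_for_user( user )
            assert history is not None, 'No history found for user "%s"' % email
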
diff -r e39c9a2a0b4c -r 48e83411aa91 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py	Fri Mar 12 14:27:04 2010 -0500
+++ b/test/base/twilltestcase.py	Fri Mar 12 16:11:26 2010 -0500
@@ -535,21 +535,55 @@
         if check_str:
             self.check_page_for_string( check_str )
         self.home()
-    def edit_hda_attribute_info( self, hda_id, new_name='', new_info='', new_dbkey='', new_startcol='' ):
+    def edit_hda_attribute_info( self, hda_id, new_name='', new_info='', new_dbkey='', new_startcol='',
+                                 check_str1='', check_str2='', check_str3='', check_str4='',
+                                 not_displayed1='', not_displayed2='', not_displayed3='' ):
         """Edit history_dataset_association attribute information"""
         self.home()
         self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+        submit_required = False
         self.check_page_for_string( 'Edit Attributes' )
         if new_name:
             tc.fv( 'edit_attributes', 'name', new_name )
+            submit_required = True
         if new_info:
             tc.fv( 'edit_attributes', 'info', new_info )
+            submit_required = True
         if new_dbkey:
             tc.fv( 'edit_attributes', 'dbkey', new_dbkey )
+            submit_required = True
         if new_startcol:
             tc.fv( 'edit_attributes', 'startCol', new_startcol )
-        tc.submit( 'save' )
-        self.check_page_for_string( 'Attributes updated' )
+            submit_required = True
+        if submit_required:
+            tc.submit( 'save' )
+            self.check_page_for_string( 'Attributes updated' )
+        if check_str1:
+            self.check_page_for_string( check_str1 )
+        if check_str2:
+            self.check_page_for_string( check_str2 )
+        if check_str3:
+            self.check_page_for_string( check_str3 )
+        if check_str4:
+            self.check_page_for_string( check_str4 )
+        if not_displayed1:
+            try:
+                self.check_page_for_string( not_displayed1 )
+                raise AssertionError, "String (%s) incorrectly displayed on Edit Attributes page." % not_displayed1
+            except:
+                pass
+        if not_displayed2:
+            try:
+                self.check_page_for_string( not_displayed2 )
+                raise AssertionError, "String (%s) incorrectly displayed on Edit Attributes page." % not_displayed2
+            except:
+                pass
+        if not_displayed3:
+            try:
+                self.check_page_for_string( not_displayed3 )
+                raise AssertionError, "String (%s) incorrectly displayed on Edit Attributes page." % not_displayed3
+            except:
+                pass
         self.home()
     def auto_detect_metadata( self, hda_id ):
         """Auto-detect history_dataset_association metadata"""
@@ -1164,12 +1198,10 @@
         check_str = "Purged 1 users"
         self.check_page_for_string( check_str )
         self.home()
-    def associate_roles_and_groups_with_user( self, user_id, email,
-                                              in_role_ids=[], out_role_ids=[],
-                                              in_group_ids=[], out_group_ids=[],
-                                              check_str='' ):
+    def manage_roles_and_groups_for_user( self, user_id, in_role_ids=[], out_role_ids=[],
+                                          in_group_ids=[], out_group_ids=[], check_str='' ):
         self.home()
-        url = "%s/admin/manage_roles_and_groups_for_user?id=%s&user_roles_groups_edit_button=Save" % ( self.url, user_id )
+        url = "%s/admin/manage_roles_and_groups_for_user?id=%s" % ( self.url, user_id )
         if in_role_ids:
             url += "&in_roles=%s" % ','.join( in_role_ids )
         if out_role_ids:
@@ -1178,12 +1210,18 @@
             url += "&in_groups=%s" % ','.join( in_group_ids )
         if out_group_ids:
             url += "&out_groups=%s" % ','.join( out_group_ids )
+        if in_role_ids or out_role_ids or in_group_ids or out_group_ids:
+            url += "&user_roles_groups_edit_button=Save"
         self.visit_url( url )
         if check_str:
             self.check_page_for_string( check_str )
         self.home()
 
     # Tests associated with roles
+    def browse_roles( self, check_str1='' ):
+        self.visit_url( '%s/admin/roles' % self.url )
+        if check_str1:
+            self.check_page_for_string( check_str1 )
     def create_role( self,
                      name='Role One',
                      description="This is Role One",
@@ -1280,6 +1318,10 @@
         self.visit_url( "%s/admin/groups" % self.url )
         self.check_page_for_string( name )
         self.home()
+    def browse_groups( self, check_str1='' ):
+        self.visit_url( '%s/admin/groups' % self.url )
+        if check_str1:
+            self.check_page_for_string( check_str1 )
     def rename_group( self, group_id, name='Group One Renamed' ):
         """Rename a group"""
         self.home()
@@ -1532,6 +1574,58 @@
         self.check_page_for_string( 'Address <b>%s</b> has been added' % address_dict[ 'short_desc' ] )
         
     # Library stuff
+    def add_library_template( self, cntrller, item_type, library_id, form_id, form_name, folder_id=None, ldda_id=None ):
+        """Add a new info template to a library item"""
+        self.home()
+        if item_type == 'library':
+            url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s" % \
+            ( self.url, cntrller, item_type, library_id )
+        elif item_type == 'folder':
+            url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s&folder_id=%s" % \
+            ( self.url, cntrller, item_type, library_id, folder_id )
+        elif item_type == 'ldda':
+            url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s&folder_id=%s&ldda_id=%s" % \
+            ( self.url, cntrller, item_type, library_id, folder_id, ldda_id )
+        self.visit_url( url )
+        self.check_page_for_string ( "Select a template for the" )
+        tc.fv( '1', 'form_id', form_id )
+        tc.fv( '1', 'inherit', '1' )
+        tc.submit( 'add_template_button' )
+        self.check_page_for_string = 'A template based on the form "%s" has been added to this' % form_name
+        self.home()
+    def browse_libraries_admin( self, deleted=False, check_str1='', check_str2='', not_displayed1='' ):
+        self.visit_url( '%s/library_admin/browse_libraries?sort=name&f-description=All&f-name=All&f-deleted=%s' % ( self.url, str( deleted ) ) )
+        if check_str1:
+            self.check_page_for_string( check_str1 )
+        if check_str2:
+            self.check_page_for_string( check_str2 )
+        if not_displayed1:
+            try:
+                self.check_page_for_string( not_displayed1 )
+                raise AssertionError, "String (%s) incorrectly displayed when browsing library." % not_displayed1
+            except:
+                pass
+    def browse_libraries_regular_user( self, check_str1='', check_str2='' ):
+        self.visit_url( '%s/library/browse_libraries' % self.url )
+        if check_str1:
+            self.check_page_for_string( check_str1 )
+        if check_str2:
+            self.check_page_for_string( check_str2 )
+    def browse_library( self, cntrller, id, show_deleted=False,
+                        check_str1='', check_str2='', check_str3='', not_displayed='', not_displayed2='' ):
+        self.visit_url( '%s/library_common/browse_library?cntrller=%s&id=%s&show_deleted=%s' % ( self.url, cntrller, id, str( show_deleted ) ) )
+        if check_str1:
+            self.check_page_for_string( check_str1 )
+        if check_str2:
+            self.check_page_for_string( check_str2 )
+        if check_str3:
+            self.check_page_for_string( check_str3 )
+        if not_displayed:
+            try:
+                self.check_page_for_string( not_displayed )
+                raise AssertionError, "String (%s) incorrectly displayed when browsing library." % not_displayed
+            except:
+                pass
     def create_library( self, name='Library One', description='This is Library One', synopsis='Synopsis for Library One' ):
         """Create a new library"""
         self.home()
@@ -1544,6 +1638,28 @@
         check_str = "The new library named '%s' has been created" % name
         self.check_page_for_string( check_str )
         self.home()
+    def library_info( self, cntrller, library_id, library_name, new_name='', new_description='', new_synopsis='',
+                           ele_1_field_name='', ele_1_contents='', ele_2_field_name='', ele_2_contents='', check_str1='' ):
+        """Edit information about a library, optionally using an existing template with up to 2 elements"""
+        self.home()
+        self.visit_url( "%s/library_common/library_info?cntrller=%s&id=%s" % ( self.url, cntrller, library_id ) )
+        if check_str1:
+            self.check_page_for_string( check_str1 )
+        if new_name and new_description and new_synopsis:
+            tc.fv( '1', 'name', new_name )
+            tc.fv( '1', 'description', new_description )
+            tc.fv( '1', 'synopsis', new_synopsis )
+            tc.submit( 'library_info_button' )
+            self.check_page_for_string( "The information has been updated." )
+        # If there is a template, then there are 2 forms on this page and the template is the 2nd form
+        if ele_1_field_name and ele_1_contents and ele_2_field_name and ele_2_contents:
+            tc.fv( '2', ele_1_field_name, ele_1_contents )
+            tc.fv( '2', ele_2_field_name, ele_2_contents )
+            tc.submit( 'edit_info_button' )
+        elif ele_1_field_name and ele_1_contents:
+            tc.fv( '2', ele_1_field_name, ele_1_contents )
+            tc.submit( 'edit_info_button' )
+        self.home()
     def library_permissions( self, library_id, library_name, role_ids_str, permissions_in, permissions_out, cntrller='library_admin' ):
         # role_ids_str must be a comma-separated string of role ids
         url = "library_common/library_permissions?id=%s&cntrller=%slibrary_admin&update_roles_button=Save" % ( library_id, cntrller )
@@ -1558,46 +1674,8 @@
         check_str = "Permissions updated for library '%s'" % library_name
         self.check_page_for_string( check_str )
         self.home()
-    def rename_library( self, library_id, old_name, name='Library One Renamed', description='This is Library One Re-described',
-                        synopsis='This is the new synopsis for Library One ', controller='library_admin' ):
-        """Rename a library"""
-        self.home()
-        self.visit_url( "%s/library_common/library_info?id=%s&cntrller=%s" % ( self.url, library_id, controller ) )
-        self.check_page_for_string( old_name )
-        # Since twill barfs on the form submisson, we ar forced to simulate it
-        url = "%s/library_common/library_info?id=%s&cntrller=%s&library_info_button=Save&description=%s&name=%s&synopsis=%s" % \
-        ( self.url, library_id, controller, description.replace( ' ', '+' ), name.replace( ' ', '+' ), synopsis.replace( ' ', '+' ) )
-        self.home()
-        self.visit_url( url )
-        check_str = "Library '%s' has been renamed to '%s'" % ( old_name, name )
-        self.check_page_for_string( check_str )
-        self.home()
-    def add_template( self, cntrller, item_type, library_id, form_id, form_name, folder_id=None, ldda_id=None ):
-        """Add a new info template to a library item"""
-        self.home()
-        if item_type == 'library':
-            url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s" % ( self.url, cntrller, item_type, library_id )
-        elif item_type == 'folder':
-            url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s&folder_id=%s" % ( self.url, cntrller, item_type, library_id, folder_id )
-        elif item_type == 'ldda':
-            url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s&folder_id=%s&ldda_id=%s" % ( self.url, cntrller, item_type, library_id, folder_id, ldda_id )
-        self.visit_url( url )
-        self.check_page_for_string ( "Select a template for the" )
-        tc.fv( '1', 'form_id', form_id )
-        tc.fv( '1', 'inherit', '1' )
-        tc.submit( 'add_template_button' )
-        self.check_page_for_string = 'A template based on the form "%s" has been added to this' % form_name
-        self.home()
-    def library_info( self, library_id, library_name, ele_1_field_name, ele_1_contents, ele_2_field_name, ele_2_contents, controller='library_admin' ):
-        """Add information to a library using an existing template with 2 elements"""
-        self.home()
-        self.visit_url( "%s/library_common/library_info?id=%s&cntrller=%s" % ( self.url, library_id, controller ) )
-        check_str = 'Other information about library %s' % library_name
-        self.check_page_for_string( check_str )
-        tc.fv( '2', ele_1_field_name, ele_1_contents )
-        tc.fv( '2', ele_2_field_name, ele_2_contents )
-        tc.submit( 'create_new_info_button' )
-        self.home()
+
+    # Library folder stuff
     def add_folder( self, controller, library_id, folder_id, name='Folder One', description='This is Folder One' ):
         """Create a new folder"""
         self.home()
@@ -1609,27 +1687,40 @@
         check_str = "The new folder named '%s' has been added to the data library." % name
         self.check_page_for_string( check_str )
         self.home()
-    def folder_info( self, controller, folder_id, library_id, name, new_name, description, contents='', field_name='' ):
+    def folder_info( self, cntrller, folder_id, library_id, name='', new_name='', description='',
+                     field_name='', contents='', check_str1='', check_str2='', not_displayed='' ):
         """Add information to a library using an existing template with 2 elements"""
         self.home()
         self.visit_url( "%s/library_common/folder_info?cntrller=%s&id=%s&library_id=%s" % \
-                        ( self.url, controller, folder_id, library_id) )
+                        ( self.url, cntrller, folder_id, library_id ) )
         # Twill cannot handle the following call for some reason - it's buggy
         # self.check_page_for_string( "Edit folder name and description" )
-        tc.fv( '1', "name", new_name )
-        tc.fv( '1', "description", description )
-        tc.submit( 'rename_folder_button' )
-        # Twill cannot handle the following call for some reason - it's buggy
-        # check_str = "Folder '%s' has been renamed to '%s'" % ( name, new_name )
-        # self.check_page_for_string( check_str )
-        if contents and field_name:
+        if name and new_name and description:
+            tc.fv( '1', "name", new_name )
+            tc.fv( '1', "description", description )
+            tc.submit( 'rename_folder_button' )
+            # Twill barfs on this, so keep it commented...
+            #self.check_page_for_string( "The information has been updated." )
+        if field_name and contents:
             # We have an information template associated with the folder, so
             # there are 2 forms on this page and the template is the 2nd form
             tc.fv( '2', field_name, contents )
             tc.submit( 'edit_info_button' )
-            # Twill cannot handle the following call for some reason - it's buggy
-            # self.check_page_for_string( 'The information has been updated.' )
+            # Twill barfs on this, so keep it commented...
+            #self.check_page_for_string( 'The information has been updated.' )
+        if check_str1:
+            self.check_page_for_string( check_str1 )
+        if check_str2:
+            self.check_page_for_string( check_str2 )
+        if not_displayed:
+            try:
+                self.check_page_for_string( not_displayed )
+                raise AssertionError, "String (%s) should not have been displayed on folder info page." % not_displayed
+            except:
+                pass
         self.home()
+
+    # Library dataset stuff
     def add_library_dataset( self, cntrller, filename, library_id, folder_id, folder_name,
                              file_type='auto', dbkey='hg18', roles=[], message='', root=False,
                              template_field_name1='', template_field_contents1='', show_deleted='False',
@@ -1638,7 +1729,7 @@
         filename = self.get_filename( filename )
         self.home()
         self.visit_url( "%s/library_common/upload_library_dataset?cntrller=%s&library_id=%s&folder_id=%s&upload_option=%s&message=%s" % \
-                        ( self.url, cntrller, library_id, folder_id, upload_option, message ) )
+                        ( self.url, cntrller, library_id, folder_id, upload_option, message.replace( ' ', '+' ) ) )
         self.check_page_for_string( 'Upload files' )
         tc.fv( "1", "library_id", library_id )
         tc.fv( "1", "folder_id", folder_id )
@@ -1659,68 +1750,77 @@
             check_str = "Added 1 datasets to the folder '%s' (each is selected)." % folder_name
         self.library_wait( library_id )
         self.home()
-    def set_library_dataset_permissions( self, cntrller, library_id, folder_id, ldda_id, ldda_name, role_ids_str, permissions_in, permissions_out ):
+    def ldda_permissions( self, cntrller, library_id, folder_id, id, role_ids_str,
+                          permissions_in=[], permissions_out=[], check_str1='' ):
         # role_ids_str must be a comma-separated string of role ids
-        url = "library_common/ldda_permissions?cntrller=%s&library_id=%s&folder_id=%s&id=%s&update_roles_button=Save" % \
-            ( cntrller, library_id, folder_id, ldda_id )
+        url = "%s/library_common/ldda_permissions?cntrller=%s&library_id=%s&folder_id=%s&id=%s" % \
+            ( self.url, cntrller, library_id, folder_id, id )
         for po in permissions_out:
             key = '%s_out' % po
             url ="%s&%s=%s" % ( url, key, role_ids_str )
         for pi in permissions_in:
             key = '%s_in' % pi
             url ="%s&%s=%s" % ( url, key, role_ids_str )
-        self.home()
-        self.visit_url( "%s/%s" % ( self.url, url ) )
-        check_str = "Permissions have been updated on 1 datasets"
+        if permissions_in or permissions_out:
+            url += "&update_roles_button=Save"
+            self.visit_url( url )
+        if check_str1:
+            check_str = check_str1
+        else:
+            check_str = "Permissions have been updated on 1 datasets"
         self.check_page_for_string( check_str )
         self.home()
-    def edit_ldda_template_element_info( self, library_id, folder_id, ldda_id, ldda_name, ele_1_field_name, 
-                        ele_1_contents, ele_2_field_name, ele_2_contents, ele_1_help='', ele_2_help='',
-                        ele_3_field_name='', ele_3_contents='', ele_3_help='' ):
-        """Edit library_dataset_dataset_association template element information"""
-        self.home()
-        self.visit_url( "%s/library_common/ldda_edit_info?cntrller=library_admin&library_id=%s&folder_id=%s&id=%s" % \
-                        ( self.url, library_id, folder_id, ldda_id ) )        
+    def ldda_edit_info( self, cntrller, library_id, folder_id, ldda_id, ldda_name, new_ldda_name='',
+                        ele_1_field_name='', ele_1_contents='', ele_1_help='',
+                        ele_2_field_name='', ele_2_contents='', ele_2_help='',
+                        ele_3_field_name='', ele_3_contents='', ele_3_help='',
+                        check_str1='', check_str2='', check_str3='', not_displayed='' ):
+        """Edit library_dataset_dataset_association information, optionally template element information"""
+        self.visit_url( "%s/library_common/ldda_edit_info?cntrller=%s&library_id=%s&folder_id=%s&id=%s" % \
+                        ( self.url, cntrller, library_id, folder_id, ldda_id ) )        
         check_str = 'Edit attributes of %s' % ldda_name
         self.check_page_for_string( check_str )
-        ele_1_contents = ele_1_contents.replace( '+', ' ' )
-        ele_2_contents = ele_2_contents.replace( '+', ' ' )
-        tc.fv( '4', ele_1_field_name, ele_1_contents )
-        tc.fv( '4', ele_2_field_name, ele_2_contents.replace( '+', ' ' ) )
+        if new_ldda_name:
+            tc.fv( '1', 'name', new_ldda_name )
+            tc.submit( 'save' )
+            check_str = 'Attributes updated for library dataset %s' % new_ldda_name
+            self.check_page_for_string( check_str )
+        # There are 4 forms on this page and the template is the 4th form
+        if ele_1_field_name and ele_1_contents:
+            ele_1_contents = ele_1_contents.replace( '+', ' ' )
+            tc.fv( '4', ele_1_field_name, ele_1_contents )
+        if ele_2_field_name and ele_2_contents:
+            ele_2_contents = ele_2_contents.replace( '+', ' ' )
+            tc.fv( '4', ele_2_field_name, ele_2_contents.replace( '+', ' ' ) )
         if ele_3_field_name and ele_3_contents:
             ele_3_contents = ele_3_contents.replace( '+', ' ' )
             tc.fv( '4', ele_3_field_name, ele_3_contents )
-        tc.submit( 'edit_info_button' )
-        self.check_page_for_string( 'This is the latest version of this library dataset' )
-        self.check_page_for_string( 'The information has been updated.' )
-        self.check_page_for_string( ele_1_contents )
-        self.check_page_for_string( ele_2_contents )
-        if ele_3_field_name and ele_3_contents:
+        if ele_1_field_name:
+            tc.submit( 'edit_info_button' )
+            self.check_page_for_string( 'This is the latest version of this library dataset' )
+            self.check_page_for_string( 'The information has been updated.' )
+            self.check_page_for_string( ele_1_contents )
+        if ele_2_field_name:
+            self.check_page_for_string( ele_2_contents )
+        if ele_3_field_name:
             self.check_page_for_string( ele_3_contents )
         if ele_1_help:
             check_str = ele_1_help.replace( '+', ' ' )
             self.check_page_for_string( check_str )
-        self.check_page_for_string( ele_2_contents )
         if ele_2_help:
             check_str = ele_2_help.replace( '+', ' ' )
             self.check_page_for_string( check_str )
         if ele_2_help:
             check_str = ele_3_help.replace( '+', ' ' )
             self.check_page_for_string( check_str )
-        self.home()
-    def edit_ldda_attribute_info( self, cntrller, library_id, folder_id, ldda_id, ldda_name, new_ldda_name ):
-        """Edit library_dataset_dataset_association attribute information"""
-        self.home()
-        self.visit_url( "%s/library_common/ldda_edit_info?cntrller=%s&library_id=%s&folder_id=%s&id=%s" % \
-                        ( self.url, cntrller, library_id, folder_id, ldda_id ) )
-        check_str = 'Edit attributes of %s' % ldda_name
-        self.check_page_for_string( check_str )
-        tc.fv( '1', 'name', new_ldda_name )
-        tc.submit( 'save' )
-        check_str = 'Attributes updated for library dataset %s' % new_ldda_name
-        self.check_page_for_string( check_str )
-        check_str = 'Edit attributes of %s' % new_ldda_name
-        self.check_page_for_string( check_str )
+        if check_str1:
+            self.check_page_for_string( check_str1 )
+        if not_displayed:
+            try:
+                self.check_page_for_string( not_displayed )
+                raise AssertionError, "String (%s) should not have been displayed on ldda Edit Attributes page." % not_displayed
+            except:
+                pass
         self.home()
     def upload_new_dataset_version( self, cntrller, filename, library_id, folder_id, folder_name, library_dataset_id, ldda_name, file_type='auto',
                                     dbkey='hg18', message='', template_field_name1='', template_field_contents1='' ):
@@ -1755,19 +1855,21 @@
             check_str = "Added 1 datasets to the folder '%s' (each is selected)." % folder_name
         self.check_page_for_string( check_str )
         self.home()
-    def add_dir_of_files_from_admin_view( self, library_id, folder_id, file_type='auto', dbkey='hg18', roles_tuple=[],
-                                          message='', check_str_after_submit='', template_field_name1='', template_field_contents1='' ):
+    def upload_directory_of_files( self, cntrller, library_id, folder_id, server_dir, file_type='auto', dbkey='hg18', roles_tuple=[],
+                                   message='', check_str1='', check_str_after_submit='', template_field_name1='', template_field_contents1='' ):
         """Add a directory of datasets to a folder"""
         # roles is a list of tuples: [ ( role_id, role_description ) ]
-        self.home()
-        self.visit_url( "%s/library_common/upload_library_dataset?cntrller=library_admin&upload_option=upload_directory&library_id=%s&folder_id=%s" % \
-            ( self.url, library_id, folder_id ) )
+        url = "%s/library_common/upload_library_dataset?cntrller=%s&library_id=%s&folder_id=%s&upload_option=upload_directory" % \
+            ( self.url, cntrller, library_id, folder_id )
+        self.visit_url( url )
         self.check_page_for_string( 'Upload a directory of files' )
+        if check_str1:
+            self.check_page_for_string( check_str1 )
         tc.fv( "1", "folder_id", folder_id )
         tc.fv( "1", "file_type", file_type )
         tc.fv( "1", "dbkey", dbkey )
-        tc.fv( "1", "message", message.replace( '+', ' ' ) )
-        tc.fv( "1", "server_dir", "library" )
+        tc.fv( "1", "message", message )
+        tc.fv( "1", "server_dir", server_dir )
         for role_tuple in roles_tuple:
             tc.fv( "1", "roles", role_tuple[1] ) # role_tuple[1] is the role name
         # Add template field contents, if any...
@@ -1778,29 +1880,13 @@
             self.check_page_for_string( check_str_after_submit )
         self.library_wait( library_id )
         self.home()
-    def add_dir_of_files_from_libraries_view( self, library_id, folder_id, selected_dir, file_type='auto', dbkey='hg18', roles_tuple=[],
-                                              message='', check_str_after_submit='', template_field_name1='', template_field_contents1='' ):
-        """Add a directory of datasets to a folder"""
-        # roles is a list of tuples: [ ( role_id, role_description ) ]
-        self.home()
-        self.visit_url( "%s/library_common/upload_library_dataset?cntrller=library&upload_option=upload_directory&library_id=%s&folder_id=%s" % \
-            ( self.url, library_id, folder_id ) )
-        self.check_page_for_string( 'Upload a directory of files' )
-        tc.fv( "1", "folder_id", folder_id )
-        tc.fv( "1", "file_type", file_type )
-        tc.fv( "1", "dbkey", dbkey )
-        tc.fv( "1", "message", message.replace( '+', ' ' ) )
-        tc.fv( "1", "server_dir", selected_dir )
-        for role_tuple in roles_tuple:
-            tc.fv( "1", "roles", role_tuple[1] ) # role_tuple[1] is the role name
-        # Add template field contents, if any...
-        if template_field_name1:
-            tc.fv( "1", template_field_name1, template_field_contents1 )
-        tc.submit( "runtool_btn" )
-        if check_str_after_submit:
-            self.check_page_for_string( check_str_after_submit )
-        self.library_wait( library_id, cntrller='library' )
-        self.home()
+    def act_on_multiple_datasets( self, cntrller, library_id, do_action, ldda_ids='', check_str1='' ):
+        # Can't use the ~/library_admin/libraries form as twill barfs on it so we'll simulate the form submission
+        # by going directly to the form action
+        self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=%s&library_id=%s&ldda_ids=%s&do_action=%s' \
+                        % ( self.url, cntrller, library_id, ldda_ids, do_action ) )
+        if check_str1:
+            self.check_page_for_string( check_str1 )
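The helper above sidesteps twill's form handling entirely by packing the dataset action into the query string of the form's action URL. A minimal sketch of the same idea, assuming ldda_ids is already a comma-separated string of encoded ids and that 'delete' is one of the supported do_action values (both assumptions, not taken from this changeset):

import urllib

# Sketch only: build the act_on_multiple_datasets URL by hand instead of
# submitting the form through twill; parameter values here are illustrative.
params = urllib.urlencode( dict( cntrller='library_admin',
                                 library_id=library_id,
                                 ldda_ids=ldda_ids,
                                 do_action='delete' ) )
self.visit_url( '%s/library_common/act_on_multiple_datasets?%s' % ( self.url, params ) )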
     def download_archive_of_library_files( self, cntrller, library_id, ldda_ids, format ):
         self.home()
         # Here it would be ideal to have twill set form values and submit the form, but
diff -r e39c9a2a0b4c -r 48e83411aa91 test/functional/test_admin_features.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/test_admin_features.py	Fri Mar 12 16:11:26 2010 -0500
@@ -0,0 +1,422 @@
+from base.twilltestcase import *
+from base.test_db_util import *
+
+class TestAdminFeatures( TwillTestCase ):
+    def test_000_initiate_users( self ):
+        """Ensuring all required user accounts exist"""
+        self.logout()
+        self.login( email='test1(a)bx.psu.edu' )
+        global regular_user1
+        regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+        assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+        self.logout()
+        self.login( email='test2(a)bx.psu.edu' )
+        global regular_user2
+        regular_user2 = get_user( 'test2(a)bx.psu.edu' )
+        assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+        self.logout()
+        self.login( email='test(a)bx.psu.edu' )
+        global admin_user
+        admin_user = get_user( 'test(a)bx.psu.edu' )
+        assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+    def test_005_create_new_user_account_as_admin( self ):
+        """Testing creating a new user account as admin"""
+        # Logged in as admin_user
+        email = 'test3(a)bx.psu.edu'
+        password = 'testuser'
+        previously_created = self.create_new_account_as_admin( email=email, password=password )
+        # Get the user object for later tests
+        global regular_user3
+        regular_user3 = get_user( email )
+        assert regular_user3 is not None, 'Problem retrieving user with email "%s" from the database' % email
+        global regular_user3_private_role
+        regular_user3_private_role = get_private_role( regular_user3 )
+        # Make sure DefaultUserPermissions were created
+        if not regular_user3.default_permissions:
+            raise AssertionError( 'No DefaultUserPermissions were created for user %s when the admin created the account' % email )
+        # Make sure a private role was created for the user
+        if not regular_user3.roles:
+            raise AssertionError( 'No UserRoleAssociations were created for user %s when the admin created the account' % email )
+        if not previously_created and len( regular_user3.roles ) != 1:
+            raise AssertionError( '%d UserRoleAssociations were created for user %s when the admin created the account ( should have been 1 )' \
+                                  % ( len( regular_user3.roles ), regular_user3.email ) )
+        for ura in regular_user3.roles:
+            role = sa_session.query( galaxy.model.Role ).get( ura.role_id )
+            if not previously_created and role.type != 'private':
+                raise AssertionError( 'Role created for user %s when the admin created the account is not private, type is %s' \
+                                      % ( email, str( role.type ) ) )
+        if not previously_created:
+            # Make sure a history was not created ( previous test runs may have left deleted histories )
+            histories = get_all_histories_for_user( regular_user3 )
+            if histories:
+                raise AssertionError( 'Histories were incorrectly created for user %s when the admin created the account' % email )
+            # Make sure the user was not associated with any groups
+            if regular_user3.groups:
+                raise AssertionError( 'Groups were incorrectly associated with user %s when the admin created the account' % email )
+    def test_010_reset_password_as_admin( self ):
+        """Testing resetting a user password as admin"""
+        self.reset_password_as_admin( user_id=self.security.encode_id( regular_user3.id ), password='testreset' )
+    def test_015_login_after_password_reset( self ):
+        """Testing logging in after an admin reset a password - tests DefaultHistoryPermissions for accounts created by an admin"""
+        # logged in as admin_user
+        self.logout()
+        self.login( email=regular_user3.email, password='testreset' )
+        # Make sure a History and HistoryDefaultPermissions exist for the user
+        latest_history = get_latest_history_for_user( regular_user3 )
+        if not latest_history.user_id == regular_user3.id:
+            raise AssertionError( 'A history was not created for user %s when they logged in' % regular_user3.email )
+        if not latest_history.default_permissions:
+            raise AssertionError( 'No DefaultHistoryPermissions were created for history id %d when it was created' % latest_history.id )
+        dhps = get_default_history_permissions_by_history( latest_history )
+        if len( dhps ) > 1:
+            raise AssertionError( 'More than 1 DefaultHistoryPermissions were created for history id %d when it was created' % latest_history.id )
+        dhp = dhps[0]
+        if not dhp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+            raise AssertionError( 'The DefaultHistoryPermission.action for history id %d is "%s", but it should be "manage permissions"' \
+                                  % ( latest_history.id, dhp.action ) )
+        # Upload a file to create a HistoryDatasetAssociation
+        self.upload_file( '1.bed' )
+        latest_dataset = get_latest_dataset()
+        for dp in latest_dataset.actions:
+            # Should only have 1 DatasetPermissions
+            if dp.action != galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+                raise AssertionError( 'The DatasetPermissions for dataset id %d is %s ( should have been %s )' \
+                                      % ( latest_dataset.id,
+                                          dp.action,
+                                          galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
+        self.logout()
+        # Reset the password to the default for later tests
+        self.login( email='test(a)bx.psu.edu' )
+        self.reset_password_as_admin( user_id=self.security.encode_id( regular_user3.id ), password='testuser' )
+    def test_020_mark_user_deleted( self ):
+        """Testing marking a user account as deleted"""
+        # Logged in as admin_user
+        self.mark_user_deleted( user_id=self.security.encode_id( regular_user3.id ), email=regular_user3.email )
+        if not regular_user3.active_histories:
+            raise AssertionError( 'HistoryDatasetAssociations for regular_user3 were incorrectly deleted when the user was marked deleted' )
+    def test_025_undelete_user( self ):
+        """Testing undeleting a user account"""
+        # Logged in as admin_user
+        self.undelete_user( user_id=self.security.encode_id( regular_user3.id ), email=regular_user3.email )
+    def test_030_create_role( self ):
+        """Testing creating a new role with 3 members ( and a new group named the same ), then renaming the role"""
+        # Logged in as admin_user
+        name = 'Role One'
+        description = "This is Role Ones description"
+        user_ids=[ str( admin_user.id ), str( regular_user1.id ), str( regular_user3.id ) ]
+        self.create_role( name=name,
+                          description=description,
+                          in_user_ids=user_ids,
+                          in_group_ids=[],
+                          create_group_for_role='yes',
+                          private_role=admin_user.email )
+        # Get the role object for later tests
+        global role_one
+        role_one = sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name==name ).first()
+        assert role_one is not None, 'Problem retrieving role named "Role One" from the database'
+        # Make sure UserRoleAssociations are correct
+        if len( role_one.users ) != len( user_ids ):
+            raise AssertionError( '%d UserRoleAssociations were created for role id %d when it was created ( should have been %d )' \
+                                  % ( len( role_one.users ), role_one.id, len( user_ids ) ) )
+        # Each of the following users should now have 2 role associations, their private role and role_one
+        for user in [ admin_user, regular_user1, regular_user3 ]:
+            refresh( user )
+            if len( user.roles ) != 2:
+                raise AssertionError( '%d UserRoleAssociations are associated with user %s ( should be 2 )' \
+                                      % ( len( user.roles ), user.email ) )
+        # Make sure the group was created
+        self.visit_url( '%s/admin/groups' % self.url )
+        self.check_page_for_string( name )
+        global group_zero
+        group_zero = get_group_by_name( name )
+        # Rename the role
+        rename = "Role One's been Renamed"
+        new_description="This is Role One's Re-described"
+        self.rename_role( self.security.encode_id( role_one.id ), name=rename, description=new_description )
+        self.visit_url( '%s/admin/roles' % self.url )
+        self.check_page_for_string( rename )
+        self.check_page_for_string( new_description )
+        # Reset the role back to the original name and description
+        self.rename_role( self.security.encode_id( role_one.id ), name=name, description=description )
+    def test_035_create_group( self ):
+        """Testing creating a new group with 3 members and 1 associated role, then renaming it"""
+        # Logged in as admin_user
+        name = "Group One's Name"
+        user_ids=[ str( admin_user.id ), str( regular_user1.id ), str( regular_user3.id ) ]
+        role_ids=[ str( role_one.id ) ]
+        self.create_group( name=name, in_user_ids=user_ids, in_role_ids=role_ids )
+        # Get the group object for later tests
+        global group_one
+        group_one = get_group_by_name( name )
+        assert group_one is not None, 'Problem retrieving group named "Group One" from the database'
+        # Make sure UserGroupAssociations are correct
+        if len( group_one.users ) != len( user_ids ):
+            raise AssertionError( '%d UserGroupAssociations were created for group id %d when it was created ( should have been %d )' \
+                                  % ( len( group_one.users ), group_one.id, len( user_ids ) ) )
+        # Each user should now have 1 group association, group_one
+        for user in [ admin_user, regular_user1, regular_user3 ]:
+            refresh( user )
+            if len( user.groups ) != 1:
+                raise AssertionError( '%d UserGroupAssociations are associated with user %s ( should be 1 )' % ( len( user.groups ), user.email ) )
+        # Make sure GroupRoleAssociations are correct
+        if len( group_one.roles ) != len( role_ids ):
+            raise AssertionError( '%d GroupRoleAssociations were created for group id %d when it was created ( should have been %d )' \
+                                  % ( len( group_one.roles ), group_one.id, len( role_ids ) ) )
+        # Rename the group
+        rename = "Group One's been Renamed"
+        self.rename_group( self.security.encode_id( group_one.id ), name=rename )
+        self.home()
+        self.visit_url( '%s/admin/groups' % self.url )
+        self.check_page_for_string( rename )
+        # Reset the group back to the original name
+        self.rename_group( self.security.encode_id( group_one.id ), name=name )
+    def test_040_add_members_and_role_to_group( self ):
+        """Testing editing user membership and role associations of an existing group"""
+        # Logged in as admin_user
+        name = 'Group Two'
+        self.create_group( name=name, in_user_ids=[], in_role_ids=[] )
+        # Get the group object for later tests
+        global group_two
+        group_two = get_group_by_name( name )
+        assert group_two is not None, 'Problem retrieving group named "Group Two" from the database'
+        # group_two should have no associations
+        if group_two.users:
+            raise AssertionError( '%d UserGroupAssociations were created for group id %d when it was created ( should have been 0 )' \
+                              % ( len( group_two.users ), group_two.id ) )
+        if group_two.roles:
+            raise AssertionError( '%d GroupRoleAssociations were created for group id %d when it was created ( should have been 0 )' \
+                              % ( len( group_two.roles ), group_two.id ) )
+        user_ids = [ str( regular_user1.id )  ]
+        role_ids = [ str( role_one.id ) ]
+        self.associate_users_and_roles_with_group( self.security.encode_id( group_two.id ),
+                                                   group_two.name,
+                                                   user_ids=user_ids,
+                                                   role_ids=role_ids )
+    def test_045_create_role_with_user_and_group_associations( self ):
+        """Testing creating a role with user and group associations"""
+        # Logged in as admin_user
+        # NOTE: To get this to work with twill, all select lists on the ~/admin/role page must contain at least
+        # 1 option value or twill throws an exception, which is: ParseError: OPTION outside of SELECT
+        # Due to this bug in twill, when we create the role we bypass the page and visit the URL directly
+        # in the associate_users_and_groups_with_role() method ( a rough sketch follows this method ).
+        name = 'Role Two'
+        description = 'This is Role Two'
+        user_ids=[ str( admin_user.id ) ]
+        group_ids=[ str( group_two.id ) ]
+        private_role=admin_user.email
+        # Create the role
+        self.create_role( name=name,
+                          description=description,
+                          in_user_ids=user_ids,
+                          in_group_ids=group_ids,
+                          private_role=private_role )
+        # Get the role object for later tests
+        global role_two
+        role_two = get_role_by_name( name )
+        assert role_two is not None, 'Problem retrieving role named "Role Two" from the database'
+        # Make sure UserRoleAssociations are correct
+        if len( role_two.users ) != len( user_ids ):
+            raise AssertionError( '%d UserRoleAssociations were created for role id %d when it was created with %d members' \
+                                  % ( len( role_two.users ), role_two.id, len( user_ids ) ) )
+        # admin_user should now have 3 role associations, private role, role_one, role_two
+        refresh( admin_user )
+        if len( admin_user.roles ) != 3:
+            raise AssertionError( '%d UserRoleAssociations are associated with user %s ( should be 3 )' % ( len( admin_user.roles ), admin_user.email ) )
+        # Make sure GroupRoleAssociations are correct
+        refresh( role_two )
+        if len( role_two.groups ) != len( group_ids ):
+            raise AssertionError( '%d GroupRoleAssociations were created for role id %d when it was created ( should have been %d )' \
+                                  % ( len( role_two.groups ), role_two.id, len( group_ids ) ) )
+        # group_two should now be associated with 2 roles: role_one, role_two
+        refresh( group_two )
+        if len( group_two.roles ) != 2:
+            raise AssertionError( '%d GroupRoleAssociations are associated with group id %d ( should be 2 )' % ( len( group_two.roles ), group_two.id ) )
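As the NOTE above explains, twill raises "ParseError: OPTION outside of SELECT" when a select list renders with no options, so the membership changes are applied by visiting the controller action directly rather than filling the form. A minimal sketch of that pattern, with the URL path, parameter names and submit-button name all assumed for illustration (they are not taken from this changeset):

# Sketch only: mirror the direct-URL approach of act_on_multiple_datasets()
# to bypass a role form that twill cannot parse.  The path and field names
# below are assumptions.
url = '%s/admin/role?id=%s' % ( self.url, role_id )
for user_id in in_user_ids:
    url += '&in_users=%s' % user_id
for group_id in in_group_ids:
    url += '&in_groups=%s' % group_id
self.visit_url( url + '&role_members_edit_button=Save' )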
+    def test_050_change_user_role_associations( self ):
+        """Testing changing roles associated with a user"""
+        # Logged in as admin_user
+        # Create a new role with no associations
+        name = 'Role Three'
+        description = 'This is Role Three'
+        user_ids=[]
+        group_ids=[]
+        private_role=admin_user.email
+        self.create_role( name=name,
+                          description=description,
+                          in_user_ids=user_ids,
+                          in_group_ids=group_ids,
+                          private_role=private_role )
+        # Get the role object for later tests
+        global role_three
+        role_three = get_role_by_name( name )
+        assert role_three is not None, 'Problem retrieving role named "Role Three" from the database'
+        # Associate the role with a user
+        refresh( admin_user )
+        role_ids = []
+        for ura in admin_user.non_private_roles:
+            role_ids.append( str( ura.role_id ) )
+        role_ids.append( str( role_three.id ) )
+        group_ids = []
+        for uga in admin_user.groups:
+            group_ids.append( str( uga.group_id ) )
+        check_str = "User '%s' has been updated with %d associated roles and %d associated groups" % \
+            ( admin_user.email, len( role_ids ), len( group_ids ) )
+        self.manage_roles_and_groups_for_user( self.security.encode_id( admin_user.id ),
+                                               in_role_ids=role_ids,
+                                               in_group_ids=group_ids,
+                                               check_str=check_str )
+        refresh( admin_user )
+        # admin_user should now be associated with 4 roles: private, role_one, role_two, role_three
+        if len( admin_user.roles ) != 4:
+            raise AssertionError( '%d UserRoleAssociations are associated with %s ( should be 4 )' % \
+                                  ( len( admin_user.roles ), admin_user.email ) )
+    def test_055_mark_group_deleted( self ):
+        """Testing marking a group as deleted"""
+        # Logged in as admin_user
+        self.browse_groups( check_str1=group_two.name )
+        self.mark_group_deleted( self.security.encode_id( group_two.id ), group_two.name )
+        refresh( group_two )
+        if not group_two.deleted:
+            raise AssertionError( '%s was not correctly marked as deleted.' % group_two.name )
+        # Deleting a group should not delete any associations
+        if not group_two.users:
+            raise AssertionError( '%s incorrectly lost all members when it was marked as deleted.' % group_two.name )
+        if not group_two.roles:
+            raise AssertionError( '%s incorrectly lost all role associations when it was marked as deleted.' % group_two.name )
+    def test_060_undelete_group( self ):
+        """Testing undeleting a deleted group"""
+        # Logged in as admin_user
+        self.undelete_group( self.security.encode_id( group_two.id ), group_two.name )
+        refresh( group_two )
+        if group_two.deleted:
+            raise AssertionError( '%s was not correctly marked as not deleted.' % group_two.name )
+    def test_065_mark_role_deleted( self ):
+        """Testing marking a role as deleted"""
+        # Logged in as admin_user
+        self.home()
+        self.browse_roles( check_str1=role_two.name )
+        self.mark_role_deleted( self.security.encode_id( role_two.id ), role_two.name )
+        refresh( role_two )
+        if not role_two.deleted:
+            raise AssertionError( '%s was not correctly marked as deleted.' % role_two.name )
+        # Deleting a role should not delete any associations
+        if not role_two.users:
+            raise AssertionError( '%s incorrectly lost all user associations when it was marked as deleted.' % role_two.name )
+        if not role_two.groups:
+            raise AssertionError( '%s incorrectly lost all group associations when it was marked as deleted.' % role_two.name )
+    def test_070_undelete_role( self ):
+        """Testing undeleting a deleted role"""
+        # Logged in as admin_user
+        self.undelete_role( self.security.encode_id( role_two.id ), role_two.name )
+    def test_075_purge_user( self ):
+        """Testing purging a user account"""
+        # Logged in as admin_user
+        self.mark_user_deleted( user_id=self.security.encode_id( regular_user3.id ), email=regular_user3.email )
+        refresh( regular_user3 )
+        self.purge_user( self.security.encode_id( regular_user3.id ), regular_user3.email )
+        refresh( regular_user3 )
+        if not regular_user3.purged:
+            raise AssertionError( 'User %s was not marked as purged.' % regular_user3.email )
+        # Make sure DefaultUserPermissions deleted EXCEPT FOR THE PRIVATE ROLE
+        if len( regular_user3.default_permissions ) != 1:
+            raise AssertionError( 'DefaultUserPermissions for user %s were not deleted.' % regular_user3.email )
+        for dup in regular_user3.default_permissions:
+            role = sa_session.query( galaxy.model.Role ).get( dup.role_id )
+            if role.type != 'private':
+                raise AssertionError( 'DefaultUserPermissions for user %s are not associated with the private role.' % regular_user3.email )
+        # Make sure History deleted
+        for history in regular_user3.histories:
+            refresh( history )
+            if not history.deleted:
+                raise AssertionError( 'User %s has active history id %d after their account was marked as purged.' % ( regular_user3.email, history.id ) )
+            # NOTE: Not all hdas / datasets will be deleted at the time a history is deleted - the cleanup_datasets.py script
+            # is responsible for this.
+        # Make sure UserGroupAssociations deleted
+        if regular_user3.groups:
+            raise AssertionError( 'User %s is still associated with groups after their account was marked as purged.' % regular_user3.email )
+        # Make sure UserRoleAssociations deleted EXCEPT FOR THE PRIVATE ROLE
+        if len( regular_user3.roles ) != 1:
+            raise AssertionError( 'UserRoleAssociations for user %s were not deleted.' % regular_user3.email )
+        for ura in regular_user3.roles:
+            role = sa_session.query( galaxy.model.Role ).get( ura.role_id )
+            if role.type != 'private':
+                raise AssertionError( 'UserRoleAssociations for user %s are not associated with the private role.' % regular_user3.email )
+    def test_080_manually_unpurge_user( self ):
+        """Testing manually un-purging a user account"""
+        # Logged in as admin_user
+        # Reset the user for later test runs.  The user's private Role and DefaultUserPermissions for that role
+        # should have been preserved, so all we need to do is reset purged and deleted.
+        # TODO: If we decide to implement the GUI feature for un-purging a user, replace this with a method call
+        regular_user3.purged = False
+        regular_user3.deleted = False
+        flush( regular_user3 )
+    def test_085_purge_group( self ):
+        """Testing purging a group"""
+        # Logged in as admin_user
+        self.mark_group_deleted( self.security.encode_id( group_two.id ), group_two.name )
+        self.purge_group( self.security.encode_id( group_two.id ), group_two.name )
+        # Make sure there are no UserGroupAssociations
+        if get_user_group_associations_by_group( group_two ):
+            raise AssertionError( "Purging the group did not delete the UserGroupAssociations for group_id '%s'" % group_two.id )
+        # Make sure there are no GroupRoleAssociations
+        if get_group_role_associations_by_group( group_two ):
+            raise AssertionError( "Purging the group did not delete the GroupRoleAssociations for group_id '%s'" % group_two.id )
+        # Undelete the group for later test runs
+        self.undelete_group( self.security.encode_id( group_two.id ), group_two.name )
+    def test_090_purge_role( self ):
+        """Testing purging a role"""
+        # Logged in as admin_user
+        self.mark_role_deleted( self.security.encode_id( role_two.id ), role_two.name )
+        self.purge_role( self.security.encode_id( role_two.id ), role_two.name )
+        # Make sure there are no UserRoleAssociations
+        if get_user_role_associations_by_role( role_two ):
+            raise AssertionError( "Purging the role did not delete the UserRoleAssociations for role_id '%s'" % role_two.id )
+        # Make sure there are no DefaultUserPermissions associated with the Role
+        if get_default_user_permissions_by_role( role_two ):
+            raise AssertionError( "Purging the role did not delete the DefaultUserPermissions for role_id '%s'" % role_two.id )
+        # Make sure there are no DefaultHistoryPermissions associated with the Role
+        if get_default_history_permissions_by_role( role_two ):
+            raise AssertionError( "Purging the role did not delete the DefaultHistoryPermissions for role_id '%s'" % role_two.id )
+        # Make sure there are no GroupRoleAssociations
+        if get_group_role_associations_by_role( role_two ):
+            raise AssertionError( "Purging the role did not delete the GroupRoleAssociations for role_id '%s'" % role_two.id )
+        # Make sure there are no DatasetPermissions
+        if get_dataset_permissions_by_role( role_two ):
+            raise AssertionError( "Purging the role did not delete the DatasetPermissions for role_id '%s'" % role_two.id )
+    def test_095_manually_unpurge_role( self ):
+        """Testing manually un-purging a role"""
+        # Logged in as admin_user
+        # Manually unpurge, then undelete the role for later test runs
+        # TODO: If we decide to implement the GUI feature for un-purging a role, replace this with a method call
+        role_two.purged = False
+        flush( role_two )
+        self.undelete_role( self.security.encode_id( role_two.id ), role_two.name )
+    def test_999_reset_data_for_later_test_runs( self ):
+        """Resetting data to enable later test runs to pass"""
+        # Logged in as admin_user
+        ##################
+        # Eliminate all non-private roles
+        ##################
+        for role in [ role_one, role_two, role_three ]:
+            self.mark_role_deleted( self.security.encode_id( role.id ), role.name )
+            self.purge_role( self.security.encode_id( role.id ), role.name )
+            # Manually delete the role from the database
+            refresh( role )
+            sa_session.delete( role )
+            sa_session.flush()
+        ##################
+        # Eliminate all groups
+        ##################
+        for group in [ group_zero, group_one, group_two ]:
+            self.mark_group_deleted( self.security.encode_id( group.id ), group.name )
+            self.purge_group( self.security.encode_id( group.id ), group.name )
+            # Manually delete the group from the database
+            refresh( group )
+            sa_session.delete( group )
+            sa_session.flush()
+        ##################
+        # Make sure all users are associated only with their private roles
+        ##################
+        for user in [ admin_user, regular_user1, regular_user2, regular_user3 ]:
+            refresh( user )
+            if len( user.roles ) != 1:
+                raise AssertionError( '%d UserRoleAssociations are associated with %s ( should be 1 )' % ( len( user.roles ), user.email ) )
diff -r e39c9a2a0b4c -r 48e83411aa91 test/functional/test_data_security.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/test_data_security.py	Fri Mar 12 16:11:26 2010 -0500
@@ -0,0 +1,196 @@
+from base.twilltestcase import *
+from base.test_db_util import *
+
+class TestDataSecurity( TwillTestCase ):
+    def test_000_initiate_users( self ):
+        """Ensuring all required user accounts exist"""
+        self.logout()
+        self.login( email='test1(a)bx.psu.edu' )
+        global regular_user1
+        regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+        assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+        global regular_user1_private_role
+        regular_user1_private_role = get_private_role( regular_user1 )
+        self.logout()
+        self.login( email='test2(a)bx.psu.edu' )
+        global regular_user2
+        regular_user2 = get_user( 'test2(a)bx.psu.edu' )
+        assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+        global regular_user2_private_role
+        regular_user2_private_role = get_private_role( regular_user2 )
+        self.logout()
+        self.login( email='test3(a)bx.psu.edu' )
+        global regular_user3
+        regular_user3 = get_user( 'test3(a)bx.psu.edu' )
+        assert regular_user3 is not None, 'Problem retrieving user with email "test3(a)bx.psu.edu" from the database'
+        global regular_user3_private_role
+        regular_user3_private_role = get_private_role( regular_user3 )
+        self.logout()
+        self.login( email='test(a)bx.psu.edu' )
+        global admin_user
+        admin_user = get_user( 'test(a)bx.psu.edu' )
+        assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+        global admin_user_private_role
+        admin_user_private_role = get_private_role( admin_user )
+    def test_005_default_permissions( self ):
+        """Testing initial settings for DefaultUserPermissions and DefaultHistoryPermissions"""
+        # Logged in as admin_user
+        # Make sure DefaultUserPermissions are correct
+        dups = get_default_user_permissions_by_user( admin_user )
+        if len( dups ) > 1:
+            raise AssertionError( '%d DefaultUserPermissions associated with user %s ( should be 1 )' \
+                                  % ( len( admin_user.default_permissions ), admin_user.email ) )
+        dup = dups[0]
+        if not dup.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+            raise AssertionError( 'The DefaultUserPermission.action for user "%s" is "%s", but it should be "%s"' \
+                                  % ( admin_user.email, dup.action, galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
+        # Make sure DefaultHistoryPermissions are correct
+        latest_history = get_latest_history_for_user( admin_user )
+        dhps = get_default_history_permissions_by_history( latest_history )
+        if len( dhps ) > 1:
+            raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d when it was created ( should have been 1 )' \
+                                  % ( len( latest_history.default_permissions ), latest_history.id ) )
+        dhp = dhps[0]
+        if not dhp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+            raise AssertionError( 'The DefaultHistoryPermission.action for history id %d is "%s", but it should be "%s"' \
+                                  % ( latest_history.id, dhp.action, galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
+        self.manage_roles_and_groups_for_user( self.security.encode_id( admin_user.id ),
+                                               check_str=admin_user.email )
+        # Try deleting the admin_user's private role
+        check_str = "You cannot eliminate a user's private role association."
+        self.manage_roles_and_groups_for_user( self.security.encode_id( admin_user.id ),
+                                               out_role_ids=str( admin_user_private_role.id ),
+                                               check_str=check_str )
+    def test_010_private_role_creation_and_default_history_permissions( self ):
+        """Testing private role creation and changing DefaultHistoryPermissions for new histories"""
+        # Logged in as admin_user
+        self.logout()
+        # Some of the history related tests here are similar to some tests in the
+        # test_history_functions.py script, so we could potentially eliminate 1 or 2 of them.
+        self.login( email='test1(a)bx.psu.edu' )
+        global regular_user1
+        regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+        assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+        # Add a dataset to the history
+        self.upload_file( '1.bed' )
+        latest_dataset = get_latest_dataset()
+        # Make sure DatasetPermissions are correct - default is 'manage permissions'
+        dps = get_dataset_permissions_by_dataset( latest_dataset )
+        if len( dps ) > 1:
+            raise AssertionError( '%d DatasetPermissions were created for dataset id %d when it was created ( should have been 1 )' \
+                                  % ( len( dps ), latest_dataset.id ) )
+        dp = dps[0]
+        if not dp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+            raise AssertionError( 'The DatasetPermissions.action for dataset id %d is "%s", but it should be "manage permissions"' \
+                                  % ( latest_dataset.id, dp.action ) )
+        # Change DefaultHistoryPermissions for regular_user1
+        permissions_in = []
+        actions_in = []
+        for key, value in galaxy.model.Dataset.permitted_actions.items():
+            # Setting the 'access' permission with the private role makes this dataset private ( see the sketch after this method )
+            permissions_in.append( key )
+            actions_in.append( value.action )
+        # Sort actions for later comparison
+        actions_in.sort()
+        self.user_set_default_permissions( permissions_in=permissions_in, role_id=str( regular_user1_private_role.id ) )
+        # Make sure the default permissions are changed for new histories
+        self.new_history()
+        # logged in as regular_user1
+        latest_history = get_latest_history_for_user( regular_user1 )
+        if len( latest_history.default_permissions ) != len( actions_in ):
+            raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d, should have been %d' % \
+                                  ( len( latest_history.default_permissions ), latest_history.id, len( actions_in ) ) )
+        dhps = []
+        for dhp in latest_history.default_permissions:
+            dhps.append( dhp.action )
+        # Sort permissions for later comparison
+        dhps.sort()
+        for key, value in galaxy.model.Dataset.permitted_actions.items():
+            if value.action not in dhps:
+                raise AssertionError( '%s not in history id %d default_permissions after they were changed' % ( value.action, latest_history.id ) )
+        # Add a dataset to the history
+        self.upload_file( '1.bed' )
+        latest_dataset = get_latest_dataset()
+        # Make sure DatasetPermissions are correct
+        if len( latest_dataset.actions ) != len( latest_history.default_permissions ):
+            raise AssertionError( '%d DatasetPermissions were created for dataset id %d when it was created ( should have been %d )' % \
+                                  ( len( latest_dataset.actions ), latest_dataset.id, len( latest_history.default_permissions ) ) )
+        dps = []
+        for dp in latest_dataset.actions:
+            dps.append( dp.action )
+        # Sort actions for later comparison
+        dps.sort()
+        # Compare DatasetPermissions with permissions_in - should be the same
+        if dps != actions_in:
+            raise AssertionError( 'DatasetPermissions "%s" for dataset id %d differ from changed default permissions "%s"' \
+                % ( str( dps ), latest_dataset.id, str( actions_in ) ) )
+        # Compare DefaultHistoryPermissions and DatasetPermissions - should be the same
+        if dps != dhps:
+                raise AssertionError( 'DatasetPermissions "%s" for dataset id %d differ from DefaultHistoryPermissions "%s" for history id %d' \
+                                      % ( str( dps ), latest_dataset.id, str( dhps ), latest_history.id ) )
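Because the test above puts every permitted action, including 'access', under regular_user1's private role, the datasets it uploads are effectively private to that user. A minimal sketch of how that could be verified, assuming each DatasetPermissions row exposes a role_id attribute (an assumption here; only .action is exercised by the test itself):

# Sketch only: a dataset is private when its sole DATASET_ACCESS permission
# points at the owner's private role.
access_action = galaxy.model.Dataset.permitted_actions.DATASET_ACCESS.action
access_role_ids = [ dp.role_id for dp in latest_dataset.actions if dp.action == access_action ]
if access_role_ids != [ regular_user1_private_role.id ]:
    raise AssertionError( 'Dataset id %d is not restricted to the private role of %s'
                          % ( latest_dataset.id, regular_user1.email ) )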
+    def test_015_change_default_permissions_for_current_history( self ):
+        """Testing changing DefaultHistoryPermissions for the current history"""
+        # logged in as regular_user1
+        self.logout()
+        self.login( email=regular_user2.email )
+        latest_history = get_latest_history_for_user( regular_user2 )
+        self.upload_file( '1.bed' )
+        latest_dataset = get_latest_dataset()
+        permissions_in = [ 'DATASET_MANAGE_PERMISSIONS' ]
+        # Make sure these are in sorted order for later comparison
+        actions_in = [ 'manage permissions' ]
+        permissions_out = [ 'DATASET_ACCESS' ]
+        actions_out = [ 'access' ]
+        # Change DefaultHistoryPermissions for the current history
+        self.history_set_default_permissions( permissions_out=permissions_out, permissions_in=permissions_in, role_id=str( regular_user2_private_role.id ) )
+        if len( latest_history.default_permissions ) != len( actions_in ):
+            raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d, should have been %d' \
+                                  % ( len( latest_history.default_permissions ), latest_history.id, len( permissions_in ) ) )
+        # Make sure DefaultHistoryPermissions were correctly changed for the current history
+        dhps = []
+        for dhp in latest_history.default_permissions:
+            dhps.append( dhp.action )
+        # Sort permissions for later comparison
+        dhps.sort()
+        # Compare DefaultHistoryPermissions and actions_in - should be the same
+        if dhps != actions_in:
+            raise AssertionError( 'DefaultHistoryPermissions "%s" for history id %d differ from actions "%s" passed for changing' \
+                                      % ( str( dhps ), latest_history.id, str( actions_in ) ) )
+        # Make sure DatasetPermissions are correct
+        if len( latest_dataset.actions ) != len( latest_history.default_permissions ):
+            raise AssertionError( '%d DatasetPermissions were created for dataset id %d when it was created ( should have been %d )' \
+                                  % ( len( latest_dataset.actions ), latest_dataset.id, len( latest_history.default_permissions ) ) )
+        dps = []
+        for dp in latest_dataset.actions:
+            dps.append( dp.action )
+        # Sort actions for comparison
+        dps.sort()
+        # Compare DatasetPermissions and DefaultHistoryPermissions - should be the same
+        if dps != dhps:
+            raise AssertionError( 'DatasetPermissions "%s" for dataset id %d differ from DefaultHistoryPermissions "%s"' \
+                                      % ( str( dps ), latest_dataset.id, str( dhps ) ) )
+    def test_999_reset_data_for_later_test_runs( self ):
+        """Resetting data to enable later test runs to pass"""
+        # Logged in as regular_user2
+        self.logout()
+        self.login( email=admin_user.email )
+        ##################
+        # Make sure all users are associated only with their private roles
+        ##################
+        for user in [ admin_user, regular_user1, regular_user2, regular_user3 ]:
+            refresh( user )
+            if len( user.roles ) != 1:
+                raise AssertionError( '%d UserRoleAssociations are associated with %s ( should be 1 )' % ( len( user.roles ), user.email ) )
+        #####################
+        # Reset DefaultHistoryPermissions for regular_user1
+        #####################
+        self.logout()
+        self.login( email=regular_user1.email )
+        # Change DefaultHistoryPermissions for regular_user1 back to the default
+        permissions_in = [ 'DATASET_MANAGE_PERMISSIONS' ]
+        permissions_out = [ 'DATASET_ACCESS' ]
+        self.user_set_default_permissions( permissions_in=permissions_in,
+                                           permissions_out=permissions_out,
+                                           role_id=str( regular_user1_private_role.id ) )
+        self.logout()
+        self.login( email=admin_user.email )
diff -r e39c9a2a0b4c -r 48e83411aa91 test/functional/test_library_features.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/test_library_features.py	Fri Mar 12 16:11:26 2010 -0500
@@ -0,0 +1,606 @@
+from base.twilltestcase import *
+from base.test_db_util import *
+
+class TestLibraryFeatures( TwillTestCase ):
+    def test_000_initiate_users( self ):
+        """Ensuring all required user accounts exist"""
+        self.logout()
+        self.login( email='test1(a)bx.psu.edu' )
+        global regular_user1
+        regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+        assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+        self.logout()
+        self.login( email='test2(a)bx.psu.edu' )
+        global regular_user2
+        regular_user2 = get_user( 'test2(a)bx.psu.edu' )
+        assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+        self.logout()
+        self.login( email='test3(a)bx.psu.edu' )
+        global regular_user3
+        regular_user3 = get_user( 'test3(a)bx.psu.edu' )
+        assert regular_user3 is not None, 'Problem retrieving user with email "test3(a)bx.psu.edu" from the database'
+        self.logout()
+        self.login( email='test(a)bx.psu.edu' )
+        global admin_user
+        admin_user = get_user( 'test(a)bx.psu.edu' )
+        assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+    def test_005_create_library( self ):
+        """Testing creating a new library, then renaming it"""
+        # Logged in as admin_user
+        name = "library features Library1"
+        description = "library features Library1 description"
+        synopsis = "library features Library1 synopsis"
+        self.create_library( name=name, description=description, synopsis=synopsis )
+        self.browse_libraries_admin( check_str1=name, check_str2=description )
+        # Get the library object for later tests
+        global library_one
+        library_one = get_library( name, description, synopsis )
+        assert library_one is not None, 'Problem retrieving library named "%s" from the database' % name
+        # Rename the library
+        new_name = "library features Library1 new name"
+        new_description = "library features Library1 new description"
+        new_synopsis = "library features Library1 new synopsis"
+        self.library_info( 'library_admin',
+                            self.security.encode_id( library_one.id ),
+                            library_one.name,
+                            new_name=new_name,
+                            new_description=new_description,
+                            new_synopsis=new_synopsis )
+        self.browse_libraries_admin( check_str1=new_name, check_str2=new_description )
+        # Reset the library back to the original name and description
+        self.library_info( 'library_admin',
+                            self.security.encode_id( library_one.id ),
+                            library_one.name,
+                            new_name=name,
+                            new_description=description,
+                            new_synopsis=synopsis )
+        refresh( library_one )
+    def test_010_library_template_features( self ):
+        """Testing adding a template to a library, then filling in the contents"""
+        # Logged in as admin_user
+        form_name = 'Library template Form One'
+        form_desc = 'This is Form One'
+        form_type = galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+        # Create form for library template
+        self.create_form( name=form_name, desc=form_desc, formtype=form_type )
+        global form_one
+        form_one = get_form( form_name )
+        assert form_one is not None, 'Problem retrieving form named (%s) from the database' % form_name
+        # Add new template based on the form to the library
+        template_name = 'Library Template 1'
+        self.add_library_template( 'library_admin',
+                                   'library',
+                                   self.security.encode_id( library_one.id ),
+                                   self.security.encode_id( form_one.id ),
+                                   form_one.name )
+        # Make sure the template fields are displayed on the library information page
+        field_dict = form_one.fields[ 0 ]
+        global form_one_field_label
+        form_one_field_label = '%s' % str( field_dict.get( 'label', 'Field 0' ) )
+        global form_one_field_help
+        form_one_field_help = '%s' % str( field_dict.get( 'helptext', 'Field 0 help' ) )
+        global form_one_field_required
+        form_one_field_required = '%s' % str( field_dict.get( 'required', 'optional' ) ).capitalize()
+        # Add information to the library using the template
+        global form_one_field_name
+        form_one_field_name = 'field_0'
+        contents = '%s library contents' % form_one_field_label
+        self.library_info( 'library_admin',
+                            self.security.encode_id( library_one.id ),
+                            library_one.name,
+                            ele_1_field_name=form_one_field_name,
+                            ele_1_contents=contents )
+    def test_015_edit_template_contents_admin_view( self ):
+        """Test editing template contents from the Admin view"""
+        # Logged in as admin_user
+        # Make sure the template contents from the previous method were correctly saved
+        # Twill barfs if this test is run in the previous method.
+        contents = '%s library contents' % form_one_field_label
+        self.library_info( 'library_admin',
+                            self.security.encode_id( library_one.id ),
+                            library_one.name,
+                            check_str1=contents )
+        contents = '%s library contents' % form_one_field_label
+        contents_edited = contents + ' edited'
+        # Edit the contents and then save them
+        self.library_info( 'library_admin',
+                            self.security.encode_id( library_one.id ),
+                            library_one.name,
+                            ele_1_field_name=form_one_field_name,
+                            ele_1_contents=contents_edited )
+        # Make sure the template contents were correctly saved
+        self.library_info( 'library_admin',
+                            self.security.encode_id( library_one.id ),
+                            library_one.name,
+                            check_str1=contents_edited )
+    def test_020_add_public_dataset_to_root_folder( self ):
+        """Testing adding a public dataset to the root folder, making sure library template is inherited"""
+        # Logged in as admin_user
+        message = 'Testing adding a public dataset to the root folder'
+        # The template should be inherited to the library dataset upload form.
+        template_contents = "%s contents for root folder 1.bed" % form_one_field_label
+        self.add_library_dataset( 'library_admin',
+                                  '1.bed',
+                                  self.security.encode_id( library_one.id ),
+                                  self.security.encode_id( library_one.root_folder.id ),
+                                  library_one.root_folder.name,
+                                  file_type='bed',
+                                  dbkey='hg18',
+                                  message=message.replace( ' ', '+' ),
+                                  root=True,
+                                  template_field_name1=form_one_field_name,
+                                  template_field_contents1=template_contents )
+        global ldda_one
+        ldda_one = get_latest_ldda()
+        assert ldda_one is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_one from the database'
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             check_str1='1.bed',
+                             check_str2=message,
+                             check_str3=admin_user.email )
+        # Make sure the library template contents were correctly saved
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             self.security.encode_id( library_one.root_folder.id ),
+                             self.security.encode_id( ldda_one.id ),
+                             ldda_one.name,
+                             check_str1=template_contents )
+    def test_025_add_new_folder_to_root_folder( self ):
+        """Testing adding a folder to a library root folder"""
+        # logged in as admin_user
+        root_folder = library_one.root_folder
+        name = "Root Folder's Folder One"
+        description = "This is the root folder's Folder One"
+        self.add_folder( 'library_admin',
+                         self.security.encode_id( library_one.id ),
+                         self.security.encode_id( root_folder.id ),
+                         name=name,
+                         description=description )
+        global folder_one
+        folder_one = get_folder( root_folder.id, name, description )
+        assert folder_one is not None, 'Problem retrieving library folder named "%s" from the database' % name
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             check_str1=name,
+                             check_str2=description )
+        # Make sure the template was inherited, but the contents were not
+        contents = '%s library contents' % form_one_field_label
+        self.folder_info( 'library_admin',
+                          self.security.encode_id( folder_one.id ),
+                          self.security.encode_id( library_one.id ),
+                          check_str1=form_one_field_name,
+                          not_displayed=contents )
+        # Add contents to the inherited template
+        template_contents = "%s contents for Folder One" % form_one_field_label
+        self.folder_info( 'library_admin',
+                          self.security.encode_id( folder_one.id ),
+                          self.security.encode_id( library_one.id ),
+                          field_name=form_one_field_name,
+                          contents=template_contents )
+    def test_030_add_subfolder_to_folder( self ):
+        """Testing adding a folder to a library folder"""
+        # logged in as admin_user
+        name = "Folder One's Subfolder"
+        description = "This is the Folder One's subfolder"
+        self.add_folder( 'library_admin',
+                         self.security.encode_id( library_one.id ),
+                         self.security.encode_id( folder_one.id ),
+                         name=name,
+                         description=description )
+        global subfolder_one
+        subfolder_one = get_folder( folder_one.id, name, description )
+        assert subfolder_one is not None, 'Problem retrieving library folder named "%s" from the database' % name
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             check_str1=name,
+                             check_str2=description )
+        # Make sure the template was inherited, but the contents were not
+        contents = '%s library contents' % form_one_field_label
+        self.folder_info( 'library_admin',
+                          self.security.encode_id( subfolder_one.id ),
+                          self.security.encode_id( library_one.id ),
+                          check_str1=form_one_field_name,
+                          not_displayed=contents )
+        # Add contents to the inherited template
+        template_contents = "%s contents for Folder One" % form_one_field_label
+        self.folder_info( 'library_admin',
+                          self.security.encode_id( subfolder_one.id ),
+                          self.security.encode_id( library_one.id ),
+                          field_name=form_one_field_name,
+                          contents=template_contents )
+    def test_035_add_2nd_new_folder_to_root_folder( self ):
+        """Testing adding a 2nd folder to a library root folder"""
+        # logged in as admin_user
+        root_folder = library_one.root_folder
+        name = "Folder Two"
+        description = "This is the root folder's Folder Two"
+        self.add_folder( 'library_admin',
+                         self.security.encode_id( library_one.id ),
+                         self.security.encode_id( root_folder.id ),
+                         name=name,
+                         description=description )
+        global folder_two
+        folder_two = get_folder( root_folder.id, name, description )
+        assert folder_two is not None, 'Problem retrieving library folder named "%s" from the database' % name
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             check_str1=name,
+                             check_str2=description )
+    def test_040_add_public_dataset_to_root_folders_2nd_subfolder( self ):
+        """Testing adding a public dataset to the root folder's 2nd sub-folder"""
+        # Logged in as admin_user
+        message = "Testing adding a public dataset to the folder named %s" % folder_two.name
+        # The form_one template should be inherited to the library dataset upload form.
+        template_contents = "%s contents for %s 2.bed" % ( form_one_field_label, folder_two.name )
+        self.add_library_dataset( 'library_admin',
+                                  '2.bed',
+                                  self.security.encode_id( library_one.id ),
+                                  self.security.encode_id( folder_two.id ),
+                                  folder_two.name,
+                                  file_type='bed',
+                                  dbkey='hg18',
+                                  message=message.replace( ' ', '+' ),
+                                  root=False,
+                                  template_field_name1=form_one_field_name,
+                                  template_field_contents1=template_contents )
+        global ldda_two
+        ldda_two = get_latest_ldda()
+        assert ldda_two is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_two from the database'
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             check_str1='2.bed',
+                             check_str2=message,
+                             check_str3=admin_user.email )
+        # Make sure the library template contents were correctly saved
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             self.security.encode_id( folder_two.id ),
+                             self.security.encode_id( ldda_two.id ),
+                             ldda_two.name,
+                             check_str1=template_contents )
+    def test_045_add_2nd_public_dataset_to_root_folders_2nd_subfolder( self ):
+        """Testing adding a 2nd public dataset to the root folder's 2nd sub-folder"""
+        # Logged in as admin_user
+        message = "Testing adding a 2nd public dataset to the folder named %s" % folder_two.name
+        # The form_one template should be inherited to the library dataset upload form.
+        template_contents = "%s contents for %s 3.bed" % ( form_one_field_label, folder_two.name )
+        self.add_library_dataset( 'library_admin',
+                                  '3.bed',
+                                  self.security.encode_id( library_one.id ),
+                                  self.security.encode_id( folder_two.id ),
+                                  folder_two.name,
+                                  file_type='bed',
+                                  dbkey='hg18',
+                                  message=message.replace( ' ', '+' ),
+                                  root=False,
+                                  template_field_name1=form_one_field_name,
+                                  template_field_contents1=template_contents )
+        global ldda_three
+        ldda_three = get_latest_ldda()
+        assert ldda_three is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_three from the database'
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             check_str1='3.bed',
+                             check_str2=message,
+                             check_str3=admin_user.email )
+        # Make sure the library template contents were correctly saved
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             self.security.encode_id( folder_two.id ),
+                             self.security.encode_id( ldda_three.id ),
+                             ldda_three.name,
+                             check_str1=template_contents )
+    def test_050_copy_dataset_from_history_to_subfolder( self ):
+        """Testing copying a dataset from the current history to a subfolder"""
+        # logged in as admin_user
+        self.new_history()
+        self.upload_file( "4.bed" )
+        latest_hda = get_latest_hda()
+        self.add_history_datasets_to_library( 'library_admin',
+                                              self.security.encode_id( library_one.id ),
+                                              self.security.encode_id( subfolder_one.id ),
+                                              subfolder_one.name,
+                                              self.security.encode_id( latest_hda.id ),
+                                              root=False )
+        global ldda_four
+        ldda_four = get_latest_ldda()
+        assert ldda_four is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_four from the database'
+        # Make sure the correct template was inherited but the contents were not inherited
+        contents = "%s contents for Folder One's Subfolder" % form_one_field_label
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             self.security.encode_id( subfolder_one.id ),
+                             self.security.encode_id( ldda_four.id ),
+                             ldda_four.name,
+                             check_str1=form_one_field_name,
+                             not_displayed=contents )
+    def test_055_editing_dataset_attribute_info( self ):
+        """Testing editing a library dataset's attribute information"""
+        # logged in as admin_user
+        new_ldda_name = '4.bed ( version 1 )'
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             self.security.encode_id( subfolder_one.id ),
+                             self.security.encode_id( ldda_four.id ),
+                             ldda_four.name,
+                             new_ldda_name=new_ldda_name )
+        refresh( ldda_four )
+        self.browse_library( 'library_admin', self.security.encode_id( library_one.id ), check_str1=new_ldda_name )
+        # Make sure the correct template was inherited but the contents were not inherited
+        contents = "%s contents for Folder One's Subfolder" % form_one_field_label
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             self.security.encode_id( subfolder_one.id ),
+                             self.security.encode_id( ldda_four.id ),
+                             ldda_four.name,
+                             check_str1=form_one_field_name,
+                             not_displayed=contents )
+    def test_060_uploading_new_dataset_version( self ):
+        """Testing uploading a new version of a library dataset"""
+        # logged in as admin_user
+        message = 'Testing uploading a new version of a dataset'
+        # The form_one template should be inherited by the library dataset upload form.
+        template_contents = "%s contents for %s new version of 4.bed" % ( form_one_field_label, folder_one.name )
+        self.upload_new_dataset_version( 'library_admin',
+                                         '4.bed',
+                                         self.security.encode_id( library_one.id ),
+                                         self.security.encode_id( subfolder_one.id ),
+                                         subfolder_one.name,
+                                         self.security.encode_id( ldda_four.library_dataset.id ),
+                                         ldda_four.name,
+                                         file_type='auto',
+                                         dbkey='hg18',
+                                         message=message.replace( ' ', '+' ),
+                                         template_field_name1=form_one_field_name,
+                                         template_field_contents1=template_contents )
+        global ldda_four_version_two
+        ldda_four_version_two = get_latest_ldda()
+        assert ldda_four_version_two is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_four_version_two from the database'
+        # Make sure the correct template was inherited, but does not include any contents
+        contents = "%s contents for Folder One's Subfolder" % form_one_field_label
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             self.security.encode_id( subfolder_one.id ),
+                             self.security.encode_id( ldda_four_version_two.id ),
+                             ldda_four_version_two.name,
+                             check_str1='This is the latest version of this library dataset',
+                             not_displayed=contents )
+        # Fill in the template contents
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             self.security.encode_id( subfolder_one.id ),
+                             self.security.encode_id( ldda_four_version_two.id ),
+                             ldda_four_version_two.name,
+                             ele_1_field_name=form_one_field_name,
+                             ele_1_contents=template_contents )
+        # Check the previous version
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             self.security.encode_id( subfolder_one.id ),
+                             self.security.encode_id( ldda_four.id ),
+                             ldda_four.name,
+                             check_str1='This is an expired version of this library dataset' )
+        # Make sure ldda_four is no longer displayed in the library
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             not_displayed=ldda_four.name )
+    def test_065_upload_directory_of_files_from_libraries_view( self ):
+        """Testing uploading a directory of files to a root folder from the Data Libraries view"""
+        # logged in as admin_user
+        # admin_user will not have the option to upload a directory of files from the
+        # Libraries view since a sub-directory named the same as their email is not contained
+        # in the configured user_library_import_dir.  However, since members of role_one have
+        # the LIBRARY_ADD permission, we can test this feature as regular_user1 or regular_user3
+        self.logout()
+        self.login( email=regular_user1.email )
+        message = 'Uploaded all files in test-data/users/test1...'
+        # Since regular_user1 does not have any sub-directories contained within her configured
+        # user_library_import_dir, the only option in her server_dir select list will be the
+        # directory named the same as her email
+        check_str_after_submit = "Added 1 datasets to the library '%s' (each is selected)." % library_one.root_folder.name
+        # TODO: gvk ( 3/12/10 ): this is broken, so commenting it out until I have time to discover why...
+        """
+        self.upload_directory_of_files( 'library',
+                                        self.security.encode_id( library_one.id ),
+                                        self.security.encode_id( library_one.root_folder.id ),
+                                        server_dir=regular_user1.email,
+                                        message=message,
+                                        check_str_after_submit=check_str_after_submit )
+        self.browse_library( 'library',
+                             self.security.encode_id( library_one.id ),
+                             check_str1=regular_user1.email,
+                             check_str2=message )
+        self.logout()
+        self.login( regular_user3.email )
+        message = 'Uploaded all files in test-data/users/test3.../run1'
+        # Since regular_user3 has a subdirectory contained within her configured user_library_import_dir,
+        # she will have a "None" option in her server_dir select list
+        self.upload_directory_of_files( 'library',
+                                        self.security.encode_id( library_one.id ),
+                                        self.security.encode_id( library_one.root_folder.id ),
+                                        server_dir='run1',
+                                        message=message,
+                                        check_str1='<option>None</option>',
+                                        check_str_after_submit=check_str_after_submit )
+        self.browse_library( 'library',
+                             self.security.encode_id( library_one.id ),
+                             check_str1=regular_user3.email,
+                             check_str2=message )
+        """
+    def test_070_download_archive_of_library_files( self ):
+        """Testing downloading an archive of files from the library"""
+        # logged in as regular_user3
+        self.logout()
+        self.login( email=admin_user.email )
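+        # Download the selected library datasets in each supported archive format and verify the contents of each archive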
+        for format in ( 'tbz', 'tgz', 'zip' ):
+            archive = self.download_archive_of_library_files( cntrller='library',
+                                                              library_id=self.security.encode_id( library_one.id ),
+                                                              ldda_ids=[ self.security.encode_id( ldda_one.id ), self.security.encode_id( ldda_two.id ) ],
+                                                              format=format )
+            self.check_archive_contents( archive, ( ldda_one, ldda_two ) )
+            os.remove( archive )
+    def test_075_mark_dataset_deleted( self ):
+        """Testing marking a library dataset as deleted"""
+        # Logged in as admin_user
+        self.delete_library_item( self.security.encode_id( library_one.id ),
+                                  self.security.encode_id( ldda_two.library_dataset.id ),
+                                  ldda_two.name,
+                                  item_type='library_dataset' )
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             not_displayed=ldda_two.name )
+    def test_080_display_and_hide_deleted_dataset( self ):
+        """Testing displaying and hiding a deleted library dataset"""
+        # Logged in as admin_user
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             show_deleted=True,
+                             check_str1=ldda_two.name )
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             not_displayed=ldda_two.name )
+    def test_085_mark_folder_deleted( self ):
+        """Testing marking a library folder as deleted"""
+        # Logged in as admin_user
+        self.delete_library_item( self.security.encode_id( library_one.id ),
+                                  self.security.encode_id( folder_two.id ),
+                                  folder_two.name,
+                                  item_type='folder' )
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             not_displayed=folder_two.name )
+    def test_090_mark_folder_undeleted( self ):
+        """Testing marking a library folder as undeleted"""
+        # Logged in as admin_user
+        self.undelete_library_item( self.security.encode_id( library_one.id ),
+                                    self.security.encode_id( folder_two.id ),
+                                    folder_two.name,
+                                    item_type='folder' )
+        # 2.bed was deleted before the folder was deleted, so state should have been saved.  In order
+        # for 2.bed to be displayed, it would itself have to be marked undeleted.
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             check_str1=folder_two.name,
+                             not_displayed=ldda_two.name )
+    def test_095_mark_library_deleted( self ):
+        """Testing marking a library as deleted"""
+        # Logged in as admin_user
+        # First mark folder_two as deleted to further test state saving when we undelete the library
+        self.delete_library_item( self.security.encode_id( library_one.id ),
+                                  self.security.encode_id( folder_two.id ),
+                                  folder_two.name,
+                                  item_type='folder' )
+        self.delete_library_item( self.security.encode_id( library_one.id ),
+                                  self.security.encode_id( library_one.id ),
+                                  library_one.name,
+                                  item_type='library' )
+        self.browse_libraries_admin( not_displayed1=library_one.name )
+        self.browse_libraries_admin( deleted=True, check_str1=library_one.name )
+    def test_100_mark_library_undeleted( self ):
+        """Testing marking a library as undeleted"""
+        # Logged in as admin_user
+        self.undelete_library_item( self.security.encode_id( library_one.id ),
+                                    self.security.encode_id( library_one.id ),
+                                    library_one.name,
+                                    item_type='library' )
+        self.browse_libraries_admin( check_str1=library_one.name )
+        self.browse_library( 'library_admin',
+                            self.security.encode_id( library_one.id ),
+                            check_str1=library_one.name,
+                            not_displayed=folder_two.name )
+    def test_105_purge_library( self ):
+        """Testing purging a library"""
+        # Logged in as admin_user
+        self.delete_library_item( self.security.encode_id( library_one.id ),
+                                  self.security.encode_id( library_one.id ),
+                                  library_one.name,
+                                  item_type='library' )
+        self.purge_library( self.security.encode_id( library_one.id ), library_one.name )
+        # Make sure the library was purged
+        refresh( library_one )
+        if not ( library_one.deleted and library_one.purged ):
+            raise AssertionError( 'The library id %s named "%s" has not been marked as deleted and purged.' % ( str( library_one.id ), library_one.name ) )
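+        # Recursively verify that all sub-folders were purged and that all library datasets, LDDAs and datasets were marked deleted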
+        def check_folder( library_folder ):
+            for folder in library_folder.folders:
+                refresh( folder )
+                # Make sure all of the library_folders are purged
+                if not folder.purged:
+                    raise AssertionError( 'The library_folder id %s named "%s" has not been marked purged.' % ( str( folder.id ), folder.name ) )
+                check_folder( folder )
+            # Make sure all of the LibraryDatasets and associated objects are deleted
+            refresh( library_folder )
+            for library_dataset in library_folder.datasets:
+                refresh( library_dataset )
+                ldda = library_dataset.library_dataset_dataset_association
+                if ldda:
+                    refresh( ldda )
+                    if not ldda.deleted:
+                        raise AssertionError( 'The library_dataset_dataset_association id %s named "%s" has not been marked as deleted.' % \
+                                              ( str( ldda.id ), ldda.name ) )
+                    # Make sure all of the datasets have been deleted
+                    dataset = ldda.dataset
+                    refresh( dataset )
+                    if not dataset.deleted:
+                        raise AssertionError( 'The dataset with id "%s" has not been marked as deleted when it should have been.' % \
+                                              str( ldda.dataset.id ) )
+                if not library_dataset.deleted:
+                    raise AssertionError( 'The library_dataset id %s named "%s" has not been marked as deleted.' % \
+                                          ( str( library_dataset.id ), library_dataset.name ) )
+        check_folder( library_one.root_folder )
+    def test_110_no_library_template( self ):
+        """Test library features when library has no template"""
+        # Logged in as admin_user
+        name = "library features Library Two"
+        description = "library features This is Library Two"
+        synopsis = "library features Library Two synopsis"
+        # Create a library, adding no template
+        self.create_library( name=name, description=description, synopsis=synopsis )
+        self.browse_libraries_admin( check_str1=name, check_str2=description )
+        global library_two
+        library_two = get_library( name, description, synopsis )
+        assert library_two is not None, 'Problem retrieving library named "%s" from the database' % name
+        # Add a dataset to the library
+        self.add_library_dataset( 'library_admin',
+                                  '3.bed',
+                                  self.security.encode_id( library_two.id ),
+                                  self.security.encode_id( library_two.root_folder.id ),
+                                  library_two.root_folder.name,
+                                  file_type='bed',
+                                  dbkey='hg18',
+                                  message='',
+                                  root=True )
+        ldda_three = get_latest_ldda()
+        assert ldda_three is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_three from the database'
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_two.id ),
+                             check_str1='3.bed',
+                             check_str2=admin_user.email )
+        # TODO: add a functional test to cover adding a library dataset via url_paste here...
+        # TODO: Add a functional test to cover checking the space_to_tab checkbox here...
+        # Delete and purge the library
+        self.delete_library_item( self.security.encode_id( library_two.id ),
+                                  self.security.encode_id( library_two.id ),
+                                  library_two.name,
+                                  item_type='library' )
+        self.purge_library( self.security.encode_id( library_two.id ), library_two.name )
+        self.home()
+    def test_999_reset_data_for_later_test_runs( self ):
+        """Reseting data to enable later test runs to pass"""
+        # Logged in as admin_user
+        ##################
+        # Purge all libraries
+        ##################
+        for library in [ library_one, library_two ]:
+            self.delete_library_item( self.security.encode_id( library.id ),
+                                      self.security.encode_id( library.id ),
+                                      library.name,
+                                      item_type='library' )
+            self.purge_library( self.security.encode_id( library.id ), library.name )
+        ##################
+        # Make sure all users are associated only with their private roles
+        ##################
+        for user in [ admin_user, regular_user1, regular_user2, regular_user3 ]:
+            refresh( user )
+            if len( user.roles ) != 1:
+                raise AssertionError( '%d UserRoleAssociations are associated with %s ( should be 1 )' % ( len( user.roles ), user.email ) )
+        self.logout()
diff -r e39c9a2a0b4c -r 48e83411aa91 test/functional/test_library_security.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/test_library_security.py	Fri Mar 12 16:11:26 2010 -0500
@@ -0,0 +1,603 @@
+from base.twilltestcase import *
+from base.test_db_util import *
+
+class TestLibrarySecurity( TwillTestCase ):
+    def test_000_initiate_users( self ):
+        """Ensuring all required user accounts exist"""
+        self.logout()
+        self.login( email='test1(a)bx.psu.edu' )
+        global regular_user1
+        regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+        assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+        global regular_user1_private_role
+        regular_user1_private_role = get_private_role( regular_user1 )
+        self.logout()
+        self.login( email='test2(a)bx.psu.edu' )
+        global regular_user2
+        regular_user2 = get_user( 'test2(a)bx.psu.edu' )
+        assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+        global regular_user2_private_role
+        regular_user2_private_role = get_private_role( regular_user2 )
+        self.logout()
+        self.login( email='test3(a)bx.psu.edu' )
+        global regular_user3
+        regular_user3 = get_user( 'test3(a)bx.psu.edu' )
+        assert regular_user3 is not None, 'Problem retrieving user with email "test3(a)bx.psu.edu" from the database'
+        global regular_user3_private_role
+        regular_user3_private_role = get_private_role( regular_user3 )
+        self.logout()
+        self.login( email='test(a)bx.psu.edu' )
+        global admin_user
+        admin_user = get_user( 'test(a)bx.psu.edu' )
+        assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+        global admin_user_private_role
+        admin_user_private_role = get_private_role( admin_user )
+    def test_005_create_required_groups_and_roles( self ):
+        """Testing creating all required groups and roles for this script"""
+        # Logged in as admin_user
+        # Create role_one
+        name = 'library security Role One'
+        description = "library security This is Role One's description"
+        user_ids = [ str( admin_user.id ), str( regular_user1.id ), str( regular_user3.id ) ]
+        self.create_role( name=name,
+                          description=description,
+                          in_user_ids=user_ids,
+                          in_group_ids=[],
+                          create_group_for_role='no',
+                          private_role=admin_user.email )
+        # Get the role object for later tests
+        global role_one
+        role_one = get_role_by_name( name )
+        # Create group_one
+        name = 'Group One'
+        self.create_group( name=name, in_user_ids=[ str( regular_user1.id ) ], in_role_ids=[ str( role_one.id ) ] )
+        # Get the group object for later tests
+        global group_one
+        group_one = get_group_by_name( name )
+        assert group_one is not None, 'Problem retrieving group named "Group One" from the database'
+        # NOTE: To get this to work with twill, all select lists on the ~/admin/role page must contain at least
+        # 1 option value or twill throws an exception, which is: ParseError: OPTION outside of SELECT
+        # Due to this bug in twill, after we create the role we bypass the page and visit the URL directly
+        # in the associate_users_and_groups_with_role() method.
+        #
+        # Create role_two
+        name = 'library security Role Two'
+        description = 'library security This is Role Two'
+        user_ids = [ str( admin_user.id ) ]
+        group_ids = [ str( group_one.id ) ]
+        private_role = admin_user.email
+        self.create_role( name=name,
+                          description=description,
+                          in_user_ids=user_ids,
+                          in_group_ids=group_ids,
+                          private_role=private_role )
+        # Get the role object for later tests
+        global role_two
+        role_two = get_role_by_name( name )
+        assert role_two is not None, 'Problem retrieving role named "Role Two" from the database'
+    def test_010_create_library( self ):
+        """Testing creating a new library, then renaming it"""
+        # Logged in as admin_user
+        name = "library security Library1"
+        description = "library security Library1 description"
+        synopsis = "library security Library1 synopsis"
+        self.create_library( name=name, description=description, synopsis=synopsis )
+        # Get the library object for later tests
+        global library_one
+        library_one = get_library( name, description, synopsis )
+        assert library_one is not None, 'Problem retrieving library named "%s" from the database' % name
+        # Make sure library_one is public
+        assert 'access library' not in [ a.action for a in library_one.actions ], 'Library %s is not public when first created' % library_one.name
+        # Set permissions on the library for later testing.
+        permissions_in = [ k for k, v in galaxy.model.Library.permitted_actions.items() ]
+        permissions_out = []
+        # Role one members are: admin_user, regular_user1, regular_user3.  Each of these users will be permitted for
+        # LIBRARY_ACCESS, LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE on this library and its contents.
+        self.library_permissions( self.security.encode_id( library_one.id ),
+                                  library_one.name,
+                                  str( role_one.id ),
+                                  permissions_in,
+                                  permissions_out )
+        # Make sure the library is accessible by admin_user
+        self.visit_url( '%s/library/browse_libraries' % self.url )
+        self.check_page_for_string( library_one.name )
+        # Make sure the library is not accessible by regular_user2 since regular_user2 does not have Role1.
+        self.logout()
+        self.login( email=regular_user2.email )
+        self.visit_url( '%s/library/browse_libraries' % self.url )
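+        # check_page_for_string() raises an AssertionError when the string is not found, which is what we expect here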
+        try:
+            self.check_page_for_string( library_one.name )
+        except:
+            pass
+        else:
+            raise AssertionError( 'Library %s is accessible by %s when it should be restricted' % ( library_one.name, regular_user2.email ) )
+        self.logout()
+        self.login( email=admin_user.email )
+    def test_015_add_new_folder_to_root_folder( self ):
+        """Testing adding a folder to a library root folder"""
+        # logged in as admin_user
+        root_folder = library_one.root_folder
+        name = "Root Folder's Folder One"
+        description = "This is the root folder's Folder One"
+        self.add_folder( 'library_admin',
+                         self.security.encode_id( library_one.id ),
+                         self.security.encode_id( root_folder.id ),
+                         name=name,
+                         description=description )
+        global folder_one
+        folder_one = get_folder( root_folder.id, name, description )
+        assert folder_one is not None, 'Problem retrieving library folder named "%s" from the database' % name
+    def test_020_add_dataset_with_private_role_restriction_to_folder( self ):
+        """Testing adding a dataset with a private role restriction to a folder"""
+        # Logged in as admin_user
+        #
+        # Keep in mind that LIBRARY_ACCESS = "Role One" on the whole library
+        #
+        # Add a dataset restricted by the following:
+        # DATASET_MANAGE_PERMISSIONS = "test(a)bx.psu.edu" via DefaultUserPermissions
+        # DATASET_ACCESS = "regular_user1" private role via this test method
+        # LIBRARY_ADD = "Role One" via inheritance from parent folder
+        # LIBRARY_MODIFY = "Role One" via inheritance from parent folder
+        # LIBRARY_MANAGE = "Role One" via inheritance from parent folder
+        # "Role One" members are: test(a)bx.psu.edu, test1(a)bx.psu.edu, test3(a)bx.psu.edu
+        # This means that only user test1(a)bx.psu.edu can see the dataset from the Libraries view
+        message = 'This is a test of the fourth dataset uploaded'
+        self.add_library_dataset( 'library_admin',
+                                  '1.bed',
+                                  self.security.encode_id( library_one.id ),
+                                  self.security.encode_id( folder_one.id ),
+                                  folder_one.name,
+                                  file_type='bed',
+                                  dbkey='hg18',
+                                  roles=[ str( regular_user1_private_role.id ) ],
+                                  message=message.replace( ' ', '+' ),
+                                  root=False )
+        global ldda_one
+        ldda_one = get_latest_ldda()
+        assert ldda_one is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_one from the database'
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             check_str1='1.bed',
+                             check_str2=message,
+                             check_str3=admin_user.email )
+    def test_025_accessing_dataset_with_private_role_restriction( self ):
+        """Testing accessing a dataset with a private role restriction"""
+        # Logged in as admin_user
+        #
+        # Keep in mind that LIBRARY_ACCESS = "Role One" on the whole library
+        # Role one members are: admin_user, regular_user1, regular_user3.  Each of these users will be permitted for
+        # LIBRARY_ACCESS, LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE on this library and its contents.
+        #
+        # Legitimate roles displayed on the permission form are as follows:
+        # 'Role One' since the LIBRARY_ACCESS permission is associated with Role One.  Role One members are: admin_user, regular_user1, regular_user3.
+        # 'test(a)bx.psu.edu' ( admin_user's private role ) since admin_user has Role One
+        # 'Role Two' since admin_user has Role Two
+        # 'Role Three' since admin_user has Role Three
+        # 'test1(a)bx.psu.edu' ( regular_user1's private role ) since regular_user1 has Role One
+        # 'test3(a)bx.psu.edu' ( regular_user3's private role ) since regular_user3 has Role One
+        #
+        # admin_user should not be able to see 1.bed from the analysis view's access libraries
+        self.browse_library( 'library',
+                              self.security.encode_id( library_one.id ),
+                              not_displayed=folder_one.name,
+                              not_displayed2='1.bed' )
+        self.logout()
+        # regular_user1 should be able to see 1.bed from the analysis view's access libraries
+        # since it was associated with regular_user1's private role
+        self.login( email=regular_user1.email )
+        self.browse_library( 'library',
+                              self.security.encode_id( library_one.id ),
+                              check_str1=folder_one.name,
+                              check_str2='1.bed' )
+        self.logout()
+        # regular_user2 should not be able to see the library since she does not have
+        # Role One, which is associated with the LIBRARY_ACCESS permission
+        self.login( email=regular_user2.email )
+        self.browse_libraries_regular_user( check_str1="You are not authorized to access any libraries" )
+        self.logout()
+        # regular_user3 should not be able to see 1.bed from the analysis view's access libraries
+        self.login( email=regular_user3.email )
+        self.browse_library( 'library',
+                             self.security.encode_id( library_one.id ),
+                             not_displayed=folder_one.name,
+                             not_displayed2='1.bed' )
+        self.logout()
+        self.login( email=admin_user.email )
+    def test_030_change_dataset_access_permission( self ):
+        """Testing changing the access permission on a dataset with a private role restriction"""
+        # Logged in as admin_user
+        # We need admin_user to be able to access 1.bed
+        permissions_in = [ k for k, v in galaxy.model.Dataset.permitted_actions.items() ]
+        for k, v in galaxy.model.Library.permitted_actions.items():
+            if k != 'LIBRARY_ACCESS':
+                permissions_in.append( k )
+        permissions_out = []
+        # Attempt to associate multiple roles with the library dataset, with one of the
+        # roles being private.
+        role_ids_str = '%s,%s' % ( str( role_one.id ), str( admin_user_private_role.id ) )
+        check_str = "At least 1 user must have every role associated with accessing datasets.  "
+        check_str += "Since you are associating more than 1 role, no private roles are allowed."
+        self.ldda_permissions( 'library_admin',
+                                self.security.encode_id( library_one.id ),
+                                self.security.encode_id( folder_one.id ),
+                                self.security.encode_id( ldda_one.id ),
+                                role_ids_str,
+                                permissions_in,
+                                permissions_out,
+                                check_str1=check_str )
+        role_ids_str = str( role_one.id )
+        self.ldda_permissions( 'library_admin',
+                                self.security.encode_id( library_one.id ),
+                                self.security.encode_id( folder_one.id ),
+                                self.security.encode_id( ldda_one.id ),
+                                role_ids_str,
+                                permissions_in,
+                                permissions_out )
+        # admin_user should now be able to see 1.bed from the analysis view's access libraries
+        self.browse_library( 'library',
+                             self.security.encode_id( library_one.id ),
+                             check_str1=ldda_one.name )
+    def test_035_add_dataset_with_role_associated_with_group_and_users( self ):
+        """Testing adding a dataset with a role that is associated with a group and users"""
+        # Logged in as admin_user
+        # Add a dataset restricted by role_two, which is currently associated as follows:
+        # groups: group_one
+        # users: test(a)bx.psu.edu, test1(a)bx.psu.edu via group_one
+        #
+        # We first need to make library_one public
+        permissions_in = []
+        for k, v in galaxy.model.Library.permitted_actions.items():
+            if k != 'LIBRARY_ACCESS':
+                permissions_in.append( k )
+        permissions_out = []
+        # Role one members are: admin_user, regular_user1, regular_user3.  Each of these users will now be permitted for
+        # LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE on this library and its contents.  The library will be public from
+        # this point on.
+        self.library_permissions( self.security.encode_id( library_one.id ),
+                                  library_one.name,
+                                  str( role_one.id ),
+                                  permissions_in,
+                                  permissions_out )
+        refresh( library_one )
+        message = 'Testing adding a dataset with a role that is associated with a group and users'
+        self.add_library_dataset( 'library_admin',
+                                  '2.bed',
+                                  self.security.encode_id( library_one.id ),
+                                  self.security.encode_id( folder_one.id ),
+                                  folder_one.name,
+                                  file_type='bed',
+                                  dbkey='hg17',
+                                  roles=[ str( role_two.id ) ],
+                                  message=message.replace( ' ', '+' ),
+                                  root=False )
+        global ldda_two
+        ldda_two = get_latest_ldda()
+        assert ldda_two is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_two from the database'
+        self.browse_library( 'library',
+                             self.security.encode_id( library_one.id ),
+                             check_str1='2.bed',
+                             check_str2=message,
+                             check_str3=admin_user.email )
+    def test_040_accessing_dataset_with_role_associated_with_group_and_users( self ):
+        """Testing accessing a dataset with a role that is associated with a group and users"""
+        # Logged in as admin_user
+        # admin_user should be able to see 2.bed since she is associated with role_two
+        self.browse_library( 'library',
+                             self.security.encode_id( library_one.id ),
+                             check_str1='2.bed',
+                             check_str2=admin_user.email )
+        self.logout()
+        # regular_user1 should be able to see 2.bed since she is a member of group_one, which is associated with role_two
+        self.login( email = 'test1(a)bx.psu.edu' )
+        self.browse_library( 'library',
+                             self.security.encode_id( library_one.id ),
+                             check_str1=folder_one.name,
+                             check_str2='2.bed',
+                             check_str3=admin_user.email )
+        # Check the permissions on the dataset 2.bed - they are as follows:
+        # DATASET_MANAGE_PERMISSIONS = test(a)bx.psu.edu
+        # DATASET_ACCESS = Role Two
+        #                  Role Two associations: test(a)bx.psu.edu and Group One
+        #                  Group One members: Role One, Role Two, test1(a)bx.psu.edu
+        #                  Role One associations: test(a)bx.psu.edu, test1(a)bx.psu.edu, test3(a)bx.psu.edu
+        # LIBRARY_ADD = Role One
+        #               Role One associations: test(a)bx.psu.edu, test1(a)bx.psu.edu, test3(a)bx.psu.edu
+        # LIBRARY_MODIFY = Role One
+        #                  Role One associations: test(a)bx.psu.edu, test1(a)bx.psu.edu, test3(a)bx.psu.edu
+        # LIBRARY_MANAGE = Role One
+        #                  Role One associations: test(a)bx.psu.edu, test1(a)bx.psu.edu, test3(a)bx.psu.edu
+        self.ldda_edit_info( 'library',
+                             self.security.encode_id( library_one.id ),
+                             self.security.encode_id( folder_one.id ),
+                             self.security.encode_id( ldda_two.id ),
+                             ldda_two.name,
+                             check_str1='2.bed',
+                             check_str2='This is the latest version of this library dataset',
+                             check_str3='Edit attributes of 2.bed' )
+        self.act_on_multiple_datasets( 'library',
+                                       self.security.encode_id( library_one.id ),
+                                       'import_to_history',
+                                       ldda_ids=self.security.encode_id( ldda_two.id ),
+                                       check_str1='1 dataset(s) have been imported into your history' )
+        self.logout()
+        # regular_user2 should not be able to see 2.bed
+        self.login( email = 'test2(a)bx.psu.edu' )
+        self.browse_library( 'library',
+                             self.security.encode_id( library_one.id ),
+                             not_displayed=folder_one.name,
+                             not_displayed2='2.bed' )
+        
+        self.logout()
+        # regular_user3 should be able to see folder_one ( even though it does not contain any datasets that she
+        # can access ) since she has Role One, and Role One has all library permissions ( see above ).
+        self.login( email = 'test3(a)bx.psu.edu' )
+        self.browse_library( 'library',
+                             self.security.encode_id( library_one.id ),
+                             check_str1=folder_one.name,
+                             not_displayed='2.bed' )
+        self.logout()
+        self.login( email='test(a)bx.psu.edu' )
+    def test_045_upload_directory_of_files_from_admin_view( self ):
+        """Testing uploading a directory of files to a root folder from the Admin view"""
+        # logged in as admin_user
+        message = 'This is a test for uploading a directory of files'
+        check_str_after_submit="Added 3 datasets to the library '%s' (each is selected)." % library_one.root_folder.name
+        self.upload_directory_of_files( 'library_admin',
+                                        self.security.encode_id( library_one.id ),
+                                        self.security.encode_id( library_one.root_folder.id ),
+                                        server_dir='library',
+                                        message=message,
+                                        check_str_after_submit=check_str_after_submit )
+        self.browse_library( 'library_admin',
+                             self.security.encode_id( library_one.id ),
+                             check_str1=admin_user.email,
+                             check_str2=message )
+    def test_050_change_permissions_on_datasets_uploaded_from_library_dir( self ):
+        """Testing changing the permissions on datasets uploaded from a directory from the Admin view"""
+        # logged in as admin_user
+        # It would be nice if twill functioned such that the above test resulted in a
+        # form with the uploaded datasets selected, but it does not ( they're not checked ),
+        # so we'll have to simulate this behavior ( not ideal ) for the 'edit' action.  We
+        # first need to get the ldda.id for the 3 new datasets
+        latest_3_lddas = get_latest_lddas( 3 )
+        ldda_ids = ''
+        for ldda in latest_3_lddas:
+            ldda_ids += '%s,' % self.security.encode_id( ldda.id )
+        ldda_ids = ldda_ids.rstrip( ',' )
+        # Set permissions
+        self.ldda_permissions( 'library_admin',
+                               self.security.encode_id( library_one.id ),
+                               self.security.encode_id( folder_one.id ),
+                               ldda_ids,
+                               str( role_one.id ),
+                               permissions_in=[ 'DATASET_ACCESS', 'LIBRARY_MANAGE' ],
+                               check_str1='Permissions have been updated on 3 datasets' )
+        # Make sure the permissions have been correctly updated for the 3 datasets.  The same permissions should
+        # apply to any of the 3 datasets that are imported into a history.
+        def check_edit_page( lddas, check_str1='', check_str2='', check_str3='', check_str4='',
+                             not_displayed1='', not_displayed2='', not_displayed3='' ):
+            for ldda in lddas:
+                # Import each library dataset into our history
+                self.act_on_multiple_datasets( 'library',
+                                               self.security.encode_id( library_one.id ),
+                                               'import_to_history',
+                                               ldda_ids=self.security.encode_id( ldda.id ) )
+                # Determine the new HistoryDatasetAssociation id created when the library dataset was imported into our history
+                last_hda_created = get_latest_hda()            
+                self.edit_hda_attribute_info( str( last_hda_created.id ),
+                                              check_str1=check_str1,
+                                              check_str2=check_str2,
+                                              check_str3=check_str3,
+                                              check_str4=check_str4 )
+        # admin_user is associated with role_one, so should have all permissions on imported datasets
+        check_edit_page( latest_3_lddas,
+                         check_str1='Manage dataset permissions on',
+                         check_str2='Role members can manage the roles associated with permissions on this dataset',
+                         check_str3='Role members can import this dataset into their history for analysis' )
+        self.logout()
+        # regular_user1 is associated with role_one, so should have all permissions on imported datasets
+        self.login( email='test1(a)bx.psu.edu' )
+        check_edit_page( latest_3_lddas )
+        self.logout()
+        # Since regular_user2 is not associated with role_one, she should not have
+        # access to any of the 3 datasets, so she will not see folder_one on the libraries page
+        self.login( email='test2(a)bx.psu.edu' )        
+        self.browse_library( 'library',
+                             self.security.encode_id( library_one.id ),
+                             not_displayed=folder_one.name )
+        self.logout()
+        # regular_user3 is associated with role_one, so should have all permissions on imported datasets
+        self.login( email='test3(a)bx.psu.edu' )
+        check_edit_page( latest_3_lddas )
+        self.logout()
+        self.login( email='test(a)bx.psu.edu' )
+        # Change the permissions and test again
+        self.ldda_permissions( 'library_admin',
+                               self.security.encode_id( library_one.id ),
+                               self.security.encode_id( folder_one.id ),
+                               ldda_ids,
+                               str( role_one.id ),
+                               permissions_in=[ 'DATASET_ACCESS' ],
+                               check_str1='Permissions have been updated on 3 datasets' )
+        check_edit_page( latest_3_lddas,
+                         check_str1='View Permissions',
+                         not_displayed1='Manage dataset permissions on',
+                         not_displayed2='Role members can manage roles associated with permissions on this library item',
+                         not_displayed3='Role members can import this dataset into their history for analysis' )
+    def test_055_library_permissions( self ):
+        """Test library permissions"""
+        # Logged in as admin_user
+        form_name = 'Library template Form One'
+        form_desc = 'This is Form One'
+        form_type = galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+        # Create form for library template
+        self.create_form( name=form_name, desc=form_desc, formtype=form_type )
+        global form_one
+        form_one = get_form( form_name )
+        assert form_one is not None, 'Problem retrieving form named (%s) from the database' % form_name
+        # Make sure the template fields are displayed on the library information page
+        field_dict = form_one.fields[ 0 ]
+        global form_one_field_label
+        form_one_field_label = '%s' % str( field_dict.get( 'label', 'Field 0' ) )
+        global form_one_field_help
+        form_one_field_help = '%s' % str( field_dict.get( 'helptext', 'Field 0 help' ) )
+        global form_one_field_required
+        form_one_field_required = '%s' % str( field_dict.get( 'required', 'optional' ) ).capitalize()
+        # Add information to the library using the template
+        global form_one_field_name
+        form_one_field_name = 'field_0'
+        # Create a library, adding no template
+        name = "library security Library Two"
+        description = "library security This is Library Two"
+        synopsis = "library security Library Two synopsis"
+        self.create_library( name=name, description=description, synopsis=synopsis )
+        self.browse_libraries_admin( check_str1=name, check_str2=description )
+        global library_two
+        library_two = get_library( name, description, synopsis )
+        assert library_two is not None, 'Problem retrieving library named "%s" from the database' % name
+        # Set library permissions for regular_user1 and regular_user2.  Each of these users will be permitted for
+        # LIBRARY_ACCESS, LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE on library items.
+        permissions_in = [ k for k, v in galaxy.model.Library.permitted_actions.items() ]
+        permissions_out = []
+        role_ids_str = '%s,%s' % ( str( regular_user1_private_role.id ), str( regular_user2_private_role.id ) )
+        self.library_permissions( self.security.encode_id( library_two.id ),
+                                  library_two.name,
+                                  role_ids_str,
+                                  permissions_in,
+                                  permissions_out )
+        self.logout()
+        # Login as regular_user1 and make sure they can see the library
+        self.login( email=regular_user1.email )
+        self.browse_libraries_regular_user( check_str1=name )
+        self.logout()
+        # Login as regular_user2 and make sure they can see the library
+        self.login( email=regular_user2.email )
+        self.browse_libraries_regular_user( check_str1=name )
+        # Add a dataset to the library
+        message = 'Testing adding 1.bed to Library Two root folder'
+        self.add_library_dataset( 'library',
+                                  '1.bed',
+                                  self.security.encode_id( library_two.id ),
+                                  self.security.encode_id( library_two.root_folder.id ),
+                                  library_two.root_folder.name,
+                                  file_type='bed',
+                                  dbkey='hg18',
+                                  message=message,
+                                  root=True )
+        # Add a folder to the library
+        name = "Root Folder's Folder X"
+        description = "This is the root folder's Folder X"
+        self.add_folder( 'library',
+                         self.security.encode_id( library_two.id ),
+                         self.security.encode_id( library_two.root_folder.id ), 
+                         name=name,
+                         description=description )
+        global folder_x
+        folder_x = get_folder( library_two.root_folder.id, name, description )
+        # Add an information template to the folder
+        template_name = 'Folder Template 1'
+        self.add_library_template( 'library',
+                                   'folder',
+                                   self.security.encode_id( library_one.id ),
+                                   self.security.encode_id( form_one.id ),
+                                   form_one.name,
+                                   folder_id=self.security.encode_id( folder_x.id ) )
+        # Modify the folder's information
+        contents = '%s folder contents' % form_one_field_label
+        new_name = "Root Folder's Folder Y"
+        new_description = "This is the root folder's Folder Y"
+        self.folder_info( 'library',
+                          self.security.encode_id( folder_x.id ),
+                          self.security.encode_id( library_two.id ),
+                          name,
+                          new_name,
+                          new_description,
+                          contents=contents,
+                          field_name=form_one_field_name )
+        # Twill barfs when self.check_page_for_string() is called after dealing with an information template;
+        # the exception is: TypeError: 'str' object is not callable
+        # The work-around is to end this method here so that any such calls happen in the next method.
+    def test_060_template_features_and_permissions( self ):
+        """Test library template and more permissions behavior from the Data Libraries view"""
+        # Logged in as regular_user2
+        refresh( folder_x )
+        # Add a dataset to the folder
+        message = 'Testing adding 2.bed to Library Three root folder'
+        self.add_library_dataset( 'library',
+                                  '2.bed',
+                                  self.security.encode_id( library_two.id ),
+                                  self.security.encode_id( folder_x.id ),
+                                  folder_x.name,
+                                  file_type='bed',
+                                  dbkey='hg18',
+                                  message=message.replace( ' ', '+' ),
+                                  root=False )
+        global ldda_x
+        ldda_x = get_latest_ldda()
+        assert ldda_x is not None, 'Problem retrieving ldda_x from the database'
+        # Add an information template to the library
+        template_name = 'Library Template 3'
+        self.add_library_template( 'library',
+                                   'library',
+                                   self.security.encode_id( library_two.id ),
+                                   self.security.encode_id( form_one.id ),
+                                   form_one.name )
+        # Add information to the library using the template
+        contents = '%s library contents' % form_one_field_label
+        self.visit_url( '%s/library_common/library_info?cntrller=library&id=%s' % ( self.url, self.security.encode_id( library_two.id ) ) )
+        # There are 2 forms on this page and the template is the 2nd form
+        tc.fv( '2', form_one_field_name, contents )
+        tc.submit( 'edit_info_button' )
+        # For some reason, the following check:
+        # self.check_page_for_string ( 'The information has been updated.' )
+        # ...throws the following exception - I have no idea why!
+        # TypeError: 'str' object is not callable
+        # The work-around is to not make ANY self.check_page_for_string() calls until the next method
+    def test_065_permissions_as_different_regular_user( self ):
+        """Test library template and more permissions behavior from the Data Libraries view as a different user"""
+        # Logged in as regular_user2
+        self.logout()
+        self.login( email=regular_user1.email )
+        self.browse_library( 'library',
+                             self.security.encode_id( library_two.id ),
+                             check_str1=ldda_x.name )
+    def test_999_reset_data_for_later_test_runs( self ):
+        """Reseting data to enable later test runs to pass"""
+        # Logged in as regular_user1
+        self.logout()
+        self.login( email=admin_user.email )
+        ##################
+        # Purge all libraries
+        ##################
+        for library in [ library_one, library_two ]:
+            self.delete_library_item( self.security.encode_id( library.id ),
+                                      self.security.encode_id( library.id ),
+                                      library.name,
+                                      item_type='library' )
+            self.purge_library( self.security.encode_id( library.id ), library.name )
+        ##################
+        # Eliminate all non-private roles
+        ##################
+        for role in [ role_one, role_two ]:
+            self.mark_role_deleted( self.security.encode_id( role.id ), role.name )
+            self.purge_role( self.security.encode_id( role.id ), role.name )
+            # Manually delete the role from the database
+            refresh( role )
+            sa_session.delete( role )
+            sa_session.flush()
+        ##################
+        # Eliminate all groups
+        ##################
+        for group in [ group_one ]:
+            self.mark_group_deleted( self.security.encode_id( group.id ), group.name )
+            self.purge_group( self.security.encode_id( group.id ), group.name )
+            # Manually delete the group from the database
+            refresh( group )
+            sa_session.delete( group )
+            sa_session.flush()
+        ##################
+        # Make sure all users are associated only with their private roles
+        ##################
+        for user in [ admin_user, regular_user1, regular_user2, regular_user3 ]:
+            refresh( user )
+            if len( user.roles) != 1:
+                raise AssertionError( '%d UserRoleAssociations are associated with %s ( should be 1 )' % ( len( user.roles ), user.email ) )
diff -r e39c9a2a0b4c -r 48e83411aa91 test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py	Fri Mar 12 14:27:04 2010 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2141 +0,0 @@
-import galaxy.model
-from galaxy.model.orm import *
-from galaxy.model.mapping import context as sa_session
-from base.twilltestcase import *
-
-not_logged_in_security_msg = 'You must be logged in as an administrator to access this feature.'
-logged_in_security_msg = 'You must be an administrator to access this feature.'
-
-import sys
-class TestSecurityAndLibraries( TwillTestCase ):
-    def test_000_admin_features_when_not_logged_in( self ):
-        """Testing admin_features when not logged in"""
-        self.logout()
-        self.visit_url( "%s/admin" % self.url )
-        self.check_page_for_string( not_logged_in_security_msg )
-        self.visit_url( "%s/admin/reload_tool?tool_id=upload1" % self.url )
-        self.check_page_for_string( not_logged_in_security_msg )
-        self.visit_url( "%s/admin/roles" % self.url )
-        self.check_page_for_string( not_logged_in_security_msg )
-        self.visit_url( "%s/admin/create_role" % self.url )
-        self.check_page_for_string( not_logged_in_security_msg )
-        self.visit_url( "%s/admin/create_role" % self.url )
-        self.check_page_for_string( not_logged_in_security_msg )
-        self.visit_url( "%s/admin/manage_users_and_groups_for_role" % self.url )
-        self.check_page_for_string( not_logged_in_security_msg )
-        self.visit_url( "%s/admin/groups" % self.url )
-        self.check_page_for_string( not_logged_in_security_msg )
-        self.visit_url( "%s/admin/create_group" % self.url )
-        self.check_page_for_string( not_logged_in_security_msg )
-        self.check_page_for_string( not_logged_in_security_msg )
-        self.visit_url( "%s/admin/users" % self.url )
-        self.check_page_for_string( not_logged_in_security_msg )
-    def test_005_login_as_admin_user( self ):
-        """Testing logging in as an admin user test(a)bx.psu.edu - tests initial settings for DefaultUserPermissions and DefaultHistoryPermissions"""
-        self.login( email='test(a)bx.psu.edu' ) # test(a)bx.psu.edu is configured as our admin user
-        self.visit_page( "admin" )
-        self.check_page_for_string( 'Administration' )
-        global admin_user
-        admin_user = sa_session.query( galaxy.model.User ) \
-                               .filter( galaxy.model.User.table.c.email=='test(a)bx.psu.edu' ) \
-                               .first()
-        assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
-        # Get the admin user's private role for later use
-        global admin_user_private_role
-        admin_user_private_role = None
-        for role in admin_user.all_roles():
-            if role.name == admin_user.email and role.description == 'Private Role for %s' % admin_user.email:
-                admin_user_private_role = role
-                break
-        if not admin_user_private_role:
-            raise AssertionError( "Private role not found for user '%s'" % admin_user.email )
-        # Make sure DefaultUserPermissions are correct
-        if len( admin_user.default_permissions ) > 1:
-            raise AssertionError( '%d DefaultUserPermissions associated with user %s ( should be 1 )' \
-                                  % ( len( admin_user.default_permissions ), admin_user.email ) )
-        dup = sa_session.query( galaxy.model.DefaultUserPermissions ) \
-                         .filter( galaxy.model.DefaultUserPermissions.table.c.user_id==admin_user.id ) \
-                         .first()
-        if not dup.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
-            raise AssertionError( 'The DefaultUserPermission.action for user "%s" is "%s", but it should be "%s"' \
-                                  % ( admin_user.email, dup.action, galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
-        # Make sure DefaultHistoryPermissions are correct
-        # Logged in as admin_user
-        latest_history = sa_session.query( galaxy.model.History ) \
-                                   .filter( and_( galaxy.model.History.table.c.deleted==False,
-                                                  galaxy.model.History.table.c.user_id==admin_user.id ) ) \
-                                   .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
-                                   .first()
-        if len( latest_history.default_permissions ) > 1:
-            raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d when it was created ( should have been 1 )' \
-                                  % ( len( latest_history.default_permissions ), latest_history.id ) )
-        dhp = sa_session.query( galaxy.model.DefaultHistoryPermissions ) \
-                        .filter( galaxy.model.DefaultHistoryPermissions.table.c.history_id==latest_history.id ) \
-                        .first()
-        if not dhp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
-            raise AssertionError( 'The DefaultHistoryPermission.action for history id %d is "%s", but it should be "%s"' \
-                                  % ( latest_history.id, dhp.action, galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
-        self.home()
-        self.visit_url( "%s/admin/manage_roles_and_groups_for_user?id=%s" % ( self.url, self.security.encode_id( admin_user.id ) ) )
-        self.check_page_for_string( admin_user.email )
-        # Try deleting the admin_user's private role
-        check_str = "You cannot eliminate a user's private role association."
-        self.associate_roles_and_groups_with_user( self.security.encode_id( admin_user.id ), admin_user.email,
-                                                   out_role_ids=str( admin_user_private_role.id ),
-                                                   check_str=check_str )
-        self.logout()
-    def test_010_login_as_regular_user1( self ):
-        """Testing logging in as regular user test1(a)bx.psu.edu - tests private role creation and changing DefaultHistoryPermissions for new histories"""
-        # Some of the history related tests here are similar to some tests in the
-        # test_history_functions.py script, so we could potentially eliminate 1 or 2 of them.
-        self.login( email='test1(a)bx.psu.edu' ) # test1(a)bx.psu.edu is not an admin user
-        global regular_user1
-        regular_user1 = sa_session.query( galaxy.model.User ) \
-                                  .filter( galaxy.model.User.table.c.email=='test1(a)bx.psu.edu' ) \
-                                  .first()
-        assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
-        self.visit_page( "admin" )
-        self.check_page_for_string( logged_in_security_msg )
-        # Make sure a private role exists for regular_user1
-        private_role = None
-        for role in regular_user1.all_roles():
-            if role.name == regular_user1.email and role.description == 'Private Role for %s' % regular_user1.email:
-                private_role = role
-                break
-        if not private_role:
-            raise AssertionError( "Private role not found for user '%s'" % regular_user1.email )
-        global regular_user1_private_role
-        regular_user1_private_role = private_role
-        # Add a dataset to the history
-        self.upload_file( '1.bed' )
-        latest_dataset = sa_session.query( galaxy.model.Dataset ) \
-                                   .order_by( desc( galaxy.model.Dataset.table.c.create_time ) ) \
-                                   .first()
-        # Make sure DatasetPermissions is correct - default is 'manage permissions'
-        if len( latest_dataset.actions ) > 1:
-            actions = [ a.action for a in latest_dataset.actions ]
-            raise AssertionError( '%d DatasetPermissions (%s) were created for dataset id %d when it was created ( should have been 1 )' \
-                                  % ( len( latest_dataset.actions ), str( actions ), latest_dataset.id ) )
-        dp = sa_session.query( galaxy.model.DatasetPermissions ) \
-                       .filter( galaxy.model.DatasetPermissions.table.c.dataset_id==latest_dataset.id ) \
-                       .first()
-        if not dp.action:
-            raise AssertionError( 'The Dataset id %d has no associated DatasetPermissions when is should have "manage permissions".' \
-                                  % latest_dataset.id )
-        elif not dp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
-            raise AssertionError( 'The DatasetPermissions.action for dataset id %d is "%s", but it should be "manage permissions"' \
-                                  % ( latest_dataset.id, dp.action ) )
-        # Change DefaultHistoryPermissions for regular_user1
-        permissions_in = []
-        actions_in = []
-        for key, value in galaxy.model.Dataset.permitted_actions.items():
-            # NOTE: setting the 'access' permission with the private role makes this dataset private
-            permissions_in.append( key )
-            actions_in.append( value.action )
-        # Sort actions for later comparison
-        actions_in.sort()
-        role_id = str( private_role.id )
-        self.user_set_default_permissions( permissions_in=permissions_in, role_id=role_id )
-        # Make sure the default permissions are changed for new histories
-        self.new_history()
-        # logged in as regular_user1
-        latest_history = sa_session.query( galaxy.model.History ) \
-                                   .filter( and_( galaxy.model.History.table.c.deleted==False,
-                                                  galaxy.model.History.table.c.user_id==regular_user1.id ) ) \
-                                   .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
-                                   .first()
-        if len( latest_history.default_permissions ) != len( galaxy.model.Dataset.permitted_actions.items() ):
-            raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d, should have been %d' % \
-                                  ( len( latest_history.default_permissions ), latest_history.id, len( galaxy.model.Dataset.permitted_actions.items() ) ) )
-        dhps = []
-        for dhp in latest_history.default_permissions:
-            dhps.append( dhp.action )
-        # Sort permissions for later comparison
-        dhps.sort()
-        for key, value in galaxy.model.Dataset.permitted_actions.items():
-            if value.action not in dhps:
-                raise AssertionError( '%s not in history id %d default_permissions after they were changed' % ( value.action, latest_history.id ) )
-        # Add a dataset to the history
-        self.upload_file( '1.bed' )
-        latest_dataset = sa_session.query( galaxy.model.Dataset ).order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
-        # Make sure DatasetPermissionss are correct
-        if len( latest_dataset.actions ) != len( latest_history.default_permissions ):
-            raise AssertionError( '%d DatasetPermissionss were created for dataset id %d when it was created ( should have been %d )' % \
-                                  ( len( latest_dataset.actions ), latest_dataset.id, len( latest_history.default_permissions ) ) )
-        dps = []
-        for dp in latest_dataset.actions:
-            dps.append( dp.action )
-        # Sort actions for later comparison
-        dps.sort()
-        # Compare DatasetPermissions with permissions_in - should be the same
-        if dps != actions_in:
-            raise AssertionError( 'DatasetPermissionss "%s" for dataset id %d differ from changed default permissions "%s"' \
-                                      % ( str( dps ), latest_dataset.id, str( actions_in ) ) )
-        # Compare DefaultHistoryPermissions and DatasetPermissionss - should be the same
-        if dps != dhps:
-                raise AssertionError( 'DatasetPermissionss "%s" for dataset id %d differ from DefaultHistoryPermissions "%s" for history id %d' \
-                                      % ( str( dps ), latest_dataset.id, str( dhps ), latest_history.id ) )
-        self.logout()
-
-    def test_015_login_as_regular_user2( self ):
-        """Testing logging in as regular user test2(a)bx.psu.edu - tests changing DefaultHistoryPermissions for the current history"""
-        email = 'test2(a)bx.psu.edu'
-        self.login( email=email ) # This will not be an admin user
-        global regular_user2
-        regular_user2 = sa_session.query( galaxy.model.User ) \
-                                  .filter( galaxy.model.User.table.c.email==email ) \
-                                  .first()
-        assert regular_user2 is not None, 'Problem retrieving user with email "" from the database' % email
-        # Logged in as regular_user2
-        latest_history = sa_session.query( galaxy.model.History ) \
-                                   .filter( and_( galaxy.model.History.table.c.deleted==False,
-                                                  galaxy.model.History.table.c.user_id==regular_user2.id ) ) \
-                                   .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
-                                   .first()
-        self.upload_file( '1.bed' )
-        latest_dataset = sa_session.query( galaxy.model.Dataset ).order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
-        permissions_in = [ 'DATASET_MANAGE_PERMISSIONS' ]
-        # Make sure these are in sorted order for later comparison
-        actions_in = [ 'manage permissions' ]
-        permissions_out = [ 'DATASET_ACCESS' ]
-        actions_out = [ 'access' ]
-        global regular_user2_private_role
-        regular_user2_private_role = None
-        for role in regular_user2.all_roles():
-            if role.name == regular_user2.email and role.description == 'Private Role for %s' % regular_user2.email:
-                regular_user2_private_role = role
-                break
-        if not regular_user2_private_role:
-            raise AssertionError( "Private role not found for user '%s'" % regular_user2.email )
-        role_id = str( regular_user2_private_role.id )
-        # Change DefaultHistoryPermissions for the current history
-        self.history_set_default_permissions( permissions_out=permissions_out, permissions_in=permissions_in, role_id=role_id )
-        if len( latest_history.default_permissions ) != len( actions_in ):
-            raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d, should have been %d' \
-                                  % ( len( latest_history.default_permissions ), latest_history.id, len( permissions_in ) ) )
-        # Make sure DefaultHistoryPermissions were correctly changed for the current history
-        dhps = []
-        for dhp in latest_history.default_permissions:
-            dhps.append( dhp.action )
-        # Sort permissions for later comparison
-        dhps.sort()
-        # Compare DefaultHistoryPermissions and actions_in - should be the same
-        if dhps != actions_in:
-            raise AssertionError( 'DefaultHistoryPermissions "%s" for history id %d differ from actions "%s" passed for changing' \
-                                      % ( str( dhps ), latest_history.id, str( actions_in ) ) )
-        # Make sure DatasetPermissionss are correct
-        if len( latest_dataset.actions ) != len( latest_history.default_permissions ):
-            raise AssertionError( '%d DatasetPermissionss were created for dataset id %d when it was created ( should have been %d )' \
-                                  % ( len( latest_dataset.actions ), latest_dataset.id, len( latest_history.default_permissions ) ) )
-        dps = []
-        for dp in latest_dataset.actions:
-            dps.append( dp.action )
-        # Sort actions for comparison
-        dps.sort()
-        # Compare DatasetPermissionss and DefaultHistoryPermissions - should be the same
-        if dps != dhps:
-            raise AssertionError( 'DatasetPermissionss "%s" for dataset id %d differ from DefaultHistoryPermissions "%s"' \
-                                      % ( str( dps ), latest_dataset.id, str( dhps ) ) )
-        self.logout()
-    def test_020_create_new_user_account_as_admin( self ):
-        """Testing creating a new user account as admin"""
-        self.login( email=admin_user.email )
-        email = 'test3(a)bx.psu.edu'
-        password = 'testuser'
-        previously_created = self.create_new_account_as_admin( email=email, password=password )
-        # Get the user object for later tests
-        global regular_user3
-        regular_user3 = sa_session.query( galaxy.model.User ).filter( galaxy.model.User.table.c.email==email ).first()
-        assert regular_user3 is not None, 'Problem retrieving user with email "%s" from the database' % email
-        global regular_user3_private_role
-        regular_user3_private_role = None
-        for role in regular_user3.all_roles():
-            if role.name == regular_user3.email and role.description == 'Private Role for %s' % regular_user3.email:
-                regular_user3_private_role = role
-                break
-        if not regular_user3_private_role:
-            raise AssertionError( "Private role not found for user '%s'" % regular_user3.email )
-        # Make sure DefaultUserPermissions were created
-        if not regular_user3.default_permissions:
-            raise AssertionError( 'No DefaultUserPermissions were created for user %s when the admin created the account' % email )
-        # Make sure a private role was created for the user
-        if not regular_user3.roles:
-            raise AssertionError( 'No UserRoleAssociations were created for user %s when the admin created the account' % email )
-        if not previously_created and len( regular_user3.roles ) != 1:
-            raise AssertionError( '%d UserRoleAssociations were created for user %s when the admin created the account ( should have been 1 )' \
-                                  % ( len( regular_user3.roles ), regular_user3.email ) )
-        for ura in regular_user3.roles:
-            role = sa_session.query( galaxy.model.Role ).get( ura.role_id )
-            if not previously_created and role.type != 'private':
-                raise AssertionError( 'Role created for user %s when the admin created the account is not private, type is' \
-                                      % str( role.type ) )
-        if not previously_created:
-            # Make sure a history was not created ( previous test runs may have left deleted histories )
-            histories = sa_session.query( galaxy.model.History ) \
-                                  .filter( and_( galaxy.model.History.table.c.user_id==regular_user3.id,
-                                           galaxy.model.History.table.c.deleted==False ) ) \
-                                  .all()
-            if histories:
-                raise AssertionError( 'Histories were incorrectly created for user %s when the admin created the account' % email )
-            # Make sure the user was not associated with any groups
-            if regular_user3.groups:
-                raise AssertionError( 'Groups were incorrectly associated with user %s when the admin created the account' % email )
-    def test_025_reset_password_as_admin( self ):
-        """Testing reseting a user password as admin"""
-        email = 'test3(a)bx.psu.edu'
-        self.reset_password_as_admin( user_id=self.security.encode_id( regular_user3.id ), password='testreset' )
-        self.logout()
-    def test_030_login_after_password_reset( self ):
-        """Testing logging in after an admin reset a password - tests DefaultHistoryPermissions for accounts created by an admin"""
-        self.login( email='test3(a)bx.psu.edu', password='testreset' )
-        # Make sure a History and HistoryDefaultPermissions exist for the user
-        # Logged in as regular_user3
-        latest_history = sa_session.query( galaxy.model.History ) \
-                                   .filter( and_( galaxy.model.History.table.c.deleted==False,
-                                                  galaxy.model.History.table.c.user_id==regular_user3.id ) ) \
-                                   .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
-                                   .first()
-        if not latest_history.user_id == regular_user3.id:
-            raise AssertionError( 'A history was not created for user %s when he logged in' % email )
-        if not latest_history.default_permissions:
-            raise AssertionError( 'No DefaultHistoryPermissions were created for history id %d when it was created' % latest_history.id )
-        if len( latest_history.default_permissions ) > 1:
-            raise AssertionError( 'More than 1 DefaultHistoryPermissions were created for history id %d when it was created' % latest_history.id )
-        dhp =  sa_session.query( galaxy.model.DefaultHistoryPermissions ) \
-                         .filter( galaxy.model.DefaultHistoryPermissions.table.c.history_id==latest_history.id ) \
-                         .first()
-        if not dhp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
-            raise AssertionError( 'The DefaultHistoryPermission.action for history id %d is "%s", but it should be "manage permissions"' \
-                                  % ( latest_history.id, dhp.action ) )
-        # Upload a file to create a HistoryDatasetAssociation
-        self.upload_file( '1.bed' )
-        latest_dataset = sa_session.query( galaxy.model.Dataset ).order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
-        for dp in latest_dataset.actions:
-            # Should only have 1 DatasetPermissions
-            if dp.action != galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
-                raise AssertionError( 'The DatasetPermissions for dataset id %d is %s ( should have been %s )' \
-                                      % ( latest_dataset.id,
-                                          latest_dataset.actions.action, 
-                                          galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
-        self.logout()
-        # Reset the password to the default for later tests
-        self.login( email='test(a)bx.psu.edu' )
-        self.reset_password_as_admin( user_id=self.security.encode_id( regular_user3.id ), password='testuser' )
-    def test_035_mark_user_deleted( self ):
-        """Testing marking a user account as deleted"""
-        # Logged in as admin_user
-        self.mark_user_deleted( user_id=self.security.encode_id( regular_user3.id ), email=regular_user3.email )
-        # Deleting a user should not delete any associations
-        sa_session.refresh( regular_user3 )
-        if not regular_user3.active_histories:
-            raise AssertionError( 'HistoryDatasetAssociations for regular_user3 were incorrectly deleted when the user was marked deleted' )
-    def test_040_undelete_user( self ):
-        """Testing undeleting a user account"""
-        # Logged in as admin_user
-        self.undelete_user( user_id=self.security.encode_id( regular_user3.id ), email=regular_user3.email )
-    def test_045_create_role( self ):
-        """Testing creating new role with 3 members ( and a new group named the same ), then renaming the role"""
-        # Logged in as admin_user
-        name = 'Role One'
-        description = "This is Role Ones description"
-        user_ids=[ str( admin_user.id ), str( regular_user1.id ), str( regular_user3.id ) ]
-        self.create_role( name=name,
-                          description=description,
-                          in_user_ids=user_ids,
-                          in_group_ids=[],
-                          create_group_for_role='yes',
-                          private_role=admin_user.email )
-        # Get the role object for later tests
-        global role_one
-        role_one = sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name==name ).first()
-        assert role_one is not None, 'Problem retrieving role named "Role One" from the database'
-        # Make sure UserRoleAssociations are correct
-        if len( role_one.users ) != len( user_ids ):
-            raise AssertionError( '%d UserRoleAssociations were created for role id %d when it was created ( should have been %d )' \
-                                  % ( len( role_one.users ), role_one.id, len( user_ids ) ) )
-        # Each of the following users should now have 2 role associations, their private role and role_one
-        for user in [ admin_user, regular_user1, regular_user3 ]:
-            sa_session.refresh( user )
-            if len( user.roles ) != 2:
-                raise AssertionError( '%d UserRoleAssociations are associated with user %s ( should be 2 )' \
-                                      % ( len( user.roles ), user.email ) )
-        # Make sure the group was created
-        self.home()
-        self.visit_page( 'admin/groups' )
-        self.check_page_for_string( name )
-        global group_zero
-        group_zero = sa_session.query( galaxy.model.Group ).filter( galaxy.model.Group.table.c.name==name ).first()
-        # Rename the role
-        rename = "Role One's been Renamed"
-        new_description="This is Role One's Re-described"
-        self.rename_role( self.security.encode_id( role_one.id ), name=rename, description=new_description )
-        self.home()
-        self.visit_page( 'admin/roles' )
-        self.check_page_for_string( rename )
-        self.check_page_for_string( new_description )
-        # Reset the role back to the original name and description
-        self.rename_role( self.security.encode_id( role_one.id ), name=name, description=description )
-    def test_050_create_group( self ):
-        """Testing creating new group with 3 members and 1 associated role, then renaming it"""
-        # Logged in as admin_user
-        name = "Group One's Name"
-        user_ids=[ str( admin_user.id ), str( regular_user1.id ), str( regular_user3.id ) ]
-        role_ids=[ str( role_one.id ) ]
-        self.create_group( name=name, in_user_ids=user_ids, in_role_ids=role_ids )
-        # Get the group object for later tests
-        global group_one
-        group_one = sa_session.query( galaxy.model.Group ).filter( galaxy.model.Group.table.c.name==name ).first()
-        assert group_one is not None, 'Problem retrieving group named "Group One" from the database'
-        # Make sure UserGroupAssociations are correct
-        if len( group_one.users ) != len( user_ids ):
-            raise AssertionError( '%d UserGroupAssociations were created for group id %d when it was created ( should have been %d )' \
-                                  % ( len( group_one.users ), group_one.id, len( user_ids ) ) )
-        # Each user should now have 1 group association, group_one
-        for user in [ admin_user, regular_user1, regular_user3 ]:
-            sa_session.refresh( user )
-            if len( user.groups ) != 1:
-                raise AssertionError( '%d UserGroupAssociations are associated with user %s ( should be 1 )' % ( len( user.groups ), user.email ) )
-        # Make sure GroupRoleAssociations are correct
-        if len( group_one.roles ) != len( role_ids ):
-            raise AssertionError( '%d GroupRoleAssociations were created for group id %d when it was created ( should have been %d )' \
-                                  % ( len( group_one.roles ), group_one.id, len( role_ids ) ) )
-        # Rename the group
-        rename = "Group One's been Renamed"
-        self.rename_group( self.security.encode_id( group_one.id ), name=rename, )
-        self.home()
-        self.visit_page( 'admin/groups' )
-        self.check_page_for_string( rename )
-        # Reset the group back to the original name
-        self.rename_group( self.security.encode_id( group_one.id ), name=name )
-    def test_055_add_members_and_role_to_group( self ):
-        """Testing editing user membership and role associations of an existing group"""
-        # Logged in as admin_user
-        name = 'Group Two'
-        self.create_group( name=name, in_user_ids=[], in_role_ids=[] )
-        # Get the group object for later tests
-        global group_two
-        group_two = sa_session.query( galaxy.model.Group ).filter( galaxy.model.Group.table.c.name==name ).first()
-        assert group_two is not None, 'Problem retrieving group named "Group Two" from the database'
-        # group_two should have no associations
-        if group_two.users:
-            raise AssertionError( '%d UserGroupAssociations were created for group id %d when it was created ( should have been 0 )' \
-                              % ( len( group_two.users ), group_two.id ) )
-        if group_two.roles:
-            raise AssertionError( '%d GroupRoleAssociations were created for group id %d when it was created ( should have been 0 )' \
-                              % ( len( group_two.roles ), group_two.id ) )
-        user_ids = [ str( regular_user1.id )  ]
-        role_ids = [ str( role_one.id ) ]
-        self.associate_users_and_roles_with_group( self.security.encode_id( group_two.id ),
-                                                   group_two.name,
-                                                   user_ids=user_ids,
-                                                   role_ids=role_ids )
-    def test_060_create_role_with_user_and_group_associations( self ):
-        """Testing creating a role with user and group associations"""
-        # Logged in as admin_user
-        # NOTE: To get this to work with twill, all select lists on the ~/admin/role page must contain at least
-        # 1 option value or twill throws an exception, which is: ParseError: OPTION outside of SELECT
-        # Due to this bug in twill, we create the role, we bypass the page and visit the URL in the
-        # associate_users_and_groups_with_role() method.
-        name = 'Role Two'
-        description = 'This is Role Two'
-        user_ids=[ str( admin_user.id ) ]
-        group_ids=[ str( group_two.id ) ]
-        private_role=admin_user.email
-        # Create the role
-        self.create_role( name=name,
-                          description=description,
-                          in_user_ids=user_ids,
-                          in_group_ids=group_ids,
-                          private_role=private_role )
-        # Get the role object for later tests
-        global role_two
-        role_two = sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name==name ).first()
-        assert role_two is not None, 'Problem retrieving role named "Role Two" from the database'
-        # Make sure UserRoleAssociations are correct
-        if len( role_two.users ) != len( user_ids ):
-            raise AssertionError( '%d UserRoleAssociations were created for role id %d when it was created with %d members' \
-                                  % ( len( role_two.users ), role_two.id, len( user_ids ) ) )
-        # admin_user should now have 3 role associations, private role, role_one, role_two
-        sa_session.refresh( admin_user )
-        if len( admin_user.roles ) != 3:
-            raise AssertionError( '%d UserRoleAssociations are associated with user %s ( should be 3 )' % ( len( admin_user.roles ), admin_user.email ) )
-        # Make sure GroupRoleAssociations are correct
-        sa_session.refresh( role_two )
-        if len( role_two.groups ) != len( group_ids ):
-            raise AssertionError( '%d GroupRoleAssociations were created for role id %d when it was created ( should have been %d )' \
-                                  % ( len( role_two.groups ), role_two.id, len( group_ids ) ) )
-        # group_two should now be associated with 2 roles: role_one, role_two
-        sa_session.refresh( group_two )
-        if len( group_two.roles ) != 2:
-            raise AssertionError( '%d GroupRoleAssociations are associated with group id %d ( should be 2 )' % ( len( group_two.roles ), group_two.id ) )
-    def test_065_change_user_role_associations( self ):
-        """Testing changing roles associated with a user"""
-        # Logged in as admin_user
-        # Create a new role with no associations
-        name = 'Role Three'
-        description = 'This is Role Three'
-        user_ids=[]
-        group_ids=[]
-        private_role=admin_user.email
-        self.create_role( name=name,
-                          description=description,
-                          in_user_ids=user_ids,
-                          in_group_ids=group_ids,
-                          private_role=private_role )
-        # Get the role object for later tests
-        global role_three
-        role_three = sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name==name ).first()
-        assert role_three is not None, 'Problem retrieving role named "Role Three" from the database'
-        # Associate the role with a user
-        sa_session.refresh( admin_user )
-        role_ids = []
-        for ura in admin_user.non_private_roles:
-            role_ids.append( str( ura.role_id ) )
-        role_ids.append( str( role_three.id ) )
-        group_ids = []
-        for uga in admin_user.groups:
-            group_ids.append( str( uga.group_id ) )
-        check_str = "User '%s' has been updated with %d associated roles and %d associated groups" % ( admin_user.email, len( role_ids ), len( group_ids ) )
-        self.associate_roles_and_groups_with_user( self.security.encode_id( admin_user.id ),
-                                                   str( admin_user.email ),
                    
                  
                  
                          
                            
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/e39c9a2a0b4c
changeset: 3527:e39c9a2a0b4c
user:      jeremy goecks <jeremy.goecks(a)emory.edu>
date:      Fri Mar 12 14:27:04 2010 -0500
description:
UI Improvements:
-Make it possible to show messages in frames.
-Improve import interactions by showing messages in frames and providing useful links.
-Add action icons to display framework.
diffstat:
 lib/galaxy/web/controllers/dataset.py       |  16 +++++++++---
 lib/galaxy/web/controllers/history.py       |  35 ++++++++++++++++++---------
 lib/galaxy/web/controllers/workflow.py      |  22 +++++++++++++----
 lib/galaxy/web/framework/__init__.py        |  16 ++++++------
 templates/dataset/display.mako              |  10 ++++---
 templates/display_base.mako                 |   7 +++--
 templates/display_common.mako               |  10 ++++++++
 templates/grid_base.mako                    |  13 +--------
 templates/grid_base_async.mako              |   3 +-
 templates/history/display.mako              |  11 +--------
 templates/message.mako                      |  36 +++++++++++++++++++++++++++-
 templates/page/select_items_grid_async.mako |   3 +-
 templates/workflow/display.mako             |   6 +----
 13 files changed, 122 insertions(+), 66 deletions(-)
diffs (411 lines):
diff -r f8c305ba23cb -r e39c9a2a0b4c lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py	Fri Mar 12 12:43:31 2010 -0500
+++ b/lib/galaxy/web/controllers/dataset.py	Fri Mar 12 14:27:04 2010 -0500
@@ -418,17 +418,25 @@
         return self.stored_list_grid( trans, status=status, message=message, **kwargs )
         
     @web.expose
-    def imp( self, trans, id=None, **kwd ):
+    def imp( self, trans, dataset_id=None, **kwd ):
         """ Import another user's dataset via a shared URL; dataset is added to user's current history. """
         msg = ""
         
+        # Set referer message.
+        referer = trans.request.referer
+        if referer is not "":
+            referer_message = "<a href='%s'>return to the previous page</a>" % referer
+        else:
+            referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
+        
         # Error checking.
-        if not id:
-            return trans.show_error_message( "You must specify an ID for a dataset to import." )
+        if not dataset_id:
+            return trans.show_error_message( "You must specify a dataset to import. You can %s." % referer_message, use_panels=True )
             
         # Do import.
         cur_history = trans.get_history( create=True )
-        status, message = self._copy_datasets( trans, [ id ], [ cur_history ] )
+        status, message = self._copy_datasets( trans, [ dataset_id ], [ cur_history ] )
+        message = message + "<br>You can <a href='%s'>start using the dataset</a> or %s." % ( url_for('/'),  referer_message )
         return trans.show_message( message, type=status )
         
     @web.expose
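
Read on its own, the referer-message pattern added in this hunk (and repeated in the history and workflow controllers below) amounts to roughly the following. This is a minimal sketch rather than Galaxy code: `trans.request.referer` and `url_for` are the names used in the hunk, while the standalone helper and the stubbed `url_for` are assumptions made only to keep the snippet self-contained.

    def url_for( path ):
        # Stand-in for Galaxy's real url_for helper; here it just echoes the path.
        return path

    def build_referer_message( request ):
        # Hypothetical standalone version of the referer handling added above.
        referer = request.referer
        if referer:  # the hunk compares against "" with `is not`; a truthiness
                     # check also covers the case where no referer is set at all
            return "<a href='%s'>return to the previous page</a>" % referer
        return "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
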
diff -r f8c305ba23cb -r e39c9a2a0b4c lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py	Fri Mar 12 12:43:31 2010 -0500
+++ b/lib/galaxy/web/controllers/history.py	Fri Mar 12 14:27:04 2010 -0500
@@ -505,16 +505,27 @@
         msg = ""
         user = trans.get_user()
         user_history = trans.get_history()
+        # Set referer message
+        if 'referer' in kwd:
+            referer = kwd['referer']
+        else:
+            referer = trans.request.referer
+        if referer is not "":
+            referer_message = "<a href='%s'>return to the previous page</a>" % referer
+        else:
+            referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
+            
+        # Do import.
         if not id:
-            return trans.show_error_message( "You must specify a history you want to import." )
+            return trans.show_error_message( "You must specify a history you want to import.<br>You can %s." % referer_message, use_panels=True )
         import_history = self.get_history( trans, id, check_ownership=False )
         if not import_history:
-            return trans.show_error_message( "The specified history does not exist.")
+            return trans.show_error_message( "The specified history does not exist.<br>You can %s." % referer_message, use_panels=True )
         if not import_history.importable:
-            error( "The owner of this history has disabled imports via this link." )
+            return trans.show_error_message( "The owner of this history has disabled imports via this link.<br>You can %s." % referer_message, use_panels=True )
         if user:
             if import_history.user_id == user.id:
-                return trans.show_error_message( "You cannot import your own history." )
+                return trans.show_error_message( "You cannot import your own history.<br>You can %s." % referer_message, use_panels=True )
             new_history = import_history.copy( target_user=user )
             new_history.name = "imported: " + new_history.name
             new_history.user_id = user.id
@@ -530,9 +541,9 @@
             trans.sa_session.flush()
             if not user_history.datasets:
                 trans.set_history( new_history )
-            return trans.show_ok_message( """
-                History "%s" has been imported. Click <a href="%s">here</a>
-                to begin.""" % ( new_history.name, web.url_for( '/' ) ) )
+            return trans.show_ok_message(
+                message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s.""" 
+                % ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
         elif not user_history or not user_history.datasets or confirm:
             new_history = import_history.copy()
             new_history.name = "imported: " + new_history.name
@@ -548,13 +559,13 @@
             trans.sa_session.add( new_history )
             trans.sa_session.flush()
             trans.set_history( new_history )
-            return trans.show_ok_message( """
-                History "%s" has been imported. Click <a href="%s">here</a>
-                to begin.""" % ( new_history.name, web.url_for( '/' ) ) )
+            return trans.show_ok_message(
+                message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s.""" 
+                % ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
         return trans.show_warn_message( """
             Warning! If you import this history, you will lose your current
-            history. Click <a href="%s">here</a> to confirm.
-            """ % web.url_for( id=id, confirm=True ) )
+            history. <br>You can <a href="%s">continue and import this history</a> or %s.
+            """ % ( web.url_for( id=id, confirm=True, referer=trans.request.referer ), referer_message ), use_panels=True )
         
     @web.expose
     def view( self, trans, id=None ):
diff -r f8c305ba23cb -r e39c9a2a0b4c lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py	Fri Mar 12 12:43:31 2010 -0500
+++ b/lib/galaxy/web/controllers/workflow.py	Fri Mar 12 14:27:04 2010 -0500
@@ -278,18 +278,28 @@
     @web.expose
     @web.require_login( "use Galaxy workflows" )
     def imp( self, trans, id, **kwargs ):
+        # Set referer message.
+        referer = trans.request.referer
+        if referer is not "":
+            referer_message = "<a href='%s'>return to the previous page</a>" % referer
+        else:
+            referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
+        
+        # Do import.
         session = trans.sa_session
         stored = self.get_stored_workflow( trans, id, check_ownership=False )
         if stored.importable == False:
-            error( "The owner of this workflow has disabled imports via this link" )
+            return trans.show_error_message( "The owner of this workflow has disabled imports via this link.<br>You can %s" % referer_message, use_panels=True )
         elif stored.user == trans.user:
-            error( "You are already the owner of this workflow, can't import" )
+            return trans.show_error_message( "You can't import this workflow because you own it.<br>You can %s" % referer_message, use_panels=True )
         elif stored.deleted:
-            error( "This workflow has been deleted, can't import" )
+            return trans.show_error_message( "You can't import this workflow because it has been deleted.<br>You can %s" % referer_message, use_panels=True )
         elif session.query( model.StoredWorkflowUserShareAssociation ) \
                     .filter_by( user=trans.user, stored_workflow=stored ).count() > 0:
-            error( "This workflow is already shared with you" )
+            # TODO: this is only reasonable as long as import creates a sharing relation.
+            return trans.show_error_message( "You can't import this workflow because it is already shared with you.<br>You can %s" % referer_message, use_panels=True )
         else:
+            # TODO: Shouldn't an import provide a copy of a workflow?
             share = model.StoredWorkflowUserShareAssociation()
             share.stored_workflow = stored
             share.user = trans.user
@@ -297,7 +307,9 @@
             session.add( share )
             session.flush()
             # Redirect to load galaxy frames.
-            return trans.response.send_redirect( url_for( controller='workflow' ) )
+            return trans.show_ok_message(
+                message="""Workflow "%s" has been imported. <br>You can <a href="%s">start using this workflow</a> or %s.""" 
+                % ( stored.name, web.url_for( controller='workflow' ), referer_message ), use_panels=True )
             
     @web.expose
     @web.require_login( "use Galaxy workflows" )
diff -r f8c305ba23cb -r e39c9a2a0b4c lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py	Fri Mar 12 12:43:31 2010 -0500
+++ b/lib/galaxy/web/framework/__init__.py	Fri Mar 12 14:27:04 2010 -0500
@@ -561,7 +561,7 @@
         context.
         """
         return self.template_context['message']
-    def show_message( self, message, type='info', refresh_frames=[], cont=None ):
+    def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False ):
         """
         Convenience method for displaying a simple page with a single message.
         
@@ -571,22 +571,22 @@
         `refresh_frames`: names of frames in the interface that should be 
                           refreshed when the message is displayed
         """
-        return self.fill_template( "message.mako", message_type=type, message=message, refresh_frames=refresh_frames, cont=cont )
-    def show_error_message( self, message, refresh_frames=[] ):
+        return self.fill_template( "message.mako", message_type=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels )
+    def show_error_message( self, message, refresh_frames=[], use_panels=False ):
         """
         Convenience method for displaying an error message. See `show_message`.
         """
-        return self.show_message( message, 'error', refresh_frames )
-    def show_ok_message( self, message, refresh_frames=[] ):
+        return self.show_message( message, 'error', refresh_frames, use_panels=use_panels )
+    def show_ok_message( self, message, refresh_frames=[], use_panels=False ):
         """
         Convenience method for displaying an ok message. See `show_message`.
         """
-        return self.show_message( message, 'done', refresh_frames )
-    def show_warn_message( self, message, refresh_frames=[] ):
+        return self.show_message( message, 'done', refresh_frames, use_panels=use_panels )
+    def show_warn_message( self, message, refresh_frames=[], use_panels=False ):
         """
         Convenience method for displaying an warn message. See `show_message`.
         """
-        return self.show_message( message, 'warning', refresh_frames )
+        return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels )
     def show_form( self, form, header=None, template="form.mako" ):
         """
         Convenience method for displaying a simple page with a single HTML
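
To see how the new `use_panels` keyword threads through the convenience wrappers, the following self-contained sketch mirrors the signatures from the hunk above; `fill_template` here is only a stub that reports which base template `message.mako` would inherit, not Galaxy's real renderer.

    def fill_template( template, **context ):
        # Stand-in for trans.fill_template; in Galaxy this renders the Mako
        # template, and message.mako picks /base_panels.mako when use_panels is set.
        layout = '/base_panels.mako' if context.get( 'use_panels' ) else '/base.mako'
        return 'render %s inheriting %s' % ( template, layout )

    def show_message( message, type='info', refresh_frames=[], cont=None, use_panels=False ):
        return fill_template( 'message.mako', message_type=type, message=message,
                              refresh_frames=refresh_frames, cont=cont, use_panels=use_panels )

    def show_error_message( message, refresh_frames=[], use_panels=False ):
        return show_message( message, 'error', refresh_frames, use_panels=use_panels )

    print( show_error_message( 'You must specify a dataset to import.', use_panels=True ) )
    # -> render message.mako inheriting /base_panels.mako
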
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/dataset/display.mako
--- a/templates/dataset/display.mako	Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/dataset/display.mako	Fri Mar 12 14:27:04 2010 -0500
@@ -23,7 +23,8 @@
 
 <%def name="render_item_links( data )">
     ## Provide links to save data and TODO: import dataset.
-    <a href="${h.url_for( controller='dataset', action='display', dataset_id=trans.security.encode_id( data.id ), to_ext=data.ext )}">save</a>
+    <a href="${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( data.id ), to_ext=data.ext )}" class="icon-button disk tooltip" title="Save dataset"></a>
+        <a href="${h.url_for( controller='/dataset', action='imp', dataset_id=trans.security.encode_id( data.id ) )}" class="icon-button import tooltip" title="Import dataset"></a>
 </%def>
 
 <%def name="render_item( data, data_to_render )">
@@ -49,11 +50,12 @@
     <div class="unified-panel-body">
         <div style="overflow: auto; height: 100%;">        
             <div class="page-body">
-                <div style="padding: 0px 0px 5px 0px">
+                <div style="float: right">
                     ${self.render_item_links( item )}
                 </div>
-                
-                ${self.render_item_header( item )}
+                <div>
+                    ${self.render_item_header( item )}
+                </div>
                 
                 ${self.render_item( item, item_data )}
             </div>
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/display_base.mako
--- a/templates/display_base.mako	Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/display_base.mako	Fri Mar 12 14:27:04 2010 -0500
@@ -156,11 +156,12 @@
     <div class="unified-panel-body">
         <div style="overflow: auto; height: 100%;">        
             <div class="page-body">
-                <div style="padding: 0px 0px 5px 0px">
+                <div style="float: right">
                     ${self.render_item_links( item )}
                 </div>
-                
-                ${self.render_item_header( item )}
+                <div>
+                    ${self.render_item_header( item )}
+                </div>
                 
                 ${self.render_item( item, item_data )}
             </div>
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/display_common.mako
--- a/templates/display_common.mako	Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/display_common.mako	Fri Mar 12 14:27:04 2010 -0500
@@ -128,3 +128,13 @@
     %endif
 </%def>
 
+## Render message.
+<%def name="render_message( message, message_type )">
+    %if message:
+        <p>
+            <div class="${message_type}message transient-message">${util.restore_text( message )}</div>
+            <div style="clear: both"></div>
+        </p>
+    %endif
+</%def>
+
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/grid_base.mako
--- a/templates/grid_base.mako	Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/grid_base.mako	Fri Mar 12 14:27:04 2010 -0500
@@ -9,6 +9,7 @@
             return '/base.mako'
 %>
 <%inherit file="${inherit(context)}"/>
+<%namespace file="/display_common.mako" import="render_message" />
 
 <%def name="init()">
 <%
@@ -663,23 +664,13 @@
         <tr>
             <td width="75%">${self.render_grid_header( grid )}</td>
             <td></td>
-            <td width="25%" id="grid-message" valign="top">${self.render_grid_message( grid )}</td>
+            <td width="25%" id="grid-message" valign="top">${render_message( message, message_type )}</td>
         </tr>
     </table>
 
     ${self.render_grid_table( grid )}
 </%def>
 
-## Render grid message.
-<%def name="render_grid_message( grid )">
-    %if message:
-        <p>
-            <div class="${message_type}message transient-message">${util.restore_text( message )}</div>
-            <div style="clear: both"></div>
-        </p>
-    %endif
-</%def>
-
 ## Render grid header.
 <%def name="render_grid_header( grid, render_title=True)">
     <div class="grid-header">
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/grid_base_async.mako
--- a/templates/grid_base_async.mako	Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/grid_base_async.mako	Fri Mar 12 14:27:04 2010 -0500
@@ -1,4 +1,5 @@
 <%namespace file="./grid_base.mako" import="*" />
+<%namespace file="/display_common.mako" import="render_message" />
 
 <%
     # Set flag to indicate whether grid has operations that operate on multiple items.
@@ -12,4 +13,4 @@
 *****
 ${num_pages}
 *****
-${render_grid_message( grid )}
\ No newline at end of file
+${render_message( message, message_type )}
\ No newline at end of file
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/history/display.mako
--- a/templates/history/display.mako	Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/history/display.mako	Fri Mar 12 14:27:04 2010 -0500
@@ -236,16 +236,7 @@
 </%def>
 
 <%def name="render_item_links( history )">
-    %if history.user != trans.get_user():
-        <a href="${h.url_for( controller='/history', action='imp', id=trans.security.encode_id(history.id) )}">import and start using history</a>
-    %else:
-        ## TODO: add tooltip to indicate why this link is disabled.
-        import and start using history
-    %endif
-    ##<a href="${self.get_history_link( history )}">${_('refresh')}</a> 
-    %if show_deleted:
-        | <a href="${h.url_for('history', show_deleted=False)}">${_('hide deleted')}</a> 
-    %endif
+    <a href="${h.url_for( controller='/history', action='imp', id=trans.security.encode_id(history.id) )}" class="icon-button import tooltip" title="Import history"></a>
 </%def>
 
 <%def name="render_item( history, datasets )">
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/message.mako
--- a/templates/message.mako	Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/message.mako	Fri Mar 12 14:27:04 2010 -0500
@@ -1,5 +1,21 @@
+<%!
+    def inherit(context):
+        if context.get('use_panels'):
+            return '/base_panels.mako'
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
 <% _=n_ %>
-<%inherit file="/base.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view=active_view
+    self.message_box_visible=False
+%>
+</%def>
 
 <%def name="javascripts()">
     ${parent.javascripts()}
@@ -45,7 +61,23 @@
     </script>
 </%def>
 
-<div class="${message_type}messagelarge">${_(message)}</div>
+##
+## Override methods from base.mako and base_panels.mako
+##
+
+<%def name="center_panel()">
+    ${render_large_message( message, message_type )}
+</%def>
+
+## Render the grid's basic elements. Each of these elements can be subclassed.
+<%def name="body()">
+    ${render_large_message( message, message_type )}
+</%def>
+
+## Render large message.
+<%def name="render_large_message( message, message_type )">
+    <div class="${message_type}messagelarge" style="margin: 1em">${_(message)}</div>
+</%def>
 
 ## Render a message
 <%def name="render_msg( msg, messagetype='done' )">
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/page/select_items_grid_async.mako
--- a/templates/page/select_items_grid_async.mako	Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/page/select_items_grid_async.mako	Fri Mar 12 14:27:04 2010 -0500
@@ -1,8 +1,9 @@
 <%namespace file="../grid_base.mako" import="*" />
+<%namespace file="/display_common.mako" import="render_message" />
 
 ## Always show item checkboxes so that users can select histories.
 ${render_grid_table_body_contents( grid, show_item_checkboxes=True )}
 *****
 ${num_pages}
 *****
-${render_grid_message( grid )}
\ No newline at end of file
+${render_message( message, message_type )}
\ No newline at end of file
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/workflow/display.mako
--- a/templates/workflow/display.mako	Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/workflow/display.mako	Fri Mar 12 14:27:04 2010 -0500
@@ -77,11 +77,7 @@
 
 
 <%def name="render_item_links( workflow )">
-    %if workflow.user != trans.get_user():
-        <a href="${h.url_for( controller='/workflow', action='imp', id=trans.security.encode_id(workflow.id) )}">import and start using workflow</a>
-    %else:
-        import and start using workflow
-    %endif
+    <a href="${h.url_for( controller='/workflow', action='imp', id=trans.security.encode_id(workflow.id) )}" class="icon-button import tooltip" title="Import workflow"></a>
 </%def>
 
 <%def name="render_item( workflow, steps )">
                    
                  
                  
                          
                            
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/f8c305ba23cb
changeset: 3526:f8c305ba23cb
user:      Dan Blankenberg <dan(a)bx.psu.edu>
date:      Fri Mar 12 12:43:31 2010 -0500
description:
When preparing Display Applications, make the manual click-to-refresh link a non-JavaScript action.
diffstat:
 templates/dataset/display_application/display.mako |  2 +-
 1 files changed, 1 insertions(+), 1 deletions(-)
diffs (11 lines):
diff -r 90fa86a7b4e3 -r f8c305ba23cb templates/dataset/display_application/display.mako
--- a/templates/dataset/display_application/display.mako	Fri Mar 12 12:17:36 2010 -0500
+++ b/templates/dataset/display_application/display.mako	Fri Mar 12 12:43:31 2010 -0500
@@ -7,6 +7,6 @@
 %if refresh:
 <%def name="metas()"><meta http-equiv="refresh" content="10" /></%def>
 <p>
-This page will <a href="javascript:location.reload(true);">refresh</a> after 10 seconds.
+This page will <a href="${trans.request.url}">refresh</a> after 10 seconds.
 </p>
 %endif
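
The change above replaces the JavaScript location.reload() call with a plain link back to the current request URL, so the manual refresh keeps working when JavaScript is disabled. As a rough illustration, here is a minimal sketch of the same template pattern rendered with Mako outside Galaxy; request_url and the example URL are hypothetical stand-ins for trans.request.url, not Galaxy code:

from mako.template import Template

# Same shape as the refresh notice in display.mako; 'request_url' stands in
# for trans.request.url in the real template.
refresh_notice = Template("""\
%if refresh:
<p>
This page will <a href="${request_url}">refresh</a> after 10 seconds.
</p>
%endif
""")

print(refresh_notice.render(refresh=True, request_url="http://localhost:8080/some/page"))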
                    
                  
                  
                          
                            
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/90fa86a7b4e3
changeset: 3525:90fa86a7b4e3
user:      Dan Blankenberg <dan(a)bx.psu.edu>
date:      Fri Mar 12 12:17:36 2010 -0500
description:
Tweaks for flow control in Display Applications:
    Display Applications now use trans.response.send_redirect instead of JavaScript when redirecting users to prepared display applications.
    Display Applications now use a <meta> refresh instead of a JavaScript-based refresh when notifying users that the display application is still being prepared.
diffstat:
 lib/galaxy/web/controllers/dataset.py                     |   3 ++-
 templates/dataset/display_application/display.mako        |   9 ++-------
 templates/dataset/display_application/launch_display.mako |  15 ---------------
 3 files changed, 4 insertions(+), 23 deletions(-)
diffs (55 lines):
diff -r 5f967426f33f -r 90fa86a7b4e3 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py	Fri Mar 12 12:03:25 2010 -0500
+++ b/lib/galaxy/web/controllers/dataset.py	Fri Mar 12 12:17:36 2010 -0500
@@ -557,7 +557,8 @@
                         trans.response.headers[ 'Content-Length' ] = content_length
                         return rval
                     elif app_action == None:
-                        return trans.fill_template_mako( "dataset/display_application/launch_display.mako", display_link = display_link )
+                        #redirect user to url generated by display link
+                        return trans.response.send_redirect( display_link.display_url() )
                     else:
                         msg.append( ( 'Invalid action provided: %s' % app_action, 'error' ) )
                 else:
diff -r 5f967426f33f -r 90fa86a7b4e3 templates/dataset/display_application/display.mako
--- a/templates/dataset/display_application/display.mako	Fri Mar 12 12:03:25 2010 -0500
+++ b/templates/dataset/display_application/display.mako	Fri Mar 12 12:17:36 2010 -0500
@@ -1,17 +1,12 @@
 <%inherit file="/base.mako"/>
 <%namespace file="/message.mako" import="render_msg" />
 <%def name="title()">Display Application: ${display_link.link.display_application.name}  ${display_link.link.name}</%def>
-<% refresh_rate = 10 %>
-%if refresh:
-<script type="text/javascript">  
-    setTimeout( "location.reload(true);", ${ refresh_rate * 1000 } );
-</script>
-%endif
 %for message, message_type in msg:
     ${render_msg( message, message_type )}
 %endfor
 %if refresh:
+<%def name="metas()"><meta http-equiv="refresh" content="10" /></%def>
 <p>
-This page will <a href="javascript:location.reload(true);">refresh</a> after ${refresh_rate} seconds.
+This page will <a href="javascript:location.reload(true);">refresh</a> after 10 seconds.
 </p>
 %endif
diff -r 5f967426f33f -r 90fa86a7b4e3 templates/dataset/display_application/launch_display.mako
--- a/templates/dataset/display_application/launch_display.mako	Fri Mar 12 12:03:25 2010 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-<%inherit file="/base.mako"/>
-<%def name="title()">Launching Display Application: ${display_link.link.display_application.name}  ${display_link.link.name}</%def>
-
-<script type="text/javascript">  
-    location.href = '${display_link.display_url()}';
-</script>
-<p>
-All data has been prepared for the external display application: ${display_link.link.display_application.name}  ${display_link.link.name}.
-</p>
-<p>
-You are now being automatically forwarded to the external application.
-</p>
-<p>
-Click <a href="${display_link.display_url()}">here</a> if this redirect has failed.
-</p>
                    
                  
                  
                          
                            
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/5f967426f33f
changeset: 3524:5f967426f33f
user:      Dan Blankenberg <dan(a)bx.psu.edu>
date:      Fri Mar 12 12:03:25 2010 -0500
description:
Add 'metas()' %def to base.mako, to allow subclassing templates to declare additional <meta/> tags.
diffstat:
 templates/base.mako |  3 +++
 1 files changed, 3 insertions(+), 0 deletions(-)
diffs (17 lines):
diff -r c3eccab29814 -r 5f967426f33f templates/base.mako
--- a/templates/base.mako	Fri Mar 12 09:37:22 2010 -0500
+++ b/templates/base.mako	Fri Mar 12 12:03:25 2010 -0500
@@ -5,6 +5,7 @@
 <head>
 <title>${self.title()}</title>
 <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+${self.metas()}
 ${self.stylesheets()}
 ${self.javascripts()}
 </head>
@@ -30,3 +31,5 @@
   ${h.js( "jquery", "galaxy.base" )}
 </%def>
 
+## Additional metas can be defined by templates inheriting from this one.
+<%def name="metas()"></%def>
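
Because base.mako calls ${self.metas()} in <head> and defines it empty, an inheriting template only needs to redefine metas() for its tags to end up in the rendered page, which is how display.mako injects its <meta http-equiv="refresh"> in the changesets above. A small self-contained illustration of the mechanism, using the Mako library directly with simplified template contents (the file names and bodies below are illustrative, not Galaxy's actual templates):

from mako.lookup import TemplateLookup

lookup = TemplateLookup()

# Simplified base template: emits ${self.metas()} in <head> and defines it empty,
# so inheriting templates can declare additional <meta/> tags.
lookup.put_string("base.mako", """\
<html>
<head>
<title>${self.title()}</title>
${self.metas()}
</head>
<body>${self.body()}</body>
</html>
<%def name="title()">Galaxy</%def>
<%def name="metas()"></%def>
""")

# Simplified child template: overrides metas() to ask the browser to refresh.
lookup.put_string("child.mako", """\
<%inherit file="base.mako"/>
<%def name="title()">Display Application</%def>
<%def name="metas()"><meta http-equiv="refresh" content="10" /></%def>
This page will refresh after 10 seconds.
""")

print(lookup.get_template("child.mako").render())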
                    
                  
                  
                          
                            
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/c3eccab29814
changeset: 3523:c3eccab29814
user:      jeremy goecks <jeremy.goecks(a)emory.edu>
date:      Fri Mar 12 09:37:22 2010 -0500
description:
Make visualizations sharable, publishable, taggable, and annotatable. Plumbing code is in place, but UI code needs work; in particular, the page for viewing a shared/published visualization is empty, and annotations are available only via edit attributes.
Some code tidying: refactoring and removing unused code.
diffstat:
 lib/galaxy/model/__init__.py                                                       |   16 +-
 lib/galaxy/model/mapping.py                                                        |   73 +-
 lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py |  220 ++++++
 lib/galaxy/tags/tag_handler.py                                                     |    1 +
 lib/galaxy/web/base/controller.py                                                  |   39 +-
 lib/galaxy/web/buildapp.py                                                         |    1 +
 lib/galaxy/web/controllers/history.py                                              |   56 +-
 lib/galaxy/web/controllers/page.py                                                 |   21 +-
 lib/galaxy/web/controllers/tracks.py                                               |    4 +-
 lib/galaxy/web/controllers/visualization.py                                        |  322 +++++++++-
 lib/galaxy/web/controllers/workflow.py                                             |    6 +-
 templates/base_panels.mako                                                         |    2 +-
 templates/display_common.mako                                                      |    7 +-
 templates/page/create.mako                                                         |    3 +-
 templates/panels.mako                                                              |    2 -
 templates/visualization/create.mako                                                |   14 +
 templates/visualization/display.mako                                               |   19 +
 templates/visualization/list.mako                                                  |   52 +
 templates/visualization/list_published.mako                                        |   36 +
 19 files changed, 773 insertions(+), 121 deletions(-)
diffs (1317 lines):
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py	Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/model/__init__.py	Fri Mar 12 09:37:22 2010 -0500
@@ -1527,6 +1527,11 @@
         self.title = None
         self.config = None
         
+class VisualizationUserShareAssociation( object ):
+    def __init__( self ):
+        self.visualization = None
+        self.user = None
+        
 class Tag ( object ):
     def __init__( self, id=None, type=None, parent_id=None, name=None ):
         self.id = id
@@ -1558,16 +1563,16 @@
 
 class PageTagAssociation ( ItemTagAssociation ):
     pass
-    
-class WorkflowTagAssociation ( ItemTagAssociation ):
-    pass
-    
+
 class WorkflowStepTagAssociation ( ItemTagAssociation ):
     pass
     
 class StoredWorkflowTagAssociation ( ItemTagAssociation ):
     pass
     
+class VisualizationTagAssociation ( ItemTagAssociation ):
+    pass
+    
 class HistoryAnnotationAssociation( object ):
     pass
     
@@ -1583,6 +1588,9 @@
 class PageAnnotationAssociation( object ):
     pass
     
+class VisualizationAnnotationAssociation( object ):
+    pass
+    
 class UserPreference ( object ):
     def __init__( self, name=None, value=None ):
         self.name = name
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py	Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/model/mapping.py	Fri Mar 12 09:37:22 2010 -0500
@@ -80,7 +80,7 @@
     Column( "genome_build", TrimmedString( 40 ) ),
     Column( "importable", Boolean, default=False ),
     Column( "slug", TEXT, index=True ),
-    Column( "published", Boolean, index=True ) )
+    Column( "published", Boolean, index=True, default=False ) )
 
 HistoryUserShareAssociation.table = Table( "history_user_share_association", metadata,
     Column( "id", Integer, primary_key=True ),
@@ -521,7 +521,7 @@
     Column( "deleted", Boolean, default=False ),
     Column( "importable", Boolean, default=False ),
     Column( "slug", TEXT, index=True ),
-    Column( "published", Boolean, index=True )
+    Column( "published", Boolean, index=True, default=False )
     )
 
 Workflow.table = Table( "workflow", metadata,
@@ -721,7 +721,11 @@
     Column( "latest_revision_id", Integer,
             ForeignKey( "visualization_revision.id", use_alter=True, name='visualization_latest_revision_id_fk' ), index=True ),
     Column( "title", TEXT ),
-    Column( "type", TEXT )
+    Column( "type", TEXT ),
+    Column( "deleted", Boolean, default=False, index=True ),
+    Column( "importable", Boolean, default=False, index=True ),
+    Column( "slug", TEXT, index=True ),
+    Column( "published", Boolean, default=False, index=True )
     )
 
 VisualizationRevision.table = Table( "visualization_revision", metadata,
@@ -733,6 +737,12 @@
     Column( "config", JSONType )
     )
     
+VisualizationUserShareAssociation.table = Table( "visualization_user_share_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
+    )
+    
 # Tagging tables.
 
 Tag.table = Table( "tag", metadata,
@@ -768,16 +778,7 @@
     Column( "user_tname", TrimmedString(255), index=True),
     Column( "value", TrimmedString(255), index=True),
     Column( "user_value", TrimmedString(255), index=True) )
-    
-WorkflowTagAssociation.table = Table( "workflow_tag_association", metadata,
-    Column( "id", Integer, primary_key=True ),
-    Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True ),
-    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
-    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
-    Column( "user_tname", Unicode(255), index=True),
-    Column( "value", Unicode(255), index=True),
-    Column( "user_value", Unicode(255), index=True) )
-    
+        
 StoredWorkflowTagAssociation.table = Table( "stored_workflow_tag_association", metadata,
     Column( "id", Integer, primary_key=True ),
     Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
@@ -805,6 +806,15 @@
     Column( "value", Unicode(255), index=True),
     Column( "user_value", Unicode(255), index=True) )
     
+VisualizationTagAssociation.table = Table( "visualization_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", TrimmedString(255), index=True),
+    Column( "value", TrimmedString(255), index=True),
+    Column( "user_value", TrimmedString(255), index=True) )
+    
 # Annotation tables.
 
 HistoryAnnotationAssociation.table = Table( "history_annotation_association", metadata,
@@ -836,6 +846,12 @@
     Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
     Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
     Column( "annotation", TEXT, index=True) )
+    
+VisualizationAnnotationAssociation.table = Table( "visualization_annotation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "annotation", TEXT, index=True) )
 
 # User tables.
     
@@ -1271,8 +1287,7 @@
     properties=dict( steps=relation( WorkflowStep, backref='workflow',
                                      order_by=asc(WorkflowStep.table.c.order_index),
                                      cascade="all, delete-orphan",
-                                     lazy=False ),
-                     tags=relation(WorkflowTagAssociation, order_by=WorkflowTagAssociation.table.c.id, backref="workflows") 
+                                     lazy=False )
                                       ) )
 
 assign_mapper( context, WorkflowStep, WorkflowStep.table,
@@ -1359,8 +1374,20 @@
                                          primaryjoin=( Visualization.table.c.id == VisualizationRevision.table.c.visualization_id ) ),
                      latest_revision=relation( VisualizationRevision, post_update=True,
                                                primaryjoin=( Visualization.table.c.latest_revision_id == VisualizationRevision.table.c.id ),
-                                               lazy=False )
+                                               lazy=False ),
+                     tags=relation( VisualizationTagAssociation, order_by=VisualizationTagAssociation.table.c.id, backref="visualizations" ),
+                     annotations=relation( VisualizationAnnotationAssociation, order_by=VisualizationAnnotationAssociation.table.c.id, backref="visualizations" )
                    ) )
+                   
+# Set up proxy so that 
+#   Visualization.users_shared_with_dot_users
+# returns a list of User objects for users that a visualization is shared with.
+Visualization.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
+                   
+assign_mapper( context, VisualizationUserShareAssociation, VisualizationUserShareAssociation.table,
+  properties=dict( user=relation( User, backref='visualizations_shared_by_others' ),
+                   visualization=relation( Visualization, backref='users_shared_with' )
+                 ) )
 
 assign_mapper( context, Tag, Tag.table,
     properties=dict( children=relation(Tag, backref=backref( 'parent', remote_side=[Tag.table.c.id] ) )  
@@ -1381,19 +1408,19 @@
 assign_mapper( context, PageTagAssociation, PageTagAssociation.table,
     properties=dict( tag=relation(Tag, backref="tagged_pages"), user=relation( User ) )
                     )
-
-assign_mapper( context, WorkflowTagAssociation, WorkflowTagAssociation.table,
-    properties=dict( tag=relation(Tag, backref="tagged_workflows"), user=relation( User ) )
-                    )
                     
 assign_mapper( context, StoredWorkflowTagAssociation, StoredWorkflowTagAssociation.table,
-    properties=dict( tag=relation(Tag, backref="tagged_stored_workflows"), user=relation( User ) )
+    properties=dict( tag=relation(Tag, backref="tagged_workflows"), user=relation( User ) )
                     )
                     
 assign_mapper( context, WorkflowStepTagAssociation, WorkflowStepTagAssociation.table,
     properties=dict( tag=relation(Tag, backref="tagged_workflow_steps"), user=relation( User ) )
                     )
                     
+assign_mapper( context, VisualizationTagAssociation, VisualizationTagAssociation.table,
+    properties=dict( tag=relation(Tag, backref="tagged_visualizations"), user=relation( User ) )
+                    )
+                    
 assign_mapper( context, HistoryAnnotationAssociation, HistoryAnnotationAssociation.table,
     properties=dict( history=relation( History ), user=relation( User ) )
                     )
@@ -1414,6 +1441,10 @@
     properties=dict( page=relation( Page ), user=relation( User ) )
                     )
                     
+assign_mapper( context, VisualizationAnnotationAssociation, VisualizationAnnotationAssociation.table,
+    properties=dict( visualization=relation( Visualization ), user=relation( User ) )
+                    )
+                    
 assign_mapper( context, UserPreference, UserPreference.table, 
     properties = {}
               )
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py	Fri Mar 12 09:37:22 2010 -0500
@@ -0,0 +1,220 @@
+"""
+Migration script to create tables and columns for sharing visualizations.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+# Sharing visualizations.
+
+VisualizationUserShareAssociation_table = Table( "visualization_user_share_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
+    )
+    
+# Tagging visualizations.
+
+VisualizationTagAssociation_table = Table( "visualization_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", Unicode(255), index=True),
+    Column( "value", Unicode(255), index=True),
+    Column( "user_value", Unicode(255), index=True) )
+
+# Annotating visualizations.
+
+VisualizationAnnotationAssociation_table = Table( "visualization_annotation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "annotation", TEXT, index=False ) )
+    
+Visualiation_table = Table( "visualization", metadata, autoload=True )
+    
+def upgrade():
+    print __doc__
+    metadata.reflect()
+
+    # Create visualization_user_share_association table.
+    try:
+        VisualizationUserShareAssociation_table.create()
+    except Exception, e:
+        print "Creating visualization_user_share_association table failed: %s" % str( e )
+        log.debug( "Creating visualization_user_share_association table failed: %s" % str( e ) )
+        
+    # Get default boolean value 'false' so that columns can be initialized.
+    if migrate_engine.name == 'mysql':
+        default_false = "0"
+    elif migrate_engine.name == 'sqlite':
+        default_false = "'false'"
+    elif migrate_engine.name == 'postgres':
+        default_false = "false"
+        
+    # Add columns & create indices for supporting sharing to visualization table.
+    deleted_column = Column( "deleted", Boolean, default=False, index=True )
+    importable_column = Column( "importable", Boolean, default=False, index=True )
+    slug_column = Column( "slug", TEXT, index=True )
+    published_column = Column( "published", Boolean, index=True )
+    
+    try:
+        # Add column.
+        deleted_column.create( Visualiation_table )
+        assert deleted_column is Visualiation_table.c.deleted
+	    
+        # Fill column with default value.
+        cmd = "UPDATE visualization SET deleted = %s" % default_false
+        db_session.execute( cmd )
+    except Exception, e:
+        print "Adding deleted column to visualization table failed: %s" % str( e )
+        log.debug( "Adding deleted column to visualization table failed: %s" % str( e ) )
+        
+    try:
+        i = Index( "ix_visualization_deleted", Visualiation_table.c.deleted )
+        i.create()
+    except Exception, e:
+        print "Adding index 'ix_visualization_deleted' failed: %s" % str( e )
+        log.debug( "Adding index 'ix_visualization_deleted' failed: %s" % str( e ) )
+	    
+    try:
+        # Add column.
+        importable_column.create( Visualiation_table )
+        assert importable_column is Visualiation_table.c.importable
+
+        # Fill column with default value.
+        cmd = "UPDATE visualization SET importable = %s" % default_false
+        db_session.execute( cmd )
+    except Exception, e:
+        print "Adding importable column to visualization table failed: %s" % str( e )
+        log.debug( "Adding importable column to visualization table failed: %s" % str( e ) )
+        
+    i = Index( "ix_visualization_importable", Visualiation_table.c.importable )
+    try:
+        i.create()
+    except Exception, e:
+        print "Adding index 'ix_visualization_importable' failed: %s" % str( e )
+        log.debug( "Adding index 'ix_visualization_importable' failed: %s" % str( e ) )
+	    
+    try:
+	    slug_column.create( Visualiation_table )
+	    assert slug_column is Visualiation_table.c.slug
+    except Exception, e:
+        print "Adding slug column to visualization table failed: %s" % str( e )
+        log.debug( "Adding slug column to visualization table failed: %s" % str( e ) )
+                
+    try:
+        if migrate_engine.name == 'mysql':
+            # Have to create index manually.
+            cmd = "CREATE INDEX ix_visualization_slug ON visualization ( slug ( 100 ) )"
+            db_session.execute( cmd )
+        else:
+            i = Index( "ix_visualization_slug", Visualiation_table.c.slug )
+            i.create()
+    except Exception, e:
+        print "Adding index 'ix_visualization_slug' failed: %s" % str( e )
+        log.debug( "Adding index 'ix_visualization_slug' failed: %s" % str( e ) )
+	    
+    try:
+        # Add column.
+        published_column.create( Visualiation_table )
+        assert published_column is Visualiation_table.c.published
+
+        # Fill column with default value.
+        cmd = "UPDATE visualization SET published = %s" % default_false
+        db_session.execute( cmd )
+    except Exception, e:
+        print "Adding published column to visualization table failed: %s" % str( e )
+        log.debug( "Adding published column to visualization table failed: %s" % str( e ) )
+        
+    i = Index( "ix_visualization_published", Visualiation_table.c.published )
+    try:
+        i.create()
+    except Exception, e:
+        print "Adding index 'ix_visualization_published' failed: %s" % str( e )
+        log.debug( "Adding index 'ix_visualization_published' failed: %s" % str( e ) )
+        
+    # Create visualization_tag_association table.
+    try:
+        VisualizationTagAssociation_table.create()
+    except Exception, e:
+        print str(e)
+        log.debug( "Creating visualization_tag_association table failed: %s" % str( e ) )
+        
+    # Create visualization_annotation_association table.
+    try:
+        VisualizationAnnotationAssociation_table.create()
+    except Exception, e:
+        print str(e)
+        log.debug( "Creating visualization_annotation_association table failed: %s" % str( e ) )
+
+    # Need to create index for visualization annotation manually to deal with errors.
+    try:
+       if migrate_engine.name == 'mysql':
+           # Have to create index manually.
+           cmd = "CREATE INDEX ix_visualization_annotation_association_annotation ON visualization_annotation_association ( annotation ( 100 ) )"
+           db_session.execute( cmd )
+       else:
+           i = Index( "ix_visualization_annotation_association_annotation", VisualizationAnnotationAssociation_table.c.annotation )
+           i.create()
+    except Exception, e:
+       print "Adding index 'ix_visualization_annotation_association_annotation' failed: %s" % str( e )
+       log.debug( "Adding index 'ix_visualization_annotation_association_annotation' failed: %s" % str( e ) )
+                       
+def downgrade():
+    metadata.reflect()
+        
+    # Drop visualization_user_share_association table.
+    try:
+        VisualizationUserShareAssociation_table.drop()
+    except Exception, e:
+        print str(e)
+        log.debug( "Dropping visualization_user_share_association table failed: %s" % str( e ) )
+
+    # Drop columns for supporting sharing from visualization table.
+    try:
+	    Visualiation_table.c.deleted.drop()
+    except Exception, e:
+        print "Dropping deleted column from visualization table failed: %s" % str( e )
+        log.debug( "Dropping deleted column from visualization table failed: %s" % str( e ) )
+
+    try:
+	    Visualiation_table.c.importable.drop()
+    except Exception, e:
+        print "Dropping importable column from visualization table failed: %s" % str( e )
+        log.debug( "Dropping importable column from visualization table failed: %s" % str( e ) )
+
+    try:
+	    Visualiation_table.c.slug.drop()
+    except Exception, e:
+        print "Dropping slug column from visualization table failed: %s" % str( e )
+        log.debug( "Dropping slug column from visualization table failed: %s" % str( e ) )
+
+    try:
+	    Visualiation_table.c.published.drop()
+    except Exception, e:
+        print "Dropping published column from visualization table failed: %s" % str( e )
+        log.debug( "Dropping published column from visualization table failed: %s" % str( e ) )
+        
+    # Drop visualization_tag_association table.
+    try:
+        VisualizationTagAssociation_table.drop()
+    except Exception, e:
+        print str(e)
+        log.debug( "Dropping visualization_tag_association table failed: %s" % str( e ) )
+
+    # Drop visualization_annotation_association table.
+    try:
+        VisualizationAnnotationAssociation_table.drop()
+    except Exception, e:
+        print str(e)
+        log.debug( "Dropping visualization_annotation_association table failed: %s" % str( e ) )
\ No newline at end of file
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/tags/tag_handler.py
--- a/lib/galaxy/tags/tag_handler.py	Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/tags/tag_handler.py	Fri Mar 12 09:37:22 2010 -0500
@@ -34,6 +34,7 @@
         ItemTagAssocInfo( model.HistoryDatasetAssociation, model.HistoryDatasetAssociationTagAssociation, model.HistoryDatasetAssociationTagAssociation.table.c.history_dataset_association_id )
     item_tag_assoc_info["Page"] = ItemTagAssocInfo( model.Page, model.PageTagAssociation, model.PageTagAssociation.table.c.page_id )
     item_tag_assoc_info["StoredWorkflow"] = ItemTagAssocInfo( model.StoredWorkflow, model.StoredWorkflowTagAssociation, model.StoredWorkflowTagAssociation.table.c.stored_workflow_id )
+    item_tag_assoc_info["Visualization"] = ItemTagAssocInfo( model.Visualization, model.VisualizationTagAssociation, model.VisualizationTagAssociation.table.c.visualization_id )
         
     def get_tag_assoc_class(self, item_class):
         """ Returns tag association class for item class. """
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py	Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/base/controller.py	Fri Mar 12 09:37:22 2010 -0500
@@ -16,6 +16,9 @@
 
 # States for passing messages
 SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
+
+# RE that tests for valid slug.
+VALID_SLUG_RE = re.compile( "^[a-z0-9\-]+$" )
     
 class BaseController( object ):
     """
@@ -40,6 +43,8 @@
             item_class = model.Page
         elif class_name == 'StoredWorkflow':
             item_class = model.StoredWorkflow
+        elif class_name == 'Visualization':
+            item_class = model.Visualization
         else:
             item_class = None
         return item_class
@@ -76,6 +81,8 @@
             annotation_assoc = annotation_assoc.filter_by( workflow_step=item )
         elif item.__class__ == model.Page:
             annotation_assoc = annotation_assoc.filter_by( page=item )
+        elif item.__class__ == model.Visualization:
+            annotation_assoc = annotation_assoc.filter_by( visualization=item )
         return annotation_assoc.first()
         
     def add_item_annotation( self, trans, item, annotation ):
@@ -153,6 +160,19 @@
                 truncated = False
         return truncated, dataset_data
         
+class UsesVisualization( SharableItemSecurity ):
+    """ Mixin for controllers that use Visualization objects. """
+
+    def get_visualization( self, trans, id, check_ownership=True, check_accessible=False ):
+        """ Get a Visualization from the database by id, verifying ownership. """
+        # Load visualization from database
+        id = trans.security.decode_id( id )
+        visualization = trans.sa_session.query( model.Visualization ).get( id )
+        if not visualization:
+            error( "Visualization not found" )
+        else:
+            return self.security_check( trans.get_user(), visualization, check_ownership, check_accessible )
+        
 class UsesStoredWorkflow( SharableItemSecurity ):
     """ Mixin for controllers that use StoredWorkflow objects. """
     
@@ -240,6 +260,12 @@
         pass
     
     @web.expose
+    @web.require_login( "share Galaxy items" )
+    def share( self, trans, id=None, email="", **kwd ):
+        """ Handle sharing an item with a particular user. """
+        pass
+    
+    @web.expose
     def display_by_username_and_slug( self, trans, username, slug ):
         """ Display item by username and slug. """
         pass
@@ -262,13 +288,18 @@
     def _make_item_accessible( self, sa_session, item ):
         """ Makes item accessible--viewable and importable--and sets item's slug. Does not flush/commit changes, however. Item must have name, user, importable, and slug attributes. """
         item.importable = True
-        self.set_item_slug( sa_session, item )
+        self.create_item_slug( sa_session, item )
 
-    def set_item_slug( self, sa_session, item ):
-        """ Set item slug. Slug is unique among user's importable items for item's class. Returns true if item's slug was set; false otherwise. """
+    def create_item_slug( self, sa_session, item ):
+        """ Create item slug. Slug is unique among user's importable items for item's class. Returns true if item's slug was set; false otherwise. """
         if item.slug is None or item.slug == "":
+            # Item can have either a name or a title.
+            if hasattr( item, 'name' ):
+                item_name = item.name
+            elif hasattr( item, 'title' ):
+                item_name = item.title
             # Replace whitespace with '-'
-            slug_base = re.sub( "\s+", "-", item.name.lower() )
+            slug_base = re.sub( "\s+", "-", item_name.lower() )
             # Remove all non-alphanumeric characters.
             slug_base = re.sub( "[^a-zA-Z0-9\-]", "", slug_base )
             # Remove trailing '-'.
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py	Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/buildapp.py	Fri Mar 12 09:37:22 2010 -0500
@@ -79,6 +79,7 @@
     webapp.add_route( '/u/:username/p/:slug', controller='page', action='display_by_username_and_slug' )
     webapp.add_route( '/u/:username/h/:slug', controller='history', action='display_by_username_and_slug' )
     webapp.add_route( '/u/:username/w/:slug', controller='workflow', action='display_by_username_and_slug' )
+    webapp.add_route( '/u/:username/v/:slug', controller='visualization', action='display_by_username_and_slug' )
     webapp.finalize_config()
     # Wrap the webapp in some useful middleware
     if kwargs.get( 'middleware', True ):
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py	Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/controllers/history.py	Fri Mar 12 09:37:22 2010 -0500
@@ -440,7 +440,7 @@
         """ Returns history's name and link. """
         history = self.get_history( trans, id, False )
         
-        if self.set_item_slug( trans.sa_session, history ):
+        if self.create_item_slug( trans.sa_session, history ):
             trans.sa_session.flush()
         return_dict = { "name" : history.name, "link" : url_for( action="display_by_username_and_slug", username=history.user.username, slug=history.slug ) }
         return return_dict
@@ -652,58 +652,6 @@
         session.flush()
                 
         return trans.fill_template( "/sharing_base.mako", item=history )
-        
-    ## TODO: remove this method when history sharing has been verified to work correctly with new sharing() method.
-    @web.expose
-    @web.require_login( "share histories with other users" )
-    def sharing_old( self, trans, histories=[], id=None, **kwd ):
-        """Performs sharing of histories among users."""
-        # histories looks like: [ historyX, historyY ]
-        params = util.Params( kwd )
-        msg = util.restore_text ( params.get( 'msg', '' ) )
-        if id:
-            ids = util.listify( id )
-            if ids:
-                histories = [ self.get_history( trans, history_id ) for history_id in ids ]
-        for history in histories:
-            trans.sa_session.add( history )
-            if params.get( 'enable_import_via_link', False ):
-                self._make_item_accessible( trans.sa_session, history )
-                trans.sa_session.flush()
-            elif params.get( 'disable_import_via_link', False ):
-                history.importable = False
-                trans.sa_session.flush()
-            elif params.get( 'unshare_user', False ):
-                user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( kwd[ 'unshare_user' ] ) )
-                if not user:
-                    msg = 'History (%s) does not seem to be shared with user (%s)' % ( history.name, user.email )
-                    return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='error' )
-                husas = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ).filter_by( user=user, history=history ).all()
-                if husas:
-                    for husa in husas:
-                        trans.sa_session.delete( husa )
-                        trans.sa_session.flush()
-        histories = []
-        # Get all histories that have been shared with others
-        husas = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
-                                .join( "history" ) \
-                                .filter( and_( trans.app.model.History.user == trans.user,
-                                               trans.app.model.History.deleted == False ) ) \
-                                .order_by( trans.app.model.History.table.c.name )
-        for husa in husas:
-            history = husa.history
-            if history not in histories:
-                histories.append( history )
-        # Get all histories that are importable
-        importables = trans.sa_session.query( trans.app.model.History ) \
-                                      .filter_by( user=trans.user, importable=True, deleted=False ) \
-                                      .order_by( trans.app.model.History.table.c.name )
-        for importable in importables:
-            if importable not in histories:
-                histories.append( importable )
-        # Sort the list of histories by history.name
-        histories.sort( key=operator.attrgetter( 'name') )
-        return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='done' )
                                       
     @web.expose
     @web.require_login( "share histories with other users" )
@@ -975,7 +923,7 @@
                     share.history = history
                     share.user = send_to_user
                     trans.sa_session.add( share )
-                    self.set_item_slug( trans.sa_session, history )
+                    self.create_item_slug( trans.sa_session, history )
                     trans.sa_session.flush()
                     if history not in shared_histories:
                         shared_histories.append( history )
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/controllers/page.py
--- a/lib/galaxy/web/controllers/page.py	Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/controllers/page.py	Fri Mar 12 09:37:22 2010 -0500
@@ -4,10 +4,6 @@
 from galaxy.util.odict import odict
 from galaxy.util.json import from_json_string
 
-import re
-
-VALID_SLUG_RE = re.compile( "^[a-z0-9\-]+$" )
-
 def format_bool( b ):
     if b:
         return "yes"
@@ -45,8 +41,8 @@
     ]
     operations = [
         grids.DisplayByUsernameAndSlugGridOperation( "View", allow_multiple=False ),
+        grids.GridOperation( "Edit content", allow_multiple=False, url_args=dict( action='edit_content') ),
         grids.GridOperation( "Edit attributes", allow_multiple=False, url_args=dict( action='edit') ),
-        grids.GridOperation( "Edit content", allow_multiple=False, url_args=dict( action='edit_content') ),
         grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
         grids.GridOperation( "Delete", confirm="Are you sure you want to delete this page?" ),
     ]
@@ -62,7 +58,7 @@
     default_sort_key = "-update_time"
     default_filter = dict( title="All", username="All" )
     columns = [
-        grids.PublicURLColumn( "Title", key="title", model_class=model.Page, filterable="advanced"),
+        grids.PublicURLColumn( "Title", key="title", model_class=model.Page, filterable="advanced" ),
         grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_class=model.Page, model_annotation_association_class=model.PageAnnotationAssociation, filterable="advanced" ),
         grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced", sortable=False ), 
         grids.CommunityTagsColumn( "Community Tags", "tags", model.Page, model.PageTagAssociation, filterable="advanced", grid_name="PageAllPublishedGrid" ),
@@ -356,10 +352,10 @@
                 template="page/create.mako" )
         
     @web.expose
-    @web.require_login( "create pages" )
+    @web.require_login( "edit pages" )
     def edit( self, trans, id, page_title="", page_slug="", page_annotation="" ):
         """
-        Create a new page
+        Edit a page's attributes.
         """
         encoded_id = id
         id = trans.security.decode_id( id )
@@ -456,6 +452,7 @@
     @web.expose
     @web.require_login( "use Galaxy pages" )
     def share( self, trans, id, email="" ):
+        """ Handle sharing with an individual user. """
         msg = mtype = None
         page = trans.sa_session.query( model.Page ).get( trans.security.decode_id( id ) )
         if email:
@@ -468,18 +465,18 @@
                 msg = ( "User '%s' does not exist" % email )
             elif other == trans.get_user():
                 mtype = "error"
-                msg = ( "You cannot share a workflow with yourself" )
+                msg = ( "You cannot share a page with yourself" )
             elif trans.sa_session.query( model.PageUserShareAssociation ) \
                     .filter_by( user=other, page=page ).count() > 0:
                 mtype = "error"
-                msg = ( "Workflow already shared with '%s'" % email )
+                msg = ( "Page already shared with '%s'" % email )
             else:
                 share = model.PageUserShareAssociation()
                 share.page = page
                 share.user = other
                 session = trans.sa_session
                 session.add( share )
-                self.set_item_slug( session, page )
+                self.create_item_slug( session, page )
                 session.flush()
                 trans.set_message( "Page '%s' shared with user '%s'" % ( page.title, other.email ) )
                 return trans.response.send_redirect( url_for( controller='page', action='sharing', id=id ) )
@@ -609,7 +606,7 @@
         """ Returns page's name and link. """
         page = self.get_page( trans, id )
 
-        if self.set_item_slug( trans.sa_session, page ):
+        if self.create_item_slug( trans.sa_session, page ):
             trans.sa_session.flush()
         return_dict = { "name" : page.title, "link" : url_for( action="display_by_username_and_slug", username=page.user.username, slug=page.slug ) }
         return return_dict
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py	Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/controllers/tracks.py	Fri Mar 12 09:37:22 2010 -0500
@@ -262,7 +262,9 @@
     @web.json
     def save( self, trans, **kwargs ):
         session = trans.sa_session
-        vis_id = kwargs['vis_id'].strip('"')
+        vis_id = "undefined"
+        if 'vis_id' in kwargs:
+            vis_id = kwargs['vis_id'].strip('"')
         dbkey = kwargs['dbkey']
         
         if vis_id == "undefined": # new vis
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py	Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/controllers/visualization.py	Fri Mar 12 09:37:22 2010 -0500
@@ -1,35 +1,325 @@
 from galaxy.web.base.controller import *
-from galaxy.web.framework.helpers import time_ago, grids
+from galaxy.web.framework.helpers import time_ago, grids, iff
 from galaxy.util.sanitize_html import sanitize_html
 
 
 class VisualizationListGrid( grids.Grid ):
     # Grid definition
-    title = "Visualizations"
+    title = "Saved Visualizations"
     model_class = model.Visualization
     default_sort_key = "-update_time"
+    default_filter = dict( title="All", deleted="False", tags="All", sharing="All" )
     columns = [
-        grids.GridColumn( "Title", key="title", attach_popup=True,
+        grids.TextColumn( "Title", key="title", model_class=model.Visualization, attach_popup=True,
                          link=( lambda item: dict( controller="tracks", action="browser", id=item.id ) ) ),
-        grids.GridColumn( "Type", key="type" ),
+        grids.TextColumn( "Type", key="type", model_class=model.Visualization ),
+        grids.IndividualTagsColumn( "Tags", "tags", model.Visualization, model.VisualizationTagAssociation, filterable="advanced", grid_name="VisualizationListGrid" ),
+        grids.SharingStatusColumn( "Sharing", key="sharing", model_class=model.Visualization, filterable="advanced", sortable=False ),
         grids.GridColumn( "Created", key="create_time", format=time_ago ),
         grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
-    ]
-    ## global_actions = [
-    ##     grids.GridAction( "Add new page", dict( action='create' ) )
-    ## ]
+    ]    
+    columns.append( 
+        grids.MulticolFilterColumn(  
+        "Search", 
+        cols_to_filter=[ columns[0], columns[2] ], 
+        key="free-text-search", visible=False, filterable="standard" )
+                )
     operations = [
-        grids.GridOperation( "View", allow_multiple=False, url_args=dict( controller="tracks", action='browser' ) ),
+        grids.GridOperation( "Edit content", allow_multiple=False, url_args=dict( controller='tracks', action='browser' ) ),
+        grids.GridOperation( "Edit attributes", allow_multiple=False, url_args=dict( action='edit') ),
+        grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
+        grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ), async_compatible=True, confirm="Are you sure you want to delete this visualization?" ),
     ]
     def apply_default_filter( self, trans, query, **kwargs ):
-        return query.filter_by( user=trans.user )
+        return query.filter_by( user=trans.user, deleted=False )
+        
+class VisualizationAllPublishedGrid( grids.Grid ):
+    # Grid definition
+    use_panels = True
+    use_async = True
+    title = "Published Visualizations"
+    model_class = model.Visualization
+    default_sort_key = "-update_time"
+    default_filter = dict( title="All", username="All" )
+    columns = [
+        grids.PublicURLColumn( "Title", key="title", model_class=model.Visualization, filterable="advanced" ),
+        grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_class=model.Visualization, model_annotation_association_class=model.VisualizationAnnotationAssociation, filterable="advanced" ),
+        grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced", sortable=False ), 
+        grids.CommunityTagsColumn( "Community Tags", "tags", model.Visualization, model.VisualizationTagAssociation, filterable="advanced", grid_name="VisualizationAllPublishedGrid" ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago )
+    ]
+    columns.append( 
+        grids.MulticolFilterColumn(  
+        "Search", 
+        cols_to_filter=[ columns[0], columns[1], columns[2], columns[3] ], 
+        key="free-text-search", visible=False, filterable="standard" )
+                )
+    def build_initial_query( self, session ):
+        # Join so that searching history.user makes sense.
+        return session.query( self.model_class ).join( model.User.table )
+    def apply_default_filter( self, trans, query, **kwargs ):
+        return query.filter( self.model_class.deleted==False ).filter( self.model_class.published==True )
 
-class VisualizationController( BaseController ):
-    list_grid = VisualizationListGrid()
+
+class VisualizationController( BaseController, Sharable, UsesAnnotations, UsesVisualization ):
+    _user_list_grid = VisualizationListGrid()
+    _published_list_grid = VisualizationAllPublishedGrid()
+    
     @web.expose
-    @web.require_login()
-    def index( self, trans, *args, **kwargs ):
-        return trans.fill_template( "panels.mako", grid=self.list_grid( trans, *args, **kwargs ), active_view='visualization', main_url=url_for( action='list' ) )
+    def list_published( self, trans, *args, **kwargs ):
+        grid = self._published_list_grid( trans, **kwargs )
+        if 'async' in kwargs:
+            return grid
+        else:
+            # Render grid wrapped in panels
+            return trans.fill_template( "visualization/list_published.mako", grid=grid )
+    
+    @web.expose
+    @web.require_login("use Galaxy visualizations")
+    def list( self, trans, *args, **kwargs ):
+        # Handle operation
+        if 'operation' in kwargs and 'id' in kwargs:
+            session = trans.sa_session
+            operation = kwargs['operation'].lower()
+            ids = util.listify( kwargs['id'] )
+            for id in ids:
+                item = session.query( model.Visualization ).get( trans.security.decode_id( id ) )
+                if operation == "delete":
+                    item.deleted = True
+                if operation == "share or publish":
+                    return self.sharing( trans, **kwargs )
+            session.flush()
+            
+        # Build list of visualizations shared with user.
+        shared_by_others = trans.sa_session \
+            .query( model.VisualizationUserShareAssociation ) \
+            .filter_by( user=trans.get_user() ) \
+            .join( model.Visualization.table ) \
+            .filter( model.Visualization.deleted == False ) \
+            .order_by( desc( model.Visualization.update_time ) ) \
+            .all()
+        
+        return trans.fill_template( "visualization/list.mako", grid=self._user_list_grid( trans, *args, **kwargs ), shared_by_others=shared_by_others )
+        
+    @web.expose
+    @web.require_login( "modify Galaxy visualizations" )
+    def set_slug_async( self, trans, id, new_slug ):
+        """ Set item slug asynchronously. """
+        visualization = self.get_visualization( trans, id )
+        if visualization:
+            visualization.slug = new_slug
+            trans.sa_session.flush()
+            return visualization.slug
+
+    @web.expose
+    @web.require_login( "share Galaxy visualizations" )
+    def sharing( self, trans, id, **kwargs ):
+        """ Handle visualization sharing. """
+
+        # Get session and visualization.
+        session = trans.sa_session
+        visualization = trans.sa_session.query( model.Visualization ).get( trans.security.decode_id( id ) )
+
+        # Do operation on visualization.
+        if 'make_accessible_via_link' in kwargs:
+            self._make_item_accessible( trans.sa_session, visualization )
+        elif 'make_accessible_and_publish' in kwargs:
+            self._make_item_accessible( trans.sa_session, visualization )
+            visualization.published = True
+        elif 'publish' in kwargs:
+            visualization.published = True
+        elif 'disable_link_access' in kwargs:
+            visualization.importable = False
+        elif 'unpublish' in kwargs:
+            visualization.published = False
+        elif 'disable_link_access_and_unpublish' in kwargs:
+            visualization.importable = visualization.published = False
+        elif 'unshare_user' in kwargs:
+            user = session.query( model.User ).get( trans.security.decode_id( kwargs['unshare_user' ] ) )
+            if not user:
+                error( "User not found for provided id" )
+            association = session.query( model.VisualizationUserShareAssociation ) \
+                                 .filter_by( user=user, visualization=visualization ).one()
+            session.delete( association )
+
+        session.flush()
+
+        return trans.fill_template( "/sharing_base.mako", item=visualization )
+
+    @web.expose
+    @web.require_login( "share Galaxy visualizations" )
+    def share( self, trans, id=None, email="", **kwd ):
+        """ Handle sharing a visualization with a particular user. """
+        msg = mtype = None
+        visualization = trans.sa_session.query( model.Visualization ).get( trans.security.decode_id( id ) )
+        if email:
+            other = trans.sa_session.query( model.User ) \
+                                    .filter( and_( model.User.table.c.email==email,
+                                                   model.User.table.c.deleted==False ) ) \
+                                    .first()
+            if not other:
+                mtype = "error"
+                msg = ( "User '%s' does not exist" % email )
+            elif other == trans.get_user():
+                mtype = "error"
+                msg = ( "You cannot share a visualization with yourself" )
+            elif trans.sa_session.query( model.VisualizationUserShareAssociation ) \
+                    .filter_by( user=other, visualization=visualization ).count() > 0:
+                mtype = "error"
+                msg = ( "Visualization already shared with '%s'" % email )
+            else:
+                share = model.VisualizationUserShareAssociation()
+                share.visualization = visualization
+                share.user = other
+                session = trans.sa_session
+                session.add( share )
+                self.create_item_slug( session, visualization )
+                session.flush()
+                trans.set_message( "Visualization '%s' shared with user '%s'" % ( visualization.title, other.email ) )
+                return trans.response.send_redirect( url_for( action='sharing', id=id ) )
+        return trans.fill_template( "/share_base.mako",
+                                    message = msg,
+                                    messagetype = mtype,
+                                    item=visualization,
+                                    email=email )
+        
+
+    @web.expose
+    def display_by_username_and_slug( self, trans, username, slug ):
+        """ Display visualization based on a username and slug. """
+
+        # Get visualization.
+        session = trans.sa_session
+        user = session.query( model.User ).filter_by( username=username ).first()
+        visualization = trans.sa_session.query( model.Visualization ).filter_by( user=user, slug=slug, deleted=False ).first()
+        if visualization is None:
+            raise web.httpexceptions.HTTPNotFound()
+        # Security check raises error if user cannot access visualization.
+        self.security_check( trans.get_user(), visualization, False, True)    
+        return trans.fill_template_mako( "visualization/display.mako", item=visualization, item_data=None, content_only=True )
+        
+    @web.expose
+    @web.json
+    @web.require_login( "get item name and link" )
+    def get_name_and_link_async( self, trans, id=None ):
+        """ Returns visualization's name and link. """
+        visualization = self.get_visualization( trans, id )
+
+        if self.create_item_slug( trans.sa_session, visualization ):
+            trans.sa_session.flush()
+        return_dict = { "name" : visualization.title, "link" : url_for( action="display_by_username_and_slug", username=visualization.user.username, slug=visualization.slug ) }
+        return return_dict
+
+    @web.expose
+    @web.require_login("get item content asynchronously")
+    def get_item_content_async( self, trans, id ):
+        """ Returns item content in HTML format. """
+        pass
+        
+    @web.expose
+    @web.require_login( "create visualizations" )
+    def create( self, trans, visualization_title="", visualization_slug="", visualization_annotation="" ):
+        """
+        Create a new visualization
+        """
+        user = trans.get_user()
+        visualization_title_err = visualization_slug_err = visualization_annotation_err = ""
+        if trans.request.method == "POST":
+            if not visualization_title:
+                visualization_title_err = "visualization name is required"
+            elif not visualization_slug:
+                visualization_slug_err = "visualization id is required"
+            elif not VALID_SLUG_RE.match( visualization_slug ):
+                visualization_slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
+            elif trans.sa_session.query( model.Visualization ).filter_by( user=user, slug=visualization_slug, deleted=False ).first():
+                visualization_slug_err = "visualization id must be unique"
+            else:
+                # Create the new stored visualization
+                visualization = model.Visualization()
+                visualization.title = visualization_title
+                visualization.slug = visualization_slug
+                visualization_annotation = sanitize_html( visualization_annotation, 'utf-8', 'text/html' )
+                self.add_item_annotation( trans, visualization, visualization_annotation )
+                visualization.user = user
+                # And the first (empty) visualization revision
+                visualization_revision = model.VisualizationRevision()
+                visualization_revision.title = visualization_title
+                visualization_revision.visualization = visualization
+                visualization.latest_revision = visualization_revision
+                visualization_revision.content = ""
+                # Persist
+                session = trans.sa_session
+                session.add( visualization )
+                session.flush()
+                # Display the management visualization
+                ## trans.set_message( "Visualization '%s' created" % visualization.title )
+                return trans.response.send_redirect( web.url_for( action='list' ) )
+        return trans.show_form( 
+            web.FormBuilder( web.url_for(), "Create new visualization", submit_text="Submit" )
+                .add_text( "visualization_title", "Visualization title", value=visualization_title, error=visualization_title_err )
+                .add_text( "visualization_slug", "Visualization identifier", value=visualization_slug, error=visualization_slug_err,
+                           help="""A unique identifier that will be used for
+                                public links to this visualization. A default is generated
+                                from the visualization title, but can be edited. This field
+                                must contain only lowercase letters, numbers, and
+                                the '-' character.""" )
+                .add_text( "visualization_annotation", "Visualization annotation", value=visualization_annotation, error=visualization_annotation_err,
+                            help="A description of the visualization; annotation is shown alongside published visualizations."),
+                template="visualization/create.mako" )
+        
+    @web.expose
+    @web.require_login( "edit visualizations" )
+    def edit( self, trans, id, visualization_title="", visualization_slug="", visualization_annotation="" ):
+        """
+        Edit a visualization's attributes.
+        """
+        encoded_id = id
+        id = trans.security.decode_id( id )
+        session = trans.sa_session
+        visualization = session.query( model.Visualization ).get( id )
+        user = trans.user
+        assert visualization.user == user
+        visualization_title_err = visualization_slug_err = visualization_annotation_err = ""
+        if trans.request.method == "POST":
+            if not visualization_title:
+                visualization_title_err = "Visualization name is required"
+            elif not visualization_slug:
+                visualization_slug_err = "Visualization id is required"
+            elif not VALID_SLUG_RE.match( visualization_slug ):
+                visualization_slug_err = "Visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
+            elif visualization_slug != visualization.slug and trans.sa_session.query( model.Visualization ).filter_by( user=user, slug=visualization_slug, deleted=False ).first():
+                visualization_slug_err = "Visualization id must be unique"
+            elif not visualization_annotation:
+                visualization_annotation_err = "Visualization annotation is required"
+            else:
+                visualization.title = visualization_title
+                visualization.slug = visualization_slug
+                visualization_annotation = sanitize_html( visualization_annotation, 'utf-8', 'text/html' )
+                self.add_item_annotation( trans, visualization, visualization_annotation )
+                session.flush()
+                # Redirect to visualization list.
+                return trans.response.send_redirect( web.url_for( action='list' ) )
+        else:
+            visualization_title = visualization.title
+            # Create slug if it's not already set.
+            if visualization.slug is None:
+                self.create_item_slug( trans.sa_session, visualization )
+            visualization_slug = visualization.slug
+            visualization_annotation = self.get_item_annotation_str( trans.sa_session, trans.get_user(), visualization )
+            if not visualization_annotation:
+                visualization_annotation = ""
+        return trans.show_form( 
+            web.FormBuilder( web.url_for( id=encoded_id ), "Edit visualization attributes", submit_text="Submit" )
+                .add_text( "visualization_title", "Visualization title", value=visualization_title, error=visualization_title_err )
+                .add_text( "visualization_slug", "Visualization identifier", value=visualization_slug, error=visualization_slug_err,
+                           help="""A unique identifier that will be used for
+                                public links to this visualization. A default is generated
+                                from the visualization title, but can be edited. This field
+                                must contain only lowercase letters, numbers, and
+                                the '-' character.""" )
+                .add_text( "visualization_annotation", "Visualization annotation", value=visualization_annotation, error=visualization_annotation_err,
+                            help="A description of the visualization; annotation is shown alongside published visualizations."),
+            template="visualization/create.mako" )
     
     # @web.expose
     # @web.require_login()
@@ -42,6 +332,6 @@
     #    # Build grid
     #    grid = self.list( trans, *args, **kwargs )
     #    # Render grid wrapped in panels
-    #    return trans.fill_template( "page/index.mako", grid=grid )
+    #    return trans.fill_template( "visualization/index.mako", grid=grid )
     
     
\ No newline at end of file
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py	Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/controllers/workflow.py	Fri Mar 12 09:37:22 2010 -0500
@@ -123,7 +123,7 @@
         # Legacy issue: all shared workflows must have slugs.
         slug_set = False
         for workflow_assoc in shared_by_others:
-            slug_set = self.set_item_slug( trans.sa_session, workflow_assoc.stored_workflow )
+            slug_set = self.create_item_slug( trans.sa_session, workflow_assoc.stored_workflow )
         if slug_set:
             trans.sa_session.flush()
 
@@ -224,7 +224,7 @@
                 share.user = other
                 session = trans.sa_session
                 session.add( share )
-                self.set_item_slug( session, stored )
+                self.create_item_slug( session, stored )
                 session.flush()
                 trans.set_message( "Workflow '%s' shared with user '%s'" % ( stored.name, other.email ) )
                 return trans.response.send_redirect( url_for( controller='workflow', action='sharing', id=id ) )
@@ -401,7 +401,7 @@
         """ Returns workflow's name and link. """
         stored = self.get_stored_workflow( trans, id )
 
-        if self.set_item_slug( trans.sa_session, stored ):
+        if self.create_item_slug( trans.sa_session, stored ):
             trans.sa_session.flush()
         return_dict = { "name" : stored.name, "link" : url_for( action="display_by_username_and_slug", username=stored.user.username, slug=stored.slug ) }
         return return_dict
diff -r 582fd1777763 -r c3eccab29814 templates/base_panels.mako
--- a/templates/base_panels.mako	Thu Mar 11 15:54:13 2010 -0500
+++ b/templates/base_panels.mako	Fri Mar 12 09:37:22 2010 -0500
@@ -207,7 +207,7 @@
         <ul>
             <li><a href="${h.url_for( controller='/tracks', action='index' )}">Build track browser</a></li>
             <li><hr style="color: inherit; background-color: gray"/></li>
-	    <li><a href="${h.url_for( controller='/visualization', action='index' )}">Stored visualizations</a></li>
+	    <li><a href="${h.url_for( controller='/visualization', action='list' )}">Stored visualizations</a></li>
         </ul>
         </div>
     </td>
diff -r 582fd1777763 -r c3eccab29814 templates/display_common.mako
--- a/templates/display_common.mako	Thu Mar 11 15:54:13 2010 -0500
+++ b/templates/display_common.mako	Fri Mar 12 09:37:22 2010 -0500
@@ -18,8 +18,11 @@
 
 <%def name="get_item_name( item )">
     <% 
+        # Start with exceptions, end with default.
 		if type( item ) is model.Page:
 			return item.title
+		elif type( item ) is model.Visualization:
+		    return item.title
 		if hasattr( item, 'get_display_name'):
 		    return item.get_display_name()
 		return item.name
@@ -29,7 +32,7 @@
 ## Get plural display name for a class.
 <%def name="get_class_plural_display_name( a_class )">
 <%
-    ## Start with exceptions, end with default.
+    # Start with exceptions, end with default.
     if a_class is model.History:
         return "Histories"
     elif a_class is model.FormDefinitionCurrent:
@@ -89,6 +92,8 @@
             return "dataset"
         elif isinstance( item, model.Page ):
             return "page"
+        elif isinstance( item, model.Visualization ):
+            return "visualization"
     %>
 </%def>
 
diff -r 582fd1777763 -r c3eccab29814 templates/page/create.mako
--- a/templates/page/create.mako	Thu Mar 11 15:54:13 2010 -0500
+++ b/templates/page/create.mako	Fri Mar 12 09:37:22 2010 -0500
@@ -8,8 +8,7 @@
     var page_slug = $("input[name=page_slug]");
     page_name.keyup(function(){
         page_slug.val( $(this).val().replace(/\s+/g,'-').replace(/[^a-zA-Z0-9\-]/g,'').toLowerCase() )
-    });
-    
+    });    
 })
 </script>
 </%def>
\ No newline at end of file
diff -r 582fd1777763 -r c3eccab29814 templates/panels.mako
--- a/templates/panels.mako	Thu Mar 11 15:54:13 2010 -0500
+++ b/templates/panels.mako	Fri Mar 12 09:37:22 2010 -0500
@@ -10,11 +10,9 @@
 </%def>
 
 <%def name="center_panel()">
-    
     <div style="overflow: auto; height: 100%;">
         <div style="padding: 10px">
             ${grid}
         </div>
     </div>
-
 </%def>
diff -r 582fd1777763 -r c3eccab29814 templates/visualization/create.mako
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/visualization/create.mako	Fri Mar 12 09:37:22 2010 -0500
@@ -0,0 +1,14 @@
+<%inherit file="/form.mako"/>
+
+<%def name="javascripts()">
+${parent.javascripts()}
+<script type="text/javascript">
+$(function(){
+    var visualization_name = $("input[name=visualization_title]");
+    var visualization_slug = $("input[name=visualization_slug]");
+    visualization_name.keyup(function(){
+        visualization_slug.val( $(this).val().replace(/\s+/g,'-').replace(/[^a-zA-Z0-9\-]/g,'').toLowerCase() )
+    });    
+})
+</script>
+</%def>
\ No newline at end of file
diff -r 582fd1777763 -r c3eccab29814 templates/visualization/display.mako
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/visualization/display.mako	Fri Mar 12 09:37:22 2010 -0500
@@ -0,0 +1,19 @@
+<%inherit file="/display_base.mako"/>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ## Need visualization JS.
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ## Need visualization CSS.
+</%def>
+
+<%def name="render_item_links( visualization )">
+    ## TODO
+</%def>
+
+<%def name="render_item( visualization, visualization_data )">
+    ## TODO
+</%def>
\ No newline at end of file
diff -r 582fd1777763 -r c3eccab29814 templates/visualization/list.mako
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/visualization/list.mako	Fri Mar 12 09:37:22 2010 -0500
@@ -0,0 +1,52 @@
+<%inherit file="/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="visualization"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="center_panel()">
+
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            ${grid}
+
+			<br><br>
+			<h2>Visualizations shared with you by others</h2>
+
+			%if shared_by_others:
+			    <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+			        <tr class="header">
+			            <th>Title</th>
+			            <th>Owner</th>
+			            <th></th>
+			        </tr>
+			        %for i, association in enumerate( shared_by_others ):
+			            <% visualization = association.visualization %>
+			            <tr>
+			                <td>
+			                    <a class="menubutton" id="shared-${i}-popup" href="${h.url_for( action='display_by_username_and_slug', username=visualization.user.username, slug=visualization.slug)}">${visualization.title}</a>
+			                </td>
+			                <td>${visualization.user.username}</td>
+			                <td>
+			                    <div popupmenu="shared-${i}-popup">
+									<a class="action-button" href="${h.url_for( action='display_by_username_and_slug', username=visualization.user.username, slug=visualization.slug)}" target="_top">View</a>
+			                    </div>
+			                </td>
+			            </tr>    
+			        %endfor
+			    </table>
+			%else:
+
+			    No visualizations have been shared with you.
+
+			%endif
+
+        </div>
+    </div>
+
+</%def>
diff -r 582fd1777763 -r c3eccab29814 templates/visualization/list_published.mako
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/visualization/list_published.mako	Fri Mar 12 09:37:22 2010 -0500
@@ -0,0 +1,36 @@
+<%inherit file="/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="page"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="title()">
+    Galaxy :: Published Visualizations
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+        .grid td {
+            min-width: 100px;
+        }
+    </style>
+</%def>
+
+<%def name="center_panel()">
+
+    ## <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${h.url_for( controller="page", action="list" )}"> </iframe>
+
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            ${unicode( grid, 'utf-8' )}
+        </div>
+    </div>
+
+
+</%def>
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/582fd1777763
changeset: 3522:582fd1777763
user:      Kelly Vincent <kpvincent(a)bx.psu.edu>
date:      Thu Mar 11 15:54:13 2010 -0500
description:
Modified BWA output files so last two tests will pass
diffstat:
 test-data/bwa_wrapper_out2.sam |  2 +-
 test-data/bwa_wrapper_out3.sam |  4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
diffs (14 lines):
diff -r f13d85256124 -r 582fd1777763 test-data/bwa_wrapper_out2.sam
--- a/test-data/bwa_wrapper_out2.sam	Thu Mar 11 14:35:36 2010 -0500
+++ b/test-data/bwa_wrapper_out2.sam	Thu Mar 11 15:54:13 2010 -0500
@@ -1,1 +1,1 @@
-081017-and-081020:1:1:1715:1759	16	phiX	322	25	36M	*	0	0	GATATTTTAAAGGAGCGTGGATTACTATCTGAGTCC	B&&I13A$G$*%$IIIIIII9(.+5$IIIIIII#II	XT:A:U	NM:i:2	X0:i:1	XM:i:2	XO:i:0	XG:i:0	MD:Z:2C8A24
+081017-and-081020:1:1:1715:1759	16	phiX	322	25	36M	*	0	0	GATATTTTAAAGGAGCGTGGATTACTATCTGAGTCC	B&&I13A$G$*%$IIIIIII9(.+5$IIIIIII#II	XT:A:U	NM:i:2	X0:i:1	X1:i:0	XM:i:2	XO:i:0	XG:i:0	MD:Z:2C8A24
diff -r f13d85256124 -r 582fd1777763 test-data/bwa_wrapper_out3.sam
--- a/test-data/bwa_wrapper_out3.sam	Thu Mar 11 14:35:36 2010 -0500
+++ b/test-data/bwa_wrapper_out3.sam	Thu Mar 11 15:54:13 2010 -0500
@@ -1,2 +1,2 @@
-081017-and-081020:1:1:1715:1759	113	PHIX174	322	25	18M	=	340	18	GATATTTTAAAGGAGCGT	B&&I13A$G$*%$IIIII	XT:A:U	NM:i:2	SM:i:25	AM:i:25	X0:i:1	XM:i:2	XO:i:0	XG:i:0	MD:Z:2C8A6
-081017-and-081020:1:1:1715:1759	177	PHIX174	340	37	18M	=	322	-18	GGATTACTATCTGAGTCC	II9(.+5$IIIIIII#II	XT:A:U	NM:i:0	SM:i:37	AM:i:25	X0:i:1	XM:i:0	XO:i:0	XG:i:0	MD:Z:18
+081017-and-081020:1:1:1715:1759	113	PHIX174	322	25	18M	=	340	18	GATATTTTAAAGGAGCGT	B&&I13A$G$*%$IIIII	XT:A:U	NM:i:2	SM:i:25	AM:i:25	X0:i:1	X1:i:0	XM:i:2	XO:i:0	XG:i:0	MD:Z:2C8A6
+081017-and-081020:1:1:1715:1759	177	PHIX174	340	37	18M	=	322	-18	GGATTACTATCTGAGTCC	II9(.+5$IIIIIII#II	XT:A:U	NM:i:0	SM:i:37	AM:i:25	X0:i:1	X1:i:0	XM:i:0	XO:i:0	XG:i:0	MD:Z:18
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/f13d85256124
changeset: 3521:f13d85256124
user:      Dan Blankenberg <dan(a)bx.psu.edu>
date:      Thu Mar 11 14:35:36 2010 -0500
description:
Display Application framework enhancements.
    Add the ability for display applications to be populated dynamically based upon the content of (e.g. tabular) files.
    Display application links can be filtered by various attributes, including e.g. dataset dbkey matching a field in a file or an attribute matching a Galaxy application configuration setting.
    Param and Data URL values can now be generated dynamically, allowing e.g. unique base filenames to be created and used.
See updated xml configurations in /display_applications/ for examples of syntax.
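For readers skimming the archive, a rough standalone sketch (not Galaxy code) of the dynamic-links idea follows: each non-comment line of a tab-separated sites file becomes one display link, with selected columns exposed as named parameters. The file path, column layout, and the build_dynamic_links name are illustrative only; the real implementation is the DynamicDisplayApplicationBuilder class and the XML syntax shown in the diff below.

    # Sketch: turn each usable line of a tab-separated sites file into one
    # link definition, similar in spirit to DynamicDisplayApplicationBuilder.
    def build_dynamic_links(sites_file, id_col=0, name_col=0,
                            dynamic_params=None, skip_startswith="#"):
        dynamic_params = dynamic_params or {}  # e.g. {"ucsc_link": 1, "builds": 2}
        links = []
        for line in open(sites_file):
            if skip_startswith and line.startswith(skip_startswith):
                continue
            fields = line.rstrip("\r\n").split("\t")
            needed = max([id_col, name_col] + list(dynamic_params.values()))
            if len(fields) <= needed:
                continue  # not enough columns on this line
            link = {"id": fields[id_col], "name": fields[name_col]}
            for key, col in dynamic_params.items():
                link[key] = fields[col]
            links.append(link)
        return links

    # Hypothetical usage, assuming the ucsc_build_sites.txt column layout:
    # links = build_dynamic_links("tool-data/shared/ucsc/ucsc_build_sites.txt",
    #                             dynamic_params={"ucsc_link": 1, "builds": 2})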
diffstat:
 datatypes_conf.xml.sample                                |   9 +-
 display_applications/ucsc/bam.xml                        |  34 +++++-
 display_applications/ucsc/interval_as_bed.xml            |  63 +++++++++++-
 lib/galaxy/datatypes/data.py                             |   9 +
 lib/galaxy/datatypes/display_applications/application.py |  78 ++++++++++++++-
 lib/galaxy/datatypes/display_applications/parameters.py  |  13 +-
 lib/galaxy/datatypes/registry.py                         |  34 ++++--
 lib/galaxy/model/__init__.py                             |   3 +
 templates/root/history_common.mako                       |   2 +-
 tool-data/shared/bx/bx_build_sites.txt                   |   2 +-
 10 files changed, 205 insertions(+), 42 deletions(-)
diffs (468 lines):
diff -r 262b16c8e277 -r f13d85256124 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample	Thu Mar 11 13:51:53 2010 -0500
+++ b/datatypes_conf.xml.sample	Thu Mar 11 14:35:36 2010 -0500
@@ -15,10 +15,7 @@
             <!-- <display file="ucsc/interval_as_bed.xml" /> -->
             <display file="genetrack.xml" />
         </datatype>
-        <datatype extension="bedstrict" type="galaxy.datatypes.interval:BedStrict">
-            <display file="ucsc/interval_as_bed.xml" />
-            <display file="genetrack.xml" />
-        </datatype>
+        <datatype extension="bedstrict" type="galaxy.datatypes.interval:BedStrict" />
         <datatype extension="binseq.zip" type="galaxy.datatypes.binary:Binseq" mimetype="application/zip" display_in_upload="true"/>
         <datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true">
             <!-- no converters yet -->
@@ -51,8 +48,8 @@
             <converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
             <converter file="interval_to_bedstrict_converter.xml" target_datatype="bedstrict"/>
             <indexer file="interval_awk.xml" />
-            <!-- <display file="ucsc/interval_as_bed.xml" /> -->
-            <display file="genetrack.xml" />
+            <!-- <display file="ucsc/interval_as_bed.xml" inherit="True" /> -->
+            <display file="genetrack.xml" inherit="True"/>
         </datatype>
         <datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/>
         <datatype extension="laj" type="galaxy.datatypes.images:Laj"/>
diff -r 262b16c8e277 -r f13d85256124 display_applications/ucsc/bam.xml
--- a/display_applications/ucsc/bam.xml	Thu Mar 11 13:51:53 2010 -0500
+++ b/display_applications/ucsc/bam.xml	Thu Mar 11 14:35:36 2010 -0500
@@ -1,8 +1,32 @@
 <display id="ucsc_bam" version="1.0.0" name="display at UCSC">
-    <link id="main" name="main">
-        <url>http://genome.ucsc.edu/cgi-bin/hgTracks?db=${qp($bam_file.dbkey)}&hgt.c…</url>
-        <param type="data" name="bam_file" url="galaxy.bam" strip_https="True" />
-        <param type="data" name="bai_file" url="galaxy.bam.bai" metadata="bam_index" strip_https="True" /><!-- UCSC expects index file to exist as bam_file_name.bai -->
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="ucsc_link" value="1"/>
+        <dynamic_param name="builds" value="2" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.config.ucsc_display_sites}</filter>
+        <filter>${dataset.dbkey in $builds}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${ucsc_link}db=${qp($bam_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
+        <param type="data" name="bam_file" url="galaxy_${DATASET_HASH}.bam" strip_https="True" />
+        <param type="data" name="bai_file" url="galaxy_${DATASET_HASH}.bam.bai" metadata="bam_index" strip_https="True" /><!-- UCSC expects index file to exist as bam_file_name.bai -->
         <param type="template" name="track" viewable="True" strip_https="True">track type=bam name="${bam_file.name}" bigDataUrl=${bam_file.url} db=${bam_file.dbkey}</param>
-    </link>
+    </dynamic_links>
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links from_file="tool-data/shared/bx/bx_build_sites.txt" skip_startswith="#" id="3" name="3">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="ucsc_link" value="1"/>
+        <dynamic_param name="builds" value="2" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.config.bx_display_sites}</filter>
+        <filter>${dataset.dbkey in $builds}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${ucsc_link}db=${qp($bam_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
+        <param type="data" name="bam_file" url="galaxy_${DATASET_HASH}.bam" strip_https="True" />
+        <param type="data" name="bai_file" url="galaxy_${DATASET_HASH}.bam.bai" metadata="bam_index" strip_https="True" /><!-- UCSC expects index file to exist as bam_file_name.bai -->
+        <param type="template" name="track" viewable="True" strip_https="True">track type=bam name="${bam_file.name}" bigDataUrl=${bam_file.url} db=${bam_file.dbkey}</param>
+    </dynamic_links>
 </display>
diff -r 262b16c8e277 -r f13d85256124 display_applications/ucsc/interval_as_bed.xml
--- a/display_applications/ucsc/interval_as_bed.xml	Thu Mar 11 13:51:53 2010 -0500
+++ b/display_applications/ucsc/interval_as_bed.xml	Thu Mar 11 14:35:36 2010 -0500
@@ -1,7 +1,16 @@
-<display id="ucsc_interval_as_bed" version="1.0.0" name="display at UCSC">
-    <link id="main" name="main">
-        <url>http://genome.ucsc.edu/cgi-bin/hgTracks?db=${qp($bed_file.dbkey)}&posit…</url>
-        <param type="data" name="bed_file" url="galaxy.bed" format="bedstrict"/> <!-- Galaxy allows BED files to contain non-standard fields beyond the first 3 columns, UCSC does not: force use of converter which will make strict BED6+ file -->
+<display id="ucsc_interval_as_bed" version="1.0.0" name="display at UCSC" inherit="True">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="ucsc_link" value="1"/>
+        <dynamic_param name="builds" value="2" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.config.ucsc_display_sites}</filter>
+        <filter>${dataset.dbkey in $builds}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${ucsc_link}db=${qp($bed_file.dbkey)}&position=${position.qp}&hgt.customText=${bed_file.qp}</url>
+        <param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict"/> <!-- Galaxy allows BED files to contain non-standard fields beyond the first 3 columns, UCSC does not: force use of converter which will make strict BED6+ file -->
         <param type="template" name="position" strip="True" >
 #set line_count = 0
 #set chrom = None
@@ -33,5 +42,49 @@
 :-
 #end if
         </param>
-    </link>
+    </dynamic_links>
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links from_file="tool-data/shared/bx/bx_build_sites.txt" skip_startswith="#" id="3" name="3">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="ucsc_link" value="1"/>
+        <dynamic_param name="builds" value="2" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.config.bx_display_sites}</filter>
+        <filter>${dataset.dbkey in $builds}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${ucsc_link}db=${qp($bed_file.dbkey)}&position=${position.qp}&hgt.customText=${bed_file.qp}</url>
+        <param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict"/> <!-- Galaxy allows BED files to contain non-standard fields beyond the first 3 columns, UCSC does not: force use of converter which will make strict BED6+ file -->
+        <param type="template" name="position" strip="True" >
+#set line_count = 0
+#set chrom = None
+#set start = float( 'inf' )
+#set end = 0
+#for $line in open( $bed_file.file_name ):
+    #if $line_count > 10: ##10 max lines to check for view port
+        #break
+    #end if
+    #if not $line.startswith( "#" ):
+        #set $fields = $line.split( "\t" )
+        #try:
+            #if len( $fields ) >= max( $bed_file.metadata.startCol, $bed_file.metadata.endCol, $bed_file.metadata.chromCol ):
+                #if $chrom is None or $fields[ $bed_file.metadata.chromCol - 1 ] == $chrom:
+                    #set chrom = $fields[ $bed_file.metadata.chromCol - 1 ]
+                    #set start = min( $start, int( $fields[ $bed_file.metadata.startCol - 1 ] ) )
+                    #set end = max( $end, int( $fields[ $bed_file.metadata.endCol - 1 ] ) )
+                #end if
+            #end if
+        #except:
+            #pass
+        #end try
+    #end if
+    #set line_count += 1
+#end for
+#if $chrom is not None:
+${chrom}:${start}-${end + 1}
+#else:
+:-
+#end if
+        </param>
+    </dynamic_links>
 </display>
diff -r 262b16c8e277 -r f13d85256124 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py	Thu Mar 11 13:51:53 2010 -0500
+++ b/lib/galaxy/datatypes/data.py	Thu Mar 11 14:35:36 2010 -0500
@@ -205,6 +205,15 @@
         """New style display applications"""
         assert display_application.id not in self.display_applications, 'Attempted to add a display application twice'
         self.display_applications[ display_application.id ] = display_application
+    def get_display_application( self, key, default = None ):
+        return self.display_applications.get( key, default )
+    def get_display_applications_by_dataset( self, dataset, trans ):
+        rval = odict()
+        for key, value in self.display_applications.iteritems():
+            value = value.filter_by_dataset( dataset, trans )
+            if value.links:
+                rval[key] = value
+        return rval
     def get_display_types(self):
         """Returns display types available"""
         return self.supported_display_apps.keys()
diff -r 262b16c8e277 -r f13d85256124 lib/galaxy/datatypes/display_applications/application.py
--- a/lib/galaxy/datatypes/display_applications/application.py	Thu Mar 11 13:51:53 2010 -0500
+++ b/lib/galaxy/datatypes/display_applications/application.py	Thu Mar 11 14:35:36 2010 -0500
@@ -1,24 +1,27 @@
 #Contains objects for using external display applications
-from galaxy.util import parse_xml
+from galaxy.util import parse_xml, string_as_bool
 from galaxy.util.odict import odict
 from galaxy.util.template import fill_template
 from galaxy.web import url_for
 from parameters import DisplayApplicationParameter, DEFAULT_DATASET_NAME
 from urllib import quote_plus
 from util import encode_dataset_user
+from copy import deepcopy
 
 #Any basic functions that we want to provide as a basic part of parameter dict should be added to this dict
 BASE_PARAMS = { 'qp': quote_plus, 'url_for':url_for } #url_for has route memory...
 
 class DisplayApplicationLink( object ):
     @classmethod
-    def from_elem( cls, elem, display_application ):
+    def from_elem( cls, elem, display_application, other_values = None ):
         rval = DisplayApplicationLink( display_application )
         rval.id = elem.get( 'id', None )
         assert rval.id, 'Link elements require a id.'
         rval.name = elem.get( 'name', rval.id )
         rval.url = elem.find( 'url' )
         assert rval.url is not None, 'A url element must be provided for link elements.'
+        rval.other_values = other_values
+        rval.filters = elem.findall( 'filter' )
         for param_elem in elem.findall( 'param' ):
             param = DisplayApplicationParameter.from_elem( param_elem, rval )
             assert param, 'Unable to load parameter from element: %s' % param_elem
@@ -36,13 +39,19 @@
         dataset_hash, user_hash = encode_dataset_user( trans, data, None )
         return url_for( controller = '/dataset', action = "display_application", dataset_id = dataset_hash, user_id = user_hash, app_name = self.display_application.id, link_name = self.id, app_action = None )
     def get_inital_values( self, data, trans ):
-        rval = odict( { 'BASE_URL': trans.request.base, 'APP': trans.app } ) #trans automatically appears as a response, need to add properties of trans that we want here
+        if self.other_values:
+            rval = odict( self.other_values )
+        else:
+            rval = odict()
+        rval.update( { 'BASE_URL': trans.request.base, 'APP': trans.app } ) #trans automatically appears as a response, need to add properties of trans that we want here
         for key, value in  BASE_PARAMS.iteritems(): #add helper functions/variables
             rval[ key ] = value
         rval[ DEFAULT_DATASET_NAME ] = data #always have the display dataset name available
         return rval
     def build_parameter_dict( self, data, dataset_hash, user_hash, trans ):
         other_values = self.get_inital_values( data, trans )
+        other_values[ 'DATASET_HASH' ] = dataset_hash
+        other_values[ 'USER_HASH' ] = user_hash
         for name, param in self.parameters.iteritems():
             assert name not in other_values, "The display parameter '%s' has been defined more than once." % name
             if param.ready( other_values ):
@@ -51,6 +60,51 @@
                 other_values[ name ] = None
                 return False, other_values #need to stop here, next params may need this value
         return True, other_values #we built other_values, lets provide it as well, or else we will likely regenerate it in the next step
+    def filter_by_dataset( self, data, trans ):
+        context = self.get_inital_values( data, trans )
+        for filter_elem in self.filters:
+            if fill_template( filter_elem.text, context = context ) != filter_elem.get( 'value', 'True' ):
+                return False
+        return True
+
+class DynamicDisplayApplicationBuilder( object ):
+    @classmethod
+    def __init__( self, elem, display_application ):
+        rval = []
+        filename = elem.get( 'from_file', None )
+        assert filename is not None, 'Filename and id attributes required for dynamic_links'
+        skip_startswith = elem.get( 'skip_startswith', None )
+        separator = elem.get( 'separator', '\t' )
+        id_col = int( elem.get( 'id', None ) )
+        name_col = int( elem.get( 'name', id_col ) )
+        dynamic_params = {}
+        max_col = max( id_col, name_col )
+        for dynamic_param in elem.findall( 'dynamic_param' ):
+            name = dynamic_param.get( 'name' )
+            value = int( dynamic_param.get( 'value' ) )
+            split = string_as_bool( dynamic_param.get( 'split', False ) )
+            param_separator =  dynamic_param.get( 'separator', ',' )
+            max_col = max( max_col, value )
+            dynamic_params[name] = { 'column': value, 'split': split, 'separator': param_separator }
+        for line in open( filename ):
+            if not skip_startswith or not line.startswith( skip_startswith ):
+                line = line.rstrip( '\n\r' )
+                fields = line.split( separator )
+                if len( fields ) >= max_col:
+                    new_elem = deepcopy( elem )
+                    new_elem.set( 'id', fields[id_col] )
+                    new_elem.set( 'name', fields[name_col] )
+                    dynamic_values = {}
+                    for key, attributes in dynamic_params.iteritems():
+                        value = fields[ attributes[ 'column' ] ]
+                        if attributes['split']:
+                            value = value.split( attributes['separator'] )
+                        dynamic_values[key] = value
+                    #now populate
+                    rval.append( DisplayApplicationLink.from_elem( new_elem, display_application, other_values = dynamic_values ) )
+        self.links = rval
+    def __iter__( self ):
+        return iter( self.links )
 
 class PopulatedDisplayApplicationLink( object ):
     def __init__( self, display_application_link, data, dataset_hash, user_hash, trans ):
@@ -84,9 +138,11 @@
     def display_url( self ):
         assert self.display_ready(), 'Display is not yet ready, cannot generate display link'
         return fill_template( self.link.url.text, context = self.parameters )
-    def get_param_name_by_url( self, name ):
-        assert name in self.link.url_param_name_map, "Unknown URL parameter name provided: %s" % name
-        return self.link.url_param_name_map[ name ]
+    def get_param_name_by_url( self, url ):
+        for name, parameter in self.link.parameters.iteritems():
+            if parameter.build_url( self.parameters ) == url:
+                return name
+        raise ValueError( "Unknown URL parameter name provided: %s" % url )
 
 class DisplayApplication( object ):
     @classmethod
@@ -103,6 +159,9 @@
             link = DisplayApplicationLink.from_elem( link_elem, rval )
             if link:
                 rval.links[ link.id ] = link
+        for dynamic_links in elem.findall( 'dynamic_links' ):
+            for link in DynamicDisplayApplicationBuilder( dynamic_links, rval ):
+                rval.links[ link.id ] = link
         return rval
     def __init__( self, display_id, name, datatypes_registry, version = None ):
         self.id = display_id
@@ -115,4 +174,9 @@
     def get_link( self, link_name, data, dataset_hash, user_hash, trans ):
         #returns a link object with data knowledge to generate links
         return PopulatedDisplayApplicationLink( self.links[ link_name ], data, dataset_hash, user_hash, trans )
-    
+    def filter_by_dataset( self, data, trans ):
+        filtered = DisplayApplication( self.id, self.name, self.datatypes_registry, version = self.version )
+        for link_name, link_value in self.links.iteritems():
+            if link_value.filter_by_dataset( data, trans ):
+                filtered.links[link_name] = link_value
+        return filtered
diff -r 262b16c8e277 -r f13d85256124 lib/galaxy/datatypes/display_applications/parameters.py
--- a/lib/galaxy/datatypes/display_applications/parameters.py	Thu Mar 11 13:51:53 2010 -0500
+++ b/lib/galaxy/datatypes/display_applications/parameters.py	Thu Mar 11 14:35:36 2010 -0500
@@ -35,6 +35,8 @@
         return True
     def is_preparing( self, other_values ):
         return False
+    def build_url( self, other_values ):
+        return fill_template( self.url, context = other_values )
 
 class DisplayApplicationDataParameter( DisplayApplicationParameter ):
     """ Parameter that returns a file_name containing the requested content """
@@ -141,15 +143,16 @@
         self.trans = trans
         self._dataset_hash = dataset_hash
         self._user_hash = user_hash
+        self._url = self.parameter.build_url( self.other_values )
     def __str__( self ):
         return str( self.value )
     def mime_type( self ):
         if self.parameter.mime_type is not None:
             return self.parameter.mime_type
         if self.parameter.guess_mime_type:
-            mime, encoding = mimetypes.guess_type( self.parameter.url )
+            mime, encoding = mimetypes.guess_type( self._url )
             if not mime:
-                mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( self.parameter.url )[ -1 ], None ) 
+                mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( self._url )[ -1 ], None ) 
             if mime:
                 return mime
         return 'text/plain'
@@ -158,7 +161,7 @@
         base_url = self.trans.request.base
         if self.parameter.strip_https and base_url[ : 5].lower() == 'https':
             base_url = "http%s" % base_url[ 5: ]
-        return "%s%s" % ( base_url, url_for( controller = '/dataset', action = "display_application", dataset_id = self._dataset_hash, user_id = self._user_hash, app_name = self.parameter.link.display_application.id, link_name = self.parameter.link.id, app_action = self.action_name, action_param = self.parameter.url ) )
+        return "%s%s" % ( base_url, url_for( controller = '/dataset', action = "display_application", dataset_id = self._dataset_hash, user_id = self._user_hash, app_name = self.parameter.link.display_application.id, link_name = self.parameter.link.id, app_action = self.action_name, action_param = self._url ) )
     @property
     def action_name( self ):
         return self.ACTION_NAME
@@ -178,9 +181,9 @@
         if self.parameter.mime_type is not None:
             return self.parameter.mime_type
         if self.parameter.guess_mime_type:
-            mime, encoding = mimetypes.guess_type( self.parameter.url )
+            mime, encoding = mimetypes.guess_type( self._url )
             if not mime:
-                mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( self.parameter.url )[ -1 ], None ) 
+                mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( self._url )[ -1 ], None ) 
             if mime:
                 return mime
         return self.other_values[ DEFAULT_DATASET_NAME ].get_mime()
diff -r 262b16c8e277 -r f13d85256124 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py	Thu Mar 11 13:51:53 2010 -0500
+++ b/lib/galaxy/datatypes/registry.py	Thu Mar 11 14:35:36 2010 -0500
@@ -25,6 +25,7 @@
         self.sniff_order = []
         self.upload_file_formats = []
         self.display_applications = odict() #map a display application id to a display application
+        inherit_display_application_by_class = []
         if root_dir and config:
             # Parse datatypes_conf.xml
             tree = galaxy.util.parse_xml( config )
@@ -42,11 +43,11 @@
             for elem in registration.findall( 'datatype' ):
                 try:
                     extension = elem.get( 'extension', None ) 
-                    type = elem.get( 'type', None )
+                    dtype = elem.get( 'type', None )
                     mimetype = elem.get( 'mimetype', None )
                     display_in_upload = elem.get( 'display_in_upload', False )
-                    if extension and type:
-                        fields = type.split( ':' )
+                    if extension and dtype:
+                        fields = dtype.split( ':' )
                         datatype_module = fields[0]
                         datatype_class = fields[1]
                         fields = datatype_module.split( '.' )
@@ -85,27 +86,36 @@
                         for display_app in elem.findall( 'display' ):
                             display_file = display_app.get( 'file', None )
                             assert display_file is not None, "A file must be specified for a datatype display tag."
+                            inherit = galaxy.util.string_as_bool( display_app.get( 'inherit', 'False' ) )
                             display_app = DisplayApplication.from_file( os.path.join( self.display_applications_path, display_file ), self )
                             if display_app:
                                 if display_app.id in self.display_applications:
                                     #if we already loaded this display application, we'll use the first one again
                                     display_app = self.display_applications[ display_app.id ]
-                                self.log.debug( "Loaded display application '%s' for datatype '%s'" % ( display_app.id, extension ) )
+                                self.log.debug( "Loaded display application '%s' for datatype '%s', inherit=%s" % ( display_app.id, extension, inherit ) )
                                 self.display_applications[ display_app.id ] = display_app #Display app by id
                                 self.datatypes_by_extension[ extension ].add_display_application( display_app )
-                            
+                                if inherit and ( self.datatypes_by_extension[extension], display_app ) not in inherit_display_application_by_class:
+                                    #subclass inheritance will need to wait until all datatypes have been loaded
+                                    inherit_display_application_by_class.append( ( self.datatypes_by_extension[extension], display_app ) )
                 except Exception, e:
                     self.log.warning( 'Error loading datatype "%s", problem: %s' % ( extension, str( e ) ) )
+            # Handle display_application subclass inheritance here:
+            for ext, d_type1 in self.datatypes_by_extension.iteritems():
+                for d_type2, display_app in inherit_display_application_by_class:
+                    current_app = d_type1.get_display_application( display_app.id, None )
+                    if current_app is None and isinstance( d_type1, type( d_type2 ) ):
+                        d_type1.add_display_application( display_app )
             # Load datatype sniffers from the config
             sniff_order = []
             sniffers = root.find( 'sniffers' )
             for elem in sniffers.findall( 'sniffer' ):
-                type = elem.get( 'type', None )
-                if type:
-                    sniff_order.append( type )
-            for type in sniff_order:
+                dtype = elem.get( 'type', None )
+                if dtype:
+                    sniff_order.append( dtype )
+            for dtype in sniff_order:
                 try:
-                    fields = type.split( ":" )
+                    fields = dtype.split( ":" )
                     datatype_module = fields[0]
                     datatype_class = fields[1]
                     fields = datatype_module.split( "." )
@@ -120,9 +130,9 @@
                             break
                     if not included:
                         self.sniff_order.append( aclass )
-                        self.log.debug( 'Loaded sniffer for datatype: %s' % type )
+                        self.log.debug( 'Loaded sniffer for datatype: %s' % dtype )
                 except Exception, exc:
-                    self.log.warning( 'Error appending datatype %s to sniff_order, problem: %s' % ( type, str( exc ) ) )
+                    self.log.warning( 'Error appending datatype %s to sniff_order, problem: %s' % ( dtype, str( exc ) ) )
         #default values
         if len(self.datatypes_by_extension) < 1:
             self.datatypes_by_extension = { 
diff -r 262b16c8e277 -r f13d85256124 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py	Thu Mar 11 13:51:53 2010 -0500
+++ b/lib/galaxy/model/__init__.py	Thu Mar 11 14:35:36 2010 -0500
@@ -637,6 +637,9 @@
             return ( None, None )
         return get_source( self )
 
+    def get_display_applications( self, trans ):
+        return self.datatype.get_display_applications_by_dataset( self, trans )
+
 class HistoryDatasetAssociation( DatasetInstance ):
     def __init__( self, 
                   hid = None, 
diff -r 262b16c8e277 -r f13d85256124 templates/root/history_common.mako
--- a/templates/root/history_common.mako	Thu Mar 11 13:51:53 2010 -0500
+++ b/templates/root/history_common.mako	Thu Mar 11 14:35:36 2010 -0500
@@ -102,7 +102,7 @@
                         %endif
                     %endfor
                 %endif
-            %for display_app in data.datatype.display_applications.itervalues():
+            %for display_app in data.get_display_applications( trans ).itervalues():
                 | ${display_app.name} 
                 %for link_app in display_app.links.itervalues():
                 <a target="${link_app.url.get( 'target_frame', '_blank' )}" href="${link_app.get_display_url( data, trans )}">${_(link_app.name)}</a> 
diff -r 262b16c8e277 -r f13d85256124 tool-data/shared/bx/bx_build_sites.txt
--- a/tool-data/shared/bx/bx_build_sites.txt	Thu Mar 11 13:51:53 2010 -0500
+++ b/tool-data/shared/bx/bx_build_sites.txt	Thu Mar 11 14:35:36 2010 -0500
@@ -1,1 +1,1 @@
-main	http://main.genome-browser.bx.psu.edu/cgi-bin/hgTracks?	hg18,hg19,mm8,mm9
+main	http://main.genome-browser.bx.psu.edu/cgi-bin/hgTracks?	hg18,hg19,mm8,mm9	bx-main
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/262b16c8e277
changeset: 3520:262b16c8e277
user:      fubar: ross Lazarus at gmail period com
date:      Thu Mar 11 13:51:53 2010 -0500
description:
remove bogus string.translate
diffstat:
 lib/galaxy/web/controllers/library_common.py |  2 --
 1 files changed, 0 insertions(+), 2 deletions(-)
diffs (12 lines):
diff -r 39e4b119bdf4 -r 262b16c8e277 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py	Thu Mar 11 13:41:40 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py	Thu Mar 11 13:51:53 2010 -0500
@@ -1357,8 +1357,6 @@
                             path = os.path.join( parent_folder.name, path )
                             parent_folder = parent_folder.parent
                         path += ldda.name
-                        if path > '':
-			    path = path.translate(trantab)
                         while path in seen:
                             path += '_'
                         seen.append( path )
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/39e4b119bdf4
changeset: 3519:39e4b119bdf4
user:      fubar: ross Lazarus at gmail period com
date:      Thu Mar 11 13:41:40 2010 -0500
description:
merge with 3518:0c9e154e9176
diffstat:
 lib/galaxy/datatypes/sniff.py |  39 ++++++++++++++++++++++++---------------
 tools/data_source/upload.py   |  24 +++++++++++++++++++-----
 2 files changed, 43 insertions(+), 20 deletions(-)
diffs (140 lines):
diff -r e98117dd6054 -r 39e4b119bdf4 lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py	Thu Mar 11 13:37:49 2010 -0500
+++ b/lib/galaxy/datatypes/sniff.py	Thu Mar 11 13:41:40 2010 -0500
@@ -70,7 +70,7 @@
     f.close()
     return False
 
-def convert_newlines( fname ):
+def convert_newlines( fname, in_place=True ):
     """
     Converts in place a file from universal line endings 
     to Posix line endings.
@@ -78,7 +78,7 @@
     >>> fname = get_test_fname('temp.txt')
     >>> file(fname, 'wt').write("1 2\\r3 4")
     >>> convert_newlines(fname)
-    2
+    (2, None)
     >>> file(fname).read()
     '1 2\\n3 4\\n'
     """
@@ -87,18 +87,21 @@
     for i, line in enumerate( file( fname, "U" ) ):
         fp.write( "%s\n" % line.rstrip( "\r\n" ) )
     fp.close()
-    shutil.move( temp_name, fname )
-    # Return number of lines in file.
-    return i + 1
+    if in_place:
+        shutil.move( temp_name, fname )
+        # Return number of lines in file.
+        return ( i + 1, None )
+    else:
+        return ( i + 1, temp_name )
 
-def sep2tabs(fname, patt="\\s+"):
+def sep2tabs( fname, in_place=True, patt="\\s+" ):
     """
     Transforms in place a 'sep' separated file to a tab separated one
 
     >>> fname = get_test_fname('temp.txt')
     >>> file(fname, 'wt').write("1 2\\n3 4\\n")
     >>> sep2tabs(fname)
-    2
+    (2, None)
     >>> file(fname).read()
     '1\\t2\\n3\\t4\\n'
     """
@@ -110,11 +113,14 @@
         elems = regexp.split( line )
         fp.write( "%s\n" % '\t'.join( elems ) )
     fp.close()
-    shutil.move( temp_name, fname )
-    # Return number of lines in file.
-    return i + 1
+    if in_place:
+        shutil.move( temp_name, fname )
+        # Return number of lines in file.
+        return ( i + 1, None )
+    else:
+        return ( i + 1, temp_name )
 
-def convert_newlines_sep2tabs( fname, patt="\\s+" ):
+def convert_newlines_sep2tabs( fname, in_place=True, patt="\\s+" ):
     """
     Combines above methods: convert_newlines() and sep2tabs()
     so that files do not need to be read twice
@@ -122,7 +128,7 @@
     >>> fname = get_test_fname('temp.txt')
     >>> file(fname, 'wt').write("1 2\\r3 4")
     >>> convert_newlines_sep2tabs(fname)
-    2
+    (2, None)
     >>> file(fname).read()
     '1\\t2\\n3\\t4\\n'
     """
@@ -134,9 +140,12 @@
         elems = regexp.split( line )
         fp.write( "%s\n" % '\t'.join( elems ) )
     fp.close()
-    shutil.move( temp_name, fname )
-    # Return number of lines in file.
-    return i + 1
+    if in_place:
+        shutil.move( temp_name, fname )
+        # Return number of lines in file.
+        return ( i + 1, None )
+    else:
+        return ( i + 1, temp_name )
 
 def get_headers( fname, sep, count=60, is_multi_byte=False ):
     """
diff -r e98117dd6054 -r 39e4b119bdf4 tools/data_source/upload.py
--- a/tools/data_source/upload.py	Thu Mar 11 13:37:49 2010 -0500
+++ b/tools/data_source/upload.py	Thu Mar 11 13:41:40 2010 -0500
@@ -138,6 +138,7 @@
 def add_file( dataset, json_file, output_path ):
     data_type = None
     line_count = None
+    converted_path = None
 
     if dataset.type == 'url':
         try:
@@ -239,10 +240,15 @@
                 file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
                 return
         if data_type != 'binary' and data_type != 'zip':
-            if dataset.space_to_tab:
-                line_count = sniff.convert_newlines_sep2tabs( dataset.path )
-            else:
-                line_count = sniff.convert_newlines( dataset.path )
+            # don't convert newlines on data we're only going to symlink
+            if not dataset.get( 'link_data_only', False ):
+                in_place = True
+                if dataset.type in ( 'server_dir', 'path_paste' ):
+                    in_place = False
+                if dataset.space_to_tab:
+                    line_count, converted_path = sniff.convert_newlines_sep2tabs( dataset.path, in_place=in_place )
+                else:
+                    line_count, converted_path = sniff.convert_newlines( dataset.path, in_place=in_place )
             if dataset.file_type == 'auto':
                 ext = sniff.guess_ext( dataset.path )
             else:
@@ -257,7 +263,15 @@
     if dataset.get( 'link_data_only', False ):
         pass # data will remain in place
     elif dataset.type in ( 'server_dir', 'path_paste' ):
-        shutil.copy( dataset.path, output_path )
+        if converted_path is not None:
+            shutil.copy( converted_path, output_path )
+            try:
+                os.remove( converted_path )
+            except:
+                pass
+        else:
+            # this should not happen, but it's here just in case
+            shutil.copy( dataset.path, output_path )
     else:
         shutil.move( dataset.path, output_path )
     # Write the job info
                    
                  
                  
                          
                            
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/0c9e154e9176
changeset: 3517:0c9e154e9176
user:      Nate Coraor <nate(a)bx.psu.edu>
date:      Thu Mar 11 11:56:20 2010 -0500
description:
Modify the newline conversion methods in sniff so converting in place is optional.  This is necessary to fix a bug that occurs if using the 'server_dir' or 'path_paste' library upload methods: previously, they would modify the file to be imported in-place if permissions allowed (probably not what the admin wanted) or fail if permissions did not allow.  New functionality is to return the converted tempfile if 'server_dir' or 'path_paste' methods are used.  Also, no newline conversion will be done if the administrator uses the symlink checkbox.
diffstat:
 lib/galaxy/datatypes/sniff.py |  39 ++++++++++++++++++++++++---------------
 tools/data_source/upload.py   |  24 +++++++++++++++++++-----
 2 files changed, 43 insertions(+), 20 deletions(-)
diffs (140 lines):
diff -r 2e97ae04856d -r 0c9e154e9176 lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py	Thu Mar 11 11:17:11 2010 -0500
+++ b/lib/galaxy/datatypes/sniff.py	Thu Mar 11 11:56:20 2010 -0500
@@ -70,7 +70,7 @@
     f.close()
     return False
 
-def convert_newlines( fname ):
+def convert_newlines( fname, in_place=True ):
     """
     Converts in place a file from universal line endings 
     to Posix line endings.
@@ -78,7 +78,7 @@
     >>> fname = get_test_fname('temp.txt')
     >>> file(fname, 'wt').write("1 2\\r3 4")
     >>> convert_newlines(fname)
-    2
+    (2, None)
     >>> file(fname).read()
     '1 2\\n3 4\\n'
     """
@@ -87,18 +87,21 @@
     for i, line in enumerate( file( fname, "U" ) ):
         fp.write( "%s\n" % line.rstrip( "\r\n" ) )
     fp.close()
-    shutil.move( temp_name, fname )
-    # Return number of lines in file.
-    return i + 1
+    if in_place:
+        shutil.move( temp_name, fname )
+        # Return number of lines in file.
+        return ( i + 1, None )
+    else:
+        return ( i + 1, temp_name )
 
-def sep2tabs(fname, patt="\\s+"):
+def sep2tabs( fname, in_place=True, patt="\\s+" ):
     """
     Transforms in place a 'sep' separated file to a tab separated one
 
     >>> fname = get_test_fname('temp.txt')
     >>> file(fname, 'wt').write("1 2\\n3 4\\n")
     >>> sep2tabs(fname)
-    2
+    (2, None)
     >>> file(fname).read()
     '1\\t2\\n3\\t4\\n'
     """
@@ -110,11 +113,14 @@
         elems = regexp.split( line )
         fp.write( "%s\n" % '\t'.join( elems ) )
     fp.close()
-    shutil.move( temp_name, fname )
-    # Return number of lines in file.
-    return i + 1
+    if in_place:
+        shutil.move( temp_name, fname )
+        # Return number of lines in file.
+        return ( i + 1, None )
+    else:
+        return ( i + 1, temp_name )
 
-def convert_newlines_sep2tabs( fname, patt="\\s+" ):
+def convert_newlines_sep2tabs( fname, in_place=True, patt="\\s+" ):
     """
     Combines above methods: convert_newlines() and sep2tabs()
     so that files do not need to be read twice
@@ -122,7 +128,7 @@
     >>> fname = get_test_fname('temp.txt')
     >>> file(fname, 'wt').write("1 2\\r3 4")
     >>> convert_newlines_sep2tabs(fname)
-    2
+    (2, None)
     >>> file(fname).read()
     '1\\t2\\n3\\t4\\n'
     """
@@ -134,9 +140,12 @@
         elems = regexp.split( line )
         fp.write( "%s\n" % '\t'.join( elems ) )
     fp.close()
-    shutil.move( temp_name, fname )
-    # Return number of lines in file.
-    return i + 1
+    if in_place:
+        shutil.move( temp_name, fname )
+        # Return number of lines in file.
+        return ( i + 1, None )
+    else:
+        return ( i + 1, temp_name )
 
 def get_headers( fname, sep, count=60, is_multi_byte=False ):
     """
diff -r 2e97ae04856d -r 0c9e154e9176 tools/data_source/upload.py
--- a/tools/data_source/upload.py	Thu Mar 11 11:17:11 2010 -0500
+++ b/tools/data_source/upload.py	Thu Mar 11 11:56:20 2010 -0500
@@ -138,6 +138,7 @@
 def add_file( dataset, json_file, output_path ):
     data_type = None
     line_count = None
+    converted_path = None
 
     if dataset.type == 'url':
         try:
@@ -239,10 +240,15 @@
                 file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
                 return
         if data_type != 'binary' and data_type != 'zip':
-            if dataset.space_to_tab:
-                line_count = sniff.convert_newlines_sep2tabs( dataset.path )
-            else:
-                line_count = sniff.convert_newlines( dataset.path )
+            # don't convert newlines on data we're only going to symlink
+            if not dataset.get( 'link_data_only', False ):
+                in_place = True
+                if dataset.type in ( 'server_dir', 'path_paste' ):
+                    in_place = False
+                if dataset.space_to_tab:
+                    line_count, converted_path = sniff.convert_newlines_sep2tabs( dataset.path, in_place=in_place )
+                else:
+                    line_count, converted_path = sniff.convert_newlines( dataset.path, in_place=in_place )
             if dataset.file_type == 'auto':
                 ext = sniff.guess_ext( dataset.path )
             else:
@@ -257,7 +263,15 @@
     if dataset.get( 'link_data_only', False ):
         pass # data will remain in place
     elif dataset.type in ( 'server_dir', 'path_paste' ):
-        shutil.copy( dataset.path, output_path )
+        if converted_path is not None:
+            shutil.copy( converted_path, output_path )
+            try:
+                os.remove( converted_path )
+            except:
+                pass
+        else:
+            # this should not happen, but it's here just in case
+            shutil.copy( dataset.path, output_path )
     else:
         shutil.move( dataset.path, output_path )
     # Write the job info
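As a rough illustration of the new calling convention described above (a minimal sketch, not part of the changeset; the paths and filenames are placeholders), a caller that must not touch the original file can ask for a tempfile and copy it to its final destination itself:

    from galaxy.datatypes import sniff
    import os, shutil

    # With in_place=False the source file is left untouched; the converted
    # copy lands in a tempfile whose path comes back as the second element.
    line_count, converted_path = sniff.convert_newlines( '/library/import/source.txt', in_place=False )
    if converted_path is not None:
        shutil.copy( converted_path, '/galaxy/files/dataset_1.dat' )
        os.remove( converted_path )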
                    
                  
                  
                          
                            
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/e98117dd6054
changeset: 3518:e98117dd6054
user:      fubar: ross Lazarus at gmail period com
date:      Thu Mar 11 13:37:49 2010 -0500
description:
Fix for string.translate on an empty path when creating an archive of library composite datatypes.
diffstat:
 lib/galaxy/web/controllers/library_common.py |  6 ++++--
 1 files changed, 4 insertions(+), 2 deletions(-)
diffs (23 lines):
diff -r 2e97ae04856d -r e98117dd6054 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py	Thu Mar 11 11:17:11 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py	Thu Mar 11 13:37:49 2010 -0500
@@ -1357,7 +1357,8 @@
                             path = os.path.join( parent_folder.name, path )
                             parent_folder = parent_folder.parent
                         path += ldda.name
-    			path = path.translate(trantab)
+                        if path > '':
+			    path = path.translate(trantab)
                         while path in seen:
                             path += '_'
                         seen.append( path )
@@ -1377,7 +1378,8 @@
                             flist = glob.glob(os.path.join(ldda.dataset.extra_files_path,'*.*')) # glob returns full paths
                             for fpath in flist:
                                 efp,fname = os.path.split(fpath)
-               			fname = fname.translate(trantab)
+               			if fname > '':
+				    fname = fname.translate(trantab)
                                 try:
                                     archive.add( fpath,fname )
                                 except IOError:
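For reference, a standalone sketch of the guarded translate pattern this fix introduces (Python 2; the sample name is made up): the translation table maps punctuation and whitespace to underscores, and translate() is only applied when the name is non-empty:

    import string

    killme = string.punctuation + string.whitespace
    trantab = string.maketrans( killme, '_' * len( killme ) )

    def clean( name ):
        # mirror the guard added above: only translate non-empty names
        if name > '':
            return name.translate( trantab )
        return name

    print clean( 'my composite set (v2).html' )   # my_composite_set__v2__html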
                    
                  
                  
                          
                            
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/2e97ae04856d
changeset: 3516:2e97ae04856d
user:      Dan Blankenberg <dan(a)bx.psu.edu>
date:      Thu Mar 11 11:17:11 2010 -0500
description:
Fix for implicit datatype conversion.
diffstat:
 lib/galaxy/datatypes/data.py |  2 +-
 1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r 6fa986527398 -r 2e97ae04856d lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py	Wed Mar 10 23:59:25 2010 -0500
+++ b/lib/galaxy/datatypes/data.py	Thu Mar 11 11:17:11 2010 -0500
@@ -257,7 +257,7 @@
                 break
         params[input_name] = original_dataset
         #Run converter, job is dispatched through Queue
-        converted_dataset = converter.execute( trans, incoming = params, set_output_hid = visible )
+        converted_dataset = converter.execute( trans, incoming = params, set_output_hid = visible )[1]
         if len(params) > 0:
             trans.log_event( "Converter params: %s" % (str(params)), tool_id=converter.id )
         if not visible:
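The fix is the trailing [1]. A minimal, hypothetical stand-in (this is not Galaxy's real API, just the shape of the pattern) showing why the caller has to index the returned pair to get the outputs rather than the (job, outputs) tuple itself:

    # Hypothetical stand-in for a call that returns a ( job, outputs ) pair.
    def execute( trans, incoming, set_output_hid=True ):
        job = 'job placeholder'
        outputs = { 'output1': 'converted dataset placeholder' }
        return job, outputs

    converted_dataset = execute( None, incoming={} )[1]   # take the outputs, not the pair
    print converted_dataset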
                    
                  
                  
                          
                            
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/6fa986527398
changeset: 3515:6fa986527398
user:      fubar: ross Lazarus at gmail period com
date:      Wed Mar 10 23:59:25 2010 -0500
description:
merge
                    
                  
                  
                          
                            
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/87da8cd1f091
changeset: 3513:87da8cd1f091
user:      fubar: ross Lazarus at gmail period com
date:      Wed Mar 10 20:59:11 2010 -0500
description:
Remove the old Lmap datatype from datatypes_conf.xml.sample - it is now deprecated in genetics.py
and was causing the buildbot to barf.
diffstat:
 datatypes_conf.xml.sample                    |  1 -
 lib/galaxy/web/controllers/library_common.py |  3 ++-
 run_functional_tests.sh                      |  2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)
diffs (43 lines):
diff -r 53ddb4b728f7 -r 87da8cd1f091 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample	Wed Mar 10 19:48:28 2010 -0500
+++ b/datatypes_conf.xml.sample	Wed Mar 10 20:59:11 2010 -0500
@@ -194,7 +194,6 @@
         <!-- genome graphs ucsc file - first col is always marker then numeric values to plot -->
         <datatype extension="gg" type="galaxy.datatypes.genetics:GenomeGraphs"/>
         <!-- part of linkage format pedigree -->
-        <datatype extension="lmap" type="galaxy.datatypes.genetics:Lmap" display_in_upload="true"/>
         <datatype extension="malist" type="galaxy.datatypes.genetics:MAlist" display_in_upload="true"/>
         <!-- linkage format pedigree (separate .map file) -->
         <datatype extension="lped" type="galaxy.datatypes.genetics:Lped" display_in_upload="true">
diff -r 53ddb4b728f7 -r 87da8cd1f091 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py	Wed Mar 10 19:48:28 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py	Wed Mar 10 20:59:11 2010 -0500
@@ -1111,7 +1111,7 @@
                 # is composite - must return a zip of contents and the html file itself - ugh - should be reversible at upload!
                 # use act_on_multiple_datasets( self, trans, cntrller, library_id, ldda_ids='', **kwd ) since it does what we need
                 kwd['do_action'] = 'zip'
-                return self.act_on_multiple_datasets( trans, cntrller, library_id, ldda_ids=id, **kwd )
+                return self.act_on_multiple_datasets( trans, cntrller, library_id, ldda_ids=[id,], **kwd )
             else:
                 mime = trans.app.datatypes_registry.get_mimetype_by_extension( ldda.extension.lower() )
                 trans.response.set_content_type( mime )
@@ -1258,6 +1258,7 @@
             messagetype = 'error'
         else:
             ldda_ids = util.listify( ldda_ids )
+	    log.debug('## act on multiple got %s' % ldda_ids)
             if action == 'import_to_history':
                 history = trans.get_history()
                 if history is None:
diff -r 53ddb4b728f7 -r 87da8cd1f091 run_functional_tests.sh
--- a/run_functional_tests.sh	Wed Mar 10 19:48:28 2010 -0500
+++ b/run_functional_tests.sh	Wed Mar 10 20:59:11 2010 -0500
@@ -1,7 +1,7 @@
 #!/bin/sh
 
 # A good place to look for nose info: http://somethingaboutorange.com/mrl/projects/nose/
-
+export PATH=/usr/local/bin:$PATH
 rm -f run_functional_tests.log 
 
 if [ ! $1 ]; then
                    
                  
                  
                          
                            
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/06dcf56688ea
changeset: 3514:06dcf56688ea
user:      fubar: ross Lazarus at gmail period com
date:      Wed Mar 10 23:58:35 2010 -0500
description:
Fixes for downloading library archives - correct extensions help a lot and removing spaces from ldda.name helps too.
diffstat:
 lib/galaxy/web/controllers/library_common.py |  15 +++++++++------
 1 files changed, 9 insertions(+), 6 deletions(-)
diffs (63 lines):
diff -r 87da8cd1f091 -r 06dcf56688ea lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py	Wed Mar 10 20:59:11 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py	Wed Mar 10 23:58:35 2010 -0500
@@ -1,4 +1,4 @@
-import os, os.path, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile, copy, glob
+import os, os.path, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile, copy, glob, string
 from galaxy.web.base.controller import *
 from galaxy import util, jobs
 from galaxy.datatypes import sniff
@@ -1258,7 +1258,6 @@
             messagetype = 'error'
         else:
             ldda_ids = util.listify( ldda_ids )
-	    log.debug('## act on multiple got %s' % ldda_ids)
             if action == 'import_to_history':
                 history = trans.get_history()
                 if history is None:
@@ -1312,6 +1311,8 @@
                 msg = "The selected datasets have been removed from this data library"
             else:
                 error = False
+                killme = string.punctuation + string.whitespace
+    		trantab = string.maketrans(killme,'_'*len(killme))
                 try:
                     outext = 'zip'
                     if action == 'zip':
@@ -1325,10 +1326,10 @@
                         archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
                     elif action == 'tgz':
                         archive = util.streamball.StreamBall( 'w|gz' )
-                        outext = 'gz'
+                        outext = 'tgz'
                     elif action == 'tbz':
                         archive = util.streamball.StreamBall( 'w|bz2' )
-                        outext = 'bz2'
+                        outext = 'tbz2'
                 except (OSError, zipfile.BadZipFile):
                     error = True
                     log.exception( "Unable to create archive for download" )
@@ -1356,12 +1357,13 @@
                             path = os.path.join( parent_folder.name, path )
                             parent_folder = parent_folder.parent
                         path += ldda.name
+    			path = path.translate(trantab)
                         while path in seen:
                             path += '_'
                         seen.append( path )
+                        zpath = os.path.split(path)[-1] # comes as base_name/fname
+                        outfname,zpathext = os.path.splitext(zpath)
                         if is_composite: # need to add all the components from the extra_files_path to the zip
-                            zpath = os.path.split(path)[-1] # comes as base_name/fname
-                            outfname,zpathext = os.path.splitext(zpath)
                             if zpathext == '':
                                 zpath = '%s.html' % zpath # fake the real nature of the html file 
                             try:
@@ -1375,6 +1377,7 @@
                             flist = glob.glob(os.path.join(ldda.dataset.extra_files_path,'*.*')) # glob returns full paths
                             for fpath in flist:
                                 efp,fname = os.path.split(fpath)
+               			fname = fname.translate(trantab)
                                 try:
                                     archive.add( fpath,fname )
                                 except IOError:
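A small standalone sketch (the names are made up) of the path handling this changeset hoists out of the composite-only branch: the archive member name is split once, and a composite primary with no extension gets a fake .html suffix:

    import os

    path = 'My Library Folder/my composite dataset'
    zpath = os.path.split( path )[-1]                # 'my composite dataset'
    outfname, zpathext = os.path.splitext( zpath )   # ( 'my composite dataset', '' )
    if zpathext == '':
        zpath = '%s.html' % zpath                    # fake the real nature of the html file
    print outfname, zpath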
                    
                  
                  
                          
                            
                    18 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/53ddb4b728f7
changeset: 3512:53ddb4b728f7
user:      fubar: ross Lazarus at gmail period com
date:      Wed Mar 10 19:48:28 2010 -0500
description:
Restored the missing python version check for the zipfile setup.
Changed download archive names for both libraries and for history items
so they're related to the content - uses data.name rather than one fixed string for all.
diffstat:
 lib/galaxy/web/controllers/dataset.py        |  25 ++++++++++++++++---------
 lib/galaxy/web/controllers/library_common.py |  13 ++++++++-----
 2 files changed, 24 insertions(+), 14 deletions(-)
diffs (153 lines):
diff -r 2af472aa0844 -r 53ddb4b728f7 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py	Wed Mar 10 17:10:43 2010 -0500
+++ b/lib/galaxy/web/controllers/dataset.py	Wed Mar 10 19:48:28 2010 -0500
@@ -1,4 +1,4 @@
-import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib, tempfile, zipfile, glob
+import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib, tempfile, zipfile, glob, sys
 
 from galaxy.web.base.controller import *
 from galaxy.web.framework.helpers import time_ago, iff, grids
@@ -11,6 +11,11 @@
 pkg_resources.require( "Paste" )
 import paste.httpexceptions
 
+if sys.version_info[:2] < ( 2, 6 ):
+    zipfile.BadZipFile = zipfile.error
+if sys.version_info[:2] < ( 2, 5 ):
+    zipfile.LargeZipFile = zipfile.error
+
 tmpd = tempfile.mkdtemp()
 comptypes=[]
 ziptype = '32'
@@ -204,6 +209,9 @@
     def archive_composite_dataset( self, trans, data=None, **kwd ):
         # save a composite object into a compressed archive for downloading
         params = util.Params( kwd )
+        valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+        outfname = data.name[0:150]
+        outfname = ''.join(c in valid_chars and c or '_' for c in outfname)
         if (params.do_action == None):
      	    params.do_action = 'zip' # default
         msg = util.restore_text( params.get( 'msg', ''  ) )
@@ -230,7 +238,7 @@
             except (OSError, zipfile.BadZipFile):
                 error = True
                 log.exception( "Unable to create archive for download" )
-                msg = "Unable to create archive for %s for download, please report this error" % data.name
+                msg = "Unable to create archive for %s for download, please report this error" % outfname
                 messagetype = 'error'
             if not error:
                 current_user_roles = trans.get_current_user_roles()
@@ -239,7 +247,7 @@
                 fname = os.path.split(path)[-1]
                 basename = data.metadata.base_name
                 efp = data.extra_files_path
-                htmlname = os.path.splitext(data.name)[0]
+                htmlname = os.path.splitext(outfname)[0]
                 if not htmlname.endswith(ext):
                     htmlname = '%s_%s' % (htmlname,ext)
                 archname = '%s.html' % htmlname # fake the real nature of the html file
@@ -276,14 +284,14 @@
                             messagetype = 'error'
                         if not error:
                             trans.response.set_content_type( "application/x-zip-compressed" )
-                            trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyCompositeObject.zip" 
+                            trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.zip" % outfname 
                             return tmpfh
                     else:
                         trans.response.set_content_type( "application/x-tar" )
                         outext = 'tgz'
                         if params.do_action == 'tbz':
                             outext = 'tbz'
-                        trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % outext 
+                        trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext) 
                         archive.wsgi_status = trans.response.wsgi_status()
                         archive.wsgi_headeritems = trans.response.wsgi_headeritems()
                         return archive.stream
@@ -294,7 +302,8 @@
     @web.expose
     def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd):
         """Catches the dataset id and displays file contents as directed"""
-        
+        composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
+        composite_extensions.append('html') # for archiving composite datatypes
         # DEPRECATION: We still support unencoded ids for backward compatibility
         try:
             dataset_id = int( dataset_id )
@@ -329,8 +338,6 @@
             trans.log_event( "Display dataset id: %s" % str( dataset_id ) )
             
             if to_ext: # Saving the file
-                composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
-                composite_extensions.append('html')
                 if data.ext in composite_extensions:
                     return self.archive_composite_dataset( trans, data, **kwd )
                 else:                    
@@ -340,7 +347,7 @@
                     valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
                     fname = data.name
                     fname = ''.join(c in valid_chars and c or '_' for c in fname)[0:150]
-                    trans.response.headers["Content-Disposition"] = "attachment; filename=GalaxyHistoryItem-%s-[%s]%s" % (data.hid, fname, to_ext)
+                    trans.response.headers["Content-Disposition"] = "attachment; filename=Galaxy%s-[%s]%s" % (data.hid, fname, to_ext)
                     return open( data.file_name )
             if os.path.exists( data.file_name ):
                 max_peek_size = 1000000 # 1 MB
diff -r 2af472aa0844 -r 53ddb4b728f7 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py	Wed Mar 10 17:10:43 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py	Wed Mar 10 19:48:28 2010 -0500
@@ -1312,6 +1312,7 @@
             else:
                 error = False
                 try:
+                    outext = 'zip'
                     if action == 'zip':
                         # Can't use mkstemp - the file must not exist first
                         tmpd = tempfile.mkdtemp()
@@ -1323,8 +1324,10 @@
                         archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
                     elif action == 'tgz':
                         archive = util.streamball.StreamBall( 'w|gz' )
+                        outext = 'gz'
                     elif action == 'tbz':
                         archive = util.streamball.StreamBall( 'w|bz2' )
+                        outext = 'bz2'
                 except (OSError, zipfile.BadZipFile):
                     error = True
                     log.exception( "Unable to create archive for download" )
@@ -1357,11 +1360,11 @@
                         seen.append( path )
                         if is_composite: # need to add all the components from the extra_files_path to the zip
                             zpath = os.path.split(path)[-1] # comes as base_name/fname
-                            zpathext = os.path.splitext(zpath)[-1]
+                            outfname,zpathext = os.path.splitext(zpath)
                             if zpathext == '':
                                 zpath = '%s.html' % zpath # fake the real nature of the html file 
                             try:
-                                archive.add(ldda.dataset.file_name,zpath)
+                                archive.add(ldda.dataset.file_name,zpath) # add the primary of a composite set
                             except IOError:
                                 error = True
                                 log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name)
@@ -1375,7 +1378,7 @@
                                     archive.add( fpath,fname )
                                 except IOError:
                                     error = True
-                                    log.exception( "Unable to add %s to temporary library download archive" % fname)
+                                    log.exception( "Unable to add %s to temporary library download archive %s" % (fname,outfname))
                                     msg = "Unable to create archive for download, please report this error"
                                     messagetype = 'error'
                                     continue
@@ -1402,11 +1405,11 @@
                                 messagetype = 'error'
                             if not error:
                                 trans.response.set_content_type( "application/x-zip-compressed" )
-                                trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % action
+                                trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext)
                                 return tmpfh
                         else:
                             trans.response.set_content_type( "application/x-tar" )
-                            trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % action
+                            trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext)
                             archive.wsgi_status = trans.response.wsgi_status()
                             archive.wsgi_headeritems = trans.response.wsgi_headeritems()
                             return archive.stream
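The restored version check at the top of dataset.py can be exercised on its own; a minimal sketch (the archive name is a placeholder) of why the aliases matter on older Pythons:

    import sys, zipfile

    # Older Pythons do not define these exception names, so alias them to the
    # generic zipfile.error so the except clause below works everywhere.
    if sys.version_info[:2] < ( 2, 6 ):
        zipfile.BadZipFile = zipfile.error
    if sys.version_info[:2] < ( 2, 5 ):
        zipfile.LargeZipFile = zipfile.error

    try:
        zipfile.ZipFile( 'no-such-archive.zip' )
    except ( IOError, zipfile.BadZipFile ):
        print 'could not open archive'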
                    
                  
                  
                          
                            
                        Hi,
I was wondering if there's an easy way to disable the pop-up balloon tooltips (on the save, rerun, view, and delete icons)?
The new beautiful icons (or the old text labels) are informative enough, and the constantly appearing/disappearing black tooltip balloons (when you move the mouse over datasets) are a bit annoying.
Alternatively, is it possible to add a short delay before showing the tooltip (like in all regular GUI client-side applications)?
Thanks,
 -gordon
                    
                  
                  
                          
                            
                    17 Mar '10
                    
                        Hi Galaxy team,
When working with histories with many items I sometimes get a little lost: which history item's content is displayed in the middle section of my Galaxy window? (The output from different tools is usually different enough to prevent confusion, but when you process multiple samples in the same history it can get confusing.) Would it be possible to highlight the history item whose content is displayed in the center of the Galaxy window? Some other visual clue in the middle section, like a small heading or title bar with the name/tag of the displayed history item, would probably also do the trick...
Cheers,
Pi
---------------------------------------------------------------
Biomolecular Mass Spectrometry & Proteomics group
Utrecht University
Visiting address:
H.R. Kruyt building room O607
Padualaan 8
3584 CH Utrecht
The Netherlands
Mail address:
P.O. box 80.082
3508 TB Utrecht
The Netherlands
phone: +31 6 143 66 783
email: pieter.neerincx(a)gmail.com
skype: pieter.online
---------------------------------------------------------------
                    
                  
                  
                          
                            
                        Hi Galaxy team,
As my local instance grows, upgrading becomes more difficult each time. I now use diff on the various config files to see which changes I should lift over to the new install. (I don't update in place, but leave my current setup untouched as a backup, create a new install and, if that works, switch to the new setup using a symlink.) Would it be possible to use a special directory with config files for site-specific customizations? I'm thinking of something like Apache's "conf.d" directory or good ol' SRS's "sites" directory. This would allow us to leave the original Galaxy tools + datatypes config files untouched and list our local additions/mods in separate config files, making upgrades a bit easier...
Cheers,
Pi
---------------------------------------------------------------
Biomolecular Mass Spectrometry & Proteomics group
Utrecht University
Visiting address:
H.R. Kruyt building room O607
Padualaan 8
3584 CH Utrecht
The Netherlands
Mail address:
P.O. box 80.082
3508 TB Utrecht
The Netherlands
phone: +31 6 143 66 783
email: pieter.neerincx(a)gmail.com
skype: pieter.online
---------------------------------------------------------------
                    
                  
                  
                          
                            
                        please help!  our install of galaxy is completely unable to load any data into libraries now!
URL: http://galaxy.jgi-psf.org/library_common/act_on_multiple_datasets?library_i…
File '/house/groupdirs/mep/galaxy/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/exceptions/errormiddleware.py', line 143 in __call__
  app_iter = self.application(environ, start_response)
File '/house/groupdirs/mep/galaxy/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/debug/prints.py', line 98 in __call__
  environ, self.app)
File '/house/groupdirs/mep/galaxy/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/wsgilib.py', line 539 in intercept_output
  app_iter = application(environ, replacement_start_response)
File '/house/groupdirs/mep/galaxy/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/recursive.py', line 80 in __call__
  return self.application(environ, start_response)
File '/house/groupdirs/mep/galaxy/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/httpexceptions.py', line 632 in __call__
  return self.application(environ, start_response)
File '/house/groupdirs/mep/galaxy/galaxy-dist/lib/galaxy/web/framework/base.py', line 125 in __call__
  body = method( trans, **kwargs )
File '/house/groupdirs/mep/galaxy/galaxy-dist/lib/galaxy/web/controllers/library_common.py', line 1384 in act_on_multiple_datasets
  archive.add( ldda.dataset.file_name, path )
UnboundLocalError: local variable 'archive' referenced before assignment
I tried adding a new library to see if I could see it, but I get an error when symlinking or copying from filesystem paths:
Miscellaneous information:
Traceback (most recent call last): File "/house/groupdirs/mep/galaxy/galaxy-dist/tools/data_source/upload.py", line 326, in __main__() File "/house/groupdirs/mep/galaxy/galaxy-dist/tools/data_source/upload.py", line 318, in __main__ a
error
I could upload, but I still get the first error
Thanks in advance for any assistance!
Ed Kirton
                    
                  
                  
                          
                            
I have a perl script that splits a text file with genomic information
(forward strand and reverse strand) into two separate files:
one file for the forward strand and one file for the reverse strand.
I have written a wrapper so that Galaxy can execute this script.
When my script outputs one file, that file is listed as a history item.
When I let my script output the two files, only one history item gets
created.
What I need to know is which extra variables I need to output from
the .xml file so that galaxy finds the two output files and puts both
files in as history items.
I have been looking at the "interval2maf" .py and .xml, but I just can't
figure out how this works. It seems that the history_id is important, that
the species is a list array, and that for every species a file gets created in
the database/tmp directory; when the file is created the tmp dir is
deleted.
Adding an extra file from the .py into this tmp dir does not end up in
the history items, and renaming the files seems to end up with no history
items at all.
Cheers,
//Michel
                    
                  
                  
                          
                            
                        Hi,
I've tried to install galaxy on my Ubuntu Linux amd64 (2.6.27-11-generic) with Python 2.5.2 (r252:60911); >>> sqlalchemy.__version__ reports '0.5.8'. I ran $ hg clone http://bitbucket.org/galaxy/galaxy-central/ 5 days ago,
and I got this error after calling % sh run.sh:
Traceback (most recent call last):
  File "/home/tan/galaxy_dist/lib/galaxy/web/buildapp.py", line 61, in app_factory
    app = UniverseApplication( global_conf = global_conf, **kwargs )
  File "/home/tan/galaxy_dist/lib/galaxy/app.py", line 28, in __init__
    create_or_verify_database( db_url, self.config.database_engine_options )
  File "/home/tan/galaxy_dist/lib/galaxy/model/migrate/check.py", line 66, in create_or_verify_database
    db_schema = schema.ControlledSchema( engine, migrate_repository )
  File "/home/tan/galaxy_dist/eggs/sqlalchemy_migrate-0.5.4-py2.5.egg/migrate/versioning/schema.py", line 24, in __init__
    self._load()
  File "/home/tan/galaxy_dist/eggs/sqlalchemy_migrate-0.5.4-py2.5.egg/migrate/versioning/schema.py", line 36, in _load
    self.table = Table(tname, self.meta, autoload=True)
  File "/var/lib/python-support/python2.5/sqlalchemy/schema.py", line 110, in __call__
    return type.__call__(self, name, metadata, *args, **kwargs)
  File "/var/lib/python-support/python2.5/sqlalchemy/schema.py", line 226, in __init__
    _bind_or_error(metadata).reflecttable(self, include_columns=include_columns)
  File "/var/lib/python-support/python2.5/sqlalchemy/engine/base.py", line 1275, in reflecttable
    self.dialect.reflecttable(conn, table, include_columns)
  File "/var/lib/python-support/python2.5/sqlalchemy/databases/sqlite.py", line 343, in reflecttable
    raise exceptions.NoSuchTableError(table.name)
NoSuchTableError: migrate_version
/var/lib/python-support/python2.5/sqlalchemy/__init__.py, however, says version 0.4.6
Thanks,
Patrick
      __________________________________________________________________
Connect with friends from any web browser - no download required. Try the new Yahoo! Canada Messenger for the Web BETA at http://ca.messenger.yahoo.com/webmessengerpromo.php
                    
                  
                  
                          
                            
                        Hi,
Is it possible to have textFields pre-set within a form or somewhere  
so that when someone goes and creates the sequence request certain  
fields are already set?
Thanks,
Natalie
                    
                  
                  
                          
                            
                        Hi Freerk,
Can you be more specific about the problems that you're seeing? Also, please continue to cc galaxy-dev as you're more likely to get help with your issues since it's going to a larger audience.
Thanks,
J.
On Mar 11, 2010, at 10:42 AM, Dijk, F van wrote:
> Hi Jeremy,
>  
> Thanks for the fast reply. The first issue is solved, but a new one occurred during the second step. When I migrate the sqlite database to SQL and try to import it into the MySQL database, some errors occur.
> MySQL is complaining about the syntax used in the SQL dump.
> We are using the following MySQL version:
> mysql  Ver 14.12 Distrib 5.0.75, for debian-linux-gnu (x86_64) using readline 5.2
> I also read the info on the link you provided in your answer, but I don't think it will solve this problem. So, do you have a solution?
>  
> Sincerely,
>  
>  
> Freerk van Dijk
>  
> 
> From: Jeremy Goecks [mailto:jeremy.goecks@emory.edu] 
> Sent: Thursday, March 11, 2010 15:25
> To: Dijk, F van
> CC: galaxy-dev(a)bx.psu.edu
> Subject: Re: [galaxy-dev] Database migration problem
> 
> Hi Freerk,
> 
>> When we change the databaseconnection to "mysql:///galaxy?unix_socket=/var/run/mysqld/mysqld.sock" in the universe_wsgi.ini file and run Galaxy we receive an error like:
>> 
>> "Access denied for user 'root'@'localhost' (using password: NO)") None None
>> 
>> So is there an option somewhere which we also have to change to make the connection work?
>> 
> You need to specify a username and password to access your database. Try this URL:
> 
> mysql://user:password@localhost/galaxy?unix_socket=/var/lib/mysql/mysql.sock
> (exchanging 'user' and 'password' according to your settings)
> 
>> And question two: 
>> Do we need to migrate the content from the sqlite database to the MySQL database by hand or does this happen automatically?
>> 
> 
> You need to do the migration manually. Kanwei provided a nice solution to this problem a while back:
> 
> http://lists.bx.psu.edu/pipermail/galaxy-dev/2010-February/001996.html
> 
> --
> What I would suggest is to dump the sqlite database to standard sql,
> and then creating a new instance of galaxy that has mysql as the
> configured database. When you run the setup and run script, you'll
> have a freshly instantiated galaxy with empty mysql tables. Then you
> can just import the standard sql file into the mysql database.
> --
> 
> Good luck and let us know if you have any more problems.
> 
> J.
> 
> 
> The contents of this message are confidential and only intended for the eyes of the addressee(s). Others than the addressee(s) are not allowed to use this message, to make it public or to distribute or multiply this message in any way. The UMCG cannot be held responsible for incomplete reception or delay of this transferred message.
> 
                    
                  
                  
                          
                            
                        Hi,
I am trying to install galaxy on my MacBook with python version 2.5.2,
but I got error messages when I run sh run.sh. Could you please help me
with this?
Best regards,
Lin
Traceback (most recent call last):
  File "./scripts/paster.py", line 34, in <module>
    command.run()
  File "/Users/linjiang/galaxy_dist/eggs/PasteScript-1.7.3-py2.5.egg/ 
paste/script/command.py", line 84, in run
    invoke(command, command_name, options, args[1:])
  File "/Users/linjiang/galaxy_dist/eggs/PasteScript-1.7.3-py2.5.egg/ 
paste/script/command.py", line 123, in invoke
    exit_code = runner.run(args)
  File "/Users/linjiang/galaxy_dist/eggs/PasteScript-1.7.3-py2.5.egg/ 
paste/script/command.py", line 218, in run
    result = self.command()
  File "/Users/linjiang/galaxy_dist/eggs/PasteScript-1.7.3-py2.5.egg/ 
paste/script/serve.py", line 276, in command
    relative_to=base, global_conf=vars)
  File "/Users/linjiang/galaxy_dist/eggs/PasteScript-1.7.3-py2.5.egg/ 
paste/script/serve.py", line 313, in loadapp
    **kw)
  File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/ 
paste/deploy/loadwsgi.py", line 204, in loadapp
    return loadobj(APP, uri, name=name, **kw)
  File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/ 
paste/deploy/loadwsgi.py", line 224, in loadobj
    global_conf=global_conf)
  File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/ 
paste/deploy/loadwsgi.py", line 248, in loadcontext
    global_conf=global_conf)
  File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/ 
paste/deploy/loadwsgi.py", line 278, in _loadconfig
    return loader.get_context(object_type, name, global_conf)
  File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/ 
paste/deploy/loadwsgi.py", line 413, in get_context
    section)
  File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/ 
paste/deploy/loadwsgi.py", line 458, in _context_from_explicit
    value = import_string(found_expr)
  File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/ 
paste/deploy/loadwsgi.py", line 18, in import_string
    return pkg_resources.EntryPoint.parse("x="+s).load(False)
  File "/Users/linjiang/galaxy_dist/lib/pkg_resources.py", line 1912,  
in load
    entry = __import__(self.module_name, globals(),globals(),  
['__name__'])
  File "/Users/linjiang/galaxy_dist/lib/galaxy/web/__init__.py", line  
5, in <module>
    from framework import expose, json, require_login, require_admin,  
url_for, error, form, FormBuilder
  File "/Users/linjiang/galaxy_dist/lib/galaxy/web/framework/ 
__init__.py", line 29, in <module>
    from babel.support import Translations
  File "/Users/linjiang/galaxy_dist/eggs/Babel-0.9.4-py2.5.egg/babel/ 
support.py", line 29, in <module>
    from babel.dates import format_date, format_datetime, format_time,  
LC_TIME
  File "/Users/linjiang/galaxy_dist/eggs/Babel-0.9.4-py2.5.egg/babel/ 
dates.py", line 34, in <module>
    LC_TIME = default_locale('LC_TIME')
  File "/Users/linjiang/galaxy_dist/eggs/Babel-0.9.4-py2.5.egg/babel/ 
core.py", line 642, in default_locale
    return '_'.join(filter(None, parse_locale(locale)))
  File "/Users/linjiang/galaxy_dist/eggs/Babel-0.9.4-py2.5.egg/babel/ 
core.py", line 763, in parse_locale
    raise ValueError('expected only letters, got %r' % lang)
ValueError: expected only letters, got 'utf-8'
-- 
Lin Jiang, Ph.D student
Dept. of Medical Biochemistry and Microbiology
Uppsala University
Husargatan 3
Biomedical Centre D11:3 307d
Box 582 IMBIM Husdjursgenetik
S-752 37 Uppsala
Sweden
Workphone: +46 (0) 18 471 4383
Lin.Jiang(a)imbim.uu.se
                    
                  
                  
                          
                            
                    
                  
                  
                          
                            
                        Hi,
We have two parallel installs of Galaxy, one which runs with python2.4
and the other with 2.6. The 2.6 install is working fine (other than the issues
I've posted); however, the 2.4 install won't start as it can't find DRMAA_python.
I have scrambled the egg and it does exist:
www-galaxy@ge-002: galaxy_live> ls -l 
/homes/www-galaxy/galaxy_live/eggs/DRMAA_python-0.2-py2.4-linux-x86_64-ucs4.egg
-rw-r--r-- 1 www-galaxy barton 62374 Mar  5 14:22 
/homes/www-galaxy/galaxy_live/eggs/DRMAA_python-0.2-py2.4-linux-x86_64-ucs4.egg
What am I missing?
Cheers,
Chris
-- 
Dr Chris Cole
Senior Bioinformatics Research Officer
School of Life Sciences Research
University of Dundee
Dow Street
Dundee
DD1 5EH
Scotland, UK
url: http://network.nature.com/profile/drchriscole
e-mail: chris(a)compbio.dundee.ac.uk
Tel: +44 (0)1382 388 721
The University of Dundee is a registered Scottish charity, No: SC015096
                    
                  
                  
                          
                            
                        Hi, let me lead in with how impressed I am at the professional packaging of
galaxy -- dropped right in.  Amazing!
Now on to the problem, I'm sure we're just missing something here on our end,
but we're seeing an error when running the histogram tool which says hist wants
a vector not a matrix.
An error occurred running this job: Error in hist.default(list(68, 71, 
62, 75, 58, 60, 67, 68, 71, 69), xlab = "V1", :
'x' must be numeric
Looking at
http://bitbucket.org/galaxy/galaxy-central/src/tip/tools/plotting/histogram…
and the history thereof it's clear that's always been the case back to revision
0, but when we try from the console we get rpy complaining that a matrix isn't
a vactor, and it works when we try a vector.
By hand with a matrix:
    $ python
    Python 2.6.4 (r264:75706, Mar  9 2010, 10:00:44)
    [GCC 4.2.1 (Apple Inc. build 5646) (dot 1)] on darwin
    Type "help", "copyright", "credits" or "license" for more information.
     >>> import sys
     >>> from rpy import *
     >>> matrix = []
     >>> vals = ["23","14","32","25","12","9","35","18","24"]
     >>> for i in vals:
    ...     row = [];row.append(float(i));matrix.append(row)
    ...
     >>> matrix
    [[23.0], [14.0], [32.0], [25.0], [12.0], [9.0], [35.0], [18.0], [24.0]]
     >>> a = array(matrix)
     >>> r.pdf("histtest.pdf", 8, 8)
     >>> title = "Histogram Test";xlab="Count";breaks="Sturges"
     >>> r.hist( a, probability=True, main=title, xlab=xlab, breaks=breaks )
    Traceback (most recent call last):
      File "<stdin>", line 1, in <module>
    rpy.RPy_RException: Error in hist.default(list(23, 14, 32, 25, 12, 9, 
    35, 18, 24), xlab = "Count",  :
      'x' must be numeric
     >>> a 
    array([[ 23.],
           [ 14.],
           [ 32.],
           [ 25.],
           [ 12.],
           [  9.],
           [ 35.],
           [ 18.],
           [ 24.]])
However, when we build a vector instead of rows R/rpy is happy:
     >>> v = []
     >>> for i in vals:
    ...     v.append(float(i))
    ...
     >>> v
    [23.0, 14.0, 32.0, 25.0, 12.0, 9.0, 35.0, 18.0, 24.0]
     >>> r.hist(v, probability=True, main=title, xlab=xlab, breaks=breaks )
    {'density': [0.022222217777778667, 0.044444444444444446, 
    0.02222222222222223, 0.066666666666666666, 0.0, 0.044444444444444446], 
    'equidist': True, 'breaks': [5.0, 10.0, 15.0, 20.0, 25.0, 30.0, 35.0], 
    'intensities': [0.022222217777778667, 0.044444444444444446, 
    0.02222222222222223, 0.066666666666666666, 0.0, 0.044444444444444446], 
    'counts': [1, 2, 1, 3, 0, 2], 'xname': 'c(23, 14, 32, 25, 12, 9, 35, 18, 
    24)', 'mids': [7.5, 12.5, 17.5, 22.5, 27.5, 32.5]}
     >>>
     >>> a = array(v)
     >>> r.hist( a, probability=True, main=title, xlab=xlab, breaks=breaks )
    {'density': [0.022222217777778667, 0.044444444444444446, 
    0.02222222222222223, 0.066666666666666666, 0.0, 0.044444444444444446], 
    'equidist': True, 'breaks': [5.0, 10.0, 15.0, 20.0, 25.0, 30.0, 35.0], 
    'intensities': [0.022222217777778667, 0.044444444444444446, 
    0.02222222222222223, 0.066666666666666666, 0.0, 0.044444444444444446], 
    'counts': [1, 2, 1, 3, 0, 2], 'xname': 'c(23, 14, 32, 25, 12, 9, 35, 18, 
    24)', 'mids': [7.5, 12.5, 17.5, 22.5, 27.5, 32.5]}
The relevant versions are:
    Galaxy: 297d8c9c5eb0 (galaxy-dist from a few weeks ago)
    R: 2.10.1
    rpy: 1.0.3 (slightly tweaked for two-digit R minor-version strings, sigh)
So after all that, how does your histogram tool work when it passes a matrix to something that wants a vector, and can ours work that way too, please? :)
Thanks,
-- 
Ry4an Brase                                         612-626-6575
University of Minnesota Supercomputing Institute
for Advanced Computational Research                 http://www.msi.umn.edu
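
(For completeness, a small workaround sketch rather than the stock tool's actual code: as the quoted error shows, rpy 1.x hands the one-column structure to R as list(23, 14, ...) rather than a numeric vector, which is what trips the "'x' must be numeric" error; flattening the column first keeps hist happy. Whether the shipped histogram tool relies on some other rpy conversion mode is exactly the open question here.)

    from rpy import r

    vals = ["23", "14", "32", "25", "12", "9", "35", "18", "24"]
    matrix = [[float(i)] for i in vals]     # the one-column shape built above
    flat = [row[0] for row in matrix]       # plain list of floats -> numeric vector in R

    r.pdf("histtest.pdf", 8, 8)
    r.hist(flat, probability=True, main="Histogram Test", xlab="Count",
           breaks="Sturges")
    r.dev_off()
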
                        Hi
I just noticed (after a history refresh) that when I try to use a GFF3 file to extract genomic DNA, a job is created that converts the GFF to BED. Using the GFF file directly gives the error I detailed earlier (see below), but using the BED file works fine and I get the genomic sequence I want.
If I first convert the GFF to a BED file myself, everything is also fine (as expected from the above).
So, it looks as though the problem is actually associated with the 
process of GFF-->BED-->extract genomic and not the actual extraction of 
sequences.
Has anyone else seen this behaviour?
Thanks
Nathaniel
-------- Original Message --------
Subject: Adding custom genome
Date: Thu, 11 Mar 2010 14:24:13 +0100
From: Nathaniel Street <nathaniel.street(a)plantphys.umu.se>
To: galaxy-dev(a)lists.bx.psu.edu
Hi
I'm trying to add a custom genome to a local Galaxy install. So far I have done this:
1) Create a fasta file per scaffold (it's an unfinished genome)
2) Create a nib file for each of those scaffold fasta files
3) Add an entry to build.txt
nis    Test genome (nis)
4) Add a line to alignseq.loc
seq    nis    /data/nib/nis
5) Add a line to faseq.loc
nis    /data/sequences/nis
Is there more that I need to do to be able to extract sequences using
the Extract Genomic DNA tool?
I tried to use the tool by uploading a GFF3 file, extracting a small
part of that for testing and then using that small part to extract
genomic DNA. When I try this I get an error message returned
AttributeError: 'tuple' object has no attribute 'iteritems'
And the text version of the Traceback gives this
URL: http://XXX:XXX:XX:XX:8080/tool_runner/index
File
'/home/nat/work/software/galaxy-dist/eggs/WebError-0.8a-py2.6.egg/weberror/evalexception/middleware.py', 
line 364 in respond
   app_iter = self.application(environ, detect_start_response)
File
'/home/nat/work/software/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/debug/prints.py', 
line 98 in __call__
   environ, self.app)
File
'/home/nat/work/software/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/wsgilib.py', 
line 539 in intercept_output
   app_iter = application(environ, replacement_start_response)
File
'/home/nat/work/software/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/recursive.py', 
line 80 in __call__
   return self.application(environ, start_response)
File
'/home/nat/work/software/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/httpexceptions.py', 
line 632 in __call__
   return self.application(environ, start_response)
File
'/home/nat/work/software/galaxy-dist/lib/galaxy/web/framework/base.py',
line 125 in __call__
   body = method( trans, **kwargs )
File
'/home/nat/work/software/galaxy-dist/lib/galaxy/web/controllers/tool_runner.py', 
line 53 in index
   template, vars = tool.handle_input( trans, params.__dict__ )
File '/home/nat/work/software/galaxy-dist/lib/galaxy/tools/__init__.py',
line 807 in handle_input
   _, out_data = self.execute( trans, incoming=params )
File '/home/nat/work/software/galaxy-dist/lib/galaxy/tools/__init__.py',
line 1079 in execute
   return self.tool_action.execute( self, trans, incoming=incoming,
set_output_hid=set_output_hid )
File
'/home/nat/work/software/galaxy-dist/lib/galaxy/tools/actions/__init__.py',
line 140 in execute
   inp_data = self.collect_input_datasets( tool, incoming, trans )
File
'/home/nat/work/software/galaxy-dist/lib/galaxy/tools/actions/__init__.py',
line 101 in collect_input_datasets
   tool.visit_inputs( param_values, visitor )
File '/home/nat/work/software/galaxy-dist/lib/galaxy/tools/__init__.py',
line 754 in visit_inputs
   callback( "", input, value[input.name] )
File
'/home/nat/work/software/galaxy-dist/lib/galaxy/tools/actions/__init__.py',
line 85 in visitor
   input_datasets[ prefix + input.name ] = process_dataset( value )
File
'/home/nat/work/software/galaxy-dist/lib/galaxy/tools/actions/__init__.py',
line 47 in process_dataset
   new_data = data.datatype.convert_dataset( trans, data, target_ext,
return_output = True, visible = False ).values()[0]
File '/home/nat/work/software/galaxy-dist/lib/galaxy/datatypes/data.py',
line 264 in convert_dataset
   for name, value in converted_dataset.iteritems():
AttributeError: 'tuple' object has no attribute 'iteritems'
Can anyone tell me what I'm doing wrong? Do I need more than just the nib files?
Many thanks
Nathaniel
-- 
Nathaniel Street
Umeå Plant Science Centre
Department of Plant Physiology
University of Umeå
SE-901 87 Umeå
SWEDEN
email: nathaniel.street(a)plantphys.umu.se
tel: +46-90-786 5473
fax:  +46-90-786 6676
www.popgenie.org
-- 
Nathaniel Street
Umeå Plant Science Centre
Department of Plant Physiology
University of Umeå
SE-901 87 Umeå
SWEDEN
email: nathaniel.street(a)plantphys.umu.se
tel: +46-90-786 5473
fax:  +46-90-786 6676
www.popgenie.org
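
(Since the manual GFF-->BED route works while the implicit converter fails, here is a rough standalone sketch of that conversion step, assuming plain single-line GFF3 features with no parent/child handling; it is not the converter Galaxy ships, just the same idea done by hand so the extract step can be fed a BED directly.)

    import sys

    def gff3_to_bed(gff_in, bed_out):
        out = open(bed_out, "w")
        for line in open(gff_in):
            if line.startswith("#") or not line.strip():
                continue
            f = line.rstrip("\n").split("\t")
            if len(f) < 8:
                continue
            # GFF3 is 1-based and end-inclusive; BED is 0-based, half-open.
            chrom, start, end = f[0], int(f[3]) - 1, int(f[4])
            out.write("%s\t%d\t%d\t%s\t0\t%s\n" % (chrom, start, end, f[2], f[6]))
        out.close()

    if __name__ == "__main__":
        gff3_to_bed(sys.argv[1], sys.argv[2])
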
                    11 Mar '10
                    
Dear Galaxy Team,
I need to run a Galaxy tool on a cluster, and it needs to consume more memory than the default. Any idea how I can manage this? I am using 'sge'.
Many thanks, Vipin
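
(One approach that may help, sketched with the caveat that the exact runner URL grammar should be checked against universe_wsgi.ini.sample for this release: map the memory-hungry tool to its own entry in the [galaxy:tool_runners] section so its jobs land where a larger memory limit applies, and express the limit on the SGE side. The tool id, queue name and limit below are made up for illustration.)

    # universe_wsgi.ini -- hypothetical excerpt; see universe_wsgi.ini.sample
    # for the URL forms the sge runner in your version actually accepts
    [galaxy:tool_runners]
    my_heavy_tool = sge:///

    # SGE side: the equivalent of a manual submission asking for more memory,
    #   qsub -l h_vmem=8G myjob.sh
    # can be made the default for the queue Galaxy submits to (qconf -mq <queue>).
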
Hi,
I am sending this mail again because I didn't receive a reply the last time.
We have a local installation of Galaxy running on our system. We are trying to migrate the contents from the sqlite database to our own MySQL database.
When we change database_connection to
"mysql:///galaxy?unix_socket=/var/run/mysqld/mysqld.sock" in the
universe_wsgi.ini file and run Galaxy, we receive an error like:
"Access denied for user 'root'@'localhost' (using password: NO)") None
None
So is there an option somewhere which we also have to change to make the connection work?
And question two:
Do we need to migrate the content from the sqlite database to the MySQL
database by hand or does this happen automatically?
I hope to receive an answer soon.
Sincerely,
Freerk van Dijk
The contents of this message are confidential and only intended for the eyes of the addressee(s). Others than the addressee(s) are not allowed to use this message, to make it public or to distribute or multiply this message in any way. The UMCG cannot be held responsible for incomplete reception or delay of this transferred message.
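
(A hedged note on both questions. The error message itself points at the likely cause of the first: the URL above carries no credentials, so the MySQL driver falls back to a default account, hence "Access denied for user 'root'". A connection string in the usual SQLAlchemy form, with a user and password that actually exist in MySQL, would look like the line below; 'galaxyuser' and 'secret' are placeholders.

    database_connection = mysql://galaxyuser:secret@localhost/galaxy?unix_socket=/var/run/mysqld/mysqld.sock

On the second question: Galaxy creates and migrates the schema in the new database when it starts against it, but as far as I know it does not copy existing histories or other data across from the sqlite file; that would have to be done by hand or by starting fresh.)
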
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/44e9b5ca9cf0
changeset: 3511:44e9b5ca9cf0
user:      fubar: ross Lazarus at gmail period com
date:      Wed Mar 10 21:01:20 2010 -0500
description:
Remove deprecated rgenetics Lmap datatype from datatypes_conf.xml.sample
Was causing buildbot problems..
diffstat:
 datatypes_conf.xml.sample |  1 -
 1 files changed, 0 insertions(+), 1 deletions(-)
diffs (11 lines):
diff -r 2af472aa0844 -r 44e9b5ca9cf0 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample	Wed Mar 10 17:10:43 2010 -0500
+++ b/datatypes_conf.xml.sample	Wed Mar 10 21:01:20 2010 -0500
@@ -194,7 +194,6 @@
         <!-- genome graphs ucsc file - first col is always marker then numeric values to plot -->
         <datatype extension="gg" type="galaxy.datatypes.genetics:GenomeGraphs"/>
         <!-- part of linkage format pedigree -->
-        <datatype extension="lmap" type="galaxy.datatypes.genetics:Lmap" display_in_upload="true"/>
         <datatype extension="malist" type="galaxy.datatypes.genetics:MAlist" display_in_upload="true"/>
         <!-- linkage format pedigree (separate .map file) -->
         <datatype extension="lped" type="galaxy.datatypes.genetics:Lped" display_in_upload="true">
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/2af472aa0844
changeset: 3510:2af472aa0844
user:      fubar: ross Lazarus at gmail period com
date:      Wed Mar 10 17:10:43 2010 -0500
description:
Revert the twilltestcase upload_composite code needed for lped/pbed upload testing
diffstat:
 test/base/twilltestcase.py       |  54 ++++++++++++++++++++++++++++++++++++++++
 test/functional/test_get_data.py |   6 ++--
 test/functional/test_toolbox.py  |   1 +
 3 files changed, 58 insertions(+), 3 deletions(-)
diffs (100 lines):
diff -r 26c40d8e8fdc -r 2af472aa0844 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py	Wed Mar 10 16:23:53 2010 -0500
+++ b/test/base/twilltestcase.py	Wed Mar 10 17:10:43 2010 -0500
@@ -196,6 +196,60 @@
         # Wait for upload processing to finish (TODO: this should be done in each test case instead)
         self.wait()
 
+    def upload_composite_datatype_file( self, ftype, ped_file='', map_file='', bim_file='', bed_file='',
+           fped_file='',fphe_file='',pphe_file='',fam_file='',pheno_file='',eset_file='',malist_file='',
+           affybatch_file='', dbkey='unspecified (?)', base_name='rgenetics' ):
+        """Tests uploading either of 2 different composite data types ( lped and pbed )"""
+        self.visit_url( "%s/tool_runner/index?tool_id=upload1" % self.url )
+        # Handle refresh_on_change
+        self.refresh_form( "file_type", ftype )
+        tc.fv( "1", "dbkey", dbkey )
+        tc.fv( "1", "files_metadata|base_name", base_name )
+        if ftype == 'lped':
+            # lped data types include a ped_file and a map_file
+            ped_file = self.get_filename( ped_file )
+            tc.formfile( "1", "files_0|file_data", ped_file )
+            map_file = self.get_filename( map_file )
+            tc.formfile( "1", "files_1|file_data", map_file )
+        elif ftype == 'pbed':
+            # pbed data types include a bim_file, a bed_file and a fam_file
+            bim_file = self.get_filename( bim_file )
+            tc.formfile( "1", "files_0|file_data", bim_file )
+            bed_file = self.get_filename( bed_file )
+            tc.formfile( "1", "files_1|file_data", bed_file )
+            fam_file = self.get_filename( fam_file )
+            tc.formfile( "1", "files_2|file_data", fam_file )
+        elif ftype == 'pphe':
+            # pphe data types include a phe_file
+            pphe_file = self.get_filename( pphe_file )
+            tc.formfile( "1", "files_0|file_data", pphe_file )
+        elif ftype == 'fped':
+            # fped data types include an fped_file only
+            fped_file = self.get_filename( fped_file )
+            tc.formfile( "1", "files_0|file_data", fped_file )
+        elif ftype == 'eset':
+            # eset data types include a eset_file, a pheno_file
+            eset_file = self.get_filename( eset_file )
+            tc.formfile( "1", "files_0|file_data", eset_file )
+            pheno_file = self.get_filename( pheno_file )
+            tc.formfile( "1", "files_1|file_data", pheno_file )
+        elif ftype == 'affybatch':
+            # affybatch data types include an affybatch_file, and a pheno_file
+            affybatch_file = self.get_filename( affybatch_file )
+            tc.formfile( "1", "files_0|file_data", affybatch_file )
+            pheno_file = self.get_filename( pheno_file )
+            tc.formfile( "1", "files_1|file_data", pheno_file )
+        else:
+            raise AssertionError, "Unsupported composite data type (%s) received, currently only %s data types are supported."\
+                 % (ftype,','.join(self.composite_extensions))
+        tc.submit( "runtool_btn" )
+        self.check_page_for_string( 'The following job has been succesfully added to the queue:' )
+        check_str = base_name #'Uploaded Composite Dataset (%s)' % ftype
+        self.check_page_for_string( check_str )
+        # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+        self.wait()
+        self.check_history_for_string( check_str )
+
     # Functions associated with histories
     def check_history_for_errors( self ):
         """Raises an exception if there are errors in a history"""
diff -r 26c40d8e8fdc -r 2af472aa0844 test/functional/test_get_data.py
--- a/test/functional/test_get_data.py	Wed Mar 10 16:23:53 2010 -0500
+++ b/test/functional/test_get_data.py	Wed Mar 10 17:10:43 2010 -0500
@@ -183,7 +183,7 @@
         # We'll test against the resulting ped file and map file for correctness
         self.verify_composite_datatype_file_content( 'rgenetics.ped', str( hda.id ) )
         self.verify_composite_datatype_file_content( 'rgenetics.map', str( hda.id ) )
-        self.check_history_for_string( "Uploaded Composite Dataset (lped)" )
+        self.check_history_for_string( "rgenetics" )
         self.delete_history( id=self.security.encode_id( history.id ) )
     def test_0060_upload_file( self ):
         """Test uploading pbed composite datatype file, manually setting the file format"""
@@ -205,7 +205,7 @@
         self.verify_composite_datatype_file_content( 'rgenetics.bim', str( hda.id ) )
         self.verify_composite_datatype_file_content( 'rgenetics.bed', str( hda.id ) )
         self.verify_composite_datatype_file_content( 'rgenetics.fam', str( hda.id ) )
-        self.check_history_for_string( "Uploaded Composite Dataset (pbed)" )
+        self.check_history_for_string( "rgenetics" )
         self.delete_history( id=self.security.encode_id( history.id ) )
     def test_0065_upload_file( self ):
         """Test uploading asian_chars_1.txt, NOT setting the file format"""
@@ -576,4 +576,4 @@
         self.check_history_for_string( 'hello world' )
         self.delete_history( id=self.security.encode_id( history.id ) )
     def test_9999_clean_up( self ):
-        self.logout()
\ No newline at end of file
+        self.logout()
diff -r 26c40d8e8fdc -r 2af472aa0844 test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py	Wed Mar 10 16:23:53 2010 -0500
+++ b/test/functional/test_toolbox.py	Wed Mar 10 17:10:43 2010 -0500
@@ -161,3 +161,4 @@
                 m.__doc__ = "%s ( %s ) > %s" % ( tool.name, tool.id, testdef.name )
                 d['test_tool_%06d' % j] = m
             G[ n ] = new.classobj( n, s, d )
+
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/26c40d8e8fdc
changeset: 3509:26c40d8e8fdc
user:      gua110
date:      Wed Mar 10 16:23:53 2010 -0500
description:
Updated functional tests for "multivariate statistics" tools, by shortening the number of decimal places displayed, so that they pass on all test platforms.
diffstat:
 test-data/cca_out1.tabular       |   30 +-
 test-data/cca_out2.pdf           |    4 +-
 test-data/kcca_out1.tabular      |  602 +++++++++++++++++++-------------------
 test-data/kcca_out2.tabular      |  602 +++++++++++++++++++-------------------
 test-data/kpca_out1.tabular      |  602 +++++++++++++++++++-------------------
 test-data/kpca_out2.pdf          |    4 +-
 test-data/kpca_out3.tabular      |  602 +++++++++++++++++++-------------------
 test-data/kpca_out4.pdf          |    4 +-
 test-data/pca_out1.tabular       |  312 ++++++++++----------
 test-data/pca_out2.pdf           |    4 +-
 test-data/pca_out3.tabular       |  312 ++++++++++----------
 test-data/pca_out4.pdf           |    4 +-
 tools/multivariate_stats/cca.py  |   18 +-
 tools/multivariate_stats/kcca.py |    6 +-
 tools/multivariate_stats/kpca.py |    6 +-
 tools/multivariate_stats/pca.py  |   19 +-
 16 files changed, 1568 insertions(+), 1563 deletions(-)
diffs (truncated from 3327 to 3000 lines):
diff -r 07a608852925 -r 26c40d8e8fdc test-data/cca_out1.tabular
--- a/test-data/cca_out1.tabular	Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/cca_out1.tabular	Wed Mar 10 16:23:53 2010 -0500
@@ -1,22 +1,22 @@
 #Component	1	2
-#Correlation	0.940897180432	0.131074795925
-#F-statistic	144.410560578	2.5696974623
-#p-value	6.21285598619e-68	0.111075110551
+#Correlation	0.9409	0.1311
+#F-statistic	144.4	2.57
+#p-value	6.213e-68	0.1111
 #X-Coefficients	1	2
-c3	1.50661351834	-3.37790409332
-c4	-0.537226204038	3.65944099051
+c3	1.507	-3.378
+c4	-0.5372	3.659
 #Y-Coefficients	1	2
-c1	6.35046749378	3.37940792566
-c2	-2.6597206473	6.66976562808
+c1	6.35	3.379
+c2	-2.66	6.67
 #X-Loadings	1	2
-c3	0.989395177676	0.145248691528
-c4	0.913276653253	0.407339851504
+c3	0.9894	0.1452
+c4	0.9133	0.4073
 #Y-Loadings	1	2
-c1	0.928869265104	0.370407732566
-c2	-0.469775462051	0.882785939656
+c1	0.9289	0.3704
+c2	-0.4698	0.8828
 #X-CrossLoadings	1	2
-c3	0.930919133009	0.0190384426004
-c4	0.859299428	0.0533919879079
+c3	0.9309	0.01904
+c4	0.8593	0.05339
 #Y-CrossLoadings	1	2
-c1	0.873970472527	0.0485511179549
-c2	-0.44201040768	0.115710986885
+c1	0.874	0.04855
+c2	-0.442	0.1157
diff -r 07a608852925 -r 26c40d8e8fdc test-data/cca_out2.pdf
--- a/test-data/cca_out2.pdf	Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/cca_out2.pdf	Wed Mar 10 16:23:53 2010 -0500
@@ -2,8 +2,8 @@
 %âãÏÓ\r
 1 0 obj
 <<
-/CreationDate (D:20100303132536)
-/ModDate (D:20100303132536)
+/CreationDate (D:20100310155029)
+/ModDate (D:20100310155029)
 /Title (R Graphics Output)
 /Producer (R 2.10.0)
 /Creator (R)
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kcca_out1.tabular
--- a/test-data/kcca_out1.tabular	Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kcca_out1.tabular	Wed Mar 10 16:23:53 2010 -0500
@@ -1,304 +1,304 @@
 #Component	1	2	3	4
-#Correlation	-0.999499965799	0.999499965799	-0.937388978867	0.937388978867
+#Correlation	-0.9995	0.9995	-0.9374	0.9374
 #Estimated X-coefficients	1	2	3	4
-1	-0.00159226306681	-0.00159226306681	0.0347922336733	0.0347922336734
-2	-0.00110978716164	-0.00110978716146	0.0325550634964	0.0325550634964
-3	0.000104871380431	0.00010487138039	0.0183484731624	0.0183484731623
-4	0.00102415734634	0.00102415734636	-0.00485228064778	-0.00485228064775
-5	-0.000169999236028	-0.000169999236004	0.0524192345792	0.0524192345792
-6	0.00230258587505	0.00230258587502	0.0196794881537	0.0196794881537
-7	0.00145199861756	0.00145199861756	0.00575575672441	0.00575575672438
-8	-0.00159595387536	-0.00159595387541	0.0382833844726	0.0382833844726
-9	0.00360877029314	0.00360877029312	-0.0750970801906	-0.0750970801906
-10	-0.00136083597277	-0.00136083597278	0.0318384011561	0.0318384011561
-11	-0.000806991906868	-0.000806991906869	0.00139328931528	0.00139328931529
-12	-0.000200007372161	-0.000200007372182	0.0392128558972	0.0392128558972
-13	-0.000376620449557	-0.000376620449546	0.0293209660811	0.0293209660811
-14	0.00424812920862	0.00424812920862	-0.131314655808	-0.131314655808
-15	0.00440751040841	0.00440751040844	-0.0215875677083	-0.0215875677083
-16	0.0158137737287	0.0158137737287	0.0413483506506	0.0413483506506
-17	0.00230258096229	0.00230258096233	0.0196792569231	0.0196792569231
-18	-0.00159229337587	-0.00159229337587	0.034791794765	0.034791794765
-19	0.000470908151429	0.000470908151435	-0.033768454116	-0.033768454116
-20	0.00175754173735	0.00175754173739	0.0594708458718	0.0594708458718
-21	-0.00334320757685	-0.00334320757684	-0.0210157518991	-0.0210157518991
-22	0.000356998265944	0.00035699826596	0.0506142212928	0.0506142212928
-23	0.00287793787049	0.00287793787051	0.0198921560768	0.0198921560768
-24	-0.00253287592115	-0.00253287592111	0.0229011408536	0.0229011408536
-25	-0.000199966741261	-0.000199966741259	0.039213190751	0.0392131907509
-26	-0.00177266177845	-0.00177266177847	0.0283910522008	0.0283910522008
-27	-0.00159590123482	-0.00159590123485	0.038282588671	0.038282588671
-28	-0.00208498848365	-0.00208498848366	0.0204255781063	0.0204255781063
-29	-0.00267192971883	-0.00267192971883	0.0138389269867	0.0138389269867
-30	0.000104980469185	0.000104980469188	0.0183490300234	0.0183490300234
-31	-0.000627859471217	-0.000627859471211	0.0286050882842	0.0286050882842
-32	-0.00334315632058	-0.0033431563206	-0.021015317998	-0.021015317998
-33	0.00737949966525	0.00737949966526	0.0757911137154	0.0757911137153
-34	0.00926681020655	0.00926681020653	0.0362748843231	0.0362748843231
-35	-0.00136084775243	-0.00136084775244	0.0318385553343	0.0318385553343
-36	-0.00208708693783	-0.00208708693783	0.029318413035	0.029318413035
-37	-0.00291824596158	-0.0029182459616	-0.0316400169446	-0.0316400169446
-38	-0.00136084705751	-0.00136084705752	0.0318383985846	0.0318383985845
-39	0.00317922708464	0.00317922708461	-0.0784504607557	-0.0784504607557
-40	-0.00217929118556	-0.00217929118555	0.0282052840325	0.0282052840325
-41	-0.00100896730901	-0.00100896730901	0.0448689392189	0.0448689392188
-42	0.0088378418176	0.00883784181758	0.0414569436219	0.0414569436219
-43	0.00286479080033	0.00286479080034	-0.0775226035577	-0.0775226035577
-44	-0.00100892420468	-0.00100892420468	0.044868959418	0.044868959418
-45	0.00175754329615	0.00175754329613	0.0594711681396	0.0594711681395
-46	-0.000376629073915	-0.000376629073921	0.0293209668801	0.0293209668801
-47	0.00175754920557	0.00175754920562	0.0594710191761	0.0594710191761
-48	0.000960826589906	0.000960826589907	-0.00320856169229	-0.00320856169234
-49	-0.000526936996666	-0.000526936996691	0.0192804658813	0.0192804658813
-50	-0.00194953734422	-0.0019495373442	0.0329782808594	0.0329782808594
-51	6.55505160483e-05	6.55505160251e-05	0.0963097412008	0.0963097412008
-52	-0.0011718760537	-0.00117187605367	-0.0201469456847	-0.0201469456848
-53	0.000176671254579	0.000176671254581	0.0847986697478	0.0847986697478
-54	0.00296310911687	0.00296310911688	0.0293431942735	0.0293431942735
-55	0.000607776190472	0.000607776190486	0.0117139207005	0.0117139207005
-56	-0.00261157098019	-0.00261157098018	-0.0654067243745	-0.0654067243745
-57	-0.0015146048768	-0.0015146048768	-0.0376121620644	-0.0376121620644
-58	0.00404504063063	0.00404504063063	0.092945109105	0.092945109105
-59	0.00036877721416	0.000368777214162	0.0295027716538	0.0295027716538
-60	-0.00104700002988	-0.00104700002991	0.0224831294608	0.0224831294608
-61	0.0106423484729	0.0106423484729	0.152385267998	0.152385267998
-62	-0.0031520185592	-0.00315201855917	-0.0846778020825	-0.0846778020825
-63	0.00557212171653	0.00557212171651	0.0129291757862	0.0129291757862
-64	-0.00189259304055	-0.00189259304053	-0.0699730283655	-0.0699730283655
-65	-0.00329284250094	-0.00329284250094	-0.059850140447	-0.059850140447
-66	-4.19255339958e-05	-4.19255339884e-05	0.0484549594796	0.0484549594797
-67	-0.00372238588335	-0.00372238588336	-0.0632035000244	-0.0632035000244
-68	-0.00164700987437	-0.00164700987437	-0.0637729313267	-0.0637729313267
-69	0.00650587912972	0.00650587912971	0.0346722693281	0.0346722693281
-70	0.000134953294294	0.000134953294323	-0.0187985221915	-0.0187985221915
-71	-0.00346646664488	-0.00346646664488	-0.0837494602393	-0.0837494602393
-72	-0.00129113838679	-0.00129113838678	-0.0638064857952	-0.0638064857951
-73	0.00251974364983	0.00251974364985	0.0022055760206	0.00220557602061
-74	-0.00129116036811	-0.0012911603681	-0.0638066193927	-0.0638066193927
-75	-0.000427852329099	-0.000427852329122	-0.0177210266231	-0.0177210266231
-76	-6.07104681603e-05	-6.07104681845e-05	0.0261490639915	0.0261490639915
-77	0.00140452555267	0.00140452555267	0.0790608976409	0.0790608976408
-78	0.000209327997368	0.000209327997393	0.0491700187535	0.0491700187535
-79	-0.00233986780648	-0.00233986780648	-0.0783320881243	-0.0783320881243
-80	-0.000900328222947	-0.00090032822295	-0.0446524406999	-0.0446524406999
-81	0.00147282851191	0.00147282851192	0.0122725620319	0.0122725620319
-82	0.00147281209976	0.00147281209977	0.0122725086224	0.0122725086224
-83	-0.00164700173013	-0.00164700173014	-0.06377323849	-0.06377323849
-84	-0.000967708203349	-0.000967708203351	-0.0631482116068	-0.0631482116068
-85	-0.00351986557339	-0.00351986557338	-0.030908097987	-0.030908097987
-86	-0.00259272827972	-0.00259272827972	-0.0717927951437	-0.0717927951437
-87	-4.18965141628e-05	-4.18965141522e-05	0.0484552570532	0.0484552570532
-88	0.00530734252965	0.00530734252963	0.0352616047044	0.0352616047044
-89	-0.00372239312775	-0.00372239312776	-0.0632034676432	-0.0632034676432
-90	0.000175478311049	0.000175478311042	-0.00371336493864	-0.00371336493865
-91	-0.000939577619534	-0.000939577619522	-0.0178449710765	-0.0178449710765
-92	-0.00232205533945	-0.00232205533946	-0.0733261558013	-0.0733261558013
-93	-0.00070634156182	-0.000706341561824	-0.052035469948	-0.052035469948
-94	0.00487236705406	0.00487236705406	0.105852225573	0.105852225573
-95	-0.00192078284287	-0.00192078284288	-0.0446670554592	-0.0446670554592
-96	-0.00364258343827	-0.00364258343827	-0.0749260068443	-0.0749260068443
-97	-0.00321303286671	-0.0032130328667	-0.0715723152411	-0.0715723152411
-98	-0.00140604417103	-0.00140604417105	-0.0565884530031	-0.0565884530031
-99	0.00150148065434	0.00150148065431	0.0627184287393	0.0627184287394
-100	-0.00261158477	-0.00261158476999	-0.0654066442111	-0.0654066442112
-101	-0.00151458006047	-0.00151458006047	-0.0376119582238	-0.0376119582238
-102	-0.00164700343889	-0.00164700343888	-0.06377288888	-0.06377288888
-103	0.000251068468294	0.000251068468298	0.0975435120003	0.0975435120003
-104	-0.000908080899935	-0.000908080899925	-0.0388460712661	-0.0388460712661
-105	-0.000423216718942	-0.000423216718959	0.00219435777697	0.00219435777695
-106	0.000255624978855	0.00025562497886	-0.0480116282141	-0.0480116282141
-107	0.00274773787057	0.00274773787055	0.0769590274139	0.0769590274139
-108	0.000341726654612	0.000341726654597	0.0766246057863	0.0766246057863
-109	0.00406672119039	0.0040667211904	0.0935756884052	0.0935756884052
-110	0.00168012039808	0.00168012039807	0.114599686992	0.114599686992
-111	-0.000737702734906	-0.000737702734914	0.0031222049116	0.00312220491159
-112	0.000944239770407	0.000944239770391	-0.00253804367075	-0.00253804367075
-113	0.000373533380611	0.000373533380627	0.0695413493313	0.0695413493313
-114	0.000214764783387	0.000214764783394	-0.0305207662625	-0.0305207662625
-115	-0.0024176574971	-0.00241765749712	-0.0727895122943	-0.0727895122943
-116	-0.00117184106569	-0.00117184106569	-0.020147379235	-0.020147379235
-117	-0.000423245890376	-0.000423245890374	0.00219411724102	0.002194117241
-118	0.00511278621358	0.00511278621362	-0.0711192328408	-0.0711192328408
-119	0.00374158288488	0.00374158288488	-0.0820025184858	-0.0820025184857
-120	0.00557210528707	0.00557210528706	0.0129285972769	0.0129285972769
-121	0.000113440914253	0.000113440914255	0.0864426484813	0.0864426484813
-122	-0.00269141225358	-0.00269141225358	-0.0536835150794	-0.0536835150794
-123	0.00203032906275	0.00203032906276	-0.102756314558	-0.102756314558
-124	0.000463985316019	0.00046398531602	-0.0236637080927	-0.0236637080927
-125	3.23398695696e-05	3.23398695946e-05	0.0537583523329	0.0537583523329
-126	-0.000236944823046	-0.000236944823052	0.0914993805259	0.0914993805259
-127	-0.000804521731675	-0.000804521731666	-0.0504218395017	-0.0504218395017
-128	-0.00232209010852	-0.00232209010854	-0.0733257347061	-0.0733257347061
-129	0.000173591516244	0.000173591516248	-0.0115550211641	-0.0115550211641
-130	7.75013058308e-05	7.7501305819e-05	0.0905707020934	0.0905707020934
-131	0.000856346120183	0.000856346120186	0.0542528201863	0.0542528201863
-132	0.00833835823432	0.0083383582343	-0.232668169832	-0.232668169832
-133	0.000173619999677	0.000173619999659	-0.0115549698037	-0.0115549698037
-134	-0.000306631445279	-0.000306631445295	-0.0326803614331	-0.032680361433
-135	0.000420140644556	0.00042014064454	-0.0430529283201	-0.0430529283201
-136	0.0009993072384	0.000999307238389	-0.112276057196	-0.112276057196
-137	-0.00116097843462	-0.00116097843463	-0.0323074368066	-0.0323074368066
-138	-0.00110862543672	-0.0011086254367	-0.0217901323169	-0.0217901323169
-139	-0.0027693146479	-0.00276931464791	-0.0816839889734	-0.0816839889734
-140	0.000176676195733	0.000176676195739	0.0847993817539	0.0847993817538
-141	-4.19162508702e-05	-4.19162509088e-05	0.048455131876	0.048455131876
-142	0.000176690917474	0.000176690917456	0.0847992182528	0.0847992182528
-143	-0.00164700027723	-0.00164700027725	-0.0637729771005	-0.0637729771005
-144	5.90748259444e-05	5.90748259518e-05	0.0704687213602	0.0704687213602
-145	3.23787858251e-05	3.23787858432e-05	0.0537581665472	0.0537581665472
-146	0.000209342022695	0.000209342022689	0.0491697329809	0.0491697329809
-147	0.00251978363427	0.00251978363426	0.00220560706463	0.00220560706461
-148	-0.000423180575963	-0.000423180576007	0.00219444456054	0.00219444456053
-149	-0.00165893244585	-0.00165893244587	-0.0500493239232	-0.0500493239232
-150	-0.00315200955113	-0.00315200955116	-0.0846780840986	-0.0846780840986
+1	-0.001592	-0.001592	0.03479	0.03479
+2	-0.00111	-0.00111	0.03256	0.03256
+3	0.0001049	0.0001049	0.01835	0.01835
+4	0.001024	0.001024	-0.004852	-0.004852
+5	-0.00017	-0.00017	0.05242	0.05242
+6	0.002303	0.002303	0.01968	0.01968
+7	0.001452	0.001452	0.005756	0.005756
+8	-0.001596	-0.001596	0.03828	0.03828
+9	0.003609	0.003609	-0.0751	-0.0751
+10	-0.001361	-0.001361	0.03184	0.03184
+11	-0.000807	-0.000807	0.001393	0.001393
+12	-0.0002	-0.0002	0.03921	0.03921
+13	-0.0003766	-0.0003766	0.02932	0.02932
+14	0.004248	0.004248	-0.1313	-0.1313
+15	0.004408	0.004408	-0.02159	-0.02159
+16	0.01581	0.01581	0.04135	0.04135
+17	0.002303	0.002303	0.01968	0.01968
+18	-0.001592	-0.001592	0.03479	0.03479
+19	0.0004709	0.0004709	-0.03377	-0.03377
+20	0.001758	0.001758	0.05947	0.05947
+21	-0.003343	-0.003343	-0.02102	-0.02102
+22	0.000357	0.000357	0.05061	0.05061
+23	0.002878	0.002878	0.01989	0.01989
+24	-0.002533	-0.002533	0.0229	0.0229
+25	-0.0002	-0.0002	0.03921	0.03921
+26	-0.001773	-0.001773	0.02839	0.02839
+27	-0.001596	-0.001596	0.03828	0.03828
+28	-0.002085	-0.002085	0.02043	0.02043
+29	-0.002672	-0.002672	0.01384	0.01384
+30	0.000105	0.000105	0.01835	0.01835
+31	-0.0006279	-0.0006279	0.02861	0.02861
+32	-0.003343	-0.003343	-0.02102	-0.02102
+33	0.007379	0.007379	0.07579	0.07579
+34	0.009267	0.009267	0.03627	0.03627
+35	-0.001361	-0.001361	0.03184	0.03184
+36	-0.002087	-0.002087	0.02932	0.02932
+37	-0.002918	-0.002918	-0.03164	-0.03164
+38	-0.001361	-0.001361	0.03184	0.03184
+39	0.003179	0.003179	-0.07845	-0.07845
+40	-0.002179	-0.002179	0.02821	0.02821
+41	-0.001009	-0.001009	0.04487	0.04487
+42	0.008838	0.008838	0.04146	0.04146
+43	0.002865	0.002865	-0.07752	-0.07752
+44	-0.001009	-0.001009	0.04487	0.04487
+45	0.001758	0.001758	0.05947	0.05947
+46	-0.0003766	-0.0003766	0.02932	0.02932
+47	0.001758	0.001758	0.05947	0.05947
+48	0.0009608	0.0009608	-0.003209	-0.003209
+49	-0.0005269	-0.0005269	0.01928	0.01928
+50	-0.00195	-0.00195	0.03298	0.03298
+51	6.555e-05	6.555e-05	0.09631	0.09631
+52	-0.001172	-0.001172	-0.02015	-0.02015
+53	0.0001767	0.0001767	0.0848	0.0848
+54	0.002963	0.002963	0.02934	0.02934
+55	0.0006078	0.0006078	0.01171	0.01171
+56	-0.002612	-0.002612	-0.06541	-0.06541
+57	-0.001515	-0.001515	-0.03761	-0.03761
+58	0.004045	0.004045	0.09295	0.09295
+59	0.0003688	0.0003688	0.0295	0.0295
+60	-0.001047	-0.001047	0.02248	0.02248
+61	0.01064	0.01064	0.1524	0.1524
+62	-0.003152	-0.003152	-0.08468	-0.08468
+63	0.005572	0.005572	0.01293	0.01293
+64	-0.001893	-0.001893	-0.06997	-0.06997
+65	-0.003293	-0.003293	-0.05985	-0.05985
+66	-4.193e-05	-4.193e-05	0.04845	0.04845
+67	-0.003722	-0.003722	-0.0632	-0.0632
+68	-0.001647	-0.001647	-0.06377	-0.06377
+69	0.006506	0.006506	0.03467	0.03467
+70	0.000135	0.000135	-0.0188	-0.0188
+71	-0.003466	-0.003466	-0.08375	-0.08375
+72	-0.001291	-0.001291	-0.06381	-0.06381
+73	0.00252	0.00252	0.002206	0.002206
+74	-0.001291	-0.001291	-0.06381	-0.06381
+75	-0.0004279	-0.0004279	-0.01772	-0.01772
+76	-6.071e-05	-6.071e-05	0.02615	0.02615
+77	0.001405	0.001405	0.07906	0.07906
+78	0.0002093	0.0002093	0.04917	0.04917
+79	-0.00234	-0.00234	-0.07833	-0.07833
+80	-0.0009003	-0.0009003	-0.04465	-0.04465
+81	0.001473	0.001473	0.01227	0.01227
+82	0.001473	0.001473	0.01227	0.01227
+83	-0.001647	-0.001647	-0.06377	-0.06377
+84	-0.0009677	-0.0009677	-0.06315	-0.06315
+85	-0.00352	-0.00352	-0.03091	-0.03091
+86	-0.002593	-0.002593	-0.07179	-0.07179
+87	-4.19e-05	-4.19e-05	0.04846	0.04846
+88	0.005307	0.005307	0.03526	0.03526
+89	-0.003722	-0.003722	-0.0632	-0.0632
+90	0.0001755	0.0001755	-0.003713	-0.003713
+91	-0.0009396	-0.0009396	-0.01784	-0.01784
+92	-0.002322	-0.002322	-0.07333	-0.07333
+93	-0.0007063	-0.0007063	-0.05204	-0.05204
+94	0.004872	0.004872	0.1059	0.1059
+95	-0.001921	-0.001921	-0.04467	-0.04467
+96	-0.003643	-0.003643	-0.07493	-0.07493
+97	-0.003213	-0.003213	-0.07157	-0.07157
+98	-0.001406	-0.001406	-0.05659	-0.05659
+99	0.001501	0.001501	0.06272	0.06272
+100	-0.002612	-0.002612	-0.06541	-0.06541
+101	-0.001515	-0.001515	-0.03761	-0.03761
+102	-0.001647	-0.001647	-0.06377	-0.06377
+103	0.0002511	0.0002511	0.09754	0.09754
+104	-0.0009081	-0.0009081	-0.03885	-0.03885
+105	-0.0004232	-0.0004232	0.002194	0.002194
+106	0.0002556	0.0002556	-0.04801	-0.04801
+107	0.002748	0.002748	0.07696	0.07696
+108	0.0003417	0.0003417	0.07662	0.07662
+109	0.004067	0.004067	0.09358	0.09358
+110	0.00168	0.00168	0.1146	0.1146
+111	-0.0007377	-0.0007377	0.003122	0.003122
+112	0.0009442	0.0009442	-0.002538	-0.002538
+113	0.0003735	0.0003735	0.06954	0.06954
+114	0.0002148	0.0002148	-0.03052	-0.03052
+115	-0.002418	-0.002418	-0.07279	-0.07279
+116	-0.001172	-0.001172	-0.02015	-0.02015
+117	-0.0004232	-0.0004232	0.002194	0.002194
+118	0.005113	0.005113	-0.07112	-0.07112
+119	0.003742	0.003742	-0.082	-0.082
+120	0.005572	0.005572	0.01293	0.01293
+121	0.0001134	0.0001134	0.08644	0.08644
+122	-0.002691	-0.002691	-0.05368	-0.05368
+123	0.00203	0.00203	-0.1028	-0.1028
+124	0.000464	0.000464	-0.02366	-0.02366
+125	3.234e-05	3.234e-05	0.05376	0.05376
+126	-0.0002369	-0.0002369	0.0915	0.0915
+127	-0.0008045	-0.0008045	-0.05042	-0.05042
+128	-0.002322	-0.002322	-0.07333	-0.07333
+129	0.0001736	0.0001736	-0.01156	-0.01156
+130	7.75e-05	7.75e-05	0.09057	0.09057
+131	0.0008563	0.0008563	0.05425	0.05425
+132	0.008338	0.008338	-0.2327	-0.2327
+133	0.0001736	0.0001736	-0.01155	-0.01155
+134	-0.0003066	-0.0003066	-0.03268	-0.03268
+135	0.0004201	0.0004201	-0.04305	-0.04305
+136	0.0009993	0.0009993	-0.1123	-0.1123
+137	-0.001161	-0.001161	-0.03231	-0.03231
+138	-0.001109	-0.001109	-0.02179	-0.02179
+139	-0.002769	-0.002769	-0.08168	-0.08168
+140	0.0001767	0.0001767	0.0848	0.0848
+141	-4.192e-05	-4.192e-05	0.04846	0.04846
+142	0.0001767	0.0001767	0.0848	0.0848
+143	-0.001647	-0.001647	-0.06377	-0.06377
+144	5.907e-05	5.907e-05	0.07047	0.07047
+145	3.238e-05	3.238e-05	0.05376	0.05376
+146	0.0002093	0.0002093	0.04917	0.04917
+147	0.00252	0.00252	0.002206	0.002206
+148	-0.0004232	-0.0004232	0.002194	0.002194
+149	-0.001659	-0.001659	-0.05005	-0.05005
+150	-0.003152	-0.003152	-0.08468	-0.08468
 #Estimated Y-coefficients	1	2	3	4
-1	-0.00207802379602	0.00207802379604	0.0147525748618	-0.0147525748618
-2	-0.00207803770129	0.00207803770129	0.014754894517	-0.0147548945171
-3	-0.00186125958712	0.00186125958712	0.00430906718496	-0.00430906718501
-4	-0.00204203652249	0.00204203652248	0.00946170484263	-0.00946170484263
-5	-0.00207799868272	0.00207799868272	0.0147524900087	-0.0147524900087
-6	0.00466745517136	-0.00466745517131	-0.00473638718146	0.00473638718143
-7	0.00127661314181	-0.0012766131418	0.0308096049073	-0.0308096049073
-8	-0.00204202966462	0.0020420296646	0.00946178027838	-0.00946178027836
-9	-0.00207798104461	0.00207798104462	0.0147522000925	-0.0147522000924
-10	-0.00592375302166	0.00592375302167	-0.0133800658341	0.0133800658341
-11	-0.00204201746389	0.0020420174639	0.00946180738045	-0.00946180738041
-12	-0.00184159719676	0.00184159719675	-0.00664656658428	0.00664656658427
-13	-0.00595967357466	0.00595967357467	-0.00808941808643	0.00808941808643
-14	-0.00418938935089	0.0041893893509	-0.108608995338	0.108608995338
-15	-0.0013012394729	0.00130123947293	-0.0272679367976	0.0272679367976
-16	0.00418518885797	-0.00418518885798	0.0339388340086	-0.0339388340086
-17	0.00436596427266	-0.00436596427265	0.0287864784987	-0.0287864784987
-18	0.00127661524729	-0.00127661524728	0.0308095328996	-0.0308095328996
-19	0.00179485276965	-0.00179485276965	-0.013157096566	0.013157096566
-20	0.00131259459476	-0.00131259459477	0.0255187095653	-0.0255187095653
-21	-0.00155977265386	0.00155977265387	-0.0292141997607	0.0292141997607
-22	0.0041852046722	-0.0041852046722	0.0339387026427	-0.0339387026427
-23	0.00120614277783	-0.00120614277786	-0.17724731125	0.17724731125
-24	0.00710764806912	-0.00710764806913	-0.00432447811811	0.00432447811812
-25	-0.0010444934481	0.00104449344812	-0.0793995548327	0.0793995548327
-26	-0.00184159033127	0.00184159033127	-0.00664665686254	0.00664665686252
-27	0.00438565231829	-0.00438565231831	0.0178306826236	-0.0178306826236
-28	-0.00204202477243	0.00204202477243	0.0094617038311	-0.00946170383111
-29	-0.0020779828495	0.0020779828495	0.0147525280146	-0.0147525280146
-30	-0.00184157136867	0.00184157136867	-0.00664674784976	0.00664674784975
-31	-0.0018415799055	0.00184157990549	-0.00664671809202	0.00664671809201
-32	0.0041851833166	-0.0041851833166	0.0339389281678	-0.0339389281679
-33	-0.00592375365141	0.00592375365141	-0.013380052398	0.0133800523981
-34	-0.00207798401128	0.00207798401127	0.0147525387324	-0.0147525387324
-35	-0.00592374684747	0.00592374684747	-0.0133800953506	0.0133800953506
-36	-0.00130125322065	0.00130125322063	-0.0272678229064	0.0272678229064
-37	-0.00186126364156	0.00186126364158	0.00430983712957	-0.00430983712957
-38	-0.0059237521318	0.00592375213181	-0.013380153675	0.013380153675
-39	-0.00186126331287	0.00186126331284	0.00430983539031	-0.00430983539032
-40	-0.00204201942989	0.00204201942987	0.00946167601189	-0.00946167601188
-41	0.00149334720855	-0.00149334720853	0.0203666076652	-0.0203666076652
-42	0.00149334486802	-0.00149334486802	0.0203666572745	-0.0203666572745
-43	-0.00186125654471	0.00186125654469	0.00430985522631	-0.00430985522632
-44	0.00888546279299	-0.00888546279299	0.0107444363185	-0.0107444363185
-45	0.00518271680498	-0.005182716805	-0.0549225278324	0.0549225278324
-46	0.00127661752013	-0.00127661752012	0.0308093216206	-0.0308093216206
-47	-0.00184157347647	0.00184157347649	-0.00664647143535	0.00664647143536
-48	-0.00207797495723	0.00207797495723	0.0147525398855	-0.0147525398855
-49	-0.00204202136105	0.00204202136103	0.00946156002567	-0.00946156002569
-50	-0.00207797529532	0.00207797529531	0.0147525447432	-0.0147525447431
-51	0.000173374914166	-0.000173374914135	0.0183900180943	-0.0183900180943
-52	0.000334232451934	-0.000334232451922	-0.0231810555233	0.0231810555233
-53	0.000845589590767	-0.000845589590778	0.0130254684027	-0.0130254684028
-54	-0.00050620465143	0.000506204651423	-0.00673528987195	0.00673528987195
-55	0.000459214630557	-0.000459214630566	-0.0149813117293	0.0149813117293
-56	-0.000615457853335	0.000615457853327	0.0299232782723	-0.0299232782723
-57	0.000879971359249	-0.00087997135925	-0.0255719364993	0.0255719364993
-58	0.00260036335542	-0.0026003633554	-0.0195491080562	0.0195491080562
-59	-0.000490443228571	0.000490443228557	0.0381226217921	-0.0381226217921
-60	0.000342085855682	-0.000342085855703	-0.0444375409715	0.0444375409715
-61	0.000316768459576	-0.000316768459588	0.0237688603721	-0.0237688603721
-62	0.00016878609145	-0.000168786091454	-0.0447717783554	0.0447717783554
-63	-0.00277577058767	0.00277577058768	0.0897601662258	-0.0897601662258
-64	0.000173360052024	-0.000173360052007	0.0183900476597	-0.0183900476597
-65	0.00165683452716	-0.00165683452713	-0.0548302244871	0.0548302244871
-66	-0.000187042401834	0.000187042401845	-0.00623951407567	0.00623951407569
-67	0.000334221183017	-0.000334221183032	-0.0231809127501	0.0231809127501
-68	-0.00296633524758	0.0029663352476	0.0976503457326	-0.0976503457326
-69	0.000334209374616	-0.000334209374652	-0.0231808821748	0.0231808821748
-70	-0.00157894710211	0.0015789471021	0.0483095137533	-0.0483095137533
-71	0.00107261772161	-0.0010726177216	-0.038389379181	0.038389379181
-72	-0.000506194774864	0.000506194774861	-0.00673520248594	0.00673520248594
-73	0.000845585488371	-0.000845585488368	0.0130257485428	-0.0130257485428
-74	-0.00099649083861	0.000996490838618	0.0784164003274	-0.0784164003274
-75	-0.000780572502124	0.000780572502132	0.015267209426	-0.015267209426
-76	-0.000187049589314	0.000187049589319	-0.0062394904277	0.0062394904277
-77	0.000303631239276	-0.000303631239272	0.0278134201998	-0.0278134201999
-78	0.00138186467032	-0.0013818646703	-0.0107230643199	0.0107230643199
-79	0.000334201978329	-0.00033420197834	-0.0231807067633	0.0231807067632
-80	0.000316783844032	-0.000316783844047	0.0237685795838	-0.0237685795838
-81	-0.00111921630232	0.00111921630232	0.0375207814487	-0.0375207814487
-82	-0.0013854108222	0.00138541082219	0.0569766380778	-0.0569766380778
-83	-0.000827746868009	0.000827746867994	0.0155883232139	-0.0155883232139
-84	0.0013709210119	-0.00137092101189	0.0122186887524	-0.0122186887524
-85	0.000334212989212	-0.000334212989235	-0.0231806937032	0.0231806937032
-86	0.000622483770067	-0.000622483770066	-0.0426415468143	0.0426415468143
-87	0.000591717557469	-0.000591717557463	-0.00611123603734	0.00611123603733
-88	-0.000718358722683	0.000718358722686	0.0223631657984	-0.0223631657984
-89	-0.000696756836574	0.000696756836581	0.00115505660589	-0.00115505660589
-90	-0.000506204477782	0.000506204477782	-0.00673516408525	0.00673516408525
-91	-0.00135690159436	0.00135690159436	0.0537867933428	-0.0537867933428
-92	4.08718730297e-05	-4.08718730283e-05	0.00951997394088	-0.00951997394087
-93	-0.00114474880537	0.00114474880538	0.0246887231278	-0.0246887231278
-94	0.00260039073663	-0.00260039073661	-0.0195489870951	0.0195489870951
-95	-0.000780885311649	0.000780885311643	0.00833259826846	-0.00833259826847
-96	-0.00141938985277	0.00141938985275	0.0397560748719	-0.0397560748719
-97	-0.000780881620895	0.000780881620912	0.00833252194557	-0.00833252194556
-98	-0.000780566605387	0.000780566605416	0.0152673550777	-0.0152673550777
-99	0.00760602648383	-0.00760602648382	-0.129536312408	0.129536312408
-100	-0.000696760328124	0.000696760328164	0.00115497725961	-0.00115497725962
-101	-0.00678600258327	0.00678600258326	-0.0465649501276	0.0465649501276
-102	0.00111985118299	-0.00111985118299	-0.0133311713893	0.0133311713893
-103	0.00179128299969	-0.00179128299967	-0.0117629965412	0.0117629965413
-104	0.00245919021666	-0.00245919021666	0.00824989787101	-0.00824989787102
-105	-8.76492503355e-05	8.76492503354e-05	-0.000496254333627	0.000496254333633
-106	0.000761967432587	-0.000761967432566	-7.83367381654e-05	7.83367381754e-05
-107	0.000752341381348	-0.00075234138135	-0.0565467602809	0.0565467602809
-108	0.00493978422624	-0.00493978422623	-0.0423953299002	0.0423953299002
-109	0.00324788826973	-0.00324788826971	-0.00283722024642	0.00283722024643
-110	-0.00637676293374	0.00637676293375	-0.0562338429038	0.0562338429038
-111	0.000503159820755	-0.000503159820738	-0.0127616264245	0.0127616264245
-112	0.00140440261799	-0.00140440261802	0.000816319075632	-0.000816319075634
-113	0.000240534267342	-0.000240534267329	0.00957268947576	-0.00957268947577
-114	0.000384206839893	-0.000384206839888	-0.0217977011189	0.0217977011189
-115	-0.00634724290611	0.0063472429061	-0.0162945308629	0.016294530863
-116	-0.00354841350688	0.0035484135069	0.00558402036707	-0.00558402036708
-117	0.00215492172491	-0.00215492172492	0.00963144402194	-0.00963144402194
-118	-0.00275396275316	0.00275396275316	0.044837028746	-0.044837028746
-119	-0.0119814923701	0.01198149237	0.193515094085	-0.193515094085
-120	0.000963734153542	-0.00096373415354	0.0226428351944	-0.0226428351944
-121	-0.00244037523199	0.00244037523198	0.00546591341746	-0.00546591341744
-122	0.000266029608572	-0.00026602960856	-0.0314149983787	0.0314149983787
-123	-0.000348614897638	0.000348614897651	0.0397566415853	-0.0397566415853
-124	0.00119624036114	-0.00119624036116	-0.0286752098822	0.0286752098822
-125	0.000911513554691	-0.000911513554674	0.0039503682424	-0.00395036824241
-126	0.00416496074481	-0.00416496074481	-0.021645721723	0.021645721723
-127	0.00107259886096	-0.00107259886096	-0.0383893990267	0.0383893990268
-128	0.00119624052347	-0.00119624052348	-0.0286751556142	0.0286751556142
-129	0.00054481896186	-0.000544818961844	0.00819147865674	-0.00819147865675
-130	0.00318546200452	-0.00318546200451	0.0194033902589	-0.0194033902589
-131	0.00426070161194	-0.00426070161192	-0.0346242888528	0.0346242888528
-132	0.00376292316169	-0.00376292316171	-0.0418071757284	0.0418071757283
-133	-0.00087631493066	0.000876314930647	0.0105907484353	-0.0105907484353
-134	0.0010826505927	-0.00108265059272	0.0316795216603	-0.0316795216603
-135	0.00169020541014	-0.00169020541014	0.0744521027106	-0.0744521027107
-136	-0.000692087598833	0.000692087598854	-0.0298567664795	0.0298567664795
-137	-0.00532133931133	0.00532133931132	0.00197677995068	-0.00197677995068
-138	0.00215491723505	-0.00215491723504	0.00963119640396	-0.00963119640397
-139	0.00107260919521	-0.00107260919522	-0.0383891865417	0.0383891865417
-140	-3.59016028262e-06	3.59016027307e-06	0.00811701973019	-0.00811701973017
-141	-0.00532134139947	0.00532134139946	0.00197683432762	-0.00197683432763
-142	-0.00383298415331	0.00383298415332	-0.00856391328718	0.00856391328718
-143	0.00111984317293	-0.00111984317294	-0.0133312669044	0.0133312669044
-144	-0.0015606294529	0.00156062945289	-0.0102470797367	0.0102470797367
-145	-0.00812507234928	0.00812507234927	-0.0209114480533	0.0209114480533
-146	-0.00370288186472	0.00370288186472	-0.000652009420983	0.000652009420984
-147	0.0010009233634	-0.00100092336337	-0.0223679412346	0.0223679412346
-148	0.00063323482203	-0.000633234822026	-0.0048486309278	0.00484863092779
-149	-0.00335548443538	0.00335548443542	0.00963297901935	-0.00963297901936
-150	0.00143331899859	-0.00143331899856	-0.0100217011426	0.0100217011426
+1	-0.002078	0.002078	0.01475	-0.01475
+2	-0.002078	0.002078	0.01475	-0.01475
+3	-0.001861	0.001861	0.004309	-0.004309
+4	-0.002042	0.002042	0.009462	-0.009462
+5	-0.002078	0.002078	0.01475	-0.01475
+6	0.004667	-0.004667	-0.004736	0.004736
+7	0.001277	-0.001277	0.03081	-0.03081
+8	-0.002042	0.002042	0.009462	-0.009462
+9	-0.002078	0.002078	0.01475	-0.01475
+10	-0.005924	0.005924	-0.01338	0.01338
+11	-0.002042	0.002042	0.009462	-0.009462
+12	-0.001842	0.001842	-0.006647	0.006647
+13	-0.00596	0.00596	-0.008089	0.008089
+14	-0.004189	0.004189	-0.1086	0.1086
+15	-0.001301	0.001301	-0.02727	0.02727
+16	0.004185	-0.004185	0.03394	-0.03394
+17	0.004366	-0.004366	0.02879	-0.02879
+18	0.001277	-0.001277	0.03081	-0.03081
+19	0.001795	-0.001795	-0.01316	0.01316
+20	0.001313	-0.001313	0.02552	-0.02552
+21	-0.00156	0.00156	-0.02921	0.02921
+22	0.004185	-0.004185	0.03394	-0.03394
+23	0.001206	-0.001206	-0.1772	0.1772
+24	0.007108	-0.007108	-0.004324	0.004324
+25	-0.001044	0.001044	-0.0794	0.0794
+26	-0.001842	0.001842	-0.006647	0.006647
+27	0.004386	-0.004386	0.01783	-0.01783
+28	-0.002042	0.002042	0.009462	-0.009462
+29	-0.002078	0.002078	0.01475	-0.01475
+30	-0.001842	0.001842	-0.006647	0.006647
+31	-0.001842	0.001842	-0.006647	0.006647
+32	0.004185	-0.004185	0.03394	-0.03394
+33	-0.005924	0.005924	-0.01338	0.01338
+34	-0.002078	0.002078	0.01475	-0.01475
+35	-0.005924	0.005924	-0.01338	0.01338
+36	-0.001301	0.001301	-0.02727	0.02727
+37	-0.001861	0.001861	0.00431	-0.00431
+38	-0.005924	0.005924	-0.01338	0.01338
+39	-0.001861	0.001861	0.00431	-0.00431
+40	-0.002042	0.002042	0.009462	-0.009462
+41	0.001493	-0.001493	0.02037	-0.02037
+42	0.001493	-0.001493	0.02037	-0.02037
+43	-0.001861	0.001861	0.00431	-0.00431
+44	0.008885	-0.008885	0.01074	-0.01074
+45	0.005183	-0.005183	-0.05492	0.05492
+46	0.001277	-0.001277	0.03081	-0.03081
+47	-0.001842	0.001842	-0.006646	0.006646
+48	-0.002078	0.002078	0.01475	-0.01475
+49	-0.002042	0.002042	0.009462	-0.009462
+50	-0.002078	0.002078	0.01475	-0.01475
+51	0.0001734	-0.0001734	0.01839	-0.01839
+52	0.0003342	-0.0003342	-0.02318	0.02318
+53	0.0008456	-0.0008456	0.01303	-0.01303
+54	-0.0005062	0.0005062	-0.006735	0.006735
+55	0.0004592	-0.0004592	-0.01498	0.01498
+56	-0.0006155	0.0006155	0.02992	-0.02992
+57	0.00088	-0.00088	-0.02557	0.02557
+58	0.0026	-0.0026	-0.01955	0.01955
+59	-0.0004904	0.0004904	0.03812	-0.03812
+60	0.0003421	-0.0003421	-0.04444	0.04444
+61	0.0003168	-0.0003168	0.02377	-0.02377
+62	0.0001688	-0.0001688	-0.04477	0.04477
+63	-0.002776	0.002776	0.08976	-0.08976
+64	0.0001734	-0.0001734	0.01839	-0.01839
+65	0.001657	-0.001657	-0.05483	0.05483
+66	-0.000187	0.000187	-0.00624	0.00624
+67	0.0003342	-0.0003342	-0.02318	0.02318
+68	-0.002966	0.002966	0.09765	-0.09765
+69	0.0003342	-0.0003342	-0.02318	0.02318
+70	-0.001579	0.001579	0.04831	-0.04831
+71	0.001073	-0.001073	-0.03839	0.03839
+72	-0.0005062	0.0005062	-0.006735	0.006735
+73	0.0008456	-0.0008456	0.01303	-0.01303
+74	-0.0009965	0.0009965	0.07842	-0.07842
+75	-0.0007806	0.0007806	0.01527	-0.01527
+76	-0.000187	0.000187	-0.006239	0.006239
+77	0.0003036	-0.0003036	0.02781	-0.02781
+78	0.001382	-0.001382	-0.01072	0.01072
+79	0.0003342	-0.0003342	-0.02318	0.02318
+80	0.0003168	-0.0003168	0.02377	-0.02377
+81	-0.001119	0.001119	0.03752	-0.03752
+82	-0.001385	0.001385	0.05698	-0.05698
+83	-0.0008277	0.0008277	0.01559	-0.01559
+84	0.001371	-0.001371	0.01222	-0.01222
+85	0.0003342	-0.0003342	-0.02318	0.02318
+86	0.0006225	-0.0006225	-0.04264	0.04264
+87	0.0005917	-0.0005917	-0.006111	0.006111
+88	-0.0007184	0.0007184	0.02236	-0.02236
+89	-0.0006968	0.0006968	0.001155	-0.001155
+90	-0.0005062	0.0005062	-0.006735	0.006735
+91	-0.001357	0.001357	0.05379	-0.05379
+92	4.087e-05	-4.087e-05	0.00952	-0.00952
+93	-0.001145	0.001145	0.02469	-0.02469
+94	0.0026	-0.0026	-0.01955	0.01955
+95	-0.0007809	0.0007809	0.008333	-0.008333
+96	-0.001419	0.001419	0.03976	-0.03976
+97	-0.0007809	0.0007809	0.008333	-0.008333
+98	-0.0007806	0.0007806	0.01527	-0.01527
+99	0.007606	-0.007606	-0.1295	0.1295
+100	-0.0006968	0.0006968	0.001155	-0.001155
+101	-0.006786	0.006786	-0.04656	0.04656
+102	0.00112	-0.00112	-0.01333	0.01333
+103	0.001791	-0.001791	-0.01176	0.01176
+104	0.002459	-0.002459	0.00825	-0.00825
+105	-8.765e-05	8.765e-05	-0.0004963	0.0004963
+106	0.000762	-0.000762	-7.834e-05	7.834e-05
+107	0.0007523	-0.0007523	-0.05655	0.05655
+108	0.00494	-0.00494	-0.0424	0.0424
+109	0.003248	-0.003248	-0.002837	0.002837
+110	-0.006377	0.006377	-0.05623	0.05623
+111	0.0005032	-0.0005032	-0.01276	0.01276
+112	0.001404	-0.001404	0.0008163	-0.0008163
+113	0.0002405	-0.0002405	0.009573	-0.009573
+114	0.0003842	-0.0003842	-0.0218	0.0218
+115	-0.006347	0.006347	-0.01629	0.01629
+116	-0.003548	0.003548	0.005584	-0.005584
+117	0.002155	-0.002155	0.009631	-0.009631
+118	-0.002754	0.002754	0.04484	-0.04484
+119	-0.01198	0.01198	0.1935	-0.1935
+120	0.0009637	-0.0009637	0.02264	-0.02264
+121	-0.00244	0.00244	0.005466	-0.005466
+122	0.000266	-0.000266	-0.03141	0.03141
+123	-0.0003486	0.0003486	0.03976	-0.03976
+124	0.001196	-0.001196	-0.02868	0.02868
+125	0.0009115	-0.0009115	0.00395	-0.00395
+126	0.004165	-0.004165	-0.02165	0.02165
+127	0.001073	-0.001073	-0.03839	0.03839
+128	0.001196	-0.001196	-0.02868	0.02868
+129	0.0005448	-0.0005448	0.008191	-0.008191
+130	0.003185	-0.003185	0.0194	-0.0194
+131	0.004261	-0.004261	-0.03462	0.03462
+132	0.003763	-0.003763	-0.04181	0.04181
+133	-0.0008763	0.0008763	0.01059	-0.01059
+134	0.001083	-0.001083	0.03168	-0.03168
+135	0.00169	-0.00169	0.07445	-0.07445
+136	-0.0006921	0.0006921	-0.02986	0.02986
+137	-0.005321	0.005321	0.001977	-0.001977
+138	0.002155	-0.002155	0.009631	-0.009631
+139	0.001073	-0.001073	-0.03839	0.03839
+140	-3.59e-06	3.59e-06	0.008117	-0.008117
+141	-0.005321	0.005321	0.001977	-0.001977
+142	-0.003833	0.003833	-0.008564	0.008564
+143	0.00112	-0.00112	-0.01333	0.01333
+144	-0.001561	0.001561	-0.01025	0.01025
+145	-0.008125	0.008125	-0.02091	0.02091
+146	-0.003703	0.003703	-0.000652	0.000652
+147	0.001001	-0.001001	-0.02237	0.02237
+148	0.0006332	-0.0006332	-0.004849	0.004849
+149	-0.003355	0.003355	0.009633	-0.009633
+150	0.001433	-0.001433	-0.01002	0.01002
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kcca_out2.tabular
--- a/test-data/kcca_out2.tabular	Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kcca_out2.tabular	Wed Mar 10 16:23:53 2010 -0500
@@ -1,304 +1,304 @@
 #Component	1	2
-#Correlation	-0.997722247293	0.997722247293
+#Correlation	-0.9977	0.9977
 #Estimated X-coefficients	1	2
-1	-0.00122731840743	0.00122731840742
-2	-0.00122731106403	0.00122731106387
-3	0.00166794295518	-0.00166794295525
-4	-0.00173362374287	0.00173362374291
-5	-0.00122731862766	0.00122731862769
-6	-0.0029554785628	0.00295547856271
-7	-0.00461498736385	0.00461498736384
-8	-0.00173362629843	0.00173362629836
-9	-0.00122732331673	0.00122732331672
-10	0.00688097994625	-0.00688097994624
-11	-0.00173362826184	0.00173362826186
-12	-8.37004866976e-05	8.37004867417e-05
-13	0.00635554576927	-0.00635554576925
-14	0.00873026392029	-0.00873026392028
-15	0.00652732389235	-0.00652732389232
-16	-0.00603047472024	0.00603047472021
-17	0.00873631760763	-0.00873631760765
-18	-0.00461499433646	0.00461499433645
-19	-0.00219576033584	0.00219576033585
-20	-0.00691698230553	0.00691698230556
-21	0.00292177636345	-0.00292177636347
-22	-0.00603047506996	0.00603047506995
-23	0.0172373160862	-0.0172373160862
-24	0.00249397988781	-0.00249397988776
-25	0.00798852789597	-0.00798852789598
-26	-8.37043942033e-05	8.37043942473e-05
-27	-0.00630705042003	0.00630705042003
-28	-0.00173363075689	0.0017336307569
-29	-0.00122732018654	0.00122732018653
-30	-8.3706566709e-05	8.37065667174e-05
-31	-8.37037029634e-05	8.37037029813e-05
-32	-0.00603046924565	0.00603046924565
-33	0.00688097728833	-0.00688097728833
-34	-0.00122732731419	0.00122732731425
-35	0.00688098087012	-0.00688098087015
-36	0.00652731714093	-0.00652731714093
-37	0.00166793992756	-0.00166793992754
-38	0.00688097839506	-0.00688097839508
-39	0.00166794141434	-0.00166794141431
-40	-0.00173363131319	0.00173363131319
-41	0.0014004109735	-0.00140041097347
-42	0.00140041664328	-0.00140041664323
-43	0.00166794191139	-0.00166794191136
-44	0.0155877231838	-0.0155877231838
-45	0.00840352861092	-0.0084035286109
-46	-0.0046149948871	0.00461499488714
-47	-8.37036024121e-05	8.37036023937e-05
-48	-0.00122732358385	0.00122732358381
-49	-0.00173363170928	0.00173363170926
-50	-0.00122732256951	0.0012273225695
-51	0.000660960845406	-0.000660960845404
-52	-0.00213814090606	0.00213814090606
-53	-0.00172474025037	0.00172474025036
-54	-1.15192134321e-05	1.15192134284e-05
-55	-0.00270262833397	0.00270262833397
-56	0.00215511291935	-0.00215511291935
-57	-0.00300187218663	0.00300187218663
-58	0.00185267145989	-0.0018526714599
-59	0.00472457250519	-0.00472457250517
-60	0.00384665232521	-0.0038466523252
-61	-0.00310019835939	0.0031001983594
-62	0.00258744962442	-0.00258744962443
-63	0.00641438900071	-0.00641438900071
-64	0.00066096075849	-0.000660960758496
-65	0.00671559094757	-0.00671559094758
-66	-0.00232160450111	0.00232160450112
-67	-0.00213814191545	0.00213814191544
-68	0.0114627849428	-0.0114627849428
-69	-0.0021381429419	0.00213814294188
-70	0.000641012727381	-0.00064101272739
-71	0.00112493603698	-0.00112493603698
-72	-1.15272295775e-05	1.15272295687e-05
-73	-0.00172473744183	0.00172473744183
-74	0.018276550116	-0.018276550116
-75	-0.00125666892339	0.00125666892339
-76	-0.00232160381613	0.00232160381614
-77	0.00219982317236	-0.00219982317237
-78	-0.00586026163233	0.00586026163236
-79	-0.00213814041066	0.00213814041065
-80	-0.0031001930441	0.00310019304412
-81	0.00032247134769	-0.000322471347699
-82	-0.00253960255175	0.00253960255175
-83	0.000580298776161	-0.000580298776172
-84	-0.00339887362339	0.00339887362339
-85	-0.00213814304507	0.00213814304505
-86	0.00141644720267	-0.00141644720266
-87	-0.00275665838117	0.00275665838116
-88	7.299837356e-05	-7.29983735588e-05
-89	-0.00122133112354	0.00122133112352
-90	-1.1520732577e-05	1.15207325665e-05
-91	0.00623668821962	-0.00623668821961
-92	-0.000750488786068	0.000750488786081
-93	-0.000134743806793	0.000134743806781
-94	0.00185266774503	-0.00185266774504
-95	-0.00168681443561	0.00168681443562
-96	0.00104551800392	-0.00104551800392
-97	-0.00168681378149	0.00168681378151
-98	-0.00125666731119	0.0012566673112
-99	0.0273373188851	-0.0273373188851
-100	-0.00122132449275	0.00122132449276
-101	0.0164080376037	-0.0164080376037
-102	-0.00584347307691	0.00584347307691
-103	-0.0175645745896	0.0175645745896
-104	-0.000812522970572	0.000812522970572
-105	-0.00635860059631	0.0063586005963
-106	0.00275366392106	-0.00275366392106
-107	0.00954602258661	-0.00954602258661
-108	0.0155278096117	-0.0155278096117
-109	-0.00306825657773	0.00306825657775
-110	0.0112597051463	-0.0112597051463
-111	-0.00398796328134	0.00398796328133
-112	-0.00441274112723	0.00441274112722
-113	-0.00171496236972	0.00171496236972
-114	0.000877789735939	-0.000877789735948
-115	0.0273970544333	-0.0273970544333
-116	-0.00131825030571	0.0013182503057
-117	-0.000930125218004	0.000930125218002
-118	0.0155089924497	-0.0155089924497
-119	0.0118140403622	-0.0118140403622
-120	-0.000801552123165	0.00080155212317
-121	0.00284932184324	-0.00284932184324
-122	0.00962061241972	-0.00962061241972
-123	0.0158935928536	-0.0158935928536
-124	-0.0032912758955	0.00329127589549
-125	-0.00449164711626	0.00449164711627
-126	-0.00365891492729	0.0036589149273
-127	0.00112493995903	-0.00112493995902
-128	-0.0032912756797	0.00329127567971
-129	-0.00180788589755	0.00180788589754
-130	0.0223816193407	-0.0223816193407
-131	-0.0186465081581	0.0186465081581
-132	-0.0165897014	0.0165897014
-133	-9.15600724877e-05	9.15600724931e-05
-134	0.000396631463788	-0.000396631463796
-135	0.0366828617501	-0.0366828617501
-136	-0.0207881503725	0.0207881503725
-137	0.00716867549967	-0.00716867549967
-138	-0.000930124083137	0.000930124083139
-139	0.00112493800574	-0.00112493800573
-140	-0.00317568145256	0.00317568145257
-141	0.00716867883327	-0.00716867883327
-142	0.0151224437075	-0.0151224437075
-143	-0.00584347370931	0.00584347370932
-144	-0.00593952985175	0.00593952985175
-145	0.0182854382585	-0.0182854382585
-146	0.00354375934258	-0.00354375934259
-147	-0.00361285051891	0.00361285051892
-148	-0.0055083733504	0.00550837335042
-149	-0.00145063403334	0.00145063403336
-150	-0.00625027498836	0.00625027498837
+1	-0.001227	0.001227
+2	-0.001227	0.001227
+3	0.001668	-0.001668
+4	-0.001734	0.001734
+5	-0.001227	0.001227
+6	-0.002955	0.002955
+7	-0.004615	0.004615
+8	-0.001734	0.001734
+9	-0.001227	0.001227
+10	0.006881	-0.006881
+11	-0.001734	0.001734
+12	-8.37e-05	8.37e-05
+13	0.006356	-0.006356
+14	0.00873	-0.00873
+15	0.006527	-0.006527
+16	-0.00603	0.00603
+17	0.008736	-0.008736
+18	-0.004615	0.004615
+19	-0.002196	0.002196
+20	-0.006917	0.006917
+21	0.002922	-0.002922
+22	-0.00603	0.00603
+23	0.01724	-0.01724
+24	0.002494	-0.002494
+25	0.007989	-0.007989
+26	-8.37e-05	8.37e-05
+27	-0.006307	0.006307
+28	-0.001734	0.001734
+29	-0.001227	0.001227
+30	-8.371e-05	8.371e-05
+31	-8.37e-05	8.37e-05
+32	-0.00603	0.00603
+33	0.006881	-0.006881
+34	-0.001227	0.001227
+35	0.006881	-0.006881
+36	0.006527	-0.006527
+37	0.001668	-0.001668
+38	0.006881	-0.006881
+39	0.001668	-0.001668
+40	-0.001734	0.001734
+41	0.0014	-0.0014
+42	0.0014	-0.0014
+43	0.001668	-0.001668
+44	0.01559	-0.01559
+45	0.008404	-0.008404
+46	-0.004615	0.004615
+47	-8.37e-05	8.37e-05
+48	-0.001227	0.001227
+49	-0.001734	0.001734
+50	-0.001227	0.001227
+51	0.000661	-0.000661
+52	-0.002138	0.002138
+53	-0.001725	0.001725
+54	-1.152e-05	1.152e-05
+55	-0.002703	0.002703
+56	0.002155	-0.002155
+57	-0.003002	0.003002
+58	0.001853	-0.001853
+59	0.004725	-0.004725
+60	0.003847	-0.003847
+61	-0.0031	0.0031
+62	0.002587	-0.002587
+63	0.006414	-0.006414
+64	0.000661	-0.000661
+65	0.006716	-0.006716
+66	-0.002322	0.002322
+67	-0.002138	0.002138
+68	0.01146	-0.01146
+69	-0.002138	0.002138
+70	0.000641	-0.000641
+71	0.001125	-0.001125
+72	-1.153e-05	1.153e-05
+73	-0.001725	0.001725
+74	0.01828	-0.01828
+75	-0.001257	0.001257
+76	-0.002322	0.002322
+77	0.0022	-0.0022
+78	-0.00586	0.00586
+79	-0.002138	0.002138
+80	-0.0031	0.0031
+81	0.0003225	-0.0003225
+82	-0.00254	0.00254
+83	0.0005803	-0.0005803
+84	-0.003399	0.003399
+85	-0.002138	0.002138
+86	0.001416	-0.001416
+87	-0.002757	0.002757
+88	7.3e-05	-7.3e-05
+89	-0.001221	0.001221
+90	-1.152e-05	1.152e-05
+91	0.006237	-0.006237
+92	-0.0007505	0.0007505
+93	-0.0001347	0.0001347
+94	0.001853	-0.001853
+95	-0.001687	0.001687
+96	0.001046	-0.001046
+97	-0.001687	0.001687
+98	-0.001257	0.001257
+99	0.02734	-0.02734
+100	-0.001221	0.001221
+101	0.01641	-0.01641
+102	-0.005843	0.005843
+103	-0.01756	0.01756
+104	-0.0008125	0.0008125
+105	-0.006359	0.006359
+106	0.002754	-0.002754
+107	0.009546	-0.009546
+108	0.01553	-0.01553
+109	-0.003068	0.003068
+110	0.01126	-0.01126
+111	-0.003988	0.003988
+112	-0.004413	0.004413
+113	-0.001715	0.001715
+114	0.0008778	-0.0008778
+115	0.0274	-0.0274
+116	-0.001318	0.001318
+117	-0.0009301	0.0009301
+118	0.01551	-0.01551
+119	0.01181	-0.01181
+120	-0.0008016	0.0008016
+121	0.002849	-0.002849
+122	0.009621	-0.009621
+123	0.01589	-0.01589
+124	-0.003291	0.003291
+125	-0.004492	0.004492
+126	-0.003659	0.003659
+127	0.001125	-0.001125
+128	-0.003291	0.003291
+129	-0.001808	0.001808
+130	0.02238	-0.02238
+131	-0.01865	0.01865
+132	-0.01659	0.01659
+133	-9.156e-05	9.156e-05
+134	0.0003966	-0.0003966
+135	0.03668	-0.03668
+136	-0.02079	0.02079
+137	0.007169	-0.007169
+138	-0.0009301	0.0009301
+139	0.001125	-0.001125
+140	-0.003176	0.003176
+141	0.007169	-0.007169
+142	0.01512	-0.01512
+143	-0.005843	0.005843
+144	-0.00594	0.00594
+145	0.01829	-0.01829
+146	0.003544	-0.003544
+147	-0.003613	0.003613
+148	-0.005508	0.005508
+149	-0.001451	0.001451
+150	-0.00625	0.00625
 #Estimated Y-coefficients	1	2
-1	0.000952236730115	0.000952236730136
-2	0.00753180911842	0.00753180911838
-3	-0.00516801028579	-0.00516801028581
-4	-0.00882498990477	-0.00882498990477
-5	-0.00140542873921	-0.00140542873922
-6	0.00330266798272	0.00330266798272
-7	-0.0081387911267	-0.00813879112671
-8	0.00152443564077	0.00152443564076
-9	-0.00771870201025	-0.00771870201025
-10	0.00547713010073	0.00547713010074
-11	0.003270659849	0.003270659849
-12	-0.00306812817496	-0.00306812817497
-13	0.00445886261671	0.00445886261673
-14	-0.00353335505412	-0.00353335505412
-15	0.0085173158937	0.0085173158937
-16	-0.0383961776234	-0.0383961776234
-17	0.00330267922582	0.00330267922583
-18	0.000952240516451	0.000952240516456
-19	0.00818906645105	0.00818906645104
-20	-0.00309614228051	-0.00309614228051
-21	-0.000971621477879	-0.000971621477885
-22	-0.00165164343654	-0.00165164343652
-23	0.00604384541776	0.00604384541777
-24	0.00341647364957	0.00341647364958
-25	-0.00306812439373	-0.00306812439374
-26	0.00830847900819	0.00830847900821
-27	0.00152443887844	0.00152443887844
-28	0.00132401884933	0.00132401884932
-29	0.00179979352006	0.00179979352008
-30	-0.00516800670429	-0.0051680067043
-31	0.00208562867941	0.00208562867942
-32	-0.000971619066246	-0.000971619066261
-33	-0.0113128074854	-0.0113128074854
-34	-0.00870056701442	-0.0087005670144
-35	0.00547713027423	0.00547713027424
-36	0.00495542862463	0.00495542862463
-37	-0.000337423424473	-0.000337423424464
-38	0.00547712655395	0.00547712655396
-39	-0.010973258652	-0.010973258652
-40	0.0022008011203	0.0022008011203
-41	-5.46008492518e-05	-5.46008492437e-05
-42	-0.0234396971996	-0.0234396971996
-43	-0.0134511606022	-0.0134511606023
-44	-5.45996121933e-05	-5.45996122011e-05
-45	-0.00309614158163	-0.00309614158163
-46	0.00445886019725	0.00445886019722
-47	-0.00309614420386	-0.00309614420386
-48	-0.0104579849693	-0.0104579849693
-49	0.00168842769973	0.00168842769972
-50	0.00322357131135	0.00322357131133
-51	0.00202630582019	0.00202630582019
-52	-0.000691700383038	-0.000691700383018
-53	-0.00104045859173	-0.00104045859172
-54	-0.00344346850677	-0.00344346850675
-55	0.000998125320535	0.000998125320523
-56	-0.00199381905541	-0.0019938190554
-57	-0.00106665569072	-0.00106665569075
-58	0.00650984277058	0.00650984277059
-59	-0.00127555360175	-0.00127555360178
-60	0.0074404524885	0.00744045248845
-61	-0.0317908788312	-0.0317908788312
-62	-0.0039999913858	-0.00399999138579
-63	-0.0115243240423	-0.0115243240423
-64	0.00176831166006	0.00176831166007
-65	-0.00440085578754	-0.00440085578751
-66	-0.00192225561804	-0.00192225561805
-67	-0.00603442682678	-0.00603442682675
-68	0.00152674197776	0.00152674197778
-69	-0.0154861222259	-0.0154861222259
-70	0.00112666408174	0.00112666408172
-71	-0.0063441217448	-0.0063441217448
-72	0.00416182160539	0.0041618216054
-73	0.00285475184215	0.00285475184214
-74	0.00416181855219	0.00416181855219
-75	0.00220354621189	0.00220354621189
-76	-0.00115406490584	-0.00115406490584
-77	-0.0083901924992	-0.00839019249917
-78	-0.00238535825306	-0.0023853582531
-79	0.000206755320136	0.000206755320148
-80	0.00158982739788	0.00158982739788
-81	-1.37720111221e-05	-1.37720111046e-05
-82	-1.3770694678e-05	-1.37706946744e-05
-83	0.00152674594702	0.00152674594698
-84	0.00488108634371	0.00488108634369
-85	-0.00220091184671	-0.0022009118467
-86	-0.00318012733342	-0.0031801273334
-87	-0.00192225857826	-0.00192225857825
-88	-0.00911898267779	-0.00911898267778
-89	-0.00603441933504	-0.00603441933506
-90	0.00142092462435	0.00142092462434
-91	0.00128331390667	0.00128331390668
-92	-0.00057432105445	-0.000574321054431
-93	0.00290994036196	0.00290994036199
-94	0.00227437971826	0.00227437971826
-95	-0.000409584211176	-0.000409584211157
-96	-0.0062564186313	-0.00625641863131
-97	-0.00430727169536	-0.00430727169537
-98	0.00274042717244	0.00274042717247
-99	0.00952367801675	0.00952367801678
-100	-0.00199382288663	-0.00199382288664
-101	-0.00106665068897	-0.00106665068897
-102	0.00152674543824	0.00152674543823
-103	0.00174787415807	0.00174787415805
-104	0.00290750496381	0.00290750496381
-105	9.66708166621e-05	9.6670816661e-05
-106	-0.00101935419571	-0.00101935419568
-107	0.0100871271044	0.0100871271044
-108	0.00352958176992	0.00352958176992
-109	-0.019066105421	-0.0190661054211
-110	0.00240693112447	0.00240693112447
-111	-0.00090513595859	-0.00090513595857
-112	0.0035205054436	0.0035205054436
-113	-0.00310599240981	-0.00310599240982
-114	0.00169430085813	0.00169430085813
-115	-0.000733363285101	-0.000733363285101
-116	-0.000691698262239	-0.000691698262241
-117	9.66698551435e-05	9.66698551349e-05
-118	0.0171457397785	0.0171457397785
-119	0.00625881933768	0.00625881933768
-120	-0.0115243221165	-0.0115243221165
-121	-0.000457609674661	-0.000457609674665
-122	-0.0023718905732	-0.00237189057318
-123	-0.00299238147421	-0.00299238147421
-124	0.00547391509592	0.00547391509591
-125	-0.00200127266833	-0.00200127266834
-126	0.00971289811654	0.00971289811655
-127	0.00481618871657	0.00481618871656
-128	-0.000574322474196	-0.000574322474171
-129	0.00323239431475	0.00323239431476
-130	0.00567244833234	0.00567244833233
-131	-0.00143721256674	-0.00143721256671
-132	-0.0474284189227	-0.0474284189227
-133	0.00323239591076	0.00323239591076
-134	0.00453050823172	0.00453050823173
-135	0.00662134137135	0.00662134137136
-136	-0.0222253238714	-0.0222253238714
-137	-0.000472180108221	-0.000472180108238
-138	-8.96030271952e-05	-8.96030271771e-05
-139	-0.00223279969811	-0.00223279969812
-140	-0.00104045712364	-0.00104045712363
-141	-0.00192225343433	-0.00192225343433
-142	-0.0010404587415	-0.00104045874147
-143	0.00152674421725	0.00152674421723
-144	-0.00165763647411	-0.00165763647409
-145	-0.00200127181815	-0.00200127181816
-146	-0.00238535855193	-0.00238535855193
-147	0.00285475011985	0.00285475011986
-148	9.66702716109e-05	9.66702716107e-05
-149	-0.00109079514933	-0.00109079514929
-150	-0.00399998975976	-0.00399998975976
+1	0.0009522	0.0009522
+2	0.007532	0.007532
+3	-0.005168	-0.005168
+4	-0.008825	-0.008825
+5	-0.001405	-0.001405
+6	0.003303	0.003303
+7	-0.008139	-0.008139
+8	0.001524	0.001524
+9	-0.007719	-0.007719
+10	0.005477	0.005477
+11	0.003271	0.003271
+12	-0.003068	-0.003068
+13	0.004459	0.004459
+14	-0.003533	-0.003533
+15	0.008517	0.008517
+16	-0.0384	-0.0384
+17	0.003303	0.003303
+18	0.0009522	0.0009522
+19	0.008189	0.008189
+20	-0.003096	-0.003096
+21	-0.0009716	-0.0009716
+22	-0.001652	-0.001652
+23	0.006044	0.006044
+24	0.003416	0.003416
+25	-0.003068	-0.003068
+26	0.008308	0.008308
+27	0.001524	0.001524
+28	0.001324	0.001324
+29	0.0018	0.0018
+30	-0.005168	-0.005168
+31	0.002086	0.002086
+32	-0.0009716	-0.0009716
+33	-0.01131	-0.01131
+34	-0.008701	-0.008701
+35	0.005477	0.005477
+36	0.004955	0.004955
+37	-0.0003374	-0.0003374
+38	0.005477	0.005477
+39	-0.01097	-0.01097
+40	0.002201	0.002201
+41	-5.46e-05	-5.46e-05
+42	-0.02344	-0.02344
+43	-0.01345	-0.01345
+44	-5.46e-05	-5.46e-05
+45	-0.003096	-0.003096
+46	0.004459	0.004459
+47	-0.003096	-0.003096
+48	-0.01046	-0.01046
+49	0.001688	0.001688
+50	0.003224	0.003224
+51	0.002026	0.002026
+52	-0.0006917	-0.0006917
+53	-0.00104	-0.00104
+54	-0.003443	-0.003443
+55	0.0009981	0.0009981
+56	-0.001994	-0.001994
+57	-0.001067	-0.001067
+58	0.00651	0.00651
+59	-0.001276	-0.001276
+60	0.00744	0.00744
+61	-0.03179	-0.03179
+62	-0.004	-0.004
+63	-0.01152	-0.01152
+64	0.001768	0.001768
+65	-0.004401	-0.004401
+66	-0.001922	-0.001922
+67	-0.006034	-0.006034
+68	0.001527	0.001527
+69	-0.01549	-0.01549
+70	0.001127	0.001127
+71	-0.006344	-0.006344
+72	0.004162	0.004162
+73	0.002855	0.002855
+74	0.004162	0.004162
+75	0.002204	0.002204
+76	-0.001154	-0.001154
+77	-0.00839	-0.00839
+78	-0.002385	-0.002385
+79	0.0002068	0.0002068
+80	0.00159	0.00159
+81	-1.377e-05	-1.377e-05
+82	-1.377e-05	-1.377e-05
+83	0.001527	0.001527
+84	0.004881	0.004881
+85	-0.002201	-0.002201
+86	-0.00318	-0.00318
+87	-0.001922	-0.001922
+88	-0.009119	-0.009119
+89	-0.006034	-0.006034
+90	0.001421	0.001421
+91	0.001283	0.001283
+92	-0.0005743	-0.0005743
+93	0.00291	0.00291
+94	0.002274	0.002274
+95	-0.0004096	-0.0004096
+96	-0.006256	-0.006256
+97	-0.004307	-0.004307
+98	0.00274	0.00274
+99	0.009524	0.009524
+100	-0.001994	-0.001994
+101	-0.001067	-0.001067
+102	0.001527	0.001527
+103	0.001748	0.001748
+104	0.002908	0.002908
+105	9.667e-05	9.667e-05
+106	-0.001019	-0.001019
+107	0.01009	0.01009
+108	0.00353	0.00353
+109	-0.01907	-0.01907
+110	0.002407	0.002407
+111	-0.0009051	-0.0009051
+112	0.003521	0.003521
+113	-0.003106	-0.003106
+114	0.001694	0.001694
+115	-0.0007334	-0.0007334
+116	-0.0006917	-0.0006917
+117	9.667e-05	9.667e-05
+118	0.01715	0.01715
+119	0.006259	0.006259
+120	-0.01152	-0.01152
+121	-0.0004576	-0.0004576
+122	-0.002372	-0.002372
+123	-0.002992	-0.002992
+124	0.005474	0.005474
+125	-0.002001	-0.002001
+126	0.009713	0.009713
+127	0.004816	0.004816
+128	-0.0005743	-0.0005743
+129	0.003232	0.003232
+130	0.005672	0.005672
+131	-0.001437	-0.001437
+132	-0.04743	-0.04743
+133	0.003232	0.003232
+134	0.004531	0.004531
+135	0.006621	0.006621
+136	-0.02223	-0.02223
+137	-0.0004722	-0.0004722
+138	-8.96e-05	-8.96e-05
+139	-0.002233	-0.002233
+140	-0.00104	-0.00104
+141	-0.001922	-0.001922
+142	-0.00104	-0.00104
+143	0.001527	0.001527
+144	-0.001658	-0.001658
+145	-0.002001	-0.002001
+146	-0.002385	-0.002385
+147	0.002855	0.002855
+148	9.667e-05	9.667e-05
+149	-0.001091	-0.001091
+150	-0.004	-0.004
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kpca_out1.tabular
--- a/test-data/kpca_out1.tabular	Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kpca_out1.tabular	Wed Mar 10 16:23:53 2010 -0500
@@ -1,304 +1,304 @@
 #Component	1	2
-#Eigenvalue	748.53295257	31.7564711908
+#Eigenvalue	748.5	31.76
 #Principal component vectors	1	2
-1	-0.00355307349951	0.010798043393
-2	-0.00372171398464	-0.00319202227241
-3	-0.00388666963315	-0.00306208266383
-4	-0.0037995051228	-0.00750936595439
-5	-0.00359988454628	0.0106843456635
-6	-0.00299499814368	0.0244145455947
-7	-0.00381378035242	-0.00182771020035
-8	-0.00354042321831	0.00630539436577
-9	-0.00400175554745	-0.0145202364622
-10	-0.0036630352299	-0.00136819012831
-11	-0.00326674025957	0.0213381031254
-12	-0.00357519624231	0.00181491746835
-13	-0.00380730480961	-0.0049118993488
-14	-0.0042807390398	-0.013520126312
-15	-0.00325109413176	0.0386362582597
-16	-0.0029222183254	0.0439372659369
-17	-0.00334909683853	0.025845377936
-18	-0.00352067050475	0.0105349072573
-19	-0.00284736799589	0.0295161649048
-20	-0.00340343226564	0.0164339289456
-21	-0.00312848751113	0.0141934462825
-22	-0.00338156462338	0.0140621875533
-23	-0.00412447090326	0.00340207509083
-24	-0.00322710660152	0.00486270310359
-25	-0.00332920046569	0.000741686459317
-26	-0.00350562773687	-0.0018911475654
-27	-0.00339310720123	0.00542360368742
-28	-0.00341115136262	0.0126295090519
-29	-0.00350586794258	0.0109084171675
-30	-0.00365520087074	-0.00403408864917
-31	-0.0036097322482	-0.00395326488411
-32	-0.00323422448999	0.0143754764524
-33	-0.00337735967561	0.0257295115702
-34	-0.00323624470121	0.035204794488
-35	-0.0036630352299	-0.00136819012831
-36	-0.00380098698319	0.00332108429984
-37	-0.00340101962525	0.0199919567158
-38	-0.0036630352299	-0.00136819012831
-39	-0.00406364785654	-0.0124621360523
-40	-0.00348192497209	0.00842215059195
-41	-0.00365954855054	0.00871751918805
-42	-0.00405650169482	-0.0228354383139
-43	-0.00404399701786	-0.00889238369142
-44	-0.003314992587	0.00689235987806
-45	-0.00302208085492	0.014691962046
-46	-0.00374682975461	-0.0054083973616
-47	-0.00335195787406	0.0163610385692
-48	-0.00386473126531	-0.0053404692461
-49	-0.00332787397638	0.0190663593005
-50	-0.00363243277483	0.00464859342961
-51	0.00213134716649	0.0235348223035
-52	0.00126833388577	0.010770856327
-53	0.00231973604021	0.0165043816266
-54	-0.00054456516814	-0.0221796154179
-55	0.00139079918371	0.00286317141153
-56	0.000352174603738	-0.0119723346983
-57	0.00150292456851	0.0088321679366
-58	-0.00187873890345	-0.0252928947697
-59	0.0014246176887	0.00830441949209
-60	-0.000783999496101	-0.0195987047495
-61	-0.00168806505916	-0.0318750199005
-62	0.000332005325527	-0.00227272707421
-63	-0.000240109205398	-0.0136362166478
-64	0.00107889762034	-0.00371369901933
-65	-0.000755702559702	-0.00554349815991
-66	0.00137471857777	0.0164559631848
-67	0.000400237612268	-0.0105937514639
-68	-0.000186605633993	-0.00791711329626
-69	0.000781260386547	-0.0156695687823
-70	-0.000602293691986	-0.014916217386
-71	0.00128053061726	-0.00382898708379
-72	0.000141070576797	-0.000177704692482
-73	0.0014804796511	-0.0105311838
-74	0.00095326423819	-0.0050250570579
-75	0.000831513901556	0.00625180962049
-76	0.00124204236488	0.0117902480983
-77	0.00192892696243	0.00828195017884
-78	0.00230888433286	0.00765437064913
-79	0.000772988980957	-0.00466004256634
-80	-0.000972934470601	-0.00794203589665
-81	-0.000825239851136	-0.0180890001892
-82	-0.000975175708257	-0.017046945113
-83	-0.000328157175199	-0.00733223273401
-84	0.00152809458981	-0.014337289403
-85	0.000204561491838	-0.0145038337473
-86	0.000958564114974	0.00611562699814
-87	0.00182495005699	0.0135671536294
-88	0.000691799295473	-0.00991154906392
-89	-0.000164814642312	-0.00683729147812
-90	-0.000495108434506	-0.018272246356
-91	-5.75283291151e-05	-0.0187391570922
-92	0.000980006758757	-0.000691467878134
-93	-0.000237878724736	-0.0101121125697
-94	-0.00182103731256	-0.02537083053
-95	-0.000124416007346	-0.0137765920579
-96	1.28307436801e-06	-0.00511559296079
-97	2.13006652241e-05	-0.00765449401728
-98	0.000630107936427	0.00193944394238
-99	-0.00195781990797	-0.0185721711052
-100	-0.000121047850397	-0.00903979441776
-101	0.00380328631922	-0.00735042696528
-102	0.00146195943076	-0.0196360714253
-103	0.00431818384817	0.00623068278311
-104	0.00270535358616	-0.00919789069736
-105	0.00348128902639	-0.00661174582865
-106	0.00608948395467	0.0105925795597
-107	-0.000316164072773	-0.0340356815797
-108	0.00498054497252	0.00589466060194
-109	0.00332851194261	-0.0119530548351
-110	0.00521718108616	0.019838433964
-111	0.0024425365186	0.0056190945732
-112	0.00239304071946	-0.00943633793421
-113	0.0033517411382	0.00333352355283
-114	0.00121571788624	-0.0252729457662
-115	0.00174247582859	-0.0198739526029
-116	0.00276824324416	0.00012671005862
-117	0.00283087224731	-0.00171497869775
-118	0.00677564081585	0.0333579774127
-119	0.00669433449169	-0.00213577965515
-120	0.00119992099901	-0.0235626863995
-121	0.0039514012311	0.00753951675886
-122	0.00107394296724	-0.0201437000923
-123	0.00625053915694	0.00718047948235
-124	0.00169144697539	-0.00751769568719
-125	0.00362530843477	0.00652532644077
-126	0.00449337826627	0.0143122037644
-127	0.00148231189251	-0.00651360693069
-128	0.00156493567407	-0.00495452682527
-129	0.00295099547041	-0.0109308371774
-130	0.0040009541229	0.0122602980101
-131	0.00482086296995	0.0071695428205
-132	0.00641618457442	0.0431443194121
-133	0.00300528343518	-0.0114433590101
-134	0.00183409020836	-0.00555386556062
-135	0.00216811021196	-0.0179461401821
-136	0.00553489980813	0.0172257235932
-137	0.00319828572246	-0.000592991026508
-138	0.00274692788031	-0.00152565423339
-139	0.00132810381823	-0.00620179238822
-140	0.00335981997382	0.00898416237223
-141	0.00357910201795	0.00100409451945
-142	0.00303686395111	0.0107216156308
-143	0.00146195943076	-0.0196360714253
-144	0.00412744981738	0.00329510730094
-145	0.00385310530472	0.0043919488708
-146	0.00291206809269	0.00286898102218
-147	0.00181332919273	-0.0132702604229
-148	0.00251823082733	-2.87503486175e-06
-149	0.00274135416487	-0.000415299029875
-150	0.00161105855831	-0.0107445157278
+1	-0.003553	0.0108
+2	-0.003722	-0.003192
+3	-0.003887	-0.003062
+4	-0.0038	-0.007509
+5	-0.0036	0.01068
+6	-0.002995	0.02441
+7	-0.003814	-0.001828
+8	-0.00354	0.006305
+9	-0.004002	-0.01452
+10	-0.003663	-0.001368
+11	-0.003267	0.02134
+12	-0.003575	0.001815
+13	-0.003807	-0.004912
+14	-0.004281	-0.01352
+15	-0.003251	0.03864
+16	-0.002922	0.04394
+17	-0.003349	0.02585
+18	-0.003521	0.01053
+19	-0.002847	0.02952
+20	-0.003403	0.01643
+21	-0.003128	0.01419
+22	-0.003382	0.01406
+23	-0.004124	0.003402
+24	-0.003227	0.004863
+25	-0.003329	0.0007417
+26	-0.003506	-0.001891
+27	-0.003393	0.005424
+28	-0.003411	0.01263
+29	-0.003506	0.01091
+30	-0.003655	-0.004034
+31	-0.00361	-0.003953
+32	-0.003234	0.01438
+33	-0.003377	0.02573
+34	-0.003236	0.0352
+35	-0.003663	-0.001368
+36	-0.003801	0.003321
+37	-0.003401	0.01999
+38	-0.003663	-0.001368
+39	-0.004064	-0.01246
+40	-0.003482	0.008422
+41	-0.00366	0.008718
+42	-0.004057	-0.02284
+43	-0.004044	-0.008892
+44	-0.003315	0.006892
+45	-0.003022	0.01469
+46	-0.003747	-0.005408
+47	-0.003352	0.01636
+48	-0.003865	-0.00534
+49	-0.003328	0.01907
+50	-0.003632	0.004649
+51	0.002131	0.02353
+52	0.001268	0.01077
+53	0.00232	0.0165
+54	-0.0005446	-0.02218
+55	0.001391	0.002863
+56	0.0003522	-0.01197
+57	0.001503	0.008832
+58	-0.001879	-0.02529
+59	0.001425	0.008304
+60	-0.000784	-0.0196
+61	-0.001688	-0.03188
+62	0.000332	-0.002273
+63	-0.0002401	-0.01364
+64	0.001079	-0.003714
+65	-0.0007557	-0.005543
+66	0.001375	0.01646
+67	0.0004002	-0.01059
+68	-0.0001866	-0.007917
+69	0.0007813	-0.01567
+70	-0.0006023	-0.01492
+71	0.001281	-0.003829
+72	0.0001411	-0.0001777
+73	0.00148	-0.01053
+74	0.0009533	-0.005025
+75	0.0008315	0.006252
+76	0.001242	0.01179
+77	0.001929	0.008282
+78	0.002309	0.007654
+79	0.000773	-0.00466
+80	-0.0009729	-0.007942
+81	-0.0008252	-0.01809
+82	-0.0009752	-0.01705
+83	-0.0003282	-0.007332
+84	0.001528	-0.01434
+85	0.0002046	-0.0145
+86	0.0009586	0.006116
+87	0.001825	0.01357
+88	0.0006918	-0.009912
+89	-0.0001648	-0.006837
+90	-0.0004951	-0.01827
+91	-5.753e-05	-0.01874
+92	0.00098	-0.0006915
+93	-0.0002379	-0.01011
+94	-0.001821	-0.02537
+95	-0.0001244	-0.01378
+96	1.283e-06	-0.005116
+97	2.13e-05	-0.007654
+98	0.0006301	0.001939
+99	-0.001958	-0.01857
+100	-0.000121	-0.00904
+101	0.003803	-0.00735
+102	0.001462	-0.01964
+103	0.004318	0.006231
+104	0.002705	-0.009198
+105	0.003481	-0.006612
+106	0.006089	0.01059
+107	-0.0003162	-0.03404
+108	0.004981	0.005895
+109	0.003329	-0.01195
+110	0.005217	0.01984
+111	0.002443	0.005619
+112	0.002393	-0.009436
+113	0.003352	0.003334
+114	0.001216	-0.02527
+115	0.001742	-0.01987
+116	0.002768	0.0001267
+117	0.002831	-0.001715
+118	0.006776	0.03336
+119	0.006694	-0.002136
+120	0.0012	-0.02356
+121	0.003951	0.00754
+122	0.001074	-0.02014
+123	0.006251	0.00718
+124	0.001691	-0.007518
+125	0.003625	0.006525
+126	0.004493	0.01431
+127	0.001482	-0.006514
+128	0.001565	-0.004955
+129	0.002951	-0.01093
+130	0.004001	0.01226
+131	0.004821	0.00717
+132	0.006416	0.04314
+133	0.003005	-0.01144
+134	0.001834	-0.005554
+135	0.002168	-0.01795
+136	0.005535	0.01723
+137	0.003198	-0.000593
+138	0.002747	-0.001526
+139	0.001328	-0.006202
+140	0.00336	0.008984
+141	0.003579	0.001004
+142	0.003037	0.01072
+143	0.001462	-0.01964
+144	0.004127	0.003295
+145	0.003853	0.004392
+146	0.002912	0.002869
+147	0.001813	-0.01327
+148	0.002518	-2.875e-06
+149	0.002741	-0.0004153
+150	0.001611	-0.01074
 #Rotated values	1	2
-1	-398.938889593	51.4361630892
-2	-417.873833632	-15.2051045001
-3	-436.395044425	-14.5861409847
-4	-426.608218182	-35.7706445387
-5	-404.194831251	50.8945672883
-6	-336.278220515	116.297972072
-7	-428.211040147	-8.70624394838
-8	-397.518516743	30.0355611785
-9	-449.31688431	-69.1667206344
-10	-411.285386401	-6.51733355896
-11	-366.789409766	101.643428575
-12	-401.422829891	8.64530614458
-13	-427.483966571	-23.3976885243
-14	-480.641134897	-64.4027252584
-15	-365.03266343	184.042683351
-16	-328.106506675	209.293877989
-17	-376.036401748	123.113699976
-18	-395.300683193	50.1827218221
-19	-319.702315953	140.599386069
-20	-382.137180401	78.282538667
-21	-351.266399068	67.6100651954
-22	-379.681882777	66.9848180875
-23	-463.095357451	16.2056849416
-24	-362.339344904	23.1633436528
-25	-373.802438142	3.53300170168
-26	-393.611682074	-9.00842597671
-27	-380.977882758	25.8351771375
-28	-383.003880169	60.1602960542
-29	-393.638652357	51.9619253275
-30	-410.405745001	-19.2162629953
-31	-405.30053066	-18.8312613603
-32	-363.138541016	68.4771605722
-33	-379.209751481	122.56177394
-34	-363.365370216	167.69700629
-35	-411.285386401	-6.51733355896
-36	-426.774601382	15.8198876835
-37	-381.866289275	95.2310996238
-38	-411.285386401	-6.51733355896
-39	-456.266149239	-59.3630196781
-40	-390.950336998	40.1186673956
-41	-410.893902242	41.5256470425
-42	-455.463778609	-108.775940841
-43	-454.059754194	-42.3586089771
-44	-372.207178335	32.8315541856
-45	-339.319065786	69.9847304172
-46	-420.69383085	-25.7627422503
-47	-376.357638654	77.9353274961
-48	-433.931805737	-25.4391686638
-49	-373.653499999	90.8220434758
-50	-407.849344494	22.1434384987
-51	239.307538123	112.107435969
-52	142.408456254	51.3066582975
-53	260.459830104	78.618137947
-54	-61.1437459762	-105.651947706
-55	156.158852912	13.6386330667
-56	39.5421443935	-57.0298652899
-57	168.748284713	42.0717729946
-58	-210.944696776	-120.481962613
-59	159.955992721	39.5578587535
-60	-88.0274186445	-93.3578554128
-61	-189.53584843	-151.835722676
-62	37.2775389879	-10.8260687785
-63	-26.9594478684	-64.955718169
-64	121.138563191	-17.6900963878
-65	-84.8502402418	-26.4062909417
-66	154.353323395	78.3874981192
-67	44.9386562461	-50.4630244749
-68	-20.9520699268	-37.712937046
-69	87.7198715803	-74.6415314411
-70	-67.6255013365	-71.0529641542
-71	143.77790457	-18.2392677024
-72	15.8393963056	-0.846491092088
-73	166.228170669	-50.1649852426
-74	107.032454219	-23.9367119537
-75	93.3623333752	29.7803118155
-76	139.45644579	56.16250111
-77	216.579809171	39.4508268386
-78	259.241401023	36.4613701504
-79	86.791158633	-22.1979761258
-80	-109.24102679	-37.8316551222
-81	-92.6578833524	-86.1664220066
-82	-109.492672826	-81.2026232057
-83	-36.8454688889	-34.9268756372
-84	171.574373268	-68.295257682
-85	22.9681526201	-69.0885867828
-86	107.627524081	29.131609887
-87	204.905288168	64.6267385057
-88	77.6751853839	-47.2133733456
-89	-18.5053786255	-32.5692374772
-90	-55.5907467484	-87.0393097495
-91	-6.45927750734	-89.2634253508
-92	110.035102901	-3.29378696269
-93	-26.709009627	-48.1687517244
-94	-204.465965447	-120.853207322
-95	-13.9694221989	-65.6243923192
-96	0.144063516758	-24.3679770725
-97	2.39163747478	-36.4619578109
-98	70.7484831137	9.23848435236
-99	-219.823907447	-88.4679924977
-100	-13.591245729	-43.0607956497
-101	427.032770699	-35.0135433244
-102	164.148721387	-93.5358504774
-103	484.845435841	29.6796747451
-104	303.756946139	-43.813882642
-105	390.878933052	-31.4948573892
-106	683.726910633	50.4574421437
-107	-35.4988840335	-162.127971232
-108	559.215305053	28.0790429377
-109	373.72513081	-56.9380262268
-110	585.784794378	94.499798497
-111	274.247860804	26.7663922398
-112	268.690475304	-44.9497190631
-113	376.333303564	15.8791417004
-114	136.500734832	-120.386936119
-115	195.645086513	-94.6689904923
-116	310.818193348	0.603579648922
-117	317.850174244	-8.16925074119
-118	760.768563816	158.899747303
-119	751.639494384	-10.1737237633
-120	134.727061236	-112.240165774
-121	443.663104546	35.9142670118
-122	120.582255024	-95.9539247487
-123	701.810179545	34.2040034726
-124	189.915569791	-35.8103229767
-125	407.049423998	31.083201169
-126	504.5162551	68.1757629779
-127	166.43389463	-31.0273756263
-128	175.710888105	-23.6007432586
-129	331.337602873	-52.0687223872
-130	449.226900407	58.4015700824
-131	541.286218925	34.1519070045
-132	720.408837559	205.516700468
-133	337.433052457	-54.5101051094
-134	205.931543841	-26.455675751
-135	243.435290768	-85.4859125517
-136	621.458234334	82.0542292543
-137	359.103338249	-2.82469536745
-138	308.424905512	-7.26740920646
-139	149.119420857	-29.5420561961
-140	377.240394766	42.795794032
-141	401.861370157	4.78297480198
-142	340.978910982	51.0721016846
-143	164.148721387	-93.5358504774
-144	463.429829758	15.6961470109
-145	432.626443546	20.920919668
-146	326.966839126	13.6663069767
-147	203.600498192	-63.212496422
-148	282.746813465	-0.0136951442637
-149	307.799089061	-1.97826475167
-150	180.889562913	-51.1811856252
+1	-398.9	51.44
+2	-417.9	-15.21
+3	-436.4	-14.59
+4	-426.6	-35.77
+5	-404.2	50.89
+6	-336.3	116.3
+7	-428.2	-8.706
+8	-397.5	30.04
+9	-449.3	-69.17
+10	-411.3	-6.517
+11	-366.8	101.6
+12	-401.4	8.645
+13	-427.5	-23.4
+14	-480.6	-64.4
+15	-365	184
+16	-328.1	209.3
+17	-376	123.1
+18	-395.3	50.18
+19	-319.7	140.6
+20	-382.1	78.28
+21	-351.3	67.61
+22	-379.7	66.98
+23	-463.1	16.21
+24	-362.3	23.16
+25	-373.8	3.533
+26	-393.6	-9.008
+27	-381	25.84
+28	-383	60.16
+29	-393.6	51.96
+30	-410.4	-19.22
+31	-405.3	-18.83
+32	-363.1	68.48
+33	-379.2	122.6
+34	-363.4	167.7
+35	-411.3	-6.517
+36	-426.8	15.82
+37	-381.9	95.23
+38	-411.3	-6.517
+39	-456.3	-59.36
+40	-391	40.12
+41	-410.9	41.53
+42	-455.5	-108.8
+43	-454.1	-42.36
+44	-372.2	32.83
+45	-339.3	69.98
+46	-420.7	-25.76
+47	-376.4	77.94
+48	-433.9	-25.44
+49	-373.7	90.82
+50	-407.8	22.14
+51	239.3	112.1
+52	142.4	51.31
+53	260.5	78.62
+54	-61.14	-105.7
+55	156.2	13.64
+56	39.54	-57.03
+57	168.7	42.07
+58	-210.9	-120.5
+59	160	39.56
+60	-88.03	-93.36
+61	-189.5	-151.8
+62	37.28	-10.83
+63	-26.96	-64.96
+64	121.1	-17.69
+65	-84.85	-26.41
+66	154.4	78.39
+67	44.94	-50.46
+68	-20.95	-37.71
+69	87.72	-74.64
+70	-67.63	-71.05
+71	143.8	-18.24
+72	15.84	-0.8465
+73	166.2	-50.16
+74	107	-23.94
+75	93.36	29.78
+76	139.5	56.16
+77	216.6	39.45
+78	259.2	36.46
+79	86.79	-22.2
+80	-109.2	-37.83
+81	-92.66	-86.17
+82	-109.5	-81.2
+83	-36.85	-34.93
+84	171.6	-68.3
+85	22.97	-69.09
+86	107.6	29.13
+87	204.9	64.63
+88	77.68	-47.21
+89	-18.51	-32.57
+90	-55.59	-87.04
+91	-6.459	-89.26
+92	110	-3.294
+93	-26.71	-48.17
+94	-204.5	-120.9
+95	-13.97	-65.62
+96	0.1441	-24.37
+97	2.392	-36.46
+98	70.75	9.238
+99	-219.8	-88.47
+100	-13.59	-43.06
+101	427	-35.01
+102	164.1	-93.54
+103	484.8	29.68
+104	303.8	-43.81
+105	390.9	-31.49
+106	683.7	50.46
+107	-35.5	-162.1
+108	559.2	28.08
+109	373.7	-56.94
+110	585.8	94.5
+111	274.2	26.77
+112	268.7	-44.95
+113	376.3	15.88
+114	136.5	-120.4
+115	195.6	-94.67
+116	310.8	0.6036
+117	317.9	-8.169
+118	760.8	158.9
+119	751.6	-10.17
+120	134.7	-112.2
+121	443.7	35.91
+122	120.6	-95.95
+123	701.8	34.2
+124	189.9	-35.81
+125	407	31.08
+126	504.5	68.18
+127	166.4	-31.03
+128	175.7	-23.6
+129	331.3	-52.07
+130	449.2	58.4
+131	541.3	34.15
+132	720.4	205.5
+133	337.4	-54.51
+134	205.9	-26.46
+135	243.4	-85.49
+136	621.5	82.05
+137	359.1	-2.825
+138	308.4	-7.267
+139	149.1	-29.54
+140	377.2	42.8
+141	401.9	4.783
+142	341	51.07
+143	164.1	-93.54
+144	463.4	15.7
+145	432.6	20.92
+146	327	13.67
+147	203.6	-63.21
+148	282.7	-0.0137
+149	307.8	-1.978
+150	180.9	-51.18
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kpca_out2.pdf
--- a/test-data/kpca_out2.pdf	Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kpca_out2.pdf	Wed Mar 10 16:23:53 2010 -0500
@@ -2,8 +2,8 @@
 %âãÏÓ\r
 1 0 obj
 <<
-/CreationDate (D:20100303115833)
-/ModDate (D:20100303115833)
+/CreationDate (D:20100310152519)
+/ModDate (D:20100310152519)
 /Title (R Graphics Output)
 /Producer (R 2.10.0)
 /Creator (R)
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kpca_out3.tabular
--- a/test-data/kpca_out3.tabular	Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kpca_out3.tabular	Wed Mar 10 16:23:53 2010 -0500
@@ -1,304 +1,304 @@
 #Component	1
-#Eigenvalue	0.383633782425
+#Eigenvalue	0.3836
 #Principal component vectors	1
-1	-0.185843051028
-2	-0.181428165807
-3	-0.185859249771
-4	-0.180608592797
-5	-0.185779114773
-6	-0.173450096143
-7	-0.184654978426
-8	-0.183342755691
-9	-0.179634326545
-10	-0.181320357007
-11	-0.183283707685
-12	-0.180538178566
-13	-0.182036533193
-14	-0.185344590367
-15	-0.184526727477
-16	-0.170434338287
-17	-0.183277063273
-18	-0.184927028233
-19	-0.175718857154
-20	-0.181680491185
-21	-0.177181023283
-22	-0.180973935981
-23	-0.18879919219
-24	-0.171685597374
-25	-0.16885668022
-26	-0.176210994722
-27	-0.177901230872
-28	-0.183637458976
-29	-0.185586660267
-30	-0.178962452299
-31	-0.177730530774
-32	-0.180917047631
-33	-0.179323102414
-34	-0.178700887193
-35	-0.181320357007
-36	-0.187026472694
-37	-0.187484289185
-38	-0.181320357007
-39	-0.183195868488
-40	-0.183342755691
-41	-0.186668209958
-42	-0.164131657602
-43	-0.185859249771
-44	-0.174314649845
-45	-0.165903096322
-46	-0.180461981351
-47	-0.180029913254
-48	-0.184124311064
-49	-0.183283707685
-50	-0.185012459583
-51	0.0942363984412
-52	0.0852035041935
-53	0.10768188396
-54	0.0475694649417
-55	0.0943527981382
-56	0.0825748819852
-57	0.0979972750157
-58	-0.0233654549559
-59	0.0882288472796
-60	0.0416564831403
-61	-0.00243777000151
-62	0.0665074398542
-63	0.0371663691142
-64	0.0968217773028
-65	0.00993115739351
-66	0.0768633607048
-67	0.0872586274324
-68	0.0436063386747
-69	0.0862347466212
-70	0.0321343717223
-71	0.108058085177
-72	0.0458355513695
-73	0.108266881631
-74	0.0911783500891
-75	0.0683993163465
-76	0.0778280589161
-77	0.102242260903
-78	0.116337023057
-79	0.0879507719057
-80	-0.00751313350151
-81	0.0242445751445
-82	0.0120027700758
-83	0.0346227542783
-84	0.118261876355
-85	0.0872586274324
-86	0.0849092505391
-87	0.098013797976
-88	0.0756882831679
-89	0.0522329673214
-90	0.0473615456199
-91	0.0732645705878
-92	0.0905493705978
-93	0.0435140522343
-94	-0.0221940320837
-95	0.0621646602878
-96	0.0565885154519
-97	0.0609576609235
-98	0.0683993163465
-99	-0.0478191910469
-100	0.0539084973987
-101	0.116079702219
-102	0.122090948168
-103	0.125794088091
-104	0.128509617495
-105	0.12652479603
-106	0.102504931737
-107	0.0929811106109
-108	0.116032789314
-109	0.124684918317
-110	0.108672337594
-111	0.120990051431
-112	0.126248324837
-113	0.128639151235
-114	0.118532439899
-115	0.12178749141
-116	0.124484067654
-117	0.127958285183
-118	0.0862556895231
-119	0.0821350202959
-120	0.108242711844
-121	0.125331989407
-122	0.1167560565
-123	0.0979484407405
-124	0.11460521934
-125	0.125894432778
-126	0.123158494779
-127	0.11059361124
-128	0.114095357757
-129	0.128690994227
-130	0.125636948314
-131	0.121892068306
-132	0.100713002067
-133	0.12806059698
-134	0.116484703738
-135	0.121891129935
-136	0.119070960893
-137	0.122085746848
-138	0.127403650206
-139	0.109872871363
-140	0.127473736894
-141	0.125371169881
-142	0.121785772566
-143	0.122090948168
-144	0.122665736125
-145	0.121132693034
-146	0.124391104206
-147	0.117888635141
-148	0.124958557781
-149	0.122870371594
-150	0.120935324954
+1	-0.1858
+2	-0.1814
+3	-0.1859
+4	-0.1806
+5	-0.1858
+6	-0.1735
+7	-0.1847
+8	-0.1833
+9	-0.1796
+10	-0.1813
+11	-0.1833
+12	-0.1805
+13	-0.182
+14	-0.1853
+15	-0.1845
+16	-0.1704
+17	-0.1833
+18	-0.1849
+19	-0.1757
+20	-0.1817
+21	-0.1772
+22	-0.181
+23	-0.1888
+24	-0.1717
+25	-0.1689
+26	-0.1762
+27	-0.1779
+28	-0.1836
+29	-0.1856
+30	-0.179
+31	-0.1777
+32	-0.1809
+33	-0.1793
+34	-0.1787
+35	-0.1813
+36	-0.187
+37	-0.1875
+38	-0.1813
+39	-0.1832
+40	-0.1833
+41	-0.1867
+42	-0.1641
+43	-0.1859
+44	-0.1743
+45	-0.1659
+46	-0.1805
+47	-0.18
+48	-0.1841
+49	-0.1833
+50	-0.185
+51	0.09424
+52	0.0852
+53	0.1077
+54	0.04757
+55	0.09435
+56	0.08257
+57	0.098
+58	-0.02337
+59	0.08823
+60	0.04166
+61	-0.002438
+62	0.06651
+63	0.03717
+64	0.09682
+65	0.009931
+66	0.07686
+67	0.08726
+68	0.04361
+69	0.08623
+70	0.03213
+71	0.1081
+72	0.04584
+73	0.1083
+74	0.09118
+75	0.0684
+76	0.07783
+77	0.1022
+78	0.1163
+79	0.08795
+80	-0.007513
+81	0.02424
+82	0.012
+83	0.03462
+84	0.1183
+85	0.08726
+86	0.08491
+87	0.09801
+88	0.07569
+89	0.05223
+90	0.04736
+91	0.07326
+92	0.09055
+93	0.04351
+94	-0.02219
+95	0.06216
+96	0.05659
+97	0.06096
+98	0.0684
+99	-0.04782
+100	0.05391
+101	0.1161
+102	0.1221
+103	0.1258
+104	0.1285
+105	0.1265
+106	0.1025
+107	0.09298
+108	0.116
+109	0.1247
+110	0.1087
+111	0.121
+112	0.1262
+113	0.1286
+114	0.1185
+115	0.1218
+116	0.1245
+117	0.128
+118	0.08626
+119	0.08214
+120	0.1082
+121	0.1253
+122	0.1168
+123	0.09795
+124	0.1146
+125	0.1259
+126	0.1232
+127	0.1106
+128	0.1141
+129	0.1287
+130	0.1256
+131	0.1219
+132	0.1007
+133	0.1281
+134	0.1165
+135	0.1219
+136	0.1191
+137	0.1221
+138	0.1274
+139	0.1099
+140	0.1275
+141	0.1254
+142	0.1218
+143	0.1221
+144	0.1227
+145	0.1211
+146	0.1244
+147	0.1179
+148	0.125
+149	0.1229
+150	0.1209
 #Rotated values	1
-1	-10.6943508905
-2	-10.440296023
-3	-10.6952830482
-4	-10.393133639
-5	-10.6906716744
-6	-9.98119746679
-7	-10.6259831726
-8	-10.5504712269
-9	-10.3370694219
-10	-10.4340921584
-11	-10.5470733054
-12	-10.3890816473
-13	-10.4753045652
-14	-10.6656669382
-15	-10.618602963
-16	-9.80765547784
-17	-10.5466909523
-18	-10.641638297
-19	-10.111753472
-20	-10.4548161039
-21	-10.1958939204
-22	-10.4141573371
-23	-10.8644622328
-24	-9.87965926626
-25	-9.71686903807
-26	-10.1400735615
-27	-10.2373383146
-28	-10.5674299473
-29	-10.6795968669
-30	-10.2984063731
-31	-10.227515366
-32	-10.4108836932
-33	-10.3191600083
-34	-10.2833545914
-35	-10.4340921584
-36	-10.76245097
-37	-10.7887960508
-38	-10.4340921584
-39	-10.5420185929
-40	-10.5504712269
-41	-10.7418347167
-42	-9.44496729322
-43	-10.6952830482
-44	-10.0309482678
-45	-9.54690485367
-46	-10.3846968734
-47	-10.3598334857
-48	-10.5954458835
-49	-10.5470733054
-50	-10.6465544498
-51	5.42283989641
-52	4.90304138844
-53	6.19656126634
-54	2.73738806452
-55	5.42953812482
-56	4.75177714639
-57	5.63925979224
-58	-1.34456667942
-59	5.07713496012
-60	2.39712512844
-61	-0.140281638954
-62	3.8271751066
-63	2.13874121434
-64	5.57161569716
-65	0.571489121209
-66	4.42310726956
-67	5.02130359366
-68	2.50932969652
-69	4.96238430341
-70	1.84917458545
-71	6.2182097907
-72	2.63760989121
-73	6.2302249967
-74	5.24686429798
-75	3.93604326679
-76	4.47862089311
-77	5.88353779109
-78	6.69462182871
-79	5.061133094
-80	-0.432343773457
-81	1.39515570989
-82	0.690700212563
-83	1.99236872726
-84	6.80538764143
-85	5.02130359366
-86	4.88610854207
-87	5.6402106071
-88	4.35548735354
-89	3.00574962312
-90	2.72542333314
-91	4.21601464985
-92	5.21066963079
-93	2.50401906709
-94	-1.27715707133
-95	3.5772695639
-96	3.25638993369
-97	3.50781270418
-98	3.93604326679
-99	-2.75175857007
-100	3.10216811428
-101	6.67981428373
-102	7.02573183683
-103	7.23882927317
-104	7.39509459563
-105	7.28087791073
-106	5.89865320193
-107	5.35060427365
-108	6.67711467748
-109	7.17500202376
-110	6.25355698739
-111	6.96238065996
-112	7.26496835728
-113	7.4025486234
-114	6.82095723878
-115	7.00826939727
-116	7.16344405884
-117	7.36336814062
-118	4.96358946411
-119	4.72646527585
-120	6.22883414471
-121	7.21223777328
-122	6.71873513643
-123	5.63644962058
-124	6.59496506713
-125	7.24460361494
-126	7.08716387847
-127	6.36411680878
-128	6.56562504802
-129	7.40553193189
-130	7.2297866541
-131	7.01428728179
-132	5.79553648833
-133	7.36925567984
-134	6.70312012344
-135	7.01423328312
-136	6.85194646563
-137	7.0254325265
-138	7.33145163347
-139	6.322641784
-140	7.33548477667
-141	7.21449241625
-142	7.00817048627
-143	7.02573183683
-144	7.05880804852
-145	6.97058898059
-146	7.158094471
-147	6.78390945061
-148	7.19074862515
-149	7.0705838104
-150	6.95923142115
+1	-10.69
+2	-10.44
+3	-10.7
+4	-10.39
+5	-10.69
+6	-9.981
+7	-10.63
+8	-10.55
+9	-10.34
+10	-10.43
+11	-10.55
+12	-10.39
+13	-10.48
+14	-10.67
+15	-10.62
+16	-9.808
+17	-10.55
+18	-10.64
+19	-10.11
+20	-10.45
+21	-10.2
+22	-10.41
+23	-10.86
+24	-9.88
+25	-9.717
+26	-10.14
+27	-10.24
+28	-10.57
+29	-10.68
+30	-10.3
+31	-10.23
+32	-10.41
+33	-10.32
+34	-10.28
+35	-10.43
+36	-10.76
+37	-10.79
+38	-10.43
+39	-10.54
+40	-10.55
+41	-10.74
+42	-9.445
+43	-10.7
+44	-10.03
+45	-9.547
+46	-10.38
+47	-10.36
+48	-10.6
+49	-10.55
+50	-10.65
+51	5.423
+52	4.903
+53	6.197
+54	2.737
+55	5.43
+56	4.752
+57	5.639
+58	-1.345
+59	5.077
+60	2.397
+61	-0.1403
+62	3.827
+63	2.139
+64	5.572
+65	0.5715
+66	4.423
+67	5.021
+68	2.509
+69	4.962
+70	1.849
+71	6.218
+72	2.638
+73	6.23
+74	5.247
+75	3.936
+76	4.479
+77	5.884
+78	6.695
+79	5.061
+80	-0.4323
+81	1.395
+82	0.6907
+83	1.992
+84	6.805
+85	5.021
+86	4.886
+87	5.64
+88	4.355
+89	3.006
+90	2.725
+91	4.216
+92	5.211
+93	2.504
+94	-1.277
+95	3.577
+96	3.256
+97	3.508
+98	3.936
+99	-2.752
+100	3.102
+101	6.68
+102	7.026
+103	7.239
+104	7.395
+105	7.281
+106	5.899
+107	5.351
+108	6.677
+109	7.175
+110	6.254
+111	6.962
+112	7.265
+113	7.403
+114	6.821
+115	7.008
+116	7.163
+117	7.363
+118	4.964
+119	4.726
+120	6.229
+121	7.212
+122	6.719
+123	5.636
+124	6.595
+125	7.245
+126	7.087
+127	6.364
+128	6.566
+129	7.406
+130	7.23
+131	7.014
+132	5.796
+133	7.369
+134	6.703
+135	7.014
+136	6.852
+137	7.025
+138	7.331
+139	6.323
+140	7.335
+141	7.214
+142	7.008
+143	7.026
+144	7.059
+145	6.971
+146	7.158
+147	6.784
+148	7.191
+149	7.071
+150	6.959
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kpca_out4.pdf
--- a/test-data/kpca_out4.pdf	Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kpca_out4.pdf	Wed Mar 10 16:23:53 2010 -0500
@@ -2,8 +2,8 @@
 %âãÏÓ\r
 1 0 obj
 <<
-/CreationDate (D:20100303121837)
-/ModDate (D:20100303121837)
+/CreationDate (D:20100310154313)
+/ModDate (D:20100310154313)
 /Title (R Graphics Output)
 /Producer (R 2.10.0)
 /Creator (R)
diff -r 07a608852925 -r 26c40d8e8fdc test-data/pca_out1.tabular
--- a/test-data/pca_out1.tabular	Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/pca_out1.tabular	Wed Mar 10 16:23:53 2010 -0500
@@ -1,159 +1,159 @@
 #Component	1	2	3	4
-#Std. deviation	0.959802547771	0.143553847861	0.383866224491	1.70611197867
-#Proportion of variance explained	0.230305232677	0.00515192680891	0.0368383195763	0.727704520938
+#Std. deviation	0.9598	0.1436	0.3839	1.706
+#Proportion of variance explained	0.2303	0.005152	0.03684	0.7277
 #Loadings	1	2	3	4
-c1	0.522371620408	-0.37231836335	0.721016809062	0.2619955869
-c2	-0.263354915314	-0.925556494147	-0.242032877214	-0.124134810063
-c3	0.581254005598	-0.0210947768412	-0.140892258488	-0.80115426908
-c4	0.565611049883	-0.0654157690789	-0.633801403356	0.523546271604
+c1	0.5224	-0.3723	0.721	0.262
+c2	-0.2634	-0.9256	-0.242	-0.1241
+c3	0.5813	-0.02109	-0.1409	-0.8012
+c4	0.5656	-0.06542	-0.6338	0.5235
 #Scores	1	2	3	4
-1	-2.26454172839	-0.505703902774	0.121943347782	0.0230733234546
-2	-2.08642550062	0.655404729369	0.227250832399	0.103208244417
-3	-2.36795044906	0.318477310847	-0.051479623645	0.0278252250225
-4	-2.30419716115	0.575367712533	-0.0988604444374	-0.0663114622137
-5	-2.38877749351	-0.674767396703	-0.0214278489731	-0.0373972870003
-6	-2.07053680718	-1.51854855989	-0.0306842582807	0.00439877494217
-7	-2.44571133963	-0.0745626750069	-0.342197636214	-0.0380965668019
-8	-2.23384185871	-0.247613931752	0.0825744644549	-0.0255051622663
-9	-2.34195767646	1.09514636258	-0.153562398695	-0.0267938291099
-10	-2.18867575528	0.448629048338	0.246559522389	-0.0399073034932
-11	-2.16348655627	-1.07059557604	0.264009373347	0.0153011376811
-12	-2.32737775413	-0.158587454659	-0.100165615627	-0.134554258442
-13	-2.22408272205	0.70911815803	0.223214513993	0.00263061381638
-14	-2.63971626084	0.938281982108	-0.189570030151	-0.0194220182895
-15	-2.1922915085	-1.88997851232	0.469480094636	0.192782042174
-16	-2.25146521458	-2.72237107657	-0.032603796659	0.0471257043405
-17	-2.20275048008	-1.51375028255	0.00136349158139	0.186631581003
-18	-2.19017916379	-0.514304308221	0.0386155948801	0.091905506725
-19	-1.89407429302	-1.43111070694	0.370742833992	0.0595280967873
-20	-2.33994907042	-1.15803342899	-0.137417718926	-0.0398281841641
-21	-1.91455639496	-0.43046516328	0.416006874756	0.0103602240245
-22	-2.20464539753	-0.952457317289	-0.164738346381	0.0577291622309
-23	-2.77416979371	-0.489517026503	-0.338836384343	0.0178537297394
-24	-1.82041156334	-0.106750792835	-0.0400614724262	0.150345594969
-25	-2.22821749946	-0.162186162666	-0.124201428024	-0.271228862988
-26	-1.95702400734	0.607892567484	0.298591028776	0.0438372887169
-27	-2.05206331128	-0.266014311982	-0.092092978814	0.0666010027592
-28	-2.16819365335	-0.552016495324	0.201295481624	0.0092605692697
-29	-2.14030596328	-0.336640408845	0.265314544537	0.0835439339095
-30	-2.26879019439	0.314878602841	-0.0755154360416	-0.108849379523
-31	-2.14455442928	0.483942096769	0.0678557607133	-0.0483787690683
-32	-1.8319381022	-0.445266835503	0.265375243884	0.239140993596
-33	-2.60820286766	-1.82847519273	-0.0514195181545	-0.231922592748
-34	-2.4379508591	-2.18539161563	0.079349754887	-0.0510210290964
-35	-2.18867575528	0.448629048338	0.246559522389	-0.0399073034932
-36	-2.21111989681	0.18433781054	0.218624527745	0.168619768529
-37	-2.04441651932	-0.684956426295	0.47941157048	0.195613314291
-38	-2.18867575528	0.448629048338	0.246559522389	-0.0399073034932
-39	-2.43595220297	0.882169414776	-0.201557586676	-0.00996079071925
-40	-2.17054720188	-0.292726954966	0.169938535763	0.00624028506401
-41	-2.28652723884	-0.467991715671	-0.0407365389625	0.10571826091
-42	-1.87170722354	2.32769160611	0.194528609858	0.291692981754
-43	-2.55783441954	0.45381638049	-0.313571837569	-0.0674111169685
-44	-1.96427929035	-0.497391640018	-0.314755610064	0.175540206175
-45	-2.13337283292	-1.17143211177	-0.252793221689	-0.153228806955
-46	-2.07535759284	0.691917347136	0.0565590081895	0.140294980357
-47	-2.3812582168	-1.15063259287	-0.0621019034895	-0.15421856895
-48	-2.39819168766	0.362390764726	-0.146855632419	-0.0494784238231
-49	-2.22678121309	-1.02548255282	0.176645302039	-0.0164443096492
-50	-2.20595416865	-0.0322378452738	0.146593527367	0.0487782023735
-51	1.1039936528	-0.863112445851	0.685555107794	0.0349778613463
-52	0.732481440009	-0.598635573342	0.0940668019749	0.00445376366529
-53	1.24210950532	-0.614822449612	0.5548465341	0.00967335738053
-54	0.397307282823	1.75816894744	0.0185694823534	0.0673978468494
-55	1.0725939482	0.21175790268	0.397447437604	0.105541661979
-56	0.384458146169	0.591062468623	-0.126797689592	-0.240528081689
-57	0.748715075952	-0.778698611389	-0.148656022613	-0.0783010665497
-58	-0.497863387798	1.84886877128	-0.255555249978	-0.0393891394616
-59	0.926222367533	-0.0303308267834	0.595459889269	-0.0291024203562
-60	0.00496802558432	1.0294011124	-0.542867048794	-0.0283487628543
-61	-0.12469746138	2.65806267796	0.0398134481856	0.0161405573367
-62	0.438730117781	0.0588812850215	-0.206703491276	0.0398514578087
-63	0.551633981423	1.77258156486	0.761380223046	0.0483536968144
-64	0.717165066237	0.185434314505	0.0672998423614	-0.164555675253
-65	-0.0372583829657	0.432795098714	-0.198061449157	0.109025121493
-66	0.875890535941	-0.509998151059	0.503505831713	0.105141287026
-67	0.34800640198	0.190621646657	-0.492831517597	-0.192059488728
-68	0.153392544569	0.790725456234	0.29860451573	-0.204321214984
-69	1.21530320922	1.63335564452	0.479409913826	0.228214500251
-70	0.156941175644	1.30310327017	0.168586746037	-0.050413197095
-71	0.73825610437	-0.402470381619	-0.616772625667	-0.0844515277208
-72	0.472369682345	0.416608222444	0.262718282968	0.114244715208
-73	1.22798821408	0.94091479253	0.366704858932	-0.00844834785365
-74	0.629381045315	0.416811642542	0.289962473611	-0.273494878669
-75	0.700472799208	0.0634939276517	0.444767559049	0.0440812895288
-76	0.873536987401	-0.250708610702	0.472148885852	0.10212100282
-77	1.25422219052	0.0826200998131	0.726843529499	0.0408294176689
-78	1.35823984739	-0.32882026627	0.261458073662	0.06701379087
-79	0.662126137566	0.224346070943	-0.0873681069173	-0.0363525362821
-80	-0.0472815133199	1.05721241261	0.315319194662	0.0660077099012
-81	0.12153420888	1.56359237987	0.145241737641	-0.0078752797855
-82	0.0141182260514	1.57339235465	0.236581428008	-0.0311492615406
-83	0.236010837327	0.775923784012	0.147972884858	0.0244595545867
-84	1.05669142809	0.636901283769	-0.106753233719	-0.183419235854
-85	0.22141708833	0.280847693086	-0.667559660213	-0.255550383389
-86	0.431783160741	-0.855136920218	-0.450731487053	-0.111146168635
-87	1.04941335522	-0.522197264513	0.396142266415	0.0372988657504
-88	1.03587821032	1.39246648439	0.685434302956	0.139128619431
-89	0.067067599873	0.212620734893	-0.294128261931	-0.147491049208
-90	0.275425066252	1.32981591316	-0.09344476854	0.00994752060019
-91	0.272335066262	1.11944152412	-0.0981718909473	-0.269842631856
-92	0.623170539726	-0.0275426333023	0.0193046543801	-0.147722636862
-93	0.330005363837	0.988900731819	0.195968072839	0.00762651619612
-94	-0.373627622687	2.0179322652	-0.112184053224	0.0210814709933
-95	0.282944342955	0.853950716987	-0.134118823056	-0.106873761349
-96	0.089053110319	0.174908547791	-0.131448375187	-0.230135986663
-97	0.224356783209	0.380484659487	-0.158769002642	-0.132578640268
-98	0.573883485558	0.15371997408	0.270039416433	-0.0194096051318
-99	-0.457012872505	1.53946451026	-0.196126173314	0.20088337989
-100	0.252244473269	0.595860745965	-0.0947499397296	-0.0582952756283
-101	1.84767259431	-0.871696661773	-1.00276098578	-0.0510680368143
-102	1.15318980825	0.701326113857	-0.53146463504	-0.0404135807034
-103	2.20634949713	-0.554470104935	0.205495910098	0.0593004996357
-104	1.43868540233	0.0500105222689	-0.163390463819	-0.235759861148
-105	1.86789070256	-0.291192801761	-0.394004333187	-0.0167817995605
-106	2.75419670883	-0.788432206355	0.58623270438	-0.100879674319
-107	0.358374474842	1.56009458398	-0.990999895323	-0.132987437876
-108	2.30300589815	-0.409516695222	0.654166687004	-0.237212798451
-109	2.00173529922	0.723865359313	0.39407044827	-0.0849938223583
-110	2.2675545991	-1.92144299147	-0.392517657812	0.102907298269
-111	1.3659094292	-0.693948039804	-0.283279516018	0.107010918256
-112	1.59906458565	0.4282488359	-0.0233040821221	0.058942700248
-113	1.88425185375	-0.41433275795	-0.0245485539642	0.146296963706
-114	1.25308651438	1.16739133525	-0.582130270891	0.0996816830012
-115	1.46406152299	0.444147569479	-1.00411052499	0.275022172524
-116	1.59180930264	-0.677035371602	-0.636650720962	0.190645617706
-117	1.47128018947	-0.253192471967	-0.0366575091837	-0.155435928096
-118	2.43737848219	-2.55675734149	0.134200081747	-0.275661550231
-119	3.30914118268	0.00236132010208	0.706933959275	0.0467561875045
-120	1.25398098669	1.71758384427	0.264622083882	-0.063067401986
-121	2.04049625966	-0.907398765015	-0.231878114284	0.167140048297
-122	0.97391511447	0.571174376366	-0.829503781073	0.0273189478123
-123	2.89806443584	-0.397791359172	0.860926842018	-0.126074285525
-124	1.32919369132	0.486760541904	0.0047073493335	0.140597875708
-125	1.70424070852	-1.01414842484	-0.295957876544	-0.0627403760289
-126	1.95772766179	-1.00333451543	0.422817051752	-0.21845913061
-127	1.17190450799	0.318896617311	-0.130651909956	0.125685466769
-128	1.01978105282	-0.0655429630966	-0.338042169623	-0.00906850832619
-129	1.78600886125	0.193272799857	-0.270002525769	0.0312072991185
-130	1.86477791271	-0.555381531579	0.71751068338	-0.207556767871
-131	2.43549739142	-0.246654468069	0.730234005788	-0.0167936016953
-132	2.31608241196	-2.62618386902	0.499619542563	-0.213160417565
-133	1.86037142585	0.18467239441	-0.35333027867	0.100039482389
-134	1.11127172568	0.29598610243	0.182659607661	-0.185740240258
-135	1.19746915512	0.817167741914	0.163213781512	-0.488403999516
-136	2.80094940373	-0.844748193785	0.547000957212	0.296321147128
-137	1.58015524852	-1.07247449613	-0.943392608462	0.0336074228517
-138	1.34704442435	-0.422255965896	-0.180028705939	-0.215906538551
-139	0.923432977766	-0.0192303705469	-0.417394303466	0.00474424585873
-140	1.85355198407	-0.672422728972	0.0148203293627	0.194875449427
-141	2.01615720068	-0.610397037555	-0.42591494689	0.246764701547
-142	1.9031168586	-0.686024831859	-0.127799364044	0.469214420513
-143	1.15318980825	0.701326113857	-0.53146463504	-0.0404135807034
-144	2.04330843929	-0.864684880472	-0.335266060523	0.0442781979364
-145	2.00169096693	-1.04855004662	-0.62926888815	0.212588357053
-146	1.87052207146	-0.382821837623	-0.254532318679	0.388890487462
-147	1.55849189072	0.905313601407	0.0253819098598	0.221322183713
-148	1.520845064	-0.266794574854	-0.17927720259	0.11890304299
-149	1.37639119064	-1.01636192879	-0.931405051938	0.0241461952814
-150	0.959298575616	0.0222839446606	-0.52879418717	-0.163675806017
+1	-2.265	-0.5057	0.1219	0.02307
+2	-2.086	0.6554	0.2273	0.1032
+3	-2.368	0.3185	-0.05148	0.02783
+4	-2.304	0.5754	-0.09886	-0.06631
+5	-2.389	-0.6748	-0.02143	-0.0374
+6	-2.071	-1.519	-0.03068	0.004399
+7	-2.446	-0.07456	-0.3422	-0.0381
+8	-2.234	-0.2476	0.08257	-0.02551
+9	-2.342	1.095	-0.1536	-0.02679
+10	-2.189	0.4486	0.2466	-0.03991
+11	-2.163	-1.071	0.264	0.0153
+12	-2.327	-0.1586	-0.1002	-0.1346
+13	-2.224	0.7091	0.2232	0.002631
+14	-2.64	0.9383	-0.1896	-0.01942
+15	-2.192	-1.89	0.4695	0.1928
+16	-2.251	-2.722	-0.0326	0.04713
+17	-2.203	-1.514	0.001363	0.1866
+18	-2.19	-0.5143	0.03862	0.09191
+19	-1.894	-1.431	0.3707	0.05953
+20	-2.34	-1.158	-0.1374	-0.03983
+21	-1.915	-0.4305	0.416	0.01036
+22	-2.205	-0.9525	-0.1647	0.05773
+23	-2.774	-0.4895	-0.3388	0.01785
+24	-1.82	-0.1068	-0.04006	0.1503
+25	-2.228	-0.1622	-0.1242	-0.2712
+26	-1.957	0.6079	0.2986	0.04384
+27	-2.052	-0.266	-0.09209	0.0666
+28	-2.168	-0.552	0.2013	0.009261
+29	-2.14	-0.3366	0.2653	0.08354
+30	-2.269	0.3149	-0.07552	-0.1088
+31	-2.145	0.4839	0.06786	-0.04838
+32	-1.832	-0.4453	0.2654	0.2391
+33	-2.608	-1.828	-0.05142	-0.2319
+34	-2.438	-2.185	0.07935	-0.05102
+35	-2.189	0.4486	0.2466	-0.03991
+36	-2.211	0.1843	0.2186	0.1686
+37	-2.044	-0.685	0.4794	0.1956
+38	-2.189	0.4486	0.2466	-0.03991
+39	-2.436	0.8822	-0.2016	-0.009961
+40	-2.171	-0.2927	0.1699	0.00624
+41	-2.287	-0.468	-0.04074	0.1057
+42	-1.872	2.328	0.1945	0.2917
+43	-2.558	0.4538	-0.3136	-0.06741
+44	-1.964	-0.4974	-0.3148	0.1755
+45	-2.133	-1.171	-0.2528	-0.1532
+46	-2.075	0.6919	0.05656	0.1403
+47	-2.381	-1.151	-0.0621	-0.1542
+48	-2.398	0.3624	-0.1469	-0.04948
+49	-2.227	-1.025	0.1766	-0.01644
+50	-2.206	-0.03224	0.1466	0.04878
+51	1.104	-0.8631	0.6856	0.03498
+52	0.7325	-0.5986	0.09407	0.004454
+53	1.242	-0.6148	0.5548	0.009673
+54	0.3973	1.758	0.01857	0.0674
+55	1.073	0.2118	0.3974	0.1055
+56	0.3845	0.5911	-0.1268	-0.2405
+57	0.7487	-0.7787	-0.1487	-0.0783
+58	-0.4979	1.849	-0.2556	-0.03939
+59	0.9262	-0.03033	0.5955	-0.0291
+60	0.004968	1.029	-0.5429	-0.02835
+61	-0.1247	2.658	0.03981	0.01614
+62	0.4387	0.05888	-0.2067	0.03985
+63	0.5516	1.773	0.7614	0.04835
+64	0.7172	0.1854	0.0673	-0.1646
+65	-0.03726	0.4328	-0.1981	0.109
+66	0.8759	-0.51	0.5035	0.1051
+67	0.348	0.1906	-0.4928	-0.1921
+68	0.1534	0.7907	0.2986	-0.2043
+69	1.215	1.633	0.4794	0.2282
+70	0.1569	1.303	0.1686	-0.05041
+71	0.7383	-0.4025	-0.6168	-0.08445
+72	0.4724	0.4166	0.2627	0.1142
+73	1.228	0.9409	0.3667	-0.008448
+74	0.6294	0.4168	0.29	-0.2735
+75	0.7005	0.06349	0.4448	0.04408
+76	0.8735	-0.2507	0.4721	0.1021
+77	1.254	0.08262	0.7268	0.04083
+78	1.358	-0.3288	0.2615	0.06701
+79	0.6621	0.2243	-0.08737	-0.03635
+80	-0.04728	1.057	0.3153	0.06601
+81	0.1215	1.564	0.1452	-0.007875
+82	0.01412	1.573	0.2366	-0.03115
+83	0.236	0.7759	0.148	0.02446
+84	1.057	0.6369	-0.1068	-0.1834
+85	0.2214	0.2808	-0.6676	-0.2556
+86	0.4318	-0.8551	-0.4507	-0.1111
+87	1.049	-0.5222	0.3961	0.0373
+88	1.036	1.392	0.6854	0.1391
+89	0.06707	0.2126	-0.2941	-0.1475
+90	0.2754	1.33	-0.09344	0.009948
+91	0.2723	1.119	-0.09817	-0.2698
+92	0.6232	-0.02754	0.0193	-0.1477
+93	0.33	0.9889	0.196	0.007627
+94	-0.3736	2.018	-0.1122	0.02108
+95	0.2829	0.854	-0.1341	-0.1069
+96	0.08905	0.1749	-0.1314	-0.2301
+97	0.2244	0.3805	-0.1588	-0.1326
+98	0.5739	0.1537	0.27	-0.01941
+99	-0.457	1.539	-0.1961	0.2009
+100	0.2522	0.5959	-0.09475	-0.0583
+101	1.848	-0.8717	-1.003	-0.05107
+102	1.153	0.7013	-0.5315	-0.04041
+103	2.206	-0.5545	0.2055	0.0593
+104	1.439	0.05001	-0.1634	-0.2358
+105	1.868	-0.2912	-0.394	-0.01678
+106	2.754	-0.7884	0.5862	-0.1009
+107	0.3584	1.56	-0.991	-0.133
+108	2.303	-0.4095	0.6542	-0.2372
+109	2.002	0.7239	0.3941	-0.08499
+110	2.268	-1.921	-0.3925	0.1029
+111	1.366	-0.6939	-0.2833	0.107
+112	1.599	0.4282	-0.0233	0.05894
+113	1.884	-0.4143	-0.02455	0.1463
+114	1.253	1.167	-0.5821	0.09968
+115	1.464	0.4441	-1.004	0.275
+116	1.592	-0.677	-0.6367	0.1906
+117	1.471	-0.2532	-0.03666	-0.1554
+118	2.437	-2.557	0.1342	-0.2757
+119	3.309	0.002361	0.7069	0.04676
+120	1.254	1.718	0.2646	-0.06307
+121	2.04	-0.9074	-0.2319	0.1671
+122	0.9739	0.5712	-0.8295	0.02732
+123	2.898	-0.3978	0.8609	-0.1261
+124	1.329	0.4868	0.004707	0.1406
+125	1.704	-1.014	-0.296	-0.06274
+126	1.958	-1.003	0.4228	-0.2185
+127	1.172	0.3189	-0.1307	0.1257
+128	1.02	-0.06554	-0.338	-0.009069
+129	1.786	0.1933	-0.27	0.03121
+130	1.865	-0.5554	0.7175	-0.2076
+131	2.435	-0.2467	0.7302	-0.01679
+132	2.316	-2.626	0.4996	-0.2132
+133	1.86	0.1847	-0.3533	0.1
+134	1.111	0.296	0.1827	-0.1857
+135	1.197	0.8172	0.1632	-0.4884
+136	2.801	-0.8447	0.547	0.2963
+137	1.58	-1.072	-0.9434	0.03361
+138	1.347	-0.4223	-0.18	-0.2159
+139	0.9234	-0.01923	-0.4174	0.004744
+140	1.854	-0.6724	0.01482	0.1949
+141	2.016	-0.6104	-0.4259	0.2468
+142	1.903	-0.686	-0.1278	0.4692
+143	1.153	0.7013	-0.5315	-0.04041
+144	2.043	-0.8647	-0.3353	0.04428
+145	2.002	-1.049	-0.6293	0.2126
+146	1.871	-0.3828	-0.2545	0.3889
+147	1.558	0.9053	0.02538	0.2213
+148	1.521	-0.2668	-0.1793	0.1189
+149	1.376	-1.016	-0.9314	0.02415
+150	0.9593	0.02228	-0.5288	-0.1637
diff -r 07a608852925 -r 26c40d8e8fdc test-data/pca_out2.pdf
--- a/test-data/pca_out2.pdf	Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/pca_out2.pdf	Wed Mar 10 16:23:53 2010 -0500
@@ -2,8 +2,8 @@
 %âãÏÓ\r
 1 0 obj
 <<
-/CreationDate (D:20100303130628)
-/ModDate (D:20100303130628)
+/CreationDate (D:20100310155829)
+/ModDate (D:20100310155829)
 /Title (R Graphics Output)
 /Producer (R 2.10.0)
 /Creator (R)
diff -r 07a608852925 -r 26c40d8e8fdc test-data/pca_out3.tabular
--- a/test-data/pca_out3.tabular	Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/pca_out3.tabular	Wed Mar 10 16:23:53 2010 -0500
@@ -1,159 +1,159 @@
 #Component	1	2	3	4
-#Std. deviation	0.490539105967	0.153379073796	0.279285544512	2.04857881547
-#Proportion of variance explained	0.0530155678505	0.00518308545019	0.017185139525	0.924616207174
+#Std. deviation	0.4905	0.1534	0.2793	2.049
+#Proportion of variance explained	0.05302	0.005183	0.01719	0.9246
 #Loadings	1	2	3	4
-c1	0.361589677381	-0.656539883286	0.580997279828	0.317254547169
-c2	-0.0822688898922	-0.729712371326	-0.596418087938	-0.324094352418
-c3	0.856572105291	0.175767403429	-0.072524075487	-0.47971898733
-c4	0.358843926248	0.074706470135	-0.549060910727	0.751120560381
+c1	0.3616	-0.6565	0.581	0.3173
+c2	-0.08227	-0.7297	-0.5964	-0.3241
+c3	0.8566	0.1758	-0.07252	-0.4797
+c4	0.3588	0.07471	-0.5491	0.7511
 #Scores	1	2	3	4
-1	-2.6842071251	-0.326607314764	0.021511837002	0.00100615724154
-2	-2.71539061563	0.169556847556	0.203521425005	0.0996024240168
-3	-2.88981953962	0.137345609605	-0.024709240999	0.0193045428325
-4	-2.74643719731	0.311124315752	-0.0376719752853	-0.0759552741085
-5	-2.72859298183	-0.333924563568	-0.0962296997746	-0.0631287327171
-6	-2.27989736101	-0.747782713225	-0.174325619016	-0.0271468036979
-7	-2.82089068218	0.0821045110247	-0.264251085191	-0.0500996250629
-8	-2.62648199332	-0.17040534896	0.0158015102643	-0.0462817609665
-9	-2.88795856534	0.570798026332	-0.0273354061145	-0.0266154143257
-10	-2.67384468672	0.106691703753	0.191533299736	-0.0558909659961
-11	-2.50652678934	-0.651935013673	0.0692749958139	-0.0166082478245
-12	-2.61314271827	-0.0215206319603	-0.10765035325	-0.157704569133
-13	-2.787433976	0.227740188871	0.200327788095	-0.00723508673812
-14	-3.22520044627	0.503279909485	-0.0684136291723	-0.0219466641234
-15	-2.64354321694	-1.18619489941	0.14450570401	0.156980961517
-16	-2.3838693238	-1.34475434456	-0.28373066394	0.00192618170967
-17	-2.62252620313	-0.818089674597	-0.145315988822	0.164740791234
-18	-2.64832273248	-0.319136667751	-0.0333942540707	0.0761182132796
-19	-2.19907796143	-0.879244088092	0.114521464798	0.0253269396564
-20	-2.58734618892	-0.520473638806	-0.219572088001	-0.0690819911788
-21	-2.31053170131	-0.397867821589	0.233695607098	-0.0153237395651
-22	-2.5432349073	-0.44003175466	-0.21483637028	0.0384395001011
-23	-3.2158576949	-0.141615571626	-0.299618981511	0.00185704334745
-24	-2.30312853766	-0.10552267843	-0.0456800412745	0.14724549964
-25	-2.35617108668	0.0312095890683	-0.129407575896	-0.301620265332
-26	-2.50791722684	0.139056339913	0.247116337891	0.0353840812677
-27	-2.46905599755	-0.13788731459	-0.10126307943	0.0559704523767
-28	-2.56239094684	-0.37468456275	0.072359157436	-0.0152402867746
-29	-2.63982126838	-0.31929006596	0.139253373779	0.0651410472002
-30	-2.63284790803	0.190075830634	-0.046466463645	-0.124611153366
-31	-2.5884620513	0.197393079438	0.0712750731315	-0.0604762634078
-32	-2.41007733712	-0.418080008248	0.13838824005	0.230844169977
-33	-2.6476366734	-0.81998263256	-0.230585604254	-0.284808954263
-34	-2.59715947708	-1.10002192801	-0.163581912624	-0.0989580705836
-35	-2.67384468672	0.106691703753	0.191533299736	-0.0558909659961
-36	-2.86699984693	-0.0771930957236	0.156842350498	0.162452805716
-37	-2.62522846468	-0.606800008422	0.261163156482	0.175879874842
-38	-2.67384468672	0.106691703753	0.191533299736	-0.0558909659961
-39	-2.98184266485	0.480250048856	-0.0797248073596	-0.0110529508345
-40	-2.59032302559	-0.236059337289	0.0739012382471	-0.0145563062497
-41	-2.77013891075	-0.271059419765	-0.0842415745048	0.0923646572958
-42	-2.85221108157	0.93286536747	0.340961491107	0.322650606613
-43	-2.99829644283	0.334307574591	-0.199008424947	-0.0758718213181
-44	-2.40551410128	-0.195917257696	-0.270717070369	0.173785129211
-45	-2.20883295418	-0.442696030421	-0.303487809268	-0.185857530073
-46	-2.71566519075	0.242681482898	0.0905156059501	0.142989025338
-47	-2.53757337101	-0.510367545477	-0.171918404477	-0.19216594595
-48	-2.84032129683	0.220576338276	-0.0900613765304	-0.0603928106173
-49	-2.54268575708	-0.586281025344	0.0111752678312	-0.0483337025413
-50	-2.70391231486	-0.115010852171	0.0826957266068	0.0340995730083
-51	1.28479458785	-0.685439186133	0.406129553077	0.0192901168553
-52	0.932410752983	-0.319198089834	0.0171299092051	-6.75794170965e-06
-53	1.46406132278	-0.504189832972	0.338260727718	-0.000857644047617
-54	0.180967206348	0.825603943576	0.177082856393	0.0957844483716
-55	1.08713448721	-0.0753903892888	0.306544464814	0.113384539009
-56	0.640436749523	0.41732348297	-0.0411887693536	-0.242671312069
-57	1.09522370994	-0.283891210938	-0.170022533741	-0.0849733893283
-58	-0.751467140648	1.00110751297	-0.0156721942382	-0.0165105921546
-59	1.04329778071	-0.228956908777	0.414814566149	-0.0375235535918
-60	-0.0101900707279	0.720574866702	-0.283437246254	-0.00594570197509
-61	-0.511086195895	1.26249195386	0.266489953822	0.0489088060634
-62	0.511098060683	0.102284105046	-0.132327890475	0.0501005351566
-63	0.262335756153	0.547893298025	0.691941578319	0.0614849890834
-64	0.984044545169	0.124360420222	0.0621574276135	-0.169010669871
-65	-0.174864001966	0.25181557108	-0.093658638192	0.12494088657
-66	0.927572942033	-0.468236205043	0.313229400569	0.100438884146
-67	0.659592789056	0.35197629106	-0.328384297069	-0.188991525193
-68	0.23454058626	0.331921829362	0.270280670836	-0.211984995292
-69	0.942361707399	0.54182225815	0.497348541178	0.260636685043
-70	0.0432464003287	0.581489446612	0.232963556483	-0.0395611807383
-71	1.11624072375	0.0842140138784	-0.459844226573	-0.0772135596107
-72	0.35678656783	0.0668238279414	0.227472180321	0.124090000464
-73	1.29646885029	0.327561519795	0.347513212584	0.00324623910204
-74	0.920502648909	0.182390363328	0.231611418553	-0.286825346705
-75	0.714008213643	-0.150379153148	0.320372332829	0.0429412331735
-76	0.899640863284	-0.329610979582	0.31477148138	0.101122864671
-77	1.33104141885	-0.244669520602	0.521244924738	0.0375050496558
-78	1.55739627207	-0.267392584813	0.164638490852	0.0703530951036
-79	0.812455548998	0.162331574879	-0.0363435763445	-0.0296802710837
-80	-0.307334755663	0.365086612766	0.315337196939	0.0765303776306
-81	-0.0703428889493	0.702537931731	0.241758044842	0.00909469851961
-82	-0.191884492103	0.677490544374	0.303916543464	-0.0180454587855
-83	0.134994950451	0.311709642703	0.174973303788	0.0341829142499
-84	1.37873698278	0.421205138215	-0.0154804951217	-0.17758073696
-85	0.58727485358	0.483284267717	-0.444583753035	-0.252442434627
-86	0.807205496677	-0.195053963771	-0.389458711386	-0.116615391255
-87	1.22042896624	-0.408035337001	0.23656608685	0.0316352439847
-88	0.812867790369	0.370678998319	0.612871050061	0.157700491174
-89	0.24519516169	0.266728035662	-0.189562484729	-0.147328042337
-90	0.164513428369	0.679661469311	0.0577992388057	0.030965577888
-91	0.463030988871	0.669526546536	0.0240538908898	-0.268443508324
-92	0.890160445651	0.0338124427468	0.00976802636843	-0.15344820638
-93	0.22887904997	0.402257620179	0.227362705033	0.0186204507587
-94	-0.707081283921	1.00842476178	0.102069342538	0.047624297804
-95	0.355533039187	0.503218487403	-0.0178894658965	-0.0980716353447
-96	0.331126947333	0.211180140663	-0.0838090732225	-0.238686542391
-97	0.375238228947	0.291622024809	-0.0790733555014	-0.131165051111
-98	0.641690278167	-0.0190711764913	0.204172876864	-0.0205096762602
-99	-0.908463333123	0.751568725169	0.00773658450698	0.233558634274
-100	0.297807907407	0.347016521599	-0.0121791391589	-0.0507837171367
-101	2.53172698044	0.0118422366403	-0.758458651528	-0.0325995685145
-102	1.41407222517	0.574925055912	-0.296398224305	-0.0156954782795
-103	2.61648460828	-0.341935286987	0.112141370554	0.0659560495264
-104	1.97081494591	0.181125694705	-0.10653914865	-0.236858624882
-105	2.34975798395	0.0418825496548	-0.284110680866	-0.00131272400361
-106	3.39687992068	-0.54716804623	0.351873157627	-0.11121996802
-107	0.519383245085	1.19135168905	-0.546685531125	-0.0987984199258
-108	2.93200509699	-0.352377006181	0.423691278337	-0.255407368844
-109	2.31967279387	0.245548170607	0.349922183359	-0.0762628625132
-110	2.91813423364	-0.780380629372	-0.421738933614	0.107729319479
-111	1.66193494702	-0.242038401038	-0.242815263468	0.119447584568
-112	1.80234045266	0.216154606627	0.037695328494	0.0787134525557
-113	2.16537886295	-0.215280283373	-0.0331481831991	0.162667280308
-114	1.34459421751	0.776415425177	-0.282868018224	0.140481892258
-115	1.58526729931	0.539307053847	-0.630570488462	0.327455366669
-116	1.90474357821	-0.118818990983	-0.480138079766	0.217114500499
-117	1.94924878186	-0.0407302594278	-0.0427290939294	-0.157845251957
-118	3.48876537966	-1.17154454426	-0.129320083362	-0.311629837933
-119	3.79468686121	-0.253265570973	0.516970715994	0.056451643541
-120	1.29832982457	0.761013936522	0.344887047469	-0.0426737180561
-121	2.42816725902	-0.376781971254	-0.218649070047	0.183854179151
-122	1.19809737227	0.605578961765	-0.51264076504	0.0595000305491
-123	3.4992654843	-0.456773466964	0.576910186722	-0.137759597591
-124	1.38766825018	0.20403098657	0.0635113217786	0.163763536733
-125	2.27585364931	-0.333386525757	-0.284678152661	-0.0622302776004
-126	2.6141938307	-0.558366950279	0.208423346619	-0.240445433088
-127	1.25762518293	0.179136997423	-0.0469778074492	0.147600545507
-128	1.29066964774	0.116425251829	-0.231613560568	0.00308432157359
-129	2.12285398051	0.210854884549	-0.153515885091	0.052612433191
                    
                  
                  
                          
                            
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/07a608852925
changeset: 3508:07a608852925
user:      fubar: ross Lazarus at gmail period com
date:      Wed Mar 10 16:11:59 2010 -0500
description:
Remove redundant composite_extensions - not needed with new composite output test framework
diffstat:
 test/base/twilltestcase.py |  2 --
 1 files changed, 0 insertions(+), 2 deletions(-)
diffs (12 lines):
diff -r 70930ea26347 -r 07a608852925 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py	Wed Mar 10 16:09:37 2010 -0500
+++ b/test/base/twilltestcase.py	Wed Mar 10 16:11:59 2010 -0500
@@ -23,8 +23,6 @@
 log = logging.getLogger( __name__ )
 
 class TwillTestCase( unittest.TestCase ):
-    composite_extensions = ['html','lped','pbed','fped','pphe','eigenstratgeno','eset','affybatch','malist','test-data' ]
-
 
     def setUp( self ):
         # Security helper
                    
                  
                  
                          
                            
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/70930ea26347
changeset: 3507:70930ea26347
user:      fubar: ross Lazarus at gmail period com
date:      Wed Mar 10 16:09:37 2010 -0500
description:
Cleanup genetics.py
Allow download of composite objects from libraries so content is in the archive
Allow download of composite objects from histories ditto (current default is zip)
Small fix to missing param in download from library
Small fix to twilltestcase when testing a tool with hidden form fields
diffstat:
 lib/galaxy/datatypes/genetics.py             |  468 ++++++++++++++------------
 lib/galaxy/tools/parameters/grouping.py      |   10 +-
 lib/galaxy/web/controllers/dataset.py        |  142 +++++++-
 lib/galaxy/web/controllers/library_common.py |    1 +
 test/base/twilltestcase.py                   |   57 +--
 5 files changed, 414 insertions(+), 264 deletions(-)
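A rough sketch of the zip download described above (illustrative only, not the committed code; the helper name and its arguments are invented here): packaging a composite object amounts to writing the primary HTML file plus every file under the dataset's extra files directory into one archive.

import os, glob, zipfile

def zip_composite_dataset(primary_file, extra_files_path, out_path):
    # Illustrative helper, not Galaxy's API: bundle a composite dataset's
    # primary (HTML index) file and all of its component files into a
    # single zip archive so the download contains the whole object.
    archive = zipfile.ZipFile(out_path, 'w', zipfile.ZIP_DEFLATED)
    try:
        archive.write(primary_file, os.path.basename(primary_file))
        for fpath in glob.glob(os.path.join(extra_files_path, '*.*')):
            archive.write(fpath, os.path.basename(fpath))
    finally:
        archive.close()
    return out_path

# e.g. zip_composite_dataset('dataset_42.dat', 'dataset_42_files', '/tmp/composite.zip')

The actual changeset below also supports tgz and tbz output and handles the case where ZIP64 is unavailable (plain zip downloads are otherwise capped at 2GB).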
diffs (1190 lines):
diff -r 9701e5ee128d -r 70930ea26347 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py	Wed Mar 10 14:25:34 2010 -0500
+++ b/lib/galaxy/datatypes/genetics.py	Wed Mar 10 16:09:37 2010 -0500
@@ -1,5 +1,6 @@
 """
 rgenetics datatypes 
+Use at your peril
 Ross Lazarus
 for the rgenetics and galaxy projects 
 
@@ -10,6 +11,7 @@
 ross lazarus for rgenetics
 august 20 2007
 """
+
 import logging, os, sys, time, tempfile, shutil, string, glob
 import data
 from galaxy import util
@@ -24,186 +26,172 @@
 from galaxy.datatypes.interval import Interval
 from galaxy.util.hash_util import *
 
-log = logging.getLogger(__name__)
+gal_Log = logging.getLogger(__name__)
+verbose = False
 
-class GenomeGraphs(Interval):
+class GenomeGraphs( Tabular ):
+    """
+    Tab delimited data containing a marker id and any number of numeric values
+    """
 
-    """gg version viewable at ucsc of Gff format"""
-    file_ext = "gg"
-    column_names = [ 'Seqname', 'Source', 'Feature', 'Start', 'End', 'Score', 'Strand', 'Frame', 'Group' ]
+    MetadataElement( name="markerCol", default=1, desc="Marker ID column", param=metadata.ColumnParameter )
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True )
+    MetadataElement( name="column_types", default=[], desc="Column types", readonly=True, visible=False )
+    file_ext = 'gg'
 
-    """Add metadata elements"""
-    MetadataElement( name="columns", default=9, desc="Number of columns", readonly=True, visible=False )
-    MetadataElement( name="column_types", default=['str','str','str','int','int','int','str','str','str'], param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False )
-    MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
-    MetadataElement( name="startCol", default=4, desc="Start column", param=metadata.ColumnParameter )
-    MetadataElement( name="endCol", default=5, desc="End column", param=metadata.ColumnParameter )
-    MetadataElement( name="strandCol", desc="Strand column (click box & select)", param=metadata.ColumnParameter, optional=True, no_value=0 )
-    ###do we need to repeat these? they are the same as should be inherited from interval type
+    def __init__(self, **kwd):
+        """
+        Initialize gg datatype, by adding UCSC display apps
+        """
+        Tabular.__init__(self, **kwd)
+        self.add_display_app ( 'ucsc', 'Genome Graph', 'as_ucsc_display_file', 'ucsc_links' )    
+
     
-    def __init__(self, **kwd):
-        """Initialize datatype, by adding GBrowse display app"""
-        Interval.__init__(self, **kwd)
-        self.add_display_app ( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
+    def set_meta(self,dataset,**kwd):
+        Tabular.set_meta( self, dataset, **kwd)
+        dataset.metadata.markerCol = 1
+        header = file(dataset.file_name,'r').readlines()[0].strip().split('\t')
+        dataset.metadata.columns = len(header)
+        t = ['numeric' for x in header]
+	t[0] = 'string'
+        dataset.metadata.column_types = t
+	return True
+
     def as_ucsc_display_file( self, dataset, **kwd ):
-        return open( dataset.file_name )
-    def set_meta( self, dataset, overwrite = True, **kwd ):
-        i = 0
-        for i, line in enumerate( file ( dataset.file_name ) ):
-            line = line.rstrip('\r\n')
-            if line and not line.startswith( '#' ):
-                elems = line.split( '\t' )
-                if len(elems) == 9:
-                    try:
-                        int( elems[3] )
-                        int( elems[4] )
-                        break
-                    except:
-                        pass
-        Interval.set_meta( self, dataset, overwrite = overwrite, skip = i )
+        """
+        Returns file
+        """
+        return file(dataset.file_name,'r')
+
+    def ucsc_links( self, dataset, type, app, base_url ):
+        """ 
+        from the ever-helpful angie hinrichs angie(a)soe.ucsc.edu
+        a genome graphs call looks like this 
+        http://genome.ucsc.edu/cgi-bin/hgGenome?clade=mammal&org=Human&db=hg18&hgGe…
+        &hgGenome_dataSetDescription=test&hgGenome_formatType=best%20guess&hgGenome_markerType=best%20guess
+        &hgGenome_columnLabels=best%20guess&hgGenome_maxVal=&hgGenome_labelVals=
+        &hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=http://galaxy.esphealth.org/datasets/333/display/index
+        &hgGenome_doSubmitUpload=submit
+                Galaxy gives this for an interval file
+        http://genome.ucsc.edu/cgi-bin/hgTracks?db=hg18&position=chr1:1-1000&hgt.cu…
+        http%3A%2F%2Fgalaxy.esphealth.org%2Fdisplay_as%3Fid%3D339%26display_app%3Ducsc
+        """
+        ret_val = []
+        ggtail = 'hgGenome_doSubmitUpload=submit'
+        if not dataset.dbkey:
+              dataset.dbkey = 'hg18' # punt!
+        if dataset.has_data:
+              for site_name, site_url in util.get_ucsc_by_build(dataset.dbkey):
+                    if site_name in app.config.ucsc_display_sites:
+                        site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
+                        internal_url = "%s" % url_for( controller='dataset',
+                                dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
+                        if base_url.startswith( 'https://' ):
+                            base_url = base_url.replace( 'https', 'http', 1 )
+                        display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type) 
+                        display_url = urllib.quote_plus( display_url )
+                        # was display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
+                        #redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
+                        sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
+                        #sl.append("&hgt.customText=%s")
+                        sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
+                        sl.append("&hgGenome_formatType=best guess&hgGenome_markerType=best guess")
+                        sl.append("&hgGenome_columnLabels=first row&hgGenome_maxVal=&hgGenome_labelVals=")
+                        sl.append("&hgGenome_doSubmitUpload=submit")
+                        sl.append("&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=%s" % display_url)
+                        s = ''.join(sl)
+                        s = urllib.quote_plus(s)
+                        redirect_url = s
+                        log.debug('## rg gg ucsc rdurl=%s; s = %s' % (redirect_url,s))
+                        link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
+                        ret_val.append( (site_name, link) )
+        return ret_val
+
     def make_html_table( self, dataset, skipchars=[] ):
-        """Create HTML table, used for displaying peek"""
+        """
+        Create HTML table, used for displaying peek
+        """
+        npeek = 5
         out = ['<table cellspacing="0" cellpadding="3">']
-        comments = []
+        f = open(dataset.file_name,'r')
+        d = [f.next() for x in range(npeek)]
+        hasheader = 0
+        try:
+            test = ['%f' % x for x in d[0][1:]] # first is name - see if starts all numerics
+        except:
+            hasheader = 1
         try:
             # Generate column header
             out.append( '<tr>' )
-            for i, name in enumerate( self.column_names ):
-                out.append( '<th>%s.%s</th>' % ( str( i+1 ), name ) )
-            out.append( self.make_html_peek_rows( dataset, skipchars=skipchars ) )
+	    if hasheader:
+               for i, name in enumerate(d[0].split() ):
+                  out.append( '<th>%s.%s</th>' % ( str( i+1 ), name ) )
+               d.pop(0)
+               out.append('</tr>')
+            for row in d:
+               out.append('<tr>')
+               out.append(''.join(['<td>%s</td>' % x for x in row.split()]))
+               out.append('</tr>')
             out.append( '</table>' )
             out = "".join( out )
         except Exception, exc:
             out = "Can't create peek %s" % exc
         return out
-    def get_estimated_display_viewport( self, dataset ):
+        
+    def validate( self, dataset ):
         """
-        Return a chrom, start, stop tuple for viewing a file.  There are slight differences between gff 2 and gff 3
-        formats.  This function should correctly handle both...
+        Validate a gg file - all numeric after header row
         """
-        if True or (dataset.has_data() and dataset.state == dataset.states.OK):
-            try:
-                seqid = ''
-                start = 2147483647  # Maximum value of a signed 32 bit integer ( 2**31 - 1 )
-                stop = 0
-                for i, line in enumerate( file( dataset.file_name ) ):
-                    if i == 0: # track stuff there
-                        continue
-                    line = line.rstrip( '\r\n' )
-                    if not line:
-                        continue
-                    if not line.startswith( '#' ):
-                        elems = line.split( '\t' )
-                        if not seqid:
-                            # We can only set the viewport for a single chromosome
-                            seqid = elems[0]
-                        if seqid == elems[0]:
-                            # Make sure we have not spanned chromosomes
-                            start = min( start, int( elems[3] ) )
-                            stop = max( stop, int( elems[4] ) )
-                        else:
-                            # We've spanned a chromosome
-                            break
-                    if i > 10: # span 10 features
-                        break
-            except:
-                 seqid, start, stop = ( '', '', '' )
-            return ( seqid, str( start ), str( stop ) )
-        else:
-            return ( '', '', '' )
-    def gbrowse_links( self, dataset, type, app, base_url ):
-        ret_val = []
-        if dataset.has_data:
-            viewport_tuple = self.get_estimated_display_viewport( dataset )
-            seqid = viewport_tuple[0]
-            start = viewport_tuple[1]
-            stop = viewport_tuple[2]
-            if seqid and start and stop:
-                for site_name, site_url in util.get_gbrowse_sites_by_build( dataset.dbkey ):
-                    if site_name in app.config.gbrowse_display_sites:
-                        link = "%s?start=%s&stop=%s&ref=%s&dbkey=%s" % ( site_url, start, stop, seqid, dataset.dbkey )
-                        ret_val.append( ( site_name, link ) )
-        return ret_val
-    def ucsc_links( self, dataset, type, app, base_url ):
-        ret_val = []
-        if dataset.has_data:
-            viewport_tuple = self.get_estimated_display_viewport(dataset)
-            if viewport_tuple:
-                chrom = viewport_tuple[0]
-                start = viewport_tuple[1]
-                stop = viewport_tuple[2]
-                if start == '' or int(start) < 1:
-                    start='1'
-                if stop == '' or int(stop) <= start:
-                    stop = '%d' % (int(start) + 10000)
-                for site_name, site_url in util.get_ucsc_by_build(dataset.dbkey):
-                    if site_name in app.config.ucsc_display_sites:
-                        # HACK: UCSC doesn't support https, so force http even
-                        # if our URL scheme is https.  Making this work
-                        # requires additional hackery in your upstream proxy.
-                        # If UCSC ever supports https, remove this hack.
-                        internal_url = "%s" % url_for( controller='dataset',
-                                dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
-                        if base_url.startswith( 'https://' ):
-                            base_url = base_url.replace( 'https', 'http', 1 )
-                        display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type) )
-                        redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
-                        link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
-                        ret_val.append( (site_name, link) )
-            else:
-                log.debug('@@@ gg ucsc_links - no viewport_tuple')
-        return ret_val
+        errors = list()
+        infile = open(dataset.file_name, "r")
+        header= infile.next() # header
+        for i,row in enumerate(infile):
+           ll = row.strip().split('\t')[1:] # first is alpha feature identifier
+           badvals = []
+           for j,x in enumerate(ll):
+              try:
+                x = float(x)
+              except:
+                badval.append('col%d:%s' % (j+1,x))
+        if len(badvals) > 0:
+            errors.append('row %d, %s' % (' '.join(badvals)))
+            return errors 
+        
     def sniff( self, filename ):
         """
-        Determines whether the file is in gff format
-        
-        GFF lines have nine required fields that must be tab-separated.
+        Determines whether the file is in gg format
         """
         f = open(filename,'r')
-        headers = f.readline().split
-        if headers[0].lower() == 'track':
-            headers = f.readline.split()
+        headers = f.readline().split()
+        rows = [f.readline().split()[1:] for x in range(3)] # small sample
         #headers = get_headers( filename, '\t' )
-        try:
-            if len(headers) < 2:
-                return False
-            for hdr in headers:
-                if hdr and hdr[0].startswith( '##gff-version' ) and hdr[0].find( '2' ) < 0:
-                    return False
-                if hdr and hdr[0] and not hdr[0].startswith( '#' ):
-                    if len(hdr) != 9:
-                        return False
-                    try:
-                        int( hdr[3] )
-                        int( hdr[4] )
-                    except:
-                        return False
-                    if hdr[5] != '.':
-                        try:
-                            score = int(hdr[5])
-                        except:
-                            return False
-                        if (score < 0 or score > 1000):
-                            return False
-                    if hdr[6] not in data.valid_strand:
-                        return False
-            return True
-        except:
-            return False
+        for row in rows:
+            try:
+                nums = [float(x) for x in row] # first col has been removed
+            except:
+                return false
+        return true
+
 
 class rgTabList(Tabular):
-    """
+    """ 
     for sampleid and for featureid lists of exclusions or inclusions in the clean tool
     featureid subsets on statistical criteria -> specialized display such as gg
     """    
     file_ext = "rgTList"
 
+
     def __init__(self, **kwd):
-        """Initialize featurelistt datatype"""
+        """
+        Initialize featurelistt datatype
+        """
         Tabular.__init__( self, **kwd )
         self.column_names = []
+
     def make_html_table( self, dataset, skipchars=[] ):
-        """Create HTML table, used for displaying peek"""
+        """
+        Create HTML table, used for displaying peek
+        """
         out = ['<table cellspacing="0" cellpadding="3">']
         comments = []
         try:
@@ -222,8 +210,9 @@
             out = "Can't create peek %s" % exc
         return out
 
+
 class rgSampleList(rgTabList):
-    """
+    """ 
     for sampleid exclusions or inclusions in the clean tool
     output from QC eg excess het, gender error, ibd pair member,eigen outlier,excess mendel errors,...
     since they can be uploaded, should be flexible
@@ -240,9 +229,8 @@
         self.column_names[0] = 'FID'
         self.column_names[1] = 'IID'
         # this is what Plink wants as at 2009
+   
     def sniff(self,filename):
-        """
-        """
         infile = open(dataset.file_name, "r")
         header= infile.next() # header
         if header[0] == 'FID' and header[1] == 'IID':
@@ -264,12 +252,17 @@
         rgTabList.__init__( self, **kwd )
         for i,s in enumerate(['#FeatureId', 'Chr', 'Genpos', 'Mappos']):
             self.column_names[i] = s
+ 
 
 class Rgenetics(Html):      
     """
-    class to use for rgenetics
+    base class to use for rgenetics datatypes
+    derived from html - composite datatype elements
+    stored in extra files path
     """
-    MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="rgenetics", readonly=True, set_in_upload=True)
+   
+    MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="rgenetics", 
+    readonly=True, set_in_upload=True)
     
     composite_type = 'auto_primary_file'
     allow_datatype_change = False
@@ -279,16 +272,22 @@
         rval = ['<html><head><title>Rgenetics Galaxy Composite Dataset </title></head><p/>']
         rval.append('<div>This composite dataset is composed of the following files:<p/><ul>')
         for composite_name, composite_file in self.get_composite_files( dataset = dataset ).iteritems():
+            fn = composite_name
             opt_text = ''
             if composite_file.optional:
                 opt_text = ' (optional)'
-            rval.append( '<li><a href="%s" type="application/binary">%s</a>%s' % ( composite_name, composite_name, opt_text ) )
+            if composite_file.get('description'):
+	        rval.append( '<li><a href="%s" type="application/binary">%s (%s)</a>%s</li>' % ( fn, fn, composite_file.get('description'), opt_text ) )
+            else:
+		rval.append( '<li><a href="%s" type="application/binary">%s</a>%s</li>' % ( fn, fn, opt_text ) )
         rval.append( '</ul></div></html>' )
         return "\n".join( rval )
+
     def regenerate_primary_file(self,dataset):
         """
         cannot do this until we are setting metadata 
         """
+        guessmt = {'.log':'text/plain','.ped':'text/plain', '.map':'text/plain','.out':'text/plain','.in':'text/plain'}
         def fix(oldpath,newbase):
            old,e = os.path.splitext(oldpath)
            head,rest = os.path.split(old)
@@ -301,44 +300,45 @@
         efp = dataset.extra_files_path
         flist = os.listdir(efp)
         proper_base = bn
-        rval = ['<html><head><title>Files for Composite Dataset %s</title></head><p/>Comprises the following files:<p/><ul>' % (bn)]
+        rval = ['<html><head><title>Files for Composite Dataset %s</title></head><p/>Composite %s contains the following files:<p/><ul>' % (dataset.name,dataset.name)]
         for i,fname in enumerate(flist):
             newpath = fix(os.path.join(efp,fname),proper_base)
             sfname = os.path.split(newpath)[-1] 
-            rval.append( '<li><a href="%s">%s</a>' % ( sfname, sfname ) )
+            f,e = os.path.splitext(fname)
+            mt = guessmt.get(e,'application/binary')
+            rval.append( '<li><a href="%s" mimetype="%s">%s</a></li>' % ( sfname, mt, sfname) )
         rval.append( '</ul></html>' )
         f = file(dataset.file_name,'w')
         f.write("\n".join( rval ))
         f.write('\n')
         f.close()
+
     def set_meta( self, dataset, **kwd ):
+
         """
         for lped/pbed eg
+
         """
+        Html.set_meta( self, dataset, **kwd )
         if kwd.get('overwrite') == False:
-            #log.debug('@@@ rgenetics set_meta called with overwrite = False')
+            if verbose:
+                gal_Log.debug('@@@ rgenetics set_meta called with overwrite = False')
             return True
         try:
             efp = dataset.extra_files_path
         except: 
-            #log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0], dataset.name))
+            if verbose:                
+               gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0], dataset.name))
             return False
         try:
             flist = os.listdir(efp)
         except:
-            #log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0],dataset.name))
+            if verbose: gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0],dataset.name))
             return False
         if len(flist) == 0:
-            #log.debug('@@@rgenetics set_meta failed - %s efp %s is empty?' % (dataset.name,efp))
+            if verbose:
+                gal_Log.debug('@@@rgenetics set_meta failed - %s efp %s is empty?' % (dataset.name,efp))
             return False
-        bn = None
-        for f in flist:
-           n,e = os.path.splitext(f)[0]                  
-           if (not bn) and e in ('.ped','.map','.bim','.fam'):
-                bn = n
-                dataset.metadata.base_name = bn
-        if not bn:
-            bn = '?'
         self.regenerate_primary_file(dataset)
         if not dataset.info:           
                 dataset.info = 'Galaxy genotype datatype object'
@@ -346,22 +346,23 @@
                dataset.blurb = 'Composite file - Rgenetics Galaxy toolkit'
         return True
 
+
 class SNPMatrix(Rgenetics):
     """
-    fake class to distinguish different species of Rgenetics data collections
+    BioC SNPMatrix Rgenetics data collections
     """
     file_ext="snpmatrix"
 
-    def set_peek( self, dataset, is_multi_byte=False ):
+    def set_peek( self, dataset, **kwd ):
         if not dataset.dataset.purged:
             dataset.peek  = "Binary RGenetics file"
             dataset.blurb = data.nice_size( dataset.get_size() )
         else:
             dataset.peek = 'file does not exist'
             dataset.blurb = 'file purged from disk'
+            
     def sniff(self,filename):
-        """
-        need to check the file header hex code
+        """ need to check the file header hex code
         """
         infile = open(dataset.file_name, "b")
         head = infile.read(16)
@@ -371,9 +372,10 @@
         else:
             return True
 
+
 class Lped(Rgenetics):
     """
-    fake class to distinguish different species of Rgenetics data collections
+    linkage pedigree (ped,map) Rgenetics data collections
     """
     file_ext="lped"
     
@@ -382,25 +384,24 @@
         self.add_composite_file( '%s.ped', description = 'Pedigree File', substitute_name_with_metadata = 'base_name', is_binary = True )
         self.add_composite_file( '%s.map', description = 'Map File', substitute_name_with_metadata = 'base_name', is_binary = True )
 
+
 class Pphe(Rgenetics):
     """
-    fake class to distinguish different species of Rgenetics data collections
+    Plink phenotype file - header must have FID\tIID... Rgenetics data collections
     """
     file_ext="pphe"
 
     def __init__( self, **kwd ):
         Rgenetics.__init__(self, **kwd)
-        self.add_composite_file( '%s.pphe', description = 'Plink Phenotype File', substitute_name_with_metadata = 'base_name' )
+        self.add_composite_file( '%s.pphe', description = 'Plink Phenotype File', substitute_name_with_metadata = 'base_name', is_binary = True )
 
-class Lmap(Rgenetics):
-    """
-    fake class to distinguish different species of Rgenetics data collections
-    """
-    file_ext="lmap"
+
+
 
 class Fphe(Rgenetics):
     """
-    fake class to distinguish different species of Rgenetics data collections
+    fbat pedigree file - mad format with ! as first char on header row
+    Rgenetics data collections
     """
     file_ext="fphe"
 
@@ -410,7 +411,7 @@
 
 class Phe(Rgenetics):
     """
-    fake class to distinguish different species of Rgenetics data collections
+    Phenotype file
     """
     file_ext="phe"
 
@@ -418,9 +419,12 @@
         Rgenetics.__init__(self, **kwd)
         self.add_composite_file( '%s.phe', description = 'Phenotype File', substitute_name_with_metadata = 'base_name' )
 
+
+
 class Fped(Rgenetics):
     """
-    fake class to distinguish different species of Rgenetics data collections
+    FBAT pedigree format - single file, map is header row of rs numbers. Strange.
+    Rgenetics data collections
     """
     file_ext="fped"
 
@@ -428,9 +432,10 @@
         Rgenetics.__init__(self, **kwd)
         self.add_composite_file( '%s.fped', description = 'FBAT format pedfile', substitute_name_with_metadata = 'base_name' )
 
+
 class Pbed(Rgenetics):
     """
-    fake class to distinguish different species of Rgenetics data collections
+    Plink Binary compressed 2bit/geno Rgenetics data collections
     """
     file_ext="pbed"
     
@@ -442,7 +447,9 @@
 
 class Eigenstratgeno(Rgenetics):
     """
-    fake class to distinguish different species of Rgenetics data collections
+    Eigenstrat format - may be able to get rid of this
+    if we move to shellfish
+    Rgenetics data collections
     """
     file_ext="eigenstratgeno"
     
@@ -451,10 +458,13 @@
         self.add_composite_file( '%s.eigenstratgeno', substitute_name_with_metadata = 'base_name', is_binary = True )
         self.add_composite_file( '%s.ind', substitute_name_with_metadata = 'base_name', is_binary = True )
         self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name', is_binary = True )
+        
+
 
 class Eigenstratpca(Rgenetics):
     """
-    fake class to distinguish different species of Rgenetics data collections
+    Eigenstrat PCA file for case control adjustment
+    Rgenetics data collections
     """
     file_ext="eigenstratpca"
 
@@ -462,18 +472,21 @@
         Rgenetics.__init__(self, **kwd)
         self.add_composite_file( '%s.eigenstratpca', description = 'Eigenstrat PCA file', substitute_name_with_metadata = 'base_name' )
 
+
 class Snptest(Rgenetics):
     """
-    fake class to distinguish different species of Rgenetics data collections
+    BioC snptest Rgenetics data collections
     """
     file_ext="snptest"
 
+
 class Pheno(Tabular):
     """
     base class for pheno files
     """
     file_ext = 'pheno'
 
+
 class RexpBase( Html ):
     """
     base class for BioC data structures in Galaxy 
@@ -492,16 +505,19 @@
     composite_type = 'auto_primary_file'
     allow_datatype_change = False
     
+    
     def __init__( self, **kwd ):
         Html.__init__(self,**kwd)
         self.add_composite_file( '%s.pheno', description = 'Phenodata tab text file', 
           substitute_name_with_metadata = 'base_name', is_binary=True)
+
     def generate_primary_file( self, dataset = None ):
-        """
+        """ 
         This is called only at upload to write the html file
         cannot rename the datasets here - they come with the default unfortunately
         """
         return '<html><head></head><body>AutoGenerated Primary File for Composite Dataset</body></html>'
+    
     def get_phecols(self, phenolist=[], maxConc=20):
         """
         sept 2009: cannot use whitespace to split - make a more complex structure here
@@ -527,7 +543,7 @@
             else:
                 for col,code in enumerate(row): # keep column order correct
                     if col >= totcols:
-                          log.warning('### get_phecols error in pheno file - row %d col %d (%s) longer than header %s' % (nrows, col, row, head))
+                          gal_Log.warning('### get_phecols error in pheno file - row %d col %d (%s) longer than header %s' % (nrows, col, row, head))
                     else:
                         concordance[col].setdefault(code,0) # first one is zero
                         concordance[col][code] += 1 
@@ -573,6 +589,8 @@
             res = [('no usable phenotype columns found',[('?',0),]),]     
         return res
 
+    
+
     def get_pheno(self,dataset):
         """
         expects a .pheno file in the extra_files_dir - ugh
@@ -591,12 +609,12 @@
         else:
             p = []
         return '\n'.join(p)
-    def set_peek( self, dataset, is_multi_byte=False ):
+
+    def set_peek( self, dataset, **kwd ):
         """
         expects a .pheno file in the extra_files_dir - ugh
         note that R is wierd and does not include the row.name in
-        the header. why?
-        """
+        the header. why?"""
         if not dataset.dataset.purged:
             pp = os.path.join(dataset.extra_files_path,'%s.pheno' % dataset.metadata.base_name)
             try:
@@ -608,14 +626,18 @@
         else:
             dataset.peek = 'file does not exist\n'
             dataset.blurb = 'file purged from disk'
+
     def get_peek( self, dataset ):
-        """expects a .pheno file in the extra_files_dir - ugh"""
+        """
+        expects a .pheno file in the extra_files_dir - ugh
+        """
         pp = os.path.join(dataset.extra_files_path,'%s.pheno' % dataset.metadata.base_name)
         try:
             p = file(pp,'r').readlines()
         except:
             p = ['##failed to find %s' % pp]
         return ''.join(p[:5])
+
     def get_file_peek(self,filename):
         """
         can't really peek at a filename - need the extra_files_path and such?
@@ -626,8 +648,10 @@
         except:
             pass
         return ''.join(h[:5])
+
     def regenerate_primary_file(self,dataset):
-        """cannot do this until we are setting metadata 
+        """
+        cannot do this until we are setting metadata 
         """
         bn = dataset.metadata.base_name
         flist = os.listdir(dataset.extra_files_path)
@@ -640,28 +664,34 @@
         f.write("\n".join( rval ))
         f.write('\n')
         f.close()
+
     def init_meta( self, dataset, copy_from=None ):
-        """Add metadata elements"""
         if copy_from:
             dataset.metadata = copy_from.metadata     
+
     def set_meta( self, dataset, **kwd ):         
+
         """
         NOTE we apply the tabular machinary to the phenodata extracted
         from a BioC eSet or affybatch.
+
         """
+        Html.set_meta(self, dataset, **kwd)
         try:
             flist = os.listdir(dataset.extra_files_path)
         except:
-            #log.debug('@@@rexpression set_meta failed - no dataset?')
+            if verbose:
+                gal_Log.debug('@@@rexpression set_meta failed - no dataset?')
             return False
-        bn = None
-        for f in flist:
-           n = os.path.splitext(f)[0]
-           if not bn:
-                bn = n
-                dataset.metadata.base_name = bn
+        bn = dataset.metadata.base_name
+        if not bn:
+           for f in flist:
+               n = os.path.splitext(f)[0]
+               bn = n
+               dataset.metadata.base_name = bn
         if not bn:
             bn = '?'
+            dataset.metadata.base_name = bn
         pn = '%s.pheno' % (bn)
         pp = os.path.join(dataset.extra_files_path,pn)
         dataset.metadata.pheno_path=pp
@@ -680,7 +710,7 @@
             dataset.metadata.column_names = []
             dataset.metadata.columns = 0
             dataset.peek = 'No pheno file found'
-        if len(pf) > 1:
+        if pf and len(pf) > 1:
             dataset.metadata.pheCols = self.get_phecols(phenolist=pf)
         else:
             dataset.metadata.pheCols = [('','No useable phenotypes found',False),]
@@ -690,8 +720,11 @@
         if not dataset.blurb:
                dataset.blurb = 'R loadable BioC expression object for the Rexpression Galaxy toolkit'
         return True
+            
     def make_html_table( self, pp='nothing supplied from peek\n'):
-        """Create HTML table, used for displaying peek"""
+        """
+        Create HTML table, used for displaying peek
+        """
         out = ['<table cellspacing="0" cellpadding="3">',]
         p = pp.split('\n')
         try:
@@ -712,25 +745,37 @@
         except Exception, exc:
             out = "Can't create html table %s" % str( exc )
         return out
+    
     def display_peek( self, dataset ):
-        """Returns formatted html of peek"""
+        """
+        Returns formatted html of peek
+        """
         out=self.make_html_table(dataset.peek)
         return out
+    
     def get_mime(self):
-        """Returns the mime type of the datatype"""
+        """
+        Returns the mime type of the datatype
+        """
         return 'text/html'
+    
 
 class Affybatch( RexpBase ):
-    """derived class for BioC data structures in Galaxy """
+    """
+    derived class for BioC data structures in Galaxy 
+    """
+
     file_ext = "affybatch"
 
     def __init__( self, **kwd ):
         RexpBase.__init__(self, **kwd)
         self.add_composite_file( '%s.affybatch', description = 'AffyBatch R object saved to file', 
         substitute_name_with_metadata = 'base_name', is_binary=True )
-
+    
 class Eset( RexpBase ):
-    """derived class for BioC data structures in Galaxy """
+    """
+    derived class for BioC data structures in Galaxy 
+    """
     file_ext = "eset"
 
     def __init__( self, **kwd ):
@@ -738,8 +783,11 @@
         self.add_composite_file( '%s.eset', description = 'ESet R object saved to file', 
         substitute_name_with_metadata = 'base_name', is_binary = True )
 
+
 class MAlist( RexpBase ):
-    """derived class for BioC data structures in Galaxy """
+    """
+    derived class for BioC data structures in Galaxy 
+    """
     file_ext = "malist"    
 
     def __init__( self, **kwd ):
@@ -747,6 +795,8 @@
         self.add_composite_file( '%s.malist', description = 'MAlist R object saved to file', 
         substitute_name_with_metadata = 'base_name', is_binary = True )
 
+
 if __name__ == '__main__':
     import doctest, sys
     doctest.testmod(sys.modules[__name__])
+
diff -r 9701e5ee128d -r 70930ea26347 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py	Wed Mar 10 14:25:34 2010 -0500
+++ b/lib/galaxy/tools/parameters/grouping.py	Wed Mar 10 16:09:37 2010 -0500
@@ -99,6 +99,10 @@
         self.default_file_type = 'txt'
         self.file_type_to_ext = { 'auto':self.default_file_type }
         self.metadata_ref = 'files_metadata'
+    def get_file_base_name( self, context ):
+        log.debug('### uploadDataset get base name context = %s' % str(context))
+        fd = context.get('files_metadata|base_name','?')
+        return fd
     def get_file_type( self, context ):
         return context.get( self.file_type_name, self.default_file_type )
     def get_datatype_ext( self, trans, context ):
@@ -291,15 +295,13 @@
                 temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file( dataset ) ), prefix='upload_auto_primary_file' )
                 dataset.primary_file = temp_name
                 dataset.space_to_tab = False
-                dataset.precreated_name = dataset.name = 'Uploaded Composite Dataset (%s)' % ( file_type )
+                dataset.precreated_name = dataset.name = dataset.metadata['base_name'] # was 'Uploaded Composite Dataset (%s)' % ( file_type )
             else:
                 file_bunch, warnings = get_one_filename( groups_incoming[ 0 ] )
-                if dataset.datatype.composite_type:
-                    precreated_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
                 writable_files_offset = 1
                 dataset.primary_file = file_bunch.path
                 dataset.space_to_tab = file_bunch.space_to_tab
-                dataset.precreated_name = file_bunch.precreated_name
+                dataset.precreated_name = dataset.metadata['base_name'] # file_bunch.precreated_name
                 dataset.name = file_bunch.precreated_name
                 dataset.warnings.extend( file_bunch.warnings )
             if dataset.primary_file is None:#remove this before finish, this should create an empty dataset
diff -r 9701e5ee128d -r 70930ea26347 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py	Wed Mar 10 14:25:34 2010 -0500
+++ b/lib/galaxy/web/controllers/dataset.py	Wed Mar 10 16:09:37 2010 -0500
@@ -1,4 +1,4 @@
-import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib
+import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib, tempfile, zipfile, glob
 
 from galaxy.web.base.controller import *
 from galaxy.web.framework.helpers import time_ago, iff, grids
@@ -7,11 +7,30 @@
 from galaxy.datatypes.display_applications.util import encode_dataset_user, decode_dataset_user
 
 from email.MIMEText import MIMEText
-
 import pkg_resources; 
 pkg_resources.require( "Paste" )
 import paste.httpexceptions
 
+tmpd = tempfile.mkdtemp()
+comptypes=[]
+ziptype = '32'
+tmpf = os.path.join( tmpd, 'compression_test.zip' )
+try:
+    archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+    archive.close()
+    comptypes.append( 'zip' )
+    ziptype = '64'
+except RuntimeError:
+    log.exception( "Compression error when testing zip compression. This option will be disabled for library downloads." )
+except (TypeError, zipfile.LargeZipFile):    # ZIP64 is only in Python2.5+.  Remove TypeError when 2.4 support is dropped
+    log.warning( 'Max zip file size is 2GB, ZIP64 not supported' )    
+    comptypes.append( 'zip' )
+try:
+    os.unlink( tmpf )
+except OSError:
+    pass
+os.rmdir( tmpd )
+
 log = logging.getLogger( __name__ )
 
 error_report_template = """
@@ -182,6 +201,97 @@
         return 'This link may not be followed from within Galaxy.'
     
     @web.expose
+    def archive_composite_dataset( self, trans, data=None, **kwd ):
+        # save a composite object into a compressed archive for downloading
+        params = util.Params( kwd )
+        if (params.do_action == None):
+     	    params.do_action = 'zip' # default
+        msg = util.restore_text( params.get( 'msg', ''  ) )
+        messagetype = params.get( 'messagetype', 'done' )
+        if not data:
+            msg = "You must select at least one dataset"
+            messagetype = 'error'
+        else:
+            error = False
+            try:
+                if (params.do_action == 'zip'): 
+                    # Can't use mkstemp - the file must not exist first
+                    tmpd = tempfile.mkdtemp()
+                    tmpf = os.path.join( tmpd, 'library_download.' + params.do_action )
+                    if ziptype == '64':
+                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+                    else:
+                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED )
+                    archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
+                elif params.do_action == 'tgz':
+                    archive = util.streamball.StreamBall( 'w|gz' )
+                elif params.do_action == 'tbz':
+                    archive = util.streamball.StreamBall( 'w|bz2' )
+            except (OSError, zipfile.BadZipFile):
+                error = True
+                log.exception( "Unable to create archive for download" )
+                msg = "Unable to create archive for %s for download, please report this error" % data.name
+                messagetype = 'error'
+            if not error:
+                current_user_roles = trans.get_current_user_roles()
+                ext = data.extension
+                path = data.file_name
+                fname = os.path.split(path)[-1]
+                basename = data.metadata.base_name
+                efp = data.extra_files_path
+                htmlname = os.path.splitext(data.name)[0]
+                if not htmlname.endswith(ext):
+                    htmlname = '%s_%s' % (htmlname,ext)
+                archname = '%s.html' % htmlname # fake the real nature of the html file
+                try:
+                    archive.add(data.file_name,archname)
+                except IOError:
+                    error = True
+                    log.exception( "Unable to add composite parent %s to temporary library download archive" % data.file_name)
+                    msg = "Unable to create archive for download, please report this error"
+                    messagetype = 'error'
+                flist = glob.glob(os.path.join(efp,'*.*')) # glob returns full paths
+                for fpath in flist:
+                    efp,fname = os.path.split(fpath)
+                    try:
+                        archive.add( fpath,fname )
+                    except IOError:
+                        error = True
+                        log.exception( "Unable to add %s to temporary library download archive" % fname)
+                        msg = "Unable to create archive for download, please report this error"
+                        messagetype = 'error'
+                        continue
+                if not error:    
+                    if params.do_action == 'zip':
+                        archive.close()
+                        tmpfh = open( tmpf )
+                        # clean up now
+                        try:
+                            os.unlink( tmpf )
+                            os.rmdir( tmpd )
+                        except OSError:
+                            error = True
+                            msg = "Unable to remove temporary library download archive and directory"
+                            log.exception( msg )
+                            messagetype = 'error'
+                        if not error:
+                            trans.response.set_content_type( "application/x-zip-compressed" )
+                            trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyCompositeObject.zip" 
+                            return tmpfh
+                    else:
+                        trans.response.set_content_type( "application/x-tar" )
+                        outext = 'tgz'
+                        if params.do_action == 'tbz':
+                            outext = 'tbz'
+                        trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % outext 
+                        archive.wsgi_status = trans.response.wsgi_status()
+                        archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+                        return archive.stream
+        return trans.show_error_message( msg )
+
+
+    
+    @web.expose
     def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd):
         """Catches the dataset id and displays file contents as directed"""
         
@@ -219,15 +329,19 @@
             trans.log_event( "Display dataset id: %s" % str( dataset_id ) )
             
             if to_ext: # Saving the file
-                trans.response.headers['Content-Length'] = int( os.stat( data.file_name ).st_size )
-                if to_ext[0] != ".":
-                    to_ext = "." + to_ext
-                valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
-                fname = data.name
-                fname = ''.join(c in valid_chars and c or '_' for c in fname)[0:150]
-                trans.response.headers["Content-Disposition"] = "attachment; filename=GalaxyHistoryItem-%s-[%s]%s" % (data.hid, fname, to_ext)
-                return open( data.file_name )
-                
+                composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
+                composite_extensions.append('html')
+                if data.ext in composite_extensions:
+                    return self.archive_composite_dataset( trans, data, **kwd )
+                else:                    
+                    trans.response.headers['Content-Length'] = int( os.stat( data.file_name ).st_size )
+                    if to_ext[0] != ".":
+                        to_ext = "." + to_ext
+                    valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+                    fname = data.name
+                    fname = ''.join(c in valid_chars and c or '_' for c in fname)[0:150]
+                    trans.response.headers["Content-Disposition"] = "attachment; filename=GalaxyHistoryItem-%s-[%s]%s" % (data.hid, fname, to_ext)
+                    return open( data.file_name )
             if os.path.exists( data.file_name ):
                 max_peek_size = 1000000 # 1 MB
                 if preview and os.stat( data.file_name ).st_size > max_peek_size:
@@ -367,7 +481,10 @@
             raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
         if 'display_url' not in kwd or 'redirect_url' not in kwd:
             return trans.show_error_message( 'Invalid parameters specified for "display at" link, please contact a Galaxy administrator' )
-        redirect_url = kwd['redirect_url'] % urllib.quote_plus( kwd['display_url'] )
+        try:
+              redirect_url = kwd['redirect_url'] % urllib.quote_plus( kwd['display_url'] )
+        except:
+              redirect_url = kwd['redirect_url'] # not all will need custom text
         current_user_roles = trans.get_current_user_roles()
         if trans.app.security_agent.dataset_is_public( data.dataset ):
             return trans.response.send_redirect( redirect_url ) # anon access already permitted by rbac
@@ -591,4 +708,3 @@
             status = SUCCESS
             message = done_msg
         return status, message
-        
\ No newline at end of file
diff -r 9701e5ee128d -r 70930ea26347 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py	Wed Mar 10 14:25:34 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py	Wed Mar 10 16:09:37 2010 -0500
@@ -1098,6 +1098,7 @@
     def download_dataset_from_folder( self, trans, cntrller, id, library_id=None, **kwd ):
         """Catches the dataset id and displays file contents as directed"""
         show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        params = util.Params( kwd )        
         use_panels = util.string_as_bool( params.get( 'use_panels', False ) )
         ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( id ) )
         if not ldda.dataset:
diff -r 9701e5ee128d -r 70930ea26347 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py	Wed Mar 10 14:25:34 2010 -0500
+++ b/test/base/twilltestcase.py	Wed Mar 10 16:09:37 2010 -0500
@@ -23,6 +23,8 @@
 log = logging.getLogger( __name__ )
 
 class TwillTestCase( unittest.TestCase ):
+    composite_extensions = ['html','lped','pbed','fped','pphe','eigenstratgeno','eset','affybatch','malist','test-data' ]
+
 
     def setUp( self ):
         # Security helper
@@ -61,9 +63,10 @@
             else:
                 files_differ = True
             if files_differ:
-                allowed_diff_count = attributes.get( 'lines_diff', 0 )
+                allowed_diff_count = int(attributes.get( 'lines_diff', 0 ))
                 diff = list( difflib.unified_diff( local_file, history_data, "local_file", "history_data" ) )
                 diff_lines = get_lines_diff( diff )
+                log.debug('## files diff on %s and %s lines_diff=%d, found diff = %d' % (file1,file2,allowed_diff_count,diff_lines))
                 if diff_lines > allowed_diff_count:
                     diff_slice = diff[0:40]
                     #FIXME: This pdf stuff is rather special cased and has not been updated to consider lines_diff 
@@ -75,7 +78,7 @@
                         # PDF files contain creation dates, modification dates, ids and descriptions that change with each
                         # new file, so we need to handle these differences.  As long as the rest of the PDF file does
                         # not differ we're ok.
-                        valid_diff_strs = [ 'description', 'createdate', 'creationdate', 'moddate', 'id' ]
+                        valid_diff_strs = [ 'description', 'createdate', 'creationdate', 'moddate', 'id', 'producer', 'creator' ]
                         valid_diff = False
                         for line in diff_slice:
                             # Make sure to lower case strings before checking.
@@ -109,7 +112,7 @@
             attributes = {}
         if attributes.get( 'sort', False ):
             history_data.sort()
-        lines_diff = attributes.get( 'lines_diff', 0 )
+        lines_diff = int(attributes.get( 'lines_diff', 0 ))
         line_diff_count = 0
         diffs = []
         for i in range( len( history_data ) ):
@@ -194,36 +197,7 @@
                 raise AssertionError, "Invalid hid (%s) created when pasting %s" % ( hid, url_paste )
         # Wait for upload processing to finish (TODO: this should be done in each test case instead)
         self.wait()
-    def upload_composite_datatype_file( self, ftype, ped_file='', map_file='', bim_file='', bed_file='', fam_file='', dbkey='unspecified (?)', base_name='rgenetics' ):
-        """Tests uploading either of 2 different composite data types ( lped and pbed )"""
-        self.visit_url( "%s/tool_runner/index?tool_id=upload1" % self.url )
-        # Handle refresh_on_change
-        self.refresh_form( "file_type", ftype )
-        tc.fv( "1", "dbkey", dbkey )
-        tc.fv( "1", "files_metadata|base_name", base_name )
-        if ftype == 'lped':
-            # lped data types include a ped_file and a map_file
-            ped_file = self.get_filename( ped_file )
-            tc.formfile( "1", "files_0|file_data", ped_file )
-            map_file = self.get_filename( map_file )
-            tc.formfile( "1", "files_1|file_data", map_file )
-        elif ftype == 'pbed':
-            # pbed data types include a bim_file, a bed_file and a fam_file
-            bim_file = self.get_filename( bim_file )
-            tc.formfile( "1", "files_0|file_data", bim_file )
-            bed_file = self.get_filename( bed_file )
-            tc.formfile( "1", "files_1|file_data", bed_file )
-            fam_file = self.get_filename( fam_file )
-            tc.formfile( "1", "files_2|file_data", fam_file )
-        else:
-            raise AssertionError, "Unsupported composite data type (%s) received, currently only lped and pbed data types are supported." % ftype
-        tc.submit( "runtool_btn" )
-        self.check_page_for_string( 'The following job has been succesfully added to the queue:' )
-        check_str = 'Uploaded Composite Dataset (%s)' % ftype
-        self.check_page_for_string( check_str )
-        # Wait for upload processing to finish (TODO: this should be done in each test case instead)
-        self.wait()
-        self.check_history_for_string( check_str )
+
     # Functions associated with histories
     def check_history_for_errors( self ):
         """Raises an exception if there are errors in a history"""
@@ -672,7 +646,7 @@
     def verify_composite_datatype_file_content( self, file_name, hda_id, base_name = None, attributes = None ):
         local_name = self.get_filename( file_name )
         if base_name is None:
-            base_name = file_name
+            base_name = os.path.split(file_name)[-1]
         temp_name = self.get_filename( '%s_temp' % file_name ) #This is a terrible way to generate a temp name
         self.visit_url( "%s/datasets/%s/display/%s" % ( self.url, self.security.encode_id( hda_id ), base_name ) )
         data = self.last_page()
@@ -915,9 +889,14 @@
         # To help with debugging a tool, print out the form controls when the test fails
         print "form '%s' contains the following controls ( note the values )" % f.name
         control_names = []
+        hidden_control_names = [] # cannot change these, so ignore or many complex page tool tests will fail
+        hc_prefix = '<HiddenControl('
         for i, control in enumerate( f.controls ):
-            print "control %d: %s" % ( i, str( control ) )
-            try:
+           print "control %d: %s" % ( i, str( control ) )
+           if hc_prefix in str(control):
+                hidden_control_names.append(control.name) # cannot do much with these
+           else: 
+              try:
                 #check if a repeat element needs to be added
                 if control.name not in kwd and control.name.endswith( '_add' ):
                     #control name doesn't exist, could be repeat
@@ -946,12 +925,14 @@
                         # Submit for refresh
                         tc.submit( '___refresh_grouping___' )
                         return self.submit_form( form_no=form_no, button=button, **kwd )
-            except Exception, e:
+              except Exception, e:
                 log.debug( "In submit_form, continuing, but caught exception: %s" % str( e ) )
                 continue
-            control_names.append( control.name )
+              control_names.append( control.name )
         # No refresh_on_change attribute found in current form, so process as usual
         for control_name, control_value in kwd.items():
+            if control_name in hidden_control_names:
+                continue # these cannot be handled safely - cause the test to barf out
             if not isinstance( control_value, list ):
                 control_value = [ control_value ]
             try:
                    
                  
                  
                          
                            
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/9701e5ee128d
changeset: 3506:9701e5ee128d
user:      Nate Coraor <nate(a)bx.psu.edu>
date:      Wed Mar 10 14:25:34 2010 -0500
description:
Now that cntrller is passed as a POST var in the upload form, it's not necessary to have the library_admin and library controller upload convenience methods.  This should fix broken library tests on the production buildbot.
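Since cntrller now arrives as an ordinary POST variable, a single handler in library_common can branch on it instead of each controller wrapping the call. A minimal sketch of that dispatch idea, with hypothetical names rather than Galaxy's actual controller code:

    # Minimal sketch (hypothetical names): one shared upload handler that
    # branches on the 'cntrller' value posted with the form, instead of
    # duplicating upload_library_dataset in library and library_admin.
    def upload_library_dataset(cntrller, library_id, folder_id, **kwd):
        is_admin = (cntrller == 'library_admin')
        # permission checks and the actual upload would happen here
        return 'upload into library %s, folder %s via the %s view (admin=%s)' % (
            library_id, folder_id, cntrller, is_admin)

    print upload_library_dataset('library', 'lib1', 'f1')
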
diffstat:
 lib/galaxy/web/controllers/library.py        |  3 ---
 lib/galaxy/web/controllers/library_admin.py  |  4 ----
 lib/galaxy/web/controllers/library_common.py |  8 ++------
 3 files changed, 2 insertions(+), 13 deletions(-)
diffs (39 lines):
diff -r 9efe896dbb17 -r 9701e5ee128d lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py	Wed Mar 10 11:51:00 2010 -0500
+++ b/lib/galaxy/web/controllers/library.py	Wed Mar 10 14:25:34 2010 -0500
@@ -34,6 +34,3 @@
                                     default_action=params.get( 'default_action', None ),
                                     msg=msg,
                                     messagetype=messagetype )
-    @web.expose
-    def upload_library_dataset( self, trans, library_id, folder_id, **kwd ):
-        return trans.webapp.controllers[ 'library_common' ].upload_library_dataset( trans, 'library', library_id, folder_id, **kwd )
diff -r 9efe896dbb17 -r 9701e5ee128d lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py	Wed Mar 10 11:51:00 2010 -0500
+++ b/lib/galaxy/web/controllers/library_admin.py	Wed Mar 10 14:25:34 2010 -0500
@@ -235,7 +235,3 @@
                                                               show_deleted=show_deleted,
                                                               msg=msg,
                                                               messagetype=status ) )
-    @web.expose
-    @web.require_admin
-    def upload_library_dataset( self, trans, library_id, folder_id, **kwd ):
-        return trans.webapp.controllers[ 'library_common' ].upload_library_dataset( trans, 'library_admin', library_id, folder_id, **kwd )
diff -r 9efe896dbb17 -r 9701e5ee128d lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py	Wed Mar 10 11:51:00 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py	Wed Mar 10 14:25:34 2010 -0500
@@ -804,12 +804,8 @@
         if upload_option == 'upload_file' and trans.app.config.nginx_upload_path:
             # url_for is intentionally not used on the base URL here -
             # nginx_upload_path is expected to include the proxy prefix if the
-            # administrator intends for it to be part of the URL.  We also
-            # redirect to the library or library_admin controller rather than
-            # library_common because GET arguments can't be used in conjunction
-            # with nginx upload (nginx can't do percent decoding without a
-            # bunch of hacky rewrite rules).
-            action = trans.app.config.nginx_upload_path + '?nginx_redir=' + web.url_for( controller=cntrller, action='upload_library_dataset' )
+            # administrator intends for it to be part of the URL.
+            action = trans.app.config.nginx_upload_path + '?nginx_redir=' + web.url_for( controller='library_common', action='upload_library_dataset' )
         return trans.fill_template( '/library/common/upload.mako',
                                     cntrller=cntrller,
                                     upload_option=upload_option,
                    
                  
                  
                          
                            
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/9efe896dbb17
changeset: 3505:9efe896dbb17
user:      Kelly Vincent <kpvincent(a)bx.psu.edu>
date:      Wed Mar 10 11:51:00 2010 -0500
description:
Fix try-except-finally usage to be compatible with Python 2.4 in several tools and twilltestcase.py. Updated the liftOver test to replace a hard-coded (incorrect) path and modified the code file to allow for the new path. Improved bowtie error handling.
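The underlying incompatibility is that Python 2.4 does not accept except and finally clauses on the same try statement; the combined form only arrived in Python 2.5. The fix used throughout this changeset is to nest a try/except inside a try/finally so cleanup still always runs. A minimal, self-contained illustration of the pattern (not taken from the diff itself):

    import os, tempfile

    fd, temp_name = tempfile.mkstemp()
    os.close(fd)
    try:
        # Python 2.4 rejects "try/except/finally" as a single statement,
        # so the except clause is nested inside an outer try/finally.
        try:
            value = int('not a number')  # raises ValueError
        except ValueError, err:
            print 'caught expected error: %s' % err
    finally:
        os.remove(temp_name)  # always runs, even if the except block re-raised
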
diffstat:
 test/base/twilltestcase.py             |  70 ++++++++++++++------------
 tools/extract/liftOver_wrapper.py      |  17 ++++--
 tools/extract/liftOver_wrapper.xml     |   9 +--
 tools/extract/liftOver_wrapper_code.py |   6 +-
 tools/samtools/sam_pileup.py           |  44 ++++++++--------
 tools/sr_mapping/bowtie_wrapper.py     |  72 ++++++++++++++++++---------
 tools/sr_mapping/bwa_wrapper.py        |  88 +++++++++++++++++----------------
 7 files changed, 171 insertions(+), 135 deletions(-)
diffs (452 lines):
diff -r f84112d155c0 -r 9efe896dbb17 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py	Wed Mar 10 11:28:50 2010 -0500
+++ b/test/base/twilltestcase.py	Wed Mar 10 11:51:00 2010 -0500
@@ -633,24 +633,26 @@
             data = self.last_page()
             file( temp_name, 'wb' ).write(data)
             try:
-                if attributes is None:
-                    attributes = {}
-                compare = attributes.get( 'compare', 'diff' )
-                extra_files = attributes.get( 'extra_files', None )
-                if compare == 'diff':
-                    self.files_diff( local_name, temp_name, attributes=attributes )
-                elif compare == 're_match':
-                    self.files_re_match( local_name, temp_name, attributes=attributes )
-                elif compare == 're_match_multiline':
-                    self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
-                else:
-                    raise Exception, 'Unimplemented Compare type: %s' % compare
-                if extra_files:
-                    self.verify_extra_files_content( extra_files, elem.get( 'id' ) )
-            except AssertionError, err:
-                errmsg = 'History item %s different than expected, difference (using %s):\n' % ( hid, compare )
-                errmsg += str( err )
-                raise AssertionError( errmsg )
+                # have to nest try-except in try-finally to handle 2.4
+                try:
+                    if attributes is None:
+                        attributes = {}
+                    compare = attributes.get( 'compare', 'diff' )
+                    extra_files = attributes.get( 'extra_files', None )
+                    if compare == 'diff':
+                        self.files_diff( local_name, temp_name, attributes=attributes )
+                    elif compare == 're_match':
+                        self.files_re_match( local_name, temp_name, attributes=attributes )
+                    elif compare == 're_match_multiline':
+                        self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
+                    else:
+                        raise Exception, 'Unimplemented Compare type: %s' % compare
+                    if extra_files:
+                        self.verify_extra_files_content( extra_files, elem.get( 'id' ) )
+                except AssertionError, err:
+                    errmsg = 'History item %s different than expected, difference (using %s):\n' % ( hid, compare )
+                    errmsg += str( err )
+                    raise AssertionError( errmsg )
             finally:
                 os.remove( temp_name )
 
@@ -676,21 +678,23 @@
         data = self.last_page()
         file( temp_name, 'wb' ).write( data )
         try:
-            if attributes is None:
-                attributes = {}
-            compare = attributes.get( 'compare', 'diff' )
-            if compare == 'diff':
-                self.files_diff( local_name, temp_name, attributes=attributes )
-            elif compare == 're_match':
-                self.files_re_match( local_name, temp_name, attributes=attributes )
-            elif compare == 're_match_multiline':
-                self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
-            else:
-                raise Exception, 'Unimplemented Compare type: %s' % compare
-        except AssertionError, err:
-            errmsg = 'Composite file (%s) of History item %s different than expected, difference (using %s):\n' % ( base_name, hda_id, compare )
-            errmsg += str( err )
-            raise AssertionError( errmsg )
+            # have to nest try-except in try-finally to handle 2.4
+            try:
+                if attributes is None:
+                    attributes = {}
+                compare = attributes.get( 'compare', 'diff' )
+                if compare == 'diff':
+                    self.files_diff( local_name, temp_name, attributes=attributes )
+                elif compare == 're_match':
+                    self.files_re_match( local_name, temp_name, attributes=attributes )
+                elif compare == 're_match_multiline':
+                    self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
+                else:
+                    raise Exception, 'Unimplemented Compare type: %s' % compare
+            except AssertionError, err:
+                errmsg = 'Composite file (%s) of History item %s different than expected, difference (using %s):\n' % ( base_name, hda_id, compare )
+                errmsg += str( err )
+                raise AssertionError( errmsg )
         finally:
             os.remove( temp_name )
 
diff -r f84112d155c0 -r 9efe896dbb17 tools/extract/liftOver_wrapper.py
--- a/tools/extract/liftOver_wrapper.py	Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/extract/liftOver_wrapper.py	Wed Mar 10 11:51:00 2010 -0500
@@ -4,7 +4,7 @@
 Converts coordinates from one build/assembly to another using liftOver binary and mapping files downloaded from UCSC.
 """
 
-import sys, os, string
+import os, string, subprocess, sys
 import tempfile
 import re
 
@@ -51,15 +51,20 @@
 if in_dbkey == "?": 
     stop_err( "Input dataset genome build unspecified, click the pencil icon in the history item to specify it." )
 
-
 if not os.path.isfile( mapfilepath ):
     stop_err( "%s mapping is not currently available."  % ( mapfilepath.split('/')[-1].split('.')[0] ) )
 
 safe_infile = safe_bed_file(infile)
-cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + "  > /dev/null 2>&1"
+cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + "  > /dev/null"
 try:
-    os.system( cmd_line )
-except Exception, exc:
-    stop_err( "Exception caught attempting conversion: %s"  % str( exc ) )
+    # have to nest try-except in try-finally to handle 2.4
+    try:
+        proc = subprocess.Popen( args=cmd_line, shell=True, stderr=subprocess.PIPE )
+        returncode = proc.wait()
+        stderr = proc.stderr.read()
+        if returncode != 0:
+            raise Exception, stderr
+    except Exception, e:
+        raise Exception, 'Exception caught attempting conversion: ' + str( e )
 finally:
     os.remove(safe_infile)
diff -r f84112d155c0 -r 9efe896dbb17 tools/extract/liftOver_wrapper.xml
--- a/tools/extract/liftOver_wrapper.xml	Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/extract/liftOver_wrapper.xml	Wed Mar 10 11:51:00 2010 -0500
@@ -23,19 +23,16 @@
   <requirements>
     <requirement type="binary">liftOver</requirement>
   </requirements>
-  
   <tests>
     <test>
       <param name="input" value="5.bed" dbkey="hg18" ftype="bed" />
-      <param name="to_dbkey" value="/galaxy/data/hg18/liftOver/hg18ToPanTro2.over.chain" />
+      <param name="to_dbkey" value="panTro2" />
       <param name="minMatch" value="0.95" />
       <output name="out_file1" file="5_liftover_mapped.bed"/>
       <output name="out_file2" file="5_liftover_unmapped.bed"/>
     </test>
   </tests>
-
   <help>
-
 .. class:: warningmark
 
 Make sure that the genome build of the input dataset is specified (click the pencil icon in the history item to set it if necessary).
@@ -71,6 +68,6 @@
     chrX  158279  160020  AK097346  0  +
     chrX  160024  169033  AK074528  0  -
 
-</help>
-<code file="liftOver_wrapper_code.py"/>
+  </help>
+  <code file="liftOver_wrapper_code.py" />
 </tool>
diff -r f84112d155c0 -r 9efe896dbb17 tools/extract/liftOver_wrapper_code.py
--- a/tools/extract/liftOver_wrapper_code.py	Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/extract/liftOver_wrapper_code.py	Wed Mar 10 11:51:00 2010 -0500
@@ -1,8 +1,10 @@
+import os
+
 def exec_before_job(app, inp_data, out_data, param_dict, tool):
     #Assuming the path of the form liftOverDirectory/hg18ToHg17.over.chain (This is how the mapping chain files from UCSC look.)
-    to_dbkey = param_dict['to_dbkey'].split('.')[0].split('To')[1]
+    #allows for . in path
+    to_dbkey = os.path.split(param_dict['to_dbkey'])[1].split('.')[0].split('To')[1]
     to_dbkey = to_dbkey[0].lower()+to_dbkey[1:]
     out_data['out_file1'].set_dbkey(to_dbkey)
     out_data['out_file1'].name = out_data['out_file1'].name + " [ MAPPED COORDINATES ]"
     out_data['out_file2'].name = out_data['out_file2'].name + " [ UNMAPPED COORDINATES ]"
-    
diff -r f84112d155c0 -r 9efe896dbb17 tools/samtools/sam_pileup.py
--- a/tools/samtools/sam_pileup.py	Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/samtools/sam_pileup.py	Wed Mar 10 11:51:00 2010 -0500
@@ -78,30 +78,32 @@
     #prepare basic pileup command
     cmd = 'samtools pileup %s -f %s %s > %s'
     try:
-        #index reference if necessary and prepare pileup command
-        if options.ref == 'indexed':
-            if not os.path.exists( "%s.fai" % seqPath ):
-                raise Exception, "No sequences are available for '%s', request them by reporting this error." % options.dbkey
-            cmd = cmd % ( opts, seqPath, tmpf0bam_name, options.output1 )
-        elif options.ref == 'history':
-            os.symlink( options.ownFile, tmpf1_name )
-            cmdIndex = 'samtools faidx %s' % ( tmpf1_name )
-            proc = subprocess.Popen( args=cmdIndex, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
+        # have to nest try-except in try-finally to handle 2.4
+        try:
+            #index reference if necessary and prepare pileup command
+            if options.ref == 'indexed':
+                if not os.path.exists( "%s.fai" % seqPath ):
+                    raise Exception, "No sequences are available for '%s', request them by reporting this error." % options.dbkey
+                cmd = cmd % ( opts, seqPath, tmpf0bam_name, options.output1 )
+            elif options.ref == 'history':
+                os.symlink( options.ownFile, tmpf1_name )
+                cmdIndex = 'samtools faidx %s' % ( tmpf1_name )
+                proc = subprocess.Popen( args=cmdIndex, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
+                returncode = proc.wait()
+                stderr = proc.stderr.read()
+                #did index succeed?
+                if returncode != 0:
+                    raise Exception, 'Error creating index file\n' + stderr
+                cmd = cmd % ( opts, tmpf1_name, tmpf0bam_name, options.output1 )
+            #perform pileup command
+            proc = subprocess.Popen( args=cmd, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
             returncode = proc.wait()
+            #did it succeed?
             stderr = proc.stderr.read()
-            #did index succeed?
             if returncode != 0:
-                raise Exception, 'Error creating index file\n' + stderr
-            cmd = cmd % ( opts, tmpf1_name, tmpf0bam_name, options.output1 )
-        #perform pileup command
-        proc = subprocess.Popen( args=cmd, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
-        returncode = proc.wait()
-        #did it succeed?
-        stderr = proc.stderr.read()
-        if returncode != 0:
-            raise Exception, stderr
-    except Exception, e:
-        stop_err( 'Error running Samtools pileup tool\n' + str( e ) )
+                raise Exception, stderr
+        except Exception, e:
+            stop_err( 'Error running Samtools pileup tool\n' + str( e ) )
     finally:
         #clean up temp files
         if os.path.exists( tmpDir ):
diff -r f84112d155c0 -r 9efe896dbb17 tools/sr_mapping/bowtie_wrapper.py
--- a/tools/sr_mapping/bowtie_wrapper.py	Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/sr_mapping/bowtie_wrapper.py	Wed Mar 10 11:51:00 2010 -0500
@@ -2,7 +2,7 @@
 
 """
 Runs Bowtie on single-end or paired-end data.
-For use with Bowtie v. 0.12.1
+For use with Bowtie v. 0.12.3
 
 usage: bowtie_wrapper.py [options]
     -t, --threads=t: The number of threads to run
@@ -58,12 +58,12 @@
     -H, --suppressHeader=H: Suppress header
 """
 
-import optparse, os, shutil, sys, tempfile
+import optparse, os, shutil, subprocess, sys, tempfile
 
 def stop_err( msg ):
-    sys.stderr.write( "%s\n" % msg )
+    sys.stderr.write( '%s\n' % msg )
     sys.exit()
- 
+
 def __main__():
     #Parse Command Line
     parser = optparse.OptionParser()
@@ -119,6 +119,7 @@
     parser.add_option( '-x', '--indexSettings', dest='index_settings', help='Whether or not indexing options are to be set' )
     parser.add_option( '-H', '--suppressHeader', dest='suppressHeader', help='Suppress header' )
     (options, args) = parser.parse_args()
+    stdout = ''
     # make temp directory for placement of indices and copy reference file there if necessary
     tmp_index_dir = tempfile.mkdtemp()
     # get type of data (solid or solexa)
@@ -187,17 +188,25 @@
                                   iseed, icutoff, colorspace )
             except ValueError:
                 indexing_cmds = '%s' % colorspace
+        ref_file = tempfile.NamedTemporaryFile( dir=tmp_index_dir )
+        ref_file_name = ref_file.name
+        ref_file.close()
+        os.symlink( options.ref, ref_file_name )
+        cmd1 = 'bowtie-build %s -f %s %s' % ( indexing_cmds, ref_file_name, ref_file_name )
         try:
-            shutil.copy( options.ref, tmp_index_dir )
+            proc = subprocess.Popen( args=cmd1, shell=True, cwd=tmp_index_dir, stderr=subprocess.PIPE, stdout=subprocess.PIPE )
+            returncode = proc.wait()
+            stderr = proc.stderr.read()
+            if returncode != 0:
+                raise Exception, stderr
         except Exception, e:
-            stop_err( 'Error creating temp directory for indexing purposes\n' + str( e ) )
-        options.ref = os.path.join( tmp_index_dir, os.path.split( options.ref )[1] )
-        cmd1 = 'bowtie-build %s -f %s %s 2> /dev/null' % ( indexing_cmds, options.ref, options.ref )
-        try:
-            os.chdir( tmp_index_dir )
-            os.system( cmd1 )
-        except Exception, e:
+            # clean up temp dir
+            if os.path.exists( tmp_index_dir ):
+                shutil.rmtree( tmp_index_dir )
             stop_err( 'Error indexing reference sequence\n' + str( e ) )
+        stdout += 'File indexed. '
+    else:
+        ref_file_name = options.ref
     # set up aligning and generate aligning command options
     # automatically set threads in both cases
     if options.suppressHeader == 'true':
@@ -328,19 +337,34 @@
                               best, strata, offrate, seed, colorspace, snpphred, snpfrac, 
                               keepends, options.threads, suppressHeader )
         except ValueError, e:
+            # clean up temp dir
+            if os.path.exists( tmp_index_dir ):
+                shutil.rmtree( tmp_index_dir )
             stop_err( 'Something is wrong with the alignment parameters and the alignment could not be run\n' + str( e ) )
-    # prepare actual aligning commands
-    if options.paired == 'paired':
-        cmd2 = 'bowtie %s %s -1 %s -2 %s > %s 2> /dev/null' % ( aligning_cmds, options.ref, options.input1, options.input2, options.output ) 
-    else:
-        cmd2 = 'bowtie %s %s %s > %s 2> /dev/null' % ( aligning_cmds, options.ref, options.input1, options.output ) 
-    # align
     try:
-        os.system( cmd2 )
-    except Exception, e:
-        stop_err( 'Error aligning sequence\n' + str( e ) )
-    # clean up temp dir
-    if os.path.exists( tmp_index_dir ):
-        shutil.rmtree( tmp_index_dir )
+        # have to nest try-except in try-finally to handle 2.4
+        try:
+            # prepare actual aligning commands
+            if options.paired == 'paired':
+                cmd2 = 'bowtie %s %s -1 %s -2 %s > %s' % ( aligning_cmds, ref_file_name, options.input1, options.input2, options.output )
+            else:
+                cmd2 = 'bowtie %s %s %s > %s' % ( aligning_cmds, ref_file_name, options.input1, options.output )
+            # align
+            proc = subprocess.Popen( args=cmd2, shell=True, cwd=tmp_index_dir, stderr=subprocess.PIPE )
+            returncode = proc.wait()
+            stderr = proc.stderr.read()
+            if returncode != 0:
+                raise Exception, stderr
+            # check that there are results in the output file
+            if len( open( options.output, 'rb' ).read().strip() ) == 0:
+                raise Exception, 'The output file is empty, there may be an error with your input file or settings.'
+        except Exception, e:
+            stop_err( 'Error aligning sequence. ' + str( e ) )
+    finally:
+        # clean up temp dir
+        if os.path.exists( tmp_index_dir ):
+            shutil.rmtree( tmp_index_dir )
+    stdout += 'Sequence file aligned.\n'
+    sys.stdout.write( stdout )
 
 if __name__=="__main__": __main__()
diff -r f84112d155c0 -r 9efe896dbb17 tools/sr_mapping/bwa_wrapper.py
--- a/tools/sr_mapping/bwa_wrapper.py	Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/sr_mapping/bwa_wrapper.py	Wed Mar 10 11:51:00 2010 -0500
@@ -152,55 +152,57 @@
         cmd3 = 'bwa samse %s %s %s %s >> %s' % ( gen_alignment_cmds, ref_file_name, tmp_align_out_name, options.fastq, options.output )
     # perform alignments
     try:
-        # align
+        # need to nest try-except in try-finally to handle 2.4
         try:
-            proc = subprocess.Popen( args=cmd2, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
-            returncode = proc.wait()
-            stderr = proc.stderr.read()
-            if returncode != 0:
-                raise Exception, stderr
-        except Exception, e:
-            raise Exception, 'Error aligning sequence. ' + str( e )
-        # and again if paired data
-        try:
-            if cmd2b: 
-                proc = subprocess.Popen( args=cmd2b, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
+            # align
+            try:
+                proc = subprocess.Popen( args=cmd2, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
                 returncode = proc.wait()
                 stderr = proc.stderr.read()
                 if returncode != 0:
                     raise Exception, stderr
+            except Exception, e:
+                raise Exception, 'Error aligning sequence. ' + str( e )
+            # and again if paired data
+            try:
+                if cmd2b: 
+                    proc = subprocess.Popen( args=cmd2b, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
+                    returncode = proc.wait()
+                    stderr = proc.stderr.read()
+                    if returncode != 0:
+                        raise Exception, stderr
+            except Exception, e:
+                raise Exception, 'Error aligning second sequence. ' + str( e )
+            # generate align
+            try:
+                proc = subprocess.Popen( args=cmd3, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
+                returncode = proc.wait()
+                stderr = proc.stderr.read()
+                if returncode != 0:
+                    raise Exception, stderr
+            except Exception, e:
+                raise Exception, 'Error generating alignments. ' + str( e ) 
+            # remove header if necessary
+            if options.suppressHeader == 'true':
+                tmp_out = tempfile.NamedTemporaryFile( dir=tmp_dir)
+                tmp_out_name = tmp_out.name
+                tmp_out.close()
+                try:
+                    shutil.move( options.output, tmp_out_name )
+                except Exception, e:
+                    raise Exception, 'Error moving output file before removing headers. ' + str( e )
+                fout = file( options.output, 'w' )
+                for line in file( tmp_out.name, 'r' ):
+                    if not ( line.startswith( '@HD' ) or line.startswith( '@SQ' ) or line.startswith( '@RG' ) or line.startswith( '@PG' ) or line.startswith( '@CO' ) ):
+                        fout.write( line )
+                fout.close()
+            # check that there are results in the output file
+            if os.path.getsize( options.output ) > 0:
+                sys.stdout.write( 'BWA run on %s-end data' % options.genAlignType )
+            else:
+                raise Exception, 'The output file is empty. You may simply have no matches, or there may be an error with your input file or settings.'
         except Exception, e:
-            raise Exception, 'Error aligning second sequence. ' + str( e )
-        # generate align
-        try:
-            proc = subprocess.Popen( args=cmd3, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
-            returncode = proc.wait()
-            stderr = proc.stderr.read()
-            if returncode != 0:
-                raise Exception, stderr
-        except Exception, e:
-            raise Exception, 'Error generating alignments. ' + str( e ) 
-        # remove header if necessary
-        if options.suppressHeader == 'true':
-            tmp_out = tempfile.NamedTemporaryFile( dir=tmp_dir)
-            tmp_out_name = tmp_out.name
-            tmp_out.close()
-            try:
-                shutil.move( options.output, tmp_out_name )
-            except Exception, e:
-                raise Exception, 'Error moving output file before removing headers. ' + str( e )
-            fout = file( options.output, 'w' )
-            for line in file( tmp_out.name, 'r' ):
-                if not ( line.startswith( '@HD' ) or line.startswith( '@SQ' ) or line.startswith( '@RG' ) or line.startswith( '@PG' ) or line.startswith( '@CO' ) ):
-                    fout.write( line )
-            fout.close()
-        # check that there are results in the output file
-        if os.path.getsize( options.output ) > 0:
-            sys.stdout.write( 'BWA run on %s-end data' % options.genAlignType )
-        else:
-            raise Exception, 'The output file is empty. You may simply have no matches, or there may be an error with your input file or settings.'
-    except Exception, e:
-        stop_err( 'The alignment failed.\n' + str( e ) )
+            stop_err( 'The alignment failed.\n' + str( e ) )
     finally:
         # clean up temp dir
         if os.path.exists( tmp_index_dir ):
                    
                  
                  
                          
                            
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/f84112d155c0
changeset: 3504:f84112d155c0
user:      Dan Blankenberg <dan(a)bx.psu.edu>
date:      Wed Mar 10 11:28:50 2010 -0500
description:
Allow renaming of uploaded files in toolbox tests by including an <edit_attributes type="name" value="new dataset name" /> child tag. A rename directive is automatically assigned (no XML changes required) to uploaded composite datasets so that they can be identified uniquely.
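A minimal sketch of the parsing idea, not the actual Galaxy test loader, using a hypothetical <param> block: the loader collects the composite_data and edit_attributes children and, when a composite upload carries no explicit rename, injects one so the dataset can be located unambiguously in the fresh test history.

    # Simplified sketch (hypothetical example data, not Galaxy's loader code).
    from xml.etree import ElementTree

    param_xml = '''
    <param name="input" value="rgenetics" ftype="lped">
      <composite_data value="example.ped" />
      <composite_data value="example.map" />
    </param>
    '''
    elem = ElementTree.fromstring(param_xml)
    attrib = dict(elem.attrib)
    attrib['composite_data'] = [c for c in elem if c.tag == 'composite_data']
    attrib['edit_attributes'] = [c for c in elem if c.tag == 'edit_attributes']
    if attrib['composite_data'] and not attrib['edit_attributes']:
        # no explicit rename supplied: assign one so the uploaded composite
        # dataset can be identified uniquely in the test history
        attrib['edit_attributes'] = [{'type': 'name', 'value': '_COMPOSITE_RENAMED 0_'}]
    print attrib['edit_attributes']
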
diffstat:
 lib/galaxy/tools/__init__.py    |  23 +++++++++++++++++++++++
 lib/galaxy/tools/test.py        |  11 +++++++----
 test/functional/test_toolbox.py |  16 ++++++++++++++--
 3 files changed, 44 insertions(+), 6 deletions(-)
diffs (91 lines):
diff -r c73f093219aa -r f84112d155c0 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py	Wed Mar 10 11:24:49 2010 -0500
+++ b/lib/galaxy/tools/__init__.py	Wed Mar 10 11:28:50 2010 -0500
@@ -510,6 +510,7 @@
         store in `self.tests`.
         """
         self.tests = []
+        composite_data_names_counter = 0 #composite datasets need a unique name: each test occurs in a fresh history, but we'll keep it unique per set of tests
         for i, test_elem in enumerate( tests_elem.findall( 'test' ) ):
             name = test_elem.get( 'name', 'Test-%d' % (i+1) )
             maxseconds = int( test_elem.get( 'maxseconds', '120' ) )
@@ -524,6 +525,28 @@
                     else:
                         value = None
                     attrib['children'] = list( param_elem.getchildren() )
+                    if attrib['children']:
+                        #at this time, we can assume having children only occurs on DataToolParameter test items
+                        #but this could change and would cause the below parsing to change based upon differences in children items
+                        attrib['metadata'] = []
+                        attrib['composite_data'] = []
+                        attrib['edit_attributes'] = []
+                        composite_data_name = None #composite datasets need to be renamed uniquely
+                        for child in attrib['children']:
+                            if child.tag == 'composite_data':
+                                attrib['composite_data'].append( child )
+                                if composite_data_name is None:
+                                    #generate a unique name; each test uses a fresh history
+                                    composite_data_name = '_COMPOSITE_RENAMED %i_' % ( composite_data_names_counter )
+                                    composite_data_names_counter += 1
+                            elif child.tag == 'metadata':
+                                attrib['metadata'].append( child )
+                            elif child.tag == 'metadata':
+                                attrib['metadata'].append( child )
+                            elif child.tag == 'edit_attributes':
+                                attrib['edit_attributes'].append( child )
+                        if composite_data_name:
+                            attrib['edit_attributes'].insert( 0, { 'type': 'name', 'value': composite_data_name } ) #composite datasets need implicit renaming; inserted at front of list so explicit declarations take precedence
                     test.add_param( attrib.pop( 'name' ), value, attrib )
                 for output_elem in test_elem.findall( "output" ):
                     attrib = dict( output_elem.attrib )
diff -r c73f093219aa -r f84112d155c0 lib/galaxy/tools/test.py
--- a/lib/galaxy/tools/test.py	Wed Mar 10 11:24:49 2010 -0500
+++ b/lib/galaxy/tools/test.py	Wed Mar 10 11:28:50 2010 -0500
@@ -30,12 +30,15 @@
                     if isinstance( input_value, grouping.Conditional ) or isinstance( input_value, grouping.Repeat ):
                         self.__expand_grouping_for_data_input(name, value, extra, input_name, input_value)
             elif isinstance( self.tool.inputs[name], parameters.DataToolParameter ) and ( value, extra ) not in self.required_files:
-                if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) == 0:
+                name_change = [ att for att in extra.get( 'edit_attributes', [] ) if att.get( 'type' ) == 'name' ]
+                if name_change:
+                    name_change = name_change[-1].get( 'value' ) #only the last name change really matters
+                if value is None and not name_change:
                     assert self.tool.inputs[name].optional, '%s is not optional. You must provide a valid filename.' % name
                 else:
-                    self.required_files.append( ( value, extra ) )
-                    if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) > 0:
-                        value = extra.get( 'ftype' )
+                    self.required_files.append( ( value, extra ) ) #these files will be uploaded
+                    if name_change:
+                        value = name_change #change value for select to renamed uploaded file for e.g. composite dataset
         except Exception, e:
             log.debug( "Error in add_param for %s: %s" % ( name, e ) )
         self.inputs.append( ( name, value, extra ) )
diff -r c73f093219aa -r f84112d155c0 test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py	Wed Mar 10 11:24:49 2010 -0500
+++ b/test/functional/test_toolbox.py	Wed Mar 10 11:28:50 2010 -0500
@@ -33,10 +33,22 @@
         # Upload any needed files
         for fname, extra in testdef.required_files:
             children = extra.get( 'children', [] )
-            metadata = [ child for child in children if child.tag == 'metadata' ]
-            composite_data = [ child for child in children if child.tag == 'composite_data' ]
+            metadata = extra.get( 'metadata', [] )
+            composite_data = extra.get( 'composite_data', [] )
             self.upload_file( fname, ftype=extra.get( 'ftype', 'auto' ), dbkey=extra.get( 'dbkey', 'hg17' ), metadata = metadata, composite_data = composite_data )
             print "Uploaded file: ", fname, ", ftype: ", extra.get( 'ftype', 'auto' ), ", extra: ", extra
+            #Post upload attribute editing
+            edit_attributes = extra.get( 'edit_attributes', [] )
+            #currently only renaming is supported
+            for edit_att in edit_attributes:
+                if edit_att.get( 'type', None ) == 'name':
+                    new_name = edit_att.get( 'value', None )
+                    assert new_name, 'You must supply the new dataset name as the value tag of the edit_attributes tag'
+                    hda_id = self.get_history_as_data_list()[-1].get( 'id' )
+                    self.edit_hda_attribute_info( hda_id, new_name = new_name )
+                    print "Renamed uploaded file to:", new_name
+                else:
+                    raise Exception( 'edit_attributes type (%s) is unimplemented' % edit_att.get( 'type', None ) )
         # We need to handle the case where we've uploaded a valid compressed file since the upload
         # tool will have uncompressed it on the fly.
         all_inputs = {}
                    
                  
                  
                          
                            
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/c73f093219aa
changeset: 3503:c73f093219aa
user:      Greg Von Kuster <greg(a)bx.psu.edu>
date:      Wed Mar 10 11:24:49 2010 -0500
description:
Fix for deleting a list of library datasets - resolves ticket #102.
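The substantive change in the diff below is that deletion now flags the LibraryDataset itself rather than the dataset association, and the session is flushed once after the loop; the cleanup_datasets.py script purges the underlying files later. A minimal sketch of that loop shape, with hypothetical session and model objects:

    # Minimal sketch (hypothetical objects): mark each selected library
    # dataset deleted and flush the session once, leaving the associations
    # and disk files for the periodic cleanup script.
    def delete_library_datasets(session, lddas):
        for ldda in lddas:
            ld = ldda.library_dataset   # flag the dataset, not the association
            ld.deleted = True
            session.add(ld)
        session.flush()                 # single flush after the loop
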
diffstat:
 lib/galaxy/web/controllers/library_admin.py    |   4 ++--
 lib/galaxy/web/controllers/library_common.py   |  14 ++++++++------
 templates/library/common/browse_library.mako   |   2 +-
 templates/library/common/ldda_info.mako        |   2 +-
 test/base/twilltestcase.py                     |   9 ++++++---
 test/functional/test_security_and_libraries.py |   6 +++---
 6 files changed, 21 insertions(+), 16 deletions(-)
diffs (130 lines):
diff -r 34babf71a09f -r c73f093219aa lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py	Tue Mar 09 16:28:04 2010 -0500
+++ b/lib/galaxy/web/controllers/library_admin.py	Wed Mar 10 11:24:49 2010 -0500
@@ -169,8 +169,8 @@
         # deleted / purged contents will have the same state ).  When a library or folder has been deleted for
         # the amount of time defined in the cleanup_datasets.py script, the library or folder and all of its
         # contents will be purged.  The association between this method and the cleanup_datasets.py script
-        # enables clean maintenance of libraries and library dataset disk files.  This is also why the following
-        # 3 objects, and not any of the associations ( the cleanup_datasets.py scipot handles everything else ).
+        # enables clean maintenance of libraries and library dataset disk files.  This is also why the item_types
+        # are not any of the associations ( the cleanup_datasets.py script handles everything ).
         show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
         item_types = { 'library': trans.app.model.Library,
                        'folder': trans.app.model.LibraryFolder,
diff -r 34babf71a09f -r c73f093219aa lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py	Tue Mar 09 16:28:04 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py	Wed Mar 10 11:24:49 2010 -0500
@@ -1246,8 +1246,7 @@
                                     messagetype=messagetype )
     @web.expose
     def act_on_multiple_datasets( self, trans, cntrller, library_id, ldda_ids='', **kwd ):
-        # This method is used by the select list labeled "Perform action on selected datasets"
-        # on the analysis library browser
+        # Perform an action on a list of library datasets.
         params = util.Params( kwd )
         msg = util.restore_text( params.get( 'msg', ''  ) )
         messagetype = params.get( 'messagetype', 'done' )
@@ -1262,7 +1261,7 @@
             messagetype = 'error'
         else:
             ldda_ids = util.listify( ldda_ids )
-            if action == 'add':
+            if action == 'import_to_history':
                 history = trans.get_history()
                 if history is None:
                     # Must be a bot sending a request without having a history.
@@ -1306,9 +1305,12 @@
             elif action == 'delete':
                 for ldda_id in ldda_ids:
                     ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( ldda_id ) )
-                    ldda.deleted = True
-                    trans.sa_session.add( ldda )
-                    trans.sa_session.flush()
+                    # Do not delete the association, just delete the library_dataset.  The
+                    # cleanup_datasets.py script handles everything else.
+                    ld = ldda.library_dataset
+                    ld.deleted = True
+                    trans.sa_session.add( ld )
+                trans.sa_session.flush()
                 msg = "The selected datasets have been removed from this data library"
             else:
                 error = False
diff -r 34babf71a09f -r c73f093219aa templates/library/common/browse_library.mako
--- a/templates/library/common/browse_library.mako	Tue Mar 09 16:28:04 2010 -0500
+++ b/templates/library/common/browse_library.mako	Wed Mar 10 11:24:49 2010 -0500
@@ -229,7 +229,7 @@
                        <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), replace_id=trans.security.encode_id( library_dataset.id ), show_deleted=show_deleted )}">Upload a new version of this dataset</a>
                    %endif
                    %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ldda.has_data:
-                       <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='add', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
+                       <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='import_to_history', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
                        <a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels )}">Download this dataset</a>
                    %endif
                    %if cntrller in [ 'library_admin', 'requests_admin' ]:
diff -r 34babf71a09f -r c73f093219aa templates/library/common/ldda_info.mako
--- a/templates/library/common/ldda_info.mako	Tue Mar 09 16:28:04 2010 -0500
+++ b/templates/library/common/ldda_info.mako	Wed Mar 10 11:24:49 2010 -0500
@@ -57,7 +57,7 @@
                     <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), replace_id=trans.security.encode_id( ldda.library_dataset.id ) )}">Upload a new version of this dataset</a>
                 %endif
                 %if cntrller=='library' and ldda.has_data:
-                    <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='add', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
+                    <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='import_to_history', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
                     <a class="action-button" href="${h.url_for( controller='library', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Download this dataset</a>
                 %endif
             </div>
diff -r 34babf71a09f -r c73f093219aa test/base/twilltestcase.py
--- a/test/base/twilltestcase.py	Tue Mar 09 16:28:04 2010 -0500
+++ b/test/base/twilltestcase.py	Wed Mar 10 11:24:49 2010 -0500
@@ -1595,14 +1595,17 @@
         self.home()
     def add_library_dataset( self, cntrller, filename, library_id, folder_id, folder_name,
                              file_type='auto', dbkey='hg18', roles=[], message='', root=False,
-                             template_field_name1='', template_field_contents1='' ):
+                             template_field_name1='', template_field_contents1='', show_deleted='False',
+                             upload_option='upload_file' ):
         """Add a dataset to a folder"""
         filename = self.get_filename( filename )
         self.home()
-        self.visit_url( "%s/library_common/upload_library_dataset?cntrller=%s&upload_option=upload_file&library_id=%s&folder_id=%s&message=%s" % \
-                        ( self.url, cntrller, library_id, folder_id, message ) )
+        self.visit_url( "%s/library_common/upload_library_dataset?cntrller=%s&library_id=%s&folder_id=%s&upload_option=%s&message=%s" % \
+                        ( self.url, cntrller, library_id, folder_id, upload_option, message ) )
         self.check_page_for_string( 'Upload files' )
+        tc.fv( "1", "library_id", library_id )
         tc.fv( "1", "folder_id", folder_id )
+        tc.fv( "1", "show_deleted", show_deleted )
         tc.formfile( "1", "files_0|file_data", filename )
         tc.fv( "1", "file_type", file_type )
         tc.fv( "1", "dbkey", dbkey )
diff -r 34babf71a09f -r c73f093219aa test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py	Tue Mar 09 16:28:04 2010 -0500
+++ b/test/functional/test_security_and_libraries.py	Wed Mar 10 11:24:49 2010 -0500
@@ -1181,7 +1181,7 @@
         # Test importing the restricted dataset into a history, can't use the 
         # ~/library_admin/libraries form as twill barfs on it so we'll simulate the form submission
         # by going directly to the form action
-        self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=add&ldda_ids=%s&library_id=%s' \
+        self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=import_to_history&ldda_ids=%s&library_id=%s' \
                         % ( self.url, self.security.encode_id( ldda_five.id ), self.security.encode_id( library_one.id ) ) )
         self.check_page_for_string( '1 dataset(s) have been imported into your history' )
         self.logout()
@@ -1473,7 +1473,7 @@
             for ldda in lddas:
                 # Import each library dataset into our history
                 self.home()
-                self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=add&ldda_ids=%s&library_id=%s' % \
+                self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=import_to_history&ldda_ids=%s&library_id=%s' % \
                     ( self.url, self.security.encode_id( ldda.id ), self.security.encode_id( library_one.id ) ) )
                 # Determine the new HistoryDatasetAssociation id created when the library dataset was imported into our history
                 last_hda_created = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
@@ -1522,7 +1522,7 @@
             # be all of the above on any of the 3 datasets that are imported into a history
             for ldda in lddas:
                 self.home()
-                self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&library_id=%s&do_action=add&ldda_ids=%s' % \
+                self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&library_id=%s&do_action=import_to_history&ldda_ids=%s' % \
                     ( self.url, self.security.encode_id( library_one.id ), self.security.encode_id( ldda.id ) ) )
                 # Determine the new HistoryDatasetAssociation id created when the library dataset was imported into our history
                 last_hda_created = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/34babf71a09f
changeset: 3502:34babf71a09f
user:      Greg Von Kuster <greg(a)bx.psu.edu>
date:      Tue Mar 09 16:28:04 2010 -0500
description:
Keep the state of displaying / hiding deleted library items when uploading library datasets.
diffstat:
 lib/galaxy/web/controllers/library_common.py |   7 +++++++
 lib/galaxy/web/controllers/tool_runner.py    |   1 -
 templates/base_panels.mako                   |   5 +++--
 templates/library/common/browse_library.mako |   2 +-
 templates/library/common/common.mako         |   3 ++-
 templates/library/common/upload.mako         |  14 +++++++-------
 6 files changed, 20 insertions(+), 12 deletions(-)
diffs (161 lines):
diff -r e4592fc99acc -r 34babf71a09f lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py	Tue Mar 09 15:25:23 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py	Tue Mar 09 16:28:04 2010 -0500
@@ -659,6 +659,7 @@
         msg = util.restore_text( params.get( 'msg', ''  ) )
         messagetype = params.get( 'messagetype', 'done' )
         deleted = util.string_as_bool( params.get( 'deleted', False ) )
+        show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
         dbkey = params.get( 'dbkey', '?' )
         if isinstance( dbkey, list ):
             last_used_build = dbkey[0]
@@ -705,6 +706,7 @@
                                                                            folder_id=folder_id,
                                                                            replace_id=replace_id,
                                                                            upload_option=upload_option,
+                                                                           show_deleted=show_deleted,
                                                                            msg=util.sanitize_text( msg ),
                                                                            messagetype='error' ) )
 
@@ -758,6 +760,7 @@
                                                                            id=library_id,
                                                                            default_action=default_action,
                                                                            created_ldda_ids=created_ldda_ids,
+                                                                           show_deleted=show_deleted,
                                                                            msg=util.sanitize_text( msg ), 
                                                                            messagetype='done' ) )
                         
@@ -770,6 +773,7 @@
                                                                cntrller=cntrller,
                                                                id=library_id,
                                                                created_ldda_ids=created_ldda_ids,
+                                                               show_deleted=show_deleted,
                                                                msg=util.sanitize_text( msg ),
                                                                messagetype=messagetype ) )
         # See if we have any inherited templates, but do not inherit contents.
@@ -819,6 +823,7 @@
                                     roles=roles,
                                     history=history,
                                     widgets=widgets,
+                                    show_deleted=show_deleted,
                                     msg=msg,
                                     messagetype=messagetype )
     def upload_dataset( self, trans, cntrller, library_id, folder_id, replace_dataset=None, **kwd ):
@@ -834,6 +839,7 @@
                 dataset_upload_inputs.append( input )
         # Library-specific params
         params = util.Params( kwd ) # is this filetoolparam safe?
+        show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
         library_bunch = upload_common.handle_library_params( trans, params, folder_id, replace_dataset )
         msg = util.restore_text( params.get( 'msg', ''  ) )
         messagetype = params.get( 'messagetype', 'done' )
@@ -883,6 +889,7 @@
                                                        folder_id=folder_id,
                                                        replace_id=replace_id,
                                                        upload_option=upload_option,
+                                                       show_deleted=show_deleted,
                                                        msg=util.sanitize_text( msg ),
                                                        messagetype='error' ) )
         json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
diff -r e4592fc99acc -r 34babf71a09f lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py	Tue Mar 09 15:25:23 2010 -0500
+++ b/lib/galaxy/web/controllers/tool_runner.py	Tue Mar 09 16:28:04 2010 -0500
@@ -155,7 +155,6 @@
             permissions, in_roles, error, msg = trans.app.security_agent.derive_roles_from_access( trans, library_id, cntrller, library=True, **vars )
             if error:
                 return [ 'error', msg ]
-        permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
         def create_dataset( name ):
             ud = Bunch( name=name, file_type=None, dbkey=None )
             if nonfile_params.get( 'folder_id', False ):
diff -r e4592fc99acc -r 34babf71a09f templates/base_panels.mako
--- a/templates/base_panels.mako	Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/base_panels.mako	Tue Mar 09 16:28:04 2010 -0500
@@ -131,10 +131,11 @@
                             $(this).ajaxSubmit( { iframe: true } );
                             if ( $(this).find("input[name='folder_id']").val() != undefined ) {
                                 var library_id = $(this).find("input[name='library_id']").val();
+                                var show_deleted = $(this).find("input[name='show_deleted']").val();
                                 if ( location.pathname.indexOf( 'admin' ) != -1 ) {
-                                    $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library_admin&id=" + library_id + "&created_ldda_ids=" + async_datasets);
+                                    $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library_admin&id=" + library_id + "&created_ldda_ids=" + async_datasets + "&show_deleted=" + show_deleted);
                                 } else {
-                                    $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library&id=" + library_id + "&created_ldda_ids=" + async_datasets);
+                                    $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library&id=" + library_id + "&created_ldda_ids=" + async_datasets + "&show_deleted=" + show_deleted);
                                 }
                             } else {
                                 $("iframe#galaxy_main").attr("src","${h.url_for(controller='tool_runner', action='upload_async_message')}");
diff -r e4592fc99acc -r 34babf71a09f templates/library/common/browse_library.mako
--- a/templates/library/common/browse_library.mako	Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/library/common/browse_library.mako	Tue Mar 09 16:28:04 2010 -0500
@@ -404,7 +404,7 @@
 
    <ul class="manage-table-actions">
        %if not library.deleted and ( cntrller in [ 'library_admin', 'requests_admin' ] or can_add ):
-           <li><a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ) )}"><span>Add datasets</span></a></li>
+           <li><a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ), use_panels=use_panels, show_deleted=show_deleted )}"><span>Add datasets</span></a></li>
            <li><a class="action-button" href="${h.url_for( controller='library_common', action='create_folder', cntrller=cntrller, parent_id=trans.security.encode_id( library.root_folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add folder</a></li>
        %endif
    </ul>
diff -r e4592fc99acc -r 34babf71a09f templates/library/common/common.mako
--- a/templates/library/common/common.mako	Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/library/common/common.mako	Tue Mar 09 16:28:04 2010 -0500
@@ -87,7 +87,7 @@
     %endif
 </%def>
 
-<%def name="render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history )">
+<%def name="render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history, show_deleted )">
     <% import os, os.path %>
     %if upload_option in [ 'upload_file', 'upload_directory', 'upload_paths' ]:
         <div class="toolForm" id="upload_library_dataset">
@@ -106,6 +106,7 @@
                     <input type="hidden" name="library_id" value="${library_id}"/>
                     <input type="hidden" name="folder_id" value="${folder_id}"/>
                     <input type="hidden" name="upload_option" value="${upload_option}"/>
+                    <input type="hidden" name="show_deleted" value="${show_deleted}"/>
                     %if replace_dataset not in [ None, 'None' ]:
                         <input type="hidden" name="replace_id" value="${trans.security.encode_id( replace_dataset.id )}"/>
                         <div class="form-row">
diff -r e4592fc99acc -r 34babf71a09f templates/library/common/upload.mako
--- a/templates/library/common/upload.mako	Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/library/common/upload.mako	Tue Mar 09 16:28:04 2010 -0500
@@ -26,26 +26,26 @@
     ## Don't allow multiple datasets to be uploaded when replacing a dataset with a new version
     <a id="upload-librarydataset--popup" class="popup-arrow" style="display: none;">▼</a>
     <div popupmenu="upload-librarydataset--popup">
-        <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller,library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_file' )}">Upload files</a>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller,library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_file', show_deleted=show_deleted )}">Upload files</a>
         %if cntrller == 'library_admin':
             %if trans.app.config.library_import_dir and os.path.exists( trans.app.config.library_import_dir ):
-                <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory' )}">Upload directory of files</a>
+                <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory', show_deleted=show_deleted )}">Upload directory of files</a>
             %endif
             %if trans.app.config.allow_library_path_paste:
-                <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_paths' )}">Upload files from filesystem paths</a>
+                <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_paths', show_deleted=show_deleted )}">Upload files from filesystem paths</a>
             %endif
         %elif cntrller == 'library':
             %if trans.app.config.user_library_import_dir and os.path.exists( os.path.join( trans.app.config.user_library_import_dir, trans.user.email ) ):
-                <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory' )}">Upload directory of files</a>
+                <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory', show_deleted=show_deleted )}">Upload directory of files</a>
             %endif
         %endif
-        <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='import_from_history' )}">Import datasets from your current history</a>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='import_from_history', show_deleted=show_deleted )}">Import datasets from your current history</a>
     </div>
 %endif
 <br/><br/>
 <ul class="manage-table-actions">
     <li>
-        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id )}"><span>Browse this data library</span></a>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
     </li>
 </ul>
 
@@ -53,4 +53,4 @@
     ${render_msg( msg, messagetype )}
 %endif
 
-${render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history )}
+${render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history, show_deleted )}
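The pattern repeated throughout this changeset is to read the flag once from the request and then hand it back on every redirect and hidden form field, so the library browser keeps the same deleted-items visibility after the upload round trip. A condensed, hypothetical sketch of that idea (redirect_to and render below are stand-ins, not Galaxy's controller API):

def string_as_bool(value):
    # Simplified stand-in for galaxy.util.string_as_bool.
    return str(value).lower() in ("true", "yes", "on", "1")

def upload_library_dataset(params, redirect_to, render):
    # Read the display state from the incoming request...
    show_deleted = string_as_bool(params.get("show_deleted", False))
    if params.get("error"):
        # ...and pass it back on every redirect so the browse page
        # re-renders with the same show/hide-deleted setting.
        return redirect_to("browse_library", show_deleted=show_deleted)
    # The upload form also echoes it back as a hidden input for the next request.
    return render("upload.mako", show_deleted=show_deleted)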
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/e4592fc99acc
changeset: 3501:e4592fc99acc
user:      Dan Blankenberg <dan(a)bx.psu.edu>
date:      Tue Mar 09 15:25:23 2010 -0500
description:
Add a helper script that will re.escape files; useful for creating files valid for re_match and re_match_multiline comparisons in tool tests.
diffstat:
 scripts/tools/re_escape_output.py |  34 ++++++++++++++++++++++++++++++++++
 1 files changed, 34 insertions(+), 0 deletions(-)
diffs (38 lines):
diff -r c67b5628f348 -r e4592fc99acc scripts/tools/re_escape_output.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/tools/re_escape_output.py	Tue Mar 09 15:25:23 2010 -0500
@@ -0,0 +1,34 @@
+#! /usr/bin/python
+
+"""
+Escapes a file into a form suitable for use with tool tests using re_match or re_match_multiline (when -m/--multiline option is used)
+
+usage: re_escape_output.py [options] input_file [output_file]
+    -m: Use Multiline Matching
+"""
+
+import optparse, re
+
+def __main__():
+    #Parse Command Line
+    parser = optparse.OptionParser()
+    parser.add_option( "-m", "--multiline", action="store_true", dest="multiline", default=False, help="Use Multiline Matching")
+    ( options, args ) = parser.parse_args()
+    input = open( args[0] ,'rb' )
+    if len( args ) > 1:
+        output = open( args[1], 'wb' )
+    else:
+        if options.multiline:
+            suffix = 're_match_multiline'
+        else:
+            suffix = 're_match'
+        output = open( "%s.%s" % ( args[0], suffix ), 'wb' )
+    if options.multiline:
+        lines = [ re.escape( input.read() ) ]
+    else:
+        lines = [ "%s\n" % re.escape( line.rstrip( '\n\r' ) ) for line in input ]
+    output.writelines( lines )
+    output.close()
+
+if __name__ == "__main__":
+    __main__()
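For context, re.escape() backslash-escapes regex metacharacters, so an escaped expected-output file matches itself literally under re_match. A minimal sketch of the same idea (the sample output line below is hypothetical, not taken from a real tool test):

import re

# A line of expected tool output that happens to contain regex metacharacters.
line = "chr1\t100\t200\t(score=0.5)"

# Escaping makes the line match itself literally when treated as a pattern,
# which is what re_match / re_match_multiline comparisons expect.
pattern = re.escape(line.rstrip("\n\r")) + "\n"
assert re.match(pattern, line + "\n")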
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/c67b5628f348
changeset: 3500:c67b5628f348
user:      Nate Coraor <nate(a)bx.psu.edu>
date:      Tue Mar 09 14:51:01 2010 -0500
description:
Add server_side_cursors to database_engine_option_ parsing
diffstat:
 lib/galaxy/config.py |  3 ++-
 1 files changed, 2 insertions(+), 1 deletions(-)
diffs (13 lines):
diff -r 478447ba0ec6 -r c67b5628f348 lib/galaxy/config.py
--- a/lib/galaxy/config.py	Tue Mar 09 14:29:17 2010 -0500
+++ b/lib/galaxy/config.py	Tue Mar 09 14:51:01 2010 -0500
@@ -172,7 +172,8 @@
         'pool_recycle': int,
         'pool_size': int,
         'max_overflow': int,
-        'pool_threadlocal': string_as_bool
+        'pool_threadlocal': string_as_bool,
+        'server_side_cursors': string_as_bool
     }
     prefix = "database_engine_option_"
     prefix_len = len( prefix )
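The diff only adds the new key; for readers unfamiliar with the prefix mechanism, here is a rough, self-contained sketch of how database_engine_option_* settings become SQLAlchemy engine keyword arguments (string_as_bool and the parsing loop below are simplified stand-ins, not Galaxy's actual config code):

def string_as_bool(value):
    # Simplified stand-in for galaxy.util.string_as_bool.
    return str(value).lower() in ("true", "yes", "on", "1")

# Per-option type converters; anything not listed stays a plain string.
conversions = {
    'pool_recycle': int,
    'pool_size': int,
    'max_overflow': int,
    'pool_threadlocal': string_as_bool,
    'server_side_cursors': string_as_bool,
}
prefix = "database_engine_option_"
prefix_len = len(prefix)

def parse_engine_options(kwargs):
    # Collect database_engine_option_* entries into engine keyword arguments.
    options = {}
    for key, value in kwargs.items():
        if key.startswith(prefix):
            name = key[prefix_len:]
            options[name] = conversions.get(name, str)(value)
    return options

# e.g. a config entry database_engine_option_server_side_cursors = True
# ends up as {'server_side_cursors': True} passed to the engine.
print(parse_engine_options({'database_engine_option_server_side_cursors': 'True'}))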
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/821b6cbbee1c
changeset: 3498:821b6cbbee1c
user:      Dan Blankenberg <dan(a)bx.psu.edu>
date:      Tue Mar 09 14:28:33 2010 -0500
description:
Allow better testing of tools that dynamically create an unknown number of outputs.
diffstat:
 test/functional/test_toolbox.py |  11 ++++++++++-
 1 files changed, 10 insertions(+), 1 deletions(-)
diffs (28 lines):
diff -r a719c6971613 -r 821b6cbbee1c test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py	Tue Mar 09 14:13:05 2010 -0500
+++ b/test/functional/test_toolbox.py	Tue Mar 09 14:28:33 2010 -0500
@@ -53,6 +53,11 @@
             if isinstance( input_value, grouping.Repeat ):
                 repeat_name = input_name
                 break
+        #check if we need to verify number of outputs created dynamically by tool
+        if testdef.tool.force_history_refresh:
+            job_finish_by_output_count = len( self.get_history_as_data_list() )
+        else:
+            job_finish_by_output_count = False
         # Do the first page
         page_inputs =  self.__expand_grouping(testdef.tool.inputs_by_page[0], all_inputs)
         # Run the tool
@@ -65,7 +70,11 @@
             print "page_inputs (%i)" % i, page_inputs
         # Check the results ( handles single or multiple tool outputs ).  Make sure to pass the correct hid.
         # The output datasets from the tool should be in the same order as the testdef.outputs.
-        data_list = self.get_history_as_data_list()
+        data_list = None
+        while data_list is None:
+            data_list = self.get_history_as_data_list()
+            if job_finish_by_output_count and len( testdef.outputs ) > ( len( data_list ) - job_finish_by_output_count ):
+                data_list = None
         self.assertTrue( data_list )
         elem_index = 0 - len( testdef.outputs )
         for output_tuple in testdef.outputs:
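In other words: when a tool forces a history refresh and creates an unknown number of outputs, the test records the history size before running the tool and then polls until at least the expected number of new datasets has appeared. A simplified sketch of that waiting logic (the helper name and the timeout are hypothetical; the actual test loops on get_history_as_data_list() without a timeout):

import time

def wait_for_dynamic_outputs(get_history_size, baseline, expected_outputs,
                             poll_interval=1.0, timeout=60.0):
    # Poll until at least `expected_outputs` datasets have been added to the
    # history since `baseline`, or give up after `timeout` seconds.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if get_history_size() - baseline >= expected_outputs:
            return True
        time.sleep(poll_interval)
    return False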
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/478447ba0ec6
changeset: 3499:478447ba0ec6
user:      Dan Blankenberg <dan(a)bx.psu.edu>
date:      Tue Mar 09 14:29:17 2010 -0500
description:
Update tool tests for MAF to interval tool
diffstat:
 tools/maf/maf_to_interval.xml |  2 +-
 1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r 821b6cbbee1c -r 478447ba0ec6 tools/maf/maf_to_interval.xml
--- a/tools/maf/maf_to_interval.xml	Tue Mar 09 14:28:33 2010 -0500
+++ b/tools/maf/maf_to_interval.xml	Tue Mar 09 14:29:17 2010 -0500
@@ -27,7 +27,7 @@
       <param name="complete_blocks" value="partial_disallowed"/>
       <param name="remove_gaps" value="keep_gaps"/>
       <param name="species" value="panTro1" />
-      <!-- <output name="out_file1" file="maf_to_interval_out_hg17.interval"/> cannot test primary species, because we cannot leave species blank and we can only test the last item added to a history-->
+      <output name="out_file1" file="maf_to_interval_out_hg17.interval"/>
       <output name="out_file1" file="maf_to_interval_out_panTro1.interval"/>
     </test>
   </tests>
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/a719c6971613
changeset: 3497:a719c6971613
user:      Nate Coraor <nate(a)bx.psu.edu>
date:      Tue Mar 09 14:13:05 2010 -0500
description:
Update psycopg2 to 2.0.13/postgresql 8.4.2
diffstat:
 dist-eggs.ini                                |   2 +-
 eggs.ini                                     |   6 +-
 scripts/scramble/scripts/psycopg2-linux.py   |   1 +
 scripts/scramble/scripts/psycopg2-macosx.py  |  85 ++++++++++++++++++++++++++++
 scripts/scramble/scripts/psycopg2-solaris.py |   1 +
 scripts/scramble/scripts/psycopg2.py         |   1 +
 6 files changed, 92 insertions(+), 4 deletions(-)
diffs (162 lines):
diff -r 5f7ace3195b7 -r a719c6971613 dist-eggs.ini
--- a/dist-eggs.ini	Tue Mar 09 13:54:30 2010 -0500
+++ b/dist-eggs.ini	Tue Mar 09 14:13:05 2010 -0500
@@ -22,7 +22,7 @@
 py2.4-macosx-10.3-fat-ucs2 = medeski.bx.psu.edu /usr/local/bin/python2.4
 py2.5-macosx-10.3-fat-ucs2 = medeski.bx.psu.edu /usr/local/bin/python2.5
 py2.6-macosx-10.3-fat-ucs2 = medeski.bx.psu.edu /usr/local/bin/python2.6
-py2.6-macosx-10.6-universal-ucs2 = lion.bx.psu.edu /usr/bin/python2.6
+py2.6-macosx-10.6-universal-ucs2 = bach.bx.psu.edu /usr/bin/python2.6
 py2.4-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.4
 py2.5-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.5
 py2.6-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.6
diff -r 5f7ace3195b7 -r a719c6971613 eggs.ini
--- a/eggs.ini	Tue Mar 09 13:54:30 2010 -0500
+++ b/eggs.ini	Tue Mar 09 14:13:05 2010 -0500
@@ -17,7 +17,7 @@
 DRMAA_python = 0.2
 MySQL_python = 1.2.3c1
 pbs_python = 2.9.4
-psycopg2 = 2.0.6
+psycopg2 = 2.0.13
 pycrypto = 2.0.1
 pysam = 0.1.1
 pysqlite = 2.5.6
@@ -56,7 +56,7 @@
 
 ; extra version information
 [tags]
-psycopg2 = _8.2.6_static
+psycopg2 = _8.4.2_static
 pysqlite = _3.6.17_static
 MySQL_python = _5.1.41_static
 bx_python = _dev_3b9d30e47619
@@ -68,5 +68,5 @@
 ; the wiki page above
 [source]
 MySQL_python = mysql-5.1.41
-psycopg2 = postgresql-8.2.6
+psycopg2 = postgresql-8.4.2
 pysqlite = sqlite-amalgamation-3_6_17
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2-linux.py
--- a/scripts/scramble/scripts/psycopg2-linux.py	Tue Mar 09 13:54:30 2010 -0500
+++ b/scripts/scramble/scripts/psycopg2-linux.py	Tue Mar 09 14:13:05 2010 -0500
@@ -20,6 +20,7 @@
         "Configuring postgres (./configure)" )
 
     # compile
+    run( "make ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; make ../../src/include/utils/fmgroids.h)" )
     run( "make", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make)" )
     run( "make", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make)" )
 
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2-macosx.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/scramble/scripts/psycopg2-macosx.py	Tue Mar 09 14:13:05 2010 -0500
@@ -0,0 +1,85 @@
+import os, sys, shutil
+from distutils.sysconfig import get_config_var
+
+def prep_postgres( prepped, args ):
+
+    pg_version = args['version']
+    pg_srcdir = os.path.join( os.getcwd(), "postgresql-%s" % pg_version )
+
+    # set up environment
+    os.environ['CC'] = get_config_var('CC')
+    os.environ['CFLAGS'] = get_config_var('CFLAGS')
+    os.environ['LDFLAGS'] = get_config_var('LDFLAGS')
+
+    if '-fPIC' not in os.environ['CFLAGS']:
+        os.environ['CFLAGS'] += ' -fPIC'
+
+    # run configure
+    run( "./configure --prefix=%s/postgres --disable-dependency-tracking --enable-static --disable-shared --without-readline --with-thread-safety" % os.getcwd(),
+        os.path.join( os.getcwd(), "postgresql-%s" % pg_version ),
+        "Configuring postgres (./configure)" )
+
+    # compile
+    run( "make ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; make ../../src/include/utils/fmgroids.h)" )
+    run( "make all-static-lib", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make)" )
+    run( "make", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make)" )
+
+    # install
+    run( "make install-lib-static", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make install)" )
+    run( "make install", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make install)" )
+    run( "make install", os.path.join( pg_srcdir, 'src', 'include' ), "Compiling pg_config (cd src/include; make install)" )
+
+    # manually install some headers
+    run( "cp libpq-fe.h %s" % os.path.join( os.getcwd(), 'postgres', 'include' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+    run( "cp libpq-events.h %s" % os.path.join( os.getcwd(), 'postgres', 'include' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+    run( "cp libpq-int.h %s" % os.path.join( os.getcwd(), 'postgres', 'include', 'internal' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+    run( "cp pqexpbuffer.h %s" % os.path.join( os.getcwd(), 'postgres', 'include', 'internal' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+
+    # create prepped archive
+    print "%s(): Creating prepped archive for future builds at:" % sys._getframe().f_code.co_name
+    print " ", prepped
+    compress( prepped,
+           'postgres/bin',
+           'postgres/include',
+           'postgres/lib' )
+
+if __name__ == '__main__':
+
+    # change back to the build dir
+    if os.path.dirname( sys.argv[0] ) != "":
+        os.chdir( os.path.dirname( sys.argv[0] ) )
+
+    # find setuptools
+    sys.path.append( os.path.abspath( os.path.join( '..', '..', '..', 'lib' ) ) )
+    from scramble_lib import *
+
+    tag = get_tag()
+
+    pg_version = ( tag.split( "_" ) )[1]
+    pg_archive_base = os.path.join( archives, "postgresql-%s" % pg_version )
+    pg_archive = get_archive( pg_archive_base )
+    pg_archive_prepped = os.path.join( archives, "postgresql-%s-%s.tar.gz" % ( pg_version, platform_noucs ) )
+
+    # clean up any existing stuff (could happen if you run scramble.py by hand)
+    clean( [ 'postgresql-%s' % pg_version ] )
+
+    # unpack postgres
+    unpack_dep( pg_archive, pg_archive_prepped, prep_postgres, dict( version=pg_version ) )
+
+    # localize setup.cfg
+    if not os.path.exists( 'setup.cfg.orig' ):
+        shutil.copy( 'setup.cfg', 'setup.cfg.orig' )
+        f = open( 'setup.cfg', 'a' )
+        f.write( '\npg_config=postgres/bin/pg_config\n' )
+        f.close()
+
+    # tag
+    me = sys.argv[0]
+    sys.argv = [ me ]
+    if tag is not None:
+        sys.argv.append( "egg_info" )
+        sys.argv.append( "--tag-build=%s" %tag )
+    sys.argv.append( "bdist_egg" )
+
+    # go
+    execfile( "setup.py", globals(), locals() )
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2-solaris.py
--- a/scripts/scramble/scripts/psycopg2-solaris.py	Tue Mar 09 13:54:30 2010 -0500
+++ b/scripts/scramble/scripts/psycopg2-solaris.py	Tue Mar 09 14:13:05 2010 -0500
@@ -23,6 +23,7 @@
         "Configuring postgres (./configure)" )
 
     # compile
+    run( "gmake ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; gmake ../../src/include/utils/fmgroids.h)" )
     run( "gmake", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; gmake)" )
     run( "gmake", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; gmake)" )
 
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2.py
--- a/scripts/scramble/scripts/psycopg2.py	Tue Mar 09 13:54:30 2010 -0500
+++ b/scripts/scramble/scripts/psycopg2.py	Tue Mar 09 14:13:05 2010 -0500
@@ -20,6 +20,7 @@
         "Configuring postgres (./configure)" )
 
     # compile
+    run( "make ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; make ../../src/include/utils/fmgroids.h)" )
     run( "make", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make)" )
     run( "make", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make)" )
 
                    11 Mar '10
                    
                        details:   http://www.bx.psu.edu/hg/galaxy/rev/5f7ace3195b7
changeset: 3496:5f7ace3195b7
user:      Dan Blankenberg <dan(a)bx.psu.edu>
date:      Tue Mar 09 13:54:30 2010 -0500
description:
Allow uploading and use of composite files in toolbox tests. In addition to setting each component of a composite datatype, metadata attributes (e.g. base_name in Rgenetics datatypes) can be set.
Example Tool:
<tool id='temp' name='temp test'>
    <description>test</description>
    <command>cat ${i.extra_files_path}/${i.metadata.base_name}.fam > $out_file1</command>
    <inputs>
      <param name="i"  type="data" label="RGenetics genotype data from your current history" format="pbed" />
    </inputs>
   <outputs>
       <data format="text" name="out_file1" />
   </outputs>
<tests>
<test>
 <param name='i' ftype='pbed'>
    <metadata name='base_name' value='rgenetics_CHANGED' />
    <composite_data value='temp/somefile1' />
    <composite_data value='temp/somefile2' />
    <composite_data value='temp/somefile3' />
 </param>
 <output name='out_file1' file='temp/somefile3' />
 </test>
 </tests>
</tool>
diffstat:
 lib/galaxy/tools/__init__.py    |   1 +
 lib/galaxy/tools/test.py        |   4 +++-
 test/base/twilltestcase.py      |  16 ++++++++++++----
 test/functional/test_toolbox.py |   5 ++++-
 4 files changed, 20 insertions(+), 6 deletions(-)
diffs (73 lines):
diff -r 3b1be99d1f62 -r 5f7ace3195b7 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py	Tue Mar 09 11:28:21 2010 -0500
+++ b/lib/galaxy/tools/__init__.py	Tue Mar 09 13:54:30 2010 -0500
@@ -523,6 +523,7 @@
                         value = attrib['value']
                     else:
                         value = None
+                    attrib['children'] = list( param_elem.getchildren() )
                     test.add_param( attrib.pop( 'name' ), value, attrib )
                 for output_elem in test_elem.findall( "output" ):
                     attrib = dict( output_elem.attrib )
diff -r 3b1be99d1f62 -r 5f7ace3195b7 lib/galaxy/tools/test.py
--- a/lib/galaxy/tools/test.py	Tue Mar 09 11:28:21 2010 -0500
+++ b/lib/galaxy/tools/test.py	Tue Mar 09 13:54:30 2010 -0500
@@ -30,10 +30,12 @@
                     if isinstance( input_value, grouping.Conditional ) or isinstance( input_value, grouping.Repeat ):
                         self.__expand_grouping_for_data_input(name, value, extra, input_name, input_value)
             elif isinstance( self.tool.inputs[name], parameters.DataToolParameter ) and ( value, extra ) not in self.required_files:
-                if value is None:
+                if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) == 0:
                     assert self.tool.inputs[name].optional, '%s is not optional. You must provide a valid filename.' % name
                 else:
                     self.required_files.append( ( value, extra ) )
+                    if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) > 0:
+                        value = extra.get( 'ftype' )
         except Exception, e:
             log.debug( "Error in add_param for %s: %s" % ( name, e ) )
         self.inputs.append( ( name, value, extra ) )
diff -r 3b1be99d1f62 -r 5f7ace3195b7 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py	Tue Mar 09 11:28:21 2010 -0500
+++ b/test/base/twilltestcase.py	Tue Mar 09 13:54:30 2010 -0500
@@ -142,14 +142,22 @@
         filename = os.path.join( *path )
         file(filename, 'wt').write(buffer.getvalue())
 
-    def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)' ):
+    def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)', metadata = None, composite_data = None ):
         """Uploads a file"""
-        filename = self.get_filename(filename)
         self.visit_url( "%s/tool_runner?tool_id=upload1" % self.url )
         try: 
-            tc.fv("1","file_type", ftype)
+            self.refresh_form( "file_type", ftype ) #Refresh, to support composite files
             tc.fv("1","dbkey", dbkey)
-            tc.formfile("1","file_data", filename)
+            if metadata:
+                for elem in metadata:
+                    tc.fv( "1", "files_metadata|%s" % elem.get( 'name' ), elem.get( 'value' ) )
+            if composite_data:
+                for i, composite_file in enumerate( composite_data ):
+                    filename = self.get_filename( composite_file.get( 'value' ) )
+                    tc.formfile( "1", "files_%i|file_data" % i, filename )
+            else:
+                filename = self.get_filename( filename )
+                tc.formfile( "1", "file_data", filename )
             tc.submit("runtool_btn")
             self.home()
         except AssertionError, err:
diff -r 3b1be99d1f62 -r 5f7ace3195b7 test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py	Tue Mar 09 11:28:21 2010 -0500
+++ b/test/functional/test_toolbox.py	Tue Mar 09 13:54:30 2010 -0500
@@ -32,7 +32,10 @@
             raise AssertionError("ToolTestCase.do_it failed")
         # Upload any needed files
         for fname, extra in testdef.required_files:
-            self.upload_file( fname, ftype=extra.get( 'ftype', 'auto' ), dbkey=extra.get( 'dbkey', 'hg17' ) )
+            children = extra.get( 'children', [] )
+            metadata = [ child for child in children if child.tag == 'metadata' ]
+            composite_data = [ child for child in children if child.tag == 'composite_data' ]
+            self.upload_file( fname, ftype=extra.get( 'ftype', 'auto' ), dbkey=extra.get( 'dbkey', 'hg17' ), metadata = metadata, composite_data = composite_data )
             print "Uploaded file: ", fname, ", ftype: ", extra.get( 'ftype', 'auto' ), ", extra: ", extra
         # We need to handle the case where we've uploaded a valid compressed file since the upload
         # tool will have uncompressed it on the fly.