galaxy-commits
Threads by month
- ----- 2025 -----
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
May 2010
- 2 participants
- 158 discussions
details: http://www.bx.psu.edu/hg/galaxy/rev/7ae394334e5c
changeset: 3690:7ae394334e5c
user: Nate Coraor <nate(a)bx.psu.edu>
date: Fri Apr 23 16:32:40 2010 -0400
description:
Added missing templates
diffstat:
templates/webapps/community/admin/category/category_create.mako | 35 ++
templates/webapps/community/admin/category/category_rename.mako | 44 +++
templates/webapps/community/admin/category/grid.mako | 1 +
templates/webapps/community/admin/category/role.mako | 118 ++++++++++
4 files changed, 198 insertions(+), 0 deletions(-)
diffs (214 lines):
diff -r a66d849924d2 -r 7ae394334e5c templates/webapps/community/admin/category/category_create.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/webapps/community/admin/category/category_create.mako Fri Apr 23 16:32:40 2010 -0400
@@ -0,0 +1,35 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ <script type="text/javascript">
+ $(function(){
+ $("input:text:first").focus();
+ })
+ </script>
+</%def>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">Create Role</div>
+ <div class="toolFormBody">
+ <form name="create_category_form" id="create_category_form" action="${h.url_for( action='create_category' )}" method="post" >
+ <div class="form-row">
+ <input name="webapp" type="hidden" value="${webapp}" size=40"/>
+ <label>Name:</label>
+ <input name="name" type="textfield" value="" size=40"/>
+ </div>
+ <div class="form-row">
+ <label>Description:</label>
+ <input name="description" type="textfield" value="" size=40"/>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="create_category_button" value="Save"/>
+ </div>
+ </form>
+ </div>
+</div>
diff -r a66d849924d2 -r 7ae394334e5c templates/webapps/community/admin/category/category_rename.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/webapps/community/admin/category/category_rename.mako Fri Apr 23 16:32:40 2010 -0400
@@ -0,0 +1,44 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">Change category name and description</div>
+ <div class="toolFormBody">
+ <form name="library" action="${h.url_for( controller='admin', action='rename_category' )}" method="post" >
+ <div class="form-row">
+ <input name="webapp" type="hidden" value="${webapp}" size=40"/>
+ <label>Name:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <input type="text" name="name" value="${category.name}" size="40"/>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Description:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <input name="description" type="textfield" value="${category.description}" size=40"/>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <input type="hidden" name="rename" value="submitted"/>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <input type="hidden" name="id" value="${trans.security.encode_id( category.id )}"/>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="rename_category_button" value="Save"/>
+ </div>
+ </form>
+ </div>
+</div>
diff -r a66d849924d2 -r 7ae394334e5c templates/webapps/community/admin/category/grid.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/webapps/community/admin/category/grid.mako Fri Apr 23 16:32:40 2010 -0400
@@ -0,0 +1,1 @@
+<%inherit file="/grid_base.mako"/>
diff -r a66d849924d2 -r 7ae394334e5c templates/webapps/community/admin/category/role.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/webapps/community/admin/category/role.mako Fri Apr 23 16:32:40 2010 -0400
@@ -0,0 +1,118 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ <script type="text/javascript">
+ $(function(){
+ $("input:text:first").focus();
+ })
+ </script>
+</%def>
+
+<%def name="render_select( name, options )">
+ <select name="${name}" id="${name}" style="min-width: 250px; height: 150px;" multiple>
+ %for option in options:
+ <option value="${option[0]}">${option[1]}</option>
+ %endfor
+ </select>
+</%def>
+
+<script type="text/javascript">
+$().ready(function() {
+ $('#users_add_button').click(function() {
+ return !$('#out_users option:selected').remove().appendTo('#in_users');
+ });
+ $('#users_remove_button').click(function() {
+ return !$('#in_users option:selected').remove().appendTo('#out_users');
+ });
+ $('#groups_add_button').click(function() {
+ return !$('#out_groups option:selected').remove().appendTo('#in_groups');
+ });
+ $('#groups_remove_button').click(function() {
+ return !$('#in_groups option:selected').remove().appendTo('#out_groups');
+ });
+ $('form#associate_role_user_group').submit(function() {
+ $('#in_users option').each(function(i) {
+ $(this).attr("selected", "selected");
+ });
+ $('#in_groups option').each(function(i) {
+ $(this).attr("selected", "selected");
+ });
+ });
+});
+</script>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">Role '${role.name}'</div>
+ <div class="toolFormBody">
+ <form name="associate_role_user_group" id="associate_role_user_group" action="${h.url_for( action='manage_users_and_groups_for_role', id=trans.security.encode_id( role.id ) )}" method="post" >
+ <input name="webapp" type="hidden" value="${webapp}" size=40"/>
+ <div class="form-row">
+ <div style="float: left; margin-right: 10px;">
+ <label>Users associated with '${role.name}'</label>
+ ${render_select( "in_users", in_users )}<br/>
+ <input type="submit" id="users_remove_button" value=">>"/>
+ </div>
+ <div>
+ <label>Users not associated with '${role.name}'</label>
+ ${render_select( "out_users", out_users )}<br/>
+ <input type="submit" id="users_add_button" value="<<"/>
+ </div>
+ </div>
+ <div class="form-row">
+ <div style="float: left; margin-right: 10px;">
+ <label>Groups associated with '${role.name}'</label>
+ ${render_select( "in_groups", in_groups )}<br/>
+ <input type="submit" id="groups_remove_button" value=">>"/>
+ </div>
+ <div>
+ <label>Groups not associated with '${role.name}'</label>
+ ${render_select( "out_groups", out_groups )}<br/>
+ <input type="submit" id="groups_add_button" value="<<"/>
+ </div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="role_members_edit_button" value="Save"/>
+ </div>
+ </form>
+ </div>
+</div>
+<br clear="left"/>
+<br/>
+%if len( library_dataset_actions ) > 0:
+ <h3>Data library datasets associated with role '${role.name}'</h3>
+ <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+ <tr>
+ <td>
+ <ul>
+ %for ctr, library, in enumerate( library_dataset_actions.keys() ):
+ <li>
+ <img src="${h.url_for( '/static/images/silk/book_open.png' )}" class="rowIcon"/>
+ ${library.name}
+ <ul>
+ %for folder_path, permissions in library_dataset_actions[ library ].items():
+ <li>
+ <img src="/static/images/silk/folder_page.png" class="rowIcon"/>
+ ${folder_path}
+ <ul>
+ % for permission in permissions:
+ <ul>
+ <li>${permission}</li>
+ </ul>
+ %endfor
+ </ul>
+ </li>
+ %endfor
+ </ul>
+ </li>
+ %endfor
+ </ul>
+ </td>
+ </tr>
+ </table>
+%endif
1
0

10 May '10
details: http://www.bx.psu.edu/hg/galaxy/rev/a66d849924d2
changeset: 3689:a66d849924d2
user: Nate Coraor <nate(a)bx.psu.edu>
date: Fri Apr 23 16:24:49 2010 -0400
description:
Move setting categories to the Tool class in the model
diffstat:
lib/galaxy/webapps/community/controllers/tool_browser.py | 17 +---------------
lib/galaxy/webapps/community/model/__init__.py | 9 ++++++++
2 files changed, 10 insertions(+), 16 deletions(-)
diffs (50 lines):
diff -r 87cee993fa2d -r a66d849924d2 lib/galaxy/webapps/community/controllers/tool_browser.py
--- a/lib/galaxy/webapps/community/controllers/tool_browser.py Fri Apr 23 16:11:55 2010 -0400
+++ b/lib/galaxy/webapps/community/controllers/tool_browser.py Fri Apr 23 16:24:49 2010 -0400
@@ -117,7 +117,7 @@
elif params.save_button:
tool.user_description = util.restore_text( params.description )
categories = []
- set_tool_category_associations( trans, tool, util.listify( params.category ) )
+ tool.set_categories( trans, util.listify( params.category ) )
trans.sa_session.add( tool )
trans.sa_session.flush()
return trans.response.send_redirect( web.url_for( controller='tool_browser',
@@ -131,18 +131,3 @@
categories=categories,
message=message,
status=status )
-
-## ---- Utility methods -------------------------------------------------------
-
-# It may make sense to create something like the security controller to do
-# this, but seems unnecessary for this single operation
-
-def set_tool_category_associations( trans, tool, categories, delete_existing_assocs=True ):
- if delete_existing_assocs:
- for a in tool.categories:
- trans.sa_session.delete( a )
- trans.sa_session.flush()
- for category in categories:
- if not isinstance( category, trans.model.Category ):
- category = trans.sa_session.query( trans.model.Category ).get( int( category ) )
- tool.categories.append( trans.model.ToolCategoryAssociation( tool, category ) )
diff -r 87cee993fa2d -r a66d849924d2 lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py Fri Apr 23 16:11:55 2010 -0400
+++ b/lib/galaxy/webapps/community/model/__init__.py Fri Apr 23 16:24:49 2010 -0400
@@ -123,6 +123,15 @@
self.description = datatype_bunch.description
self.version = datatype_bunch.version
self.user_id = datatype_bunch.user.id
+ def set_categories( self, trans, categories, delete_existing_assocs=True ):
+ if delete_existing_assocs:
+ for a in self.categories:
+ trans.sa_session.delete( a )
+ trans.sa_session.flush()
+ for category in categories:
+ if not isinstance( category, Category ):
+ category = trans.sa_session.query( Category ).get( int( category ) )
+ self.categories.append( ToolCategoryAssociation( self, category ) )
class Tag ( object ):
def __init__( self, id=None, type=None, parent_id=None, name=None ):
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/87cee993fa2d
changeset: 3688:87cee993fa2d
user: Nate Coraor <nate(a)bx.psu.edu>
date: Fri Apr 23 16:11:55 2010 -0400
description:
Add categories to the community app
diffstat:
lib/galaxy/webapps/community/controllers/admin.py | 275 ++++++++++
lib/galaxy/webapps/community/controllers/tool_browser.py | 26 +-
lib/galaxy/webapps/community/controllers/upload.py | 7 +-
lib/galaxy/webapps/community/model/__init__.py | 7 +-
lib/galaxy/webapps/community/model/mapping.py | 3 +-
lib/galaxy/webapps/community/model/migrate/versions/0001_initial_tables.py | 3 +-
templates/webapps/community/admin/index.mako | 9 +
templates/webapps/community/base_panels.mako | 2 +-
templates/webapps/community/tool/edit_tool.mako | 4 +-
9 files changed, 323 insertions(+), 13 deletions(-)
diffs (452 lines):
diff -r 318dc4410301 -r 87cee993fa2d lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py Fri Apr 23 15:31:52 2010 -0400
+++ b/lib/galaxy/webapps/community/controllers/admin.py Fri Apr 23 16:11:55 2010 -0400
@@ -277,8 +277,283 @@
def build_initial_query( self, session ):
return session.query( self.model_class )
+class CategoryListGrid( grids.Grid ):
+ class NameColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, category ):
+ return category.name
+ class DescriptionColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, category ):
+ return category.description
+ class StatusColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, category ):
+ if category.deleted:
+ return "deleted"
+ return ""
+
+ # Grid definition
+ webapp = "community"
+ title = "Categories"
+ model_class = model.Category
+ template='/webapps/community/admin/category/grid.mako'
+ default_sort_key = "name"
+ columns = [
+ NameColumn( "Name",
+ key="name",
+ link=( lambda item: dict( operation="Edit category", id=item.id, webapp="community" ) ),
+ model_class=model.Category,
+ attach_popup=True,
+ filterable="advanced" ),
+ DescriptionColumn( "Description", attach_popup=False ),
+ StatusColumn( "Status", attach_popup=False ),
+ # Columns that are valid for filtering but are not visible.
+ grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+ ]
+ columns.append( grids.MulticolFilterColumn( "Search",
+ cols_to_filter=[ columns[0], columns[1], columns[2] ],
+ key="free-text-search",
+ visible=False,
+ filterable="standard" ) )
+ global_actions = [
+ grids.GridAction( "Add new category",
+ dict( controller='admin', action='categories', operation='create', webapp="community" ) )
+ ]
+ operations = [ grids.GridOperation( "Rename",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=False,
+ url_args=dict( webapp="community", action="rename_category" ) ),
+ grids.GridOperation( "Delete",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=True,
+ url_args=dict( webapp="community", action="mark_category_deleted" ) ),
+ grids.GridOperation( "Undelete",
+ condition=( lambda item: item.deleted ),
+ allow_multiple=True,
+ url_args=dict( webapp="community", action="undelete_category" ) ),
+ grids.GridOperation( "Purge",
+ condition=( lambda item: item.deleted ),
+ allow_multiple=True,
+ url_args=dict( webapp="community", action="purge_category" ) ) ]
+ standard_filters = [
+ grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
+ grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
+ grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
+ ]
+ num_rows_per_page = 50
+ preserve_state = False
+ use_paging = True
+ def get_current_item( self, trans ):
+ return None
+ def build_initial_query( self, session ):
+ return session.query( self.model_class )
+
class AdminCommunity( BaseController, Admin ):
user_list_grid = UserListGrid()
role_list_grid = RoleListGrid()
group_list_grid = GroupListGrid()
+ category_list_grid = CategoryListGrid()
+
+ @web.expose
+ @web.require_admin
+ def categories( self, trans, **kwargs ):
+ if 'operation' in kwargs:
+ operation = kwargs['operation'].lower()
+ if operation == "create":
+ return self.create_category( trans, **kwargs )
+ if operation == "delete":
+ return self.mark_category_deleted( trans, **kwargs )
+ if operation == "undelete":
+ return self.undelete_category( trans, **kwargs )
+ if operation == "purge":
+ return self.purge_category( trans, **kwargs )
+ if operation == "rename":
+ return self.rename_category( trans, **kwargs )
+ # Render the list view
+ return self.category_list_grid( trans, **kwargs )
+
+ @web.expose
+ @web.require_admin
+ def create_category( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'community' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ if params.get( 'create_category_button', False ):
+ name = util.restore_text( params.name )
+ description = util.restore_text( params.description )
+ if not name or not description:
+ message = "Enter a valid name and a description"
+ elif trans.sa_session.query( trans.app.model.Category ).filter( trans.app.model.Category.table.c.name==name ).first():
+ message = "A category with that name already exists"
+ else:
+ # Create the category
+ category = trans.app.model.Category( name=name, description=description )
+ trans.sa_session.add( category )
+ message = "Category '%s' has been created" % category.name
+ trans.sa_session.flush()
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='categories',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='create_category',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ return trans.fill_template( '/webapps/community/admin/category/category_create.mako',
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def rename_category( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = params.get( 'id', None )
+ if not id:
+ message = "No category ids received for renaming"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='categories',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ category = get_category( trans, id )
+ if params.get( 'rename_category_button', False ):
+ old_name = category.name
+ new_name = util.restore_text( params.name )
+ new_description = util.restore_text( params.description )
+ if not new_name:
+ message = 'Enter a valid name'
+ status = 'error'
+ elif trans.sa_session.query( trans.app.model.Category ).filter( trans.app.model.Category.table.c.name==new_name ).first():
+ message = 'A category with that name already exists'
+ status = 'error'
+ else:
+ category.name = new_name
+ category.description = new_description
+ trans.sa_session.add( category )
+ trans.sa_session.flush()
+ message = "Category '%s' has been renamed to '%s'" % ( old_name, new_name )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='categories',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ return trans.fill_template( '/webapps/community/admin/category/category_rename.mako',
+ category=category,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def mark_category_deleted( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No category ids received for deleting"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='categories',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ ids = util.listify( id )
+ message = "Deleted %d categories: " % len( ids )
+ for category_id in ids:
+ category = get_category( trans, category_id )
+ category.deleted = True
+ trans.sa_session.add( category )
+ trans.sa_session.flush()
+ message += " %s " % category.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='categories',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
+ def undelete_category( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No category ids received for undeleting"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='categories',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ ids = util.listify( id )
+ count = 0
+ undeleted_categories = ""
+ for category_id in ids:
+ category = get_category( trans, category_id )
+ if not category.deleted:
+ message = "Category '%s' has not been deleted, so it cannot be undeleted." % category.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='categories',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ category.deleted = False
+ trans.sa_session.add( category )
+ trans.sa_session.flush()
+ count += 1
+ undeleted_categories += " %s" % category.name
+ message = "Undeleted %d categories: %s" % ( count, undeleted_categories )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='categories',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
+ def purge_category( self, trans, **kwd ):
+ # This method should only be called for a Category that has previously been deleted.
+ # Purging a deleted Category deletes all of the following from the database:
+ # - ToolCategoryAssociations where category_id == Category.id
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ if not id:
+ message = "No category ids received for purging"
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='categories',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ ids = util.listify( id )
+ message = "Purged %d categories: " % len( ids )
+ for category_id in ids:
+ category = get_category( trans, category_id )
+ if not category.deleted:
+ message = "Category '%s' has not been deleted, so it cannot be purged." % category.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='categories',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='error' ) )
+ # Delete ToolCategoryAssociations
+ for tca in category.tools:
+ trans.sa_session.delete( tca )
+ trans.sa_session.flush()
+ message += " %s " % category.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='categories',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+
+## ---- Utility methods -------------------------------------------------------
+
+def get_category( trans, id ):
+ """Get a User from the database by id."""
+ # Load user from database
+ id = trans.security.decode_id( id )
+ category = trans.sa_session.query( trans.model.Category ).get( id )
+ if not category:
+ return trans.show_error_message( "Category not found for id (%s)" % str( id ) )
+ return category
diff -r 318dc4410301 -r 87cee993fa2d lib/galaxy/webapps/community/controllers/tool_browser.py
--- a/lib/galaxy/webapps/community/controllers/tool_browser.py Fri Apr 23 15:31:52 2010 -0400
+++ b/lib/galaxy/webapps/community/controllers/tool_browser.py Fri Apr 23 16:11:55 2010 -0400
@@ -115,8 +115,15 @@
message = 'Uploading new version not implemented'
status = 'error'
elif params.save_button:
- tool.user_description = params.description
- tool.category = params.category
+ tool.user_description = util.restore_text( params.description )
+ categories = []
+ set_tool_category_associations( trans, tool, util.listify( params.category ) )
+ trans.sa_session.add( tool )
+ trans.sa_session.flush()
+ return trans.response.send_redirect( web.url_for( controller='tool_browser',
+ action='browse_tools',
+ message='Saved categories and description for %s' % tool.name,
+ status='done' ) )
categories = trans.sa_session.query( trans.model.Category ).order_by( trans.model.Category.table.c.name ).all()
return trans.fill_template( '/webapps/community/tool/edit_tool.mako',
encoded_id = encoded_id,
@@ -124,3 +131,18 @@
categories=categories,
message=message,
status=status )
+
+## ---- Utility methods -------------------------------------------------------
+
+# It may make sense to create something like the security controller to do
+# this, but seems unnecessary for this single operation
+
+def set_tool_category_associations( trans, tool, categories, delete_existing_assocs=True ):
+ if delete_existing_assocs:
+ for a in tool.categories:
+ trans.sa_session.delete( a )
+ trans.sa_session.flush()
+ for category in categories:
+ if not isinstance( category, trans.model.Category ):
+ category = trans.sa_session.query( trans.model.Category ).get( int( category ) )
+ tool.categories.append( trans.model.ToolCategoryAssociation( tool, category ) )
diff -r 318dc4410301 -r 87cee993fa2d lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py Fri Apr 23 15:31:52 2010 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py Fri Apr 23 16:11:55 2010 -0400
@@ -49,12 +49,15 @@
os.link( uploaded_file.name, obj.file_name )
except OSError:
shutil.copy( uploaded_file.name, obj.file_name )
- message = 'Uploaded %s' % meta.message
+ return trans.response.send_redirect( web.url_for( controller='tool_browser',
+ action='edit_tool',
+ message='Uploaded %s' % meta.message,
+ status='done' ) )
except datatypes.DatatypeVerificationError, e:
message = str( e )
status = 'error'
except sqlalchemy.exc.IntegrityError:
- message = 'A tool with the same ID already exists. If you are trying to update this tool to a new version, please ... ??? ... Otherwise, please choose a new ID.'
+ message = 'A tool with the same ID already exists. If you are trying to update this tool to a new version, please use the upload form on the "Edit Tool" page. Otherwise, please choose a new ID.'
status = 'error'
uploaded_file.close()
selected_upload_type = params.get( 'type', 'tool' )
diff -r 318dc4410301 -r 87cee993fa2d lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py Fri Apr 23 15:31:52 2010 -0400
+++ b/lib/galaxy/webapps/community/model/__init__.py Fri Apr 23 16:11:55 2010 -0400
@@ -134,10 +134,10 @@
return "Tag(id=%s, type=%i, parent_id=%s, name=%s)" % ( self.id, self.type, self.parent_id, self.name )
class Category( object ):
- def __init__( self, id=None, name=None, description=None ):
- self.id = id
+ def __init__( self, name=None, description=None, deleted=False ):
self.name = name
self.description = description
+ self.deleted = deleted
class ItemTagAssociation ( object ):
def __init__( self, id=None, user=None, item_id=None, tag_id=None, user_tname=None, value=None ):
@@ -156,8 +156,7 @@
pass
class ToolCategoryAssociation( object ):
- def __init__( self, id=None, tool=None, category=None ):
- self.id = id
+ def __init__( self, tool=None, category=None ):
self.tool = tool
self.category = category
diff -r 318dc4410301 -r 87cee993fa2d lib/galaxy/webapps/community/model/mapping.py
--- a/lib/galaxy/webapps/community/model/mapping.py Fri Apr 23 15:31:52 2010 -0400
+++ b/lib/galaxy/webapps/community/model/mapping.py Fri Apr 23 16:11:55 2010 -0400
@@ -119,7 +119,8 @@
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), index=True, unique=True ),
- Column( "description" , TEXT ) )
+ Column( "description" , TEXT ),
+ Column( "deleted", Boolean, index=True, default=False ) )
ToolCategoryAssociation.table = Table( "tool_category_association", metadata,
Column( "id", Integer, primary_key=True ),
diff -r 318dc4410301 -r 87cee993fa2d lib/galaxy/webapps/community/model/migrate/versions/0001_initial_tables.py
--- a/lib/galaxy/webapps/community/model/migrate/versions/0001_initial_tables.py Fri Apr 23 15:31:52 2010 -0400
+++ b/lib/galaxy/webapps/community/model/migrate/versions/0001_initial_tables.py Fri Apr 23 16:11:55 2010 -0400
@@ -96,7 +96,8 @@
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), index=True, unique=True ),
- Column( "description" , TEXT ) )
+ Column( "description" , TEXT ),
+ Column( "deleted", Boolean, index=True, default=False ) )
ToolCategoryAssociation_table = Table( "tool_category_association", metadata,
Column( "id", Integer, primary_key=True ),
diff -r 318dc4410301 -r 87cee993fa2d templates/webapps/community/admin/index.mako
--- a/templates/webapps/community/admin/index.mako Fri Apr 23 15:31:52 2010 -0400
+++ b/templates/webapps/community/admin/index.mako Fri Apr 23 16:11:55 2010 -0400
@@ -88,6 +88,15 @@
<div class="toolTitle"><a href="${h.url_for( controller='tool_browser', action='browse_tools', webapp='community' )}" target="galaxy_main">Manage tools</a></div>
</div>
</div>
+ <div class="toolSectionPad"></div>
+ <div class="toolSectionTitle">
+ <span>Community</span>
+ </div>
+ <div class="toolSectionBody">
+ <div class="toolSectionBg">
+ <div class="toolTitle"><a href="${h.url_for( controller='admin', action='categories', webapp='community' )}" target="galaxy_main">Manage categories</a></div>
+ </div>
+ </div>
</div>
</div>
</div>
diff -r 318dc4410301 -r 87cee993fa2d templates/webapps/community/base_panels.mako
--- a/templates/webapps/community/base_panels.mako Fri Apr 23 15:31:52 2010 -0400
+++ b/templates/webapps/community/base_panels.mako Fri Apr 23 16:11:55 2010 -0400
@@ -92,7 +92,7 @@
<div class="title" style="position: absolute; top: 0; left: 0;">
<a href="${app.config.get( 'logo_url', '/' )}">
<img border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}" style="width: 26px; vertical-align: top;">
- Galaxy
+ Galaxy Community
%if app.config.brand:
<span class='brand'>/ ${app.config.brand}</span>
%endif
diff -r 318dc4410301 -r 87cee993fa2d templates/webapps/community/tool/edit_tool.mako
--- a/templates/webapps/community/tool/edit_tool.mako Fri Apr 23 15:31:52 2010 -0400
+++ b/templates/webapps/community/tool/edit_tool.mako Fri Apr 23 16:11:55 2010 -0400
@@ -25,7 +25,7 @@
<div class="form-row">
<label>Categories:</label>
<div class="form-row-input">
- <select name="category" multiple size=5>
+ <select name="category" multiple size=5 style="min-width: 250px;">
%for category in categories:
%if category.id in [ tool_category.id for tool_category in tool.categories ]:
<option value="${category.id}" selected>${category.name}</option>
@@ -39,7 +39,7 @@
</div>
<div class="form-row">
<label>Description:</label>
- <div class="form-row-input"><textarea name="description" rows="5" cols="35"></textarea></div>
+ <div class="form-row-input"><textarea name="description" rows="5" cols="35">${tool.user_description}</textarea></div>
<div style="clear: both"></div>
</div>
<div class="form-row">
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/318dc4410301
changeset: 3687:318dc4410301
user: rc
date: Fri Apr 23 15:31:52 2010 -0400
description:
lims:
- ui cleanup
- fixed a functional test bug
diffstat:
templates/admin/requests/get_data.mako | 5 ++---
test/functional/test_forms_and_requests.py | 15 ++++++++++-----
2 files changed, 12 insertions(+), 8 deletions(-)
diffs (44 lines):
diff -r f6e86e26cfe2 -r 318dc4410301 templates/admin/requests/get_data.mako
--- a/templates/admin/requests/get_data.mako Fri Apr 23 15:16:24 2010 -0400
+++ b/templates/admin/requests/get_data.mako Fri Apr 23 15:31:52 2010 -0400
@@ -113,9 +113,8 @@
</div>
<div class="form-row">
<div class="toolParamHelp" style="clear: both;">
- After clicking <b>Transfer</b> do <i>not</i> close this page or
- navigate away from this page. Once the transfer is complete
- the dataset(s) will show up on this page.
+ After selecting dataset(s), be sure to click on the <b>Start transfer</b> button.
+ Once the transfer is complete the dataset(s) will show up on this page.
</div>
<input type="submit" name="select_files_button" value="Select"/>
</div>
diff -r f6e86e26cfe2 -r 318dc4410301 test/functional/test_forms_and_requests.py
--- a/test/functional/test_forms_and_requests.py Fri Apr 23 15:16:24 2010 -0400
+++ b/test/functional/test_forms_and_requests.py Fri Apr 23 15:31:52 2010 -0400
@@ -372,15 +372,20 @@
% ( request_two.name, request_two.states.REJECTED )
def test_055_reset_data_for_later_test_runs( self ):
"""Reseting data to enable later test runs to pass"""
- # TODO: RC: add whatever is missing from this method that should be marked
- # deleted or purged so that later test runs will correctly test features if the
- # database has not be purged.
- #
# Logged in as admin_user
+ # remove the request_type permissions
+ rt_actions = sa_session.query( galaxy.model.RequestTypePermissions ) \
+ .filter(and_(galaxy.model.RequestTypePermissions.table.c.request_type_id==request_type.id) ) \
+ .order_by( desc( galaxy.model.RequestTypePermissions.table.c.create_time ) ) \
+ .all()
+ for a in rt_actions:
+ sa_session.delete( a )
+ sa_session.flush()
+
##################
# Eliminate all non-private roles
##################
- for role in [ role_one ]:
+ for role in [ role_one, role_two ]:
self.mark_role_deleted( self.security.encode_id( role.id ), role.name )
self.purge_role( self.security.encode_id( role.id ), role.name )
# Manually delete the role from the database
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/f6e86e26cfe2
changeset: 3686:f6e86e26cfe2
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Apr 23 15:16:24 2010 -0400
description:
Miscellaneous community space fixes.
diffstat:
lib/galaxy/web/base/controller.py | 6 +-
lib/galaxy/web/framework/__init__.py | 6 +-
lib/galaxy/web/security/__init__.py | 6 +-
lib/galaxy/webapps/community/controllers/admin.py | 1 -
lib/galaxy/webapps/community/controllers/upload.py | 10 +-
lib/galaxy/webapps/community/model/__init__.py | 7 +-
setup.sh | 2 +-
templates/admin/center.mako | 187 ---------------------
templates/webapps/community/admin/center.mako | 41 ++++
templates/webapps/community/admin/index.mako | 20 +-
templates/webapps/galaxy/admin/center.mako | 187 +++++++++++++++++++++
templates/webapps/galaxy/admin/index.mako | 2 +-
12 files changed, 265 insertions(+), 210 deletions(-)
diffs (620 lines):
diff -r 34eec4d48cc4 -r f6e86e26cfe2 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py Fri Apr 23 13:16:40 2010 -0400
+++ b/lib/galaxy/web/base/controller.py Fri Apr 23 15:16:24 2010 -0400
@@ -330,7 +330,11 @@
@web.expose
@web.require_admin
def center( self, trans, **kwd ):
- return trans.fill_template( '/admin/center.mako' )
+ webapp = kwd.get( 'webapp', 'galaxy' )
+ if webapp == 'galaxy':
+ return trans.fill_template( '/webapps/galaxy/admin/center.mako' )
+ else:
+ return trans.fill_template( '/webapps/community/admin/center.mako' )
@web.expose
@web.require_admin
def reload_tool( self, trans, **kwd ):
diff -r 34eec4d48cc4 -r f6e86e26cfe2 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Fri Apr 23 13:16:40 2010 -0400
+++ b/lib/galaxy/web/framework/__init__.py Fri Apr 23 15:16:24 2010 -0400
@@ -268,7 +268,7 @@
galaxy_session_requires_flush = False
if secure_id:
# Decode the cookie value to get the session_key
- session_key = self.security.decode_session_key( secure_id )
+ session_key = self.security.decode_guid( secure_id )
try:
# Make sure we have a valid UTF-8 string
session_key = session_key.encode( 'utf8' )
@@ -365,7 +365,7 @@
Caller is responsible for flushing the returned session.
"""
- session_key = self.security.get_new_session_key()
+ session_key = self.security.get_new_guid()
galaxy_session = self.app.model.GalaxySession(
session_key=session_key,
is_valid=True,
@@ -411,7 +411,7 @@
"""
Update the session cookie to match the current session.
"""
- self.set_cookie( self.security.encode_session_key( self.galaxy_session.session_key ), name=name, path=self.app.config.cookie_path )
+ self.set_cookie( self.security.encode_guid( self.galaxy_session.session_key ), name=name, path=self.app.config.cookie_path )
def handle_user_login( self, user, webapp ):
"""
Login a new user (possibly newly created)
diff -r 34eec4d48cc4 -r f6e86e26cfe2 lib/galaxy/web/security/__init__.py
--- a/lib/galaxy/web/security/__init__.py Fri Apr 23 13:16:40 2010 -0400
+++ b/lib/galaxy/web/security/__init__.py Fri Apr 23 15:16:24 2010 -0400
@@ -43,16 +43,16 @@
return self.id_cipher.encrypt( s ).encode( 'hex' )
def decode_id( self, obj_id ):
return int( self.id_cipher.decrypt( obj_id.decode( 'hex' ) ).lstrip( "!" ) )
- def encode_session_key( self, session_key ):
+ def encode_guid( self, session_key ):
# Session keys are strings
# Pad to a multiple of 8 with leading "!"
s = ( "!" * ( 8 - len( session_key ) % 8 ) ) + session_key
# Encrypt
return self.id_cipher.encrypt( s ).encode( 'hex' )
- def decode_session_key( self, session_key ):
+ def decode_guid( self, session_key ):
# Session keys are strings
return self.id_cipher.decrypt( session_key.decode( 'hex' ) ).lstrip( "!" )
- def get_new_session_key( self ):
+ def get_new_guid( self ):
# Generate a unique, high entropy 128 bit random number
return get_random_bytes( 16 )
\ No newline at end of file
diff -r 34eec4d48cc4 -r f6e86e26cfe2 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py Fri Apr 23 13:16:40 2010 -0400
+++ b/lib/galaxy/webapps/community/controllers/admin.py Fri Apr 23 15:16:24 2010 -0400
@@ -43,7 +43,6 @@
return self.format( user.galaxy_sessions[ 0 ].update_time )
return 'never'
- log.debug("####In UserListGrid, in community" )
# Grid definition
webapp = "community"
title = "Users"
diff -r 34eec4d48cc4 -r f6e86e26cfe2 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py Fri Apr 23 13:16:40 2010 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py Fri Apr 23 15:16:24 2010 -0400
@@ -41,6 +41,7 @@
try:
meta = datatype.verify( uploaded_file )
meta.user = trans.user
+ meta.guid = trans.app.security.get_new_guid()
obj = datatype.create_model_object( meta )
trans.sa_session.add( obj )
trans.sa_session.flush()
@@ -57,7 +58,8 @@
status = 'error'
uploaded_file.close()
selected_upload_type = params.get( 'type', 'tool' )
- return trans.fill_template( '/webapps/community/upload/upload.mako', message=message,
- status=status,
- selected_upload_type=selected_upload_type,
- upload_types=trans.app.datatypes_registry.get_datatypes_for_select_list() )
+ return trans.fill_template( '/webapps/community/upload/upload.mako',
+ message=message,
+ status=status,
+ selected_upload_type=selected_upload_type,
+ upload_types=trans.app.datatypes_registry.get_datatypes_for_select_list() )
diff -r 34eec4d48cc4 -r f6e86e26cfe2 lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py Fri Apr 23 13:16:40 2010 -0400
+++ b/lib/galaxy/webapps/community/model/__init__.py Fri Apr 23 15:16:24 2010 -0400
@@ -93,7 +93,6 @@
self.name = name or "Unnamed tool"
self.description = description
self.user_description = user_description
- self.category = category
self.version = version or "1.0.0"
self.user_id = user_id
self.external_filename = external_filename
@@ -117,11 +116,13 @@
self.external_filename = filename
file_name = property( get_file_name, set_file_name )
def create_from_datatype( self, datatype_bunch ):
+ # TODO: ensure guid is unique and generate a new one if not.
+ self.guid = datatype_bunch.guid
self.tool_id = datatype_bunch.id
self.name = datatype_bunch.name
+ self.description = datatype_bunch.description
self.version = datatype_bunch.version
- self.description = datatype_bunch.description
- self.user_id = datatype_bunch.user
+ self.user_id = datatype_bunch.user.id
class Tag ( object ):
def __init__( self, id=None, type=None, parent_id=None, name=None ):
diff -r 34eec4d48cc4 -r f6e86e26cfe2 setup.sh
--- a/setup.sh Fri Apr 23 13:16:40 2010 -0400
+++ b/setup.sh Fri Apr 23 15:16:24 2010 -0400
@@ -31,7 +31,7 @@
DIRS="
database
database/files
-database/tools
+database/community_files
database/tmp
database/compiled_templates
database/job_working_directory
diff -r 34eec4d48cc4 -r f6e86e26cfe2 templates/admin/center.mako
--- a/templates/admin/center.mako Fri Apr 23 13:16:40 2010 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,187 +0,0 @@
-<%inherit file="/base.mako"/>
-
-<%def name="title()">Galaxy Administration</%def>
-
-<h2>Administration</h2>
-
-<p>The menu on the left provides the following features</p>
-<ul>
- <li><strong>Security</strong> - see the <strong>Data Security and Data Libraries</strong> section below for details
- <p/>
- <ul>
- <li>
- <strong>Manage users</strong> - provides a view of the registered users and all groups and non-private roles associated
- with each user.
- </li>
- <p/>
- <li>
- <strong>Manage groups</strong> - provides a view of all groups along with the members of the group and the roles associated with
- each group (both private and non-private roles). The group names include a link to a page that allows you to manage the users and
- roles that are associated with the group.
- </li>
- <p/>
- <li>
- <strong>Manage roles</strong> - provides a view of all non-private roles along with the role type, and the users and groups that
- are associated with the role. The role names include a link to a page that allows you to manage the users and groups that are associated
- with the role. The page also includes a view of the data library datasets that are associated with the role and the permissions applied
- to each dataset.
- </li>
- </ul>
- </li>
- <p/>
- <li><strong>Data</strong>
- <p/>
- <ul>
- <li>
- <strong>Manage data libraries</strong> - Data libraries enable a Galaxy administrator to upload datasets into a data library. Currently,
- only administrators can create data libraries.
- <p/>
- When a data library is first created, it is considered "public" since it will be displayed in the "Data Libraries" view for any user, even
- those that are not logged in. The Galaxy administrator can restrict access to a data library by associating roles with the data library's
- "access library" permission. This permission will conservatively override the [dataset] "access" permission for the data library's contained
- datasets.
- <p/>
- For example, if a data library's "access library" permission is associated with Role1 and the data library contains "public" datasets, the
- data library will still only be displayed to those users that have Role1. However, if the data library's "access library" permission is
- associated with both Role1 and Role2 and the data library contains datasets whose [dataset] "access" permission is associated with only Role1,
- then users that have Role2 will be able to access the library, but will not see those contained datasets whose [dataset] "access" permission
- is associated with only Role1.
- <p/>
- In addition to the "access library" permission, permission to perform the following functions on the data library (and it's contents) can
- be granted to users (a library item is one of: a data library, a library folder, a library dataset).
- <p/>
- <ul>
- <li><strong>add library item</strong> - Role members can add library items to this data library or folder</li>
- <li><strong>modify library item</strong> - Role members can modify this library item</li>
- <li><strong>manage library permissions</strong> - Role members can manage permissions applied to this library item</li>
- </ul>
- <p/>
- The default behavior is for no permissions to be applied to a data library item, but applied permissions are inherited downward (with the exception
- of the "access library" permission, which is only available on the data library itself). Because of this, it is important to set desired permissions
- on a new data library when it is created. When this is done, new folders and datasets added to the data library will automatically inherit those
- permissions. In the same way, permissions can be applied to a folder, which will be automatically inherited by all contained datasets and sub-folders.
- <p/>
- The "Data Libraries" menu item allows users to access the datasets in a data library as long as they are not restricted from accessing them.
- Importing a library dataset into a history will not make a copy of the dataset, but will be a "pointer" to the dataset on disk. This
- approach allows for multiple users to use a single (possibly very large) dataset file.
- </li>
- </ul>
- </li>
- <p/>
- <li><strong>Server</strong>
- <p/>
- <ul>
- <li>
- <strong>Reload a tool's configuration</strong> - allows a new version of a tool to be loaded while the server is running
- </li>
- <p/>
- <li>
- <strong>Profile memory usage</strong> - measures system memory used for certain Galaxy functions
- </li>
- <p/>
- <li>
- <strong>Manage jobs</strong> - displays all jobs that are currently not finished (i.e., their state is new, waiting, queued, or
- running). Administrators are able to cleanly stop long-running jobs.
- </li>
- </ul>
- </li>
- <p/>
- <li><strong>Forms</strong>
- <p/>To be completed
- </li>
- <p/>
- <li><strong>Sequencing Requests</strong>
- <p/>To be completed
- </li>
- <p/>
- <li><strong>Cloud</strong>
- <p/>To be completed
- </li>
-</ul>
-<p/>
-<p><strong>Data Security and Data Libraries</strong></p>
-<p/>
-<strong>Security</strong> - Data security in Galaxy is a new feature, so familiarize yourself with the details which can be found
-here or in our <a href="http://g2.trac.bx.psu.edu/wiki/SecurityFeatures" target="_blank">data security page</a>. The data security
-process incorporates users, groups and roles, and enables the application of certain permissions on datasets, specifically "access"
-and "manage permissions". By default, the "manage permissions" permission is associated with the dataset owner's private role, and
-the "access" permission is not set, making the dataset public. With these default permissions, users should not see any difference
-in the way Galaxy has behaved in the past.
-<ul>
- <li>
- <strong>Users</strong> - registered Galaxy users that have created a Galaxy account. Users can belong to groups and can
- be associated with 1 or more roles. If a user is not authenticated during a Galaxy session, they will not have access to any
- of the security features, and datasets they create during that session will have no permissions applied to them (i.e., they
- will be considered "public", and no one will be allowed to change permissions on them).
- </li>
- <p/>
- <li>
- <strong>Groups</strong> - a set of 0 or more users which are considered members of the group. Groups can be associated with 0
- or more roles, simplifying the process of applying permissions to the data between a select group of users.
- </li>
- <p/>
- <li>
- <strong>Roles</strong> - associate users and groups with specific permissions on datasets. For example, users in groups A and B
- can be associated with role C which gives them the "access" permission on datasets D, E and F. Roles have a type which is currently
- one of the following:
- <ul>
- <li>
- <strong>private</strong> - every user is associated automatically with their own private role. Administrators cannot
- manage private roles.
- </li>
- <li>
- <strong>user</strong> - this is currently not used, but eventually any registered user will be able to create a new role
- and this will be it's type.
- </li>
- <li>
- <strong>sharing</strong> - a role created automatically during a Galaxy session that enables a user to share data with
- another user. This can generally be considered a temporary role.
- </li>
- <li><strong>admin</strong> - a role created by a Galaxy administrator.</li>
- </ul>
- </li>
- <p/>
- <li>
- <strong>Dataset Permissions</strong> - applying the following permissions will to a dataset will result in the behavior described.
- <ul>
- <li>
- <strong>access</strong> - users associated with the role can import this dataset into their history for analysis.
- <p>
- If no roles with the "access" permission are associated with a dataset, the dataset is "public" and may be accessed by anyone
- that can access the data library in which it is contained. See the <strong>Manage data libraries</strong> section above for
- details. Public datasets contained in public data libraries will be accessible to all users (as well as anyone not logged in
- during a Galaxy session) from the list of data libraries displayed when the "Data Libraries" menu item is selected.
- </p>
- <p>
- Associating a dataset with a role that includes the "access" permission restricts the set of users that can access it.
- For example, if 'Role A' includes the "access" permission and 'Role A' is associated with the dataset, only those users
- and groups who are associated with 'Role A' may access the dataset.
- </p>
- <p>
- If multiple roles that include the "access" permission are associated with a dataset, access to the dataset is derived
- from the intersection of the users associated with the roles. For example, if 'Role A' and 'Role B' are associated with
- a dataset, only those users and groups who are associated with both 'Role A' AND 'Role B' may access the dataset. When
- the "access" permission is applied to a dataset, Galaxy checks to make sure that at least 1 user belongs to all groups and
- roles associated with the "access" permission (otherwise the dataset would be restricted from everyone).
- </p>
- <p>
- In order for a user to make a dataset private (i.e., only they can access it), they should associate the dataset with
- their private role (the role identical to their Galaxy user name / email address). Associating additional roles that
- include the "access" permission is not possible, since it would render the dataset inaccessible to everyone.
- <p>
- To make a dataset private to themselves and one or more other users, the user can create a new role and associate the dataset
- with that role, not their "private role". Galaxy makes this easy by telling the user they are about to share a private dataset
- and giving them the option of doing so. If they respond positively, the sharing role is automatically created for them.
- </p>
- <p>
- Private data (data associated with roles that include the "access" permission) must be made public in order to be used
- with external applications like the "view at UCSC" link, or the "Perform genome analysis and prediction with EpiGRAPH"
- tool. Being made publically accessible means removing the association of all roles that include the "access" permission
- from the dataset.
- <p>
- </li>
- <li><strong>manage permissions</strong> - Role members can manage the permissions applied to this dataset</li>
- </ul>
- </li>
-</ul>
-<br/>
diff -r 34eec4d48cc4 -r f6e86e26cfe2 templates/webapps/community/admin/center.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/webapps/community/admin/center.mako Fri Apr 23 15:16:24 2010 -0400
@@ -0,0 +1,41 @@
+<%inherit file="/base.mako"/>
+
+<%def name="title()">Galaxy Administration</%def>
+
+<h2>Administration</h2>
+
+<p>The menu on the left provides the following features</p>
+<ul>
+ <li><strong>Security</strong> - see the <strong>Data Security and Data Libraries</strong> section below for details
+ <p/>
+ <ul>
+ <li>
+ <strong>Manage users</strong> - provides a view of the registered users and all groups and non-private roles associated
+ with each user.
+ </li>
+ <p/>
+ <li>
+ <strong>Manage groups</strong> - provides a view of all groups along with the members of the group and the roles associated with
+ each group (both private and non-private roles). The group names include a link to a page that allows you to manage the users and
+ roles that are associated with the group.
+ </li>
+ <p/>
+ <li>
+ <strong>Manage roles</strong> - provides a view of all non-private roles along with the role type, and the users and groups that
+ are associated with the role. The role names include a link to a page that allows you to manage the users and groups that are associated
+ with the role. The page also includes a view of the data library datasets that are associated with the role and the permissions applied
+ to each dataset.
+ </li>
+ </ul>
+ </li>
+ <p/>
+ <li><strong>Tools</strong>
+ <p/>
+ <ul>
+ <li>
+ <strong>Manage tools</strong> - coming soon...
+ </li>
+ </ul>
+ </li>
+</ul>
+<br/>
diff -r 34eec4d48cc4 -r f6e86e26cfe2 templates/webapps/community/admin/index.mako
--- a/templates/webapps/community/admin/index.mako Fri Apr 23 13:16:40 2010 -0400
+++ b/templates/webapps/community/admin/index.mako Fri Apr 23 15:16:24 2010 -0400
@@ -74,11 +74,19 @@
<span>Security</span>
</div>
<div class="toolSectionBody">
- <div class="toolSectionBg">
- <div class="toolTitle"><a href="${h.url_for( controller='admin', action='users', webapp='community' )}" target="galaxy_main">Manage users</a></div>
- <div class="toolTitle"><a href="${h.url_for( controller='admin', action='groups', webapp='community' )}" target="galaxy_main">Manage groups</a></div>
- <div class="toolTitle"><a href="${h.url_for( controller='admin', action='roles', webapp='community' )}" target="galaxy_main">Manage roles</a></div>
- </div>
+ <div class="toolSectionBg">
+ <div class="toolTitle"><a href="${h.url_for( controller='admin', action='users', webapp='community' )}" target="galaxy_main">Manage users</a></div>
+ <div class="toolTitle"><a href="${h.url_for( controller='admin', action='groups', webapp='community' )}" target="galaxy_main">Manage groups</a></div>
+ <div class="toolTitle"><a href="${h.url_for( controller='admin', action='roles', webapp='community' )}" target="galaxy_main">Manage roles</a></div>
+ </div>
+ </div>
+ <div class="toolSectionTitle">
+ <span>Tools</span>
+ </div>
+ <div class="toolSectionBody">
+ <div class="toolSectionBg">
+ <div class="toolTitle"><a href="${h.url_for( controller='tool_browser', action='browse_tools', webapp='community' )}" target="galaxy_main">Manage tools</a></div>
+ </div>
</div>
</div>
</div>
@@ -87,7 +95,7 @@
<%def name="center_panel()">
<%
- center_url = h.url_for( action='center' )
+ center_url = h.url_for( action='center', webapp='community' )
%>
<iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${center_url}"> </iframe>
</%def>
diff -r 34eec4d48cc4 -r f6e86e26cfe2 templates/webapps/galaxy/admin/center.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/webapps/galaxy/admin/center.mako Fri Apr 23 15:16:24 2010 -0400
@@ -0,0 +1,187 @@
+<%inherit file="/base.mako"/>
+
+<%def name="title()">Galaxy Administration</%def>
+
+<h2>Administration</h2>
+
+<p>The menu on the left provides the following features</p>
+<ul>
+ <li><strong>Security</strong> - see the <strong>Data Security and Data Libraries</strong> section below for details
+ <p/>
+ <ul>
+ <li>
+ <strong>Manage users</strong> - provides a view of the registered users and all groups and non-private roles associated
+ with each user.
+ </li>
+ <p/>
+ <li>
+ <strong>Manage groups</strong> - provides a view of all groups along with the members of the group and the roles associated with
+ each group (both private and non-private roles). The group names include a link to a page that allows you to manage the users and
+ roles that are associated with the group.
+ </li>
+ <p/>
+ <li>
+ <strong>Manage roles</strong> - provides a view of all non-private roles along with the role type, and the users and groups that
+ are associated with the role. The role names include a link to a page that allows you to manage the users and groups that are associated
+ with the role. The page also includes a view of the data library datasets that are associated with the role and the permissions applied
+ to each dataset.
+ </li>
+ </ul>
+ </li>
+ <p/>
+ <li><strong>Data</strong>
+ <p/>
+ <ul>
+ <li>
+ <strong>Manage data libraries</strong> - Data libraries enable a Galaxy administrator to upload datasets into a data library. Currently,
+ only administrators can create data libraries.
+ <p/>
+ When a data library is first created, it is considered "public" since it will be displayed in the "Data Libraries" view for any user, even
+ those that are not logged in. The Galaxy administrator can restrict access to a data library by associating roles with the data library's
+ "access library" permission. This permission will conservatively override the [dataset] "access" permission for the data library's contained
+ datasets.
+ <p/>
+ For example, if a data library's "access library" permission is associated with Role1 and the data library contains "public" datasets, the
+ data library will still only be displayed to those users that have Role1. However, if the data library's "access library" permission is
+ associated with both Role1 and Role2 and the data library contains datasets whose [dataset] "access" permission is associated with only Role1,
+ then users that have Role2 will be able to access the library, but will not see those contained datasets whose [dataset] "access" permission
+ is associated with only Role1.
+ <p/>
+ In addition to the "access library" permission, permission to perform the following functions on the data library (and it's contents) can
+ be granted to users (a library item is one of: a data library, a library folder, a library dataset).
+ <p/>
+ <ul>
+ <li><strong>add library item</strong> - Role members can add library items to this data library or folder</li>
+ <li><strong>modify library item</strong> - Role members can modify this library item</li>
+ <li><strong>manage library permissions</strong> - Role members can manage permissions applied to this library item</li>
+ </ul>
+ <p/>
+ The default behavior is for no permissions to be applied to a data library item, but applied permissions are inherited downward (with the exception
+ of the "access library" permission, which is only available on the data library itself). Because of this, it is important to set desired permissions
+ on a new data library when it is created. When this is done, new folders and datasets added to the data library will automatically inherit those
+ permissions. In the same way, permissions can be applied to a folder, which will be automatically inherited by all contained datasets and sub-folders.
+ <p/>
+ The "Data Libraries" menu item allows users to access the datasets in a data library as long as they are not restricted from accessing them.
+ Importing a library dataset into a history will not make a copy of the dataset, but will be a "pointer" to the dataset on disk. This
+ approach allows for multiple users to use a single (possibly very large) dataset file.
+ </li>
+ </ul>
+ </li>
+ <p/>
+ <li><strong>Server</strong>
+ <p/>
+ <ul>
+ <li>
+ <strong>Reload a tool's configuration</strong> - allows a new version of a tool to be loaded while the server is running
+ </li>
+ <p/>
+ <li>
+ <strong>Profile memory usage</strong> - measures system memory used for certain Galaxy functions
+ </li>
+ <p/>
+ <li>
+ <strong>Manage jobs</strong> - displays all jobs that are currently not finished (i.e., their state is new, waiting, queued, or
+ running). Administrators are able to cleanly stop long-running jobs.
+ </li>
+ </ul>
+ </li>
+ <p/>
+ <li><strong>Forms</strong>
+ <p/>To be completed
+ </li>
+ <p/>
+ <li><strong>Sequencing Requests</strong>
+ <p/>To be completed
+ </li>
+ <p/>
+ <li><strong>Cloud</strong>
+ <p/>To be completed
+ </li>
+</ul>
+<p/>
+<p><strong>Data Security and Data Libraries</strong></p>
+<p/>
+<strong>Security</strong> - Data security in Galaxy is a new feature, so familiarize yourself with the details which can be found
+here or in our <a href="http://g2.trac.bx.psu.edu/wiki/SecurityFeatures" target="_blank">data security page</a>. The data security
+process incorporates users, groups and roles, and enables the application of certain permissions on datasets, specifically "access"
+and "manage permissions". By default, the "manage permissions" permission is associated with the dataset owner's private role, and
+the "access" permission is not set, making the dataset public. With these default permissions, users should not see any difference
+in the way Galaxy has behaved in the past.
+<ul>
+ <li>
+ <strong>Users</strong> - registered Galaxy users that have created a Galaxy account. Users can belong to groups and can
+ be associated with 1 or more roles. If a user is not authenticated during a Galaxy session, they will not have access to any
+ of the security features, and datasets they create during that session will have no permissions applied to them (i.e., they
+ will be considered "public", and no one will be allowed to change permissions on them).
+ </li>
+ <p/>
+ <li>
+ <strong>Groups</strong> - a set of 0 or more users which are considered members of the group. Groups can be associated with 0
+ or more roles, simplifying the process of applying permissions to the data between a select group of users.
+ </li>
+ <p/>
+ <li>
+ <strong>Roles</strong> - associate users and groups with specific permissions on datasets. For example, users in groups A and B
+ can be associated with role C which gives them the "access" permission on datasets D, E and F. Roles have a type which is currently
+ one of the following:
+ <ul>
+ <li>
+ <strong>private</strong> - every user is associated automatically with their own private role. Administrators cannot
+ manage private roles.
+ </li>
+ <li>
+ <strong>user</strong> - this is currently not used, but eventually any registered user will be able to create a new role
+ and this will be it's type.
+ </li>
+ <li>
+ <strong>sharing</strong> - a role created automatically during a Galaxy session that enables a user to share data with
+ another user. This can generally be considered a temporary role.
+ </li>
+ <li><strong>admin</strong> - a role created by a Galaxy administrator.</li>
+ </ul>
+ </li>
+ <p/>
+ <li>
+ <strong>Dataset Permissions</strong> - applying the following permissions will to a dataset will result in the behavior described.
+ <ul>
+ <li>
+ <strong>access</strong> - users associated with the role can import this dataset into their history for analysis.
+ <p>
+ If no roles with the "access" permission are associated with a dataset, the dataset is "public" and may be accessed by anyone
+ that can access the data library in which it is contained. See the <strong>Manage data libraries</strong> section above for
+ details. Public datasets contained in public data libraries will be accessible to all users (as well as anyone not logged in
+ during a Galaxy session) from the list of data libraries displayed when the "Data Libraries" menu item is selected.
+ </p>
+ <p>
+ Associating a dataset with a role that includes the "access" permission restricts the set of users that can access it.
+ For example, if 'Role A' includes the "access" permission and 'Role A' is associated with the dataset, only those users
+ and groups who are associated with 'Role A' may access the dataset.
+ </p>
+ <p>
+ If multiple roles that include the "access" permission are associated with a dataset, access to the dataset is derived
+ from the intersection of the users associated with the roles. For example, if 'Role A' and 'Role B' are associated with
+ a dataset, only those users and groups who are associated with both 'Role A' AND 'Role B' may access the dataset. When
+ the "access" permission is applied to a dataset, Galaxy checks to make sure that at least 1 user belongs to all groups and
+ roles associated with the "access" permission (otherwise the dataset would be restricted from everyone).
+ </p>
+ <p>
+ In order for a user to make a dataset private (i.e., only they can access it), they should associate the dataset with
+ their private role (the role identical to their Galaxy user name / email address). Associating additional roles that
+ include the "access" permission is not possible, since it would render the dataset inaccessible to everyone.
+ <p>
+ To make a dataset private to themselves and one or more other users, the user can create a new role and associate the dataset
+ with that role, not their "private role". Galaxy makes this easy by telling the user they are about to share a private dataset
+ and giving them the option of doing so. If they respond positively, the sharing role is automatically created for them.
+ </p>
+ <p>
+ Private data (data associated with roles that include the "access" permission) must be made public in order to be used
+ with external applications like the "view at UCSC" link, or the "Perform genome analysis and prediction with EpiGRAPH"
+ tool. Being made publically accessible means removing the association of all roles that include the "access" permission
+ from the dataset.
+ <p>
+ </li>
+ <li><strong>manage permissions</strong> - Role members can manage the permissions applied to this dataset</li>
+ </ul>
+ </li>
+</ul>
+<br/>
diff -r 34eec4d48cc4 -r f6e86e26cfe2 templates/webapps/galaxy/admin/index.mako
--- a/templates/webapps/galaxy/admin/index.mako Fri Apr 23 13:16:40 2010 -0400
+++ b/templates/webapps/galaxy/admin/index.mako Fri Apr 23 15:16:24 2010 -0400
@@ -133,7 +133,7 @@
<%def name="center_panel()">
<%
- center_url = h.url_for( action='center' )
+ center_url = h.url_for( action='center', webapp='galaxy' )
%>
<iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${center_url}"> </iframe>
</%def>
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/34eec4d48cc4
changeset: 3685:34eec4d48cc4
user: rc
date: Fri Apr 23 13:16:40 2010 -0400
description:
lims: add tests for request_type permissions
diffstat:
test/base/twilltestcase.py | 15 ++
test/functional/test_forms_and_requests.py | 214 ++++++++++++++++++----------
2 files changed, 151 insertions(+), 78 deletions(-)
diffs (337 lines):
diff -r a77ec2944999 -r 34eec4d48cc4 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Fri Apr 23 11:30:16 2010 -0400
+++ b/test/base/twilltestcase.py Fri Apr 23 13:16:40 2010 -0400
@@ -1526,6 +1526,21 @@
tc.fv("1", "state_desc_%i" % index, state[1])
tc.submit( "save_request_type" )
self.check_page_for_string( "Request type <b>%s</b> has been created" % name )
+ def request_type_permissions( self, request_type_id, request_type_name, role_ids_str, permissions_in, permissions_out ):
+ # role_ids_str must be a comma-separated string of role ids
+ url = "requests_admin/manage_request_types?operation=permissions&id=%s&update_roles_button=Save" % ( request_type_id )
+ for po in permissions_out:
+ key = '%s_out' % po
+ url ="%s&%s=%s" % ( url, key, role_ids_str )
+ for pi in permissions_in:
+ key = '%s_in' % pi
+ url ="%s&%s=%s" % ( url, key, role_ids_str )
+ self.home()
+ self.visit_url( "%s/%s" % ( self.url, url ) )
+ print url
+ check_str = "Permissions updated for request type '%s'" % request_type_name
+ self.check_page_for_string( check_str )
+ self.home()
def create_request( self, request_type_id, name, desc, fields ):
self.home()
self.visit_url( "%s/requests/new?create=True&select_request_type=%i" % ( self.url,
diff -r a77ec2944999 -r 34eec4d48cc4 test/functional/test_forms_and_requests.py
--- a/test/functional/test_forms_and_requests.py Fri Apr 23 11:30:16 2010 -0400
+++ b/test/functional/test_forms_and_requests.py Fri Apr 23 13:16:40 2010 -0400
@@ -2,6 +2,7 @@
from galaxy.model.orm import *
from galaxy.model.mapping import context as sa_session
from base.twilltestcase import *
+from base.test_db_util import *
not_logged_in_as_admin_security_msg = 'You must be logged in as an administrator to access this feature.'
logged_in_as_admin_security_msg = 'You must be an administrator to access this feature.'
@@ -37,7 +38,80 @@
class TestFormsAndRequests( TwillTestCase ):
- def test_000_create_form( self ):
+ def test_000_initiate_users( self ):
+ """Ensuring all required user accounts exist"""
+ self.logout()
+ self.login( email='test1(a)bx.psu.edu', username='regular-user1' )
+ global regular_user1
+ regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+ assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+ global regular_user1_private_role
+ regular_user1_private_role = get_private_role( regular_user1 )
+ self.logout()
+ self.login( email='test2(a)bx.psu.edu', username='regular-user2' )
+ global regular_user2
+ regular_user2 = get_user( 'test2(a)bx.psu.edu' )
+ assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+ global regular_user2_private_role
+ regular_user2_private_role = get_private_role( regular_user2 )
+ self.logout()
+ self.login( email='test3(a)bx.psu.edu', username='regular-user3' )
+ global regular_user3
+ regular_user3 = get_user( 'test3(a)bx.psu.edu' )
+ assert regular_user3 is not None, 'Problem retrieving user with email "test3(a)bx.psu.edu" from the database'
+ global regular_user3_private_role
+ regular_user3_private_role = get_private_role( regular_user3 )
+ self.logout()
+ self.login( email='test(a)bx.psu.edu', username='admin-user' )
+ global admin_user
+ admin_user = get_user( 'test(a)bx.psu.edu' )
+ assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+ global admin_user_private_role
+ admin_user_private_role = get_private_role( admin_user )
+ def test_005_create_required_groups_and_roles( self ):
+ """Testing creating all required groups and roles for this script"""
+ # Logged in as admin_user
+ # Create role_one
+ name = 'Role One'
+ description = "This is Role One's description"
+ user_ids = [ str( admin_user.id ), str( regular_user1.id ), str( regular_user3.id ) ]
+ self.create_role( name=name,
+ description=description,
+ in_user_ids=user_ids,
+ in_group_ids=[],
+ create_group_for_role='no',
+ private_role=admin_user.email )
+ # Get the role object for later tests
+ global role_one
+ role_one = get_role_by_name( name )
+ # Create group_one
+ name = 'Group One'
+ self.create_group( name=name, in_user_ids=[ str( regular_user1.id ) ], in_role_ids=[ str( role_one.id ) ] )
+ # Get the group object for later tests
+ global group_one
+ group_one = get_group_by_name( name )
+ assert group_one is not None, 'Problem retrieving group named "Group One" from the database'
+ # NOTE: To get this to work with twill, all select lists on the ~/admin/role page must contain at least
+ # 1 option value or twill throws an exception, which is: ParseError: OPTION outside of SELECT
+ # Due to this bug in twill, we create the role, we bypass the page and visit the URL in the
+ # associate_users_and_groups_with_role() method.
+ #
+ #create role_two
+ name = 'Role Two'
+ description = 'This is Role Two'
+ user_ids = [ str( admin_user.id ) ]
+ group_ids = [ str( group_one.id ) ]
+ private_role = admin_user.email
+ self.create_role( name=name,
+ description=description,
+ in_user_ids=user_ids,
+ in_group_ids=group_ids,
+ private_role=private_role )
+ # Get the role object for later tests
+ global role_two
+ role_two = get_role_by_name( name )
+ assert role_two is not None, 'Problem retrieving role named "Role Two" from the database'
+ def test_010_create_form( self ):
"""Testing creating a new form and editing it"""
self.logout()
self.login( email='test(a)bx.psu.edu' )
@@ -58,7 +132,7 @@
self.check_page_for_string( new_name )
self.check_page_for_string( new_desc )
form_one_name = new_name
- def test_005_add_form_fields( self ):
+ def test_015_add_form_fields( self ):
"""Testing adding fields to a form definition"""
fields = [dict(name='Test field name one',
desc='Test field description one',
@@ -78,7 +152,7 @@
field_index=len(form_one.fields), fields=fields)
form_one_latest = get_latest_form(form_one_name)
assert len(form_one_latest.fields) == len(form_one.fields)+len(fields)
- def test_015_create_sample_form( self ):
+ def test_020_create_sample_form( self ):
"""Testing creating another form (for samples)"""
global form_two_name
desc = "This is Form Two's description"
@@ -90,7 +164,7 @@
self.check_page_for_string( form_two_name )
self.check_page_for_string( desc )
self.check_page_for_string( formtype )
- def test_020_create_request_type( self ):
+ def test_025_create_request_type( self ):
"""Testing creating a new requestype"""
request_form = get_latest_form(form_one_name)
sample_form = get_latest_form(form_two_name)
@@ -102,93 +176,77 @@
.order_by( desc( galaxy.model.RequestType.table.c.create_time ) ) \
.first()
assert request_type is not None, 'Problem retrieving request type named "%s" from the database' % request_type_name
- def test_025_create_address_and_library( self ):
+ # Set permissions
+ permissions_in = [ k for k, v in galaxy.model.RequestType.permitted_actions.items() ]
+ permissions_out = []
+ # Role one members are: admin_user, regular_user1, regular_user3. Each of these users will be permitted for
+ # REQUEST_TYPE_ACCESS on this request_type
+ self.request_type_permissions(self.security.encode_id( request_type.id ),
+ request_type.name,
+ str( role_one.id ),
+ permissions_in,
+ permissions_out )
+ # Make sure the request_type is not accessible by regular_user2 since regular_user2 does not have Role1.
+ self.logout()
+ self.login( email=regular_user2.email )
+ self.visit_url( '%s/requests/new?create=True&select_request_type=%i' % (self.url, request_type.id) )
+ try:
+ self.check_page_for_string( 'There are no request types created for a new request.' )
+ raise AssertionError, 'The request_type %s is accessible by %s when it should be restricted' % ( request_type.name, regular_user2.email )
+ except:
+ pass
+ self.logout()
+ self.login( email=admin_user.email )
+
+ def test_030_create_address_and_library( self ):
"""Testing address & library creation"""
- # first create a regular user
- self.logout()
- self.login( email='test1(a)bx.psu.edu', username='regular-user1' )
- self.logout()
- self.login( email='test(a)bx.psu.edu' )
# first create a library for the request so that it can be submitted later
- lib_name = 'TestLib001'
- self.create_library( lib_name, '' )
- self.visit_page( 'library_admin/browse_libraries' )
- self.check_page_for_string( lib_name )
+ name = "TestLib001"
+ description = "TestLib001 description"
+ synopsis = "TestLib001 synopsis"
+ self.create_library( name=name, description=description, synopsis=synopsis )
# Get the library object for later tests
global library_one
- library_one = sa_session.query( galaxy.model.Library ) \
- .filter( and_( galaxy.model.Library.table.c.name==lib_name,
- galaxy.model.Library.table.c.deleted==False ) ) \
- .first()
- assert library_one is not None, 'Problem retrieving library named "%s" from the database' % lib_name
- global admin_user
- admin_user = sa_session.query( galaxy.model.User ) \
- .filter( galaxy.model.User.table.c.email=='test(a)bx.psu.edu' ) \
- .first()
- assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
- # Get the admin user's private role for later use
- global admin_user_private_role
- admin_user_private_role = None
- for role in admin_user.all_roles():
- if role.name == admin_user.email and role.description == 'Private Role for %s' % admin_user.email:
- admin_user_private_role = role
- break
- if not admin_user_private_role:
- raise AssertionError( "Private role not found for user '%s'" % admin_user.email )
- global regular_user1
- regular_user1 = sa_session.query( galaxy.model.User ) \
- .filter( galaxy.model.User.table.c.email=='test1(a)bx.psu.edu' ) \
- .first()
- assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
- # Get the regular user's private role for later use
- global regular_user1_private_role
- regular_user1_private_role = None
- for role in regular_user1.all_roles():
- if role.name == regular_user1.email and role.description == 'Private Role for %s' % regular_user1.email:
- regular_user1_private_role = role
- break
- if not regular_user1_private_role:
- raise AssertionError( "Private role not found for user '%s'" % regular_user1.email )
- # Set permissions on the library, sort for later testing
+ library_one = get_library( name, description, synopsis )
+ assert library_one is not None, 'Problem retrieving library named "%s" from the database' % name
+ # Make sure library_one is public
+ assert 'access library' not in [ a.action for a in library_one.actions ], 'Library %s is not public when first created' % library_one.name
+ # Set permissions on the library, sort for later testing.
permissions_in = [ k for k, v in galaxy.model.Library.permitted_actions.items() ]
permissions_out = []
- name = 'Role for testing forms'
- description = "This is Role Ones description"
- user_ids=[ str( admin_user.id ), str( regular_user1.id ) ]
- self.create_role( name=name,
- description=description,
- in_user_ids=user_ids,
- in_group_ids=[],
- create_group_for_role='yes',
- private_role=admin_user.email )
- # Get the role object for later tests
- global role_one
- role_one = sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name==name ).first()
- assert role_one is not None, 'Problem retrieving role named "Role for testing forms" from the database'
- # Role one members are: admin_user, regular_user1. Each of these users will be permitted to
- # LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE for library items.
+ # Role one members are: admin_user, regular_user1, regular_user3. Each of these users will be permitted for
+ # LIBRARY_ACCESS, LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE on this library and it's contents.
self.library_permissions( self.security.encode_id( library_one.id ),
library_one.name,
str( role_one.id ),
permissions_in,
permissions_out )
- # create a folder in the library
+ # Make sure the library is accessible by admin_user
+ self.visit_url( '%s/library/browse_libraries' % self.url )
+ self.check_page_for_string( library_one.name )
+ # Make sure the library is not accessible by regular_user2 since regular_user2 does not have Role1.
+ self.logout()
+ self.login( email=regular_user2.email )
+ self.visit_url( '%s/library/browse_libraries' % self.url )
+ try:
+ self.check_page_for_string( library_one.name )
+ raise AssertionError, 'Library %s is accessible by %s when it should be restricted' % ( library_one.name, regular_user2.email )
+ except:
+ pass
+ self.logout()
+ self.login( email=admin_user.email )
+ # create folder
root_folder = library_one.root_folder
- name = "Folder One"
+ name = "Root Folder's Folder One"
+ description = "This is the root folder's Folder One"
self.add_folder( 'library_admin',
self.security.encode_id( library_one.id ),
self.security.encode_id( root_folder.id ),
name=name,
- description='' )
+ description=description )
global folder_one
- folder_one = sa_session.query( galaxy.model.LibraryFolder ) \
- .filter( and_( galaxy.model.LibraryFolder.table.c.parent_id==root_folder.id,
- galaxy.model.LibraryFolder.table.c.name==name ) ) \
- .first()
+ folder_one = get_folder( root_folder.id, name, description )
assert folder_one is not None, 'Problem retrieving library folder named "%s" from the database' % name
- self.home()
- self.visit_url( '%s/library_common/browse_library?cntrller=library_admin&id=%s' % ( self.url, self.security.encode_id( library_one.id ) ) )
- self.check_page_for_string( name )
# create address
self.logout()
self.login( email='test1(a)bx.psu.edu', username='regular-user1' )
@@ -202,7 +260,7 @@
.filter( and_( galaxy.model.UserAddress.table.c.desc==address1[ 'short_desc' ],
galaxy.model.UserAddress.table.c.deleted==False ) ) \
.first()
- def test_030_create_request( self ):
+ def test_035_create_request( self ):
"""Testing creating, editing and submitting a request as a regular user"""
# login as a regular user
self.logout()
@@ -240,7 +298,7 @@
# check if the request's state is now set to 'submitted'
assert request_one.state is not request_one.states.SUBMITTED, "The state of the request '%s' should be set to '%s'" \
% ( request_one.name, request_one.states.SUBMITTED )
- def test_035_request_lifecycle( self ):
+ def test_040_request_lifecycle( self ):
"""Testing request lifecycle as it goes through all the states"""
# goto admin manage requests page
self.logout()
@@ -264,7 +322,7 @@
self.check_request_grid(state='Complete', request_name=request_one.name)
assert request_one.state is not request_one.states.COMPLETE, "The state of the request '%s' should be set to '%s'" \
% ( request_one.name, request_one.states.COMPLETE )
- def test_040_admin_create_request_on_behalf_of_regular_user( self ):
+ def test_045_admin_create_request_on_behalf_of_regular_user( self ):
"""Testing creating and submitting a request as an admin on behalf of a regular user"""
self.logout()
self.login( email='test(a)bx.psu.edu' )
@@ -301,7 +359,7 @@
# check if both the requests is showing in the 'All' filter
self.check_request_admin_grid(state='All', request_name=request_one.name)
self.check_request_admin_grid(state='All', request_name=request_two.name)
- def test_045_reject_request( self ):
+ def test_050_reject_request( self ):
'''Testing rejecting a request'''
self.logout()
self.login( email='test(a)bx.psu.edu' )
@@ -312,7 +370,7 @@
# check if the request's state is now set to 'submitted'
assert request_two.state is not request_two.states.REJECTED, "The state of the request '%s' should be set to '%s'" \
% ( request_two.name, request_two.states.REJECTED )
- def test_050_reset_data_for_later_test_runs( self ):
+ def test_055_reset_data_for_later_test_runs( self ):
"""Reseting data to enable later test runs to pass"""
# TODO: RC: add whatever is missing from this method that should be marked
# deleted or purged so that later test runs will correctly test features if the
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/a77ec2944999
changeset: 3684:a77ec2944999
user: Nate Coraor <nate(a)bx.psu.edu>
date: Fri Apr 23 11:30:16 2010 -0400
description:
Community upload functionality
diffstat:
community_datatypes_conf.xml.sample | 9 +
community_wsgi.ini.sample | 2 +-
lib/galaxy/model/orm/__init__.py | 1 +
lib/galaxy/web/base/controller.py | 2 +-
lib/galaxy/webapps/community/app.py | 4 +
lib/galaxy/webapps/community/config.py | 1 +
lib/galaxy/webapps/community/controllers/tool_browser.py | 48 ++-
lib/galaxy/webapps/community/controllers/upload.py | 63 ++++
lib/galaxy/webapps/community/datatypes/__init__.py | 145 ++++++++++
lib/galaxy/webapps/community/model/__init__.py | 124 ++-----
lib/galaxy/webapps/community/model/mapping.py | 45 +-
lib/galaxy/webapps/community/model/migrate/versions/0001_initial_tables.py | 24 +-
templates/webapps/community/tool/edit_tool.mako | 73 +++++
templates/webapps/community/upload/upload.mako | 66 ++++
14 files changed, 480 insertions(+), 127 deletions(-)
diffs (832 lines):
diff -r 742fa2afcad9 -r a77ec2944999 community_datatypes_conf.xml.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/community_datatypes_conf.xml.sample Fri Apr 23 11:30:16 2010 -0400
@@ -0,0 +1,9 @@
+<?xml version="1.0"?>
+<datatypes>
+ <registration>
+ <datatype extension="tool" type="galaxy.webapps.community.datatypes:Tool" model="galaxy.webapps.community.model:Tool"/>
+ </registration>
+ <sniffers>
+ <sniffer type="galaxy.webapps.community.datatypes:Tool"/>
+ </sniffers>
+</datatypes>
diff -r 742fa2afcad9 -r a77ec2944999 community_wsgi.ini.sample
--- a/community_wsgi.ini.sample Fri Apr 23 11:14:26 2010 -0400
+++ b/community_wsgi.ini.sample Fri Apr 23 11:30:16 2010 -0400
@@ -22,7 +22,7 @@
#database_connection = postgres:///community_test?host=/var/run/postgresql
# Where dataset files are saved
-file_path = database/files
+file_path = database/community_files
# Temporary storage for additional datasets, this should be shared through the cluster
new_file_path = database/tmp
diff -r 742fa2afcad9 -r a77ec2944999 lib/galaxy/model/orm/__init__.py
--- a/lib/galaxy/model/orm/__init__.py Fri Apr 23 11:14:26 2010 -0400
+++ b/lib/galaxy/model/orm/__init__.py Fri Apr 23 11:30:16 2010 -0400
@@ -3,5 +3,6 @@
from sqlalchemy import *
from sqlalchemy.orm import *
+import sqlalchemy.exc
from sqlalchemy.ext.orderinglist import ordering_list
diff -r 742fa2afcad9 -r a77ec2944999 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py Fri Apr 23 11:14:26 2010 -0400
+++ b/lib/galaxy/web/base/controller.py Fri Apr 23 11:30:16 2010 -0400
@@ -304,7 +304,7 @@
class ControllerUnavailable( Exception ):
pass
-class Admin():
+class Admin( object ):
# Override these
user_list_grid = None
role_list_grid = None
diff -r 742fa2afcad9 -r a77ec2944999 lib/galaxy/webapps/community/app.py
--- a/lib/galaxy/webapps/community/app.py Fri Apr 23 11:14:26 2010 -0400
+++ b/lib/galaxy/webapps/community/app.py Fri Apr 23 11:30:16 2010 -0400
@@ -1,5 +1,6 @@
import sys, config
import galaxy.webapps.community.model
+import galaxy.webapps.community.datatypes
from galaxy.web import security
from galaxy.tags.tag_handler import CommunityTagHandler
@@ -11,6 +12,9 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
+ # Set up datatypes registry
+ self.datatypes_registry = galaxy.webapps.community.datatypes.Registry( self.config.root, self.config.datatypes_config )
+ galaxy.model.set_datatypes_registry( self.datatypes_registry )
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
diff -r 742fa2afcad9 -r a77ec2944999 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py Fri Apr 23 11:14:26 2010 -0400
+++ b/lib/galaxy/webapps/community/config.py Fri Apr 23 11:30:16 2010 -0400
@@ -61,6 +61,7 @@
self.screencasts_url = kwargs.get( 'screencasts_url', None )
self.log_events = False
self.cloud_controller_instance = False
+ self.datatypes_config = kwargs.get( 'datatypes_config_file', 'community_datatypes_conf.xml' )
# Parse global_conf and save the parser
global_conf = kwargs.get( 'global_conf', None )
global_conf_parser = ConfigParser.ConfigParser()
diff -r 742fa2afcad9 -r a77ec2944999 lib/galaxy/webapps/community/controllers/tool_browser.py
--- a/lib/galaxy/webapps/community/controllers/tool_browser.py Fri Apr 23 11:14:26 2010 -0400
+++ b/lib/galaxy/webapps/community/controllers/tool_browser.py Fri Apr 23 11:30:16 2010 -0400
@@ -1,5 +1,4 @@
import sys, os, operator, string, shutil, re, socket, urllib, time, logging
-from cgi import escape, FieldStorage
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
@@ -27,18 +26,14 @@
title = "Tools"
model_class = model.Tool
template='/webapps/community/tool/grid.mako'
- default_sort_key = "category"
+ default_sort_key = "name"
columns = [
NameColumn( "Name",
key="name",
model_class=model.Tool,
+ link=( lambda item: dict( operation="Edit Tool", id=item.id, webapp="community" ) ),
attach_popup=False,
filterable="advanced" ),
- CategoryColumn( "Category",
- key="category",
- model_class=model.Tool,
- attach_popup=False,
- filterable="advanced" ),
# Columns that are valid for filtering but are not visible.
grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
]
@@ -48,7 +43,7 @@
visible=False,
filterable="standard" ) )
global_actions = [
- grids.GridAction( "Upload tool", dict( controller='tool_browwser', action='upload' ) )
+ grids.GridAction( "Upload tool", dict( controller='upload', action='upload', type='tool' ) )
]
operations = [
grids.GridOperation( "View versions", condition=( lambda item: not item.deleted ), allow_multiple=False )
@@ -57,7 +52,7 @@
grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
]
- default_filter = dict( name="All", category="All", deleted="False" )
+ default_filter = dict( name="All", deleted="False" )
num_rows_per_page = 50
preserve_state = False
use_paging = True
@@ -84,6 +79,10 @@
return trans.response.send_redirect( web.url_for( controller='tool_browser',
action='browse_tool',
**kwargs ) )
+ elif operation == "edit tool":
+ return trans.response.send_redirect( web.url_for( controller='tool_browser',
+ action='edit_tool',
+ **kwargs ) )
# Render the list view
return self.tool_list_grid( trans, **kwargs )
@web.expose
@@ -96,5 +95,32 @@
message=message,
status=status )
@web.expose
- def upload( self, trans, **kwargs ):
- pass
+ def edit_tool( self, trans, id=None, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ # Get the tool
+ tool = None
+ if id is not None:
+ encoded_id = id
+ id = trans.app.security.decode_id( id )
+ tool = trans.sa_session.query( trans.model.Tool ).get( id )
+ if tool is None:
+ return trans.response.send_redirect( web.url_for( controller='tool_browser',
+ action='browse_tools',
+ message='Please select a Tool to edit (the tool ID provided was invalid)',
+ status='error' ) )
+ if params.save_button and ( params.file_data != '' or params.url != '' ):
+ # TODO: call the upload method in the upload controller.
+ message = 'Uploading new version not implemented'
+ status = 'error'
+ elif params.save_button:
+ tool.user_description = params.description
+ tool.category = params.category
+ categories = trans.sa_session.query( trans.model.Category ).order_by( trans.model.Category.table.c.name ).all()
+ return trans.fill_template( '/webapps/community/tool/edit_tool.mako',
+ encoded_id = encoded_id,
+ tool=tool,
+ categories=categories,
+ message=message,
+ status=status )
diff -r 742fa2afcad9 -r a77ec2944999 lib/galaxy/webapps/community/controllers/upload.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/webapps/community/controllers/upload.py Fri Apr 23 11:30:16 2010 -0400
@@ -0,0 +1,63 @@
+import sys, os, shutil, logging, urllib2
+
+from galaxy.web.base.controller import *
+from galaxy.web.framework.helpers import time_ago, iff, grids
+from galaxy.model.orm import *
+from galaxy.webapps.community import datatypes
+
+log = logging.getLogger( __name__ )
+
+# States for passing messages
+SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
+
+class UploadController( BaseController ):
+
+ @web.expose
+ def upload( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ uploaded_file = None
+ if params.file_data == '' and params.url.strip() == '':
+ message = 'No files were entered on the upload form.'
+ status = 'error'
+ elif params.file_data == '':
+ try:
+ uploaded_file = urllib2.urlopen( params.url.strip() )
+ except ( ValueError, urllib2.HTTPError ), e:
+ message = 'An error occurred trying to retrieve the URL entered on the upload form: %s' % e
+ status = 'error'
+ except urllib2.URLError, e:
+ message = 'An error occurred trying to retrieve the URL entered on the upload form: %s' % e.reason
+ status = 'error'
+ elif params.file_data not in ( '', None ):
+ uploaded_file = params.file_data.file
+ if params.upload_button and uploaded_file:
+ datatype = trans.app.datatypes_registry.get_datatype_by_extension( params.upload_type )
+ if datatype is None:
+ message = 'An unknown filetype was selected. This should not be possble, please report the error.'
+ status = 'error'
+ else:
+ try:
+ meta = datatype.verify( uploaded_file )
+ meta.user = trans.user
+ obj = datatype.create_model_object( meta )
+ trans.sa_session.add( obj )
+ trans.sa_session.flush()
+ try:
+ os.link( uploaded_file.name, obj.file_name )
+ except OSError:
+ shutil.copy( uploaded_file.name, obj.file_name )
+ message = 'Uploaded %s' % meta.message
+ except datatypes.DatatypeVerificationError, e:
+ message = str( e )
+ status = 'error'
+ except sqlalchemy.exc.IntegrityError:
+ message = 'A tool with the same ID already exists. If you are trying to update this tool to a new version, please ... ??? ... Otherwise, please choose a new ID.'
+ status = 'error'
+ uploaded_file.close()
+ selected_upload_type = params.get( 'type', 'tool' )
+ return trans.fill_template( '/webapps/community/upload/upload.mako', message=message,
+ status=status,
+ selected_upload_type=selected_upload_type,
+ upload_types=trans.app.datatypes_registry.get_datatypes_for_select_list() )
diff -r 742fa2afcad9 -r a77ec2944999 lib/galaxy/webapps/community/datatypes/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/webapps/community/datatypes/__init__.py Fri Apr 23 11:30:16 2010 -0400
@@ -0,0 +1,145 @@
+import sys, logging, tarfile
+import galaxy.util
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger( __name__ )
+
+if sys.version_info[:2] == ( 2, 4 ):
+ from galaxy import eggs
+ eggs.require( 'ElementTree' )
+ from elementtree import ElementTree
+else:
+ from xml.etree import ElementTree
+
+class DatatypeVerificationError( Exception ):
+ pass
+
+class Registry( object ):
+ def __init__( self, root_dir=None, config=None ):
+ self.datatypes_by_extension = {}
+ self.sniff_order = []
+ if root_dir and config:
+ # Parse datatypes_conf.xml
+ tree = galaxy.util.parse_xml( config )
+ root = tree.getroot()
+ # Load datatypes and converters from config
+ log.debug( 'Loading datatypes from %s' % config )
+ registration = root.find( 'registration' )
+ for elem in registration.findall( 'datatype' ):
+ try:
+ extension = elem.get( 'extension', None )
+ dtype = elem.get( 'type', None )
+ model_object = elem.get( 'model', None )
+ if extension and dtype:
+ fields = dtype.split( ':' )
+ datatype_module = fields[0]
+ datatype_class = fields[1]
+ fields = datatype_module.split( '.' )
+ module = __import__( fields.pop(0) )
+ for mod in fields:
+ module = getattr( module, mod )
+ self.datatypes_by_extension[extension] = getattr( module, datatype_class )()
+ log.debug( 'Loaded datatype: %s' % dtype )
+ if model_object:
+ model_module, model_class = model_object.split( ':' )
+ fields = model_module.split( '.' )
+ module = __import__( fields.pop(0) )
+ for mod in fields:
+ module = getattr( module, mod )
+ self.datatypes_by_extension[extension].model_object = getattr( module, model_class )
+ log.debug( 'Added model class: %s to datatype: %s' % ( model_class, dtype ) )
+ except Exception, e:
+ log.warning( 'Error loading datatype "%s", problem: %s' % ( extension, str( e ) ) )
+ # Load datatype sniffers from the config
+ sniff_order = []
+ sniffers = root.find( 'sniffers' )
+ for elem in sniffers.findall( 'sniffer' ):
+ dtype = elem.get( 'type', None )
+ if dtype:
+ sniff_order.append( dtype )
+ for dtype in sniff_order:
+ try:
+ fields = dtype.split( ":" )
+ datatype_module = fields[0]
+ datatype_class = fields[1]
+ fields = datatype_module.split( "." )
+ module = __import__( fields.pop(0) )
+ for mod in fields:
+ module = getattr( module, mod )
+ aclass = getattr( module, datatype_class )()
+ included = False
+ for atype in self.sniff_order:
+ if not issubclass( atype.__class__, aclass.__class__ ) and isinstance( atype, aclass.__class__ ):
+ included = True
+ break
+ if not included:
+ self.sniff_order.append( aclass )
+ log.debug( 'Loaded sniffer for datatype: %s' % dtype )
+ except Exception, exc:
+ log.warning( 'Error appending datatype %s to sniff_order, problem: %s' % ( dtype, str( exc ) ) )
+ def get_datatype_by_extension( self, ext ):
+ return self.datatypes_by_extension.get( ext, None )
+ def get_datatypes_for_select_list( self ):
+ rval = []
+ for ext, datatype in self.datatypes_by_extension.items():
+ rval.append( ( ext, datatype.select_name ) )
+ return rval
+ def sniff( self, fname ):
+ for datatype in sniff_order:
+ try:
+ datatype.sniff( fname )
+ return datatype.file_ext
+ except:
+ pass
+
+class Tool( object ):
+ select_name = 'Tool'
+ def __init__( self, model_object=None ):
+ self.model_object = model_object
+ def verify( self, file ):
+ msg = ''
+ try:
+ tar = tarfile.TarFile( fileobj = file )
+ except tarfile.ReadError:
+ raise DatatypeVerificationError( 'The tool file is not a readable tar file' )
+ xml_names = filter( lambda x: x.lower().endswith( '.xml' ), tar.getnames() )
+ if not xml_names:
+ raise DatatypeVerificationError( 'The tool file does not contain an XML file' )
+ for xml_name in xml_names:
+ try:
+ tree = ElementTree.parse( tar.extractfile( xml_name ) )
+ root = tree.getroot()
+ except:
+ log.exception( 'fail:' )
+ continue
+ if root.tag == 'tool':
+ rval = Bunch()
+ try:
+ rval.id = root.attrib['id']
+ rval.name = root.attrib['name']
+ rval.version = root.attrib['version']
+ except KeyError, e:
+ raise DatatypeVerificationError( 'Tool XML file does not conform to the specification. Missing required <tool> tag attribute: %s' % e )
+ rval.description = None
+ desc_tag = root.find( 'description' )
+ if desc_tag is not None:
+ rval.description = desc_tag.text.strip()
+ rval.message = 'Tool: %s %s, Version: %s, ID: %s' % ( rval.name, rval.description or '', rval.version, rval.id )
+ return rval
+ else:
+ raise DatatypeVerificationError( 'Unable to find a properly formatted tool XML file' )
+ def create_model_object( self, datatype_bunch ):
+ if self.model_object is None:
+ raise Exception( 'No model object configured for %s, please check the datatype configuration file' % self.__class__.__name__ )
+ if datatype_bunch is None:
+ # TODO: do it automatically
+ raise Exception( 'Unable to create %s model object without passing in data' % self.__class__.__name__ )
+ o = self.model_object()
+ o.create_from_datatype( datatype_bunch )
+ return o
+ def sniff( self, fname ):
+ try:
+ self.verify( open( fname, 'r' ) )
+ return True
+ except:
+ return False
diff -r 742fa2afcad9 -r a77ec2944999 lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py Fri Apr 23 11:14:26 2010 -0400
+++ b/lib/galaxy/webapps/community/model/__init__.py Fri Apr 23 11:30:16 2010 -0400
@@ -4,7 +4,7 @@
Naming: try to use class names that have a distinct plural form so that
the relationship cardinalities are obvious (e.g. prefer Dataset to Data)
"""
-import os.path, os, errno, sys, codecs, operator, tempfile, logging
+import os.path, os, errno, sys, codecs, operator, tempfile, logging, tarfile
from galaxy.util.bunch import Bunch
from galaxy import util
from galaxy.util.hash_util import *
@@ -86,93 +86,43 @@
self.prev_session_id = prev_session_id
class Tool( object ):
- def __init__( self, guid=None, name=None, description=None, category=None, version=None, user_id=None, external_filename=None ):
+ file_path = '/tmp'
+ def __init__( self, guid=None, tool_id=None, name=None, description=None, user_description=None, category=None, version=None, user_id=None, external_filename=None ):
self.guid = guid
+ self.tool_id = tool_id
self.name = name or "Unnamed tool"
self.description = description
+ self.user_description = user_description
self.category = category
self.version = version or "1.0.0"
self.user_id = user_id
self.external_filename = external_filename
+ def get_file_name( self ):
+ if not self.external_filename:
+ assert self.id is not None, "ID must be set before filename used (commit the object)"
+ dir = os.path.join( self.file_path, 'tools', *directory_hash_id( self.id ) )
+ # Create directory if it does not exist
+ if not os.path.exists( dir ):
+ os.makedirs( dir )
+ # Return filename inside hashed directory
+ filename = os.path.join( dir, "tool_%d.dat" % self.id )
+ else:
+ filename = self.external_filename
+ # Make filename absolute
+ return os.path.abspath( filename )
+ def set_file_name( self, filename ):
+ if not filename:
+ self.external_filename = None
+ else:
+ self.external_filename = filename
+ file_name = property( get_file_name, set_file_name )
+ def create_from_datatype( self, datatype_bunch ):
+ self.tool_id = datatype_bunch.id
+ self.name = datatype_bunch.name
+ self.version = datatype_bunch.version
+ self.description = datatype_bunch.description
+ self.user_id = datatype_bunch.user
-class Job( object ):
- """
- A job represents a request to run a tool given input datasets, tool
- parameters, and output datasets.
- """
- states = Bunch( NEW = 'new',
- UPLOAD = 'upload',
- WAITING = 'waiting',
- QUEUED = 'queued',
- RUNNING = 'running',
- OK = 'ok',
- ERROR = 'error',
- DELETED = 'deleted' )
- def __init__( self ):
- self.session_id = None
- self.tool_id = None
- self.tool_version = None
- self.command_line = None
- self.param_filename = None
- self.parameters = []
- self.input_datasets = []
- self.output_datasets = []
- self.output_library_datasets = []
- self.state = Job.states.NEW
- self.info = None
- self.job_runner_name = None
- self.job_runner_external_id = None
- def add_parameter( self, name, value ):
- self.parameters.append( JobParameter( name, value ) )
- def add_input_dataset( self, name, dataset ):
- self.input_datasets.append( JobToInputDatasetAssociation( name, dataset ) )
- def add_output_dataset( self, name, dataset ):
- self.output_datasets.append( JobToOutputDatasetAssociation( name, dataset ) )
- def add_output_library_dataset( self, name, dataset ):
- self.output_library_datasets.append( JobToOutputLibraryDatasetAssociation( name, dataset ) )
- def set_state( self, state ):
- self.state = state
- # For historical reasons state propagates down to datasets
- for da in self.output_datasets:
- da.dataset.state = state
- def get_param_values( self, app ):
- """
- Read encoded parameter values from the database and turn back into a
- dict of tool parameter values.
- """
- param_dict = dict( [ ( p.name, p.value ) for p in self.parameters ] )
- tool = app.toolbox.tools_by_id[self.tool_id]
- param_dict = tool.params_from_strings( param_dict, app )
- return param_dict
- def check_if_output_datasets_deleted( self ):
- """
- Return true if all of the output datasets associated with this job are
- in the deleted state
- """
- for dataset_assoc in self.output_datasets:
- dataset = dataset_assoc.dataset
- # only the originator of the job can delete a dataset to cause
- # cancellation of the job, no need to loop through history_associations
- if not dataset.deleted:
- return False
- return True
- def mark_deleted( self ):
- """
- Mark this job as deleted, and mark any output datasets as discarded.
- """
- self.state = Job.states.DELETED
- self.info = "Job output deleted by user before job completed."
- for dataset_assoc in self.output_datasets:
- dataset = dataset_assoc.dataset
- dataset.deleted = True
- dataset.state = dataset.states.DISCARDED
- for dataset in dataset.dataset.history_associations:
- # propagate info across shared datasets
- dataset.deleted = True
- dataset.blurb = 'deleted'
- dataset.peek = 'Job deleted'
- dataset.info = 'Job output deleted by user before job completed'
-
class Tag ( object ):
def __init__( self, id=None, type=None, parent_id=None, name=None ):
self.id = id
@@ -182,6 +132,12 @@
def __str__ ( self ):
return "Tag(id=%s, type=%i, parent_id=%s, name=%s)" % ( self.id, self.type, self.parent_id, self.name )
+class Category( object ):
+ def __init__( self, id=None, name=None, description=None ):
+ self.id = id
+ self.name = name
+ self.description = description
+
class ItemTagAssociation ( object ):
def __init__( self, id=None, user=None, item_id=None, tag_id=None, user_tname=None, value=None ):
self.id = id
@@ -198,6 +154,12 @@
class ToolAnnotationAssociation( object ):
pass
+class ToolCategoryAssociation( object ):
+ def __init__( self, id=None, tool=None, category=None ):
+ self.id = id
+ self.tool = tool
+ self.category = category
+
## ---- Utility methods -------------------------------------------------------
def directory_hash_id( id ):
@@ -207,7 +169,7 @@
if l < 4:
return [ "000" ]
# Pad with zeros until a multiple of three
- padded = ( ( 3 - len( s ) % 3 ) * "0" ) + s
+ padded = ( ( ( 3 - len( s ) ) % 3 ) * "0" ) + s
# Drop the last three digits -- 1000 files per directory
padded = padded[:-3]
# Break into chunks of three
diff -r 742fa2afcad9 -r a77ec2944999 lib/galaxy/webapps/community/model/mapping.py
--- a/lib/galaxy/webapps/community/model/mapping.py Fri Apr 23 11:14:26 2010 -0400
+++ b/lib/galaxy/webapps/community/model/mapping.py Fri Apr 23 11:30:16 2010 -0400
@@ -103,32 +103,28 @@
Tool.table = Table( "tool", metadata,
Column( "id", Integer, primary_key=True ),
Column( "guid", TrimmedString( 255 ), index=True, unique=True ),
+ Column( "tool_id", TrimmedString( 255 ), index=True, unique=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "name", TrimmedString( 255 ), index=True, unique=True ),
+ Column( "name", TrimmedString( 255 ), index=True ),
Column( "description" , TEXT ),
- Column( "category", TrimmedString( 255 ), index=True ),
+ Column( "user_description" , TEXT ),
Column( "version", TrimmedString( 255 ) ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "external_filename" , TEXT ),
Column( "deleted", Boolean, default=False ) )
-Job.table = Table( "job", metadata,
+Category.table = Table( "category", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "name", TrimmedString( 255 ), index=True, unique=True ),
+ Column( "description" , TEXT ) )
+
+ToolCategoryAssociation.table = Table( "tool_category_association", metadata,
+ Column( "id", Integer, primary_key=True ),
Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
- Column( "state", String( 64 ), index=True ),
- Column( "info", TrimmedString( 255 ) ),
- Column( "command_line", TEXT ),
- Column( "param_filename", String( 1024 ) ),
- Column( "runner_name", String( 255 ) ),
- Column( "stdout", TEXT ),
- Column( "stderr", TEXT ),
- Column( "traceback", TEXT ),
- Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True, nullable=True ),
- Column( "job_runner_name", String( 255 ) ),
- Column( "job_runner_external_id", String( 255 ) ) )
+ Column( "category_id", Integer, ForeignKey( "category.id" ), index=True ) )
Tag.table = Table( "tag", metadata,
Column( "id", Integer, primary_key=True ),
@@ -193,10 +189,6 @@
assign_mapper( context, GalaxySession, GalaxySession.table,
properties=dict( user=relation( User.mapper ) ) )
-assign_mapper( context, Job, Job.table,
- properties=dict( galaxy_session=relation( GalaxySession ),
- tool=relation( Tool ) ) )
-
assign_mapper( context, Tag, Tag.table,
properties=dict( children=relation(Tag, backref=backref( 'parent', remote_side=[Tag.table.c.id] ) ) ) )
@@ -207,7 +199,22 @@
properties=dict( tool=relation( Tool ), user=relation( User ) ) )
assign_mapper( context, Tool, Tool.table,
- properties = dict( user=relation( User.mapper ) ) )
+ properties = dict(
+ categories=relation( ToolCategoryAssociation ),
+ user=relation( User.mapper )
+ )
+)
+
+assign_mapper( context, Category, Category.table,
+ properties=dict( tools=relation( ToolCategoryAssociation ) ) )
+
+assign_mapper( context, ToolCategoryAssociation, ToolCategoryAssociation.table,
+ properties=dict(
+ category=relation(Category),
+ tool=relation(Tool)
+ )
+)
+
def guess_dialect_for_url( url ):
return (url.split(':', 1))[0]
diff -r 742fa2afcad9 -r a77ec2944999 lib/galaxy/webapps/community/model/migrate/versions/0001_initial_tables.py
--- a/lib/galaxy/webapps/community/model/migrate/versions/0001_initial_tables.py Fri Apr 23 11:14:26 2010 -0400
+++ b/lib/galaxy/webapps/community/model/migrate/versions/0001_initial_tables.py Fri Apr 23 11:30:16 2010 -0400
@@ -80,32 +80,28 @@
Tool_table = Table( "tool", metadata,
Column( "id", Integer, primary_key=True ),
Column( "guid", TrimmedString( 255 ), index=True, unique=True ),
+ Column( "tool_id", TrimmedString( 255 ), index=True, unique=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "name", TrimmedString( 255 ), index=True, unique=True ),
+ Column( "name", TrimmedString( 255 ), index=True ),
Column( "description" , TEXT ),
- Column( "category", TrimmedString( 255 ), index=True ),
+ Column( "user_description" , TEXT ),
Column( "version", TrimmedString( 255 ) ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "external_filename" , TEXT ),
Column( "deleted", Boolean, default=False ) )
-Job_table = Table( "job", metadata,
+Category_table = Table( "category", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "name", TrimmedString( 255 ), index=True, unique=True ),
+ Column( "description" , TEXT ) )
+
+ToolCategoryAssociation_table = Table( "tool_category_association", metadata,
+ Column( "id", Integer, primary_key=True ),
Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
- Column( "state", String( 64 ), index=True ),
- Column( "info", TrimmedString( 255 ) ),
- Column( "command_line", TEXT ),
- Column( "param_filename", String( 1024 ) ),
- Column( "runner_name", String( 255 ) ),
- Column( "stdout", TEXT ),
- Column( "stderr", TEXT ),
- Column( "traceback", TEXT ),
- Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True, nullable=True ),
- Column( "job_runner_name", String( 255 ) ),
- Column( "job_runner_external_id", String( 255 ) ) )
+ Column( "category_id", Integer, ForeignKey( "category.id" ), index=True ) )
Tag_table = Table( "tag", metadata,
Column( "id", Integer, primary_key=True ),
diff -r 742fa2afcad9 -r a77ec2944999 templates/webapps/community/tool/edit_tool.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/webapps/community/tool/edit_tool.mako Fri Apr 23 11:30:16 2010 -0400
@@ -0,0 +1,73 @@
+<%namespace file="/message.mako" import="render_msg" />
+
+<%!
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/webapps/community/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="title()">Edit Tool</%def>
+
+<h2>Edit Tool: ${tool.name} ${tool.version} (${tool.tool_id})</h2>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<form id="tool_edit_form" name="tool_edit_form" action="${h.url_for( controller='tool_browser', action='edit_tool' )}" enctype="multipart/form-data" method="post">
+<input type="hidden" name="id" value="${encoded_id}"/>
+<div class="toolForm">
+ <div class="toolFormTitle">Edit Tool</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <label>Categories:</label>
+ <div class="form-row-input">
+ <select name="category" multiple size=5>
+ %for category in categories:
+ %if category.id in [ tool_category.id for tool_category in tool.categories ]:
+ <option value="${category.id}" selected>${category.name}</option>
+ %else:
+ <option value="${category.id}">${category.name}</option>
+ %endif
+ %endfor
+ </select>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Description:</label>
+ <div class="form-row-input"><textarea name="description" rows="5" cols="35"></textarea></div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" class="primary-button" name="save_button" value="Save">
+ </div>
+ </div>
+</div>
+
+<p/>
+
+<div class="toolForm">
+ <div class="toolFormTitle">Upload new version</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <label>File:</label>
+ <div class="form-row-input"><input type="file" name="file_data"/></div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>URL:</label>
+ <div class="form-row-input"><input type="text" name="url" style="width: 100%;"/></div>
+ <div class="toolParamHelp" style="clear: both;">
+ Instead of uploading directly from your computer, you may instruct Galaxy to download the file from a Web or FTP address.
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" class="primary-button" name="save_button" value="Save">
+ </div>
+</div>
+</form>
diff -r 742fa2afcad9 -r a77ec2944999 templates/webapps/community/upload/upload.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/webapps/community/upload/upload.mako Fri Apr 23 11:30:16 2010 -0400
@@ -0,0 +1,66 @@
+<%namespace file="/message.mako" import="render_msg" />
+
+<%!
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/webapps/community/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="title()">Upload</%def>
+
+<h2>Upload</h2>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">Upload</div>
+ <div class="toolFormBody">
+ ## TODO: nginx
+ <form id="upload_form" name="upload_form" action="${h.url_for( controller='upload', action='upload' )}" enctype="multipart/form-data" method="post">
+ <div class="form-row">
+ <label>Upload Type</label>
+ <div class="form-row-input">
+ <select name="upload_type">
+ %for type_id, type_name in upload_types:
+ %if type_id == selected_upload_type:
+ <option value="${type_id}" selected>${type_name}</option>
+ %else:
+ <option value="${type_id}">${type_name}</option>
+ %endif
+ %endfor
+ </select>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Need help creating a tool file? See help below.
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>File:</label>
+ <div class="form-row-input"><input type="file" name="file_data"/></div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>URL:</label>
+ <div class="form-row-input"><input type="text" name="url" style="width: 100%;"/></div>
+ <div class="toolParamHelp" style="clear: both;">
+ Instead of uploading directly from your computer, you may instruct Galaxy to download the file from a Web or FTP address.
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" class="primary-button" name="upload_button" value="Upload">
+ </div>
+ </form>
+ </div>
+</div>
+<div class="toolHelp">
+ <div class="toolHelpBody">
+ <p><strong>Tool Files</strong></p>
+ </div>
+</div>
1
0

10 May '10
details: http://www.bx.psu.edu/hg/galaxy/rev/742fa2afcad9
changeset: 3683:742fa2afcad9
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Fri Apr 23 11:14:26 2010 -0400
description:
Updates for 'Profile Annotations for a set of genomic intervals' tool. This tool will now report a 'data version'. Add a script that creates the indexes and table description xml from a UCSC database dump.
diffstat:
lib/galaxy/tools/parameters/basic.py | 12 +-
scripts/tools/annotation_profiler/README.txt | 54 +
scripts/tools/annotation_profiler/build_profile_indexes.py | 338 ++++++++++
tool-data/annotation_profiler_options.xml.sample | 2 +-
tools/annotation_profiler/annotation_profiler.xml | 4 +-
tools/annotation_profiler/annotation_profiler_for_interval.py | 74 +-
6 files changed, 449 insertions(+), 35 deletions(-)
diffs (603 lines):
diff -r c37de7a983e7 -r 742fa2afcad9 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Thu Apr 22 21:11:17 2010 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Fri Apr 23 11:14:26 2010 -0400
@@ -960,8 +960,11 @@
if filter.get( 'type' ) == 'data_meta':
if filter.get( 'data_ref' ) not in self.filtered:
self.filtered[filter.get( 'data_ref' )] = {}
- self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )] = { 'value': filter.get( 'value' ), 'options':[] }
- recurse_option_elems( self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )]['options'], filter.find( 'options' ).findall( 'option' ) )
+ if filter.get( 'meta_key' ) not in self.filtered[filter.get( 'data_ref' )]:
+ self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )] = {}
+ if filter.get( 'value' ) not in self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )]:
+ self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )][filter.get( 'value' )] = []
+ recurse_option_elems( self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )][filter.get( 'value' )], filter.find( 'options' ).findall( 'option' ) )
else:
recurse_option_elems( self.options, elem.find( 'options' ).findall( 'option' ) )
@@ -974,8 +977,9 @@
dataset = dataset.dataset
if dataset:
for meta_key, meta_dict in filter_value.iteritems():
- if dataset.metadata.spec[meta_key].param.to_string( dataset.metadata.get( meta_key ) ) == meta_dict['value']:
- options.extend( meta_dict['options'] )
+ check_meta_val = dataset.metadata.spec[meta_key].param.to_string( dataset.metadata.get( meta_key ) )
+ if check_meta_val in meta_dict:
+ options.extend( meta_dict[check_meta_val] )
return options
return self.options
diff -r c37de7a983e7 -r 742fa2afcad9 scripts/tools/annotation_profiler/README.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/tools/annotation_profiler/README.txt Fri Apr 23 11:14:26 2010 -0400
@@ -0,0 +1,54 @@
+This file explains how to create annotation indexes for the annotation profiler tool. Annotation profiler indexes are an exceedingly simple binary format,
+containing no header information and consisting of an ordered linear list of (start,stop encoded individually as '<I') regions which are covered by a UCSC table partitioned
+by chromosome name. Genomic regions are merged by overlap / direct adjacency (e.g. a table having ranges of: 1-10, 6-12, 12-20 and 25-28 results in two merged ranges of: 1-20 and 25-28).
+
+Files are arranged like:
+/profiled_annotations/DBKEY/TABLE_NAME/
+ CHROMOSOME_NAME.covered
+ CHROMOSOME_NAME.total_coverage
+ CHROMOSOME_NAME.total_regions
+/profiled_annotations/DBKEY/
+ DBKEY_tables.xml
+ chromosomes.txt
+ profiled_info.txt
+
+
+where CHROMOSOME_NAME.covered is the binary file, CHROMOSOME_NAME.total_coverage is a text file containing the integer count of bases covered by the
+table and CHROMOSOME_NAME.total_regions contains the integer count of the number of regions found in CHROMOSOME_NAME.covered
+
+DBKEY_tables.xml should be appended to the annotation profile available table configuration file (tool-data/annotation_profiler_options.xml).
+The DBKEY should also be added as a new line to the annotation profiler valid builds file (annotation_profiler_valid_builds.txt).
+The output (/profiled_annotations/DBKEY) should be made available as GALAXY_ROOT/tool-data/annotation_profiler/DBKEY.
+
+profiled_info.txt contains info on the generated annotations, separated by lines with tab-delimited label,value pairs:
+ profiler_version - the version of the build_profile_indexes.py script that was used to generate the profiled data
+ dbkey - the dbkey used for the run
+ chromosomes - contains the names and lengths of chromosomes that were used to parse single-chromosome tables (tables divided into individual files by chromosome)
+ dump_time - the declared dump time of the database, taken from trackDb.txt.gz
+ profiled_time - seconds since epoch in utc for when the database dump was profiled
+ database_hash - a md5 hex digest of all the profiled table info
+
+
+Typical usage includes:
+
+python build_profile_indexes.py -d hg19 -i /ucsc_data/hg19/database/ > hg19.txt
+
+where the genome build is hg19 and /ucsc_data/hg19/database/ contains the downloaded database dump from UCSC (e.g. obtained by rsync: rsync -avzP rsync://hgdownload.cse.ucsc.edu/goldenPath/hg19/database/ /ucsc_data/hg19/database/).
+
+
+
+By default, chromosome names come from a file named 'chromInfo.txt.gz' found in the input directory, with FTP used as a backup.
+When FTP is used to obtain the names of chromosomes from UCSC for a particular genome build, alternate ftp sites and paths can be specified by using the --ftp_site and --ftp_path attributes.
+Chromosome names can instead be provided on the commandline via the --chromosomes option, which accepts a comma separated list of:ChromName1[=length],ChromName2[=length],...
+
+
+
+ usage = "usage: %prog options"
+ parser = OptionParser( usage=usage )
+ parser.add_option( '-d', '--dbkey', dest='dbkey', default='hg18', help='dbkey to process' )
+ parser.add_option( '-i', '--input_dir', dest='input_dir', default=os.path.join( 'golden_path','%s', 'database' ), help='Input Directory' )
+ parser.add_option( '-o', '--output_dir', dest='output_dir', default=os.path.join( 'profiled_annotations','%s' ), help='Output Directory' )
+ parser.add_option( '-c', '--chromosomes', dest='chromosomes', default='', help='Comma separated list of: ChromName1[=length],ChromName2[=length],...' )
+ parser.add_option( '-b', '--bitset_size', dest='bitset_size', default=DEFAULT_BITSET_SIZE, type='int', help='Default BitSet size; overridden by sizes specified in chromInfo.txt.gz or by --chromosomes' )
+ parser.add_option( '-f', '--ftp_site', dest='ftp_site', default='hgdownload.cse.ucsc.edu', help='FTP site; used for chromosome info when chromInfo.txt.gz method fails' )
+ parser.add_option( '-p', '--ftp_path', dest='ftp_path', default='/goldenPath/%s/chromosomes/', help='FTP Path; used for chromosome info when chromInfo.txt.gz method fails' )
diff -r c37de7a983e7 -r 742fa2afcad9 scripts/tools/annotation_profiler/build_profile_indexes.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/tools/annotation_profiler/build_profile_indexes.py Fri Apr 23 11:14:26 2010 -0400
@@ -0,0 +1,338 @@
+#!/usr/bin/env python
+#Dan Blankenberg
+
+VERSION = '1.0.0' # version of this script
+
+from optparse import OptionParser
+import os, gzip, struct, time
+from ftplib import FTP #do we want a different method than FTP to determine chromosome names, e.g. use a local copy
+
+#import md5 from hashlib; if python2.4 or less, use old md5
+try:
+ from hashlib import md5
+except ImportError:
+ from md5 import new as md5
+
+#import BitSet from bx-python, try using eggs and package resources, fall back to any local installation
+try:
+ from galaxy import eggs
+ import pkg_resources
+ pkg_resources.require( "bx-python" )
+except: pass #Maybe there is a local installation available
+from bx.bitset import BitSet
+
+#Define constants
+STRUCT_FMT = '<I'
+STRUCT_SIZE = struct.calcsize( STRUCT_FMT )
+DEFAULT_BITSET_SIZE = 300000000
+CHUNK_SIZE = 1024
+
+#Headers used to parse .sql files to determine column indexes for chromosome name, start and end
+alias_spec = {
+ 'chromCol' : [ 'chrom' , 'CHROMOSOME' , 'CHROM', 'Chromosome Name', 'tName' ],
+ 'startCol' : [ 'start' , 'START', 'chromStart', 'txStart', 'Start Position (bp)', 'tStart', 'genoStart' ],
+ 'endCol' : [ 'end' , 'END' , 'STOP', 'chromEnd', 'txEnd', 'End Position (bp)', 'tEnd', 'genoEnd' ],
+}
+
+#Headers used to parse trackDb.txt.gz
+#TODO: these should be parsed directly from trackDb.sql
+trackDb_headers = ["tableName", "shortLabel", "type", "longLabel", "visibility", "priority", "colorR", "colorG", "colorB", "altColorR", "altColorG", "altColorB", "useScore", "private", "restrictCount", "restrictList", "url", "html", "grp", "canPack", "settings"]
+
+def get_columns( filename ):
+ input_sql = open( filename ).read()
+ input_sql = input_sql.split( 'CREATE TABLE ' )[1].split( ';' )[0]
+ input_sql = input_sql.split( ' (', 1 )
+ table_name = input_sql[0].strip().strip( '`' )
+ input_sql = [ split.strip().split( ' ' )[0].strip().strip( '`' ) for split in input_sql[1].rsplit( ')', 1 )[0].strip().split( '\n' ) ]
+ print input_sql
+ chrom_col = None
+ start_col = None
+ end_col = None
+ for col_name in alias_spec['chromCol']:
+ for i, header_name in enumerate( input_sql ):
+ if col_name == header_name:
+ chrom_col = i
+ break
+ if chrom_col is not None:
+ break
+
+ for col_name in alias_spec['startCol']:
+ for i, header_name in enumerate( input_sql ):
+ if col_name == header_name:
+ start_col = i
+ break
+ if start_col is not None:
+ break
+
+ for col_name in alias_spec['endCol']:
+ for i, header_name in enumerate( input_sql ):
+ if col_name == header_name:
+ end_col = i
+ break
+ if end_col is not None:
+ break
+
+ return table_name, chrom_col, start_col, end_col
+
+
+def create_grouping_xml( input_dir, output_dir, dbkey ):
+ output_filename = os.path.join( output_dir, '%s_tables.xml' % dbkey )
+ def load_groups( file_name = 'grp.txt.gz' ):
+ groups = {}
+ for line in gzip.open( os.path.join( input_dir, file_name ) ):
+ fields = line.split( '\t' )
+ groups[fields[0]] = { 'desc': fields[1], 'priority': fields[2] }
+ return groups
+ f = gzip.open( os.path.join( input_dir, 'trackDb.txt.gz' ) )
+ out = open( output_filename, 'wb' )
+ tables = {}
+ cur_buf = ''
+ while True:
+ line = f.readline()
+ if not line: break
+ #remove new lines
+ line = line.rstrip( '\n\r' )
+ line = line.replace( '\\\t', ' ' ) #replace escaped tabs with space
+ cur_buf += "%s\n" % line.rstrip( '\\' )
+ if line.endswith( '\\' ):
+ continue #line is wrapped, next line
+ #all fields should be loaded now...
+ fields = cur_buf.split( '\t' )
+ cur_buf = '' #reset buffer
+ assert len( fields ) == len( trackDb_headers ), 'Failed Parsing trackDb.txt.gz; fields: %s' % fields
+ table_name = fields[ 0 ]
+ tables[ table_name ] = {}
+ for field_name, field_value in zip( trackDb_headers, fields ):
+ tables[ table_name ][ field_name ] = field_value
+ #split settings fields into dict
+ fields = fields[-1].split( '\n' )
+ tables[ table_name ][ 'settings' ] = {}
+ for field in fields:
+ setting_fields = field.split( ' ', 1 )
+ setting_name = setting_value = setting_fields[ 0 ]
+ if len( setting_fields ) > 1:
+ setting_value = setting_fields[ 1 ]
+ if setting_name or setting_value:
+ tables[ table_name ][ 'settings' ][ setting_name ] = setting_value
+ #Load Groups
+ groups = load_groups()
+ in_groups = {}
+ for table_name, values in tables.iteritems():
+ if os.path.exists( os.path.join( output_dir, table_name ) ):
+ group = values['grp']
+ if group not in in_groups:
+ in_groups[group]={}
+ #***NAME CHANGE***, 'subTrack' no longer exists as a setting...use 'parent' instead
+ #subTrack = values.get('settings', {} ).get( 'subTrack', table_name )
+ subTrack = values.get('settings', {} ).get( 'parent', table_name ).split( ' ' )[0] #need to split, because could be e.g. 'trackgroup on'
+ if subTrack not in in_groups[group]:
+ in_groups[group][subTrack]=[]
+ in_groups[group][subTrack].append( table_name )
+
+ assigned_tables = []
+ out.write( """<filter type="data_meta" data_ref="input1" meta_key="dbkey" value="%s">\n""" % ( dbkey ) )
+ out.write( " <options>\n" )
+ for group, subTracks in sorted( in_groups.iteritems() ):
+ out.write( """ <option name="%s" value="group-%s">\n""" % ( groups[group]['desc'], group ) )
+ for sub_name, sub_tracks in subTracks.iteritems():
+ if len( sub_tracks ) > 1:
+ out.write( """ <option name="%s" value="subtracks-%s">\n""" % ( sub_name, sub_name ) )
+ sub_tracks.sort()
+ for track in sub_tracks:
+ track_label = track
+ if "$" not in tables[track]['shortLabel']:
+ track_label = tables[track]['shortLabel']
+ out.write( """ <option name="%s" value="%s"/>\n""" % ( track_label, track ) )
+ assigned_tables.append( track )
+ out.write( " </option>\n" )
+ else:
+ track = sub_tracks[0]
+ track_label = track
+ if "$" not in tables[track]['shortLabel']:
+ track_label = tables[track]['shortLabel']
+ out.write( """ <option name="%s" value="%s"/>\n""" % ( track_label, track ) )
+ assigned_tables.append( track )
+ out.write( " </option>\n" )
+ unassigned_tables = list( sorted( [ table_dir for table_dir in os.listdir( output_dir ) if table_dir not in assigned_tables and os.path.isdir( os.path.join( output_dir, table_dir ) ) ] ) )
+ if unassigned_tables:
+ out.write( """ <option name="Uncategorized Tables" value="group-trackDbUnassigned">\n""" )
+ for table_name in unassigned_tables:
+ out.write( """ <option name="%s" value="%s"/>\n""" % ( table_name, table_name ) )
+ out.write( " </option>\n" )
+ out.write( " </options>\n" )
+ out.write( """</filter>\n""" )
+ out.close()
+
+def write_database_dump_info( input_dir, output_dir, dbkey, chrom_lengths, default_bitset_size ):
+ #generate hash for profiled table directories
+ #sort directories off output root (files in output root not hashed, including the profiler_info.txt file)
+ #sort files in each directory and hash file contents
+ profiled_hash = md5()
+ for table_dir in sorted( [ table_dir for table_dir in os.listdir( output_dir ) if os.path.isdir( os.path.join( output_dir, table_dir ) ) ] ):
+ for filename in sorted( os.listdir( os.path.join( output_dir, table_dir ) ) ):
+ f = open( os.path.join( output_dir, table_dir, filename ), 'rb' )
+ while True:
+ hash_chunk = f.read( CHUNK_SIZE )
+ if not hash_chunk:
+ break
+ profiled_hash.update( hash_chunk )
+ profiled_hash = profiled_hash.hexdigest()
+
+ #generate hash for input dir
+ #sort directories off input root
+ #sort files in each directory and hash file contents
+ database_hash = md5()
+ for dirpath, dirnames, filenames in sorted( os.walk( input_dir ) ):
+ for filename in sorted( filenames ):
+ f = open( os.path.join( input_dir, dirpath, filename ), 'rb' )
+ while True:
+ hash_chunk = f.read( CHUNK_SIZE )
+ if not hash_chunk:
+ break
+ database_hash.update( hash_chunk )
+ database_hash = database_hash.hexdigest()
+
+ #write out info file
+ out = open( os.path.join( output_dir, 'profiler_info.txt' ), 'wb' )
+ out.write( 'dbkey\t%s\n' % ( dbkey ) )
+ out.write( 'chromosomes\t%s\n' % ( ','.join( [ '%s=%s' % ( chrom_name, chrom_len ) for chrom_name, chrom_len in chrom_lengths.iteritems() ] ) ) )
+ out.write( 'bitset_size\t%s\n' % ( default_bitset_size ) )
+ for line in open( os.path.join( input_dir, 'trackDb.sql' ) ):
+ line = line.strip()
+ if line.startswith( '-- Dump completed on ' ):
+ line = line[ len( '-- Dump completed on ' ): ]
+ out.write( 'dump_time\t%s\n' % ( line ) )
+ break
+ out.write( 'dump_hash\t%s\n' % ( database_hash ) )
+ out.write( 'profiler_time\t%s\n' % ( time.time() ) )
+ out.write( 'profiler_hash\t%s\n' % ( profiled_hash ) )
+ out.write( 'profiler_version\t%s\n' % ( VERSION ) )
+ out.write( 'profiler_struct_format\t%s\n' % ( STRUCT_FMT ) )
+ out.write( 'profiler_struct_size\t%s\n' % ( STRUCT_SIZE ) )
+ out.close()
+
+def __main__():
+ usage = "usage: %prog options"
+ parser = OptionParser( usage=usage )
+ parser.add_option( '-d', '--dbkey', dest='dbkey', default='hg18', help='dbkey to process' )
+ parser.add_option( '-i', '--input_dir', dest='input_dir', default=os.path.join( 'golden_path','%s', 'database' ), help='Input Directory' )
+ parser.add_option( '-o', '--output_dir', dest='output_dir', default=os.path.join( 'profiled_annotations','%s' ), help='Output Directory' )
+ parser.add_option( '-c', '--chromosomes', dest='chromosomes', default='', help='Comma separated list of: ChromName1[=length],ChromName2[=length],...' )
+ parser.add_option( '-b', '--bitset_size', dest='bitset_size', default=DEFAULT_BITSET_SIZE, type='int', help='Default BitSet size; overridden by sizes specified in chromInfo.txt.gz or by --chromosomes' )
+ parser.add_option( '-f', '--ftp_site', dest='ftp_site', default='hgdownload.cse.ucsc.edu', help='FTP site; used for chromosome info when chromInfo.txt.gz method fails' )
+ parser.add_option( '-p', '--ftp_path', dest='ftp_path', default='/goldenPath/%s/chromosomes/', help='FTP Path; used for chromosome info when chromInfo.txt.gz method fails' )
+
+ ( options, args ) = parser.parse_args()
+
+ input_dir = options.input_dir
+ if '%' in input_dir:
+ input_dir = input_dir % options.dbkey
+ assert os.path.exists( input_dir ), 'Input directory does not exist'
+ output_dir = options.output_dir
+ if '%' in output_dir:
+ output_dir = output_dir % options.dbkey
+ assert not os.path.exists( output_dir ), 'Output directory already exists'
+ os.makedirs( output_dir )
+ ftp_path = options.ftp_path
+ if '%' in ftp_path:
+ ftp_path = ftp_path % options.dbkey
+
+ #Get chromosome names and lengths
+ chrom_lengths = {}
+ if options.chromosomes:
+ for chrom in options.chromosomes.split( ',' ):
+ fields = chrom.split( '=' )
+ chrom = fields[0]
+ if len( fields ) > 1:
+ chrom_len = int( fields[1] )
+ else:
+ chrom_len = options.bitset_size
+ chrom_lengths[ chrom ] = chrom_len
+ chroms = chrom_lengths.keys()
+ print 'Chrom info taken from command line option.'
+ else:
+ try:
+ for line in gzip.open( os.path.join( input_dir, 'chromInfo.txt.gz' ) ):
+ fields = line.strip().split( '\t' )
+ chrom_lengths[ fields[0] ] = int( fields[ 1 ] )
+ chroms = chrom_lengths.keys()
+ print 'Chrom info taken from chromInfo.txt.gz.'
+ except Exception, e:
+ print 'Error loading chrom info from chromInfo.txt.gz, trying FTP method.'
+ chrom_lengths = {} #zero out chrom_lengths
+ chroms = []
+ ftp = FTP( options.ftp_site )
+ ftp.login()
+ for name in ftp.nlst( ftp_path ):
+ if name.endswith( '.fa.gz' ):
+ chroms.append( name.split( '/' )[-1][ :-len( '.fa.gz' ) ] )
+ ftp.close()
+ for chrom in chroms:
+ chrom_lengths[ chrom ] = options.bitset_size
+ #sort chroms by length of name, descending; necessary for when table names start with chrom name
+ chroms = list( reversed( [ chrom for chrom_len, chrom in sorted( [ ( len( chrom ), chrom ) for chrom in chroms ] ) ] ) )
+
+ #parse tables from local files
+ #loop through directory contents, if file ends in '.sql', process table
+ for filename in os.listdir( input_dir ):
+ if filename.endswith ( '.sql' ):
+ base_filename = filename[ 0:-len( '.sql' ) ]
+ table_out_dir = os.path.join( output_dir, base_filename )
+ #some tables are chromosome specific, let's strip off the chrom name
+ for chrom in chroms:
+ if base_filename.startswith( "%s_" % chrom ):
+ #found chromosome
+ table_out_dir = os.path.join( output_dir, base_filename[len( "%s_" % chrom ):] )
+ break
+ #create table dir
+ if not os.path.exists( table_out_dir ):
+ os.mkdir( table_out_dir ) #table dir may already exist in the case of single chrom tables
+ print "Created table dir (%s)." % table_out_dir
+ else:
+ print "Table dir (%s) already exists." % table_out_dir
+ #find column assignments
+ table_name, chrom_col, start_col, end_col = get_columns( "%s.sql" % os.path.join( input_dir, base_filename ) )
+ if chrom_col is None or start_col is None or end_col is None:
+ print "Table %s (%s) does not appear to have a chromosome, a start, or a stop." % ( table_name, "%s.sql" % os.path.join( input_dir, base_filename ) )
+ if not os.listdir( table_out_dir ):
+ print "Removing empty table (%s) directory (%s)." % ( table_name, table_out_dir )
+ os.rmdir( table_out_dir )
+ continue
+ #build bitsets from table
+ bitset_dict = {}
+ for line in gzip.open( '%s.txt.gz' % os.path.join( input_dir, base_filename ) ):
+ fields = line.strip().split( '\t' )
+ chrom = fields[ chrom_col ]
+ start = int( fields[ start_col ] )
+ end = int( fields[ end_col ] )
+ if chrom not in bitset_dict:
+ bitset_dict[ chrom ] = BitSet( chrom_lengths.get( chrom, options.bitset_size ) )
+ bitset_dict[ chrom ].set_range( start, end - start )
+ #write bitsets as profiled annotations
+ for chrom_name, chrom_bits in bitset_dict.iteritems():
+ out = open( os.path.join( table_out_dir, '%s.covered' % chrom_name ), 'wb' )
+ end = 0
+ total_regions = 0
+ total_coverage = 0
+ max_size = chrom_lengths.get( chrom_name, options.bitset_size )
+ while True:
+ start = chrom_bits.next_set( end )
+ if start >= max_size:
+ break
+ end = chrom_bits.next_clear( start )
+ out.write( struct.pack( STRUCT_FMT, start ) )
+ out.write( struct.pack( STRUCT_FMT, end ) )
+ total_regions += 1
+ total_coverage += end - start
+ if end >= max_size:
+ break
+ out.close()
+ open( os.path.join( table_out_dir, '%s.total_regions' % chrom_name ), 'wb' ).write( str( total_regions ) )
+ open( os.path.join( table_out_dir, '%s.total_coverage' % chrom_name ), 'wb' ).write( str( total_coverage ) )
+
+ #create xml
+ create_grouping_xml( input_dir, output_dir, options.dbkey )
+ #create database dump info file, for database version control
+ write_database_dump_info( input_dir, output_dir, options.dbkey, chrom_lengths, options.bitset_size )
+
+if __name__ == "__main__": __main__()
diff -r c37de7a983e7 -r 742fa2afcad9 tool-data/annotation_profiler_options.xml.sample
--- a/tool-data/annotation_profiler_options.xml.sample Thu Apr 22 21:11:17 2010 -0400
+++ b/tool-data/annotation_profiler_options.xml.sample Fri Apr 23 11:14:26 2010 -0400
@@ -1,4 +1,4 @@
-<filter type="meta_key" name="dbkey" value="hg18">
+<filter type="data_meta" data_ref="input1" meta_key="dbkey" value="hg18">
<options>
<option name="Mapping and Sequencing Tracks" value="group-map">
<option name="STS Markers" value="stsMap"/>
diff -r c37de7a983e7 -r 742fa2afcad9 tools/annotation_profiler/annotation_profiler.xml
--- a/tools/annotation_profiler/annotation_profiler.xml Thu Apr 22 21:11:17 2010 -0400
+++ b/tools/annotation_profiler/annotation_profiler.xml Fri Apr 23 11:14:26 2010 -0400
@@ -1,6 +1,6 @@
<tool id="Annotation_Profiler_0" name="Profile Annotations" Version="1.0.0">
<description>for a set of genomic intervals</description>
- <command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p ${GALAXY_DATA_INDEX_DIR}/annotation_profiler/$dbkey $summary -l ${chromInfo} -b 3 -t $table_names</command>
+ <command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p ${GALAXY_DATA_INDEX_DIR}/annotation_profiler/$dbkey $summary -b 3 -t $table_names</command>
<inputs>
<param format="interval" name="input1" type="data" label="Choose Intervals">
<validator type="dataset_metadata_in_file" filename="annotation_profiler_valid_builds.txt" metadata_name="dbkey" metadata_column="0" message="Profiling is not currently available for this species."/>
@@ -41,7 +41,7 @@
<help>
**What it does**
-Takes an input set of intervals and for each interval determines the base coverage of the interval by a set of features (tables) available from UCSC.
+Takes an input set of intervals and for each interval determines the base coverage of the interval by a set of features (tables) available from UCSC. Genomic regions from the input feature data have been merged by overlap / direct adjacency (e.g. a table having ranges of: 1-10, 6-12, 12-20 and 25-28 results in two merged ranges of: 1-20 and 25-28).
By default, this tool will check the coverage of your intervals against all available features; you may, however, choose to select only those tables that you want to include. Selecting a section heading will effectively cause all of its children to be selected.
diff -r c37de7a983e7 -r 742fa2afcad9 tools/annotation_profiler/annotation_profiler_for_interval.py
--- a/tools/annotation_profiler/annotation_profiler_for_interval.py Thu Apr 22 21:11:17 2010 -0400
+++ b/tools/annotation_profiler/annotation_profiler_for_interval.py Fri Apr 23 11:14:26 2010 -0400
@@ -18,12 +18,13 @@
assert sys.version_info[:2] >= ( 2, 4 )
class CachedRangesInFile:
- fmt = '<I'
- fmt_size = struct.calcsize( fmt )
- def __init__( self, filename ):
+ DEFAULT_STRUCT_FORMAT = '<I'
+ def __init__( self, filename, profiler_info ):
self.file_size = os.stat( filename ).st_size
self.file = open( filename, 'rb' )
self.filename = filename
+ self.fmt = profiler_info.get( 'profiler_struct_format', self.DEFAULT_STRUCT_FORMAT )
+ self.fmt_size = int( profiler_info.get( 'profiler_struct_size', struct.calcsize( self.fmt ) ) )
self.length = int( self.file_size / self.fmt_size / 2 )
self._cached_ranges = [ None for i in xrange( self.length ) ]
def __getitem__( self, i ):
@@ -43,9 +44,9 @@
return self.length
class RegionCoverage:
- def __init__( self, filename_base ):
+ def __init__( self, filename_base, profiler_info ):
try:
- self._coverage = CachedRangesInFile( "%s.covered" % filename_base )
+ self._coverage = CachedRangesInFile( "%s.covered" % filename_base, profiler_info )
except Exception, e:
#print "Error loading coverage file %s: %s" % ( "%s.covered" % filename_base, e )
self._coverage = []
@@ -89,12 +90,14 @@
return coverage, region_count, start_index
class CachedCoverageReader:
- def __init__( self, base_file_path, buffer = 10, table_names = None ):
+ def __init__( self, base_file_path, buffer = 10, table_names = None, profiler_info = None ):
self._base_file_path = base_file_path
self._buffer = buffer #number of chromosomes to keep in memory at a time
self._coverage = {}
- if table_names is None: table_names = os.listdir( self._base_file_path )
+ if table_names is None: table_names = [ table_dir for table_dir in os.listdir( self._base_file_path ) if os.path.isdir( os.path.join( self._base_file_path, table_dir ) ) ]
for tablename in table_names: self._coverage[tablename] = {}
+ if profiler_info is None: profiler_info = {}
+ self._profiler_info = profiler_info
def iter_table_coverage_by_region( self, chrom, start, end ):
for tablename, coverage, regions in self.iter_table_coverage_regions_by_region( chrom, start, end ):
yield tablename, coverage
@@ -107,7 +110,7 @@
if len( chromosomes ) >= self._buffer:
#randomly remove one chromosome from this table
del chromosomes[ chromosomes.keys().pop( random.randint( 0, self._buffer - 1 ) ) ]
- chromosomes[chrom] = RegionCoverage( os.path.join ( self._base_file_path, tablename, chrom ) )
+ chromosomes[chrom] = RegionCoverage( os.path.join ( self._base_file_path, tablename, chrom ), self._profiler_info )
coverage, regions, index = chromosomes[chrom].get_coverage_regions_index_overlap( start, end )
yield tablename, coverage, regions, index
@@ -240,19 +243,35 @@
print "%s has max length of %s, exceeded by %s%s." % ( chrom, chrom_lengths.get( chrom ), ", ".join( map( str, regions[:3] ) ), extra_region_info )
class ChromosomeLengths:
- def __init__( self, filename ):
+ def __init__( self, profiler_info ):
self.chroms = {}
- try:
- for line in open( filename ):
- try:
- fields = line.strip().split( "\t" )
- self.chroms[fields[0]] = int( fields[1] )
- except:
- continue
- except:
- pass
+ self.default_bitset_size = int( profiler_info.get( 'bitset_size', bx.bitset.MAX ) )
+ chroms = profiler_info.get( 'chromosomes', None )
+ if chroms:
+ for chrom in chroms.split( ',' ):
+ for fields in chrom.rsplit( '=', 1 ):
+ if len( fields ) == 2:
+ self.chroms[ fields[0] ] = int( fields[1] )
+ else:
+ self.chroms[ fields[0] ] = self.default_bitset_size
def get( self, name ):
- return self.chroms.get( name, bx.bitset.MAX )
+ return self.chroms.get( name, self.default_bitset_size )
+
+def parse_profiler_info( filename ):
+ profiler_info = {}
+ try:
+ for line in open( filename ):
+ fields = line.rstrip( '\n\r' ).split( '\t', 1 )
+ if len( fields ) == 2:
+ if fields[0] in profiler_info:
+ if not isinstance( profiler_info[ fields[0] ], list ):
+ profiler_info[ fields[0] ] = [ profiler_info[ fields[0] ] ]
+ profiler_info[ fields[0] ].append( fields[1] )
+ else:
+ profiler_info[ fields[0] ] = fields[1]
+ except:
+ pass #likely missing file
+ return profiler_info
def __main__():
parser = optparse.OptionParser()
@@ -294,16 +313,10 @@
help='Path to profiled data for this organism'
)
parser.add_option(
- '-l','--lengths',
- dest='lengths',
- type='str',default='test-data/shared/ucsc/hg18.len',
- help='Path to chromosome lengths data for this organism'
- )
- parser.add_option(
'-t','--table_names',
dest='table_names',
type='str',default='None',
- help='Path to profiled data for this organism'
+ help='Table names requested'
)
parser.add_option(
'-i','--input',
@@ -327,14 +340,19 @@
options, args = parser.parse_args()
+ #get profiler_info
+ profiler_info = parse_profiler_info( os.path.join( options.path, 'profiler_info.txt' ) )
+
table_names = options.table_names.split( "," )
if table_names == ['None']: table_names = None
- coverage_reader = CachedCoverageReader( options.path, buffer = options.buffer, table_names = table_names )
+ coverage_reader = CachedCoverageReader( options.path, buffer = options.buffer, table_names = table_names, profiler_info = profiler_info )
if options.summary:
- profile_summary( options.interval_filename, options.chrom_col - 1, options.start_col - 1, options.end_col -1, options.out_filename, options.keep_empty, coverage_reader, ChromosomeLengths( options.lengths ) )
+ profile_summary( options.interval_filename, options.chrom_col - 1, options.start_col - 1, options.end_col -1, options.out_filename, options.keep_empty, coverage_reader, ChromosomeLengths( profiler_info ) )
else:
profile_per_interval( options.interval_filename, options.chrom_col - 1, options.start_col - 1, options.end_col -1, options.out_filename, options.keep_empty, coverage_reader )
+ #print out data version info
+ print 'Data version (%s:%s:%s)' % ( profiler_info.get( 'dbkey', 'unknown' ), profiler_info.get( 'profiler_hash', 'unknown' ), profiler_info.get( 'dump_time', 'unknown' ) )
if __name__ == "__main__": __main__()
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/c37de7a983e7
changeset: 3682:c37de7a983e7
user: rc
date: Thu Apr 22 21:11:17 2010 -0400
description:
lims: added request_types permissions
diffstat:
lib/galaxy/model/__init__.py | 19 ++
lib/galaxy/model/mapping.py | 15 +
lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py | 34 +++
lib/galaxy/security/__init__.py | 38 ++++-
lib/galaxy/web/controllers/requests.py | 3 +-
lib/galaxy/web/controllers/requests_admin.py | 59 +++++-
templates/admin/requests/request_type_permissions.mako | 92 ++++++++++
templates/webapps/galaxy/base_panels.mako | 2 +-
8 files changed, 253 insertions(+), 9 deletions(-)
diffs (392 lines):
diff -r 1b30f5fa152b -r c37de7a983e7 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Apr 22 16:05:08 2010 -0400
+++ b/lib/galaxy/model/__init__.py Thu Apr 22 21:11:17 2010 -0400
@@ -73,6 +73,18 @@
if can_show:
libraries[ library ] = hidden_folder_ids
return libraries
+ def accessible_request_types(self, trans):
+ # get all permitted libraries for this user
+ all_rt_list = trans.sa_session.query( trans.app.model.RequestType ) \
+ .filter( trans.app.model.RequestType.table.c.deleted == False ) \
+ .order_by( trans.app.model.RequestType.name )
+ roles = self.all_roles()
+ rt_list = []
+ for rt in all_rt_list:
+ for permission in rt.actions:
+ if permission.role.id in [r.id for r in roles]:
+ rt_list.append(rt)
+ return list(set(rt_list))
class Job( object ):
"""
@@ -1445,6 +1457,7 @@
self.comment = comment
class RequestType( object ):
+ permitted_actions = get_permitted_actions( filter='REQUEST_TYPE' )
def __init__(self, name=None, desc=None, request_form=None, sample_form=None,
datatx_info=None):
self.name = name
@@ -1452,6 +1465,12 @@
self.request_form = request_form
self.sample_form = sample_form
self.datatx_info = datatx_info
+
+class RequestTypePermissions( object ):
+ def __init__( self, action, request_type, role ):
+ self.action = action
+ self.request_type = request_type
+ self.role = role
class Sample( object ):
transfer_status = Bunch( NOT_STARTED = 'Not started',
diff -r 1b30f5fa152b -r c37de7a983e7 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Thu Apr 22 16:05:08 2010 -0400
+++ b/lib/galaxy/model/mapping.py Thu Apr 22 21:11:17 2010 -0400
@@ -628,6 +628,14 @@
Column( "datatx_info", JSONType() ),
Column( "deleted", Boolean, index=True, default=False ) )
+RequestTypePermissions.table = Table( "request_type_permissions", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "action", TEXT ),
+ Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), nullable=True, index=True ),
+ Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
FormValues.table = Table('form_values', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
@@ -923,6 +931,13 @@
primaryjoin=( RequestType.table.c.sample_form_id == FormDefinition.table.c.id ) ),
) )
+assign_mapper( context, RequestTypePermissions, RequestTypePermissions.table,
+ properties=dict(
+ request_type=relation( RequestType, backref="actions" ),
+ role=relation( Role, backref="request_type_actions" )
+ )
+)
+
assign_mapper( context, FormDefinition, FormDefinition.table,
properties=dict( current=relation( FormDefinitionCurrent,
primaryjoin=( FormDefinition.table.c.form_definition_current_id == FormDefinitionCurrent.table.c.id ) )
diff -r 1b30f5fa152b -r c37de7a983e7 lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py Thu Apr 22 21:11:17 2010 -0400
@@ -0,0 +1,34 @@
+"""
+Migration script to add the request_type_permissions table.
+"""
+
+from sqlalchemy import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData( migrate_engine )
+
+RequestTypePermissions_table = Table( "request_type_permissions", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "action", TEXT ),
+ Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), nullable=True, index=True ),
+ Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ try:
+ RequestTypePermissions_table.create()
+ except Exception, e:
+ log.debug( "Creating request_type_permissions table failed: %s" % str( e ) )
+
+def downgrade():
+ pass
\ No newline at end of file
diff -r 1b30f5fa152b -r c37de7a983e7 lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py Thu Apr 22 16:05:08 2010 -0400
+++ b/lib/galaxy/security/__init__.py Thu Apr 22 21:11:17 2010 -0400
@@ -24,7 +24,10 @@
LIBRARY_ACCESS = Action( "access library", "Restrict access to this library to only role members", "restrict" ),
LIBRARY_ADD = Action( "add library item", "Role members can add library items to this library item", "grant" ),
LIBRARY_MODIFY = Action( "modify library item", "Role members can modify this library item", "grant" ),
- LIBRARY_MANAGE = Action( "manage library permissions", "Role members can manage roles associated with permissions on this library item", "grant" )
+ LIBRARY_MANAGE = Action( "manage library permissions", "Role members can manage roles associated with permissions on this library item", "grant" ),
+ # Request type permissions
+ REQUEST_TYPE_ACCESS = Action( "access request_type", "Restrict access to this request_type to only role members", "restrict" )
+
)
def get_action( self, name, default=None ):
"""Get a permitted action by its dict key or action name"""
@@ -754,6 +757,39 @@
else:
hidden_folder_ids = '%d' % sub_folder.id
return False, hidden_folder_ids
+ #
+ # RequestType Permissions
+ #
+ def can_access_request_type( self, roles, request_type ):
+ action = self.permitted_actions.REQUEST_TYPE_ACCESS
+ request_type_actions = []
+ for permission in request_type.actions:
+ if permission.action == action.action:
+ request_type_actions.append(permission)
+ if not request_type_actions:
+ return action.model == 'restrict'
+ ret_val = False
+ for item_action in item_actions:
+ if item_action.role in roles:
+ ret_val = True
+ break
+ return ret_val
+ def set_request_type_permissions( self, request_type, permissions={} ):
+ # Set new permissions on request_type, eliminating all current permissions
+ for role_assoc in request_type.actions:
+ self.sa_session.delete( role_assoc )
+ # Add the new permissions on request_type
+ item_class = self.model.RequestType
+ permission_class = self.model.RequestTypePermissions
+ for action, roles in permissions.items():
+ if isinstance( action, Action ):
+ action = action.action
+ for role_assoc in [ permission_class( action, request_type, role ) for role in roles ]:
+ self.sa_session.add( role_assoc )
+ self.sa_session.flush()
+
+
+
class HostAgent( RBACAgent ):
"""
diff -r 1b30f5fa152b -r c37de7a983e7 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Thu Apr 22 16:05:08 2010 -0400
+++ b/lib/galaxy/web/controllers/requests.py Thu Apr 22 21:11:17 2010 -0400
@@ -623,8 +623,7 @@
details=details,
edit_mode=edit_mode)
def __select_request_type(self, trans, rtid):
- requesttype_list = trans.sa_session.query( trans.app.model.RequestType )\
- .order_by( trans.app.model.RequestType.name.asc() )
+ requesttype_list = trans.user.accessible_request_types(trans)
rt_ids = ['none']
for rt in requesttype_list:
if not rt.deleted:
diff -r 1b30f5fa152b -r c37de7a983e7 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Thu Apr 22 16:05:08 2010 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Thu Apr 22 21:11:17 2010 -0400
@@ -195,7 +195,8 @@
visible=False,
filterable="standard" ) )
operations = [
- #grids.GridOperation( "Update", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
+ grids.GridOperation( "Permissions", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
+ #grids.GridOperation( "Clone", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
grids.GridOperation( "Delete", allow_multiple=True, condition=( lambda item: not item.deleted ) ),
grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ),
]
@@ -258,6 +259,7 @@
'''
List all request made by the current user
'''
+ #self.__sample_datasets(trans, **kwd)
if 'operation' in kwd:
operation = kwd['operation'].lower()
if not kwd.get( 'id', None ):
@@ -534,8 +536,7 @@
#---- Request Creation ----------------------------------------------------------
#
def __select_request_type(self, trans, rtid):
- requesttype_list = trans.sa_session.query( trans.app.model.RequestType )\
- .order_by( trans.app.model.RequestType.name.asc() )
+ requesttype_list = trans.user.accessible_request_types(trans)
rt_ids = ['none']
for rt in requesttype_list:
if not rt.deleted:
@@ -1771,6 +1772,25 @@
dataset_index=dataset_index,
message=message,
status=status)
+
+# def __sample_datasets(self, trans, **kwd):
+# samples = trans.sa_session.query( trans.app.model.Sample ).all()
+# for s in samples:
+# if s.dataset_files:
+# newdf = []
+# for df in s.dataset_files:
+# if type(s.dataset_files[0]) == type([1,2]):
+# filepath = df[0]
+# status = df[1]
+# newdf.append(dict(filepath=filepath,
+# status=status,
+# name=filepath.split('/')[-1],
+# error_msg='',
+# size='Unknown'))
+# s.dataset_files = newdf
+# trans.sa_session.add( s )
+# trans.sa_session.flush()
+#
##
#### Request Type Stuff ###################################################
##
@@ -1792,8 +1812,10 @@
return self.__delete_request_type( trans, **kwd )
elif operation == "undelete":
return self.__undelete_request_type( trans, **kwd )
-# elif operation == "update":
-# return self.__edit_request( trans, **kwd )
+ elif operation == "clone":
+ return self.__clone_request_type( trans, **kwd )
+ elif operation == "permissions":
+ return self.__show_request_type_permissions( trans, **kwd )
# Render the grid view
return self.requesttype_grid( trans, **kwd )
def __view_request_type(self, trans, **kwd):
@@ -1992,3 +2014,30 @@
action='manage_request_types',
message='%i request type(s) has been undeleted' % len(id_list),
status='done') )
+ def __show_request_type_permissions(self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ try:
+ rt = trans.sa_session.query( trans.app.model.RequestType ).get( trans.security.decode_id(kwd['id']) )
+ except:
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='manage_request_types',
+ status='error',
+ message="Invalid requesttype ID") )
+ roles = trans.sa_session.query( trans.app.model.Role ) \
+ .filter( trans.app.model.Role.table.c.deleted==False ) \
+ .order_by( trans.app.model.Role.table.c.name )
+ if params.get( 'update_roles_button', False ):
+ permissions = {}
+ for k, v in trans.app.model.RequestType.permitted_actions.items():
+ in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ]
+ permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
+ trans.app.security_agent.set_request_type_permissions( rt, permissions )
+ trans.sa_session.refresh( rt )
+ message = "Permissions updated for request type '%s'" % rt.name
+ return trans.fill_template( '/admin/requests/request_type_permissions.mako',
+ request_type=rt,
+ roles=roles,
+ status=status,
+ message=message)
diff -r 1b30f5fa152b -r c37de7a983e7 templates/admin/requests/request_type_permissions.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/admin/requests/request_type_permissions.mako Thu Apr 22 21:11:17 2010 -0400
@@ -0,0 +1,92 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<script type="text/javascript">
+ $( document ).ready( function () {
+ $( '.role_add_button' ).click( function() {
+ var action = this.id.substring( 0, this.id.lastIndexOf( '_add_button' ) )
+ var in_select = '#' + action + '_in_select';
+ var out_select = '#' + action + '_out_select';
+ return !$( out_select + ' option:selected' ).remove().appendTo( in_select );
+ });
+ $( '.role_remove_button' ).click( function() {
+ var action = this.id.substring( 0, this.id.lastIndexOf( '_remove_button' ) )
+ var in_select = '#' + action + '_in_select';
+ var out_select = '#' + action + '_out_select';
+ return !$( in_select + ' option:selected' ).remove().appendTo( out_select );
+ });
+ $( 'form#edit_role_associations' ).submit( function() {
+ $( '.in_select option' ).each(function( i ) {
+ $( this ).attr( "selected", "selected" );
+ });
+ });
+ });
+</script>
+
+
+<div class="toolForm">
+ <div class="toolFormTitle">Manage permissions on "${request_type.name}"</div>
+ <div class="toolFormBody">
+ <form name="request_type_permissions" id="request_type_permissions" action="${h.url_for( controller='requests_admin', action='manage_request_types', operation="permissions", id=trans.security.encode_id(request_type.id))}" method="post">
+ <div class="form-row">
+## %for k, v in permitted_actions:
+## %if k not in do_not_render:
+## <div class="form-row">
+## ${render_select( current_actions, k, v, all_roles )}
+## </div>
+## %endif
+## %endfor
+## <%def name="render_select( current_actions, action_key, action, all_roles )">
+ <%
+ obj_name = request_type.name
+ current_actions = request_type.actions
+ permitted_actions = trans.app.model.RequestType.permitted_actions.items()
+ action = trans.app.model.RequestType.permitted_actions.REQUEST_TYPE_ACCESS
+ obj_str = 'request_type %s' % obj_name
+ obj_type = 'request_type'
+ all_roles = roles
+ action_key = 'REQUEST_TYPE_ACCESS'
+
+ import sets
+ in_roles = sets.Set()
+ for a in current_actions:
+ if a.action == action.action:
+ in_roles.add( a.role )
+ out_roles = filter( lambda x: x not in in_roles, all_roles )
+ %>
+ <p>
+ <b>${action.action}:</b> ${action.description}
+ </p>
+ <div style="width: 100%; white-space: nowrap;">
+ <div style="float: left; width: 50%;">
+ Roles associated:<br/>
+ <select name="${action_key}_in" id="${action_key}_in_select" class="in_select" style="max-width: 98%; width: 98%; height: 150px; font-size: 100%;" multiple>
+ %for role in in_roles:
+ <option value="${role.id}">${role.name}</option>
+ %endfor
+ </select> <br/>
+ <div style="width: 98%; text-align: right"><input type="submit" id="${action_key}_remove_button" class="role_remove_button" value=">>"/></div>
+ </div>
+ <div style="width: 50%;">
+ Roles not associated:<br/>
+ <select name="${action_key}_out" id="${action_key}_out_select" style="max-width: 98%; width: 98%; height: 150px; font-size: 100%;" multiple>
+ %for role in out_roles:
+ <option value="${role.id}">${role.name}</option>
+ %endfor
+ </select> <br/>
+ <input type="submit" id="${action_key}_add_button" class="role_add_button" value="<<"/>
+ </div>
+ </div>
+## </%def>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="update_roles_button" value="Save"/>
+ </div>
+ </form>
+ </div>
+</div>
\ No newline at end of file
diff -r 1b30f5fa152b -r c37de7a983e7 templates/webapps/galaxy/base_panels.mako
--- a/templates/webapps/galaxy/base_panels.mako Thu Apr 22 16:05:08 2010 -0400
+++ b/templates/webapps/galaxy/base_panels.mako Thu Apr 22 21:11:17 2010 -0400
@@ -36,7 +36,7 @@
${tab( "libraries", "Data Libraries", h.url_for( controller='/library', action='index' ))}
%endif
- %if trans.user and trans.request_types():
+ %if trans.user and trans.user.accessible_request_types(trans):
<td class="tab">
<a>Lab</a>
<div class="submenu">
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/1b30f5fa152b
changeset: 3681:1b30f5fa152b
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Apr 22 16:05:08 2010 -0400
description:
GTF to BEDGraph converter.
diffstat:
test-data/gtf2bedgraph_in.gtf | 100 ++++++++++++++++++++++++++++
test-data/gtf2bedgraph_out.bedgraph | 101 +++++++++++++++++++++++++++++
tool_conf.xml.sample | 1 +
tools/filters/gtf2bedgraph.xml | 79 ++++++++++++++++++++++
tools/filters/gtf_to_bedgraph_converter.py | 73 ++++++++++++++++++++
5 files changed, 354 insertions(+), 0 deletions(-)
diffs (381 lines):
diff -r 3445ca17a4c5 -r 1b30f5fa152b test-data/gtf2bedgraph_in.gtf
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/gtf2bedgraph_in.gtf Thu Apr 22 16:05:08 2010 -0400
@@ -0,0 +1,100 @@
+chr1 Cufflinks exon 36425950 36426026 1000 - . gene_id "uc007aqa.1"; transcript_id "uc007aqa.1"; exon_number "21"; FPKM "4.8386844109"; frac "0.515875"; conf_lo "0.000000"; conf_hi "9.779040"; cov "0.274837";
+chr1 Cufflinks exon 46891972 46892996 1000 - . gene_id "uc007axc.1"; transcript_id "uc007axc.1"; exon_number "9"; FPKM "8.4688567539"; frac "1.000000"; conf_lo "6.667227"; conf_hi "10.270487"; cov "0.481031";
+chr1 Cufflinks exon 71654478 71654594 1000 - . gene_id "uc007bkb.1"; transcript_id "uc007bkb.1"; exon_number "4"; FPKM "0.4686878995"; frac "0.186704"; conf_lo "0.300747"; conf_hi "0.636629"; cov "0.026621";
+chr1 Cufflinks transcript 72629845 72679706 1000 + . gene_id "uc007bks.1"; transcript_id "uc007bks.1"; FPKM "4.0695297327"; frac "1.000000"; conf_lo "2.473329"; conf_hi "5.665731"; cov "0.231149";
+chr1 Cufflinks exon 75531753 75532000 1000 + . gene_id "uc007bpt.1"; transcript_id "uc007bpt.1"; exon_number "24"; FPKM "3.6392661141"; frac "1.000000"; conf_lo "2.391008"; conf_hi "4.887524"; cov "0.206710";
+chr1 Cufflinks exon 123389482 123389564 1000 + . gene_id "uc007cju.1"; transcript_id "uc007cju.1"; exon_number "20"; FPKM "0.9948773061"; frac "1.000000"; conf_lo "0.105032"; conf_hi "1.884723"; cov "0.056509";
+chr1 Cufflinks exon 129625990 129626119 1000 + . gene_id "uc007ckv.1"; transcript_id "uc007ckv.1"; exon_number "1"; FPKM "0.0003267777"; frac "0.004692"; conf_lo "0.000000"; conf_hi "0.000915"; cov "0.000019";
+chr1 Cufflinks exon 132059397 132059512 1000 + . gene_id "uc007clw.1"; transcript_id "uc007clw.1"; exon_number "7"; FPKM "0.2051423010"; frac "0.886787"; conf_lo "0.000000"; conf_hi "0.509199"; cov "0.011652";
+chr1 Cufflinks exon 175865141 175865308 1000 - . gene_id "uc007dsf.1"; transcript_id "uc007dsf.1"; exon_number "5"; FPKM "0.6544444010"; frac "1.000000"; conf_lo "0.068952"; conf_hi "1.239936"; cov "0.037172";
+chr10 Cufflinks transcript 7399380 7400956 1000 - . gene_id "uc007eie.1"; transcript_id "uc007eie.1"; FPKM "2.1099978681"; frac "1.000000"; conf_lo "0.514989"; conf_hi "3.705006"; cov "0.119848";
+chr10 Cufflinks exon 79784826 79784954 1000 - . gene_id "uc007gcr.1"; transcript_id "uc007gcr.1"; exon_number "2"; FPKM "1.2054582676"; frac "1.000000"; conf_lo "0.000000"; conf_hi "2.597402"; cov "0.068470";
+chr10 Cufflinks exon 79820729 79820836 1000 + . gene_id "uc007gcy.1"; transcript_id "uc007gcy.1"; exon_number "2"; FPKM "1.8177911161"; frac "1.000000"; conf_lo "0.532419"; conf_hi "3.103164"; cov "0.103250";
+chr10 Cufflinks transcript 105907395 106369573 1000 + . gene_id "uc007gyr.1"; transcript_id "uc007gyr.1"; FPKM "4.2493607936"; frac "0.247216"; conf_lo "3.727223"; conf_hi "4.771499"; cov "0.241364";
+chr10 Cufflinks exon 119487061 119487172 1000 + . gene_id "uc007hep.1"; transcript_id "uc007hep.1"; exon_number "10"; FPKM "4.3105966126"; frac "0.341843"; conf_lo "3.127417"; conf_hi "5.493776"; cov "0.244842";
+chr11 Cufflinks exon 29097093 29097209 1000 + . gene_id "uc007igs.1"; transcript_id "uc007igs.1"; exon_number "7"; FPKM "4.2530782301"; frac "1.000000"; conf_lo "2.700074"; conf_hi "5.806083"; cov "0.241575";
+chr11 Cufflinks exon 69404158 69404264 1000 + . gene_id "uc007jqm.1"; transcript_id "uc007jqm.1"; exon_number "10"; FPKM "18.7450971965"; frac "0.685277"; conf_lo "11.773851"; conf_hi "25.716343"; cov "1.064721";
+chr11 Cufflinks transcript 98249986 98261804 1000 - . gene_id "uc007lgh.1"; transcript_id "uc007lgh.1"; FPKM "2.1571271227"; frac "1.000000"; conf_lo "0.856331"; conf_hi "3.457924"; cov "0.122525";
+chr11 Cufflinks exon 102210141 102211681 1000 - . gene_id "uc007lrp.1"; transcript_id "uc007lrp.1"; exon_number "1"; FPKM "0.8688186006"; frac "1.000000"; conf_lo "0.254471"; conf_hi "1.483166"; cov "0.049349";
+chr11 Cufflinks transcript 105926400 105927243 1000 - . gene_id "uc007lya.1"; transcript_id "uc007lya.1"; FPKM "3.6706747247"; frac "1.000000"; conf_lo "0.000000"; conf_hi "8.861793"; cov "0.208494";
+chr11 Cufflinks exon 106633966 106634066 1000 - . gene_id "uc007lzm.1"; transcript_id "uc007lzm.1"; exon_number "2"; FPKM "2.4729108195"; frac "0.689555"; conf_lo "0.805433"; conf_hi "4.140389"; cov "0.140461";
+chr11 Cufflinks exon 120472427 120472492 1000 - . gene_id "uc007mtq.1"; transcript_id "uc007mtq.1"; exon_number "3"; FPKM "10.2380258574"; frac "0.356865"; conf_lo "4.499395"; conf_hi "15.976656"; cov "0.581520";
+chr12 Cufflinks exon 100112717 100112852 1000 - . gene_id "uc007orn.1"; transcript_id "uc007orn.1"; exon_number "39"; FPKM "1.8669402513"; frac "0.154118"; conf_lo "1.707295"; conf_hi "2.026586"; cov "0.106042";
+chr13 Cufflinks exon 8889564 8891614 1000 + . gene_id "uc007pkn.1"; transcript_id "uc007pkn.1"; exon_number "5"; FPKM "9.4402522582"; frac "1.000000"; conf_lo "6.745038"; conf_hi "12.135466"; cov "0.536206";
+chr13 Cufflinks exon 13756207 13756380 1000 + . gene_id "uc007pmj.1"; transcript_id "uc007pmj.1"; exon_number "18"; FPKM "0.0574771218"; frac "0.793101"; conf_lo "0.000000"; conf_hi "0.140705"; cov "0.003265";
+chr13 Cufflinks exon 93243918 93244083 1000 - . gene_id "uc007rkp.1"; transcript_id "uc007rkp.1"; exon_number "4"; FPKM "6.9802111138"; frac "1.000000"; conf_lo "3.858566"; conf_hi "10.101856"; cov "0.396476";
+chr14 Cufflinks exon 13130096 13130170 1000 + . gene_id "uc007sfq.1"; transcript_id "uc007sfq.1"; exon_number "4"; FPKM "4.0381928600"; frac "1.000000"; conf_lo "2.254366"; conf_hi "5.822020"; cov "0.229369";
+chr14 Cufflinks exon 32036106 32036250 1000 + . gene_id "uc007sxe.1"; transcript_id "uc007sxe.1"; exon_number "10"; FPKM "0.1289615781"; frac "1.000000"; conf_lo "0.000000"; conf_hi "0.386885"; cov "0.007325";
+chr14 Cufflinks exon 56517080 56517223 1000 - . gene_id "uc007ubd.1"; transcript_id "uc007ubd.1"; exon_number "2"; FPKM "15.7683764379"; frac "0.548796"; conf_lo "8.949920"; conf_hi "22.586833"; cov "0.895643";
+chr14 Cufflinks exon 62950942 62951013 1000 + . gene_id "uc007ugl.1"; transcript_id "uc007ugl.1"; exon_number "1"; FPKM "10.1138803585"; frac "1.000000"; conf_lo "6.480867"; conf_hi "13.746893"; cov "0.574468";
+chr14 Cufflinks exon 66479007 66479052 1000 + . gene_id "uc007ujq.1"; transcript_id "uc007ujq.1"; exon_number "8"; FPKM "14.3011267395"; frac "1.000000"; conf_lo "10.806805"; conf_hi "17.795448"; cov "0.812304";
+chr14 Cufflinks exon 70961619 70961783 1000 + . gene_id "uc007uoj.1"; transcript_id "uc007uoj.1"; exon_number "7"; FPKM "2.0814553995"; frac "1.000000"; conf_lo "1.231705"; conf_hi "2.931206"; cov "0.118227";
+chr14 Cufflinks exon 96679222 96679434 1000 - . gene_id "uc007uuq.1"; transcript_id "uc007uuq.1"; exon_number "7"; FPKM "1.7614342028"; frac "1.000000"; conf_lo "0.851833"; conf_hi "2.671035"; cov "0.100049";
+chr14 Cufflinks exon 99504388 99504488 1000 + . gene_id "uc007uvc.1"; transcript_id "uc007uvc.1"; exon_number "3"; FPKM "3.1573312214"; frac "0.277705"; conf_lo "2.620155"; conf_hi "3.694508"; cov "0.179336";
+chr15 Cufflinks exon 12777808 12777962 1000 + . gene_id "uc007vic.1"; transcript_id "uc007vic.1"; exon_number "6"; FPKM "12.7118803258"; frac "0.653301"; conf_lo "7.807708"; conf_hi "17.616053"; cov "0.722034";
+chr15 Cufflinks exon 28200049 28200282 1000 + . gene_id "uc007vjy.1"; transcript_id "uc007vjy.1"; exon_number "19"; FPKM "0.0608801712"; frac "1.000000"; conf_lo "0.000000"; conf_hi "0.146978"; cov "0.003458";
+chr15 Cufflinks exon 34434714 34434889 1000 + . gene_id "uc007vlv.1"; transcript_id "uc007vlv.1"; exon_number "4"; FPKM "2.1698982510"; frac "1.000000"; conf_lo "1.049368"; conf_hi "3.290429"; cov "0.123250";
+chr15 Cufflinks transcript 51709056 51716160 1000 + . gene_id "uc007vrc.1"; transcript_id "uc007vrc.1"; FPKM "5.0213279245"; frac "1.000000"; conf_lo "3.187798"; conf_hi "6.854858"; cov "0.285211";
+chr15 Cufflinks exon 54880182 54880296 1000 - . gene_id "uc007vrt.1"; transcript_id "uc007vrt.1"; exon_number "14"; FPKM "9.7267082384"; frac "1.000000"; conf_lo "7.809774"; conf_hi "11.643643"; cov "0.552477";
+chr15 Cufflinks exon 59176893 59177072 1000 - . gene_id "uc007vxs.1"; transcript_id "uc007vxs.1"; exon_number "11"; FPKM "4.5392702144"; frac "1.000000"; conf_lo "2.723562"; conf_hi "6.354978"; cov "0.257830";
+chr15 Cufflinks exon 76426650 76426779 1000 - . gene_id "uc007wla.1"; transcript_id "uc007wla.1"; exon_number "3"; FPKM "3.5730073595"; frac "0.230550"; conf_lo "2.576136"; conf_hi "4.569879"; cov "0.202947";
+chr15 Cufflinks exon 76533504 76533613 1000 + . gene_id "uc007wlt.1"; transcript_id "uc007wlt.1"; exon_number "4"; FPKM "3.3395072810"; frac "0.491112"; conf_lo "2.499197"; conf_hi "4.179818"; cov "0.189684";
+chr15 Cufflinks exon 88963183 88963261 1000 - . gene_id "uc007xfl.1"; transcript_id "uc007xfl.1"; exon_number "3"; FPKM "1.5871531781"; frac "1.000000"; conf_lo "0.291248"; conf_hi "2.883058"; cov "0.090150";
+chr15 Cufflinks exon 102455470 102455519 1000 - . gene_id "uc007xwk.1"; transcript_id "uc007xwk.1"; exon_number "13"; FPKM "0.2873090379"; frac "0.161741"; conf_lo "0.099159"; conf_hi "0.475459"; cov "0.016319";
+chr16 Cufflinks transcript 3979123 3982204 1000 - . gene_id "uc007xzf.1"; transcript_id "uc007xzf.1"; FPKM "4.1992546925"; frac "0.467884"; conf_lo "2.835257"; conf_hi "5.563252"; cov "0.238518";
+chr15 Cufflinks exon 102313591 102313719 1000 + . gene_id "uc007xvy.2"; transcript_id "uc007xvy.2"; exon_number "7"; FPKM "37.5792165910"; frac "0.297738"; conf_lo "34.688492"; conf_hi "40.469941"; cov "2.134498";
+chr16 Cufflinks exon 4608598 4608818 1000 + . gene_id "uc007xzw.1"; transcript_id "uc007xzw.1"; exon_number "2"; FPKM "5.7793602049"; frac "1.000000"; conf_lo "4.036818"; conf_hi "7.521903"; cov "0.328267";
+chr16 Cufflinks exon 20541820 20541939 1000 + . gene_id "uc007ypy.1"; transcript_id "uc007ypy.1"; exon_number "7"; FPKM "68.0268643583"; frac "1.000000"; conf_lo "60.085498"; conf_hi "75.968231"; cov "3.863924";
+chr17 Cufflinks transcript 24857054 24858867 1000 + . gene_id "uc008axy.1"; transcript_id "uc008axy.1"; FPKM "22.0141466642"; frac "1.000000"; conf_lo "15.369306"; conf_hi "28.658988"; cov "1.250403";
+chr17 Cufflinks exon 25379604 25380686 1000 - . gene_id "uc008bah.1"; transcript_id "uc008bah.1"; exon_number "1"; FPKM "1.7458387165"; frac "0.226783"; conf_lo "1.488719"; conf_hi "2.002959"; cov "0.099164";
+chr17 Cufflinks exon 27159196 27159462 1000 + . gene_id "uc008bfe.1"; transcript_id "uc008bfe.1"; exon_number "2"; FPKM "1.7334774900"; frac "0.118977"; conf_lo "1.272113"; conf_hi "2.194842"; cov "0.098461";
+chr18 Cufflinks exon 34787707 34787836 1000 + . gene_id "uc008ela.1"; transcript_id "uc008ela.1"; exon_number "7"; FPKM "5.0638001964"; frac "0.237331"; conf_lo "4.342098"; conf_hi "5.785503"; cov "0.287624";
+chr18 Cufflinks exon 61371052 61371250 1000 - . gene_id "uc008fbu.1"; transcript_id "uc008fbu.1"; exon_number "4"; FPKM "0.1230526474"; frac "1.000000"; conf_lo "0.000000"; conf_hi "0.369158"; cov "0.006989";
+chr18 Cufflinks exon 61167370 61167501 1000 - . gene_id "uc008fbi.1"; transcript_id "uc008fbi.1"; exon_number "12"; FPKM "2.4172869897"; frac "1.000000"; conf_lo "1.244731"; conf_hi "3.589843"; cov "0.137302";
+chr18 Cufflinks exon 86630592 86630719 1000 + . gene_id "uc008fuz.1"; transcript_id "uc008fuz.1"; exon_number "6"; FPKM "2.2892787327"; frac "1.000000"; conf_lo "1.065608"; conf_hi "3.512950"; cov "0.130031";
+chr19 Cufflinks exon 5603634 5603715 1000 - . gene_id "uc008gea.1"; transcript_id "uc008gea.1"; exon_number "2"; FPKM "2.1837193523"; frac "0.163446"; conf_lo "1.715120"; conf_hi "2.652319"; cov "0.124035";
+chr2 Cufflinks exon 28404475 28404676 1000 + . gene_id "uc008iyn.1"; transcript_id "uc008iyn.1"; exon_number "15"; FPKM "10.9087431164"; frac "0.368384"; conf_lo "4.356515"; conf_hi "17.460972"; cov "0.619616";
+chr2 Cufflinks exon 29770254 29770439 1000 + . gene_id "uc008jal.1"; transcript_id "uc008jal.1"; exon_number "12"; FPKM "7.2973656902"; frac "0.685974"; conf_lo "5.778526"; conf_hi "8.816206"; cov "0.414490";
+chr2 Cufflinks exon 30002172 30002382 1000 + . gene_id "uc008jbj.1"; transcript_id "uc008jbj.1"; exon_number "8"; FPKM "12.8769808138"; frac "1.000000"; conf_lo "10.220662"; conf_hi "15.533299"; cov "0.731412";
+chr2 Cufflinks exon 32076600 32076704 1000 + . gene_id "uc008jeo.1"; transcript_id "uc008jeo.1"; exon_number "21"; FPKM "43.8860660433"; frac "0.911093"; conf_lo "40.407190"; conf_hi "47.364942"; cov "2.492727";
+chr2 Cufflinks exon 32546710 32546774 1000 - . gene_id "uc008jgm.1"; transcript_id "uc008jgm.1"; exon_number "12"; FPKM "8.1366623064"; frac "1.000000"; conf_lo "5.496780"; conf_hi "10.776544"; cov "0.462162";
+chr2 Cufflinks exon 35574280 35574458 1000 + . gene_id "uc008jkv.1"; transcript_id "uc008jkv.1"; exon_number "6"; FPKM "2.0012109810"; frac "0.141121"; conf_lo "1.688896"; conf_hi "2.313526"; cov "0.113669";
+chr2 Cufflinks exon 117127697 117127757 1000 - . gene_id "uc008lrl.1"; transcript_id "uc008lrl.1"; exon_number "14"; FPKM "1.6760710643"; frac "0.685093"; conf_lo "1.109659"; conf_hi "2.242483"; cov "0.095201";
+chr2 Cufflinks exon 122435405 122435623 1000 - . gene_id "uc008maw.1"; transcript_id "uc008maw.1"; exon_number "8"; FPKM "10.5679023498"; frac "1.000000"; conf_lo "7.636894"; conf_hi "13.498911"; cov "0.600257";
+chr2 Cufflinks exon 130265172 130265261 1000 + . gene_id "uc008mja.1"; transcript_id "uc008mja.1"; exon_number "9"; FPKM "3.6318426438"; frac "0.287992"; conf_lo "2.815837"; conf_hi "4.447848"; cov "0.206289";
+chr2 Cufflinks exon 152702303 152702428 1000 + . gene_id "uc008ngq.1"; transcript_id "uc008ngq.1"; exon_number "7"; FPKM "2.5312142816"; frac "0.526901"; conf_lo "1.108909"; conf_hi "3.953519"; cov "0.143773";
+chr2 Cufflinks exon 158262739 158262887 1000 + . gene_id "uc008nqh.1"; transcript_id "uc008nqh.1"; exon_number "8"; FPKM "5.0001206267"; frac "1.000000"; conf_lo "3.934091"; conf_hi "6.066150"; cov "0.284007";
+chr2 Cufflinks exon 178152211 178152296 1000 + . gene_id "uc008ohq.1"; transcript_id "uc008ohq.1"; exon_number "3"; FPKM "1.6796903776"; frac "1.000000"; conf_lo "0.491970"; conf_hi "2.867411"; cov "0.095406";
+chr3 Cufflinks exon 97500913 97501137 1000 - . gene_id "uc008qpe.1"; transcript_id "uc008qpe.1"; exon_number "7"; FPKM "4.1738869883"; frac "0.398377"; conf_lo "3.671923"; conf_hi "4.675851"; cov "0.237077";
+chr3 Cufflinks exon 101987874 101987902 1000 - . gene_id "uc008qrt.1"; transcript_id "uc008qrt.1"; exon_number "4"; FPKM "0.6428024028"; frac "1.000000"; conf_lo "0.000000"; conf_hi "1.551862"; cov "0.036511";
+chr3 Cufflinks exon 127258214 127258303 1000 + . gene_id "uc008rhf.1"; transcript_id "uc008rhf.1"; exon_number "2"; FPKM "0.4060755145"; frac "0.353557"; conf_lo "0.120085"; conf_hi "0.692066"; cov "0.023065";
+chr3 Cufflinks exon 144790795 144790854 1000 + . gene_id "uc008rqi.1"; transcript_id "uc008rqi.1"; exon_number "5"; FPKM "1.1258808773"; frac "0.289434"; conf_lo "0.699104"; conf_hi "1.552658"; cov "0.063950";
+chr4 Cufflinks exon 17978869 17981846 1000 + . gene_id "uc008sbv.1"; transcript_id "uc008sbv.1"; exon_number "5"; FPKM "0.6623587694"; frac "0.585087"; conf_lo "0.270053"; conf_hi "1.054665"; cov "0.037622";
+chr4 Cufflinks exon 21711840 21711940 1000 + . gene_id "uc008scz.1"; transcript_id "uc008scz.1"; exon_number "5"; FPKM "0.9584930367"; frac "0.150841"; conf_lo "0.742054"; conf_hi "1.174932"; cov "0.054442";
+chr4 Cufflinks exon 108353507 108353731 1000 - . gene_id "uc008ubn.1"; transcript_id "uc008ubn.1"; exon_number "9"; FPKM "2.6286767383"; frac "1.000000"; conf_lo "1.111010"; conf_hi "4.146344"; cov "0.149309";
+chr4 Cufflinks exon 131325668 131325803 1000 - . gene_id "uc008vab.1"; transcript_id "uc008vab.1"; exon_number "2"; FPKM "4.0813960015"; frac "1.000000"; conf_lo "2.890730"; conf_hi "5.272062"; cov "0.231823";
+chr4 Cufflinks exon 153530641 153530927 1000 + . gene_id "uc008wbi.1"; transcript_id "uc008wbi.1"; exon_number "12"; FPKM "21.2412511761"; frac "1.000000"; conf_lo "16.217040"; conf_hi "26.265462"; cov "1.206502";
+chr5 Cufflinks exon 3631589 3631765 1000 + . gene_id "uc008whf.1"; transcript_id "uc008whf.1"; exon_number "19"; FPKM "4.6386616700"; frac "0.517324"; conf_lo "3.723563"; conf_hi "5.553760"; cov "0.263476";
+chr5 Cufflinks exon 3992046 3992138 1000 + . gene_id "uc008wid.1"; transcript_id "uc008wid.1"; exon_number "15"; FPKM "23.3742995121"; frac "0.874843"; conf_lo "21.278988"; conf_hi "25.469611"; cov "1.327659";
+chr5 Cufflinks exon 34223636 34223836 1000 + . gene_id "uc008xbk.1"; transcript_id "uc008xbk.1"; exon_number "12"; FPKM "4.1101744570"; frac "0.642098"; conf_lo "2.270677"; conf_hi "5.949672"; cov "0.233458";
+chr5 Cufflinks exon 115734400 115734621 1000 + . gene_id "uc008zdo.1"; transcript_id "uc008zdo.1"; exon_number "3"; FPKM "15.3221708908"; frac "1.000000"; conf_lo "11.506469"; conf_hi "19.137873"; cov "0.870299";
+chr5 Cufflinks exon 137807769 137808016 1000 + . gene_id "uc009aci.1"; transcript_id "uc009aci.1"; exon_number "12"; FPKM "0.7189248975"; frac "0.533440"; conf_lo "0.000000"; conf_hi "1.543846"; cov "0.040835";
+chr6 Cufflinks transcript 17015149 17055825 1000 + . gene_id "uc009azi.1"; transcript_id "uc009azi.1"; FPKM "12.3429992456"; frac "1.000000"; conf_lo "9.242902"; conf_hi "15.443097"; cov "0.701082";
+chr6 Cufflinks exon 15361026 15361102 1000 + . gene_id "uc009ayz.1"; transcript_id "uc009ayz.1"; exon_number "14"; FPKM "4.1692596952"; frac "0.281345"; conf_lo "2.894471"; conf_hi "5.444049"; cov "0.236814";
+chr6 Cufflinks exon 115576309 115576426 1000 - . gene_id "uc009dix.1"; transcript_id "uc009dix.1"; exon_number "8"; FPKM "34.7320589881"; frac "0.628311"; conf_lo "31.195284"; conf_hi "38.268834"; cov "1.972780";
+chr6 Cufflinks exon 117820274 117822784 1000 + . gene_id "uc009dld.1"; transcript_id "uc009dld.1"; exon_number "3"; FPKM "8.2141924772"; frac "1.000000"; conf_lo "5.778655"; conf_hi "10.649730"; cov "0.466566";
+chr6 Cufflinks exon 121331667 121331759 1000 - . gene_id "uc009don.1"; transcript_id "uc009don.1"; exon_number "4"; FPKM "0.9373248338"; frac "0.255959"; conf_lo "0.597786"; conf_hi "1.276864"; cov "0.053240";
+chr6 Cufflinks exon 134837648 134837803 1000 - . gene_id "uc009ekw.1"; transcript_id "uc009ekw.1"; exon_number "2"; FPKM "3.4342255434"; frac "0.337007"; conf_lo "2.099159"; conf_hi "4.769292"; cov "0.195064";
+chr7 Cufflinks exon 19628774 19628924 1000 + . gene_id "uc009fkg.1"; transcript_id "uc009fkg.1"; exon_number "14"; FPKM "3.4380795645"; frac "0.240903"; conf_lo "2.901335"; conf_hi "3.974824"; cov "0.195283";
+chr7 Cufflinks transcript 51739887 51740783 1000 + . gene_id "uc009gpo.1"; transcript_id "uc009gpo.1"; FPKM "3.5875651083"; frac "1.000000"; conf_lo "0.658330"; conf_hi "6.516800"; cov "0.203774";
+chr7 Cufflinks exon 53085965 53086159 1000 - . gene_id "uc009gxj.1"; transcript_id "uc009gxj.1"; exon_number "6"; FPKM "6.4200658663"; frac "0.543693"; conf_lo "4.666748"; conf_hi "8.173383"; cov "0.364660";
+chr7 Cufflinks exon 77546982 77547077 1000 + . gene_id "uc009hnk.1"; transcript_id "uc009hnk.1"; exon_number "7"; FPKM "0.4622078998"; frac "0.405823"; conf_lo "0.077413"; conf_hi "0.847003"; cov "0.026253";
+chr7 Cufflinks exon 82788205 82788350 1000 + . gene_id "uc009hwu.1"; transcript_id "uc009hwu.1"; exon_number "1"; FPKM "0.6859341657"; frac "0.576962"; conf_lo "0.055268"; conf_hi "1.316600"; cov "0.038961";
+chr7 Cufflinks exon 85984891 85985078 1000 - . gene_id "uc009hxo.1"; transcript_id "uc009hxo.1"; exon_number "3"; FPKM "3.0017741434"; frac "1.000000"; conf_lo "1.397258"; conf_hi "4.606290"; cov "0.170501";
+chr7 Cufflinks exon 148509981 148510078 1000 - . gene_id "uc009kkn.1"; transcript_id "uc009kkn.1"; exon_number "11"; FPKM "32.9197864125"; frac "1.000000"; conf_lo "27.375094"; conf_hi "38.464479"; cov "1.869843";
+chr9 Cufflinks exon 15330072 15330148 1000 + . gene_id "uc009ogc.1"; transcript_id "uc009ogc.1"; exon_number "2"; FPKM "1.0060367764"; frac "1.000000"; conf_lo "0.000000"; conf_hi "2.428788"; cov "0.057143";
+chr9 Cufflinks transcript 21069743 21078812 1000 + . gene_id "uc009okt.1"; transcript_id "uc009okt.1"; FPKM "7.9134805855"; frac "0.623402"; conf_lo "5.930640"; conf_hi "9.896321"; cov "0.449485";
+chr9 Cufflinks exon 57867100 57867303 1000 + . gene_id "uc009pwa.1"; transcript_id "uc009pwa.1"; exon_number "4"; FPKM "0.5359102332"; frac "1.000000"; conf_lo "0.000000"; conf_hi "1.293802"; cov "0.030440";
+chr9 Cufflinks exon 49314958 49315758 1000 - . gene_id "uc009pje.1"; transcript_id "uc009pje.1"; exon_number "2"; FPKM "156.0206032233"; frac "0.793945"; conf_lo "147.369898"; conf_hi "164.671308"; cov "8.861965";
+chr9 Cufflinks exon 106815438 106815604 1000 - . gene_id "uc009rkv.1"; transcript_id "uc009rkv.1"; exon_number "12"; FPKM "5.4023275754"; frac "1.000000"; conf_lo "4.337713"; conf_hi "6.466942"; cov "0.306852";
+chr9 Cufflinks exon 119703054 119703292 1000 - . gene_id "uc009sbk.1"; transcript_id "uc009sbk.1"; exon_number "15"; FPKM "0.0814657030"; frac "1.000000"; conf_lo "0.000000"; conf_hi "0.244397"; cov "0.004627";
\ No newline at end of file
diff -r 3445ca17a4c5 -r 1b30f5fa152b test-data/gtf2bedgraph_out.bedgraph
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/gtf2bedgraph_out.bedgraph Thu Apr 22 16:05:08 2010 -0400
@@ -0,0 +1,101 @@
+chr1 36425949 36426026 4.8386844109
+chr1 46891971 46892996 8.4688567539
+chr1 71654477 71654594 0.4686878995
+chr1 72629844 72679706 4.0695297327
+chr1 75531752 75532000 3.6392661141
+chr1 123389481 123389564 0.9948773061
+chr1 129625989 129626119 0.0003267777
+chr1 132059396 132059512 0.2051423010
+chr1 175865140 175865308 0.6544444010
+chr10 7399379 7400956 2.1099978681
+chr10 79784825 79784954 1.2054582676
+chr10 79820728 79820836 1.8177911161
+chr10 105907394 106369573 4.2493607936
+chr10 119487060 119487172 4.3105966126
+chr11 29097092 29097209 4.2530782301
+chr11 69404157 69404264 18.7450971965
+chr11 98249985 98261804 2.1571271227
+chr11 102210140 102211681 0.8688186006
+chr11 105926399 105927243 3.6706747247
+chr11 106633965 106634066 2.4729108195
+chr11 120472426 120472492 10.2380258574
+chr12 100112716 100112852 1.8669402513
+chr13 8889563 8891614 9.4402522582
+chr13 13756206 13756380 0.0574771218
+chr13 93243917 93244083 6.9802111138
+chr14 13130095 13130170 4.0381928600
+chr14 32036105 32036250 0.1289615781
+chr14 56517079 56517223 15.7683764379
+chr14 62950941 62951013 10.1138803585
+chr14 66479006 66479052 14.3011267395
+chr14 70961618 70961783 2.0814553995
+chr14 96679221 96679434 1.7614342028
+chr14 99504387 99504488 3.1573312214
+chr15 12777807 12777962 12.7118803258
+chr15 28200048 28200282 0.0608801712
+chr15 34434713 34434889 2.1698982510
+chr15 51709055 51716160 5.0213279245
+chr15 54880181 54880296 9.7267082384
+chr15 59176892 59177072 4.5392702144
+chr15 76426649 76426779 3.5730073595
+chr15 76533503 76533613 3.3395072810
+chr15 88963182 88963261 1.5871531781
+chr15 102313590 102313719 37.5792165910
+chr15 102455469 102455519 0.2873090379
+chr16 3979122 3982204 4.1992546925
+chr16 4608597 4608818 5.7793602049
+chr16 20541819 20541939 68.0268643583
+chr17 24857053 24858867 22.0141466642
+chr17 25379603 25380686 1.7458387165
+chr17 27159195 27159462 1.7334774900
+chr18 34787706 34787836 5.0638001964
+chr18 61167369 61167501 2.4172869897
+chr18 61371051 61371250 0.1230526474
+chr18 86630591 86630719 2.2892787327
+chr19 5603633 5603715 2.1837193523
+chr2 28404474 28404676 10.9087431164
+chr2 29770253 29770439 7.2973656902
+chr2 30002171 30002382 12.8769808138
+chr2 32076599 32076704 43.8860660433
+chr2 32546709 32546774 8.1366623064
+chr2 35574279 35574458 2.0012109810
+chr2 117127696 117127757 1.6760710643
+chr2 122435404 122435623 10.5679023498
+chr2 130265171 130265261 3.6318426438
+chr2 152702302 152702428 2.5312142816
+chr2 158262738 158262887 5.0001206267
+chr2 178152210 178152296 1.6796903776
+chr3 97500912 97501137 4.1738869883
+chr3 101987873 101987902 0.6428024028
+chr3 127258213 127258303 0.4060755145
+chr3 144790794 144790854 1.1258808773
+chr4 17978868 17981846 0.6623587694
+chr4 21711839 21711940 0.9584930367
+chr4 108353506 108353731 2.6286767383
+chr4 131325667 131325803 4.0813960015
+chr4 153530640 153530927 21.2412511761
+chr5 3631588 3631765 4.6386616700
+chr5 3992045 3992138 23.3742995121
+chr5 34223635 34223836 4.1101744570
+chr5 115734399 115734621 15.3221708908
+chr5 137807768 137808016 0.7189248975
+chr6 15361025 15361102 4.1692596952
+chr6 17015148 17055825 12.3429992456
+chr6 115576308 115576426 34.7320589881
+chr6 117820273 117822784 8.2141924772
+chr6 121331666 121331759 0.9373248338
+chr6 134837647 134837803 3.4342255434
+chr7 19628773 19628924 3.4380795645
+chr7 51739886 51740783 3.5875651083
+chr7 53085964 53086159 6.4200658663
+chr7 77546981 77547077 0.4622078998
+chr7 82788204 82788350 0.6859341657
+chr7 85984890 85985078 3.0017741434
+chr7 148509980 148510078 32.9197864125
+chr9 15330071 15330148 1.0060367764
+chr9 21069742 21078812 7.9134805855
+chr9 49314957 49315758 156.0206032233
+chr9 57867099 57867303 0.5359102332
+chr9 106815437 106815604 5.4023275754
+chr9 119703053 119703292 0.0814657030
+track type=bedGraph
diff -r 3445ca17a4c5 -r 1b30f5fa152b tool_conf.xml.sample
--- a/tool_conf.xml.sample Thu Apr 22 09:35:44 2010 -0400
+++ b/tool_conf.xml.sample Thu Apr 22 16:05:08 2010 -0400
@@ -79,6 +79,7 @@
<tool file="fastx_toolkit/fastq_to_fasta.xml" />
<tool file="filters/wiggle_to_simple.xml" />
<tool file="filters/sff_extractor.xml" />
+ <tool file="filters/gtf2bedgraph.xml" />
</section>
<section name="Extract Features" id="features">
<tool file="filters/ucsc_gene_bed_to_exon_bed.xml" />
diff -r 3445ca17a4c5 -r 1b30f5fa152b tools/filters/gtf2bedgraph.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/filters/gtf2bedgraph.xml Thu Apr 22 16:05:08 2010 -0400
@@ -0,0 +1,79 @@
+<tool id="gtf2bedgraph" name="GTF-to-BEDGraph">
+ <description>converter</description>
+ <command interpreter="python">gtf_to_bedgraph_converter.py $input $out_file1 $attribute_name</command>
+ <inputs>
+ <param format="gtf" name="input" type="data" label="Convert this query"/>
+ <param name="attribute_name" type="text" label="Attribute to Use for Value"/>
+ </inputs>
+ <outputs>
+ <data format="bedgraph" name="out_file1" />
+ </outputs>
+ <tests>
+ <test>
+ <param name="input" value="gtf2bedgraph_in.gtf" ftype="gtf"/>
+ <param name="attribute_name" value="FPKM"/>
+ <output name="out_file1" file="gtf2bedgraph_out.bedgraph" ftype="bedgraph"/>
+ </test>
+ </tests>
+ <help>
+
+**What it does**
+
+This tool converts data from GTF format to BEDGraph format (scroll down for format description).
+
+--------
+
+**Example**
+
+The following data in GTF format::
+
+ chr22 GeneA enhancer 10000000 10001000 500 + . gene_id "GeneA"; transcript_id "TranscriptAlpha"; FPKM "2.75"; frac "1.000000";
+ chr22 GeneA promoter 10010000 10010100 900 + . gene_id "GeneA"; transcript_id "TranscriptsAlpha"; FPKM "2.25"; frac "1.000000";
+
+using the attribute name 'FPKM' will be converted to BEDGraph (**note** that 1 is subtracted from the start coordinate)::
+
+
+ chr22 9999999 10001000 2.75
+ chr22 10009999 10010100 2.25
+
+------
+
+.. class:: infomark
+
+**About formats**
+
+**GTF format** Gene Transfer Format is a format for describing genes and other features associated with DNA, RNA and Protein sequences. GTF lines have nine tab-separated fields::
+
+ 1. seqname - Must be a chromosome or scaffold.
+ 2. source - The program that generated this feature.
+ 3. feature - The name of this type of feature. Some examples of standard feature types are "CDS", "start_codon", "stop_codon", and "exon".
+ 4. start - The starting position of the feature in the sequence. The first base is numbered 1.
+ 5. end - The ending position of the feature (inclusive).
+ 6. score - A score between 0 and 1000. If there is no score value, enter ".".
+ 7. strand - Valid entries include '+', '-', or '.' (for don't know/care).
+ 8. frame - If the feature is a coding exon, frame should be a number between 0-2 that represents the reading frame of the first base. If the feature is not a coding exon, the value should be '.'.
+ 9. group - The group field is a list of attributes. Each attribute consists of a type/value pair. Attributes must end in a semi-colon, and be separated from any following attribute by exactly one space. The attribute list must begin with the two mandatory attributes: (i) gene_id value - A globally unique identifier for the genomic source of the sequence and (ii) transcript_id value - A globally unique identifier for the predicted transcript.
+
+**BEDGraph format**
+
+The bedGraph format is line-oriented. Bedgraph data are preceded by a track definition line, which adds a number of options for controlling the default display of this track.
+
+For the track definition line, all options are placed in a single line separated by spaces:
+ track type=bedGraph name=track_label description=center_label
+ visibility=display_mode color=r,g,b altColor=r,g,b
+ priority=priority autoScale=on|off alwaysZero=on|off
+ gridDefault=on|off maxHeightPixels=max:default:min
+ graphType=bar|points viewLimits=lower:upper
+ yLineMark=real-value yLineOnOff=on|off
+ windowingFunction=maximum|mean|minimum smoothingWindow=off|2-16
+
+The track type is REQUIRED, and must be bedGraph:
+ type=bedGraph
+
+Following the track definition line are the track data in four column BED format::
+
+ chromA chromStartA chromEndA dataValueA
+ chromB chromStartB chromEndB dataValueB
+
+</help>
+</tool>
diff -r 3445ca17a4c5 -r 1b30f5fa152b tools/filters/gtf_to_bedgraph_converter.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/filters/gtf_to_bedgraph_converter.py Thu Apr 22 16:05:08 2010 -0400
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+import os, sys, tempfile
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+def __main__():
+ # Read parms.
+ input_name = sys.argv[1]
+ output_name = sys.argv[2]
+ attribute_name = sys.argv[3]
+
+ # Create temp file.
+ tmp_name = tempfile.NamedTemporaryFile().name
+
+ # Do conversion.
+ skipped_lines = 0
+ first_skipped_line = 0
+ out = open( tmp_name, 'w' )
+
+ # Write track definition line.
+ out.write( "track type=bedGraph\n")
+
+ # Write track data to temporary file.
+ i = 0
+ for i, line in enumerate( file( input_name ) ):
+ line = line.rstrip( '\r\n' )
+
+ if line and not line.startswith( '#' ):
+ try:
+ elems = line.split( '\t' )
+ start = str( int( elems[3] ) - 1 ) # GTF coordinates are 1-based, BedGraph are 0-based.
+ strand = elems[6]
+ if strand not in ['+', '-']:
+ strand = '+'
+ attributes_list = elems[8].split(";")
+ attributes = {}
+ for name_value_pair in attributes_list:
+ pair = name_value_pair.strip().split(" ")
+ name = pair[0].strip()
+ if name == '':
+ continue
+ # Need to strip double quote from values
+ value = pair[1].strip(" \"")
+ attributes[name] = value
+ value = attributes[ attribute_name ]
+ # GTF format: chrom, source, name, chromStart, chromEnd, score, strand, frame, attributes.
+ # BedGraph format: chrom, chromStart, chromEnd, value
+ out.write( "%s\t%s\t%s\t%s\n" %( elems[0], start, elems[4], value ) )
+ except:
+ skipped_lines += 1
+ if not first_skipped_line:
+ first_skipped_line = i + 1
+ else:
+ skipped_lines += 1
+ if not first_skipped_line:
+ first_skipped_line = i + 1
+ out.close()
+
+ # Sort tmp file to create bedgraph file; sort by chromosome name and chromosome start.
+ cmd = "sort -k1,1 -k2,2n < %s > %s" % ( tmp_name, output_name )
+ try:
+ os.system(cmd)
+ os.remove(tmp_name)
+ except Exception, ex:
+ sys.stderr.write( "%s\n" % ex )
+ sys.exit(1)
+
+ info_msg = "%i lines converted to BEDGraph. " % ( i + 1 - skipped_lines )
+ if skipped_lines > 0:
+ info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." %( skipped_lines, first_skipped_line )
+ print info_msg
+
+if __name__ == "__main__": __main__()
1
0