galaxy-dev
11 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/f0adb6152df9
changeset: 2678:f0adb6152df9
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Thu Sep 10 21:24:06 2009 -0400
description:
Streamline history sharing, add a "Manage shared histories" section to the History options menu. Also use a better approach to setting peek on datasets.
5 file(s) affected in this change:
lib/galaxy/datatypes/data.py
lib/galaxy/web/controllers/history.py
templates/history/sharing.mako
templates/root/index.mako
test/functional/test_history_functions.py
diffs (482 lines):
diff -r 96ccd29277be -r f0adb6152df9 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Thu Sep 10 17:48:52 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Thu Sep 10 21:24:06 2009 -0400
@@ -416,8 +416,9 @@
count = 0
file_type = None
data_checked = False
- for line in file( file_name ):
- line = line[:WIDTH]
+ temp = open( file_name, "U" )
+ while count <= LINE_COUNT:
+ line = temp.readline( WIDTH )
if line and not is_multi_byte and not data_checked:
# See if we have a compressed or binary file
if line[0:2] == util.gzip_magic:
@@ -432,9 +433,8 @@
if file_type in [ 'gzipped', 'binary' ]:
break
lines.append( line )
- if count == LINE_COUNT:
- break
count += 1
+ temp.close()
if file_type in [ 'gzipped', 'binary' ]:
text = "%s file" % file_type
else:
diff -r 96ccd29277be -r f0adb6152df9 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Thu Sep 10 17:48:52 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Thu Sep 10 21:24:06 2009 -0400
@@ -4,7 +4,7 @@
from galaxy.model.mapping import desc
from galaxy.model.orm import *
from galaxy.util.json import *
-import webhelpers, logging
+import webhelpers, logging, operator
from datetime import datetime
from cgi import escape
@@ -31,10 +31,12 @@
return "deleted"
elif history.users_shared_with:
return "shared"
+ elif history.importable:
+ return "importable"
return ""
def get_link( self, trans, grid, item ):
- if item.users_shared_with:
- return dict( operation="sharing", id=item.id )
+ if item.users_shared_with or item.importable:
+ return dict( operation="sharing" )
return None
# Grid definition
title = "Stored histories"
@@ -55,9 +57,12 @@
operations = [
grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
grids.GridOperation( "Share", condition=( lambda item: not item.deleted ) ),
+ grids.GridOperation( "Unshare", condition=( lambda item: not item.deleted ) ),
grids.GridOperation( "Rename", condition=( lambda item: not item.deleted ) ),
grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ) ),
- grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) )
+ grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ),
+ grids.GridOperation( "Enable import via link", condition=( lambda item: item.deleted ) ),
+ grids.GridOperation( "Disable import via link", condition=( lambda item: item.deleted ) )
]
standard_filters = [
grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
@@ -99,7 +104,9 @@
]
operations = [
grids.GridOperation( "Clone" ),
- grids.GridOperation( "Unshare" )
+ grids.GridOperation( "Unshare" ),
+ grids.GridOperation( "Enable import via link", condition=( lambda item: item.deleted ) ),
+ grids.GridOperation( "Disable import via link", condition=( lambda item: item.deleted ) )
]
standard_filters = []
def build_initial_query( self, session ):
@@ -126,19 +133,19 @@
current_history = trans.get_history()
status = message = None
if 'operation' in kwargs:
- history_ids = util.listify( kwargs.get( 'id', [] ) )
- histories = []
operation = kwargs['operation'].lower()
if operation == "share":
return self.share( trans, **kwargs )
- elif operation == "rename":
+ if operation == "rename":
return self.rename( trans, **kwargs )
- elif operation == 'sharing':
- return self.sharing( trans, id=kwargs['id'] )
+ history_ids = util.listify( kwargs.get( 'id', [] ) )
+ if operation == "sharing":
+ return self.sharing( trans, id=history_ids )
# Display no message by default
status, message = None, None
refresh_history = False
# Load the histories and ensure they all belong to the current user
+ histories = []
for history_id in history_ids:
history = get_history( trans, history_id )
if history:
@@ -161,6 +168,21 @@
trans.template_context['refresh_frames'] = ['history']
elif operation == "undelete":
status, message = self._list_undelete( trans, histories )
+ elif operation == "unshare":
+ for history in histories:
+ husas = trans.app.model.HistoryUserShareAssociation.filter_by( history=history ).all()
+ for husa in husas:
+ husa.delete()
+ elif operation == "enable import via link":
+ for history in histories:
+ if not history.importable:
+ history.importable = True
+ elif operation == "disable import via link":
+ if history_ids:
+ histories = [ get_history( trans, history_id ) for history_id in history_ids ]
+ for history in histories:
+ if history.importable:
+ history.importable = False
trans.sa_session.flush()
# Render the list view
return self.stored_list_grid( trans, status=status, message=message, **kwargs )
@@ -237,24 +259,20 @@
msg = util.restore_text( kwargs.get( 'msg', '' ) )
status = message = None
if 'operation' in kwargs:
- id = kwargs.get( 'id', None )
+ ids = util.listify( kwargs.get( 'id', [] ) )
operation = kwargs['operation'].lower()
if operation == "clone":
- if not id:
+ if not ids:
message = "Select a history to clone"
return self.shared_list_grid( trans, status='error', message=message, **kwargs )
# When cloning shared histories, only copy active datasets
new_kwargs = { 'clone_choice' : 'active' }
return self.clone( trans, id, **new_kwargs )
elif operation == 'unshare':
- if not id:
+ if not ids:
message = "Select a history to unshare"
return self.shared_list_grid( trans, status='error', message=message, **kwargs )
- ids = util.listify( id )
- histories = []
- for history_id in ids:
- history = get_history( trans, history_id, check_ownership=False )
- histories.append( history )
+ histories = [ get_history( trans, history_id ) for history_id in ids ]
for history in histories:
# Current user is the user with which the histories were shared
association = trans.app.model.HistoryUserShareAssociation.filter_by( user=trans.user, history=history ).one()
@@ -262,6 +280,20 @@
association.flush()
message = "Unshared %d shared histories" % len( ids )
status = 'done'
+ elif operation == "enable import via link":
+ if ids:
+ histories = [ get_history( trans, id ) for id in ids ]
+ for history in histories:
+ if not history.importable:
+ history.importable = True
+ history.flush()
+ elif operation == "disable import via link":
+ if ids:
+ histories = [ get_history( trans, id ) for id in ids ]
+ for history in histories:
+ if history.importable:
+ history.importable = False
+ history.flush()
# Render the list view
return self.shared_list_grid( trans, status=status, message=message, **kwargs )
@web.expose
@@ -622,7 +654,9 @@
params = util.Params( kwd )
msg = util.restore_text ( params.get( 'msg', '' ) )
if id:
- histories = [ get_history( trans, id ) ]
+ ids = util.listify( id )
+ if ids:
+ histories = [ get_history( trans, history_id ) for history_id in ids ]
for history in histories:
if params.get( 'enable_import_via_link', False ):
history.importable = True
@@ -635,14 +669,34 @@
if not user:
msg = 'History (%s) does not seem to be shared with user (%s)' % ( history.name, user.email )
return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='error' )
- association = trans.app.model.HistoryUserShareAssociation.filter_by( user=user, history=history ).one()
- association.delete()
- association.flush()
- if not id:
- shared_msg = "History (%s) now shared with: %d users. " % ( history.name, len( history.users_shared_with ) )
- msg = '%s%s' % ( shared_msg, msg )
+ husas = trans.app.model.HistoryUserShareAssociation.filter_by( user=user, history=history ).all()
+ if husas:
+ for husa in husas:
+ husa.delete()
+ husa.flush()
+ histories = []
+ # Get all histories that have been shared with others
+ husas = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
+ .join( "history" ) \
+ .filter( and_( trans.app.model.History.user == trans.user,
+ trans.app.model.History.deleted == False ) ) \
+ .order_by( trans.app.model.History.table.c.name ) \
+ .all()
+ for husa in husas:
+ history = husa.history
+ if history not in histories:
+ histories.append( history )
+ # Get all histories that are importable
+ importables = trans.sa_session.query( trans.app.model.History ) \
+ .filter_by( user=trans.user, importable=True, deleted=False ) \
+ .order_by( trans.app.model.History.table.c.name ) \
+ .all()
+ for importable in importables:
+ if importable not in histories:
+ histories.append( importable )
+ # Sort the list of histories by history.name
+ histories.sort( key=operator.attrgetter( 'name') )
return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='done' )
-
@web.expose
@web.require_login( "rename histories" )
def rename( self, trans, id=None, name=None, **kwd ):
diff -r 96ccd29277be -r f0adb6152df9 templates/history/sharing.mako
--- a/templates/history/sharing.mako Thu Sep 10 17:48:52 2009 -0400
+++ b/templates/history/sharing.mako Thu Sep 10 21:24:06 2009 -0400
@@ -1,75 +1,63 @@
<%inherit file="/base.mako"/>
<%namespace file="/message.mako" import="render_msg" />
-<h2>Public access via link</h2>
+##<h2>Import via link</h2>
%if msg:
${render_msg( msg, messagetype )}
%endif
-%for history in histories:
- <p>
- %if history.importable:
- Send the following URL to users as an easy way for them to import the history, making a copy of their own:
- <% url = h.url_for( controller='history', action='imp', id=trans.security.encode_id(history.id), qualified=True ) %>
- <blockquote>
- <a href="${url}">${url}</a>
- </blockquote>
- <br/>
- <form action="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ) )}" method="POST">
- <input class="action-button" type="submit" name="disable_import_via_link" value="Disable import via link">
- </form>
- %else:
- This history is currently restricted (only you and the users listed below
- can access it). Enabling the following option will generate a URL that you
- can give to a user to allow them to import this history.
- <br/>
- <form action="${h.url_for( action='sharing', id=trans.security.encode_id(history.id) )}" method="POST">
- <input class="action-button" type="submit" name="enable_import_via_link" value="Enable import via link">
- </form>
- %endif
- </p>
- <h2>Sharing with specific users</h2>
- %if history.users_shared_with:
- <ul class="manage-table-actions">
- <li>
- <a class="action-button" href="${h.url_for( controller='history', action='share', id=trans.security.encode_id( history.id ) )}">
- <span>Share with another user</span>
- </a>
- </li>
- </ul>
- <p>
- The following users will see this history in their list of histories
- shared with them by others, and they will be able to create their own copy of it:
- </p>
- <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
- <tr class="header">
- <th>History '${history.name}' currently shared with</th>
- <th></th>
- </tr>
- %for i, association in enumerate( history.users_shared_with ):
- <% user = association.user %>
- <tr>
- <td>
- ${user.email}
- <a id="user-${i}-popup" class="popup-arrow" style="display: none;">▼</a>
- </td>
- <td>
- %if len( histories ) == 1:
- ## Only allow unsharing if we're dealing with 1 history, otherwise
- ## page refreshes screw things up
- <div popupmenu="user-${i}-popup">
- <a class="action-button" href="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ), unshare_user=trans.security.encode_id( user.id ) )}">Unshare</a>
+<h2>Histories that you've shared with others or enabled to be imported</h2>
+
+%if not histories:
+ You have no histories that you've shared with others or enabled to be imported
+%else:
+ %for history in histories:
+ <div class="toolForm">
+ <div class="toolFormTitle">History '${history.name}' shared with</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <div style="float: right;">
+ <a class="action-button" href="${h.url_for( controller='history', action='share', id=trans.security.encode_id( history.id ) )}">
+ <span>Share with another user</span>
+ </a>
+ </div>
+ </div>
+ %if history.users_shared_with:
+ %for i, association in enumerate( history.users_shared_with ):
+ <% user = association.user %>
+ <div class="form-row">
+ <a class="action-button" href="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ), unshare_user=trans.security.encode_id( user.id ) )}">Unshare</a>
+ ${user.email}
+ </div>
+ %endfor
+ %endif
+ %if history.importable:
+ <div class="form-row">
+ <% url = h.url_for( controller='history', action='imp', id=trans.security.encode_id(history.id), qualified=True ) %>
+ <a href="${url}">${url}</a>
+ <div class="toolParamHelp" style="clear: both;">
+ Send the above URL to users as an easy way for them to import the history, making a copy of their own
+ </div>
+ </div>
+ <div class="form-row">
+ <form action="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ) )}" method="POST">
+ <div class="form-row">
+ <input class="action-button" type="submit" name="disable_import_via_link" value="Disable import via link">
</div>
- %endif
- </td>
- </tr>
- %endfor
- </table>
- %else:
- <p>You have not shared this history with any users.</p>
- <a class="action-button" href="${h.url_for( controller='history', action='share', id=trans.security.encode_id(history.id) )}">
- <span>Share with another user</span>
- </a>
- %endif
-%endfor
+ </form>
+ </div>
+ %else:
+ <form action="${h.url_for( action='sharing', id=trans.security.encode_id(history.id) )}" method="POST">
+ <div class="form-row">
+ <input class="action-button" type="submit" name="enable_import_via_link" value="Enable import via link">
+ <div class="toolParamHelp" style="clear: both;">
+ Click to generate a URL that you can give to a user to allow them to import this history, making a copy of their own
+ </div>
+ </div>
+ </form>
+ %endif
+ </div>
+ </div>
+ %endfor
+%endif
diff -r 96ccd29277be -r f0adb6152df9 templates/root/index.mako
--- a/templates/root/index.mako Thu Sep 10 17:48:52 2009 -0400
+++ b/templates/root/index.mako Thu Sep 10 21:24:06 2009 -0400
@@ -9,9 +9,6 @@
"List your histories": null,
"Stored by you": function() {
galaxy_main.location = "${h.url_for( controller='history', action='list')}";
- },
- "Shared with you": function() {
- galaxy_main.location = "${h.url_for( controller='history', action='list_shared')}";
},
"Current History": null,
"Create new": function() {
@@ -32,10 +29,19 @@
"Show deleted datasets": function() {
galaxy_history.location = "${h.url_for( controller='root', action='history', show_deleted=True)}";
},
- "Delete": function() {
- if ( confirm( "Really delete the current history?" ) ) {
+ "Delete": function()
+ {
+ if ( confirm( "Really delete the current history?" ) )
+ {
galaxy_main.location = "${h.url_for( controller='history', action='delete_current' )}";
}
+ },
+ "Manage shared histories": null,
+ "Shared by you": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='list', operation='sharing' )}";
+ },
+ "Shared with you": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='list_shared')}";
}
});
});
diff -r 96ccd29277be -r f0adb6152df9 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Thu Sep 10 17:48:52 2009 -0400
+++ b/test/functional/test_history_functions.py Thu Sep 10 21:24:06 2009 -0400
@@ -141,14 +141,13 @@
check_str_after_submit='You cannot send histories to yourself.' )
# Share history3 with 1 valid user
self.share_current_history( regular_user1.email,
- check_str=history3.name,
- check_str_after_submit='History (%s) now shared with: 1 users' % history3.name )
+ check_str=history3.name )
# Check out list of histories to make sure history3 was shared
- self.view_stored_active_histories( check_str='operation=sharing&id=%s">shared' % self.security.encode_id( history3.id ) )
+ self.view_stored_active_histories( check_str='operation=sharing">shared' )
# Enable importing history3 via a URL
self.enable_import_via_link( self.security.encode_id( history3.id ),
check_str='Unshare',
- check_str_after_submit='Send the following URL to users' )
+ check_str_after_submit='Send the above URL to users' )
# Make sure history3 is now import-able
history3.refresh()
if not history3.importable:
@@ -159,7 +158,7 @@
check_str_after_submit='You cannot import your own history.' )
# Disable the import link for history3
self.disable_import_via_link( self.security.encode_id( history3.id ),
- check_str='Send the following URL to users',
+ check_str='Send the above URL to users',
check_str_after_submit='Enable import via link' )
# Try importing history3 after disabling the URL
self.import_history_via_url( self.security.encode_id( history3.id ),
@@ -274,12 +273,10 @@
self.upload_file( '2.bed', dbkey='hg18' )
ids = '%s,%s' % ( self.security.encode_id( history3.id ), self.security.encode_id( history4.id ) )
emails = '%s,%s' % ( regular_user2.email, regular_user3.email )
- check_str_after_submit = 'History (%s) now shared with: 3 users.' % history3.name
self.share_histories_with_users( ids,
emails,
check_str1='Share 2 histories',
- check_str2=history4.name,
- check_str_after_submit=check_str_after_submit )
+ check_str2=history4.name )
self.logout()
self.login( email=regular_user2.email )
# Shared history3 should be in regular_user2's list of shared histories
@@ -342,12 +339,10 @@
"""Testing sharing a restricted history by making the datasets public"""
# Logged in as admin_user
action_check_str = 'The following datasets can be shared with %s by updating their permissions' % regular_user1.email
- action_check_str_after_submit = 'History (%s) now shared with: 1 users.' % history5.name
# Current history is history5
self.share_current_history( regular_user1.email,
action='public',
- action_check_str=action_check_str,
- action_check_str_after_submit=action_check_str_after_submit )
+ action_check_str=action_check_str )
self.logout()
self.login( email=regular_user1.email )
# Shared history5 should be in regular_user1's list of shared histories
@@ -375,12 +370,10 @@
self.upload_file( '2.bed', dbkey='hg18' )
check_str_after_submit = 'The following datasets can be shared with %s with no changes' % regular_user2.email
check_str_after_submit2 = 'The following datasets can be shared with %s by updating their permissions' % regular_user2.email
- action_check_str_after_submit = 'History (%s) now shared with: 2 users.' % history5.name
self.share_current_history( regular_user2.email,
check_str_after_submit=check_str_after_submit,
check_str_after_submit2=check_str_after_submit2,
- action='private',
- action_check_str_after_submit=action_check_str_after_submit )
+ action='private' )
# We should now have a new sharing role
global sharing_role
role_name = 'Sharing role for: %s, %s' % ( admin_user.email, regular_user2.email )
@@ -470,12 +463,10 @@
check_str_after_submit = 'The following datasets can be shared with %s with no changes' % email
check_str_after_submit2 = 'The following datasets can be shared with %s by updating their permissions' % email
# history5 will be shared with regular_user1, regular_user2 and regular_user3
- action_check_str_after_submit = 'History (%s) now shared with: 3 users.' % history5.name
self.share_current_history( email,
check_str_after_submit=check_str_after_submit,
check_str_after_submit2=check_str_after_submit2,
- action='share_anyway',
- action_check_str_after_submit=action_check_str_after_submit )
+ action='share_anyway' )
# Check security on clone of history5 for regular_user2
self.logout()
self.login( email=regular_user2.email )
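A side note on the lib/galaxy/datatypes/data.py hunk at the top of this changeset: the new peek logic reads at most LINE_COUNT lines and at most WIDTH characters per readline() call, so a dataset whose first line is enormous is no longer slurped whole. The following is a minimal sketch of that pattern, not the committed code; the function name and defaults are illustrative only.

def peek_lines( file_name, line_count=5, width=1024 ):
    # Read at most `line_count` lines, truncated to `width` characters each,
    # so a file with a single huge line stays cheap to preview. (The patch
    # itself opens the file in universal-newline mode, "U".)
    lines = []
    handle = open( file_name, "r" )
    try:
        count = 0
        while count < line_count:
            line = handle.readline( width )   # reads at most `width` characters
            if not line:                      # end of file
                break
            lines.append( line.rstrip( "\r\n" ) )
            count += 1
    finally:
        handle.close()
    return lines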
details: http://www.bx.psu.edu/hg/galaxy/rev/96ccd29277be
changeset: 2677:96ccd29277be
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Sep 10 17:48:52 2009 -0400
description:
Merge
0 file(s) affected in this change:
diffs (69 lines):
diff -r f2e4673d784b -r 96ccd29277be cron/updateucsc.sh.sample
--- a/cron/updateucsc.sh.sample Thu Sep 10 17:48:37 2009 -0400
+++ b/cron/updateucsc.sh.sample Thu Sep 10 17:48:52 2009 -0400
@@ -6,7 +6,8 @@
# Edit this line to refer to galaxy's path:
GALAXY=/galaxy/path
-export PYTHONPATH=${GALAXY}/lib
+PYTHONPATH=${GALAXY}/lib
+export PYTHONPATH
# setup directories
echo "Creating required directories."
@@ -32,7 +33,11 @@
python ${GALAXY}/cron/parse_builds.py > ${GALAXY}/tool-data/shared/ucsc/new/builds.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt
+ diff ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt
+ fi
else
echo "Failed to update builds.txt" >&2
fi
@@ -42,7 +47,11 @@
python ${GALAXY}/cron/parse_builds_3_sites.py > ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt
+ diff ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt
+ fi
else
echo "Failed to update builds.txt" >&2
fi
@@ -52,7 +61,16 @@
python ${GALAXY}/cron/build_chrom_db.py ${GALAXY}/tool-data/shared/ucsc/chrom/new/ ${GALAXY}/tool-data/shared/ucsc/builds.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/chrom/new/*.len ${GALAXY}/tool-data/shared/ucsc/chrom/
+ for src in ${GALAXY}/tool-data/shared/ucsc/chrom/new/*.len
+ do
+ dst=${GALAXY}/tool-data/shared/ucsc/chrom/`basename $src`
+ diff $src $dst > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ echo "cp -f $src $dst"
+ cp -f $src $dst
+ fi
+ done
else
echo "Failed to update chromInfo tables." >&2
fi
diff -r f2e4673d784b -r 96ccd29277be tools/new_operations/flanking_features.py
--- a/tools/new_operations/flanking_features.py Thu Sep 10 17:48:37 2009 -0400
+++ b/tools/new_operations/flanking_features.py Thu Sep 10 17:48:52 2009 -0400
@@ -129,7 +129,7 @@
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
elif result_up:
map(outfields.append, result_up[res_ind].other)
- else:
+ elif result_down:
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
yield outfields
details: http://www.bx.psu.edu/hg/galaxy/rev/f2e4673d784b
changeset: 2676:f2e4673d784b
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Sep 10 17:48:37 2009 -0400
description:
merge
0 file(s) affected in this change:
diffs (151 lines):
diff -r 2a15e0eca0b9 -r f2e4673d784b lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Thu Sep 10 16:48:11 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Sep 10 17:48:37 2009 -0400
@@ -502,13 +502,6 @@
context = self.get_dataset_finish_context( job_context, dataset_assoc.dataset.dataset )
#should this also be checking library associations? - can a library item be added from a history before the job has ended? - lets not allow this to occur
for dataset in dataset_assoc.dataset.dataset.history_associations: #need to update all associated output hdas, i.e. history was shared with job running
- if context.get( 'path', None ):
- # The tool can set an alternate output path for the dataset.
- try:
- shutil.move( context['path'], dataset.file_name )
- except ( IOError, OSError ):
- if not context['stderr']:
- context['stderr'] = 'This dataset could not be processed'
dataset.blurb = 'done'
dataset.peek = 'no peek'
dataset.info = context['stdout'] + context['stderr']
@@ -707,6 +700,13 @@
sizes.append( ( outfile, os.stat( outfile ).st_size ) )
return sizes
def setup_external_metadata( self, exec_dir = None, tmp_dir = None, dataset_files_path = None, config_root = None, datatypes_config = None, **kwds ):
+ # extension could still be 'auto' if this is the upload tool.
+ job = model.Job.get( self.job_id )
+ for output_dataset_assoc in job.output_datasets:
+ if output_dataset_assoc.dataset.ext == 'auto':
+ context = self.get_dataset_finish_context( dict(), output_dataset_assoc.dataset.dataset )
+ output_dataset_assoc.dataset.extension = context.get( 'ext', 'data' )
+ mapping.context.current.flush()
if tmp_dir is None:
#this dir should should relative to the exec_dir
tmp_dir = self.app.config.new_file_path
@@ -716,7 +716,6 @@
config_root = self.app.config.root
if datatypes_config is None:
datatypes_config = self.app.config.datatypes_config
- job = model.Job.get( self.job_id )
return self.external_output_metadata.setup_external_metadata( [ output_dataset_assoc.dataset for output_dataset_assoc in job.output_datasets ], exec_dir = exec_dir, tmp_dir = tmp_dir, dataset_files_path = dataset_files_path, config_root = config_root, datatypes_config = datatypes_config, **kwds )
class DefaultJobDispatcher( object ):
diff -r 2a15e0eca0b9 -r f2e4673d784b lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Sep 10 16:48:11 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Sep 10 17:48:37 2009 -0400
@@ -144,7 +144,7 @@
job.add_parameter( name, value )
job.add_parameter( 'paramfile', to_json_string( json_file_path ) )
for i, dataset in enumerate( data_list ):
- job.add_output_dataset( i, dataset )
+ job.add_output_dataset( 'output%i' % i, dataset )
job.state = trans.app.model.Job.states.NEW
trans.app.model.flush()
diff -r 2a15e0eca0b9 -r f2e4673d784b tools/data_source/upload.py
--- a/tools/data_source/upload.py Thu Sep 10 16:48:11 2009 -0400
+++ b/tools/data_source/upload.py Thu Sep 10 17:48:37 2009 -0400
@@ -115,7 +115,14 @@
return ( True, False, test_ext )
return ( True, True, test_ext )
-def add_file( dataset, json_file ):
+def parse_outputs( args ):
+ rval = {}
+ for arg in args:
+ id, path = arg.split( ':', 1 )
+ rval[int( id )] = path
+ return rval
+
+def add_file( dataset, json_file, output_path ):
data_type = None
line_count = None
@@ -229,16 +236,18 @@
ext = dataset.ext
if ext == 'auto':
ext = 'data'
+ # Move the dataset to its "real" path
+ shutil.move( dataset.path, output_path )
+ # Write the job info
info = dict( type = 'dataset',
dataset_id = dataset.dataset_id,
- path = dataset.path,
ext = ext,
stdout = 'uploaded %s file' % data_type,
name = dataset.name,
line_count = line_count )
json_file.write( to_json_string( info ) + "\n" )
-def add_composite_file( dataset, json_file ):
+def add_composite_file( dataset, json_file, output_path ):
if dataset.composite_files:
os.mkdir( dataset.extra_files_path )
for name, value in dataset.composite_files.iteritems():
@@ -253,17 +262,21 @@
else:
sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( dataset.extra_files_path, name ) )
+ # Move the dataset to its "real" path
+ shutil.move( dataset.primary_file, output_path )
+ # Write the job info
info = dict( type = 'dataset',
dataset_id = dataset.dataset_id,
- path = dataset.primary_file,
stdout = 'uploaded %s file' % dataset.file_type )
json_file.write( to_json_string( info ) + "\n" )
def __main__():
- if len( sys.argv ) != 2:
- print >>sys.stderr, 'usage: upload.py <json paramfile>'
+ if len( sys.argv ) < 2:
+ print >>sys.stderr, 'usage: upload.py <json paramfile> <output spec> ...'
sys.exit( 1 )
+
+ output_paths = parse_outputs( sys.argv[2:] )
json_file = open( 'galaxy.json', 'w' )
@@ -271,10 +284,16 @@
dataset = from_json_string( line )
dataset = util.bunch.Bunch( **safe_dict( dataset ) )
+ try:
+ output_path = output_paths[int( dataset.dataset_id )]
+ except:
+ print >>sys.stderr, 'Output path for dataset %s not found on command line' % dataset.dataset_id
+ sys.exit( 1 )
+
if dataset.type == 'composite':
- add_composite_file( dataset, json_file )
+ add_composite_file( dataset, json_file, output_path )
else:
- add_file( dataset, json_file )
+ add_file( dataset, json_file, output_path )
# clean up paramfile
try:
diff -r 2a15e0eca0b9 -r f2e4673d784b tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Sep 10 16:48:11 2009 -0400
+++ b/tools/data_source/upload.xml Thu Sep 10 17:48:37 2009 -0400
@@ -7,6 +7,12 @@
<action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
<command interpreter="python">
upload.py $paramfile
+ #set $outnum = 0
+ #while $varExists('output%i' % $outnum):
+ #set $output = $getVar('output%i' % $outnum)
+ #set $outnum += 1
+ ${output.dataset.dataset.id}:${output}
+ #end while
</command>
<inputs>
<param name="file_type" type="select" label="File Format" help="Which format? See help below">
11 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/2a15e0eca0b9
changeset: 2675:2a15e0eca0b9
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Sep 10 16:48:11 2009 -0400
description:
Support for (1) ordering tags and (2) page tags.
5 file(s) affected in this change:
lib/galaxy/model/__init__.py
lib/galaxy/model/mapping.py
lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
lib/galaxy/tags/tag_handler.py
lib/galaxy/web/controllers/tag.py
diffs (249 lines):
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/model/__init__.py Thu Sep 10 16:48:11 2009 -0400
@@ -1145,7 +1145,8 @@
return "Tag(id=%s, type=%i, parent_id=%s, name=%s)" % ( self.id, self.type, self.parent_id, self.name )
class ItemTagAssociation ( object ):
- def __init__( self, item_id=None, tag_id=None, user_tname=None, value=None ):
+ def __init__( self, id=None, item_id=None, tag_id=None, user_tname=None, value=None ):
+ self.id = id
self.item_id = item_id
self.tag_id = tag_id
self.user_tname = user_tname
@@ -1165,6 +1166,8 @@
class HistoryDatasetAssociationTagAssociation ( ItemTagAssociation ):
pass
+class PageTagAssociation ( ItemTagAssociation ):
+ pass
## ---- Utility methods -------------------------------------------------------
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/model/mapping.py Thu Sep 10 16:48:11 2009 -0400
@@ -552,6 +552,7 @@
UniqueConstraint( "name" ) )
HistoryTagAssociation.table = Table( "history_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
@@ -559,6 +560,7 @@
Column( "user_value", TrimmedString(255), index=True) )
DatasetTagAssociation.table = Table( "dataset_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
@@ -566,7 +568,16 @@
Column( "user_value", TrimmedString(255), index=True) )
HistoryDatasetAssociationTagAssociation.table = Table( "history_dataset_association_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+PageTagAssociation.table = Table( "page_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
@@ -672,7 +683,7 @@
visible_children=relation(
HistoryDatasetAssociation,
primaryjoin=( ( HistoryDatasetAssociation.table.c.parent_id == HistoryDatasetAssociation.table.c.id ) & ( HistoryDatasetAssociation.table.c.visible == True ) ) ),
- tags=relation(HistoryDatasetAssociationTagAssociation, backref='history_tag_associations')
+ tags=relation(HistoryDatasetAssociationTagAssociation, order_by=HistoryDatasetAssociationTagAssociation.table.c.id, backref='history_tag_associations')
) )
assign_mapper( context, Dataset, Dataset.table,
@@ -689,7 +700,7 @@
active_library_associations=relation(
LibraryDatasetDatasetAssociation,
primaryjoin=( ( Dataset.table.c.id == LibraryDatasetDatasetAssociation.table.c.dataset_id ) & ( LibraryDatasetDatasetAssociation.table.c.deleted == False ) ) ),
- tags=relation(DatasetTagAssociation, backref='datasets')
+ tags=relation(DatasetTagAssociation, order_by=DatasetTagAssociation.table.c.id, backref='datasets')
) )
assign_mapper( context, HistoryDatasetAssociationDisplayAtAuthorization, HistoryDatasetAssociationDisplayAtAuthorization.table,
@@ -709,7 +720,7 @@
properties=dict( galaxy_sessions=relation( GalaxySessionToHistoryAssociation ),
datasets=relation( HistoryDatasetAssociation, backref="history", order_by=asc(HistoryDatasetAssociation.table.c.hid) ),
active_datasets=relation( HistoryDatasetAssociation, primaryjoin=( ( HistoryDatasetAssociation.table.c.history_id == History.table.c.id ) & ( not_( HistoryDatasetAssociation.table.c.deleted ) ) ), order_by=asc( HistoryDatasetAssociation.table.c.hid ), viewonly=True ),
- tags=relation(HistoryTagAssociation, backref="histories")
+ tags=relation(HistoryTagAssociation, order_by=HistoryTagAssociation.table.c.id, backref="histories")
) )
assign_mapper( context, HistoryUserShareAssociation, HistoryUserShareAssociation.table,
@@ -967,7 +978,8 @@
primaryjoin=( Page.table.c.id == PageRevision.table.c.page_id ) ),
latest_revision=relation( PageRevision, post_update=True,
primaryjoin=( Page.table.c.latest_revision_id == PageRevision.table.c.id ),
- lazy=False )
+ lazy=False ),
+ tags=relation(PageTagAssociation, order_by=PageTagAssociation.table.c.id, backref="pages")
) )
assign_mapper( context, Tag, Tag.table,
@@ -988,6 +1000,11 @@
properties=dict( tag=relation(Tag, backref="tagged_history_dataset_associations") ),
primary_key=[HistoryDatasetAssociationTagAssociation.table.c.history_dataset_association_id, HistoryDatasetAssociationTagAssociation.table.c.tag_id]
)
+
+assign_mapper( context, PageTagAssociation, PageTagAssociation.table,
+ properties=dict( tag=relation(Tag, backref="tagged_pages") ),
+ primary_key=[PageTagAssociation.table.c.page_id, PageTagAssociation.table.c.tag_id]
+ )
def db_next_hid( self ):
"""
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py Thu Sep 10 16:48:11 2009 -0400
@@ -0,0 +1,116 @@
+"""
+This migration script provides support for (a) ordering tags by recency and
+(b) tagging pages. This script deletes all existing tags.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.exceptions import *
+from migrate import *
+import migrate.changeset
+
+import datetime
+now = datetime.datetime.utcnow
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData( migrate_engine )
+
+def display_migration_details():
+ print ""
+ print "This migration script provides support for (a) ordering tags by recency and"
+ print "(b) tagging pages. This script deletes all existing tags."
+
+HistoryTagAssociation_table = Table( "history_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+DatasetTagAssociation_table = Table( "dataset_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+HistoryDatasetAssociationTagAssociation_table = Table( "history_dataset_association_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+PageTagAssociation_table = Table( "page_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+def upgrade():
+ display_migration_details()
+ metadata.reflect()
+
+ #
+ # Recreate tables.
+ #
+ try:
+ HistoryTagAssociation_table.drop()
+ HistoryTagAssociation_table.create()
+ except Exception, e:
+ print "Recreating history_tag_association table failed: %s" % str( e )
+ log.debug( "Recreating history_tag_association table failed: %s" % str( e ) )
+
+ try:
+ DatasetTagAssociation_table.drop()
+ DatasetTagAssociation_table.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Recreating dataset_tag_association table failed: %s" % str( e ) )
+
+ try:
+ HistoryDatasetAssociationTagAssociation_table.drop()
+ HistoryDatasetAssociationTagAssociation_table.create()
+ except OperationalError, e:
+ # Handle error that results from and index name that is too long; this occurs
+ # in MySQL.
+ if str(e).find("CREATE INDEX") != -1:
+ # Manually create index.
+ i = Index( "ix_hda_ta_history_dataset_association_id", HistoryDatasetAssociationTagAssociation_table.c.history_dataset_association_id )
+ try:
+ i.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Adding index 'ix_hda_ta_history_dataset_association_id' to table 'history_dataset_association_tag_association' table failed: %s" % str( e ) )
+ except Exception, e:
+ print str(e)
+ log.debug( "Recreating history_dataset_association_tag_association table failed: %s" % str( e ) )
+
+ # Create page_tag_association table.
+ try:
+ PageTagAssociation_table.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Creating page_tag_association table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+
+ # No need to downgrade other tagging tables. They work fine with verision 16 code.
+
+ # Drop page_tag_association table.
+ try:
+ PageTagAssociation_table.drop()
+ except Exception, e:
+ print str(e)
+ log.debug( "Dropping page_tag_association table failed: %s" % str( e ) )
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/tags/tag_handler.py
--- a/lib/galaxy/tags/tag_handler.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/tags/tag_handler.py Thu Sep 10 16:48:11 2009 -0400
@@ -1,4 +1,4 @@
-from galaxy.model import Tag, History, HistoryTagAssociation, Dataset, DatasetTagAssociation, HistoryDatasetAssociation, HistoryDatasetAssociationTagAssociation
+from galaxy.model import Tag
import re
class TagHandler( object ):
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/web/controllers/tag.py
--- a/lib/galaxy/web/controllers/tag.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/web/controllers/tag.py Thu Sep 10 16:48:11 2009 -0400
@@ -1,6 +1,9 @@
"""
Tags Controller: handles tagging/untagging of entities and provides autocomplete support.
"""
+
+from galaxy.model import History, HistoryTagAssociation, Dataset, DatasetTagAssociation, \
+ HistoryDatasetAssociation, HistoryDatasetAssociationTagAssociation, Page, PageTagAssociation
from galaxy.web.base.controller import *
from galaxy.tags.tag_handler import *
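An aside on the mapping changes in this changeset, offered as an illustration rather than the project's code: the tag-ordering support comes from giving each *_tag_association table a surrogate integer primary key and ordering the mapped tags relation by that key, so tags come back in the order they were attached. A minimal SQLAlchemy sketch with made-up table and class names:

from sqlalchemy import Table, Column, Integer, String, ForeignKey, MetaData

metadata = MetaData()

# Surrogate integer primary key on the association table; ordering the
# mapped relation by it preserves the order in which tags were added.
item_tag_association = Table( "item_tag_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "item_id", Integer, ForeignKey( "item.id" ), index=True ),
    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
    Column( "user_tname", String( 255 ), index=True ) )

# In the mapper this becomes, roughly:
#   tags=relation( ItemTagAssociation,
#                  order_by=item_tag_association.c.id,
#                  backref="items" )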
11 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/0a41293e679a
changeset: 2674:0a41293e679a
user: guru
date: Thu Sep 10 17:35:21 2009 -0400
description:
Bug fix for 'Fetch closest feature' tool.
1 file(s) affected in this change:
tools/new_operations/flanking_features.py
diffs (12 lines):
diff -r ce8c57840343 -r 0a41293e679a tools/new_operations/flanking_features.py
--- a/tools/new_operations/flanking_features.py Thu Sep 10 17:31:05 2009 -0400
+++ b/tools/new_operations/flanking_features.py Thu Sep 10 17:35:21 2009 -0400
@@ -129,7 +129,7 @@
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
elif result_up:
map(outfields.append, result_up[res_ind].other)
- else:
+ elif result_down:
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
yield outfields
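For readers skimming the one-line fix above, a small hedged illustration of why the guard matters; the function below is a simplification, not the tool's real code.

def nearest_flanking( result_up, result_down, res_ind=0 ):
    # With the old bare `else:`, an interval with no feature on either side
    # reached result_down[-1] with result_down empty and raised IndexError.
    # Guarding with `elif result_down:` makes that case a no-op instead.
    if result_up:
        return result_up[ res_ind ]
    elif result_down:          # was: else:
        return result_down[ -1 ]
    return None                # nothing upstream or downstream

print( nearest_flanking( [], [] ) )                    # None instead of IndexError
print( nearest_flanking( [], [ "downstream_hit" ] ) )  # 'downstream_hit'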
details: http://www.bx.psu.edu/hg/galaxy/rev/ce8c57840343
changeset: 2673:ce8c57840343
user: Nate Coraor <nate(a)bx.psu.edu>
date: Thu Sep 10 17:31:05 2009 -0400
description:
POSIXize/Bournize updateucsc.sh.sample
1 file(s) affected in this change:
cron/updateucsc.sh.sample
diffs (57 lines):
diff -r d3fe789e3931 -r ce8c57840343 cron/updateucsc.sh.sample
--- a/cron/updateucsc.sh.sample Thu Sep 10 14:52:38 2009 -0400
+++ b/cron/updateucsc.sh.sample Thu Sep 10 17:31:05 2009 -0400
@@ -6,7 +6,8 @@
# Edit this line to refer to galaxy's path:
GALAXY=/galaxy/path
-export PYTHONPATH=${GALAXY}/lib
+PYTHONPATH=${GALAXY}/lib
+export PYTHONPATH
# setup directories
echo "Creating required directories."
@@ -32,7 +33,11 @@
python ${GALAXY}/cron/parse_builds.py > ${GALAXY}/tool-data/shared/ucsc/new/builds.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt
+ diff ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt
+ fi
else
echo "Failed to update builds.txt" >&2
fi
@@ -42,7 +47,11 @@
python ${GALAXY}/cron/parse_builds_3_sites.py > ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt
+ diff ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt
+ fi
else
echo "Failed to update builds.txt" >&2
fi
@@ -52,7 +61,16 @@
python ${GALAXY}/cron/build_chrom_db.py ${GALAXY}/tool-data/shared/ucsc/chrom/new/ ${GALAXY}/tool-data/shared/ucsc/builds.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/chrom/new/*.len ${GALAXY}/tool-data/shared/ucsc/chrom/
+ for src in ${GALAXY}/tool-data/shared/ucsc/chrom/new/*.len
+ do
+ dst=${GALAXY}/tool-data/shared/ucsc/chrom/`basename $src`
+ diff $src $dst > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ echo "cp -f $src $dst"
+ cp -f $src $dst
+ fi
+ done
else
echo "Failed to update chromInfo tables." >&2
fi
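A hedged aside on the shell changes above: cp -u is a GNU extension rather than POSIX, so the reworked script diffs first and copies with plain cp -f only when the content actually differs, which also leaves unchanged files untouched. A rough Python analogue of that copy-if-changed pattern (the paths below are placeholders):

import filecmp
import os
import shutil

def copy_if_changed( src, dst ):
    # Overwrite the destination only when it is missing or its content
    # differs from the freshly generated file.
    if not os.path.exists( dst ) or not filecmp.cmp( src, dst, shallow=False ):
        shutil.copyfile( src, dst )
        return True
    return False

# e.g. copy_if_changed( "new/builds.txt", "builds.txt" )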
10 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/d3fe789e3931
changeset: 2672:d3fe789e3931
user: Nate Coraor <nate(a)bx.psu.edu>
date: Thu Sep 10 14:52:38 2009 -0400
description:
Get rid of the hacky "alternate path" stuff used by the upload tool and fix setting metadata when using autodetect and set_metadata_externally
4 file(s) affected in this change:
lib/galaxy/jobs/__init__.py
lib/galaxy/tools/actions/upload.py
tools/data_source/upload.py
tools/data_source/upload.xml
diffs (151 lines):
diff -r dbbc63c0630a -r d3fe789e3931 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Sep 10 14:52:38 2009 -0400
@@ -502,13 +502,6 @@
context = self.get_dataset_finish_context( job_context, dataset_assoc.dataset.dataset )
#should this also be checking library associations? - can a library item be added from a history before the job has ended? - lets not allow this to occur
for dataset in dataset_assoc.dataset.dataset.history_associations: #need to update all associated output hdas, i.e. history was shared with job running
- if context.get( 'path', None ):
- # The tool can set an alternate output path for the dataset.
- try:
- shutil.move( context['path'], dataset.file_name )
- except ( IOError, OSError ):
- if not context['stderr']:
- context['stderr'] = 'This dataset could not be processed'
dataset.blurb = 'done'
dataset.peek = 'no peek'
dataset.info = context['stdout'] + context['stderr']
@@ -707,6 +700,13 @@
sizes.append( ( outfile, os.stat( outfile ).st_size ) )
return sizes
def setup_external_metadata( self, exec_dir = None, tmp_dir = None, dataset_files_path = None, config_root = None, datatypes_config = None, **kwds ):
+ # extension could still be 'auto' if this is the upload tool.
+ job = model.Job.get( self.job_id )
+ for output_dataset_assoc in job.output_datasets:
+ if output_dataset_assoc.dataset.ext == 'auto':
+ context = self.get_dataset_finish_context( dict(), output_dataset_assoc.dataset.dataset )
+ output_dataset_assoc.dataset.extension = context.get( 'ext', 'data' )
+ mapping.context.current.flush()
if tmp_dir is None:
#this dir should should relative to the exec_dir
tmp_dir = self.app.config.new_file_path
@@ -716,7 +716,6 @@
config_root = self.app.config.root
if datatypes_config is None:
datatypes_config = self.app.config.datatypes_config
- job = model.Job.get( self.job_id )
return self.external_output_metadata.setup_external_metadata( [ output_dataset_assoc.dataset for output_dataset_assoc in job.output_datasets ], exec_dir = exec_dir, tmp_dir = tmp_dir, dataset_files_path = dataset_files_path, config_root = config_root, datatypes_config = datatypes_config, **kwds )
class DefaultJobDispatcher( object ):
diff -r dbbc63c0630a -r d3fe789e3931 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Sep 10 14:52:38 2009 -0400
@@ -144,7 +144,7 @@
job.add_parameter( name, value )
job.add_parameter( 'paramfile', to_json_string( json_file_path ) )
for i, dataset in enumerate( data_list ):
- job.add_output_dataset( i, dataset )
+ job.add_output_dataset( 'output%i' % i, dataset )
job.state = trans.app.model.Job.states.NEW
trans.app.model.flush()
diff -r dbbc63c0630a -r d3fe789e3931 tools/data_source/upload.py
--- a/tools/data_source/upload.py Thu Sep 10 10:42:50 2009 -0400
+++ b/tools/data_source/upload.py Thu Sep 10 14:52:38 2009 -0400
@@ -115,7 +115,14 @@
return ( True, False, test_ext )
return ( True, True, test_ext )
-def add_file( dataset, json_file ):
+def parse_outputs( args ):
+ rval = {}
+ for arg in args:
+ id, path = arg.split( ':', 1 )
+ rval[int( id )] = path
+ return rval
+
+def add_file( dataset, json_file, output_path ):
data_type = None
line_count = None
@@ -229,16 +236,18 @@
ext = dataset.ext
if ext == 'auto':
ext = 'data'
+ # Move the dataset to its "real" path
+ shutil.move( dataset.path, output_path )
+ # Write the job info
info = dict( type = 'dataset',
dataset_id = dataset.dataset_id,
- path = dataset.path,
ext = ext,
stdout = 'uploaded %s file' % data_type,
name = dataset.name,
line_count = line_count )
json_file.write( to_json_string( info ) + "\n" )
-def add_composite_file( dataset, json_file ):
+def add_composite_file( dataset, json_file, output_path ):
if dataset.composite_files:
os.mkdir( dataset.extra_files_path )
for name, value in dataset.composite_files.iteritems():
@@ -253,17 +262,21 @@
else:
sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( dataset.extra_files_path, name ) )
+ # Move the dataset to its "real" path
+ shutil.move( dataset.primary_file, output_path )
+ # Write the job info
info = dict( type = 'dataset',
dataset_id = dataset.dataset_id,
- path = dataset.primary_file,
stdout = 'uploaded %s file' % dataset.file_type )
json_file.write( to_json_string( info ) + "\n" )
def __main__():
- if len( sys.argv ) != 2:
- print >>sys.stderr, 'usage: upload.py <json paramfile>'
+ if len( sys.argv ) < 2:
+ print >>sys.stderr, 'usage: upload.py <json paramfile> <output spec> ...'
sys.exit( 1 )
+
+ output_paths = parse_outputs( sys.argv[2:] )
json_file = open( 'galaxy.json', 'w' )
@@ -271,10 +284,16 @@
dataset = from_json_string( line )
dataset = util.bunch.Bunch( **safe_dict( dataset ) )
+ try:
+ output_path = output_paths[int( dataset.dataset_id )]
+ except:
+ print >>sys.stderr, 'Output path for dataset %s not found on command line' % dataset.dataset_id
+ sys.exit( 1 )
+
if dataset.type == 'composite':
- add_composite_file( dataset, json_file )
+ add_composite_file( dataset, json_file, output_path )
else:
- add_file( dataset, json_file )
+ add_file( dataset, json_file, output_path )
# clean up paramfile
try:
diff -r dbbc63c0630a -r d3fe789e3931 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Sep 10 10:42:50 2009 -0400
+++ b/tools/data_source/upload.xml Thu Sep 10 14:52:38 2009 -0400
@@ -7,6 +7,12 @@
<action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
<command interpreter="python">
upload.py $paramfile
+ #set $outnum = 0
+ #while $varExists('output%i' % $outnum):
+ #set $output = $getVar('output%i' % $outnum)
+ #set $outnum += 1
+ ${output.dataset.dataset.id}:${output}
+ #end while
</command>
<inputs>
<param name="file_type" type="select" label="File Format" help="Which format? See help below">
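To make the new calling convention easier to follow, a hedged example of how the command line generated by upload.xml is consumed by parse_outputs() from the diff above; the dataset ids and paths below are made up:

def parse_outputs( args ):
    # Each argument after the JSON param file has the form "<dataset_id>:<path>";
    # split on the first ':' only, as the patch does.
    rval = {}
    for arg in args:
        dataset_id, path = arg.split( ':', 1 )
        rval[ int( dataset_id ) ] = path
    return rval

# A command line like the one the <command> template expands to might be:
#   upload.py /tmp/paramfile.json 12:/galaxy/database/files/000/dataset_12.dat \
#             13:/galaxy/database/files/000/dataset_13.dat
print( parse_outputs( [ "12:/galaxy/database/files/000/dataset_12.dat",
                        "13:/galaxy/database/files/000/dataset_13.dat" ] ) )
# {12: '/galaxy/database/files/000/dataset_12.dat', 13: '/galaxy/database/files/000/dataset_13.dat'}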
10 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/dbbc63c0630a
changeset: 2671:dbbc63c0630a
user: guru
date: Thu Sep 10 10:42:50 2009 -0400
description:
Updated manual builds with L. major (2005) genome.
1 file(s) affected in this change:
tool-data/shared/ucsc/manual_builds.txt
diffs (8 lines):
diff -r c3b40f23a0e0 -r dbbc63c0630a tool-data/shared/ucsc/manual_builds.txt
--- a/tool-data/shared/ucsc/manual_builds.txt Wed Sep 09 14:24:11 2009 -0400
+++ b/tool-data/shared/ucsc/manual_builds.txt Thu Sep 10 10:42:50 2009 -0400
@@ -664,3 +664,4 @@
baciAnth_AMES Bacillus anthracis str. Ames chr=5227293
shewOnei Shewanella oneidensis MR-1 plasmid_pMR-1=161613,chr=4969803
15217 Human herpesvirus 1 NC_001806=152261
+lMaj5 Leishmania major 2005 chr1=268984,chr2=355714,chr3=384518,chr4=441313,chr5=465823,chr6=516874,chr7=596348,chr8=574972,chr9=573441,chr10=570864,chr11=582575,chr12=675347,chr13=654604,chr14=622648,chr15=629514,chr16=714659,chr17=684831,chr18=739751,chr19=702212,chr20=742551,chr21=772974,chr22=716608,chr23=772567,chr24=840950,chr25=912849,chr26=1091579,chr27=1130447,chr28=1160128,chr29=1212674,chr30=1403454,chr31=1484336,chr32=1604650,chr33=1583673,chr34=1866754,chr35=2090491,chr36=2682183
Hi,
I am looking for more information on the options in the Logging and Debugging section of the universe_wsgi.ini file, in particular the specific values to which I can set log_level in the first option, "verbosity of log messages".
I wish to turn off the debugging messages in the log file.
Thanks
Shaun Webb
--
The University of Edinburgh is a charitable body, registered in
Scotland, with registration number SC005336.
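A hedged note on the question above: in Galaxy of this era the log_level option is handed to Python's standard logging module, so the usual level names apply, and setting it to INFO or higher should drop DEBUG messages. For reference:

import logging

# Standard logging level names, from most to least verbose; log_level = INFO
# (or higher) should silence DEBUG output, assuming the option maps directly
# onto these names.
for name in ( "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL" ):
    print( "%s = %d" % ( name, getattr( logging, name ) ) )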
09 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/c3b40f23a0e0
changeset: 2670:c3b40f23a0e0
user: James Taylor <james(a)jamestaylor.org>
date: Wed Sep 09 14:24:11 2009 -0400
description:
Automated merge with http://bitbucket.org/galaxy/galaxy-central/
2 file(s) affected in this change:
lib/galaxy/model/__init__.py
templates/dataset/edit_attributes.mako
diffs (truncated from 7595 to 3000 lines):
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/datatypes/converters/maf_to_fasta_converter.py
--- a/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py Wed Sep 09 14:24:11 2009 -0400
@@ -14,12 +14,15 @@
input_name = sys.argv.pop(1)
out = open( output_name, 'w' )
count = 0
- for count, maf in enumerate( bx.align.maf.Reader( open( input_name, 'r' ) ) ):
- for c in maf.components:
- spec, chrom = bx.align.maf.src_split( c.src )
- if not spec or not chrom:
- spec = chrom = c.src
- out.write( "%s\n" % maf_utilities.get_fasta_header( c, suffix = "%s_%i" % ( spec, count ) ) )
+ for count, block in enumerate( bx.align.maf.Reader( open( input_name, 'r' ) ) ):
+ spec_counts = {}
+ for c in block.components:
+ spec, chrom = maf_utilities.src_split( c.src )
+ if spec not in spec_counts:
+ spec_counts[ spec ] = 0
+ else:
+ spec_counts[ spec ] += 1
+ out.write( "%s\n" % maf_utilities.get_fasta_header( c, { 'block_index' : count, 'species' : spec, 'sequence_index' : spec_counts[ spec ] }, suffix = "%s_%i_%i" % ( spec, count, spec_counts[ spec ] ) ) )
out.write( "%s\n" % c.text )
out.write( "\n" )
out.close()
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/datatypes/converters/maf_to_fasta_converter.xml
--- a/lib/galaxy/datatypes/converters/maf_to_fasta_converter.xml Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/datatypes/converters/maf_to_fasta_converter.xml Wed Sep 09 14:24:11 2009 -0400
@@ -1,4 +1,4 @@
-<tool id="CONVERTER_maf_to_fasta_0" name="Convert MAF to Fasta">
+<tool id="CONVERTER_maf_to_fasta_0" name="Convert MAF to Fasta" version="1.0.1">
<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
<command interpreter="python">maf_to_fasta_converter.py $output1 $input1</command>
<inputs>
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/datatypes/converters/maf_to_interval_converter.py
--- a/lib/galaxy/datatypes/converters/maf_to_interval_converter.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/datatypes/converters/maf_to_interval_converter.py Wed Sep 09 14:24:11 2009 -0400
@@ -4,7 +4,8 @@
import sys
from galaxy import eggs
import pkg_resources; pkg_resources.require( "bx-python" )
-import bx.align.maf
+import bx.align.maf
+from galaxy.tools.util import maf_utilities
assert sys.version_info[:2] >= ( 2, 4 )
@@ -17,15 +18,15 @@
#write interval header line
out.write( "#chrom\tstart\tend\tstrand\n" )
try:
- for maf in bx.align.maf.Reader( open(input_name, 'r') ):
- c = maf.get_component_by_src_start(species)
- if c is not None:
- out.write( "%s\t%i\t%i\t%s\n" % (bx.align.src_split(c.src)[-1], c.get_forward_strand_start(), c.get_forward_strand_end(), c.strand) )
- count += 1
+ for block in bx.align.maf.Reader( open( input_name, 'r' ) ):
+ for c in maf_utilities.iter_components_by_src_start( block, species ):
+ if c is not None:
+ out.write( "%s\t%i\t%i\t%s\n" % ( bx.align.src_split( c.src )[-1], c.get_forward_strand_start(), c.get_forward_strand_end(), c.strand ) )
+ count += 1
except Exception, e:
print >> sys.stderr, "There was a problem processing your input: %s" % e
out.close()
- print "%i MAF blocks converted to Genomic Intervals for species %s." % (count, species)
+ print "%i MAF blocks converted to Genomic Intervals for species %s." % ( count, species )
if __name__ == "__main__": __main__()
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/datatypes/converters/maf_to_interval_converter.xml
--- a/lib/galaxy/datatypes/converters/maf_to_interval_converter.xml Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/datatypes/converters/maf_to_interval_converter.xml Wed Sep 09 14:24:11 2009 -0400
@@ -1,4 +1,4 @@
-<tool id="CONVERTER_maf_to_interval_0" name="Convert MAF to Genomic Intervals">
+<tool id="CONVERTER_maf_to_interval_0" name="Convert MAF to Genomic Intervals" version="1.0.1">
<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
<command interpreter="python">maf_to_interval_converter.py $output1 $input1 ${input1.metadata.dbkey}</command>
<inputs>
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/datatypes/sequence.py
--- a/lib/galaxy/datatypes/sequence.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/datatypes/sequence.py Wed Sep 09 14:24:11 2009 -0400
@@ -22,7 +22,7 @@
pass
class Alignment( Sequence ):
- """Class describing an alignmnet"""
+ """Class describing an alignment"""
"""Add metadata elements"""
MetadataElement( name="species", desc="Species", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None )
@@ -316,6 +316,78 @@
import bx.align.maf
except:
pass
+#trying to import maf_utilities here throws an ImportError due to a circular import between jobs and tools:
+#from galaxy.tools.util.maf_utilities import build_maf_index_species_chromosomes
+#Traceback (most recent call last):
+# File "./scripts/paster.py", line 27, in <module>
+# command.run()
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/script/command.py", line 78, in run
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/script/command.py", line 117, in invoke
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/script/command.py", line 212, in run
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/script/serve.py", line 227, in command
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/script/serve.py", line 250, in loadapp
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/deploy/loadwsgi.py", line 193, in loadapp
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/deploy/loadwsgi.py", line 213, in loadobj
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/deploy/loadwsgi.py", line 237, in loadcontext
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/deploy/loadwsgi.py", line 267, in _loadconfig
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/deploy/loadwsgi.py", line 397, in get_context
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/deploy/loadwsgi.py", line 439, in _context_from_explicit
+# File "build/bdist.solaris-2.11-i86pc/egg/paste/deploy/loadwsgi.py", line 18, in import_string
+# File "/afs/bx.psu.edu/home/dan/galaxy/central/lib/pkg_resources.py", line 1912, in load
+# entry = __import__(self.module_name, globals(),globals(), ['__name__'])
+# File "/afs/bx.psu.edu/home/dan/galaxy/central/lib/galaxy/web/buildapp.py", line 18, in <module>
+# from galaxy import config, jobs, util, tools
+# File "/afs/bx.psu.edu/home/dan/galaxy/central/lib/galaxy/jobs/__init__.py", line 3, in <module>
+# from galaxy import util, model
+# File "/afs/bx.psu.edu/home/dan/galaxy/central/lib/galaxy/model/__init__.py", line 13, in <module>
+# import galaxy.datatypes.registry
+# File "/afs/bx.psu.edu/home/dan/galaxy/central/lib/galaxy/datatypes/registry.py", line 6, in <module>
+# import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo
+# File "/afs/bx.psu.edu/home/dan/galaxy/central/lib/galaxy/datatypes/sequence.py", line 344, in <module>
+# from galaxy.tools.util.maf_utilities import build_maf_index_species_chromosomes
+# File "/afs/bx.psu.edu/home/dan/galaxy/central/lib/galaxy/tools/__init__.py", line 15, in <module>
+# from galaxy import util, jobs, model
+#ImportError: cannot import name jobs
+#so we'll copy and paste for now...terribly icky
+#*** ANYCHANGE TO THIS METHOD HERE OR IN maf_utilities MUST BE PROPAGATED ***
+def COPIED_build_maf_index_species_chromosomes( filename, index_species = None ):
+ species = []
+ species_chromosomes = {}
+ indexes = bx.interval_index_file.Indexes()
+ try:
+ maf_reader = bx.align.maf.Reader( open( filename ) )
+ while True:
+ pos = maf_reader.file.tell()
+ block = maf_reader.next()
+ if block is None: break
+ for c in block.components:
+ spec = c.src
+ chrom = None
+ if "." in spec:
+ spec, chrom = spec.split( ".", 1 )
+ if spec not in species:
+ species.append( spec )
+ species_chromosomes[spec] = []
+ if chrom and chrom not in species_chromosomes[spec]:
+ species_chromosomes[spec].append( chrom )
+ if index_species is None or spec in index_species:
+ forward_strand_start = c.forward_strand_start
+ forward_strand_end = c.forward_strand_end
+ try:
+ forward_strand_start = int( forward_strand_start )
+ forward_strand_end = int( forward_strand_end )
+ except ValueError:
+ continue #start and end are not integers, can't add component to index, goto next component
+ #this likely only occurs when parse_e_rows is True?
+ #could a species exist as only e rows? should the
+ if forward_strand_end > forward_strand_start:
+ #require positive length; i.e. certain lines have start = end = 0 and cannot be indexed
+ indexes.add( c.src, forward_strand_start, forward_strand_end, pos, max=c.src_size )
+ except Exception, e:
+ #most likely a bad MAF
+ log.debug( 'Building MAF index on %s failed: %s' % ( filename, e ) )
+ return ( None, [], {} )
+ return ( indexes, species, species_chromosomes )
class Maf( Alignment ):
"""Class describing a Maf alignment"""
@@ -333,38 +405,8 @@
Parses and sets species, chromosomes, index from MAF file.
"""
#these metadata values are not accessable by users, always overwrite
+ indexes, species, species_chromosomes = COPIED_build_maf_index_species_chromosomes( dataset.file_name )
- try:
- maf_reader = bx.align.maf.Reader( open( dataset.file_name ) )
- except:
- return #not a maf file
- species = []
- species_chromosomes = {}
- indexes = bx.interval_index_file.Indexes()
- while True:
- pos = maf_reader.file.tell()
- block = maf_reader.next()
- if block is None: break
- for c in block.components:
- spec = c.src
- chrom = None
- if "." in spec:
- spec, chrom = spec.split( ".", 1 )
- if spec not in species:
- species.append(spec)
- species_chromosomes[spec] = []
- if chrom and chrom not in species_chromosomes[spec]:
- species_chromosomes[spec].append( chrom )
- forward_strand_start = c.forward_strand_start
- forward_strand_end = c.forward_strand_end
- try:
- forward_strand_start = int( forward_strand_start )
- forward_strand_end = int( forward_strand_end )
- except ValueError:
- continue #start and end are not integers, can't add component to index, goto next component
- if forward_strand_end > forward_strand_start:
- #require positive length; i.e. certain lines have start = end = 0 and cannot be indexed
- indexes.add( c.src, forward_strand_start, forward_strand_end, pos, max=c.src_size )
dataset.metadata.species = species
#only overwrite the contents if our newly determined chromosomes don't match stored
chrom_file = dataset.metadata.species_chromosomes
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/model/__init__.py Wed Sep 09 14:24:11 2009 -0400
@@ -709,9 +709,9 @@
folder.order_id = self.item_count
self.item_count += 1
def get_info_association( self, restrict=False ):
- # If restrict is True, we will return this folder's info_association whether it
- # exists or not. If restrict is False, we'll return the next available info_association
- # in the inheritable hierarchy
+ # If restrict is True, we will return this folder's info_association, not inheriting.
+ # If restrict is False, we'll return the next available info_association in the
+ # inheritable hierarchy
if self.info_association:
return self.info_association[0]
if restrict:
@@ -721,9 +721,6 @@
if self.library_root:
return self.library_root[0].get_info_association()
return None
- @property
- def active_components( self ):
- return list( self.active_folders ) + list( self.active_library_datasets )
@property
def active_library_datasets( self ):
# This needs to be a list
@@ -736,10 +733,6 @@
def active_datasets( self ):
# This needs to be a list
return [ ld.library_dataset_dataset_association.dataset for ld in self.datasets if not ld.library_dataset_dataset_association.deleted ]
- @property #make this a relation
- def activatable_folders( self ):
- # This needs to be a list
- return [ folder for folder in self.folders if not folder.purged ]
class LibraryDataset( object ):
# This class acts as a proxy to the currently selected LDDA
@@ -1062,17 +1055,11 @@
return s
return False
def submitted(self):
- if self.state == self.states.SUBMITTED:
- return True
- return False
+ return self.state == self.states.SUBMITTED
def unsubmitted(self):
- if self.state == self.states.UNSUBMITTED:
- return True
- return False
+ return self.state == self.states.UNSUBMITTED
def complete(self):
- if self.state == self.states.COMPLETE:
- return True
- return False
+ return self.state == self.states.COMPLETE
class RequestType( object ):
def __init__(self, name=None, desc=None, request_form=None, sample_form=None):
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/model/mapping.py Wed Sep 09 14:24:11 2009 -0400
@@ -233,10 +233,10 @@
Column( "id", Integer, primary_key=True ),
Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name="library_dataset_dataset_association_id_fk" ), nullable=True, index=True ),#current version of dataset, if null, there is not a current version selected
Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
- Column( "order_id", Integer ),
+ Column( "order_id", Integer ), #not currently being used, but for possible future use
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "name", TrimmedString( 255 ), key="_name" ), #when not None/null this will supercede display in library (but not when imported into user's history?)
+ Column( "name", TrimmedString( 255 ), key="_name", index=True ), #when not None/null this will supercede display in library (but not when imported into user's history?)
Column( "info", TrimmedString( 255 ), key="_info" ), #when not None/null this will supercede display in library (but not when imported into user's history?)
Column( "deleted", Boolean, index=True, default=False ) )
@@ -248,7 +248,7 @@
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "copied_from_history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id", use_alter=True, name='history_dataset_association_dataset_id_fkey' ), nullable=True ),
Column( "copied_from_library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name='library_dataset_dataset_association_id_fkey' ), nullable=True ),
- Column( "name", TrimmedString( 255 ) ),
+ Column( "name", TrimmedString( 255 ), index=True ),
Column( "info", TrimmedString( 255 ) ),
Column( "blurb", TrimmedString( 255 ) ),
Column( "peek" , TEXT ),
@@ -276,9 +276,9 @@
Column( "parent_id", Integer, ForeignKey( "library_folder.id" ), nullable = True, index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "name", TEXT ),
+ Column( "name", TEXT, index=True ),
Column( "description", TEXT ),
- Column( "order_id", Integer ),
+ Column( "order_id", Integer ), #not currently being used, but for possible future use
Column( "item_count", Integer ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ),
@@ -823,15 +823,16 @@
folders=relation(
LibraryFolder,
primaryjoin=( LibraryFolder.table.c.parent_id == LibraryFolder.table.c.id ),
+ order_by=asc( LibraryFolder.table.c.name ),
backref=backref( "parent", primaryjoin=( LibraryFolder.table.c.parent_id == LibraryFolder.table.c.id ), remote_side=[LibraryFolder.table.c.id] ) ),
active_folders=relation( LibraryFolder,
primaryjoin=( ( LibraryFolder.table.c.parent_id == LibraryFolder.table.c.id ) & ( not_( LibraryFolder.table.c.deleted ) ) ),
- order_by=asc( LibraryFolder.table.c.order_id ),
+ order_by=asc( LibraryFolder.table.c.name ),
lazy=True, #"""sqlalchemy.exceptions.ArgumentError: Error creating eager relationship 'active_folders' on parent class '<class 'galaxy.model.LibraryFolder'>' to child class '<class 'galaxy.model.LibraryFolder'>': Cant use eager loading on a self referential relationship."""
viewonly=True ),
datasets=relation( LibraryDataset,
primaryjoin=( ( LibraryDataset.table.c.folder_id == LibraryFolder.table.c.id ) ),
- order_by=asc( LibraryDataset.table.c.order_id ),
+ order_by=asc( LibraryDataset.table.c._name ),
lazy=False,
viewonly=True )
) )
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/model/migrate/versions/0017_library_item_indexes.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py Wed Sep 09 14:24:11 2009 -0400
@@ -0,0 +1,53 @@
+"""
+This script adds 3 indexes to table columns: library_folder.name,
+library_dataset.name, library_dataset_dataset_association.name.
+"""
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+import sys, logging
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+LibraryFolder_table = Table( "library_folder", metadata, autoload=True )
+LibraryDatasetDatasetAssociation_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+LibraryDataset_table = Table( "library_dataset", metadata, autoload=True )
+
+def display_migration_details():
+ print "========================================"
+ print "This script adds 3 indexes to table columns: library_folder.name,"
+ print "library_dataset.name, library_dataset_dataset_association.name."
+ print "========================================"
+
+def upgrade():
+ display_migration_details()
+ # Load existing tables
+ metadata.reflect()
+ # Add 1 index to the library_folder table
+ i = Index( 'ix_library_folder_name', LibraryFolder_table.c.name )
+ try:
+ i.create()
+ except Exception, e:
+ log.debug( "Adding index 'ix_library_folder_name' to library_folder table failed: %s" % ( str( e ) ) )
+ # Add 1 index to the library_dataset_dataset_association table
+ i = Index( 'ix_library_dataset_dataset_association_name', LibraryDatasetDatasetAssociation_table.c.name )
+ try:
+ i.create()
+ except Exception, e:
+ log.debug( "Adding index 'ix_library_dataset_dataset_association_name' to library_dataset_dataset_association table failed: %s" % ( str( e ) ) )
+ # Add 1 index to the library_dataset table
+ i = Index( 'ix_library_dataset_name', LibraryDataset_table.c.name )
+ try:
+ i.create()
+ except Exception, e:
+ log.debug( "Adding index 'ix_library_dataset_name' to library_dataset table failed: %s" % ( str( e ) ) )
+def downgrade():
+ log.debug( "Downgrade is not possible." )
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/security/__init__.py Wed Sep 09 14:24:11 2009 -0400
@@ -33,11 +33,19 @@
def get_actions( self ):
"""Get all permitted actions as a list of Action objects"""
return self.permitted_actions.__dict__.values()
- def allow_action( self, user, roles, action, **kwd ):
- raise 'No valid method of checking action (%s) on %s for user %s.' % ( action, kwd, user )
def get_item_action( self, action, item ):
raise 'No valid method of retrieving action (%s) for item %s.' % ( action, item )
def guess_derived_permissions_for_datasets( self, datasets = [] ):
+ raise "Unimplemented Method"
+ def can_access_dataset( self, roles, dataset ):
+ raise "Unimplemented Method"
+ def can_manage_dataset( self, roles, dataset ):
+ raise "Unimplemented Method"
+ def can_add_library_item( self, user, roles, item ):
+ raise "Unimplemented Method"
+ def can_modify_library_item( self, user, roles, item ):
+ raise "Unimplemented Method"
+ def can_manage_library_item( self, user, roles, item ):
raise "Unimplemented Method"
def associate_components( self, **kwd ):
raise 'No valid method of associating provided components: %s' % kwd
@@ -89,62 +97,44 @@
to allow migration toward a more SQLAlchemy 0.4 style of use.
"""
return self.model.context.current
- def allow_action( self, user, roles, action, **kwd ):
- if 'dataset' in kwd:
- return self.allow_dataset_action( user, roles, action, kwd[ 'dataset' ] )
- elif 'library_item' in kwd:
- return self.allow_library_item_action( user, roles, action, kwd[ 'library_item' ] )
- raise 'No valid method of checking action (%s) for user %s using kwd %s' % ( action, str( user ), str( kwd ) )
- def allow_dataset_action( self, user, roles, action, dataset ):
- """Returns true when user has permission to perform an action"""
- if not user:
- if action == self.permitted_actions.DATASET_ACCESS and action.action not in [ dp.action for dp in dataset.actions ]:
- # anons only get access, and only if there are no roles required for the access action
- # Other actions (or if the dataset has roles defined for the access action) fall through
- # to the false below
- return True
- elif action.action not in [ dp.action for dp in dataset.actions ]:
- if action.model == 'restrict':
- # Implicit access to restrict-style actions if the dataset does not have the action
- # Grant style actions fall through to the false below
- return True
- else:
- perms = self.get_dataset_permissions( dataset )
- if action in perms.keys():
- # The filter() returns a list of the dataset's role ids of which the user is not a member,
- # so an empty list means the user has all of the required roles.
- if not filter( lambda x: x not in roles, [ r for r in perms[ action ] ] ):
- # User has all of the roles required to perform the action
- return True
- # The user is missing at least one required role
- return False
- def allow_library_item_action( self, user, roles, action, library_item ):
+ def allow_dataset_action( self, roles, action, dataset ):
+ """
+ Returns true when user has permission to perform an action on an
+ instance of Dataset.
+ """
+ dataset_action = self.get_item_action( action, dataset )
+ if dataset_action is None:
+ return action.model == 'restrict'
+ return dataset_action.role in roles
+ def can_access_dataset( self, roles, dataset ):
+ return self.allow_dataset_action( roles, self.permitted_actions.DATASET_ACCESS, dataset )
+ def can_manage_dataset( self, roles, dataset ):
+ return self.allow_dataset_action( roles, self.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset )
+ def allow_library_item_action( self, user, roles, action, item ):
+ """
+ Method for checking a permission for the current user to perform a
+ specific library action on a library item, which must be one of:
+ Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
+ """
if user is None:
# All permissions are granted, so non-users cannot have permissions
return False
- if action.model == 'grant':
- # Check to see if user has access to any of the roles
- allowed_role_assocs = []
- for item_class, permission_class in self.library_item_assocs:
- if isinstance( library_item, item_class ):
- if permission_class == self.model.LibraryPermissions:
- allowed_role_assocs = permission_class.filter_by( action=action.action, library_id=library_item.id ).all()
- elif permission_class == self.model.LibraryFolderPermissions:
- allowed_role_assocs = permission_class.filter_by( action=action.action, library_folder_id=library_item.id ).all()
- elif permission_class == self.model.LibraryDatasetPermissions:
- allowed_role_assocs = permission_class.filter_by( action=action.action, library_dataset_id=library_item.id ).all()
- elif permission_class == self.model.LibraryDatasetDatasetAssociationPermissions:
- allowed_role_assocs = permission_class.filter_by( action=action.action, library_dataset_dataset_association_id=library_item.id ).all()
- for allowed_role_assoc in allowed_role_assocs:
- if allowed_role_assoc.role in roles:
- return True
+ # Check to see if user has access to any of the roles associated with action
+ item_action = self.get_item_action( action, item )
+ if item_action is None:
+ # All permissions are granted, so item must have action
return False
- else:
- raise 'Unimplemented model (%s) specified for action (%s)' % ( action.model, action.action )
+ return item_action.role in roles
+ def can_add_library_item( self, user, roles, item ):
+ return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_ADD, item )
+ def can_modify_library_item( self, user, roles, item ):
+ return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_MODIFY, item )
+ def can_manage_library_item( self, user, roles, item ):
+ return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_MANAGE, item )
def get_item_action( self, action, item ):
# item must be one of: Dataset, Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
for permission in item.actions:
- if permission.action == action:
+ if permission.action == action.action:
return permission
return None
def guess_derived_permissions_for_datasets( self, datasets=[] ):
@@ -276,10 +266,7 @@
if [ assoc for assoc in dataset.history_associations if assoc.history not in user.histories ]:
# Don't change permissions on a dataset associated with a history not owned by the user
continue
- if bypass_manage_permission or self.allow_action( user,
- user.all_roles(),
- self.permitted_actions.DATASET_MANAGE_PERMISSIONS,
- dataset=dataset ):
+ if bypass_manage_permission or self.can_manage_dataset( user.all_roles(), dataset ):
self.set_all_dataset_permissions( dataset, permissions )
def history_get_default_permissions( self, history ):
permissions = {}
@@ -291,7 +278,10 @@
permissions[ action ] = [ dhp.role ]
return permissions
def set_all_dataset_permissions( self, dataset, permissions={} ):
- # Set new permissions on a dataset, eliminating all current permissions
+ """
+ Set new permissions on a dataset, eliminating all current permissions
+ permissions looks like: { Action : [ Role, Role ] }
+ """
# Delete all of the current permissions on the dataset
# TODO: If setting ACCESS permission, at least 1 user must have every role associated with this dataset,
# or the dataset is inaccessible. See admin/library_dataset_dataset_association()
@@ -305,7 +295,10 @@
for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]:
dp.flush()
def set_dataset_permission( self, dataset, permission={} ):
- # Set a specific permission on a dataset, leaving all other current permissions on the dataset alone
+ """
+ Set a specific permission on a dataset, leaving all other current permissions on the dataset alone
+ permissions looks like: { Action : [ Role, Role ] }
+ """
# TODO: If setting ACCESS permission, at least 1 user must have every role associated with this dataset,
# or the dataset is inaccessible. See admin/library_dataset_dataset_association()
for action, roles in permission.items():
@@ -331,8 +324,11 @@
dp.delete()
dp.flush()
def get_dataset_permissions( self, dataset ):
- if not isinstance( dataset, self.model.Dataset ):
- dataset = dataset.dataset
+ """
+ Return a dictionary containing the actions and associated roles on dataset.
+ The dictionary looks like: { Action : [ Role, Role ] }
+ dataset must be an instance of Dataset()
+ """
permissions = {}
for dp in dataset.actions:
action = self.get_action( dp.action )
@@ -423,18 +419,29 @@
else:
raise 'Invalid class (%s) specified for target_library_item (%s)' % \
( target_library_item.__class__, target_library_item.__class__.__name__ )
- def show_library_item( self, user, roles, library_item ):
- if self.allow_action( user, roles, self.permitted_actions.LIBRARY_MODIFY, library_item=library_item ) or \
- self.allow_action( user, roles, self.permitted_actions.LIBRARY_MANAGE, library_item=library_item ) or \
- self.allow_action( user, roles, self.permitted_actions.LIBRARY_ADD, library_item=library_item ):
- return True
+ def show_library_item( self, user, roles, library_item, actions_to_check, hidden_folder_ids='' ):
+ """
+ This method must be sent an instance of Library() or LibraryFolder(). Recursive execution produces a
+ comma-separated string of folder ids whose folders do NOT meet the criteria for showing. Along with
+ the string, True is returned if the current user has permission to perform any 1 of actions_to_check
+ on library_item. Otherwise, cycle through all sub-folders in library_item until one is found that meets
+ this criteria, if it exists.
+ """
+ for action in actions_to_check:
+ if self.allow_library_item_action( user, roles, action, library_item ):
+ return True, hidden_folder_ids
if isinstance( library_item, self.model.Library ):
- return self.show_library_item( user, roles, library_item.root_folder )
- elif isinstance( library_item, self.model.LibraryFolder ):
- for folder in library_item.folders:
- if self.show_library_item( user, roles, folder ):
- return True
- return False
+ return self.show_library_item( user, roles, library_item.root_folder, actions_to_check, hidden_folder_ids=hidden_folder_ids )
+ if isinstance( library_item, self.model.LibraryFolder ):
+ for folder in library_item.active_folders:
+ can_show, hidden_folder_ids = self.show_library_item( user, roles, folder, actions_to_check, hidden_folder_ids=hidden_folder_ids )
+ if can_show:
+ return True, hidden_folder_ids
+ if hidden_folder_ids:
+ hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, folder.id )
+ else:
+ hidden_folder_ids = '%d' % folder.id
+ return False, hidden_folder_ids
def set_entity_user_associations( self, users=[], roles=[], groups=[], delete_existing_assocs=True ):
for user in users:
if delete_existing_assocs:
@@ -482,12 +489,14 @@
if 'role' in kwd:
return self.model.GroupRoleAssociation.filter_by( role_id = kwd['role'].id, group_id = kwd['group'].id ).first()
raise 'No valid method of associating provided components: %s' % kwd
- def check_folder_contents( self, user, roles, folder ):
+ def check_folder_contents( self, user, roles, folder, hidden_folder_ids='' ):
"""
- Return true if there are any datasets under 'folder' that are public or that the
- user has access permission on.
+ This method must always be sent an instance of LibraryFolder(). Recursive execution produces a
+ comma-separated string of folder ids whose folders do NOT meet the criteria for showing. Along
+ with the string, True is returned if the current user has permission to access folder. Otherwise,
+ cycle through all sub-folders in folder until one is found that meets this criteria, if it exists.
"""
- action = self.permitted_actions.DATASET_ACCESS.action
+ action = self.permitted_actions.DATASET_ACCESS
lddas = self.sa_session.query( self.model.LibraryDatasetDatasetAssociation ) \
.join( "library_dataset" ) \
.filter( self.model.LibraryDataset.folder == folder ) \
@@ -498,14 +507,19 @@
ldda_access = self.get_item_action( action, ldda.dataset )
if ldda_access is None:
# Dataset is public
- return True
+ return True, hidden_folder_ids
if ldda_access.role in roles:
# The current user has access permission on the dataset
- return True
+ return True, hidden_folder_ids
for sub_folder in folder.active_folders:
- if self.check_folder_contents( user, roles, sub_folder ):
- return True
- return False
+ can_access, hidden_folder_ids = self.check_folder_contents( user, roles, sub_folder, hidden_folder_ids=hidden_folder_ids )
+ if can_access:
+ return True, hidden_folder_ids
+ if hidden_folder_ids:
+ hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, sub_folder.id )
+ else:
+ hidden_folder_ids = '%d' % sub_folder.id
+ return False, hidden_folder_ids
class HostAgent( RBACAgent ):
"""
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/tools/actions/__init__.py Wed Sep 09 14:24:11 2009 -0400
@@ -48,10 +48,7 @@
assoc.flush()
data = new_data
user, roles = trans.get_user_and_roles()
- if data and not trans.app.security_agent.allow_action( user,
- roles,
- data.permitted_actions.DATASET_ACCESS,
- dataset=data.dataset ):
+ if data and not trans.app.security_agent.can_access_dataset( roles, data.dataset ):
raise "User does not have permission to use a dataset (%s) provided for input." % data.id
return data
if isinstance( input, DataToolParameter ):
@@ -267,10 +264,7 @@
user, roles = trans.get_user_and_roles()
for name, dataset in inp_data.iteritems():
if dataset:
- if not trans.app.security_agent.allow_action( user,
- roles,
- dataset.permitted_actions.DATASET_ACCESS,
- dataset=dataset.dataset ):
+ if not trans.app.security_agent.can_access_dataset( roles, dataset.dataset ):
raise "User does not have permission to use a dataset (%s) provided for input." % data.id
job.add_input_dataset( name, dataset )
else:
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Wed Sep 09 14:24:11 2009 -0400
@@ -1149,19 +1149,11 @@
hid = str( hda.hid )
if not hda.dataset.state in [galaxy.model.Dataset.states.ERROR, galaxy.model.Dataset.states.DISCARDED] and \
hda.visible and \
- trans.app.security_agent.allow_action( user,
- roles,
- hda.permitted_actions.DATASET_ACCESS,
- dataset=hda.dataset ):
+ trans.app.security_agent.can_access_dataset( roles, hda.dataset ):
# If we are sending data to an external application, then we need to make sure there are no roles
- # associated with the dataset that restrict it's access from "public". We determine this by sending
- # None as the user to the allow_action method.
- if self.tool and self.tool.tool_type == 'data_destination':
- if not trans.app.security_agent.allow_action( None,
- None,
- hda.permitted_actions.DATASET_ACCESS,
- dataset=hda.dataset ):
- continue
+ # associated with the dataset that restrict it's access from "public".
+ if self.tool and self.tool.tool_type == 'data_destination' and not trans.app.security_agent.dataset_is_public( hda.dataset ):
+ continue
if self.options and hda.get_dbkey() != filter_value:
continue
if isinstance( hda.datatype, self.formats):
@@ -1172,10 +1164,7 @@
if target_ext:
if converted_dataset:
hda = converted_dataset
- if not trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.DATASET_ACCESS,
- dataset=hda.dataset ):
+ if not trans.app.security_agent.can_access_dataset( roles, hda.dataset ):
continue
selected = ( value and ( hda in value ) )
field.add_option( "%s: (as %s) %s" % ( hid, target_ext, hda_name ), hda.id, selected )
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/tools/util/maf_utilities.py
--- a/lib/galaxy/tools/util/maf_utilities.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/tools/util/maf_utilities.py Wed Sep 09 14:24:11 2009 -0400
@@ -7,10 +7,41 @@
import bx.align.maf
import bx.intervals
import bx.interval_index_file
-import sys, os, string, tempfile
+import sys, os, string, tempfile
+import logging
+from copy import deepcopy
assert sys.version_info[:2] >= ( 2, 4 )
-
+
+log = logging.getLogger(__name__)
+
+
+GAP_CHARS = [ '-' ]
+SRC_SPLIT_CHAR = '.'
+
+def src_split( src ):
+ spec, chrom = bx.align.maf.src_split( src )
+ if None in [ spec, chrom ]:
+ spec = chrom = src
+ return spec, chrom
+
+def src_merge( spec, chrom, contig = None ):
+ if None in [ spec, chrom ]:
+ spec = chrom = spec or chrom
+ return bx.align.maf.src_merge( spec, chrom, contig )
+
+def get_species_in_block( block ):
+ species = []
+ for c in block.components:
+ spec, chrom = src_split( c.src )
+ if spec not in species:
+ species.append( spec )
+ return species
+
+def tool_fail( msg = "Unknown Error" ):
+ print >> sys.stderr, "Fatal Error: %s" % msg
+ sys.exit()
+
#an object corresponding to a reference layered alignment
class RegionAlignment( object ):
@@ -153,69 +184,187 @@
except:
return build_maf_index( maf_file, species = species )
+#*** ANYCHANGE TO THIS METHOD HERE OR IN galaxy.datatypes.sequences MUST BE PROPAGATED ***
+def build_maf_index_species_chromosomes( filename, index_species = None ):
+ species = []
+ species_chromosomes = {}
+ indexes = bx.interval_index_file.Indexes()
+ try:
+ maf_reader = bx.align.maf.Reader( open( filename ) )
+ while True:
+ pos = maf_reader.file.tell()
+ block = maf_reader.next()
+ if block is None: break
+ for c in block.components:
+ spec = c.src
+ chrom = None
+ if "." in spec:
+ spec, chrom = spec.split( ".", 1 )
+ if spec not in species:
+ species.append( spec )
+ species_chromosomes[spec] = []
+ if chrom and chrom not in species_chromosomes[spec]:
+ species_chromosomes[spec].append( chrom )
+ if index_species is None or spec in index_species:
+ forward_strand_start = c.forward_strand_start
+ forward_strand_end = c.forward_strand_end
+ try:
+ forward_strand_start = int( forward_strand_start )
+ forward_strand_end = int( forward_strand_end )
+ except ValueError:
+ continue #start and end are not integers, can't add component to index, goto next component
+ #this likely only occurs when parse_e_rows is True?
+ #could a species exist as only e rows? should the
+ if forward_strand_end > forward_strand_start:
+ #require positive length; i.e. certain lines have start = end = 0 and cannot be indexed
+ indexes.add( c.src, forward_strand_start, forward_strand_end, pos, max=c.src_size )
+ except Exception, e:
+ #most likely a bad MAF
+ log.debug( 'Building MAF index on %s failed: %s' % ( filename, e ) )
+ return ( None, [], {} )
+ return ( indexes, species, species_chromosomes )
#builds and returns ( index, index_filename ) for specified maf_file
def build_maf_index( maf_file, species = None ):
- indexes = bx.interval_index_file.Indexes()
- try:
- maf_reader = bx.align.maf.Reader( open( maf_file ) )
- # Need to be a bit tricky in our iteration here to get the 'tells' right
- while True:
- pos = maf_reader.file.tell()
- block = maf_reader.next()
- if block is None: break
- for c in block.components:
- if species is not None and c.src.split( "." )[0] not in species:
- continue
- indexes.add( c.src, c.forward_strand_start, c.forward_strand_end, pos )
+ indexes, found_species, species_chromosomes = build_maf_index_species_chromosomes( maf_file, species )
+ if indexes is not None:
fd, index_filename = tempfile.mkstemp()
out = os.fdopen( fd, 'w' )
indexes.write( out )
out.close()
- return ( bx.align.maf.Indexed( maf_file, index_filename = index_filename, keep_open = True, parse_e_rows = False ), index_filename )
- except:
- return ( None, None )
-
-def chop_block_by_region( block, src, region, species = None, mincols = 0, force_strand = None ):
- ref = block.get_component_by_src( src )
- #We want our block coordinates to be from positive strand
- if ref.strand == "-":
- block = block.reverse_complement()
- ref = block.get_component_by_src( src )
+ return ( bx.align.maf.Indexed( maf_file, index_filename = index_filename, keep_open = True, parse_e_rows = False ), index_filename )
+ return ( None, None )
+
+def component_overlaps_region( c, region ):
+ if c is None: return False
+ start, end = c.get_forward_strand_start(), c.get_forward_strand_end()
+ if region.start >= end or region.end <= start:
+ return False
+ return True
+
+def chop_block_by_region( block, src, region, species = None, mincols = 0 ):
+ # This chopping method was designed to maintain consistency with how start/end padding gaps have been working in Galaxy thus far:
+ # behavior as seen when forcing blocks to be '+' relative to src sequence (ref) and using block.slice_by_component( ref, slice_start, slice_end )
+ # whether-or-not this is the 'correct' behavior is questionable, but this will at least maintain consistency
+ # comments welcome
+ slice_start = block.text_size #max for the min()
+ slice_end = 0 #min for the max()
+ old_score = block.score #save old score for later use
+ # We no longer assume only one occurance of src per block, so we need to check them all
+ for c in iter_components_by_src( block, src ):
+ if component_overlaps_region( c, region ):
+ if c.text is not None:
+ rev_strand = False
+ if c.strand == "-":
+ #We want our coord_to_col coordinates to be returned from positive stranded component
+ rev_strand = True
+ c = c.reverse_complement()
+ start = max( region.start, c.start )
+ end = min( region.end, c.end )
+ start = c.coord_to_col( start )
+ end = c.coord_to_col( end )
+ if rev_strand:
+ #need to orient slice coordinates to the original block direction
+ slice_len = end - start
+ end = len( c.text ) - start
+ start = end - slice_len
+ slice_start = min( start, slice_start )
+ slice_end = max( end, slice_end )
+
+ if slice_start < slice_end:
+ block = block.slice( slice_start, slice_end )
+ if block.text_size > mincols:
+ # restore old score, may not be accurate, but it is better than 0 for everything?
+ block.score = old_score
+ if species is not None:
+ block = block.limit_to_species( species )
+ block.remove_all_gap_columns()
+ return block
+ return None
- #save old score here for later use
- old_score = block.score
- slice_start = max( region.start, ref.start )
- slice_end = min( region.end, ref.end )
-
- #slice block by reference species at determined limits
- block = block.slice_by_component( ref, slice_start, slice_end )
-
- if block.text_size > mincols:
- if ( force_strand is None and region.strand != ref.strand ) or ( force_strand is not None and force_strand != ref.strand ):
- block = block.reverse_complement()
- # restore old score, may not be accurate, but it is better than 0 for everything
- block.score = old_score
- if species is not None:
- block = block.limit_to_species( species )
- block.remove_all_gap_columns()
- return block
- return None
+def orient_block_by_region( block, src, region, force_strand = None ):
+ #loop through components matching src,
+ #make sure each of these components overlap region
+ #cache strand for each of overlaping regions
+ #if force_strand / region.strand not in strand cache, reverse complement
+ ### we could have 2 sequences with same src, overlapping region, on different strands, this would cause no reverse_complementing
+ strands = [ c.strand for c in iter_components_by_src( block, src ) if component_overlaps_region( c, region ) ]
+ if strands and ( force_strand is None and region.strand not in strands ) or ( force_strand is not None and force_strand not in strands ):
+ block = block.reverse_complement()
+ return block
+
+def get_oriented_chopped_blocks_for_region( index, src, region, species = None, mincols = 0, force_strand = None ):
+ for block, idx, offset in get_oriented_chopped_blocks_with_index_offset_for_region( index, src, region, species, mincols, force_strand ):
+ yield block
+def get_oriented_chopped_blocks_with_index_offset_for_region( index, src, region, species = None, mincols = 0, force_strand = None ):
+ for block, idx, offset in get_chopped_blocks_with_index_offset_for_region( index, src, region, species, mincols ):
+ yield orient_block_by_region( block, src, region, force_strand ), idx, offset
+
+#split a block with multiple occurances of src into one block per src
+def iter_blocks_split_by_src( block, src ):
+ for src_c in iter_components_by_src( block, src ):
+ new_block = bx.align.Alignment( score=block.score, attributes=deepcopy( block.attributes ) )
+ new_block.text_size = block.text_size
+ for c in block.components:
+ if c == src_c or c.src != src:
+ new_block.add_component( deepcopy( c ) ) #components have reference to alignment, dont want to loose reference to original alignment block in original components
+ yield new_block
+
+#split a block into multiple blocks with all combinations of a species appearing only once per block
+def iter_blocks_split_by_species( block, species = None ):
+ def __split_components_by_species( components_by_species, new_block ):
+ if components_by_species:
+ #more species with components to add to this block
+ components_by_species = deepcopy( components_by_species )
+ spec_comps = components_by_species.pop( 0 )
+ for c in spec_comps:
+ newer_block = deepcopy( new_block )
+ newer_block.add_component( deepcopy( c ) )
+ for value in __split_components_by_species( components_by_species, newer_block ):
+ yield value
+ else:
+ #no more components to add, yield this block
+ yield new_block
+
+ #divide components by species
+ spec_dict = {}
+ if not species:
+ species = []
+ for c in block.components:
+ spec, chrom = src_split( c.src )
+ if spec not in spec_dict:
+ spec_dict[ spec ] = []
+ species.append( spec )
+ spec_dict[ spec ].append( c )
+ else:
+ for spec in species:
+ spec_dict[ spec ] = []
+ for c in iter_components_by_src_start( block, spec ):
+ spec_dict[ spec ].append( c )
+
+ empty_block = bx.align.Alignment( score=block.score, attributes=deepcopy( block.attributes ) ) #should we copy attributes?
+ empty_block.text_size = block.text_size
+ #call recursive function to split into each combo of spec/blocks
+ for value in __split_components_by_species( spec_dict.values(), empty_block ):
+ sort_block_components_by_block( value, block ) #restore original component order
+ yield value
+
+
#generator yielding only chopped and valid blocks for a specified region
-def get_chopped_blocks_for_region( index, src, region, species = None, mincols = 0, force_strand = None ):
- for block, idx, offset in get_chopped_blocks_with_index_offset_for_region( index, src, region, species, mincols, force_strand ):
+def get_chopped_blocks_for_region( index, src, region, species = None, mincols = 0 ):
+ for block, idx, offset in get_chopped_blocks_with_index_offset_for_region( index, src, region, species, mincols ):
yield block
-def get_chopped_blocks_with_index_offset_for_region( index, src, region, species = None, mincols = 0, force_strand = None ):
+def get_chopped_blocks_with_index_offset_for_region( index, src, region, species = None, mincols = 0 ):
for block, idx, offset in index.get_as_iterator_with_index_and_offset( src, region.start, region.end ):
- block = chop_block_by_region( block, src, region, species, mincols, force_strand )
+ block = chop_block_by_region( block, src, region, species, mincols )
if block is not None:
yield block, idx, offset
#returns a filled region alignment for specified regions
-def get_region_alignment( index, primary_species, chrom, start, end, strand = '+', species = None, mincols = 0 ):
+def get_region_alignment( index, primary_species, chrom, start, end, strand = '+', species = None, mincols = 0, overwrite_with_gaps = True ):
if species is not None: alignment = RegionAlignment( end - start, species )
else: alignment = RegionAlignment( end - start, primary_species )
- return fill_region_alignment( alignment, index, primary_species, chrom, start, end, strand, species, mincols )
+ return fill_region_alignment( alignment, index, primary_species, chrom, start, end, strand, species, mincols, overwrite_with_gaps )
#reduces a block to only positions exisiting in the src provided
def reduce_block_by_primary_genome( block, species, chromosome, region_start ):
@@ -237,13 +386,11 @@
return ( start_offset, species_texts )
#fills a region alignment
-def fill_region_alignment( alignment, index, primary_species, chrom, start, end, strand = '+', species = None, mincols = 0 ):
+def fill_region_alignment( alignment, index, primary_species, chrom, start, end, strand = '+', species = None, mincols = 0, overwrite_with_gaps = True ):
region = bx.intervals.Interval( start, end )
region.chrom = chrom
region.strand = strand
primary_src = "%s.%s" % ( primary_species, chrom )
-
-
#Order blocks overlaping this position by score, lowest first
blocks = []
@@ -255,28 +402,40 @@
break
else:
blocks.append( ( score, idx, offset ) )
-
+
+ gap_chars_tuple = tuple( GAP_CHARS )
+ gap_chars_str = ''.join( GAP_CHARS )
#Loop through ordered blocks and layer by increasing score
- for block_dict in blocks:
- block = chop_block_by_region( block_dict[1].get_at_offset( block_dict[2] ), primary_src, region, species, mincols, strand )
- if block is None: continue
- start_offset, species_texts = reduce_block_by_primary_genome( block, primary_species, chrom, start )
- for spec, text in species_texts.items():
- try:
- alignment.set_range( start_offset, spec, text )
- except:
- #species/sequence for species does not exist
- pass
-
+ for block_dict in blocks:
for block in iter_blocks_split_by_species( block_dict[1].get_at_offset( block_dict[2] ) ): #need to handle each occurance of sequence in block seperately
+ if component_overlaps_region( block.get_component_by_src( primary_src ), region ):
+ block = chop_block_by_region( block, primary_src, region, species, mincols ) #chop block
+ block = orient_block_by_region( block, primary_src, region ) #orient block
+ start_offset, species_texts = reduce_block_by_primary_genome( block, primary_species, chrom, start )
+ for spec, text in species_texts.items():
+ #we should trim gaps from both sides, since these are not positions in this species genome (sequence)
+ text = text.rstrip( gap_chars_str )
+ gap_offset = 0
+ while text.startswith( gap_chars_tuple ):
+ gap_offset += 1
+ text = text[1:]
+ if not text:
+ break
+ if text:
+ if overwrite_with_gaps:
+ alignment.set_range( start_offset + gap_offset, spec, text )
+ else:
+ for i, char in enumerate( text ):
+ if char not in GAP_CHARS:
+ alignment.set_position( start_offset + gap_offset + i, spec, char )
return alignment
#returns a filled spliced region alignment for specified region with start and end lists
-def get_spliced_region_alignment( index, primary_species, chrom, starts, ends, strand = '+', species = None, mincols = 0 ):
+def get_spliced_region_alignment( index, primary_species, chrom, starts, ends, strand = '+', species = None, mincols = 0, overwrite_with_gaps = True ):
#create spliced alignment object
if species is not None: alignment = SplicedAlignment( starts, ends, species )
else: alignment = SplicedAlignment( starts, ends, [primary_species] )
for exon in alignment.exons:
- fill_region_alignment( exon, index, primary_species, chrom, exon.start, exon.end, strand, species, mincols)
+ fill_region_alignment( exon, index, primary_species, chrom, exon.start, exon.end, strand, species, mincols, overwrite_with_gaps )
return alignment
#loop through string array, only return non-commented lines
@@ -319,29 +478,36 @@
starts.append( start )
ends.append( end )
return ( starts, ends, fields )
-
-def get_species_in_maf( maf_filename ):
- try:
- species={}
-
- file_in = open( maf_filename, 'r' )
- maf_reader = maf.Reader( file_in )
-
- for i, m in enumerate( maf_reader ):
- l = m.components
- for c in l:
- spec, chrom = maf.src_split( c.src )
- if not spec or not chrom:
- spec = chrom = c.src
- species[spec] = spec
-
- file_in.close()
-
- species = species.keys()
- species.sort()
- return species
- except:
- return []
+
+def iter_components_by_src( block, src ):
+ for c in block.components:
+ if c.src == src:
+ yield c
+
+def get_components_by_src( block, src ):
+ return [ value for value in iter_components_by_src( block, src ) ]
+
+def iter_components_by_src_start( block, src ):
+ for c in block.components:
+ if c.src.startswith( src ):
+ yield c
+
+def get_components_by_src_start( block, src ):
+ return [ value for value in iter_components_by_src_start( block, src ) ]
+
+def sort_block_components_by_block( block1, block2 ):
+ #orders the components in block1 by the index of the component in block2
+ #block1 must be a subset of block2
+ #occurs in-place
+ return block1.components.sort( cmp = lambda x, y: block2.components.index( x ) - block2.components.index( y ) )
+
+def get_species_in_maf( maf_filename ):
+ species = []
+ for block in maf.Reader( open( maf_filename ) ):
+ for spec in get_species_in_block( block ):
+ if spec not in species:
+ species.append( spec )
+ return species
def parse_species_option( species ):
if species:
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/web/controllers/admin.py Wed Sep 09 14:24:11 2009 -0400
@@ -2001,7 +2001,6 @@
show_deleted=False,
msg=msg,
messagetype=messagetype )
-
def _save_request_type(self, trans, **kwd):
params = util.Params( kwd )
num_states = int( util.restore_text( params.get( 'num_states', 0 ) ))
@@ -2031,7 +2030,6 @@
ss.flush()
msg = "The new request type named '%s' with %s state(s) has been created" % (rt.name, num_states)
return rt, msg
-
@web.expose
@web.require_admin
def delete_request_type( self, trans, **kwd ):
@@ -2045,7 +2043,6 @@
action='manage_request_types',
msg='Request type <b>%s</b> has been deleted' % rt.name,
messagetype='done') )
-
@web.expose
@web.require_admin
def undelete_request_type( self, trans, **kwd ):
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Wed Sep 09 14:24:11 2009 -0400
@@ -109,10 +109,7 @@
if not data:
raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
user, roles = trans.get_user_and_roles()
- if trans.app.security_agent.allow_action( user,
- roles,
- data.permitted_actions.DATASET_ACCESS,
- dataset=data.dataset ):
+ if trans.app.security_agent.can_access_dataset( roles, data.dataset ):
if data.state == trans.model.Dataset.states.UPLOAD:
return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to view it." )
if filename is None or filename.lower() == "index":
@@ -147,12 +144,9 @@
return trans.show_error_message( 'Invalid parameters specified for "display at" link, please contact a Galaxy administrator' )
redirect_url = kwd['redirect_url'] % urllib.quote_plus( kwd['display_url'] )
user, roles = trans.get_user_and_roles()
- if trans.app.security_agent.allow_action( None, None, data.permitted_actions.DATASET_ACCESS, dataset=data.dataset ):
+ if trans.app.security_agent.dataset_is_public( data.dataset ):
return trans.response.send_redirect( redirect_url ) # anon access already permitted by rbac
- if trans.app.security_agent.allow_action( user,
- roles,
- data.permitted_actions.DATASET_ACCESS,
- dataset=data.dataset ):
+ if trans.app.security_agent.can_access_dataset( roles, data.dataset ):
trans.app.host_security_agent.set_dataset_permissions( data, trans.user, site )
return trans.response.send_redirect( redirect_url )
else:
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Wed Sep 09 14:24:11 2009 -0400
@@ -443,15 +443,9 @@
for hda in history.activatable_datasets:
# If the current dataset is not public, we may need to perform an action on it to
# make it accessible by the other user.
- if not trans.app.security_agent.allow_action( send_to_user,
- send_to_user.all_roles(),
- trans.app.security_agent.permitted_actions.DATASET_ACCESS,
- dataset=hda.dataset ):
+ if not trans.app.security_agent.can_access_dataset( send_to_user.all_roles(), hda.dataset ):
# The user with which we are sharing the history does not have access permission on the current dataset
- if trans.app.security_agent.allow_action( user,
- user_roles,
- trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS,
- dataset=hda.dataset ) and not hda.dataset.library_associations:
+ if trans.app.security_agent.can_manage_dataset( user_roles, hda.dataset ) and not hda.dataset.library_associations:
# The current user has authority to change permissions on the current dataset because
# they have permission to manage permissions on the dataset and the dataset is not associated
# with a library.
@@ -556,15 +550,9 @@
no_change_needed[ send_to_user ][ history ] = [ hda ]
else:
no_change_needed[ send_to_user ][ history ].append( hda )
- elif not trans.app.security_agent.allow_action( send_to_user,
- send_to_user.all_roles(),
- trans.app.security_agent.permitted_actions.DATASET_ACCESS,
- dataset=hda.dataset ):
+ elif not trans.app.security_agent.can_access_dataset( send_to_user.all_roles(), hda.dataset ):
# The user with which we are sharing the history does not have access permission on the current dataset
- if trans.app.security_agent.allow_action( user,
- user_roles,
- trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS,
- dataset=hda.dataset ) and not hda.dataset.library_associations:
+ if trans.app.security_agent.can_manage_dataset( user_roles, hda.dataset ) and not hda.dataset.library_associations:
# The current user has authority to change permissions on the current dataset because
# they have permission to manage permissions on the dataset and the dataset is not associated
# with a library.
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/web/controllers/library.py Wed Sep 09 14:24:11 2009 -0400
@@ -2,6 +2,7 @@
from galaxy.model.orm import *
from galaxy.datatypes import sniff
from galaxy import util
+from galaxy.util.odict import odict
from galaxy.web.controllers.forms import get_all_forms, get_form_widgets
from galaxy.util.streamball import StreamBall
import logging, tempfile, zipfile, tarfile, os, sys
@@ -65,23 +66,24 @@
user, roles = trans.get_user_and_roles()
all_libraries = trans.app.model.Library.filter( trans.app.model.Library.table.c.deleted==False ) \
.order_by( trans.app.model.Library.name ).all()
- authorized_libraries = []
+ library_actions = [ trans.app.security_agent.permitted_actions.LIBRARY_ADD,
+ trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
+ trans.app.security_agent.permitted_actions.LIBRARY_MANAGE ]
+ # The authorized_libraries dictionary looks like: { library : '1,2' }, library : '3' }
+ # Its keys are the libraries that should be displayed for the current user and whose values are a
+ # string of comma-separated folder ids, of the associated folders the should NOT be displayed.
+ # The folders that should not be displayed may not be a complete list, but it is ultimately passed
+ # to the browse_library() method and the browse_library.mako template to keep from re-checking the
+ # same folders when the library is rendered.
+ authorized_libraries = odict()
for library in all_libraries:
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_ADD,
- library_item=library ) or \
- trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
- library_item=library ) or \
- trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MANAGE,
- library_item=library ) or \
- trans.app.security_agent.check_folder_contents( user, roles, library.root_folder ) or \
- trans.app.security_agent.show_library_item( user, roles, library ):
- authorized_libraries.append( library )
+ can_access, hidden_folder_ids = trans.app.security_agent.check_folder_contents( user, roles, library.root_folder )
+ if can_access:
+ authorized_libraries[ library ] = hidden_folder_ids
+ else:
+ can_show, hidden_folder_ids = trans.app.security_agent.show_library_item( user, roles, library, library_actions )
+ if can_show:
+ authorized_libraries[ library ] = hidden_folder_ids
return trans.fill_template( '/library/browse_libraries.mako',
libraries=authorized_libraries,
default_action=params.get( 'default_action', None ),
@@ -94,6 +96,7 @@
messagetype = params.get( 'messagetype', 'done' )
id = params.get( 'id', None )
if not id:
+ # To handle bots
msg = "You must specify a library id."
return trans.response.send_redirect( web.url_for( controller='library',
action='browse_libraries',
@@ -102,6 +105,7 @@
messagetype='error' ) )
library = library=trans.app.model.Library.get( id )
if not library:
+ # To handle bots
msg = "Invalid library id ( %s )."
return trans.response.send_redirect( web.url_for( controller='library',
action='browse_libraries',
@@ -109,9 +113,11 @@
msg=util.sanitize_text( msg ),
messagetype='error' ) )
created_ldda_ids = params.get( 'created_ldda_ids', '' )
+ hidden_folder_ids = util.listify( util.restore_text( params.get( 'hidden_folder_ids', '' ) ) )
return trans.fill_template( '/library/browse_library.mako',
library=trans.app.model.Library.get( id ),
created_ldda_ids=created_ldda_ids,
+ hidden_folder_ids=hidden_folder_ids,
default_action=params.get( 'default_action', None ),
comptypes=comptypes,
msg=msg,
@@ -278,10 +284,7 @@
user, roles = trans.get_user_and_roles()
for id in ldda_ids:
ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( id )
- if not ldda or not trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.DATASET_ACCESS,
- dataset = ldda.dataset ):
+ if not ldda or not trans.app.security_agent.can_access_dataset( roles, ldda.dataset ):
continue
path = ""
parent_folder = ldda.library_dataset.folder
@@ -379,10 +382,7 @@
user, roles = trans.get_user_and_roles()
if action == 'information':
if params.get( 'edit_attributes_button', False ):
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
- library_item=library_dataset ):
+ if trans.app.security_agent.can_modify_library_item( user, roles, library_dataset ):
if params.get( 'edit_attributes_button', False ):
old_name = library_dataset.name
new_name = util.restore_text( params.get( 'name', '' ) )
@@ -406,10 +406,7 @@
messagetype=messagetype )
elif action == 'permissions':
if params.get( 'update_roles_button', False ):
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MANAGE,
- library_item=library_dataset ):
+ if trans.app.security_agent.can_manage_library_item( user, roles, library_dataset ):
# The user clicked the Save button on the 'Associate With Roles' form
permissions = {}
for k, v in trans.app.model.Library.permitted_actions.items():
@@ -496,14 +493,8 @@
if action == 'permissions':
if params.get( 'update_roles_button', False ):
# The user clicked the Save button on the 'Associate With Roles' form
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MANAGE,
- library_item=ldda ) and \
- trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS,
- dataset=ldda.dataset ):
+ if trans.app.security_agent.can_manage_library_item( user, roles, ldda ) and \
+ trans.app.security_agent.can_manage_dataset( roles, ldda.dataset ):
permissions = {}
for k, v in trans.app.model.Dataset.permitted_actions.items():
in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ]
@@ -542,10 +533,7 @@
elif action == 'edit_info':
if params.get( 'change', False ):
# The user clicked the Save button on the 'Change data type' form
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
- library_item=ldda ):
+ if trans.app.security_agent.can_modify_library_item( user, roles, ldda ):
if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
trans.app.model.flush()
@@ -566,10 +554,7 @@
messagetype=messagetype )
elif params.get( 'save', False ):
# The user clicked the Save button on the 'Edit Attributes' form
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
- library_item=ldda ):
+ if trans.app.security_agent.can_modify_library_item( user, roles, ldda ):
old_name = ldda.name
new_name = util.restore_text( params.get( 'name', '' ) )
new_info = util.restore_text( params.get( 'info', '' ) )
@@ -608,10 +593,7 @@
messagetype=messagetype )
elif params.get( 'detect', False ):
# The user clicked the Auto-detect button on the 'Edit Attributes' form
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
- library_item=ldda ):
+ if trans.app.security_agent.can_modify_library_item( user, roles, ldda ):
for name, spec in ldda.datatype.metadata_spec.items():
# We need to be careful about the attributes we are resetting
if name not in [ 'name', 'info', 'dbkey' ]:
@@ -633,10 +615,7 @@
msg=msg,
messagetype=messagetype )
elif params.get( 'delete', False ):
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
- library_item=folder ):
+ if trans.app.security_agent.can_modify_library_item( user, roles, folder ):
ldda.deleted = True
ldda.flush()
msg = 'Dataset %s has been removed from this library' % ldda.name
@@ -651,10 +630,7 @@
widgets=widgets,
msg=msg,
messagetype=messagetype )
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
- library_item=ldda ):
+ if trans.app.security_agent.can_modify_library_item( user, roles, ldda ):
ldda.datatype.before_edit( ldda )
if "dbkey" in ldda.datatype.metadata_spec and not ldda.metadata.dbkey:
# Copy dbkey into metadata, for backwards compatability
@@ -692,14 +668,8 @@
messagetype='error' ) )
if action == 'permissions':
if params.get( 'update_roles_button', False ):
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MANAGE,
- library_item=ldda ) and \
- trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS,
- dataset=ldda.dataset ):
+ if trans.app.security_agent.can_manage_library_item( user, roles, ldda ) and \
+ trans.app.security_agent.can_manage_dataset( roles, ldda.dataset ):
permissions = {}
for k, v in trans.app.model.Dataset.permitted_actions.items():
in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ]
@@ -730,14 +700,8 @@
library_id=library_id,
msg=msg,
messagetype=messagetype )
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MANAGE,
- library_item=ldda ) and \
- trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS,
- dataset=ldda.dataset ):
+ if trans.app.security_agent.can_manage_library_item( user, roles, ldda ) and \
+ trans.app.security_agent.can_manage_dataset( roles, ldda.dataset ):
# Ensure that the permissions across all library items are identical, otherwise we can't update them together.
check_list = []
for ldda in lddas:
@@ -769,14 +733,8 @@
library_id=library_id,
msg=msg,
messagetype=messagetype )
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_ADD,
- library_item=folder ) or \
- ( replace_dataset and trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
- library_item=replace_dataset ) ):
+ if trans.app.security_agent.can_add_library_item( user, roles, folder ) or \
+ ( replace_dataset and trans.app.security_agent.can_modify_library_item( user, roles, replace_dataset ) ):
if params.get( 'new_dataset_button', False ):
upload_option = params.get( 'upload_option', 'upload_file' )
created_ldda_ids = trans.webapp.controllers[ 'library_dataset' ].upload_dataset( trans,
@@ -799,10 +757,7 @@
# Since permissions on all LibraryDatasetDatasetAssociations must be the same at this point, we only need
# to check one of them to see if the current user can manage permissions on them.
check_ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id_list[0] )
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MANAGE,
- library_item=check_ldda ):
+ if trans.app.security_agent.can_manage_library_item( user, roles, check_ldda ):
if replace_dataset:
default_action = ''
else:
@@ -924,10 +879,7 @@
# to check one of them to see if the current user can manage permissions on them.
check_ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id_list[0] )
user, roles = trans.get_user_and_roles()
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MANAGE,
- library_item=check_ldda ):
+ if trans.app.security_agent.can_manage_library_item( user, roles, check_ldda ):
if replace_dataset:
default_action = ''
else:
@@ -1028,10 +980,7 @@
else:
widgets = []
if params.get( 'rename_folder_button', False ):
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
- library_item=folder ):
+ if trans.app.security_agent.can_modify_library_item( user, roles, folder ):
old_name = folder.name
new_name = util.restore_text( params.name )
new_description = util.restore_text( params.description )
@@ -1072,10 +1021,7 @@
elif action == 'permissions':
if params.get( 'update_roles_button', False ):
# The user clicked the Save button on the 'Associate With Roles' form
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MANAGE,
- library_item=folder ):
+ if trans.app.security_agent.can_manage_library_item( user, roles, folder ):
permissions = {}
for k, v in trans.app.model.Library.permitted_actions.items():
in_roles = [ trans.app.model.Role.get( int( x ) ) for x in util.listify( params.get( k + '_in', [] ) ) ]
@@ -1198,24 +1144,40 @@
msg=util.sanitize_text( msg ),
messagetype='done' ) )
-def get_authorized_libs( trans, user ):
- # TODO: this is a mis-named function - the name should reflect the authorization policy
- # If user is not authenticated, this method should not even be called. Also, it looks
- # like all that is using this is the new request stuff, so it should be placed there.
- if not user:
- return []
- roles = user.all_roles()
- all_libraries = trans.app.model.Library.filter( trans.app.model.Library.table.c.deleted == False ) \
- .order_by( trans.app.model.Library.name ).all()
- authorized_libraries = []
- for library in all_libraries:
- if trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_ADD,
- library_item=library ) \
- or trans.app.security_agent.allow_action( user,
- roles,
- trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
- library_item=library ):
- authorized_libraries.append( library )
- return authorized_libraries
+# ---- Utility methods -------------------------------------------------------
+
+def active_folders( trans, folder ):
+ # Much faster way of retrieving all active sub-folders within a given folder than the
+ # performance of the mapper. This query also eagerloads the permissions on each folder.
+ return trans.sa_session.query( trans.app.model.LibraryFolder ) \
+ .filter_by( parent=folder, deleted=False ) \
+ .options( eagerload_all( "actions" ) ) \
+ .order_by( trans.app.model.LibraryFolder.table.c.name ) \
+ .all()
+def activatable_folders( trans, folder ):
+ return trans.sa_session.query( trans.app.model.LibraryFolder ) \
+ .filter_by( parent=folder, purged=False ) \
+ .options( eagerload_all( "actions" ) ) \
+ .order_by( trans.app.model.LibraryFolder.table.c.name ) \
+ .all()
+def active_folders_and_lddas( trans, folder ):
+ folders = active_folders( trans, folder )
+ # This query is much faster than the folder.active_library_datasets property
+ lddas = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ) \
+ .filter_by( deleted=False ) \
+ .join( "library_dataset" ) \
+ .filter( trans.app.model.LibraryDataset.table.c.folder_id==folder.id ) \
+ .order_by( trans.app.model.LibraryDatasetDatasetAssociation.table.c.name ) \
+ .all()
+ return folders, lddas
+def activatable_folders_and_lddas( trans, folder ):
+ folders = activatable_folders( trans, folder )
+ # This query is much faster than the folder.activatable_library_datasets property
+ lddas = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ) \
+ .join( "library_dataset" ) \
+ .filter( trans.app.model.LibraryDataset.table.c.folder_id==folder.id ) \
+ .join( "dataset" ) \
+ .filter( trans.app.model.Dataset.table.c.deleted==False ) \
+ .order_by( trans.app.model.LibraryDatasetDatasetAssociation.table.c.name ) \
+ .all()
+ return folders, lddas
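
The utility functions just added at the bottom of library.py are imported directly by the browse
templates further down in this changeset. A short usage sketch; only the helper names and the
( folders, lddas ) return shape come from the code above, the wrapper function itself is illustrative:

from galaxy.web.controllers.library import active_folders_and_lddas

def folder_contents_names( trans, folder ):
    # Each returned folder has its permission actions eager-loaded, so the per-folder
    # security checks performed while rendering do not issue one extra query each.
    sub_folders, lddas = active_folders_and_lddas( trans, folder )
    return [ f.name for f in sub_folders ], [ d.name for d in lddas ]
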
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/web/controllers/requests.py Wed Sep 09 14:24:11 2009 -0400
@@ -4,13 +4,12 @@
from galaxy.datatypes import sniff
from galaxy import util
from galaxy.util.streamball import StreamBall
+from galaxy.util.odict import odict
import logging, tempfile, zipfile, tarfile, os, sys
from galaxy.web.form_builder import *
from datetime import datetime, timedelta
from cgi import escape, FieldStorage
from galaxy.web.controllers.forms import get_form_widgets
-from galaxy.web.controllers.library import get_authorized_libs
-
log = logging.getLogger( __name__ )
@@ -432,8 +431,6 @@
return self.__show_request_form(trans, **kwd)
elif params.get('refresh', False) == 'true':
return self.__show_request_form(trans, **kwd)
-
-
def __show_request_form(self, trans, **kwd):
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
@@ -460,7 +457,25 @@
helptext='(Optional)'))
# libraries selectbox
- libraries = get_authorized_libs(trans, trans.user)
+ all_libraries = trans.app.model.Library.filter( trans.app.model.Library.table.c.deleted == False ) \
+ .order_by( trans.app.model.Library.name ).all()
+ user, roles = trans.get_user_and_roles()
+ actions_to_check = [ trans.app.security_agent.permitted_actions.LIBRARY_ADD ]
+ # The libraries dictionary looks like: { library1 : '1,2', library2 : '3' }
+ # Its keys are the libraries that should be displayed for the current user, and its values are
+ # strings of comma-separated folder ids for the associated folders that should NOT be displayed.
+ # The folders that should not be displayed may not be a complete list, but it is ultimately passed
+ # to the calling method to keep from re-checking the same folders when the library / folder
+ # select lists are rendered.
+ #
+ # TODO: RC, when you add the folders select list to your request form, take advantage of the hidden_folder_ids
+ # so that you do not need to check those same folders yet again when populating the select list.
+ #
+ libraries = odict()
+ for library in all_libraries:
+ can_show, hidden_folder_ids = trans.app.security_agent.show_library_item( user, roles, library, actions_to_check )
+ if can_show:
+ libraries[ library ] = hidden_folder_ids
libui = self.__library_ui(libraries, **kwd)
widgets = widgets + libui
widgets = widgets + get_form_widgets(trans, request_type.request_form, contents=[], **kwd)
@@ -470,12 +485,10 @@
widgets=widgets,
msg=msg,
messagetype=messagetype)
-
def __library_ui(self, libraries, request=None, **kwd):
params = util.Params( kwd )
lib_id = params.get( 'library_id', 'none' )
- lib_list = SelectField('library_id', refresh_on_change=True,
- refresh_on_change_values=['new'])
+ lib_list = SelectField( 'library_id', refresh_on_change=True, refresh_on_change_values=['new'] )
if request and lib_id == 'none':
if request.library:
lib_id = str(request.library.id)
@@ -483,7 +496,7 @@
lib_list.add_option('Select one', 'none', selected=True)
else:
lib_list.add_option('Select one', 'none')
- for lib in libraries:
+ for lib, hidden_folder_ids in libraries.items():
if str(lib.id) == lib_id:
lib_list.add_option(lib.name, lib.id, selected=True)
else:
@@ -653,9 +666,27 @@
widgets.append(dict(label='Description',
widget=TextField('desc', 40, desc),
helptext='(Optional)'))
-
# libraries selectbox
- libraries = get_authorized_libs(trans, trans.user)
+ all_libraries = trans.app.model.Library.filter( trans.app.model.Library.table.c.deleted == False ) \
+ .order_by( trans.app.model.Library.name ).all()
+ user, roles = trans.get_user_and_roles()
+ actions_to_check = [ trans.app.security_agent.permitted_actions.LIBRARY_ADD ]
+ # The libraries dictionary looks like:
+ # { library1 : '1,2', library2 : '3' }
+ # Its keys are the libraries that should be displayed for the current user, and its values are
+ # strings of comma-separated folder ids for the associated folders that should NOT be displayed.
+ # The folders that should not be displayed may not be a complete list, but it is ultimately passed
+ # to the calling method to keep from re-checking the same folders when the library / folder
+ # select lists are rendered.
+ #
+ # TODO: RC, when you add the folders select list to your request form, take advantage of the hidden_folder_ids
+ # so that you do not need to check those same folders yet again when populating the select list.
+ #
+ libraries = {}
+ for library in all_libraries:
+ can_show, hidden_folder_ids = trans.app.security_agent.show_library_item( user, roles, library, actions_to_check )
+ if can_show:
+ libraries[ library ] = hidden_folder_ids
libui = self.__library_ui(libraries, request, **kwd)
widgets = widgets + libui
widgets = widgets + get_form_widgets(trans, request.type.request_form, request.values.content, **kwd)
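
The library-to-hidden_folder_ids bookkeeping above is repeated almost verbatim in library.py,
requests.py and, below, requests_admin.py. A minimal sketch of how it could be pulled into a single
helper; this is a hypothetical refactor rather than part of the changeset, and the
( can_show, hidden_folder_ids ) return shape is assumed from the call sites:

from galaxy.util.odict import odict

def get_showable_libraries( trans, user, roles, actions_to_check ):
    # Maps each library the user may see to the comma-separated ids of folders the
    # caller does not need to re-check, e.g. { library : '3,7' }.
    libraries = odict()
    all_libraries = trans.app.model.Library.filter( trans.app.model.Library.table.c.deleted == False ) \
                                           .order_by( trans.app.model.Library.name ).all()
    for library in all_libraries:
        can_show, hidden_folder_ids = trans.app.security_agent.show_library_item( user, roles, library, actions_to_check )
        if can_show:
            libraries[ library ] = hidden_folder_ids
    return libraries
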
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Wed Sep 09 14:24:11 2009 -0400
@@ -8,7 +8,6 @@
from galaxy.web.form_builder import *
from datetime import datetime, timedelta
from galaxy.web.controllers.forms import get_form_widgets
-from galaxy.web.controllers.library import get_authorized_libs
log = logging.getLogger( __name__ )
@@ -709,14 +708,32 @@
return select_user
def __library_ui(self, trans, user, request=None, **kwd):
+ """
+ Return a list of libraries for which the user has permission
+ to perform the LIBRARY_ADD action on any of its folders.
+ """
params = util.Params( kwd )
lib_id = params.get( 'library_id', 'none' )
- if not user:
- libraries = trans.app.model.Library.filter(trans.app.model.Library.table.c.deleted == False).order_by(trans.app.model.Library.name).all()
- else:
- libraries = get_authorized_libs(trans, user)
- lib_list = SelectField('library_id', refresh_on_change=True,
- refresh_on_change_values=['new'])
+ all_libraries = trans.app.model.Library.filter( trans.app.model.Library.table.c.deleted == False ) \
+ .order_by( trans.app.model.Library.name ).all()
+ roles = user.all_roles()
+ actions_to_check = [ trans.app.security_agent.permitted_actions.LIBRARY_ADD ]
+ # The libraries dictionary looks like: { library1 : '1,2', library2 : '3' }
+ # Its keys are the libraries that should be displayed for the current user, and its values are
+ # strings of comma-separated folder ids for the associated folders that should NOT be displayed.
+ # The folders that should not be displayed may not be a complete list, but it is ultimately passed
+ # to the calling method to keep from re-checking the same folders when the library / folder
+ # select lists are rendered.
+ #
+ # TODO: RC, when you add the folders select list to your request form, take advantage of the hidden_folder_ids
+ # so that you do not need to check those same folders yet again when populating the select list.
+ #
+ libraries = {}
+ for library in all_libraries:
+ can_show, hidden_folder_ids = trans.app.security_agent.show_library_item( user, roles, library, actions_to_check )
+ if can_show:
+ libraries[ library ] = hidden_folder_ids
+ lib_list = SelectField( 'library_id', refresh_on_change=True, refresh_on_change_values=['new'] )
if request and lib_id == 'none':
if request.library:
lib_id = str(request.library.id)
@@ -724,7 +741,7 @@
lib_list.add_option('Select one', 'none', selected=True)
else:
lib_list.add_option('Select one', 'none')
- for lib in libraries:
+ for lib, hidden_folder_ids in libraries.items():
if str(lib.id) == lib_id:
lib_list.add_option(lib.name, lib.id, selected=True)
else:
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/web/controllers/root.py Wed Sep 09 14:24:11 2009 -0400
@@ -153,10 +153,7 @@
return "Dataset id '%s' is invalid" %str( id )
if data:
user, roles = trans.get_user_and_roles()
- if trans.app.security_agent.allow_action( user,
- roles,
- data.permitted_actions.DATASET_ACCESS,
- dataset = data.dataset ):
+ if trans.app.security_agent.can_access_dataset( roles, data.dataset ):
mime = trans.app.datatypes_registry.get_mimetype_by_extension( data.extension.lower() )
trans.response.set_content_type(mime)
if tofile:
@@ -189,10 +186,7 @@
child = data.get_child_by_designation( designation )
if child:
user, roles = trans.get_user_and_roles()
- if trans.app.security_agent.allow_action( user,
- roles,
- child.permitted_actions.DATASET_ACCESS,
- dataset = child ):
+ if trans.app.security_agent.can_access_dataset( roles, child ):
return self.display( trans, id=child.id, tofile=tofile, toext=toext )
else:
return "You are not privileged to access this dataset."
@@ -209,10 +203,7 @@
authz_method = kwd['authz_method']
if data:
user, roles = trans.get_user_and_roles()
- if authz_method == 'rbac' and trans.app.security_agent.allow_action( user,
- roles,
- data.permitted_actions.DATASET_ACCESS,
- dataset = data ):
+ if authz_method == 'rbac' and trans.app.security_agent.can_access_dataset( roles, data ):
trans.response.set_content_type( data.get_mime() )
trans.log_event( "Formatted dataset id %s for display at %s" % ( str( id ), display_app ) )
return data.as_display_type( display_app, **kwd )
@@ -262,10 +253,7 @@
if id is not None and data.history.user is not None and data.history.user != trans.user:
return trans.show_error_message( "This instance of a dataset (%s) in a history does not belong to you." % ( data.id ) )
user, roles = trans.get_user_and_roles()
- if trans.app.security_agent.allow_action( user,
- roles,
- data.permitted_actions.DATASET_ACCESS,
- dataset=data.dataset ):
+ if trans.app.security_agent.can_access_dataset( roles, data.dataset ):
if data.state == trans.model.Dataset.states.UPLOAD:
return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." )
params = util.Params( kwd, safe=False )
@@ -331,10 +319,7 @@
elif params.update_roles_button:
if not trans.user:
return trans.show_error_message( "You must be logged in if you want to change permissions." )
- if trans.app.security_agent.allow_action( user,
- roles,
- data.dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS,
- dataset = data.dataset ):
+ if trans.app.security_agent.can_manage_dataset( roles, data.dataset ):
permissions = {}
for k, v in trans.app.model.Dataset.permitted_actions.items():
in_roles = params.get( k + '_in', [] )
diff -r eba44fc830bf -r c3b40f23a0e0 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Tue Sep 08 17:33:38 2009 -0400
+++ b/lib/galaxy/web/framework/__init__.py Wed Sep 09 14:24:11 2009 -0400
@@ -503,7 +503,7 @@
if user:
roles = user.all_roles()
else:
- roles = None
+ roles = []
return user, roles
def user_is_admin( self ):
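
Returning an empty list instead of None for an anonymous user lets every caller treat the roles
uniformly. A small illustration; the check below is a made-up stand-in for the real permission logic:

def roles_intersect( user_roles, required_roles ):
    # With user_roles == [] this is simply False; no "if user_roles is None" guard is needed.
    return any( role in required_roles for role in user_roles )

assert roles_intersect( [], [ 'some role' ] ) is False
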
diff -r eba44fc830bf -r c3b40f23a0e0 static/images/pileup_parser_help1.png
Binary file static/images/pileup_parser_help1.png has changed
diff -r eba44fc830bf -r c3b40f23a0e0 static/images/pileup_parser_help2.png
Binary file static/images/pileup_parser_help2.png has changed
diff -r eba44fc830bf -r c3b40f23a0e0 templates/admin/library/browse_library.mako
--- a/templates/admin/library/browse_library.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/admin/library/browse_library.mako Wed Sep 09 14:24:11 2009 -0400
@@ -1,17 +1,15 @@
<%inherit file="/base.mako"/>
-<%namespace file="common.mako" import="render_dataset" />
<%namespace file="/message.mako" import="render_msg" />
-<% from galaxy import util %>
+<%
+ from time import strftime
+ from galaxy import util
+ from galaxy.web.controllers.library import active_folders_and_lddas, activatable_folders_and_lddas
+%>
<%def name="stylesheets()">
<link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" />
<link href="${h.url_for('/static/style/library.css')}" rel="stylesheet" type="text/css" />
</%def>
-
-<%
-def name_sorted( l ):
- return sorted( l, lambda a, b: cmp( a.name.lower(), b.name.lower() ) )
-%>
<script type="text/javascript">
$( document ).ready( function () {
@@ -72,24 +70,77 @@
}
</script>
-<%def name="render_folder( folder, folder_pad, deleted, show_deleted, created_ldda_ids, library_id )">
+<%def name="render_dataset( ldda, library_dataset, selected, library, folder, deleted, show_deleted )">
<%
- root_folder = not folder.parent
+ ## The received data must always be a LibraryDatasetDatasetAssociation object. The object id passed to methods
+ ## from the drop down menu should be the ldda id to prevent id collision ( which could happen when displaying
+ ## children, which are always lddas ). We also need to make sure we're displaying the latest version of this
+ ## library_dataset, so we display the attributes from the ldda.
+ if ldda.user:
+ uploaded_by = ldda.user.email
+ else:
+ uploaded_by = 'anonymous'
+ if ldda == library_dataset.library_dataset_dataset_association:
+ current_version = True
+ else:
+ current_version = False
+ %>
+ <div class="historyItemWrapper historyItem historyItem-${ldda.state}" id="libraryItem-${ldda.id}">
+ ## Header row for library items (name, state, action buttons)
+ <div class="historyItemTitleBar">
+ <table cellspacing="0" cellpadding="0" border="0" width="100%">
+ <tr>
+ <td width="*">
+ %if selected:
+ <input type="checkbox" name="ldda_ids" value="${ldda.id}" checked/>
+ %else:
+ <input type="checkbox" name="ldda_ids" value="${ldda.id}"/>
+ %endif
+ <span class="libraryItemDeleted-${ldda.deleted}">
+ <a href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=folder.id, id=ldda.id, info=True, deleted=deleted, show_deleted=show_deleted )}"><b>${ldda.name[:50]}</b></a>
+ </span>
+ <a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a>
+ %if not library.deleted and not folder.deleted and not library_dataset.deleted:
+ <div popupmenu="dataset-${ldda.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=folder.id, id=ldda.id, edit_info=True )}">Edit this dataset's information</a>
+ ## We're disabling the ability to add templates at the LDDA and LibraryDataset level, but will leave this here for possible future use
+ ##<a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, library_dataset_id=library_dataset.id, new_template=True )}">Add an information template to this dataset</a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=folder.id, id=ldda.id, permissions=True )}">Edit this dataset's permissions</a>
+ %if current_version:
+ <a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=folder.id, replace_id=library_dataset.id )}">Upload a new version of this dataset</a>
+ %endif
+ %if ldda.has_data:
+ <a class="action-button" href="${h.url_for( controller='admin', action='download_dataset_from_folder', id=ldda.id, library_id=library.id )}">Download this dataset</a>
+ %endif
+ <a class="action-button" confirm="Click OK to delete dataset '${ldda.name}'." href="${h.url_for( controller='admin', action='delete_library_item', library_id=library.id, library_item_id=library_dataset.id, library_item_type='library_dataset' )}">Delete this dataset</a>
+ </div>
+ %elif not library.deleted and not folder.deleted and library_dataset.deleted:
+ <div popupmenu="dataset-${ldda.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_id=library.id, library_item_id=library_dataset.id, library_item_type='library_dataset' )}">Undelete this dataset</a>
+ </div>
+ %endif
+ </td>
+ <td width="300">${ldda.message}</td>
+ <td width="150">${uploaded_by}</td>
+ <td width="60">${ldda.create_time.strftime( "%Y-%m-%d" )}</td>
+ </tr>
+ </table>
+ </div>
+ </div>
+</%def>
+
+<%def name="render_folder( folder, folder_pad, deleted, show_deleted, created_ldda_ids, library_id, root_folder=False )">
+ <%
if root_folder:
pad = folder_pad
+ expander = "/static/images/silk/resultset_bottom.png"
+ folder_img = "/static/images/silk/folder_page.png"
else:
pad = folder_pad + 20
- if folder_pad == 0:
- expander = "/static/images/silk/resultset_bottom.png"
- folder_img = "/static/images/silk/folder_page.png"
- subfolder = False
- else:
expander = "/static/images/silk/resultset_next.png"
folder_img = "/static/images/silk/folder.png"
- subfolder = True
- created_ldda_id_list = util.listify( created_ldda_ids )
- if created_ldda_id_list:
- created_ldda_ids = [ int( ldda_id ) for ldda_id in created_ldda_id_list ]
+ if created_ldda_ids:
+ created_ldda_ids = [ int( ldda_id ) for ldda_id in util.listify( created_ldda_ids ) ]
%>
%if not root_folder:
<li class="folderRow libraryOrFolderRow" style="padding-left: ${pad}px;">
@@ -104,10 +155,6 @@
</div>
%endif
%if not folder.deleted:
- <%
- library_item_ids = {}
- library_item_ids[ 'folder' ] = folder.id
- %>
<div popupmenu="folder-${folder.id}-popup">
<a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder.id )}">Add datasets to this folder</a>
<a class="action-button" href="${h.url_for( controller='admin', action='folder', new=True, id=folder.id, library_id=library_id )}">Create a new sub-folder in this folder</a>
@@ -130,30 +177,31 @@
%endif
</li>
%endif
- %if subfolder:
+ %if pad > 0:
<ul id="subFolder">
%else:
<ul>
%endif
%if show_deleted:
<%
- parent_folders = folder.activatable_folders
- parent_datasets = folder.activatable_library_datasets
+ sub_folders, lddas = activatable_folders_and_lddas( trans, folder )
%>
%else:
<%
- parent_folders = folder.active_folders
- parent_datasets = folder.active_library_datasets
+ sub_folders, lddas = active_folders_and_lddas( trans, folder )
%>
%endif
- %for folder in name_sorted( parent_folders ):
- ${render_folder( folder, pad, deleted, show_deleted, created_ldda_ids, library_id )}
- %endfor
- %for library_dataset in name_sorted( parent_datasets ):
+ %for sub_folder in sub_folders:
+ ${render_folder( sub_folder, pad, deleted, show_deleted, created_ldda_ids, library_id )}
+ %endfor
+ %for ldda in lddas:
<%
- selected = created_ldda_ids and library_dataset.library_dataset_dataset_association.id in created_ldda_ids
+ library_dataset = ldda.library_dataset
+ selected = created_ldda_ids and ldda.id in created_ldda_ids
%>
- <li class="datasetRow" style="padding-left: ${pad + 18}px;">${render_dataset( library_dataset, selected, library, deleted, show_deleted )}</li>
+ <li class="datasetRow" style="padding-left: ${pad + 18}px;">
+ ${render_dataset( ldda, library_dataset, selected, library, folder, deleted, show_deleted )}
+ </li>
%endfor
</ul>
</%def>
@@ -196,10 +244,6 @@
<a id="library-${library.id}-popup" class="popup-arrow" style="display: none;">▼</a>
<div popupmenu="library-${library.id}-popup">
%if not deleted:
- <%
- library_item_ids = {}
- library_item_ids[ 'library' ] = library.id
- %>
<a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, information=True )}">Edit this data library's information</a>
## Editing templates disabled until we determine optimal approach to re-linking library item to new version of form definition
##%if library.info_association:
@@ -228,7 +272,7 @@
</div>
</li>
<ul>
- ${render_folder( library.root_folder, 0, deleted, show_deleted, created_ldda_ids, library.id )}
+ ${render_folder( library.root_folder, 0, deleted, show_deleted, created_ldda_ids, library.id, root_folder=True )}
</ul>
<br/>
</ul>
diff -r eba44fc830bf -r c3b40f23a0e0 templates/admin/library/common.mako
--- a/templates/admin/library/common.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/admin/library/common.mako Wed Sep 09 14:24:11 2009 -0400
@@ -1,68 +1,3 @@
-<% from time import strftime %>
-
-<%def name="render_dataset( library_dataset, selected, library, deleted, show_deleted )">
- <%
- ## The received data must always be a LibraryDataset object, but the object id passed to methods from the drop down menu
- ## should be the underlying ldda id to prevent id collision ( which could happen when displaying children, which are always
- ## lddas ). We also need to make sure we're displaying the latest version of this library_dataset, so we display the attributes
- ## from the ldda.
- ldda = library_dataset.library_dataset_dataset_association
- if ldda.user:
- uploaded_by = ldda.user.email
- else:
- uploaded_by = 'anonymous'
- if ldda == ldda.library_dataset.library_dataset_dataset_association:
- current_version = True
- else:
- current_version = False
- %>
- <div class="historyItemWrapper historyItem historyItem-${ldda.state}" id="libraryItem-${ldda.id}">
- ## Header row for library items (name, state, action buttons)
- <div class="historyItemTitleBar">
- <table cellspacing="0" cellpadding="0" border="0" width="100%">
- <tr>
- <td width="*">
- %if selected:
- <input type="checkbox" name="ldda_ids" value="${ldda.id}" checked/>
- %else:
- <input type="checkbox" name="ldda_ids" value="${ldda.id}"/>
- %endif
- <span class="libraryItemDeleted-${library_dataset.deleted}">
- <a href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, info=True, deleted=deleted, show_deleted=show_deleted )}"><b>${ldda.name[:50]}</b></a>
- </span>
- <a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a>
- %if not library.deleted and not library_dataset.folder.deleted and not library_dataset.deleted:
- <%
- library_item_ids = {}
- library_item_ids[ 'ldda' ] = ldda.id
- %>
- <div popupmenu="dataset-${ldda.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, edit_info=True )}">Edit this dataset's information</a>
- ## We're disabling the ability to add templates at the LDDA and LibraryDataset level, but will leave this here for possible future use
- ##<a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, library_dataset_id=library_dataset.id, new_template=True )}">Add an information template to this dataset</a>
- <a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, permissions=True )}">Edit this dataset's permissions</a>
- %if current_version:
- <a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, replace_id=library_dataset.id )}">Upload a new version of this dataset</a>
- %endif
- %if ldda.has_data:
- <a class="action-button" href="${h.url_for( controller='admin', action='download_dataset_from_folder', id=ldda.id, library_id=library.id )}">Download this dataset</a>
- %endif
- <a class="action-button" confirm="Click OK to delete dataset '${ldda.name}'." href="${h.url_for( controller='admin', action='delete_library_item', library_id=library.id, library_item_id=library_dataset.id, library_item_type='library_dataset' )}">Delete this dataset</a>
- </div>
- %elif not library.deleted and not library_dataset.folder.deleted and library_dataset.deleted:
- <div popupmenu="dataset-${ldda.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_id=library.id, library_item_id=library_dataset.id, library_item_type='library_dataset' )}">Undelete this dataset</a>
- </div>
- %endif
- </td>
- <td width="300">${ldda.message}</td>
- <td width="150">${uploaded_by}</td>
- <td width="60">${ldda.create_time.strftime( "%Y-%m-%d" )}</td>
- </tr>
- </table>
- </div>
- </div>
-</%def>
<%def name="render_template_info( library_item, library_id, widgets, editable=True )">
<%
diff -r eba44fc830bf -r c3b40f23a0e0 templates/admin/library/ldda_info.mako
--- a/templates/admin/library/ldda_info.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/admin/library/ldda_info.mako Wed Sep 09 14:24:11 2009 -0400
@@ -86,26 +86,14 @@
<div class="form-row">
<div>${ldda.blurb}</div>
</div>
- <div class="form-row">
- <div id="info${ldda.id}" class="historyItemBody">
- %if ldda.peek != "no peek":
+ %if ldda.peek != "no peek":
+ <div class="form-row">
+ <div id="info${ldda.id}" class="historyItemBody">
<label>Peek:</label>
<div><pre id="peek${ldda.id}" class="peek">${ldda.display_peek()}</pre></div>
- %endif
- ## Recurse for child datasets
- %if len( ldda.visible_children ) > 0:
- <div>
- There are ${len( ldda.visible_children )} secondary datasets.
- %for idx, child in enumerate( ldda.visible_children ):
- ## TODO: do we need to clarify if the child is deleted?
- %if not child.purged:
- ${ render_dataset( child, selected, library, False, False ) }
- %endif
- %endfor
- </div>
- %endif
+ </div>
</div>
- </div>
+ %endif
</div>
%if widgets:
${render_template_info( ldda, library.id, widgets, editable=False )}
diff -r eba44fc830bf -r c3b40f23a0e0 templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/dataset/edit_attributes.mako Wed Sep 09 14:24:11 2009 -0400
@@ -189,7 +189,7 @@
</div>
<p />
-%if trans.app.security_agent.allow_action( user, user_roles, data.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset=data.dataset ):
+%if trans.app.security_agent.can_manage_dataset( user_roles, data.dataset ):
<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
${render_permission_form( data.dataset, data.name, h.url_for( controller='root', action='edit', id=data.id ), user_roles )}
%elif trans.user:
diff -r eba44fc830bf -r c3b40f23a0e0 templates/library/browse_libraries.mako
--- a/templates/library/browse_libraries.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/library/browse_libraries.mako Wed Sep 09 14:24:11 2009 -0400
@@ -20,9 +20,9 @@
</tr>
</thead>
<tbody>
- %for library in libraries:
+ %for library, hidden_folder_ids in libraries.items():
<tr class="libraryRow libraryOrFolderRow" id="libraryRow">
- <td><a href="${h.url_for( controller='library', action='browse_library', id=library.id )}">${library.name}</a></td>
+ <td><a href="${h.url_for( controller='library', action='browse_library', id=library.id, hidden_folder_ids=hidden_folder_ids )}">${library.name}</a></td>
<td><i>${library.description}</i></td>
</tr>
%endfor
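
The hidden_folder_ids travel as a comma-separated string on the browse_library link above and are
turned back into a list by util.listify() in the controller. A simplified stand-in for that round
trip; util.listify's exact behaviour is assumed from how it is used in this changeset:

def listify_sketch( value ):
    # Roughly what util.listify() is relied on to do here: '' -> [], '3,7' -> [ '3', '7' ].
    if not value:
        return []
    return [ item.strip() for item in str( value ).split( ',' ) if item.strip() ]

assert listify_sketch( '3,7' ) == [ '3', '7' ]
assert listify_sketch( '' ) == []
# In browse_library.mako the per-folder check then becomes: str( folder.id ) in hidden_folder_ids
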
diff -r eba44fc830bf -r c3b40f23a0e0 templates/library/browse_library.mako
--- a/templates/library/browse_library.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/library/browse_library.mako Wed Sep 09 14:24:11 2009 -0400
@@ -2,8 +2,8 @@
<%namespace file="/message.mako" import="render_msg" />
<%
from galaxy import util
+ from galaxy.web.controllers.library import active_folders
from time import strftime
-
user, roles = trans.get_user_and_roles()
%>
@@ -14,10 +14,6 @@
</%def>
<%
-
-def name_sorted( l ):
- return sorted( l, lambda a, b: cmp( a.name.lower(), b.name.lower() ) )
-
class RowCounter( object ):
def __init__( self ):
self.count = 0
@@ -83,21 +79,22 @@
});
</script>
-<%def name="render_dataset( library_dataset, selected, library, pad, parent, row_conter )">
+<%def name="render_dataset( ldda, library_dataset, selected, library, folder, pad, parent, row_conter )">
<%
- ## The received data must always be a LibraryDataset object, but the object id passed to methods from the drop down menu
- ## should be the underlying ldda id to prevent id collision ( which could happen when displaying children, which are always
- ## lddas ). We also need to make sure we're displaying the latest version of this library_dataset, so we display the attributes
+ ## The id passed to methods from the drop down menu should be the ldda id to prevent id collision
+ ## ( which could happen when displaying children, which are always lddas ). We also need to make
+ ## sure we're displaying the latest version of this library_dataset, so we display the attributes
## from the ldda.
- ldda = library_dataset.library_dataset_dataset_association
if ldda.user:
uploaded_by = ldda.user.email
else:
uploaded_by = 'anonymous'
- if ldda == ldda.library_dataset.library_dataset_dataset_association:
+ if ldda == library_dataset.library_dataset_dataset_association:
current_version = True
else:
current_version = False
+ can_modify_library_dataset = trans.app.security_agent.can_modify_library_item( user, roles, library_dataset )
+ can_manage_library_dataset = trans.app.security_agent.can_manage_library_item( user, roles, library_dataset )
%>
<tr class="datasetRow"
@@ -112,19 +109,19 @@
%else:
<input type="checkbox" name="ldda_ids" value="${ldda.id}"/>
%endif
- <a href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, info=True )}"><b>${ldda.name[:60]}</b></a>
+ <a href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=folder.id, id=ldda.id, info=True )}"><b>${ldda.name[:60]}</b></a>
<a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a>
<div popupmenu="dataset-${ldda.id}-popup">
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=ldda.library_dataset ):
- <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, edit_info=True )}">Edit this dataset's information</a>
+ %if can_modify_library_dataset:
+ <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=folder.id, id=ldda.id, edit_info=True )}">Edit this dataset's information</a>
%else:
- <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, information=True )}">View this dataset's information</a>
+ <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=folder.id, id=ldda.id, information=True )}">View this dataset's information</a>
%endif
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset=ldda.dataset ) and trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=ldda.library_dataset ):
- <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, permissions=True )}">Edit this dataset's permissions</a>
- %if current_version and trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=ldda.library_dataset ):
- <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, replace_id=library_dataset.id )}">Upload a new version of this dataset</a>
+ %if can_manage_library_dataset:
+ <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=folder.id, id=ldda.id, permissions=True )}">Edit this dataset's permissions</a>
%endif
+ %if current_version and can_modify_library_dataset:
+ <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=folder.id, replace_id=library_dataset.id )}">Upload a new version of this dataset</a>
%endif
%if ldda.has_data:
<a class="action-button" href="${h.url_for( controller='library', action='datasets', library_id=library.id, ldda_ids=str( ldda.id ), do_action='add' )}">Import this dataset into your current history</a>
@@ -142,31 +139,30 @@
%>
</%def>
-<%def name="render_folder( folder, folder_pad, created_ldda_ids, library_id, parent=None, row_counter=None )">
+<%def name="render_folder( folder, folder_pad, created_ldda_ids, library_id, hidden_folder_ids, parent=None, row_counter=None, root_folder=False )">
<%
- def show_folder():
- ## TODO: instead of calling check_folder_contents(), which we've already done prior to getting here,
- ## add a new method that will itself call check_folder_contents() and build a list of accessible folders
- ## for each library - this should improve performance dor large libraries where the current user can only
- ## access a small number of folders.
- if trans.app.security_agent.check_folder_contents( user, roles, folder ) or \
- trans.app.security_agent.show_library_item( user, roles, folder ):
- return True
- return False
- if not show_folder:
+ if str( folder.id ) in hidden_folder_ids:
return ""
- root_folder = not folder.parent
+ can_access, folder_ids = trans.app.security_agent.check_folder_contents( user, roles, folder )
+ if not can_access:
+ can_show, folder_ids = \
+ trans.app.security_agent.show_library_item( user,
+ roles,
+ folder,
+ [ trans.app.security_agent.permitted_actions.LIBRARY_ADD,
+ trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
+ trans.app.security_agent.permitted_actions.LIBRARY_MANAGE ] )
+ if not can_show:
+ return ""
if root_folder:
pad = folder_pad
else:
pad = folder_pad + 20
- if folder_pad == 0:
- subfolder = False
- else:
- subfolder = True
- created_ldda_id_list = util.listify( created_ldda_ids )
- if created_ldda_id_list:
- created_ldda_ids = [ int( ldda_id ) for ldda_id in created_ldda_id_list ]
+ if created_ldda_ids:
+ created_ldda_ids = [ int( ldda_id ) for ldda_id in util.listify( created_ldda_ids ) ]
+ can_add = trans.app.security_agent.can_add_library_item( user, roles, folder )
+ can_modify = trans.app.security_agent.can_modify_library_item( user, roles, folder )
+ can_manage = trans.app.security_agent.can_manage_library_item( user, roles, folder )
my_row = None
%>
%if not root_folder:
@@ -186,21 +182,19 @@
%endif
<a id="folder_img-${folder.id}-popup" class="popup-arrow" style="display: none;">▼</a>
<div popupmenu="folder_img-${folder.id}-popup">
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=folder ):
+ %if can_add:
<a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder.id )}">Add datasets to this folder</a>
<a class="action-button" href="${h.url_for( controller='library', action='folder', new=True, id=folder.id, library_id=library_id )}">Create a new sub-folder in this folder</a>
%endif
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=folder ):
+ %if can_modify:
<a class="action-button" href="${h.url_for( controller='library', action='folder', information=True, id=folder.id, library_id=library_id )}">Edit this folder's information</a>
%else:
<a class="action-button" href="${h.url_for( controller='library', action='folder', information=True, id=folder.id, library_id=library_id )}">View this folder's information</a>
%endif
- %if forms and not folder.info_association:
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=library ):
- <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, add=True )}">Add an information template to this folder</a>
- %endif
+ %if can_add and forms and not folder.info_association:
+ <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, add=True )}">Add an information template to this folder</a>
%endif
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=folder ):
+ %if can_manage:
<a class="action-button" href="${h.url_for( controller='library', action='folder', permissions=True, id=folder.id, library_id=library_id )}">Edit this folder's permissions</a>
%endif
</div>
@@ -212,43 +206,48 @@
row_counter.increment()
%>
%endif
- %for child_folder in name_sorted( folder.active_folders ):
- ${render_folder( child_folder, pad, created_ldda_ids, library_id, my_row, row_counter )}
+ <% sub_folders = active_folders( trans, folder ) %>
+ %for sub_folder in sub_folders:
+ ${render_folder( sub_folder, pad, created_ldda_ids, library_id, hidden_folder_ids, parent=my_row, row_counter=row_counter )}
%endfor
- %for library_dataset in name_sorted( folder.active_library_datasets ):
+ %for library_dataset in folder.active_library_datasets:
<%
- selected = created_ldda_ids and library_dataset.library_dataset_dataset_association.id in created_ldda_ids
+ ldda = library_dataset.library_dataset_dataset_association
+ can_access = trans.app.security_agent.can_access_dataset( roles, ldda.dataset )
+ selected = created_ldda_ids and ldda.id in created_ldda_ids
%>
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.DATASET_ACCESS, dataset=library_dataset.library_dataset_dataset_association.dataset ):
- ${render_dataset( library_dataset, selected, library, pad, my_row, row_counter )}
+ %if can_access:
+ ${render_dataset( ldda, library_dataset, selected, library, folder, pad, my_row, row_counter )}
%endif
%endfor
</%def>
<h2>Data Library “${library.name}”</h2>
+<%
+can_add = trans.app.security_agent.can_add_library_item( user, roles, library )
+can_modify = trans.app.security_agent.can_modify_library_item( user, roles, library )
+can_manage = trans.app.security_agent.can_manage_library_item( user, roles, library )
+%>
+
<ul class="manage-table-actions">
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=library ):
- %if not deleted:
- <li>
- <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=library.root_folder.id )}"><span>Add datasets to this library</span></a>
- </li>
- <li>
- <a class="action-button" href="${h.url_for( controller='library', action='folder', new=True, id=library.root_folder.id, library_id=library.id )}">Add a folder to this library</a>
- </li>
- %endif
+ %if can_add and not deleted:
+ <li>
+ <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library.id, folder_id=library.root_folder.id )}"><span>Add datasets to this library</span></a>
+ </li>
+ <li>
+ <a class="action-button" href="${h.url_for( controller='library', action='folder', new=True, id=library.root_folder.id, library_id=library.id )}">Add a folder to this library</a>
+ </li>
%endif
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=library ):
+ %if can_modify:
<li><a class="action-button" href="${h.url_for( controller='library', action='library', information=True, id=library.id )}">Edit this library's information</a></li>
%else:
<li><a class="action-button" href="${h.url_for( controller='library', action='library', information=True, id=library.id )}">View this library's information</a></li>
%endif
- %if forms and not library.info_association:
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=library ):
- <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, add=True )}">Add an information template to this library</a>
- %endif
+ %if can_add and forms and not library.info_association:
+ <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, add=True )}">Add an information template to this library</a>
%endif
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=library ):
+ %if can_manage:
<li><a class="action-button" href="${h.url_for( controller='library', action='library', permissions=True, id=library.id )}">Edit this library's permissions</a></li>
%endif
</ul>
@@ -258,10 +257,6 @@
%endif
<form name="import_from_library" action="${h.url_for( controller='library', action='datasets', library_id=library.id )}" method="post">
- <%
- library_item_ids = {}
- library_item_ids[ 'library' ] = library.id
- %>
<table cellspacing="0" cellpadding="0" border="0" width="100%" class="grid" id="library-grid">
<thead>
<tr class="libraryTitle">
@@ -272,7 +267,7 @@
</thead>
</tr>
<% row_counter = RowCounter() %>
- ${render_folder( library.root_folder, 0, created_ldda_ids, library.id, None, row_counter )}
+ ${render_folder( library.root_folder, 0, created_ldda_ids, library.id, hidden_folder_ids, parent=None, row_counter=row_counter, root_folder=True )}
<tfoot>
<tr>
<td colspan="4" style="padding-left: 42px;">
diff -r eba44fc830bf -r c3b40f23a0e0 templates/library/common.mako
--- a/templates/library/common.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/library/common.mako Wed Sep 09 14:24:11 2009 -0400
@@ -21,7 +21,7 @@
<div class="toolForm">
<div class="toolFormTitle">Other information about ${library_item_desc} ${library_item.name}</div>
<div class="toolFormBody">
- %if editable and trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=library_item ):
+ %if editable and trans.app.security_agent.can_modify_library_item( user, roles, library_item ):
<form name="edit_info" action="${h.url_for( controller='library', action='edit_template_info', library_id=library_id, num_widgets=len( widgets ) )}" method="post">
<input type="hidden" name="library_item_id" value="${library_item.id}"/>
<input type="hidden" name="library_item_type" value="${library_item_type}"/>
diff -r eba44fc830bf -r c3b40f23a0e0 templates/library/folder_info.mako
--- a/templates/library/folder_info.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/library/folder_info.mako Wed Sep 09 14:24:11 2009 -0400
@@ -18,7 +18,7 @@
<div class="toolForm">
<div class="toolFormTitle">Edit folder name and description</div>
<div class="toolFormBody">
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=folder ):
+ %if trans.app.security_agent.can_modify_library_item( user, roles, folder ):
<form name="folder" action="${h.url_for( controller='library', action='folder', rename=True, id=folder.id, library_id=library_id )}" method="post" >
<div class="form-row">
<label>Name:</label>
diff -r eba44fc830bf -r c3b40f23a0e0 templates/library/folder_permissions.mako
--- a/templates/library/folder_permissions.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/library/folder_permissions.mako Wed Sep 09 14:24:11 2009 -0400
@@ -15,6 +15,6 @@
${render_msg( msg, messagetype )}
%endif
-%if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=folder ):
+%if trans.app.security_agent.can_manage_library_item( user, roles, folder ):
${render_permission_form( folder, folder.name, h.url_for( controller='library', action='folder', id=folder.id, library_id=library_id, permissions=True ), trans.user.all_roles() )}
%endif
diff -r eba44fc830bf -r c3b40f23a0e0 templates/library/ldda_edit_info.mako
--- a/templates/library/ldda_edit_info.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/library/ldda_edit_info.mako Wed Sep 09 14:24:11 2009 -0400
@@ -34,7 +34,7 @@
</select>
</%def>
-%if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=ldda.library_dataset ):
+%if trans.app.security_agent.can_modify_library_item( user, roles, ldda.library_dataset ):
<div class="toolForm">
<div class="toolFormTitle">Edit attributes of ${ldda.name}</div>
<div class="toolFormBody">
diff -r eba44fc830bf -r c3b40f23a0e0 templates/library/ldda_info.mako
--- a/templates/library/ldda_info.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/library/ldda_info.mako Wed Sep 09 14:24:11 2009 -0400
@@ -40,15 +40,15 @@
Information about ${ldda.name}
<a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a>
<div popupmenu="dataset-${ldda.id}-popup">
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=ldda.library_dataset ):
+ %if trans.app.security_agent.can_modify_library_item( user, roles, ldda.library_dataset ):
<a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, id=ldda.id, edit_info=True )}">Edit this dataset's information</a>
%else:
<a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, id=ldda.id, information=True )}">View this dataset's information</a>
%endif
- %if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset=ldda.dataset ) and trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=ldda.library_dataset ):
+ %if trans.app.security_agent.can_manage_dataset( roles, ldda.dataset ) and trans.app.security_agent.can_manage_library_item( user, roles, ldda.library_dataset ):
<a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, id=ldda.id, permissions=True )}">Edit this dataset's permissions</a>
%endif
- %if current_version and trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=ldda.library_dataset ):
+ %if current_version and trans.app.security_agent.can_modify_library_item( user, roles, ldda.library_dataset ):
<a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, replace_id=ldda.library_dataset.id )}">Upload a new version of this dataset</a>
%endif
%if ldda.has_data:
@@ -86,28 +86,14 @@
<div class="form-row">
<div>${ldda.blurb}</div>
</div>
- <div class="form-row">
- <div id="info${ldda.id}" class="historyItemBody">
- %if ldda.peek != "no peek":
+ %if ldda.peek != "no peek":
+ <div class="form-row">
+ <div id="info${ldda.id}" class="historyItemBody">
<label>Peek:</label>
<div><pre id="peek${ldda.id}" class="peek">${ldda.display_peek()}</pre></div>
- %endif
- ## Recurse for child datasets
- ## TODO: eliminate this - child datasets are deprecated, and where does
- ## render_dataset() come from anyway - it's not imported!
- %if len( ldda.visible_children ) > 0:
- <div>
- There are ${len( ldda.visible_children )} secondary datasets.
- %for idx, child in enumerate( ldda.visible_children ):
- ## TODO: do we need to clarify if the child is deleted?
- %if not child.purged:
- ${ render_dataset( child, selected, library ) }
- %endif
- %endfor
- </div>
- %endif
+ </div>
</div>
- </div>
+ %endif
</div>
%if widgets:
${render_template_info( ldda, library_id, widgets, editable=False )}
diff -r eba44fc830bf -r c3b40f23a0e0 templates/library/library_dataset_info.mako
--- a/templates/library/library_dataset_info.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/library/library_dataset_info.mako Wed Sep 09 14:24:11 2009 -0400
@@ -21,7 +21,7 @@
${render_msg( msg, messagetype )}
%endif
-%if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=library_dataset ):
+%if trans.app.security_agent.can_modify_library_item( user, roles, library_dataset ):
<div class="toolForm">
<div class="toolFormTitle">Edit attributes of ${library_dataset.name}</div>
<div class="toolFormBody">
diff -r eba44fc830bf -r c3b40f23a0e0 templates/library/library_dataset_permissions.mako
--- a/templates/library/library_dataset_permissions.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/library/library_dataset_permissions.mako Wed Sep 09 14:24:11 2009 -0400
@@ -21,7 +21,7 @@
${render_msg( msg, messagetype )}
%endif
-%if trans.app.security_agent.allow_action( user, user_roles, trans.app.security_agent.permitted_actions.LIBRARY_manage, library_item=library_dataset ):
+%if trans.app.security_agent.can_manage_library_item( user, user_roles, library_dataset ):
<%
roles = trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ).order_by( trans.app.model.Role.table.c.name ).all()
%>
diff -r eba44fc830bf -r c3b40f23a0e0 templates/library/library_info.mako
--- a/templates/library/library_info.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/library/library_info.mako Wed Sep 09 14:24:11 2009 -0400
@@ -15,7 +15,7 @@
${render_msg( msg, messagetype )}
%endif
-%if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=library ):
+%if trans.app.security_agent.can_modify_library_item( user, roles, library ):
<div class="toolForm">
<div class="toolFormTitle">Change library name and description</div>
<div class="toolFormBody">
diff -r eba44fc830bf -r c3b40f23a0e0 templates/library/library_permissions.mako
--- a/templates/library/library_permissions.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/library/library_permissions.mako Wed Sep 09 14:24:11 2009 -0400
@@ -15,7 +15,7 @@
${render_msg( msg, messagetype )}
%endif
-%if trans.app.security_agent.allow_action( user, user_roles, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=library ):
+%if trans.app.security_agent.can_manage_library_item( user, user_roles, library ):
<%
roles = trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ).order_by( trans.app.model.Role.table.c.name ).all()
%>
diff -r eba44fc830bf -r c3b40f23a0e0 templates/mobile/history/detail.mako
--- a/templates/mobile/history/detail.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/mobile/history/detail.mako Wed Sep 09 14:24:11 2009 -0400
@@ -37,7 +37,7 @@
<div class="secondary">
## Body for history items, extra info and actions, data "peek"
<% user, roles = trans.get_user_and_roles() %>
- %if not trans.app.security_agent.allow_action( user, roles, data.permitted_actions.DATASET_ACCESS, dataset = data.dataset ):
+ %if not trans.app.security_agent.can_access_dataset( roles, data.dataset ):
<div>You do not have permission to view this dataset.</div>
%elif data_state == "queued":
<div>Job is waiting to run</div>
diff -r eba44fc830bf -r c3b40f23a0e0 templates/mobile/manage_library.mako
--- a/templates/mobile/manage_library.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/mobile/manage_library.mako Wed Sep 09 14:24:11 2009 -0400
@@ -9,7 +9,7 @@
${render_msg( msg, messagetype )}
%endif
-%if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=library ):
+%if trans.app.security_agent.can_modify_library_item( user, roles, library ):
<div class="toolForm">
<div class="toolFormTitle">Change library name and description</div>
<div class="toolFormBody">
@@ -55,7 +55,7 @@
</div>
</div>
%endif
-%if trans.app.security_agent.allow_action( user, roles, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=library ):
+%if trans.app.security_agent.can_manage_library_item( user, roles, library ):
<%
roles = trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ).order_by( trans.app.model.Role.table.c.name ).all()
%>
diff -r eba44fc830bf -r c3b40f23a0e0 templates/root/history_common.mako
--- a/templates/root/history_common.mako Tue Sep 08 17:33:38 2009 -0400
+++ b/templates/root/history_common.mako Wed Sep 09 14:24:11 2009 -0400
@@ -8,7 +8,7 @@
data_state = data.state
user, roles = trans.get_user_and_roles()
%>
- %if not trans.app.security_agent.allow_action( user, roles, data.permitted_actions.DATASET_ACCESS, dataset = data.dataset ):
+ %if not trans.app.security_agent.can_access_dataset( roles, data.dataset ):
<div class="historyItemWrapper historyItem historyItem-${data_state} historyItem-noPermission" id="historyItem-${data.id}">
%else:
<div class="historyItemWrapper historyItem historyItem-${data_state}" id="historyItem-${data.id}">
@@ -42,7 +42,7 @@
## Body for history items, extra info and actions, data "peek"
<div id="info${data.id}" class="historyItemBody">
- %if not trans.app.security_agent.allow_action( user, roles, data.permitted_actions.DATASET_ACCESS, dataset = data.dataset ):
+ %if not trans.app.security_agent.can_access_dataset( roles, data.dataset ):
<div>You do not have permission to view this dataset.</div>
%elif data_state == "upload":
<div>Dataset is uploading</div>
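
The template changes above all follow one pattern: verbose inline checks such as

    trans.app.security_agent.allow_action( user, roles,
        trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=item )

become named convenience checks such as

    trans.app.security_agent.can_modify_library_item( user, roles, item )

The sketch below only illustrates that wrapper idea; the class name, the toy
allow_action() body, and the action strings are assumptions made for the example,
not Galaxy's actual security agent implementation.

    class Bunch( object ):
        # Minimal attribute container standing in for permitted_actions.
        def __init__( self, **kwargs ):
            self.__dict__.update( kwargs )

    class SecurityAgentSketch( object ):
        permitted_actions = Bunch( LIBRARY_ADD='add library item',
                                   LIBRARY_MODIFY='modify library item',
                                   LIBRARY_MANAGE='manage library item' )
        def allow_action( self, user, roles, action, library_item=None ):
            # Toy check: the item records which roles are granted each action.
            granted = getattr( library_item, 'actions', {} )
            return any( role in granted.get( action, [] ) for role in roles )
        def can_add_library_item( self, user, roles, library_item ):
            return self.allow_action( user, roles, self.permitted_actions.LIBRARY_ADD, library_item=library_item )
        def can_modify_library_item( self, user, roles, library_item ):
            return self.allow_action( user, roles, self.permitted_actions.LIBRARY_MODIFY, library_item=library_item )
        def can_manage_library_item( self, user, roles, library_item ):
            return self.allow_action( user, roles, self.permitted_actions.LIBRARY_MANAGE, library_item=library_item )

With helpers like these, a template check reduces to a single readable call
(e.g. can_modify_library_item( user, roles, folder )) instead of repeating the
permitted-action lookup at every call site, which is what each hunk above does.
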
diff -r eba44fc830bf -r c3b40f23a0e0 test-data/cf_maf2fasta_new.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cf_maf2fasta_new.dat Wed Sep 09 14:24:11 2009 -0400
@@ -0,0 +1,134 @@
+>hg17.chr7(+):127471195-127471526|sequence_index=0|block_index=0|species=hg17|hg17_0_0
+gtttgccatcttttgctgctctagggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATC---ATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTAAAACTTCCCAAATACTGCCACTGATGTCCTGATGGAGG-------------------------------------------------------TATGAA---------AACATCCACTAA
+>panTro1.chr6(+):129885076-129885407|sequence_index=0|block_index=0|species=panTro1|panTro1_0_0
+gtttgccatcttttgctgctcttgggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATC---ATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTGAAACTTCCCAAATACTGCCACTGATGTCCTGATGGAGG-------------------------------------------------------TATGAA---------AACATCCACTAA
+>rheMac2.chr3(+):165787989-165788319|sequence_index=0|block_index=0|species=rheMac2|rheMac2_0_0
+gcttgccatcttttgatgctcttgggaatccagcagctgtcaccat-taaacaagcccaggctagaccaGTTACCCTCATC---ATCTTAGCTGATAGCCAGCCAGCCACCATAGGCAtgagtcaggccatagtgctggacccacagaattatgagctaaataagtagtgttgggttaagtcactaagttttaggcatagtgtgttatgtagcTCACAAACATATAAGACTGTGTGTTTTTTGACTGGAGGAAGAGATGCCATAAAGACCACCTTTTGAAACTTCTCAAATACTGCCATTGATGTGCTGATGGAGG-------------------------------------------------------TATGAA---------AACATCCACTAA
+>rn3.chr4(+):56178191-56178473|sequence_index=0|block_index=0|species=rn3|rn3_0_0
+CTTCACTCTCATTTGCTGTT----------------CTGTCACTATGGAGACAAACACAGGCTAGCCCAGTTACTATCTTGATCACAGCAGCTGT----CAGCTAGCTGCCACTCACAGGAATAAGGCCATACCATT-GATCCACTGAACCTTGATCTAGGAATTTGGC----------------------TGGGGCCAGTTTGCGGTGTCACTCATGA--CTCTAAGATTGTGTGTTTG----CTCCAGGAAGAGACGGCAAGAGGATTACCTTTAAAAGGTTCGG-AGTCTAGCTGTAGACAGCCCAATGGG---------------------------------------------------------TATAAC---------AATACTCACTAA
+>mm7.chr6(+):28984529-28984886|sequence_index=0|block_index=0|species=mm7|mm7_0_0
+CTCCACTCTCGTTTGCTGTT----------------CTGTCACCATGGAAACAAACG-AGGGTGGTCCAGTTACTATCTTG---ACTGCAGCTGG----CAGTCAGTTGCCACT--CAGGAATAAGGCTATGCCATT-GATCCACTGAACCGTGATCTGGAAACCTGGCTGTTGTTT-------CAAGCCTTGGGGCCAGTTTGCGGTGTTACTCATGA--CTCTAAGATCGTGTGCTTG----CTGCAGGAAGAGACAGCAAGGGGGTTACATTTAAAAAGCCCCC-AGTTTAGCTATAGGCAGGCCAACAGGTGTAAAAATACTCACTAGTAATGGGCTGAACTCATGGAGGTAGCATTAGTGAGACACTGTAACTGTTTTTTTAAAAATCACTAA
+
+>hg17.chr7(+):127471526-127471584|sequence_index=0|block_index=1|species=hg17|hg17_1_0
+AATTTGTGGTTTATTCATTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGG
+>mm7.chr6(+):28984886-28984940|sequence_index=0|block_index=1|species=mm7|mm7_1_0
+----AACGTTTCATTGATTGCTCATCATTTAAAAAAAGAAATTCCTCAGTGGAAGAGG
+>rheMac2.chr3(+):165788319-165788377|sequence_index=0|block_index=1|species=rheMac2|rheMac2_1_0
+AATTTGTGGTTTATTTATTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGG
+>panTro1.chr6(+):129885407-129885465|sequence_index=0|block_index=1|species=panTro1|panTro1_1_0
+AATTTGTGGTTTATTCGTTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGG
+
+>hg17.chr7(+):127471584-127471688|sequence_index=0|block_index=2|species=hg17|hg17_2_0
+GAGATATTT-GGggaaatttt-gtatagactagctt--tcacgatgttagggaattattattgtgtgataatggtcttgcagttac-acagaaattcttcctta-ttttt
+>panTro1.chr6(+):129885465-129885569|sequence_index=0|block_index=2|species=panTro1|panTro1_2_0
+GAGACATTT-GGggaaatttt-gtatagactagctt--tcacgatgttagggagttattattgtgtgataatggtcttgcagttac-acagaaattcttcctta-ttttt
+>rheMac2.chr3(+):165788377-165788482|sequence_index=0|block_index=2|species=rheMac2|rheMac2_2_0
+GAGATATTT-GGggaaatttg-gtatagactagctt--tcatgatgtaagggagttatttttgtgtgataatggccctacagttac-acagaaattcttccttatttttt
+>canFam2.chr14(-):11090703-11090811|sequence_index=0|block_index=2|species=canFam2|canFam2_2_0
+gagatattt-gggggaatttgaatgtagtgttgctcttttgtgatgctaagaaattataattgtctgatgatagtctcgtggttatgggggaaatgcttcctta-ttttt
+>bosTau2.chr4(-):50243931-50244034|sequence_index=0|block_index=2|species=bosTau2|bosTau2_2_0
+-agacattg-ggtaaaattcaaatgcagactagctc----atgatgttaaagaattactcttgtgtggtaatggtcttgtgatagagatagaaatgcttcctta-ttttt
+>rn3.chr4(+):56182200-56182295|sequence_index=0|block_index=2|species=rn3|rn3_2_0
+----TATTTGGGGGAAATATG-ATGTGCA----CTT--CCATGATCTTAAAGAATTGCTACTGTTTGATAGTGATCTTATGGTTAA-ATAAAAAAAAT--CTTA-GTTGT
+>dasNov1.scaffold_256527(+):298-392|sequence_index=0|block_index=2|species=dasNov1|dasNov1_2_0
+GAGACATTT-GGAGAAATTTG-----------Aatt--tcatgatgttaaggaattacttttgtatgatgatggtcttgtggctat-gtagaatttcttccgtg-tttta
+
+>hg17.chr7(+):127471688-127471871|sequence_index=0|block_index=3|species=hg17|hg17_3_0
+tgggaagcaccaaagta-------gggataaaatgtcatgatgtgtgcaatacactttaaaatgtttttgccaaaa----------taattaa-------------------------tgaagc--aaatatg---gaaaataataattattaaatctaggt-----gatgggtatattgtagttcactatagtattgcacacttttctgtatgtttaaatttttcattta--------------------------aaaa-
+>panTro1.chr6(+):129885569-129885752|sequence_index=0|block_index=3|species=panTro1|panTro1_3_0
+tgggaaacaccaaagta-------gggataaaatgtcatgatgtgtgcaatacgctttaaaatatttttgccaaaa----------taattaa-------------------------tgaagc--aaatatg---gaaaataataattattaaatctaggt-----gatgggtatattgtagttcactatagtattgcacacttttctgtatgtttaaaattttcattta--------------------------aaaa-
+>rheMac2.chr3(+):165788482-165788684|sequence_index=0|block_index=3|species=rheMac2|rheMac2_3_0
+tgggaagcacaaaagta-------gggataaaatgtcatgatgtgtacaatatgctttaaaatatttttgccaaaa----------taattaa-------------------------tgaagc--aaatatg---gaaaataataactgttaaatctaggt-----gttgggtatattgcagttcattatgttattgcacacttttctgtgtgtttaaaattttcatttaaaaatatgttttaaaaatg-------aaaa-
+>rn3.chr4(+):56182295-56182489|sequence_index=0|block_index=3|species=rn3|rn3_3_0
+TAGAAAATACTCAAATATTTAGGGGCGTGACAATGTCACAGTGTCTGCAATTTGCTTTAAAGATTTTT-----AAA----------TATTTAAAAAAGTTTTAATAATTTTGAAAAACTGAAGCTACACTATG---GGAAGTGGTAATTGTTACATATGGGT-----AATAAGTAT-----AATTCGTTATATTAT-------TTTTC------TTAGAATTTTTCATTTG--------------------------AAAA-
+>bosTau2.chr4(-):50243792-50243930|sequence_index=0|block_index=3|species=bosTau2|bosTau2_3_0
+agataaacacttaagtattta---aggatgaaacgccctgatgtttgtaatttgctttagaatattttagccaaaa----------gaattaa-------------------------tgatgc--aaatatg--caaaaagagta--cgttaaacctaa-----------------------------------------------------atttgCGATTttcattta--------------------------aaaa-
+>canFam2.chr14(-):11090345-11090505|sequence_index=0|block_index=3|species=canFam2|canFam2_3_0
+agacacaaactgaagtattta---aggatgaaatgtcatgatgtttgcaattggctttaaaatattttagccaaaa-----------agtaaa-------------------------tgaagc--AAATATG--GGAAGACAATAATCATTAAATCTAGGT-----GATGCATAC---------------------------TTTTCCATATGTTTGAAATTTTCATTTA--------------------------AAAA-
+>dasNov1.scaffold_256527(+):393-625|sequence_index=0|block_index=3|species=dasNov1|dasNov1_3_0
+agacgcatgctgaagcatgta---aggataaaatgtcgtggtgtttgtaatttattctaaaacattttagccaaaaacaaataaataaataaa-------------------------tgaagc--aaatatgggggaaatgtttaattgttaaatctagatttaacacggtatataccgtgcttcattatactagtctctacttttccatgtgtttgaaattttCATTAAAATGTTTGTTTGTTGTCTGTTTTAATGAAAT
+
+>hg17.chr7(+):127471871-127471910|sequence_index=0|block_index=4|species=hg17|hg17_4_0
+actttgagctagacaccaggctatgagcta-ggagcatag
+>rheMac2.chr3(+):165788684-165788723|sequence_index=0|block_index=4|species=rheMac2|rheMac2_4_0
+actttgagctagataccaggttatgagcta-ggagcatag
+>panTro1.chr6(+):129885752-129885791|sequence_index=0|block_index=4|species=panTro1|panTro1_4_0
+actttgagctagacaccaggctatgagcta-ggagcatag
+>bosTau2.chr4(-):50243734-50243773|sequence_index=0|block_index=4|species=bosTau2|bosTau2_4_0
+tcttcgtgcaacgcacggggctatcaatgt-gggatacag
+>canFam2.chr14(-):11090081-11090120|sequence_index=0|block_index=4|species=canFam2|canFam2_4_0
+ACATCAtgctagatcctggactatgagctg-ggtatatag
+>dasNov1.scaffold_256527(+):625-665|sequence_index=0|block_index=4|species=dasNov1|dasNov1_4_0
+CCTTTGTGCTAGCCACTGGGATGAAAGCTAGGGAACACAG
+
+>hg17.chr7(+):127471910-127472074|sequence_index=0|block_index=5|species=hg17|hg17_5_0
+caatgaccaa----------------------------------------------------------------------------------------------atagactcctaccaa-ctc-aaagaatgcacattctCTG-GGAAACATGTTTCCATTAGGAAGCCTCGAATGCAATGTGACTGTGGTCTCCAGGACCTG-TGTGATCCTGGCTTTTCCTGTTCCCTCCG---CATCATCACTGCAGGTGTGTTTTCCCAAG
+>panTro1.chr6(+):129885791-129885955|sequence_index=0|block_index=5|species=panTro1|panTro1_5_0
+caatgaccaa----------------------------------------------------------------------------------------------atagactcctaccaa-ctc-aaagaatgcacattctCTG-GGAAACATGTTTCCATTAGGAAGCCTCGAATGCAATGTGACTGTGGTCTCCAGGACATG-TGTGATCCTGGCTTTTCCTGTTCCCTCTG---CATCATCACTGCAGGTGTATTTTCCCAAG
+>rheMac2.chr3(+):165788723-165788885|sequence_index=0|block_index=5|species=rheMac2|rheMac2_5_0
+caatgaccaa----------------------------------------------------------------------------------------------atagacccctaccga-ctc-aaagaatgtacattctTTG-GGAAACATGTTTCCATCAGAAAATCTCAAATGCAATGTGACTGGGGTCTCCAGGACCTG-TGTGAGCCTGGCTTTTCCTGTTCCCTCCA---CATCATCACTGCAGGTGTATTTTCCC--G
+>mm7.chr6(+):28990714-28990875|sequence_index=0|block_index=5|species=mm7|mm7_5_0
+caaaaaccaa------------------------------------------------------------------------------------------------aaaaACCTATAGC-CTC-ACAGGGTGGGTTGTCTTTG-AGGAACATGCATCCGCTAGAAAGTCCCAAGTACACTATGACAGTTG--CCCAGGCCCCGCCTTAAACCTGGTTTTCCTGGTTTCTTTCA---CATCATTACCACGAATATATTTCCTCAAG
+>rn3.chr4(+):56183448-56183705|sequence_index=0|block_index=5|species=rn3|rn3_5_0
+--ATGACCAATATACACTGTTTACATGTATAGCATTGTGAATGGAGACATAAAAAGATAATCTAGCTTTGTGCTAGGTAGGTGCTGAGCTCTTAACAGTGCTGGGCAGAAACCTATAAC-CTC-ACAGGGTGGGTTGTCTTTG-AGGAGCGTGCTAACCCTAGGAAGTCTCAAATACAATGTGATGGTTGCCCCCAGGCACCACCTTGAACCTGGTCTTCCTGGTTTCTTTCA---CACCATTACCACAAATACATTTTCTCAGG
+>bosTau2.chr4(-):50243566-50243734|sequence_index=0|block_index=5|species=bosTau2|bosTau2_5_0
+atgtgaacaa---------------------------------------------------------------------------------------------aacggacccgtgtgggactcggcggagcacacagattttgcgggagCACGTTCCCGTTAGGAAGTCTCTGATGCAATACGACCGGTGCCTTCAGGACCTG-TG--AGGCTGACTTTCCTTA-CCCCTCCACACCATCATCAAGGCAGGTGTGATTTTCCAGG
+>canFam2.chr14(-):11089913-11090081|sequence_index=0|block_index=5|species=canFam2|canFam2_5_0
+cagtgaacaa---------------------------------------------------------------------------------------------aacagagccctgcagt-cttgatggagcacacaacctttg-gggaaCATGTTTCCATAAGAAAGTCTCCAATGTGATCTGA-TGGTGCCGCCAGGACCTA-TGTCAGCCTACCGTTCCATGTCCCCTCCACACCATCATCACTGCAGGTGTGTTTTCCCACA
+>dasNov1.scaffold_256527(+):665-786|sequence_index=0|block_index=5|species=dasNov1|dasNov1_5_0
+CAGTGAGCAA-----------------------------------------------------------------------------------------------CAGCCTGGCTCCGT-CC--GGGGGCCGCTCAGCAGCTC-GGGAGCGTGGAGACG---GGAAGTCTGTCACGCGATGCG-----------CTGGGCCCG------------CTGTTCCCGCCCCCCTCC---CCCC----------------TTTCCCAAG
+
+>hg17.chr7(+):127472074-127472258|sequence_index=0|block_index=6|species=hg17|hg17_6_0
+TTTTAAA------CATTTACCTTCCCAGTGGCCTTGCGTCTAGAGGAATCCCTGTATAGTGGT-ACATGAATATAACACATAACAAA-AATCATCTCTATGGTGTGTGTTGTTCCTGGGGTTCAattcagcaaatttt-ccc-tgggcacccatgtgttcttggcactggaaaagtaccgggactgaaacagtt
+>panTro1.chr6(+):129885955-129886139|sequence_index=0|block_index=6|species=panTro1|panTro1_6_0
+TTTTAAA------CATTTACCTTCCCAGTGGCCTTGCGTCTAGAGGAATCCCTGTATAGTGGT-ACATGAATATAACACATAACAAA-AATCATCTCTATGGTGTGTGTTGTTCCTGGGGTTCAattcagcaaatttt-tcc-tgggcacccatgtgttcttggcactggaaaagtaccgggactgaaacagtt
+>rheMac2.chr3(+):165788885-165789069|sequence_index=0|block_index=6|species=rheMac2|rheMac2_6_0
+TTTTAAA------CATTTACTCTCCCAGTAGCCTTGCATCTCGAGGAATCCCTGTATAGTGGT-ACATGAATATAACACATAACAAA-AATCATCTGTACGGTGTGTGTTGTTCCTGGGGTTCAattcagcaaatttt-tcc-tgggcacccctgtgttcttggcactggaaaagtaccaggacttaaatagta
+>mm7.chr6(+):28990875-28991025|sequence_index=0|block_index=6|species=mm7|mm7_6_0
+TTTAAAGAAAGTACCCCCTCCTTTCCAGT-GCCTCAAATCTAGAAGAATATTCATAGTGAAGT-GC------------------------ACAGCCGGGTGGTGCATGGTA-ATCTGGAAGTCACCTCTGCAAATCTT-TCC----------------TGTTGGTGCTGTGAAGGCACCAGGACTTCAAGAGTA
+>rn3.chr4(+):56183705-56183879|sequence_index=0|block_index=6|species=rn3|rn3_6_0
+TTTAAAAGAAGT-CCCACTCCTTTCCAGT-GCCCTAGATCTAGAAGCACATTCATAATGATGT-ACAC-----TAACCC----------GACAGCTGTGTGGTATATGGTA-TCCCGGAAGTCACCTCAGCAAACCTT-TCCCGGGGAACCTACATGGTGTTGGTGCTGTGAAGGTACCAGGTTGTCAAGGGTA
+>canFam2.chr14(-):11089743-11089913|sequence_index=0|block_index=6|species=canFam2|canFam2_6_0
+TTTTAAA------TATCTGC-TTCCCGGTGGCCTTGAGTCTAGAGGAGTCCCCCCACTATGGTGGCACTAATACTGAAGGTCAGAAATAATCAGTTCTGTGGTGCATGTTGCCCCTGAGGTTCTGTTCGGGAAACTTC-TTC-TGAGCAC----ATGCACCTGGCACTGCAAACGTACCAGGA-----------
+>dasNov1.scaffold_256527(+):786-964|sequence_index=0|block_index=6|species=dasNov1|dasNov1_6_0
+TTTTAAA------AATTTACCTTCCCAGTGGCGGTGAATCCGGAGGAATACGGAAACTGGGGC-GCACTACCATGACACGTGTCAAA-AATCAGTTCCGTGGTCCGTGGAGGGCCTGGGGTTC------GAAAATCTTGTCC-CGAGCACCCCCGTGCGCCTGGCACCGCGACAGTGACAGGACTGAAGCGTG-
+
+>hg17.chr7(+):127472258-127472280|sequence_index=0|block_index=7|species=hg17|hg17_7_0
+gatggccca-atccctgtcctct-
+>panTro1.chr6(+):129886139-129886161|sequence_index=0|block_index=7|species=panTro1|panTro1_7_0
+gatggccca-atccctgtcctct-
+>rheMac2.chr3(+):165789069-165789091|sequence_index=0|block_index=7|species=rheMac2|rheMac2_7_0
+gatggccca-atccctgtcctct-
+>mm7.chr6(+):28991025-28991048|sequence_index=0|block_index=7|species=mm7|mm7_7_0
+AATGGCAGAGGGCTCTGTTCTCT-
+>rn3.chr4(+):56183879-56183902|sequence_index=0|block_index=7|species=rn3|rn3_7_0
+AATGGCAGAGGCCCCTGTTCTCT-
+>canFam2.chr14(-):11089526-11089548|sequence_index=0|block_index=7|species=canFam2|canFam2_7_0
+GGAGACTTG-ATGCCTGCCTTCC-
+>dasNov1.scaffold_256527(+):964-987|sequence_index=0|block_index=7|species=dasNov1|dasNov1_7_0
+GACGGCCAG-ACCTCTGCCCTCGG
+
+>hg17.chr7(+):127472280-127472681|sequence_index=0|block_index=8|species=hg17|hg17_8_0
+taaaacctaagggaggagaTGGAAAG-GGGCACCCAACCCAGACTGAGAGACAGGAATTAGCTGCAAGGGGAACTAGGAAAAGCTTCTTTA---AGGATG--GAGAGGCCCTA-GTGGAATGGGGAGATTCTTCCGGGAGAAGCGATGGATGCACAGTTGGGCATCCCCACAGACGGACTGGAAAGAAAAAAGGCCTGGAGGAATCA------ATGTGC-AATGTATGTGTGTTCCCTGGTTcaagggctgg-gaactttctcta--aagggccaggtagaaaacattttaggctttctaagccaagg---caaaattgaggat-attacatgggtacttatacaacaagaataaacaatt---tacacaa-ttttttgttgacagaattcaaaa---ctttat----agacac---agaaatgcaaatttcctgt
+>panTro1.chr6(+):129886161-129886562|sequence_index=0|block_index=8|species=panTro1|panTro1_8_0
+taaaacctaagggaggagaTGGAAAG-GGGCACCCAACCCAGACTGAGAGACAGGAATTAGCTGCAAGGGGAACTAGGAAAAGCTTCTTTA---AGGATG--GAGAGACCCTA-GTGGAATGGGGAGATTCTTCCGGGAGAAGCGATGGATGCGCAGTTGGGCATCCCCACAGACGGACTGGAAAGAAAAAAGGCCTGGAGGAATCA------ATGTGC-AATGTATGTGTGTTCCCTGGTTcaagggctgg-gaactttctcta--aagggccaggtagaaaacattttaggctttctaagccaagg---caaaattgaggat-attacatgggtacttatacaacaagaataaacaatt---tacacaa-ttttttgttgacagaattcaaaa---ctttat----agacac---agaaatgtaaatttcctgt
+>rheMac2.chr3(+):165789091-165789492|sequence_index=0|block_index=8|species=rheMac2|rheMac2_8_0
+taaaacctaatggaggagatggaATG-GGTCACCCAACCCGGACTGAGAGACAGGAATTAGCTGCAAGGGTAACCAGGACAAGCTTCTCTA---ATGATG--GAGAGACCCTA-GTGGAATGGGGAGATTCTTCTGGGAGAAGCGATGGATTCGTAGTTGGGCATCCCCACAGAGGGACTGGAAAGAAAAAAGACCTGGAGGAACCA------ATGTGC-AATGTATGTGTGTTTCCTGGTTcaagggctggcaaactttctcta--aagggccagatagaaaacattttaggctttgtaagccaagg---caaaatcgaggag-attacatgggtacttatacaacaagaataaacaatt---tccacaa--tttttattcacagaattcaaaa---ctttat----agacac---agaaatgtaaatttcctgt
+>rn3.chr4(+):56183902-56184219|sequence_index=0|block_index=8|species=rn3|rn3_8_0
+------------------------------------GTCCATAGTCAAAG------------------------------AAGCCTCTCAG---ATGGAG--AGCAGGGCCTATGCAAAAGAGGGGGCTTCTGTAGGCAGAAGGGATGGACTAGCCTCCGGACATAGCCATAGAGAGGCTGGCAGGACTGAGACCCAGGAGAAGCCAGCGCAGGTGTGCGGGCGTGTGTATATTTCATAGTTTGCAGGTTGG----------------------------CAAACAATTCCTGCTTTGCAGGCCAAGA---GGAAACTGAAGGTGACCCCGTGAGTGCTTAC---ACAAGAGAAAACAAG-------ACAA-TTTTTGGTTGACCAAATTCAGAA---CTTTATTTGAGGATGC---TAAAGTTTAAATTTCTTTT
+>canFam2.chr14(-):11089143-11089523|sequence_index=0|block_index=8|species=canFam2|canFam2_8_0
+TACAGCCTGTGGGCAGAGGTGGGAAGAGGTCACGCAAGCCAGTTGGAATGAGGGGAGTTGGCTGGAAAGGTGACCAGGACAAGCTACTTCAACCAGGAAG--AAGAGACCCCG-GTG----------------CTTGGAGAAGGCCTGATTGAGCAGTCCTGCATGCCCGCCCAC-GACTGGCAGGAATAAAGACCCAGAAGAGCTA------ACGTGC-AATGTA------TTTTCTAGTTCCAgggttggcaaactttctctct-aagggtgggatgataaacattttaggcttttcagaccaaga---ggcgacatcagag-ggtatgtaggt---------acaagagggaaaagttgcccccggaa-ttttttg--gataaaattcaaaa---ctttacttagggatgc---caaaatgtaaacttcatat
+>dasNov1.scaffold_256527(+):987-1401|sequence_index=0|block_index=8|species=dasNov1|dasNov1_8_0
+CTAAATCTCGCGGAGAAGGTGGAACA-GGTTACCCAAACCCGACCGAG-GAGGCGAGTTG---GAAACGGCGACTGGGACAAGCTCCCTCA---GAGACGGAGAGAGACCCCA-GTGGAAGGGGGGAGAGGCTCTTAGGGAAACGATGGGGGGACCCGCCCGCACCCGCACAGAGGCGCTGGCAGGCACAGCGGCCCCGAGGAGCCC------AGGAGC-AGGGC-TGTGT-TCCCCTGCATcaggggttggcaaactttttctgcaaagggccagatagtaaatattttaggctttgcaaaccaagaagtagaaagggaggcc-attatgtacgtatttatatagcaagagagaacattt---cccacaatttttttattgacagaatttaaaacttctttattgatgaacaccaaagaaacttgaatttcatat
+
+>hg17.chr7(+):127472681-127472715|sequence_index=0|block_index=9|species=hg17|hg17_9_0
+aattttcccat---gagaactattcttcttttgtttt
+>rheMac2.chr3(+):165789492-165789526|sequence_index=0|block_index=9|species=rheMac2|rheMac2_9_0
+aattttcacat---aagaactattcttcttttgtttt
+>panTro1.chr6(+):129886562-129886596|sequence_index=0|block_index=9|species=panTro1|panTro1_9_0
+aattttcccgt---gagaactattcttcttttgtttt
+>canFam2.chr14(-):11089108-11089143|sequence_index=0|block_index=9|species=canFam2|canFam2_9_0
+aatggtcatgt--ccataactattcttcttttatttt
+>dasNov1.scaffold_256527(+):1401-1433|sequence_index=0|block_index=9|species=dasNov1|dasNov1_9_0
+aattttcacatatcacgaagtatttttttttt-----
+
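
Judging by its name and headers, cf_maf2fasta_new.dat above looks like expected
output for a MAF-to-FASTA conversion: each FASTA header carries the source
alignment coordinates plus '|'-delimited key=value fields (sequence_index,
block_index, species). A minimal parse of one header under that assumption --
the helper itself is hypothetical, only the field names come from the data:

    def parse_maf2fasta_header( header ):
        # e.g. ">hg17.chr7(+):127471195-127471526|sequence_index=0|block_index=0|species=hg17|hg17_0_0"
        parts = header.lstrip( '>' ).split( '|' )
        fields = dict( p.split( '=', 1 ) for p in parts[1:] if '=' in p )
        fields[ 'src' ] = parts[0]  # species.chrom(strand):start-end
        return fields

    print( parse_maf2fasta_header( '>hg17.chr7(+):127471195-127471526|'
                                   'sequence_index=0|block_index=0|species=hg17|hg17_0_0' ) )
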
diff -r eba44fc830bf -r c3b40f23a0e0 test-data/closest_features_either.interval
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/closest_features_either.interval Wed Sep 09 14:24:11 2009 -0400
@@ -0,0 +1,424 @@
+chr22 30128507 30133507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30136507 30141507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30132507 30137507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30140507 30145507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30136507 30141507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30128507 30133507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30140507 30145507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30132507 30137507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30144507 30149507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30136507 30141507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30148507 30153507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30140507 30145507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30152507 30157507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30144507 30149507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30156507 30161507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30148507 30153507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30160507 30165507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30152507 30157507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30164507 30169507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30156507 30161507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30168507 30173507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30160507 30165507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30172507 30177507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30164507 30169507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30176507 30181507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30168507 30173507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30180507 30185507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30172507 30177507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30184507 30189507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30176507 30181507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30188507 30193507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30180507 30185507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30192507 30197507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30184507 30189507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30196507 30201507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30188507 30193507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30200507 30205507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30192507 30197507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30204507 30209507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30196507 30201507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30208507 30213507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30200507 30205507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30212507 30217507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30204507 30209507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30216507 30221507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30208507 30213507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30220507 30225507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30212507 30217507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30224507 30229507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30216507 30221507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30228507 30233507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30220507 30225507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30232507 30237507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30224507 30229507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30236507 30241507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30228507 30233507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30240507 30245507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30232507 30237507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30244507 30249507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30236507 30241507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30248507 30253507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30240507 30245507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30252507 30257507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30244507 30249507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30256507 30261507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30248507 30253507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30260507 30265507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30252507 30257507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30264507 30269507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30256507 30261507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30268507 30273507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30260507 30265507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30272507 30277507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30264507 30269507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30276507 30281507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30268507 30273507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30280507 30285507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30272507 30277507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30284507 30289507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30276507 30281507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30288507 30293507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30280507 30285507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30292507 30297507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30284507 30289507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30296507 30301507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30288507 30293507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30300507 30305507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30292507 30297507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30304507 30309507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30296507 30301507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30308507 30313507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30300507 30305507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30312507 30317507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30304507 30309507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30316507 30321507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30308507 30313507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30320507 30325507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30312507 30317507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30324507 30329507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30316507 30321507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30328507 30333507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30320507 30325507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30332507 30337507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30324507 30329507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30336507 30341507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30328507 30333507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30340507 30345507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30332507 30337507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30344507 30349507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30336507 30341507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30348507 30353507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30340507 30345507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30352507 30357507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30344507 30349507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30356507 30361507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30348507 30353507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30360507 30365507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30352507 30357507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30364507 30369507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30356507 30361507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30368507 30373507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30360507 30365507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30372507 30377507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30364507 30369507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30376507 30381507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30368507 30373507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30380507 30385507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30372507 30377507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30384507 30389507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30376507 30381507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30388507 30393507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30380507 30385507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30392507 30397507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30384507 30389507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30396507 30401507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30388507 30393507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30400507 30405507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30392507 30397507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30404507 30409507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30396507 30401507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30408507 30413507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30400507 30405507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30412507 30417507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30404507 30409507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30416507 30421507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30408507 30413507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30420507 30425507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30412507 30417507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30424507 30429507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30416507 30421507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30428507 30433507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30420507 30425507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30432507 30437507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30424507 30429507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30436507 30441507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30428507 30433507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30440507 30445507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30432507 30437507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30444507 30449507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30436507 30441507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30448507 30453507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30440507 30445507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30452507 30457507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30444507 30449507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30456507 30461507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30448507 30453507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30460507 30465507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30452507 30457507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30464507 30469507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30456507 30461507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30468507 30473507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30460507 30465507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30472507 30477507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30464507 30469507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30476507 30481507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30468507 30473507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30480507 30485507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30472507 30477507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30484507 30489507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30476507 30481507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30488507 30493507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30480507 30485507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30492507 30497507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30484507 30489507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30496507 30501507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30488507 30493507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30500507 30505507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30492507 30497507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30504507 30509507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30496507 30501507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30508507 30513507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30500507 30505507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30512507 30517507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30504507 30509507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30516507 30521507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30508507 30513507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30520507 30525507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30512507 30517507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30524507 30529507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30516507 30521507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30528507 30533507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30520507 30525507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30532507 30537507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30524507 30529507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30536507 30541507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30528507 30533507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30540507 30545507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30532507 30537507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30544507 30549507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30536507 30541507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30548507 30553507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30540507 30545507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30552507 30557507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30544507 30549507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30556507 30561507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30548507 30553507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30560507 30565507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30552507 30557507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30564507 30569507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30556507 30561507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30568507 30573507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30560507 30565507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30572507 30577507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30564507 30569507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30576507 30581507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30568507 30573507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30580507 30585507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30572507 30577507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30584507 30589507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30576507 30581507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30588507 30593507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30580507 30585507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30592507 30597507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30584507 30589507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30596507 30601507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30588507 30593507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30600507 30605507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30592507 30597507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30604507 30609507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30596507 30601507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30608507 30613507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30600507 30605507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30612507 30617507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30604507 30609507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30616507 30621507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30608507 30613507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30620507 30625507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30612507 30617507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30624507 30629507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30616507 30621507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30628507 30633507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30620507 30625507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30632507 30637507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30624507 30629507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30636507 30641507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30628507 30633507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30640507 30645507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30632507 30637507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30644507 30649507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30636507 30641507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30648507 30653507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30640507 30645507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30652507 30657507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30644507 30649507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30656507 30661507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30648507 30653507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30660507 30665507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30652507 30657507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30664507 30669507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30656507 30661507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30668507 30673507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30660507 30665507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30672507 30677507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30664507 30669507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30676507 30681507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30668507 30673507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30680507 30685507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30672507 30677507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30684507 30689507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30676507 30681507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30688507 30693507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30680507 30685507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30692507 30697507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30684507 30689507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30696507 30701507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30688507 30693507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30700507 30705507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30692507 30697507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30704507 30709507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30696507 30701507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30708507 30713507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30700507 30705507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30712507 30717507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30704507 30709507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30716507 30721507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30708507 30713507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30720507 30725507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30712507 30717507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30724507 30729507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30716507 30721507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30728507 30733507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30720507 30725507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30732507 30737507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30724507 30729507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30736507 30741507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30728507 30733507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30740507 30745507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30732507 30737507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30744507 30749507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30736507 30741507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30748507 30753507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30740507 30745507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30752507 30757507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30744507 30749507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30756507 30761507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30748507 30753507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30760507 30765507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30752507 30757507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30764507 30769507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30756507 30761507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30768507 30773507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30760507 30765507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30772507 30777507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30764507 30769507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30776507 30781507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30768507 30773507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30780507 30785507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30772507 30777507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30784507 30789507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30776507 30781507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30788507 30793507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30780507 30785507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30792507 30797507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30784507 30789507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30796507 30801507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30788507 30793507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30800507 30805507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30792507 30797507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30804507 30809507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30796507 30801507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30808507 30813507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30800507 30805507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30812507 30817507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30804507 30809507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30816507 30821507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30808507 30813507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30820507 30825507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30812507 30817507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30824507 30829507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30816507 30821507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30828507 30833507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30820507 30825507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30832507 30837507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30824507 30829507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30836507 30841507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30828507 30833507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30840507 30845507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30832507 30837507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30844507 30849507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30836507 30841507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30848507 30853507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30840507 30845507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30852507 30857507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30844507 30849507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30856507 30861507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30848507 30853507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30860507 30865507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30852507 30857507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30864507 30869507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30856507 30861507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30868507 30873507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30860507 30865507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30872507 30877507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30864507 30869507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30876507 30881507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30868507 30873507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30880507 30885507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30872507 30877507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30884507 30889507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30876507 30881507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30888507 30893507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30880507 30885507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30892507 30897507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30884507 30889507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30896507 30901507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30888507 30893507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30900507 30905507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30892507 30897507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30904507 30909507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30896507 30901507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30908507 30913507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30900507 30905507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30912507 30917507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30904507 30909507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30916507 30921507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30908507 30913507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30920507 30925507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30912507 30917507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30924507 30929507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30916507 30921507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30928507 30933507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30920507 30925507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30932507 30937507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30924507 30929507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30936507 30941507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30928507 30933507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30940507 30945507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30932507 30937507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30944507 30949507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30936507 30941507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30948507 30953507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30940507 30945507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30952507 30957507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30944507 30949507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30956507 30961507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30948507 30953507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30960507 30965507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30952507 30957507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30964507 30969507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30956507 30961507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30968507 30973507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30960507 30965507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30972507 30977507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30964507 30969507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30976507 30981507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30968507 30973507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30980507 30985507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30972507 30977507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30984507 30989507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30976507 30981507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30988507 30993507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30980507 30985507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30992507 30997507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30984507 30989507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 30996507 31001507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30988507 30993507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31000507 31005507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30992507 30997507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31004507 31009507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 30996507 31001507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31008507 31013507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31000507 31005507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31012507 31017507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31004507 31009507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31016507 31021507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31008507 31013507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31020507 31025507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31012507 31017507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31024507 31029507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31016507 31021507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31028507 31033507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31020507 31025507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31032507 31037507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31024507 31029507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31036507 31041507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31028507 31033507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31040507 31045507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31032507 31037507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31044507 31049507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31036507 31041507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31048507 31053507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31040507 31045507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31052507 31057507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31044507 31049507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31056507 31061507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31048507 31053507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31060507 31065507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31052507 31057507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31064507 31069507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31056507 31061507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31068507 31073507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31060507 31065507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31072507 31077507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31064507 31069507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31076507 31081507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31068507 31073507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31080507 31085507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31072507 31077507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31084507 31089507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31076507 31081507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31088507 31093507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31080507 31085507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31092507 31097507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31084507 31089507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31096507 31101507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31088507 31093507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31100507 31105507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31092507 31097507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31104507 31109507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31096507 31101507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31108507 31113507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31100507 31105507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31112507 31117507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31104507 31109507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31116507 31121507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31108507 31113507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31120507 31125507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31112507 31117507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31124507 31129507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31116507 31121507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31128507 31133507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31120507 31125507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31132507 31137507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31124507 31129507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31136507 31141507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31128507 31133507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31140507 31145507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31132507 31137507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31144507 31149507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31136507 31141507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31148507 31153507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31140507 31145507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31152507 31157507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31144507 31149507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31156507 31161507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31148507 31153507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31160507 31165507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31152507 31157507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31164507 31169507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31156507 31161507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31168507 31173507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31160507 31165507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31172507 31177507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31164507 31169507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31176507 31181507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31168507 31173507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31180507 31185507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31172507 31177507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31184507 31189507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31176507 31181507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31188507 31193507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31180507 31185507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31192507 31197507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31184507 31189507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31196507 31201507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31188507 31193507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31200507 31205507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31192507 31197507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31204507 31209507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31196507 31201507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31208507 31213507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31200507 31205507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31212507 31217507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31204507 31209507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31216507 31221507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31208507 31213507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31220507 31225507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31212507 31217507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +
+chr22 31224507 31229507 uc003bnx.1_cds_2_0_chr22_29227_f 0 + chr22 31216507 31221507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +