galaxy-dev

16 Apr '10
details: http://www.bx.psu.edu/hg/galaxy/rev/46a391f2d696
changeset: 3645:46a391f2d696
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Apr 14 10:14:26 2010 -0400
description:
Add the enable_job_running param to the sample config
diffstat:
universe_wsgi.ini.sample | 5 +++++
1 files changed, 5 insertions(+), 0 deletions(-)
diffs (15 lines):
diff -r d6a527033f2c -r 46a391f2d696 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample Wed Apr 14 09:38:35 2010 -0400
+++ b/universe_wsgi.ini.sample Wed Apr 14 10:14:26 2010 -0400
@@ -205,6 +205,11 @@
# ---- Job Execution --------------------------------------------------------
+# If running multiple Galaxy processes, one can be designated as the job
+# runner. For more information, see:
+# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/LoadBalancing
+#enable_job_running = True
+
# Number of concurrent jobs to run (local job runner)
#local_job_queue_workers = 5
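
The new option only matters when several Galaxy processes share one config file, with a single process designated as the job runner (see the LoadBalancing wiki page referenced in the added comment). A minimal standalone sketch of how a boolean like this can be read from universe_wsgi.ini, using the stdlib ConfigParser rather than Galaxy's actual loader, and assuming the option sits in the [app:main] section:

try:
    import configparser                     # Python 3
except ImportError:
    import ConfigParser as configparser     # Python 2, the era of this codebase

config = configparser.RawConfigParser()
config.read("universe_wsgi.ini")
if config.has_option("app:main", "enable_job_running"):
    enable_job_running = config.getboolean("app:main", "enable_job_running")
else:
    # The sample ships with the option commented out; jobs run by default.
    enable_job_running = True

A process configured with enable_job_running = False would then skip starting its job-running machinery and only serve web requests.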

16 Apr '10
details: http://www.bx.psu.edu/hg/galaxy/rev/d6a527033f2c
changeset: 3644:d6a527033f2c
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Wed Apr 14 09:38:35 2010 -0400
description:
Since the handle to the tag handler has been moved to the app, eliminate the now unnecessary import of a specific tag handler from the grid helper. Clean up the grid helper code, pass trans to methods instead of trans.sa_session so that methods can get other items from trans ( like the tag handler ). Eliminate a seemingly unnecessary import in grid_base.mako that tightly coupled the template to the model.
diffstat:
lib/galaxy/web/framework/helpers/grids.py | 85 ++++++++++--------------------
templates/grid_base.mako | 1 -
2 files changed, 28 insertions(+), 58 deletions(-)
diffs (351 lines):
diff -r 99782dc9d022 -r d6a527033f2c lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py Tue Apr 13 20:21:57 2010 -0400
+++ b/lib/galaxy/web/framework/helpers/grids.py Wed Apr 14 09:38:35 2010 -0400
@@ -3,7 +3,6 @@
from galaxy.web.base import controller
from galaxy.web.framework.helpers import iff
-from galaxy.tags.tag_handler import GalaxyTagHandler
from galaxy.web import url_for
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.odict import odict
@@ -21,9 +20,7 @@
model_class = None
template = "grid_base.mako"
async_template = "grid_base_async.mako"
-
use_async = False
-
global_actions = []
columns = []
operations = []
@@ -32,10 +29,8 @@
default_filter = {}
default_sort_key = None
preserve_state = False
-
use_paging = False
num_rows_per_page = 25
-
# Set preference names.
cur_filter_pref_name = ".filter"
cur_sort_key_pref_name = ".sort_key"
@@ -47,12 +42,10 @@
if operation.allow_multiple:
self.has_multiple_item_operations = True
break
-
def __call__( self, trans, **kwargs ):
status = kwargs.get( 'status', None )
message = kwargs.get( 'message', None )
session = trans.sa_session
-
# Build a base filter and sort key that is the combination of the saved state and defaults. Saved state takes preference over defaults.
base_filter = {}
if self.default_filter:
@@ -63,24 +56,19 @@
if pref_name in trans.get_user().preferences:
saved_filter = from_json_string( trans.get_user().preferences[pref_name] )
base_filter.update( saved_filter )
-
pref_name = unicode( self.__class__.__name__ + self.cur_sort_key_pref_name )
if pref_name in trans.get_user().preferences:
base_sort_key = from_json_string( trans.get_user().preferences[pref_name] )
-
# Build initial query
query = self.build_initial_query( session )
query = self.apply_default_filter( trans, query, **kwargs )
-
# Maintain sort state in generated urls
extra_url_args = {}
-
# Determine whether use_default_filter flag is set.
use_default_filter_str = kwargs.get( 'use_default_filter' )
use_default_filter = False
if use_default_filter_str:
use_default_filter = ( use_default_filter_str.lower() == 'true' )
-
# Process filtering arguments to (a) build a query that represents the filter and (b) builds a
# dictionary that denotes the current filter.
cur_filter_dict = {}
@@ -96,7 +84,6 @@
column_filter = kwargs.get( "f-" + column.key )
elif column.key in base_filter:
column_filter = base_filter.get( column.key )
-
# Method (1) combines a mix of strings and lists of strings into a single string and (2) attempts to de-jsonify all strings.
def from_json_string_recurse(item):
decoded_list = []
@@ -115,8 +102,7 @@
for element in item:
a_list = from_json_string_recurse( element )
decoded_list = decoded_list + a_list
- return decoded_list
-
+ return decoded_list
# If column filter found, apply it.
if column_filter is not None:
# TextColumns may have a mix of json and strings.
@@ -131,7 +117,7 @@
if column_filter == '':
continue
# Update query.
- query = column.filter( trans.sa_session, trans.get_user(), query, column_filter )
+ query = column.filter( trans, trans.get_user(), query, column_filter )
# Upate current filter dict.
cur_filter_dict[ column.key ] = column_filter
# Carry filter along to newly generated urls; make sure filter is a string so
@@ -147,7 +133,6 @@
if not isinstance( column_filter, basestring ):
column_filter = unicode(column_filter)
extra_url_args[ "f-" + column.key ] = column_filter.encode("utf-8")
-
# Process sort arguments.
sort_key = sort_order = None
if 'sort' in kwargs:
@@ -167,10 +152,8 @@
# See reason for not using lower() to do case-insensitive search.
query = query.order_by( self.model_class.table.c.get( sort_key ).asc() )
extra_url_args['sort'] = encoded_sort_key
-
# There might be a current row
current_item = self.get_current_item( trans )
-
# Process page number.
if self.use_paging:
if 'page' in kwargs:
@@ -196,8 +179,6 @@
# Defaults.
page_num = 1
num_pages = 1
-
-
# Preserve grid state: save current filter and sort key.
if self.preserve_state:
pref_name = unicode( self.__class__.__name__ + self.cur_filter_pref_name )
@@ -207,14 +188,12 @@
pref_name = unicode( self.__class__.__name__ + self.cur_sort_key_pref_name )
trans.get_user().preferences[pref_name] = unicode( to_json_string( sort_key ) )
trans.sa_session.flush()
-
# Log grid view.
context = unicode( self.__class__.__name__ )
params = cur_filter_dict.copy()
params['sort'] = sort_key
params['async'] = ( 'async' in kwargs )
trans.log_action( trans.get_user(), unicode( "grid.view"), context, params )
-
# Render grid.
def url( *args, **kwargs ):
# Only include sort/filter arguments if not linking to another
@@ -235,7 +214,6 @@
else:
new_kwargs[ 'id' ] = trans.security.encode_id( id )
return url_for( **new_kwargs )
-
use_panels = ( 'use_panels' in kwargs ) and ( kwargs['use_panels'] == True )
async_request = ( ( self.use_async ) and ( 'async' in kwargs ) and ( kwargs['async'] in [ 'True', 'true'] ) )
return trans.fill_template( iff( async_request, self.async_template, self.template),
@@ -254,9 +232,9 @@
message_type = status,
message = message,
use_panels=use_panels,
- # Pass back kwargs so that grid template can set and use args without grid explicitly having to pass them.
- kwargs=kwargs
- )
+ # Pass back kwargs so that grid template can set and use args without
+ # grid explicitly having to pass them.
+ kwargs=kwargs )
def get_ids( self, **kwargs ):
id = []
if 'id' in kwargs:
@@ -270,7 +248,6 @@
except:
error( "Invalid id" )
return id
-
# ---- Override these ----------------------------------------------------
def handle_operation( self, trans, operation, ids ):
pass
@@ -315,7 +292,7 @@
if self.link and self.link( item ):
return self.link( item )
return None
- def filter( self, db_session, user, query, column_filter ):
+ def filter( self, trans, user, query, column_filter ):
""" Modify query to reflect the column filter. """
if column_filter == "All":
pass
@@ -335,15 +312,14 @@
class TextColumn( GridColumn ):
""" Generic column that employs freetext and, hence, supports freetext, case-independent filtering. """
- def filter( self, db_session, user, query, column_filter ):
+ def filter( self, trans, user, query, column_filter ):
""" Modify query to filter using free text, case independence. """
if column_filter == "All":
pass
elif column_filter:
- query = query.filter( self.get_filter( user, column_filter ) )
+ query = query.filter( self.get_filter( trans, user, column_filter ) )
return query
-
- def get_filter( self, user, column_filter ):
+ def get_filter( self, trans, user, column_filter ):
""" Returns a SQLAlchemy criterion derived from column_filter. """
if isinstance( column_filter, basestring ):
return self.get_single_filter( user, column_filter )
@@ -352,7 +328,6 @@
for filter in column_filter:
clause_list.append( self.get_single_filter( user, filter ) )
return and_( *clause_list )
-
def get_single_filter( self, user, a_filter ):
""" Returns a SQLAlchemy criterion derived for a single filter. Single filter is the most basic filter--usually a string--and cannot be a list. """
model_class_key_field = getattr( self.model_class, self.key )
@@ -364,12 +339,10 @@
GridColumn.__init__( self, col_name, key=key, model_class=model_class, filterable=filterable )
self.sortable = False
self.model_annotation_association_class = model_annotation_association_class
-
def get_value( self, trans, grid, item ):
""" Returns item annotation. """
annotation = self.get_item_annotation_str( trans.sa_session, item.user, item )
return iff( annotation, annotation, "" )
-
def get_single_filter( self, user, a_filter ):
""" Filter by annotation and annotation owner. """
return self.model_class.annotations.any(
@@ -390,20 +363,19 @@
def get_value( self, trans, grid, item ):
return trans.fill_template( "/tagging_common.mako", tag_type="community", trans=trans, user=trans.get_user(), tagged_item=item, elt_context=self.grid_name,
in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter", use_toggle_link=True )
- def filter( self, db_session, user, query, column_filter ):
+ def filter( self, trans, user, query, column_filter ):
""" Modify query to filter model_class by tag. Multiple filters are ANDed. """
if column_filter == "All":
pass
elif column_filter:
- query = query.filter( self.get_filter( user, column_filter ) )
+ query = query.filter( self.get_filter( trans, user, column_filter ) )
return query
- def get_filter( self, user, column_filter ):
+ def get_filter( self, trans, user, column_filter ):
# Parse filter to extract multiple tags.
- tag_handler = GalaxyTagHandler()
if isinstance( column_filter, list ):
# Collapse list of tags into a single string; this is redundant but effective. TODO: fix this by iterating over tags.
column_filter = ",".join( column_filter )
- raw_tags = tag_handler.parse_tags( column_filter.encode("utf-8") )
+ raw_tags = trans.app.tag_handler.parse_tags( column_filter.encode( "utf-8" ) )
clause_list = []
for name, value in raw_tags.items():
if name:
@@ -417,15 +389,21 @@
class IndividualTagsColumn( CommunityTagsColumn ):
""" Column that supports individual tags. """
def get_value( self, trans, grid, item ):
- return trans.fill_template( "/tagging_common.mako", tag_type="individual", trans=trans, user=trans.get_user(), tagged_item=item, elt_context=self.grid_name,
- in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter", use_toggle_link=True )
- def get_filter( self, user, column_filter ):
+ return trans.fill_template( "/tagging_common.mako",
+ tag_type="individual",
+ user=trans.user,
+ tagged_item=item,
+ elt_context=self.grid_name,
+ in_form=True,
+ input_size="20",
+ tag_click_fn="add_tag_to_grid_filter",
+ use_toggle_link=True )
+ def get_filter( self, trans, user, column_filter ):
# Parse filter to extract multiple tags.
- tag_handler = GalaxyTagHandler()
if isinstance( column_filter, list ):
# Collapse list of tags into a single string; this is redundant but effective. TODO: fix this by iterating over tags.
column_filter = ",".join( column_filter )
- raw_tags = tag_handler.parse_tags( column_filter.encode("utf-8") )
+ raw_tags = trans.app.tag_handler.parse_tags( column_filter.encode( "utf-8" ) )
clause_list = []
for name, value in raw_tags.items():
if name:
@@ -441,7 +419,7 @@
def __init__( self, col_name, cols_to_filter, key, visible, filterable="default" ):
GridColumn.__init__( self, col_name, key=key, visible=visible, filterable=filterable)
self.cols_to_filter = cols_to_filter
- def filter( self, db_session, user, query, column_filter ):
+ def filter( self, trans, user, query, column_filter ):
""" Modify query to filter model_class by tag. Multiple filters are ANDed. """
if column_filter == "All":
return query
@@ -450,15 +428,14 @@
for filter in column_filter:
part_clause_list = []
for column in self.cols_to_filter:
- part_clause_list.append( column.get_filter( user, filter ) )
+ part_clause_list.append( column.get_filter( trans, user, filter ) )
clause_list.append( or_( *part_clause_list ) )
complete_filter = and_( *clause_list )
else:
clause_list = []
for column in self.cols_to_filter:
- clause_list.append( column.get_filter( user, column_filter ) )
+ clause_list.append( column.get_filter( trans, user, column_filter ) )
complete_filter = or_( *clause_list )
-
return query.filter( complete_filter )
class OwnerColumn( TextColumn ):
@@ -495,7 +472,6 @@
# Delete items cannot be shared.
if item.deleted:
return ""
-
# Build a list of sharing for this item.
sharing_statuses = []
if item.users_shared_with:
@@ -505,13 +481,11 @@
if item.published:
sharing_statuses.append( "Published" )
return ", ".join( sharing_statuses )
-
def get_link( self, trans, grid, item ):
if not item.deleted and ( item.users_shared_with or item.importable or item.published ):
return dict( operation="share or publish", id=item.id )
return None
-
- def filter( self, db_session, user, query, column_filter ):
+ def filter( self, trans, user, query, column_filter ):
""" Modify query to filter histories by sharing status. """
if column_filter == "All":
pass
@@ -526,7 +500,6 @@
elif column_filter == "published":
query = query.filter( self.model_class.published == True )
return query
-
def get_accepted_filters( self ):
""" Returns a list of accepted filters for this column. """
accepted_filter_labels_and_vals = odict()
@@ -560,7 +533,6 @@
return temp
else:
return dict( operation=self.label, id=item.id )
-
def allowed( self, item ):
if self.condition:
return self.condition( item )
@@ -586,4 +558,3 @@
for k, v in self.args.items():
rval[ "f-" + k ] = v
return rval
-
diff -r 99782dc9d022 -r d6a527033f2c templates/grid_base.mako
--- a/templates/grid_base.mako Tue Apr 13 20:21:57 2010 -0400
+++ b/templates/grid_base.mako Wed Apr 14 09:38:35 2010 -0400
@@ -1,6 +1,5 @@
<%!
from galaxy.web.framework.helpers.grids import TextColumn
- from galaxy.model import History, HistoryDatasetAssociation, User, Role, Group
import galaxy.util
def inherit(context):
if context.get('use_panels'):
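
The pattern behind most of the hunks above: column filter() and get_filter() methods now take trans rather than a bare db_session, so they can reach application-level services such as trans.app.tag_handler instead of importing and instantiating GalaxyTagHandler themselves. A minimal sketch of that style with toy stand-in classes, not the actual Galaxy grid code:

class App(object):
    # Toy application object exposing shared services.
    def __init__(self, tag_handler):
        self.tag_handler = tag_handler

class Trans(object):
    # Toy per-request transaction: carries the app and the database session.
    def __init__(self, app, sa_session=None):
        self.app = app
        self.sa_session = sa_session

class TagsColumn(object):
    # Before the refactor: filter(self, db_session, user, query, column_filter)
    # built its own GalaxyTagHandler, coupling the column to a concrete class.
    def filter(self, trans, user, query, column_filter):
        if column_filter in (None, "", "All"):
            return query
        raw_tags = trans.app.tag_handler.parse_tags(column_filter)
        # ...translate raw_tags into SQLAlchemy clauses and apply them to query...
        return query

class FakeTagHandler(object):
    # Crude stand-in for GalaxyTagHandler.parse_tags: "a:1,b" -> {"a": "1", "b": None}
    def parse_tags(self, s):
        tags = {}
        for part in s.split(","):
            name, _, value = part.partition(":")
            tags[name.strip()] = value.strip() or None
        return tags

trans = Trans(App(FakeTagHandler()))
print(TagsColumn().filter(trans, user=None, query=["row"], column_filter="rna:seq"))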

16 Apr '10
details: http://www.bx.psu.edu/hg/galaxy/rev/99782dc9d022
changeset: 3643:99782dc9d022
user: Kanwei Li <kanwei(a)gmail.com>
date: Tue Apr 13 20:21:57 2010 -0400
description:
- Add support for custom dbkeys using user prefs db table (currently only enabled when enable_tracks=True)
- Fix async dataset delete: dataset was disappearing when not successfully deleted
diffstat:
lib/galaxy/web/controllers/user.py | 54 +++++++++++++++++
templates/root/history.mako | 34 ++++++----
templates/user/dbkeys.mako | 94 +++++++++++++++++++++++++++++++
templates/webapps/galaxy/base_panels.mako | 3 +
4 files changed, 170 insertions(+), 15 deletions(-)
diffs (225 lines):
diff -r 122a4568c046 -r 99782dc9d022 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Tue Apr 13 17:29:18 2010 -0400
+++ b/lib/galaxy/web/controllers/user.py Tue Apr 13 20:21:57 2010 -0400
@@ -7,6 +7,7 @@
import logging, os, string, re
from random import choice
from galaxy.web.form_builder import *
+from galaxy.util.json import from_json_string, to_json_string
log = logging.getLogger( __name__ )
@@ -841,3 +842,56 @@
user_id=user_id,
message='Address <b>%s</b> undeleted' % user_address.desc,
status='done') )
+
+ @web.expose
+ @web.require_login()
+ def dbkeys( self, trans, **kwds ):
+ user = trans.get_user()
+ message = None
+ lines_skipped = 0
+ if 'dbkeys' not in user.preferences:
+ dbkeys = {}
+ else:
+ dbkeys = from_json_string(user.preferences['dbkeys'])
+
+ if 'delete' in kwds:
+ key = kwds.get('key', '')
+ if key and key in dbkeys:
+ del dbkeys[key]
+
+ elif 'add' in kwds:
+ name = kwds.get('name', '')
+ key = kwds.get('key', '')
+ len_file = kwds.get('len_file', None)
+ if getattr(len_file, "file", None): # Check if it's a FieldStorage object
+ len_text = len_file.file.read()
+ else:
+ len_text = kwds.get('len_text', '')
+ if not name or not key or not len_text:
+ message = "You must specify values for all the fields."
+ else:
+ chrom_dict = {}
+ for line in len_text.split("\n"):
+ lst = line.strip().split()
+ if not lst or len(lst) < 2:
+ lines_skipped += 1
+ continue
+ chrom, length = lst[0], lst[1]
+ try:
+ length = int(length)
+ except ValueError:
+ lines_skipped += 1
+ continue
+ chrom_dict[chrom] = length
+ dbkeys[key] = { "name": name, "chroms": chrom_dict }
+
+ user.preferences['dbkeys'] = to_json_string(dbkeys)
+ trans.sa_session.flush()
+
+ return trans.fill_template( 'user/dbkeys.mako',
+ user=user,
+ dbkeys=dbkeys,
+ message=message,
+ lines_skipped=lines_skipped )
+
+
\ No newline at end of file
diff -r 122a4568c046 -r 99782dc9d022 templates/root/history.mako
--- a/templates/root/history.mako Tue Apr 13 17:29:18 2010 -0400
+++ b/templates/root/history.mako Tue Apr 13 20:21:57 2010 -0400
@@ -33,21 +33,25 @@
$( '#historyItem-' + data_id + "> div.historyItemTitleBar" ).addClass( "spinner" );
$.ajax({
url: "${h.url_for( action='delete_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Delete failed" ) },
- success: function() {
- %if show_deleted:
- var to_update = {};
- to_update[data_id] = "none";
- updater( to_update );
- %else:
- $( "#historyItem-" + data_id ).fadeOut( "fast", function() {
- $( "#historyItemContainer-" + data_id ).remove();
- if ( $( "div.historyItemContainer" ).length < 1 ) {
- $( "#emptyHistoryMessage" ).show();
- }
- });
- %endif
- $(".tipsy").remove();
+ error: function() { alert( "Delete failed" ); },
+ success: function(msg) {
+ if (msg === "OK") {
+ %if show_deleted:
+ var to_update = {};
+ to_update[data_id] = "none";
+ updater( to_update );
+ %else:
+ $( "#historyItem-" + data_id ).fadeOut( "fast", function() {
+ $( "#historyItemContainer-" + data_id ).remove();
+ if ( $( "div.historyItemContainer" ).length < 1 ) {
+ $( "#emptyHistoryMessage" ).show();
+ }
+ });
+ %endif
+ $(".tipsy").remove();
+ } else {
+ alert( "Delete failed" );
+ }
}
});
return false;
diff -r 122a4568c046 -r 99782dc9d022 templates/user/dbkeys.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/user/dbkeys.mako Tue Apr 13 20:21:57 2010 -0400
@@ -0,0 +1,94 @@
+<%inherit file="/base.mako"/>
+<%def name="title()">Custom Database Builds</%def>
+
+<style type="text/css">
+ th, td {
+ min-width: 100px;
+ vertical-align: text-top;
+ }
+ pre {
+ padding: 0;
+ margin: 0;
+ }
+</style>
+
+<script type="text/javascript">
+
+$(function() {
+ $(".db_hide").each(function() {
+ var pre = $(this);
+ pre.hide();
+ pre.siblings("span").wrap( "<a href='javascript:void();'></a>" ).click( function() {
+ pre.toggle();
+ });
+ });
+});
+
+</script>
+
+% if message:
+ <div class="errormessagelarge">${message}</div>
+% endif
+
+% if lines_skipped > 0:
+ <div class="warningmessagelarge">Skipped ${lines_skipped} lines that could not be parsed</div>
+% endif
+
+<h2>Custom Database/Builds</h2>
+
+<p>You may specify your own database/builds here.</p>
+
+% if dbkeys:
+ <table class="colored" cellspacing="0" cellpadding="0">
+ <tr class="header">
+ <th>Name</th>
+ <th>Key</th>
+ <th>Chroms/Lengths</th>
+ <th></th>
+ </tr>
+ % for key, dct in dbkeys.iteritems():
+ <tr>
+ <td>${dct["name"] | h}</td>
+ <td>${key | h}</td>
+ <td>
+ <span>${len(dct["chroms"])} entries</span>
+ <pre id="pre_${key}" class="db_hide">
+ <table cellspacing="0" cellpadding="0">
+ <tr><th>Chrom</th><th>Length</th></tr>
+ % for chrom, chrom_len in dct["chroms"].iteritems():
+ <tr><td>${chrom | h}</td><td>${chrom_len | h}</td></tr>
+ % endfor
+ </table>
+ </pre>
+ </td>
+ <td><form action="dbkeys" method="post"><input type="hidden" name="key" value="${key}" /><input type="submit" name="delete" value="Delete" /></form></td>
+ </tr>
+ % endfor
+ </table>
+% else:
+ <p>You currently have no custom builds.</p>
+% endif
+<br />
+<form action="dbkeys" method="post" enctype="multipart/form-data">
+ <div class="toolForm">
+ <div class="toolFormTitle">Add a Build</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <label for="name">Name (eg: Human Chromosome):</label>
+ <input type="text" id="name" name="name" />
+ </div>
+ <div class="form-row">
+ <label for="key">Key (eg: hg18):</label>
+ <input type="text" id="key" name="key" />
+ </div>
+ <div class="form-row">
+ <label for="len_file">Chromosome Length file upload (.len file):</label>
+ <input type="file" id="len_file" name="len_file" /><br />
+ <label for="len_text">Alternatively, paste length info:</label>
+ <textarea id="len_text" name="len_text" cols="40" rows="10"></textarea>
+ </div>
+
+ <div class="form-row"><input type="submit" name="add" value="Submit"/></div>
+ </div>
+ </div>
+</form>
\ No newline at end of file
diff -r 122a4568c046 -r 99782dc9d022 templates/webapps/galaxy/base_panels.mako
--- a/templates/webapps/galaxy/base_panels.mako Tue Apr 13 17:29:18 2010 -0400
+++ b/templates/webapps/galaxy/base_panels.mako Tue Apr 13 20:21:57 2010 -0400
@@ -119,6 +119,9 @@
else:
logout_url = h.url_for( controller='/user', action='logout' )
%>
+ %if app.config.get_bool( 'enable_tracks', False ):
+ <li><a target="galaxy_main" href="${h.url_for( controller='/user', action='dbkeys' )}">Custom Builds</a></li>
+ %endif
<li><a target="_top" href="${logout_url}">Logout</a></li>
%endif
<li><hr style="color: inherit; background-color: gray"/></li>
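
For reference, the dbkeys() controller above keeps everything in a single JSON-encoded value under user.preferences['dbkeys']: each build key maps to a display name plus a chrom-to-length dict parsed from the uploaded .len file or the pasted text. A small sketch of that stored shape, using the stdlib json module as a stand-in for galaxy.util.json and with illustrative key names and lengths:

import json  # stand-in for galaxy.util.json's to_json_string / from_json_string

dbkeys = {
    "hg18_custom": {                  # the "Key" form field (e.g. hg18)
        "name": "Human Chromosome",   # the "Name" form field
        "chroms": {                   # parsed from "<chrom> <length>" lines
            "chr1": 247249719,        # lengths here are illustrative only
            "chr2": 242951149,
        },
    },
}

serialized = json.dumps(dbkeys)       # what gets written to the preference row
restored = json.loads(serialized)     # what dbkeys() reads back on the next request
assert restored["hg18_custom"]["chroms"]["chr1"] == 247249719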
details: http://www.bx.psu.edu/hg/galaxy/rev/122a4568c046
changeset: 3642:122a4568c046
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Tue Apr 13 17:29:18 2010 -0400
description:
Merge
diffstat:
templates/root/history.mako | 1 +
1 files changed, 1 insertions(+), 0 deletions(-)
diffs (11 lines):
diff -r 37c075416918 -r 122a4568c046 templates/root/history.mako
--- a/templates/root/history.mako Tue Apr 13 17:12:00 2010 -0400
+++ b/templates/root/history.mako Tue Apr 13 17:29:18 2010 -0400
@@ -47,6 +47,7 @@
}
});
%endif
+ $(".tipsy").remove();
}
});
return false;

16 Apr '10
details: http://www.bx.psu.edu/hg/galaxy/rev/37c075416918
changeset: 3641:37c075416918
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Tue Apr 13 17:12:00 2010 -0400
description:
Rudimentary Cufflinks wrapper. Also created a directory for all NGS RNA-seq tools, and Tophat now uses Bowtie indices rather than its own indices file.
diffstat:
test-data/cufflinks_in.sam | 183 ++++++++++++++++++++++++++++++++++++
test-data/cufflinks_out1.gtf | 4 +
test-data/cufflinks_out2.expr | 2 +
test-data/cufflinks_out3.expr | 2 +
tool-data/tophat_indices.loc.sample | 29 -----
tool_conf.xml.sample | 5 +-
tools/ngs_rna/cufflinks_wrapper.py | 83 ++++++++++++++++
tools/ngs_rna/cufflinks_wrapper.xml | 91 +++++++++++++++++
tools/ngs_rna/tophat_wrapper.py | 80 +++++++++++++++
tools/ngs_rna/tophat_wrapper.xml | 129 +++++++++++++++++++++++++
tools/tophat/tophat_wrapper.py | 80 ---------------
tools/tophat/tophat_wrapper.xml | 124 ------------------------
12 files changed, 577 insertions(+), 235 deletions(-)
diffs (872 lines):
diff -r 869e494a8074 -r 37c075416918 test-data/cufflinks_in.sam
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cufflinks_in.sam Tue Apr 13 17:12:00 2010 -0400
@@ -0,0 +1,183 @@
+test_mRNA_3_187_51 99 test_chromosome 53 255 75M = 163 0 TACTATTTGACTAGACTGGAGGCGCTTGCGACTGAGCTAGGACGTGCCACTACGGGGATGACGACTCGGACTACG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_4_191_5d 163 test_chromosome 54 255 75M = 167 0 ACTATCTGACGAGACTGGAGGCGCTTGCGACTGAGCTAGGACGTACCATTACGCGGATGACGACTAGGACTACGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4
+test_mRNA_5_197_46 97 test_chromosome 55 255 75M = 173 0 CTATCTGACTAGACTCGAGGCGCTTGCGTCTGAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_6_182_59 99 test_chromosome 56 255 75M = 158 0 TATCTGACTAGACTGGAGGCGCTTGCGACTGAGCTAGGACGTGCCAGTACGGGGATGACGACTAGGACTACGGAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_8_155_9 163 test_chromosome 58 255 75M = 131 0 TGTGACTAGACTGGAGGCGCTTGCGACTGAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_8_197_1 99 test_chromosome 58 255 75M = 173 0 TCTGACTAGACTGGAGGCGCTTGCGACTGAGCTAGGACGTGACACTACGGGGATGGCGACTAGGACTACGGACGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_9_179_52 163 test_chromosome 59 255 75M = 155 0 CTGACTAGACTGGAGGCGCTCGCGACTGAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_11_190_1a 99 test_chromosome 61 255 75M = 166 0 GACTAGACTGGAGGCGCTTGCGACTGAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_16_194_10 163 test_chromosome 66 255 75M = 170 0 GACTGGATGCGCTTGCGACTGAGCTAGGACGTGCCACTACGGGGATGACGACTCGGACTACGGACGGACTTAAAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_21_208_24 163 test_chromosome 71 255 75M = 184 0 GAGGCGCTTGCGACTGAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_22_173_62 99 test_chromosome 72 255 75M = 149 0 AGGCGCTTGCGACTGAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_23_186_42 163 test_chromosome 73 255 75M = 162 0 GGCGCTTGTGACTGAGCTAGGACGTGCCACTACGGGGATGAAGACTAGGACTACGGACGGACTTAGAGCGTCAGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_26_189_30 163 test_chromosome 76 255 75M = 165 0 GCTTGCGACTGAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_28_188_11 99 test_chromosome 78 255 75M = 164 0 TTGCGACTGAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGAACGGACTTAGAGCGTCAGATGCAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_28_206_1f 73 test_chromosome 78 255 75M * 0 0 TTGCGACTGAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGACGCAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_30_231_3c 161 test_chromosome 80 255 75M = 207 0 GCGACTGAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_33_223_4e 73 test_chromosome 83 255 75M * 0 0 ACTGAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_36_146_27 163 test_chromosome 86 255 75M = 122 0 GCGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACAGACGGACTTAGAGCGTCAGATGCAGCGACTGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_36_218_12 99 test_chromosome 86 255 75M = 194 0 GAGCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGCCTTAGAGCGTCAGATGCAGCGACTGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_38_199_29 99 test_chromosome 88 255 75M = 175 0 GCTAGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_39_219_5c 99 test_chromosome 89 255 75M = 195 0 CTAGGACGTCCCACTATGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGGCTGGACTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_41_236_55 97 test_chromosome 91 255 75M = 212 0 AGGACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGAATATT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_42_209_25 99 test_chromosome 92 255 75M = 185 0 GGACGTGCCACTACGTGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_44_193_3f 99 test_chromosome 94 255 75M = 169 0 ACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGTCTATTTAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_44_197_35 99 test_chromosome 94 255 75M = 173 0 ACGTGCAACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_44_225_1e 163 test_chromosome 94 255 75M = 201 0 ACGTGCCACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCGGGTGCAGCGACTGGACTATTTAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_46_195_17 137 test_chromosome 96 255 75M * 0 0 GTGCCACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_46_232_2f 99 test_chromosome 96 255 75M = 208 0 GTGCCACTACGGGGATGACGACTAGGACTACGGCCGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_48_207_39 73 test_chromosome 98 255 75M * 0 0 GCCCCTACGGGGATGACGACTAGGACTACGGACGGATTTAGACCGTCAGATGCAGCGACTGGACTATTTAGGACG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_48_249_20 161 test_chromosome 98 255 75M = 225 0 GCCACTACGGGGATGACGACTAGGACGACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_50_224_2d 163 test_chromosome 100 255 75M = 200 0 CACTACGAGGATGACGTCTAGGACTACGGACGGACTTAGAGCGTCAGACGCAGCGACTGGACTATTTAGGACGAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_51_194_47 163 test_chromosome 101 255 75M = 170 0 ACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_51_194_49 99 test_chromosome 101 255 75M = 170 0 ACTACGGGGATGACGACTAGGCCTACGGATGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_51_237_a 99 test_chromosome 101 255 75M = 213 0 ACTACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_51_248_14 97 test_chromosome 101 255 75M = 224 0 ACTACGGGGATGACGACGAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGAACTTTTTAGGACGATC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_52_261_1b 97 test_chromosome 102 255 75M = 237 0 CTACGGGAATGACGACTAGGGCTACGGAGGGACTTACAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4
+test_mRNA_53_212_19 99 test_chromosome 103 255 75M = 188 0 TACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGAATATTTAGGACGATCGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_53_272_5a 161 test_chromosome 103 255 75M = 248 0 TACGGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_56_183_56 99 test_chromosome 106 255 75M = 159 0 GGGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTGGGACGATCGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_57_231_8 99 test_chromosome 107 255 75M = 207 0 GGGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCACCGACTGGACTATTTAGGACGATCGGACTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_58_218_16 163 test_chromosome 108 255 75M = 194 0 GGATGACGACTAGGACTACGGACGGACTTAGAACGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_58_220_3d 163 test_chromosome 108 255 75M = 196 0 GGATGACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_58_234_7 163 test_chromosome 108 255 75M = 210 0 GGATGACGCCTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_63_229_4c 163 test_chromosome 113 255 75M = 205 0 ACGACTAGGACTACGGACGGACTTAGAGCGTCAGATGCAGGGACTGGACTATTTAGGACGATCGGACTGAGGAGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_65_238_2e 99 test_chromosome 115 255 75M = 214 0 GACTAGGACTACGGACGGACTTAGAGCGTCAGAAGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_69_229_23 163 test_chromosome 119 255 75M = 205 0 AGGACTACGGACGGACTTATAGGGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_36_146_27 83 test_chromosome 122 255 75M = 86 0 ACTACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGTGCAGTAGGT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_72_258_4 163 test_chromosome 122 255 75M = 234 0 ACTACGGACGGACTTAGAGCGTCAGATGCAGCAACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_73_240_34 99 test_chromosome 123 255 75M = 216 0 CTACGGACGGACTTAGAGCGTCAGATGCAGCGAATGGACTATTTAGGACGCTCGGACTGAGGAGGGCAGTAGGAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_73_259_5e 99 test_chromosome 123 255 75M = 235 0 CTACGGACGGACTTAGAGCGTCAGATGCTGCGACTGGACTATTTGGGACGATCGGACTGAGGAGGGCAGTAGGAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_75_204_54 73 test_chromosome 125 255 75M * 0 0 ACGGACGGACTTCGAGCCTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_75_235_21 73 test_chromosome 125 255 75M * 0 0 ACGGACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGCACGATCGGACTGAGGAGGGCAGTAGAACGT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_75_277_3b 97 test_chromosome 125 255 75M = 353 0 ACGGACGGACTTAAAGCTTCAGATGCAGCGACAGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_77_256_2c 73 test_chromosome 127 255 75M * 0 0 GGACGGACTTAGAGCATCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_78_276_4b 97 test_chromosome 128 255 75M = 352 0 GACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGGCGCTAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_79_256_31 137 test_chromosome 129 255 75M * 0 0 ACGGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_81_228_3a 163 test_chromosome 131 255 75M = 204 0 GGACTGAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGTAGTAGGACGCTACGTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_81_245_4d 163 test_chromosome 131 255 75M = 221 0 GGACTTAGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGATGAGGGCAGTAGGACGCTACGTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_8_155_9 83 test_chromosome 131 255 75M = 58 0 GGACTTCGAGCGTCAGATGCAGCGACTGTACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_82_255_2 137 test_chromosome 132 255 75M * 0 0 GACTTAGAGCGTCAGATGCAGCGACTGGACTTTTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_82_271_58 99 test_chromosome 132 255 75M = 247 0 GACTTAGAGCGTCAGTTGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_85_268_53 99 test_chromosome 135 255 75M = 244 0 TTAGTGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_87_250_57 163 test_chromosome 137 255 75M = 226 0 AGAGCGTCAGATGCAGAGACTGGACTATTTAGGACGATCGGACTGAGGAGTGCAGTAGGACGCTACGTATTTGGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_87_279_5f 161 test_chromosome 137 255 75M = 355 0 AGAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACCGAGGAGGGCAGTAGGACGCTACGTATTTGGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_88_257_50 137 test_chromosome 138 255 75M * 0 0 GAGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_89_230_b 163 test_chromosome 139 255 75M = 206 0 AGCGTCAGGTGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_89_245_15 99 test_chromosome 139 255 75M = 221 0 AGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_89_267_32 163 test_chromosome 139 255 75M = 243 0 AGCGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGAGTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_91_256_41 73 test_chromosome 141 255 75M * 0 0 CGTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_92_250_44 99 test_chromosome 142 255 75M = 226 0 GTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0
+test_mRNA_92_266_43 99 test_chromosome 142 255 75M = 242 0 GTCAGATGCAGCGACTGGACTATTTAGGACGATCGGACTCAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_96_238_3 163 test_chromosome 146 255 75M = 214 0 GATGCAGCGACTGGACTATTTAGGACGATCGGACGGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGACC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_97_275_26 97 test_chromosome 147 255 75M = 351 0 ATGCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_22_173_62 147 test_chromosome 149 255 75M = 72 0 GCAGCGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_103_284_2a 161 test_chromosome 153 255 75M = 360 0 CGACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_104_278_3e 161 test_chromosome 154 255 75M = 354 0 GACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTTTTTGGCGCGCGGCCCTACGGCTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_105_266_13 163 test_chromosome 155 255 75M = 242 0 ACTGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_105_276_c 161 test_chromosome 155 255 75M = 352 0 ACTGGACTATTTAGGACGATCGGACTGAGGAAGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_9_179_52 83 test_chromosome 155 255 75M = 59 0 ACTGGACCATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_106_253_45 137 test_chromosome 156 255 75M * 0 0 CTGGACTATTTAGGTCGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_107_286_5 161 test_chromosome 157 255 75M = 362 0 TGGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGCATTTGGCGCGCGGCCCTACGGCTGAGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_6_182_59 147 test_chromosome 158 255 75M = 56 0 GGACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_56_183_56 147 test_chromosome 159 255 75M = 106 0 GACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_110_267_22 163 test_chromosome 160 255 75M = 243 0 ACTAGTTAGGGCGATCGGACTGAGGAGGGCAGTAGGACGCTACGTAGTTGGCGCGCGGCCCTACGACTGAGCGTC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:5
+test_mRNA_110_271_28 99 test_chromosome 160 255 75M = 247 0 ACTATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_111_297_61 161 test_chromosome 161 255 75M = 373 0 CTATTTAGGACGATCGGACTGGGGAGGGCAGTAGGACGCTACGGATTTGGCGCGCGGCCCTACGGCTGAGCGTCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_23_186_42 83 test_chromosome 162 255 75M = 73 0 TATTTAGGACGATCGGACGGAGGAGGGCAGAAGGACGCTACGTATTTGGCGCGCGGCCCTACGACTGAGCGTCGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4
+test_mRNA_3_187_51 147 test_chromosome 163 255 75M = 53 0 ATTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_114_277_5b 161 test_chromosome 164 255 75M = 353 0 TTTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGCCTGAGCGTCGAGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_28_188_11 147 test_chromosome 164 255 75M = 78 0 TTTAGGACGATCGGACTGAGGAAGGCAGTAGGACGCTTCGTATTTGGCGCGAGGCCCTACGGCTGAGCGTCGAGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4
+test_mRNA_26_189_30 83 test_chromosome 165 255 75M = 76 0 TTAGGACGATCGGACTGAGGAGGGCAGTAGGACGGTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_33_189_4a 89 test_chromosome 165 255 75M * 0 0 TTAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACCTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGGGCT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_116_271_2b 163 test_chromosome 166 255 75M = 247 0 TAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_116_295_63 161 test_chromosome 166 255 75M = 371 0 TAGGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_11_190_1a 147 test_chromosome 166 255 75M = 61 0 TAGGTCGATGGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGTGGCCCTACGGCTGAGCGTCGAGCTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4
+test_mRNA_4_191_5d 83 test_chromosome 167 255 75M = 54 0 AGGACGATCGGACTGAGTAGGGCAGTAGGACACTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_118_297_f 161 test_chromosome 168 255 75M = 373 0 GGACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_44_193_3f 147 test_chromosome 169 255 75M = 94 0 GACGATCGGACTGGGGAGAGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_16_194_10 83 test_chromosome 170 255 75M = 66 0 ACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_51_194_47 83 test_chromosome 170 255 75M = 101 0 ACGATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_51_194_49 147 test_chromosome 170 255 75M = 101 0 ACGTTCGGACTGAGGAGGGCAGTAGGACGCCACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4
+test_mRNA_122_299_6 161 test_chromosome 172 255 75M = 375 0 GATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_44_197_35 147 test_chromosome 173 255 75M = 94 0 ATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGATCGTCGAGCTTGCGATAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+test_mRNA_5_197_46 145 test_chromosome 173 255 75M = 55 0 ATCGGACGGAGGAGGGCAGTAGGACGCTACGTATTTGGCGGGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_8_197_1 147 test_chromosome 173 255 75M = 58 0 ATCGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_125_280_48 97 test_chromosome 175 255 75M = 356 0 CGGACTGAGGAGGGCAGTAGGACGCTATGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGAAACGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3
+test_mRNA_125_293_60 161 test_chromosome 175 255 75M = 369 0 CGGACTGAGGAGGGCAGTAGGACGCTATGTATTTGGCGCGCGGCCCTACGGCTGAGCTTCGAGGTTGCGATACGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4
+test_mRNA_38_199_29 147 test_chromosome 175 255 75M = 88 0 CGGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_126_282_18 161 test_chromosome 176 255 75M = 358 0 GGACTGAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1
+test_mRNA_131_260_33 99 test_chromosome 181 255 70M100N5M = 236 0 GAGGAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_21_208_24 83 test_chromosome 184 255 67M100N8M = 71 0 GAGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGCCTGAGCGTCGAGCTTGCGATACGCCACTATTAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_42_209_25 147 test_chromosome 185 255 66M100N9M = 92 0 AGGGCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_53_212_19 147 test_chromosome 188 255 63M100N12M = 103 0 GCAGTAGGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTTCTTTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:1
+test_mRNA_36_218_12 147 test_chromosome 194 255 57M100N18M = 86 0 GGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_58_218_16 83 test_chromosome 194 255 57M100N18M = 108 0 GGACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_145_300_37 163 test_chromosome 195 255 56M100N19M = 376 0 GACGCTACGTATTTGGCGCGGGGCCCTATGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTAGTATATT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:5 XS:A:+ NS:i:2
+test_mRNA_39_219_5c 147 test_chromosome 195 255 56M100N19M = 89 0 GACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCCAGCTTGCGATACGCCACTATTACTTTATTATCTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_58_220_3d 83 test_chromosome 196 255 55M100N20M = 108 0 ACGCTACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGGTTGCGATACGCCACTATTACTTTATTATCTTC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:1
+test_mRNA_50_224_2d 83 test_chromosome 200 255 51M100N24M = 100 0 TACGTATTTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_44_225_1e 83 test_chromosome 201 255 50M100N25M = 94 0 ACGTATATGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_81_228_3a 83 test_chromosome 204 255 47M100N28M = 131 0 TATTTGGCGCGCGGCCCTATGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGTAGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4 XS:A:+ NS:i:0
+test_mRNA_63_229_4c 83 test_chromosome 205 255 46M100N29M = 113 0 ATTTGGCGCGCGGCCCTACGGCTGAGTGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:1
+test_mRNA_69_229_23 83 test_chromosome 205 255 46M100N29M = 119 0 CTTTGGCGCGCGGCCCTACGGCTGAGCGTCTAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:1
+test_mRNA_89_230_b 83 test_chromosome 206 255 45M100N30M = 139 0 TCTGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTAACTCACTCGGACGTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4 XS:A:+ NS:i:0
+test_mRNA_30_231_3c 81 test_chromosome 207 255 44M100N31M = 80 0 TTGGCGCGCGGCCCTACGGCTAAGCGTCGAGCTTGCGATACGCCACTATTACTTTAATATCTTACTCGCACGTAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4 XS:A:+ NS:i:0
+test_mRNA_57_231_8 147 test_chromosome 207 255 44M100N31M = 107 0 TTGGCGCGCGGCCCTAGGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGTAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_46_232_2f 147 test_chromosome 208 255 43M100N32M = 96 0 TGGCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGTAGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_58_234_7 83 test_chromosome 210 255 41M100N34M = 108 0 GCGCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTAGTTTATTATCTGACTCGGACGTAGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4 XS:A:+ NS:i:1
+test_mRNA_41_236_55 145 test_chromosome 212 255 39M100N36M = 91 0 GCGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGTAGACGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_51_237_a 147 test_chromosome 213 255 38M100N37M = 101 0 CGCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGTAGACGGAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_65_238_2e 147 test_chromosome 214 255 37M100N38M = 115 0 GCGGCCCTACGGCTGCGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGTAGACGGATC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_96_238_3 83 test_chromosome 214 255 37M100N38M = 146 0 GCGGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCTACTAGTACTTTATTATCTTACGCGGACGTAGACGGATC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4 XS:A:+ NS:i:2
+test_mRNA_73_240_34 147 test_chromosome 216 255 35M100N40M = 123 0 GGCCCTACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTTCTCGGACGTAGACGGATCGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_81_245_4d 83 test_chromosome 221 255 30M100N45M = 131 0 TACGGCTGAGCGTCGAGGTTGCGATACGCCACTATTACTTTATAATCTTACTCGGACGTAGACGGATCGGCAACG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:1
+test_mRNA_89_245_15 147 test_chromosome 221 255 30M100N45M = 139 0 TACGGCTGAGCGTCGAGCTTGCGATACGCCACTATTTCTCTATTATCTTACTCGGACGTAGACGGATCGGCAACG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:2
+test_mRNA_172_294_4f 99 test_chromosome 222 255 29M100N46M = 370 0 ACGGATGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTCCTCGGACGTAGACGGATCGCCAACGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:1
+test_mRNA_51_248_14 145 test_chromosome 224 255 27M100N48M = 101 0 GGCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGTAGACGGAACGGCAACGGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:0
+test_mRNA_48_249_20 81 test_chromosome 225 255 26M100N49M = 98 0 GCTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTACTATCTTACTCGGACGGAGACGGATCGGCAACGGGAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:0
+test_mRNA_87_250_57 83 test_chromosome 226 255 25M100N50M = 137 0 ATGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_92_250_44 147 test_chromosome 226 255 25M100N50M = 142 0 CTGAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGTAGACGGATCGGGTACGGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_128_252_36 153 test_chromosome 228 255 23M100N52M * 0 0 GAGCGTCGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGTAGACGGATCGGGAACGGGACTTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4 XS:A:+ NS:i:0
+test_mRNA_72_258_4 83 test_chromosome 234 255 17M100N50M100N8M = 122 0 CGAGCTTGCGATACGCCACTATTACTTTATTATCTTACTCGGACGTAGACGGATGGGCAACGGGACTTTTTCTAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:1
+test_mRNA_73_259_5e 147 test_chromosome 235 255 16M100N50M100N9M = 123 0 GAGCTTGCGATACGCCACTATTACTGTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_131_260_33 147 test_chromosome 236 255 15M100N50M100N10M = 181 0 AGCTTGTGATACGCCACTATTACTTTATTATCTTACTCGGACGTAAACGGATCGGCCACGGGACTTTTTTTACTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4 XS:A:+ NS:i:3
+test_mRNA_52_261_1b 145 test_chromosome 237 255 14M100N50M100N11M = 102 0 GCTTGCGATACGCCACTATTACTTAATTATCTTACTCGGACGTAGAAGGATCGGCAACGGGACTTTTTCTACTTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:1
+test_mRNA_105_266_13 83 test_chromosome 242 255 9M100N50M100N16M = 155 0 CGATCCGCCACTATTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:1
+test_mRNA_92_266_43 147 test_chromosome 242 255 9M100N50M100N16M = 142 0 CGATACGCCACTATTACTTTCTTATCTTACTCGGACGTAGACGGAGCGGCAACGGGACTTTTTCTACTTGAGACC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:2
+test_mRNA_110_267_22 83 test_chromosome 243 255 8M100N50M100N17M = 160 0 GATACGCCACTATTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0 XS:A:+ NS:i:0
+test_mRNA_89_267_32 83 test_chromosome 243 255 8M100N50M100N17M = 139 0 GATACGGCACTATTACTTTATTATCTTTCTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:1
+test_mRNA_111_268_d 89 test_chromosome 244 255 7M100N50M100N18M * 0 0 ATACGCCACTATTATTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:1
+test_mRNA_85_268_53 147 test_chromosome 244 255 7M100N50M100N18M = 135 0 ATACGCCACTATTACTTTATTATCTTACTCGGACGTAGACGGATCGTCAACGGGACTTTTTCTACTTGAGACTGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_110_271_28 147 test_chromosome 247 255 4M100N50M100N21M = 160 0 CGCCACTATTACTTTATTATCTTACTCGGACGAAGACGGATCGGCAACGGGGCTTTTTCTACTTGAGACTGGGAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:1
+test_mRNA_116_271_2b 83 test_chromosome 247 255 4M100N50M100N21M = 166 0 CGCCACTATTACTTTATTATCTTACTCGGACGTAGACAGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_82_271_58 147 test_chromosome 247 255 4M100N50M100N21M = 132 0 CGCCACTATTACTTTATTATCTTACTCGGACGTAGACGCATCGGCAACGGGACTTTTTCTACTTGAGACTGGGAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_53_272_5a 81 test_chromosome 248 255 3M100N50M100N22M = 103 0 GCCACTATTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGACACTGGGATC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:1
+test_mRNA_104_274_1c 89 test_chromosome 350 255 51M100N24M * 0 0 CACTATTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_85_275_38 153 test_chromosome 351 255 50M100N25M * 0 0 ACTCTTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTACTACTTGAGACTGGGATCGAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_97_275_26 145 test_chromosome 351 255 50M100N25M = 147 0 ACTATTACTTTATTATCTTAGTCGGACGTAGACGGATCGGAAACGGGACTCTTTCTACTTGAGACTGGGATCGAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:0
+test_mRNA_105_276_c 81 test_chromosome 352 255 49M100N26M = 155 0 CTATTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGGCTTTTTCTACTTGAGACTGGGATCGAGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_78_276_4b 145 test_chromosome 352 255 49M100N26M = 128 0 CTATTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTAGGATCGAGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_114_277_5b 81 test_chromosome 353 255 48M100N27M = 164 0 TATTACTTTATTATCTTACTCGGAGGTAGACGGAACGGCAACGGGACTTTTTCTGCTTGAGACTGGGATCGAGGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:4 XS:A:+ NS:i:0
+test_mRNA_75_277_3b 145 test_chromosome 353 255 48M100N27M = 125 0 TATTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACCTGAGACTGGGATCGAGGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_104_278_3e 17 test_chromosome 354 255 47M100N28M = 154 0 ATTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGAATCGAGGCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:0
+test_mRNA_104_278_3e 81 test_chromosome 354 255 47M100N28M = 154 0 ATTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGAATCGAGGCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_87_279_5f 81 test_chromosome 355 255 46M100N29M = 137 0 TTACTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0 XS:A:+ NS:i:0
+test_mRNA_125_280_48 145 test_chromosome 356 255 45M100N30M = 175 0 TACTTTATTATCTTACTCTGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGAGCGAGGCGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_126_282_18 81 test_chromosome 358 255 43M100N32M = 176 0 CTTTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0 XS:A:+ NS:i:0
+test_mRNA_103_284_2a 81 test_chromosome 360 255 41M100N34M = 153 0 TTATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACTTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0 XS:A:+ NS:i:0
+test_mRNA_107_286_5 81 test_chromosome 362 255 39M100N36M = 157 0 ATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACTTTTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0 XS:A:+ NS:i:0
+test_mRNA_151_286_e 153 test_chromosome 362 255 39M100N36M * 0 0 ATTATCTTACTCGGACGTAGACGGATCGGCAACGGGACTTTATCTACTTGAGACTGGGATCGAGGCGGACTTTTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:1
+test_mRNA_150_290_0 89 test_chromosome 366 255 35M100N40M * 0 0 TCTTACTCGGACGTAGACGGATCGCCAACGGGACTTTTTCTACTTGAGACTGAGACCGAGGCGGACTTTTTAGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:0
+test_mRNA_94_291_40 153 test_chromosome 367 255 34M100N41M * 0 0 CTTCCTGGGACGTAGACGGATCGGCAACGCGACATTTTCTACTTGAGACTGGGATCGAGGCGGACTTTTTGGGAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:5 XS:A:+ NS:i:2
+test_mRNA_125_293_60 81 test_chromosome 369 255 32M100N43M = 175 0 TACTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACTTTTTAGGACGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0 XS:A:+ NS:i:0
+test_mRNA_172_294_4f 147 test_chromosome 370 255 31M100N44M = 222 0 ACTCGGACGTAGACGGGTCGGCAGCGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACGTTTTAGGACGGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:0
+test_mRNA_116_295_63 81 test_chromosome 371 255 30M100N45M = 166 0 CTCGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACTTTTTAGGACGGGA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0 XS:A:+ NS:i:0
+test_mRNA_111_297_61 17 test_chromosome 373 255 28M100N47M = 161 0 CGGACGTAGACGGATCCGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACTTTTTAGGACGGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:0
+test_mRNA_111_297_61 81 test_chromosome 373 255 28M100N47M = 161 0 CGGACGTAGACGGATCCGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACTTTTTAGGACGGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_118_297_f 17 test_chromosome 373 255 28M100N47M = 168 0 CGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACTTTTTAGGACGGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_118_297_f 81 test_chromosome 373 255 28M100N47M = 168 0 CGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACTTTTTAGGACGGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:0 XS:A:+ NS:i:0
+test_mRNA_151_297_1d 153 test_chromosome 373 255 28M100N47M * 0 0 CGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACATTTTAGGACGGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
+test_mRNA_151_297_1d 25 test_chromosome 373 255 28M100N47M * 0 0 CGGACGTAGACGGATCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACATTTTAGGACGGGACT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:3 XS:A:+ NS:i:0
+test_mRNA_122_299_6 81 test_chromosome 375 255 26M100N49M = 172 0 GACGTAGACGGAGCGGCAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACTTTTTAGGACGGGACTTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2 XS:A:+ NS:i:0
+test_mRNA_145_300_37 83 test_chromosome 376 255 25M100N50M = 195 0 ACGTAGACGGATCGGAAACGGGACTTTTTCTACTTGAGACTGGGATCGAGGCGGACTTTTTAGGACGGGACTTGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:1 XS:A:+ NS:i:0
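The simulated reads above carry spliced CIGAR strings such as 44M100N31M (44 aligned bases, a 100 bp intron skip, then 31 aligned bases), which is what gives Cufflinks junction evidence to assemble. A minimal sketch, assuming standard SAM CIGAR semantics (the helper name is hypothetical), of how the implied reference span can be computed:

import re

def cigar_reference_span( cigar ):
    # M, N and D operations consume reference bases; I, S, H and P do not.
    return sum( int( n ) for n, op in re.findall( r'(\d+)([MIDNSHP=X])', cigar ) if op in 'MND' )

print cigar_reference_span( '44M100N31M' )   # 175 = 44 + 100 + 31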
diff -r 869e494a8074 -r 37c075416918 test-data/cufflinks_out1.gtf
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cufflinks_out1.gtf Tue Apr 13 17:12:00 2010 -0400
@@ -0,0 +1,4 @@
+test_chromosome Cufflinks transcript 53 550 1000 + . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; FPKM "3355704.6979865772"; frac "1.000000"; conf_lo "2697596.861952"; conf_hi "4013812.534021"; cov "46.057047";
+test_chromosome Cufflinks exon 53 250 1000 + . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; exon_number "1"; FPKM "3355704.6979865772"; frac "1.000000"; conf_lo "2697596.861952"; conf_hi "4013812.534021"; cov "46.057047";
+test_chromosome Cufflinks exon 351 400 1000 + . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; exon_number "2"; FPKM "3355704.6979865772"; frac "1.000000"; conf_lo "2697596.861952"; conf_hi "4013812.534021"; cov "46.057047";
+test_chromosome Cufflinks exon 501 550 1000 + . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; exon_number "3"; FPKM "3355704.6979865772"; frac "1.000000"; conf_lo "2697596.861952"; conf_hi "4013812.534021"; cov "46.057047";
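The expected GTF keeps all per-transcript numbers in the column 9 attribute string (key "value"; pairs). A small parsing sketch, assuming standard GTF attribute formatting (the function name and sample line are only for illustration):

def parse_gtf_attributes( attr_field ):
    # 'gene_id "CUFF.1"; FPKM "3355704.69";' -> { 'gene_id': 'CUFF.1', 'FPKM': '3355704.69' }
    attrs = {}
    for pair in attr_field.strip().strip( ';' ).split( ';' ):
        if pair.strip():
            key, sep, value = pair.strip().partition( ' ' )
            attrs[ key ] = value.strip( '"' )
    return attrs

line9 = 'gene_id "CUFF.1"; transcript_id "CUFF.1.1"; FPKM "3355704.6979865772";'
print parse_gtf_attributes( line9 )[ 'transcript_id' ]   # CUFF.1.1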
diff -r 869e494a8074 -r 37c075416918 test-data/cufflinks_out2.expr
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cufflinks_out2.expr Tue Apr 13 17:12:00 2010 -0400
@@ -0,0 +1,2 @@
+trans_id bundle_id chr left right FPKM FMI frac FPKM_conf_lo FPKM_conf_hi coverage length
+CUFF.1.1 6 test_chromosome 52 550 3.3557e+06 1 1 2.6976e+06 4.01381e+06 46.057 298
diff -r 869e494a8074 -r 37c075416918 test-data/cufflinks_out3.expr
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cufflinks_out3.expr Tue Apr 13 17:12:00 2010 -0400
@@ -0,0 +1,2 @@
+gene_id bundle_id chr left right FPKM FPKM_conf_lo FPKM_conf_hi
+CUFF.1 6 test_chromosome 52 550 3.3557e+06 2.6976e+06 4.01381e+06
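Both .expr outputs are plain tab-delimited tables with a header row, so they can be inspected directly; a sketch under that assumption (the column names come from the headers above, the file name is a placeholder):

import csv

for row in csv.DictReader( open( 'cufflinks_out3.expr' ), delimiter='\t' ):
    print row[ 'gene_id' ], row[ 'chr' ], row[ 'FPKM' ]   # CUFF.1 test_chromosome 3.3557e+06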
diff -r 869e494a8074 -r 37c075416918 tool-data/tophat_indices.loc.sample
--- a/tool-data/tophat_indices.loc.sample Tue Apr 13 17:02:56 2010 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,29 +0,0 @@
-#This is a sample file distributed with Galaxy that enables tools
-#to use a directory of Tophat and Bowtie indexed sequences data files. You will need
-#to create these data files and then create a tophat_indices.loc file
-#similar to this one (store it in this directory) that points to
-#the directories in which those files are stored. The tophat_indices.loc
-#file has this format (white space characters are TAB characters):
-#
-#<build> <file_base>
-#
-#So, for example, if you had hg18 indexed stored in
-#/depot/data2/galaxy/tophat/hg18/,
-#then the tophat_indices.loc entry would look like this:
-#
-#hg18 /depot/data2/galaxy/tophat/hg18/hg18
-#
-#and your /depot/data2/galaxy/tophat/hg18/ directory
-#would contain hg18.*.ebwt files:
-#
-#-rw-r--r-- 1 james universe 830134 2005-09-13 10:12 hg18.1.ebwt
-#-rw-r--r-- 1 james universe 527388 2005-09-13 10:12 hg18.2.ebwt
-#-rw-r--r-- 1 james universe 269808 2005-09-13 10:12 gh18.3.ebwt
-#...etc...
-#
-#Your tophat_indices.loc file should include an entry per line for
-#each index set you have stored. The "file" in the path does not actually
-#exist, but it is the prefix for the actual index files. For example:
-#
-#hg18 /depot/data2/galaxy/bowtie/hg18/hg18
-#hg19 /depot/data2/galaxy/bowtie/hg19/hg19
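The sample .loc file goes away because the rewritten tophat_wrapper.xml below reads its index list from bowtie_indices.loc instead. The format described in the removed comments is a two-column, tab-separated mapping of build to index prefix; a minimal parsing sketch (the path and builds are only examples):

def read_loc( path ):
    # Each non-comment line is '<build><TAB><file_base>'.
    indexes = {}
    for line in open( path ):
        line = line.strip()
        if line and not line.startswith( '#' ):
            build, file_base = line.split( '\t' )[ :2 ]
            indexes[ build ] = file_base
    return indexes

# read_loc( 'tool-data/bowtie_indices.loc' ).get( 'hg18' ) -> '/depot/data2/galaxy/bowtie/hg18/hg18'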
diff -r 869e494a8074 -r 37c075416918 tool_conf.xml.sample
--- a/tool_conf.xml.sample Tue Apr 13 17:02:56 2010 -0400
+++ b/tool_conf.xml.sample Tue Apr 13 17:12:00 2010 -0400
@@ -225,8 +225,9 @@
<tool file="metag_tools/megablast_xml_parser.xml" />
<tool file="sr_mapping/PerM.xml" />
</section>
- <section name="NGS: Expression Analysis" id="rnatools">
- <tool file="tophat/tophat_wrapper.xml" />
+ <section name="NGS: Expression Analysis" id="ngs-rna-tools">
+ <tool file="ngs_rna/tophat_wrapper.xml" />
+ <tool file="ngs_rna/cufflinks_wrapper.xml" />
</section>
<section name="NGS: SAM Tools" id="samtools">
<tool file="samtools/sam_bitwise_flag_filter.xml" />
diff -r 869e494a8074 -r 37c075416918 tools/ngs_rna/cufflinks_wrapper.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/ngs_rna/cufflinks_wrapper.py Tue Apr 13 17:12:00 2010 -0400
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+
+import optparse, os, shutil, subprocess, sys, tempfile
+
+def stop_err( msg ):
+ sys.stderr.write( "%s\n" % msg )
+ sys.exit()
+
+def __main__():
+ #Parse Command Line
+ parser = optparse.OptionParser()
+ parser.add_option( '-1', '--input', dest='input', help='A file of RNA-Seq read alignments in the SAM format. SAM is a standard short read alignment format that allows aligners to attach custom tags to individual alignments, and Cufflinks requires that the alignments you supply have some of these tags. Please see Input formats for more details.' )
+ parser.add_option( '-s', '--inner-dist-std-dev', help='The standard deviation for the distribution on inner distances between mate pairs. The default is 20bp.' )
+ parser.add_option( '-I', '--max-intron-length', help='The maximum intron length. Cufflinks will not report transcripts with introns longer than this, and will ignore SAM alignments with REF_SKIP CIGAR operations longer than this. The default is 300,000.' )
+ parser.add_option( '-F', '--min-isoform-fraction', help='After calculating isoform abundance for a gene, Cufflinks filters out transcripts that it believes are very low abundance, because isoforms expressed at extremely low levels often cannot reliably be assembled, and may even be artifacts of incompletely spliced precursors of processed transcripts. This parameter is also used to filter out introns that have far fewer spliced alignments supporting them. The default is 0.05, or 5% of the most abundant isoform (the major isoform) of the gene.' )
+ parser.add_option( '-j', '--pre-mrna-fraction', help='Some RNA-Seq protocols produce a significant amount of reads that originate from incompletely spliced transcripts, and these reads can confound the assembly of fully spliced mRNAs. Cufflinks uses this parameter to filter out alignments that lie within the intronic intervals implied by the spliced alignments. The minimum depth of coverage in the intronic region covered by the alignment is divided by the number of spliced reads, and if the result is lower than this parameter value, the intronic alignments are ignored. The default is 5%.' )
+ parser.add_option( '-p', '--num-threads', help='Use this many threads to align reads. The default is 1.' )
+ parser.add_option( '-m', '--inner-mean-dist', dest='inner_mean_dist', help='This is the expected (mean) inner distance between mate pairs. \
+ For example, for paired end runs with fragments selected at 300bp, \
+ where each end is 50bp, you should set this to 200. The default is 45bp.')
+ parser.add_option( '-Q', '--min-mapqual', help='Instructs Cufflinks to ignore alignments with a SAM mapping quality lower than this number. The default is 0.' )
+ parser.add_option( '-L', '--label', help='Cufflinks will report transfrags in GTF format, with a prefix given by this option. The default prefix is "CUFF".' )
+ parser.add_option( '-G', '--GTF', help='Tells Cufflinks to use the supplied reference annotation to estimate isoform expression. It will not assemble novel transcripts, and the program will ignore alignments not structurally compatible with any reference transcript.' )
+ # Advanced Options:
+ parser.add_option( '--num-importance-samples', help='Sets the number of importance samples generated for each locus during abundance estimation. Default: 1000' )
+ parser.add_option( '--max-mle-iterations', help='Sets the number of iterations allowed during maximum likelihood estimation of abundances. Default: 5000' )
+
+ # Wrapper / Galaxy options.
+ parser.add_option( '-A', '--assembled-isoforms-output', dest='assembled_isoforms_output_file', help='Assembled isoforms output file; format is GTF.' )
+ parser.add_option( '-T', '--transcripts-expression-output', dest='transcripts_expression_output_file', help='TODO' )
+ parser.add_option( '-Z', '--genes-expression-output', dest='genes_expression_output_file', help='TODO' )
+
+ (options, args) = parser.parse_args()
+
+ # Make temp directory for output.
+ tmp_output_dir = tempfile.mkdtemp()
+
+ # Build command.
+
+ # Base.
+ cmd = "cufflinks"
+
+ # Add options.
+ if options.inner_mean_dist:
+ cmd += ( " -m %i" % int ( options.inner_mean_dist ) )
+
+ # Add input files.
+ cmd += " " + options.input
+
+ # Run
+ try:
+ proc = subprocess.Popen( args=cmd, shell=True, cwd=tmp_output_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = ''
+ buffsize = 1048576
+ try:
+ while True:
+ stderr += proc.stderr.read( buffsize )
+ if not stderr or len( stderr ) % buffsize != 0:
+ break
+ except OverflowError:
+ pass
+ if returncode != 0:
+ raise Exception, stderr
+ except Exception, e:
+ stop_err( 'Error in cufflinks:\n' + str( e ) )
+
+ # TODO: look for errors in program output.
+
+ # Copy output files from tmp directory to specified files.
+ try:
+ try:
+ shutil.copyfile( tmp_output_dir + "/transcripts.gtf", options.assembled_isoforms_output_file )
+ shutil.copyfile( tmp_output_dir + "/transcripts.expr", options.transcripts_expression_output_file )
+ shutil.copyfile( tmp_output_dir + "/genes.expr", options.genes_expression_output_file )
+ except Exception, e:
+ stop_err( 'Error in cufflinks:\n' + str( e ) )
+ finally:
+ # Clean up temp dirs
+ if os.path.exists( tmp_output_dir ):
+ shutil.rmtree( tmp_output_dir )
+
+if __name__=="__main__": __main__()
\ No newline at end of file
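Outside Galaxy, the wrapper can be exercised directly; a hypothetical invocation (file names are placeholders, and only options declared above are used):

import subprocess

subprocess.call( [ 'python', 'tools/ngs_rna/cufflinks_wrapper.py',
                   '--input=accepted_hits.sam',
                   '-m', '20',
                   '--assembled-isoforms-output=transcripts.gtf',
                   '--transcripts-expression-output=transcripts.expr',
                   '--genes-expression-output=genes.expr' ] )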
diff -r 869e494a8074 -r 37c075416918 tools/ngs_rna/cufflinks_wrapper.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/ngs_rna/cufflinks_wrapper.xml Tue Apr 13 17:12:00 2010 -0400
@@ -0,0 +1,91 @@
+<tool id="cufflinks" name="Cufflinks" version="0.8.2">
+ <description>Transcript assembly, differential expression, and differential regulation for RNA-Seq</description>
+ <command interpreter="python">
+ cufflinks_wrapper.py
+ --input=$input
+ --assembled-isoforms-output=$assembled_isoforms
+ --transcripts-expression-output=$transcripts_expression
+ --genes-expression-output=$genes_expression
+ --num-threads="4"
+ #if $singlePaired.sPaired == "paired":
-m $singlePaired.mean_inner_distance
+ #end if
+ </command>
+ <inputs>
+ <param format="sam" name="input" type="data" label="SAM file of aligned RNA-Seq reads" help=""/>
+ <conditional name="singlePaired">
+ <param name="sPaired" type="select" label="Is this library mate-paired?">
+ <option value="single">Single-end</option>
+ <option value="paired">Paired-end</option>
+ </param>
+ <when value="single">
+
+ </when>
+ <when value="paired">
+ <param name="mean_inner_distance" type="integer" value="20" label="Mean Inner Distance between Mate Pairs"/>
+ </when>
+ </conditional>
+ </inputs>
+
+ <outputs>
+ <data format="expr" name="genes_expression" />
+ <data format="expr" name="transcripts_expression" />
+ <data format="gtf" name="assembled_isoforms" />
+ </outputs>
+
+ <tests>
+ <test>
+ <param name="sPaired" value="single"/>
+ <param name="input" value="cufflinks_in.sam"/>
+ <param name="mean_inner_distance" value="20"/>
+ <output name="assembled_isoforms" file="cufflinks_out1.gtf"/>
+ <!-- Can't test these right now because .expr files aren't recognized.
+ <output name="genes_expression" file="cufflinks_out3.expr"/>
+ <output name="transcripts_expression" file="cufflinks_out2.expr"/>
+ -->
+ </test>
+ </tests>
+
+ <help>
+**Cufflinks Overview**
+
+Cufflinks_ assembles transcripts, estimates their abundances, and tests for differential expression and regulation in RNA-Seq samples. It accepts aligned RNA-Seq reads and assembles the alignments into a parsimonious set of transcripts. Cufflinks then estimates the relative abundances of these transcripts based on how many reads support each one. Please cite: Trapnell C, Williams BA, Pertea G, Mortazavi AM, Kwan G, van Baren MJ, Salzberg SL, Wold B, Pachter L. Transcript assembly and abundance estimation from RNA-Seq reveals thousands of new transcripts and switching among isoforms. (manuscript in press)
+
+.. _Cufflinks: http://cufflinks.cbcb.umd.edu/
+
+------
+
+**Know what you are doing**
+
+.. class:: warningmark
+
+There is no such thing (yet) as an automated gearshift in expression analysis. It is all like stick-shift driving in San Francisco. In other words, running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy.
+
+.. __: http://cufflinks.cbcb.umd.edu/manual.html
+
+------
+
+**Input formats**
+
+Cufflinks accepts files in SAM format.
+
+------
+
+**Outputs**
+
+TODO
+
+-------
+
+**Cufflinks settings**
+
+All of the options have a default value. You can change any of them. Some of the options in Cufflinks have been implemented here.
+
+------
+
+**Cufflinks parameter list**
+
+This is a list of implemented Cufflinks options::
+
+ </help>
+</tool>
diff -r 869e494a8074 -r 37c075416918 tools/ngs_rna/tophat_wrapper.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/ngs_rna/tophat_wrapper.py Tue Apr 13 17:12:00 2010 -0400
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+
+import optparse, os, shutil, sys, tempfile
+
+def stop_err( msg ):
+ sys.stderr.write( "%s\n" % msg )
+ sys.exit()
+
+def __main__():
+ #Parse Command Line
+ parser = optparse.OptionParser()
+ parser.add_option( '-1', '--input1', dest='input1', help='The (forward or single-end) reads file in Sanger FASTQ format' )
+ parser.add_option( '-2', '--input2', dest='input2', help='The reverse reads file in Sanger FASTQ format' )
+ parser.add_option( '-a', '--min-anchor-length', dest='min_anchor_length',
+ help='The "anchor length". TopHat will report junctions spanned by reads with at least this many bases on each side of the junction.' )
+ parser.add_option( '-i', '--min-intron-length', dest='min_intron_length',
+ help='The minimum intron length. TopHat will ignore donor/acceptor pairs closer than this many bases apart.' )
+ parser.add_option( '-I', '--max-intron-length', dest='max_intron_length',
+ help='The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read.' )
+ parser.add_option( '-s', '--solexa-quals', dest='solexa_quals', help='Use the Solexa scale for quality values in FASTQ files.' )
+ parser.add_option( '-S', '--solexa.3-quals', dest='solexa_quals',
+ help='As of the Illumina GA pipeline version 1.3, quality scores are encoded in Phred-scaled base-64. Use this option for FASTQ files from pipeline 1.3 or later.' )
+ parser.add_option( '-p', '--num-threads', dest='num_threads', help='Use this many threads to align reads. The default is 1.' )
+ parser.add_option( '-C', '--coverage-output', dest='coverage_output_file', help='Coverage output file; format is WIG.' )
+ parser.add_option( '-J', '--junctions-output', dest='junctions_output_file', help='Junctions output file; format is BED.' )
+ parser.add_option( '-H', '--hits-output', dest='accepted_hits_output_file', help='Accepted hits output file; format is SAM.' )
+ parser.add_option( '-D', '--indexes-dir', dest='indexes_directory', help='Indexes directory; location of .ebwt and .fa files.' )
+ parser.add_option( '-r', '--mate-inner-dist', dest='mate_inner_dist', help='This is the expected (mean) inner distance between mate pairs. \
+ For example, for paired end runs with fragments selected at 300bp, \
+ where each end is 50bp, you should set -r to be 200. There is no default, \
+ and this parameter is required for paired end runs.')
+ (options, args) = parser.parse_args()
+
+ # Make temp directory for output.
+ tmp_output_dir = tempfile.mkdtemp()
+
+ # Build command.
+
+ # Base.
+ cmd = "tophat -o %s " % ( tmp_output_dir )
+
+ # Add options.
+ if options.mate_inner_dist:
+ cmd += ( " -r %i" % int ( options.mate_inner_dist ) )
+
+ # Add index prefix.
+ cmd += " " + options.indexes_directory
+
+ # Add input files.
+ cmd += " " + options.input1
+ if options.mate_inner_dist:
+ # Using paired-end reads.
+ cmd += " " + options.input2
+
+ # Route program output to file.
+ cmd += " > %s" % tmp_output_dir + "/std_out.txt"
+ # Route program error output to file.
+ cmd += " 2> %s" % tmp_output_dir + "/std_err.txt"
+
+ # Run.
+ try:
+ os.system( cmd )
+ except Exception, e:
+ stop_err( 'Error in tophat:\n' + str( e ) )
+
+ # TODO: look for errors in program output.
+
+ # Copy output files from tmp directory to specified files.
+ try:
+ shutil.copyfile( tmp_output_dir + "/coverage.wig", options.coverage_output_file )
+ shutil.copyfile( tmp_output_dir + "/junctions.bed", options.junctions_output_file )
+ shutil.copyfile( tmp_output_dir + "/accepted_hits.sam", options.accepted_hits_output_file )
+ except Exception, e:
+ stop_err( 'Error in tophat:\n' + str( e ) )
+
+ # Clean up temp dirs
+ if os.path.exists( tmp_output_dir ):
+ shutil.rmtree( tmp_output_dir )
+
+if __name__=="__main__": __main__()
diff -r 869e494a8074 -r 37c075416918 tools/ngs_rna/tophat_wrapper.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/ngs_rna/tophat_wrapper.xml Tue Apr 13 17:12:00 2010 -0400
@@ -0,0 +1,129 @@
+<tool id="tophat" name="Tophat" version="1.0.13">
+ <description>Find splice junctions using RNA-seq data</description>
+ <command interpreter="python">
+ tophat_wrapper.py
+ --num-threads="4"
+ --coverage-output=$coverage
+ --junctions-output=$junctions
+ --hits-output=$accepted_hits
+ #if $refGenomeSource.genomeSource == "history":
+ --indexes-dir=$refGenomeSource.ownFile
+ #else:
+ --indexes-dir=$refGenomeSource.index.value
+ #end if
+ #if $singlePaired.sPaired == "single":
+ --input1=$singlePaired.input1
+ --input2="None"
+ #else:
+ -r $singlePaired.mean_inner_distance
+ --input1=$singlePaired.input1
+ --input2=$singlePaired.input2
+ #end if
+ </command>
+ <inputs>
+ <conditional name="refGenomeSource">
+ <param name="genomeSource" type="select" label="Will you select a reference genome from your history or use a built-in index?" help="Built-ins were indexed using default options">
+ <option value="indexed">Use a built-in index</option>
+ <option value="history">Use one from the history</option>
+ </param>
+ <when value="indexed">
+ <param name="index" type="select" label="Select a reference genome" help="If your genome of interest is not listed, contact the Galaxy team">
+ <options from_file="bowtie_indices.loc">
+ <column name="value" index="1" />
+ <column name="name" index="0" />
+ </options>
+ </param>
+ </when>
+ <when value="history">
+ <param name="ownFile" type="data" format="fasta" metadata_name="dbkey" label="Select the reference genome" />
+ </when> <!-- history -->
+ </conditional> <!-- refGenomeSource -->
+ <conditional name="singlePaired">
+ <param name="sPaired" type="select" label="Is this library mate-paired?">
+ <option value="single">Single-end</option>
+ <option value="paired">Paired-end</option>
+ </param>
+ <when value="single">
+ <param format="fastqsanger" name="input1" type="data" label="RNA-Seq FASTQ file" help="Must have Sanger-scaled quality values with ASCII offset 33"/>
+ </when>
+ <when value="paired">
+ <param format="fastqsanger" name="input1" type="data" label="RNA-Seq FASTQ file" help="Must have Sanger-scaled quality values with ASCII offset 33"/>
+ <param format="fastqsanger" name="input2" type="data" label="RNA-Seq FASTQ file" help="Must have Sanger-scaled quality values with ASCII offset 33"/>
+ <param format="fastqsanger" name="mean_inner_distance" type="integer" value="20" label="Mean Inner Distance between Mate Pairs"/>
+ </when>
+ </conditional>
+ </inputs>
+
+ <outputs>
+ <data format="sam" name="accepted_hits"/>
+ <data format="wig" name="coverage" />
+ <data format="bed" name="junctions" />
+ </outputs>
+
+ <tests>
+ <test>
+ <param name="genomeSource" value="indexed"/>
+ <param name="index" value="test_ref"/>
+ <param name="sPaired" value="paired"/>
+ <param name="input1" ftype="fastqsanger" value="tophat_in1.fq"/>
+ <param name="input2" ftype="fastqsanger" value="tophat_in2.fq"/>
+ <param name="mean_inner_distance" value="20"/>
+ <!--
+ Can't test this right now because first lines of file are run-specific.
+ <output name="accepted_hits" file="tophat_out1.sam"/>
+ -->
+ <output name="coverage" file="tophat_out2.wig"/>
+ <output name="junctions" file="tophat_out3.bed"/>
+ </test>
+ </tests>
+
+ <help>
+**Tophat Overview**
+
+TopHat_ is a fast splice junction mapper for RNA-Seq reads. It aligns RNA-Seq reads to mammalian-sized genomes using the ultra high-throughput short read aligner Bowtie, and then analyzes the mapping results to identify splice junctions between exons. Please cite: Trapnell, C., Pachter, L. and Salzberg, S.L. TopHat: discovering splice junctions with RNA-Seq. Bioinformatics 25, 1105-1111 (2009).
+
+.. _Tophat: http://tophat.cbcb.umd.edu/
+
+------
+
+**Know what you are doing**
+
+.. class:: warningmark
+
+There is no such thing (yet) as an automated gearshift in splice junction identification. It is all like stick-shift driving in San Francisco. In other words, running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy.
+
+.. __: http://tophat.cbcb.umd.edu/manual.html
+
+------
+
+**Input formats**
+
+Tophat accepts files in Sanger FASTQ format. Use the FASTQ Groomer to prepare your files.
+
+------
+
+**Outputs**
+
+Tophat produces three output files::
+
+ coverage.wig -- coverage of reads
+ accepted_hits.sam -- reads that were mapped onto genome
+ junctions.bed -- splice junctions identified by Tophat
+
+-------
+
+**Tophat settings**
+
+All of the options have a default value. You can change any of them. Some of the options in Tophat have been implemented here.
+
+------
+
+**Tophat parameter list**
+
+This is a list of implemented Tophat options::
+
-r This is the expected (mean) inner distance between mate pairs. For example, for paired end runs with fragments
+ selected at 300bp, where each end is 50bp, you should set -r to be 200. There is no default, and this parameter
+ is required for paired end runs.
+ </help>
+</tool>
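The -r description boils down to a one-line calculation: the inner distance is the size-selected fragment length minus both read lengths. For the numbers quoted in the help text:

fragment_length = 300          # size-selected fragment
read_length = 50               # each mate
mate_inner_dist = fragment_length - 2 * read_length
print mate_inner_dist          # 200, the value to pass as -r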
diff -r 869e494a8074 -r 37c075416918 tools/tophat/tophat_wrapper.py
--- a/tools/tophat/tophat_wrapper.py Tue Apr 13 17:02:56 2010 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,80 +0,0 @@
-#!/usr/bin/env python
-
-import optparse, os, shutil, sys, tempfile
-
-def stop_err( msg ):
- sys.stderr.write( "%s\n" % msg )
- sys.exit()
-
-def __main__():
- #Parse Command Line
- parser = optparse.OptionParser()
- parser.add_option( '-1', '--input1', dest='input1', help='The (forward or single-end) reads file in Sanger FASTQ format' )
- parser.add_option( '-2', '--input2', dest='input2', help='The reverse reads file in Sanger FASTQ format' )
- parser.add_option( '-a', '--min-anchor-length', dest='min_anchor_length',
- help='The "anchor length". TopHat will report junctions spanned by reads with at least this many bases on each side of the junction.' )
- parser.add_option( '-i', '--min-intron-length', dest='min_intron_length',
- help='The minimum intron length. TopHat will ignore donor/acceptor pairs closer than this many bases apart.' )
- parser.add_option( '-I', '--max-intron-length', dest='max_intron_length',
- help='The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read.' )
- parser.add_option( '-s', '--solexa-quals', dest='solexa_quals', help='Use the Solexa scale for quality values in FASTQ files.' )
- parser.add_option( '-S', '--solexa.3-quals', dest='solexa_quals',
- help='As of the Illumina GA pipeline version 1.3, quality scores are encoded in Phred-scaled base-64. Use this option for FASTQ files from pipeline 1.3 or later.' )
- parser.add_option( '-p', '--num-threads', dest='num_threads', help='Use this many threads to align reads. The default is 1.' )
- parser.add_option( '-C', '--coverage-output', dest='coverage_output_file', help='Coverage output file; formate is WIG.' )
- parser.add_option( '-J', '--junctions-output', dest='junctions_output_file', help='Junctions output file; formate is BED.' )
- parser.add_option( '-H', '--hits-output', dest='accepted_hits_output_file', help='Accepted hits output file; formate is SAM.' )
- parser.add_option( '-D', '--indexes-dir', dest='indexes_directory', help='Indexes directory; location of .ebwt and .fa files.' )
- parser.add_option( '-r', '--mate-inner-dist', dest='mate_inner_dist', help='This is the expected (mean) inner distance between mate pairs. \
- For, example, for paired end runs with fragments selected at 300bp, \
- where each end is 50bp, you should set -r to be 200. There is no default, \
- and this parameter is required for paired end runs.')
- (options, args) = parser.parse_args()
-
- # Make temp directory for output.
- tmp_output_dir = tempfile.mkdtemp()
-
- # Build command.
-
- # Base.
- cmd = "tophat -o %s " % ( tmp_output_dir )
-
- # Add options.
- if options.mate_inner_dist:
- cmd += ( " -r %i" % int ( options.mate_inner_dist ) )
-
- # Add index prefix.
- cmd += " " + options.indexes_directory
-
- # Add input files.
- cmd += " " + options.input1
- if options.mate_inner_dist:
- # Using paired-end reads.
- cmd += " " + options.input2
-
- # Route program output to file.
- cmd += " > %s" % tmp_output_dir + "/std_out.txt"
- # Route program error output to file.
- cmd += " 2> %s" % tmp_output_dir + "/std_err.txt"
-
- # Run.
- try:
- os.system( cmd )
- except Exception, e:
- stop_err( 'Error in tophat:\n' + str( e ) )
-
- # TODO: look for errors in program output.
-
- # Copy output files from tmp directory to specified files.
- try:
- shutil.copyfile( tmp_output_dir + "/coverage.wig", options.coverage_output_file )
- shutil.copyfile( tmp_output_dir + "/junctions.bed", options.junctions_output_file )
- shutil.copyfile( tmp_output_dir + "/accepted_hits.sam", options.accepted_hits_output_file )
- except Exception, e:
- stop_err( 'Error in tophat:\n' + str( e ) )
-
- # Clean up temp dirs
- if os.path.exists( tmp_output_dir ):
- shutil.rmtree( tmp_output_dir )
-
-if __name__=="__main__": __main__()
diff -r 869e494a8074 -r 37c075416918 tools/tophat/tophat_wrapper.xml
--- a/tools/tophat/tophat_wrapper.xml Tue Apr 13 17:02:56 2010 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,129 +0,0 @@
-<tool id="tophat" name="Tophat" version="1.0.13">
- <description>Find splice junctions using RNA-seq data</description>
- <command interpreter="python">
- tophat_wrapper.py
- --num-threads="4"
- --coverage-output=$coverage
- --junctions-output=$junctions
- --hits-output=$accepted_hits
- #if $refGenomeSource.genomeSource == "history":
- --indexes-dir=$refGenomeSource.ownFile
- #else:
- --indexes-dir=$refGenomeSource.index.value
- #end if
- #if $singlePaired.sPaired == "single":
- --input1=$singlePaired.input1
- --input2="None"
- #else:
- -r $singlePaired.mean_inner_distance
- --input1=$singlePaired.input1
- --input2=$singlePaired.input2
- #end if
- </command>
- <inputs>
- <conditional name="refGenomeSource">
- <param name="genomeSource" type="select" label="Will you select a reference genome from your history or use a built-in index?" help="Built-ins were indexed using default options">
- <option value="indexed">Use a built-in index</option>
- <option value="history">Use one from the history</option>
- </param>
- <when value="indexed">
- <param name="index" type="select" label="Select a reference genome" help="If your genome of interest is not listed, contact the Galaxy team">
- <options from_file="tophat_indices.loc">
- <column name="value" index="1" />
- <column name="name" index="0" />
- </options>
- </param>
- </when>
- <when value="history">
- <param name="ownFile" type="data" format="fasta" metadata_name="dbkey" label="Select the reference genome" />
- </when> <!-- history -->
- </conditional> <!-- refGenomeSource -->
- <conditional name="singlePaired">
- <param name="sPaired" type="select" label="Is this library mate-paired?">
- <option value="single">Single-end</option>
- <option value="paired">Paired-end</option>
- </param>
- <when value="single">
- <param format="fastqsanger" name="input1" type="data" label="RNA-Seq FASTQ file" help="Must have Sanger-scaled quality values with ASCII offset 33"/>
- </when>
- <when value="paired">
- <param format="fastqsanger" name="input1" type="data" label="RNA-Seq FASTQ file" help="Must have Sanger-scaled quality values with ASCII offset 33"/>
- <param format="fastqsanger" name="input2" type="data" label="RNA-Seq FASTQ file" help="Must have Sanger-scaled quality values with ASCII offset 33"/>
- <param format="fastqsanger" name="mean_inner_distance" type="integer" value="20" label="Mean Inner Distance between Mate Pairs"/>
- </when>
- </conditional>
- </inputs>
-
- <outputs>
- <data format="sam" name="accepted_hits"/>
- <data format="wig" name="coverage" />
- <data format="bed" name="junctions" />
- </outputs>
-
- <tests>
- <test>
- <param name="genomeSource" value="indexed"/>
- <param name="index" value="test_ref"/>
- <param name="sPaired" value="paired"/>
- <param name="input1" ftype="fastqsanger" value="tophat_in1.fq"/>
- <param name="input2" ftype="fastqsanger" value="tophat_in2.fq"/>
- <param name="mean_inner_distance" value="20"/>
- <!--
- Can't test this right now because first lines of file are run-specific.
- <output name="accepted_hits" file="tophat_out1.sam"/>
- -->
- <output name="coverage" file="tophat_out2.wig"/>
- <output name="junctions" file="tophat_out3.bed"/>
- </test>
- </tests>
-
- <help>
-**Tophat Overview**
-
-TopHat_ is a fast splice junction mapper for RNA-Seq reads. It aligns RNA-Seq reads to mammalian-sized genomes using the ultra high-throughput short read aligner Bowtie, and then analyzes the mapping results to identify splice junctions between exons. Please cite: Trapnell, C., Pachter, L. and Salzberg, S.L. TopHat: discovering splice junctions with RNA-Seq. Bioinformatics 25, 1105-1111 (2009).
-
-.. _Tophat: http://tophat.cbcb.umd.edu/
-
-------
-
-**Know what you are doing**
-
-.. class:: warningmark
-
-There is no such thing (yet) as an automated gearshift in splice junction identification. It is all like stick-shift driving in San Francisco. In other words, running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy.
-
-.. __: http://tophat.cbcb.umd.edu/manual.html
-
-------
-
-**Input formats**
-
-Tophat accepts files in Sanger FASTQ format. Use the FASTQ Groomer to prepare your files.
-
-------
-
-**Outputs**
-
-Tophat produces three output files::
-
- coverage.wig -- coverage of reads
- accepted_hits.sam -- reads that were mapped onto genome
- junctions.bed -- splice junctions identified by Tophat
-
--------
-
-**Tophat settings**
-
-All of the options have a default value. You can change any of them. Some of the options in Tophat have been implemented here.
-
-------
-
-**Tophat parameter list**
-
-This is a list of implemented Tophat options::
-
- -r This is the expected (mean) inner distance between mate pairs. For, example, for paired end runs with fragments
- selected at 300bp, where each end is 50bp, you should set -r to be 200. There is no default, and this parameter
- is required for paired end runs.
- </help>
-</tool>
16 Apr '10
details: http://www.bx.psu.edu/hg/galaxy/rev/67160108c887
changeset: 3640:67160108c887
user: Kanwei Li <kanwei(a)gmail.com>
date: Tue Apr 13 17:15:47 2010 -0400
description:
Fix tipsy tooltip staying on screen after deleting dataset
diffstat:
templates/root/history.mako | 1 +
1 files changed, 1 insertions(+), 0 deletions(-)
diffs (11 lines):
diff -r 869e494a8074 -r 67160108c887 templates/root/history.mako
--- a/templates/root/history.mako Tue Apr 13 17:02:56 2010 -0400
+++ b/templates/root/history.mako Tue Apr 13 17:15:47 2010 -0400
@@ -47,6 +47,7 @@
}
});
%endif
+ $(".tipsy").remove();
}
});
return false;
details: http://www.bx.psu.edu/hg/galaxy/rev/869e494a8074
changeset: 3639:869e494a8074
user: Kanwei Li <kanwei(a)gmail.com>
date: Tue Apr 13 17:02:56 2010 -0400
description:
Fix async updates in history panel
diffstat:
templates/root/history.mako | 26 +++++++++++++-------------
1 files changed, 13 insertions(+), 13 deletions(-)
diffs (61 lines):
diff -r e3167368345a -r 869e494a8074 templates/root/history.mako
--- a/templates/root/history.mako Tue Apr 13 15:35:32 2010 -0400
+++ b/templates/root/history.mako Tue Apr 13 17:02:56 2010 -0400
@@ -19,6 +19,9 @@
<script type="text/javascript">
+<% TERMINAL_STATES = ["ok", "error", "empty", "deleted", "discarded"] %>
+TERMINAL_STATES = ${ h.to_json_string(TERMINAL_STATES) };
+
$(function() {
var historywrapper = $("div.historyItemWrapper");
init_history_items(historywrapper);
@@ -175,7 +178,7 @@
// Updater
updater(
- ${ h.to_json_string( dict([(data.id, data.state) for data in reversed( datasets ) if data.visible and data.state not in [ "deleted", "empty", "error", "ok" ]]) ) }
+ ${ h.to_json_string( dict([(data.id, data.state) for data in reversed( datasets ) if data.visible and data.state not in TERMINAL_STATES]) ) }
);
// Navigate to a dataset.
@@ -194,18 +197,16 @@
empty = false;
break;
}
- if ( ! empty ) {
- // console.log( "Updater running in 3 seconds" );
- setTimeout( function() { updater_callback( tracked_datasets ) }, 3000 );
- } else {
- // console.log( "Updater finished" );
+ if ( !empty ) {
+ setTimeout( function() { updater_callback( tracked_datasets ) }, 4000 );
}
};
var updater_callback = function ( tracked_datasets ) {
// Build request data
- var ids = []
- var states = []
- var force_history_refresh = false
+ var ids = [],
+ states = [],
+ force_history_refresh = false;
+
$.each( tracked_datasets, function ( id, state ) {
ids.push( id );
states.push( state );
@@ -221,10 +222,9 @@
// Replace HTML
var container = $("#historyItemContainer-" + id);
container.html( val.html );
- setupHistoryItem( container.children( ".historyItemWrapper" ) );
- initShowHide();
- // If new state was terminal, stop tracking
- if (( val.state == "ok") || ( val.state == "error") || ( val.state == "empty") || ( val.state == "deleted" ) || ( val.state == "discarded" )) {
+ init_history_items( $("div.historyItemWrapper"), "noinit" );
+ // If new state is terminal, stop tracking
+ if (TERMINAL_STATES.indexOf(val.state) !== -1) {
if ( val.force_history_refresh ){
force_history_refresh = true;
}
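The refactor keeps polling (every 4 seconds) only while some tracked dataset is outside TERMINAL_STATES, and the server-side filter in the Mako expression is ordinary Python. A stand-alone sketch of that filter, with hypothetical dataset objects exposing id, state and visible:

TERMINAL_STATES = [ 'ok', 'error', 'empty', 'deleted', 'discarded' ]

def datasets_to_track( datasets ):
    # Mirrors the template expression: only visible, still-running datasets are polled.
    return dict( ( data.id, data.state )
                 for data in reversed( datasets )
                 if data.visible and data.state not in TERMINAL_STATES )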
16 Apr '10
details: http://www.bx.psu.edu/hg/galaxy/rev/e3167368345a
changeset: 3638:e3167368345a
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Apr 13 15:35:32 2010 -0400
description:
Decouple the TagHandler from the model by allowing subclasses to point to a specified model. Add a baseline functional test script to provide coverage for tagging histories and history items (more coverage is needed, of course, and can easily be added to this script).
diffstat:
lib/galaxy/app.py | 4 +-
lib/galaxy/tags/tag_handler.py | 269 +++++++++++++----------------
lib/galaxy/web/controllers/history.py | 2 +-
lib/galaxy/web/controllers/tag.py | 202 ++++++++++------------
lib/galaxy/web/controllers/user.py | 2 +-
lib/galaxy/web/framework/helpers/grids.py | 6 +-
templates/tagging_common.mako | 5 +-
templates/user/index.mako | 3 +-
test/base/twilltestcase.py | 16 +
test/functional/test_tags.py | 63 +++++++
10 files changed, 299 insertions(+), 273 deletions(-)
diffs (930 lines):
diff -r 4fb48981bdb0 -r e3167368345a lib/galaxy/app.py
--- a/lib/galaxy/app.py Tue Apr 13 13:14:59 2010 -0400
+++ b/lib/galaxy/app.py Tue Apr 13 15:35:32 2010 -0400
@@ -1,11 +1,11 @@
import sys, os, atexit
from galaxy import config, jobs, util, tools, web, cloud
-## from galaxy.tracks import store
from galaxy.web import security
import galaxy.model
import galaxy.datatypes.registry
import galaxy.security
+from galaxy.tags.tag_handler import GalaxyTagHandler
class UniverseApplication( object ):
"""Encapsulates the state of a Universe application"""
@@ -33,6 +33,8 @@
self.config.database_engine_options )
# Security helper
self.security = security.SecurityHelper( id_secret=self.config.id_secret )
+ # Tag handler
+ self.tag_handler = GalaxyTagHandler()
# Initialize the tools
self.toolbox = tools.ToolBox( self.config.tool_config, self.config.tool_path, self )
# Load datatype converters
diff -r 4fb48981bdb0 -r e3167368345a lib/galaxy/tags/tag_handler.py
--- a/lib/galaxy/tags/tag_handler.py Tue Apr 13 13:14:59 2010 -0400
+++ b/lib/galaxy/tags/tag_handler.py Tue Apr 13 15:35:32 2010 -0400
@@ -1,148 +1,117 @@
-from galaxy import model
-import re
+import re, logging
from sqlalchemy.sql.expression import func, and_
from sqlalchemy.sql import select
+log = logging.getLogger( __name__ )
+
+# Item-specific information needed to perform tagging.
+class ItemTagAssocInfo( object ):
+ def __init__( self, item_class, tag_assoc_class, item_id_col ):
+ self.item_class = item_class
+ self.tag_assoc_class = tag_assoc_class
+ self.item_id_col = item_id_col
+
class TagHandler( object ):
-
- # Minimum tag length.
- min_tag_len = 2
-
- # Maximum tag length.
- max_tag_len = 255
-
- # Tag separator.
- tag_separators = ',;'
-
- # Hierarchy separator.
- hierarchy_separator = '.'
-
- # Key-value separator.
- key_value_separators = "=:"
-
- # Item-specific information needed to perform tagging.
- class ItemTagAssocInfo( object ):
- def __init__( self, item_class, tag_assoc_class, item_id_col ):
- self.item_class = item_class
- self.tag_assoc_class = tag_assoc_class
- self.item_id_col = item_id_col
-
- # Initialize with known classes.
- item_tag_assoc_info = {}
- item_tag_assoc_info["History"] = ItemTagAssocInfo( model.History, model.HistoryTagAssociation, model.HistoryTagAssociation.table.c.history_id )
- item_tag_assoc_info["HistoryDatasetAssociation"] = \
- ItemTagAssocInfo( model.HistoryDatasetAssociation, model.HistoryDatasetAssociationTagAssociation, model.HistoryDatasetAssociationTagAssociation.table.c.history_dataset_association_id )
- item_tag_assoc_info["Page"] = ItemTagAssocInfo( model.Page, model.PageTagAssociation, model.PageTagAssociation.table.c.page_id )
- item_tag_assoc_info["StoredWorkflow"] = ItemTagAssocInfo( model.StoredWorkflow, model.StoredWorkflowTagAssociation, model.StoredWorkflowTagAssociation.table.c.stored_workflow_id )
- item_tag_assoc_info["Visualization"] = ItemTagAssocInfo( model.Visualization, model.VisualizationTagAssociation, model.VisualizationTagAssociation.table.c.visualization_id )
-
- def get_tag_assoc_class(self, item_class):
- """ Returns tag association class for item class. """
+ def __init__( self ):
+ # Minimum tag length.
+ self.min_tag_len = 2
+ # Maximum tag length.
+ self.max_tag_len = 255
+ # Tag separator.
+ self.tag_separators = ',;'
+ # Hierarchy separator.
+ self.hierarchy_separator = '.'
+ # Key-value separator.
+ self.key_value_separators = "=:"
+ # Initialize with known classes - add to this in subclasses.
+ self.item_tag_assoc_info = {}
+ def get_tag_assoc_class( self, item_class ):
+ """Returns tag association class for item class."""
return self.item_tag_assoc_info[item_class.__name__].tag_assoc_class
-
- def get_id_col_in_item_tag_assoc_table( self, item_class):
- """ Returns item id column in class' item-tag association table. """
+ def get_id_col_in_item_tag_assoc_table( self, item_class ):
+ """Returns item id column in class' item-tag association table."""
return self.item_tag_assoc_info[item_class.__name__].item_id_col
-
- def get_community_tags(self, sa_session, item=None, limit=None):
- """ Returns community tags for an item. """
-
+ def get_community_tags( self, trans, item=None, limit=None ):
+ """Returns community tags for an item."""
# Get item-tag association class.
item_class = item.__class__
item_tag_assoc_class = self.get_tag_assoc_class( item_class )
if not item_tag_assoc_class:
return []
-
# Build select statement.
- cols_to_select = [ item_tag_assoc_class.table.c.tag_id, func.count('*') ]
- from_obj = item_tag_assoc_class.table.join( item_class.table ).join( model.Tag.table )
- where_clause = ( self.get_id_col_in_item_tag_assoc_table(item_class) == item.id )
- order_by = [ func.count("*").desc() ]
+ cols_to_select = [ item_tag_assoc_class.table.c.tag_id, func.count( '*' ) ]
+ from_obj = item_tag_assoc_class.table.join( item_class.table ).join( trans.app.model.Tag.table )
+ where_clause = ( self.get_id_col_in_item_tag_assoc_table( item_class ) == item.id )
+ order_by = [ func.count( "*" ).desc() ]
group_by = item_tag_assoc_class.table.c.tag_id
-
# Do query and get result set.
- query = select(columns=cols_to_select, from_obj=from_obj,
- whereclause=where_clause, group_by=group_by, order_by=order_by, limit=limit)
- result_set = sa_session.execute(query)
-
+ query = select( columns=cols_to_select,
+ from_obj=from_obj,
+ whereclause=where_clause,
+ group_by=group_by,
+ order_by=order_by,
+ limit=limit )
+ result_set = trans.sa_session.execute( query )
# Return community tags.
community_tags = []
for row in result_set:
tag_id = row[0]
- community_tags.append( self.get_tag_by_id( sa_session, tag_id ) )
-
+ community_tags.append( self.get_tag_by_id( trans, tag_id ) )
return community_tags
-
def remove_item_tag( self, trans, user, item, tag_name ):
"""Remove a tag from an item."""
# Get item tag association.
- item_tag_assoc = self._get_item_tag_assoc(user, item, tag_name)
-
+ item_tag_assoc = self._get_item_tag_assoc( user, item, tag_name )
# Remove association.
if item_tag_assoc:
# Delete association.
trans.sa_session.delete( item_tag_assoc )
- item.tags.remove(item_tag_assoc)
+ item.tags.remove( item_tag_assoc )
return True
-
return False
-
def delete_item_tags( self, trans, user, item ):
"""Delete tags from an item."""
# Delete item-tag associations.
for tag in item.tags:
trans.sa_session.delete( tag )
-
# Delete tags from item.
del item.tags[:]
-
- def item_has_tag(self, user, item, tag):
+ def item_has_tag( self, trans, user, item, tag ):
"""Returns true if item is has a given tag."""
# Get tag name.
- if isinstance(tag, basestring):
+ if isinstance( tag, basestring ):
tag_name = tag
- elif isinstance(tag, model.Tag):
+ elif isinstance( tag, trans.app.model.Tag ):
tag_name = tag.name
-
# Check for an item-tag association to see if item has a given tag.
- item_tag_assoc = self._get_item_tag_assoc(user, item, tag_name)
+ item_tag_assoc = self._get_item_tag_assoc( user, item, tag_name )
if item_tag_assoc:
return True
return False
-
-
- def apply_item_tags(self, db_session, user, item, tags_str):
+ def apply_item_tags( self, trans, user, item, tags_str ):
"""Apply tags to an item."""
# Parse tags.
- parsed_tags = self.parse_tags(tags_str)
-
+ parsed_tags = self.parse_tags( tags_str )
# Apply each tag.
for name, value in parsed_tags.items():
# Use lowercase name for searching/creating tag.
lc_name = name.lower()
-
# Get or create item-tag association.
- item_tag_assoc = self._get_item_tag_assoc(user, item, lc_name)
+ item_tag_assoc = self._get_item_tag_assoc( user, item, lc_name )
if not item_tag_assoc:
- #
# Create item-tag association.
- #
-
# Create tag; if None, skip the tag (and log error).
- tag = self._get_or_create_tag(db_session, lc_name)
+ tag = self._get_or_create_tag( trans, lc_name )
if not tag:
# Log error?
continue
-
# Create tag association based on item class.
item_tag_assoc_class = self.get_tag_assoc_class( item.__class__ )
item_tag_assoc = item_tag_assoc_class()
-
# Add tag to association.
- item.tags.append(item_tag_assoc)
+ item.tags.append( item_tag_assoc )
item_tag_assoc.tag = tag
- item_tag_assoc.user = user
-
+ item_tag_assoc.user = user
# Apply attributes to item-tag association. Strip whitespace from user name and tag.
lc_value = None
if value:
@@ -150,144 +119,142 @@
item_tag_assoc.user_tname = name
item_tag_assoc.user_value = value
item_tag_assoc.value = lc_value
-
- def get_tags_str(self, tags):
+ def get_tags_str( self, tags ):
"""Build a string from an item's tags."""
# Return empty string if there are no tags.
if not tags:
return ""
-
# Create string of tags.
tags_str_list = list()
for tag in tags:
tag_str = tag.user_tname
if tag.value is not None:
tag_str += ":" + tag.user_value
- tags_str_list.append(tag_str)
- return ", ".join(tags_str_list)
-
- def get_tag_by_id(self, db_session, tag_id):
+ tags_str_list.append( tag_str )
+ return ", ".join( tags_str_list )
+ def get_tag_by_id( self, trans, tag_id ):
"""Get a Tag object from a tag id."""
- return db_session.query( model.Tag ).filter_by( id=tag_id) .first()
-
- def get_tag_by_name(self, db_session, tag_name):
+ return trans.sa_session.query( trans.app.model.Tag ).filter_by( id=tag_id ).first()
+ def get_tag_by_name( self, trans, tag_name ):
"""Get a Tag object from a tag name (string)."""
if tag_name:
- return db_session.query( model.Tag ).filter_by( name=tag_name.lower() ).first()
+ return trans.sa_session.query( trans.app.model.Tag ).filter_by( name=tag_name.lower() ).first()
return None
-
- def _create_tag(self, db_session, tag_str):
+ def _create_tag( self, trans, tag_str ):
"""Create a Tag object from a tag string."""
- tag_hierarchy = tag_str.split(self.__class__.hierarchy_separator)
+ tag_hierarchy = tag_str.split( self.hierarchy_separator )
tag_prefix = ""
parent_tag = None
for sub_tag in tag_hierarchy:
# Get or create subtag.
- tag_name = tag_prefix + self._scrub_tag_name(sub_tag)
- tag = db_session.query( model.Tag ).filter_by( name=tag_name).first()
+ tag_name = tag_prefix + self._scrub_tag_name( sub_tag )
+ tag = trans.sa_session.query( trans.app.model.Tag ).filter_by( name=tag_name).first()
if not tag:
- tag = model.Tag(type=0, name=tag_name)
-
+ tag = trans.app.model.Tag( type=0, name=tag_name )
# Set tag parent.
tag.parent = parent_tag
-
# Update parent and tag prefix.
parent_tag = tag
- tag_prefix = tag.name + self.__class__.hierarchy_separator
+ tag_prefix = tag.name + self.hierarchy_separator
return tag
-
- def _get_or_create_tag(self, db_session, tag_str):
+ def _get_or_create_tag( self, trans, tag_str ):
"""Get or create a Tag object from a tag string."""
# Scrub tag; if tag is None after being scrubbed, return None.
- scrubbed_tag_str = self._scrub_tag_name(tag_str)
+ scrubbed_tag_str = self._scrub_tag_name( tag_str )
if not scrubbed_tag_str:
return None
-
# Get item tag.
- tag = self.get_tag_by_name(db_session, scrubbed_tag_str)
-
+ tag = self.get_tag_by_name( trans, scrubbed_tag_str )
# Create tag if necessary.
if tag is None:
- tag = self._create_tag(db_session, scrubbed_tag_str)
-
+ tag = self._create_tag( trans, scrubbed_tag_str )
return tag
-
def _get_item_tag_assoc( self, user, item, tag_name ):
- """Return ItemTagAssociation object for a user, item, and tag string; returns None if there is
- no such association."""
+ """
+ Return ItemTagAssociation object for a user, item, and tag string; returns None if there is
+ no such association.
+ """
scrubbed_tag_name = self._scrub_tag_name( tag_name )
for item_tag_assoc in item.tags:
if ( item_tag_assoc.user == user ) and ( item_tag_assoc.user_tname == scrubbed_tag_name ):
return item_tag_assoc
return None
-
- def parse_tags(self, tag_str):
- """Returns a list of raw (tag-name, value) pairs derived from a string; method scrubs tag names and values as well.
- Return value is a dictionary where tag-names are keys."""
+ def parse_tags( self, tag_str ):
+ """
+ Returns a list of raw (tag-name, value) pairs derived from a string; method scrubs tag names and values as well.
+ Return value is a dictionary where tag-names are keys.
+ """
# Gracefully handle None.
if not tag_str:
return dict()
-
# Split tags based on separators.
- reg_exp = re.compile('[' + self.__class__.tag_separators + ']')
- raw_tags = reg_exp.split(tag_str)
-
+ reg_exp = re.compile( '[' + self.tag_separators + ']' )
+ raw_tags = reg_exp.split( tag_str )
# Extract name-value pairs.
name_value_pairs = dict()
for raw_tag in raw_tags:
- nv_pair = self._get_name_value_pair(raw_tag)
+ nv_pair = self._get_name_value_pair( raw_tag )
scrubbed_name = self._scrub_tag_name( nv_pair[0] )
scrubbed_value = self._scrub_tag_value( nv_pair[1] )
name_value_pairs[scrubbed_name] = scrubbed_value
return name_value_pairs
-
- def _scrub_tag_value(self, value):
+ def _scrub_tag_value( self, value ):
"""Scrub a tag value."""
# Gracefully handle None:
if not value:
return None
-
# Remove whitespace from value.
- reg_exp = re.compile('\s')
- scrubbed_value = re.sub(reg_exp, "", value)
-
+ reg_exp = re.compile( '\s' )
+ scrubbed_value = re.sub( reg_exp, "", value )
return scrubbed_value
-
- def _scrub_tag_name(self, name):
+ def _scrub_tag_name( self, name ):
"""Scrub a tag name."""
# Gracefully handle None:
if not name:
return None
-
# Remove whitespace from name.
- reg_exp = re.compile('\s')
- scrubbed_name = re.sub(reg_exp, "", name)
-
+ reg_exp = re.compile( '\s' )
+ scrubbed_name = re.sub( reg_exp, "", name )
# Ignore starting ':' char.
- if scrubbed_name.startswith(self.__class__.hierarchy_separator):
+ if scrubbed_name.startswith( self.hierarchy_separator ):
scrubbed_name = scrubbed_name[1:]
-
# If name is too short or too long, return None.
- if len(scrubbed_name) < self.min_tag_len or len(scrubbed_name) > self.max_tag_len:
+ if len( scrubbed_name ) < self.min_tag_len or len( scrubbed_name ) > self.max_tag_len:
return None
-
return scrubbed_name
-
- def _scrub_tag_name_list(self, tag_name_list):
+ def _scrub_tag_name_list( self, tag_name_list ):
"""Scrub a tag name list."""
scrubbed_tag_list = list()
for tag in tag_name_list:
- scrubbed_tag_list.append( self._scrub_tag_name(tag) )
+ scrubbed_tag_list.append( self._scrub_tag_name( tag ) )
return scrubbed_tag_list
-
- def _get_name_value_pair(self, tag_str):
+ def _get_name_value_pair( self, tag_str ):
"""Get name, value pair from a tag string."""
# Use regular expression to parse name, value.
- reg_exp = re.compile( "[" + self.__class__.key_value_separators + "]" )
+ reg_exp = re.compile( "[" + self.key_value_separators + "]" )
name_value_pair = reg_exp.split( tag_str )
-
# Add empty slot if tag does not have value.
- if len(name_value_pair) < 2:
- name_value_pair.append(None)
-
- return name_value_pair
\ No newline at end of file
+ if len( name_value_pair ) < 2:
+ name_value_pair.append( None )
+ return name_value_pair
+
+class GalaxyTagHandler( TagHandler ):
+ def __init__( self ):
+ from galaxy import model
+ TagHandler.__init__( self )
+ self.item_tag_assoc_info["History"] = ItemTagAssocInfo( model.History,
+ model.HistoryTagAssociation,
+ model.HistoryTagAssociation.table.c.history_id )
+ self.item_tag_assoc_info["HistoryDatasetAssociation"] = \
+ ItemTagAssocInfo( model.HistoryDatasetAssociation,
+ model.HistoryDatasetAssociationTagAssociation,
+ model.HistoryDatasetAssociationTagAssociation.table.c.history_dataset_association_id )
+ self.item_tag_assoc_info["Page"] = ItemTagAssocInfo( model.Page,
+ model.PageTagAssociation,
+ model.PageTagAssociation.table.c.page_id )
+ self.item_tag_assoc_info["StoredWorkflow"] = ItemTagAssocInfo( model.StoredWorkflow,
+ model.StoredWorkflowTagAssociation,
+ model.StoredWorkflowTagAssociation.table.c.stored_workflow_id )
+ self.item_tag_assoc_info["Visualization"] = ItemTagAssocInfo( model.Visualization,
+ model.VisualizationTagAssociation,
+ model.VisualizationTagAssociation.table.c.visualization_id )
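For reference, a minimal sketch of how a controller might use the refactored tag handler API above. It assumes a request context trans that exposes trans.app.tag_handler, trans.sa_session and trans.user, plus an existing History object; the example tag string and its ',' / ':' separators are illustrative and depend on the separator class attributes defined earlier in tag_handler.py (not shown in this diff):

def tag_history_example( trans, history ):
    """Apply and read back tags on a History via the app-level tag handler."""
    tag_handler = trans.app.tag_handler
    # apply_item_tags() now takes trans (not a bare db_session) so it can reach
    # trans.sa_session and trans.app.model internally.
    tag_handler.apply_item_tags( trans, trans.user, history, "sequencing, lab:bx" )
    trans.sa_session.flush()
    # parse_tags() returns a dict of scrubbed name -> value pairs, e.g.
    # { 'sequencing': None, 'lab': 'bx' } for the string above.
    parsed = tag_handler.parse_tags( "sequencing, lab:bx" )
    # get_tags_str() renders an item's tags back into a display string.
    return tag_handler.get_tags_str( history.tags )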
diff -r 4fb48981bdb0 -r e3167368345a lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Apr 13 13:14:59 2010 -0400
+++ b/lib/galaxy/web/controllers/history.py Tue Apr 13 15:35:32 2010 -0400
@@ -5,7 +5,7 @@
from galaxy.model.orm import *
from galaxy.util.json import *
from galaxy.util.sanitize_html import sanitize_html
-from galaxy.tags.tag_handler import TagHandler
+from galaxy.tags.tag_handler import GalaxyTagHandler
from sqlalchemy.sql.expression import ClauseElement
import webhelpers, logging, operator
from datetime import datetime
diff -r 4fb48981bdb0 -r e3167368345a lib/galaxy/web/controllers/tag.py
--- a/lib/galaxy/web/controllers/tag.py Tue Apr 13 13:14:59 2010 -0400
+++ b/lib/galaxy/web/controllers/tag.py Tue Apr 13 15:35:32 2010 -0400
@@ -1,203 +1,185 @@
"""
Tags Controller: handles tagging/untagging of entities and provides autocomplete support.
"""
-
+import logging
from galaxy.web.base.controller import *
-from galaxy.tags.tag_handler import *
from sqlalchemy.sql.expression import func, and_
from sqlalchemy.sql import select
+log = logging.getLogger( __name__ )
+
class TagsController ( BaseController ):
-
- def __init__(self, app):
- BaseController.__init__(self, app)
- self.tag_handler = TagHandler()
-
+ def __init__( self, app ):
+ BaseController.__init__( self, app )
+ self.tag_handler = app.tag_handler
@web.expose
@web.require_login( "edit item tags" )
def get_tagging_elt_async( self, trans, item_id, item_class, elt_context="" ):
""" Returns HTML for editing an item's tags. """
item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
if not item:
- return trans.show_error_message( "No item of class %s with id % " % ( item_class, item_id ) )
- user = trans.get_user()
- return trans.fill_template( "/tagging_common.mako", tag_type="individual", user=trans.get_user(), tagged_item=item, elt_context=elt_context,
- in_form=False, input_size="22", tag_click_fn="default_tag_click_fn", use_toggle_link=False )
-
+ return trans.show_error_message( "No item of class %s with id %s " % ( item_class, item_id ) )
+ return trans.fill_template( "/tagging_common.mako",
+ tag_type="individual",
+ user=trans.user,
+ tagged_item=item,
+ elt_context=elt_context,
+ in_form=False,
+ input_size="22",
+ tag_click_fn="default_tag_click_fn",
+ use_toggle_link=False )
@web.expose
@web.require_login( "add tag to an item" )
def add_tag_async( self, trans, item_id=None, item_class=None, new_tag=None, context=None ):
- """ Add tag to an item. """
-
+ """ Add tag to an item. """
# Apply tag.
item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
- user = trans.get_user()
- self.tag_handler.apply_item_tags( trans.sa_session, user, item, new_tag.encode('utf-8') )
+ user = trans.user
+ self.tag_handler.apply_item_tags( trans, user, item, new_tag.encode( 'utf-8' ) )
trans.sa_session.flush()
-
# Log.
- params = dict( item_id=item.id, item_class=item_class, tag=new_tag)
+ params = dict( item_id=item.id, item_class=item_class, tag=new_tag )
trans.log_action( user, unicode( "tag" ), context, params )
-
@web.expose
@web.require_login( "remove tag from an item" )
def remove_tag_async( self, trans, item_id=None, item_class=None, tag_name=None, context=None ):
""" Remove tag from an item. """
-
# Remove tag.
- item = self._get_item( trans, item_class, trans.security.decode_id( item_id) )
- user = trans.get_user()
- self.tag_handler.remove_item_tag( trans, user, item, tag_name.encode('utf-8') )
+ item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
+ user = trans.user
+ self.tag_handler.remove_item_tag( trans, user, item, tag_name.encode( 'utf-8' ) )
trans.sa_session.flush()
-
# Log.
- params = dict( item_id=item.id, item_class=item_class, tag=tag_name)
- trans.log_action( user, unicode( "untag"), context, params )
-
+ params = dict( item_id=item.id, item_class=item_class, tag=tag_name )
+ trans.log_action( user, unicode( "untag" ), context, params )
# Retag an item. All previous tags are deleted and new tags are applied.
#@web.expose
@web.require_login( "Apply a new set of tags to an item; previous tags are deleted." )
def retag_async( self, trans, item_id=None, item_class=None, new_tags=None ):
""" Apply a new set of tags to an item; previous tags are deleted. """
-
# Apply tags.
item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
- user = trans.get_user()
- tag_handler.delete_item_tags( trans, item )
- self.tag_handler.apply_item_tags( trans.sa_session, user, item, new_tags.encode('utf-8') )
- trans.sa_session.flush()
-
+ user = trans.user
+ self.tag_handler.delete_item_tags( trans, item )
+ self.tag_handler.apply_item_tags( trans, user, item, new_tags.encode( 'utf-8' ) )
+ trans.sa_session.flush()
@web.expose
@web.require_login( "get autocomplete data for an item's tags" )
def tag_autocomplete_data( self, trans, q=None, limit=None, timestamp=None, item_id=None, item_class=None ):
""" Get autocomplete data for an item's tags. """
-
- #
# Get item, do security check, and get autocomplete data.
- #
item = None
if item_id is not None:
item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
- user = trans.get_user()
+ user = trans.user
item_class = self.get_class( item_class )
-
- q = q.encode('utf-8')
- if q.find(":") == -1:
- return self._get_tag_autocomplete_names(trans, q, limit, timestamp, user, item, item_class)
+ q = q.encode( 'utf-8' )
+ if q.find( ":" ) == -1:
+ return self._get_tag_autocomplete_names( trans, q, limit, timestamp, user, item, item_class )
else:
- return self._get_tag_autocomplete_values(trans, q, limit, timestamp, user, item, item_class)
-
+ return self._get_tag_autocomplete_values( trans, q, limit, timestamp, user, item, item_class )
def _get_tag_autocomplete_names( self, trans, q, limit, timestamp, user=None, item=None, item_class=None ):
- """Returns autocomplete data for tag names ordered from most frequently used to
- least frequently used."""
- #
+ """
+ Returns autocomplete data for tag names ordered from most frequently used to
+ least frequently used.
+ """
# Get user's item tags and usage counts.
- #
-
# Get item's class object and item-tag association class.
if item is None and item_class is None:
- raise RuntimeError("Both item and item_class cannot be None")
+ raise RuntimeError( "Both item and item_class cannot be None" )
elif item is not None:
item_class = item.__class__
-
- item_tag_assoc_class = self.tag_handler.get_tag_assoc_class(item_class)
-
+ item_tag_assoc_class = self.tag_handler.get_tag_assoc_class( item_class )
# Build select statement.
- cols_to_select = [ item_tag_assoc_class.table.c.tag_id, func.count('*') ]
- from_obj = item_tag_assoc_class.table.join( item_class.table ).join( model.Tag.table )
- where_clause = and_(
- model.Tag.table.c.name.like(q + "%"),
- item_tag_assoc_class.table.c.user_id == user.id
- )
- order_by = [ func.count("*").desc() ]
+ cols_to_select = [ item_tag_assoc_class.table.c.tag_id, func.count( '*' ) ]
+ from_obj = item_tag_assoc_class.table.join( item_class.table ).join( trans.app.model.Tag.table )
+ where_clause = and_( trans.app.model.Tag.table.c.name.like( q + "%" ),
+ item_tag_assoc_class.table.c.user_id == user.id )
+ order_by = [ func.count( "*" ).desc() ]
group_by = item_tag_assoc_class.table.c.tag_id
-
# Do query and get result set.
- query = select(columns=cols_to_select, from_obj=from_obj,
- whereclause=where_clause, group_by=group_by, order_by=order_by, limit=limit)
- result_set = trans.sa_session.execute(query)
-
+ query = select( columns=cols_to_select,
+ from_obj=from_obj,
+ whereclause=where_clause,
+ group_by=group_by,
+ order_by=order_by,
+ limit=limit )
+ result_set = trans.sa_session.execute( query )
# Create and return autocomplete data.
ac_data = "#Header|Your Tags\n"
for row in result_set:
- tag = self.tag_handler.get_tag_by_id(trans.sa_session, row[0])
-
+ tag = self.tag_handler.get_tag_by_id( trans, row[0] )
# Exclude tags that are already applied to the item.
- if ( item is not None ) and ( self.tag_handler.item_has_tag( trans.get_user(), item, tag ) ):
+ if ( item is not None ) and ( self.tag_handler.item_has_tag( trans, trans.user, item, tag ) ):
continue
# Add tag to autocomplete data. Use the most frequent name that user
# has employed for the tag.
- tag_names = self._get_usernames_for_tag(trans.sa_session, trans.get_user(),
- tag, item_class, item_tag_assoc_class)
+ tag_names = self._get_usernames_for_tag( trans, trans.user, tag, item_class, item_tag_assoc_class )
ac_data += tag_names[0] + "|" + tag_names[0] + "\n"
-
return ac_data
-
- def _get_tag_autocomplete_values(self, trans, q, limit, timestamp, user=None, item=None, item_class=None):
- """Returns autocomplete data for tag values ordered from most frequently used to
- least frequently used."""
-
- tag_name_and_value = q.split(":")
+ def _get_tag_autocomplete_values( self, trans, q, limit, timestamp, user=None, item=None, item_class=None ):
+ """
+ Returns autocomplete data for tag values ordered from most frequently used to
+ least frequently used.
+ """
+ tag_name_and_value = q.split( ":" )
tag_name = tag_name_and_value[0]
tag_value = tag_name_and_value[1]
- tag = self.tag_handler.get_tag_by_name(trans.sa_session, tag_name)
+ tag = self.tag_handler.get_tag_by_name( trans, tag_name )
# Don't autocomplete if tag doesn't exist.
if tag is None:
return ""
-
# Get item's class object and item-tag association class.
if item is None and item_class is None:
- raise RuntimeError("Both item and item_class cannot be None")
+ raise RuntimeError( "Both item and item_class cannot be None" )
elif item is not None:
item_class = item.__class__
-
- item_tag_assoc_class = self.tag_handler.get_tag_assoc_class(item_class)
-
+ item_tag_assoc_class = self.tag_handler.get_tag_assoc_class( item_class )
# Build select statement.
- cols_to_select = [ item_tag_assoc_class.table.c.value, func.count('*') ]
- from_obj = item_tag_assoc_class.table.join( item_class.table ).join( model.Tag.table )
+ cols_to_select = [ item_tag_assoc_class.table.c.value, func.count( '*' ) ]
+ from_obj = item_tag_assoc_class.table.join( item_class.table ).join( trans.app.model.Tag.table )
where_clause = and_( item_tag_assoc_class.table.c.user_id == user.id,
- model.Tag.table.c.id==tag.id,
- item_tag_assoc_class.table.c.value.like(tag_value + "%") )
- order_by = [ func.count("*").desc(), item_tag_assoc_class.table.c.value ]
+ trans.app.model.Tag.table.c.id == tag.id,
+ item_tag_assoc_class.table.c.value.like( tag_value + "%" ) )
+ order_by = [ func.count("*").desc(), item_tag_assoc_class.table.c.value ]
group_by = item_tag_assoc_class.table.c.value
-
# Do query and get result set.
- query = select(columns=cols_to_select, from_obj=from_obj,
- whereclause=where_clause, group_by=group_by, order_by=order_by, limit=limit)
- result_set = trans.sa_session.execute(query)
-
+ query = select( columns=cols_to_select,
+ from_obj=from_obj,
+ whereclause=where_clause,
+ group_by=group_by,
+ order_by=order_by,
+ limit=limit )
+ result_set = trans.sa_session.execute( query )
# Create and return autocomplete data.
- ac_data = "#Header|Your Values for '%s'\n" % (tag_name)
- tag_uname = self._get_usernames_for_tag(trans.sa_session, trans.get_user(), tag, item_class, item_tag_assoc_class)[0]
+ ac_data = "#Header|Your Values for '%s'\n" % ( tag_name )
+ tag_uname = self._get_usernames_for_tag( trans, trans.user, tag, item_class, item_tag_assoc_class )[0]
for row in result_set:
ac_data += tag_uname + ":" + row[0] + "|" + row[0] + "\n"
return ac_data
-
- def _get_usernames_for_tag(self, db_session, user, tag, item_class, item_tag_assoc_class):
- """ Returns an ordered list of the user names for a tag; list is ordered from
- most popular to least popular name."""
-
+ def _get_usernames_for_tag( self, trans, user, tag, item_class, item_tag_assoc_class ):
+ """
+ Returns an ordered list of the user names for a tag; list is ordered from
+ most popular to least popular name.
+ """
# Build select stmt.
- cols_to_select = [ item_tag_assoc_class.table.c.user_tname, func.count('*') ]
+ cols_to_select = [ item_tag_assoc_class.table.c.user_tname, func.count( '*' ) ]
where_clause = and_( item_tag_assoc_class.table.c.user_id == user.id,
item_tag_assoc_class.table.c.tag_id == tag.id )
group_by = item_tag_assoc_class.table.c.user_tname
- order_by = [ func.count("*").desc() ]
-
+ order_by = [ func.count( "*" ).desc() ]
# Do query and get result set.
- query = select(columns=cols_to_select, whereclause=where_clause,
- group_by=group_by, order_by=order_by)
- result_set = db_session.execute(query)
-
+ query = select( columns=cols_to_select,
+ whereclause=where_clause,
+ group_by=group_by,
+ order_by=order_by )
+ result_set = trans.sa_session.execute( query )
user_tag_names = list()
for row in result_set:
- user_tag_names.append(row[0])
-
+ user_tag_names.append( row[0] )
return user_tag_names
-
def _get_item( self, trans, item_class_name, id ):
""" Get an item based on type and id. """
item_class = self.tag_handler.item_tag_assoc_info[item_class_name].item_class
- item = trans.sa_session.query(item_class).filter("id=" + str(id))[0]
+ item = trans.sa_session.query( item_class ).filter( "id=" + str( id ) )[0]
return item
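For reference, a rough client-side sketch of exercising the async tag endpoints above over HTTP, in the same spirit as the TwillTestCase.add_tag() helper added later in this changeset. The base URL, the cookie-carrying opener (the endpoints require a logged-in session) and the encoded item id are assumptions, not values from this patch:

import urllib, urllib2

def add_tag_via_http( base_url, opener, encoded_item_id, item_class, context, new_tag ):
    # Builds the same query string that TwillTestCase.add_tag() uses.
    params = urllib.urlencode( dict( item_id=encoded_item_id,
                                     item_class=item_class,
                                     context=context,
                                     new_tag=new_tag ) )
    return opener.open( "%s/tag/add_tag_async?%s" % ( base_url, params ) ).read()

# e.g. opener = urllib2.build_opener( urllib2.HTTPCookieProcessor() ) after logging in, then
# add_tag_via_http( "http://localhost:8080", opener, encoded_id, "History", "history.mako", "hello" )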
diff -r 4fb48981bdb0 -r e3167368345a lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Tue Apr 13 13:14:59 2010 -0400
+++ b/lib/galaxy/web/controllers/user.py Tue Apr 13 15:35:32 2010 -0400
@@ -89,7 +89,7 @@
else:
refresh_frames = [ 'masthead', 'history' ]
else:
- refresh_frames = []
+ refresh_frames = [ 'masthead' ]
# Since logging an event requires a session, we'll log prior to ending the session
trans.log_event( "User logged out" )
trans.handle_user_logout()
diff -r 4fb48981bdb0 -r e3167368345a lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py Tue Apr 13 13:14:59 2010 -0400
+++ b/lib/galaxy/web/framework/helpers/grids.py Tue Apr 13 15:35:32 2010 -0400
@@ -3,7 +3,7 @@
from galaxy.web.base import controller
from galaxy.web.framework.helpers import iff
-from galaxy.tags.tag_handler import TagHandler
+from galaxy.tags.tag_handler import GalaxyTagHandler
from galaxy.web import url_for
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.odict import odict
@@ -399,7 +399,7 @@
return query
def get_filter( self, user, column_filter ):
# Parse filter to extract multiple tags.
- tag_handler = TagHandler()
+ tag_handler = GalaxyTagHandler()
if isinstance( column_filter, list ):
# Collapse list of tags into a single string; this is redundant but effective. TODO: fix this by iterating over tags.
column_filter = ",".join( column_filter )
@@ -421,7 +421,7 @@
in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter", use_toggle_link=True )
def get_filter( self, user, column_filter ):
# Parse filter to extract multiple tags.
- tag_handler = TagHandler()
+ tag_handler = GalaxyTagHandler()
if isinstance( column_filter, list ):
# Collapse list of tags into a single string; this is redundant but effective. TODO: fix this by iterating over tags.
column_filter = ",".join( column_filter )
diff -r 4fb48981bdb0 -r e3167368345a templates/tagging_common.mako
--- a/templates/tagging_common.mako Tue Apr 13 13:14:59 2010 -0400
+++ b/templates/tagging_common.mako Tue Apr 13 15:35:32 2010 -0400
@@ -4,10 +4,7 @@
from random import random
from sys import maxint
from math import floor
- from galaxy.tags.tag_handler import TagHandler
from galaxy.model import Tag, ItemTagAssociation
-
- tag_handler = TagHandler()
%>
## Render a tagging element if there is a tagged_item.
@@ -92,7 +89,7 @@
## Build HTML.
<%
elt_id = int ( floor ( random()*maxint ) )
- community_tags = tag_handler.get_community_tags(trans.sa_session, tagged_item, 10)
+ community_tags = trans.app.tag_handler.get_community_tags( trans, item=tagged_item, limit=10 )
%>
${self.render_tagging_element_html(elt_id=elt_id, tags=community_tags, use_toggle_link=use_toggle_link, editable=False, tag_type="community")}
diff -r 4fb48981bdb0 -r e3167368345a templates/user/index.mako
--- a/templates/user/index.mako Tue Apr 13 13:14:59 2010 -0400
+++ b/templates/user/index.mako Tue Apr 13 15:35:32 2010 -0400
@@ -13,10 +13,9 @@
<li><a href="${h.url_for( action='show_info' )}">${_('Manage your information')}</a></li>
<li><a href="${h.url_for( action='set_default_permissions' )}">${_('Change default permissions')}</a> for new histories</li>
%endif
- <li><a href="${h.url_for( action='logout' )}">${_('Logout')}</a></li>
</ul>
%else:
- %if not msg:
+ %if not message:
<p>${n_('You are currently not logged in.')}</p>
%endif
<ul>
diff -r 4fb48981bdb0 -r e3167368345a test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Apr 13 13:14:59 2010 -0400
+++ b/test/base/twilltestcase.py Tue Apr 13 15:35:32 2010 -0400
@@ -585,6 +585,16 @@
except:
pass
self.home()
+ def check_hda_attribute_info( self, hda_id, check_str1='', check_str2='', check_str3='', check_str4='' ):
+ """Edit history_dataset_association attribute information"""
+ if check_str1:
+ self.check_page_for_string( check_str1 )
+ if check_str2:
+ self.check_page_for_string( check_str2 )
+ if check_str3:
+ self.check_page_for_string( check_str3 )
+ if check_str4:
+ self.check_page_for_string( check_str4 )
def auto_detect_metadata( self, hda_id ):
"""Auto-detect history_dataset_association metadata"""
self.home()
@@ -2050,3 +2060,9 @@
else:
break
self.assertNotEqual(count, maxiter)
+
+ # Tests associated with tags
+ def add_tag( self, item_id, item_class, context, new_tag, check_str='' ):
+ self.visit_url( "%s/tag/add_tag_async?item_id=%s&item_class=%s&context=%s&new_tag=%s" % \
+ ( self.url, item_id, item_class, context, new_tag ) )
+
\ No newline at end of file
diff -r 4fb48981bdb0 -r e3167368345a test/functional/test_tags.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/test_tags.py Tue Apr 13 15:35:32 2010 -0400
@@ -0,0 +1,63 @@
+from base.twilltestcase import *
+from base.test_db_util import *
+
+class TestTags( TwillTestCase ):
+ # TODO: Add more functional test coverage for tags
+ def test_000_initiate_users( self ):
+ """Ensuring all required user accounts exist"""
+ self.logout()
+ self.login( email='test1(a)bx.psu.edu', username='regular-user1' )
+ global regular_user1
+ regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+ assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+ global regular_user1_private_role
+ regular_user1_private_role = get_private_role( regular_user1 )
+ self.logout()
+ self.login( email='test2(a)bx.psu.edu', username='regular-user2' )
+ global regular_user2
+ regular_user2 = get_user( 'test2(a)bx.psu.edu' )
+ assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+ global regular_user2_private_role
+ regular_user2_private_role = get_private_role( regular_user2 )
+ self.logout()
+ self.login( email='test3(a)bx.psu.edu', username='regular-user3' )
+ global regular_user3
+ regular_user3 = get_user( 'test3(a)bx.psu.edu' )
+ assert regular_user3 is not None, 'Problem retrieving user with email "test3(a)bx.psu.edu" from the database'
+ global regular_user3_private_role
+ regular_user3_private_role = get_private_role( regular_user3 )
+ self.logout()
+ self.login( email='test(a)bx.psu.edu', username='admin-user' )
+ global admin_user
+ admin_user = get_user( 'test(a)bx.psu.edu' )
+ assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+ global admin_user_private_role
+ admin_user_private_role = get_private_role( admin_user )
+ def test_005_add_tag_to_history( self ):
+ """Testing adding a tag to a history"""
+ # Logged in as admin_user
+ # Create a new, empty history named anonymous
+ name = 'anonymous'
+ self.new_history( name=name )
+ global history1
+ history1 = get_latest_history_for_user( admin_user )
+ self.add_tag( self.security.encode_id( history1.id ),
+ 'History',
+ 'history.mako',
+ 'hello' )
+ self.check_history_for_string( 'tags : {"hello"' )
+ def test_010_add_tag_to_history_item( self ):
+ """Testing adding a tag to a history item"""
+ # Logged in as admin_user
+ self.upload_file( '1.bed' )
+ latest_hda = get_latest_hda()
+ self.add_tag( self.security.encode_id( latest_hda.id ),
+ 'HistoryDatasetAssociation',
+ 'edit_attributes.mako',
+ 'goodbye' )
+ self.check_hda_attribute_info( 'tags : {"goodbye"' )
+ def test_999_reset_data_for_later_test_runs( self ):
+ """Reseting data to enable later test runs to to be valid"""
+ # logged in as admin_user
+ # Delete histories
+ self.delete_history( id=self.security.encode_id( history1.id ) )
details: http://www.bx.psu.edu/hg/galaxy/rev/4fb48981bdb0
changeset: 3637:4fb48981bdb0
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Tue Apr 13 13:14:59 2010 -0400
description:
Pack scripts.
diffstat:
static/scripts/packed/galaxy.base.js | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (8 lines):
diff -r 9678e1225651 -r 4fb48981bdb0 static/scripts/packed/galaxy.base.js
--- a/static/scripts/packed/galaxy.base.js Tue Apr 13 12:11:17 2010 -0400
+++ b/static/scripts/packed/galaxy.base.js Tue Apr 13 13:14:59 2010 -0400
@@ -1,1 +1,1 @@
-$(document).ready(function(){replace_big_select_inputs()}); [... remainder of the previous single-line minified galaxy.base.js omitted: the packed line was mangled by the mail archiver's hard line-wrapping ...]
\ No newline at end of file
+$(document).ready(function(){replace_big_select_inputs()}); [... remainder of the repacked single-line minified galaxy.base.js omitted: the repack picks up the earlier source change that adds an "unspecified (?)" entry, mapping to the "?" dbkey, to the autocomplete choices built in replace_big_select_inputs() ...]
\ No newline at end of file

16 Apr '10
details: http://www.bx.psu.edu/hg/galaxy/rev/9678e1225651
changeset: 3636:9678e1225651
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Apr 13 12:11:17 2010 -0400
description:
Allow setting the session cookie path to avoid conflicts when multiple Galaxy servers run behind the same hostname but not at the server root
diffstat:
lib/galaxy/config.py | 1 +
lib/galaxy/web/framework/__init__.py | 2 +-
2 files changed, 2 insertions(+), 1 deletions(-)
diffs (23 lines):
diff -r cdd8d520f3c7 -r 9678e1225651 lib/galaxy/config.py
--- a/lib/galaxy/config.py Tue Apr 13 11:23:19 2010 -0400
+++ b/lib/galaxy/config.py Tue Apr 13 12:11:17 2010 -0400
@@ -38,6 +38,7 @@
# Where dataset files are stored
self.file_path = resolve_path( kwargs.get( "file_path", "database/files" ), self.root )
self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
+ self.cookie_path = kwargs.get( "cookie_path", "/" )
# dataset Track files
self.track_store_path = kwargs.get( "track_store_path", "${extra_files_path}/tracks")
self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
diff -r cdd8d520f3c7 -r 9678e1225651 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Tue Apr 13 11:23:19 2010 -0400
+++ b/lib/galaxy/web/framework/__init__.py Tue Apr 13 12:11:17 2010 -0400
@@ -410,7 +410,7 @@
"""
Update the session cookie to match the current session.
"""
- self.set_cookie( self.security.encode_session_key( self.galaxy_session.session_key ), name=name )
+ self.set_cookie( self.security.encode_session_key( self.galaxy_session.session_key ), name=name, path=self.app.config.cookie_path )
def handle_user_login( self, user, webapp ):
"""
Login a new user (possibly newly created)
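For reference, a small sketch of how the new cookie_path option behaves, assuming it is set per instance in that instance's Galaxy config file; the /galaxy_a mount point is hypothetical, not taken from this changeset:

def session_cookie_path( config_kwargs ):
    # Mirrors the new line in lib/galaxy/config.py: the default "/" preserves the old
    # behaviour, while a per-instance path scopes that instance's session cookie so two
    # Galaxy servers behind the same hostname do not overwrite each other's cookies.
    return config_kwargs.get( "cookie_path", "/" )

# Two hypothetical instances proxied under one hostname:
assert session_cookie_path( {} ) == "/"                                      # http://host/
assert session_cookie_path( { "cookie_path": "/galaxy_a" } ) == "/galaxy_a"  # http://host/galaxy_a/

The value then reaches the browser through set_cookie( ..., path=self.app.config.cookie_path ) as shown in the framework diff above.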