galaxy-commits

galaxy-dist commit 6d218af7906a: Add history import and export functionality to Galaxy. Options for exporting the current history to a file or importing a history from a file are now listed in the history options menu. Also modified the form builder to support file inputs.
by commits-noreply@bitbucket.org 07 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User jeremy goecks <jeremy.goecks(a)emory.edu>
# Date 1275581854 14400
# Node ID 6d218af7906af1f9524d02f785885bf1fd508f16
# Parent a12eeed213ff6d8581073ad77914b12dfc26f9bd
Add history import and export functionality to Galaxy. Options for exporting the current history to a file or importing a history from a file are now listed in the history options menu. Also modified the form builder to support file inputs.
--- a/templates/root/index.mako
+++ b/templates/root/index.mako
@@ -35,12 +35,19 @@
"Show structure": function() {
galaxy_main.location = "${h.url_for( controller='history', action='display_structured' )}";
},
+ "Export to File": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='export_archive' )}";
+ },
"Delete": function()
{
if ( confirm( "Really delete the current history?" ) )
{
galaxy_main.location = "${h.url_for( controller='history', action='delete_current' )}";
}
+ },
+ "Other Actions": null,
+ "Import from File": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='import_archive' )}";
}
});
});
--- a/templates/form.mako
+++ b/templates/form.mako
@@ -55,7 +55,18 @@
<div class="form" style="margin: 1em"><div class="form-title">${form.title}</div><div class="form-body">
- <form name="${form.name}" action="${form.action}" method="post" >
+ <%
+ has_file_input = False
+ for input in form.inputs:
+ if input.type == 'file':
+ has_file_input = True
+ break
+ %>
+ <form name="${form.name}" action="${form.action}" method="post"
+ %if has_file_input:
+ enctype="multipart/form-data"
+ %endif
+ >
%for input in form.inputs:
<%
cls = "form-row"
--- /dev/null
+++ b/database/export/info.txt
@@ -0,0 +1,1 @@
+This folder contains the data that can be exported.
--- a/lib/galaxy/web/controllers/history.py
+++ b/lib/galaxy/web/controllers/history.py
@@ -6,9 +6,10 @@ from galaxy.model.mapping import desc
from galaxy.model.orm import *
from galaxy.util.json import *
from galaxy.util.sanitize_html import sanitize_html
+from galaxy.tools.actions import upload_common
from galaxy.tags.tag_handler import GalaxyTagHandler
from sqlalchemy.sql.expression import ClauseElement
-import webhelpers, logging, operator
+import webhelpers, logging, operator, tempfile, subprocess, shutil
from datetime import datetime
from cgi import escape
@@ -441,6 +442,226 @@ class HistoryController( BaseController,
self.add_item_annotation( trans, history, new_annotation )
trans.sa_session.flush()
return new_annotation
+
+ @web.expose
+ def import_archive( self, trans, archived_history=None ):
+ """ Import a history. """
+
+ if archived_history is not None:
+ # Import archived history.
+ try:
+ archive_file = archived_history.file
+
+ # Unpack archive in temporary directory.
+ temp_output_dir = tempfile.mkdtemp()
+ cmd = "pax -z -r < %s " % archive_file.name
+ temp_stderr_name = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name
+ temp_stderr = open( temp_stderr_name, 'wb' )
+ proc = subprocess.Popen( args=cmd, shell=True, cwd=temp_output_dir, stderr=temp_stderr.fileno() )
+ returncode = proc.wait()
+ temp_stderr.close()
+
+ # Read history attributes.
+ history_attr_in = open( '%s/%s' % ( temp_output_dir, 'history_attrs.txt'), 'rb' )
+ history_attr_str = ''
+ buffsize = 1048576
+ try:
+ while True:
+ history_attr_str += history_attr_in.read( buffsize )
+ if not history_attr_str or len( history_attr_str ) % buffsize != 0:
+ break
+ except OverflowError:
+ pass
+ history_attrs = from_json_string( history_attr_str )
+
+ # Create history.
+ # TODO: set tags, annotations.
+ new_history = model.History( name='imported: %s' % history_attrs['name'].encode( 'utf-8' ), user=trans.user )
+ trans.sa_session.add( new_history )
+ trans.sa_session.flush()
+ # TODO: Ignore hid_counter for now since it just artificially increases the hid for all the history's HDAs.
+ #new_history.hid_counter = history_attrs['hid_counter']
+ new_history.genome_build = history_attrs['genome_build']
+ trans.sa_session.flush()
+
+ # Read datasets attributes.
+ datasets_attr_in = open( '%s/%s' % ( temp_output_dir, 'datasets_attrs.txt'), 'rb' )
+ datasets_attr_str = ''
+ buffsize = 1048576
+ try:
+ while True:
+ datasets_attr_str += datasets_attr_in.read( buffsize )
+ if not datasets_attr_str or len( datasets_attr_str ) % buffsize != 0:
+ break
+ except OverflowError:
+ pass
+ datasets_attrs = from_json_string( datasets_attr_str )
+
+ # Create datasets.
+ for dataset_attrs in datasets_attrs:
+ metadata = dataset_attrs['metadata']
+
+ # Create dataset and HDA.
+ hda = trans.app.model.HistoryDatasetAssociation( name = dataset_attrs['name'].encode( 'utf-8' ),
+ extension = dataset_attrs['extension'],
+ hid = dataset_attrs['hid'],
+ info = dataset_attrs['info'].encode( 'utf-8' ),
+ blurb = dataset_attrs['blurb'],
+ peek = dataset_attrs['peek'],
+ designation = dataset_attrs['designation'],
+ visible = dataset_attrs['visible'],
+ dbkey = metadata['dbkey'],
+ metadata = metadata,
+ history = new_history,
+ create_dataset = True,
+ sa_session = trans.sa_session )
+ hda.state = hda.states.OK
+ trans.sa_session.add( hda )
+ trans.sa_session.flush()
+ new_history.add_dataset( hda, genome_build = None )
+ permissions = trans.app.security_agent.history_get_default_permissions( new_history )
+ trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions )
+ trans.sa_session.flush()
+
+ # Copy dataset data.
+ temp_dataset_name = '%s/datasets/%s' % ( temp_output_dir, dataset_attrs['file_name'] )
+ shutil.copyfile( temp_dataset_name, hda.file_name )
+
+ # TODO: set tags, annotations.
+
+ # Cleanup.
+ if os.path.exists( temp_output_dir ):
+ shutil.rmtree( temp_output_dir )
+
+ return trans.show_ok_message( message="History '%s' has been imported. " % history_attrs['name'] )
+ except Exception, e:
+ return trans.show_error_message( 'Error importing history archive. ' + str( e ) )
+
+
+ return trans.show_form(
+ web.FormBuilder( web.url_for(), "Import a History from an Archive", submit_text="Submit" )
+ .add_input( "file", "Archived History File", "archived_history", value=None, error=None )
+ )
+
+ @web.expose
+ def export_archive( self, trans, id=None ):
+ """ Export a history. """
+
+ # Get history to export.
+ if id:
+ history = self.get_history( trans, id, check_ownership=False, check_accessible=True )
+ else:
+ # Use current history.
+ history = trans.history
+
+ if not history:
+ return trans.show_error_message( "This history does not exist or you cannot export this history." )
+
+ history_export_dir_name = "./database/export"
+ archive_file_name = '%s/%s.tar.gz' % ( history_export_dir_name, trans.security.encode_id( history.id ) )
+ # TODO: for now, always create archive when exporting; this is for debugging purposes.
+ if True:
+ # Condition for only creating an archive when history is newer than archive:
+ #not os.path.exists ( archive_file_name ) or datetime.utcfromtimestamp( os.path.getmtime( archive_file_name ) ) < history.update_time:
+
+ # Create archive and stream back to client.
+
+ # Simple method to convert strings to unicode in utf-8 format. Method should be used for all user input.
+ def unicode_wrangler( a_string ):
+ a_string_type = type ( a_string )
+ if a_string_type is str:
+ return unicode( a_string, 'utf-8' )
+ elif a_string_type is unicode:
+ return a_string.encode( 'utf-8' )
+
+ try:
+ # Use temporary directory for temp output files.
+ temp_output_dir = tempfile.mkdtemp()
+
+ # Write history attributes to file.
+ # TODO: include tags, annotations.
+ history_attrs = {
+ "create_time" : history.create_time.__str__(),
+ "update_time" : history.update_time.__str__(),
+ "name" : unicode_wrangler( history.name ),
+ "hid_counter" : history.hid_counter,
+ "genome_build" : history.genome_build
+ }
+ history_attrs_file_name = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name
+ history_attrs_out = open( history_attrs_file_name, 'w' )
+ history_attrs_out.write( to_json_string( history_attrs ) )
+ history_attrs_out.close()
+ new_name = '%s/%s' % ( temp_output_dir, "history_attrs.txt" )
+ os.rename( history_attrs_file_name, new_name )
+ history_attrs_file_name = new_name
+
+ # Write datasets' attributes to file.
+ # TODO: include tags, annotations.
+ datasets = self.get_history_datasets( trans, history )
+ datasets_attrs = []
+ for dataset in datasets:
+ attribute_dict = {
+ "create_time" : dataset.create_time.__str__(),
+ "update_time" : dataset.update_time.__str__(),
+ "hid" : dataset.hid,
+ "name" : unicode_wrangler( dataset.name ),
+ "info" : unicode_wrangler( dataset.info ),
+ "blurb" : dataset.blurb,
+ "peek" : dataset.peek,
+ "extension" : dataset.extension,
+ "metadata" : dict( dataset.metadata.items() ),
+ "parent_id" : dataset.parent_id,
+ "designation" : dataset.designation,
+ "deleted" : dataset.deleted,
+ "visible" : dataset.visible,
+ "file_name" : dataset.file_name.split('/')[-1]
+ }
+ datasets_attrs.append( attribute_dict )
+ datasets_attrs_file_name = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name
+ datasets_attrs_out = open( datasets_attrs_file_name, 'w' )
+ datasets_attrs_out.write( to_json_string( datasets_attrs ) )
+ datasets_attrs_out.close()
+ new_name = '%s/%s' % ( temp_output_dir, "datasets_attrs.txt" )
+ os.rename( datasets_attrs_file_name, new_name )
+ datasets_attrs_file_name = new_name
+
+ # Write temp file with all files to archive. These files are (a) history attributes file; (b) datasets attributes file; and (c)
+ # datasets files.
+ archive_list_file_name = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name
+ archive_list_out = open( archive_list_file_name, 'w' )
+ archive_list_out.write( '%s\n' % history_attrs_file_name )
+ archive_list_out.write( '%s\n' % datasets_attrs_file_name )
+ for dataset in datasets:
+ archive_list_out.write( '%s\n' % dataset.file_name )
+ archive_list_out.close()
+
+ # Use 'pax' to create compressed tar archive of history's datasets. -s options uses a regular expression to replace path
+ # information.
+ cmd = "pax -w -s ',.*history_attrs.txt,history_attrs.txt,' -s ',.*datasets_attrs.txt,datasets_attrs.txt,' -s ',/.*/,datasets/,' -x tar -z -f %s.tar.gz < %s" % ( trans.security.encode_id( history.id ), archive_list_file_name )
+ temp_stderr_name = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name
+ temp_stderr = open( temp_stderr_name, 'wb' )
+ proc = subprocess.Popen( args=cmd, shell=True, cwd=history_export_dir_name, stderr=temp_stderr.fileno() )
+ returncode = proc.wait()
+ temp_stderr.close()
+
+ # Remove temp directory.
+ if os.path.exists( temp_output_dir ):
+ shutil.rmtree( temp_output_dir )
+
+ except Exception, e:
+ return trans.show_error_message( 'Error creating history archive. ' + str( e ) )
+
+ # Stream archive.
+ archive_file_name = '%s/%s.tar.gz' % ( history_export_dir_name, trans.security.encode_id( history.id ) )
+ if os.path.exists( archive_file_name ):
+ valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ hname = history.name
+ hname = ''.join(c in valid_chars and c or '_' for c in hname)[0:150]
+ trans.response.headers["Content-Disposition"] = "attachment; filename=Galaxy-History-%s.tar.gz" % ( hname )
+ trans.response.set_content_type( 'application/x-gzip' )
+ return open( archive_file_name )
+ else:
+ return
@web.expose
@web.json
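
The export path above hands a file list to pax so that history_attrs.txt and datasets_attrs.txt end up at the archive root and the dataset files end up under datasets/. A minimal sketch of the same layout using only the standard-library tarfile module (a hypothetical helper for illustration, not the code in this commit):

import tarfile

def build_history_archive(archive_path, history_attrs_file, datasets_attrs_file, dataset_paths):
    # Reproduce the layout the pax command creates: attribute files at the
    # archive root, dataset files under datasets/, gzip-compressed tar output.
    archive = tarfile.open(archive_path, 'w:gz')
    try:
        archive.add(history_attrs_file, arcname='history_attrs.txt')
        archive.add(datasets_attrs_file, arcname='datasets_attrs.txt')
        for path in dataset_paths:
            archive.add(path, arcname='datasets/' + path.split('/')[-1])
    finally:
        archive.close()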

galaxy-dist commit 1dba26a363ab: Enhancements for the library dataset information page: display all metadata elements, and if in the admin view, display all undeleted history items and other undeleted library datasets that use the current library dataset's disk file.
by commits-noreply@bitbucket.org 07 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Greg Von Kuster <greg(a)bx.psu.edu>
# Date 1275503359 14400
# Node ID 1dba26a363abfb488b8021f3c86c75651fce81b0
# Parent ea2658d131b4c54fca14cdf2b5a66fbf128eaf40
Enhancements for the library dataset information page: display all metadata elements, and if in the admin view, display all undeleted history items and other undeleted library datasets that use the current library dataset's disk file.
--- a/templates/library/common/common.mako
+++ b/templates/library/common/common.mako
@@ -11,9 +11,9 @@
can_modify = trans.app.security_agent.can_modify_library_item( current_user_roles, item )
%>
%if widgets:
- <p/>
- <div class="toolForm">
- %if editable and ( cntrller=='library_admin' or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
+ %if editable and ( cntrller=='library_admin' or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
+ <p/>
+ <div class="toolForm"><div class="toolFormTitle">
%if inherited:
Other information <i>- this is an inherited template and is not required to be used with this ${item_type}</i>
@@ -54,20 +54,23 @@
</div></form></div>
- %else:
- <% contents = False %>
- %for i, field in enumerate( widgets ):
- %if field[ 'widget' ].value:
- <%
- contents = True
- break
- %>
- %endif
- %endfor
- %if contents:
- <div class="toolForm">
- <div class="toolFormTitle">Other information about ${item.name}</div>
- <div class="toolFormBody">
+ </div>
+ <p/>
+ %else:
+ <% contents = False %>
+ %for i, field in enumerate( widgets ):
+ %if field[ 'widget' ].value:
+ <%
+ contents = True
+ break
+ %>
+ %endif
+ %endfor
+ %if contents:
+ <p/>
+ <div class="toolForm">
+ <div class="toolFormTitle">Other information about ${item.name}</div>
+ <div class="toolFormBody">
%for i, field in enumerate( widgets ):
%if field[ 'widget' ].value:
<div class="form-row">
@@ -81,9 +84,10 @@
%endif
%endfor
</div>
- %endif
+ </div>
+ <p/>
%endif
- </div>
+ %endif
%endif
</%def>
--- a/lib/galaxy/web/controllers/library_common.py
+++ b/lib/galaxy/web/controllers/library_common.py
@@ -523,6 +523,20 @@ class LibraryCommon( BaseController ):
message=util.sanitize_text( message ),
status='error' ) )
library = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( library_id ) )
+ if cntrller == 'library_admin':
+ # Get all associated hdas and lddas that use the same disk file.
+ associated_hdas = trans.sa_session.query( trans.model.HistoryDatasetAssociation ) \
+ .filter( and_( trans.model.HistoryDatasetAssociation.deleted == False,
+ trans.model.HistoryDatasetAssociation.dataset_id == ldda.dataset_id ) ) \
+ .all()
+ associated_lddas = trans.sa_session.query( trans.model.LibraryDatasetDatasetAssociation ) \
+ .filter( and_( trans.model.LibraryDatasetDatasetAssociation.deleted == False,
+ trans.model.LibraryDatasetDatasetAssociation.dataset_id == ldda.dataset_id,
+ trans.model.LibraryDatasetDatasetAssociation.id != ldda.id ) ) \
+ .all()
+ else:
+ associated_hdas = []
+ associated_lddas = []
# See if we have any associated templates
widgets = []
info_association, inherited = ldda.get_info_association()
@@ -534,6 +548,8 @@ class LibraryCommon( BaseController ):
use_panels=use_panels,
ldda=ldda,
library=library,
+ associated_hdas=associated_hdas,
+ associated_lddas=associated_lddas,
show_deleted=show_deleted,
widgets=widgets,
current_user_roles=current_user_roles,
@@ -702,15 +718,15 @@ class LibraryCommon( BaseController ):
if error:
status = 'error'
trans.response.send_redirect( web.url_for( controller='library_common',
- action='upload_library_dataset',
- cntrller=cntrller,
- library_id=library_id,
- folder_id=folder_id,
- replace_id=replace_id,
- upload_option=upload_option,
- show_deleted=show_deleted,
- message=util.sanitize_text( message ),
- status='error' ) )
+ action='upload_library_dataset',
+ cntrller=cntrller,
+ library_id=library_id,
+ folder_id=folder_id,
+ replace_id=replace_id,
+ upload_option=upload_option,
+ show_deleted=show_deleted,
+ message=util.sanitize_text( message ),
+ status='error' ) )
else:
# See if we have any inherited templates, but do not inherit contents.
@@ -722,13 +738,13 @@ class LibraryCommon( BaseController ):
template_id = 'None'
widgets = []
created_outputs_dict = trans.webapp.controllers[ 'library_common' ].upload_dataset( trans,
- cntrller=cntrller,
- library_id=library_id,
- folder_id=folder_id,
- template_id=template_id,
- widgets=widgets,
- replace_dataset=replace_dataset,
- **kwd )
+ cntrller=cntrller,
+ library_id=library_id,
+ folder_id=folder_id,
+ template_id=template_id,
+ widgets=widgets,
+ replace_dataset=replace_dataset,
+ **kwd )
if created_outputs_dict:
total_added = len( created_outputs_dict.keys() )
ldda_id_list = [ str( v.id ) for k, v in created_outputs_dict.items() ]
@@ -1094,6 +1110,8 @@ class LibraryCommon( BaseController ):
library = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( library_id ) )
roles = trans.app.security_agent.get_legitimate_roles( trans, library, cntrller )
return trans.fill_template( "/library/common/upload.mako",
+ action='add_history_datasets_to_library',
+ cntrller=cntrller,
upload_option=upload_option,
library_id=library_id,
folder_id=folder_id,
@@ -1877,9 +1895,24 @@ def activatable_folders_and_lddas( trans
.all()
return folders, lddas
def branch_deleted( folder ):
- # Return True if a folder belongs to a branc that has been deleted
+ # Return True if a folder belongs to a branch that has been deleted
if folder.deleted:
return True
if folder.parent:
return branch_deleted( folder.parent )
return False
+def get_containing_library_from_library_dataset( trans, library_dataset ):
+ """Given a library_dataset, get the containing library"""
+ folder = library_dataset.folder
+ parent = folder
+ while folder.parent:
+ parent = folder.parent
+ # We have parent set to the library's root folder, which has the
+ # same name as the library
+ for library in trans.sa_session.query( trans.model.Library ) \
+ .filter( and_( trans.model.Library.table.c.deleted == False,
+ trans.model.Library.table.c.name == parent.name ) ):
+ if library.root_folder == parent:
+ return library
+ return None
+
--- a/templates/library/common/ldda_info.mako
+++ b/templates/library/common/ldda_info.mako
@@ -3,7 +3,8 @@
<%namespace file="/library/common/common.mako" import="render_template_info" /><%
from galaxy import util
- from galaxy.web.controllers.library_common import branch_deleted
+ from galaxy.web.controllers.library_common import branch_deleted, get_containing_library_from_library_dataset
+ from galaxy.web.framework.helpers import time_ago
if ldda == ldda.library_dataset.library_dataset_dataset_association:
current_version = True
@@ -64,11 +65,13 @@
%endif
</div><div class="toolFormBody">
- <div class="form-row">
- <label>Message:</label>
- <pre>${ldda.message}</pre>
- <div style="clear: both"></div>
- </div>
+ %if ldda.message:
+ <div class="form-row">
+ <label>Message:</label>
+ <pre>${ldda.message}</pre>
+ <div style="clear: both"></div>
+ </div>
+ %endif
<div class="form-row"><label>Uploaded by:</label>
${uploaded_by}
@@ -80,6 +83,11 @@
<div style="clear: both"></div></div><div class="form-row">
+ <label>File size:</label>
+ ${ldda.get_size( nice_size=True )}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row"><label>Data type:</label>
${ldda.ext}
<div style="clear: both"></div>
@@ -97,6 +105,20 @@
<div class="form-row"><div>${ldda.blurb}</div></div>
+ %for name, spec in ldda.metadata.spec.items():
+ <div class="form-row">
+ <label>${spec.desc.replace( ' (click box & select)', '' )}:</label>
+ <%
+ metadata_val = ldda.metadata.get( name )
+ if isinstance( metadata_val, trans.model.MetadataFile ):
+ metadata_val = metadata_val.file_name
+ elif isinstance( metadata_val, list ):
+ metadata_val = ', '.join( metadata_val )
+ %>
+ ${metadata_val}
+ <div style="clear: both"></div>
+ </div>
+ %endfor
%if ldda.peek != "no peek":
<div class="form-row"><div id="info${ldda.id}" class="historyItemBody">
@@ -110,6 +132,79 @@
%if widgets:
${render_template_info( cntrller=cntrller, item_type='ldda', library_id=library_id, widgets=widgets, info_association=info_association, inherited=inherited, folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), ldda_id=trans.security.encode_id( ldda.id ), editable=False )}
%endif
+%if cntrller == 'library_admin':
+ %if associated_hdas:
+ <p/>
+ <b>History items that use this library dataset's disk file</b>
+ <div class="toolForm">
+ <table class="grid">
+ <thead>
+ <tr>
+ <th>History</th>
+ <th>History Item</th>
+ <th>Last Updated</th>
+ <th>User</th>
+ </tr>
+ </thead>
+ %for hda in associated_hdas:
+ <tr>
+ <td><a target="_blank" href="${h.url_for( controller='history', action='view', id=trans.security.encode_id( hda.history.id ) )}">${hda.history.get_display_name()}</a></td>
+ <td>${hda.get_display_name()}</td>
+ <td>${time_ago( hda.update_time )}</td>
+ <td>
+ %if hda.history.user:
+ ${hda.history.user.email}
+ %else:
+ anonymous
+ %endif
+ </td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <p/>
+ %endif
+ %if associated_lddas:
+ <p/>
+ <b>Other library datasets that use this library dataset's disk file</b>
+ <div class="toolForm">
+ <table class="grid">
+ <thead>
+ <tr>
+ <th>Library</th>
+ <th>Library Folder</th>
+ <th>Library Dataset</th>
+ <th>Last Updated</th>
+ <th>User</th>
+ </tr>
+ </thead>
+ %for copied_ldda in associated_lddas:
+ <% containing_library = get_containing_library_from_library_dataset( trans, copied_ldda.library_dataset ) %>
+ <tr>
+ <td>
+ %if containing_library:
+ <a href="${h.url_for( controller='library_common', action='browse_library', id=trans.security.encode_id( containing_library.id ), cntrller=cntrller, use_panels=use_panels )}">${containing_library.get_display_name()}</a>
+ %else:
+ error finding library
+ %endif
+ </td>
+ <td>${copied_ldda.library_dataset.folder.get_display_name()}</td>
+ <td>${copied_ldda.get_display_name()}</td>
+ <td>${time_ago( copied_ldda.update_time )}</td>
+ <td>
+ %if copied_ldda.user:
+ ${copied_ldda.user.email}
+ %else:
+ anonymous
+ %endif
+ </td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <p/>
+ %endif
+%endif
%if current_version:
<% expired_lddas = [ e_ldda for e_ldda in ldda.library_dataset.expired_datasets ] %>
%if expired_lddas:

galaxy-dist commit cea1569d9efd: add fly_modencode, worm_modencode, and wormbase tools to main
by commits-noreply@bitbucket.org 01 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Nate Coraor <nate(a)bx.psu.edu>
# Date 1275405570 14400
# Node ID cea1569d9efd3f89188f455e9eb101ac3ce48594
# Parent f65470ca24068dfb7e8c4539e2c30f64402224c1
add fly_modencode, worm_modencode, and wormbase tools to main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -9,6 +9,9 @@
<tool file="data_source/biomart.xml" /><tool file="data_source/gramene_mart.xml" /><tool file="data_source/flymine.xml" />
+ <tool file="data_source/fly_modencode.xml" />
+ <tool file="data_source/worm_modencode.xml" />
+ <tool file="data_source/wormbase.xml" /><tool file="data_source/eupathdb.xml" /><tool file="data_source/encode_db.xml" /><tool file="data_source/epigraph_import.xml" />

galaxy-dist commit bf51b81ec7b2: Add missing location files for buildbot
by commits-noreply@bitbucket.org 01 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Nate Coraor <nate(a)bx.psu.edu>
# Date 1274977347 14400
# Node ID bf51b81ec7b25d05676a408d0d12e773d538ef10
# Parent fbb990ec01fa7ee3bd9b13ea3f610c8463ad64b8
Add missing location files for buildbot
--- a/buildbot_setup.sh
+++ b/buildbot_setup.sh
@@ -25,6 +25,7 @@ case "$OSTYPE" in
esac
LINKS="
+/galaxy/data/location/add_scores.loc
/galaxy/data/location/alignseq.loc
/galaxy/data/annotation_profiler
/galaxy/data/annotation_profiler/annotation_profiler.loc
@@ -35,6 +36,7 @@ LINKS="
/galaxy/data/location/bowtie_indices.loc
/galaxy/data/location/bowtie_indices_color.loc
/galaxy/data/location/bwa_index.loc
+/galaxy/data/location/codingSnps.loc
/galaxy/data/location/encode_datasets.loc
/galaxy/home/universe/encode_feature_partitions
/galaxy/data/location/lastz_seqs.loc
details: http://www.bx.psu.edu/hg/galaxy/rev/58108ced520b
changeset: 3820:58108ced520b
user: Nate Coraor <nate(a)bx.psu.edu>
date: Mon May 24 16:11:13 2010 -0400
description:
Committed mod_zip code from the wrong repo
diffstat:
lib/galaxy/web/controllers/library_common.py | 14 ++++++++------
1 files changed, 8 insertions(+), 6 deletions(-)
diffs (48 lines):
diff -r d1624544bc55 -r 58108ced520b lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Mon May 24 16:00:03 2010 -0400
+++ b/lib/galaxy/web/controllers/library_common.py Mon May 24 16:11:13 2010 -0400
@@ -1267,7 +1267,7 @@
for fname, relpath in self.files.items():
size = os.stat( fname ).st_size
quoted_fname = urllib.quote_plus( fname, '/' )
- rval += '- %i %s%s %s\n' % ( size, self.url_base, quoted_fname, relpath )
+ rval += '- %i %s%s %s\r\n' % ( size, self.url_base, quoted_fname, relpath )
return rval
# Perform an action on a list of library datasets.
params = util.Params( kwd )
@@ -1429,6 +1429,8 @@
message = "Unable to create archive for download, please report this error"
status = 'error'
if not error:
+ lname = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( library_id ) ).name
+ fname = lname.replace( ' ', '_' ) + '_files'
if action == 'zip':
archive.close()
tmpfh = open( tmpf )
@@ -1443,21 +1445,21 @@
status = 'error'
if not error:
trans.response.set_content_type( "application/x-zip-compressed" )
- trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext)
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (fname,outext)
return tmpfh
elif action == 'ngxzip':
- #trans.response.set_content_type( "application/x-zip-compressed" )
- #trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext)
+ trans.response.set_content_type( "application/x-zip-compressed" )
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (fname,outext)
trans.response.headers[ "X-Archive-Files" ] = "zip"
return archive
else:
trans.response.set_content_type( "application/x-tar" )
- trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext)
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (fname,outext)
archive.wsgi_status = trans.response.wsgi_status()
archive.wsgi_headeritems = trans.response.wsgi_headeritems()
return archive.stream
else: # unknown action
- message = '### unknown action = %s in act_on_multiple_datasets' % action
+ message = '### unknown action = %s in act_on_multiple_datasets' % action
return trans.response.send_redirect( web.url_for( controller='library_common',
action='browse_library',
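
The ngxzip branch above relies on nginx's mod_zip: the response carries the X-Archive-Files: zip header plus a plain-text manifest with one "crc32 size url name" line per file ("-" means no precomputed CRC-32), and this changeset switches those manifest lines to CRLF endings. A small standalone sketch of building such a manifest, mirroring the loop in the diff (hypothetical helper name and arguments):

import os
import urllib

def ngxzip_manifest(files, url_base):
    # files: iterable of (disk_path, name_inside_zip) pairs.
    # Each mod_zip manifest line is "crc32 size url name"; "-" skips the CRC.
    lines = []
    for fname, relpath in files:
        size = os.stat(fname).st_size
        quoted_fname = urllib.quote_plus(fname, '/')
        lines.append('- %i %s%s %s\r\n' % (size, url_base, quoted_fname, relpath))
    return ''.join(lines)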
details: http://www.bx.psu.edu/hg/galaxy/rev/d1624544bc55
changeset: 3819:d1624544bc55
user: rc
date: Mon May 24 16:00:03 2010 -0400
description:
lims: ui fixes in adding samples
diffstat:
lib/galaxy/web/controllers/requests.py | 63 +++++++++++++++----------
lib/galaxy/web/controllers/requests_admin.py | 63 +++++++++++++++----------
templates/admin/requests/show_request.mako | 52 ++++++++++++++------
templates/requests/show_request.mako | 69 +++++++++++++++++----------
test/base/twilltestcase.py | 6 +-
5 files changed, 159 insertions(+), 94 deletions(-)
diffs (388 lines):
diff -r f7525fb463e0 -r d1624544bc55 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Mon May 24 15:33:55 2010 -0400
+++ b/lib/galaxy/web/controllers/requests.py Mon May 24 16:00:03 2010 -0400
@@ -288,7 +288,7 @@
sample_copy=self.__copy_sample(current_samples),
details='hide', edit_mode=util.restore_text( params.get( 'edit_mode', 'False' ) ),
message=message, status=status )
- def __library_widgets(self, trans, user, sample_index, libraries, sample=None, **kwd):
+ def __library_widgets(self, trans, user, sample_index, libraries, sample=None, lib_id=None, folder_id=None, **kwd):
'''
This method creates the data library & folder selectbox for creating &
editing samples. First we get a list of all the libraries accessible to
@@ -298,7 +298,8 @@
'''
params = util.Params( kwd )
# data library selectbox
- lib_id = params.get( "sample_%i_library_id" % sample_index, 'none' )
+ if not lib_id:
+ lib_id = params.get( "sample_%i_library_id" % sample_index, 'none' )
selected_lib = None
if sample and lib_id == 'none':
if sample.library:
@@ -317,7 +318,7 @@
lib_widget.add_option('Select one', 'none')
# all the libraries available to the selected user
for lib, hidden_folder_ids in libraries.items():
- if str(lib.id) == lib_id:
+ if str(lib.id) == str(lib_id):
lib_widget.add_option(lib.name, lib.id, selected=True)
selected_lib, selected_hidden_folder_ids = lib, hidden_folder_ids.split(',')
else:
@@ -338,7 +339,10 @@
else:
current_fid = params.get( "sample_%i_folder_id" % sample_index, 'none' )
else:
- current_fid = 'none'
+ if folder_id:
+ current_fid = folder_id
+ else:
+ current_fid = 'none'
# first option
if lib_id == 'none':
folder_widget.add_option('Select one', 'none', selected=True)
@@ -479,29 +483,38 @@
# if the user has selected a sample no. to copy then copy the contents
# of the src sample to the new sample else an empty sample
src_sample_index = int(params.get( 'copy_sample', -1 ))
+ # get the number of new copies of the src sample
+ num_sample_to_copy = int(params.get( 'num_sample_to_copy', 1 ))
if src_sample_index == -1:
- # empty sample
- lib_widget, folder_widget = self.__library_widgets(trans, request.user,
- len(current_samples),
- libraries, None, **kwd)
- current_samples.append(dict(name='Sample_%i' % (len(current_samples)+1),
- barcode='',
- library=None,
- folder=None,
- field_values=['' for field in request.type.sample_form.fields],
- lib_widget=lib_widget,
- folder_widget=folder_widget))
+ for ns in range(num_sample_to_copy):
+ # empty sample
+ lib_widget, folder_widget = self.__library_widgets(trans, request.user,
+ len(current_samples),
+ libraries, None, **kwd)
+ current_samples.append(dict(name='Sample_%i' % (len(current_samples)+1),
+ barcode='',
+ library=None,
+ folder=None,
+ field_values=['' for field in request.type.sample_form.fields],
+ lib_widget=lib_widget,
+ folder_widget=folder_widget))
else:
- lib_widget, folder_widget = self.__library_widgets(trans, request.user,
- len(current_samples),
- libraries, None, **kwd)
- current_samples.append(dict(name=current_samples[src_sample_index]['name']+'_%i' % (len(current_samples)+1),
- barcode='',
- library_id='none',
- folder_id='none',
- field_values=[val for val in current_samples[src_sample_index]['field_values']],
- lib_widget=lib_widget,
- folder_widget=folder_widget))
+ src_library_id = current_samples[src_sample_index]['lib_widget'].get_selected()[1]
+ src_folder_id = current_samples[src_sample_index]['folder_widget'].get_selected()[1]
+ for ns in range(num_sample_to_copy):
+ lib_widget, folder_widget = self.__library_widgets(trans, request.user,
+ len(current_samples),
+ libraries, sample=None,
+ lib_id=src_library_id,
+ folder_id=src_folder_id,
+ **kwd)
+ current_samples.append(dict(name=current_samples[src_sample_index]['name']+'_%i' % (len(current_samples)+1),
+ barcode='',
+ library_id='none',
+ folder_id='none',
+ field_values=[val for val in current_samples[src_sample_index]['field_values']],
+ lib_widget=lib_widget,
+ folder_widget=folder_widget))
return trans.fill_template( '/requests/show_request.mako',
request=request,
request_details=self.request_details(trans, request.id),
diff -r f7525fb463e0 -r d1624544bc55 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Mon May 24 15:33:55 2010 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Mon May 24 16:00:03 2010 -0400
@@ -805,7 +805,7 @@
sample_copy=self.__copy_sample(current_samples),
details='hide', edit_mode=util.restore_text( params.get( 'edit_mode', 'False' ) ),
message=message, status=status )
- def __library_widgets(self, trans, user, sample_index, libraries, sample=None, **kwd):
+ def __library_widgets(self, trans, user, sample_index, libraries, sample=None, lib_id=None, folder_id=None, **kwd):
'''
This method creates the data library & folder selectbox for creating &
editing samples. First we get a list of all the libraries accessible to
@@ -815,7 +815,8 @@
'''
params = util.Params( kwd )
# data library selectbox
- lib_id = params.get( "sample_%i_library_id" % sample_index, 'none' )
+ if not lib_id:
+ lib_id = params.get( "sample_%i_library_id" % sample_index, 'none' )
selected_lib = None
if sample and lib_id == 'none':
if sample.library:
@@ -834,7 +835,7 @@
lib_widget.add_option('Select one', 'none')
# all the libraries available to the selected user
for lib, hidden_folder_ids in libraries.items():
- if str(lib.id) == lib_id:
+ if str(lib.id) == str(lib_id):
lib_widget.add_option(lib.name, lib.id, selected=True)
selected_lib, selected_hidden_folder_ids = lib, hidden_folder_ids.split(',')
else:
@@ -855,7 +856,10 @@
else:
current_fid = params.get( "sample_%i_folder_id" % sample_index, 'none' )
else:
- current_fid = 'none'
+ if folder_id:
+ current_fid = folder_id
+ else:
+ current_fid = 'none'
# first option
if lib_id == 'none':
folder_widget.add_option('Select one', 'none', selected=True)
@@ -995,29 +999,38 @@
# if the user has selected a sample no. to copy then copy the contents
# of the src sample to the new sample else an empty sample
src_sample_index = int(params.get( 'copy_sample', -1 ))
+ # get the number of new copies of the src sample
+ num_sample_to_copy = int(params.get( 'num_sample_to_copy', 1 ))
if src_sample_index == -1:
- # empty sample
- lib_widget, folder_widget = self.__library_widgets(trans, request.user,
- len(current_samples),
- libraries, None, **kwd)
- current_samples.append(dict(name='Sample_%i' % (len(current_samples)+1),
- barcode='',
- library=None,
- folder=None,
- field_values=['' for field in request.type.sample_form.fields],
- lib_widget=lib_widget,
- folder_widget=folder_widget))
+ for ns in range(num_sample_to_copy):
+ # empty sample
+ lib_widget, folder_widget = self.__library_widgets(trans, request.user,
+ len(current_samples),
+ libraries, None, **kwd)
+ current_samples.append(dict(name='Sample_%i' % (len(current_samples)+1),
+ barcode='',
+ library=None,
+ folder=None,
+ field_values=['' for field in request.type.sample_form.fields],
+ lib_widget=lib_widget,
+ folder_widget=folder_widget))
else:
- lib_widget, folder_widget = self.__library_widgets(trans, request.user,
- len(current_samples),
- libraries, None, **kwd)
- current_samples.append(dict(name=current_samples[src_sample_index]['name']+'_%i' % (len(current_samples)+1),
- barcode='',
- library_id='none',
- folder_id='none',
- field_values=[val for val in current_samples[src_sample_index]['field_values']],
- lib_widget=lib_widget,
- folder_widget=folder_widget))
+ src_library_id = current_samples[src_sample_index]['lib_widget'].get_selected()[1]
+ src_folder_id = current_samples[src_sample_index]['folder_widget'].get_selected()[1]
+ for ns in range(num_sample_to_copy):
+ lib_widget, folder_widget = self.__library_widgets(trans, request.user,
+ len(current_samples),
+ libraries, sample=None,
+ lib_id=src_library_id,
+ folder_id=src_folder_id,
+ **kwd)
+ current_samples.append(dict(name=current_samples[src_sample_index]['name']+'_%i' % (len(current_samples)+1),
+ barcode='',
+ library_id='none',
+ folder_id='none',
+ field_values=[val for val in current_samples[src_sample_index]['field_values']],
+ lib_widget=lib_widget,
+ folder_widget=folder_widget))
return trans.fill_template( '/admin/requests/show_request.mako',
request=request,
request_details=self.request_details(trans, request.id),
diff -r f7525fb463e0 -r d1624544bc55 templates/admin/requests/show_request.mako
--- a/templates/admin/requests/show_request.mako Mon May 24 15:33:55 2010 -0400
+++ b/templates/admin/requests/show_request.mako Mon May 24 16:00:03 2010 -0400
@@ -444,8 +444,7 @@
%else:
<label>There are no samples.</label>
%endif
- </div>
-
+ </div>
%if request.samples and request.submitted():
<script type="text/javascript">
// Updater
@@ -458,25 +457,23 @@
<tbody>
<tr>
<div class="form-row">
+
+ %if request.unsubmitted():
+ <td>
+ %if current_samples:
+ <label>Copy </label>
+ <input type="integer" name="num_sample_to_copy" value="1" size="3"/>
+ <label>sample(s) from sample</label>
+ ${sample_copy.get_html()}
+ %endif
+ <input type="submit" name="add_sample_button" value="Add New"/>
+ </td>
+ %endif
<td>
%if current_samples:
<input type="submit" name="edit_samples_button" value="Edit samples"/>
%endif
</td>
- %if request.unsubmitted():
- <td>
- <label>Import from csv file</label>
- <input type="file" name="file_data" />
- <input type="submit" name="import_samples_button" value="Import samples"/>
- </td>
- <td>
- %if current_samples:
- <label>Copy from sample</label>
- ${sample_copy.get_html()}
- %endif
- <input type="submit" name="add_sample_button" value="Add New"/>
- </td>
- %endif
</div>
</tr>
</tbody>
@@ -504,3 +501,26 @@
<input type="hidden" name="request_id" value="${request.id}" />
</form>
</div>
+
+<br/>
+
+%if request.unsubmitted():
+<div class="toolForm">
+ <div class="form-row">
+ <div class="msg_list">
+ <h4 class="msg_head"><u>Import Samples</u></h4>
+ <div class="msg_body">
+ <label>Import from csv file</label>
+ <input type="file" name="file_data" />
+ <input type="submit" name="import_samples_button" value="Import samples"/>
+ <br/>
+ <div class="toolParamHelp" style="clear: both;">
+ The csv file must be in the following format:<br/>
+ SampleName,DataLibrary,DataLibraryFolder,FieldValue1,FieldValue2...
+ </div>
+ </div>
+ </div>
+ </div>
+</div>
+%endif
+
diff -r f7525fb463e0 -r d1624544bc55 templates/requests/show_request.mako
--- a/templates/requests/show_request.mako Mon May 24 15:33:55 2010 -0400
+++ b/templates/requests/show_request.mako Mon May 24 16:00:03 2010 -0400
@@ -364,31 +364,28 @@
%endif
</div>
%if request.unsubmitted() and edit_mode == 'False':
- <table class="grid">
- <tbody>
- <tr>
- <div class="form-row">
- <td>
- %if current_samples:
- <input type="submit" name="edit_samples_button" value="Edit samples"/>
- %endif
- </td>
- <td>
- <label>Import from csv file</label>
- <input type="file" name="file_data" />
- <input type="submit" name="import_samples_button" value="Import samples"/>
- </td>
- <td>
- %if current_samples:
- <label>Copy from sample</label>
- ${sample_copy.get_html()}
- %endif
- <input type="submit" name="add_sample_button" value="Add New"/>
- </td>
- </div>
- </tr>
- </tbody>
- </table>
+ <table class="grid">
+ <tbody>
+ <tr>
+ <div class="form-row">
+ <td>
+ %if current_samples:
+ <label>Copy </label>
+ <input type="integer" name="num_sample_to_copy" value="1" size="3"/>
+ <label>sample(s) from sample</label>
+ ${sample_copy.get_html()}
+ %endif
+ <input type="submit" name="add_sample_button" value="Add New"/>
+ </td>
+ <td>
+ %if current_samples:
+ <input type="submit" name="edit_samples_button" value="Edit samples"/>
+ %endif
+ </td>
+ </div>
+ </tr>
+ </tbody>
+ </table>
%endif
%if request.unsubmitted() and (request.samples or current_samples):
<div class="form-row">
@@ -407,3 +404,25 @@
<input type="hidden" name="request_id" value="${request.id}" />
</form>
</div>
+
+
+<br/>
+%if request.unsubmitted():
+<div class="toolForm">
+ <div class="form-row">
+ <div class="msg_list">
+ <h4 class="msg_head"><u>Import Samples</u></h4>
+ <div class="msg_body">
+ <label>Import from csv file</label>
+ <input type="file" name="file_data" />
+ <input type="submit" name="import_samples_button" value="Import samples"/>
+ <br/>
+ <div class="toolParamHelp" style="clear: both;">
+ The csv file must be in the following format:<br/>
+ SampleName,DataLibrary,DataLibraryFolder,FieldValue1,FieldValue2...
+ </div>
+ </div>
+ </div>
+ </div>
+</div>
+%endif
diff -r f7525fb463e0 -r d1624544bc55 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Mon May 24 15:33:55 2010 -0400
+++ b/test/base/twilltestcase.py Mon May 24 16:00:03 2010 -0400
@@ -1572,14 +1572,14 @@
self.check_page_for_string( 'There are no samples.' )
# this redundant stmt below is add so that the second form in
# the page gets selected
- tc.fv( "2", "request_id", request_id )
+ tc.fv( "3", "request_id", request_id )
for sample_index, sample in enumerate(samples):
tc.submit( "add_sample_button" )
self.check_page_for_string( 'Sequencing Request "%s"' % request_name )
sample_name, fields = sample
- tc.fv( "2", "sample_%i_name" % sample_index, sample_name )
+ tc.fv( "3", "sample_%i_name" % sample_index, sample_name )
for field_index, field_value in enumerate(fields):
- tc.fv( "2", "sample_%i_field_%i" % ( sample_index, field_index ), field_value )
+ tc.fv( "3", "sample_%i_field_%i" % ( sample_index, field_index ), field_value )
tc.submit( "save_samples_button" )
for sample_name, fields in samples:
self.check_page_for_string( sample_name )

25 May '10
details: http://www.bx.psu.edu/hg/galaxy/rev/f7525fb463e0
changeset: 3818:f7525fb463e0
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Mon May 24 15:33:55 2010 -0400
description:
Allow FASTQ Groomer/parser to work on tab-delimited decimal scores.
diffstat:
lib/galaxy_utils/sequence/fastq.py | 11 ++++++++---
test-data/sanger_full_range_as_tab_decimal_sanger.fastqsanger | 8 ++++++++
tools/fastq/fastq_groomer.xml | 11 ++++++++++-
3 files changed, 26 insertions(+), 4 deletions(-)
diffs (72 lines):
diff -r 86fe916dbdb5 -r f7525fb463e0 lib/galaxy_utils/sequence/fastq.py
--- a/lib/galaxy_utils/sequence/fastq.py Mon May 24 15:10:31 2010 -0400
+++ b/lib/galaxy_utils/sequence/fastq.py Mon May 24 15:33:55 2010 -0400
@@ -41,7 +41,12 @@
def convert_color_to_base_space( cls, sequence ):
return cls.color_space_converter.to_base_space( sequence )
def is_ascii_encoded( self ):
- return ' ' not in self.quality #as per fastq definition only decimal quality strings can have spaces in them (and must have a trailing space)
+ #as per fastq definition only decimal quality strings can have spaces (and TABs for our purposes) in them (and must have a trailing space)
+ if ' ' in self.quality:
+ return False
+ if '\t' in self.quality:
+ return False
+ return True
def get_ascii_quality_scores( self ):
if self.is_ascii_encoded():
return list( self.quality )
@@ -49,7 +54,7 @@
quality = self.quality.rstrip() #decimal scores should have a trailing space
if quality:
try:
- return [ chr( int( val ) + self.ascii_min - self.quality_min ) for val in quality.split( ' ' ) ]
+ return [ chr( int( val ) + self.ascii_min - self.quality_min ) for val in quality.split() ]
except ValueError, e:
raise ValueError( 'Error Parsing quality String. ASCII quality strings cannot contain spaces (%s): %s' % ( self.quality, e ) )
else:
@@ -60,7 +65,7 @@
else:
quality = self.quality.rstrip() #decimal scores should have a trailing space
if quality:
- return [ int( val ) for val in quality.split( ' ' ) if val.strip() ]
+ return [ int( val ) for val in quality.split() if val.strip() ]
else:
return []
def convert_read_to_format( self, format, force_quality_encoding = None ):
diff -r 86fe916dbdb5 -r f7525fb463e0 test-data/sanger_full_range_as_tab_decimal_sanger.fastqsanger
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sanger_full_range_as_tab_decimal_sanger.fastqsanger Mon May 24 15:33:55 2010 -0400
@@ -0,0 +1,8 @@
+@FAKE0001 Original version has PHRED scores from 0 to 93 inclusive (in that order)
+ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTAC
++
+0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93
+@FAKE0002 Original version has PHRED scores from 93 to 0 inclusive (in that order)
+CATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCA
++
+93 92 91 90 89 88 87 86 85 84 83 82 81 80 79 78 77 76 75 74 73 72 71 70 69 68 67 66 65 64 63 62 61 60 59 58 57 56 55 54 53 52 51 50 49 48 47 46 45 44 43 42 41 40 39 38 37 36 35 34 33 32 31 30 29 28 27 26 25 24 23 22 21 20 19 18 17 16 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 0
diff -r 86fe916dbdb5 -r f7525fb463e0 tools/fastq/fastq_groomer.xml
--- a/tools/fastq/fastq_groomer.xml Mon May 24 15:10:31 2010 -0400
+++ b/tools/fastq/fastq_groomer.xml Mon May 24 15:33:55 2010 -0400
@@ -1,4 +1,4 @@
-<tool id="fastq_groomer" name="FASTQ Groomer" version="1.0.2">
+<tool id="fastq_groomer" name="FASTQ Groomer" version="1.0.3">
<description>convert between various FASTQ quality formats</description>
<command interpreter="python">fastq_groomer.py '$input_file' '$input_type' '$output_file'
#if str( $options_type['options_type_selector'] ) == 'basic':
@@ -288,6 +288,15 @@
<param name="summarize_input" value="summarize_input" />
<output name="output_file" file="sanger_full_range_as_decimal_sanger.fastqsanger" />
</test>
+ <test>
+ <param name="input_file" value="sanger_full_range_as_tab_decimal_sanger.fastqsanger" ftype="fastq" />
+ <param name="input_type" value="sanger" />
+ <param name="options_type_selector" value="advanced" />
+ <param name="output_type" value="sanger" />
+ <param name="force_quality_encoding" value="ascii" />
+ <param name="summarize_input" value="summarize_input" />
+ <output name="output_file" file="sanger_full_range_original_sanger.fastqsanger" />
+ </test>
<!-- Solexa, range -5 - 62 -->
<test>
<param name="input_file" value="solexa_full_range_as_decimal_solexa.fastqsolexa" ftype="fastq" />
details: http://www.bx.psu.edu/hg/galaxy/rev/86fe916dbdb5
changeset: 3817:86fe916dbdb5
user: rc
date: Mon May 24 15:10:31 2010 -0400
description:
fixed the status message box layout in grid
diffstat:
templates/grid_base.mako | 7 ++++++-
1 files changed, 6 insertions(+), 1 deletions(-)
diffs (17 lines):
diff -r 8833ebe82ed1 -r 86fe916dbdb5 templates/grid_base.mako
--- a/templates/grid_base.mako Mon May 24 15:08:57 2010 -0400
+++ b/templates/grid_base.mako Mon May 24 15:10:31 2010 -0400
@@ -679,7 +679,12 @@
<tr>
<td width="75%">${self.render_grid_header( grid )}</td>
<td></td>
- <td width="25%" id="grid-message" valign="top">${render_message( message, status )}</td>
+ <td></td>
+ </tr>
+ <tr>
+ <td width="100%" id="grid-message" valign="top">${render_message( message, status )}</td>
+ <td></td>
+ <td></td>
</tr>
</table>
details: http://www.bx.psu.edu/hg/galaxy/rev/8833ebe82ed1
changeset: 3816:8833ebe82ed1
user: rc
date: Mon May 24 15:08:57 2010 -0400
description:
sff_converter tool functional test
diffstat:
test-data/2.sff | 0
test-data/sff_converter_fasta.dat | 6 ++
test-data/sff_converter_fastq.dat | 12 +++++
test-data/sff_converter_qual.dat | 6 ++
test-data/sff_converter_xml_1.dat | 18 ++++++++
test-data/sff_converter_xml_2.dat | 18 ++++++++
tools/filters/sff_extractor.xml | 83 +++++++++++++++++++++++--------------
7 files changed, 111 insertions(+), 32 deletions(-)
diffs (176 lines):
diff -r cbacbb736899 -r 8833ebe82ed1 test-data/2.sff
Binary file test-data/2.sff has changed
diff -r cbacbb736899 -r 8833ebe82ed1 test-data/sff_converter_fasta.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sff_converter_fasta.dat Mon May 24 15:08:57 2010 -0400
@@ -0,0 +1,6 @@
+>GGOQ6K301BJT46
+gactACACGTAGTATAAGCTAAATGTTGGAGAGGATGCTGTAATCAACGTTTTGTGTCTCCTTACAGGACACCAGCTGATTTGGAGGTCATCCAATATAAACCTGGCTGGAACTTGCTATGGTTAGGAGAGCTATGAATGGAAGAGATAATTCCATTATTTAATAAGGCTACAGACAAATTAGGACAAGAAGCAGCTATTAGATTATTTAGTGCTAGCACACCTAAATAGAGAGAGATAAATTCATTAGACTTTTTGCTAttttattgacttttggagagacagtatttaaagtaccaaatcccagaggttgcctatgttggtggtgttgcaagctctattattggcgattgcaatctaccttatccataagttctgcttagagattatttgttattaattttcatctacatcaataaatataggacagtaaggattttgtacaaaatagacaatgggataggtaccagaagaggacagaggaattactggtattttgatatagactacactaacgtacgtacgtaggaattttaaccgggg
+>GGOQ6K301AWPXS
+gactACGTACACACTGGAATAAAATGGAGTTTTCATACTAGAGATTATTATATGGGCTATGTAAAAGAACTAGTAGCAGGATCTAGCACACCAGACAGCCTAAGACTGTATATTTTATATAAGCAACCCGTTATGGCATGGGAAATATCGCCCAGGGTTAAAAAATTTTAACAAGGAATGGCCTTTTTGTAAATATGTGGATAAAAAACAGGGTTCATGTGGGATGATATTGAAAAACAAAAGATTTGCGTAGGAGGAGAAATATCACCAGGATGGGGACCTGGAATGGTTGGCATAGCAAATAAAAGCCTTTAGTTGTGGGGAGAGAAAAATCGAGGCAACTCCTGTTATGATTATAAGAGAAGAGATAGATCCCAAAAAATGGTGTGGAGATTGTTGGAATTTAATGTGTCTTAGAAACTCACCTCCAGGAACGTTGTCAAAGACTCGCTATGTTGGCATGTGGACGGaaagactaaatgttggagaggatgctgtatcaacgttttgttgtcgtccttacacggacaccagctgtagttgtctgtagnttttaccggg
+>GGOQ6K301A8J46
+gactACTATACGAGTCTGCCAATCTTCTTCACTCATCCCCTTCAGGAAGAGTGCAGGGTTCTGGGACTCTCCGTATGTGCCTCCTAGGTACAAGAAAATATCCCCTCTCTTCATCCTTTAATAACACTGATCCTTGTCCCCAATACTCTACTCTCATTGGTCCTTTCCAATTTTTGTCTTTTTTGATCTTTATAGTAAATCCACTGACCTTGCAATTTGCTTGGAATTTGAGAAAAATAATCCTGTATTCTTAATGATTCTTGTTGTGTTAATAATTCATATGGGGGCCATTCCCCCTATTCTACCCCTTTGTTTAAAATTAAGGCAATGCAGAGCGAGAGCCAAAGCATTGTCTAAAGATGTTGTTTCAGGCAAAAACTTCTGAATCCAACACTTTAAAGTATTATTggcattttcaaccaaagcttgagtattgagggttgcctggtattccaaatttatgttttattccctatgtaattgtagtaaaccttcctattttttgattttctaaaatttggtccctattgttctgtttgctagttctgtaactattatgagtccaacctgtttagg
diff -r cbacbb736899 -r 8833ebe82ed1 test-data/sff_converter_fastq.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sff_converter_fastq.dat Mon May 24 15:08:57 2010 -0400
@@ -0,0 +1,12 @@
+@GGOQ6K301BJT46
+gactACACGTAGTATAAGCTAAATGTTGGAGAGGATGCTGTAATCAACGTTTTGTGTCTCCTTACAGGACACCAGCTGATTTGGAGGTCATCCAATATAAACCTGGCTGGAACTTGCTATGGTTAGGAGAGCTATGAATGGAAGAGATAATTCCATTATTTAATAAGGCTACAGACAAATTAGGACAAGAAGCAGCTATTAGATTATTTAGTGCTAGCACACCTAAATAGAGAGAGATAAATTCATTAGACTTTTTGCTAttttattgacttttggagagacagtatttaaagtaccaaatcccagaggttgcctatgttggtggtgttgcaagctctattattggcgattgcaatctaccttatccataagttctgcttagagattatttgttattaattttcatctacatcaataaatataggacagtaaggattttgtacaaaatagacaatgggataggtaccagaagaggacagaggaattactggtattttgatatagactacactaacgtacgtacgtaggaattttaaccgggg
++
+FFFFFFFFFFFFFFFIIIIIHHHHHFFBBBFEDDFFFFFHHGFFFFFFF????FFFFFFDDFFFFF<==<ADDDDDDAC8::DCDFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFDDDFFFFFFFFFFFFFFFFFFFFFFA@@FFFFFFDD?????AAAAAB=??ADAACFFFFFDDDFFFFFFFFFDDDD544??8888000<2449=:<<?@841114?>AAAA?:::A???DBBA:::A?A//...::33,,,,------,,,,;;:89=9:9733...2223//9<688<444<AA::::8::BBAA@@>>>43434<4444449??@AA@BBBA:8:<A998<?4448<<<<244<3338<<>:884433822255,,,3--3,,,,,,,,,3774,,,33043///0,,,,,,,,,,,,111,,,,,),0....0),,,.,,,,,,,143.,,,.---86---20787777870206433,,,,,0,,,,,,,0..,,,...3,,,,-44444489965225666----
+@GGOQ6K301AWPXS
+gactACGTACACACTGGAATAAAATGGAGTTTTCATACTAGAGATTATTATATGGGCTATGTAAAAGAACTAGTAGCAGGATCTAGCACACCAGACAGCCTAAGACTGTATATTTTATATAAGCAACCCGTTATGGCATGGGAAATATCGCCCAGGGTTAAAAAATTTTAACAAGGAATGGCCTTTTTGTAAATATGTGGATAAAAAACAGGGTTCATGTGGGATGATATTGAAAAACAAAAGATTTGCGTAGGAGGAGAAATATCACCAGGATGGGGACCTGGAATGGTTGGCATAGCAAATAAAAGCCTTTAGTTGTGGGGAGAGAAAAATCGAGGCAACTCCTGTTATGATTATAAGAGAAGAGATAGATCCCAAAAAATGGTGTGGAGATTGTTGGAATTTAATGTGTCTTAGAAACTCACCTCCAGGAACGTTGTCAAAGACTCGCTATGTTGGCATGTGGACGGaaagactaaatgttggagaggatgctgtatcaacgttttgttgtcgtccttacacggacaccagctgtagttgtctgtagnttttaccggg
++
+FFFFFFFFFFFFFDDDFFD@3333BAA?95558HFFFFFHIIIIIIIHIIIHH?;;A???940000836?AAABDFFFFFFFFFFFFA@@FFFFFFFFFF===?<?????88111119??????ADCCCCAAAAAAAAB555:::?>CFD==;A<9966,,,,,,,,,,,,=887766=;;22-----<466694?ABAAAD666666D7FFFFFFFFFF666FFFF?;;:A;<<75B....D7333<@A@?888<AA==;;<7@><;;<688<00009<>???<<<===<<897==73...3----:99666:>7996/////-<<11202>??AA=:55448BBB===AAADDDDDAA>>===DB@>9723000,,,,,,300004;;><<:99666666.,,,,/7600,,,9922/33000020.00011742---223380220786646770---0331.00..,,,,,,0,,,,,,,..0144330,,,0.0,,,,,,,,,,,)),,,,,,1..,,,,,,,...,,,,,,.,,,,,,,,,,,,!//,--226::
+@GGOQ6K301A8J46
+gactACTATACGAGTCTGCCAATCTTCTTCACTCATCCCCTTCAGGAAGAGTGCAGGGTTCTGGGACTCTCCGTATGTGCCTCCTAGGTACAAGAAAATATCCCCTCTCTTCATCCTTTAATAACACTGATCCTTGTCCCCAATACTCTACTCTCATTGGTCCTTTCCAATTTTTGTCTTTTTTGATCTTTATAGTAAATCCACTGACCTTGCAATTTGCTTGGAATTTGAGAAAAATAATCCTGTATTCTTAATGATTCTTGTTGTGTTAATAATTCATATGGGGGCCATTCCCCCTATTCTACCCCTTTGTTTAAAATTAAGGCAATGCAGAGCGAGAGCCAAAGCATTGTCTAAAGATGTTGTTTCAGGCAAAAACTTCTGAATCCAACACTTTAAAGTATTATTggcattttcaaccaaagcttgagtattgagggttgcctggtattccaaatttatgttttattccctatgtaattgtagtaaaccttcctattttttgattttctaaaatttggtccctattgttctgtttgctagttctgtaactattatgagtccaacctgtttagg
++
+FFFFFFFFFFFFFFFIIIIIIIIIIIIIIIIIIIIIFFFFIIIIIIIIIIIIFHFFFFFFFF:::DFFFFFFFFFFFFFDDDFFFFAAABBDD=3333=7B::99?7BBBBBA?4400088/0008??BDDDDFFFFDDBBFFFFFFFFFFFFFFFFFFFFFF???DDB@--,,,=/7,,,,,-::==68<BAFFFFFFFDDDFFFFFFFFFDDD???FFFFDBA>999?94/////>34=;;?>?ABBB5666=BBBB?><=<<986....255---3==8,,,,,--357,,,,-8766=<=2444000977757664400554444<<?@@BDDDDDAAA???AAAAA555:???D<??>DD===DD88833553B443238<76654222....,07/166622///345429:<8888022263,,,.1300,,,00,0034413..,,,,-330-,000207..2220202.344.0.0.00027:----8://22--00,,,,,),,,,,,,00-000.0..00,,,0,,,.0430,,,.2022611...143,,,,,,,,,,000000
diff -r cbacbb736899 -r 8833ebe82ed1 test-data/sff_converter_qual.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sff_converter_qual.dat Mon May 24 15:08:57 2010 -0400
@@ -0,0 +1,6 @@
+>GGOQ6K301BJT46
+37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 40 40 40 40 40 39 39 39 39 39 37 37 33 33 33 37 36 35 35 37 37 37 37 37 39 39 38 37 37 37 37 37 37 37 30 30 30 30 37 37 37 37 37 37 35 35 37 37 37 37 37 27 28 28 27 32 35 35 35 35 35 35 32 34 23 25 25 35 34 35 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 35 35 35 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 32 31 31 37 37 37 37 37 37 35 35 30 30 30 30 30 32 32 32 32 32 33 28 30 30 32 35 32 32 34 37 37 37 37 37 35 35 35 37 37 37 37 37 37 37 37 37 35 35 35 35 20 19 19 30 30 23 23 23 23 15 15 15 27 17 19 19 24 28 25 27 27 30 31 23 19 16 16 16 19 30 29 32 32 32 32 30 25 25 25 32 30 30 30 35 33 33 32 25 25 25 32 30 32 14 14 13 13 13 25 25 18 18 11 11 11 11 12 12 12 12 12 12 11 11 11 11 26 26 25 23 24 28 24 25 24 22 18 18 13 13 13 17 17 17 18 14 14 24 27 21 23 23 27 19 19 19 27 32 32 25 25 25 25 23 25 25 33 33 32 32 31 31 29 29 29 19 18 19 18 19 27 1!
9 19 19 19 19 19 24 30 30 31 32 32 31 33 33 33 32 25 23 25 27 32 24 24 23 27 30 19 19 19 23 27 27 27 27 17 19 19 27 18 18 18 23 27 27 29 25 23 23 19 19 18 18 23 17 17 17 20 20 11 11 11 18 12 12 18 11 11 11 11 11 11 11 11 11 18 22 22 19 11 11 11 18 18 15 19 18 14 14 14 15 11 11 11 11 11 11 11 11 11 11 11 11 16 16 16 11 11 11 11 11 8 11 15 13 13 13 13 15 8 11 11 11 13 11 11 11 11 11 11 11 16 19 18 13 11 11 11 13 12 12 12 23 21 12 12 12 17 15 22 23 22 22 22 22 23 22 15 17 15 21 19 18 18 11 11 11 11 11 15 11 11 11 11 11 11 11 15 13 13 11 11 11 13 13 13 18 11 11 11 11 12 19 19 19 19 19 19 23 24 24 21 20 17 17 20 21 21 21 12 12 12 12
+>GGOQ6K301AWPXS
+37 37 37 37 37 37 37 37 37 37 37 37 37 35 35 35 37 37 35 31 18 18 18 18 33 32 32 30 24 20 20 20 23 39 37 37 37 37 37 39 40 40 40 40 40 40 40 39 40 40 40 39 39 30 26 26 32 30 30 30 24 19 15 15 15 15 23 18 21 30 32 32 32 33 35 37 37 37 37 37 37 37 37 37 37 37 37 32 31 31 37 37 37 37 37 37 37 37 37 37 28 28 28 30 27 30 30 30 30 30 23 23 16 16 16 16 16 24 30 30 30 30 30 30 32 35 34 34 34 34 32 32 32 32 32 32 32 32 33 20 20 20 25 25 25 30 29 34 37 35 28 28 26 32 27 24 24 21 21 11 11 11 11 11 11 11 11 11 11 11 11 28 23 23 22 22 21 21 28 26 26 17 17 12 12 12 12 12 27 19 21 21 21 24 19 30 32 33 32 32 32 35 21 21 21 21 21 21 35 22 37 37 37 37 37 37 37 37 37 37 21 21 21 37 37 37 37 30 26 26 25 32 26 27 27 22 20 33 13 13 13 13 35 22 18 18 18 27 31 32 31 30 23 23 23 27 32 32 28 28 26 26 27 22 31 29 27 26 26 27 21 23 23 27 15 15 15 15 24 27 29 30 30 30 27 27 27 28 28 28 27 27 23 24 22 28 28 22 18 13 13 13 18 12 12 12 12 25 24 24 21 21 21 25 29 22 24 24 21 14 14 14 14 14 12 27 27 16 16 1!
7 15 17 29 30 30 32 32 28 25 20 20 19 19 23 33 33 33 28 28 28 32 32 32 35 35 35 35 35 32 32 29 29 28 28 28 35 33 31 29 24 22 17 18 15 15 15 11 11 11 11 11 11 18 15 15 15 15 19 26 26 29 27 27 25 24 24 21 21 21 21 21 21 13 11 11 11 11 14 22 21 15 15 11 11 11 24 24 17 17 14 18 18 15 15 15 15 17 15 13 15 15 15 16 16 22 19 17 12 12 12 17 17 18 18 23 15 17 17 15 22 23 21 21 19 21 22 22 15 12 12 12 15 18 18 16 13 15 15 13 13 11 11 11 11 11 11 15 11 11 11 11 11 11 11 13 13 15 16 19 19 18 18 15 11 11 11 15 13 15 11 11 11 11 11 11 11 11 11 11 11 8 8 11 11 11 11 11 11 16 13 13 11 11 11 11 11 11 11 13 13 13 11 11 11 11 11 11 13 11 11 11 11 11 11 11 11 11 11 11 11 0 14 14 11 12 12 17 17 21 25 25
+>GGOQ6K301A8J46
+37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 37 37 37 37 40 40 40 40 40 40 40 40 40 40 40 40 37 39 37 37 37 37 37 37 37 37 25 25 25 35 37 37 37 37 37 37 37 37 37 37 37 37 37 35 35 35 37 37 37 37 32 32 32 33 33 35 35 28 18 18 18 18 28 22 33 25 25 24 24 30 22 33 33 33 33 33 32 30 19 19 15 15 15 23 23 14 15 15 15 23 30 30 33 35 35 35 35 37 37 37 37 35 35 33 33 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 30 30 30 35 35 33 31 12 12 11 11 11 28 14 22 11 11 11 11 11 12 25 25 28 28 21 23 27 33 32 37 37 37 37 37 37 37 35 35 35 37 37 37 37 37 37 37 37 37 35 35 35 30 30 30 37 37 37 37 35 33 32 29 24 24 24 30 24 19 14 14 14 14 14 29 18 19 28 26 26 30 29 30 32 33 33 33 20 21 21 21 28 33 33 33 33 30 29 27 28 27 27 24 23 21 13 13 13 13 17 20 20 12 12 12 18 28 28 23 11 11 11 11 11 12 12 18 20 22 11 11 11 11 12 23 22 21 21 28 27 28 17 19 19 19 15 15 15 24 22 22 22 20 22 21 21 19 19 15 15 20 20 19 19 19 19 2!
7 27 30 31 31 33 35 35 35 35 35 32 32 32 30 30 30 32 32 32 32 32 20 20 20 25 30 30 30 35 27 30 30 29 35 35 28 28 28 35 35 23 23 23 18 18 20 20 18 33 19 19 18 17 18 23 27 22 21 21 20 19 17 17 17 13 13 13 13 11 15 22 14 16 21 21 21 17 17 14 14 14 18 19 20 19 17 24 25 27 23 23 23 23 15 17 17 17 21 18 11 11 11 13 16 18 15 15 11 11 11 15 15 11 15 15 18 19 19 16 18 13 13 11 11 11 11 12 18 18 15 12 11 15 15 15 17 15 22 13 13 17 17 17 15 17 15 17 13 18 19 19 13 15 13 15 13 15 15 15 17 22 25 12 12 12 12 23 25 14 14 17 17 12 12 15 15 11 11 11 11 11 8 11 11 11 11 11 11 11 15 15 12 15 15 15 13 15 13 13 15 15 11 11 11 15 11 11 11 13 15 19 18 15 11 11 11 13 17 15 17 17 21 16 16 13 13 13 16 19 18 11 11 11 11 11 11 11 11 11 11 15 15 15 15 15 15
diff -r cbacbb736899 -r 8833ebe82ed1 test-data/sff_converter_xml_1.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sff_converter_xml_1.dat Mon May 24 15:08:57 2010 -0400
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<trace_volume>
+ <trace>
+ <trace_name>GGOQ6K301BJT46</trace_name>
+ <clip_vector_left>5</clip_vector_left>
+ <clip_vector_right>260</clip_vector_right>
+ </trace>
+ <trace>
+ <trace_name>GGOQ6K301AWPXS</trace_name>
+ <clip_vector_left>5</clip_vector_left>
+ <clip_vector_right>470</clip_vector_right>
+ </trace>
+ <trace>
+ <trace_name>GGOQ6K301A8J46</trace_name>
+ <clip_vector_left>5</clip_vector_left>
+ <clip_vector_right>408</clip_vector_right>
+ </trace>
+</trace_volume>
diff -r cbacbb736899 -r 8833ebe82ed1 test-data/sff_converter_xml_2.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sff_converter_xml_2.dat Mon May 24 15:08:57 2010 -0400
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<trace_volume>
+ <trace>
+ <trace_name>GGOQ6K301BJT46</trace_name>
+ <clip_vector_left>5</clip_vector_left>
+ <clip_vector_right>260</clip_vector_right>
+ </trace>
+ <trace>
+ <trace_name>GGOQ6K301AWPXS</trace_name>
+ <clip_vector_left>5</clip_vector_left>
+ <clip_vector_right>470</clip_vector_right>
+ </trace>
+ <trace>
+ <trace_name>GGOQ6K301A8J46</trace_name>
+ <clip_vector_left>5</clip_vector_left>
+ <clip_vector_right>408</clip_vector_right>
+ </trace>
+</trace_volume>
diff -r cbacbb736899 -r 8833ebe82ed1 tools/filters/sff_extractor.xml
--- a/tools/filters/sff_extractor.xml Mon May 24 15:05:55 2010 -0400
+++ b/tools/filters/sff_extractor.xml Mon May 24 15:08:57 2010 -0400
@@ -1,39 +1,58 @@
<tool id="Sff_extractor" name="SFF converter" version="1.0.0">
- <description></description>
- <command interpreter="python">
- #if str($fastq_output) == "fastq_false" #sff_extract.py $clip --seq_file=$out_file3 --qual_file=$out_file4 --xml_file=$out_file2 $input
- #elif str($fastq_output) == "fastq_true" #sff_extract.py $clip --fastq --seq_file=$out_file1 --xml_file=$out_file2 $input
- #end if#
- </command>
- <inputs>
- <param format="sff" name="input" type="data" label="Extract from this dataset"/>
- <param name="clip" type="select" label="Completely remove ends with low qual and/or adaptor sequence">
- <option value="">No</option>
- <option value="--clip">Yes</option>
- </param>
- <param name="fastq_output" type="boolean" truevalue="fastq_true" falsevalue="fastq_false" checked="False" label="Do you want FASTQ file instead of FASTA + FASTA quality file?" />
- </inputs>
- <outputs>
- <data format="fastq" name="out_file1" >
- <filter>fastq_output is True</filter>
- </data>
- <data format="xml" name="out_file2">
- </data>
- <data format="fasta" name="out_file3">
- <filter>fastq_output is False</filter>
- </data>
- <data format="qual" name="out_file4">
- <filter>fastq_output is False</filter>
- </data>
- </outputs>
- <help>
+ <description></description>
+ <command interpreter="python">
+ #if str($fastq_output) == "fastq_false" #sff_extract.py $clip --seq_file=$out_file3 --qual_file=$out_file4 --xml_file=$out_file2 $input
+ #elif str($fastq_output) == "fastq_true" #sff_extract.py $clip --fastq --seq_file=$out_file1 --xml_file=$out_file2 $input
+ #end if#
+ </command>
+ <inputs>
+ <param format="sff" name="input" type="data" label="Extract from this dataset"/>
+ <param name="clip" type="select" label="Completely remove ends with low qual and/or adaptor sequence">
+ <option value="">No</option>
+ <option value="--clip">Yes</option>
+ </param>
+ <param name="fastq_output" type="boolean" truevalue="fastq_true" falsevalue="fastq_false" checked="False" label="Do you want FASTQ file instead of FASTA + FASTA quality file?" />
+ </inputs>
+ <outputs>
+ <data format="fastq" name="out_file1" >
+ <filter>fastq_output is True</filter>
+ </data>
+ <data format="xml" name="out_file2">
+ </data>
+ <data format="fasta" name="out_file3">
+ <filter>fastq_output is False</filter>
+ </data>
+ <data format="qual" name="out_file4">
+ <filter>fastq_output is False</filter>
+ </data>
+ </outputs>
+ <tests>
+ <test>
+ <param name="input" value="2.sff"/>
+ <param name="clip" value=""/>
+ <param name="fastq_output" value="false"/>
+ <output name="out_file2" file="sff_converter_xml_1.dat"/>
+ <output name="out_file3" file="sff_converter_fasta.dat"/>
+ <output name="out_file4" file="sff_converter_qual.dat"/>
+ </test>
+ <test>
+ <param name="input" value="2.sff"/>
+ <param name="clip" value=""/>
+ <param name="fastq_output" value="true"/>
+ <output name="out_file1" file="sff_converter_fastq.dat"/>
+ <output name="out_file2" file="sff_converter_xml_2.dat"/>
+ </test>
+ </tests>
+ <help>
+
**What it does**
This tool extracts data from the 454 Sequencer SFF format and creates three files containing the:
- Sequences (FASTA),
- Qualities (QUAL) and
- Clippings (XML)
- </help>
+Sequences (FASTA),
+Qualities (QUAL) and
+Clippings (XML)
+
+ </help>
</tool>
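
For illustration only: the <command> template in the tool above dispatches to sff_extract.py in one of two ways, depending on the fastq_output boolean. The Python sketch below is hypothetical (helper name and output file names are placeholders, not Galaxy's actual job-command code) and shows roughly what each branch expands to.

    # Hypothetical sketch of how the Cheetah <command> template above expands.
    # Output file names are placeholders; Galaxy substitutes real dataset paths.
    def build_sff_extract_command(input_sff, clip=False, fastq_output=False):
        clip_flag = "--clip" if clip else ""
        if fastq_output:
            # fastq_true branch: FASTQ output plus clippings XML
            args = [clip_flag, "--fastq",
                    "--seq_file=out_file1.fastq",
                    "--xml_file=out_file2.xml",
                    input_sff]
        else:
            # fastq_false branch: FASTA + QUAL outputs plus clippings XML
            args = [clip_flag,
                    "--seq_file=out_file3.fasta",
                    "--qual_file=out_file4.qual",
                    "--xml_file=out_file2.xml",
                    input_sff]
        return "sff_extract.py " + " ".join(a for a in args if a)

    # Example:
    # build_sff_extract_command("2.sff")
    # -> "sff_extract.py --seq_file=out_file3.fasta --qual_file=out_file4.qual --xml_file=out_file2.xml 2.sff"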
25 May '10
details: http://www.bx.psu.edu/hg/galaxy/rev/cbacbb736899
changeset: 3815:cbacbb736899
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Mon May 24 15:05:55 2010 -0400
description:
Minor cleanup for ensembl display applications and fastq masker tool.
diffstat:
display_applications/ensembl/ensembl_gff.xml | 6 ++----
display_applications/ensembl/ensembl_interval_as_bed.xml | 6 ++----
tool_conf.xml.main | 1 +
tools/fastq/fastq_masker_by_quality.py | 2 +-
4 files changed, 6 insertions(+), 9 deletions(-)
diffs (69 lines):
diff -r 6056caca2503 -r cbacbb736899 display_applications/ensembl/ensembl_gff.xml
--- a/display_applications/ensembl/ensembl_gff.xml Mon May 24 14:50:20 2010 -0400
+++ b/display_applications/ensembl/ensembl_gff.xml Mon May 24 15:05:55 2010 -0400
@@ -18,8 +18,7 @@
<param type="data" name="gff_file" url="galaxy_${DATASET_HASH}.gff" />
<param type="template" name="site_organism" strip="True" >
- #set index = $site_dbkeys.index( $gff_file.dbkey )
- $site_organisms[ $index ]
+ $site_organisms[ $site_dbkeys.index( $gff_file.dbkey ) ]
</param>
<param type="template" name="position" strip="True" >
#set line_count = 0
@@ -82,8 +81,7 @@
<param type="data" name="gff_file" url="galaxy_${DATASET_HASH}.gff" />
<param type="template" name="site_organism" strip="True" >
- #set index = $site_dbkeys.index( $gff_file.dbkey )
- $site_organisms[ $index ]
+ $site_organisms[ $site_dbkeys.index( $gff_file.dbkey ) ]
</param>
<param type="template" name="position" strip="True" >
#set line_count = 0
diff -r 6056caca2503 -r cbacbb736899 display_applications/ensembl/ensembl_interval_as_bed.xml
--- a/display_applications/ensembl/ensembl_interval_as_bed.xml Mon May 24 14:50:20 2010 -0400
+++ b/display_applications/ensembl/ensembl_interval_as_bed.xml Mon May 24 15:05:55 2010 -0400
@@ -18,8 +18,7 @@
<param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict"/>
<param type="template" name="site_organism" strip="True" >
- #set index = $site_dbkeys.index( $bed_file.dbkey )
- $site_organisms[ $index ]
+ $site_organisms[ $site_dbkeys.index( $bed_file.dbkey ) ]
</param>
<param type="template" name="position" strip="True" >
#set line_count = 0
@@ -82,8 +81,7 @@
<param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict"/>
<param type="template" name="site_organism" strip="True" >
- #set index = $site_dbkeys.index( $bed_file.dbkey )
- $site_organisms[ $index ]
+ $site_organisms[ $site_dbkeys.index( $bed_file.dbkey ) ]
</param>
<param type="template" name="position" strip="True" >
#set line_count = 0
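
For illustration only: both ensembl display application edits above make the same simplification, folding the temporary #set index variable into a single inline lookup. In plain Python terms (the sample values below are invented for the sketch), the template now evaluates the equivalent of:

    # Sketch of the organism lookup the simplified Cheetah template performs.
    # site_dbkeys / site_organisms values here are illustrative only.
    site_dbkeys = ["hg18", "mm9", "danRer6"]
    site_organisms = ["Homo_sapiens", "Mus_musculus", "Danio_rerio"]
    dbkey = "mm9"  # e.g. $gff_file.dbkey or $bed_file.dbkey

    # Before: two steps through a temporary variable
    index = site_dbkeys.index(dbkey)
    organism = site_organisms[index]

    # After: the same lookup inlined
    organism = site_organisms[site_dbkeys.index(dbkey)]

    print(organism)  # Mus_musculus either way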
diff -r 6056caca2503 -r cbacbb736899 tool_conf.xml.main
--- a/tool_conf.xml.main Mon May 24 14:50:20 2010 -0400
+++ b/tool_conf.xml.main Mon May 24 15:05:55 2010 -0400
@@ -298,6 +298,7 @@
<tool file="fastq/fastq_filter.xml" />
<tool file="fastq/fastq_trimmer.xml" />
<tool file="fastq/fastq_trimmer_by_quality.xml" />
+ <tool file="fastq/fastq_masker_by_quality.xml" />
<tool file="fastq/fastq_manipulation.xml" />
<tool file="fastq/fastq_to_fasta.xml" />
<tool file="fastq/fastq_to_tabular.xml" />
diff -r 6056caca2503 -r cbacbb736899 tools/fastq/fastq_masker_by_quality.py
--- a/tools/fastq/fastq_masker_by_quality.py Mon May 24 14:50:20 2010 -0400
+++ b/tools/fastq/fastq_masker_by_quality.py Mon May 24 15:05:55 2010 -0400
@@ -46,7 +46,7 @@
def main():
usage = "usage: %prog [options] input_file output_file"
parser = OptionParser( usage=usage )
- parser.add_option( '-f', '--format', dest='format', type='choice', default='sanger', choices=( 'sanger', 'cssanger', 'solexa', 'illumina' ), help='FASTQ variant type' )
+ parser.add_option( '-f', '--format', dest='format', type='choice', default='sanger', choices=( 'sanger', 'solexa', 'illumina' ), help='FASTQ variant type' )
parser.add_option( '-m', '--mask_character', dest='mask_character', default='N', help='Mask Character to use' )
parser.add_option( '-c', '--score_comparison', type="choice", dest='score_comparison', default='le', choices=('gt','ge','eq','lt', 'le', 'ne' ), help='Mask base when score is' )
parser.add_option( '-s', '--quality_score', type="float", dest='quality_score', default='0', help='Quality Score' )
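
For illustration only: dropping 'cssanger' from the choices tuple means optparse now rejects that value for --format at parse time. A minimal standalone sketch of that behaviour (not the tool's actual parser):

    # Minimal optparse sketch: values outside the choices tuple are rejected.
    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option('-f', '--format', dest='format', type='choice',
                      default='sanger',
                      choices=('sanger', 'solexa', 'illumina'),
                      help='FASTQ variant type')

    options, args = parser.parse_args(['--format', 'sanger'])
    print(options.format)  # sanger

    # parser.parse_args(['--format', 'cssanger']) would now exit with an
    # "invalid choice" error instead of being accepted.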