# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Dan Blankenberg <dan@bx.psu.edu>
# Date 1275939260 14400
# Node ID 37f4420ba3fcf9eb0fd097cf812ad91cc2824f99
# Parent 7de1adaf5628e04e2da0d948f03d08cb5ce5d7f3
Bug fix for handling 'space to tab' for non-binary composite datatype files.
Add upload tests covering 'space to tab' when uploading composite and
non-composite datatypes.
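
The crux of the fix: for a non-binary composite file, the 'space to tab'
choice made per file at upload time must take precedence over the datatype's
composite-file default (which this patch also flips from True to False).
A minimal illustrative sketch of that precedence; the helper name and dict
shape are assumptions for illustration, not Galaxy's actual API:

    # Hypothetical helper; 'composite_file_paths' stands in for the per-file
    # dicts the upload tool receives (see tools/data_source/upload.py below).
    def resolve_space_to_tab( composite_file_paths, name, datatype_default=False ):
        """Prefer the per-file 'space_to_tab' flag; fall back to the datatype default."""
        return composite_file_paths[ name ].get( 'space_to_tab', datatype_default )

    # The per-file choice wins even though the datatype default is now False:
    assert resolve_space_to_tab( { 'ped_file': { 'space_to_tab': True } }, 'ped_file' )
    # Absent a per-file flag, the datatype default applies:
    assert not resolve_space_to_tab( { 'map_file': {} }, 'map_file' )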
--- a/test/functional/test_get_data.py
+++ b/test/functional/test_get_data.py
@@ -4,14 +4,16 @@ from galaxy.model.mapping import context
 from base.twilltestcase import TwillTestCase
 
 class UploadData( TwillTestCase ):
-    def test_0005_upload_file( self ):
-        """Test uploading 1.bed, NOT setting the file format"""
+    def test_0000_setup_upload_tests( self ):
+        """Configuring upload tests, setting admin_user"""
         self.logout()
         self.login( email='test@bx.psu.edu' )
         global admin_user
         admin_user = sa_session.query( galaxy.model.User ) \
                                .filter( galaxy.model.User.table.c.email=='test@bx.psu.edu' ) \
                                .one()
+    def test_0005_upload_file( self ):
+        """Test uploading 1.bed, NOT setting the file format"""
         history = sa_session.query( galaxy.model.History ) \
                             .filter( and_( galaxy.model.History.table.c.deleted==False,
                                            galaxy.model.History.table.c.user_id==admin_user.id ) ) \
@@ -25,6 +27,21 @@ class UploadData( TwillTestCase ):
         self.verify_dataset_correctness( '1.bed', hid=str( hda.hid ) )
         self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
         self.delete_history( id=self.security.encode_id( history.id ) )
+    def test_0006_upload_file( self ):
+        """Test uploading 1.bed.spaces, with space to tab selected, NOT setting the file format"""
+        history = sa_session.query( galaxy.model.History ) \
+                            .filter( and_( galaxy.model.History.table.c.deleted==False,
+                                           galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+                            .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+                            .first()
+        self.upload_file( '1.bed.spaces', space_to_tab = True )
+        hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+                        .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+                        .first()
+        assert hda is not None, "Problem retrieving hda from database"
+        self.verify_dataset_correctness( '1.bed', hid=str( hda.hid ) )
+        self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+        self.delete_history( id=self.security.encode_id( history.id ) )
     def test_0010_upload_file( self ):
         """Test uploading 4.bed.gz, manually setting the file format"""
         self.check_history_for_string( 'Your history is empty' )
@@ -174,15 +191,36 @@ class UploadData( TwillTestCase ):
                             .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
                             .first()
         # lped data types include a ped_file and a map_file ( which is binary )
-        self.upload_composite_datatype_file( 'lped', ped_file='tinywga.ped', map_file='tinywga.map', base_name='rgenetics' )
+        self.upload_file( None, ftype='lped', metadata = [ { 'name':'base_name', 'value':'rgenetics' } ], composite_data = [ { 'name':'ped_file', 'value':'tinywga.ped' }, { 'name':'map_file', 'value':'tinywga.map'} ] )
         # Get the latest hid for testing
         hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
                         .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
                         .first()
         assert hda is not None, "Problem retrieving hda from database"
         # We'll test against the resulting ped file and map file for correctness
-        self.verify_composite_datatype_file_content( 'rgenetics.ped', str( hda.id ) )
-        self.verify_composite_datatype_file_content( 'rgenetics.map', str( hda.id ) )
+        self.verify_composite_datatype_file_content( 'tinywga.ped', str( hda.id ), base_name = 'rgenetics.ped' )
+        self.verify_composite_datatype_file_content( 'tinywga.map', str( hda.id ), base_name = 'rgenetics.map' )
+        self.check_history_for_string( "rgenetics" )
+        self.delete_history( id=self.security.encode_id( history.id ) )
+    def test_0056_upload_file( self ):
+        """Test uploading lped composite datatype file, manually setting the file format, and using space to tab on one file (tinywga.ped)"""
+        # Logged in as admin_user
+        self.check_history_for_string( 'Your history is empty' )
+        history = sa_session.query( galaxy.model.History ) \
+                            .filter( and_( galaxy.model.History.table.c.deleted==False,
+                                           galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+                            .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+                            .first()
+        # lped data types include a ped_file and a map_file ( which is binary )
+        self.upload_file( None, ftype='lped', metadata = [ { 'name':'base_name', 'value':'rgenetics' } ], composite_data = [ { 'name':'ped_file', 'value':'tinywga.ped', 'space_to_tab':True }, { 'name':'map_file', 'value':'tinywga.map'} ] )
+        # Get the latest hid for testing
+        hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+                        .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+                        .first()
+        assert hda is not None, "Problem retrieving hda from database"
+        # We'll test against the resulting ped file and map file for correctness
+        self.verify_composite_datatype_file_content( 'tinywga.ped.space_to_tab', str( hda.id ), base_name = 'rgenetics.ped' )
+        self.verify_composite_datatype_file_content( 'tinywga.map', str( hda.id ), base_name = 'rgenetics.map' )
         self.check_history_for_string( "rgenetics" )
         self.delete_history( id=self.security.encode_id( history.id ) )
     def test_0060_upload_file( self ):
def test_0060_upload_file( self ):
@@ -195,16 +233,16 @@ class UploadData( TwillTestCase ):
.order_by( desc( galaxy.model.History.table.c.create_time ) )
\
.first()
# pbed data types include a bim_file, a bed_file and a fam_file
- self.upload_composite_datatype_file( 'pbed',
bim_file='tinywga.bim', bed_file='tinywga.bed',
fam_file='tinywga.fam', base_name='rgenetics' )
+ self.upload_file( None, ftype='pbed', metadata = [ {
'name':'base_name', 'value':'rgenetics' } ],
composite_data = [ { 'name':'bim_file',
'value':'tinywga.bim' }, { 'name':'bed_file',
'value':'tinywga.bed'}, { 'name':'fam_file',
'value':'tinywga.fam' } ] )
# Get the latest hid for testing
hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
.order_by( desc(
galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
.first()
assert hda is not None, "Problem retrieving hda from database"
# We'll test against the resulting ped file and map file for correctness
- self.verify_composite_datatype_file_content( 'rgenetics.bim', str( hda.id
) )
- self.verify_composite_datatype_file_content( 'rgenetics.bed', str( hda.id
) )
- self.verify_composite_datatype_file_content( 'rgenetics.fam', str( hda.id
) )
+ self.verify_composite_datatype_file_content( 'tinywga.bim', str( hda.id
), base_name = 'rgenetics.bim' )
+ self.verify_composite_datatype_file_content( 'tinywga.bed', str( hda.id
), base_name = 'rgenetics.bed' )
+ self.verify_composite_datatype_file_content( 'tinywga.fam', str( hda.id
), base_name = 'rgenetics.fam' )
self.check_history_for_string( "rgenetics" )
self.delete_history( id=self.security.encode_id( history.id ) )
def test_0065_upload_file( self ):
--- /dev/null
+++ b/test-data/tinywga.ped.space_to_tab
@@ -0,0 +1,40 @@
+101 1 3 2 2 2 2 2 4 2 1 3 3 3 3 3 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 1 1 2 2 2 2 3 1 3 1 2 1 0 0 2 2 1 3 3 1 1 3
+101 2 0 0 2 1 2 2 4 2 1 3 3 3 3 3 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 1 1 2 2 2 2 3 1 3 1 2 1 3 3 2 2 1 3 3 1 1 3
+101 3 0 0 1 1 2 2 4 4 1 3 3 3 3 3 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 1 3 2 4 3 3 1 1 3 3
+105 1 3 2 2 2 2 2 4 2 3 3 3 3 3 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 3 1 1 1 2 1 0 0 2 2 1 1 3 3 1 1
+105 2 0 0 2 1 2 2 4 4 3 3 3 3 3 1 2 2 1 3 1 2 3 1 2 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+105 3 0 0 1 1 4 2 2 2 3 3 3 3 3 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+112 1 3 2 1 2 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+112 2 0 0 2 1 2 2 2 2 3 3 1 1 1 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+112 3 0 0 1 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 2 2 1 1 2 2 4 2 4 4 1 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+117 1 3 2 2 2 2 2 4 2 3 3 3 3 3 3 4 2 1 1 2 2 1 1 2 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 2 1 1 3 3 1 1
+117 2 0 0 2 1 2 2 4 4 1 3 3 3 3 3 2 2 1 3 2 2 1 1 2 2 4 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+117 3 0 0 1 1 2 2 4 2 3 3 3 3 3 3 4 2 1 1 2 2 1 1 2 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+12 1 3 2 1 2 2 2 4 4 1 3 3 3 3 3 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+12 2 0 0 2 1 2 2 4 4 1 3 3 3 3 3 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+12 3 0 0 1 1 2 2 4 2 1 3 1 3 3 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+13 1 3 2 1 2 4 2 4 2 1 3 3 3 3 1 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 3 4 1 1 2 2 4 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+13 2 0 0 2 1 4 2 2 2 3 3 1 3 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 0 0 0 0 3 3 1 1 3 3
+13 3 0 0 1 1 2 2 4 4 1 3 3 3 3 3 2 2 1 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 3 1 2 2 2 2 1 1 3 1 1 1 3 3 2 2 3 3 1 1 3 3
+1334 1 10 11 1 2 2 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 4 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1334 10 0 0 1 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 2 2 1 1 4 2 4 2 4 4 1 1 3 4 1 1 2 2 4 2 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+1334 11 0 0 2 1 2 2 2 2 3 3 1 1 1 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1334 12 0 0 1 1 4 2 2 2 3 3 1 3 1 1 2 2 3 3 2 2 1 1 2 2 4 2 4 4 1 1 3 4 1 1 2 2 4 2 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+1334 13 0 0 2 1 4 2 4 2 3 3 1 3 3 1 4 2 1 3 2 2 1 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1334 2 12 13 2 2 4 4 2 2 3 3 1 3 1 1 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 3 3 1 1 2 2 4 4 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+1340 1 9 10 1 2 4 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 3 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1340 10 0 0 2 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 3 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1340 11 0 0 1 1 4 2 4 2 3 3 1 3 3 1 4 2 1 3 2 2 1 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1340 12 0 0 2 1 2 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 4 2 4 2 2 4 3 1 4 4 1 1 4 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1340 2 11 12 2 2 2 2 4 2 3 3 1 3 3 1 4 2 1 3 1 2 3 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1340 9 0 0 1 1 4 4 2 2 3 3 1 1 1 1 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 3 3 1 1 2 2 4 4 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1341 1 11 12 1 1 2 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 4 2 4 2 2 4 3 1 4 4 1 1 0 0 2 2 1 1 1 1 1 1 3 3 2 2 1 1 3 3 1 1
+1341 11 0 0 1 1 2 2 2 2 3 3 1 1 1 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 4 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1341 12 0 0 2 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 2 2 1 1 4 2 4 2 4 4 1 1 3 4 1 1 4 2 4 2 3 1 1 1 2 1 3 3 2 2 1 1 3 3 1 1
+1341 13 0 0 1 1 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1341 14 0 0 2 1 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1341 2 13 14 2 1 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1344 1 12 13 1 1 2 2 4 4 3 3 3 3 3 3 4 4 1 1 2 2 1 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 2 1 1 3 3 1 1
+1344 12 0 0 1 1 2 2 4 2 3 3 1 3 3 1 4 2 1 3 1 2 3 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1344 13 0 0 2 1 2 2 4 2 3 3 1 3 3 1 4 2 1 3 1 2 3 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1345 12 0 0 1 1 4 2 4 2 3 3 3 3 3 1 2 2 1 3 2 2 1 1 4 2 4 2 4 4 1 1 3 4 1 1 4 2 4 2 3 1 1 1 2 1 3 3 2 2 1 1 3 3 1 1
--- /dev/null
+++ b/test-data/1.bed.spaces
@@ -0,0 +1,65 @@
+chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 -
+chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 +
+chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 -
+chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 +
+chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 -
+chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 -
+chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 +
+chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 -
+chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 +
+chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 -
+chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 +
+chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 -
+chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 -
+chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 +
+chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 -
+chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 +
+chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 +
+chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 -
+chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 +
+chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 -
+chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 -
+chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 +
+chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 -
+chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 +
+chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 +
+chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 -
+chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 +
+chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 -
+chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 +
+chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 -
+chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 +
+chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 -
+chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 -
+chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 +
+chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 -
+chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 +
+chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 +
+chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 -
+chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 +
+chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 -
+chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 +
+chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 -
+chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 +
+chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 -
+chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 +
+chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 -
+chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 +
+chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 -
+chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 -
+chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 +
+chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 -
+chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 +
+chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 +
+chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 -
+chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 +
+chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 -
+chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 -
+chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 +
+chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 -
+chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 +
+chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 -
+chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 +
+chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 -
+chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 +
+chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 -
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -295,7 +295,7 @@ class Data( object ):
     def after_setting_metadata( self, dataset ):
         """This function is called on the dataset after metadata is set."""
         dataset.clear_associated_files( metadata_safe = True )
-    def __new_composite_file( self, name, optional = False, mimetype = None, description = None, substitute_name_with_metadata = None, is_binary = False, space_to_tab = True, **kwds ):
+    def __new_composite_file( self, name, optional = False, mimetype = None, description = None, substitute_name_with_metadata = None, is_binary = False, space_to_tab = False, **kwds ):
         kwds[ 'name' ] = name
         kwds[ 'optional' ] = optional
         kwds[ 'mimetype' ] = mimetype
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -143,7 +143,7 @@ class TwillTestCase( unittest.TestCase )
         filename = os.path.join( *path )
         file(filename, 'wt').write(buffer.getvalue())
 
-    def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)', metadata = None, composite_data = None ):
+    def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)', space_to_tab = False, metadata = None, composite_data = None ):
         """Uploads a file"""
         self.visit_url( "%s/tool_runner?tool_id=upload1" % self.url )
         try:
@@ -156,9 +156,11 @@ class TwillTestCase( unittest.TestCase )
                 for i, composite_file in enumerate( composite_data ):
                     filename = self.get_filename( composite_file.get( 'value' ) )
                     tc.formfile( "1", "files_%i|file_data" % i, filename )
+                    tc.fv( "1", "files_%i|space_to_tab" % i, composite_file.get( 'space_to_tab', False ) )
             else:
                 filename = self.get_filename( filename )
                 tc.formfile( "1", "file_data", filename )
+                tc.fv( "1", "space_to_tab", space_to_tab )
             tc.submit("runtool_btn")
             self.home()
         except AssertionError, err:
@@ -196,60 +198,6 @@ class TwillTestCase( unittest.TestCase )
 
         # Wait for upload processing to finish (TODO: this should be done in each test case instead)
         self.wait()
-    def upload_composite_datatype_file( self, ftype, ped_file='', map_file='', bim_file='', bed_file='',
-                                        fped_file='', fphe_file='', pphe_file='', fam_file='', pheno_file='', eset_file='', malist_file='',
-                                        affybatch_file='', dbkey='unspecified (?)', base_name='rgenetics' ):
-        """Tests uploading either of 2 different composite data types ( lped and pbed )"""
-        self.visit_url( "%s/tool_runner/index?tool_id=upload1" % self.url )
-        # Handle refresh_on_change
-        self.refresh_form( "file_type", ftype )
-        tc.fv( "1", "dbkey", dbkey )
-        tc.fv( "1", "files_metadata|base_name", base_name )
-        if ftype == 'lped':
-            # lped data types include a ped_file and a map_file
-            ped_file = self.get_filename( ped_file )
-            tc.formfile( "1", "files_0|file_data", ped_file )
-            map_file = self.get_filename( map_file )
-            tc.formfile( "1", "files_1|file_data", map_file )
-        elif ftype == 'pbed':
-            # pbed data types include a bim_file, a bed_file and a fam_file
-            bim_file = self.get_filename( bim_file )
-            tc.formfile( "1", "files_0|file_data", bim_file )
-            bed_file = self.get_filename( bed_file )
-            tc.formfile( "1", "files_1|file_data", bed_file )
-            fam_file = self.get_filename( fam_file )
-            tc.formfile( "1", "files_2|file_data", fam_file )
-        elif ftype == 'pphe':
-            # pphe data types include a phe_file
-            pphe_file = self.get_filename( pphe_file )
-            tc.formfile( "1", "files_0|file_data", pphe_file )
-        elif ftype == 'fped':
-            # fped data types include an fped_file only
-            fped_file = self.get_filename( fped_file )
-            tc.formfile( "1", "files_0|file_data", fped_file )
-        elif ftype == 'eset':
-            # eset data types include a eset_file, a pheno_file
-            eset_file = self.get_filename( eset_file )
-            tc.formfile( "1", "files_0|file_data", eset_file )
-            pheno_file = self.get_filename( pheno_file )
-            tc.formfile( "1", "files_1|file_data", pheno_file )
-        elif ftype == 'affybatch':
-            # affybatch data types include an affybatch_file, and a pheno_file
-            affybatch_file = self.get_filename( affybatch_file )
-            tc.formfile( "1", "files_0|file_data", affybatch_file )
-            pheno_file = self.get_filename( pheno_file )
-            tc.formfile( "1", "files_1|file_data", pheno_file )
-        else:
-            raise AssertionError, "Unsupported composite data type (%s) received, currently only %s data types are supported." \
-                % ( ftype, ','.join( self.composite_extensions ) )
-        tc.submit( "runtool_btn" )
-        self.check_page_for_string( 'The following job has been succesfully added to the queue:' )
-        check_str = base_name #'Uploaded Composite Dataset (%s)' % ftype
-        self.check_page_for_string( check_str )
-        # Wait for upload processing to finish (TODO: this should be done in each test case instead)
-        self.wait()
-        self.check_history_for_string( check_str )
-
     # Functions associated with histories
     def check_history_for_errors( self ):
         """Raises an exception if there are errors in a history"""
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -241,8 +241,8 @@ class UploadDataset( Group ):
            name = context.get( 'NAME', None )
            info = context.get( 'INFO', None )
            warnings = []
-           space_to_tab = False
-           if context.get( 'space_to_tab', None ) not in [ "None", None ]:
+           space_to_tab = False
+           if context.get( 'space_to_tab', None ) not in [ "None", None, False ]:
                space_to_tab = True
            file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
            if file_bunch.path and url_paste:
@@ -261,7 +261,7 @@ class UploadDataset( Group ):
            name = context.get( 'NAME', None )
            info = context.get( 'INFO', None )
            space_to_tab = False
-           if context.get( 'space_to_tab', None ) not in [ "None", None ]:
+           if context.get( 'space_to_tab', None ) not in [ "None", None, False ]:
                space_to_tab = True
            warnings = []
            file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
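
The widened membership test above matters because the form value for
space_to_tab can now arrive as a literal boolean False (for example, set by
the test harness via tc.fv) in addition to the string "None" or None, and
all three must mean "do not convert". A minimal sketch of that check,
extracted here purely for illustration:

    def parse_space_to_tab( context_value ):
        """Mirror the UploadDataset check: anything but these sentinels means convert."""
        return context_value not in [ "None", None, False ]

    assert parse_space_to_tab( True )
    assert parse_space_to_tab( "Yes" )          # a checked checkbox posts a value
    assert not parse_space_to_tab( "None" )
    assert not parse_space_to_tab( None )
    assert not parse_space_to_tab( False )      # new: boolean False from tests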
--- a/tools/data_source/upload.py
+++ b/tools/data_source/upload.py
@@ -299,7 +299,7 @@ def add_composite_file( dataset, json_fi
             break
         elif dataset.composite_file_paths[value.name] is not None:
             if not value.is_binary:
-                if value.space_to_tab:
+                if dataset.composite_file_paths[ value.name ].get( 'space_to_tab', value.space_to_tab ):
                     sniff.convert_newlines_sep2tabs( dataset.composite_file_paths[ value.name ][ 'path' ] )
                 else:
                     sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )