details: http://www.bx.psu.edu/hg/galaxy/rev/6d00e4ff7129
changeset: 2439:6d00e4ff7129
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Tue Jun 09 12:05:46 2009 -0400
description:
Fix for handling of the error given by upload of html, empty, etc. files
2 file(s) affected in this change:
lib/galaxy/tools/actions/upload.py
test/base/twilltestcase.py
diffs (125 lines):
diff -r 17fd27c7f286 -r 6d00e4ff7129 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Tue Jun 09 11:01:19 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Tue Jun 09 12:05:46 2009 -0400
@@ -126,10 +126,24 @@
return dict( output=data )
def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None ):
+ def dataset_no_data_error( data, message = 'there was an error uploading your file' ):
+ data.info = "No data: %s." % message
+ data.state = data.states.ERROR
+ if data.extension is None:
+ data.extension = 'data'
+ return data
data_type = None
+
+ if precreated_dataset is not None:
+ data = precreated_dataset
+ else:
+ data = trans.app.model.HistoryDatasetAssociation( history = trans.history, extension = ext, create_dataset = True )
+ trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
+
# See if we have an empty file
if not os.path.getsize( temp_name ) > 0:
- raise BadFileException( "you attempted to upload an empty file." )
+ return dataset_no_data_error( data, message = 'you attempted to upload an empty file' )
+ #raise BadFileException( "you attempted to upload an empty file." )
if is_multi_byte:
ext = sniff.guess_ext( temp_name, is_multi_byte=True )
else:
@@ -138,7 +152,8 @@
# we'll decompress on the fly.
is_gzipped, is_valid = self.check_gzip( temp_name )
if is_gzipped and not is_valid:
- raise BadFileException( "you attempted to upload an inappropriate file." )
+ return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
+ #raise BadFileException( "you attempted to upload an inappropriate file." )
elif is_gzipped and is_valid:
# We need to uncompress the temp_name file
CHUNK_SIZE = 2**20 # 1Mb
@@ -150,7 +165,8 @@
except IOError:
os.close( fd )
os.remove( uncompressed )
- raise BadFileException( 'problem decompressing gzipped data.' )
+ return dataset_no_data_error( data, message = 'problem decompressing gzipped data' )
+ #raise BadFileException( 'problem decompressing gzipped data.' )
if not chunk:
break
os.write( fd, chunk )
@@ -165,16 +181,20 @@
# See if we have a zip archive
is_zipped, is_valid, test_ext = self.check_zip( temp_name )
if is_zipped and not is_valid:
- raise BadFileException( "you attempted to upload an inappropriate file." )
+ return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
+ #raise BadFileException( "you attempted to upload an inappropriate file." )
elif is_zipped and is_valid:
# Currently, we force specific tools to handle this case. We also require the user
# to manually set the incoming file_type
if ( test_ext == 'ab1' or test_ext == 'scf' ) and file_type != 'binseq.zip':
- raise BadFileException( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'." )
+ return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'" )
+ #raise BadFileException( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'." )
elif test_ext == 'txt' and file_type != 'txtseq.zip':
- raise BadFileException( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'." )
+ return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'" )
+ #raise BadFileException( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'." )
if not ( file_type == 'binseq.zip' or file_type == 'txtseq.zip' ):
- raise BadFileException( "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files." )
+ return dataset_no_data_error( data, message = "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files" )
+ #raise BadFileException( "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files." )
data_type = 'zip'
ext = file_type
if not data_type:
@@ -183,16 +203,20 @@
if len( parts ) > 1:
ext = parts[1].strip().lower()
if not( ext == 'ab1' or ext == 'scf' ):
- raise BadFileException( "you attempted to upload an inappropriate file." )
+ return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
+ #raise BadFileException( "you attempted to upload an inappropriate file." )
if ext == 'ab1' and file_type != 'ab1':
- raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
+ return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files" )
+ #raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
elif ext == 'scf' and file_type != 'scf':
- raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
+ return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Scf' when uploading scf files" )
+ #raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
data_type = 'binary'
if not data_type:
# We must have a text file
if trans.app.datatypes_registry.get_datatype_by_extension( file_type ).composite_type != 'auto_primary_file' and self.check_html( temp_name ):
- raise BadFileException( "you attempted to upload an inappropriate file." )
+ return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
+ #raise BadFileException( "you attempted to upload an inappropriate file." )
if data_type != 'binary' and data_type != 'zip':
if space_to_tab:
self.line_count = sniff.convert_newlines_sep2tabs( temp_name )
@@ -205,12 +229,7 @@
data_type = ext
if info is None:
info = 'uploaded %s file' %data_type
- if precreated_dataset is not None:
- data = precreated_dataset
- data.extension = ext
- else:
- data = trans.app.model.HistoryDatasetAssociation( history = trans.history, extension = ext, create_dataset = True )
- trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
+ data.extension = ext
data.name = file_name
data.dbkey = dbkey
data.info = info
diff -r 17fd27c7f286 -r 6d00e4ff7129 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Jun 09 11:01:19 2009 -0400
+++ b/test/base/twilltestcase.py Tue Jun 09 12:05:46 2009 -0400
@@ -552,7 +552,7 @@
for value in kwd[ control.name ]:
if value not in control.value and True not in [ value in item_label for item_label in item_labels ]:
changed = True
- break
+ break
if changed:
# Clear Control and set to proper value
control.clear()
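A minimal, runnable sketch of the pattern this changeset introduces: instead of raising BadFileException, the upload action marks the dataset itself as errored and returns it, so the caller can still attach name/dbkey/info to a visible (failed) history item. The Dataset class below is a hypothetical stand-in, not the real Galaxy model.

class Dataset(object):
    class states:
        OK = 'ok'
        ERROR = 'error'
    def __init__(self):
        self.info = None
        self.state = self.states.OK
        self.extension = None

def dataset_no_data_error(data, message='there was an error uploading your file'):
    # Flag the dataset as errored rather than raising, mirroring the diff above.
    data.info = "No data: %s." % message
    data.state = data.states.ERROR
    if data.extension is None:
        data.extension = 'data'   # fall back to the generic datatype
    return data

d = dataset_no_data_error(Dataset(), 'you attempted to upload an empty file')
assert d.state == Dataset.states.ERROR
print(d.info)   # No data: you attempted to upload an empty file.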
details: http://www.bx.psu.edu/hg/galaxy/rev/c0aa8af62124
changeset: 2441:c0aa8af62124
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Jun 09 15:03:04 2009 -0400
description:
Only refresh history if necessary (better fix than my previous commit), add functional tests to cover behavior when deleting the current history, and add better functional tests for sniffing data formats.
3 file(s) affected in this change:
lib/galaxy/web/controllers/history.py
test/functional/test_history_functions.py
test/functional/test_sniffing_and_metadata_settings.py
diffs (288 lines):
diff -r 6a992f466b80 -r c0aa8af62124 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Jun 09 12:08:35 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Tue Jun 09 15:03:04 2009 -0400
@@ -79,6 +79,7 @@
@web.require_login( "work with multiple histories" )
def list( self, trans, **kwargs ):
"""List all available histories"""
+ current_history = trans.history
status = message = None
if 'operation' in kwargs:
operation = kwargs['operation'].lower()
@@ -104,12 +105,14 @@
if histories:
if operation == "switch":
status, message = self._list_switch( trans, histories )
+ # Current history changed, refresh history frame
+ trans.template_context['refresh_frames'] = ['history']
elif operation == "delete":
status, message = self._list_delete( trans, histories )
+ if current_history in histories:
+ trans.template_context['refresh_frames'] = ['history']
elif operation == "undelete":
status, message = self._list_undelete( trans, histories )
- # Current history may have changed, refresh history frame
- trans.template_context['refresh_frames'] = ['history']
trans.sa_session.flush()
# Render the list view
return self.list_grid( trans, status=status, message=message, **kwargs )
diff -r 6a992f466b80 -r c0aa8af62124 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Tue Jun 09 12:08:35 2009 -0400
+++ b/test/functional/test_history_functions.py Tue Jun 09 15:03:04 2009 -0400
@@ -47,6 +47,8 @@
latest_history.refresh()
if not latest_history.deleted:
raise AssertionError, "Problem deleting history id %d" % latest_history.id
+ # Since we deleted the current history, make sure the history frame was refreshed
+ self.check_history_for_string( 'Your history is empty.' )
# We'll now test deleting a list of histories
# After deleting the current history, a new one should have been created
global history1
@@ -60,6 +62,8 @@
self.upload_file( '2.bed', dbkey='hg18' )
ids = '%s,%s' % ( str( history1.id ), str( history2.id ) )
self.delete_history( ids )
+ # Since we deleted the current history, make sure the history frame was refreshed
+ self.check_history_for_string( 'Your history is empty.' )
try:
self.view_stored_active_histories( check_str=history1.name )
raise AssertionError, "History %s is displayed in the active history list after it was deleted" % history1.name
diff -r 6a992f466b80 -r c0aa8af62124 test/functional/test_sniffing_and_metadata_settings.py
--- a/test/functional/test_sniffing_and_metadata_settings.py Tue Jun 09 12:08:35 2009 -0400
+++ b/test/functional/test_sniffing_and_metadata_settings.py Tue Jun 09 15:03:04 2009 -0400
@@ -3,7 +3,7 @@
from base.twilltestcase import TwillTestCase
class SniffingAndMetaDataSettings( TwillTestCase ):
- def test_00_axt_datatype( self ):
+ def test_000_axt_datatype( self ):
"""Testing correctly sniffing axt data type upon upload"""
self.logout()
self.login( email='test(a)bx.psu.edu' )
@@ -15,7 +15,12 @@
self.verify_dataset_correctness( '1.axt' )
self.check_history_for_string( '1.axt format: <span class="axt">axt</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.axt" value="\?" Change data type selected value="axt" selected="yes"' )
- def test_05_bed_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving axt hda from the database"
+ if not latest_hda.name == '1.axt' and not latest_hda.extension == 'axt':
+ raise AssertionError, "axt data type was not correctly sniffed."
+ def test_005_bed_datatype( self ):
"""Testing correctly sniffing bed data type upon upload"""
self.upload_file( '1.bed' )
self.verify_dataset_correctness( '1.bed' )
@@ -25,24 +30,64 @@
self.check_metadata_for_string( 'End column: <option value="3" selected> Strand column <option value="6" selected>' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">Genomic Intervals To BED <option value="gff">BED to GFF' )
self.check_metadata_for_string( 'Change data type selected value="bed" selected="yes"' )
- def test_10_customtrack_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving bed hda from the database"
+ if not latest_hda.name == '1.bed' and not latest_hda.extension == 'bed':
+ raise AssertionError, "bed data type was not correctly sniffed."
+ def test_010_blastxml_datatype( self ):
+ """Testing correctly sniffing blastxml data type upon upload"""
+ self.upload_file( 'megablast_xml_parser_test1.gz' )
+ self.check_history_for_string( 'NCBI Blast XML data format: <span class="blastxml">blastxml</span>' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving blastxml hda from the database"
+ if not latest_hda.name == 'megablast_xml_parser_test1' and not latest_hda.extension == 'blastxml':
+ raise AssertionError, "blastxml data type was not correctly sniffed."
+ def test_015_csfasta_datatype( self ):
+ """Testing correctly sniffing csfasta data type upon upload"""
+ self.upload_file( 'shrimp_cs_test1.csfasta' )
+ self.verify_dataset_correctness( 'shrimp_cs_test1.csfasta' )
+ self.check_history_for_string( '162.6 Kb, format: <span class="csfasta">csfasta</span>, <td>>2_14_26_F3,-1282216.0</td>' )
+ self.check_metadata_for_string( 'value="shrimp_cs_test1.csfasta" value="\?" Change data type value="csfasta" selected="yes"' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving csfasta hda from the database"
+ if not latest_hda.name == 'shrimp_cs_test1.csfasta' and not latest_hda.extension == 'csfasta':
+ raise AssertionError, "csfasta data type was not correctly sniffed."
+ def test_020_customtrack_datatype( self ):
"""Testing correctly sniffing customtrack data type upon upload"""
self.upload_file( '1.customtrack' )
self.verify_dataset_correctness( '1.customtrack' )
self.check_history_for_string( '1.customtrack format: <span class="customtrack">customtrack</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.customtrack" value="\?" Change data type selected value="customtrack" selected="yes"' )
- def test_15_fasta_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving customtrack hda from the database"
+ if not latest_hda.name == '1.customtrack' and not latest_hda.extension == 'customtrack':
+ raise AssertionError, "customtrack data type was not correctly sniffed."
+ def test_025_fasta_datatype( self ):
"""Testing correctly sniffing fasta data type upon upload"""
self.upload_file( '1.fasta' )
self.verify_dataset_correctness( '1.fasta' )
self.check_history_for_string( '1.fasta format: <span class="fasta">fasta</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.fasta" value="\?" Change data type selected value="fasta" selected="yes"' )
- def test_18_fastqsolexa_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving fasta hda from the database"
+ if not latest_hda.name == '1.fasta' and not latest_hda.extension == 'fasta':
+ raise AssertionError, "fasta data type was not correctly sniffed."
+ def test_030_fastqsolexa_datatype( self ):
"""Testing correctly sniffing fastqsolexa ( the Solexa variant ) data type upon upload"""
self.upload_file( '1.fastqsolexa' )
self.verify_dataset_correctness( '1.fastqsolexa' )
self.check_history_for_string( '1.fastqsolexa format: <span class="fastqsolexa">fastqsolexa</span>, database: \? Info: uploaded fastqsolexa file' )
- def test_20_gff_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving fastqsolexa hda from the database"
+ if not latest_hda.name == '1.fastqsolexa' and not latest_hda.extension == 'fastqsolexa':
+ raise AssertionError, "fastqsolexa data type was not correctly sniffed."
+ def test_035_gff_datatype( self ):
"""Testing correctly sniffing gff data type upon upload"""
self.upload_file( '5.gff' )
self.verify_dataset_correctness( '5.gff' )
@@ -50,7 +95,12 @@
self.check_metadata_for_string( 'value="5.gff" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">GFF to BED' )
self.check_metadata_for_string( 'Change data type selected value="gff" selected="yes"' )
- def test_25_gff3_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving gff hda from the database"
+ if not latest_hda.name == '5.gff' and not latest_hda.extension == 'gff':
+ raise AssertionError, "gff data type was not correctly sniffed."
+ def test_040_gff3_datatype( self ):
"""Testing correctly sniffing gff3 data type upon upload"""
self.upload_file( '5.gff3' )
self.verify_dataset_correctness( '5.gff3' )
@@ -58,7 +108,21 @@
self.check_metadata_for_string( 'value="5.gff3" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">GFF to BED' )
self.check_metadata_for_string( 'Change data type selected value="gff3" selected="yes"' )
- def test_30_interval_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving gff3 hda from the database"
+ if not latest_hda.name == '5.gff3' and not latest_hda.extension == 'gff3':
+ raise AssertionError, "gff3 data type was not correctly sniffed."
+ def test_045_html_datatype( self ):
+ """Testing correctly sniffing html data type upon upload"""
+ self.upload_file( 'html_file.txt' )
+ self.check_history_for_string( 'An error occurred running this job: No data: you attempted to upload an inappropriate file.' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving html hda from the database"
+ if not latest_hda.name == 'html_file.txt' and not latest_hda.extension == 'data':
+ raise AssertionError, "html data type was not correctly sniffed."
+ def test_050_interval_datatype( self ):
"""Testing correctly sniffing interval data type upon upload"""
self.upload_file( '1.interval' )
self.verify_dataset_correctness( '1.interval' )
@@ -68,14 +132,24 @@
self.check_metadata_for_string( 'End column: <option value="3" selected> Strand column <option value="6" selected>' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">Genomic Intervals To BED' )
self.check_metadata_for_string( 'Change data type selected value="interval" selected="yes"' )
- def test_35_lav_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving interval hda from the database"
+ if not latest_hda.name == '1.interval' and not latest_hda.extension == 'interval':
+ raise AssertionError, "interval data type was not correctly sniffed."
+ def test_055_lav_datatype( self ):
"""Testing correctly sniffing lav data type upon upload"""
self.upload_file( '1.lav' )
self.verify_dataset_correctness( '1.lav' )
self.check_history_for_string( '1.lav format: <span class="lav">lav</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.lav" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="lav" selected="yes"' )
- def test_40_maf_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving lav hda from the database"
+ if not latest_hda.name == '1.lav' and not latest_hda.extension == 'lav':
+ raise AssertionError, "lav data type was not correctly sniffed."
+ def test_060_maf_datatype( self ):
"""Testing correctly sniffing maf data type upon upload"""
self.upload_file( '3.maf' )
self.verify_dataset_correctness( '3.maf' )
@@ -83,40 +157,57 @@
self.check_metadata_for_string( 'value="3.maf" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="interval">MAF to Genomic Intervals <option value="fasta">MAF to Fasta' )
self.check_metadata_for_string( 'Change data type selected value="maf" selected="yes"' )
- def test_45_tabular_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving maf hda from the database"
+ if not latest_hda.name == '3.maf' and not latest_hda.extension == 'maf':
+ raise AssertionError, "maf data type was not correctly sniffed."
+ def test_065_qual454_datatype( self ):
+ """Testing correctly sniffing qual454 data type upon upload"""
+ self.upload_file( 'qualscores.qual454' )
+ self.verify_dataset_correctness( 'qualscores.qual454' )
+ self.check_history_for_string( '5.6 Kb, format: <span class="qual454">qual454</span>, database: \?' )
+ self.check_metadata_for_string( 'Change data type value="qual454" selected="yes">qual454' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving qual454 hda from the database"
+ if not latest_hda.name == 'qualscores.qual454' and not latest_hda.extension == 'qual454':
+ raise AssertionError, "qual454 data type was not correctly sniffed."
+ def test_070_qualsolid_datatype( self ):
+ """Testing correctly sniffing qualsolid data type upon upload"""
+ self.upload_file( 'qualscores.qualsolid' )
+ self.verify_dataset_correctness('qualscores.qualsolid' )
+ self.check_history_for_string('2.5 Kb, format: <span class="qualsolid">qualsolid</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'Change data type value="qualsolid" selected="yes">qualsolid' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving qualsolid hda from the database"
+ if not latest_hda.name == 'qualscores.qualsolid' and not latest_hda.extension == 'qualsolid':
+ raise AssertionError, "qualsolid data type was not correctly sniffed."
+ def test_075_tabular_datatype( self ):
"""Testing correctly sniffing tabular data type upon upload"""
self.upload_file( '1.tabular' )
self.verify_dataset_correctness( '1.tabular' )
self.check_history_for_string( '1.tabular format: <span class="tabular">tabular</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.tabular" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="tabular" selected="yes"' )
- def test_50_wig_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving tabular hda from the database"
+ if not latest_hda.name == '1.tabular' and not latest_hda.extension == 'tabular':
+ raise AssertionError, "tabular data type was not correctly sniffed."
+ def test_080_wig_datatype( self ):
"""Testing correctly sniffing wig data type upon upload"""
self.upload_file( '1.wig' )
self.verify_dataset_correctness( '1.wig' )
self.check_history_for_string( '1.wig format: <span class="wig">wig</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.wig" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="wig" selected="yes"' )
- def test_55_blastxml_datatype( self ):
- """Testing correctly sniffing blastxml data type upon upload"""
- self.upload_file( 'megablast_xml_parser_test1.gz' )
- self.check_history_for_string( 'NCBI Blast XML data' )
- self.check_history_for_string( 'format: <span class="blastxml">blastxml</span>' )
- """
- TODO: It is currently not even possible to set the following format on upload. They
- should be included in the File Format select list on the upload form if they are to
- be tested here...
- def test_60_qualsolid_datatype( self ):
- Testing correctly sniffing qualsolid data type upon upload
- self.upload_file( 'qualscores.qualsolid' )
- self.verify_dataset_correctness('qualscores.qualsolid' )
- self.check_history_for_string('2.5 Kb, format: <span class="fasta">fasta</span>, database: \? Info: uploaded file' )
- def test_65_qual454_datatype( self ):
- Testing correctly sniffing qual454 data type upon upload
- self.upload_file( 'qualscores.qual454' )
- self.verify_dataset_correctness( 'qualscores.qual454' )
- self.check_history_for_string( '5.6 Kb, format: <span class="qual454">qual454</span>, database: \? Info: uploaded qual454 file' )
- """
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving wig hda from the database"
+ if not latest_hda.name == '1.wig' and not latest_hda.extension == 'wig':
+ raise AssertionError, "wig data type was not correctly sniffed."
def test_9999_clean_up( self ):
self.delete_history( id=str( history1.id ) )
self.logout()
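A hedged sketch of the "refresh only when necessary" logic in the controller change above, with trans reduced to a plain dict; the names are illustrative stand-ins, not the actual Galaxy API.

def list_operation(trans, operation, histories):
    # Decide which frames need refreshing after a history-grid operation.
    current_history = trans['history']
    refresh_frames = []
    if operation == 'switch':
        # Switching always changes the current history.
        refresh_frames = ['history']
    elif operation == 'delete' and current_history in histories:
        # Deleting only forces a refresh if the current history was among those deleted.
        refresh_frames = ['history']
    trans['template_context'] = {'refresh_frames': refresh_frames}
    return refresh_frames

trans = {'history': 'current'}
assert list_operation(trans, 'delete', ['other']) == []                    # no refresh needed
assert list_operation(trans, 'delete', ['current', 'other']) == ['history']  # current history deleted
assert list_operation(trans, 'switch', ['other']) == ['history']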
details: http://www.bx.psu.edu/hg/galaxy/rev/e27e392806df
changeset: 2437:e27e392806df
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Jun 09 10:28:27 2009 -0400
description:
Merge genetics.py
1 file(s) affected in this change:
lib/galaxy/datatypes/genetics.py
diffs (159 lines):
diff -r 2c6720a9130c -r e27e392806df lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Mon Jun 08 21:02:41 2009 -0400
+++ b/lib/galaxy/datatypes/genetics.py Tue Jun 09 10:28:27 2009 -0400
@@ -17,6 +17,7 @@
from galaxy import util
from cgi import escape
import urllib
+from galaxy.web import url_for
from galaxy.datatypes import metadata
from galaxy.datatypes.metadata import MetadataElement
#from galaxy.datatypes.data import Text
@@ -80,7 +81,7 @@
for site_name, site_url in util.get_ucsc_by_build(dataset.dbkey):
if site_name in app.config.ucsc_display_sites:
site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
- display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
+ display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s" % (base_url, url_for( controller='root' ), dataset.id, type))
sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
sl.append("&hgGenome_formatType=best%20guess&hgGenome_markerType=best%20guess")
@@ -116,8 +117,8 @@
class Rgenetics(Html):
"""class to use for rgenetics"""
"""Add metadata elements"""
-
MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="galaxy", readonly=True)
+
file_ext="html"
composite_type = 'auto_primary_file'
@@ -234,7 +235,128 @@
"""fake class to distinguish different species of Rgenetics data collections
"""
file_ext="snptest"
+
+class RexpBase( Html ):
+ """base class for BioC data structures in Galaxy
+ must be constructed with the pheno data in place since that
+ goes into the metadata for each instance"""
+ """Add metadata elements"""
+ MetadataElement( name="columns", default=0, desc="Number of columns", readonly=True, visible=False )
+ MetadataElement( name="column_names", default=[], desc="Column names", readonly=True,visible=True )
+ MetadataElement( name="base_name",
+ desc="base name for all transformed versions of this genetic dataset", readonly=True)
+ MetadataElement( name="pheno_path",
+ desc="Path to phenotype data for this experiment", readonly=True)
+ MetadataElement( name="pheno",
+ desc="Phenotype data for this experiment", readonly=True)
+
+ file_ext = None
+
+ def set_peek( self, dataset ):
+ """expects a .pheno file in the extra_files_dir - ugh
+ note that R is wierd and does not include the row.name in
+ the header. why?"""
+ p = file(dataset.metadata.pheno_path,'r').readlines()
+ head = p[0].strip().split('\t')
+ head.insert(0,'ChipFileName') # fix R write.table b0rken-ness
+ p[0] = '\t'.join(head)
+ p = '\n'.join(p)
+ dataset.peek = p
+ dataset.metadata.pheno = p
+ dataset.blurb = 'R loadable BioC expression object for the Rexpression Galaxy toolkit'
+
+ # stolen from Tabular
+ # class Tabular( data.Text ):
+ """Tab delimited data"""
+
+ """Add metadata elements"""
+ def init_meta( self, dataset, copy_from=None ):
+ if copy_from:
+ dataset.metadata = copy_from.metadata
+
+
+ #def set_readonly_meta( self, dataset, skip=0, **kwd ):
+ # """Resets the values of readonly metadata elements."""
+ # RexpBase.set_meta( self, dataset, skip=skip )
+
+ def set_readonly_meta( self, dataset, **kwd ):
+ """Resets the values of readonly metadata elements."""
+ RexpBase.set_meta( self, dataset )
+
+ #def set_meta( self, dataset, skip=0, **kwd ):
+ def set_meta( self, dataset, **kwd ):
+
+ """
+ NOTE we apply the tabular machinary to the phenodata extracted
+ from a BioC eSet or affybatch.
+
+ """
+ if not dataset.peek:
+ dataset.set_peek()
+ pk = dataset.peek # use the peek which is the pheno data insead of dataset (!)
+ if pk:
+ p = pk.split('\n')
+ h = p[0].strip().split('\t') # hope is header
+ h = [escape(x) for x in h]
+ dataset.metadata.column_names = h
+ dataset.metadata.columns = len(h)
+ else:
+ dataset.metadata.column_names = []
+ dataset.metadata.columns = 0
+
+ def make_html_table( self, dataset):
+ """Create HTML table, used for displaying peek"""
+ out = ['<table cellspacing="0" cellpadding="3">',]
+ try:
+ # Generate column header
+ pk = dataset.peek
+ p = pk.split('\n')
+ for i,row in enumerate(p):
+ lrow = row.strip().split('\t')
+ if i == 0:
+ orow = ['<th>%s</th>' % escape(x) for x in lrow]
+ orow.insert(0,'<tr>')
+ orow.append('</tr>')
+ else:
+ orow = ['<td>%s</td>' % escape(x) for x in lrow]
+ orow.insert(0,'<tr>')
+ orow.append('</tr>')
+ out.append(''.join(orow))
+ out.append( '</table>' )
+ out = "\n".join( out )
+ except Exception, exc:
+ out = "Can't create peek %s" % str( exc )
+ return out
+
+ def display_peek( self, dataset ):
+ """Returns formatted html of peek"""
+ if not dataset.peek:
+ dataset.set_peek()
+ return self.make_html_table( dataset )
+
+ def get_mime(self):
+ """Returns the mime type of the datatype"""
+ return 'application/gzip'
+
+ def sniff(self):
+ """ can we be bothered looking for the signature or loading via rpy?
+ """
+ return true
+
+class AffyBatch( RexpBase ):
+ """derived class for BioC data structures in Galaxy """
+ file_ext = "affybatch"
+
+
+class ESet( RexpBase ):
+ """derived class for BioC data structures in Galaxy """
+ file_ext = "eset"
+
+
+class MAList( RexpBase ):
+ """derived class for BioC data structures in Galaxy """
+ file_ext = "malist"
if __name__ == '__main__':
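A self-contained sketch of the peek-to-HTML-table idea used by RexpBase in this merge: the tab-delimited phenotype peek is rendered with a header row followed by data rows. The escape helper stands in for the cgi.escape import used in the diff, and the sample phenotype line is invented.

def escape(text):
    # Minimal HTML escaping, standing in for cgi.escape.
    return text.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')

def make_html_table(peek_text):
    # First tab-separated line becomes the header row; the rest become data rows.
    out = ['<table cellspacing="0" cellpadding="3">']
    for i, row in enumerate(peek_text.strip().split('\n')):
        tag = 'th' if i == 0 else 'td'
        cells = ''.join('<%s>%s</%s>' % (tag, escape(c), tag) for c in row.split('\t'))
        out.append('<tr>%s</tr>' % cells)
    out.append('</table>')
    return '\n'.join(out)

print(make_html_table('ChipFileName\tsex\tage\nchip1.cel\tF\t42'))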
details: http://www.bx.psu.edu/hg/galaxy/rev/17fd27c7f286
changeset: 2438:17fd27c7f286
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Jun 09 11:01:19 2009 -0400
description:
Refresh the history frame when the current history is deleted (fixes issue #40).
1 file(s) affected in this change:
lib/galaxy/web/controllers/history.py
diffs (27 lines):
diff -r e27e392806df -r 17fd27c7f286 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Jun 09 10:28:27 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Tue Jun 09 11:01:19 2009 -0400
@@ -104,12 +104,12 @@
if histories:
if operation == "switch":
status, message = self._list_switch( trans, histories )
- # Current history changed, refresh history frame
- trans.template_context['refresh_frames'] = ['history']
elif operation == "delete":
status, message = self._list_delete( trans, histories )
elif operation == "undelete":
status, message = self._list_undelete( trans, histories )
+ # Current history may have changed, refresh history frame
+ trans.template_context['refresh_frames'] = ['history']
trans.sa_session.flush()
# Render the list view
return self.list_grid( trans, status=status, message=message, **kwargs )
@@ -189,7 +189,7 @@
# Regardless of whether it was previously deleted, we make a new
# history active
trans.new_history()
- return trans.show_ok_message( "History deleted, a new history is active" )
+ return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
@web.expose
def rename_async( self, trans, id=None, new_name=None ):
history = model.History.get( id )
details: http://www.bx.psu.edu/hg/galaxy/rev/73d8b2acef0a
changeset: 2434:73d8b2acef0a
user: James Taylor <james(a)jamestaylor.org>
date: Mon Jun 08 16:47:01 2009 -0400
description:
Allow UnvalidatedValues to be used when generating output labels (fixes issue #59)
1 file(s) affected in this change:
lib/galaxy/tools/parameters/basic.py
diffs (38 lines):
diff -r 73a8b43f1d97 -r 73d8b2acef0a lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Mon Jun 08 12:49:26 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Mon Jun 08 16:47:01 2009 -0400
@@ -535,9 +535,10 @@
def from_html( self, value, trans=None, context={} ):
if self.need_late_validation( trans, context ):
if self.multiple:
- #While it is generally allowed that a select value can be '',
- #we do not allow this to be the case in a dynamically generated multiple select list being set in workflow building mode
- #we instead treat '' as 'No option Selected' (None)
+ # While it is generally allowed that a select value can be '',
+ # we do not allow this to be the case in a dynamically
+ # generated multiple select list being set in workflow building
+ # mode we instead treat '' as 'No option Selected' (None)
if value == '':
value = None
else:
@@ -565,9 +566,9 @@
if isinstance( value, list ):
if not(self.repeat):
assert self.multiple, "Multiple values provided but parameter is not expecting multiple values"
- return self.separator.join( value )
+ return self.separator.join( map( str, value ) )
else:
- return value
+ return str(value)
def value_to_basic( self, value, app ):
if isinstance( value, UnvalidatedValue ):
return { "__class__": "UnvalidatedValue", "value": value.value }
@@ -1331,6 +1332,8 @@
"""
def __init__( self, value ):
self.value = value
+ def __str__( self ):
+ return str( self.value )
class RuntimeValue( object ):
"""
details: http://www.bx.psu.edu/hg/galaxy/rev/3190f7d6b572
changeset: 2435:3190f7d6b572
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Mon Jun 08 16:52:55 2009 -0400
description:
Update the way the testing framework decides whether a refresh_on_change is required.
1 file(s) affected in this change:
test/base/twilltestcase.py
diffs (30 lines):
diff -r 73d8b2acef0a -r 3190f7d6b572 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Mon Jun 08 16:47:01 2009 -0400
+++ b/test/base/twilltestcase.py Mon Jun 08 16:52:55 2009 -0400
@@ -548,22 +548,11 @@
# Check for refresh_on_change attribute, submit a change if required
if 'refresh_on_change' in control.attrs.keys():
changed = False
- for elem in kwd[control.name]:
- # For DataToolParameter, control.value is the index of the DataToolParameter select list,
- # but elem is the filename. The following loop gets the filename of that index.
- param_text = ''
- for param in tc.show().split('<select'):
- param = ('<select' + param.split('select>')[0] + 'select>').replace('selected', 'selected="yes"')
- if param.find('on_chang') != -1 and param.find('name="%s"' % control.name) != -1:
- tree = ElementTree.fromstring(param)
- for option in tree.findall('option'):
- if option.get('value') in control.value:
- param_text = option.text.strip()
- break
- break
- if elem not in control.value and param_text.find(elem) == -1 :
+ item_labels = [ item.attrs[ 'label' ] for item in control.get_items() if item.selected ] #For DataToolParameter, control.value is the HDA id, but kwd contains the filename. This loop gets the filename/label for the selected values.
+ for value in kwd[ control.name ]:
+ if value not in control.value and True not in [ value in item_label for item_label in item_labels ]:
changed = True
- break
+ break
if changed:
# Clear Control and set to proper value
control.clear()
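A hedged sketch of the simplified refresh_on_change check introduced above: a refresh is only needed when a requested value matches neither the control's current values nor the labels of the already-selected items. Item and Control are hypothetical stand-ins for the twill/mechanize form objects used in twilltestcase.py.

class Item(object):
    def __init__(self, label, selected):
        self.attrs = {'label': label}
        self.selected = selected

class Control(object):
    def __init__(self, name, value, items):
        self.name = name
        self.value = value        # e.g. HDA ids for a DataToolParameter select list
        self.items = items
    def get_items(self):
        return self.items

def needs_refresh(control, kwd):
    # Labels of currently selected items; kwd holds the filenames requested by the test.
    item_labels = [item.attrs['label'] for item in control.get_items() if item.selected]
    for value in kwd[control.name]:
        if value not in control.value and True not in [value in label for label in item_labels]:
            return True
    return False

control = Control('input1', ['5'], [Item('1.bed (as bed)', True)])
assert needs_refresh(control, {'input1': ['1.bed']}) is False   # label already matches, no change
assert needs_refresh(control, {'input1': ['2.bed']}) is True    # selection must change, refresh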