galaxy-dev
June 2009
- 6 participants
- 50 discussions
details: http://www.bx.psu.edu/hg/galaxy/rev/6a992f466b80
changeset: 2440:6a992f466b80
user: Dan Blankenberg <dan@bx.psu.edu>
date: Tue Jun 09 12:08:35 2009 -0400
description:
Bug fix for non-AJAX upload.
1 file(s) affected in this change:
lib/galaxy/tools/actions/upload.py
diffs (12 lines):
diff -r 6d00e4ff7129 -r 6a992f466b80 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Tue Jun 09 12:05:46 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Tue Jun 09 12:08:35 2009 -0400
@@ -137,7 +137,7 @@
if precreated_dataset is not None:
data = precreated_dataset
else:
- data = trans.app.model.HistoryDatasetAssociation( history = trans.history, extension = ext, create_dataset = True )
+ data = trans.app.model.HistoryDatasetAssociation( history = trans.history, create_dataset = True )
trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
# See if we have an empty file
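
The one-line fix above is an ordering issue: at the point where the dataset is created, the name ext has not been assigned yet (the sniffing code that computes it runs further down in add_file), so a non-AJAX upload without a precreated dataset would die with a NameError. A minimal, self-contained sketch of the corrected flow; the class and sniffing function below are stand-ins for the Galaxy internals, not the real API:

class FakeDataset(object):
    def __init__(self, extension=None):
        self.extension = extension

def guess_ext(path):
    # stand-in for galaxy.datatypes.sniff.guess_ext
    return 'txt'

def add_file(path, precreated_dataset=None):
    # Create (or reuse) the dataset before the extension is known; passing
    # extension=ext here, as the removed line did, would raise NameError.
    if precreated_dataset is not None:
        data = precreated_dataset
    else:
        data = FakeDataset()
    ext = guess_ext(path)   # the extension only exists from this point on
    data.extension = ext    # so it is assigned afterwards
    return data

print(add_file('upload.dat').extension)  # prints: txt
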
details: http://www.bx.psu.edu/hg/galaxy/rev/6d00e4ff7129
changeset: 2439:6d00e4ff7129
user: Dan Blankenberg <dan@bx.psu.edu>
date: Tue Jun 09 12:05:46 2009 -0400
description:
Fix for handling of the error given by upload of html, empty, etc., files.
2 file(s) affected in this change:
lib/galaxy/tools/actions/upload.py
test/base/twilltestcase.py
diffs (125 lines):
diff -r 17fd27c7f286 -r 6d00e4ff7129 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Tue Jun 09 11:01:19 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Tue Jun 09 12:05:46 2009 -0400
@@ -126,10 +126,24 @@
return dict( output=data )
def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None ):
+ def dataset_no_data_error( data, message = 'there was an error uploading your file' ):
+ data.info = "No data: %s." % message
+ data.state = data.states.ERROR
+ if data.extension is None:
+ data.extension = 'data'
+ return data
data_type = None
+
+ if precreated_dataset is not None:
+ data = precreated_dataset
+ else:
+ data = trans.app.model.HistoryDatasetAssociation( history = trans.history, extension = ext, create_dataset = True )
+ trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
+
# See if we have an empty file
if not os.path.getsize( temp_name ) > 0:
- raise BadFileException( "you attempted to upload an empty file." )
+ return dataset_no_data_error( data, message = 'you attempted to upload an empty file' )
+ #raise BadFileException( "you attempted to upload an empty file." )
if is_multi_byte:
ext = sniff.guess_ext( temp_name, is_multi_byte=True )
else:
@@ -138,7 +152,8 @@
# we'll decompress on the fly.
is_gzipped, is_valid = self.check_gzip( temp_name )
if is_gzipped and not is_valid:
- raise BadFileException( "you attempted to upload an inappropriate file." )
+ return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
+ #raise BadFileException( "you attempted to upload an inappropriate file." )
elif is_gzipped and is_valid:
# We need to uncompress the temp_name file
CHUNK_SIZE = 2**20 # 1Mb
@@ -150,7 +165,8 @@
except IOError:
os.close( fd )
os.remove( uncompressed )
- raise BadFileException( 'problem decompressing gzipped data.' )
+ return dataset_no_data_error( data, message = 'problem decompressing gzipped data' )
+ #raise BadFileException( 'problem decompressing gzipped data.' )
if not chunk:
break
os.write( fd, chunk )
@@ -165,16 +181,20 @@
# See if we have a zip archive
is_zipped, is_valid, test_ext = self.check_zip( temp_name )
if is_zipped and not is_valid:
- raise BadFileException( "you attempted to upload an inappropriate file." )
+ return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
+ #raise BadFileException( "you attempted to upload an inappropriate file." )
elif is_zipped and is_valid:
# Currently, we force specific tools to handle this case. We also require the user
# to manually set the incoming file_type
if ( test_ext == 'ab1' or test_ext == 'scf' ) and file_type != 'binseq.zip':
- raise BadFileException( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'." )
+ return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'" )
+ #raise BadFileException( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'." )
elif test_ext == 'txt' and file_type != 'txtseq.zip':
- raise BadFileException( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'." )
+ return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'" )
+ #raise BadFileException( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'." )
if not ( file_type == 'binseq.zip' or file_type == 'txtseq.zip' ):
- raise BadFileException( "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files." )
+ return dataset_no_data_error( data, message = "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files" )
+ #raise BadFileException( "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files." )
data_type = 'zip'
ext = file_type
if not data_type:
@@ -183,16 +203,20 @@
if len( parts ) > 1:
ext = parts[1].strip().lower()
if not( ext == 'ab1' or ext == 'scf' ):
- raise BadFileException( "you attempted to upload an inappropriate file." )
+ return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
+ #raise BadFileException( "you attempted to upload an inappropriate file." )
if ext == 'ab1' and file_type != 'ab1':
- raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
+ return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files" )
+ #raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
elif ext == 'scf' and file_type != 'scf':
- raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
+ return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Scf' when uploading scf files" )
+ #raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
data_type = 'binary'
if not data_type:
# We must have a text file
if trans.app.datatypes_registry.get_datatype_by_extension( file_type ).composite_type != 'auto_primary_file' and self.check_html( temp_name ):
- raise BadFileException( "you attempted to upload an inappropriate file." )
+ return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
+ #raise BadFileException( "you attempted to upload an inappropriate file." )
if data_type != 'binary' and data_type != 'zip':
if space_to_tab:
self.line_count = sniff.convert_newlines_sep2tabs( temp_name )
@@ -205,12 +229,7 @@
data_type = ext
if info is None:
info = 'uploaded %s file' %data_type
- if precreated_dataset is not None:
- data = precreated_dataset
- data.extension = ext
- else:
- data = trans.app.model.HistoryDatasetAssociation( history = trans.history, extension = ext, create_dataset = True )
- trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
+ data.extension = ext
data.name = file_name
data.dbkey = dbkey
data.info = info
diff -r 17fd27c7f286 -r 6d00e4ff7129 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Jun 09 11:01:19 2009 -0400
+++ b/test/base/twilltestcase.py Tue Jun 09 12:05:46 2009 -0400
@@ -552,7 +552,7 @@
for value in kwd[ control.name ]:
if value not in control.value and True not in [ value in item_label for item_label in item_labels ]:
changed = True
- break
+ break
if changed:
# Clear Control and set to proper value
control.clear()
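
The pattern introduced in this changeset swaps the raised BadFileExceptions for a returned dataset in the error state, so a bad upload surfaces as a failed history item the user can see rather than an unhandled exception. A self-contained sketch of that pattern; FakeDataset and the 'error' state value are stand-ins for the Galaxy model, not the real API:

class FakeDataset(object):
    def __init__(self):
        self.info = None
        self.state = None
        self.extension = None

def dataset_no_data_error(data, message='there was an error uploading your file'):
    # Mark the dataset itself as failed instead of raising; the caller
    # still gets a dataset back and the UI can render the message.
    data.info = 'No data: %s.' % message
    data.state = 'error'
    if data.extension is None:
        data.extension = 'data'  # fall back to the generic datatype
    return data

data = dataset_no_data_error(FakeDataset(), 'you attempted to upload an empty file')
print(data.info)  # prints: No data: you attempted to upload an empty file.
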
details: http://www.bx.psu.edu/hg/galaxy/rev/c0aa8af62124
changeset: 2441:c0aa8af62124
user: Greg Von Kuster <greg@bx.psu.edu>
date: Tue Jun 09 15:03:04 2009 -0400
description:
Only refresh history if necessary ( better fix than my previous commit ), add functional tests to cover behavior when deleting the current history, and add better functional tests for sniffing data formats.
3 file(s) affected in this change:
lib/galaxy/web/controllers/history.py
test/functional/test_history_functions.py
test/functional/test_sniffing_and_metadata_settings.py
diffs (288 lines):
diff -r 6a992f466b80 -r c0aa8af62124 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Jun 09 12:08:35 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Tue Jun 09 15:03:04 2009 -0400
@@ -79,6 +79,7 @@
@web.require_login( "work with multiple histories" )
def list( self, trans, **kwargs ):
"""List all available histories"""
+ current_history = trans.history
status = message = None
if 'operation' in kwargs:
operation = kwargs['operation'].lower()
@@ -104,12 +105,14 @@
if histories:
if operation == "switch":
status, message = self._list_switch( trans, histories )
+ # Current history changed, refresh history frame
+ trans.template_context['refresh_frames'] = ['history']
elif operation == "delete":
status, message = self._list_delete( trans, histories )
+ if current_history in histories:
+ trans.template_context['refresh_frames'] = ['history']
elif operation == "undelete":
status, message = self._list_undelete( trans, histories )
- # Current history may have changed, refresh history frame
- trans.template_context['refresh_frames'] = ['history']
trans.sa_session.flush()
# Render the list view
return self.list_grid( trans, status=status, message=message, **kwargs )
diff -r 6a992f466b80 -r c0aa8af62124 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Tue Jun 09 12:08:35 2009 -0400
+++ b/test/functional/test_history_functions.py Tue Jun 09 15:03:04 2009 -0400
@@ -47,6 +47,8 @@
latest_history.refresh()
if not latest_history.deleted:
raise AssertionError, "Problem deleting history id %d" % latest_history.id
+ # Since we deleted the current history, make sure the history frame was refreshed
+ self.check_history_for_string( 'Your history is empty.' )
# We'll now test deleting a list of histories
# After deleting the current history, a new one should have been created
global history1
@@ -60,6 +62,8 @@
self.upload_file( '2.bed', dbkey='hg18' )
ids = '%s,%s' % ( str( history1.id ), str( history2.id ) )
self.delete_history( ids )
+ # Since we deleted the current history, make sure the history frame was refreshed
+ self.check_history_for_string( 'Your history is empty.' )
try:
self.view_stored_active_histories( check_str=history1.name )
raise AssertionError, "History %s is displayed in the active history list after it was deleted" % history1.name
diff -r 6a992f466b80 -r c0aa8af62124 test/functional/test_sniffing_and_metadata_settings.py
--- a/test/functional/test_sniffing_and_metadata_settings.py Tue Jun 09 12:08:35 2009 -0400
+++ b/test/functional/test_sniffing_and_metadata_settings.py Tue Jun 09 15:03:04 2009 -0400
@@ -3,7 +3,7 @@
from base.twilltestcase import TwillTestCase
class SniffingAndMetaDataSettings( TwillTestCase ):
- def test_00_axt_datatype( self ):
+ def test_000_axt_datatype( self ):
"""Testing correctly sniffing axt data type upon upload"""
self.logout()
self.login( email='test@bx.psu.edu' )
@@ -15,7 +15,12 @@
self.verify_dataset_correctness( '1.axt' )
self.check_history_for_string( '1.axt format: <span class="axt">axt</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.axt" value="\?" Change data type selected value="axt" selected="yes"' )
- def test_05_bed_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving axt hda from the database"
+ if not latest_hda.name == '1.axt' and not latest_hda.extension == 'axt':
+ raise AssertionError, "axt data type was not correctly sniffed."
+ def test_005_bed_datatype( self ):
"""Testing correctly sniffing bed data type upon upload"""
self.upload_file( '1.bed' )
self.verify_dataset_correctness( '1.bed' )
@@ -25,24 +30,64 @@
self.check_metadata_for_string( 'End column: <option value="3" selected> Strand column <option value="6" selected>' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">Genomic Intervals To BED <option value="gff">BED to GFF' )
self.check_metadata_for_string( 'Change data type selected value="bed" selected="yes"' )
- def test_10_customtrack_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving bed hda from the database"
+ if not latest_hda.name == '1.bed' and not latest_hda.extension == 'bed':
+ raise AssertionError, "bed data type was not correctly sniffed."
+ def test_010_blastxml_datatype( self ):
+ """Testing correctly sniffing blastxml data type upon upload"""
+ self.upload_file( 'megablast_xml_parser_test1.gz' )
+ self.check_history_for_string( 'NCBI Blast XML data format: <span class="blastxml">blastxml</span>' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving blastxml hda from the database"
+ if not latest_hda.name == 'megablast_xml_parser_test1' and not latest_hda.extension == 'blastxml':
+ raise AssertionError, "blastxml data type was not correctly sniffed."
+ def test_015_csfasta_datatype( self ):
+ """Testing correctly sniffing csfasta data type upon upload"""
+ self.upload_file( 'shrimp_cs_test1.csfasta' )
+ self.verify_dataset_correctness( 'shrimp_cs_test1.csfasta' )
+ self.check_history_for_string( '162.6 Kb, format: <span class="csfasta">csfasta</span>, <td>>2_14_26_F3,-1282216.0</td>' )
+ self.check_metadata_for_string( 'value="shrimp_cs_test1.csfasta" value="\?" Change data type value="csfasta" selected="yes"' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving csfasta hda from the database"
+ if not latest_hda.name == 'shrimp_cs_test1.csfasta' and not latest_hda.extension == 'csfasta':
+ raise AssertionError, "csfasta data type was not correctly sniffed."
+ def test_020_customtrack_datatype( self ):
"""Testing correctly sniffing customtrack data type upon upload"""
self.upload_file( '1.customtrack' )
self.verify_dataset_correctness( '1.customtrack' )
self.check_history_for_string( '1.customtrack format: <span class="customtrack">customtrack</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.customtrack" value="\?" Change data type selected value="customtrack" selected="yes"' )
- def test_15_fasta_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving customtrack hda from the database"
+ if not latest_hda.name == '1.customtrack' and not latest_hda.extension == 'customtrack':
+ raise AssertionError, "customtrack data type was not correctly sniffed."
+ def test_025_fasta_datatype( self ):
"""Testing correctly sniffing fasta data type upon upload"""
self.upload_file( '1.fasta' )
self.verify_dataset_correctness( '1.fasta' )
self.check_history_for_string( '1.fasta format: <span class="fasta">fasta</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.fasta" value="\?" Change data type selected value="fasta" selected="yes"' )
- def test_18_fastqsolexa_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving fasta hda from the database"
+ if not latest_hda.name == '1.fasta' and not latest_hda.extension == 'fasta':
+ raise AssertionError, "fasta data type was not correctly sniffed."
+ def test_030_fastqsolexa_datatype( self ):
"""Testing correctly sniffing fastqsolexa ( the Solexa variant ) data type upon upload"""
self.upload_file( '1.fastqsolexa' )
self.verify_dataset_correctness( '1.fastqsolexa' )
self.check_history_for_string( '1.fastqsolexa format: <span class="fastqsolexa">fastqsolexa</span>, database: \? Info: uploaded fastqsolexa file' )
- def test_20_gff_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving fastqsolexa hda from the database"
+ if not latest_hda.name == '1.fastqsolexa' and not latest_hda.extension == 'fastqsolexa':
+ raise AssertionError, "fastqsolexa data type was not correctly sniffed."
+ def test_035_gff_datatype( self ):
"""Testing correctly sniffing gff data type upon upload"""
self.upload_file( '5.gff' )
self.verify_dataset_correctness( '5.gff' )
@@ -50,7 +95,12 @@
self.check_metadata_for_string( 'value="5.gff" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">GFF to BED' )
self.check_metadata_for_string( 'Change data type selected value="gff" selected="yes"' )
- def test_25_gff3_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving gff hda from the database"
+ if not latest_hda.name == '5.gff' and not latest_hda.extension == 'gff':
+ raise AssertionError, "gff data type was not correctly sniffed."
+ def test_040_gff3_datatype( self ):
"""Testing correctly sniffing gff3 data type upon upload"""
self.upload_file( '5.gff3' )
self.verify_dataset_correctness( '5.gff3' )
@@ -58,7 +108,21 @@
self.check_metadata_for_string( 'value="5.gff3" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">GFF to BED' )
self.check_metadata_for_string( 'Change data type selected value="gff3" selected="yes"' )
- def test_30_interval_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving gff3 hda from the database"
+ if not latest_hda.name == '5.gff3' and not latest_hda.extension == 'gff3':
+ raise AssertionError, "gff3 data type was not correctly sniffed."
+ def test_045_html_datatype( self ):
+ """Testing correctly sniffing html data type upon upload"""
+ self.upload_file( 'html_file.txt' )
+ self.check_history_for_string( 'An error occurred running this job: No data: you attempted to upload an inappropriate file.' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving html hda from the database"
+ if not latest_hda.name == 'html_file.txt' and not latest_hda.extension == 'data':
+ raise AssertionError, "html data type was not correctly sniffed."
+ def test_050_interval_datatype( self ):
"""Testing correctly sniffing interval data type upon upload"""
self.upload_file( '1.interval' )
self.verify_dataset_correctness( '1.interval' )
@@ -68,14 +132,24 @@
self.check_metadata_for_string( 'End column: <option value="3" selected> Strand column <option value="6" selected>' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">Genomic Intervals To BED' )
self.check_metadata_for_string( 'Change data type selected value="interval" selected="yes"' )
- def test_35_lav_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving interval hda from the database"
+ if not latest_hda.name == '1.interval' and not latest_hda.extension == 'interval':
+ raise AssertionError, "interval data type was not correctly sniffed."
+ def test_055_lav_datatype( self ):
"""Testing correctly sniffing lav data type upon upload"""
self.upload_file( '1.lav' )
self.verify_dataset_correctness( '1.lav' )
self.check_history_for_string( '1.lav format: <span class="lav">lav</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.lav" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="lav" selected="yes"' )
- def test_40_maf_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving lav hda from the database"
+ if not latest_hda.name == '1.lav' and not latest_hda.extension == 'lav':
+ raise AssertionError, "lav data type was not correctly sniffed."
+ def test_060_maf_datatype( self ):
"""Testing correctly sniffing maf data type upon upload"""
self.upload_file( '3.maf' )
self.verify_dataset_correctness( '3.maf' )
@@ -83,40 +157,57 @@
self.check_metadata_for_string( 'value="3.maf" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="interval">MAF to Genomic Intervals <option value="fasta">MAF to Fasta' )
self.check_metadata_for_string( 'Change data type selected value="maf" selected="yes"' )
- def test_45_tabular_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving maf hda from the database"
+ if not latest_hda.name == '3.maf' and not latest_hda.extension == 'maf':
+ raise AssertionError, "maf data type was not correctly sniffed."
+ def test_065_qual454_datatype( self ):
+ """Testing correctly sniffing qual454 data type upon upload"""
+ self.upload_file( 'qualscores.qual454' )
+ self.verify_dataset_correctness( 'qualscores.qual454' )
+ self.check_history_for_string( '5.6 Kb, format: <span class="qual454">qual454</span>, database: \?' )
+ self.check_metadata_for_string( 'Change data type value="qual454" selected="yes">qual454' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving qual454 hda from the database"
+ if not latest_hda.name == 'qualscores.qual454' and not latest_hda.extension == 'qual454':
+ raise AssertionError, "qual454 data type was not correctly sniffed."
+ def test_070_qualsolid_datatype( self ):
+ """Testing correctly sniffing qualsolid data type upon upload"""
+ self.upload_file( 'qualscores.qualsolid' )
+ self.verify_dataset_correctness('qualscores.qualsolid' )
+ self.check_history_for_string('2.5 Kb, format: <span class="qualsolid">qualsolid</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'Change data type value="qualsolid" selected="yes">qualsolid' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving qualsolid hda from the database"
+ if not latest_hda.name == 'qualscores.qualsolid' and not latest_hda.extension == 'qualsolid':
+ raise AssertionError, "qualsolid data type was not correctly sniffed."
+ def test_075_tabular_datatype( self ):
"""Testing correctly sniffing tabular data type upon upload"""
self.upload_file( '1.tabular' )
self.verify_dataset_correctness( '1.tabular' )
self.check_history_for_string( '1.tabular format: <span class="tabular">tabular</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.tabular" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="tabular" selected="yes"' )
- def test_50_wig_datatype( self ):
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving tabular hda from the database"
+ if not latest_hda.name == '1.tabular' and not latest_hda.extension == 'tabular':
+ raise AssertionError, "tabular data type was not correctly sniffed."
+ def test_080_wig_datatype( self ):
"""Testing correctly sniffing wig data type upon upload"""
self.upload_file( '1.wig' )
self.verify_dataset_correctness( '1.wig' )
self.check_history_for_string( '1.wig format: <span class="wig">wig</span>, database: \? Info: uploaded file' )
self.check_metadata_for_string( 'value="1.wig" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="wig" selected="yes"' )
- def test_55_blastxml_datatype( self ):
- """Testing correctly sniffing blastxml data type upon upload"""
- self.upload_file( 'megablast_xml_parser_test1.gz' )
- self.check_history_for_string( 'NCBI Blast XML data' )
- self.check_history_for_string( 'format: <span class="blastxml">blastxml</span>' )
- """
- TODO: It is currently not even possible to set the following format on upload. They
- should be included in the File Format select list on the upload form if they are to
- be tested here...
- def test_60_qualsolid_datatype( self ):
- Testing correctly sniffing qualsolid data type upon upload
- self.upload_file( 'qualscores.qualsolid' )
- self.verify_dataset_correctness('qualscores.qualsolid' )
- self.check_history_for_string('2.5 Kb, format: <span class="fasta">fasta</span>, database: \? Info: uploaded file' )
- def test_65_qual454_datatype( self ):
- Testing correctly sniffing qual454 data type upon upload
- self.upload_file( 'qualscores.qual454' )
- self.verify_dataset_correctness( 'qualscores.qual454' )
- self.check_history_for_string( '5.6 Kb, format: <span class="qual454">qual454</span>, database: \? Info: uploaded qual454 file' )
- """
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving wig hda from the database"
+ if not latest_hda.name == '1.wig' and not latest_hda.extension == 'wig':
+ raise AssertionError, "wig data type was not correctly sniffed."
def test_9999_clean_up( self ):
self.delete_history( id=str( history1.id ) )
self.logout()
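
Every new assertion in this test module follows the same recipe: upload a file, then pull the most recently created HistoryDatasetAssociation back out of the database and check its name and sniffed extension. A sketch of that check factored into one helper, assuming the 2009-era galaxy.model / SQLAlchemy API the tests above already use; note that the committed checks combine the two comparisons with "and", so they only fail when both name and extension are wrong, while this sketch uses the stricter form:

import galaxy.model
from galaxy.model.orm import desc

def assert_latest_hda( name, extension ):
    latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
        .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
    assert latest_hda is not None, "Problem retrieving hda from the database"
    assert latest_hda.name == name, "%s was not the latest hda" % name
    assert latest_hda.extension == extension, "%s data type was not correctly sniffed." % extension

# e.g. after self.upload_file( '1.bed' ):
# assert_latest_hda( '1.bed', 'bed' )
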
details: http://www.bx.psu.edu/hg/galaxy/rev/e27e392806df
changeset: 2437:e27e392806df
user: Greg Von Kuster <greg@bx.psu.edu>
date: Tue Jun 09 10:28:27 2009 -0400
description:
Merge genetics.py
1 file(s) affected in this change:
lib/galaxy/datatypes/genetics.py
diffs (159 lines):
diff -r 2c6720a9130c -r e27e392806df lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Mon Jun 08 21:02:41 2009 -0400
+++ b/lib/galaxy/datatypes/genetics.py Tue Jun 09 10:28:27 2009 -0400
@@ -17,6 +17,7 @@
from galaxy import util
from cgi import escape
import urllib
+from galaxy.web import url_for
from galaxy.datatypes import metadata
from galaxy.datatypes.metadata import MetadataElement
#from galaxy.datatypes.data import Text
@@ -80,7 +81,7 @@
for site_name, site_url in util.get_ucsc_by_build(dataset.dbkey):
if site_name in app.config.ucsc_display_sites:
site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
- display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
+ display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s" % (base_url, url_for( controller='root' ), dataset.id, type))
sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
sl.append("&hgGenome_formatType=best%20guess&hgGenome_markerType=best%20guess")
@@ -116,8 +117,8 @@
class Rgenetics(Html):
"""class to use for rgenetics"""
"""Add metadata elements"""
-
MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="galaxy", readonly=True)
+
file_ext="html"
composite_type = 'auto_primary_file'
@@ -234,7 +235,128 @@
"""fake class to distinguish different species of Rgenetics data collections
"""
file_ext="snptest"
+
+class RexpBase( Html ):
+ """base class for BioC data structures in Galaxy
+ must be constructed with the pheno data in place since that
+ goes into the metadata for each instance"""
+ """Add metadata elements"""
+ MetadataElement( name="columns", default=0, desc="Number of columns", readonly=True, visible=False )
+ MetadataElement( name="column_names", default=[], desc="Column names", readonly=True,visible=True )
+ MetadataElement( name="base_name",
+ desc="base name for all transformed versions of this genetic dataset", readonly=True)
+ MetadataElement( name="pheno_path",
+ desc="Path to phenotype data for this experiment", readonly=True)
+ MetadataElement( name="pheno",
+ desc="Phenotype data for this experiment", readonly=True)
+
+ file_ext = None
+
+ def set_peek( self, dataset ):
+ """expects a .pheno file in the extra_files_dir - ugh
+ note that R is wierd and does not include the row.name in
+ the header. why?"""
+ p = file(dataset.metadata.pheno_path,'r').readlines()
+ head = p[0].strip().split('\t')
+ head.insert(0,'ChipFileName') # fix R write.table b0rken-ness
+ p[0] = '\t'.join(head)
+ p = '\n'.join(p)
+ dataset.peek = p
+ dataset.metadata.pheno = p
+ dataset.blurb = 'R loadable BioC expression object for the Rexpression Galaxy toolkit'
+
+ # stolen from Tabular
+ # class Tabular( data.Text ):
+ """Tab delimited data"""
+
+ """Add metadata elements"""
+ def init_meta( self, dataset, copy_from=None ):
+ if copy_from:
+ dataset.metadata = copy_from.metadata
+
+
+ #def set_readonly_meta( self, dataset, skip=0, **kwd ):
+ # """Resets the values of readonly metadata elements."""
+ # RexpBase.set_meta( self, dataset, skip=skip )
+
+ def set_readonly_meta( self, dataset, **kwd ):
+ """Resets the values of readonly metadata elements."""
+ RexpBase.set_meta( self, dataset )
+
+ #def set_meta( self, dataset, skip=0, **kwd ):
+ def set_meta( self, dataset, **kwd ):
+
+ """
+ NOTE we apply the tabular machinary to the phenodata extracted
+ from a BioC eSet or affybatch.
+
+ """
+ if not dataset.peek:
+ dataset.set_peek()
+ pk = dataset.peek # use the peek which is the pheno data insead of dataset (!)
+ if pk:
+ p = pk.split('\n')
+ h = p[0].strip().split('\t') # hope is header
+ h = [escape(x) for x in h]
+ dataset.metadata.column_names = h
+ dataset.metadata.columns = len(h)
+ else:
+ dataset.metadata.column_names = []
+ dataset.metadata.columns = 0
+
+ def make_html_table( self, dataset):
+ """Create HTML table, used for displaying peek"""
+ out = ['<table cellspacing="0" cellpadding="3">',]
+ try:
+ # Generate column header
+ pk = dataset.peek
+ p = pk.split('\n')
+ for i,row in enumerate(p):
+ lrow = row.strip().split('\t')
+ if i == 0:
+ orow = ['<th>%s</th>' % escape(x) for x in lrow]
+ orow.insert(0,'<tr>')
+ orow.append('</tr>')
+ else:
+ orow = ['<td>%s</td>' % escape(x) for x in lrow]
+ orow.insert(0,'<tr>')
+ orow.append('</tr>')
+ out.append(''.join(orow))
+ out.append( '</table>' )
+ out = "\n".join( out )
+ except Exception, exc:
+ out = "Can't create peek %s" % str( exc )
+ return out
+
+ def display_peek( self, dataset ):
+ """Returns formatted html of peek"""
+ if not dataset.peek:
+ dataset.set_peek()
+ return self.make_html_table( dataset )
+
+ def get_mime(self):
+ """Returns the mime type of the datatype"""
+ return 'application/gzip'
+
+ def sniff(self):
+ """ can we be bothered looking for the signature or loading via rpy?
+ """
+ return true
+
+class AffyBatch( RexpBase ):
+ """derived class for BioC data structures in Galaxy """
+ file_ext = "affybatch"
+
+
+class ESet( RexpBase ):
+ """derived class for BioC data structures in Galaxy """
+ file_ext = "eset"
+
+
+class MAList( RexpBase ):
+ """derived class for BioC data structures in Galaxy """
+ file_ext = "malist"
if __name__ == '__main__':
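
The new RexpBase class stores the phenotype table both as the dataset peek and in metadata, then renders it for display with make_html_table. A self-contained sketch of that rendering step, using the same cgi.escape the module imports; the sample peek data is invented:

from cgi import escape

def make_html_table(peek):
    # Render a tab-separated peek as an HTML table, first row as header.
    out = ['<table cellspacing="0" cellpadding="3">']
    for i, row in enumerate(peek.split('\n')):
        tag = 'th' if i == 0 else 'td'
        cells = ['<%s>%s</%s>' % (tag, escape(x), tag) for x in row.strip().split('\t')]
        out.append('<tr>%s</tr>' % ''.join(cells))
    out.append('</table>')
    return '\n'.join(out)

print(make_html_table('ChipFileName\tsex\tage\nchip1.cel\tM\t42'))
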
details: http://www.bx.psu.edu/hg/galaxy/rev/17fd27c7f286
changeset: 2438:17fd27c7f286
user: Greg Von Kuster <greg@bx.psu.edu>
date: Tue Jun 09 11:01:19 2009 -0400
description:
Refresh the history frame when the current history is deleted ( fixes issue # 40 ).
1 file(s) affected in this change:
lib/galaxy/web/controllers/history.py
diffs (27 lines):
diff -r e27e392806df -r 17fd27c7f286 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Jun 09 10:28:27 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Tue Jun 09 11:01:19 2009 -0400
@@ -104,12 +104,12 @@
if histories:
if operation == "switch":
status, message = self._list_switch( trans, histories )
- # Current history changed, refresh history frame
- trans.template_context['refresh_frames'] = ['history']
elif operation == "delete":
status, message = self._list_delete( trans, histories )
elif operation == "undelete":
status, message = self._list_undelete( trans, histories )
+ # Current history may have changed, refresh history frame
+ trans.template_context['refresh_frames'] = ['history']
trans.sa_session.flush()
# Render the list view
return self.list_grid( trans, status=status, message=message, **kwargs )
@@ -189,7 +189,7 @@
# Regardless of whether it was previously deleted, we make a new
# history active
trans.new_history()
- return trans.show_ok_message( "History deleted, a new history is active" )
+ return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
@web.expose
def rename_async( self, trans, id=None, new_name=None ):
history = model.History.get( id )
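
Both hunks work through the same mechanism: setting refresh_frames on the response tells the page template to reload the named frame, which is how the client-side history panel learns that the active history changed. A mocked sketch of that control flow; FakeTrans only illustrates the idea and is not the Galaxy transaction object:

class FakeTrans(object):
    def __init__(self):
        self.template_context = {}

def list_histories(trans, operation):
    # After switch/delete/undelete the current history may have changed,
    # so hint the template to refresh the history frame.
    if operation in ('switch', 'delete', 'undelete'):
        trans.template_context['refresh_frames'] = ['history']

trans = FakeTrans()
list_histories(trans, 'delete')
print(trans.template_context)  # prints: {'refresh_frames': ['history']}
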
details: http://www.bx.psu.edu/hg/galaxy/rev/2c6720a9130c
changeset: 2436:2c6720a9130c
user: Greg Von Kuster <greg@bx.psu.edu>
date: Mon Jun 08 21:02:41 2009 -0400
description:
Functional test fixes for sniffing data formats and setting metadata.
1 file(s) affected in this change:
test/functional/test_sniffing_and_metadata_settings.py
diffs (229 lines):
diff -r 3190f7d6b572 -r 2c6720a9130c test/functional/test_sniffing_and_metadata_settings.py
--- a/test/functional/test_sniffing_and_metadata_settings.py Mon Jun 08 16:52:55 2009 -0400
+++ b/test/functional/test_sniffing_and_metadata_settings.py Mon Jun 08 21:02:41 2009 -0400
@@ -1,131 +1,122 @@
+import galaxy.model
+from galaxy.model.orm import *
from base.twilltestcase import TwillTestCase
class SniffingAndMetaDataSettings( TwillTestCase ):
-
def test_00_axt_datatype( self ):
"""Testing correctly sniffing axt data type upon upload"""
- self.login()
- self.upload_file('1.axt')
- self.verify_dataset_correctness('1.axt')
- self.check_history_for_string('1.axt format: <span class="axt">axt</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string('value="1.axt" value="\?" Change data type selected value="axt" selected="yes"')
- self.delete_history_item( 1 )
+ self.logout()
+ self.login( email='test@bx.psu.edu' )
+ self.new_history( name='history1' )
+ global history1
+ history1 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history1 is not None, "Problem retrieving history1 from database"
+ self.upload_file( '1.axt' )
+ self.verify_dataset_correctness( '1.axt' )
+ self.check_history_for_string( '1.axt format: <span class="axt">axt</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.axt" value="\?" Change data type selected value="axt" selected="yes"' )
def test_05_bed_datatype( self ):
"""Testing correctly sniffing bed data type upon upload"""
- self.upload_file('1.bed')
- self.verify_dataset_correctness('1.bed')
- self.check_history_for_string('1.bed format: <span class="bed">bed</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string('value="1.bed" value="\?"')
- self.check_metadata_for_string('Chrom column: <option value="1" selected> Start column: <option value="2" selected>')
- self.check_metadata_for_string('End column: <option value="3" selected> Strand column <option value="6" selected>')
- self.check_metadata_for_string('Convert to new format <option value="bed">Genomic Intervals To BED <option value="gff">BED to GFF')
- self.check_metadata_for_string('Change data type selected value="bed" selected="yes"')
- self.delete_history_item( 1 )
+ self.upload_file( '1.bed' )
+ self.verify_dataset_correctness( '1.bed' )
+ self.check_history_for_string( '1.bed format: <span class="bed">bed</span>, database: \? Info: uploaded file')
+ self.check_metadata_for_string( 'value="1.bed" value="\?"' )
+ self.check_metadata_for_string( 'Chrom column: <option value="1" selected> Start column: <option value="2" selected>' )
+ self.check_metadata_for_string( 'End column: <option value="3" selected> Strand column <option value="6" selected>' )
+ self.check_metadata_for_string( 'Convert to new format <option value="bed">Genomic Intervals To BED <option value="gff">BED to GFF' )
+ self.check_metadata_for_string( 'Change data type selected value="bed" selected="yes"' )
def test_10_customtrack_datatype( self ):
"""Testing correctly sniffing customtrack data type upon upload"""
- self.upload_file('1.customtrack')
- self.verify_dataset_correctness('1.customtrack')
- self.check_history_for_string('1.customtrack format: <span class="customtrack">customtrack</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string('value="1.customtrack" value="\?" Change data type selected value="customtrack" selected="yes"')
- self.delete_history_item( 1 )
+ self.upload_file( '1.customtrack' )
+ self.verify_dataset_correctness( '1.customtrack' )
+ self.check_history_for_string( '1.customtrack format: <span class="customtrack">customtrack</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.customtrack" value="\?" Change data type selected value="customtrack" selected="yes"' )
def test_15_fasta_datatype( self ):
"""Testing correctly sniffing fasta data type upon upload"""
- self.upload_file('1.fasta')
- self.verify_dataset_correctness('1.fasta')
- self.check_history_for_string('1.fasta format: <span class="fasta">fasta</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string('value="1.fasta" value="\?" Change data type selected value="fasta" selected="yes"')
- self.delete_history_item( 1 )
+ self.upload_file( '1.fasta' )
+ self.verify_dataset_correctness( '1.fasta' )
+ self.check_history_for_string( '1.fasta format: <span class="fasta">fasta</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.fasta" value="\?" Change data type selected value="fasta" selected="yes"' )
def test_18_fastqsolexa_datatype( self ):
"""Testing correctly sniffing fastqsolexa ( the Solexa variant ) data type upon upload"""
- self.upload_file('1.fastqsolexa')
- self.verify_dataset_correctness('1.fastqsolexa')
- self.check_history_for_string('1.fastqsolexa format: <span class="fastqsolexa">fastqsolexa</span>, database: \? Info: uploaded fastqsolexa file')
- self.delete_history_item( 1 )
+ self.upload_file( '1.fastqsolexa' )
+ self.verify_dataset_correctness( '1.fastqsolexa' )
+ self.check_history_for_string( '1.fastqsolexa format: <span class="fastqsolexa">fastqsolexa</span>, database: \? Info: uploaded fastqsolexa file' )
def test_20_gff_datatype( self ):
"""Testing correctly sniffing gff data type upon upload"""
- self.upload_file('5.gff')
- self.verify_dataset_correctness('5.gff')
- self.check_history_for_string('5.gff format: <span class="gff">gff</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string('value="5.gff" value="\?"')
- self.check_metadata_for_string('Convert to new format <option value="bed">GFF to BED')
- self.check_metadata_for_string('Change data type selected value="gff" selected="yes"')
- self.delete_history_item( 1 )
+ self.upload_file( '5.gff' )
+ self.verify_dataset_correctness( '5.gff' )
+ self.check_history_for_string( '5.gff format: <span class="gff">gff</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="5.gff" value="\?"' )
+ self.check_metadata_for_string( 'Convert to new format <option value="bed">GFF to BED' )
+ self.check_metadata_for_string( 'Change data type selected value="gff" selected="yes"' )
def test_25_gff3_datatype( self ):
"""Testing correctly sniffing gff3 data type upon upload"""
- self.upload_file('5.gff3')
- self.verify_dataset_correctness('5.gff3')
- self.check_history_for_string('5.gff3 format: <span class="gff3">gff3</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string('value="5.gff3" value="\?"')
- self.check_metadata_for_string('Convert to new format <option value="bed">GFF to BED')
- self.check_metadata_for_string('Change data type selected value="gff3" selected="yes"')
- self.delete_history_item( 1 )
- def test_30_html_datatype( self ):
- """Testing correctly sniffing html data type upon upload"""
- self.upload_file('html_file.txt')
- self.check_history_for_string('No data: attempted to upload an empty or inappropriate file')
- self.delete_history_item( 1 )
- def test_35_interval_datatype( self ):
+ self.upload_file( '5.gff3' )
+ self.verify_dataset_correctness( '5.gff3' )
+ self.check_history_for_string( '5.gff3 format: <span class="gff3">gff3</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="5.gff3" value="\?"' )
+ self.check_metadata_for_string( 'Convert to new format <option value="bed">GFF to BED' )
+ self.check_metadata_for_string( 'Change data type selected value="gff3" selected="yes"' )
+ def test_30_interval_datatype( self ):
"""Testing correctly sniffing interval data type upon upload"""
- self.upload_file('1.interval')
- self.verify_dataset_correctness('1.interval')
- self.check_history_for_string('1.interval format: <span class="interval">interval</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string('value="1.interval" value="\?"')
- self.check_metadata_for_string('Chrom column: <option value="1" selected> Start column: <option value="2" selected>')
- self.check_metadata_for_string('End column: <option value="3" selected> Strand column <option value="6" selected>')
- self.check_metadata_for_string('Convert to new format <option value="bed">Genomic Intervals To BED')
- self.check_metadata_for_string('Change data type selected value="interval" selected="yes"')
- self.delete_history_item( 1 )
- def test_40_lav_datatype( self ):
+ self.upload_file( '1.interval' )
+ self.verify_dataset_correctness( '1.interval' )
+ self.check_history_for_string( '1.interval format: <span class="interval">interval</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.interval" value="\?"' )
+ self.check_metadata_for_string( 'Chrom column: <option value="1" selected> Start column: <option value="2" selected>' )
+ self.check_metadata_for_string( 'End column: <option value="3" selected> Strand column <option value="6" selected>' )
+ self.check_metadata_for_string( 'Convert to new format <option value="bed">Genomic Intervals To BED' )
+ self.check_metadata_for_string( 'Change data type selected value="interval" selected="yes"' )
+ def test_35_lav_datatype( self ):
"""Testing correctly sniffing lav data type upon upload"""
- self.upload_file('1.lav')
- self.verify_dataset_correctness('1.lav')
- self.check_history_for_string('1.lav format: <span class="lav">lav</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string('value="1.lav" value="\?"')
- self.check_metadata_for_string('Change data type selected value="lav" selected="yes"')
- self.delete_history_item( 1 )
- def test_45_maf_datatype( self ):
+ self.upload_file( '1.lav' )
+ self.verify_dataset_correctness( '1.lav' )
+ self.check_history_for_string( '1.lav format: <span class="lav">lav</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.lav" value="\?"' )
+ self.check_metadata_for_string( 'Change data type selected value="lav" selected="yes"' )
+ def test_40_maf_datatype( self ):
"""Testing correctly sniffing maf data type upon upload"""
- self.upload_file('3.maf')
- self.verify_dataset_correctness('3.maf')
- self.check_history_for_string('3.maf format: <span class="maf">maf</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string('value="3.maf" value="\?"')
- self.check_metadata_for_string('Convert to new format <option value="interval">MAF to Genomic Intervals <option value="fasta">MAF to Fasta')
- self.check_metadata_for_string('Change data type selected value="maf" selected="yes"')
- self.delete_history_item( 1 )
- def test_50_tabular_datatype( self ):
+ self.upload_file( '3.maf' )
+ self.verify_dataset_correctness( '3.maf' )
+ self.check_history_for_string( '3.maf format: <span class="maf">maf</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="3.maf" value="\?"' )
+ self.check_metadata_for_string( 'Convert to new format <option value="interval">MAF to Genomic Intervals <option value="fasta">MAF to Fasta' )
+ self.check_metadata_for_string( 'Change data type selected value="maf" selected="yes"' )
+ def test_45_tabular_datatype( self ):
"""Testing correctly sniffing tabular data type upon upload"""
- self.upload_file('1.tabular')
- self.verify_dataset_correctness('1.tabular')
- self.check_history_for_string('1.tabular format: <span class="tabular">tabular</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string('value="1.tabular" value="\?"')
- self.check_metadata_for_string('Change data type selected value="tabular" selected="yes"')
- self.delete_history_item( 1 )
- def test_55_wig_datatype( self ):
+ self.upload_file( '1.tabular' )
+ self.verify_dataset_correctness( '1.tabular' )
+ self.check_history_for_string( '1.tabular format: <span class="tabular">tabular</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.tabular" value="\?"' )
+ self.check_metadata_for_string( 'Change data type selected value="tabular" selected="yes"' )
+ def test_50_wig_datatype( self ):
"""Testing correctly sniffing wig data type upon upload"""
- self.upload_file('1.wig')
- self.verify_dataset_correctness('1.wig')
- self.check_history_for_string('1.wig format: <span class="wig">wig</span>, database: \? Info: uploaded file')
- self.check_metadata_for_string('value="1.wig" value="\?"')
- self.check_metadata_for_string('Change data type selected value="wig" selected="yes"')
- self.delete_history_item( 1 )
- def test_60_blastxml_datatype( self ):
+ self.upload_file( '1.wig' )
+ self.verify_dataset_correctness( '1.wig' )
+ self.check_history_for_string( '1.wig format: <span class="wig">wig</span>, database: \? Info: uploaded file' )
+ self.check_metadata_for_string( 'value="1.wig" value="\?"' )
+ self.check_metadata_for_string( 'Change data type selected value="wig" selected="yes"' )
+ def test_55_blastxml_datatype( self ):
"""Testing correctly sniffing blastxml data type upon upload"""
self.upload_file( 'megablast_xml_parser_test1.gz' )
self.check_history_for_string( 'NCBI Blast XML data' )
self.check_history_for_string( 'format: <span class="blastxml">blastxml</span>' )
- self.delete_history_item( 1 )
- def test_65_qualsolid_datatype( self ):
- """Testing correctly sniffing qualsolid data type upon upload"""
- self.upload_file( 'qualscores.qualsolid' )
- self.verify_dataset_correctness('qualscores.qualsolid')
- self.check_history_for_string('qualscores.qualsolid format: <span class="qualsolid">qualsolid</span>, database: \? Info: uploaded qualsolid file')
- self.delete_history_item( 1 )
- def test_70_qual454_datatype( self ):
- """Testing correctly sniffing qual454 data type upon upload"""
- self.upload_file( 'qualscores.qual454' )
- self.verify_dataset_correctness('qualscores.qual454')
- self.check_history_for_string('qualscores.qual454 format: <span class="qual454">qual454</span>, database: \? Info: uploaded qual454 file')
- self.delete_history_item( 1 )
+ """
+ TODO: It is currently not even possible to set the following format on upload. They
+ should be included in the File Format select list on the upload form if they are to
+ be tested here...
+ def test_60_qualsolid_datatype( self ):
+ Testing correctly sniffing qualsolid data type upon upload
+ self.upload_file( 'qualscores.qualsolid' )
+ self.verify_dataset_correctness('qualscores.qualsolid' )
+ self.check_history_for_string('2.5 Kb, format: <span class="fasta">fasta</span>, database: \? Info: uploaded file' )
+ def test_65_qual454_datatype( self ):
+ Testing correctly sniffing qual454 data type upon upload
+ self.upload_file( 'qualscores.qual454' )
+ self.verify_dataset_correctness( 'qualscores.qual454' )
+ self.check_history_for_string( '5.6 Kb, format: <span class="qual454">qual454</span>, database: \? Info: uploaded qual454 file' )
+ """
def test_9999_clean_up( self ):
- self.delete_history()
+ self.delete_history( id=str( history1.id ) )
self.logout()
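
This rewrite also introduces the fixture pattern the later revisions build on: the first test stashes the newly created history in a module-level global, and test_9999_clean_up deletes exactly that history by id instead of guessing at "the current history". A stripped-down sketch of that pattern; the dict and the delete_history stub stand in for the Galaxy model and the TwillTestCase helper:

history1 = None

def delete_history(id):
    # stand-in for TwillTestCase.delete_history
    print('deleting history %s' % id)

def test_00_create():
    global history1
    history1 = {'id': 7}  # stand-in for the newest History row from the database

def test_9999_clean_up():
    assert history1 is not None, "Problem retrieving history1 from database"
    delete_history(id=str(history1['id']))

test_00_create()
test_9999_clean_up()  # prints: deleting history 7
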
details: http://www.bx.psu.edu/hg/galaxy/rev/c0c50620b89d
changeset: 2432:c0c50620b89d
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Mon Jun 08 12:35:38 2009 -0400
description:
Functional test fixes for purging a user.
2 file(s) affected in this change:
test/functional/test_history_functions.py
test/functional/test_security_and_libraries.py
diffs (215 lines):
diff -r 1a24a530a3ae -r c0c50620b89d test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Mon Jun 08 10:31:59 2009 -0400
+++ b/test/functional/test_history_functions.py Mon Jun 08 12:35:38 2009 -0400
@@ -114,11 +114,10 @@
self.login( email=regular_user1.email )
check_str = '%s from %s' % ( history3.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
+ # Need to delete history3_copy1
+ self.delete_history( id=str( history3_copy1.id ) )
self.logout()
self.login( email=admin_user.email )
- # Need to delete history3_copy1
- history3_copy1.deleted = True
- history3_copy1.flush()
# Test sharing a history with an invalid user
email = 'jack@jill.com'
check_str = '%s is not a valid Galaxy user.' % email
@@ -138,48 +137,55 @@
self.share_history( id, email, check_str )
# We need to keep track of all shared histories so they can later be deleted
history3_copy_name = "%s from %s" % ( history3.name, admin_user.email )
- history3_copies = galaxy.model.History \
+ history3_to_use_for_regular_user2 = galaxy.model.History \
.filter( and_( galaxy.model.History.table.c.name==history3_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user2.id,
galaxy.model.History.table.c.deleted==False ) ) \
.order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .limit( 2 ) \
- .all()
- history3_copy2 = history3_copies[0]
- history3_copy3 = history3_copies[1]
- history4_copy_name = "%s from %s" % ( history4.name, admin_user.email )
- history4_copyies = galaxy.model.History \
- .filter( and_( galaxy.model.History.table.c.name==history4_copy_name,
+ .first()
+ assert history3_to_use_for_regular_user2 is not None, "Problem retrieving history3_to_use_for_regular_user2 from database"
+ history3_to_use_for_regular_user3 = galaxy.model.History \
+ .filter( and_( galaxy.model.History.table.c.name==history3_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user3.id,
galaxy.model.History.table.c.deleted==False ) ) \
.order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .limit( 2 ) \
- .all()
- history4_copy1 = history4_copyies[0]
- history4_copy2 = history4_copyies[1]
+ .first()
+ assert history3_to_use_for_regular_user3 is not None, "Problem retrieving history3_to_use_for_regular_user3 from database"
+ history4_copy_name = "%s from %s" % ( history4.name, admin_user.email )
+ history4_to_use_for_regular_user2 = galaxy.model.History \
+ .filter( and_( galaxy.model.History.table.c.name==history4_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user2.id,
+ galaxy.model.History.table.c.deleted==False ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ assert history4_to_use_for_regular_user2 is not None, "Problem retrieving history4_to_use_for_regular_user2 from database"
+ history4_to_use_for_regular_user3 = galaxy.model.History \
+ .filter( and_( galaxy.model.History.table.c.name==history4_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user3.id,
+ galaxy.model.History.table.c.deleted==False ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ assert history4_to_use_for_regular_user3 is not None, "Problem retrieving history4_to_use_for_regular_user3 from database"
self.logout()
self.login( email=regular_user2.email )
check_str = '%s from %s' % ( history3.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
check_str = '%s from %s' % ( history4.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
+ # Need to delete the copied histories, so later test runs are valid
+ self.delete_history( id=str( history3_to_use_for_regular_user2.id ) )
+ self.delete_history( id=str( history4_to_use_for_regular_user2.id ) )
self.logout()
self.login( email=regular_user3.email )
check_str = '%s from %s' % ( history3.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
check_str = '%s from %s' % ( history4.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
+ # Need to delete the copied histories, so later test runs are valid
+ self.delete_history( id=str( history3_to_use_for_regular_user3.id ) )
+ self.delete_history( id=str( history4_to_use_for_regular_user3.id ) )
self.logout()
self.login( email=admin_user.email )
- # Need to delete the copied histories, so later test runs are valid
- history3_copy2.deleted = True
- history3_copy2.flush()
- history3_copy3.deleted = True
- history3_copy3.flush()
- history4_copy1.deleted = True
- history4_copy1.flush()
- history4_copy1.deleted = True
- history4_copy1.flush()
- history4_copy2.deleted = True
- history4_copy2.flush()
def test_030_change_permissions_on_current_history( self ):
"""Testing changing permissions on the current history"""
global history5
@@ -222,8 +228,7 @@
self.visit_url( "%s/history/list" % self.url )
self.check_page_for_string( history5_copy1.name )
# Need to delete history5_copy1 on the history list page for regular_user1
- history5_copy1.deleted = True
- history5_copy1.flush()
+ self.delete_history( id=str( history5_copy1.id ) )
self.logout()
self.login( email=admin_user.email )
def test_040_sharing_history_by_making_new_sharing_role( self ):
@@ -278,8 +283,7 @@
# Make sure 2.bed is accessible since it is associated with a sharing role
self.display_history_item( str( hda_2_bed.id ), check_str='chr1' )
# Need to delete history5_copy2 on the history list page for regular_user1
- history5_copy2.deleted = True
- history5_copy2.flush()
+ self.delete_history( id=str( history5_copy2.id ) )
def test_045_sharing_private_history_with_multiple_users_by_changing_no_permissions( self ):
"""Testing sharing a restricted history with multiple users, making no permission changes"""
self.logout()
@@ -301,47 +305,44 @@
action_check_str=action_check_str )
# We need to keep track of all shared histories so they can later be deleted
history5_copy_name = "%s from %s" % ( history5.name, admin_user.email )
- history5_copies = galaxy.model.History \
+ history5_to_use_for_regular_user1 = galaxy.model.History \
.filter( and_( galaxy.model.History.table.c.name==history5_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user1.id,
galaxy.model.History.table.c.deleted==False ) ) \
.order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .limit( 2 ) \
- .all()
- history5_copy3 = history5_copies[0]
- assert history5_copy3 is not None, "Problem retrieving history5_copy3 from database"
- history5_copy4 = history5_copies[1]
- assert history5_copy4 is not None, "Problem retrieving history5_copy4 from database"
- # Make sure test1(a)bx.psu.edu received a copy of history5 with both datasets accessible
- if history5_copy3.user_id == regular_user1.id:
- history_to_use_for_regular_user_1 = history5_copy3
- history_to_use_for_regular_user_2 = history5_copy4
- elif history5_copy4.user_id == regular_user1.id:
- history_to_use_for_regular_user_1 = history5_copy4
- history_to_use_for_regular_user_2 = history5_copy3
- else:
- raise AssertionError, "Copies of history5 were not correctly associated with users"
+ .first()
+ assert history5_to_use_for_regular_user1 is not None, "Problem retrieving history5_to_use_for_regular_user1 from database"
+ history5_to_use_for_regular_user2 = galaxy.model.History \
+ .filter( and_( galaxy.model.History.table.c.name==history5_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user2.id,
+ galaxy.model.History.table.c.deleted==False ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ assert history5_to_use_for_regular_user2 is not None, "Problem retrieving history5_to_use_for_regular_user2 from database"
self.logout()
self.login( email=regular_user1.email )
check_str = '%s from %s' % ( history5.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
- self.switch_history( id=str( history_to_use_for_regular_user_1.id ), name=history_to_use_for_regular_user_1.name )
+ self.switch_history( id=str( history5_to_use_for_regular_user1.id ), name=history5_to_use_for_regular_user1.name )
self.check_history_for_string( '1.bed' )
self.check_history_for_string( '2.bed' )
+ # Need to delete the copied histories, so later test runs are valid
+ self.delete_history( id=str( history5_to_use_for_regular_user1.id ) )
self.logout()
# Make sure test2(a)bx.psu.edu received a copy of history5, with only 1.bed accessible
self.login( email=regular_user2.email )
self.view_stored_active_histories( check_str=check_str )
- self.switch_history( id=str( history_to_use_for_regular_user_2.id ), name=history_to_use_for_regular_user_2.name )
+ self.switch_history( id=str( history5_to_use_for_regular_user2.id ), name=history5_to_use_for_regular_user2.name )
self.check_history_for_string( '1.bed' )
self.check_history_for_string( '2.bed' )
# Get both new hdas from the db that were created for the shared history
hda_1_bed = galaxy.model.HistoryDatasetAssociation \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_copy4.id,
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_to_use_for_regular_user1.id,
galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) ) \
.first()
assert hda_1_bed is not None, "Problem retrieving hda_1_bed from database"
hda_2_bed = galaxy.model.HistoryDatasetAssociation \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_copy4.id,
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_to_use_for_regular_user1.id,
galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) ) \
.first()
assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
@@ -355,10 +356,7 @@
pass
self.check_history_for_string( 'You do not have permission to view this dataset' )
# Need to delete the copied histories, so later test runs are valid
- history5_copy3.deleted = True
- history5_copy3.flush()
- history5_copy4.deleted = True
- history5_copy4.flush()
+ self.delete_history( id=str( history5_to_use_for_regular_user2.id ) )
def test_050_sharing_private_history_by_choosing_to_not_share( self ):
"""Testing sharing a restricted history with multiple users by choosing not to share"""
self.logout()
diff -r 1a24a530a3ae -r c0c50620b89d test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Mon Jun 08 10:31:59 2009 -0400
+++ b/test/functional/test_security_and_libraries.py Mon Jun 08 12:35:38 2009 -0400
@@ -1668,16 +1668,8 @@
history.refresh()
if not history.deleted:
raise AssertionError( 'User %s has active history id %d after their account was marked as purged.' % ( regular_user3.email, hda.id ) )
- # Make sure HistoryDatasetAssociation deleted
- for hda in history.datasets:
- hda.refresh()
- if not hda.deleted:
- raise AssertionError( 'HistoryDatasetAssociation id %d was not deleted.' % hda.id )
- # Make sure Dataset deleted
- d = galaxy.model.Dataset.filter( galaxy.model.Dataset.table.c.id==hda.dataset_id ).first()
- d.refresh()
- if not d.deleted:
- raise AssertionError( 'Dataset id %d was not deleted.' % d.id )
+ # NOTE: Not all hdas / datasets will be deleted at the time a history is deleted - the cleanup_datasets.py script
+ # is responsible for this.
# Make sure UserGroupAssociations deleted
if regular_user3.groups:
raise AssertionError( 'User %s has active group id %d after their account was marked as purged.' % ( regular_user3.email, uga.id ) )
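The pattern driving these test changes is worth pulling out: the old code fetched the two newest copies of a history and guessed which belonged to which user, while the rewrite looks each copy up by owner and deletes it through the UI helper. A minimal sketch of the lookup, assuming the assign_mapper-style query API the tests already use (the helper name is hypothetical):

    from sqlalchemy import and_, desc
    import galaxy.model

    def get_history_copy_for_user( copy_name, user ):
        # Match the copy by name and owner, newest first, skipping deleted rows.
        history = galaxy.model.History \
            .filter( and_( galaxy.model.History.table.c.name==copy_name,
                           galaxy.model.History.table.c.user_id==user.id,
                           galaxy.model.History.table.c.deleted==False ) ) \
            .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
            .first()
        assert history is not None, "Problem retrieving history copy for %s" % user.email
        return history

Calling self.delete_history() instead of setting the deleted flag and flushing also keeps the tests exercising the same code path a real user would hit.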
details: http://www.bx.psu.edu/hg/galaxy/rev/73d8b2acef0a
changeset: 2434:73d8b2acef0a
user: James Taylor <james(a)jamestaylor.org>
date: Mon Jun 08 16:47:01 2009 -0400
description:
Allow UnvalidatedValues to be used when generating output labels (fixes issue #59)
1 file(s) affected in this change:
lib/galaxy/tools/parameters/basic.py
diffs (38 lines):
diff -r 73a8b43f1d97 -r 73d8b2acef0a lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Mon Jun 08 12:49:26 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Mon Jun 08 16:47:01 2009 -0400
@@ -535,9 +535,10 @@
def from_html( self, value, trans=None, context={} ):
if self.need_late_validation( trans, context ):
if self.multiple:
- #While it is generally allowed that a select value can be '',
- #we do not allow this to be the case in a dynamically generated multiple select list being set in workflow building mode
- #we instead treat '' as 'No option Selected' (None)
+ # While it is generally allowed that a select value can be '',
+ # we do not allow this to be the case in a dynamically
+ # generated multiple select list being set in workflow building
+            # mode; we instead treat '' as 'No option Selected' (None)
if value == '':
value = None
else:
@@ -565,9 +566,9 @@
if isinstance( value, list ):
if not(self.repeat):
assert self.multiple, "Multiple values provided but parameter is not expecting multiple values"
- return self.separator.join( value )
+ return self.separator.join( map( str, value ) )
else:
- return value
+ return str(value)
def value_to_basic( self, value, app ):
if isinstance( value, UnvalidatedValue ):
return { "__class__": "UnvalidatedValue", "value": value.value }
@@ -1331,6 +1332,8 @@
"""
def __init__( self, value ):
self.value = value
+ def __str__( self ):
+ return str( self.value )
class RuntimeValue( object ):
"""
details: http://www.bx.psu.edu/hg/galaxy/rev/3190f7d6b572
changeset: 2435:3190f7d6b572
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Mon Jun 08 16:52:55 2009 -0400
description:
Update the way the testing framework decides whether a refresh_on_change is required.
1 file(s) affected in this change:
test/base/twilltestcase.py
diffs (30 lines):
diff -r 73d8b2acef0a -r 3190f7d6b572 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Mon Jun 08 16:47:01 2009 -0400
+++ b/test/base/twilltestcase.py Mon Jun 08 16:52:55 2009 -0400
@@ -548,22 +548,11 @@
# Check for refresh_on_change attribute, submit a change if required
if 'refresh_on_change' in control.attrs.keys():
changed = False
- for elem in kwd[control.name]:
- # For DataToolParameter, control.value is the index of the DataToolParameter select list,
- # but elem is the filename. The following loop gets the filename of that index.
- param_text = ''
- for param in tc.show().split('<select'):
- param = ('<select' + param.split('select>')[0] + 'select>').replace('selected', 'selected="yes"')
- if param.find('on_chang') != -1 and param.find('name="%s"' % control.name) != -1:
- tree = ElementTree.fromstring(param)
- for option in tree.findall('option'):
- if option.get('value') in control.value:
- param_text = option.text.strip()
- break
- break
- if elem not in control.value and param_text.find(elem) == -1 :
+ item_labels = [ item.attrs[ 'label' ] for item in control.get_items() if item.selected ] #For DataToolParameter, control.value is the HDA id, but kwd contains the filename. This loop gets the filename/label for the selected values.
+ for value in kwd[ control.name ]:
+ if value not in control.value and True not in [ value in item_label for item_label in item_labels ]:
changed = True
- break
+ break
if changed:
# Clear Control and set to proper value
control.clear()
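The rewritten check reduces to one predicate: submit a refresh only when some requested value is neither a currently selected option value nor a substring of a selected option's label (the DataToolParameter case, where the control stores HDA ids while the test supplies filenames). A minimal sketch of that predicate under those assumptions:

    def needs_refresh( requested_values, selected_values, selected_labels ):
        # A refresh submit is only required when a requested value is neither
        # a selected option value nor contained in a selected option's label.
        for value in requested_values:
            if value not in selected_values and \
               True not in [ value in label for label in selected_labels ]:
                return True
        return False

    # The select stores HDA ids but its labels read like '1: 1.bed', so
    # asking for '1.bed' needs no refresh:
    assert not needs_refresh( [ '1.bed' ], [ '5' ], [ '1: 1.bed' ] )

This replaces the earlier approach of re-parsing tc.show() output with ElementTree just to recover the option labels.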
details: http://www.bx.psu.edu/hg/galaxy/rev/73a8b43f1d97
changeset: 2433:73a8b43f1d97
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Mon Jun 08 12:49:26 2009 -0400
description:
Allow the uploading of composite datatypes. A new grouping parameter, UploadDataset, is used to contain and process the file_data/url_paste/space_to_tab used to upload a file - multiple sets are displayed when uploading a composite datatype (similar to a repeat). Composite files can now be declared to the datatypes registry (required for proper uploading), but they are stored in the same manner as before (the extra_files_path) and should be backwards compatible. When uploading a composite datatype, only one dataset can be uploaded at a time. The ability to upload multiple datasets (url_paste (contents or urls) + file_data) for non-composite datatypes remains unchanged.
A more structured way of storing these files (rather than dumping in a directory) is worth considering.
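For readers skimming the 1641-line diff below: the heart of the new declaration API is small. A sketch of a composite datatype as the genetics.py hunks define one (class, method, and metadata names come straight from the diff; the standalone framing is illustrative):

    from galaxy.datatypes.genetics import Rgenetics

    class Lped( Rgenetics ):
        """Linkage pedigree data: a .ped/.map pair kept in extra_files_path."""
        file_ext = "lped"
        def __init__( self, **kwd ):
            Rgenetics.__init__( self, **kwd )
            # '%s' is substituted with the dataset's 'base_name' metadata value.
            self.add_composite_file( '%s.ped', description = 'Pedigree File', substitute_name_with_metadata = 'base_name' )
            self.add_composite_file( '%s.map', description = 'Map File', substitute_name_with_metadata = 'base_name' )

Composite files can also be attached to a datatype from the registry XML via <composite_file name="..." optional="..." mimetype="..."/> child elements, as the registry.py hunk shows.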
15 file(s) affected in this change:
lib/galaxy/datatypes/data.py
lib/galaxy/datatypes/genetics.py
lib/galaxy/datatypes/registry.py
lib/galaxy/tools/__init__.py
lib/galaxy/tools/actions/upload.py
lib/galaxy/tools/parameters/__init__.py
lib/galaxy/tools/parameters/basic.py
lib/galaxy/tools/parameters/grouping.py
lib/galaxy/tools/parameters/validation.py
lib/galaxy/util/__init__.py
lib/galaxy/web/controllers/tool_runner.py
lib/galaxy/web/form_builder.py
templates/base_panels.mako
templates/tool_form.mako
tools/data_source/upload.xml
diffs (1641 lines):
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Mon Jun 08 12:49:26 2009 -0400
@@ -1,5 +1,7 @@
import logging, os, sys, time, sets, tempfile
from galaxy import util
+from galaxy.util.odict import odict
+from galaxy.util.bunch import Bunch
from cgi import escape
import metadata
from metadata import MetadataElement #import directly to maintain ease of use in Datatype class definitions
@@ -48,10 +50,16 @@
"""If False, the peek is regenerated whenever a dataset of this type is copied"""
copy_safe_peek = True
+ #Composite datatypes
+ composite_type = None
+ composite_files = odict()
+ primary_file_name = 'index'
+
def __init__(self, **kwd):
"""Initialize the datatype"""
object.__init__(self, **kwd)
self.supported_display_apps = self.supported_display_apps.copy()
+ self.composite_files = self.composite_files.copy()
def write_from_stream(self, dataset, stream):
"""Writes data from a stream"""
fd = open(dataset.file_name, 'wb')
@@ -242,7 +250,49 @@
def after_edit( self, dataset ):
"""This function is called on the dataset after metadata is edited."""
dataset.clear_associated_files( metadata_safe = True )
-
+ def __new_composite_file( self, optional = False, mimetype = None, description = None, substitute_name_with_metadata = None, **kwds ):
+ kwds[ 'optional' ] = optional
+ kwds[ 'mimetype' ] = mimetype
+ kwds[ 'description' ] = description
+ kwds[ 'substitute_name_with_metadata' ] = substitute_name_with_metadata
+ return Bunch( **kwds )
+ def add_composite_file( self, name, **kwds ):
+ #self.composite_files = self.composite_files.copy()
+ self.composite_files[ name ] = self.__new_composite_file( **kwds )
+
+
+ def __substitute_composite_key( self, key, composite_file, dataset = None ):
+ if composite_file.substitute_name_with_metadata:
+ if dataset:
+ meta_value = str( dataset.metadata.get( composite_file.substitute_name_with_metadata ) )
+ else:
+ meta_value = self.spec[composite_file.substitute_name_with_metadata].default
+ return key % meta_value
+ return key
+ @property
+ def writable_files( self, dataset = None ):
+ files = odict()
+ if self.composite_type != 'auto_primary_file':
+ files[ self.primary_file_name ] = self.__new_composite_file()
+ for key, value in self.get_composite_files( dataset = dataset ).iteritems():
+ files[ key ] = value
+ return files
+ def get_composite_files( self, dataset = None ):
+ def substitute_composite_key( key, composite_file ):
+ if composite_file.substitute_name_with_metadata:
+ if dataset:
+ meta_value = str( dataset.metadata.get( composite_file.substitute_name_with_metadata ) )
+ else:
+ meta_value = self.metadata_spec[ composite_file.substitute_name_with_metadata ].default
+ return key % meta_value
+ return key
+ files = odict()
+ for key, value in self.composite_files.iteritems():
+ files[ substitute_composite_key( key, value ) ] = value
+ return files
+ def generate_auto_primary_file( self, dataset = None ):
+ raise Exception( "generate_auto_primary_file is not implemented for this datatype." )
+
@property
def has_resolution(self):
return False
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/datatypes/genetics.py Mon Jun 08 12:49:26 2009 -0400
@@ -117,15 +117,26 @@
"""class to use for rgenetics"""
"""Add metadata elements"""
- MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", readonly=True)
+ MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="galaxy", readonly=True)
file_ext="html"
-
+ composite_type = 'auto_primary_file'
+
def missing_meta( self, dataset ):
"""Checks for empty meta values"""
for key, value in dataset.metadata.items():
if not value:
return True
return False
+
+ def generate_primary_file( self, dataset = None ):
+ rval = ['<html><head><title>Files for Composite Dataset (%s)</title></head><p/>This composite dataset is composed of the following files:<p/><ul>' % ( self.file_ext ) ]
+ for composite_name, composite_file in self.get_composite_files( dataset = dataset ).iteritems():
+ opt_text = ''
+ if composite_file.optional:
+ opt_text = ' (optional)'
+ rval.append( '<li><a href="%s">%s</a>%s' % ( composite_name, composite_name, opt_text ) )
+ rval.append( '</ul></html>' )
+ return "\n".join( rval )
class SNPMatrix(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
@@ -148,6 +159,12 @@
"""fake class to distinguish different species of Rgenetics data collections
"""
file_ext="lped"
+
+ def __init__( self, **kwd ):
+ Rgenetics.__init__( self, **kwd )
+ self.add_composite_file( '%s.ped', description = 'Pedigree File', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.map', description = 'Map File', substitute_name_with_metadata = 'base_name' )
+
class Pphe(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
@@ -180,11 +197,33 @@
"""fake class to distinguish different species of Rgenetics data collections
"""
file_ext="pbed"
+
+ def __init__( self, **kwd ):
+ Rgenetics.__init__( self, **kwd )
+ self.add_composite_file( '%s.bim', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.bed', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.fam', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name' )
+
class Eigenstratgeno(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
"""
file_ext="eigenstratgeno"
+
+ def __init__( self, **kwd ):
+ Rgenetics.__init__( self, **kwd )
+ self.add_composite_file( '%s.eigenstratgeno', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.ind', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s_fo.eigenstratgeno', substitute_name_with_metadata = 'base_name', optional = 'True' )
+ self.add_composite_file( '%s_fo.ind', substitute_name_with_metadata = 'base_name', optional = 'True' )
+ self.add_composite_file( '%s_fo.map', substitute_name_with_metadata = 'base_name', optional = 'True' )
+ self.add_composite_file( '%s_oo.eigenstratgeno', substitute_name_with_metadata = 'base_name', optional = 'True' )
+ self.add_composite_file( '%s_oo.ind', substitute_name_with_metadata = 'base_name', optional = 'True' )
+ self.add_composite_file( '%s_oo.map', substitute_name_with_metadata = 'base_name', optional = 'True' )
+
+
class Eigenstratpca(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/datatypes/registry.py Mon Jun 08 12:49:26 2009 -0400
@@ -67,6 +67,15 @@
indexer_config = indexer.get( 'file', None )
if indexer_config:
self.indexers.append( (indexer_config, extension) )
+ for composite_file in elem.findall( 'composite_file' ):
+ # add composite files
+ name = composite_file.get( 'name', None )
+ if name is None:
+ log.warning( "You must provide a name for your composite_file (%s)." % composite_file )
+ optional = composite_file.get( 'optional', False )
+ mimetype = composite_file.get( 'mimetype', None )
+ self.datatypes_by_extension[extension].add_composite_file( name, optional=optional, mimetype=mimetype )
+
except Exception, e:
self.log.warning( 'Error loading datatype "%s", problem: %s' % ( extension, str( e ) ) )
# Load datatype sniffers from the config
@@ -294,3 +303,7 @@
ret_data = None
return ( convert_ext, ret_data )
return ( None, None )
+
+ def get_composite_extensions( self ):
+ return [ ext for ( ext, d_type ) in self.datatypes_by_extension.iteritems() if d_type.composite_type is not None ]
+
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Mon Jun 08 12:49:26 2009 -0400
@@ -591,7 +591,7 @@
group = Repeat()
group.name = elem.get( "name" )
group.title = elem.get( "title" )
- group.inputs = self.parse_input_elem( elem, enctypes, context )
+ group.inputs = self.parse_input_elem( elem, enctypes, context )
rval[group.name] = group
elif elem.tag == "conditional":
group = Conditional()
@@ -609,6 +609,16 @@
case.inputs = self.parse_input_elem( case_elem, enctypes, context )
group.cases.append( case )
rval[group.name] = group
+ elif elem.tag == "upload_dataset":
+ group = UploadDataset()
+ group.name = elem.get( "name" )
+ group.title = elem.get( "title" )
+ group.file_type_name = elem.get( 'file_type_name', group.file_type_name )
+ group.default_file_type = elem.get( 'default_file_type', group.default_file_type )
+ rval[ group.file_type_name ].refresh_on_change = True
+ rval[ group.file_type_name ].refresh_on_change_values = self.app.datatypes_registry.get_composite_extensions()
+ group.inputs = self.parse_input_elem( elem, enctypes, context )
+ rval[ group.name ] = group
elif elem.tag == "param":
param = self.parse_param_elem( elem, enctypes, context )
rval[param.name] = param
@@ -951,6 +961,56 @@
group_state['__current_case__'] = current_case
# Store the value of the test element
group_state[ input.test_param.name ] = value
+ elif isinstance( input, UploadDataset ):
+ group_state = state[input.name]
+ group_errors = []
+ group_old_errors = old_errors.get( input.name, None )
+ any_group_errors = False
+ d_type = input.get_datatype( trans, context )
+ writable_files = d_type.writable_files
+ #remove extra files
+ while len( group_state ) > len( writable_files ):
+ del group_state[-1]
+ if group_old_errors:
+ del group_old_errors[-1]
+ # Update state
+ max_index = -1
+ for i, rep_state in enumerate( group_state ):
+ rep_index = rep_state['__index__']
+ max_index = max( max_index, rep_index )
+ rep_prefix = "%s_%d|" % ( key, rep_index )
+ if group_old_errors:
+ rep_old_errors = group_old_errors[i]
+ else:
+ rep_old_errors = {}
+ rep_errors = self.update_state( trans,
+ input.inputs,
+ rep_state,
+ incoming,
+ prefix=rep_prefix,
+ context=context,
+ update_only=update_only,
+ old_errors=rep_old_errors,
+ changed_dependencies=changed_dependencies,
+ item_callback=item_callback )
+ if rep_errors:
+ any_group_errors = True
+ group_errors.append( rep_errors )
+ else:
+ group_errors.append( {} )
+ #add new fileupload as needed
+ offset = 1
+ while len( writable_files ) > len( group_state ):
+ new_state = {}
+ new_state['__index__'] = max_index + offset
+ offset += 1
+ self.fill_in_new_state( trans, input.inputs, new_state, context )
+ group_state.append( new_state )
+ if any_group_errors:
+ group_errors.append( {} )
+ # Were there *any* errors for any repetition?
+ if any_group_errors:
+ errors[input.name] = group_errors
else:
if key not in incoming \
and "__force_update__" + key not in incoming \
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Mon Jun 08 12:49:26 2009 -0400
@@ -19,19 +19,15 @@
except:
log.exception( 'failure removing temporary file: %s' % filename )
def execute( self, tool, trans, incoming={}, set_output_hid = True ):
- data_file = incoming['file_data']
- file_type = incoming['file_type']
- dbkey = incoming['dbkey']
- url_paste = incoming['url_paste']
- is_multi_byte = False
- space_to_tab = False
- if 'space_to_tab' in incoming:
- if incoming['space_to_tab'] not in ["None", None]:
- space_to_tab = True
+ dataset_upload_inputs = []
+ for input_name, input in tool.inputs.iteritems():
+ if input.type == "upload_dataset":
+ dataset_upload_inputs.append( input )
+ assert dataset_upload_inputs, Exception( "No dataset upload groups were found." )
# Get any precreated datasets (when using asynchronous uploads)
async_datasets = []
self.precreated_datasets = []
- if incoming['async_datasets'] not in ["None", "", None]:
+ if incoming.get( 'async_datasets', None ) not in ["None", "", None]:
async_datasets = incoming['async_datasets'].split(',')
for id in async_datasets:
try:
@@ -45,8 +41,39 @@
log.error( 'Got a precreated dataset (%s) but it does not belong to current user (%s)' % ( data.id, trans.user.id ) )
else:
self.precreated_datasets.append( data )
- temp_name = ""
data_list = []
+ for dataset_upload_input in dataset_upload_inputs:
+ uploaded_datasets = dataset_upload_input.get_uploaded_datasets( trans, incoming )
+ for uploaded_dataset in uploaded_datasets:
+ precreated_dataset = self.get_precreated_dataset( uploaded_dataset.precreated_name )
+ dataset = self.add_file( trans, uploaded_dataset.primary_file, uploaded_dataset.name, uploaded_dataset.file_type, uploaded_dataset.is_multi_byte, uploaded_dataset.dbkey, space_to_tab = uploaded_dataset.space_to_tab, info = uploaded_dataset.info, precreated_dataset = precreated_dataset )
+ if uploaded_dataset.composite_files:
+ os.mkdir( dataset.extra_files_path ) #make extra files path
+ for name, value in uploaded_dataset.composite_files.iteritems():
+ #what about binary files here, need to skip converting newlines
+ if value is None and not dataset.datatype.writable_files[ name ].optional:
+ dataset.info = "A required composite data file was not provided (%s)" % name
+ dataset.state = dataset.states.ERROR
+ break
+ elif value is not None:
+ if value.space_to_tab:
+ sniff.convert_newlines_sep2tabs( value.filename )
+ else:
+ sniff.convert_newlines( value.filename )
+ shutil.move( value.filename, os.path.join( dataset.extra_files_path, name ) )
+ data_list.append( dataset )
+ #clean up extra temp names
+ uploaded_dataset.clean_up_temp_files()
+
+ #cleanup unclaimed precreated datasets:
+ for data in self.precreated_datasets:
+ log.info( 'Cleaned up unclaimed precreated dataset (%s).' % ( data.id ) )
+ data.state = data.states.ERROR
+ data.info = 'No file contents were available.'
+
+ if data_list:
+ trans.app.model.flush()
+
# Create the job object
job = trans.app.model.Job()
job.session_id = trans.get_galaxy_session().id
@@ -56,104 +83,14 @@
# For backward compatibility, some tools may not have versions yet.
job.tool_version = tool.version
except:
- job.tool_version = "1.0.0"
+ job.tool_version = "1.0.1"
job.state = trans.app.model.Job.states.UPLOAD
job.flush()
log.info( 'tool %s created job id %d' % ( tool.id, job.id ) )
trans.log_event( 'created job id %d' % job.id, tool_id=tool.id )
- if 'local_filename' in dir( data_file ):
- # Use the existing file
- file_name = data_file.filename
- file_name = file_name.split( '\\' )[-1]
- file_name = file_name.split( '/' )[-1]
- precreated_dataset = self.get_precreated_dataset( file_name )
- try:
- data_list.append( self.add_file( trans, data_file.local_filename, file_name, file_type, is_multi_byte, dbkey, space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) )
- except Exception, e:
- log.exception( 'exception in add_file using datafile.local_filename %s: %s' % ( data_file.local_filename, str( e ) ) )
- self.remove_tempfile( data_file.local_filename )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- elif 'filename' in dir( data_file ):
- file_name = data_file.filename
- file_name = file_name.split( '\\' )[-1]
- file_name = file_name.split( '/' )[-1]
- precreated_dataset = self.get_precreated_dataset( file_name )
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( data_file.file, prefix='upload' )
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using file %s: %s' % ( data_file.filename, str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- try:
- data_list.append( self.add_file( trans, temp_name, file_name, file_type, is_multi_byte, dbkey, space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) )
- except Exception, e:
- log.exception( 'exception in add_file using file temp_name %s: %s' % ( str( temp_name ), str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- if url_paste not in [ None, "" ]:
- if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ):
- # If we were sent a DATA_URL from an external application in a post, NAME and INFO
- # values should be in the request
- if 'NAME' in incoming and incoming[ 'NAME' ] not in [ "None", None ]:
- NAME = incoming[ 'NAME' ]
- else:
- NAME = ''
- if 'INFO' in incoming and incoming[ 'INFO' ] not in [ "None", None ]:
- INFO = incoming[ 'INFO' ]
- else:
- INFO = "uploaded url"
- url_paste = url_paste.replace( '\r', '' ).split( '\n' )
- name_set_from_line = False #if we are setting the name from the line, it needs to be the line that creates that dataset
- for line in url_paste:
- line = line.strip()
- if line:
- if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ):
- continue # non-url line, ignore
- if not NAME or name_set_from_line:
- NAME = line
- name_set_from_line = True
- precreated_dataset = self.get_precreated_dataset( NAME )
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( line ), prefix='url_paste' )
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using url_paste %s: %s' % ( url_paste, str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- try:
- data_list.append( self.add_file( trans, temp_name, NAME, file_type, is_multi_byte, dbkey, info="uploaded url", space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) )
- except Exception, e:
- log.exception( 'exception in add_file using url_paste temp_name %s: %s' % ( str( temp_name ), str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- else:
- precreated_dataset = self.get_precreated_dataset( 'Pasted Entry' )
- is_valid = False
- for line in url_paste:
- line = line.rstrip( '\r\n' )
- if line:
- is_valid = True
- break
- if is_valid:
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( url_paste ), prefix='strio_url_paste' )
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using StringIO.StringIO( url_paste ) %s: %s' % ( url_paste, str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- try:
- data_list.append( self.add_file( trans, temp_name, 'Pasted Entry', file_type, is_multi_byte, dbkey, info="pasted entry", space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) )
- except Exception, e:
- log.exception( 'exception in add_file using StringIO.StringIO( url_paste ) temp_name %s: %s' % ( str( temp_name ), str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- else:
- return self.upload_empty( trans, job, "No data error:", "you pasted no data.", precreated_dataset=precreated_dataset )
- if self.empty:
- return self.upload_empty( trans, job, "Empty file error:", "you attempted to upload an empty file." )
- elif len( data_list ) < 1:
- return self.upload_empty( trans, job, "No data error:", "either you pasted no data, the url you specified is invalid, or you have not specified a file." )
+
#if we could make a 'real' job here, then metadata could be set before job.finish() is called
- hda = data_list[0] #only our first hda is being added as input for the job, why?
+ hda = data_list[0] #only our first hda is being added as output for the job, why?
job.state = trans.app.model.Job.states.OK
file_size_str = datatypes.data.nice_size( hda.dataset.file_size )
job.info = "%s, size: %s" % ( hda.info, file_size_str )
@@ -162,7 +99,7 @@
log.info( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ) )
trans.log_event( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ), tool_id=tool.id )
return dict( output=hda )
-
+
def upload_empty(self, trans, job, err_code, err_msg, precreated_dataset = None):
if precreated_dataset is not None:
data = precreated_dataset
@@ -188,7 +125,7 @@
trans.log_event( 'job id %d ended with errors, err_msg: %s' % ( job.id, err_msg ), tool_id=job.tool_id )
return dict( output=data )
- def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None ):
+ def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None ):
data_type = None
# See if we have an empty file
if not os.path.getsize( temp_name ) > 0:
@@ -254,7 +191,7 @@
data_type = 'binary'
if not data_type:
# We must have a text file
- if self.check_html( temp_name ):
+ if trans.app.datatypes_registry.get_datatype_by_extension( file_type ).composite_type != 'auto_primary_file' and self.check_html( temp_name ):
raise BadFileException( "you attempted to upload an inappropriate file." )
if data_type != 'binary' and data_type != 'zip':
if space_to_tab:
@@ -404,7 +341,7 @@
return self.precreated_datasets.pop( names.index( name ) )
else:
return None
-
+
class BadFileException( Exception ):
pass
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/__init__.py
--- a/lib/galaxy/tools/parameters/__init__.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/parameters/__init__.py Mon Jun 08 12:49:26 2009 -0400
@@ -20,7 +20,7 @@
be nice to unify all the places that recursively visit inputs.
"""
for input in inputs.itervalues():
- if isinstance( input, Repeat ):
+ if isinstance( input, Repeat ) or isinstance( input, UploadDataset ):
for i, d in enumerate( input_values[ input.name ] ):
index = d['__index__']
new_name_prefix = name_prefix + "%s_%d|" % ( input.name, index )
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Mon Jun 08 12:49:26 2009 -0400
@@ -23,6 +23,7 @@
def __init__( self, tool, param, context=None ):
self.tool = tool
self.refresh_on_change = False
+ self.refresh_on_change_values = []
self.name = param.get("name")
self.type = param.get("type")
self.label = util.xml_text(param, "label")
@@ -301,7 +302,7 @@
self.name = elem.get( 'name' )
self.ajax = str_bool( elem.get( 'ajax-upload' ) )
def get_html_field( self, trans=None, value=None, other_values={} ):
- return form_builder.FileField( self.name, self.ajax )
+ return form_builder.FileField( self.name, ajax = self.ajax, value = value )
def from_html( self, value, trans=None, other_values={} ):
# Middleware or proxies may encode files in special ways (TODO: this
# should be pluggable)
@@ -325,10 +326,11 @@
"""
return "multipart/form-data"
def to_string( self, value, app ):
- if value is None:
+ if value in [ None, '' ]:
return None
- else:
- raise Exception( "FileToolParameter cannot be persisted" )
+ elif isinstance( value, unicode ) or isinstance( value, str ):
+ return value
+ raise Exception( "FileToolParameter cannot be persisted" )
def to_python( self, value, app ):
if value is None:
return None
@@ -401,13 +403,13 @@
>>> print p.name
blah
>>> print p.get_html()
- <select name="blah">
+ <select name="blah" last_selected_value="y">
<option value="x">I am X</option>
<option value="y" selected>I am Y</option>
<option value="z">I am Z</option>
</select>
>>> print p.get_html( value="z" )
- <select name="blah">
+ <select name="blah" last_selected_value="z">
<option value="x">I am X</option>
<option value="y">I am Y</option>
<option value="z" selected>I am Z</option>
@@ -426,13 +428,13 @@
>>> print p.name
blah
>>> print p.get_html()
- <select name="blah" multiple>
+ <select name="blah" multiple last_selected_value="z">
<option value="x">I am X</option>
<option value="y" selected>I am Y</option>
<option value="z" selected>I am Z</option>
</select>
>>> print p.get_html( value=["x","y"])
- <select name="blah" multiple>
+ <select name="blah" multiple last_selected_value="y">
<option value="x" selected>I am X</option>
<option value="y" selected>I am Y</option>
<option value="z">I am Z</option>
@@ -520,7 +522,7 @@
return form_builder.TextField( self.name, value=(value or "") )
if value is not None:
if not isinstance( value, list ): value = [ value ]
- field = form_builder.SelectField( self.name, self.multiple, self.display, self.refresh_on_change )
+ field = form_builder.SelectField( self.name, self.multiple, self.display, self.refresh_on_change, refresh_on_change_values = self.refresh_on_change_values )
options = self.get_options( trans, context )
for text, optval, selected in options:
if isinstance( optval, UnvalidatedValue ):
@@ -676,7 +678,7 @@
>>> # hg17 should be selected by default
>>> print p.get_html( trans ) # doctest: +ELLIPSIS
- <select name="blah">
+ <select name="blah" last_selected_value="hg17">
<option value="?">unspecified (?)</option>
...
<option value="hg18">Human Mar. 2006 (hg18)</option>
@@ -687,7 +689,7 @@
>>> # If the user selected something else already, that should be used
>>> # instead
>>> print p.get_html( trans, value='hg18' ) # doctest: +ELLIPSIS
- <select name="blah">
+ <select name="blah" last_selected_value="hg18">
<option value="?">unspecified (?)</option>
...
<option value="hg18" selected>Human Mar. 2006 (hg18)</option>
@@ -942,7 +944,7 @@
return form_builder.TextArea( self.name, value=value )
else:
return form_builder.TextField( self.name, value=(value or "") )
- return form_builder.DrillDownField( self.name, self.multiple, self.display, self.refresh_on_change, self.get_options( trans, value, other_values ), value )
+ return form_builder.DrillDownField( self.name, self.multiple, self.display, self.refresh_on_change, self.get_options( trans, value, other_values ), value, refresh_on_change_values = self.refresh_on_change_values )
def from_html( self, value, trans=None, other_values={} ):
if self.need_late_validation( trans, other_values ):
@@ -1108,7 +1110,7 @@
if value is not None:
if type( value ) != list:
value = [ value ]
- field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change )
+ field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change, refresh_on_change_values = self.refresh_on_change_values )
# CRUCIAL: the dataset_collector function needs to be local to DataToolParameter.get_html_field()
def dataset_collector( hdas, parent_hid ):
for i, hda in enumerate( hdas ):
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/parameters/grouping.py Mon Jun 08 12:49:26 2009 -0400
@@ -4,6 +4,14 @@
from basic import ToolParameter
from galaxy.util.expressions import ExpressionContext
+
+import logging
+log = logging.getLogger( __name__ )
+
+import StringIO, os, urllib
+from galaxy.datatypes import sniff
+from galaxy.util.bunch import Bunch
+from galaxy.util.odict import odict
class Group( object ):
def __init__( self ):
@@ -74,7 +82,322 @@
input.visit_inputs( new_prefix, d[input.name], callback )
def get_initial_value( self, trans, context ):
return []
+
+class UploadDataset( Group ):
+ type = "upload_dataset"
+ def __init__( self ):
+ Group.__init__( self )
+ self.title = None
+ self.inputs = None
+ self.file_type_name = 'file_type'
+ self.default_file_type = 'txt'
+ self.file_type_to_ext = { 'auto':self.default_file_type }
+ def get_file_type( self, context ):
+ return context.get( self.file_type_name, self.default_file_type )
+ def get_datatype_ext( self, trans, context ):
+ ext = self.get_file_type( context )
+ if ext in self.file_type_to_ext:
+ ext = self.file_type_to_ext[ext] #when using autodetect, we will use composite info from 'text', i.e. only the main file
+ return ext
+ def get_datatype( self, trans, context ):
+ ext = self.get_datatype_ext( trans, context )
+ return trans.app.datatypes_registry.get_datatype_by_extension( ext )
+ @property
+ def title_plural( self ):
+ if self.title.endswith( "s" ):
+ return self.title
+ else:
+ return self.title + "s"
+ def group_title( self, context ):
+ return "%s (%s)" % ( self.title, context.get( self.file_type_name, self.default_file_type ) )
+ def title_by_index( self, trans, index, context ):
+ d_type = self.get_datatype( trans, context )
+ for i, ( composite_name, composite_file ) in enumerate( d_type.writable_files.iteritems() ):
+ if i == index:
+ rval = composite_name
+ if composite_file.description:
+ rval = "%s (%s)" % ( rval, composite_file.description )
+ if composite_file.optional:
+ rval = "%s [optional]" % rval
+ return rval
+ return None
+ def value_to_basic( self, value, app ):
+ rval = []
+ for d in value:
+ rval_dict = {}
+            # Propagate __index__
+ if '__index__' in d:
+ rval_dict['__index__'] = d['__index__']
+ for input in self.inputs.itervalues():
+ rval_dict[ input.name ] = input.value_to_basic( d[input.name], app )
+ rval.append( rval_dict )
+ return rval
+ def value_from_basic( self, value, app, ignore_errors=False ):
+ rval = []
+ for i, d in enumerate( value ):
+ rval_dict = {}
+ # If the special __index__ key is not set, create it (for backward
+ # compatibility)
+ rval_dict['__index__'] = d.get( '__index__', i )
+ # Restore child inputs
+ for input in self.inputs.itervalues():
+ if ignore_errors and input.name not in d: #this wasn't tested
+ rval_dict[ input.name ] = input.get_initial_value( None, d )
+ else:
+ rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors )
+ rval.append( rval_dict )
+ return rval
+ def visit_inputs( self, prefix, value, callback ):
+ for i, d in enumerate( value ):
+ for input in self.inputs.itervalues():
+ new_prefix = prefix + "%s_%d|" % ( self.name, i )
+ if isinstance( input, ToolParameter ):
+ callback( new_prefix, input, d[input.name], parent = d )
+ else:
+ input.visit_inputs( new_prefix, d[input.name], callback )
+ def get_initial_value( self, trans, context ):
+ d_type = self.get_datatype( trans, context )
+ rval = []
+ for i, ( composite_name, composite_file ) in enumerate( d_type.writable_files.iteritems() ):
+ rval_dict = {}
+ rval_dict['__index__'] = i # create __index__
+ for input in self.inputs.itervalues():
+ rval_dict[ input.name ] = input.get_initial_value( trans, context ) #input.value_to_basic( d[input.name], app )
+ rval.append( rval_dict )
+ return rval
+ def get_uploaded_datasets( self, trans, context, override_name = None, override_info = None ):
+ def get_data_file_filename( data_file, is_multi_byte = False, override_name = None, override_info = None ):
+ dataset_name = override_name
+ dataset_info = override_info
+ def get_file_name( file_name ):
+ file_name = file_name.split( '\\' )[-1]
+ file_name = file_name.split( '/' )[-1]
+ return file_name
+ if 'local_filename' in dir( data_file ):
+ # Use the existing file
+ return data_file.local_filename, get_file_name( data_file.filename ), is_multi_byte
+ elif 'filename' in dir( data_file ):
+ #create a new tempfile
+ try:
+ temp_name, is_multi_byte = sniff.stream_to_file( data_file.file, prefix='upload' )
+ precreated_name = get_file_name( data_file.filename )
+ if not dataset_name:
+ dataset_name = precreated_name
+ if not dataset_info:
+ dataset_info = 'uploaded file'
+ return temp_name, get_file_name( data_file.filename ), is_multi_byte, dataset_name, dataset_info
+ except Exception, e:
+ log.exception( 'exception in sniff.stream_to_file using file %s: %s' % ( data_file.filename, str( e ) ) )
+ self.remove_temp_file( temp_name )
+ return None, None, is_multi_byte, None, None
+ def filenames_from_url_paste( url_paste, group_incoming, override_name = None, override_info = None ):
+ filenames = []
+ if url_paste not in [ None, "" ]:
+ if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ):
+ url_paste = url_paste.replace( '\r', '' ).split( '\n' )
+ for line in url_paste:
+ line = line.strip()
+ if line:
+ if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ):
+ continue # non-url line, ignore
+ precreated_name = line
+ dataset_name = override_name
+ if not dataset_name:
+ dataset_name = line
+ dataset_info = override_info
+ if not dataset_info:
+ dataset_info = 'uploaded url'
+ try:
+ temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( line ), prefix='url_paste' )
+ except Exception, e:
+ temp_name = None
+ precreated_name = str( e )
+ log.exception( 'exception in sniff.stream_to_file using url_paste %s: %s' % ( url_paste, str( e ) ) )
+ try:
+ self.remove_temp_file( temp_name )
+ except:
+ pass
+ yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
+ #yield ( None, str( e ), False, dataset_name, dataset_info )
+ else:
+ dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here
+ if override_name:
+ dataset_name = override_name
+ if override_info:
+ dataset_info = override_info
+ is_valid = False
+                    for line in url_paste: #Trim off empty lines from beginning
+ line = line.rstrip( '\r\n' )
+ if line:
+ is_valid = True
+ break
+ if is_valid:
+ try:
+ temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( url_paste ), prefix='strio_url_paste' )
+ except Exception, e:
+ log.exception( 'exception in sniff.stream_to_file using StringIO.StringIO( url_paste ) %s: %s' % ( url_paste, str( e ) ) )
+ temp_name = None
+ precreated_name = str( e )
+ try:
+ self.remove_temp_file( temp_name )
+ except:
+ pass
+ yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
+ #yield ( None, str( e ), False, dataset_name, dataset_info )
+ def get_one_filename( context ):
+ data_file = context['file_data']
+ url_paste = context['url_paste']
+ name = context.get( 'NAME', None )
+ info = context.get( 'INFO', None )
+ warnings = []
+ is_multi_byte = False
+ space_to_tab = False
+ if context.get( 'space_to_tab', None ) not in ["None", None]:
+ space_to_tab = True
+ temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
+ if temp_name:
+ if url_paste.strip():
+ warnings.append( "All file contents specified in the paste box were ignored." )
+ else: #we need to use url_paste
+ #file_names = filenames_from_url_paste( url_paste, context, override_name = name, override_info = info )
+ for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):#file_names:
+ if temp_name:
+ break
+ ###this check will cause an additional file to be retrieved and created...so lets not do that
+ #try: #check to see if additional paste contents were available
+ # file_names.next()
+ # warnings.append( "Additional file contents were specified in the paste box, but ignored." )
+ #except StopIteration:
+ # pass
+ return temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings
+
+ def get_filenames( context ):
+ rval = []
+ data_file = context['file_data']
+ url_paste = context['url_paste']
+ name = context.get( 'NAME', None )
+ info = context.get( 'INFO', None )
+ warnings = []
+ is_multi_byte = False
+ space_to_tab = False
+ if context.get( 'space_to_tab', None ) not in ["None", None]:
+ space_to_tab = True
+ temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
+ if temp_name:
+ rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
+ for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):
+ if temp_name:
+ rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
+ return rval
+ class UploadedDataset( Bunch ):
+ def __init__( self, **kwd ):
+ Bunch.__init__( self, **kwd )
+ self.primary_file = None
+ self.composite_files = odict()
+ self.dbkey = None
+ self.warnings = []
+
+ self._temp_filenames = [] #store all created filenames here, delete on cleanup
+ def register_temp_file( self, filename ):
+ if isinstance( filename, list ):
+ self._temp_filenames.extend( filename )
+ else:
+ self._temp_filenames.append( filename )
+ def remove_temp_file( self, filename ):
+ try:
+ os.unlink( filename )
+ except Exception, e:
+ pass
+ #log.warning( str( e ) )
+ def clean_up_temp_files( self ):
+ for filename in self._temp_filenames:
+ self.remove_temp_file( filename )
+
+ file_type = self.get_file_type( context )
+ d_type = self.get_datatype( trans, context )
+ dbkey = context.get( 'dbkey', None )
+ writable_files = d_type.writable_files
+ writable_files_offset = 0
+ groups_incoming = [ None for filename in writable_files ]
+ for group_incoming in context.get( self.name, [] ):
+ i = int( group_incoming['__index__'] )
+ groups_incoming[ i ] = group_incoming
+
+ if d_type.composite_type is not None:
+ #handle uploading of composite datatypes
+ #Only one Dataset can be created
+
+ dataset = UploadedDataset()
+ dataset.file_type = file_type
+ dataset.datatype = d_type
+ dataset.dbkey = dbkey
+
+ temp_name = None
+ precreated_name = None
+ is_multi_byte = False
+ space_to_tab = False
+ warnings = []
+
+ dataset_name = None
+ dataset_info = None
+ if dataset.datatype.composite_type == 'auto_primary_file':
+ #replace sniff here with just creating an empty file
+ temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file() ), prefix='upload_auto_primary_file' )
+ precreated_name = dataset_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
+ else:
+ temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( groups_incoming[ 0 ] )
+ writable_files_offset = 1
+ if temp_name is None:#remove this before finish, this should create an empty dataset
+ raise Exception( 'No primary dataset file was available for composite upload' )
+ dataset.primary_file = temp_name
+ dataset.is_multi_byte = is_multi_byte
+ dataset.space_to_tab = space_to_tab
+ dataset.precreated_name = precreated_name
+ dataset.name = dataset_name
+ dataset.info = dataset_info
+ dataset.warnings.extend( warnings )
+ dataset.register_temp_file( temp_name )
+
+ keys = writable_files.keys()
+ for i, group_incoming in enumerate( groups_incoming[ writable_files_offset : ] ):
+ key = keys[ i + writable_files_offset ]
+ if group_incoming is None and not writable_files[ key ].optional:
+ dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
+ dataset.composite_files[ key ] = None
+ else:
+ temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( group_incoming )
+ if temp_name:
+ dataset.composite_files[ key ] = Bunch( filename = temp_name, precreated_name = precreated_name, is_multi_byte = is_multi_byte, space_to_tab = space_to_tab, warnings = warnings, info = dataset_info, name = dataset_name )
+ dataset.register_temp_file( temp_name )
+ else:
+ dataset.composite_files[ key ] = None
+ if not writable_files[ key ].optional:
+ dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
+ return [ dataset ]
+ else:
+ rval = []
+ for temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, in get_filenames( context[ self.name ][0] ):
+ dataset = UploadedDataset()
+ dataset.file_type = file_type
+ dataset.datatype = d_type
+ dataset.dbkey = dbkey
+ dataset.primary_file = temp_name
+ dataset.is_multi_byte = is_multi_byte
+ dataset.space_to_tab = space_to_tab
+ dataset.name = dataset_name
+ dataset.info = dataset_info
+ dataset.precreated_name = precreated_name
+ dataset.register_temp_file( temp_name )
+ rval.append( dataset )
+ return rval
+ def remove_temp_file( self, filename ):
+ try:
+ os.unlink( filename )
+ except Exception, e:
+ log.warning( str( e ) )
+
+
class Conditional( Group ):
type = "conditional"
def __init__( self ):
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/validation.py
--- a/lib/galaxy/tools/parameters/validation.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/parameters/validation.py Mon Jun 08 12:49:26 2009 -0400
@@ -72,14 +72,18 @@
"""
@classmethod
def from_element( cls, param, elem ):
- return cls( elem.get( 'message' ), elem.text )
- def __init__( self, message, expression ):
+ return cls( elem.get( 'message' ), elem.text, elem.get( 'substitute_value_in_message' ) )
+ def __init__( self, message, expression, substitute_value_in_message ):
self.message = message
+ self.substitute_value_in_message = substitute_value_in_message
# Save compiled expression, code objects are thread safe (right?)
- self.expression = compile( expression, '<string>', 'eval' )
+ self.expression = compile( expression, '<string>', 'eval' )
def validate( self, value, history=None ):
if not( eval( self.expression, dict( value=value ) ) ):
- raise ValueError( self.message )
+ message = self.message
+ if self.substitute_value_in_message:
+ message = message % value
+ raise ValueError( message )
class InRangeValidator( Validator ):
"""
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/util/__init__.py Mon Jun 08 12:49:26 2009 -0400
@@ -146,6 +146,7 @@
elif isinstance( value, list ):
return map(sanitize_text, value)
else:
+ print value
raise Exception, 'Unknown parameter type (%s)' % ( type( value ) )
class Params:
@@ -222,7 +223,7 @@
pass
if not value and not new_value:
new_value = tool.param_trans_dict[ key ][1]
- if key not in self.NEVER_SANITIZE and sanitize:
+ if sanitize and not ( key in self.NEVER_SANITIZE or True in [ key.endswith( "|%s" % nonsanitize_parameter ) for nonsanitize_parameter in self.NEVER_SANITIZE ] ): #sanitize check both ungrouped and grouped parameters by name
self.__dict__[ new_key ] = sanitize_param( new_value )
else:
self.__dict__[ new_key ] = new_value
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Mon Jun 08 12:49:26 2009 -0400
@@ -3,6 +3,8 @@
"""
from galaxy.web.base.controller import *
+from galaxy.util.bunch import Bunch
+from galaxy.tools import DefaultToolState
import logging
log = logging.getLogger( __name__ )
@@ -75,32 +77,51 @@
tool = self.get_toolbox().tools_by_id.get( tool_id, None )
if not tool:
return False # bad tool_id
- params = util.Params( kwd, sanitize=tool.options.sanitize, tool=tool )
+ #params = util.Params( kwd, sanitize=tool.options.sanitize, tool=tool )
+ if "tool_state" in kwd:
+ encoded_state = util.string_to_object( kwd["tool_state"] )
+ tool_state = DefaultToolState()
+ tool_state.decode( encoded_state, tool, trans.app )
+ else:
+ tool_state = tool.new_state( trans )
+ errors = tool.update_state( trans, tool.inputs, tool_state.inputs, kwd, update_only = True )
datasets = []
- if params.file_data not in [ None, "" ]:
- name = params.file_data
- if name.count('/'):
- name = name.rsplit('/',1)[1]
- if name.count('\\'):
- name = name.rsplit('\\',1)[1]
- datasets.append( create_dataset( name, trans.history ) )
- if params.url_paste not in [ None, "" ]:
- url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
- url = False
- for line in url_paste:
- line = line.rstrip( '\r\n' ).strip()
- if not line:
- continue
- elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ):
- url = True
- datasets.append( create_dataset( line, trans.history ) )
- else:
- if url:
- continue # non-url when we've already processed some urls
- else:
- # pasted data
- datasets.append( create_dataset( 'Pasted Entry', trans.history ) )
- break
+ dataset_upload_inputs = []
+ for input_name, input in tool.inputs.iteritems():
+ if input.type == "upload_dataset":
+ dataset_upload_inputs.append( input )
+ assert dataset_upload_inputs, Exception( "No dataset upload groups were found." )
+ for dataset_upload_input in dataset_upload_inputs:
+ d_type = dataset_upload_input.get_datatype( trans, kwd )
+
+ if d_type.composite_type is not None:
+ datasets.append( create_dataset( 'Uploaded Composite Dataset (%s)' % dataset_upload_input.get_datatype_ext( trans, kwd ), trans.history ) )
+ else:
+ params = Bunch( ** tool_state.inputs[dataset_upload_input.name][0] )
+ if params.file_data not in [ None, "" ]:
+ name = params.file_data
+ if name.count('/'):
+ name = name.rsplit('/',1)[1]
+ if name.count('\\'):
+ name = name.rsplit('\\',1)[1]
+ datasets.append( create_dataset( name, trans.history ) )
+ if params.url_paste not in [ None, "" ]:
+ url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
+ url = False
+ for line in url_paste:
+ line = line.rstrip( '\r\n' ).strip()
+ if not line:
+ continue
+ elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ):
+ url = True
+ datasets.append( create_dataset( line, trans.history ) )
+ else:
+ if url:
+ continue # non-url when we've already processed some urls
+ else:
+ # pasted data
+ datasets.append( create_dataset( 'Pasted Entry', trans.history ) )
+ break
if datasets:
trans.model.flush()
return [ d.id for d in datasets ]
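
In the tool_runner.py hunk above, upload_async_create stops reading raw request parameters and instead rebuilds the tool's state (decoding it from "tool_state" when the form supplies one), then pulls each upload_dataset group out of tool_state.inputs and wraps it in a Bunch so the existing params.file_data / params.url_paste logic applies per group. The filename handling strips any client-side path, since some browsers submit the full path. A standalone sketch of that piece:

# Sketch of the dataset-name logic above: keep only the basename of an
# uploaded filename, whether the client sent '/' or '\' separators.
def dataset_name_from_upload( file_data ):
    name = file_data
    if name.count( '/' ):
        name = name.rsplit( '/', 1 )[1]
    if name.count( '\\' ):
        name = name.rsplit( '\\', 1 )[1]
    return name

assert dataset_name_from_upload( '/home/user/reads.fastq' ) == 'reads.fastq'
assert dataset_name_from_upload( 'C:\\docs\\reads.fastq' ) == 'reads.fastq'
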
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/web/form_builder.py Mon Jun 08 12:49:26 2009 -0400
@@ -78,17 +78,21 @@
>>> print FileField( "foo" ).get_html()
<input type="file" name="foo">
- >>> print FileField( "foo", True ).get_html()
+ >>> print FileField( "foo", ajax = True ).get_html()
<input type="file" name="foo" galaxy-ajax-upload="true">
"""
- def __init__( self, name, ajax=False ):
+ def __init__( self, name, value = None, ajax=False ):
self.name = name
self.ajax = ajax
+ self.value = value
def get_html( self, prefix="" ):
+ value_text = ""
+ if self.value:
+ value_text = ' value="%s"' % self.value
+ ajax_text = ""
if self.ajax:
- return '<input type="file" name="%s%s" galaxy-ajax-upload="true">' % ( prefix, self.name )
- else:
- return '<input type="file" name="%s%s">' % ( prefix, self.name )
+ ajax_text = ' galaxy-ajax-upload="true"'
+ return '<input type="file" name="%s%s"%s%s>' % ( prefix, self.name, ajax_text, value_text )
class HiddenField(BaseField):
"""
@@ -120,7 +124,7 @@
>>> t.add_option( "automatic", 3 )
>>> t.add_option( "bazooty", 4, selected=True )
>>> print t.get_html()
- <select name="bar">
+ <select name="bar" last_selected_value="4">
<option value="3">automatic</option>
<option value="4" selected>bazooty</option>
</select>
@@ -140,7 +144,7 @@
<div><input type="checkbox" name="bar" value="3">automatic</div>
<div><input type="checkbox" name="bar" value="4" checked>bazooty</div>
"""
- def __init__( self, name, multiple=None, display=None, refresh_on_change=False ):
+ def __init__( self, name, multiple=None, display=None, refresh_on_change = False, refresh_on_change_values = [] ):
self.name = name
self.multiple = multiple or False
self.options = list()
@@ -152,8 +156,11 @@
raise Exception, "Unknown display type: %s" % display
self.display = display
self.refresh_on_change = refresh_on_change
+ self.refresh_on_change_values = refresh_on_change_values
if self.refresh_on_change:
self.refresh_on_change_text = ' refresh_on_change="true"'
+ if self.refresh_on_change_values:
+ self.refresh_on_change_text = '%s refresh_on_change_values="%s"' % ( self.refresh_on_change_text, ",".join( self.refresh_on_change_values ) )
else:
self.refresh_on_change_text = ''
def add_option( self, text, value, selected = False ):
@@ -195,11 +202,17 @@
def get_html_default( self, prefix="" ):
if self.multiple: multiple = " multiple"
else: multiple = ""
- rval = [ '<select name="%s%s"%s%s>' % ( prefix, self.name, multiple, self.refresh_on_change_text ) ]
+ rval = []
+ last_selected_value = ""
for text, value, selected in self.options:
- if selected: selected_text = " selected"
+ if selected:
+ selected_text = " selected"
+ last_selected_value = value
else: selected_text = ""
rval.append( '<option value="%s"%s>%s</option>' % ( value, selected_text, text ) )
+ if last_selected_value:
+ last_selected_value = ' last_selected_value="%s"' % last_selected_value
+ rval.insert( 0, '<select name="%s%s"%s%s%s>' % ( prefix, self.name, multiple, self.refresh_on_change_text, last_selected_value ) )
rval.append( '</select>' )
return "\n".join( rval )
@@ -253,7 +266,7 @@
</li>
</ul></div>
"""
- def __init__( self, name, multiple=None, display=None, refresh_on_change=False, options = [], value = [] ):
+ def __init__( self, name, multiple=None, display=None, refresh_on_change=False, options = [], value = [], refresh_on_change_values = [] ):
self.name = name
self.multiple = multiple or False
self.options = options
@@ -270,8 +283,11 @@
raise Exception, "Unknown display type: %s" % display
self.display = display
self.refresh_on_change = refresh_on_change
+ self.refresh_on_change_values = refresh_on_change_values
if self.refresh_on_change:
self.refresh_on_change_text = ' refresh_on_change="true"'
+ if self.refresh_on_change_values:
+ self.refresh_on_change_text = '%s refresh_on_change_values="%s"' % ( self.refresh_on_change_text, ",".join( self.refresh_on_change_values ) )
else:
self.refresh_on_change_text = ''
def get_html( self, prefix="" ):
@@ -308,6 +324,7 @@
rval.append( '</ul></div>' )
return '\n'.join( rval )
+
def get_suite():
"""Get unittest suite for this module"""
import doctest, sys
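
The form_builder.py changes above let a select field carry two extra attributes for the client-side refresh logic: refresh_on_change_values, a comma-joined whitelist of values that should trigger a form refresh, and last_selected_value, so that switching away from a listed value also triggers one. A usage sketch, assuming a Galaxy checkout of this vintage on sys.path:

# Render a file-format select that only refreshes the form around "auto"
from galaxy.web.form_builder import SelectField

field = SelectField( "file_type", refresh_on_change = True,
                     refresh_on_change_values = [ "auto" ] )
field.add_option( "Auto-detect", "auto", selected = True )
field.add_option( "Fastq", "fastq" )
print field.get_html()
# Expected markup per the hunk above (get_html_default joins with newlines):
# <select name="file_type" refresh_on_change="true" refresh_on_change_values="auto" last_selected_value="auto">
# <option value="auto" selected>Auto-detect</option>
# <option value="fastq">Fastq</option>
# </select>
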
diff -r c0c50620b89d -r 73a8b43f1d97 templates/base_panels.mako
--- a/templates/base_panels.mako Mon Jun 08 12:35:38 2009 -0400
+++ b/templates/base_panels.mako Mon Jun 08 12:49:26 2009 -0400
@@ -59,19 +59,19 @@
<script type="text/javascript" src="${h.url_for('/static/scripts/galaxy.panels.js')}"></script>
<script type="text/javascript">
- ensure_dd_helper();
+ ensure_dd_helper();
- %if self.has_left_panel:
+ %if self.has_left_panel:
var lp = make_left_panel( $("#left"), $("#center"), $("#left-border" ) );
force_left_panel = lp.force_panel;
%endif
- %if self.has_right_panel:
+ %if self.has_right_panel:
var rp = make_right_panel( $("#right"), $("#center"), $("#right-border" ) );
handle_minwidth_hint = rp.handle_minwidth_hint;
force_right_panel = rp.force_panel;
%endif
-
+
</script>
## Handle AJAX (actually hidden iframe) upload tool
<![if !IE]>
@@ -81,34 +81,36 @@
##$(this.contentDocument).find("input[galaxy-ajax-upload]").each( function() {
##$("iframe")[0].contentDocument.body.innerHTML = "HELLO"
##$(this.contentWindow.document).find("input[galaxy-ajax-upload]").each( function() {
- $(this).contents().find("input[galaxy-ajax-upload]").each( function() {
- var error_set = false;
- $(this).parents("form").submit( function() {
- // Make a synchronous request to create the datasets first
- var async_datasets;
- $.ajax( {
- async: false,
- type: "POST",
- url: "${h.url_for(controller='tool_runner', action='upload_async_create')}",
- data: $(this).formSerialize(),
- dataType: "json",
- success: function( d, s ) { async_datasets = d.join() }
- } );
- if (async_datasets == '') {
- if (! error_set) {
- $("iframe#galaxy_main").contents().find("body").prepend( '<div class="errormessage">No data was entered in the upload form. You may choose to upload a file, paste some data directly in the data box, or enter URL(s) to fetch from.</div><p/>' );
- error_set = true;
+ $(this).contents().find("form").each( function() {
+ if ( $(this).find("input[galaxy-ajax-upload]").length > 0 ){
+ $(this).submit( function() {
+ var error_set = false;
+ // Make a synchronous request to create the datasets first
+ var async_datasets;
+ $.ajax( {
+ async: false,
+ type: "POST",
+ url: "${h.url_for(controller='tool_runner', action='upload_async_create')}",
+ data: $(this).formSerialize(),
+ dataType: "json",
+ success: function( d, s ) { async_datasets = d.join() }
+ } );
+ if (async_datasets == '') {
+ if (! error_set) {
+ $("iframe#galaxy_main").contents().find("body").prepend( '<div class="errormessage">No data was entered in the upload form. You may choose to upload a file, paste some data directly in the data box, or enter URL(s) to fetch from.</div><p/>' );
+ error_set = true;
+ }
+ return false;
+ } else {
+ $(this).find("input[name=async_datasets]").val( async_datasets );
+ $(this).append("<input type='hidden' name='ajax_upload' value='true'>");
}
+ // iframe submit is required for nginx (otherwise the encoding is wrong)
+ $(this).ajaxSubmit( { iframe: true } );
+ $("iframe#galaxy_main").attr("src","${h.url_for(controller='tool_runner', action='upload_async_message')}");
return false;
- } else {
- $(this).find("input[name=async_datasets]").val( async_datasets );
- $(this).append("<input type='hidden' name='ajax_upload' value='true'>");
- }
- // iframe submit is required for nginx (otherwise the encoding is wrong)
- $(this).ajaxSubmit( { iframe: true } );
- $("iframe#galaxy_main").attr("src","${h.url_for(controller='tool_runner', action='upload_async_message')}");
- return false;
- });
+ });
+ }
});
});
});
@@ -120,88 +122,88 @@
<%def name="masthead()">
<div class="title" style="float: left;">
- <a target="_blank" href="${app.config.wiki_url}">
- <img border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}" style="width: 26px; vertical-align: top;">
- </a>
- Galaxy
- %if app.config.brand:
- <span class='brand'>/${app.config.brand}</span>
- %endif
+ <a target="_blank" href="${app.config.wiki_url}">
+ <img border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}" style="width: 26px; vertical-align: top;">
+ </a>
+ Galaxy
+ %if app.config.brand:
+ <span class='brand'>/${app.config.brand}</span>
+ %endif
</div>
<div style="position: absolute; left: 50%;">
<div class="tab-group" style="position: relative; left: -50%;">
-
- <%def name="tab( id, display, href, target='_parent', visible=True, extra_class='' )">
- <%
- cls = "tab"
- if extra_class:
- cls += " " + extra_class
- if self.active_view == id:
- cls += " active"
- style = ""
- if not visible:
- style = "display: none;"
- %>
- <span class="${cls}" style="${style}"><a target="${target}" href="${href}">${display}</a></span>
- </%def>
- ## ${tab( "tracks", "View Data", h.url_for( controller='tracks', action='dbkeys' ), target="galaxy_main")}
+ <%def name="tab( id, display, href, target='_parent', visible=True, extra_class='' )">
+ <%
+ cls = "tab"
+ if extra_class:
+ cls += " " + extra_class
+ if self.active_view == id:
+ cls += " active"
+ style = ""
+ if not visible:
+ style = "display: none;"
+ %>
+ <span class="${cls}" style="${style}"><a target="${target}" href="${href}">${display}</a></span>
+ </%def>
+
+ ## ${tab( "tracks", "View Data", h.url_for( controller='tracks', action='dbkeys' ), target="galaxy_main")}
- ${tab( "analysis", "Analyze Data", h.url_for( controller='root', action='index' ))}
+ ${tab( "analysis", "Analyze Data", h.url_for( controller='root', action='index' ))}
- ${tab( "workflow", "Workflow", h.url_for( controller='workflow', action='index' ))}
+ ${tab( "workflow", "Workflow", h.url_for( controller='workflow', action='index' ))}
${tab( "libraries", "Libraries", h.url_for( controller='library', action='index' ))}
- ${tab( "admin", "Admin", h.url_for( controller='admin', action='index' ), extra_class="admin-only", visible=( trans.user and app.config.is_admin_user( trans.user ) ) )}
-
- <span class="tab">
- <a>Help</a>
- <div class="submenu">
- <ul>
- <li><a href="${app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" )}">Email comments, bug reports, or suggestions</a></li>
- <li><a target="_blank" href="${app.config.get( "wiki_url", "http://g2.trac.bx.psu.edu/" )}">Galaxy Wiki</a></li>
- <li><a target="_blank" href="${app.config.get( "screencasts_url", "http://g2.trac.bx.psu.edu/wiki/ScreenCasts" )}">Video tutorials (screencasts)</a></li>
- </ul>
- </div>
- </span>
+ ${tab( "admin", "Admin", h.url_for( controller='admin', action='index' ), extra_class="admin-only", visible=( trans.user and app.config.is_admin_user( trans.user ) ) )}
- <span class="tab">
- <a>User</a>
- <%
- if trans.user:
- user_email = trans.user.email
- style1 = "display: none;"
- style2 = "";
- else:
- user_email = ""
- style1 = ""
- style2 = "display: none;"
- %>
- <div class="submenu">
- <ul class="loggedout-only" style="${style1}">
- <li><a target="galaxy_main" href="${h.url_for( controller='user', action='login' )}">Login</a></li>
- %if app.config.allow_user_creation:
- <li><a target="galaxy_main" href="${h.url_for( controller='user', action='create' )}">Register</a></li>
- %endif
- </ul>
- <ul class="loggedin-only" style="${style2}">
- <li>Logged in as <span id="user-email">${user_email}</span></li>
- <li><a target="galaxy_main" href="${h.url_for( controller='user', action='index' )}">Preferences</a></li>
- <%
- if app.config.require_login:
- logout_target = ""
- logout_url = h.url_for( controller='root', action='index', m_c='user', m_a='logout' )
- else:
- logout_target = "galaxy_main"
- logout_url = h.url_for( controller='user', action='logout' )
- %>
- <li><a target="${logout_target}" href="${logout_url}">Logout</a></li>
- </ul>
- </div>
- </span>
-
+ <span class="tab">
+ <a>Help</a>
+ <div class="submenu">
+ <ul>
+ <li><a href="${app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" )}">Email comments, bug reports, or suggestions</a></li>
+ <li><a target="_blank" href="${app.config.get( "wiki_url", "http://g2.trac.bx.psu.edu/" )}">Galaxy Wiki</a></li>
+ <li><a target="_blank" href="${app.config.get( "screencasts_url", "http://g2.trac.bx.psu.edu/wiki/ScreenCasts" )}">Video tutorials (screencasts)</a></li>
+ </ul>
+ </div>
+ </span>
+
+ <span class="tab">
+ <a>User</a>
+ <%
+ if trans.user:
+ user_email = trans.user.email
+ style1 = "display: none;"
+ style2 = "";
+ else:
+ user_email = ""
+ style1 = ""
+ style2 = "display: none;"
+ %>
+ <div class="submenu">
+ <ul class="loggedout-only" style="${style1}">
+ <li><a target="galaxy_main" href="${h.url_for( controller='user', action='login' )}">Login</a></li>
+ %if app.config.allow_user_creation:
+ <li><a target="galaxy_main" href="${h.url_for( controller='user', action='create' )}">Register</a></li>
+ %endif
+ </ul>
+ <ul class="loggedin-only" style="${style2}">
+ <li>Logged in as <span id="user-email">${user_email}</span></li>
+ <li><a target="galaxy_main" href="${h.url_for( controller='user', action='index' )}">Preferences</a></li>
+ <%
+ if app.config.require_login:
+ logout_target = ""
+ logout_url = h.url_for( controller='root', action='index', m_c='user', m_a='logout' )
+ else:
+ logout_target = "galaxy_main"
+ logout_url = h.url_for( controller='user', action='logout' )
+ %>
+ <li><a target="${logout_target}" href="${logout_url}">Logout</a></li>
+ </ul>
+ </div>
+ </span>
+
</div>
</div>
@@ -213,32 +215,32 @@
<div id="overlay"
%if not self.overlay_visible:
- style="display: none;"
+ style="display: none;"
%endif
>
- ##
- <div id="overlay-background" style="position: absolute; width: 100%; height: 100%;"></div>
-
- ## Need a table here for centering in IE6
- <table class="dialog-box-container" border="0" cellpadding="0" cellspacing="0"
- %if not self.overlay_visible:
- style="display: none;"
- %endif
- ><tr><td>
- <div class="dialog-box-wrapper">
- <div class="dialog-box">
- <div class="unified-panel-header">
- <div class="unified-panel-header-inner"><span class='title'>${title}</span></div>
- </div>
- <div class="body" style="max-height: 600px; overflow: auto;">${content}</div>
- <div>
- <div class="buttons" style="display: none; float: right;"></div>
- <div class="extra_buttons" style="display: none; padding: 5px;"></div>
- <div style="clear: both;"></div>
- </div>
- </div>
- </div>
- </td></tr></table>
+ ##
+ <div id="overlay-background" style="position: absolute; width: 100%; height: 100%;"></div>
+
+ ## Need a table here for centering in IE6
+ <table class="dialog-box-container" border="0" cellpadding="0" cellspacing="0"
+ %if not self.overlay_visible:
+ style="display: none;"
+ %endif
+ ><tr><td>
+ <div class="dialog-box-wrapper">
+ <div class="dialog-box">
+ <div class="unified-panel-header">
+ <div class="unified-panel-header-inner"><span class='title'>${title}</span></div>
+ </div>
+ <div class="body" style="max-height: 600px; overflow: auto;">${content}</div>
+ <div>
+ <div class="buttons" style="display: none; float: right;"></div>
+ <div class="extra_buttons" style="display: none; padding: 5px;"></div>
+ <div style="clear: both;"></div>
+ </div>
+ </div>
+ </div>
+ </td></tr></table>
</div>
</%def>
@@ -268,7 +270,7 @@
${self.message_box_content()}
%endif
</div>
- ${self.overlay()}
+ ${self.overlay()}
%if self.has_left_panel:
<div id="left">
${self.left_panel()}
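
The first base_panels.mako hunk (the rest is re-indentation) reworks the AJAX upload hook: the submit handler now attaches to any form containing a galaxy-ajax-upload input, synchronously POSTs the serialized form to tool_runner/upload_async_create to pre-create the datasets, and only then iframe-submits the real upload with async_datasets and ajax_upload=true added, pointing galaxy_main at upload_async_message. A rough out-of-browser analog of that two-step flow, assuming the requests library and a hypothetical form-action URL (in the template, step two is the form's own ajaxSubmit with iframe: true):

# Hypothetical client analog of the template's two-step upload; requests and
# form_action_url are assumptions for illustration only.
import requests

def two_step_upload( galaxy_url, form_action_url, fields, files ):
    # Step 1: synchronously pre-create the datasets (mirrors the $.ajax call)
    r = requests.post( galaxy_url + "/tool_runner/upload_async_create", data=fields )
    async_datasets = ",".join( str( d ) for d in r.json() )
    if not async_datasets:
        # Mirrors the error message prepended to the iframe body
        raise ValueError( "No data was entered in the upload form." )
    # Step 2: the real upload, flagged so the server reuses the pre-created datasets
    fields = dict( fields, async_datasets=async_datasets, ajax_upload="true" )
    return requests.post( form_action_url, data=fields, files=files )
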
diff -r c0c50620b89d -r 73a8b43f1d97 templates/tool_form.mako
--- a/templates/tool_form.mako Mon Jun 08 12:35:38 2009 -0400
+++ b/templates/tool_form.mako Mon Jun 08 12:49:26 2009 -0400
@@ -15,7 +15,39 @@
<script type="text/javascript">
$( function() {
$( "select[refresh_on_change='true']").change( function() {
- $( "#tool_form" ).submit();
+ var refresh = false;
+ var refresh_on_change_values = $( this )[0].attributes.getNamedItem( 'refresh_on_change_values' )
+ if ( refresh_on_change_values ) {
+ refresh_on_change_values = refresh_on_change_values.value.split( ',' );
+ var last_selected_value = $( this )[0].attributes.getNamedItem( 'last_selected_value' );
+ for( i= 0; i < refresh_on_change_values.length; i++ ) {
+ if ( $( this )[0].value == refresh_on_change_values[i] || ( last_selected_value && last_selected_value.value == refresh_on_change_values[i] ) ){
+ refresh = true;
+ break;
+ }
+ }
+ }
+ else {
+ refresh = true;
+ }
+ if ( refresh ){
+ $( ':file' ).each( function() {
+ var file_value = $( this )[0].value;
+ if ( file_value ) {
+ //disable file input, since we don't want to upload the file on refresh
+ var file_name = $( this )[0].name;
+ $( this )[0].name = 'replaced_file_input_' + file_name
+ $( this )[0].disable = true;
+ //create a new hidden field which stores the filename and has the original name of the file input
+ var new_file_input = document.createElement( 'input' );
+ new_file_input.type = 'hidden';
+ new_file_input.value = file_value;
+ new_file_input.name = file_name;
+ document.getElementById( 'tool_form' ).appendChild( new_file_input );
+ }
+ } );
+ $( "#tool_form" ).submit();
+ }
});
});
%if not add_frame.debug:
@@ -72,6 +104,38 @@
%>
${row_for_param( group_prefix, input.test_param, group_state, group_errors, other_values )}
${do_inputs( input.cases[current_case].inputs, group_state, group_errors, group_prefix, other_values )}
+ %elif input.type == "upload_dataset":
+ %if input.get_datatype( trans, other_values ).composite_type is None: #have non-composite upload appear as before
+ <%
+ if input.name in errors:
+ rep_errors = errors[input.name][0]
+ else:
+ rep_errors = dict()
+ %>
+ ${do_inputs( input.inputs, tool_state[input.name][0], rep_errors, prefix + input.name + "_" + str( 0 ) + "|", other_values )}
+ %else:
+ <div class="repeat-group">
+ <div class="form-title-row"><b>${input.group_title( other_values )}</b></div>
+ <%
+ repeat_state = tool_state[input.name]
+ %>
+ %for i in range( len( repeat_state ) ):
+ <div class="repeat-group-item">
+ <%
+ if input.name in errors:
+ rep_errors = errors[input.name][i]
+ else:
+ rep_errors = dict()
+ index = repeat_state[i]['__index__']
+ %>
+ <div class="form-title-row"><b>File Contents for ${input.title_by_index( trans, i, other_values )}</b></div>
+ ${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
+ ##<div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+ </div>
+ %endfor
+ ##<div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
+ </div>
+ %endif
%else:
${row_for_param( prefix, input, tool_state, errors, other_values )}
%endif
@@ -127,7 +191,7 @@
<br/>
%endif
-<div class="toolForm" id="$tool.id">
+<div class="toolForm" id="${tool.id}">
%if tool.has_multiple_pages:
<div class="toolFormTitle">${tool.name} (step ${tool_state.page+1} of ${tool.npages})</div>
%else:
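
The tool_form.mako script above makes the refresh conditional: with refresh_on_change_values present, a change only resubmits the form when the new value or the previous one (last_selected_value) is in the list. Before submitting, each populated file input is renamed and replaced by a hidden field carrying the filename, so a mere state refresh does not re-upload the file; note that "$( this )[0].disable = true" appears to be a typo for the DOM property "disabled", so it is the rename that actually keeps the value out of the submit. A sketch of just the refresh decision:

# Sketch of the refresh decision the new JavaScript makes; pure logic only.
def should_refresh( new_value, last_selected_value, refresh_on_change_values ):
    if not refresh_on_change_values:
        return True  # no whitelist: any change refreshes, as before
    # Refresh when switching either to or away from a listed value
    return ( new_value in refresh_on_change_values
             or last_selected_value in refresh_on_change_values )

assert should_refresh( "auto", "fastq", [ "auto" ] )    # switching to "auto"
assert should_refresh( "fastq", "auto", [ "auto" ] )    # switching away from it
assert not should_refresh( "bed", "fastq", [ "auto" ] )
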
diff -r c0c50620b89d -r 73a8b43f1d97 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Mon Jun 08 12:35:38 2009 -0400
+++ b/tools/data_source/upload.xml Mon Jun 08 12:49:26 2009 -0400
@@ -1,17 +1,11 @@
<?xml version="1.0"?>
-<tool name="Upload File" id="upload1">
+<tool name="Upload File" id="upload1" version="1.0.1">
<description>
from your computer
</description>
<action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
<inputs>
- <param name="async_datasets" type="hidden" value="None"/>
- <param name="file_data" type="file" size="30" label="File" ajax-upload="true"/>
- <param name="url_paste" type="text" area="true" size="5x35" label="URL/Text" help="Here you may specify a list of URLs (one per line) or paste the contents of a file."/>
- <param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand.">
- <option value="Yes">Yes</option>
- </param>
<param name="file_type" type="select" label="File Format" help="Which format? See help below">
<options from_parameter="tool.app.datatypes_registry.upload_file_formats" transform_lines="[ "%s%s%s" % ( line, self.separator, line ) for line in obj ]">
<column name="value" index="1"/>
@@ -20,6 +14,16 @@
<filter type="add_value" name="Auto-detect" value="auto" index="0"/>
</options>
</param>
+ <param name="async_datasets" type="hidden" value="None"/>
+ <upload_dataset name="files" title="Specify Files for Dataset" file_type_name="file_type">
+ <param name="file_data" type="file" size="30" label="File" ajax-upload="true">
+ <validator type="expression" message="You will need to reselect the file you specified (%s)." substitute_value_in_message="True">not ( ( isinstance( value, unicode ) or isinstance( value, str ) ) and value != "" )</validator> <!-- use validator to post message to user about needing to reselect the file, since most browsers won't accept the value attribute for file inputs -->
+ </param>
+ <param name="url_paste" type="text" area="true" size="5x35" label="URL/Text" help="Here you may specify a list of URLs (one per line) or paste the contents of a file."/>
+ <param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand.">
+ <option value="Yes">Yes</option>
+ </param>
+ </upload_dataset>
<param name="dbkey" type="genomebuild" label="Genome" />
</inputs>
<help>
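
In the upload.xml change above, the per-file inputs move into a new upload_dataset group (bumping the tool to version 1.0.1), and file_data gains the expression validator: it fails exactly when the submitted value is a non-empty string, i.e. when the hidden-field filename from the tool_form refresh trick reaches the server instead of real file data, prompting the user to reselect the file. A quick check of that expression, evaluated the way the validator evaluates it (Python 2, matching the codebase):

# The validator expression from upload.xml, compiled and evaluated as in
# the validation.py hunk above.
expr = compile( 'not ( ( isinstance( value, unicode ) or isinstance( value, str ) ) and value != "" )',
                '<string>', 'eval' )
assert eval( expr, dict( value=u'' ) )              # no stale filename: passes
assert not eval( expr, dict( value=u'reads.fq' ) )  # stale filename string: fires the message
# A real upload arrives as a file-like object, not a string, so it passes too.
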