[hg] galaxy 3505: Fix try-except-finally bug to be compatible wi...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/9efe896dbb17
changeset: 3505:9efe896dbb17
user: Kelly Vincent <kpvincent(a)bx.psu.edu>
date: Wed Mar 10 11:51:00 2010 -0500
description:
Fix try-except-finally bug to be compatible with Python 2.4 in several tools and twilltestcase.py. Updated liftOver test to replace hard-coded (incorrect) path and modified code file to allow for new path. Improved bowtie error handling.
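Background for the fix: Python 2.4 does not accept a unified try/except/finally statement (that form only arrived in Python 2.5), so each affected tool nests a try/except inside a try/finally. A minimal sketch of the pattern, combined with the subprocess-based error checking the wrappers switch to; the function and argument names below are illustrative, not taken from the changeset:

import os, subprocess

def run_command( cmd_line, temp_input ):
    # Python 2.4 rejects a combined try/except/finally block, so the
    # except clause lives in an inner try nested inside the try/finally.
    try:
        try:
            proc = subprocess.Popen( args=cmd_line, shell=True, stderr=subprocess.PIPE )
            returncode = proc.wait()
            stderr = proc.stderr.read()
            if returncode != 0:
                raise Exception, stderr
        except Exception, e:
            raise Exception, 'Exception caught running command: ' + str( e )
    finally:
        # the temporary input is removed whether or not the command succeeded
        os.remove( temp_input )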
diffstat:
test/base/twilltestcase.py | 70 ++++++++++++++------------
tools/extract/liftOver_wrapper.py | 17 ++++--
tools/extract/liftOver_wrapper.xml | 9 +--
tools/extract/liftOver_wrapper_code.py | 6 +-
tools/samtools/sam_pileup.py | 44 ++++++++--------
tools/sr_mapping/bowtie_wrapper.py | 72 ++++++++++++++++++---------
tools/sr_mapping/bwa_wrapper.py | 88 +++++++++++++++++----------------
7 files changed, 171 insertions(+), 135 deletions(-)
diffs (452 lines):
diff -r f84112d155c0 -r 9efe896dbb17 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Mar 10 11:28:50 2010 -0500
+++ b/test/base/twilltestcase.py Wed Mar 10 11:51:00 2010 -0500
@@ -633,24 +633,26 @@
data = self.last_page()
file( temp_name, 'wb' ).write(data)
try:
- if attributes is None:
- attributes = {}
- compare = attributes.get( 'compare', 'diff' )
- extra_files = attributes.get( 'extra_files', None )
- if compare == 'diff':
- self.files_diff( local_name, temp_name, attributes=attributes )
- elif compare == 're_match':
- self.files_re_match( local_name, temp_name, attributes=attributes )
- elif compare == 're_match_multiline':
- self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
- else:
- raise Exception, 'Unimplemented Compare type: %s' % compare
- if extra_files:
- self.verify_extra_files_content( extra_files, elem.get( 'id' ) )
- except AssertionError, err:
- errmsg = 'History item %s different than expected, difference (using %s):\n' % ( hid, compare )
- errmsg += str( err )
- raise AssertionError( errmsg )
+ # have to nest try-except in try-finally to handle 2.4
+ try:
+ if attributes is None:
+ attributes = {}
+ compare = attributes.get( 'compare', 'diff' )
+ extra_files = attributes.get( 'extra_files', None )
+ if compare == 'diff':
+ self.files_diff( local_name, temp_name, attributes=attributes )
+ elif compare == 're_match':
+ self.files_re_match( local_name, temp_name, attributes=attributes )
+ elif compare == 're_match_multiline':
+ self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
+ else:
+ raise Exception, 'Unimplemented Compare type: %s' % compare
+ if extra_files:
+ self.verify_extra_files_content( extra_files, elem.get( 'id' ) )
+ except AssertionError, err:
+ errmsg = 'History item %s different than expected, difference (using %s):\n' % ( hid, compare )
+ errmsg += str( err )
+ raise AssertionError( errmsg )
finally:
os.remove( temp_name )
@@ -676,21 +678,23 @@
data = self.last_page()
file( temp_name, 'wb' ).write( data )
try:
- if attributes is None:
- attributes = {}
- compare = attributes.get( 'compare', 'diff' )
- if compare == 'diff':
- self.files_diff( local_name, temp_name, attributes=attributes )
- elif compare == 're_match':
- self.files_re_match( local_name, temp_name, attributes=attributes )
- elif compare == 're_match_multiline':
- self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
- else:
- raise Exception, 'Unimplemented Compare type: %s' % compare
- except AssertionError, err:
- errmsg = 'Composite file (%s) of History item %s different than expected, difference (using %s):\n' % ( base_name, hda_id, compare )
- errmsg += str( err )
- raise AssertionError( errmsg )
+ # have to nest try-except in try-finally to handle 2.4
+ try:
+ if attributes is None:
+ attributes = {}
+ compare = attributes.get( 'compare', 'diff' )
+ if compare == 'diff':
+ self.files_diff( local_name, temp_name, attributes=attributes )
+ elif compare == 're_match':
+ self.files_re_match( local_name, temp_name, attributes=attributes )
+ elif compare == 're_match_multiline':
+ self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
+ else:
+ raise Exception, 'Unimplemented Compare type: %s' % compare
+ except AssertionError, err:
+ errmsg = 'Composite file (%s) of History item %s different than expected, difference (using %s):\n' % ( base_name, hda_id, compare )
+ errmsg += str( err )
+ raise AssertionError( errmsg )
finally:
os.remove( temp_name )
diff -r f84112d155c0 -r 9efe896dbb17 tools/extract/liftOver_wrapper.py
--- a/tools/extract/liftOver_wrapper.py Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/extract/liftOver_wrapper.py Wed Mar 10 11:51:00 2010 -0500
@@ -4,7 +4,7 @@
Converts coordinates from one build/assembly to another using liftOver binary and mapping files downloaded from UCSC.
"""
-import sys, os, string
+import os, string, subprocess, sys
import tempfile
import re
@@ -51,15 +51,20 @@
if in_dbkey == "?":
stop_err( "Input dataset genome build unspecified, click the pencil icon in the history item to specify it." )
-
if not os.path.isfile( mapfilepath ):
stop_err( "%s mapping is not currently available." % ( mapfilepath.split('/')[-1].split('.')[0] ) )
safe_infile = safe_bed_file(infile)
-cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null 2>&1"
+cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null"
try:
- os.system( cmd_line )
-except Exception, exc:
- stop_err( "Exception caught attempting conversion: %s" % str( exc ) )
+ # have to nest try-except in try-finally to handle 2.4
+ try:
+ proc = subprocess.Popen( args=cmd_line, shell=True, stderr=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ if returncode != 0:
+ raise Exception, stderr
+ except Exception, e:
+ raise Exception, 'Exception caught attempting conversion: ' + str( e )
finally:
os.remove(safe_infile)
diff -r f84112d155c0 -r 9efe896dbb17 tools/extract/liftOver_wrapper.xml
--- a/tools/extract/liftOver_wrapper.xml Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/extract/liftOver_wrapper.xml Wed Mar 10 11:51:00 2010 -0500
@@ -23,19 +23,16 @@
<requirements>
<requirement type="binary">liftOver</requirement>
</requirements>
-
<tests>
<test>
<param name="input" value="5.bed" dbkey="hg18" ftype="bed" />
- <param name="to_dbkey" value="/galaxy/data/hg18/liftOver/hg18ToPanTro2.over.chain" />
+ <param name="to_dbkey" value="panTro2" />
<param name="minMatch" value="0.95" />
<output name="out_file1" file="5_liftover_mapped.bed"/>
<output name="out_file2" file="5_liftover_unmapped.bed"/>
</test>
</tests>
-
<help>
-
.. class:: warningmark
Make sure that the genome build of the input dataset is specified (click the pencil icon in the history item to set it if necessary).
@@ -71,6 +68,6 @@
chrX 158279 160020 AK097346 0 +
chrX 160024 169033 AK074528 0 -
-</help>
-<code file="liftOver_wrapper_code.py"/>
+ </help>
+ <code file="liftOver_wrapper_code.py" />
</tool>
diff -r f84112d155c0 -r 9efe896dbb17 tools/extract/liftOver_wrapper_code.py
--- a/tools/extract/liftOver_wrapper_code.py Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/extract/liftOver_wrapper_code.py Wed Mar 10 11:51:00 2010 -0500
@@ -1,8 +1,10 @@
+import os
+
def exec_before_job(app, inp_data, out_data, param_dict, tool):
#Assuming the path of the form liftOverDirectory/hg18ToHg17.over.chain (This is how the mapping chain files from UCSC look.)
- to_dbkey = param_dict['to_dbkey'].split('.')[0].split('To')[1]
+ #allows for . in path
+ to_dbkey = os.path.split(param_dict['to_dbkey'])[1].split('.')[0].split('To')[1]
to_dbkey = to_dbkey[0].lower()+to_dbkey[1:]
out_data['out_file1'].set_dbkey(to_dbkey)
out_data['out_file1'].name = out_data['out_file1'].name + " [ MAPPED COORDINATES ]"
out_data['out_file2'].name = out_data['out_file2'].name + " [ UNMAPPED COORDINATES ]"
-
diff -r f84112d155c0 -r 9efe896dbb17 tools/samtools/sam_pileup.py
--- a/tools/samtools/sam_pileup.py Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/samtools/sam_pileup.py Wed Mar 10 11:51:00 2010 -0500
@@ -78,30 +78,32 @@
#prepare basic pileup command
cmd = 'samtools pileup %s -f %s %s > %s'
try:
- #index reference if necessary and prepare pileup command
- if options.ref == 'indexed':
- if not os.path.exists( "%s.fai" % seqPath ):
- raise Exception, "No sequences are available for '%s', request them by reporting this error." % options.dbkey
- cmd = cmd % ( opts, seqPath, tmpf0bam_name, options.output1 )
- elif options.ref == 'history':
- os.symlink( options.ownFile, tmpf1_name )
- cmdIndex = 'samtools faidx %s' % ( tmpf1_name )
- proc = subprocess.Popen( args=cmdIndex, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
+ # have to nest try-except in try-finally to handle 2.4
+ try:
+ #index reference if necessary and prepare pileup command
+ if options.ref == 'indexed':
+ if not os.path.exists( "%s.fai" % seqPath ):
+ raise Exception, "No sequences are available for '%s', request them by reporting this error." % options.dbkey
+ cmd = cmd % ( opts, seqPath, tmpf0bam_name, options.output1 )
+ elif options.ref == 'history':
+ os.symlink( options.ownFile, tmpf1_name )
+ cmdIndex = 'samtools faidx %s' % ( tmpf1_name )
+ proc = subprocess.Popen( args=cmdIndex, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ #did index succeed?
+ if returncode != 0:
+ raise Exception, 'Error creating index file\n' + stderr
+ cmd = cmd % ( opts, tmpf1_name, tmpf0bam_name, options.output1 )
+ #perform pileup command
+ proc = subprocess.Popen( args=cmd, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
returncode = proc.wait()
+ #did it succeed?
stderr = proc.stderr.read()
- #did index succeed?
if returncode != 0:
- raise Exception, 'Error creating index file\n' + stderr
- cmd = cmd % ( opts, tmpf1_name, tmpf0bam_name, options.output1 )
- #perform pileup command
- proc = subprocess.Popen( args=cmd, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
- returncode = proc.wait()
- #did it succeed?
- stderr = proc.stderr.read()
- if returncode != 0:
- raise Exception, stderr
- except Exception, e:
- stop_err( 'Error running Samtools pileup tool\n' + str( e ) )
+ raise Exception, stderr
+ except Exception, e:
+ stop_err( 'Error running Samtools pileup tool\n' + str( e ) )
finally:
#clean up temp files
if os.path.exists( tmpDir ):
diff -r f84112d155c0 -r 9efe896dbb17 tools/sr_mapping/bowtie_wrapper.py
--- a/tools/sr_mapping/bowtie_wrapper.py Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/sr_mapping/bowtie_wrapper.py Wed Mar 10 11:51:00 2010 -0500
@@ -2,7 +2,7 @@
"""
Runs Bowtie on single-end or paired-end data.
-For use with Bowtie v. 0.12.1
+For use with Bowtie v. 0.12.3
usage: bowtie_wrapper.py [options]
-t, --threads=t: The number of threads to run
@@ -58,12 +58,12 @@
-H, --suppressHeader=H: Suppress header
"""
-import optparse, os, shutil, sys, tempfile
+import optparse, os, shutil, subprocess, sys, tempfile
def stop_err( msg ):
- sys.stderr.write( "%s\n" % msg )
+ sys.stderr.write( '%s\n' % msg )
sys.exit()
-
+
def __main__():
#Parse Command Line
parser = optparse.OptionParser()
@@ -119,6 +119,7 @@
parser.add_option( '-x', '--indexSettings', dest='index_settings', help='Whether or not indexing options are to be set' )
parser.add_option( '-H', '--suppressHeader', dest='suppressHeader', help='Suppress header' )
(options, args) = parser.parse_args()
+ stdout = ''
# make temp directory for placement of indices and copy reference file there if necessary
tmp_index_dir = tempfile.mkdtemp()
# get type of data (solid or solexa)
@@ -187,17 +188,25 @@
iseed, icutoff, colorspace )
except ValueError:
indexing_cmds = '%s' % colorspace
+ ref_file = tempfile.NamedTemporaryFile( dir=tmp_index_dir )
+ ref_file_name = ref_file.name
+ ref_file.close()
+ os.symlink( options.ref, ref_file_name )
+ cmd1 = 'bowtie-build %s -f %s %s' % ( indexing_cmds, ref_file_name, ref_file_name )
try:
- shutil.copy( options.ref, tmp_index_dir )
+ proc = subprocess.Popen( args=cmd1, shell=True, cwd=tmp_index_dir, stderr=subprocess.PIPE, stdout=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ if returncode != 0:
+ raise Exception, stderr
except Exception, e:
- stop_err( 'Error creating temp directory for indexing purposes\n' + str( e ) )
- options.ref = os.path.join( tmp_index_dir, os.path.split( options.ref )[1] )
- cmd1 = 'bowtie-build %s -f %s %s 2> /dev/null' % ( indexing_cmds, options.ref, options.ref )
- try:
- os.chdir( tmp_index_dir )
- os.system( cmd1 )
- except Exception, e:
+ # clean up temp dir
+ if os.path.exists( tmp_index_dir ):
+ shutil.rmtree( tmp_index_dir )
stop_err( 'Error indexing reference sequence\n' + str( e ) )
+ stdout += 'File indexed. '
+ else:
+ ref_file_name = options.ref
# set up aligning and generate aligning command options
# automatically set threads in both cases
if options.suppressHeader == 'true':
@@ -328,19 +337,34 @@
best, strata, offrate, seed, colorspace, snpphred, snpfrac,
keepends, options.threads, suppressHeader )
except ValueError, e:
+ # clean up temp dir
+ if os.path.exists( tmp_index_dir ):
+ shutil.rmtree( tmp_index_dir )
stop_err( 'Something is wrong with the alignment parameters and the alignment could not be run\n' + str( e ) )
- # prepare actual aligning commands
- if options.paired == 'paired':
- cmd2 = 'bowtie %s %s -1 %s -2 %s > %s 2> /dev/null' % ( aligning_cmds, options.ref, options.input1, options.input2, options.output )
- else:
- cmd2 = 'bowtie %s %s %s > %s 2> /dev/null' % ( aligning_cmds, options.ref, options.input1, options.output )
- # align
try:
- os.system( cmd2 )
- except Exception, e:
- stop_err( 'Error aligning sequence\n' + str( e ) )
- # clean up temp dir
- if os.path.exists( tmp_index_dir ):
- shutil.rmtree( tmp_index_dir )
+ # have to nest try-except in try-finally to handle 2.4
+ try:
+ # prepare actual aligning commands
+ if options.paired == 'paired':
+ cmd2 = 'bowtie %s %s -1 %s -2 %s > %s' % ( aligning_cmds, ref_file_name, options.input1, options.input2, options.output )
+ else:
+ cmd2 = 'bowtie %s %s %s > %s' % ( aligning_cmds, ref_file_name, options.input1, options.output )
+ # align
+ proc = subprocess.Popen( args=cmd2, shell=True, cwd=tmp_index_dir, stderr=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ if returncode != 0:
+ raise Exception, stderr
+ # check that there are results in the output file
+ if len( open( options.output, 'rb' ).read().strip() ) == 0:
+ raise Exception, 'The output file is empty, there may be an error with your input file or settings.'
+ except Exception, e:
+ stop_err( 'Error aligning sequence. ' + str( e ) )
+ finally:
+ # clean up temp dir
+ if os.path.exists( tmp_index_dir ):
+ shutil.rmtree( tmp_index_dir )
+ stdout += 'Sequence file aligned.\n'
+ sys.stdout.write( stdout )
if __name__=="__main__": __main__()
diff -r f84112d155c0 -r 9efe896dbb17 tools/sr_mapping/bwa_wrapper.py
--- a/tools/sr_mapping/bwa_wrapper.py Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/sr_mapping/bwa_wrapper.py Wed Mar 10 11:51:00 2010 -0500
@@ -152,55 +152,57 @@
cmd3 = 'bwa samse %s %s %s %s >> %s' % ( gen_alignment_cmds, ref_file_name, tmp_align_out_name, options.fastq, options.output )
# perform alignments
try:
- # align
+ # need to nest try-except in try-finally to handle 2.4
try:
- proc = subprocess.Popen( args=cmd2, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
- returncode = proc.wait()
- stderr = proc.stderr.read()
- if returncode != 0:
- raise Exception, stderr
- except Exception, e:
- raise Exception, 'Error aligning sequence. ' + str( e )
- # and again if paired data
- try:
- if cmd2b:
- proc = subprocess.Popen( args=cmd2b, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
+ # align
+ try:
+ proc = subprocess.Popen( args=cmd2, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
returncode = proc.wait()
stderr = proc.stderr.read()
if returncode != 0:
raise Exception, stderr
+ except Exception, e:
+ raise Exception, 'Error aligning sequence. ' + str( e )
+ # and again if paired data
+ try:
+ if cmd2b:
+ proc = subprocess.Popen( args=cmd2b, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ if returncode != 0:
+ raise Exception, stderr
+ except Exception, e:
+ raise Exception, 'Error aligning second sequence. ' + str( e )
+ # generate align
+ try:
+ proc = subprocess.Popen( args=cmd3, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ if returncode != 0:
+ raise Exception, stderr
+ except Exception, e:
+ raise Exception, 'Error generating alignments. ' + str( e )
+ # remove header if necessary
+ if options.suppressHeader == 'true':
+ tmp_out = tempfile.NamedTemporaryFile( dir=tmp_dir)
+ tmp_out_name = tmp_out.name
+ tmp_out.close()
+ try:
+ shutil.move( options.output, tmp_out_name )
+ except Exception, e:
+ raise Exception, 'Error moving output file before removing headers. ' + str( e )
+ fout = file( options.output, 'w' )
+ for line in file( tmp_out.name, 'r' ):
+ if not ( line.startswith( '@HD' ) or line.startswith( '@SQ' ) or line.startswith( '@RG' ) or line.startswith( '@PG' ) or line.startswith( '@CO' ) ):
+ fout.write( line )
+ fout.close()
+ # check that there are results in the output file
+ if os.path.getsize( options.output ) > 0:
+ sys.stdout.write( 'BWA run on %s-end data' % options.genAlignType )
+ else:
+ raise Exception, 'The output file is empty. You may simply have no matches, or there may be an error with your input file or settings.'
except Exception, e:
- raise Exception, 'Error aligning second sequence. ' + str( e )
- # generate align
- try:
- proc = subprocess.Popen( args=cmd3, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
- returncode = proc.wait()
- stderr = proc.stderr.read()
- if returncode != 0:
- raise Exception, stderr
- except Exception, e:
- raise Exception, 'Error generating alignments. ' + str( e )
- # remove header if necessary
- if options.suppressHeader == 'true':
- tmp_out = tempfile.NamedTemporaryFile( dir=tmp_dir)
- tmp_out_name = tmp_out.name
- tmp_out.close()
- try:
- shutil.move( options.output, tmp_out_name )
- except Exception, e:
- raise Exception, 'Error moving output file before removing headers. ' + str( e )
- fout = file( options.output, 'w' )
- for line in file( tmp_out.name, 'r' ):
- if not ( line.startswith( '@HD' ) or line.startswith( '@SQ' ) or line.startswith( '@RG' ) or line.startswith( '@PG' ) or line.startswith( '@CO' ) ):
- fout.write( line )
- fout.close()
- # check that there are results in the output file
- if os.path.getsize( options.output ) > 0:
- sys.stdout.write( 'BWA run on %s-end data' % options.genAlignType )
- else:
- raise Exception, 'The output file is empty. You may simply have no matches, or there may be an error with your input file or settings.'
- except Exception, e:
- stop_err( 'The alignment failed.\n' + str( e ) )
+ stop_err( 'The alignment failed.\n' + str( e ) )
finally:
# clean up temp dir
if os.path.exists( tmp_index_dir ):
[hg] galaxy 3504: Allow renaming of uploaded files in toolbox te...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/f84112d155c0
changeset: 3504:f84112d155c0
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Wed Mar 10 11:28:50 2010 -0500
description:
Allow renaming of uploaded files in toolbox tests by including an <edit_attributes type="name" value="new dataset name" /> child tag. A rename directive is automatically assigned (no XML changes required) to uploaded composite datasets, so that they can be identified uniquely.
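To illustrate the new tag, here is a sketch of a test <param> carrying an <edit_attributes> child together with the kind of parsing the patch adds; the tool input, file, and dataset names are invented, and xml.etree is assumed to be available:

from xml.etree import ElementTree

# Invented example of a <test> block whose uploaded input is renamed via the
# new <edit_attributes type="name"> child tag; composite_data children are
# what trigger an automatic rename when no explicit one is given.
test_xml = """
<test>
  <param name="input1" value="composite.html" ftype="html">
    <composite_data value="composite_part1.txt" />
    <edit_attributes type="name" value="my renamed dataset" />
  </param>
  <output name="out_file1" file="expected_output.txt" />
</test>
"""

test_elem = ElementTree.fromstring( test_xml )
for param_elem in test_elem.findall( 'param' ):
    children = list( param_elem.getchildren() )
    edit_attributes = [ child for child in children if child.tag == 'edit_attributes' ]
    for edit_att in edit_attributes:
        if edit_att.get( 'type' ) == 'name':
            # the functional test renames the uploaded dataset to this value
            print "dataset will be renamed to:", edit_att.get( 'value' )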
diffstat:
lib/galaxy/tools/__init__.py | 23 +++++++++++++++++++++++
lib/galaxy/tools/test.py | 11 +++++++----
test/functional/test_toolbox.py | 16 ++++++++++++++--
3 files changed, 44 insertions(+), 6 deletions(-)
diffs (91 lines):
diff -r c73f093219aa -r f84112d155c0 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Wed Mar 10 11:24:49 2010 -0500
+++ b/lib/galaxy/tools/__init__.py Wed Mar 10 11:28:50 2010 -0500
@@ -510,6 +510,7 @@
store in `self.tests`.
"""
self.tests = []
+ composite_data_names_counter = 0 #composite datasets need a unique name: each test occurs in a fresh history, but we'll keep it unique per set of tests
for i, test_elem in enumerate( tests_elem.findall( 'test' ) ):
name = test_elem.get( 'name', 'Test-%d' % (i+1) )
maxseconds = int( test_elem.get( 'maxseconds', '120' ) )
@@ -524,6 +525,28 @@
else:
value = None
attrib['children'] = list( param_elem.getchildren() )
+ if attrib['children']:
+ #at this time, we can assume having children only occurs on DataToolParameter test items
+ #but this could change and would cause the below parsing to change based upon differences in children items
+ attrib['metadata'] = []
+ attrib['composite_data'] = []
+ attrib['edit_attributes'] = []
+ composite_data_name = None #composite datasets need to be renamed uniquely
+ for child in attrib['children']:
+ if child.tag == 'composite_data':
+ attrib['composite_data'].append( child )
+ if composite_data_name is None:
+ #generate a unique name; each test uses a fresh history
+ composite_data_name = '_COMPOSITE_RENAMED %i_' % ( composite_data_names_counter )
+ composite_data_names_counter += 1
+ elif child.tag == 'metadata':
+ attrib['metadata'].append( child )
+ elif child.tag == 'metadata':
+ attrib['metadata'].append( child )
+ elif child.tag == 'edit_attributes':
+ attrib['edit_attributes'].append( child )
+ if composite_data_name:
+ attrib['edit_attributes'].insert( 0, { 'type': 'name', 'value': composite_data_name } ) #composite datasets need implicit renaming; inserted at front of list so explicit declarations take precedence
test.add_param( attrib.pop( 'name' ), value, attrib )
for output_elem in test_elem.findall( "output" ):
attrib = dict( output_elem.attrib )
diff -r c73f093219aa -r f84112d155c0 lib/galaxy/tools/test.py
--- a/lib/galaxy/tools/test.py Wed Mar 10 11:24:49 2010 -0500
+++ b/lib/galaxy/tools/test.py Wed Mar 10 11:28:50 2010 -0500
@@ -30,12 +30,15 @@
if isinstance( input_value, grouping.Conditional ) or isinstance( input_value, grouping.Repeat ):
self.__expand_grouping_for_data_input(name, value, extra, input_name, input_value)
elif isinstance( self.tool.inputs[name], parameters.DataToolParameter ) and ( value, extra ) not in self.required_files:
- if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) == 0:
+ name_change = [ att for att in extra.get( 'edit_attributes', [] ) if att.get( 'type' ) == 'name' ]
+ if name_change:
+ name_change = name_change[-1].get( 'value' ) #only the last name change really matters
+ if value is None and not name_change:
assert self.tool.inputs[name].optional, '%s is not optional. You must provide a valid filename.' % name
else:
- self.required_files.append( ( value, extra ) )
- if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) > 0:
- value = extra.get( 'ftype' )
+ self.required_files.append( ( value, extra ) ) #these files will be uploaded
+ if name_change:
+ value = name_change #change value for select to renamed uploaded file for e.g. composite dataset
except Exception, e:
log.debug( "Error in add_param for %s: %s" % ( name, e ) )
self.inputs.append( ( name, value, extra ) )
diff -r c73f093219aa -r f84112d155c0 test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py Wed Mar 10 11:24:49 2010 -0500
+++ b/test/functional/test_toolbox.py Wed Mar 10 11:28:50 2010 -0500
@@ -33,10 +33,22 @@
# Upload any needed files
for fname, extra in testdef.required_files:
children = extra.get( 'children', [] )
- metadata = [ child for child in children if child.tag == 'metadata' ]
- composite_data = [ child for child in children if child.tag == 'composite_data' ]
+ metadata = extra.get( 'metadata', [] )
+ composite_data = extra.get( 'composite_data', [] )
self.upload_file( fname, ftype=extra.get( 'ftype', 'auto' ), dbkey=extra.get( 'dbkey', 'hg17' ), metadata = metadata, composite_data = composite_data )
print "Uploaded file: ", fname, ", ftype: ", extra.get( 'ftype', 'auto' ), ", extra: ", extra
+ #Post upload attribute editing
+ edit_attributes = extra.get( 'edit_attributes', [] )
+ #currently only renaming is supported
+ for edit_att in edit_attributes:
+ if edit_att.get( 'type', None ) == 'name':
+ new_name = edit_att.get( 'value', None )
+ assert new_name, 'You must supply the new dataset name as the value tag of the edit_attributes tag'
+ hda_id = self.get_history_as_data_list()[-1].get( 'id' )
+ self.edit_hda_attribute_info( hda_id, new_name = new_name )
+ print "Renamed uploaded file to:", new_name
+ else:
+ raise Exception( 'edit_attributes type (%s) is unimplemented' % edit_att.get( 'type', None ) )
# We need to handle the case where we've uploaded a valid compressed file since the upload
# tool will have uncompressed it on the fly.
all_inputs = {}
[hg] galaxy 3503: Fix for deleting a list of library datasets - ...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/c73f093219aa
changeset: 3503:c73f093219aa
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Wed Mar 10 11:24:49 2010 -0500
description:
Fix for deleting a list of library datasets - resolves ticket # 102.
diffstat:
lib/galaxy/web/controllers/library_admin.py | 4 ++--
lib/galaxy/web/controllers/library_common.py | 14 ++++++++------
templates/library/common/browse_library.mako | 2 +-
templates/library/common/ldda_info.mako | 2 +-
test/base/twilltestcase.py | 9 ++++++---
test/functional/test_security_and_libraries.py | 6 +++---
6 files changed, 21 insertions(+), 16 deletions(-)
diffs (130 lines):
diff -r 34babf71a09f -r c73f093219aa lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Tue Mar 09 16:28:04 2010 -0500
+++ b/lib/galaxy/web/controllers/library_admin.py Wed Mar 10 11:24:49 2010 -0500
@@ -169,8 +169,8 @@
# deleted / purged contents will have the same state ). When a library or folder has been deleted for
# the amount of time defined in the cleanup_datasets.py script, the library or folder and all of its
# contents will be purged. The association between this method and the cleanup_datasets.py script
- # enables clean maintenance of libraries and library dataset disk files. This is also why the following
- # 3 objects, and not any of the associations ( the cleanup_datasets.py scipot handles everything else ).
+ # enables clean maintenance of libraries and library dataset disk files. This is also why the item_types
+ # are not any of the associations ( the cleanup_datasets.py script handles everything ).
show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
item_types = { 'library': trans.app.model.Library,
'folder': trans.app.model.LibraryFolder,
diff -r 34babf71a09f -r c73f093219aa lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Tue Mar 09 16:28:04 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Wed Mar 10 11:24:49 2010 -0500
@@ -1246,8 +1246,7 @@
messagetype=messagetype )
@web.expose
def act_on_multiple_datasets( self, trans, cntrller, library_id, ldda_ids='', **kwd ):
- # This method is used by the select list labeled "Perform action on selected datasets"
- # on the analysis library browser
+ # Perform an action on a list of library datasets.
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
@@ -1262,7 +1261,7 @@
messagetype = 'error'
else:
ldda_ids = util.listify( ldda_ids )
- if action == 'add':
+ if action == 'import_to_history':
history = trans.get_history()
if history is None:
# Must be a bot sending a request without having a history.
@@ -1306,9 +1305,12 @@
elif action == 'delete':
for ldda_id in ldda_ids:
ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( ldda_id ) )
- ldda.deleted = True
- trans.sa_session.add( ldda )
- trans.sa_session.flush()
+ # Do not delete the association, just delete the library_dataset. The
+ # cleanup_datasets.py script handles everything else.
+ ld = ldda.library_dataset
+ ld.deleted = True
+ trans.sa_session.add( ld )
+ trans.sa_session.flush()
msg = "The selected datasets have been removed from this data library"
else:
error = False
diff -r 34babf71a09f -r c73f093219aa templates/library/common/browse_library.mako
--- a/templates/library/common/browse_library.mako Tue Mar 09 16:28:04 2010 -0500
+++ b/templates/library/common/browse_library.mako Wed Mar 10 11:24:49 2010 -0500
@@ -229,7 +229,7 @@
<a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), replace_id=trans.security.encode_id( library_dataset.id ), show_deleted=show_deleted )}">Upload a new version of this dataset</a>
%endif
%if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ldda.has_data:
- <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='add', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='import_to_history', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
<a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels )}">Download this dataset</a>
%endif
%if cntrller in [ 'library_admin', 'requests_admin' ]:
diff -r 34babf71a09f -r c73f093219aa templates/library/common/ldda_info.mako
--- a/templates/library/common/ldda_info.mako Tue Mar 09 16:28:04 2010 -0500
+++ b/templates/library/common/ldda_info.mako Wed Mar 10 11:24:49 2010 -0500
@@ -57,7 +57,7 @@
<a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), replace_id=trans.security.encode_id( ldda.library_dataset.id ) )}">Upload a new version of this dataset</a>
%endif
%if cntrller=='library' and ldda.has_data:
- <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='add', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='import_to_history', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
<a class="action-button" href="${h.url_for( controller='library', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Download this dataset</a>
%endif
</div>
diff -r 34babf71a09f -r c73f093219aa test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Mar 09 16:28:04 2010 -0500
+++ b/test/base/twilltestcase.py Wed Mar 10 11:24:49 2010 -0500
@@ -1595,14 +1595,17 @@
self.home()
def add_library_dataset( self, cntrller, filename, library_id, folder_id, folder_name,
file_type='auto', dbkey='hg18', roles=[], message='', root=False,
- template_field_name1='', template_field_contents1='' ):
+ template_field_name1='', template_field_contents1='', show_deleted='False',
+ upload_option='upload_file' ):
"""Add a dataset to a folder"""
filename = self.get_filename( filename )
self.home()
- self.visit_url( "%s/library_common/upload_library_dataset?cntrller=%s&upload_option=upload_file&library_id=%s&folder_id=%s&message=%s" % \
- ( self.url, cntrller, library_id, folder_id, message ) )
+ self.visit_url( "%s/library_common/upload_library_dataset?cntrller=%s&library_id=%s&folder_id=%s&upload_option=%s&message=%s" % \
+ ( self.url, cntrller, library_id, folder_id, upload_option, message ) )
self.check_page_for_string( 'Upload files' )
+ tc.fv( "1", "library_id", library_id )
tc.fv( "1", "folder_id", folder_id )
+ tc.fv( "1", "show_deleted", show_deleted )
tc.formfile( "1", "files_0|file_data", filename )
tc.fv( "1", "file_type", file_type )
tc.fv( "1", "dbkey", dbkey )
diff -r 34babf71a09f -r c73f093219aa test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Tue Mar 09 16:28:04 2010 -0500
+++ b/test/functional/test_security_and_libraries.py Wed Mar 10 11:24:49 2010 -0500
@@ -1181,7 +1181,7 @@
# Test importing the restricted dataset into a history, can't use the
# ~/library_admin/libraries form as twill barfs on it so we'll simulate the form submission
# by going directly to the form action
- self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=add&ldda_ids=%s&library_id=%s' \
+ self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=import_to_history&ldda_ids=%s&library_id=%s' \
% ( self.url, self.security.encode_id( ldda_five.id ), self.security.encode_id( library_one.id ) ) )
self.check_page_for_string( '1 dataset(s) have been imported into your history' )
self.logout()
@@ -1473,7 +1473,7 @@
for ldda in lddas:
# Import each library dataset into our history
self.home()
- self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=add&ldda_ids=%s&library_id=%s' % \
+ self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=import_to_history&ldda_ids=%s&library_id=%s' % \
( self.url, self.security.encode_id( ldda.id ), self.security.encode_id( library_one.id ) ) )
# Determine the new HistoryDatasetAssociation id created when the library dataset was imported into our history
last_hda_created = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
@@ -1522,7 +1522,7 @@
# be all of the above on any of the 3 datasets that are imported into a history
for ldda in lddas:
self.home()
- self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&library_id=%s&do_action=add&ldda_ids=%s' % \
+ self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&library_id=%s&do_action=import_to_history&ldda_ids=%s' % \
( self.url, self.security.encode_id( library_one.id ), self.security.encode_id( ldda.id ) ) )
# Determine the new HistoryDatasetAssociation id created when the library dataset was imported into our history
last_hda_created = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
[hg] galaxy 3502: Keep the state of displaying / hiding deleted ...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/34babf71a09f
changeset: 3502:34babf71a09f
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Mar 09 16:28:04 2010 -0500
description:
Keep the state of displaying / hiding deleted library items when uploading library datasets.
diffstat:
lib/galaxy/web/controllers/library_common.py | 7 +++++++
lib/galaxy/web/controllers/tool_runner.py | 1 -
templates/base_panels.mako | 5 +++--
templates/library/common/browse_library.mako | 2 +-
templates/library/common/common.mako | 3 ++-
templates/library/common/upload.mako | 14 +++++++-------
6 files changed, 20 insertions(+), 12 deletions(-)
diffs (161 lines):
diff -r e4592fc99acc -r 34babf71a09f lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Tue Mar 09 15:25:23 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Tue Mar 09 16:28:04 2010 -0500
@@ -659,6 +659,7 @@
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
deleted = util.string_as_bool( params.get( 'deleted', False ) )
+ show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
dbkey = params.get( 'dbkey', '?' )
if isinstance( dbkey, list ):
last_used_build = dbkey[0]
@@ -705,6 +706,7 @@
folder_id=folder_id,
replace_id=replace_id,
upload_option=upload_option,
+ show_deleted=show_deleted,
msg=util.sanitize_text( msg ),
messagetype='error' ) )
@@ -758,6 +760,7 @@
id=library_id,
default_action=default_action,
created_ldda_ids=created_ldda_ids,
+ show_deleted=show_deleted,
msg=util.sanitize_text( msg ),
messagetype='done' ) )
@@ -770,6 +773,7 @@
cntrller=cntrller,
id=library_id,
created_ldda_ids=created_ldda_ids,
+ show_deleted=show_deleted,
msg=util.sanitize_text( msg ),
messagetype=messagetype ) )
# See if we have any inherited templates, but do not inherit contents.
@@ -819,6 +823,7 @@
roles=roles,
history=history,
widgets=widgets,
+ show_deleted=show_deleted,
msg=msg,
messagetype=messagetype )
def upload_dataset( self, trans, cntrller, library_id, folder_id, replace_dataset=None, **kwd ):
@@ -834,6 +839,7 @@
dataset_upload_inputs.append( input )
# Library-specific params
params = util.Params( kwd ) # is this filetoolparam safe?
+ show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
library_bunch = upload_common.handle_library_params( trans, params, folder_id, replace_dataset )
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
@@ -883,6 +889,7 @@
folder_id=folder_id,
replace_id=replace_id,
upload_option=upload_option,
+ show_deleted=show_deleted,
msg=util.sanitize_text( msg ),
messagetype='error' ) )
json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
diff -r e4592fc99acc -r 34babf71a09f lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Tue Mar 09 15:25:23 2010 -0500
+++ b/lib/galaxy/web/controllers/tool_runner.py Tue Mar 09 16:28:04 2010 -0500
@@ -155,7 +155,6 @@
permissions, in_roles, error, msg = trans.app.security_agent.derive_roles_from_access( trans, library_id, cntrller, library=True, **vars )
if error:
return [ 'error', msg ]
- permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
def create_dataset( name ):
ud = Bunch( name=name, file_type=None, dbkey=None )
if nonfile_params.get( 'folder_id', False ):
diff -r e4592fc99acc -r 34babf71a09f templates/base_panels.mako
--- a/templates/base_panels.mako Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/base_panels.mako Tue Mar 09 16:28:04 2010 -0500
@@ -131,10 +131,11 @@
$(this).ajaxSubmit( { iframe: true } );
if ( $(this).find("input[name='folder_id']").val() != undefined ) {
var library_id = $(this).find("input[name='library_id']").val();
+ var show_deleted = $(this).find("input[name='show_deleted']").val();
if ( location.pathname.indexOf( 'admin' ) != -1 ) {
- $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library_admin&id=" + library_id + "&created_ldda_ids=" + async_datasets);
+ $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library_admin&id=" + library_id + "&created_ldda_ids=" + async_datasets + "&show_deleted=" + show_deleted);
} else {
- $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library&id=" + library_id + "&created_ldda_ids=" + async_datasets);
+ $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library&id=" + library_id + "&created_ldda_ids=" + async_datasets + "&show_deleted=" + show_deleted);
}
} else {
$("iframe#galaxy_main").attr("src","${h.url_for(controller='tool_runner', action='upload_async_message')}");
diff -r e4592fc99acc -r 34babf71a09f templates/library/common/browse_library.mako
--- a/templates/library/common/browse_library.mako Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/library/common/browse_library.mako Tue Mar 09 16:28:04 2010 -0500
@@ -404,7 +404,7 @@
<ul class="manage-table-actions">
%if not library.deleted and ( cntrller in [ 'library_admin', 'requests_admin' ] or can_add ):
- <li><a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ) )}"><span>Add datasets</span></a></li>
+ <li><a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ), use_panels=use_panels, show_deleted=show_deleted )}"><span>Add datasets</span></a></li>
<li><a class="action-button" href="${h.url_for( controller='library_common', action='create_folder', cntrller=cntrller, parent_id=trans.security.encode_id( library.root_folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add folder</a></li>
%endif
</ul>
diff -r e4592fc99acc -r 34babf71a09f templates/library/common/common.mako
--- a/templates/library/common/common.mako Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/library/common/common.mako Tue Mar 09 16:28:04 2010 -0500
@@ -87,7 +87,7 @@
%endif
</%def>
-<%def name="render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history )">
+<%def name="render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history, show_deleted )">
<% import os, os.path %>
%if upload_option in [ 'upload_file', 'upload_directory', 'upload_paths' ]:
<div class="toolForm" id="upload_library_dataset">
@@ -106,6 +106,7 @@
<input type="hidden" name="library_id" value="${library_id}"/>
<input type="hidden" name="folder_id" value="${folder_id}"/>
<input type="hidden" name="upload_option" value="${upload_option}"/>
+ <input type="hidden" name="show_deleted" value="${show_deleted}"/>
%if replace_dataset not in [ None, 'None' ]:
<input type="hidden" name="replace_id" value="${trans.security.encode_id( replace_dataset.id )}"/>
<div class="form-row">
diff -r e4592fc99acc -r 34babf71a09f templates/library/common/upload.mako
--- a/templates/library/common/upload.mako Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/library/common/upload.mako Tue Mar 09 16:28:04 2010 -0500
@@ -26,26 +26,26 @@
## Don't allow multiple datasets to be uploaded when replacing a dataset with a new version
<a id="upload-librarydataset--popup" class="popup-arrow" style="display: none;">▼</a>
<div popupmenu="upload-librarydataset--popup">
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller,library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_file' )}">Upload files</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller,library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_file', show_deleted=show_deleted )}">Upload files</a>
%if cntrller == 'library_admin':
%if trans.app.config.library_import_dir and os.path.exists( trans.app.config.library_import_dir ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory' )}">Upload directory of files</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory', show_deleted=show_deleted )}">Upload directory of files</a>
%endif
%if trans.app.config.allow_library_path_paste:
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_paths' )}">Upload files from filesystem paths</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_paths', show_deleted=show_deleted )}">Upload files from filesystem paths</a>
%endif
%elif cntrller == 'library':
%if trans.app.config.user_library_import_dir and os.path.exists( os.path.join( trans.app.config.user_library_import_dir, trans.user.email ) ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory' )}">Upload directory of files</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory', show_deleted=show_deleted )}">Upload directory of files</a>
%endif
%endif
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='import_from_history' )}">Import datasets from your current history</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='import_from_history', show_deleted=show_deleted )}">Import datasets from your current history</a>
</div>
%endif
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id )}"><span>Browse this data library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
</li>
</ul>
@@ -53,4 +53,4 @@
${render_msg( msg, messagetype )}
%endif
-${render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history )}
+${render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history, show_deleted )}
[hg] galaxy 3501: Add a helper script that will re.escape files; ...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/e4592fc99acc
changeset: 3501:e4592fc99acc
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Tue Mar 09 15:25:23 2010 -0500
description:
Add a helper script that will re.escape files; useful for creating files valid for re_match and re_match_multiline comparisons in tool tests.
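For reference, the per-line escaping the script performs (without -m) amounts to the following; with -m the whole file contents are escaped as one block. The sample line is made up:

import re

# A made-up line of tool output containing regex metacharacters (tabs, dots).
raw_line = 'chr1\t4225\t19670\tCCDS30744.1_cds_0_0_chr1_4225_f\t0\t+'
# Each line is stripped of its newline, escaped, and written back with a newline,
# so the result can serve as a pattern in a re_match comparison.
escaped = "%s\n" % re.escape( raw_line.rstrip( '\n\r' ) )
print escaped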
diffstat:
scripts/tools/re_escape_output.py | 34 ++++++++++++++++++++++++++++++++++
1 files changed, 34 insertions(+), 0 deletions(-)
diffs (38 lines):
diff -r c67b5628f348 -r e4592fc99acc scripts/tools/re_escape_output.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/tools/re_escape_output.py Tue Mar 09 15:25:23 2010 -0500
@@ -0,0 +1,34 @@
+#! /usr/bin/python
+
+"""
+Escapes a file into a form suitable for use with tool tests using re_match or re_match_multiline (when -m/--multiline option is used)
+
+usage: re_escape_output.py [options] input_file [output_file]
+ -m: Use Multiline Matching
+"""
+
+import optparse, re
+
+def __main__():
+ #Parse Command Line
+ parser = optparse.OptionParser()
+ parser.add_option( "-m", "--multiline", action="store_true", dest="multiline", default=False, help="Use Multiline Matching")
+ ( options, args ) = parser.parse_args()
+ input = open( args[0] ,'rb' )
+ if len( args ) > 1:
+ output = open( args[1], 'wb' )
+ else:
+ if options.multiline:
+ suffix = 're_match_multiline'
+ else:
+ suffix = 're_match'
+ output = open( "%s.%s" % ( args[0], suffix ), 'wb' )
+ if options.multiline:
+ lines = [ re.escape( input.read() ) ]
+ else:
+ lines = [ "%s\n" % re.escape( line.rstrip( '\n\r' ) ) for line in input ]
+ output.writelines( lines )
+ output.close()
+
+if __name__ == "__main__":
+ __main__()
[hg] galaxy 3500: Add server_side_cursors to database_engine_opt...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/c67b5628f348
changeset: 3500:c67b5628f348
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Mar 09 14:51:01 2010 -0500
description:
Add server_side_cursors to database_engine_option_ parsing
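The option is picked up by the prefix-stripping mechanism the patch touches: any config key beginning with database_engine_option_ is converted with the matching function before being handed to the database engine setup. A simplified, self-contained sketch of that mechanism (the stand-in string_as_bool and the sample values are not from the changeset):

def string_as_bool( s ):
    # simplified stand-in for galaxy.util.string_as_bool
    return str( s ).lower() in ( 'true', 'yes', 'on', '1' )

conversions = {
    'pool_recycle': int,
    'pool_size': int,
    'max_overflow': int,
    'pool_threadlocal': string_as_bool,
    'server_side_cursors': string_as_bool,   # the newly recognized option
}
prefix = 'database_engine_option_'
prefix_len = len( prefix )

# made-up config values as they might appear among the parsed ini keyword args
kwargs = {
    'database_engine_option_pool_size': '10',
    'database_engine_option_server_side_cursors': 'True',
}
database_engine_options = {}
for key, value in kwargs.items():
    if key.startswith( prefix ):
        key = key[prefix_len:]
        if key in conversions:
            value = conversions[ key ]( value )
        database_engine_options[ key ] = value

print database_engine_options   # e.g. {'pool_size': 10, 'server_side_cursors': True}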
diffstat:
lib/galaxy/config.py | 3 ++-
1 files changed, 2 insertions(+), 1 deletions(-)
diffs (13 lines):
diff -r 478447ba0ec6 -r c67b5628f348 lib/galaxy/config.py
--- a/lib/galaxy/config.py Tue Mar 09 14:29:17 2010 -0500
+++ b/lib/galaxy/config.py Tue Mar 09 14:51:01 2010 -0500
@@ -172,7 +172,8 @@
'pool_recycle': int,
'pool_size': int,
'max_overflow': int,
- 'pool_threadlocal': string_as_bool
+ 'pool_threadlocal': string_as_bool,
+ 'server_side_cursors': string_as_bool
}
prefix = "database_engine_option_"
prefix_len = len( prefix )
[hg] galaxy 3498: Allow better testing of tool outputs with an u...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/821b6cbbee1c
changeset: 3498:821b6cbbee1c
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Tue Mar 09 14:28:33 2010 -0500
description:
Allow better testing of tool outputs with an unknown number of dynamically created outputs.
diffstat:
test/functional/test_toolbox.py | 11 ++++++++++-
1 files changed, 10 insertions(+), 1 deletions(-)
diffs (28 lines):
diff -r a719c6971613 -r 821b6cbbee1c test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py Tue Mar 09 14:13:05 2010 -0500
+++ b/test/functional/test_toolbox.py Tue Mar 09 14:28:33 2010 -0500
@@ -53,6 +53,11 @@
if isinstance( input_value, grouping.Repeat ):
repeat_name = input_name
break
+ #check if we need to verify number of outputs created dynamically by tool
+ if testdef.tool.force_history_refresh:
+ job_finish_by_output_count = len( self.get_history_as_data_list() )
+ else:
+ job_finish_by_output_count = False
# Do the first page
page_inputs = self.__expand_grouping(testdef.tool.inputs_by_page[0], all_inputs)
# Run the tool
@@ -65,7 +70,11 @@
print "page_inputs (%i)" % i, page_inputs
# Check the results ( handles single or multiple tool outputs ). Make sure to pass the correct hid.
# The output datasets from the tool should be in the same order as the testdef.outputs.
- data_list = self.get_history_as_data_list()
+ data_list = None
+ while data_list is None:
+ data_list = self.get_history_as_data_list()
+ if job_finish_by_output_count and len( testdef.outputs ) > ( len( data_list ) - job_finish_by_output_count ):
+ data_list = None
self.assertTrue( data_list )
elem_index = 0 - len( testdef.outputs )
for output_tuple in testdef.outputs:
[hg] galaxy 3499: Update tool tests for MAF to interval tool
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/478447ba0ec6
changeset: 3499:478447ba0ec6
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Tue Mar 09 14:29:17 2010 -0500
description:
Update tool tests for MAF to interval tool
diffstat:
tools/maf/maf_to_interval.xml | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r 821b6cbbee1c -r 478447ba0ec6 tools/maf/maf_to_interval.xml
--- a/tools/maf/maf_to_interval.xml Tue Mar 09 14:28:33 2010 -0500
+++ b/tools/maf/maf_to_interval.xml Tue Mar 09 14:29:17 2010 -0500
@@ -27,7 +27,7 @@
<param name="complete_blocks" value="partial_disallowed"/>
<param name="remove_gaps" value="keep_gaps"/>
<param name="species" value="panTro1" />
- <!-- <output name="out_file1" file="maf_to_interval_out_hg17.interval"/> cannot test primary species, because we cannot leave species blank and we can only test the last item added to a history-->
+ <output name="out_file1" file="maf_to_interval_out_hg17.interval"/>
<output name="out_file1" file="maf_to_interval_out_panTro1.interval"/>
</test>
</tests>
[hg] galaxy 3497: Update psycopg2 to 2.0.13/postgresql 8.4.2
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/a719c6971613
changeset: 3497:a719c6971613
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Mar 09 14:13:05 2010 -0500
description:
Update psycopg2 to 2.0.13/postgresql 8.4.2
diffstat:
dist-eggs.ini | 2 +-
eggs.ini | 6 +-
scripts/scramble/scripts/psycopg2-linux.py | 1 +
scripts/scramble/scripts/psycopg2-macosx.py | 85 ++++++++++++++++++++++++++++
scripts/scramble/scripts/psycopg2-solaris.py | 1 +
scripts/scramble/scripts/psycopg2.py | 1 +
6 files changed, 92 insertions(+), 4 deletions(-)
diffs (162 lines):
diff -r 5f7ace3195b7 -r a719c6971613 dist-eggs.ini
--- a/dist-eggs.ini Tue Mar 09 13:54:30 2010 -0500
+++ b/dist-eggs.ini Tue Mar 09 14:13:05 2010 -0500
@@ -22,7 +22,7 @@
py2.4-macosx-10.3-fat-ucs2 = medeski.bx.psu.edu /usr/local/bin/python2.4
py2.5-macosx-10.3-fat-ucs2 = medeski.bx.psu.edu /usr/local/bin/python2.5
py2.6-macosx-10.3-fat-ucs2 = medeski.bx.psu.edu /usr/local/bin/python2.6
-py2.6-macosx-10.6-universal-ucs2 = lion.bx.psu.edu /usr/bin/python2.6
+py2.6-macosx-10.6-universal-ucs2 = bach.bx.psu.edu /usr/bin/python2.6
py2.4-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.4
py2.5-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.5
py2.6-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.6
diff -r 5f7ace3195b7 -r a719c6971613 eggs.ini
--- a/eggs.ini Tue Mar 09 13:54:30 2010 -0500
+++ b/eggs.ini Tue Mar 09 14:13:05 2010 -0500
@@ -17,7 +17,7 @@
DRMAA_python = 0.2
MySQL_python = 1.2.3c1
pbs_python = 2.9.4
-psycopg2 = 2.0.6
+psycopg2 = 2.0.13
pycrypto = 2.0.1
pysam = 0.1.1
pysqlite = 2.5.6
@@ -56,7 +56,7 @@
; extra version information
[tags]
-psycopg2 = _8.2.6_static
+psycopg2 = _8.4.2_static
pysqlite = _3.6.17_static
MySQL_python = _5.1.41_static
bx_python = _dev_3b9d30e47619
@@ -68,5 +68,5 @@
; the wiki page above
[source]
MySQL_python = mysql-5.1.41
-psycopg2 = postgresql-8.2.6
+psycopg2 = postgresql-8.4.2
pysqlite = sqlite-amalgamation-3_6_17
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2-linux.py
--- a/scripts/scramble/scripts/psycopg2-linux.py Tue Mar 09 13:54:30 2010 -0500
+++ b/scripts/scramble/scripts/psycopg2-linux.py Tue Mar 09 14:13:05 2010 -0500
@@ -20,6 +20,7 @@
"Configuring postgres (./configure)" )
# compile
+ run( "make ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; make ../../src/include/utils/fmgroids.h)" )
run( "make", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make)" )
run( "make", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make)" )
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2-macosx.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/scramble/scripts/psycopg2-macosx.py Tue Mar 09 14:13:05 2010 -0500
@@ -0,0 +1,85 @@
+import os, sys, shutil
+from distutils.sysconfig import get_config_var
+
+def prep_postgres( prepped, args ):
+
+ pg_version = args['version']
+ pg_srcdir = os.path.join( os.getcwd(), "postgresql-%s" % pg_version )
+
+ # set up environment
+ os.environ['CC'] = get_config_var('CC')
+ os.environ['CFLAGS'] = get_config_var('CFLAGS')
+ os.environ['LDFLAGS'] = get_config_var('LDFLAGS')
+
+ if '-fPIC' not in os.environ['CFLAGS']:
+ os.environ['CFLAGS'] += ' -fPIC'
+
+ # run configure
+ run( "./configure --prefix=%s/postgres --disable-dependency-tracking --enable-static --disable-shared --without-readline --with-thread-safety" % os.getcwd(),
+ os.path.join( os.getcwd(), "postgresql-%s" % pg_version ),
+ "Configuring postgres (./configure)" )
+
+ # compile
+ run( "make ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; make ../../src/include/utils/fmgroids.h)" )
+ run( "make all-static-lib", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make)" )
+ run( "make", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make)" )
+
+ # install
+ run( "make install-lib-static", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make install)" )
+ run( "make install", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make install)" )
+ run( "make install", os.path.join( pg_srcdir, 'src', 'include' ), "Compiling pg_config (cd src/include; make install)" )
+
+ # manually install some headers
+ run( "cp libpq-fe.h %s" % os.path.join( os.getcwd(), 'postgres', 'include' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+ run( "cp libpq-events.h %s" % os.path.join( os.getcwd(), 'postgres', 'include' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+ run( "cp libpq-int.h %s" % os.path.join( os.getcwd(), 'postgres', 'include', 'internal' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+ run( "cp pqexpbuffer.h %s" % os.path.join( os.getcwd(), 'postgres', 'include', 'internal' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+
+ # create prepped archive
+ print "%s(): Creating prepped archive for future builds at:" % sys._getframe().f_code.co_name
+ print " ", prepped
+ compress( prepped,
+ 'postgres/bin',
+ 'postgres/include',
+ 'postgres/lib' )
+
+if __name__ == '__main__':
+
+ # change back to the build dir
+ if os.path.dirname( sys.argv[0] ) != "":
+ os.chdir( os.path.dirname( sys.argv[0] ) )
+
+ # find setuptools
+ sys.path.append( os.path.abspath( os.path.join( '..', '..', '..', 'lib' ) ) )
+ from scramble_lib import *
+
+ tag = get_tag()
+
+ pg_version = ( tag.split( "_" ) )[1]
+ pg_archive_base = os.path.join( archives, "postgresql-%s" % pg_version )
+ pg_archive = get_archive( pg_archive_base )
+ pg_archive_prepped = os.path.join( archives, "postgresql-%s-%s.tar.gz" % ( pg_version, platform_noucs ) )
+
+ # clean up any existing stuff (could happen if you run scramble.py by hand)
+ clean( [ 'postgresql-%s' % pg_version ] )
+
+ # unpack postgres
+ unpack_dep( pg_archive, pg_archive_prepped, prep_postgres, dict( version=pg_version ) )
+
+ # localize setup.cfg
+ if not os.path.exists( 'setup.cfg.orig' ):
+ shutil.copy( 'setup.cfg', 'setup.cfg.orig' )
+ f = open( 'setup.cfg', 'a' )
+ f.write( '\npg_config=postgres/bin/pg_config\n' )
+ f.close()
+
+ # tag
+ me = sys.argv[0]
+ sys.argv = [ me ]
+ if tag is not None:
+ sys.argv.append( "egg_info" )
+ sys.argv.append( "--tag-build=%s" %tag )
+ sys.argv.append( "bdist_egg" )
+
+ # go
+ execfile( "setup.py", globals(), locals() )
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2-solaris.py
--- a/scripts/scramble/scripts/psycopg2-solaris.py Tue Mar 09 13:54:30 2010 -0500
+++ b/scripts/scramble/scripts/psycopg2-solaris.py Tue Mar 09 14:13:05 2010 -0500
@@ -23,6 +23,7 @@
"Configuring postgres (./configure)" )
# compile
+ run( "gmake ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; gmake ../../src/include/utils/fmgroids.h)" )
run( "gmake", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; gmake)" )
run( "gmake", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; gmake)" )
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2.py
--- a/scripts/scramble/scripts/psycopg2.py Tue Mar 09 13:54:30 2010 -0500
+++ b/scripts/scramble/scripts/psycopg2.py Tue Mar 09 14:13:05 2010 -0500
@@ -20,6 +20,7 @@
"Configuring postgres (./configure)" )
# compile
+ run( "make ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; make ../../src/include/utils/fmgroids.h)" )
run( "make", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make)" )
run( "make", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make)" )
[hg] galaxy 3496: Allow uploading and use of composite files in ...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/5f7ace3195b7
changeset: 3496:5f7ace3195b7
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Tue Mar 09 13:54:30 2010 -0500
description:
Allow uploading and use of composite files in toolbox tests. Along with setting each component of a composite datatype, metadata attributes (e.g. basename in Rgenetics datatypes) can be set.
Example Tool:
<tool id='temp' name='temp test'>
<description>test</description>
<command>cat ${i.extra_files_path}/${i.metadata.base_name}.fam > $out_file1</command>
<inputs>
<param name="i" type="data" label="RGenetics genotype data from your current history" format="pbed" />
</inputs>
<outputs>
<data format="text" name="out_file1" />
</outputs>
<tests>
<test>
<param name='i' ftype='pbed'>
<metadata name='base_name' value='rgenetics_CHANGED' />
<composite_data value='temp/somefile1' />
<composite_data value='temp/somefile2' />
<composite_data value='temp/somefile3' />
</param>
<output name='out_file1' file='temp/somefile3' />
</test>
</tests>
</tool>
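As a rough illustration of how the framework consumes those child elements (the real loading happens in the lib/galaxy/tools and test_toolbox.py hunks in the diffs that follow), the sketch below parses the example param with xml.etree.ElementTree and splits its children into the two groups the tests now recognize. The XML string is copied from the example above; everything else is illustrative only.

from xml.etree import ElementTree

param_xml = """
<param name='i' ftype='pbed'>
    <metadata name='base_name' value='rgenetics_CHANGED' />
    <composite_data value='temp/somefile1' />
    <composite_data value='temp/somefile2' />
    <composite_data value='temp/somefile3' />
</param>
"""

param_elem = ElementTree.fromstring(param_xml)
children = list(param_elem)  # the same elements getchildren() returns in the diff

# Split the children the same way test_toolbox.py does below.
metadata = [child for child in children if child.tag == 'metadata']
composite_data = [child for child in children if child.tag == 'composite_data']

print([m.get('name') for m in metadata])         # ['base_name']
print([c.get('value') for c in composite_data])  # ['temp/somefile1', 'temp/somefile2', 'temp/somefile3']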
diffstat:
lib/galaxy/tools/__init__.py | 1 +
lib/galaxy/tools/test.py | 4 +++-
test/base/twilltestcase.py | 16 ++++++++++++----
test/functional/test_toolbox.py | 5 ++++-
4 files changed, 20 insertions(+), 6 deletions(-)
diffs (73 lines):
diff -r 3b1be99d1f62 -r 5f7ace3195b7 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Tue Mar 09 11:28:21 2010 -0500
+++ b/lib/galaxy/tools/__init__.py Tue Mar 09 13:54:30 2010 -0500
@@ -523,6 +523,7 @@
value = attrib['value']
else:
value = None
+ attrib['children'] = list( param_elem.getchildren() )
test.add_param( attrib.pop( 'name' ), value, attrib )
for output_elem in test_elem.findall( "output" ):
attrib = dict( output_elem.attrib )
diff -r 3b1be99d1f62 -r 5f7ace3195b7 lib/galaxy/tools/test.py
--- a/lib/galaxy/tools/test.py Tue Mar 09 11:28:21 2010 -0500
+++ b/lib/galaxy/tools/test.py Tue Mar 09 13:54:30 2010 -0500
@@ -30,10 +30,12 @@
if isinstance( input_value, grouping.Conditional ) or isinstance( input_value, grouping.Repeat ):
self.__expand_grouping_for_data_input(name, value, extra, input_name, input_value)
elif isinstance( self.tool.inputs[name], parameters.DataToolParameter ) and ( value, extra ) not in self.required_files:
- if value is None:
+ if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) == 0:
assert self.tool.inputs[name].optional, '%s is not optional. You must provide a valid filename.' % name
else:
self.required_files.append( ( value, extra ) )
+ if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) > 0:
+ value = extra.get( 'ftype' )
except Exception, e:
log.debug( "Error in add_param for %s: %s" % ( name, e ) )
self.inputs.append( ( name, value, extra ) )
diff -r 3b1be99d1f62 -r 5f7ace3195b7 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Mar 09 11:28:21 2010 -0500
+++ b/test/base/twilltestcase.py Tue Mar 09 13:54:30 2010 -0500
@@ -142,14 +142,22 @@
filename = os.path.join( *path )
file(filename, 'wt').write(buffer.getvalue())
- def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)' ):
+ def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)', metadata = None, composite_data = None ):
"""Uploads a file"""
- filename = self.get_filename(filename)
self.visit_url( "%s/tool_runner?tool_id=upload1" % self.url )
try:
- tc.fv("1","file_type", ftype)
+ self.refresh_form( "file_type", ftype ) #Refresh, to support composite files
tc.fv("1","dbkey", dbkey)
- tc.formfile("1","file_data", filename)
+ if metadata:
+ for elem in metadata:
+ tc.fv( "1", "files_metadata|%s" % elem.get( 'name' ), elem.get( 'value' ) )
+ if composite_data:
+ for i, composite_file in enumerate( composite_data ):
+ filename = self.get_filename( composite_file.get( 'value' ) )
+ tc.formfile( "1", "files_%i|file_data" % i, filename )
+ else:
+ filename = self.get_filename( filename )
+ tc.formfile( "1", "file_data", filename )
tc.submit("runtool_btn")
self.home()
except AssertionError, err:
diff -r 3b1be99d1f62 -r 5f7ace3195b7 test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py Tue Mar 09 11:28:21 2010 -0500
+++ b/test/functional/test_toolbox.py Tue Mar 09 13:54:30 2010 -0500
@@ -32,7 +32,10 @@
raise AssertionError("ToolTestCase.do_it failed")
# Upload any needed files
for fname, extra in testdef.required_files:
- self.upload_file( fname, ftype=extra.get( 'ftype', 'auto' ), dbkey=extra.get( 'dbkey', 'hg17' ) )
+ children = extra.get( 'children', [] )
+ metadata = [ child for child in children if child.tag == 'metadata' ]
+ composite_data = [ child for child in children if child.tag == 'composite_data' ]
+ self.upload_file( fname, ftype=extra.get( 'ftype', 'auto' ), dbkey=extra.get( 'dbkey', 'hg17' ), metadata = metadata, composite_data = composite_data )
print "Uploaded file: ", fname, ", ftype: ", extra.get( 'ftype', 'auto' ), ", extra: ", extra
# We need to handle the case where we've uploaded a valid compressed file since the upload
# tool will have uncompressed it on the fly.
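To make the upload side concrete, the following sketch mirrors the form-field naming that upload_file uses in the twilltestcase.py hunk above. The field patterns "files_metadata|<name>" and "files_<i>|file_data" are taken from the diff; the plain dicts standing in for the metadata and composite_data XML elements, and the print-out, are illustrative only.

# Stand-ins for the <metadata> and <composite_data> elements of a test param.
metadata = [{'name': 'base_name', 'value': 'rgenetics_CHANGED'}]
composite_data = [{'value': 'temp/somefile1'},
                  {'value': 'temp/somefile2'},
                  {'value': 'temp/somefile3'}]

fields = []
for elem in metadata:
    # upload_file does: tc.fv( "1", "files_metadata|%s" % name, value )
    fields.append(('files_metadata|%s' % elem['name'], elem['value']))
for i, composite_file in enumerate(composite_data):
    # upload_file does: tc.formfile( "1", "files_%i|file_data" % i, filename )
    fields.append(('files_%i|file_data' % i, composite_file['value']))

for name, value in fields:
    print('%s = %s' % (name, value))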