galaxy-dev
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2009 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2008 -----
- December
- November
- October
- September
- August
September 2009
- 15 participants
- 140 discussions
details: http://www.bx.psu.edu/hg/galaxy/rev/8877ef766447
changeset: 2680:8877ef766447
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Sep 11 11:26:26 2009 -0400
description:
Fix for cleanup_datasets.py script.
1 file(s) affected in this change:
scripts/cleanup_datasets/cleanup_datasets.py
diffs (25 lines):
diff -r ed4cbaf23c88 -r 8877ef766447 scripts/cleanup_datasets/cleanup_datasets.py
--- a/scripts/cleanup_datasets/cleanup_datasets.py Fri Sep 11 09:00:36 2009 -0400
+++ b/scripts/cleanup_datasets/cleanup_datasets.py Fri Sep 11 11:26:26 2009 -0400
@@ -1,4 +1,8 @@
#!/usr/bin/env python
+
+from galaxy import eggs
+import pkg_resources
+pkg_resources.require( "SQLAlchemy >= 0.4" )
import sys, os, time, ConfigParser, shutil
from datetime import datetime, timedelta
@@ -9,12 +13,7 @@
new_path.extend( sys.path[1:] ) # remove scripts/ from the path
sys.path = new_path
-from galaxy import eggs
import galaxy.model.mapping
-import pkg_resources
-
-pkg_resources.require( "SQLAlchemy >= 0.4" )
-
from galaxy.model.orm import and_, eagerload
assert sys.version_info[:2] >= ( 2, 4 )
1
0
14 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/ed4cbaf23c88
changeset: 2679:ed4cbaf23c88
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Sep 11 09:00:36 2009 -0400
description:
Eliminate buttons from shared history grid that were not meant to be committed.
1 file(s) affected in this change:
lib/galaxy/web/controllers/history.py
diffs (35 lines):
diff -r f0adb6152df9 -r ed4cbaf23c88 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Thu Sep 10 21:24:06 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Fri Sep 11 09:00:36 2009 -0400
@@ -104,9 +104,7 @@
]
operations = [
grids.GridOperation( "Clone" ),
- grids.GridOperation( "Unshare" ),
- grids.GridOperation( "Enable import via link", condition=( lambda item: item.deleted ) ),
- grids.GridOperation( "Disable import via link", condition=( lambda item: item.deleted ) )
+ grids.GridOperation( "Unshare" )
]
standard_filters = []
def build_initial_query( self, session ):
@@ -280,20 +278,6 @@
association.flush()
message = "Unshared %d shared histories" % len( ids )
status = 'done'
- elif operation == "enable import via link":
- if ids:
- histories = [ get_history( trans, id ) for id in ids ]
- for history in histories:
- if not history.importable:
- history.importable = True
- history.flush()
- elif operation == "disable import via link":
- if ids:
- histories = [ get_history( trans, id ) for id in ids ]
- for history in histories:
- if history.importable:
- history.importable = False
- history.flush()
# Render the list view
return self.shared_list_grid( trans, status=status, message=message, **kwargs )
@web.expose
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/a7b1304e736f
changeset: 2682:a7b1304e736f
user: Kelly Vincent <kpvincent(a)bx.psu.edu>
date: Fri Sep 11 14:38:05 2009 -0400
description:
Added Bowtie wrapper tool
9 file(s) affected in this change:
test-data/bowtie_in1.fastq
test-data/bowtie_in2.fastq
test-data/bowtie_in3.fastq
test-data/bowtie_out1.sam
test-data/bowtie_out2.sam
tool-data/bowtie_indices.loc.sample
tool_conf.xml.sample
tools/sr_mapping/bowtie_wrapper.py
tools/sr_mapping/bowtie_wrapper.xml
diffs (819 lines):
diff -r e7b899fb4462 -r a7b1304e736f test-data/bowtie_in1.fastq
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/bowtie_in1.fastq Fri Sep 11 14:38:05 2009 -0400
@@ -0,0 +1,5 @@
+@HWI-EAS91_1_30788AAXX:1:1:1513:715/1
+GTTTTTTNNGCATAGATGTTTAGTTGTGGTAGTCAG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIDI?II-+I
+
diff -r e7b899fb4462 -r a7b1304e736f test-data/bowtie_in2.fastq
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/bowtie_in2.fastq Fri Sep 11 14:38:05 2009 -0400
@@ -0,0 +1,4 @@
+@HWI-EAS91_1_30788AAXX:1:2:618:346/1
+TAGACTACGAAAGTGACTTTAATACCTCTGACTACA
++
+IIIIIIIIIIIIIIIIIIIIIIIIIIIII%4II;I3
diff -r e7b899fb4462 -r a7b1304e736f test-data/bowtie_in3.fastq
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/bowtie_in3.fastq Fri Sep 11 14:38:05 2009 -0400
@@ -0,0 +1,4 @@
+@HWI-EAS91_1_30788AAXX:1:2:618:346/2
+ATAGGCTGAATTAGCAATGGATGGTGGGGTTTATCG
++
+IIIIIIIIIIIIIII9I.II5II6DFIIIIII*I2)
diff -r e7b899fb4462 -r a7b1304e736f test-data/bowtie_out1.sam
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/bowtie_out1.sam Fri Sep 11 14:38:05 2009 -0400
@@ -0,0 +1,1 @@
+HWI-EAS91_1_30788AAXX:1:1:1513:715 16 chrM 9563 25 36M * 0 0 CTGACTACCACAACTAAACATCTATGCNNAAAAAAC I+-II?IDIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:1 X1:i:1 MD:Z:7N0N27
diff -r e7b899fb4462 -r a7b1304e736f test-data/bowtie_out2.sam
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/bowtie_out2.sam Fri Sep 11 14:38:05 2009 -0400
@@ -0,0 +1,2 @@
+HWI-EAS91_1_30788AAXX:1:2:618:346 0 chrM 441 25 36M * 0 0 TAGACTACGAAAGTGACTTTAATACCTCTGACTACA IIIIIIIIIIIIIIIIIIIIIIIIIIIII%4II;I3 NM:i:0 X0:i:1 MD:Z:36
+HWI-EAS91_1_30788AAXX:1:2:618:346 16 chrM 652 25 36M * 0 0 CGATAAACCCCACCATCCATTGCTAATTCAGCCTAT )2I*IIIIIIFD6II5II.I9IIIIIIIIIIIIIII NM:i:1 X1:i:1 MD:Z:17A18
diff -r e7b899fb4462 -r a7b1304e736f tool-data/bowtie_indices.loc.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool-data/bowtie_indices.loc.sample Fri Sep 11 14:38:05 2009 -0400
@@ -0,0 +1,28 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of Bowtie indexed sequences data files. You will need
+#to create these data files and then create a bowtie_indices.loc file
+#similar to this one (store it in this directory ) that points to
+#the directories in which those files are stored. The bowtie_indices.loc
+#file has this format (white space characters are TAB characters):
+#
+#<build> <file_base>
+#
+#So, for example, if you had hg18 indexed stored in
+#/depot/data2/galaxy/bowtie/hg18/,
+#then the bowtie_indices.loc entry would look like this:
+#
+#hg18 /depot/data2/galaxy/bowtie/hg18/hg18
+#
+#and your /depot/data2/galaxy/bowtie/hg18/ directory
+#would contain hg18.*.ebwt files:
+#
+#-rw-r--r-- 1 james universe 830134 2005-09-13 10:12 hg18.1.ebwt
+#-rw-r--r-- 1 james universe 527388 2005-09-13 10:12 hg18.2.ebwt
+#-rw-r--r-- 1 james universe 269808 2005-09-13 10:12 gh18.3.ebwt
+#...etc...
+#
+#Your bowtie_indices.loc file should include an entry per line for
+#each index set you have stored. The "file" in the path does not actually
+#exist, but it is the prefix for the actual index files. For example:
+#
+#hg18 /depot/data2/galaxy/bowtie/hg18/hg18
diff -r e7b899fb4462 -r a7b1304e736f tool_conf.xml.sample
--- a/tool_conf.xml.sample Fri Sep 11 12:48:33 2009 -0400
+++ b/tool_conf.xml.sample Fri Sep 11 14:38:05 2009 -0400
@@ -332,7 +332,8 @@
<tool file="metag_tools/megablast_xml_parser.xml" />
<tool file="metag_tools/blat_wrapper.xml" />
<tool file="metag_tools/mapping_to_ucsc.xml" />
- </section>
+ <tool file="sr_mapping/bowtie_wrapper.xml" />
+ </section>
<section name="Tracks" id="tracks">
<tool file="visualization/genetrack.xml" />
</section>
diff -r e7b899fb4462 -r a7b1304e736f tools/sr_mapping/bowtie_wrapper.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/sr_mapping/bowtie_wrapper.py Fri Sep 11 14:38:05 2009 -0400
@@ -0,0 +1,174 @@
+#! /usr/bin/python
+
+"""
+Runs Bowtie on single-end or paired-end data.
+"""
+
+import optparse, os, sys, tempfile
+
+def stop_err( msg ):
+ sys.stderr.write( "%s\n" % msg )
+ sys.exit()
+
+def __main__():
+ #Parse Command Line
+ parser = optparse.OptionParser()
+ parser.add_option('', '--input1', dest='input1', help='The (forward or single-end) reads file in Sanger FASTQ format')
+ parser.add_option('', '--input2', dest='input2', help='The reverse reads file in Sanger FASTQ format')
+ parser.add_option('', '--output', dest='output', help='The output file')
+ parser.add_option('', '--paired', dest='paired', help='Whether the data is single- or paired-end')
+ parser.add_option('', '--genomeSource', dest='genomeSource', help='The type of reference provided')
+ parser.add_option('', '--ref', dest='ref', help='The reference genome to use or index')
+ parser.add_option('', '--skip', dest='skip', help='Skip the first n reads')
+ parser.add_option('', '--alignLimit', dest='alignLimit', help='Only align the first n reads')
+ parser.add_option('', '--trimH', dest='trimH', help='Trim n bases from high-quality (left) end of each read before alignment')
+ parser.add_option('', '--trimL', dest='trimL', help='Trim n bases from low-quality (right) end of each read before alignment')
+ parser.add_option('', '--mismatchSeed', dest='mismatchSeed', help='Maximum number of mismatches permitted in the seed')
+ parser.add_option('', '--mismatchQual', dest='mismatchQual', help='Maximum permitted total of quality values at mismatched read positions')
+ parser.add_option('', '--seedLen', dest='seedLen', help='Seed length')
+ parser.add_option('', '--rounding', dest='rounding', help='Whether or not to round to the nearest 10 and saturating at 30')
+ parser.add_option('', '--maqSoapAlign', dest='maqSoapAlign', help='Choose MAQ- or SOAP-like alignment policy')
+ parser.add_option('', '--tryHard', dest='tryHard', help='Whether or not to try as hard as possible to find valid alignments when they exist')
+ parser.add_option('', '--valAlign', dest='valAlign', help='Report up to n valid arguments per read')
+ parser.add_option('', '--allValAligns', dest='allValAligns', help='Whether or not to report all valid alignments per read')
+ parser.add_option('', '--suppressAlign', dest='suppressAlign', help='Suppress all alignments for a read if more than n reportable alignments exist')
+ parser.add_option('', '--offbase', dest='offbase', help='Number the first base of a reference sequence as n when outputting alignments')
+ parser.add_option('', '--best', dest='best', help="Whether or not to make Bowtie guarantee that reported singleton alignments are 'best' in terms of stratum and in terms of the quality values at the mismatched positions")
+ parser.add_option('', '--maxBacktracks', dest='maxBacktracks', help='Maximum number of backtracks permitted when aligning a read')
+ parser.add_option('', '--threadMem', dest='threadMem', help='Number of megabytes of memory a given thread is given to store path descriptors in best mode')
+ parser.add_option('', '--strata', dest='strata', help='Whether or not to report only those alignments that fall in the best stratum if many valid alignments exist and are reportable')
+ parser.add_option('', '--minInsert', dest='minInsert', help='Minimum insert size for valid paired-end alignments')
+ parser.add_option('', '--maxInsert', dest='maxInsert', help='Maximum insert size for valid paired-end alignments')
+ parser.add_option('', '--mateOrient', dest='mateOrient', help='The upstream/downstream mate orientation for valid paired-end alignment against the forward reference strand')
+ parser.add_option('', '--maxAlignAttempt', dest='maxAlignAttempt', help='Maximum number of attempts Bowtie will make to match an alignment for one mate with an alignment for the opposite mate')
+ parser.add_option('', '--forwardAlign', dest='forwardAlign', help='Whether or not to attempt to align the forward reference strand')
+ parser.add_option('', '--reverseAlign', dest='reverseAlign', help='Whether or not to attempt to align the reverse-complement reference strand')
+ parser.add_option('', '--phased', dest='phased', help='Whether or not it should alternate between using the forward and mirror indexes in a series of phases so that only half of the index is resident in memory at one time')
+ parser.add_option('', '--offrate', dest='offrate', help='Override the offrate of the index to n')
+ parser.add_option('', '--mm', dest='mm', help='Whether or not to use memory-mapped I/O to load the index')
+ parser.add_option('', '--seed', dest='seed', help='Seed for pseudo-random number generator')
+ parser.add_option('', '--dbkey', dest='dbkey', help='')
+ parser.add_option('', '--params', dest='params', help='Whether to use default or specified parameters')
+ parser.add_option('', '--iauto_b', dest='iauto_b', help='Automatic or specified behavior')
+ parser.add_option('', '--ipacked', dest='ipacked', help='Whether or not to use a packed representation for DNA strings')
+ parser.add_option('', '--ibmax', dest='ibmax', help='Maximum number of suffixes allowed in a block')
+ parser.add_option('', '--ibmaxdivn', dest='ibmaxdivn', help='Maximum number of suffixes allowed in a block as a fraction of the length of the reference')
+ parser.add_option('', '--idcv', dest='idcv', help='The period for the difference-cover sample')
+ parser.add_option('', '--inodc', dest='inodc', help='Whether or not to disable the use of the difference-cover sample')
+ parser.add_option('', '--inoref', dest='inoref', help='Whether or not to build the part of the reference index used only in paried-end alignment')
+ parser.add_option('', '--ioffrate', dest='ioffrate', help='How many rows get marked during annotation of some or all of the Burrows-Wheeler rows')
+ parser.add_option('', '--iftab', dest='iftab', help='The size of the lookup table used to calculate an initial Burrows-Wheeler range with respect to the first n characters of the query')
+ parser.add_option('', '--intoa', dest='intoa', help='Whether or not to convert Ns in the reference sequence to As')
+ parser.add_option('', '--iendian', dest='iendian', help='Endianness to use when serializing integers to the index file')
+ parser.add_option('', '--iseed', dest='iseed', help='Seed for the pseudorandom number generator')
+ parser.add_option('', '--icutoff', dest='icutoff', help='Number of first bases of the reference sequence to index')
+ parser.add_option('', '--ioldpmap', dest='ioldpmap', help='Use the scheme for mapping joined reference locations to original reference locations used in versions of Bowtie prior to 0.9.8')
+ parser.add_option('', '--indexSettings', dest='index_settings', help='Whether or not indexing options are to be set')
+ (options, args) = parser.parse_args()
+
+ # index if necessary
+ if options.genomeSource == 'history':
+ # set up commands
+ if options.index_settings =='index_pre_set':
+ indexing_cmds = ''
+ else:
+ try:
+ indexing_cmds = '%s %s %s %s %s %s %s --offrate %s %s %s %s %s %s %s' % \
+ (('','--noauto')[options.iauto_b=='set'],
+ ('','--packed')[options.ipacked=='packed'],
+ ('','--bmax %s'%options.ibmax)[options.ibmax!='None' and options.ibmax>=1],
+ ('','--bmaxdivn %s'%options.ibmaxdivn)[options.ibmaxdivn!='None'],
+ ('','--dcv %s'%options.idcv)[options.idcv!='None'],
+ ('','--nodc')[options.inodc=='nodc'],
+ ('','--noref')[options.inoref=='noref'], options.ioffrate,
+ ('','--ftabchars %s'%options.iftab)[int(options.iftab)>=0],
+ ('','--ntoa')[options.intoa=='yes'],
+ ('--little','--big')[options.iendian=='big'],
+ ('','--seed %s'%options.iseed)[int(options.iseed)>0],
+ ('','--cutoff %s'%options.icutoff)[int(options.icutoff)>0],
+ ('','--oldpmap')[options.ioldpmap=='yes'])
+ except ValueError:
+ indexing_cmds = ''
+
+ # make temp directory for placement of indices and copy reference file there
+ tmp_dir = tempfile.gettempdir()
+ try:
+ os.system('cp %s %s' % (options.ref, tmp_dir))
+ except Exception, erf:
+ stop_err('Error creating temp directory for indexing purposes\n' + str(erf))
+ options.ref = os.path.join(tmp_dir,os.path.split(options.ref)[1])
+ cmd1 = 'cd %s; bowtie-build %s -f %s %s > /dev/null' % (tmp_dir, indexing_cmds, options.ref, options.ref)
+ try:
+ os.system(cmd1)
+ except Exception, erf:
+ stop_err('Error indexing reference sequence\n' + str(erf))
+
+ # set up aligning and generate aligning command options
+ # automatically set threads to 8 in both cases
+ if options.params == 'pre_set':
+ aligning_cmds = '-p 8'
+ else:
+ try:
+ aligning_cmds = '%s %s %s %s %s %s %s %s %s %s %s %s %s %s ' \
+ '%s %s %s %s %s %s %s %s %s %s %s %s %s %s -p 8' % \
+ (('','-s %s'%options.skip)[options.skip!='None'],
+ ('','-u %s'%options.alignLimit)[int(options.alignLimit)>0],
+ ('','-5 %s'%options.trimH)[int(options.trimH)>=0],
+ ('','-3 %s'%options.trimL)[int(options.trimL)>=0],
+ ('','-n %s'%options.mismatchSeed)[options.mismatchSeed=='0' or options.mismatchSeed=='1' or options.mismatchSeed=='2' or options.mismatchSeed=='3'],
+ ('','-e %s'%options.mismatchQual)[int(options.mismatchQual)>=0],
+ ('','-l %s'%options.seedLen)[int(options.seedLen)>=5],
+ ('','--nomaqround')[options.rounding=='noRound'],
+ ('','-v %s'%options.maqSoapAlign)[options.maqSoapAlign!='-1'],
+ ('','-I %s'%options.minInsert)[options.minInsert!='None'],
+ ('','-X %s'%options.maxInsert)[options.maxInsert!='None'],
+ ('','--%s'%options.mateOrient)[options.mateOrient!='None'],
+ ('','--pairtries %s'%options.maxAlignAttempt)[int(options.maxAlignAttempt)>=0],
+ ('','--nofw')[options.forwardAlign=='noForward'],
+ ('','--norc')[options.reverseAlign=='noReverse'],
+ ('','--maxbts %s'%options.maxBacktracks)[options.maxBacktracks!='None' and (options.mismatchSeed=='2' or options.mismatchSeed=='3')],
+ ('','-y')[options.tryHard=='doTryHard'],
+ ('','--chunkmbs %s'%options.threadMem)[options.threadMem!='None' and int(options.threadMem)>=0],
+ ('','-k %s'%options.valAlign)[options.valAlign!='None' and int(options.valAlign)>=0],
+ ('','-a')[options.allValAligns=='doAllValAligns' and int(options.allValAligns)>=0],
+ ('','-m %s'%options.suppressAlign)[int(options.suppressAlign)>=0],
+ ('','--best')[options.best=='doBest'],
+ ('','--strata')[options.strata=='doStrata'],
+ ('','-B %s'%options.offbase)[int(options.offbase)>=0],
+ ('','-z %s'%options.phased)[options.phased!='None'],
+ ('','-o %s'%options.offrate)[int(options.offrate)>=0],
+ ('','--mm')[options.mm=='doMm'],
+ ('','--seed %s'%options.seed)[int(options.seed)>=0])
+ except ValueError:
+ aligning_cmds = '-p 8'
+
+ tmp_out = tempfile.NamedTemporaryFile()
+
+ # prepare actual aligning commands
+ if options.paired == 'paired':
+ cmd2 = 'bowtie %s %s -1 %s -2 %s > %s 2> /dev/null' % (aligning_cmds, options.ref, options.input1, options.input2, tmp_out.name)
+ else:
+ cmd2 = 'bowtie %s %s %s > %s 2> /dev/null' % (aligning_cmds, options.ref, options.input1, tmp_out.name)
+ # prepare command to convert bowtie output to sam and alternative
+ cmd3 = 'bowtie2sam.pl %s > %s' % (tmp_out.name, options.output)
+ cmd4 = 'cp %s %s' % (tmp_out.name, options.output)
+
+ # align
+ try:
+ os.system(cmd2)
+ except Exception, erf:
+ stop_err("Error aligning sequence\n" + str(erf))
+ if len(file(tmp_out.name,'r').read()) > 0:
+ #convert
+ try:
+ os.system(cmd3)
+ except Exception, erf:
+ stop_err('Error converting output to sam format\n' + str(erf))
+ else:
+ try:
+ os.system(cmd4)
+ sys.stdout.write('Alignment file contained no data')
+ except Exception, erf:
+ stop_err('Error producing alignment file. File contained no data.\n' + str(erf))
+
+if __name__=="__main__": __main__()
diff -r e7b899fb4462 -r a7b1304e736f tools/sr_mapping/bowtie_wrapper.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/sr_mapping/bowtie_wrapper.xml Fri Sep 11 14:38:05 2009 -0400
@@ -0,0 +1,556 @@
+<tool id="bowtie_wrapper" name="Bowtie" version="1.0.0">
+ <description> fast alignment of reads against reference sequence </description>
+ <command interpreter="python">
+ bowtie_wrapper.py
+ --input1=$singlePaired.input1
+ #if $singlePaired.sPaired == "paired":
+ --input2=$singlePaired.input2
+ #else:
+ --input2="None"
+ #end if
+ --output=$output
+ --paired=$singlePaired.sPaired
+ --genomeSource=$refGenomeSource.genomeSource
+ #if $refGenomeSource.genomeSource == "history":
+ --ref=$refGenomeSource.ownFile
+ #else:
+ --ref=$refGenomeSource.indices.value
+ #end if
+ --params=$singlePaired.params.settings_type
+ #if $singlePaired.params.settings_type == "full":
+ --skip=$singlePaired.params.skip
+ --alignLimit=$singlePaired.params.alignLimit
+ --trimH=$singlePaired.params.trimH
+ --trimL=$singlePaired.params.trimL
+ --mismatchSeed=$singlePaired.params.mismatchSeed
+ --mismatchQual=$singlePaired.params.mismatchQual
+ --seedLen=$singlePaired.params.seedLen
+ --rounding=$singlePaired.params.rounding
+ --maqSoapAlign=$singlePaired.params.maqSoapAlign
+ --tryHard=$singlePaired.params.tryHard
+ --valAlign=$singlePaired.params.valAlign
+ --allValAligns=$singlePaired.params.allValAligns
+ --suppressAlign=$singlePaired.params.suppressAlign
+ --offbase=$singlePaired.params.offbase
+ --offrate=$singlePaired.params.offrate
+ --mm=$singlePaired.params.mm
+ --seed=$singlePaired.params.seed
+ --best=$singlePaired.params.bestOption.best
+ #if $singlePaired.params.bestOption.best == "doBest":
+ --maxBacktracks=$singlePaired.params.bestOption.maxBacktracks
+ --threadMem=$singlePaired.params.bestOption.threadMem
+ --strata=$singlePaired.params.bestOption.strata
+ --phased="None"
+ #else:
+ --maxBacktracks="None"
+ --threadMem="None"
+ --strata="None"
+ #if $singlePaired.sPaired =="single":
+ --phased=$singlePaired.params.bestOption.phased
+ #else:
+ --phased="None"
+ #end if
+ #end if
+ #if $singlePaired.sPaired == "single":
+ --minInsert="None"
+ --maxInsert="None"
+ --mateOrient="None"
+ --maxAlignAttempt="None"
+ --forwardAlign="None"
+ --reverseAlign="None"
+ #else:
+ --minInsert=$singlePaired.params.minInsert
+ --maxInsert=$singlePaired.params.maxInsert
+ --mateOrient=$singlePaired.params.mateOrient
+ --maxAlignAttempt=$singlePaired.params.maxAlignAttempt
+ --forwardAlign=$singlePaired.params.forwardAlign
+ --reverseAlign=$singlePaired.params.reverseAlign
+ #end if
+ #else
+ --skip="None"
+ --alignLimit="None"
+ --trimH="None"
+ --trimL="None"
+ --mismatchSeed="None"
+ --mismatchQual="None"
+ --seedLen="None"
+ --rounding="None"
+ --maqSoapAlign="None"
+ --tryHard="None"
+ --valAlign="None"
+ --allValAligns="None"
+ --suppressAlign="None"
+ --offbase="None"
+ --best="None"
+ --maxBacktracks="None"
+ --threadMem="None"
+ --strata="None"
+ --minInsert="None"
+ --maxInsert="None"
+ --mateOrient="None"
+ --maxAlignAttempt="None"
+ --forwardAlign="None"
+ --reverseAlign="None"
+ --phased="None"
+ --offrate="None"
+ --mm="None"
+ --seed="None"
+ #end if
+ #if $refGenomeSource.genomeSource == "history":
+ --dbkey=$dbkey
+ #else:
+ --dbkey="None"
+ #end if
+ #if $refGenomeSource.genomeSource == "history":
+ --indexSettings=$refGenomeSource.indexParams.index_settings
+ #else:
+ --indexSettings="None"
+ #end if
+ #if $refGenomeSource.genomeSource == "history" and $refGenomeSource.indexParams.index_settings == "index_full":
+ --iauto_b=$refGenomeSource.indexParams.auto_behavior.auto_b
+ #if $refGenomeSource.indexParams.auto_behavior.auto_b == "set":
+ --ipacked=$refGenomeSource.indexParams.auto_behavior.packed
+ --ibmax=$refGenomeSource.indexParams.auto_behavior.bmax
+ --ibmaxdivn=$refGenomeSource.indexParams.auto_behavior.bmaxdivn
+ --idcv=$refGenomeSource.indexParams.auto_behavior.dcv
+ #else:
+ --ipacked="None"
+ --ibmax="None"
+ --ibmaxdivn="None"
+ --idcv="None"
+ #end if
+ --inodc=$refGenomeSource.indexParams.nodc
+ --inoref=$refGenomeSource.indexParams.noref
+ --ioffrate=$refGenomeSource.indexParams.offrate
+ --iftab=$refGenomeSource.indexParams.ftab
+ --intoa=$refGenomeSource.indexParams.ntoa
+ --iendian=$refGenomeSource.indexParams.endian
+ --iseed=$refGenomeSource.indexParams.seed
+ --icutoff=$refGenomeSource.indexParams.cutoff
+ --ioldpmap=$refGenomeSource.indexParams.oldpmap
+ #else:
+ --iauto_b="None"
+ --ipacked="None"
+ --ibmax="None"
+ --ibmaxdivn="None"
+ --idcv="None"
+ --inodc="None"
+ --inoref="None"
+ --ioffrate="None"
+ --iftab="None"
+ --intoa="None"
+ --iendian="None"
+ --iseed="None"
+ --icutoff="None"
+ --ioldpmap="None"
+ #end if
+ </command>
+ <inputs>
+ <conditional name="refGenomeSource">
+ <param name="genomeSource" type="select" label="Will you select a reference genome from your history or use a built-in index?" help="Built-ins were indexed using default options">
+ <option value="indexed">Use a built-in index</option>
+ <option value="history">Use one from the history</option>
+ </param>
+ <when value="indexed">
+ <param name="indices" type="select" label="Select a reference genome">
+ <options from_file="bowtie_indices.loc">
+ <column name="value" index="1" />
+ <column name="name" index="0" />
+ <filter type="sort_by" column="0" />
+ </options>
+ </param>
+ </when>
+ <when value="history">
+ <param name="ownFile" type="data" format="fasta" metadata_name="dbkey" label="Select a reference genome" />
+ <conditional name="indexParams">
+ <param name="index_settings" type="select" label="Choose whether to use default options or to set your own">
+ <option value="index_pre_set">Commonly Used</option>
+ <option value="index_full">Full Parameter List</option>
+ </param>
+ <when value="index_pre_set" />
+ <when value="index_full">
+ <conditional name="auto_behavior">
+ <param name="auto_b" type="select" label="Choose to use automatic or specified behavior for some parameters (-a)" help="Allows you to set --packed, --bmax, --bmaxdivn, and --dcv">
+ <option value="auto">Automatic behavior</option>
+ <option value="set">Set values (sets --noauto and allows others to be set)</option>
+ </param>
+ <when value="auto" />
+ <when value="set">
+ <param name="packed" type="select" label="Whether or not to use a packed representation for DNA strings (-p)">
+ <option value="unpacked">Use regular representation</option>
+ <option value="packed">Use packed representation</option>
+ </param>
+ <param name="bmax" type="integer" value="-1" label="Maximum number of suffixes allowed in a block (--bmax)" help="-1 for not specified. Must be at least 1" />
+ <param name="bmaxdivn" type="integer" value="4" label="Maximum number of suffixes allowed in a block as a fraction of the length of the reference (--bmaxdivn)" />
+ <param name="dcv" type="integer" value="1024" label="The period for the difference-cover sample (--dcv)" />
+ </when>
+ </conditional>
+ <param name="nodc" type="select" label="Whether or not to disable the use of the difference-cover sample (--nodc)" help="Suffix sorting becomes quadratic-time in the worst case (a very repetetive reference)">
+ <option value="dc">Use difference-cover sample</option>
+ <option value="nodc">Disable difference-cover sample</option>
+ </param>
+ <param name="noref" type="select" label="Whether or not to build the part of the reference index used only in paired-end alignment (-r)">
+ <option value="ref">Build all index files</option>
+ <option value="noref">Do not build paired-end alignment index files</option>
+ </param>
+ <param name="offrate" type="integer" value="5" label="How many rows get marked during annotation of some or all of the Burrows-Wheeler rows (-o)" />
+ <param name="ftab" type="integer" value="10" label="The size of the lookup table used to calculate an initial Burrows-Wheeler range with respect to the first n characters of the query (-t)" help="ftab is 4^(n+1) bytes" />
+ <param name="ntoa" type="select" label="Whether or not to convert Ns in the reference sequence to As (--ntoa)">
+ <option value="no">Do not convert Ns</option>
+ <option value="yes">Convert Ns to As</option>
+ </param>
+ <param name="endian" type="select" label="Endianness to use when serializing integers to the index file (--big/--little)" help="Little is most appropriate for Intel- and AMD-based architecture">
+ <option value="little">Little</option>
+ <option value="big">Big</option>
+ </param>
+ <param name="seed" type="integer" value="-1" label="Seed for the pseudorandom number generator (--seed)" help="Use -1 to use default" />
+ <param name="cutoff" type="integer" value="-1" label="Number of first bases of the reference sequence to index (--cutoff)" help="Use -1 to use default" />
+ <param name="oldpmap" type="select" label="Use the scheme for mapping joined reference locations to original reference locations used in versions of Bowtie prior to 0.9.8 (--oldpmap)" help="The old scheme uses padding and the new one doesn't">
+ <option value="no">Use the new scheme</option>
+ <option value="yes">Use the old scheme</option>
+ </param>
+ </when> <!-- index_full -->
+ </conditional>
+ </when>
+ </conditional> <!-- refGenomeSource -->
+ <conditional name="singlePaired">
+ <param name="sPaired" type="select" label="Is this library mate-paired?">
+ <option value="single">Single-end</option>
+ <option value="paired">Paired-end</option>
+ </param>
+ <when value="single">
+ <param name="input1" type="data" format="fastqsanger" label="FASTQ file" />
+ <conditional name="params">
+ <param name="settings_type" type="select" label="Bowtie settings to use" help="For most mapping needs use Commonly used settings. If you want full control use Full parameter list">
+ <option value="pre_set">Commonly used</option>
+ <option value="full">Full parameter list</option>
+ </param>
+ <when value="pre_set" />
+ <when value="full">
+ <param name="skip" type="integer" value="0" label="Skip the first n reads (-s)" />
+ <param name="alignLimit" type="integer" value="-1" label="Only align the first n reads (-u)" help="-1 for off" />
+ <param name="trimH" type="integer" value="0" label="Trim n bases from high-quality (left) end of each read before alignment (-5)" />
+ <param name="trimL" type="integer" value="0" label="Trim n bases from low-quality (right) end of each read before alignment (-3)" />
+ <param name="mismatchSeed" type="integer" value="2" label="Maximum number of mismatches permitted in the seed (-n)" help="May be 0, 1, 2, or 3" />
+ <param name="mismatchQual" type="integer" value="70" label="Maximum permitted total of quality values at mismatched read positions (-e)" />
+ <param name="seedLen" type="integer" value="28" label="Seed length (-l)" help="Minimum value is 5" />
+ <param name="rounding" type="select" label="Whether or not to round to the nearest 10 and saturating at 30 (--nomaqround)">
+ <option value="round">Round to nearest 10</option>
+ <option value="noRound">Do not round to nearest 10</option>
+ </param>
+ <param name="maqSoapAlign" type="integer" value="-1" label="Number of mismatches for SOAP-like alignment policy (-v)" help="-1 for default MAQ-like alignment policy" />
+ <param name="tryHard" type="select" label="Whether or not to try as hard as possible to find valid alignments when they exist (-y)" help="Tryhard mode is much slower than regular mode">
+ <option value="noTryHard">Do not try hard</option>
+ <option value="doTryHard">Try hard</option>
+ </param>
+ <param name="valAlign" type="integer" value="1" label="Report up to n valid alignments per read (-k)" />
+ <param name="allValAligns" type="select" label="Whether or not to report all valid alignments per read (-a)">
+ <option value="noAllValAligns">Do not report all valid alignments</option>
+ <option value="doAllValAligns">Report all valid alignments</option>
+ </param>
+ <param name="suppressAlign" type="integer" value="-1" label="Suppress all alignments for a read if more than n reportable alignments exist (-m)" help="-1 for no limit" />
+ <param name="offbase" type="integer" value="0" label="Number the first base of a reference sequence as n when outputting alignments (-B)" />
+ <conditional name="bestOption">
+ <param name="best" type="select" label="Whether or not to make Bowtie guarantee that reported singleton alignments are 'best' in terms of stratum and in terms of the quality values at the mismatched positions (--best)" help="Removes all strand bias. Only affects which alignments are reported by Bowtie. Runs slower with best option">
+ <option value="noBest">Do not use best</option>
+ <option value="doBest">Use best</option>
+ </param>
+ <when value="noBest">
+ <param name="maxBacktracks" type="integer" value="125" label="Maximum number of backtracks permitted when aligning a read (--maxbts)" />
+ <param name="phased" type="select" label="Whether or not it should alternate between using the forward and mirror indexes in a series of phases so that only half of the index is resident in memory at one time (-z)">
+ <option value="noPhased">Don't alternate</option>
+ <option value="doPhased">Do alternate</option>
+ </param>
+ </when>
+ <when value="doBest">
+ <param name="maxBacktracks" type="integer" value="800" label="Maximum number of backtracks permitted when aligning a read (--maxbts)" />
+ <param name="threadMem" type="integer" value="32" label="Number of megabytes of memory a given thread is given to store path descriptors in best mode (--chunkmbs)" help="If running in best mode, and you run out of memory, try adjusting this" />
+ <param name="strata" type="select" label="Whether or not to report only those alignments that fall in the best stratum if many valid alignments exist and are reportable (--strata)">
+ <option value="noStrata">Do not use strata option</option>
+ <option value="doStrata">Use strata option</option>
+ </param>
+ </when>
+ </conditional> <!-- bestOption -->
+ <param name="offrate" type="integer" value="-1" label="Override the offrate of the index to n (-o)" help="-1 for default" />
+ <param name="mm" type="select" label="Whether or not to use memory-mapped I/O to load the index (--mm)">
+ <option value="noMm">Use POSIX/C file I/O</option>
+ <option value="doMm">Use memory-mapped I/O</option>
+ </param>
+ <param name="seed" type="integer" value="-1" label="Seed for pseudo-random number generator (--seed)" help="-1 for default" />
+ </when> <!-- full -->
+ </conditional> <!-- params -->
+ </when> <!-- single -->
+ <when value="paired">
+ <param name="input1" type="data" format="fastqsanger" label="Forward FASTQ file" />
+ <param name="input2" type="data" format="fastqsanger" label="Reverse FASTQ file" />
+ <conditional name="params">
+ <param name="settings_type" type="select" label="Bowtie settings to use" help="For most mapping needs use Commonly used settings. If you want full control use Full parameter list">
+ <option value="pre_set">Commonly used</option>
+ <option value="full">Full parameter list</option>
+ </param>
+ <when value="pre_set" />
+ <when value="full">
+ <param name="skip" type="integer" value="0" label="Skip the first n pairs (-s)" />
+ <param name="alignLimit" type="integer" value="-1" label="Only align the first n pairs (-u)" help="-1 for off" />
+ <param name="trimH" type="integer" value="0" label="Trim n bases from high-quality (left) end of each read before alignment (-5)" />
+ <param name="trimL" type="integer" value="0" label="Trim n bases from low-quality (right) end of each read before alignment (-3)" />
+ <param name="mismatchSeed" type="integer" value="2" label="Maximum number of mismatches permitted in the seed (-n)" help="May be 0, 1, 2, or 3" />
+ <param name="mismatchQual" type="integer" value="70" label="Maximum permitted total of quality values at mismatched read positions (-e)" />
+ <param name="seedLen" type="integer" value="28" label="Seed length (-l)" help="Minimum value is 5" />
+ <param name="rounding" type="select" label="Whether or not to round to the nearest 10 and saturating at 30 (--nomaqround)">
+ <option value="round">Round to nearest 10</option>
+ <option value="noRound">Do not round to nearest 10</option>
+ </param>
+ <param name="maqSoapAlign" type="integer" value="-1" label="Number of mismatches for SOAP-like alignment policy (-v)" help="-1 for default MAQ-like alignment policy" />
+ <param name="minInsert" type="integer" value="0" label="Minimum insert size for valid paired-end alignments (-I)" />
+ <param name="maxInsert" type="integer" value="250" label="Maximum insert size for valid paired-end alignments (-X)" />
+ <param name="mateOrient" type="select" label="The upstream/downstream mate orientation for valid paired-end alignment against the forward reference strand (--fr/--rf/--ff)">
+ <option value="fr">FR (for Illumina)</option>
+ <option value="rf">RF</option>
+ <option value="ff">FF</option>
+ </param>
+ <param name="maxAlignAttempt" type="integer" value="100" label="Maximum number of attempts Bowtie will make to match an alignment for one mate with an alignment for the opposite mate (--pairtries)" />
+ <param name="forwardAlign" type="select" label="Choose whether or not to attempt to align the forward reference strand (--nofw)">
+ <option value="forward">Align against the forward reference strand</option>
+ <option value="noForward">Do not align against the forward reference strand</option>
+ </param>
+ <param name="reverseAlign" type="select" label="Choose whether or not to align against the reverse-complement reference strand (--norc)">
+ <option value="reverse">Align against the reverse-complement reference strand</option>
+ <option value="noReverse">Do not align against the reverse-complement reference strand</option>
+ </param>
+ <param name="tryHard" type="select" label="Whether or not to try as hard as possible to find valid alignments when they exist (-y)" help="Tryhard mode is much slower than regular mode">
+ <option value="noTryHard">Do not try hard</option>
+ <option value="doTryHard">Try hard</option>
+ </param>
+ <param name="valAlign" type="integer" value="1" label="Report up to n valid alignments per pair (-k)" />
+ <param name="allValAligns" type="select" label="Whether or not to report all valid alignments per pair (-a)">
+ <option value="noAllValAligns">Do not report all valid alignments</option>
+ <option value="doAllValAligns">Report all valid alignments</option>
+ </param>
+ <param name="suppressAlign" type="integer" value="-1" label="Suppress all alignments for a pair if more than n reportable alignments exist (-m)" help="-1 for no limit" />
+ <param name="offbase" type="integer" value="0" label="Number the first base of a reference sequence as n when outputting alignments (-B)" />
+ <conditional name="bestOption">
+ <param name="best" type="select" label="Whether or not to make Bowtie guarantee that reported singleton alignments are 'best' in terms of stratum and in terms of the quality values at the mismatched positions (--best)" help="Removes all strand bias. Only affects which alignments are reported by Bowtie. Runs slower with best option">
+ <option value="noBest">Do not use best</option>
+ <option value="doBest">Use best</option>
+ </param>
+ <when value="noBest">
+ <param name="maxBacktracks" type="integer" value="125" label="Maximum number of backtracks permitted when aligning a read (--maxbts)" />
+ </when>
+ <when value="doBest">
+ <param name="maxBacktracks" type="integer" value="800" label="Maximum number of backtracks permitted when aligning a read (--maxbts)" />
+ <param name="threadMem" type="integer" value="32" label="Number of megabytes of memory a given thread is given to store path descriptors in best mode (--chunkmbs)" help="If running in best mode, and you run out of memory, try adjusting this" />
+ <param name="strata" type="select" label="Whether or not to report only those alignments that fall in the best stratum if many valid alignments exist and are reportable (--strata)">
+ <option value="noStrata">Do not use strata option</option>
+ <option value="doStrata">Use strata option</option>
+ </param>
+ </when>
+ </conditional>
+ <param name="offrate" type="integer" value="-1" label="Override the offrate of the index to n (-o)" help="-1 for default" />
+ <param name="mm" type="select" label="Whether or not to use memory-mapped I/O to load the index (--mm)">
+ <option value="noMm">Use POSIX/C file I/O</option>
+ <option value="doMm">Use memory-mapped I/O</option>
+ </param>
+ <param name="seed" type="integer" value="-1" label="Seed for pseudo-random number generator (--seed)" help="-1 for default" />
+ </when> <!-- full -->
+ </conditional> <!-- params -->
+ </when> <!-- paired -->
+ </conditional> <!-- singlePaired -->
+ </inputs>
+ <outputs>
+ <data format="sam" name="output" />
+ </outputs>
+ <tests>
+ <test>
+ <param name="genomeSource" value="indexed" />
+ <param name="indices" value="chrM" />
+ <param name="sPaired" value="single" />
+ <param name="input1" ftype="fastqsanger" value="bowtie_in1.fastq" />
+ <param name="settings_type" value="pre_set" />
+ <output name="output" ftype="sam" file="bowtie_out1.sam" />
+ </test>
+ <test>
+ <param name="genomeSource" value="history" />
+ <param name="ownFile" value="chrM.fa" />
+ <param name="index_settings" value="index_pre_set" />
+ <param name="sPaired" value="paired" />
+ <param name="input1" ftype="fastqsanger" value="bowtie_in2.fastq" />
+ <param name="input2" ftype="fastqsanger" value="bowtie_in3.fastq" />
+ <param name="settings_type" value="pre_set" />
+ <output name="output" ftype="sam" file="bowtie_out2.sam" />
+ </test>
+ <test>
+ <param name="genomeSource" value="history" />
+ <param name="ownFile" value="chrM.fa" />
+ <param name="index_settings" value="index_full" />
+ <param name="auto_b" value="set" />
+ <param name="packed" value="unpacked" />
+ <param name="bmax" value="-1" />
+ <param name="bmaxdivn" value="4" />
+ <param name="dcv" value="2048" />
+ <param name="nodc" value="dc" />
+ <param name="noref" value="noref" />
+ <param name="offrate" value="6" />
+ <param name="ftab" value="10" />
+ <param name="ntoa" value="yes" />
+ <param name="endian" value="little" />
+ <param name="seed" value="-1" />
+ <param name="cutoff" value="-1" />
+ <param name="oldpmap" value="no" />
+ <param name="sPaired" value="single" />
+ <param name="input1" ftype="fastqsanger" value="bowtie_in1.fastq" />
+ <param name="settings_type" value="pre_set" />
+ <output name="output" ftype="sam" file="bowtie_out1.sam" />
+ </test>
+ <test>
+ <param name="genomeSource" value="indexed" />
+ <param name="indices" value="chrM" />
+ <param name="sPaired" value="paired" />
+ <param name="input1" ftype="fastqsanger" value="bowtie_in2.fastq" />
+ <param name="input2" ftype="fastqsanger" value="bowtie_in3.fastq" />
+ <param name="settings_type" value="full" />
+ <param name="skip" value="0" />
+ <param name="alignLimit" value="-1" />
+ <param name="trimL" value="0" />
+ <param name="trimH" value="0" />
+ <param name="mismatchSeed" value="3" />
+ <param name="mismatchQual" value="50" />
+ <param name="seedLen" value="10" />
+ <param name="rounding" value="round" />
+ <param name="maqSoapAlign" value="-1" />
+ <param name="minInsert" value="0" />
+ <param name="maxInsert" value="250" />
+ <param name="mateOrient" value="fr" />
+ <param name="maxAlignAttempt" value="100" />
+ <param name="forwardAlign" value="forward" />
+ <param name="reverseAlign" value="reverse" />
+ <param name="tryHard" value="doTryHard" />
+ <param name="valAlign" value="1" />
+ <param name="allValAligns" value="noAllValAligns" />
+ <param name="suppressAlign" value="-1" />
+ <param name="offbase" value="0" />
+ <param name="best" value="doBest" />
+ <param name="maxBacktracks" value="800" />
+ <param name="threadMem" value="32" />
+ <param name="strata" value="noStrata" />
+ <param name="offrate" value="-1" />
+ <param name="mm" value="noMm" />
+ <param name="seed" value="403" />
+ <output name="output" ftype="sam" file="bowtie_out2.sam" />
+ </test>
+ </tests>
+ <help>
+
+**What it does**
+
+Bowtie_ is a short read aligner designed to be ultrafast and memory-efficient. Reads can be as long as 1024 base pairs, though shorter is better. Bowtie produces a specific output format which is converted to SAM by this tool.
+
+.. _Bowtie: http://bowtie-bio.sourceforge.net/index.shtml
+
+------
+
+**Input formats**
+
+Bowtie accepts files in Sanger FASTQ format.
+
+------
+
+**Outputs**
+
+The output is in SAM format, and has the following columns::
+
+ 1 QNAME - Query (pair) NAME
+ 2 FLAG - bitwise FLAG
+ 3 RNAME - Reference sequence NAME
+ 4 POS - 1-based leftmost POSition/coordinate of clipped sequence
+ 5 MAPQ - MAPping Quality (Phred-scaled)
+ 6 CIGAR - extended CIGAR string
+ 7 MRNM - Mate Reference sequence NaMe ('=' if same as RNAME)
+ 8 MPOS - 1-based Mate POSition
+ 9 ISIZE - Inferred insert SIZE
+ 10 SEQ - query SEQuence on the same strand as the reference
+ 11 QUAL - query QUALity (ASCII-33 gives the Phred base quality)
+ 12 OPT - variable OPTional fields in the format TAG:VTYPE:VALUE
+
+The flags are as follows::
+
+ Flag - Description
+ 0x0001 - the read is paired in sequencing
+ 0x0002 - the read is mapped in a proper pair
+ 0x0004 - the query sequence itself is unmapped
+ 0x0008 - the mate is unmapped
+ 0x0010 - strand of the query (1 for reverse)
+ 0x0020 - strand of the mate
+ 0x0040 - the read is the first read in a pair
+ 0x0080 - the read is the second read in a pair
+ 0x0100 - the alignment is not primary
+
+It looks like this (scroll sideways to see the entire example)::
+
+ QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL OPT
+ HWI-EAS91_1_30788AAXX:1:1:1761:343 4 * 0 0 * * 0 0 AAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAACNNANNGAGTNGNNNNNNNGCTTCCCACAGNNCTGG hhhhhhh;;hhhhhhhhhhh^hOhhhhghhhfhhhgh;;h;;hhhh;h;;;;;;;hhhhhhghhhh;;Phhh
+ HWI-EAS91_1_30788AAXX:1:1:1578:331 4 * 0 0 * * 0 0 GTATAGANNAATAAGAAAAAAAAAAATGAAGACTTTCNNANNTCTGNANNNNNNNTCTTTTTTCAGNNGTAG hhhhhhh;;hhhhhhhhhhhhhhhhhhhhhhhhhhhh;;h;;hhhh;h;;;;;;;hhhhhhhhhhh;;hhVh
+
+-------
+
+**Bowtie settings**
+
+All of the options have a default value. You can change any of them. Most of the options in Bowtie have been implemented here.
+
+------
+
+**Bowtie parameter list**
+
+This is an exhaustive list of Bowtie options:
+
+For indexing (bowtie-build)::
+ -a No auto behavior. Disable the default behavior where bowtie automatically selects values for --bmax/--dcv/--packed parameters according to the memory available. [off]
+ -p Packing. Use a packed representation for DNA strings. [auto]
+ --bmax <int> Suffix maximum. The maximum number of suffixes allowed in a block. [auto]
+ --bmaxdivn <int> Suffix maximum fraction. The maximum number of suffixes allowed in a block expressed as a fraction of the length of the reference. [4]
+ --dcv <int> Difference-cover sample. Use <int> as the period for the difference-cover sample. [1024]
+ --nodc <int> No difference-cover sample. Disable the difference-cover sample. [off]
+ -r No reference indexes. Do not build the NAME.3.ebwt and NAME.4.ebwt portions of the index, used only for paired-end alignment. [off]
+ -o Offrate. How many Burrows-Wheeler rows get marked by the indexer. The indexer will mark every 2^<int> rows. The marked rows correspond to rows on the genome. [5]
+ -t <int> Ftab. The lookup table used to calculate an initial Burrows-Wheeler range with respect to the first <int> characters of the query. Ftab is 4^<int>+1 bytes. [10]
+ --ntoa N conversion. Convert Ns to As before building the index. Otherwise, Ns are simply excluded from the index and Bowtie will not find alignments that overlap them. [off]
+ --big Endianness. Endianness to use when serializing integers to the index file. [off]
+ --little Endianness. [--little]
+ --seed <int> Random seed. Use <int> as the seed for the pseudo-random number generator. [off]
+ --cutoff <int> Cutoff. Index only the first <int> bases of the reference sequences (cumulative across sequences) and ignore the rest. [off]
+ --oldpmap Use old mapping scheme. Use the padding-based scheme from Bowtie versions before 0.9.8 instead of the current scheme. [off]
+
+For aligning (bowtie)::
+ -s <int> Skip. Do not align the first <int> reads or pairs in the input. [off]
+ -u <int> Align limit. Only align the first <int> reads/pairs from the input. [no limit]
+ -5 <int> High-quality trim. Trim <int> bases from the high-quality (left) end of each read before alignment. [0]
+ -3 <int> Low-quality trim. Trim <int> bases from the low-quality (right) end of each read before alignment. [0]
+ -n <int> Mismatch seed. Maximum number of mismatches permitted in the seed (defined with seed length option). Can be 0, 1, 2, or 3. [2]
+ -e <int> Mismatch quality. Maximum permitted total of quality values at mismatched read positions. Bowtie rounds quality values to the nearest 10 and saturates at 30. [70]
+ -l <int> Seed length. The number of bases on the high-quality end of the read to which the -n ceiling applies. Must be at least 5. [28]
+ --nomaqround Suppress MAQ rounding. Values are internally rounded to the nearest 10 and saturate at 30. This option turns off that rounding. [off]
+ -v <int> MAQ- or SOAP-like alignment policy. This option turns off the default MAQ-like alignment policy in favor of a SOAP-like one. End-to-end alignments with at most <int> mismatches. [off]
+ -I <int> Minimum insert. The minimum insert size for valid paired-end alignments. Does checking on untrimmed reads if -5 or -3 is used. [0]
+ --fr Mate orientation. The upstream/downstream mate orientations for a valid paired-end alignment against the forward reference strand. [--fr]
+ --rf Mate orientation. [off]
+ --ff Mate orientation. [off]
+ -X <int> Maximum insert. The maximum insert size for valid paired-end alignments. Does checking on untrimmed reads if -5 or -3 is used. [250]
+ --pairtries <int> Maximum alignment attempts for paired-end data. [100]
+ --nofw No forward aligning. Choosing this option means that Bowtie will not attempt to align against the forward reference strand. [off]
+ --norc No reverse-complement aligning. Setting this will mean that Bowtie will not attempt to align against the reverse-complement reference strand. [off]
+ --maxbts <int> Maximum backtracks. The maximum number of backtracks permitted when aligning a read in -n 2 or -n 3 mode. [125 without --best] [800 with --best]
+ -y Try hard. Try as hard as possible to find valid alignments when they exist, including paired-end alignments. [off]
+ --chunkmbs <int> Thread memory. The number of megabytes of memory a given thread is given to store path descriptors in --best mode. [32]
+ -k <int> Valid alignments. The number of valid alignments per read or pair. [off]
+ -a All valid alignments. Choosing this means that all valid alignments per read or pair will be reported. [off]
+ -m <int> Suppress alignments. Suppress all alignments for a particular read or pair if more than <int> reportable alignments exist for it. [no limit]
+ --best Best mode. Make Bowtie guarantee that reported singleton alignments are "best" in terms of stratum (the number of mismatches) and quality values at mismatched position. [off]
+ --strata Best strata. When running in best mode, report alignments that fall into the best stratum if there are ones falling into more than one. [off]
+ -B <int> First base number. When outputting alignments, number the first base of a reference sequence as <int>. [0]
+ -z <int> Phased. Alternate between using the forward and mirror indexes in a series of phases such that only one half of the index is resident in memory at one time. Cannot be used with paired-end alignment. [off]
+ -o <int> Offrate override. Override the offrate of the index with <int>. Some row markings are discarded when index read into memory. <int> must be greater than the value used to build the index (default: 5). [off]
+ --mm I/O for index loading. Choosing this option means that memory-mapped I/O will be used to load the index instead of the normal POSIX/C file I/O. Allows memory-efficient parallelization where using -p is not desirable. [off]
+ --seed <int> Random seed. Use <int> as the seed for the pseudo-random number generator. [off]
+
+ </help>
+</tool>
1
0
14 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/e7b899fb4462
changeset: 2681:e7b899fb4462
user: James Taylor <james(a)jamestaylor.org>
date: Fri Sep 11 12:48:33 2009 -0400
description:
Fix syntax error when running workflows. This is actually a regression in Mako, the multiline conditional in the elif was somehow causing it to improperly nest the if statements
2 file(s) affected in this change:
eggs.ini
templates/workflow/run.mako
diffs (34 lines):
diff -r 8877ef766447 -r e7b899fb4462 eggs.ini
--- a/eggs.ini Fri Sep 11 11:26:26 2009 -0400
+++ b/eggs.ini Fri Sep 11 12:48:33 2009 -0400
@@ -31,7 +31,7 @@
elementtree = 1.2.6_20050316
lrucache = 0.2
;lsprof - james
-Mako = 0.2.4
+Mako = 0.2.5
MyghtyUtils = 0.52
nose = 0.9.1
NoseHTML = 0.2
@@ -79,7 +79,7 @@
docutils = http://downloads.sourceforge.net/docutils/docutils-0.4.tar.gz
elementtree = http://effbot.org/downloads/elementtree-1.2.6-20050316.tar.gz
lrucache = http://evan.prodromou.name/lrucache/lrucache-0.2.tar.gz
-Mako = http://www.makotemplates.org/downloads/Mako-0.2.4.tar.gz
+Mako = http://www.makotemplates.org/downloads/Mako-0.2.5.tar.gz
MyghtyUtils = http://cheeseshop.python.org/packages/source/M/MyghtyUtils/MyghtyUtils-0.52…
nose = http://www.somethingaboutorange.com/mrl/projects/nose/nose-0.9.1.tar.gz
NoseHTML = http://dist.g2.bx.psu.edu/nosehtml-0.2.tar.bz2
diff -r 8877ef766447 -r e7b899fb4462 templates/workflow/run.mako
--- a/templates/workflow/run.mako Fri Sep 11 11:26:26 2009 -0400
+++ b/templates/workflow/run.mako Fri Sep 11 12:48:33 2009 -0400
@@ -87,8 +87,7 @@
${param.get_html_field( t, value, other_values ).get_html( str(step.id) + "|" + prefix )}
<input type="hidden" name="${step.id}|__force_update__${prefix}${param.name}" value="true" />
%endif
- %elif isinstance( value, RuntimeValue ) or \
- ( str(step.id) + '|__runtime__' + prefix + param.name ) in incoming:
+ %elif isinstance( value, RuntimeValue ) or ( str(step.id) + '|__runtime__' + prefix + param.name ) in incoming:
## On the first load we may see a RuntimeValue, so we write
## an input field using the initial value for the param.
## Subsequents posts will no longer have the runtime value
1
0
14 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/032478337b82
changeset: 2683:032478337b82
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Fri Sep 11 14:57:02 2009 -0400
description:
(1) Unicode support for tagging and (2) added tagging_common mako file to support creation and standardization of tagging elements across all pages.
9 file(s) affected in this change:
lib/galaxy/web/controllers/history.py
lib/galaxy/web/controllers/tag.py
static/scripts/autocomplete_tagging.js
static/scripts/packed/autocomplete_tagging.js
templates/dataset/edit_attributes.mako
templates/history/grid.mako
templates/root/history.mako
templates/tagging_common.mako
tool_conf.xml.main
diffs (781 lines):
diff -r ed4cbaf23c88 -r 032478337b82 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Fri Sep 11 09:00:36 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Fri Sep 11 14:57:02 2009 -0400
@@ -38,6 +38,17 @@
if item.users_shared_with or item.importable:
return dict( operation="sharing" )
return None
+ class TagsColumn( grids.GridColumn ):
+ def __init__(self, col_name):
+ grids.GridColumn.__init__(self, col_name)
+ self.tag_elt_id_gen = 0
+
+ def get_value( self, trans, grid, history ):
+ self.tag_elt_id_gen += 1
+ return trans.fill_template( "/tagging_common.mako", trans=trans,
+ tagged_item=history,
+ elt_id="tagging-elt" + str(self.tag_elt_id_gen) )
+
# Grid definition
title = "Stored histories"
model_class = model.History
@@ -48,6 +59,7 @@
link=( lambda item: iff( item.deleted, None, dict( operation="switch", id=item.id ) ) ),
attach_popup=True ),
DatasetsByStateColumn( "Datasets (by state)", ncells=4 ),
+ #TagsColumn( "Tags" ),
StatusColumn( "Status", attach_popup=False ),
grids.GridColumn( "Created", key="create_time", format=time_ago ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
diff -r ed4cbaf23c88 -r 032478337b82 lib/galaxy/web/controllers/tag.py
--- a/lib/galaxy/web/controllers/tag.py Fri Sep 11 09:00:36 2009 -0400
+++ b/lib/galaxy/web/controllers/tag.py Fri Sep 11 14:57:02 2009 -0400
@@ -4,7 +4,6 @@
from galaxy.model import History, HistoryTagAssociation, Dataset, DatasetTagAssociation, \
HistoryDatasetAssociation, HistoryDatasetAssociationTagAssociation, Page, PageTagAssociation
-
from galaxy.web.base.controller import *
from galaxy.tags.tag_handler import *
from sqlalchemy.sql.expression import func, and_
@@ -15,65 +14,68 @@
def __init__(self, app):
BaseController.__init__(self, app)
- # Set up dict for mapping from short-hand to full item class.
- self.shorthand_to_item_class_dict = dict()
- self.shorthand_to_item_class_dict["history"] = History
- self.shorthand_to_item_class_dict["hda"] = HistoryDatasetAssociation
+ # Keep a list of taggable classes.
+ self.taggable_classes = dict()
+ self.taggable_classes[History.__name__] = History
+ self.taggable_classes[HistoryDatasetAssociation.__name__] = HistoryDatasetAssociation
+ self.taggable_classes[Page.__name__] = Page
- # Set up tag handler to recognize the following items: History, HistoryDatasetAssociation, ...
+ # Set up tag handler to recognize the following items: History, HistoryDatasetAssociation, Page, ...
self.tag_handler = TagHandler()
self.tag_handler.add_tag_assoc_class(History, HistoryTagAssociation)
self.tag_handler.add_tag_assoc_class(HistoryDatasetAssociation, HistoryDatasetAssociationTagAssociation)
-
+ self.tag_handler.add_tag_assoc_class(Page, PageTagAssociation)
+
@web.expose
- def add_tag_async( self, trans, id=None, item_type=None, new_tag=None ):
+ @web.require_login( "Add tag to an item." )
+ def add_tag_async( self, trans, id=None, item_class=None, new_tag=None ):
""" Add tag to an item. """
- item = self._get_item(trans, item_type, trans.security.decode_id(id))
+ item = self._get_item(trans, item_class, trans.security.decode_id(id))
self._do_security_check(trans, item)
- self.tag_handler.apply_item_tags(trans.sa_session, item, new_tag)
+ self.tag_handler.apply_item_tags( trans.sa_session, item, unicode(new_tag).encode('utf-8') )
trans.sa_session.flush()
@web.expose
- def remove_tag_async( self, trans, id=None, item_type=None, tag_name=None ):
+ @web.require_login( "Remove tag from an item." )
+ def remove_tag_async( self, trans, id=None, item_class=None, tag_name=None ):
""" Remove tag from an item. """
- item = self._get_item(trans, item_type, trans.security.decode_id(id))
+ item = self._get_item(trans, item_class, trans.security.decode_id(id))
self._do_security_check(trans, item)
- self.tag_handler.remove_item_tag(item, tag_name)
+ self.tag_handler.remove_item_tag( item, unicode(tag_name).encode('utf-8') )
+ #print tag_name
+ #print unicode(tag_name)
trans.sa_session.flush()
# Retag an item. All previous tags are deleted and new tags are applied.
@web.expose
- def retag_async( self, trans, id=None, item_type=None, new_tags=None ):
+ @web.require_login( "Apply a new set of tags to an item; previous tags are deleted." )
+ def retag_async( self, trans, id=None, item_class=None, new_tags=None ):
""" Apply a new set of tags to an item; previous tags are deleted. """
- item = self._get_item(trans, item_type, trans.security.decode_id(id))
+ item = self._get_item(trans, item_class, trans.security.decode_id(id))
self._do_security_check(trans, item)
tag_handler.delete_item_tags(item)
- self.tag_handler.apply_item_tags(trans.sa_session, item, new_tag)
+ self.tag_handler.apply_item_tags( trans.sa_session, item, unicode(new_tags).encode('utf-8') )
trans.sa_session.flush()
-
- tag_handler.delete_item_tags(history)
- tag_handler.apply_item_tags(trans.sa_session, history, new_tags)
- # Flush to complete changes.
- trans.sa_session.flush()
-
+
@web.expose
@web.require_login( "get autocomplete data for an item's tags" )
- def tag_autocomplete_data(self, trans, id=None, item_type=None, q=None, limit=None, timestamp=None):
+ def tag_autocomplete_data(self, trans, id=None, item_class=None, q=None, limit=None, timestamp=None):
""" Get autocomplete data for an item's tags. """
#
# Get item, do security check, and get autocomplete data.
#
- item = self._get_item(trans, item_type, trans.security.decode_id(id))
+ item = self._get_item(trans, item_class, trans.security.decode_id(id))
self._do_security_check(trans, item)
+ q = unicode(q).encode('utf-8')
if q.find(":") == -1:
return self._get_tag_autocomplete_names(trans, item, q, limit, timestamp)
else:
@@ -184,9 +186,9 @@
# Use the user_id associated with the HDA's history.
return History.table.c.user_id
- def _get_item(self, trans, item_type, id):
+ def _get_item(self, trans, item_class_name, id):
""" Get an item based on type and id. """
- item_class = self.shorthand_to_item_class_dict[item_type]
+ item_class = self.taggable_classes[item_class_name]
item = trans.sa_session.query(item_class).filter("id=" + str(id))[0]
return item;
diff -r ed4cbaf23c88 -r 032478337b82 static/scripts/autocomplete_tagging.js
--- a/static/scripts/autocomplete_tagging.js Fri Sep 11 09:00:36 2009 -0400
+++ b/static/scripts/autocomplete_tagging.js Fri Sep 11 14:57:02 2009 -0400
@@ -187,13 +187,14 @@
// Tag button is image's parent.
var tag_button = $(this).parent();
- // Get tag name.
+ // Get tag name, value.
var tag_name_elt = tag_button.find(".tag-name").eq(0);
var tag_str = tag_name_elt.text();
- var tag_name = get_tag_name_and_value(tag_str)[0];
+ var tag_name_and_value = get_tag_name_and_value(tag_str);
+ var tag_name = tag_name_and_value[0];
+ var tag_value = tag_name_and_value[1];
- // TODO: should remove succeed if tag is not already applied to
- // history?
+ var prev_button = tag_button.prev();
tag_button.remove();
// Remove tag from local list for consistency.
@@ -209,12 +210,28 @@
data: { tag_name: tag_name },
error: function()
{
- // Failed.
- alert( "Remove tag failed" );
+ // Failed. Roll back changes and show alert.
+ settings.tags[tag_name] = tag_value;
+ if (prev_button.hasClass("tag-button"))
+ prev_button.after(tag_button);
+ else
+ tag_area.prepend(tag_button);
+ var new_text = settings.get_toggle_link_text_fn(settings.tags);
+ alert( "Remove tag failed" );
+
+ toggle_link.text(new_text);
+
+ // TODO: no idea why it's necessary to set this up again.
+ delete_img.mouseenter( function ()
+ {
+ $(this).attr("src", settings.delete_tag_img_rollover);
+ });
+ delete_img.mouseleave( function ()
+ {
+ $(this).attr("src", settings.delete_tag_img);
+ });
},
- success: function()
- {
- }
+ success: function() {}
});
return true;
@@ -323,8 +340,9 @@
data: { new_tag: new_value },
error: function()
{
- // Remove tag and show alert.
+ // Failed. Roll back changes and show alert.
new_tag_button.remove();
+ delete settings.tags[tag_name_and_value[0]];
var new_text = settings.get_toggle_link_text_fn(settings.tags);
toggle_link.text(new_text);
alert( "Add tag failed" );
diff -r ed4cbaf23c88 -r 032478337b82 static/scripts/packed/autocomplete_tagging.js
--- a/static/scripts/packed/autocomplete_tagging.js Fri Sep 11 09:00:36 2009 -0400
+++ b/static/scripts/packed/autocomplete_tagging.js Fri Sep 11 14:57:02 2009 -0400
@@ -1,1 +1,1 @@
-var ac_tag_area_id_gen=1;jQuery.fn.autocomplete_tagging=function(c){var e={get_toggle_link_text_fn:function(u){var w="";var v=o(u);if(v!=0){w=v+(v!=0?" Tags":" Tag")}else{w="Add tags"}return w},tag_click_fn:function(u){},input_size:20,in_form:false,tags:{},use_toggle_link:true,item_id:"",add_tag_img:"",add_tag_img_rollover:"",delete_tag_img:"",ajax_autocomplete_tag_url:"",ajax_retag_url:"",ajax_delete_tag_url:"",ajax_add_tag_url:""};var p=jQuery.extend(e,c);var k="tag-area-"+(ac_tag_area_id_gen)++;var m=$("<div></div>").attr("id",k).addClass("tag-area");this.append(m);var o=function(u){if(u.length){return u.length}var v=0;for(element in u){v++}return v};var b=function(){var u=p.get_toggle_link_text_fn(p.tags);var v=$("<a href='/history/tags'>"+u+"</a>").addClass("toggle-link");v.click(function(){var w=(m.css("display")=="none");var x;if(w){x=function(){var y=o(p.tags);if(y==0){m.click()}}}else{x=function(){m.blur()}}m.slideToggle("fast",x);return false});return v};var s=b();
if(p.use_toggle_link){this.prepend(s)}var t=function(u){var v=new Array();for(key in u){v[v.length]=key+"-->"+u[key]}return"{"+v.join(",")+"}"};var a=function(v,u){return v+((u!=""&&u)?":"+u:"")};var h=function(u){return u.split(":")};var i=function(u){var v=$("<img src='"+p.add_tag_img+"' rollover='"+p.add_tag_img_rollover+"'/>").addClass("add-tag-button");v.click(function(){$(this).hide();m.click();return false});return v};var j=function(u){var v=$("<img src='"+p.delete_tag_img+"'/>").addClass("delete-tag-img");v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)});v.click(function(){var B=$(this).parent();var A=B.find(".tag-name").eq(0);var z=A.text();var C=h(z)[0];B.remove();delete p.tags[C];var y=p.get_toggle_link_text_fn(p.tags);s.text(y);$.ajax({url:p.ajax_delete_tag_url,data:{tag_name:C},error:function(){alert("Remove tag failed")},success:function(){}});return true});var w=$("<span>"+u+"
</span>").addClass("tag-name");w.click(function(){p.tag_click_fn(u);return true});var x=$("<span></span>").addClass("tag-button");x.append(w);x.append(v);return x};var d=function(v){var u;if(p.in_form){u=$("<textarea id='history-tag-input' rows='1' cols='"+p.input_size+"' value='"+v+"'></textarea>")}else{u=$("<input id='history-tag-input' type='text' size='"+p.input_size+"' value='"+v+"'></input>")}u.keyup(function(D){if(D.keyCode==27){$(this).trigger("blur")}else{if((D.keyCode==13)||(D.keyCode==188)||(D.keyCode==32)){new_value=this.value;if(return_key_pressed_for_autocomplete==true){return_key_pressed_for_autocomplete=false;return false}if(new_value.indexOf(": ",new_value.length-2)!=-1){this.value=new_value.substring(0,new_value.length-1);return false}if((D.keyCode==188)||(D.keyCode==32)){new_value=new_value.substring(0,new_value.length-1)}new_value=new_value.replace(/^\s+|\s+$/g,"");if(new_value.length<3){return false}this.value="";var A=j(new_value);var z=m.children(".tag
-button");if(z.length!=0){var E=z.slice(z.length-1);E.after(A)}else{m.prepend(A)}var y=new_value.split(":");p.tags[y[0]]=y[1];var B=p.get_toggle_link_text_fn(p.tags);s.text(B);var C=$(this);$.ajax({url:p.ajax_add_tag_url,data:{new_tag:new_value},error:function(){A.remove();var F=p.get_toggle_link_text_fn(p.tags);s.text(F);alert("Add tag failed")},success:function(){C.flushCache()}});return false}}});var w=function(A,z,y,C,B){tag_name_and_value=C.split(":");return(tag_name_and_value.length==1?tag_name_and_value[0]:tag_name_and_value[1])};var x={selectFirst:false,formatItem:w,autoFill:false,highlight:false};u.autocomplete(p.ajax_autocomplete_tag_url,x);u.addClass("tag-input");return u};for(tag_name in p.tags){var q=p.tags[tag_name];var l=a(tag_name,q);var g=j(l,s,p.tags);m.append(g)}var n=d("");var f=i(n);m.blur(function(u){r=o(p.tags);if(r!=0){f.show();n.hide();m.removeClass("active-tag-area")}else{}});m.append(f);m.append(n);n.hide();m.click(function(w){var v=$(this).hasClas
s("active-tag-area");if($(w.target).hasClass("delete-tag-img")&&!v){return false}if($(w.target).hasClass("tag-name")&&!v){return false}$(this).addClass("active-tag-area");f.hide();n.show();n.focus();var u=function(y){var x=m.attr("id");if(($(y.target).attr("id")!=x)&&($(y.target).parents().filter(x).length==0)){m.blur();$(document).unbind("click",u)}};$(window).click(u);return false});if(p.use_toggle_link){m.hide()}else{var r=o(p.tags);if(r==0){f.hide();n.show()}}return this.addClass("tag-element")};
\ No newline at end of file
+var ac_tag_area_id_gen=1;jQuery.fn.autocomplete_tagging=function(c){var e={get_toggle_link_text_fn:function(u){var w="";var v=o(u);if(v!=0){w=v+(v!=0?" Tags":" Tag")}else{w="Add tags"}return w},tag_click_fn:function(u){},input_size:20,in_form:false,tags:{},use_toggle_link:true,item_id:"",add_tag_img:"",add_tag_img_rollover:"",delete_tag_img:"",ajax_autocomplete_tag_url:"",ajax_retag_url:"",ajax_delete_tag_url:"",ajax_add_tag_url:""};var p=jQuery.extend(e,c);var k="tag-area-"+(ac_tag_area_id_gen)++;var m=$("<div></div>").attr("id",k).addClass("tag-area");this.append(m);var o=function(u){if(u.length){return u.length}var v=0;for(element in u){v++}return v};var b=function(){var u=p.get_toggle_link_text_fn(p.tags);var v=$("<a href='/history/tags'>"+u+"</a>").addClass("toggle-link");v.click(function(){var w=(m.css("display")=="none");var x;if(w){x=function(){var y=o(p.tags);if(y==0){m.click()}}}else{x=function(){m.blur()}}m.slideToggle("fast",x);return false});return v};var s=b();
if(p.use_toggle_link){this.prepend(s)}var t=function(u){var v=new Array();for(key in u){v[v.length]=key+"-->"+u[key]}return"{"+v.join(",")+"}"};var a=function(v,u){return v+((u!=""&&u)?":"+u:"")};var h=function(u){return u.split(":")};var i=function(u){var v=$("<img src='"+p.add_tag_img+"' rollover='"+p.add_tag_img_rollover+"'/>").addClass("add-tag-button");v.click(function(){$(this).hide();m.click();return false});return v};var j=function(u){var v=$("<img src='"+p.delete_tag_img+"'/>").addClass("delete-tag-img");v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)});v.click(function(){var D=$(this).parent();var C=D.find(".tag-name").eq(0);var B=C.text();var z=h(B);var F=z[0];var y=z[1];var E=D.prev();D.remove();delete p.tags[F];var A=p.get_toggle_link_text_fn(p.tags);s.text(A);$.ajax({url:p.ajax_delete_tag_url,data:{tag_name:F},error:function(){p.tags[F]=y;if(E.hasClass("tag-button")){E.after(D)
}else{m.prepend(D)}var G=p.get_toggle_link_text_fn(p.tags);alert("Remove tag failed");s.text(G);v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)})},success:function(){}});return true});var w=$("<span>"+u+"</span>").addClass("tag-name");w.click(function(){p.tag_click_fn(u);return true});var x=$("<span></span>").addClass("tag-button");x.append(w);x.append(v);return x};var d=function(v){var u;if(p.in_form){u=$("<textarea id='history-tag-input' rows='1' cols='"+p.input_size+"' value='"+v+"'></textarea>")}else{u=$("<input id='history-tag-input' type='text' size='"+p.input_size+"' value='"+v+"'></input>")}u.keyup(function(D){if(D.keyCode==27){$(this).trigger("blur")}else{if((D.keyCode==13)||(D.keyCode==188)||(D.keyCode==32)){new_value=this.value;if(return_key_pressed_for_autocomplete==true){return_key_pressed_for_autocomplete=false;return false}if(new_value.indexOf(": ",new_value.length-2)!=-1){thi
s.value=new_value.substring(0,new_value.length-1);return false}if((D.keyCode==188)||(D.keyCode==32)){new_value=new_value.substring(0,new_value.length-1)}new_value=new_value.replace(/^\s+|\s+$/g,"");if(new_value.length<3){return false}this.value="";var A=j(new_value);var z=m.children(".tag-button");if(z.length!=0){var E=z.slice(z.length-1);E.after(A)}else{m.prepend(A)}var y=new_value.split(":");p.tags[y[0]]=y[1];var B=p.get_toggle_link_text_fn(p.tags);s.text(B);var C=$(this);$.ajax({url:p.ajax_add_tag_url,data:{new_tag:new_value},error:function(){A.remove();delete p.tags[y[0]];var F=p.get_toggle_link_text_fn(p.tags);s.text(F);alert("Add tag failed")},success:function(){C.flushCache()}});return false}}});var w=function(A,z,y,C,B){tag_name_and_value=C.split(":");return(tag_name_and_value.length==1?tag_name_and_value[0]:tag_name_and_value[1])};var x={selectFirst:false,formatItem:w,autoFill:false,highlight:false};u.autocomplete(p.ajax_autocomplete_tag_url,x);u.addClass("tag-input
");return u};for(tag_name in p.tags){var q=p.tags[tag_name];var l=a(tag_name,q);var g=j(l,s,p.tags);m.append(g)}var n=d("");var f=i(n);m.blur(function(u){r=o(p.tags);if(r!=0){f.show();n.hide();m.removeClass("active-tag-area")}else{}});m.append(f);m.append(n);n.hide();m.click(function(w){var v=$(this).hasClass("active-tag-area");if($(w.target).hasClass("delete-tag-img")&&!v){return false}if($(w.target).hasClass("tag-name")&&!v){return false}$(this).addClass("active-tag-area");f.hide();n.show();n.focus();var u=function(y){var x=m.attr("id");if(($(y.target).attr("id")!=x)&&($(y.target).parents().filter(x).length==0)){m.blur();$(document).unbind("click",u)}};$(window).click(u);return false});if(p.use_toggle_link){m.hide()}else{var r=o(p.tags);if(r==0){f.hide();n.show()}}return this.addClass("tag-element")};
\ No newline at end of file
diff -r ed4cbaf23c88 -r 032478337b82 templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Fri Sep 11 09:00:36 2009 -0400
+++ b/templates/dataset/edit_attributes.mako Fri Sep 11 14:57:02 2009 -0400
@@ -9,43 +9,8 @@
<% user, user_roles = trans.get_user_and_roles() %>
<%def name="javascripts()">
- ## <!--[if lt IE 7]>
- ## <script type='text/javascript' src="/static/scripts/IE7.js"> </script>
- ## <![endif]-->
- ${h.js( "jquery", "galaxy.base", "jquery.autocomplete", "autocomplete_tagging" )}
- <script type="text/javascript">
- $( document ).ready( function() {
- // Set up autocomplete tagger.
-<%
- ## Build string of tag name, values.
- tag_names_and_values = list()
- for tag in data.tags:
- tag_name = tag.user_tname
- tag_value = ""
- if tag.value is not None:
- tag_value = tag.user_value
- tag_names_and_values.append("\"" + tag_name + "\" : \"" + tag_value + "\"")
-%>
- var options = {
- tags : {${", ".join(tag_names_and_values)}},
- tag_click_fn: function(tag) { /* Do nothing. */ },
- use_toggle_link: false,
- input_size: 30,
- in_form: true,
- <% encoded_data_id = trans.security.encode_id(data.id) %>
- ajax_autocomplete_tag_url: "${h.url_for( controller='tag', action='tag_autocomplete_data', id=encoded_data_id, item_type="hda" )}",
- ajax_add_tag_url: "${h.url_for( controller='tag', action='add_tag_async', id=encoded_data_id, item_type="hda" )}",
- ajax_delete_tag_url: "${h.url_for( controller='tag', action='remove_tag_async', id=encoded_data_id, item_type="hda" )}",
- delete_tag_img: "${h.url_for('/static/images/delete_tag_icon_gray.png')}",
- delete_tag_img_rollover: "${h.url_for('/static/images/delete_tag_icon_white.png')}",
- add_tag_img: "${h.url_for('/static/images/add_icon.png')}",
- add_tag_img_rollover: "${h.url_for('/static/images/add_icon_dark.png')}",
- };
-% if trans.get_user() is not None:
- $("#dataset-tag-area").autocomplete_tagging(options);
-% endif
-});
- </script>
+ ${parent.javascripts()}
+ ${h.js( "jquery.autocomplete", "autocomplete_tagging" )}
</%def>
<%def name="datatype( dataset, datatypes )">
@@ -84,16 +49,18 @@
<div style="clear: both"></div>
</div>
%if trans.get_user() is not None:
- <div class="form-row">
- <label>
- Tags:
- </label>
- <div id="dataset-tag-area"
+ <%namespace file="../tagging_common.mako" import="render_tagging_element" />
+ <div class="form-row">
+ <label>
+ Tags:
+ </label>
+ <div id="dataset-tag-area"
style="float: left; margin-left: 1px; width: 295px; margin-right: 10px; border-style: inset; border-color: #ddd; border-width: 1px">
- </div>
- <div style="clear: both"></div>
- </div>
- %endif
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ ${render_tagging_element(data, "dataset-tag-area", use_toggle_link="false", in_form="true", input_size="30")}
+ %endif
%for name, spec in data.metadata.spec.items():
%if spec.visible:
<div class="form-row">
diff -r ed4cbaf23c88 -r 032478337b82 templates/history/grid.mako
--- a/templates/history/grid.mako Fri Sep 11 09:00:36 2009 -0400
+++ b/templates/history/grid.mako Fri Sep 11 14:57:02 2009 -0400
@@ -10,6 +10,7 @@
<%def name="javascripts()">
${parent.javascripts()}
+ ${h.js("jquery.autocomplete", "autocomplete_tagging" )}
<script type="text/javascript">
## TODO: generalize and move into galaxy.base.js
$(document).ready(function() {
@@ -58,7 +59,7 @@
</%def>
<%def name="stylesheets()">
- <link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" />
+ ${h.css( "base", "autocomplete_tagging" )}
<style>
## Not generic to all grids -- move to base?
.count-box {
diff -r ed4cbaf23c88 -r 032478337b82 templates/root/history.mako
--- a/templates/root/history.mako Fri Sep 11 09:00:36 2009 -0400
+++ b/templates/root/history.mako Fri Sep 11 14:57:02 2009 -0400
@@ -77,83 +77,6 @@
<% updateable = [data for data in reversed( datasets ) if data.visible and data.state not in [ "deleted", "empty", "error", "ok" ]] %>
${ ",".join( map(lambda data: "\"%s\" : \"%s\"" % (data.id, data.state), updateable) ) }
});
-
- // Set up autocomplete tagger.
-<%
- ## Build string of tag name, values.
- tag_names_and_values = list()
- for tag in history.tags:
- tag_name = tag.user_tname
- tag_value = ""
- if tag.value is not None:
- tag_value = tag.user_value
- tag_names_and_values.append("\"" + tag_name + "\" : \"" + tag_value + "\"")
-%>
- // Returns the number of keys (elements) in an array/dictionary.
- var array_length = function(an_array)
- {
- if (an_array.length)
- return an_array.length;
-
- var count = 0;
- for (element in an_array)
- count++;
- return count;
- };
-
- // Function get text to display on the toggle link.
- var get_toggle_link_text = function(tags)
- {
- var text = "";
- var num_tags = array_length(tags);
- if (num_tags != 0) {
- text = num_tags + (num_tags != 1 ? " Tags" : " Tag");
- /*
- // Show first N tags; hide the rest.
- var max_to_show = 1;
-
- // Build tag string.
- var tag_strs = new Array();
- var count = 0;
- for (tag_name in tags)
- {
- tag_value = tags[tag_name];
- tag_strs[tag_strs.length] = build_tag_str(tag_name, tag_value);
- if (++count == max_to_show)
- break;
- }
- tag_str = tag_strs.join(", ");
-
- // Finalize text.
- var num_tags_hiding = num_tags - max_to_show;
- text = "Tags: " + tag_str +
- (num_tags_hiding != 0 ? " and " + num_tags_hiding + " more" : "");
- */
- } else {
- // No tags.
- text = "Add tags to this history";
- }
- return text;
- };
-
- var options = {
- tags : {${", ".join(tag_names_and_values)}},
- get_toggle_link_text_fn: get_toggle_link_text,
- input_size: 15,
- tag_click_fn: function(tag) { /* Do nothing. */ },
- <% encoded_history_id = trans.security.encode_id(history.id) %>
- ajax_autocomplete_tag_url: "${h.url_for( controller='tag', action='tag_autocomplete_data', id=encoded_history_id, item_type="history" )}",
- ajax_add_tag_url: "${h.url_for( controller='tag', action='add_tag_async', id=encoded_history_id, item_type="history" )}",
- ajax_delete_tag_url: "${h.url_for( controller='tag', action='remove_tag_async', id=encoded_history_id, item_type="history" )}",
- delete_tag_img: "${h.url_for('/static/images/delete_tag_icon_gray.png')}",
- delete_tag_img_rollover: "${h.url_for('/static/images/delete_tag_icon_white.png')}",
- add_tag_img: "${h.url_for('/static/images/add_icon.png')}",
- add_tag_img_rollover: "${h.url_for('/static/images/add_icon_dark.png')}",
- };
-% if trans.get_user() is not None:
- $("#history-tag-area").autocomplete_tagging(options);
-% endif
-
});
// Functionized so AJAX'd datasets can call them
function initShowHide() {
@@ -361,7 +284,13 @@
<div id="history-tag-area" style="margin-bottom: 1em">
</div>
+<%namespace file="../tagging_common.mako" import="render_tagging_element" />
<%namespace file="history_common.mako" import="render_dataset" />
+
+%if trans.get_user() is not None:
+ <div id='history-tag-area' class="tag-element"></div>
+ ${render_tagging_element(history, "history-tag-area")}
+%endif
%if not datasets:
diff -r ed4cbaf23c88 -r 032478337b82 templates/tagging_common.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/tagging_common.mako Fri Sep 11 14:57:02 2009 -0400
@@ -0,0 +1,92 @@
+## Render the tags 'tags' as an autocomplete element.
+<%def name="render_tagging_element(tagged_item, elt_id, use_toggle_link='true', in_form='false', input_size='15')">
+ <script type="text/javascript">
+
+ //
+ // Set up autocomplete tagger.
+ //
+ <%
+ ## Build string of tag name, values.
+ tag_names_and_values = list()
+ for tag in tagged_item.tags:
+ tag_name = tag.user_tname
+ tag_value = ""
+ if tag.value is not None:
+ tag_value = tag.user_value
+ tag_names_and_values.append( ("\"" + tag_name + "\" : \"" + tag_value + "\"") )
+ %>
+ //
+ // Returns the number of keys (elements) in an array/dictionary.
+ //
+ var array_length = function(an_array)
+ {
+ if (an_array.length)
+ return an_array.length;
+
+ var count = 0;
+ for (element in an_array)
+ count++;
+ return count;
+ };
+
+ //
+ // Function get text to display on the toggle link.
+ //
+ var get_toggle_link_text = function(tags)
+ {
+ var text = "";
+ var num_tags = array_length(tags);
+ if (num_tags != 0)
+ {
+ text = num_tags + (num_tags != 1 ? " Tags" : " Tag");
+ /*
+ // Show first N tags; hide the rest.
+ var max_to_show = 1;
+
+ // Build tag string.
+ var tag_strs = new Array();
+ var count = 0;
+ for (tag_name in tags)
+ {
+ tag_value = tags[tag_name];
+ tag_strs[tag_strs.length] = build_tag_str(tag_name, tag_value);
+ if (++count == max_to_show)
+ break;
+ }
+ tag_str = tag_strs.join(", ");
+
+ // Finalize text.
+ var num_tags_hiding = num_tags - max_to_show;
+ text = "Tags: " + tag_str +
+ (num_tags_hiding != 0 ? " and " + num_tags_hiding + " more" : "");
+ */
+ }
+ else
+ {
+ // No tags.
+ text = "Add tags to history";
+ }
+ return text;
+ };
+
+ var options =
+ {
+ tags : {${unicode(", ".join(tag_names_and_values), 'utf-8')}},
+ get_toggle_link_text_fn: get_toggle_link_text,
+ tag_click_fn: function(tag) { /* Do nothing. */ },
+ <% tagged_item_id = trans.security.encode_id(tagged_item.id) %>
+ ajax_autocomplete_tag_url: "${h.url_for( controller='tag', action='tag_autocomplete_data', id=tagged_item_id, item_class=tagged_item.__class__.__name__ )}",
+ ajax_add_tag_url: "${h.url_for( controller='tag', action='add_tag_async', id=tagged_item_id, item_class=tagged_item.__class__.__name__ )}",
+ ajax_delete_tag_url: "${h.url_for( controller='tag', action='remove_tag_async', id=tagged_item_id, item_class=tagged_item.__class__.__name__ )}",
+ delete_tag_img: "${h.url_for('/static/images/delete_tag_icon_gray.png')}",
+ delete_tag_img_rollover: "${h.url_for('/static/images/delete_tag_icon_white.png')}",
+ add_tag_img: "${h.url_for('/static/images/add_icon.png')}",
+ add_tag_img_rollover: "${h.url_for('/static/images/add_icon_dark.png')}",
+ input_size: ${input_size},
+ in_form: ${in_form},
+ use_toggle_link: ${use_toggle_link}
+ };
+
+ $("#${elt_id}").autocomplete_tagging(options)
+ </script>
+</%def>
\ No newline at end of file
diff -r ed4cbaf23c88 -r 032478337b82 tool_conf.xml.main
--- a/tool_conf.xml.main Fri Sep 11 09:00:36 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,263 +0,0 @@
-<?xml version="1.0"?>
-<toolbox>
- <section name="Get Data" id="getext">
- <tool file="data_source/upload.xml"/>
- <tool file="data_source/ucsc_tablebrowser.xml" />
- <tool file="data_source/ucsc_tablebrowser_archaea.xml" />
- <tool file="data_source/microbial_import.xml" />
- <tool file="data_source/biomart.xml" />
- <tool file="data_source/gramene_mart.xml" />
- <tool file="data_source/flymine.xml" />
- <tool file="data_source/encode_db.xml" />
- <tool file="data_source/epigraph_import.xml" />
- </section>
- <section name="Send Data" id="send">
- <tool file="data_destination/epigraph.xml" />
- </section>
- <section name="ENCODE Tools" id="EncodeTools">
- <tool file="encode/gencode_partition.xml" />
- <tool file="encode/random_intervals.xml" />
- </section>
- <section name="Lift-Over" id="liftOver">
- <tool file="extract/liftOver_wrapper.xml" />
- </section>
- <section name="Text Manipulation" id="textutil">
- <tool file="filters/fixedValueColumn.xml" />
- <tool file="stats/column_maker.xml" />
- <tool file="filters/catWrapper.xml" />
- <tool file="filters/condense_characters.xml" />
- <tool file="filters/convert_characters.xml" />
- <tool file="filters/CreateInterval.xml" />
- <tool file="filters/cutWrapper.xml" />
- <tool file="filters/changeCase.xml" />
- <tool file="filters/pasteWrapper.xml" />
- <tool file="filters/remove_beginning.xml" />
- <tool file="filters/headWrapper.xml" />
- <tool file="filters/tailWrapper.xml" />
- </section>
- <section name="Convert Formats" id="convert">
- <tool file="filters/bed2gff.xml" />
- <tool file="fasta_tools/fasta_to_tabular.xml" />
- <tool file="filters/gff2bed.xml" />
- <tool file="maf/maf_to_bed.xml" />
- <tool file="maf/maf_to_fasta.xml" />
- <tool file="fasta_tools/tabular_to_fasta.xml" />
- </section>
- <section name="FASTA manipulation" id="fasta_manipulation">
- <tool file="fasta_tools/fasta_compute_length.xml" />
- <tool file="fasta_tools/fasta_filter_by_length.xml" />
- <tool file="fasta_tools/fasta_concatenate_by_species.xml" />
- <tool file="fasta_tools/fasta_to_tabular.xml" />
- <tool file="fasta_tools/tabular_to_fasta.xml" />
- </section>
- <section name="Filter and Sort" id="filter">
- <tool file="stats/filtering.xml" />
- <tool file="filters/sorter.xml" />
- <tool file="filters/grep.xml" />
- </section>
- <section name="Join, Subtract and Group" id="group">
- <tool file="filters/joiner.xml" />
- <tool file="filters/compare.xml"/>
- <tool file="new_operations/subtract_query.xml"/>
- <tool file="stats/grouping.xml" />
- </section>
- <section name="Extract Features" id="features">
- <tool file="filters/ucsc_gene_bed_to_exon_bed.xml" />
- <tool file="extract/extract_GFF_Features.xml" />
- </section>
- <section name="Fetch Sequences" id="fetchSeq">
- <tool file="extract/extract_genomic_dna.xml" />
- </section>
- <section name="Fetch Alignments" id="fetchAlign">
- <tool file="maf/interval2maf_pairwise.xml" />
- <tool file="maf/interval2maf.xml" />
- <tool file="maf/interval_maf_to_merged_fasta.xml" />
- <tool file="maf/genebed_maf_to_fasta.xml"/>
- <tool file="maf/maf_stats.xml"/>
- <tool file="maf/maf_thread_for_species.xml"/>
- <tool file="maf/maf_limit_to_species.xml"/>
- <tool file="maf/maf_limit_size.xml"/>
- <tool file="maf/maf_by_block_number.xml"/>
- <tool file="maf/maf_filter.xml"/>
- <!--
- <tool file="maf/maf_reverse_complement.xml"/>
- -->
- </section>
- <section name="Get Genomic Scores" id="scores">
- <tool file="stats/wiggle_to_simple.xml" />
- <tool file="stats/aggregate_binned_scores_in_intervals.xml" />
- <tool file="extract/phastOdds/phastOdds_tool.xml" />
- </section>
- <section name="Operate on Genomic Intervals" id="bxops">
- <tool file="new_operations/intersect.xml" />
- <tool file="new_operations/subtract.xml" />
- <tool file="new_operations/merge.xml" />
- <tool file="new_operations/concat.xml" />
- <tool file="new_operations/basecoverage.xml" />
- <tool file="new_operations/coverage.xml" />
- <tool file="new_operations/complement.xml" />
- <tool file="new_operations/cluster.xml" id="cluster" />
- <tool file="new_operations/join.xml" />
- <tool file="new_operations/get_flanks.xml" />
- <tool file="new_operations/flanking_features.xml" />
- <tool file="annotation_profiler/annotation_profiler.xml" />
- </section>
- <section name="Statistics" id="stats">
- <tool file="stats/gsummary.xml" />
- <tool file="filters/uniq.xml" />
- <tool file="stats/cor.xml" />
- </section>
- <section name="Graph/Display Data" id="plots">
- <tool file="plotting/histogram2.xml" />
- <tool file="plotting/scatterplot.xml" />
- <tool file="plotting/xy_plot.xml" />
- <tool file="visualization/GMAJ.xml" />
- <tool file="visualization/build_ucsc_custom_track.xml" />
- </section>
- <section name="Regional Variation" id="regVar">
- <tool file="regVariation/windowSplitter.xml" />
- <tool file="regVariation/featureCounter.xml" />
- <tool file="regVariation/quality_filter.xml" />
- <tool file="regVariation/maf_cpg_filter.xml" />
- <tool file="regVariation/getIndels_2way.xml" />
- <tool file="regVariation/getIndels_3way.xml" />
- <tool file="regVariation/getIndelRates_3way.xml" />
- <tool file="regVariation/substitutions.xml" />
- <tool file="regVariation/substitution_rates.xml" />
- <tool file="regVariation/microsats_alignment_level.xml" />
- <tool file="regVariation/microsats_mutability.xml" />
- </section>
- <section name="Multiple regression" id="multReg">
- <tool file="regVariation/linear_regression.xml" />
- <tool file="regVariation/best_regression_subsets.xml" />
- <tool file="regVariation/rcve.xml" />
- </section>
- <section name="Evolution: HyPhy" id="hyphy">
- <tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
- <tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
- <tool file="hyphy/hyphy_dnds_wrapper.xml" />
- </section>
- <section name="Metagenomic analyses" id="tax_manipulation">
- <tool file="taxonomy/gi2taxonomy.xml" />
- <tool file="taxonomy/t2t_report.xml" />
- <tool file="taxonomy/t2ps_wrapper.xml" />
- <tool file="taxonomy/find_diag_hits.xml" />
- <tool file="taxonomy/lca.xml" />
- <tool file="taxonomy/poisson2test.xml" />
- </section>
- <section name="Short Read Analysis" id="short_read_analysis">
- <tool file="metag_tools/short_reads_figure_score.xml" />
- <tool file="metag_tools/short_reads_trim_seq.xml" />
- <tool file="metag_tools/megablast_wrapper.xml" />
- <tool file="metag_tools/megablast_xml_parser.xml" />
- </section>
- <section name="EMBOSS" id="EMBOSSLite">
- <tool file="emboss_5/emboss_antigenic.xml" />
- <tool file="emboss_5/emboss_backtranseq.xml" />
- <tool file="emboss_5/emboss_banana.xml" />
- <tool file="emboss_5/emboss_biosed.xml" />
- <tool file="emboss_5/emboss_btwisted.xml" />
- <tool file="emboss_5/emboss_cai_custom.xml" />
- <tool file="emboss_5/emboss_cai.xml" />
- <tool file="emboss_5/emboss_chaos.xml" />
- <tool file="emboss_5/emboss_charge.xml" />
- <tool file="emboss_5/emboss_checktrans.xml" />
- <tool file="emboss_5/emboss_chips.xml" />
- <tool file="emboss_5/emboss_cirdna.xml" />
- <tool file="emboss_5/emboss_codcmp.xml" />
- <tool file="emboss_5/emboss_coderet.xml" />
- <tool file="emboss_5/emboss_compseq.xml" />
- <tool file="emboss_5/emboss_cpgplot.xml" />
- <tool file="emboss_5/emboss_cpgreport.xml" />
- <tool file="emboss_5/emboss_cusp.xml" />
- <tool file="emboss_5/emboss_cutseq.xml" />
- <tool file="emboss_5/emboss_dan.xml" />
- <tool file="emboss_5/emboss_degapseq.xml" />
- <tool file="emboss_5/emboss_descseq.xml" />
- <tool file="emboss_5/emboss_diffseq.xml" />
- <tool file="emboss_5/emboss_digest.xml" />
- <tool file="emboss_5/emboss_dotmatcher.xml" />
- <tool file="emboss_5/emboss_dotpath.xml" />
- <tool file="emboss_5/emboss_dottup.xml" />
- <tool file="emboss_5/emboss_dreg.xml" />
- <tool file="emboss_5/emboss_einverted.xml" />
- <tool file="emboss_5/emboss_epestfind.xml" />
- <tool file="emboss_5/emboss_equicktandem.xml" />
- <tool file="emboss_5/emboss_est2genome.xml" />
- <tool file="emboss_5/emboss_etandem.xml" />
- <tool file="emboss_5/emboss_extractfeat.xml" />
- <tool file="emboss_5/emboss_extractseq.xml" />
- <tool file="emboss_5/emboss_freak.xml" />
- <tool file="emboss_5/emboss_fuzznuc.xml" />
- <tool file="emboss_5/emboss_fuzzpro.xml" />
- <tool file="emboss_5/emboss_fuzztran.xml" />
- <tool file="emboss_5/emboss_garnier.xml" />
- <tool file="emboss_5/emboss_geecee.xml" />
- <tool file="emboss_5/emboss_getorf.xml" />
- <tool file="emboss_5/emboss_helixturnhelix.xml" />
- <tool file="emboss_5/emboss_hmoment.xml" />
- <tool file="emboss_5/emboss_iep.xml" />
- <tool file="emboss_5/emboss_infoseq.xml" />
- <tool file="emboss_5/emboss_isochore.xml" />
- <tool file="emboss_5/emboss_lindna.xml" />
- <tool file="emboss_5/emboss_marscan.xml" />
- <tool file="emboss_5/emboss_maskfeat.xml" />
- <tool file="emboss_5/emboss_maskseq.xml" />
- <tool file="emboss_5/emboss_matcher.xml" />
- <tool file="emboss_5/emboss_megamerger.xml" />
- <tool file="emboss_5/emboss_merger.xml" />
- <tool file="emboss_5/emboss_msbar.xml" />
- <tool file="emboss_5/emboss_needle.xml" />
- <tool file="emboss_5/emboss_newcpgreport.xml" />
- <tool file="emboss_5/emboss_newcpgseek.xml" />
- <tool file="emboss_5/emboss_newseq.xml" />
- <tool file="emboss_5/emboss_noreturn.xml" />
- <tool file="emboss_5/emboss_notseq.xml" />
- <tool file="emboss_5/emboss_nthseq.xml" />
- <tool file="emboss_5/emboss_octanol.xml" />
- <tool file="emboss_5/emboss_oddcomp.xml" />
- <tool file="emboss_5/emboss_palindrome.xml" />
- <tool file="emboss_5/emboss_pasteseq.xml" />
- <tool file="emboss_5/emboss_patmatdb.xml" />
- <tool file="emboss_5/emboss_pepcoil.xml" />
- <tool file="emboss_5/emboss_pepinfo.xml" />
- <tool file="emboss_5/emboss_pepnet.xml" />
- <tool file="emboss_5/emboss_pepstats.xml" />
- <tool file="emboss_5/emboss_pepwheel.xml" />
- <tool file="emboss_5/emboss_pepwindow.xml" />
- <tool file="emboss_5/emboss_pepwindowall.xml" />
- <tool file="emboss_5/emboss_plotcon.xml" />
- <tool file="emboss_5/emboss_plotorf.xml" />
- <tool file="emboss_5/emboss_polydot.xml" />
- <tool file="emboss_5/emboss_preg.xml" />
- <tool file="emboss_5/emboss_prettyplot.xml" />
- <tool file="emboss_5/emboss_prettyseq.xml" />
- <tool file="emboss_5/emboss_primersearch.xml" />
- <tool file="emboss_5/emboss_revseq.xml" />
- <tool file="emboss_5/emboss_seqmatchall.xml" />
- <tool file="emboss_5/emboss_seqret.xml" />
- <tool file="emboss_5/emboss_showfeat.xml" />
- <tool file="emboss_5/emboss_shuffleseq.xml" />
- <tool file="emboss_5/emboss_sigcleave.xml" />
- <tool file="emboss_5/emboss_sirna.xml" />
- <tool file="emboss_5/emboss_sixpack.xml" />
- <tool file="emboss_5/emboss_skipseq.xml" />
- <tool file="emboss_5/emboss_splitter.xml" />
- <tool file="emboss_5/emboss_supermatcher.xml" />
- <tool file="emboss_5/emboss_syco.xml" />
- <tool file="emboss_5/emboss_tcode.xml" />
- <tool file="emboss_5/emboss_textsearch.xml" />
- <tool file="emboss_5/emboss_tmap.xml" />
- <tool file="emboss_5/emboss_tranalign.xml" />
- <tool file="emboss_5/emboss_transeq.xml" />
- <tool file="emboss_5/emboss_trimest.xml" />
- <tool file="emboss_5/emboss_trimseq.xml" />
- <tool file="emboss_5/emboss_twofeat.xml" />
- <tool file="emboss_5/emboss_union.xml" />
- <tool file="emboss_5/emboss_vectorstrip.xml" />
- <tool file="emboss_5/emboss_water.xml" />
- <tool file="emboss_5/emboss_wobble.xml" />
- <tool file="emboss_5/emboss_wordcount.xml" />
- <tool file="emboss_5/emboss_wordmatch.xml" />
- </section>
-</toolbox>
1
0
11 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/f0adb6152df9
changeset: 2678:f0adb6152df9
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Thu Sep 10 21:24:06 2009 -0400
description:
Streamline history sharing, add a "Manage shared histories" section to the History options menu. Also use a better approach to setting peek on datasets.
5 file(s) affected in this change:
lib/galaxy/datatypes/data.py
lib/galaxy/web/controllers/history.py
templates/history/sharing.mako
templates/root/index.mako
test/functional/test_history_functions.py
diffs (482 lines):
diff -r 96ccd29277be -r f0adb6152df9 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Thu Sep 10 17:48:52 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Thu Sep 10 21:24:06 2009 -0400
@@ -416,8 +416,9 @@
count = 0
file_type = None
data_checked = False
- for line in file( file_name ):
- line = line[:WIDTH]
+ temp = open( file_name, "U" )
+ while count <= LINE_COUNT:
+ line = temp.readline( WIDTH )
if line and not is_multi_byte and not data_checked:
# See if we have a compressed or binary file
if line[0:2] == util.gzip_magic:
@@ -432,9 +433,8 @@
if file_type in [ 'gzipped', 'binary' ]:
break
lines.append( line )
- if count == LINE_COUNT:
- break
count += 1
+ temp.close()
if file_type in [ 'gzipped', 'binary' ]:
text = "%s file" % file_type
else:
diff -r 96ccd29277be -r f0adb6152df9 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Thu Sep 10 17:48:52 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Thu Sep 10 21:24:06 2009 -0400
@@ -4,7 +4,7 @@
from galaxy.model.mapping import desc
from galaxy.model.orm import *
from galaxy.util.json import *
-import webhelpers, logging
+import webhelpers, logging, operator
from datetime import datetime
from cgi import escape
@@ -31,10 +31,12 @@
return "deleted"
elif history.users_shared_with:
return "shared"
+ elif history.importable:
+ return "importable"
return ""
def get_link( self, trans, grid, item ):
- if item.users_shared_with:
- return dict( operation="sharing", id=item.id )
+ if item.users_shared_with or item.importable:
+ return dict( operation="sharing" )
return None
# Grid definition
title = "Stored histories"
@@ -55,9 +57,12 @@
operations = [
grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
grids.GridOperation( "Share", condition=( lambda item: not item.deleted ) ),
+ grids.GridOperation( "Unshare", condition=( lambda item: not item.deleted ) ),
grids.GridOperation( "Rename", condition=( lambda item: not item.deleted ) ),
grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ) ),
- grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) )
+ grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ),
+ grids.GridOperation( "Enable import via link", condition=( lambda item: item.deleted ) ),
+ grids.GridOperation( "Disable import via link", condition=( lambda item: item.deleted ) )
]
standard_filters = [
grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
@@ -99,7 +104,9 @@
]
operations = [
grids.GridOperation( "Clone" ),
- grids.GridOperation( "Unshare" )
+ grids.GridOperation( "Unshare" ),
+ grids.GridOperation( "Enable import via link", condition=( lambda item: item.deleted ) ),
+ grids.GridOperation( "Disable import via link", condition=( lambda item: item.deleted ) )
]
standard_filters = []
def build_initial_query( self, session ):
@@ -126,19 +133,19 @@
current_history = trans.get_history()
status = message = None
if 'operation' in kwargs:
- history_ids = util.listify( kwargs.get( 'id', [] ) )
- histories = []
operation = kwargs['operation'].lower()
if operation == "share":
return self.share( trans, **kwargs )
- elif operation == "rename":
+ if operation == "rename":
return self.rename( trans, **kwargs )
- elif operation == 'sharing':
- return self.sharing( trans, id=kwargs['id'] )
+ history_ids = util.listify( kwargs.get( 'id', [] ) )
+ if operation == "sharing":
+ return self.sharing( trans, id=history_ids )
# Display no message by default
status, message = None, None
refresh_history = False
# Load the histories and ensure they all belong to the current user
+ histories = []
for history_id in history_ids:
history = get_history( trans, history_id )
if history:
@@ -161,6 +168,21 @@
trans.template_context['refresh_frames'] = ['history']
elif operation == "undelete":
status, message = self._list_undelete( trans, histories )
+ elif operation == "unshare":
+ for history in histories:
+ husas = trans.app.model.HistoryUserShareAssociation.filter_by( history=history ).all()
+ for husa in husas:
+ husa.delete()
+ elif operation == "enable import via link":
+ for history in histories:
+ if not history.importable:
+ history.importable = True
+ elif operation == "disable import via link":
+ if history_ids:
+ histories = [ get_history( trans, history_id ) for history_id in history_ids ]
+ for history in histories:
+ if history.importable:
+ history.importable = False
trans.sa_session.flush()
# Render the list view
return self.stored_list_grid( trans, status=status, message=message, **kwargs )
@@ -237,24 +259,20 @@
msg = util.restore_text( kwargs.get( 'msg', '' ) )
status = message = None
if 'operation' in kwargs:
- id = kwargs.get( 'id', None )
+ ids = util.listify( kwargs.get( 'id', [] ) )
operation = kwargs['operation'].lower()
if operation == "clone":
- if not id:
+ if not ids:
message = "Select a history to clone"
return self.shared_list_grid( trans, status='error', message=message, **kwargs )
# When cloning shared histories, only copy active datasets
new_kwargs = { 'clone_choice' : 'active' }
return self.clone( trans, id, **new_kwargs )
elif operation == 'unshare':
- if not id:
+ if not ids:
message = "Select a history to unshare"
return self.shared_list_grid( trans, status='error', message=message, **kwargs )
- ids = util.listify( id )
- histories = []
- for history_id in ids:
- history = get_history( trans, history_id, check_ownership=False )
- histories.append( history )
+ histories = [ get_history( trans, history_id ) for history_id in ids ]
for history in histories:
# Current user is the user with which the histories were shared
association = trans.app.model.HistoryUserShareAssociation.filter_by( user=trans.user, history=history ).one()
@@ -262,6 +280,20 @@
association.flush()
message = "Unshared %d shared histories" % len( ids )
status = 'done'
+ elif operation == "enable import via link":
+ if ids:
+ histories = [ get_history( trans, id ) for id in ids ]
+ for history in histories:
+ if not history.importable:
+ history.importable = True
+ history.flush()
+ elif operation == "disable import via link":
+ if ids:
+ histories = [ get_history( trans, id ) for id in ids ]
+ for history in histories:
+ if history.importable:
+ history.importable = False
+ history.flush()
# Render the list view
return self.shared_list_grid( trans, status=status, message=message, **kwargs )
@web.expose
@@ -622,7 +654,9 @@
params = util.Params( kwd )
msg = util.restore_text ( params.get( 'msg', '' ) )
if id:
- histories = [ get_history( trans, id ) ]
+ ids = util.listify( id )
+ if ids:
+ histories = [ get_history( trans, history_id ) for history_id in ids ]
for history in histories:
if params.get( 'enable_import_via_link', False ):
history.importable = True
@@ -635,14 +669,34 @@
if not user:
msg = 'History (%s) does not seem to be shared with user (%s)' % ( history.name, user.email )
return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='error' )
- association = trans.app.model.HistoryUserShareAssociation.filter_by( user=user, history=history ).one()
- association.delete()
- association.flush()
- if not id:
- shared_msg = "History (%s) now shared with: %d users. " % ( history.name, len( history.users_shared_with ) )
- msg = '%s%s' % ( shared_msg, msg )
+ husas = trans.app.model.HistoryUserShareAssociation.filter_by( user=user, history=history ).all()
+ if husas:
+ for husa in husas:
+ husa.delete()
+ husa.flush()
+ histories = []
+ # Get all histories that have been shared with others
+ husas = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
+ .join( "history" ) \
+ .filter( and_( trans.app.model.History.user == trans.user,
+ trans.app.model.History.deleted == False ) ) \
+ .order_by( trans.app.model.History.table.c.name ) \
+ .all()
+ for husa in husas:
+ history = husa.history
+ if history not in histories:
+ histories.append( history )
+ # Get all histories that are importable
+ importables = trans.sa_session.query( trans.app.model.History ) \
+ .filter_by( user=trans.user, importable=True, deleted=False ) \
+ .order_by( trans.app.model.History.table.c.name ) \
+ .all()
+ for importable in importables:
+ if importable not in histories:
+ histories.append( importable )
+ # Sort the list of histories by history.name
+ histories.sort( key=operator.attrgetter( 'name') )
return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='done' )
-
@web.expose
@web.require_login( "rename histories" )
def rename( self, trans, id=None, name=None, **kwd ):
diff -r 96ccd29277be -r f0adb6152df9 templates/history/sharing.mako
--- a/templates/history/sharing.mako Thu Sep 10 17:48:52 2009 -0400
+++ b/templates/history/sharing.mako Thu Sep 10 21:24:06 2009 -0400
@@ -1,75 +1,63 @@
<%inherit file="/base.mako"/>
<%namespace file="/message.mako" import="render_msg" />
-<h2>Public access via link</h2>
+##<h2>Import via link</h2>
%if msg:
${render_msg( msg, messagetype )}
%endif
-%for history in histories:
- <p>
- %if history.importable:
- Send the following URL to users as an easy way for them to import the history, making a copy of their own:
- <% url = h.url_for( controller='history', action='imp', id=trans.security.encode_id(history.id), qualified=True ) %>
- <blockquote>
- <a href="${url}">${url}</a>
- </blockquote>
- <br/>
- <form action="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ) )}" method="POST">
- <input class="action-button" type="submit" name="disable_import_via_link" value="Disable import via link">
- </form>
- %else:
- This history is currently restricted (only you and the users listed below
- can access it). Enabling the following option will generate a URL that you
- can give to a user to allow them to import this history.
- <br/>
- <form action="${h.url_for( action='sharing', id=trans.security.encode_id(history.id) )}" method="POST">
- <input class="action-button" type="submit" name="enable_import_via_link" value="Enable import via link">
- </form>
- %endif
- </p>
- <h2>Sharing with specific users</h2>
- %if history.users_shared_with:
- <ul class="manage-table-actions">
- <li>
- <a class="action-button" href="${h.url_for( controller='history', action='share', id=trans.security.encode_id( history.id ) )}">
- <span>Share with another user</span>
- </a>
- </li>
- </ul>
- <p>
- The following users will see this history in their list of histories
- shared with them by others, and they will be able to create their own copy of it:
- </p>
- <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
- <tr class="header">
- <th>History '${history.name}' currently shared with</th>
- <th></th>
- </tr>
- %for i, association in enumerate( history.users_shared_with ):
- <% user = association.user %>
- <tr>
- <td>
- ${user.email}
- <a id="user-${i}-popup" class="popup-arrow" style="display: none;">▼</a>
- </td>
- <td>
- %if len( histories ) == 1:
- ## Only allow unsharing if we're dealing with 1 history, otherwise
- ## page refreshes screw things up
- <div popupmenu="user-${i}-popup">
- <a class="action-button" href="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ), unshare_user=trans.security.encode_id( user.id ) )}">Unshare</a>
+<h2>Histories that you've shared with others or enabled to be imported</h2>
+
+%if not histories:
+ You have no histories that you've shared with others or enabled to be imported
+%else:
+ %for history in histories:
+ <div class="toolForm">
+ <div class="toolFormTitle">History '${history.name}' shared with</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <div style="float: right;">
+ <a class="action-button" href="${h.url_for( controller='history', action='share', id=trans.security.encode_id( history.id ) )}">
+ <span>Share with another user</span>
+ </a>
+ </div>
+ </div>
+ %if history.users_shared_with:
+ %for i, association in enumerate( history.users_shared_with ):
+ <% user = association.user %>
+ <div class="form-row">
+ <a class="action-button" href="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ), unshare_user=trans.security.encode_id( user.id ) )}">Unshare</a>
+ ${user.email}
+ </div>
+ %endfor
+ %endif
+ %if history.importable:
+ <div class="form-row">
+ <% url = h.url_for( controller='history', action='imp', id=trans.security.encode_id(history.id), qualified=True ) %>
+ <a href="${url}">${url}</a>
+ <div class="toolParamHelp" style="clear: both;">
+ Send the above URL to users as an easy way for them to import the history, making a copy of their own
+ </div>
+ </div>
+ <div class="form-row">
+ <form action="${h.url_for( controller='history', action='sharing', id=trans.security.encode_id( history.id ) )}" method="POST">
+ <div class="form-row">
+ <input class="action-button" type="submit" name="disable_import_via_link" value="Disable import via link">
</div>
- %endif
- </td>
- </tr>
- %endfor
- </table>
- %else:
- <p>You have not shared this history with any users.</p>
- <a class="action-button" href="${h.url_for( controller='history', action='share', id=trans.security.encode_id(history.id) )}">
- <span>Share with another user</span>
- </a>
- %endif
-%endfor
+ </form>
+ </div>
+ %else:
+ <form action="${h.url_for( action='sharing', id=trans.security.encode_id(history.id) )}" method="POST">
+ <div class="form-row">
+ <input class="action-button" type="submit" name="enable_import_via_link" value="Enable import via link">
+ <div class="toolParamHelp" style="clear: both;">
+ Click to generate a URL that you can give to a user to allow them to import this history, making a copy of their own
+ </div>
+ </div>
+ </form>
+ %endif
+ </div>
+ </div>
+ %endfor
+%endif
diff -r 96ccd29277be -r f0adb6152df9 templates/root/index.mako
--- a/templates/root/index.mako Thu Sep 10 17:48:52 2009 -0400
+++ b/templates/root/index.mako Thu Sep 10 21:24:06 2009 -0400
@@ -9,9 +9,6 @@
"List your histories": null,
"Stored by you": function() {
galaxy_main.location = "${h.url_for( controller='history', action='list')}";
- },
- "Shared with you": function() {
- galaxy_main.location = "${h.url_for( controller='history', action='list_shared')}";
},
"Current History": null,
"Create new": function() {
@@ -32,10 +29,19 @@
"Show deleted datasets": function() {
galaxy_history.location = "${h.url_for( controller='root', action='history', show_deleted=True)}";
},
- "Delete": function() {
- if ( confirm( "Really delete the current history?" ) ) {
+ "Delete": function()
+ {
+ if ( confirm( "Really delete the current history?" ) )
+ {
galaxy_main.location = "${h.url_for( controller='history', action='delete_current' )}";
}
+ },
+ "Manage shared histories": null,
+ "Shared by you": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='list', operation='sharing' )}";
+ },
+ "Shared with you": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='list_shared')}";
}
});
});
diff -r 96ccd29277be -r f0adb6152df9 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Thu Sep 10 17:48:52 2009 -0400
+++ b/test/functional/test_history_functions.py Thu Sep 10 21:24:06 2009 -0400
@@ -141,14 +141,13 @@
check_str_after_submit='You cannot send histories to yourself.' )
# Share history3 with 1 valid user
self.share_current_history( regular_user1.email,
- check_str=history3.name,
- check_str_after_submit='History (%s) now shared with: 1 users' % history3.name )
+ check_str=history3.name )
# Check out list of histories to make sure history3 was shared
- self.view_stored_active_histories( check_str='operation=sharing&id=%s">shared' % self.security.encode_id( history3.id ) )
+ self.view_stored_active_histories( check_str='operation=sharing">shared' )
# Enable importing history3 via a URL
self.enable_import_via_link( self.security.encode_id( history3.id ),
check_str='Unshare',
- check_str_after_submit='Send the following URL to users' )
+ check_str_after_submit='Send the above URL to users' )
# Make sure history3 is now import-able
history3.refresh()
if not history3.importable:
@@ -159,7 +158,7 @@
check_str_after_submit='You cannot import your own history.' )
# Disable the import link for history3
self.disable_import_via_link( self.security.encode_id( history3.id ),
- check_str='Send the following URL to users',
+ check_str='Send the above URL to users',
check_str_after_submit='Enable import via link' )
# Try importing history3 after disabling the URL
self.import_history_via_url( self.security.encode_id( history3.id ),
@@ -274,12 +273,10 @@
self.upload_file( '2.bed', dbkey='hg18' )
ids = '%s,%s' % ( self.security.encode_id( history3.id ), self.security.encode_id( history4.id ) )
emails = '%s,%s' % ( regular_user2.email, regular_user3.email )
- check_str_after_submit = 'History (%s) now shared with: 3 users.' % history3.name
self.share_histories_with_users( ids,
emails,
check_str1='Share 2 histories',
- check_str2=history4.name,
- check_str_after_submit=check_str_after_submit )
+ check_str2=history4.name )
self.logout()
self.login( email=regular_user2.email )
# Shared history3 should be in regular_user2's list of shared histories
@@ -342,12 +339,10 @@
"""Testing sharing a restricted history by making the datasets public"""
# Logged in as admin_user
action_check_str = 'The following datasets can be shared with %s by updating their permissions' % regular_user1.email
- action_check_str_after_submit = 'History (%s) now shared with: 1 users.' % history5.name
# Current history is history5
self.share_current_history( regular_user1.email,
action='public',
- action_check_str=action_check_str,
- action_check_str_after_submit=action_check_str_after_submit )
+ action_check_str=action_check_str )
self.logout()
self.login( email=regular_user1.email )
# Shared history5 should be in regular_user1's list of shared histories
@@ -375,12 +370,10 @@
self.upload_file( '2.bed', dbkey='hg18' )
check_str_after_submit = 'The following datasets can be shared with %s with no changes' % regular_user2.email
check_str_after_submit2 = 'The following datasets can be shared with %s by updating their permissions' % regular_user2.email
- action_check_str_after_submit = 'History (%s) now shared with: 2 users.' % history5.name
self.share_current_history( regular_user2.email,
check_str_after_submit=check_str_after_submit,
check_str_after_submit2=check_str_after_submit2,
- action='private',
- action_check_str_after_submit=action_check_str_after_submit )
+ action='private' )
# We should now have a new sharing role
global sharing_role
role_name = 'Sharing role for: %s, %s' % ( admin_user.email, regular_user2.email )
@@ -470,12 +463,10 @@
check_str_after_submit = 'The following datasets can be shared with %s with no changes' % email
check_str_after_submit2 = 'The following datasets can be shared with %s by updating their permissions' % email
# history5 will be shared with regular_user1, regular_user2 and regular_user3
- action_check_str_after_submit = 'History (%s) now shared with: 3 users.' % history5.name
self.share_current_history( email,
check_str_after_submit=check_str_after_submit,
check_str_after_submit2=check_str_after_submit2,
- action='share_anyway',
- action_check_str_after_submit=action_check_str_after_submit )
+ action='share_anyway' )
# Check security on clone of history5 for regular_user2
self.logout()
self.login( email=regular_user2.email )
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/96ccd29277be
changeset: 2677:96ccd29277be
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Sep 10 17:48:52 2009 -0400
description:
Merge
0 file(s) affected in this change:
diffs (69 lines):
diff -r f2e4673d784b -r 96ccd29277be cron/updateucsc.sh.sample
--- a/cron/updateucsc.sh.sample Thu Sep 10 17:48:37 2009 -0400
+++ b/cron/updateucsc.sh.sample Thu Sep 10 17:48:52 2009 -0400
@@ -6,7 +6,8 @@
# Edit this line to refer to galaxy's path:
GALAXY=/galaxy/path
-export PYTHONPATH=${GALAXY}/lib
+PYTHONPATH=${GALAXY}/lib
+export PYTHONPATH
# setup directories
echo "Creating required directories."
@@ -32,7 +33,11 @@
python ${GALAXY}/cron/parse_builds.py > ${GALAXY}/tool-data/shared/ucsc/new/builds.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt
+ diff ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt
+ fi
else
echo "Failed to update builds.txt" >&2
fi
@@ -42,7 +47,11 @@
python ${GALAXY}/cron/parse_builds_3_sites.py > ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt
+ diff ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt
+ fi
else
echo "Failed to update builds.txt" >&2
fi
@@ -52,7 +61,16 @@
python ${GALAXY}/cron/build_chrom_db.py ${GALAXY}/tool-data/shared/ucsc/chrom/new/ ${GALAXY}/tool-data/shared/ucsc/builds.txt
if [ $? -eq 0 ]
then
- cp -uf ${GALAXY}/tool-data/shared/ucsc/chrom/new/*.len ${GALAXY}/tool-data/shared/ucsc/chrom/
+ for src in ${GALAXY}/tool-data/shared/ucsc/chrom/new/*.len
+ do
+ dst=${GALAXY}/tool-data/shared/ucsc/chrom/`basename $src`
+ diff $src $dst > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ echo "cp -f $src $dst"
+ cp -f $src $dst
+ fi
+ done
else
echo "Failed to update chromInfo tables." >&2
fi
diff -r f2e4673d784b -r 96ccd29277be tools/new_operations/flanking_features.py
--- a/tools/new_operations/flanking_features.py Thu Sep 10 17:48:37 2009 -0400
+++ b/tools/new_operations/flanking_features.py Thu Sep 10 17:48:52 2009 -0400
@@ -129,7 +129,7 @@
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
elif result_up:
map(outfields.append, result_up[res_ind].other)
- else:
+ elif result_down:
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
yield outfields
1
0
11 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/2a15e0eca0b9
changeset: 2675:2a15e0eca0b9
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Sep 10 16:48:11 2009 -0400
description:
Support for (1) ordering tags and (2) page tags.
5 file(s) affected in this change:
lib/galaxy/model/__init__.py
lib/galaxy/model/mapping.py
lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
lib/galaxy/tags/tag_handler.py
lib/galaxy/web/controllers/tag.py
diffs (249 lines):
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/model/__init__.py Thu Sep 10 16:48:11 2009 -0400
@@ -1145,7 +1145,8 @@
return "Tag(id=%s, type=%i, parent_id=%s, name=%s)" % ( self.id, self.type, self.parent_id, self.name )
class ItemTagAssociation ( object ):
- def __init__( self, item_id=None, tag_id=None, user_tname=None, value=None ):
+ def __init__( self, id=None, item_id=None, tag_id=None, user_tname=None, value=None ):
+ self.id = id
self.item_id = item_id
self.tag_id = tag_id
self.user_tname = user_tname
@@ -1165,6 +1166,8 @@
class HistoryDatasetAssociationTagAssociation ( ItemTagAssociation ):
pass
+class PageTagAssociation ( ItemTagAssociation ):
+ pass
## ---- Utility methods -------------------------------------------------------
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/model/mapping.py Thu Sep 10 16:48:11 2009 -0400
@@ -552,6 +552,7 @@
UniqueConstraint( "name" ) )
HistoryTagAssociation.table = Table( "history_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
@@ -559,6 +560,7 @@
Column( "user_value", TrimmedString(255), index=True) )
DatasetTagAssociation.table = Table( "dataset_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
@@ -566,7 +568,16 @@
Column( "user_value", TrimmedString(255), index=True) )
HistoryDatasetAssociationTagAssociation.table = Table( "history_dataset_association_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+PageTagAssociation.table = Table( "page_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
@@ -672,7 +683,7 @@
visible_children=relation(
HistoryDatasetAssociation,
primaryjoin=( ( HistoryDatasetAssociation.table.c.parent_id == HistoryDatasetAssociation.table.c.id ) & ( HistoryDatasetAssociation.table.c.visible == True ) ) ),
- tags=relation(HistoryDatasetAssociationTagAssociation, backref='history_tag_associations')
+ tags=relation(HistoryDatasetAssociationTagAssociation, order_by=HistoryDatasetAssociationTagAssociation.table.c.id, backref='history_tag_associations')
) )
assign_mapper( context, Dataset, Dataset.table,
@@ -689,7 +700,7 @@
active_library_associations=relation(
LibraryDatasetDatasetAssociation,
primaryjoin=( ( Dataset.table.c.id == LibraryDatasetDatasetAssociation.table.c.dataset_id ) & ( LibraryDatasetDatasetAssociation.table.c.deleted == False ) ) ),
- tags=relation(DatasetTagAssociation, backref='datasets')
+ tags=relation(DatasetTagAssociation, order_by=DatasetTagAssociation.table.c.id, backref='datasets')
) )
assign_mapper( context, HistoryDatasetAssociationDisplayAtAuthorization, HistoryDatasetAssociationDisplayAtAuthorization.table,
@@ -709,7 +720,7 @@
properties=dict( galaxy_sessions=relation( GalaxySessionToHistoryAssociation ),
datasets=relation( HistoryDatasetAssociation, backref="history", order_by=asc(HistoryDatasetAssociation.table.c.hid) ),
active_datasets=relation( HistoryDatasetAssociation, primaryjoin=( ( HistoryDatasetAssociation.table.c.history_id == History.table.c.id ) & ( not_( HistoryDatasetAssociation.table.c.deleted ) ) ), order_by=asc( HistoryDatasetAssociation.table.c.hid ), viewonly=True ),
- tags=relation(HistoryTagAssociation, backref="histories")
+ tags=relation(HistoryTagAssociation, order_by=HistoryTagAssociation.table.c.id, backref="histories")
) )
assign_mapper( context, HistoryUserShareAssociation, HistoryUserShareAssociation.table,
@@ -967,7 +978,8 @@
primaryjoin=( Page.table.c.id == PageRevision.table.c.page_id ) ),
latest_revision=relation( PageRevision, post_update=True,
primaryjoin=( Page.table.c.latest_revision_id == PageRevision.table.c.id ),
- lazy=False )
+ lazy=False ),
+ tags=relation(PageTagAssociation, order_by=PageTagAssociation.table.c.id, backref="pages")
) )
assign_mapper( context, Tag, Tag.table,
@@ -988,6 +1000,11 @@
properties=dict( tag=relation(Tag, backref="tagged_history_dataset_associations") ),
primary_key=[HistoryDatasetAssociationTagAssociation.table.c.history_dataset_association_id, HistoryDatasetAssociationTagAssociation.table.c.tag_id]
)
+
+assign_mapper( context, PageTagAssociation, PageTagAssociation.table,
+ properties=dict( tag=relation(Tag, backref="tagged_pages") ),
+ primary_key=[PageTagAssociation.table.c.page_id, PageTagAssociation.table.c.tag_id]
+ )
def db_next_hid( self ):
"""
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py Thu Sep 10 16:48:11 2009 -0400
@@ -0,0 +1,116 @@
+"""
+This migration script provides support for (a) ordering tags by recency and
+(b) tagging pages. This script deletes all existing tags.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.exceptions import *
+from migrate import *
+import migrate.changeset
+
+import datetime
+now = datetime.datetime.utcnow
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData( migrate_engine )
+
+def display_migration_details():
+ print ""
+ print "This migration script provides support for (a) ordering tags by recency and"
+ print "(b) tagging pages. This script deletes all existing tags."
+
+HistoryTagAssociation_table = Table( "history_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+DatasetTagAssociation_table = Table( "dataset_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+HistoryDatasetAssociationTagAssociation_table = Table( "history_dataset_association_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+PageTagAssociation_table = Table( "page_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+def upgrade():
+ display_migration_details()
+ metadata.reflect()
+
+ #
+ # Recreate tables.
+ #
+ try:
+ HistoryTagAssociation_table.drop()
+ HistoryTagAssociation_table.create()
+ except Exception, e:
+ print "Recreating history_tag_association table failed: %s" % str( e )
+ log.debug( "Recreating history_tag_association table failed: %s" % str( e ) )
+
+ try:
+ DatasetTagAssociation_table.drop()
+ DatasetTagAssociation_table.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Recreating dataset_tag_association table failed: %s" % str( e ) )
+
+ try:
+ HistoryDatasetAssociationTagAssociation_table.drop()
+ HistoryDatasetAssociationTagAssociation_table.create()
+ except OperationalError, e:
+ # Handle error that results from and index name that is too long; this occurs
+ # in MySQL.
+ if str(e).find("CREATE INDEX") != -1:
+ # Manually create index.
+ i = Index( "ix_hda_ta_history_dataset_association_id", HistoryDatasetAssociationTagAssociation_table.c.history_dataset_association_id )
+ try:
+ i.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Adding index 'ix_hda_ta_history_dataset_association_id' to table 'history_dataset_association_tag_association' table failed: %s" % str( e ) )
+ except Exception, e:
+ print str(e)
+ log.debug( "Recreating history_dataset_association_tag_association table failed: %s" % str( e ) )
+
+ # Create page_tag_association table.
+ try:
+ PageTagAssociation_table.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Creating page_tag_association table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+
+ # No need to downgrade other tagging tables. They work fine with verision 16 code.
+
+ # Drop page_tag_association table.
+ try:
+ PageTagAssociation_table.drop()
+ except Exception, e:
+ print str(e)
+ log.debug( "Dropping page_tag_association table failed: %s" % str( e ) )
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/tags/tag_handler.py
--- a/lib/galaxy/tags/tag_handler.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/tags/tag_handler.py Thu Sep 10 16:48:11 2009 -0400
@@ -1,4 +1,4 @@
-from galaxy.model import Tag, History, HistoryTagAssociation, Dataset, DatasetTagAssociation, HistoryDatasetAssociation, HistoryDatasetAssociationTagAssociation
+from galaxy.model import Tag
import re
class TagHandler( object ):
diff -r dbbc63c0630a -r 2a15e0eca0b9 lib/galaxy/web/controllers/tag.py
--- a/lib/galaxy/web/controllers/tag.py Thu Sep 10 10:42:50 2009 -0400
+++ b/lib/galaxy/web/controllers/tag.py Thu Sep 10 16:48:11 2009 -0400
@@ -1,6 +1,9 @@
"""
Tags Controller: handles tagging/untagging of entities and provides autocomplete support.
"""
+
+from galaxy.model import History, HistoryTagAssociation, Dataset, DatasetTagAssociation, \
+ HistoryDatasetAssociation, HistoryDatasetAssociationTagAssociation, Page, PageTagAssociation
from galaxy.web.base.controller import *
from galaxy.tags.tag_handler import *
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/f2e4673d784b
changeset: 2676:f2e4673d784b
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Sep 10 17:48:37 2009 -0400
description:
merge
0 file(s) affected in this change:
diffs (151 lines):
diff -r 2a15e0eca0b9 -r f2e4673d784b lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Thu Sep 10 16:48:11 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Sep 10 17:48:37 2009 -0400
@@ -502,13 +502,6 @@
context = self.get_dataset_finish_context( job_context, dataset_assoc.dataset.dataset )
#should this also be checking library associations? - can a library item be added from a history before the job has ended? - lets not allow this to occur
for dataset in dataset_assoc.dataset.dataset.history_associations: #need to update all associated output hdas, i.e. history was shared with job running
- if context.get( 'path', None ):
- # The tool can set an alternate output path for the dataset.
- try:
- shutil.move( context['path'], dataset.file_name )
- except ( IOError, OSError ):
- if not context['stderr']:
- context['stderr'] = 'This dataset could not be processed'
dataset.blurb = 'done'
dataset.peek = 'no peek'
dataset.info = context['stdout'] + context['stderr']
@@ -707,6 +700,13 @@
sizes.append( ( outfile, os.stat( outfile ).st_size ) )
return sizes
def setup_external_metadata( self, exec_dir = None, tmp_dir = None, dataset_files_path = None, config_root = None, datatypes_config = None, **kwds ):
+ # extension could still be 'auto' if this is the upload tool.
+ job = model.Job.get( self.job_id )
+ for output_dataset_assoc in job.output_datasets:
+ if output_dataset_assoc.dataset.ext == 'auto':
+ context = self.get_dataset_finish_context( dict(), output_dataset_assoc.dataset.dataset )
+ output_dataset_assoc.dataset.extension = context.get( 'ext', 'data' )
+ mapping.context.current.flush()
if tmp_dir is None:
#this dir should should relative to the exec_dir
tmp_dir = self.app.config.new_file_path
@@ -716,7 +716,6 @@
config_root = self.app.config.root
if datatypes_config is None:
datatypes_config = self.app.config.datatypes_config
- job = model.Job.get( self.job_id )
return self.external_output_metadata.setup_external_metadata( [ output_dataset_assoc.dataset for output_dataset_assoc in job.output_datasets ], exec_dir = exec_dir, tmp_dir = tmp_dir, dataset_files_path = dataset_files_path, config_root = config_root, datatypes_config = datatypes_config, **kwds )
class DefaultJobDispatcher( object ):
diff -r 2a15e0eca0b9 -r f2e4673d784b lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Sep 10 16:48:11 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Sep 10 17:48:37 2009 -0400
@@ -144,7 +144,7 @@
job.add_parameter( name, value )
job.add_parameter( 'paramfile', to_json_string( json_file_path ) )
for i, dataset in enumerate( data_list ):
- job.add_output_dataset( i, dataset )
+ job.add_output_dataset( 'output%i' % i, dataset )
job.state = trans.app.model.Job.states.NEW
trans.app.model.flush()
diff -r 2a15e0eca0b9 -r f2e4673d784b tools/data_source/upload.py
--- a/tools/data_source/upload.py Thu Sep 10 16:48:11 2009 -0400
+++ b/tools/data_source/upload.py Thu Sep 10 17:48:37 2009 -0400
@@ -115,7 +115,14 @@
return ( True, False, test_ext )
return ( True, True, test_ext )
-def add_file( dataset, json_file ):
+def parse_outputs( args ):
+ rval = {}
+ for arg in args:
+ id, path = arg.split( ':', 1 )
+ rval[int( id )] = path
+ return rval
+
+def add_file( dataset, json_file, output_path ):
data_type = None
line_count = None
@@ -229,16 +236,18 @@
ext = dataset.ext
if ext == 'auto':
ext = 'data'
+ # Move the dataset to its "real" path
+ shutil.move( dataset.path, output_path )
+ # Write the job info
info = dict( type = 'dataset',
dataset_id = dataset.dataset_id,
- path = dataset.path,
ext = ext,
stdout = 'uploaded %s file' % data_type,
name = dataset.name,
line_count = line_count )
json_file.write( to_json_string( info ) + "\n" )
-def add_composite_file( dataset, json_file ):
+def add_composite_file( dataset, json_file, output_path ):
if dataset.composite_files:
os.mkdir( dataset.extra_files_path )
for name, value in dataset.composite_files.iteritems():
@@ -253,17 +262,21 @@
else:
sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( dataset.extra_files_path, name ) )
+ # Move the dataset to its "real" path
+ shutil.move( dataset.primary_file, output_path )
+ # Write the job info
info = dict( type = 'dataset',
dataset_id = dataset.dataset_id,
- path = dataset.primary_file,
stdout = 'uploaded %s file' % dataset.file_type )
json_file.write( to_json_string( info ) + "\n" )
def __main__():
- if len( sys.argv ) != 2:
- print >>sys.stderr, 'usage: upload.py <json paramfile>'
+ if len( sys.argv ) < 2:
+ print >>sys.stderr, 'usage: upload.py <json paramfile> <output spec> ...'
sys.exit( 1 )
+
+ output_paths = parse_outputs( sys.argv[2:] )
json_file = open( 'galaxy.json', 'w' )
@@ -271,10 +284,16 @@
dataset = from_json_string( line )
dataset = util.bunch.Bunch( **safe_dict( dataset ) )
+ try:
+ output_path = output_paths[int( dataset.dataset_id )]
+ except:
+ print >>sys.stderr, 'Output path for dataset %s not found on command line' % dataset.dataset_id
+ sys.exit( 1 )
+
if dataset.type == 'composite':
- add_composite_file( dataset, json_file )
+ add_composite_file( dataset, json_file, output_path )
else:
- add_file( dataset, json_file )
+ add_file( dataset, json_file, output_path )
# clean up paramfile
try:
diff -r 2a15e0eca0b9 -r f2e4673d784b tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Sep 10 16:48:11 2009 -0400
+++ b/tools/data_source/upload.xml Thu Sep 10 17:48:37 2009 -0400
@@ -7,6 +7,12 @@
<action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
<command interpreter="python">
upload.py $paramfile
+ #set $outnum = 0
+ #while $varExists('output%i' % $outnum):
+ #set $output = $getVar('output%i' % $outnum)
+ #set $outnum += 1
+ ${output.dataset.dataset.id}:${output}
+ #end while
</command>
<inputs>
<param name="file_type" type="select" label="File Format" help="Which format? See help below">
1
0
11 Sep '09
details: http://www.bx.psu.edu/hg/galaxy/rev/0a41293e679a
changeset: 2674:0a41293e679a
user: guru
date: Thu Sep 10 17:35:21 2009 -0400
description:
Bug fix for 'Fetch closest feature' tool.
1 file(s) affected in this change:
tools/new_operations/flanking_features.py
diffs (12 lines):
diff -r ce8c57840343 -r 0a41293e679a tools/new_operations/flanking_features.py
--- a/tools/new_operations/flanking_features.py Thu Sep 10 17:31:05 2009 -0400
+++ b/tools/new_operations/flanking_features.py Thu Sep 10 17:35:21 2009 -0400
@@ -129,7 +129,7 @@
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
elif result_up:
map(outfields.append, result_up[res_ind].other)
- else:
+ elif result_down:
map(outfields.append, result_down[-1].other) #The last element of result_down will be the closest element to the given interval
yield outfields
1
0