galaxy-dev
March 2010
36 participants, 171 discussions
Hi,
I have configured and built my own Galaxy server. I have set up most of the genome databases, but I have a problem getting the "Tables to Use" to be displayed on the web interface.
I have looked at the following files.
1. annotation_profiler_options.xml
2. annotation_profiler_valid_builds.txt
I did not change the annotation_profiler_options.xml file (used as is) and annotation_profiler_valid_builds.txt contains just one line,
hg18
which is fine since I am starting with human.
Where do I specify the location of my hg18?
Please let me know if you need more information.
Thanks for your help.
Best regards,
Chee Seng, Chan
Genome Institute of Singapore
60 Biopolis Street, Genome
#02-01 Singapore 138672
DID 64788065
-------------------------------
This email is confidential and may be privileged. If you are not the intended recipient, please delete it and notify us immediately. Please do not copy or use it for any purpose, or disclose its contents to any other person. Thank you.
-------------------------------
Hi,
I recently discovered an issue that prevents me from easily deleting
multiple histories at once.
This is the method I use:
Click "Options" on the right side menu -> Go to "Saved Histories" -> Click
on the check box of the history to be deleted -> Click the "Delete" button at
the bottom (next to "For selected histories:").
When I do this, nothing happens, and the history remains.
However, if I click on the downward triangle and click on "Delete" from
the pull-down menu, the history is correctly deleted.
Deleting using the check boxes and the Delete button works in changeset
3446:143c920af25c but fails in changeset 3447:f5d383525d68.
Please let me know if you cannot reproduce the problem.
Cheers,
Oliver
Hi
I have a problem that I've not been able to track down. I've tried
searching the mailing list archive and googling.
When I enable paste in universe_wsgi.ini and then run run.sh I get this
error. The same occurs for both the stable and development branches.
Traceback (most recent call last):
File "./scripts/paster.py", line 34, in <module>
command.run()
File
"/home/nat/work/software/galaxy-central/eggs/PasteScript-1.7.3-py2.6.egg/paste/script/command.py",
line 84, in run
invoke(command, command_name, options, args[1:])
File
"/home/nat/work/software/galaxy-central/eggs/PasteScript-1.7.3-py2.6.egg/paste/script/command.py",
line 123, in invoke
exit_code = runner.run(args)
File
"/home/nat/work/software/galaxy-central/eggs/PasteScript-1.7.3-py2.6.egg/paste/script/command.py",
line 218, in run
result = self.command()
File
"/home/nat/work/software/galaxy-central/eggs/PasteScript-1.7.3-py2.6.egg/paste/script/serve.py",
line 274, in command
relative_to=base, global_conf=vars)
File
"/home/nat/work/software/galaxy-central/eggs/PasteScript-1.7.3-py2.6.egg/paste/script/serve.py",
line 308, in loadserver
relative_to=relative_to, **kw)
File
"/home/nat/work/software/galaxy-central/eggs/PasteDeploy-1.3.3-py2.6.egg/paste/deploy/loadwsgi.py",
line 210, in loadserver
return loadobj(SERVER, uri, name=name, **kw)
File
"/home/nat/work/software/galaxy-central/eggs/PasteDeploy-1.3.3-py2.6.egg/paste/deploy/loadwsgi.py",
line 224, in loadobj
global_conf=global_conf)
File
"/home/nat/work/software/galaxy-central/eggs/PasteDeploy-1.3.3-py2.6.egg/paste/deploy/loadwsgi.py",
line 248, in loadcontext
global_conf=global_conf)
File
"/home/nat/work/software/galaxy-central/eggs/PasteDeploy-1.3.3-py2.6.egg/paste/deploy/loadwsgi.py",
line 278, in _loadconfig
return loader.get_context(object_type, name, global_conf)
File
"/home/nat/work/software/galaxy-central/eggs/PasteDeploy-1.3.3-py2.6.egg/paste/deploy/loadwsgi.py",
line 409, in get_context
section)
File
"/home/nat/work/software/galaxy-central/eggs/PasteDeploy-1.3.3-py2.6.egg/paste/deploy/loadwsgi.py",
line 431, in _context_from_use
object_type, name=use, global_conf=global_conf)
File
"/home/nat/work/software/galaxy-central/eggs/PasteDeploy-1.3.3-py2.6.egg/paste/deploy/loadwsgi.py",
line 361, in get_context
global_conf=global_conf)
File
"/home/nat/work/software/galaxy-central/eggs/PasteDeploy-1.3.3-py2.6.egg/paste/deploy/loadwsgi.py",
line 248, in loadcontext
global_conf=global_conf)
File
"/home/nat/work/software/galaxy-central/eggs/PasteDeploy-1.3.3-py2.6.egg/paste/deploy/loadwsgi.py",
line 285, in _loadegg
return loader.get_context(object_type, name, global_conf)
File
"/home/nat/work/software/galaxy-central/eggs/PasteDeploy-1.3.3-py2.6.egg/paste/deploy/loadwsgi.py",
line 561, in get_context
object_type, name=name)
File
"/home/nat/work/software/galaxy-central/eggs/PasteDeploy-1.3.3-py2.6.egg/paste/deploy/loadwsgi.py",
line 600, in find_egg_entry_point
for prot in protocol_options] or '(no entry points)'))))
LookupError: Entry point 'gzip' not found in egg 'Paste' (dir:
/home/nat/work/software/galaxy-central/eggs/Paste-1.6-py2.6.egg;
protocols: paste.server_factory, paste.server_runner; entry_points: )
The strange thing is that on another machine I don't get the error, but I
can't find out what the missing dependency is on the machine that
generates the error, despite comparing installed packages etc.
I'm running Ubuntu 9.10 64-bit on both machines.
Can anyone tell me what I'm missing that leads to the above error?
Many thanks
Nathaniel
--
Nathaniel Street
Umeå Plant Science Centre
Department of Plant Physiology
University of Umeå
SE-901 87 Umeå
SWEDEN
email: nathaniel.street(a)plantphys.umu.se
tel: +46-90-786 5473
fax: +46-90-786 6676
www.popgenie.org
On 09/03/10 15:42, Chris Cole wrote:
> On 09/03/10 14:53, Nate Coraor wrote:
>> Chris Cole wrote:
>>> Anyone got any ideas on this?
>>>
>>> It's still not working (following the most recent updates) and we've
>>> got some NGS data to import into Galaxy.
>>
>> Hi Chris,
>>
>> To use this feature, the contents of library_import_dir should
>> themselves be directories (or symlinks to the same). Selecting that
>> directory from the dropdown in the library upload form will then import
>> the contents of those directories.
>
> Ah, right. That's not clear in the docs.
>
> How come it works with locally symlinked files, then?
Just checked it again (sorry I shouldn't have sent the previous email)
with a directory and it still isn't working.
www-galaxy@ge-002: tmp> cd ~/data_import/
www-galaxy@ge-002: data_import> ls
README
www-galaxy@ge-002: data_import> ln -s /homes/pschofield/data/TOH
www-galaxy@ge-002: data_import> ls TOH/
0min_1.txt 30m_1.txt 60min_1.txt Cd_inuc_1.txt Cmono_1.txt
Dd_inuc_1.txt Dmono_1.txt processed
0min_2.txt 30m_2.txt 60min_2.txt Cd_inuc_2.txt Cmono_2.txt
Dd_inuc_2.txt Dmono_2.txt
www-galaxy@ge-002: data_import> ls -l
total 96
-rw-r--r-- 1 www-galaxy barton 142 Mar 9 14:14 README
lrwxrwxrwx 1 www-galaxy barton 26 Mar 9 15:59 TOH ->
/homes/pschofield/data/TOH
I got the TOH option in the 'Upload directory of files', which I
selected, but the upload failed. Again, this is the error I get when
selecting one of the filenames in the dataset:
Traceback (most recent call last): File
"/homes/www-galaxy/galaxy_devel/tools/data_source/upload.py", line 326,
in __main__() File
"/homes/www-galaxy/galaxy_devel/tools/data_source/upload.py", line 318,
in __main__ add_file( dataset,
error
Thanks for the help.
Chris
Everyone,
Bitbucket (our code and wiki host) does not offer any way to search
the wiki, so there are now two new pages to fill the void: SiteMap and
SiteIndex. These are both generated automatically. The idea is that
the pages should be generated by a script that is scheduled to run
each night, but I'm still getting that going--in the meantime, I'll be
running the script manually.
SiteMap is a simple listing of all the pages found under the base wiki
directory. You can get a pretty good idea of what a page is about just
from its name, which is all that's included in this list. You can
see SiteMap at http://bitbucket.org/galaxy/galaxy-central/wiki/SiteMap.
SiteIndex is more complicated. It is a traditional index, which means
that (almost) every word that appears in the wiki is included, followed
by a list of the pages on which it appears. A stoplist was used to
remove useless or meaningless words, but some junk has still made it
past this filter. The index spans multiple pages, divided up
alphabetically. It's available at
http://bitbucket.org/galaxy/galaxy-central/wiki/SiteIndex.
Regards,
Kelly
Hi, how do I reference conditional variables from within the
change_format tag? I couldn't find any examples that did this. An
example tool config XML file follows.
Thanks!
Ed Kirton
US DOE JGI
<tool id="fasta_get_seqs" name="Get Sequences">
<description>Create a Fasta/Qual file of selected sequences</description>
<command interpreter="perl">fasta_get_seqs.pl $intype.infile
$input_table $id_column $ignorecase $cosorted $output
#for $i in $filters
"${i.condition} ${i.column} ${i.value}"
#end for
</command>
<inputs>
<conditional name='intype'>
<param name='sel' type='select' label='Input file type'>
<option value='Fasta'>Fasta</option>
<option value='Qual454'>Qual454</option>
</param>
<when value='Fasta'>
<param name="infile" type="data" format="fasta" label="Fasta file"/>
</when>
<when value='Qual454'>
<param name="infile" type="data" format="qual454"
label="Qual454 file"/>
</when>
</conditional>
<param name="input_table" type="data" format="tabular" label="File
containing target IDs"/>
<param name='id_column' type='data_column' data_ref='input_table'
label='Column containing sequence IDs'/>
<param name='ignorecase' type='boolean' truevalue='1'
falsevalue='0' label='Ignore case?' />
<param name="cosorted" type="boolean" checked="true" truevalue='1'
falsevalue='0' label='Sequences appear in same order in both files'/>
<repeat name='filters' title='Filter'>
<param name='condition' type='select' label='Condition'>
<option value='s_eq'>text equals</option>
<option value='s_ne'>text not equals</option>
<option value='s_contains'>text contains</option>
<option value='s_startswith'>text starts with</option>
<option value='s_endswith'>text ends with</option>
<option value='n_eq'>number ==</option>
<option value='n_ne'>number !=</option>
<option value='n_gt'>number &gt;</option>
<option value='n_lt'>number &lt;</option>
</param>
<param name='column' type='data_column' data_ref='input_table'
label='On column'/>
<param name='value' type='text' label='Value'/>
</repeat>
</inputs>
<outputs>
<data name="output" format="fasta">
<change_format>
<when input="intype.sel" value="Qual454" format="qual454" />
</change_format>
</data>
</outputs>
<help>
**What it does**
Produces a Fasta/Qual file of selected sequences.
If the files are cosorted, a much faster algorithm is used. If it fails,
it assumes the files were not cosorted and automatically tries again
using the slower algorithm, which does not require cosorted files.
One or more filters may be included to evaluate text (equal, not
equal, contains, starts with) or numeric data (==, !=, >, <).
</help>
</tool>
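To make the question concrete, these are the two pieces of the tool above that have to line up: the conditional's select parameter and the change_format reference to it. This is only a reduced excerpt of the same tool, not a confirmed working configuration:

<conditional name='intype'>
  <param name='sel' type='select' label='Input file type'>
    <option value='Fasta'>Fasta</option>
    <option value='Qual454'>Qual454</option>
  </param>
  ...
</conditional>
...
<data name="output" format="fasta">
  <change_format>
    <when input="intype.sel" value="Qual454" format="qual454" />
  </change_format>
</data>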

Patch: show User menu's history and dataset options independent of app.config.use_remote_user setting
by Ry4an Brase, 30 Mar '10
It's not much, but we're using a remote_user setup and the History and
Datasets menu items were not showing up under the User menu, which
didn't seem to be by design. Attached is a micro patch to the template,
which is also available here:
http://bitbucket.org/Ry4an/galaxy-umn-msi-changes/src/tip/always-show-users…
Also, what's the preferred patch submission mechanism? Patchbomb?
Bitbucket pull-request?
Thanks,
--
Ry4an Brase 612-626-6575
University of Minnesota Supercomputing Institute
for Advanced Computational Research http://www.msi.umn.edu
Hi,
Occasionally, we see jobs that remain stuck in the 'job running' state
in the history, even though the job completed
correctly. I can also see in the paster.log file that Galaxy has picked
up on the fact that the job ended, but it just hasn't updated the UI.
Clicking the eye for the 'running' history items does show the results
of the run.
This does not happen all the time. Is this a known issue and/or how can
I resolve it?
Thanks,
Chris

Re: [galaxy-dev] Python crashing after latest changeset update (OSX) (Fixed)
by Roy Weckiewicz, 25 Mar '10
Solved this problem by creating a new instance of Galaxy via hg clone and
creating a new MySQL database, then running setup.sh, modifying
universe_wsgi.ini to point to my MySQL database, and finally running run.sh,
which migrated the database and started the server without a Python crash.
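For reference, the sequence described above amounts to roughly the following. This is only a sketch; the repository URL, database name, and credentials are placeholders:

  hg clone http://bitbucket.org/galaxy/galaxy-central galaxy-central
  cd galaxy-central
  mysql -u root -p -e "CREATE DATABASE galaxy"
  sh setup.sh
  # point universe_wsgi.ini at the new database, for example:
  #   database_connection = mysql://galaxy:secret@localhost/galaxy
  sh run.sh   # migrates the schema and starts the server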
On Thu, Mar 25, 2010 at 4:35 PM, Roy Weckiewicz <roystefan(a)gmail.com> wrote:
> Hello,
>
>
> After doing a hg pull and update a few minutes ago, my Python (2.5.1) is
> crashing on OSX (10.5.8) after I run.sh daemon. The galaxy log doesn't seem
> to contain any information about the error and seems to start the galaxy
> server just fine (localhost). I've attached osx error report details. Any
> ideas?
>
>
>
>
> Thank you,
>
>
> Roy Weckiewicz
>
> --
> Roy Weckiewicz
> Texas A&M University
>
--
Roy Weckiewicz
Texas A&M University
Hello,
After doing a hg pull and update a few minutes ago, my Python (2.5.1) is
crashing on OSX (10.5.8) after I run.sh daemon. The galaxy log doesn't seem
to contain any information about the error and seems to start the galaxy
server just fine (localhost). I've attached osx error report details. Any
ideas?
Thank you,
Roy Weckiewicz
--
Roy Weckiewicz
Texas A&M University
Hi
(I am sorry if this has been reported already, but I couldn't find it in
the archive.)
This morning, I upgraded one of our local Galaxy servers from changeset 2833
to 3545. So far, I haven't encountered any major problems, except for the
cases where we use a conditional, e.g.:
<command>
#if $range.mode=="part":#clustalw2 -infile=$input -outfile=$output
-OUTORDER=$out_format1 -SEQNOS=$out_format2
-RANGE=$range.seq_range_start,$range.seq_range_end
#else :#clustalw2 -infile=$input -outfile=$output -OUTORDER=$out_format1
-SEQNOS=$out_format2
#end if
</command>
which is pretty much the same as the example for the <conditional> tag set
in the wiki (ToolConfigSyntax):
<command interpreter="python">
#if $source.source_select=="database":#blat_wrapper.py 0 $source.dbkey
$input_query $output1 $iden $tile_size $one_off
#else:#blat_wrapper.py 1 $source.input_target $input_query $output1
$iden $tile_size $one_off
#end if
</command>
However, I got the following error (after the upgrade):
////
File
"/work2/scratch/galaxy/galaxy_dist/eggs/Cheetah-2.2.2-py2.5-linux-x86_64-ucs
4.egg/Cheetah/Parser.py", line 2645, in popFromOpenDirectivesStack
raise ParseError(self, msg="#end found, but nothing to end")
ParseError:
#end found, but nothing to end
Line 4, column 1
Line|Cheetah Code
----|-------------------------------------------------------------
2 | #else:#clustalw2 -infile=$input -outfile=$output
-OUTORDER=$out_format1 -SEQNOS=$out_format2
3 | #end if
4 |
^
////
After looking around in similar tool config files that have been changed in
the distribution, I noticed that the colons are no longer present. I removed
both ":" in front of "#clustalw2", resulting in:
<command>
#if $range.mode=="part" #clustalw2 -infile=$input -outfile=$output
-OUTORDER=$out_format1 -SEQNOS=$out_format2
-RANGE=$range.seq_range_start,$range.seq_range_end
#else #clustalw2 -infile=$input -outfile=$output -OUTORDER=$out_format1
-SEQNOS=$out_format2
#end if
</command>
and it worked!
Was this the correct fix? And if so, could you please update the wiki page,
i.e. remove the two ":" in front of "#blat_wrapper.py"?
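Presumably the wiki example would then read as follows (the same change applied; untested):

<command interpreter="python">
#if $source.source_select=="database" #blat_wrapper.py 0 $source.dbkey $input_query $output1 $iden $tile_size $one_off
#else #blat_wrapper.py 1 $source.input_target $input_query $output1 $iden $tile_size $one_off
#end if
</command>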
Thank you very much for your help
Hans
details: http://www.bx.psu.edu/hg/galaxy/rev/4c95f1a101f1
changeset: 3560:4c95f1a101f1
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Wed Mar 24 14:14:58 2010 -0400
description:
Next rev of codingSnps tool.
diffstat:
tools/evolution/codingSnps.pl | 268 ++++++++++++++++++----------------------
tools/evolution/codingSnps.xml | 2 +-
2 files changed, 124 insertions(+), 146 deletions(-)
diffs (423 lines):
diff -r bc2e61c2dac1 -r 4c95f1a101f1 tools/evolution/codingSnps.pl
--- a/tools/evolution/codingSnps.pl Tue Mar 23 15:43:58 2010 -0400
+++ b/tools/evolution/codingSnps.pl Wed Mar 24 14:14:58 2010 -0400
@@ -9,75 +9,32 @@
# those that cause a frameshift or substitution in the amino acid.
#########################################################################
-my $uniq = 0; # flag for whether want uniq positions
-my $syn = 0; # flag for if want synonomous changes rather than non-syn
-my $seqFlag = "2bit"; # flag to set sequence type 2bit|nib
-my $nibDir; # directory containg data
-my $nibTag; # tag for directory above
-
-################################################################################
-# Parse command line arguments #
-################################################################################
-
-# make sure we have enough arguments
if (!@ARGV or scalar @ARGV < 3) {
- print STDERR "Usage: codingSnps.pl snps.bed genes.bed locfile.loc [chr=# start=# end=# snp=#] output_file\n";
+ print "Usage: codingSnps.pl snps.bed genes.bed (/dir/nib/|Galaxy build= loc=) [chr=# start=# end=# snp=#] > codingSnps.txt\n";
exit;
}
-
-# get first three command line arguments
-my $snpFile = shift @ARGV;
+my $uniq = 0; #flag for whether want uniq positions
+my $syn = 0; #flag for if want synonomous changes rather than non-syn
+my $snpFile = shift @ARGV;
my $geneFile = shift @ARGV;
-my $locFile = shift @ARGV;
-
-# read $locFile to get $nibDir (ignoring commets)
-# FIXME: the last entry is the one you get
-open(LF, "< $locFile") || die "open($locFile): $!\n";
-while(<LF>) {
- s/#.*$//;
- s/(?:^\s+|\s+$)//g;
- next if (/^$/);
-
- # $tag and $path are set for each "valid" line in the file
- ($nibTag, $nibDir) = split(/\t/);
-}
-close(LF);
-
-# bed like columns in default positions
-my $col0 = 0;
+my $nibDir = shift @ARGV;
+if ($nibDir eq 'Galaxy') { getGalaxyInfo(); }
+my $seqFlag = "2bit"; #flag to set sequence type 2bit|nib
+my $col0 = 0; #bed like columns in default positions
my $col1 = 1;
my $col2 = 2;
my $col3 = 3;
-
-# get column positions for chr, start, end, snp
-# column positions 1 based coming in (for Galaxy)
+#column positions 1 based coming in (for Galaxy)
foreach (@ARGV) {
- if (/^chr=(\d+)$/) {
- $col0 = $1 - 1;
- } elsif (/^start=(\d+)$/) {
- $col1 = $1 - 1;
- } elsif (/^end=(\d+)$/) {
- $col2 = $1 - 1;
- } elsif (/^snp=(\d+)$/) {
- $col3 = $1 - 1;
- }
+ if (/chr=(\d+)/) { $col0 = $1 -1; }
+ elsif (/start=(\d+)/) { $col1 = $1 -1; }
+ elsif (/end=(\d+)/) { $col2 = $1 -1; }
+ elsif (/snp=(\d+)/) { $col3 = $1 -1; }
}
-
-# make sure the column positions are sane
if ($col0 < 0 || $col1 < 0 || $col2 < 0 || $col3 < 0) {
print STDERR "ERROR column numbers are given with origin 1\n";
exit 1;
}
-
-# get the output_file from the command line arguments
-my $outFile = $ARGV[$#ARGV];
-
-open(OUTFILE, "> $outFile") || die "open($outFile): $!\n";
-
-################################################################################
-# Initialization #
-################################################################################
-
my @genes; #bed lines for genes, sorted by chrom and start
my %chrSt; #index in array where each chrom starts
my %codon; #hash of codon amino acid conversions
@@ -97,13 +54,7 @@
"V" => "A/C/G",
"N" => "A/C/G/T"
);
-
fill_codon();
-
-################################################################################
-# Main #
-################################################################################
-
open(FH, "cat $geneFile | sort -k1,1 -k2,2n |")
or die "Couldn't open and sort $geneFile, $!\n";
my $i = 0;
@@ -117,7 +68,7 @@
}
close FH or die "Couldn't close $geneFile, $!\n";
-if ($ends) { print STDERR "TESTING using block ends rather than sizes\n"; }
+if ($ends) { print STDERR "WARNING using block ends rather than sizes\n"; }
#open snps sorted as well
my $s1 = $col0 + 1; #sort order is origin 1
@@ -129,6 +80,7 @@
my %done;
while(<FH>) {
chomp;
+ if (/^\s*#/) { next; } #comment
my @s = split(/\t/); #SNP fields
if (!@s or !$s[$col0]) { die "ERROR missing SNP data, $_\n"; }
my $size = $#s;
@@ -200,14 +152,10 @@
}
}
close FH or die "Couldn't close $snpFile, $!\n";
-close(OUTFILE) || die "close($outFile): $!\n";
exit;
-################################################################################
-# Subroutines #
-################################################################################
-
+########################################################################
sub processSnp {
my $sref = shift;
my $gref = shift;
@@ -220,7 +168,7 @@
my $i = 0;
my @st = split(/,/, $gref->[11]);
my @size = split(/,/, $gref->[10]);
- if (scalar @st ne $gref->[9]) { die "bad gene $gref->[3]\n"; }
+ if (scalar @st ne $gref->[9]) { return; } #cant do this gene #die "bad gene $gref->[3]\n"; }
my @pos;
my $in = 0;
for($i = 0; $i < $gref->[9]; $i++) {
@@ -246,12 +194,12 @@
my $c = ($copy =~ tr/-//);
if ($c % 3 == 0) { return; } #not frameshift
#handle bed4 to bed4 + 4 (pgSnp)
- print OUTFILE "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
+ print "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
#if ($sref->[4]) { print "\t$sref->[4]"; }
#if ($sref->[5]) { print "\t$sref->[5]"; }
#if ($sref->[6]) { print "\t$sref->[6]"; }
#if ($sref->[7]) { print "\t$sref->[7]"; }
- print OUTFILE "\t$gref->[3]\tframeshift\n";
+ print "\t$gref->[3]\tframeshift\n";
$done{"$sref->[$col0] $sref->[$col1] $sref->[$col2]"}++;
return;
}elsif ($sref->[$col1] == $sref->[$col2]) { #insertion
@@ -259,12 +207,12 @@
my $c = ($copy =~ tr/\[ACTG]+//);
if ($c % 3 == 0) { return; } #not frameshift
#handle bed4 to bed4 + 4 (pgSnp)
- print OUTFILE "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
+ print "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
#if ($sref->[4]) { print "\t$sref->[4]"; }
#if ($sref->[5]) { print "\t$sref->[5]"; }
#if ($sref->[6]) { print "\t$sref->[6]"; }
#if ($sref->[7]) { print "\t$sref->[7]"; }
- print OUTFILE "\t$gref->[3]\tframeshift\n";
+ print "\t$gref->[3]\tframeshift\n";
$done{"$sref->[$col0] $sref->[$col1] $sref->[$col2]"}++;
return;
}
@@ -310,6 +258,7 @@
my @vars = split(/\//, $sref->[$col3]);
if ($gref->[5] eq '-') { #complement oldnts and revcomp vars
$oldnts = compl($oldnts);
+ if (!$oldnts) { return; } #skip this one
$oldnts = join('', (reverse(split(/ */, $oldnts))));
foreach (@vars) {
$_ = reverse(split(/ */));
@@ -320,6 +269,7 @@
my @newnts;
my $changed = '';
foreach my $v (@vars) {
+ if (!$v or length($v) != 1) { return; } #only simple changes
my @new = split(/ */, $oldnts);
$changed = splice(@new, $r, $len, split(/ */, $v));
#should only change single nt
@@ -335,8 +285,8 @@
push(@newaa, $t);
}
if (!$change && $syn) {
- print OUTFILE "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
- print OUTFILE "\t$gref->[3]\t$oldaa:", join("/", @newaa), "\n";
+ print "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
+ print "\t$gref->[3]\t$oldaa:", join("/", @newaa), "\n";
return;
}elsif ($syn) { return; } #only want synonymous changes
if (!$change) { return; } #no change in amino acids
@@ -346,13 +296,14 @@
#print STDERR "oldnt $oldnts, strand $gref->[5]\n";
#exit;
#}
- print OUTFILE "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
+ print "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
#if (defined $sref->[4]) { print "\t$sref->[4]"; }
#if (defined $sref->[5]) { print "\t$sref->[5]"; }
#if (defined $sref->[6]) { print "\t$sref->[6]"; }
#if (defined $sref->[7]) { print "\t$sref->[7]"; }
if ($gref->[5] eq '-') { $changed = compl($changed); } #use plus for ref
- print OUTFILE "\t$gref->[3]\t$oldaa:", join("/", @newaa), "\t$cdNum\t$changed\n";
+ if (!$changed) { return; } #skip this one
+ print "\t$gref->[3]\t$oldaa:", join("/", @newaa), "\t$cdNum\t$changed\n";
$done{"$sref->[$col0] $sref->[$col1] $sref->[$col2]"}++;
}
}
@@ -415,7 +366,7 @@
my $end = shift;
my $strand = '+';
$st--; #change to UCSC numbering
- open (BIT, "twoBitToFa -seq=$chr -start=$st -end=$end $nibDir/$nibTag.2bit stdout |") or
+ open (BIT, "twoBitToFa -seq=$chr -start=$st -end=$end $nibDir stdout |") or
die "Couldn't run twoBitToFa, $!\n";
my $seq = '';
while (<BIT>) {
@@ -423,7 +374,7 @@
if (/^>/) { next; } #header
$seq .= $_;
}
- close BIT or die "Couldn't finish nibFrag on $chr $st $end, $!\n";
+ close BIT or die "Couldn't finish twoBitToFa on $chr $st $end, $!\n";
return $seq;
}
@@ -454,7 +405,7 @@
elsif ($n eq 'G') { $comp .= 'C'; }
elsif ($n eq 'N') { $comp .= 'N'; }
elsif ($n eq '-') { $comp .= '-'; } #deletion
- else { die "Couldn't do complement of $n for $nts\n"; }
+ else { $comp = undef; }
}
return $comp;
}
@@ -473,69 +424,96 @@
}
sub fill_codon {
- $codon{GCA} = 'Ala';
- $codon{GCC} = 'Ala';
- $codon{GCG} = 'Ala';
- $codon{GCT} = 'Ala';
- $codon{CGG} = 'Arg';
- $codon{CGT} = 'Arg';
- $codon{CGC} = 'Arg';
- $codon{AGA} = 'Arg';
- $codon{AGG} = 'Arg';
- $codon{CGA} = 'Arg';
- $codon{AAC} = 'Asn';
- $codon{AAT} = 'Asn';
- $codon{GAC} = 'Asp';
- $codon{GAT} = 'Asp';
- $codon{TGC} = 'Cys';
- $codon{TGT} = 'Cys';
- $codon{CAG} = 'Gln';
- $codon{CAA} = 'Gln';
- $codon{GAA} = 'Glu';
- $codon{GAG} = 'Glu';
- $codon{GGG} = 'Gly';
- $codon{GGA} = 'Gly';
- $codon{GGC} = 'Gly';
- $codon{GGT} = 'Gly';
- $codon{CAC} = 'His';
- $codon{CAT} = 'His';
- $codon{ATA} = 'Ile';
- $codon{ATT} = 'Ile';
- $codon{ATC} = 'Ile';
- $codon{CTA} = 'Leu';
- $codon{CTC} = 'Leu';
- $codon{CTG} = 'Leu';
- $codon{CTT} = 'Leu';
- $codon{TTG} = 'Leu';
- $codon{TTA} = 'Leu';
- $codon{AAA} = 'Lys';
- $codon{AAG} = 'Lys';
- $codon{ATG} = 'Met';
- $codon{TTC} = 'Phe';
- $codon{TTT} = 'Phe';
- $codon{CCT} = 'Pro';
- $codon{CCA} = 'Pro';
- $codon{CCC} = 'Pro';
- $codon{CCG} = 'Pro';
- $codon{TCA} = 'Ser';
- $codon{AGC} = 'Ser';
- $codon{AGT} = 'Ser';
- $codon{TCC} = 'Ser';
- $codon{TCT} = 'Ser';
- $codon{TCG} = 'Ser';
- $codon{TGA} = 'Stop';
- $codon{TAG} = 'Stop';
- $codon{TAA} = 'Stop';
- $codon{ACT} = 'Thr';
- $codon{ACA} = 'Thr';
- $codon{ACC} = 'Thr';
- $codon{ACG} = 'Thr';
- $codon{TGG} = 'Trp';
- $codon{TAT} = 'Tyr';
- $codon{TAC} = 'Tyr';
- $codon{GTC} = 'Val';
- $codon{GTA} = 'Val';
- $codon{GTG} = 'Val';
- $codon{GTT} = 'Val';
+$codon{GCA} = 'Ala';
+$codon{GCC} = 'Ala';
+$codon{GCG} = 'Ala';
+$codon{GCT} = 'Ala';
+$codon{CGG} = 'Arg';
+$codon{CGT} = 'Arg';
+$codon{CGC} = 'Arg';
+$codon{AGA} = 'Arg';
+$codon{AGG} = 'Arg';
+$codon{CGA} = 'Arg';
+$codon{AAC} = 'Asn';
+$codon{AAT} = 'Asn';
+$codon{GAC} = 'Asp';
+$codon{GAT} = 'Asp';
+$codon{TGC} = 'Cys';
+$codon{TGT} = 'Cys';
+$codon{CAG} = 'Gln';
+$codon{CAA} = 'Gln';
+$codon{GAA} = 'Glu';
+$codon{GAG} = 'Glu';
+$codon{GGG} = 'Gly';
+$codon{GGA} = 'Gly';
+$codon{GGC} = 'Gly';
+$codon{GGT} = 'Gly';
+$codon{CAC} = 'His';
+$codon{CAT} = 'His';
+$codon{ATA} = 'Ile';
+$codon{ATT} = 'Ile';
+$codon{ATC} = 'Ile';
+$codon{CTA} = 'Leu';
+$codon{CTC} = 'Leu';
+$codon{CTG} = 'Leu';
+$codon{CTT} = 'Leu';
+$codon{TTG} = 'Leu';
+$codon{TTA} = 'Leu';
+$codon{AAA} = 'Lys';
+$codon{AAG} = 'Lys';
+$codon{ATG} = 'Met';
+$codon{TTC} = 'Phe';
+$codon{TTT} = 'Phe';
+$codon{CCT} = 'Pro';
+$codon{CCA} = 'Pro';
+$codon{CCC} = 'Pro';
+$codon{CCG} = 'Pro';
+$codon{TCA} = 'Ser';
+$codon{AGC} = 'Ser';
+$codon{AGT} = 'Ser';
+$codon{TCC} = 'Ser';
+$codon{TCT} = 'Ser';
+$codon{TCG} = 'Ser';
+$codon{TGA} = 'Stop';
+$codon{TAG} = 'Stop';
+$codon{TAA} = 'Stop';
+$codon{ACT} = 'Thr';
+$codon{ACA} = 'Thr';
+$codon{ACC} = 'Thr';
+$codon{ACG} = 'Thr';
+$codon{TGG} = 'Trp';
+$codon{TAT} = 'Tyr';
+$codon{TAC} = 'Tyr';
+$codon{GTC} = 'Val';
+$codon{GTA} = 'Val';
+$codon{GTG} = 'Val';
+$codon{GTT} = 'Val';
}
-
+
+sub getGalaxyInfo {
+ my $build;
+ my $locFile;
+ foreach (@ARGV) {
+ if (/build=(.*)/) { $build = $1; }
+ elsif (/loc=(.*)/) { $locFile = $1; }
+ }
+ if (!$build or !$locFile) {
+ print STDERR "ERROR missing build or locfile for Galaxy input\n";
+ exit 1;
+ }
+ # read $locFile to get $nibDir (ignoring commets)
+ open(LF, "< $locFile") || die "open($locFile): $!\n";
+ while(<LF>) {
+ s/#.*$//;
+ s/(?:^\s+|\s+$)//g;
+ next if (/^$/);
+
+ my @t = split(/\t/);
+ if ($t[0] eq $build) { $nibDir = $t[1]; }
+ }
+ close(LF);
+ if ($nibDir eq 'Galaxy') {
+ print STDERR "Failed to find sequence directory in locfile $locFile\n";
+ }
+ $nibDir .= "/$build.2bit"; #we want full path and filename
+}
diff -r bc2e61c2dac1 -r 4c95f1a101f1 tools/evolution/codingSnps.xml
--- a/tools/evolution/codingSnps.xml Tue Mar 23 15:43:58 2010 -0400
+++ b/tools/evolution/codingSnps.xml Wed Mar 24 14:14:58 2010 -0400
@@ -1,7 +1,7 @@
<tool id="codingSnps" name="Amino-acid changes">
<description>caused by a set of SNPs</description>
<command interpreter="perl">
- codingSnps.pl $input1 $input2 ${GALAXY_DATA_INDEX_DIR}/codingSnps.loc chr=${input1.metadata.chromCol} start=${input1.metadata.startCol} end=${input1.metadata.endCol} snp=$col1 $out_file1
+ codingSnps.pl $input1 $input2 Galaxy build=${input1.metadata.dbkey} loc=${GALAXY_DATA_INDEX_DIR}/codingSnps.loc chr=${input1.metadata.chromCol} start=${input1.metadata.startCol} end=${input1.metadata.endCol} snp=$col1 > $out_file1
</command>
<inputs>
<param format="interval" name="input1" type="data" label="SNPs"/>
details: http://www.bx.psu.edu/hg/galaxy/rev/bc2e61c2dac1
changeset: 3559:bc2e61c2dac1
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Mar 23 15:43:58 2010 -0400
description:
Add the add_scores tool.
diffstat:
test-data/add_scores_input1.interval | 20 +++++++++++
test-data/add_scores_input2.bed | 25 ++++++++++++++
test-data/add_scores_output1.interval | 20 +++++++++++
test-data/add_scores_output2.interval | 25 ++++++++++++++
tool-data/add_scores.loc.sample | 21 ++++++++++++
tool_conf.xml.sample | 1 +
tools/evolution/add_scores.xml | 60 +++++++++++++++++++++++++++++++++++
7 files changed, 172 insertions(+), 0 deletions(-)
diffs (206 lines):
diff -r dfaa18960944 -r bc2e61c2dac1 test-data/add_scores_input1.interval
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/add_scores_input1.interval Tue Mar 23 15:43:58 2010 -0400
@@ -0,0 +1,20 @@
+chr20 74149 74150 G G
+chr22 14642921 14642922 C A
+chr20 74148 74149 T C
+chr22 15452519 15452520 C C
+chr22 15472687 15472688 C N
+chr22 15508088 15508089 G G
+chr22 15534573 15534574 C C
+chr20 24770 24771 C C
+chr20 24961 24962 T T
+chr22 15451993 15451994 C C
+chr22 14667850 14667851 G G
+chr22 15452482 15452483 A G
+chr20 71317 71318 T T
+chr20 74223 74224 A A
+chr22 15453065 15453066 A G
+chr20 74284 74285 T T
+chr20 74309 74310 A A
+chr22 15472610 15472611 G N
+chr20 86193 86194 C C
+chr20 87418 87419 C C
diff -r dfaa18960944 -r bc2e61c2dac1 test-data/add_scores_input2.bed
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/add_scores_input2.bed Tue Mar 23 15:43:58 2010 -0400
@@ -0,0 +1,25 @@
+chr1 90000 90001 A
+chr2 90000 90001 A
+chr3 90000 90001 A
+chr4 90000 90001 A
+chr5 90000 90001 A
+chr6 90000 90001 A
+chr7 90000 90001 A
+chr8 90000 90001 A
+chr9 90000 90001 A
+chr10 90000 90001 A
+chr11 90000 90001 A
+chr12 90000 90001 A
+chr13 90000 90001 A
+chr14 90000 90001 A
+chr15 90000 90001 A
+chr16 90000 90001 A
+chr17 90000 90001 A
+chr18 90000 90001 A
+chr19 90000 90001 A
+chr20 90000 90001 A
+chr21 90000 90001 A
+chr22 90000 90001 A
+chrX 90000 90001 A
+chrY 90000 90001 A
+chrM 9000 9001 A
diff -r dfaa18960944 -r bc2e61c2dac1 test-data/add_scores_output1.interval
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/add_scores_output1.interval Tue Mar 23 15:43:58 2010 -0400
@@ -0,0 +1,20 @@
+chr20 74149 74150 G G 0.885
+chr22 14642921 14642922 C A -1.844
+chr20 74148 74149 T C -3.161
+chr22 15452519 15452520 C C -1.138
+chr22 15472687 15472688 C N NA
+chr22 15508088 15508089 G G -1.398
+chr22 15534573 15534574 C C 0.460
+chr20 24770 24771 C C -1.374
+chr20 24961 24962 T T -1.599
+chr22 15451993 15451994 C C 0.645
+chr22 14667850 14667851 G G 0.469
+chr22 15452482 15452483 A G -1.246
+chr20 71317 71318 T T -0.825
+chr20 74223 74224 A A -1.451
+chr22 15453065 15453066 A G -0.776
+chr20 74284 74285 T T -0.701
+chr20 74309 74310 A A -0.863
+chr22 15472610 15472611 G N NA
+chr20 86193 86194 C C 0.887
+chr20 87418 87419 C C -1.703
diff -r dfaa18960944 -r bc2e61c2dac1 test-data/add_scores_output2.interval
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/add_scores_output2.interval Tue Mar 23 15:43:58 2010 -0400
@@ -0,0 +1,25 @@
+chr1 90000 90001 A 0.431
+chr2 90000 90001 A 0.514
+chr3 90000 90001 A 0.808
+chr4 90000 90001 A 0.456
+chr5 90000 90001 A 0.446
+chr6 90000 90001 A 0.397
+chr7 90000 90001 A 0.446
+chr8 90000 90001 A NA
+chr9 90000 90001 A 0.470
+chr10 90000 90001 A 0.463
+chr11 90000 90001 A 0.369
+chr12 90000 90001 A 0.557
+chr13 90000 90001 A NA
+chr14 90000 90001 A NA
+chr15 90000 90001 A NA
+chr16 90000 90001 A 0.819
+chr17 90000 90001 A -0.993
+chr18 90000 90001 A 0.657
+chr19 90000 90001 A 0.372
+chr20 90000 90001 A 0.360
+chr21 90000 90001 A NA
+chr22 90000 90001 A NA
+chrX 90000 90001 A 0.414
+chrY 90000 90001 A 0.414
+chrM 9000 9001 A -0.587
diff -r dfaa18960944 -r bc2e61c2dac1 tool-data/add_scores.loc.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool-data/add_scores.loc.sample Tue Mar 23 15:43:58 2010 -0400
@@ -0,0 +1,21 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of gzipped genome files for use with add_scores. You will
+#need to supply these files and then create a add_scores.loc file
+#similar to this one (store it in this directory) that points to
+#the directories in which those files are stored. The add_scores.loc
+#file has this format (white space characters are TAB characters):
+#
+#<build> <file_path>
+#
+#So, for example, if your add_scores.loc began like this:
+#
+#hg18 /afs/bx.psu.edu/depot/data/genome/hg18/misc/phyloP/
+#
+#then your /afs/bx.psu.edu/depot/data/genome/hg18/misc/phyloP/ directory
+#would need to contain the following gzipped files, among others:
+#
+#-rw-r--r-- 1 rico rico 161981190 2010-03-19 12:48 chr10.phyloP44way.primate.wigFix.gz
+#-rw-r--r-- 1 rico rico 54091 2010-03-19 12:56 chr10_random.phyloP44way.primate.wigFix.gz
+#-rw-r--r-- 1 rico rico 158621990 2010-03-19 12:46 chr11.phyloP44way.primate.wigFix.gz
+#
+hg18 /galaxy/data/hg18/misc/phyloP
diff -r dfaa18960944 -r bc2e61c2dac1 tool_conf.xml.sample
--- a/tool_conf.xml.sample Tue Mar 23 13:53:21 2010 -0400
+++ b/tool_conf.xml.sample Tue Mar 23 15:43:58 2010 -0400
@@ -162,6 +162,7 @@
<tool file="hyphy/hyphy_dnds_wrapper.xml" />
<tool file="evolution/mutate_snp_codon.xml" />
<tool file="evolution/codingSnps.xml" />
+ <tool file="evolution/add_scores.xml" />
</section>
<section name="Metagenomic analyses" id="tax_manipulation">
<tool file="taxonomy/gi2taxonomy.xml" />
diff -r dfaa18960944 -r bc2e61c2dac1 tools/evolution/add_scores.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/evolution/add_scores.xml Tue Mar 23 15:43:58 2010 -0400
@@ -0,0 +1,60 @@
+<tool id="add_scores" name="Add scores">
+ <description>for interspecies conservation at each SNPs</description>
+ <command>
+ add_scores $input1 ${input1.metadata.dbkey} ${input1.metadata.chromCol} ${input1.metadata.startCol} ${GALAXY_DATA_INDEX_DIR}/add_scores.loc $out_file1
+ </command>
+ <inputs>
+ <param format="interval" name="input1" type="data" label="SNPs"/>
+ </inputs>
+ <outputs>
+ <data format="input" name="out_file1" />
+ </outputs>
+ <tests>
+ <test>
+ <param name="input1" value="add_scores_input1.interval" dbkey="hg18" />
+ <output name="output" file="add_scores_output1.interval" />
+ </test>
+ <test>
+ <param name="input1" value="add_scores_input2.bed" dbkey="hg18" />
+ <output name="output" file="add_scores_output2.interval" />
+ </test>
+ </tests>
+
+ <help>
+This tool adds a column that measures interspecies conservation at each SNP position, using conservation scores for primates computed by the phyloP program. It currently works only for hg18.
+
+**Example**
+
+- input file, with SNPs::
+
+ chr22 16440426 14440427 C/T
+ chr22 15494851 14494852 A/G
+ chr22 14494911 14494912 A/T
+ chr22 14550435 14550436 A/G
+ chr22 14611956 14611957 G/T
+ chr22 14612076 14612077 A/G
+ chr22 14668537 14668538 C
+ chr22 14668703 14668704 A/T
+ chr22 14668775 14668776 G
+ chr22 14680074 14680075 A/T
+ etc.
+
+- output file, showing non-synonymous substitutions in coding regions::
+
+ chr22 16440426 14440427 C/T 0.509
+ chr22 15494851 14494852 A/G 0.427
+ chr22 14494911 14494912 A/T NA
+ chr22 14550435 14550436 A/G NA
+ chr22 14611956 14611957 G/T -2.142
+ chr22 14612076 14612077 A/G 0.369
+ chr22 14668537 14668538 C 0.419
+ chr22 14668703 14668704 A/T -1.462
+ chr22 14668775 14668776 G 0.470
+ chr22 14680074 14680075 A/T 0.000
+ chr22 14680074 14680075 A/T 0.303
+ etc.
+
+"NA", means that the phyloP score was not available.
+
+</help>
+</tool>
details: http://www.bx.psu.edu/hg/galaxy/rev/dfaa18960944
changeset: 3558:dfaa18960944
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Mar 23 13:53:21 2010 -0400
description:
Add the codingSnps tool.
diffstat:
test-data/codingSnps_input1.interval | 21 +
test-data/codingSnps_input2.bed | 20 +
test-data/codingSnps_output.interval | 21 +
tool-data/codingSnps.loc.sample | 25 +
tool_conf.xml.sample | 1 +
tools/evolution/codingSnps.pl | 541 +++++++++++++++++++++++++++++++++++
tools/evolution/codingSnps.xml | 89 +++++
tools/evolution/codingSnps_filter.py | 43 ++
8 files changed, 761 insertions(+), 0 deletions(-)
diffs (799 lines):
diff -r a67ca0795efb -r dfaa18960944 test-data/codingSnps_input1.interval
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/codingSnps_input1.interval Tue Mar 23 13:53:21 2010 -0400
@@ -0,0 +1,21 @@
+chr1 1415785 1415786 C C S N C N C N C
+chr1 1420777 1420778 C C Y N C N C N C
+chr1 1469195 1469196 A G R R G G G G G
+chr1 1541789 1541790 T C Y Y N Y N T N
+chr1 1548654 1548655 T T C N C N N N N
+chr1 1549089 1549090 T C A A T A N A N
+chr1 1551984 1551985 C C Y N C N N N N
+chr1 1571349 1571350 G A R N N N N N N
+chr1 1571354 1571355 G G R N N N N N N
+chr1 1589750 1589751 A G G N G N G N G
+chr1 1675899 1675900 G A C C G C G C N
+chr1 1714578 1714579 G G R N A N G N G
+chr1 1837838 1837839 T C Y N T N T N C
+chr1 1839388 1839389 A A T T A T A T A
+chr1 1839389 1839390 T T Y N T N T N C
+chr1 1839603 1839604 G G R N G N G N R
+chr1 1843968 1843969 A G G G R R A A G
+chr1 1844405 1844406 C C Y Y C C N C Y
+chr1 1876878 1876879 A G G N N N N N N
+chr1 1886192 1886193 G N R N G N N N N
+chr1 1890091 1890092 T C Y Y C C N C N
diff -r a67ca0795efb -r dfaa18960944 test-data/codingSnps_input2.bed
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/codingSnps_input2.bed Tue Mar 23 13:53:21 2010 -0400
@@ -0,0 +1,20 @@
+chr1 67051161 67163158 ENST00000371026 0 - 67052400 67163102 0 17 1290,157,227,99,122,158,152,87,203,195,156,140,157,113,185,175,226, 0,9470,13929,14921,20694,21100,22735,24819,27578,34593,49256,58479,61890,78263,80338,92310,111771,
+chr1 67075869 67163055 ENST00000371023 0 - 67075923 67162979 0 10 198,203,195,156,140,157,113,185,175,123, 0,2870,9885,24548,33771,37182,53555,55630,67602,87063,
+chr1 1843259 1849228 ENST00000378596 0 - 1843585 1847183 0 10 428,135,161,133,155,130,156,120,56,1278, 0,628,1024,1406,1814,2430,3822,4267,4496,4691,
+chr1 1874611 1925136 ENST00000270720 0 - 1876876 1912255 0 18 2538,165,90,67,119,174,156,199,158,103,111,174,105,99,144,85,86,137, 0,3307,5807,6651,10436,11575,13058,15322,17346,18667,19624,20712,32073,33624,35199,35576,37577,50388,
+chr1 1838889 1840572 ENST00000310991 0 - 1839180 1840564 0 5 572,179,42,44,92, 0,662,1302,1454,1591,
+chr1 1838888 1840096 ENST00000378602 0 - 1839180 1839855 0 2 573,545, 0,663,
+chr1 1672537 1699769 ENST00000344463 0 - 1674202 1686705 0 14 1822,83,158,100,155,103,86,155,204,143,169,84,219,182, 0,2328,2812,3045,3305,4135,5019,5264,5868,7862,8495,10713,13989,27050,
+chr1 1836125 1838593 ENST00000307786 0 + 1836579 1838492 0 6 481,51,175,145,101,148, 0,858,1614,1925,2147,2320,
+chr1 1672530 1701368 ENST00000378625 0 - 1674202 1686705 0 14 1829,83,158,100,155,103,86,155,204,143,169,84,219,165, 0,2335,2819,3052,3312,4142,5026,5271,5875,7869,8502,10720,13996,28673,
+chr1 1672537 1699769 ENST00000341426 0 - 1674202 1686705 0 12 1822,83,158,100,155,103,86,106,130,84,219,182, 0,2328,2812,3045,3305,4135,5019,5264,5942,10713,13989,27050,
+chr1 1540746 1555847 ENST00000378710 0 + 1540873 1555773 0 19 130,107,311,172,107,195,143,105,163,134,149,157,161,127,234,179,182,66,313, 0,1004,7885,8270,9291,9487,9782,11146,11333,11570,11792,12169,12515,12769,12985,13629,13881,14135,14788,
+chr1 1672530 1700089 ENST00000341991 0 - 1674202 1686705 0 12 1829,83,158,100,155,103,86,106,130,84,219,45, 0,2335,2819,3052,3312,4142,5026,5271,5949,10720,13996,27514,
+chr1 1540746 1555847 ENST00000355826 0 + 1540873 1555773 0 20 130,107,311,172,107,195,143,108,105,163,134,149,157,161,127,234,179,182,66,313, 0,1004,7885,8270,9291,9487,9782,10042,11146,11333,11570,11792,12169,12515,12769,12985,13629,13881,14135,14788,
+chr1 1836125 1838593 ENST00000378604 0 + 1836579 1838492 0 5 481,175,145,101,148, 0,1614,1925,2147,2320,
+chr1 1540746 1555847 ENST00000357882 0 + 1540873 1555773 0 19 130,107,311,172,107,143,108,105,163,134,149,157,161,127,234,179,182,66,313, 0,1004,7885,8270,9291,9782,10042,11146,11333,11570,11792,12169,12515,12769,12985,13629,13881,14135,14788,
+chr1 1540746 1555847 ENST00000357882 0 + 1540873 1555773 0 19 130,107,311,172,107,143,108,105,163,134,149,157,161,127,234,179,182,66,313, 0,1004,7885,8270,9291,9782,10042,11146,11333,11570,11792,12169,12515,12769,12985,13629,13881,14135,14788,
+chr1 67075991 67163158 ENST00000395250 0 - 67075991 67113089 0 11 27,45,203,195,156,140,157,113,185,175,226, 0,31,2748,9763,24426,33649,37060,53433,55508,67480,86941,
+chr1 201326404 201403155 ENST00000309502 0 + 201364592 201401651 0 6 81,18,100,155,398,2144, 0,1039,37175,37624,38131,74607,
+chr1 8335052 8508585 ENST00000377464 0 - 8337733 8508430 0 17 2715,181,147,721,223,1379,114,162,200,93,163,81,99,100,125,49,181, 0,3013,3694,5790,7358,7706,9357,10277,11650,12340,13406,70321,113519,142657,144999,156220,173352,
+chr1 8335054 8800286 ENST00000400908 0 - 8337733 8638943 0 23 2713,181,147,721,223,1379,114,162,200,93,163,81,99,100,125,49,105,97,106,126,71,469,481, 0,3011,3692,5788,7356,7704,9355,10275,11648,12338,13404,70319,113517,142655,144997,156218,188805,204066,205009,262152,271901,303564,464751,
diff -r a67ca0795efb -r dfaa18960944 test-data/codingSnps_output.interval
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/codingSnps_output.interval Tue Mar 23 13:53:21 2010 -0400
@@ -0,0 +1,21 @@
+chr1 1541789 1541790 C/T ENST00000355826 Phe:Leu/Phe 15 T
+chr1 1541789 1541790 C/T ENST00000357882 Phe:Leu/Phe 15 T
+chr1 1541789 1541790 C/T ENST00000357882 Phe:Leu/Phe 15 T
+chr1 1541789 1541790 C/T ENST00000378710 Phe:Leu/Phe 15 T
+chr1 1548654 1548655 C ENST00000355826 Met:Thr 45 T
+chr1 1548654 1548655 C ENST00000357882 Met:Thr 45 T
+chr1 1548654 1548655 C ENST00000357882 Met:Thr 45 T
+chr1 1548654 1548655 C ENST00000378710 Met:Thr 45 T
+chr1 1675899 1675900 C ENST00000341991 Asn:Lys 262 G
+chr1 1675899 1675900 C ENST00000378625 Asn:Lys 407 G
+chr1 1675899 1675900 C ENST00000341426 Asn:Lys 262 G
+chr1 1675899 1675900 C ENST00000344463 Asn:Lys 407 G
+chr1 1837838 1837839 C/T ENST00000307786 Trp:Arg/Trp 60 T
+chr1 1837838 1837839 C/T ENST00000378604 Trp:Arg/Trp 43 T
+chr1 1839388 1839389 T ENST00000378602 Met:Lys 126 A
+chr1 1839388 1839389 T ENST00000310991 Met:Lys 141 A
+chr1 1839389 1839390 C/T ENST00000378602 Met:Val/Met 126 T
+chr1 1839389 1839390 C/T ENST00000310991 Met:Val/Met 141 T
+chr1 1844405 1844406 C/T ENST00000378596 Glu:Glu/Lys 187 C
+chr1 1876878 1876879 G ENST00000270720 Stop:Gln 763 A
+chr1 1890091 1890092 C/T ENST00000270720 Ile:Val/Ile 363 T
diff -r a67ca0795efb -r dfaa18960944 tool-data/codingSnps.loc.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool-data/codingSnps.loc.sample Tue Mar 23 13:53:21 2010 -0400
@@ -0,0 +1,25 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of nib genome files for use with codingSnps. You will
+#need to supply these files and then create a codingSnps.loc file
+#similar to this one (store it in this directory) that points to
+#the directories in which those files are stored. The codingSnps.loc
+#file has this format (white space characters are TAB characters):
+#
+#<build> <file_path>
+#
+#So, for example, if your codingSnps.loc began like this:
+#
+#hg18 /afs/bx.psu.edu/depot/data/genome/hg18/
+
+#
+#then your /afs/bx.psu.edu/depot/data/genome/hg18/ directory
+#would need to contain the following 2bit file:
+#
+#
+#-rw-r--r-- 1 g2data g2data 807604784 Dec 8 13:21 hg18.2bit
+
+#Your codingSnps.loc file should include an entry per line for
+#each file you have stored that you want to be available. Note that
+#your files should all have the extension '2bit'.
+
+hg18 /afs/bx.psu.edu/depot/data/genome/hg18
diff -r a67ca0795efb -r dfaa18960944 tool_conf.xml.sample
--- a/tool_conf.xml.sample Tue Mar 23 13:15:56 2010 -0400
+++ b/tool_conf.xml.sample Tue Mar 23 13:53:21 2010 -0400
@@ -161,6 +161,7 @@
<tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
<tool file="hyphy/hyphy_dnds_wrapper.xml" />
<tool file="evolution/mutate_snp_codon.xml" />
+ <tool file="evolution/codingSnps.xml" />
</section>
<section name="Metagenomic analyses" id="tax_manipulation">
<tool file="taxonomy/gi2taxonomy.xml" />
diff -r a67ca0795efb -r dfaa18960944 tools/evolution/codingSnps.pl
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/evolution/codingSnps.pl Tue Mar 23 13:53:21 2010 -0400
@@ -0,0 +1,541 @@
+#!/usr/bin/perl -w
+use strict;
+
+#########################################################################
+# codingSnps.pl
+# This takes a bed file with the names being / separated nts
+# and a gene bed file with cds start and stop.
+# It then checks for changes in coding regions, reporting
+# those that cause a frameshift or substitution in the amino acid.
+#########################################################################
+
+my $uniq = 0; # flag for whether want uniq positions
+my $syn = 0; # flag for if want synonomous changes rather than non-syn
+my $seqFlag = "2bit"; # flag to set sequence type 2bit|nib
+my $nibDir; # directory containg data
+my $nibTag; # tag for directory above
+
+################################################################################
+# Parse command line arguments #
+################################################################################
+
+# make sure we have enough arguments
+if (!@ARGV or scalar @ARGV < 3) {
+ print STDERR "Usage: codingSnps.pl snps.bed genes.bed locfile.loc [chr=# start=# end=# snp=#] output_file\n";
+ exit;
+}
+
+# get first three command line arguments
+my $snpFile = shift @ARGV;
+my $geneFile = shift @ARGV;
+my $locFile = shift @ARGV;
+
+# read $locFile to get $nibDir (ignoring commets)
+# FIXME: the last entry is the one you get
+open(LF, "< $locFile") || die "open($locFile): $!\n";
+while(<LF>) {
+ s/#.*$//;
+ s/(?:^\s+|\s+$)//g;
+ next if (/^$/);
+
+ # $tag and $path are set for each "valid" line in the file
+ ($nibTag, $nibDir) = split(/\t/);
+}
+close(LF);
+
+# bed like columns in default positions
+my $col0 = 0;
+my $col1 = 1;
+my $col2 = 2;
+my $col3 = 3;
+
+# get column positions for chr, start, end, snp
+# column positions 1 based coming in (for Galaxy)
+foreach (@ARGV) {
+ if (/^chr=(\d+)$/) {
+ $col0 = $1 - 1;
+ } elsif (/^start=(\d+)$/) {
+ $col1 = $1 - 1;
+ } elsif (/^end=(\d+)$/) {
+ $col2 = $1 - 1;
+ } elsif (/^snp=(\d+)$/) {
+ $col3 = $1 - 1;
+ }
+}
+
+# make sure the column positions are sane
+if ($col0 < 0 || $col1 < 0 || $col2 < 0 || $col3 < 0) {
+ print STDERR "ERROR column numbers are given with origin 1\n";
+ exit 1;
+}
+
+# get the output_file from the command line arguments
+my $outFile = $ARGV[$#ARGV];
+
+open(OUTFILE, "> $outFile") || die "open($outFile): $!\n";
+
+################################################################################
+# Initialization #
+################################################################################
+
+my @genes; #bed lines for genes, sorted by chrom and start
+my %chrSt; #index in array where each chrom starts
+my %codon; #hash of codon amino acid conversions
+my $ends = 0; #ends vs sizes in bed 11 position, starts relative to chrom
+my $ignoreN = 1; #skip N
+
+my %amb = (
+"R" => "A/G",
+"Y" => "C/T",
+"S" => "C/G",
+"W" => "A/T",
+"K" => "G/T",
+"M" => "A/C",
+"B" => "C/G/T",
+"D" => "A/G/T",
+"H" => "A/C/T",
+"V" => "A/C/G",
+"N" => "A/C/G/T"
+);
+
+fill_codon();
+
+################################################################################
+# Main #
+################################################################################
+
+open(FH, "cat $geneFile | sort -k1,1 -k2,2n |")
+ or die "Couldn't open and sort $geneFile, $!\n";
+my $i = 0;
+while(<FH>) {
+ chomp;
+ if (/refGene.cdsEnd|ccdsGene.exonEnds/) { $ends = 1; next; }
+ push(@genes, "$_");
+ my @f = split(/\t/);
+ if (!exists $chrSt{$f[0]}) { $chrSt{$f[0]} = $i; }
+ $i++;
+}
+close FH or die "Couldn't close $geneFile, $!\n";
+
+if ($ends) { print STDERR "TESTING using block ends rather than sizes\n"; }
+
+#open snps sorted as well
+my $s1 = $col0 + 1; #sort order is origin 1
+my $s2 = $col1 + 1;
+open(FH, "cat $snpFile | sort -k$s1,$s1 -k$s2,${s2}n |")
+ or die "Couldn't open and sort $snpFile, $!\n";
+$i = 0;
+my @g; #one genes fields, should be used repeatedly
+my %done;
+while(<FH>) {
+ chomp;
+ my @s = split(/\t/); #SNP fields
+ if (!@s or !$s[$col0]) { die "ERROR missing SNP data, $_\n"; }
+ my $size = $#s;
+ if ($col0 > $size || $col1 > $size || $col2 > $size || $col3 > $size) {
+ print STDERR "ERROR file has fewer columns than requested, requested columns (0 based) $col0 $col1 $col2 $col3, file has $size\n";
+ exit 1;
+ }
+ if ($s[$col1] =~ /\D/) {
+ print STDERR "ERROR the start point must be an integer not $s[$col1]\n";
+ exit 1;
+ }
+ if ($s[$col2] =~ /\D/) {
+ print STDERR "ERROR the start point must be an integer not $s[$col2]\n";
+ exit 1;
+ }
+ if ($s[$col3] eq 'N' && $ignoreN) { next; }
+ if (exists $amb{$s[$col3]}) { $s[$col3] = $amb{$s[$col3]}; }
+ if (!@g && exists $chrSt{$s[$col0]}) { #need to fetch first gene row
+ $i = $chrSt{$s[$col0]};
+ @g = split(/\t/, $genes[$i]);
+ }elsif (!@g) {
+ next; #no gene for this chrom
+ }elsif ($s[$col0] ne $g[0] && exists $chrSt{$s[$col0]}) { #new chrom
+ $i = $chrSt{$s[$col0]};
+ @g = split(/\t/, $genes[$i]);
+ }elsif ($s[$col0] ne $g[0]) {
+ next; #no gene for this chrom
+ }elsif ($s[$col1] < $g[1] && $i == $chrSt{$s[$col0]}) {
+ next; #before any genes
+ }elsif ($s[$col1] > $g[2] && ($i == $#genes or $genes[$i+1] !~ $s[$col0])) {
+ next; #after all genes on chr
+ }else {
+ while ($s[$col1] > $g[2] && $i < $#genes) {
+ $i++;
+ @g = split(/\t/, $genes[$i]);
+ if ($s[$col0] ne $g[0]) { last; } #end of gene
+ }
+ if ($s[$col0] ne $g[0] or $s[$col1] < $g[1] or $s[$col1] > $g[2]) {
+ next; #no overlap with genes
+ }
+ }
+
+ processSnp(\@s, \@g);
+ if ($uniq && exists $done{"$s[$col0] $s[$col1] $s[$col2]"}) { next; }
+
+ my $k = $i + 1; #check for more genes without losing data of first
+ if ($k <= $#genes) {
+ my @g2 = split(/\t/, $genes[$k]);
+ while (@g2 && $k <= $#genes) {
+ @g2 = split(/\t/, $genes[$k]);
+ if ($s[$col0] ne $g2[0]) {
+ undef @g2;
+ last; #not same chrom
+ }else {
+ while ($s[$col1] > $g2[2] && $k <= $#genes) {
+ $k++;
+ @g2 = split(/\t/, $genes[$k]);
+ if ($s[$col0] ne $g2[0]) { last; } #end of chrom
+ }
+ if ($s[$col0] ne $g2[0] or $s[$col1] < $g2[1] or $s[$col1] > $g2[2]) {
+ undef @g2;
+ last; #no overlap with more genes
+ }
+ processSnp(\@s, \@g2);
+ if ($uniq && exists $done{"$s[$col0] $s[$col1] $s[$col2]"}) { last; }
+ }
+ $k++;
+ }
+ }
+}
+close FH or die "Couldn't close $snpFile, $!\n";
+close(OUTFILE) || die "close($outFile): $!\n";
+
+exit;
+
+################################################################################
+# Subroutines #
+################################################################################
+
+sub processSnp {
+ my $sref = shift;
+ my $gref = shift;
+ #overlaps gene, but maybe not coding seq
+ #inside cds
+ if ($sref->[$col1] + 1 < $gref->[6] or $sref->[$col2] > $gref->[7]) {
+ return; #outside of coding
+ }
+ #now check exon
+ my $i = 0;
+ my @st = split(/,/, $gref->[11]);
+ my @size = split(/,/, $gref->[10]);
+ if (scalar @st ne $gref->[9]) { die "bad gene $gref->[3]\n"; }
+ my @pos;
+ my $in = 0;
+ for($i = 0; $i < $gref->[9]; $i++) {
+ my $sta = $gref->[1] + $st[$i] + 1; #1 based position
+ my $end = $sta + $size[$i] - 1; #
+ if ($ends) { $end = $size[$i]; $sta = $st[$i] + 1; } #ends instead of sizes
+ if ($end < $gref->[6]) { next; } #utr only
+ if ($sta > $gref->[7]) { next; } #utr only
+ #shorten to coding only
+ if ($sta < $gref->[6]) { $sta = $gref->[6] + 1; }
+ if ($end > $gref->[7]) { $end = $gref->[7]; }
+ if ($sref->[$col1] + 1 >= $sta && $sref->[$col2] <= $end) { $in = 1; }
+ elsif ($sref->[$col1] == $sref->[$col2] && $sref->[$col2] <= $end && $sref->[$col2] >= $sta) { $in = 1; }
+ push(@pos, ($sta .. $end)); #add exon worth of positions
+ }
+ #@pos has coding positions for whole gene (chr coors),
+ #and $in has whether we need to continue
+ if (!$in) { return; } #not in coding exon
+ if ((scalar @pos) % 3 != 0) { return; } #partial gene? not even codons
+ if ($sref->[$col3] =~ /^-+\/[ACTG]+$/ or $sref->[$col3] =~ /^[ACTG]+\/-+$/ or
+ $sref->[$col3] =~ /^-+$/) { #indel or del
+ my $copy = $sref->[$col3];
+ my $c = ($copy =~ tr/-//);
+ if ($c % 3 == 0) { return; } #not frameshift
+ #handle bed4 to bed4 + 4 (pgSnp)
+ print OUTFILE "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
+ #if ($sref->[4]) { print "\t$sref->[4]"; }
+ #if ($sref->[5]) { print "\t$sref->[5]"; }
+ #if ($sref->[6]) { print "\t$sref->[6]"; }
+ #if ($sref->[7]) { print "\t$sref->[7]"; }
+ print OUTFILE "\t$gref->[3]\tframeshift\n";
+ $done{"$sref->[$col0] $sref->[$col1] $sref->[$col2]"}++;
+ return;
+ }elsif ($sref->[$col1] == $sref->[$col2]) { #insertion
+ my $copy = $sref->[$col3];
+ my $c = ($copy =~ tr/\[ACTG]+//);
+ if ($c % 3 == 0) { return; } #not frameshift
+ #handle bed4 to bed4 + 4 (pgSnp)
+ print OUTFILE "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
+ #if ($sref->[4]) { print "\t$sref->[4]"; }
+ #if ($sref->[5]) { print "\t$sref->[5]"; }
+ #if ($sref->[6]) { print "\t$sref->[6]"; }
+ #if ($sref->[7]) { print "\t$sref->[7]"; }
+ print OUTFILE "\t$gref->[3]\tframeshift\n";
+ $done{"$sref->[$col0] $sref->[$col1] $sref->[$col2]"}++;
+ return;
+ }
+ #check for amino acid substitutions
+ my $s = $sref->[$col1] + 1;
+ my $e = $sref->[$col2];
+ my $len = $sref->[$col2] - $sref->[$col1];
+ if ($gref->[5] eq '-') {
+ @pos = reverse(@pos);
+ my $t = $s;
+ $s = $e;
+ $e = $t;
+ }
+ $i = 0;
+ my $found = 0;
+ foreach (@pos) {
+ if ($s == $_) {
+ $found = 1;
+ last;
+ }
+ $i++;
+ }
+ if ($found) {
+ my $fs = $i; #keep original start index
+ #have index where substitution starts
+ my $cp = $i % 3;
+ $i -= $cp; #i is now first position in codon
+ my $cdNum = int($i / 3) + 1;
+ my $ls = $i;
+ if (!defined $ls) { die "ERROR not defined ls for $fs $sref->[$col2]\n"; }
+ if (!@pos) { die "ERROR not defined array pos\n"; }
+ if (!defined $pos[$ls]) { die "ERROR not defined pos at $ls\n"; }
+ if (!defined $e) { die "ERROR not defined e for $pos[0] $pos[1] $pos[2]\n"; }
+ while ($ls <= $#pos && $pos[$ls] ne $e) {
+ $ls++;
+ }
+ my $i2 = $ls + (2 - ($ls % 3));
+ if ($i2 > $#pos) { return; } #not a full codon, partial gene?
+
+ if ($i2 - $i < 2) { die "not a full codon positions $i to $i2 for $sref->[3]\n"; }
+ my $oldnts = getnts($sref->[$col0], @pos[$i..$i2]);
+ if (!$oldnts) { die "Failed to get sequence for $sref->[$col0] $pos[$i] .. $pos[$i2]\n"; }
+ my @vars = split(/\//, $sref->[$col3]);
+ if ($gref->[5] eq '-') { #complement oldnts and revcomp vars
+ $oldnts = compl($oldnts);
+ $oldnts = join('', (reverse(split(/ */, $oldnts))));
+ foreach (@vars) {
+ $_ = reverse(split(/ */));
+ $_ = compl($_);
+ }
+ }
+ my $r = $fs - $i; #difference in old indexes gives new index
+ my @newnts;
+ my $changed = '';
+ foreach my $v (@vars) {
+ my @new = split(/ */, $oldnts);
+ $changed = splice(@new, $r, $len, split(/ */, $v));
+ #should only change single nt
+ push(@newnts, join("", @new));
+ }
+ #now compute amino acids
+ my $oldaa = getaa($oldnts);
+ my @newaa;
+ my $change = 0; #flag for if there is a change
+ foreach my $v (@newnts) {
+ my $t = getaa($v);
+ if ($t ne $oldaa) { $change = 1; }
+ push(@newaa, $t);
+ }
+ if (!$change && $syn) {
+ print OUTFILE "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
+ print OUTFILE "\t$gref->[3]\t$oldaa:", join("/", @newaa), "\n";
+ return;
+ }elsif ($syn) { return; } #only want synonymous changes
+ if (!$change) { return; } #no change in amino acids
+#if (abs($pos[$i] - $pos[$i2]) > 200) {
+#print STDERR "TESTING found mutation at splice site $sref->[0]\t$sref->[1]\t$sref->[2]\n";
+#print STDERR "old $oldaa, new ", join(', ', @newaa), "\n";
+#print STDERR "oldnt $oldnts, strand $gref->[5]\n";
+#exit;
+#}
+ print OUTFILE "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
+ #if (defined $sref->[4]) { print "\t$sref->[4]"; }
+ #if (defined $sref->[5]) { print "\t$sref->[5]"; }
+ #if (defined $sref->[6]) { print "\t$sref->[6]"; }
+ #if (defined $sref->[7]) { print "\t$sref->[7]"; }
+ if ($gref->[5] eq '-') { $changed = compl($changed); } #use plus for ref
+ print OUTFILE "\t$gref->[3]\t$oldaa:", join("/", @newaa), "\t$cdNum\t$changed\n";
+ $done{"$sref->[$col0] $sref->[$col1] $sref->[$col2]"}++;
+ }
+}
+
+sub getnts {
+ my $chr = shift;
+ my @pos = @_; #list of positions not necessarily in order
+ #list may be reversed or have gaps(introns), at least 3 bps
+ my $seq = '';
+ if (scalar @pos < 3) { die "too small region for $chr $pos[0]\n"; }
+ if ($pos[0] < $pos[1]) { #not reversed
+ my $s = $pos[0];
+ for(my $i = 1; $i <= $#pos; $i++) {
+ if ($pos[$i] == $pos[$i-1] + 1) { next; }
+ if ($seqFlag eq '2bit') {
+ $seq .= fetchSeq2bit($chr, $s, $pos[$i-1]);
+ }else {
+ $seq .= fetchSeqNib($chr, $s, $pos[$i-1]);
+ }
+ $s = $pos[$i];
+ }
+ if (length $seq != scalar @pos) { #still need to fetch seq
+#if (abs($pos[$#pos]-$pos[0]) > 200) {
+#print STDERR "TESTING have split codon $chr $pos[0] $pos[$#pos]\n";
+#exit;
+#}
+ if ($seqFlag eq '2bit') {
+ $seq .= fetchSeq2bit($chr, $s, $pos[$#pos]);
+ }else {
+ $seq .= fetchSeqNib($chr, $s, $pos[$#pos]);
+ }
+ }
+ }else { #reversed
+ my $s = $pos[$#pos];
+ for(my $i = $#pos -1; $i >= 0; $i--) {
+ if ($pos[$i] == $pos[$i+1] + 1) { next; }
+ if ($seqFlag eq '2bit') {
+ $seq .= fetchSeq2bit($chr, $s, $pos[$i+1]);
+ }else {
+ $seq .= fetchSeqNib($chr, $s, $pos[$i+1]);
+ }
+ $s = $pos[$i];
+ }
+ if (length $seq != scalar @pos) { #still need to fetch seq
+#if (abs($pos[$#pos]-$pos[0]) > 200) {
+#print STDERR "TESTING have split codon $pos[0] .. $pos[$#pos]\n";
+#}
+ if ($seqFlag eq '2bit') {
+ $seq .= fetchSeq2bit($chr, $s, $pos[0]);
+ }else {
+ $seq .= fetchSeqNib($chr, $s, $pos[0]);
+ }
+ }
+ }
+}
+
+sub fetchSeq2bit {
+ my $chr = shift;
+ my $st = shift;
+ my $end = shift;
+ my $strand = '+';
+ $st--; #change to UCSC numbering
+ open (BIT, "twoBitToFa -seq=$chr -start=$st -end=$end $nibDir/$nibTag.2bit stdout |") or
+ die "Couldn't run twoBitToFa, $!\n";
+ my $seq = '';
+ while (<BIT>) {
+ chomp;
+ if (/^>/) { next; } #header
+ $seq .= $_;
+ }
+ close BIT or die "Couldn't finish nibFrag on $chr $st $end, $!\n";
+ return $seq;
+}
+
+sub fetchSeqNib {
+ my $chr = shift;
+ my $st = shift;
+ my $end = shift;
+ my $strand = '+';
+ $st--; #change to UCSC numbering
+ open (NIB, "nibFrag -upper $nibDir/${chr}.nib $st $end $strand stdout |") or die "Couldn't run nibFrag, $!\n";
+ my $seq = '';
+ while (<NIB>) {
+ chomp;
+ if (/^>/) { next; } #header
+ $seq .= $_;
+ }
+ close NIB or die "Couldn't finish nibFrag on $chr $st $end, $!\n";
+ return $seq;
+}
+
+sub compl {
+ my $nts = shift;
+ my $comp = '';
+ foreach my $n (split(/ */, $nts)) {
+ if ($n eq 'A') { $comp .= 'T'; }
+ elsif ($n eq 'T') { $comp .= 'A'; }
+ elsif ($n eq 'C') { $comp .= 'G'; }
+ elsif ($n eq 'G') { $comp .= 'C'; }
+ elsif ($n eq 'N') { $comp .= 'N'; }
+ elsif ($n eq '-') { $comp .= '-'; } #deletion
+ else { die "Couldn't do complement of $n for $nts\n"; }
+ }
+ return $comp;
+}
+
+sub getaa {
+ my $nts = shift; #in multiples of 3
+ my $aa = '';
+ my @n = split(/ */, $nts);
+ while (@n) {
+ my @t = splice(@n, 0, 3);
+ my $n = uc(join("", @t));
+ if (!exists $codon{$n}) { $aa .= 'N'; next; }
+ $aa .= $codon{$n};
+ }
+ return $aa;
+}
+
+sub fill_codon {
+ $codon{GCA} = 'Ala';
+ $codon{GCC} = 'Ala';
+ $codon{GCG} = 'Ala';
+ $codon{GCT} = 'Ala';
+ $codon{CGG} = 'Arg';
+ $codon{CGT} = 'Arg';
+ $codon{CGC} = 'Arg';
+ $codon{AGA} = 'Arg';
+ $codon{AGG} = 'Arg';
+ $codon{CGA} = 'Arg';
+ $codon{AAC} = 'Asn';
+ $codon{AAT} = 'Asn';
+ $codon{GAC} = 'Asp';
+ $codon{GAT} = 'Asp';
+ $codon{TGC} = 'Cys';
+ $codon{TGT} = 'Cys';
+ $codon{CAG} = 'Gln';
+ $codon{CAA} = 'Gln';
+ $codon{GAA} = 'Glu';
+ $codon{GAG} = 'Glu';
+ $codon{GGG} = 'Gly';
+ $codon{GGA} = 'Gly';
+ $codon{GGC} = 'Gly';
+ $codon{GGT} = 'Gly';
+ $codon{CAC} = 'His';
+ $codon{CAT} = 'His';
+ $codon{ATA} = 'Ile';
+ $codon{ATT} = 'Ile';
+ $codon{ATC} = 'Ile';
+ $codon{CTA} = 'Leu';
+ $codon{CTC} = 'Leu';
+ $codon{CTG} = 'Leu';
+ $codon{CTT} = 'Leu';
+ $codon{TTG} = 'Leu';
+ $codon{TTA} = 'Leu';
+ $codon{AAA} = 'Lys';
+ $codon{AAG} = 'Lys';
+ $codon{ATG} = 'Met';
+ $codon{TTC} = 'Phe';
+ $codon{TTT} = 'Phe';
+ $codon{CCT} = 'Pro';
+ $codon{CCA} = 'Pro';
+ $codon{CCC} = 'Pro';
+ $codon{CCG} = 'Pro';
+ $codon{TCA} = 'Ser';
+ $codon{AGC} = 'Ser';
+ $codon{AGT} = 'Ser';
+ $codon{TCC} = 'Ser';
+ $codon{TCT} = 'Ser';
+ $codon{TCG} = 'Ser';
+ $codon{TGA} = 'Stop';
+ $codon{TAG} = 'Stop';
+ $codon{TAA} = 'Stop';
+ $codon{ACT} = 'Thr';
+ $codon{ACA} = 'Thr';
+ $codon{ACC} = 'Thr';
+ $codon{ACG} = 'Thr';
+ $codon{TGG} = 'Trp';
+ $codon{TAT} = 'Tyr';
+ $codon{TAC} = 'Tyr';
+ $codon{GTC} = 'Val';
+ $codon{GTA} = 'Val';
+ $codon{GTG} = 'Val';
+ $codon{GTT} = 'Val';
+}
+
diff -r a67ca0795efb -r dfaa18960944 tools/evolution/codingSnps.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/evolution/codingSnps.xml Tue Mar 23 13:53:21 2010 -0400
@@ -0,0 +1,89 @@
+<tool id="codingSnps" name="Amino-acid changes">
+ <description>caused by a set of SNPs</description>
+ <command interpreter="perl">
+ codingSnps.pl $input1 $input2 ${GALAXY_DATA_INDEX_DIR}/codingSnps.loc chr=${input1.metadata.chromCol} start=${input1.metadata.startCol} end=${input1.metadata.endCol} snp=$col1 $out_file1
+ </command>
+ <inputs>
+ <param format="interval" name="input1" type="data" label="SNPs"/>
+ <param format="interval" name="input2" type="data" label="genes"/>
+ <param name="col1" label="Column with SNPs" type="data_column" data_ref="input1" />
+ </inputs>
+ <outputs>
+ <data format="input" name="out_file1" />
+ </outputs>
+ <code file="codingSnps_filter.py"></code>
+ <requirements>
+ <requirement type="binary">twoBitToFa</requirement>
+ </requirements>
+ <tests>
+ <test>
+ <param name="input1" value="codingSnps_input1.interval" dbkey="hg18" />
+ <param name="input2" value="codingSnps_input2.bed" dbkey="hg18" />
+ <param name="col1" value="6" />
+ <output name="output" file="codingSnps_output.interval" />
+ </test>
+ </tests>
+
+ <help>
+This tool identifies which SNPs create amino-acid changes in the specified coding regions.
+
+**Example**
+
+- first input file, with SNPs::
+
+ chr22 14440426 14440427 C/T
+ chr22 14494851 14494852 A/G
+ chr22 14494911 14494912 A/T
+ chr22 14550435 14550436 A/G
+ chr22 14611956 14611957 G/T
+ chr22 14612076 14612077 A/G
+ chr22 14668537 14668538 C
+ chr22 14668703 14668704 A/T
+ chr22 14668775 14668776 G
+ chr22 14680074 14680075 A/T
+ etc.
+
+ alternatively indicating polymorphisms using ambiguous-nucleotide symbols:
+
+ chr22 14440426 14440427 Y
+ chr22 14494851 14494852 R
+ chr22 14494911 14494912 W
+ chr22 14550435 14550436 R
+ chr22 14611956 14611957 K
+ chr22 14612076 14612077 R
+ chr22 14668537 14668538 C
+ chr22 14668703 14668704 W
+ chr22 14668775 14668776 G
+ chr22 14680074 14680075 W
+ etc.
+
+- second input file, with UCSC annotations for human genes::
+
+ chr22 14504263 14572999 uc002zkr.2 0 - 14504263 14504263 0 5 710,91,136,138,94, 0,38133,62547,62901,68642,
+ chr22 14527995 14572999 uc010gqo.1 0 - 14527995 14527995 0 4 3826,91,136,94, 0,14401,38815,44910,
+ chr22 14542065 14552264 uc002zkt.2 0 + 14542065 14542065 0 3 323,88,313, 0,2416,9886,
+ chr22 14559619 14561004 uc002zku.2 0 - 14559619 14559619 0 1 1385, 0,
+ chr22 14567164 14572999 uc002zkv.2 0 - 14567164 14567164 0 5 138,112,115,111,94, 0,1867,2099,3516,5741,
+ chr22 14620243 14620281 uc002zkw.1 0 - 14620243 14620243 0 1 38, 0,
+ chr22 14620300 14620339 uc002zkx.1 0 - 14620300 14620300 0 1 39, 0,
+ chr22 14621086 14621125 uc002zky.1 0 + 14621086 14621086 0 1 39, 0,
+ chr22 14622000 14622030 uc002zkz.1 0 - 14622000 14622000 0 1 30, 0,
+ chr22 14623380 14623414 uc002zla.1 0 - 14623380 14623380 0 1 34, 0,
+ etc.
+
+- output file, showing non-synonymous substitutions in coding regions::
+
+ chr22 15452482 15452483 G uc002zlp.1 Trp:Arg 320 A
+ chr22 15644564 15644565 T uc002zlv.1 His:Asn 442 G
+ chr22 15645123 15645124 C uc002zlv.1 Phe:Leu 255 A
+ chr22 15645193 15645194 A/G uc002zlv.1 Pro:Leu/Pro 232 G
+ chr22 15660821 15660822 A/G uc002zlv.1 Thr:Met/Thr 143 G
+ chr22 15969208 15969209 C/T uc002zly.1 Ala:Ala/Val 367 C
+ chr22 15969208 15969209 C/T uc010gqt.1 Ala:Ala/Val 315 C
+ chr22 15999075 15999076 C/G uc002zmd.1 Arg:Arg/Ser 180 C
+ chr22 15999075 15999076 C/G uc002zme.1 Arg:Arg/Ser 161 C
+ chr22 15999075 15999076 C/G uc002zmf.1 Arg:Arg/Ser 369 C
+ etc.
+
+</help>
+</tool>
diff -r a67ca0795efb -r dfaa18960944 tools/evolution/codingSnps_filter.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/evolution/codingSnps_filter.py Tue Mar 23 13:53:21 2010 -0400
@@ -0,0 +1,43 @@
+# runs after the job (and after the default post-filter)
+import os
+from galaxy import eggs
+from galaxy import jobs
+from galaxy.tools.parameters import DataToolParameter
+# Older py compatibility
+try:
+ set()
+except:
+ from sets import Set as set
+
+def validate_input( trans, error_map, param_values, page_param_map ):
+ dbkeys = set()
+ data_param_names = set()
+ data_params = 0
+ for name, param in page_param_map.iteritems():
+ if isinstance( param, DataToolParameter ):
+ # for each dataset parameter
+ if param_values.get(name, None) != None:
+ dbkeys.add( param_values[name].dbkey )
+ data_params += 1
+ # check meta data
+ try:
+ param = param_values[name]
+ startCol = int( param.metadata.startCol )
+ endCol = int( param.metadata.endCol )
+ chromCol = int( param.metadata.chromCol )
+ if param.metadata.strandCol is not None:
+ strandCol = int ( param.metadata.strandCol )
+ else:
+ strandCol = 0
+ except:
+ error_msg = "The attributes of this dataset are not properly set. " + \
+ "Click the pencil icon in the history item to set the chrom, start, end and strand columns."
+ error_map[name] = error_msg
+ data_param_names.add( name )
+ if len( dbkeys ) > 1:
+ for name in data_param_names:
+ error_map[name] = "All datasets must belong to same genomic build, " \
+ "this dataset is linked to build '%s'" % param_values[name].dbkey
+ if data_params != len(data_param_names):
+ for name in data_param_names:
+ error_map[name] = "A dataset of the appropriate type is required"
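The codingSnps.pl script in this changeset translates each affected codon with a simple lookup table: fill_codon() maps every codon to a three-letter amino-acid code, and getaa() walks the nucleotide string three bases at a time, emitting 'N' for any codon it does not recognize. A minimal Python sketch of that lookup idea follows; it is an illustration only, with an abbreviated table, and is not part of the changeset.

# Hypothetical illustration of the codon-translation approach used by
# codingSnps.pl; the real script fills in all 64 codons in fill_codon().
CODON_TABLE = {
    "ATG": "Met", "TGG": "Trp",
    "TTT": "Phe", "TTC": "Phe",
    "TAA": "Stop", "TAG": "Stop", "TGA": "Stop",
    # ... remaining codons omitted for brevity
}

def get_aa(nts):
    """Translate a nucleotide string (length a multiple of 3) into
    three-letter amino-acid codes, using 'N' for unknown codons."""
    aa = []
    for i in range(0, len(nts), 3):
        codon = nts[i:i + 3].upper()
        aa.append(CODON_TABLE.get(codon, "N"))
    return "".join(aa)

# e.g. get_aa("ATGTGGTAA") returns "MetTrpStop"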
details: http://www.bx.psu.edu/hg/galaxy/rev/a67ca0795efb
changeset: 3557:a67ca0795efb
user: rc
date: Tue Mar 23 13:15:56 2010 -0400
description:
lims: fixed a bug in the csv importer for samples
diffstat:
lib/galaxy/web/controllers/requests.py | 14 ++++++++++++--
lib/galaxy/web/controllers/requests_admin.py | 11 ++++++++++-
2 files changed, 22 insertions(+), 3 deletions(-)
diffs (59 lines):
diff -r 8f6fa47b21c4 -r a67ca0795efb lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Tue Mar 23 12:45:08 2010 -0400
+++ b/lib/galaxy/web/controllers/requests.py Tue Mar 23 13:15:56 2010 -0400
@@ -438,18 +438,28 @@
current_samples, details, edit_mode, libraries = self.__update_samples( trans, request, **kwd )
if params.get('import_samples_button', False) == 'Import samples':
try:
+ lib_widget, folder_widget = self.__library_widgets(trans, request.user,
+ len(current_samples),
+ libraries, None, **kwd)
file_obj = params.get('file_data', '')
import csv
reader = csv.reader(file_obj.file)
for row in reader:
- current_samples.append([row[0], row[1:]])
- return trans.fill_template( '/requests/show_request.mako',
+ current_samples.append(dict(name=row[0],
+ barcode='',
+ library=None,
+ folder=None,
+ lib_widget=lib_widget,
+ folder_widget=folder_widget,
+ field_values=row[1:]))
+ return trans.fill_template( '/admin/requests/show_request.mako',
request=request,
request_details=self.request_details(trans, request.id),
current_samples=current_samples,
sample_copy=self.__copy_sample(current_samples),
details=details,
edit_mode=edit_mode)
+
except:
return trans.response.send_redirect( web.url_for( controller='requests',
action='list',
diff -r 8f6fa47b21c4 -r a67ca0795efb lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Tue Mar 23 12:45:08 2010 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Tue Mar 23 13:15:56 2010 -0400
@@ -946,11 +946,20 @@
current_samples, details, edit_mode, libraries = self.__update_samples( trans, request, **kwd )
if params.get('import_samples_button', False) == 'Import samples':
try:
+ lib_widget, folder_widget = self.__library_widgets(trans, request.user,
+ len(current_samples),
+ libraries, None, **kwd)
file_obj = params.get('file_data', '')
import csv
reader = csv.reader(file_obj.file)
for row in reader:
- current_samples.append([row[0], row[1:]])
+ current_samples.append(dict(name=row[0],
+ barcode='',
+ library=None,
+ folder=None,
+ lib_widget=lib_widget,
+ folder_widget=folder_widget,
+ field_values=row[1:]))
return trans.fill_template( '/admin/requests/show_request.mako',
request=request,
request_details=self.request_details(trans, request.id),
details: http://www.bx.psu.edu/hg/galaxy/rev/8f6fa47b21c4
changeset: 3556:8f6fa47b21c4
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Mar 23 12:45:08 2010 -0400
description:
Fix bug in upload where source import files could be removed if an error occurs during detection and galaxy owns the source files.
diffstat:
tools/data_source/upload.py | 3 +++
1 files changed, 3 insertions(+), 0 deletions(-)
diffs (13 lines):
diff -r 4affff3421ed -r 8f6fa47b21c4 tools/data_source/upload.py
--- a/tools/data_source/upload.py Tue Mar 23 10:44:13 2010 -0400
+++ b/tools/data_source/upload.py Tue Mar 23 12:45:08 2010 -0400
@@ -24,6 +24,9 @@
ext = 'data',
dataset_id = dataset.dataset_id,
stderr = msg ) ) + "\n" )
+ # never remove a server-side upload
+ if dataset.type in ( 'server_dir', 'path_paste' ):
+ return
try:
os.remove( dataset.path )
except:
details: http://www.bx.psu.edu/hg/galaxy/rev/4affff3421ed
changeset: 3555:4affff3421ed
user: gua110
date: Tue Mar 23 10:44:13 2010 -0400
description:
Added 'Multivariate analysis' section to tool_conf.xml.main
diffstat:
tool_conf.xml.main | 6 ++++++
1 files changed, 6 insertions(+), 0 deletions(-)
diffs (16 lines):
diff -r a2d3d9514c76 -r 4affff3421ed tool_conf.xml.main
--- a/tool_conf.xml.main Tue Mar 23 10:09:12 2010 -0400
+++ b/tool_conf.xml.main Tue Mar 23 10:44:13 2010 -0400
@@ -141,6 +141,12 @@
<tool file="regVariation/best_regression_subsets.xml" />
<tool file="regVariation/rcve.xml" />
</section>
+ <section name="Multivariate Analysis" id="multVar">
+ <tool file="multivariate_stats/pca.xml" />
+ <tool file="multivariate_stats/cca.xml" />
+ <tool file="multivariate_stats/kpca.xml" />
+ <tool file="multivariate_stats/kcca.xml" />
+ </section>
<section name="Evolution" id="hyphy">
<tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
<tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
details: http://www.bx.psu.edu/hg/galaxy/rev/a2d3d9514c76
changeset: 3554:a2d3d9514c76
user: rc
date: Tue Mar 23 10:09:12 2010 -0400
description:
lims: data transfer bug fix
diffstat:
scripts/galaxy_messaging/server/data_transfer.py | 6 +++---
1 files changed, 3 insertions(+), 3 deletions(-)
diffs (26 lines):
diff -r 96776367bea1 -r a2d3d9514c76 scripts/galaxy_messaging/server/data_transfer.py
--- a/scripts/galaxy_messaging/server/data_transfer.py Mon Mar 22 18:02:40 2010 -0400
+++ b/scripts/galaxy_messaging/server/data_transfer.py Tue Mar 23 10:09:12 2010 -0400
@@ -158,11 +158,11 @@
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
f = opener.open(url)
- if f.read().find("Now logged in as "+self.datatx_email) == -1:
+ if f.read().find("ogged in as "+self.datatx_email) == -1:
# if the user doesnt exist, create the user
url = "%s/user/create?email=%s&username=%s&password=%s&confirm=%s&create_user_button=Submit" % ( base_url, self.datatx_email, self.datatx_email, self.datatx_password, self.datatx_password )
f = opener.open(url)
- if f.read().find("Now logged in as "+self.datatx_email) == -1:
+ if f.read().find("ogged in as "+self.datatx_email) == -1:
raise DataTransferException("The "+self.datatx_email+" user could not login to Galaxy")
# after login, add dataset to the library
params = urllib.urlencode(dict( cntrller='library_admin',
@@ -185,7 +185,7 @@
raise DataTransferException("Dataset could not be uploaded to the data library")
# finally logout
f = opener.open(base_url+'/user/logout')
- if f.read().find("You are no longer logged in.") == -1:
+ if f.read().find("You have been logged out.") == -1:
raise DataTransferException("The "+self.datatx_email+" user could not logout of Galaxy")
except DataTransferException, (e):
self.error_and_exit(e.msg)
details: http://www.bx.psu.edu/hg/galaxy/rev/96776367bea1
changeset: 3553:96776367bea1
user: Kanwei Li <kanwei(a)gmail.com>
date: Mon Mar 22 18:02:40 2010 -0400
description:
Least Recently Used (LRU) cache for python
diffstat:
lib/galaxy/util/lrucache.py | 72 +++++++++++++++++++++++++++++++++++++++++++++
1 files changed, 72 insertions(+), 0 deletions(-)
diffs (77 lines):
diff -r 2bda9c59b992 -r 96776367bea1 lib/galaxy/util/lrucache.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/util/lrucache.py Mon Mar 22 18:02:40 2010 -0400
@@ -0,0 +1,72 @@
+"""
+Kanwei Li, 03/2010
+
+Simple LRU cache that uses a dictionary to store a specified number of objects
+at a time.
+"""
+
+class LRUCache:
+ def clear(self):
+ ''' Clears/initiates storage variables'''
+ self.key_ary = []
+ self.obj_cache = {}
+
+ def __init__(self, num_elements):
+ self.num_elements = num_elements
+ self.clear()
+
+ def __getitem__(self, key):
+ ''' Return value of key, or None if key is not in cache '''
+ try:
+ index = self.key_ary.index(key)
+ except ValueError:
+ return None
+ # Move this key to the end
+ self.key_ary.remove(key)
+ self.key_ary.append(key)
+ return self.obj_cache[key]
+
+ def __setitem__(self, key, value):
+ ''' Sets a new value to a key '''
+ if key not in self.obj_cache:
+ if len(self.key_ary) >= self.num_elements:
+ deleted_key = self.key_ary.pop(0) # Remove first element
+ del self.obj_cache[deleted_key]
+ self.key_ary.append(key)
+ self.obj_cache[key] = value
+ return value
+
+if __name__ == "__main__":
+ import unittest
+
+ class TestLRUCache(unittest.TestCase):
+ def test_lru(self):
+ lru = LRUCache(2)
+ for i in range(0, 4): # Insert 4 numbers
+ lru[i] = i
+ self.assertEqual( lru[0], None )
+ self.assertEqual( lru[1], None )
+ self.assertEqual( lru[2], 2 )
+ self.assertEqual( lru[3], 3 )
+
+ self.assertEqual( lru.__setitem__("hello", "world"), "world")
+ self.assertEqual( lru[2], None )
+
+ lru.clear()
+ self.assertEqual( lru["hello"], None )
+ self.assertEqual( lru[3], None )
+
+ # Test if recently used item is kept
+ lru[0] = 0
+ lru[1] = 1
+ # Now saturated
+ ping = lru[0]
+ lru[2] = 2
+ # Should keep 0, delete 1
+ self.assertEqual( lru[0], 0 )
+ self.assertEqual( lru[1], None )
+ self.assertEqual( lru[2], 2 )
+
+ unittest.main()
+
+
\ No newline at end of file
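The LRUCache class above keeps at most num_elements objects: reading a key moves it to the end of the eviction order, writing a new key into a full cache drops the least recently used one, and a lookup miss returns None instead of raising. A short usage sketch under those semantics (illustrative only):

# module added in this changeset (lib/galaxy/util/lrucache.py)
from galaxy.util.lrucache import LRUCache

cache = LRUCache(2)          # keep at most two objects
cache["a"] = 1
cache["b"] = 2
cache["a"]                   # touch "a" so it becomes most recently used
cache["c"] = 3               # evicts "b", the least recently used key
assert cache["b"] is None    # misses return None rather than raising
assert cache["a"] == 1 and cache["c"] == 3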
details: http://www.bx.psu.edu/hg/galaxy/rev/2bda9c59b992
changeset: 3552:2bda9c59b992
user: James Taylor <james(a)jamestaylor.org>
date: Mon Mar 22 17:11:10 2010 -0400
description:
Fix saving annotations in workflow editor
diffstat:
static/scripts/galaxy.workflow_editor.canvas.js | 6 +---
templates/workflow/editor.mako | 39 ++++++++++--------------
templates/workflow/editor_tool_form.mako | 15 +++++---
3 files changed, 26 insertions(+), 34 deletions(-)
diffs (104 lines):
diff -r 84386da2a3f3 -r 2bda9c59b992 static/scripts/galaxy.workflow_editor.canvas.js
--- a/static/scripts/galaxy.workflow_editor.canvas.js Mon Mar 22 16:59:20 2010 -0400
+++ b/static/scripts/galaxy.workflow_editor.canvas.js Mon Mar 22 17:11:10 2010 -0400
@@ -448,12 +448,8 @@
// If active form has changed, save it
if (this.active_form_has_changes) {
this.has_changes = true;
- // Get annotation and add to form.
- var annotation = $("textarea[name=annotation]").val();
- var tool_form = $("#right-content").find("form");
- tool_form.append( "<input type='hidden' name='annotation' value='"+annotation+"' />" );
// Submit form.
- tool_form.submit();
+ $("#right-content").find("form").submit();
this.active_form_has_changes = false;
}
},
diff -r 84386da2a3f3 -r 2bda9c59b992 templates/workflow/editor.mako
--- a/templates/workflow/editor.mako Mon Mar 22 16:59:20 2010 -0400
+++ b/templates/workflow/editor.mako Mon Mar 22 17:11:10 2010 -0400
@@ -310,7 +310,22 @@
function show_form_for_tool( text, node ) {
$("#edit-attributes").hide();
- $("#right-content").show().html( text );
+ $("#right-content").show().html( text );
+
+ // Add metadata form to tool.
+ if (node) {
+ $("#right-content").find(".toolForm").after( "<p><div class='metadataForm'> \
+ <div class='metadataFormTitle'>Edit Step Attributes</div> \
+ <div class='form-row'> \
+ <label>Annotation / Notes:</label> \
+ <div style='margin-right: 10px;'> \
+ <textarea name='annotation' rows='3' style='width: 100%'>" + node.annotation + "</textarea> \
+ <div class='toolParamHelp'>Add an annotation or notes to this step; annotations are available when a workflow is viewed.</div> \
+ </div> \
+ </div> \
+ </div>" );
+ }
+
$("#right-content").find( "form" ).ajaxForm( {
type: 'POST',
dataType: 'json',
@@ -352,28 +367,6 @@
});
});
});
-
-
- // Add metadata form to tool.
- if (node) {
- var metadata_div =
- $( "<p><div class='metadataForm'> \
- <div class='metadataFormTitle'>Edit Step Attributes</div> \
- <div class='form-row'> \
- <label>Annotation / Notes:</label> \
- <div style='margin-right: 10px;'> \
- <textarea name='annotation' rows='3' style='width: 100%'>" + node.annotation + "</textarea> \
- <div class='toolParamHelp'>Add an annotation or notes to this step; annotations are available when a workflow is viewed.</div> \
- </div> \
- </div> \
- </div>");
- // See above comment: this is necessary to handle autosaving.
- var textarea = $(metadata_div).find("textarea");
- textarea.focus( function () {
- workflow.active_form_has_changes = true;
- });
- $("#right-content").find(".toolForm").after( metadata_div );
- }
}
var close_editor = function() {
diff -r 84386da2a3f3 -r 2bda9c59b992 templates/workflow/editor_tool_form.mako
--- a/templates/workflow/editor_tool_form.mako Mon Mar 22 16:59:20 2010 -0400
+++ b/templates/workflow/editor_tool_form.mako Mon Mar 22 17:11:10 2010 -0400
@@ -94,10 +94,11 @@
</div>
</%def>
-<div class="toolForm">
- <div class="toolFormTitle">Tool: ${tool.name}</div>
- <div class="toolFormBody">
- <form method="post" action="${h.url_for( action='editor_form_post' )}">
+<form method="post" action="${h.url_for( action='editor_form_post' )}">
+
+ <div class="toolForm">
+ <div class="toolFormTitle">Tool: ${tool.name}</div>
+ <div class="toolFormBody">
<input type="hidden" name="tool_id" value="${tool.id}" />
%for i, inputs in enumerate( tool.inputs_by_page ):
%if tool.has_multiple_pages:
@@ -105,6 +106,8 @@
%endif
${do_inputs( inputs, values, errors, "" )}
%endfor
- </form>
+ </div>
</div>
-</div>
+
+
+</form>
\ No newline at end of file
details: http://www.bx.psu.edu/hg/galaxy/rev/84386da2a3f3
changeset: 3551:84386da2a3f3
user: Kanwei Li <kanwei(a)gmail.com>
date: Mon Mar 22 16:59:20 2010 -0400
description:
Can customize main logo link url, rename viz links
diffstat:
templates/base_panels.mako | 8 ++++----
universe_wsgi.ini.sample | 2 ++
2 files changed, 6 insertions(+), 4 deletions(-)
diffs (45 lines):
diff -r 8938319bf173 -r 84386da2a3f3 templates/base_panels.mako
--- a/templates/base_panels.mako Mon Mar 22 12:32:14 2010 -0400
+++ b/templates/base_panels.mako Mon Mar 22 16:59:20 2010 -0400
@@ -205,9 +205,9 @@
Visualization
<div class="submenu">
<ul>
- <li><a href="${h.url_for( controller='/tracks', action='index' )}">Build track browser</a></li>
+ <li><a href="${h.url_for( controller='/tracks', action='index' )}">New Track Browser</a></li>
<li><hr style="color: inherit; background-color: gray"/></li>
- <li><a href="${h.url_for( controller='/visualization', action='list' )}">Stored visualizations</a></li>
+ <li><a href="${h.url_for( controller='/visualization', action='list' )}">Saved Visualizations</a></li>
</ul>
</div>
</td>
@@ -287,11 +287,11 @@
## Logo, layered over tabs to be clickable
<div class="title" style="position: absolute; top: 0; left: 0;">
- <a href="/">
+ <a href="${app.config.get( 'logo_url', '/' )}">
<img border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}" style="width: 26px; vertical-align: top;">
Galaxy
%if app.config.brand:
- <span class='brand'>/${app.config.brand}</span>
+ <span class='brand'>/ ${app.config.brand}</span>
%endif
</a>
</div>
diff -r 8938319bf173 -r 84386da2a3f3 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample Mon Mar 22 12:32:14 2010 -0400
+++ b/universe_wsgi.ini.sample Mon Mar 22 16:59:20 2010 -0400
@@ -126,10 +126,12 @@
## for a local mirror where you are doing private software development
##
## Brand: appends "/[brand]" to the "Galaxy" text in the masthead
+## logo_url: replaces the default "Galaxy + brand" link url
## wiki_url: replaces the default galaxy main wiki
## bugs_email: replaces the default galaxy bugs email list
##citation_url: point to a URL listing citations
#brand = Private local mirror
+#logo_url = /
#wiki_url = /path/to/my/local/wiki
#bugs_email = mailto:galaxy-bugs@example.org
#citation_url = /path/to/my/citations
details: http://www.bx.psu.edu/hg/galaxy/rev/8938319bf173
changeset: 3550:8938319bf173
user: fubar: ross Lazarus at gmail period com
date: Mon Mar 22 12:32:14 2010 -0400
description:
Assume plink is on the executing node path for converters
Add a non-specific exception catcher to archive creation code in library_common
diffstat:
lib/galaxy/datatypes/converters/lped_to_pbed_converter.xml | 2 +-
lib/galaxy/datatypes/converters/pbed_to_lped_converter.xml | 2 +-
lib/galaxy/datatypes/genetics.py | 5 ++++-
lib/galaxy/web/controllers/dataset.py | 2 +-
lib/galaxy/web/controllers/library_common.py | 5 +++++
5 files changed, 12 insertions(+), 4 deletions(-)
diffs (66 lines):
diff -r 7120c4848270 -r 8938319bf173 lib/galaxy/datatypes/converters/lped_to_pbed_converter.xml
--- a/lib/galaxy/datatypes/converters/lped_to_pbed_converter.xml Mon Mar 22 11:06:40 2010 -0400
+++ b/lib/galaxy/datatypes/converters/lped_to_pbed_converter.xml Mon Mar 22 12:32:14 2010 -0400
@@ -3,7 +3,7 @@
<!-- Used on the metadata edit page. -->
<command interpreter="python">
lped_to_pbed_converter.py '$input1.extra_files_path/$input1.metadata.base_name'
- '$output1' '$output1.files_path' '${GALAXY_DATA_INDEX_DIR}/rg/bin/plink'
+ '$output1' '$output1.files_path' 'plink'
</command>
<inputs>
<param format="lped" name="input1" type="data" label="Choose linkage pedigree file"/>
diff -r 7120c4848270 -r 8938319bf173 lib/galaxy/datatypes/converters/pbed_to_lped_converter.xml
--- a/lib/galaxy/datatypes/converters/pbed_to_lped_converter.xml Mon Mar 22 11:06:40 2010 -0400
+++ b/lib/galaxy/datatypes/converters/pbed_to_lped_converter.xml Mon Mar 22 12:32:14 2010 -0400
@@ -3,7 +3,7 @@
<!-- Used on the metadata edit page. -->
<command interpreter="python">
pbed_to_lped_converter.py '$input1.extra_files_path/$input1.metadata.base_name'
- '$output1' '$output1.files_path' '${GALAXY_DATA_INDEX_DIR}/rg/bin/plink'
+ '$output1' '$output1.files_path' 'plink'
</command>
<inputs>
<param format="pbed" name="input1" type="data" label="Choose compressed Plink binary format genotype file"/>
diff -r 7120c4848270 -r 8938319bf173 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Mon Mar 22 11:06:40 2010 -0400
+++ b/lib/galaxy/datatypes/genetics.py Mon Mar 22 12:32:14 2010 -0400
@@ -114,7 +114,10 @@
npeek = 5
out = ['<table cellspacing="0" cellpadding="3">']
f = open(dataset.file_name,'r')
- d = [f.next() for x in range(npeek)]
+ d = f.readlines()[:5]
+ if len(d) == 0:
+ out = "Cannot find anything to parse in %s" % dataset.name
+ return out
hasheader = 0
try:
test = ['%f' % x for x in d[0][1:]] # first is name - see if starts all numerics
diff -r 7120c4848270 -r 8938319bf173 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Mon Mar 22 11:06:40 2010 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Mon Mar 22 12:32:14 2010 -0400
@@ -332,7 +332,7 @@
trans.response.set_content_type( mime )
return open( file_path )
else:
- return "Could not find '%s' on the extra files path." % filename
+ return "Could not find '%s' on the extra files path %s." % (filename,file_path)
mime = trans.app.datatypes_registry.get_mimetype_by_extension( data.extension.lower() )
trans.response.set_content_type(mime)
diff -r 7120c4848270 -r 8938319bf173 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Mon Mar 22 11:06:40 2010 -0400
+++ b/lib/galaxy/web/controllers/library_common.py Mon Mar 22 12:32:14 2010 -0400
@@ -1334,6 +1334,11 @@
log.exception( "Unable to create archive for download" )
msg = "Unable to create archive for download, please report this error"
messagetype = 'error'
+ except:
+ error = True
+ log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[0])
+ msg = "Unable to create archive for download, please report - %s" % sys.exc_info()[0]
+ messagetype = 'error'
if not error:
composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
seen = []
details: http://www.bx.psu.edu/hg/galaxy/rev/7120c4848270
changeset: 3549:7120c4848270
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Mon Mar 22 11:06:40 2010 -0400
description:
Make boxplot tool respect exit codes when determining job errors: makes e.g. 'Could not find/open font when opening font "arial", using internal non-scalable font' a non-fatal warning. Add version attribute to gnuplot requirement tag.
diffstat:
tools/plotting/boxplot.xml | 5 +++--
1 files changed, 3 insertions(+), 2 deletions(-)
diffs (22 lines):
diff -r ff39f033ce68 -r 7120c4848270 tools/plotting/boxplot.xml
--- a/tools/plotting/boxplot.xml Fri Mar 19 15:09:13 2010 -0400
+++ b/tools/plotting/boxplot.xml Mon Mar 22 11:06:40 2010 -0400
@@ -1,8 +1,8 @@
<tool id="qual_stats_boxplot" name="Boxplot" version="1.0.0">
<description>of quality statistics</description>
- <command>gnuplot < '$gnuplot_commands' > '$output_file'</command>
+ <command>gnuplot < '$gnuplot_commands' 2>&1 || echo "Error running gnuplot." >&2</command>
<requirements>
- <requirement type="binary">gnuplot</requirement>
+ <requirement type="binary" version="gnuplot 4.2 patchlevel 2">gnuplot</requirement>
</requirements>
<inputs>
<param name="input_file" type="data" format="tabular" label="Quality Statistics File"/>
@@ -30,6 +30,7 @@
</inputs>
<configfiles>
<configfile name="gnuplot_commands">
+set output '$output_file'
set term png size ${graph_size}
set boxwidth 0.8
set key right tmargin
details: http://www.bx.psu.edu/hg/galaxy/rev/ff39f033ce68
changeset: 3548:ff39f033ce68
user: rc
date: Fri Mar 19 15:09:13 2010 -0400
description:
lims: fixed the search bug on the requests grid
diffstat:
lib/galaxy/web/controllers/requests.py | 2 +-
lib/galaxy/web/controllers/requests_admin.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diffs (24 lines):
diff -r 49a7de2c4707 -r ff39f033ce68 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Thu Mar 18 22:41:15 2010 -0400
+++ b/lib/galaxy/web/controllers/requests.py Fri Mar 19 15:09:13 2010 -0400
@@ -107,7 +107,7 @@
link=( lambda item: iff( item.deleted, None, dict( operation="events", id=item.id ) ) ) )
]
columns.append( grids.MulticolFilterColumn( "Search",
- cols_to_filter=[ columns[0], columns[1], columns[6] ],
+ cols_to_filter=[ columns[0], columns[1] ],
key="free-text-search",
visible=False,
filterable="standard" ) )
diff -r 49a7de2c4707 -r ff39f033ce68 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Thu Mar 18 22:41:15 2010 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Fri Mar 19 15:09:13 2010 -0400
@@ -124,7 +124,7 @@
]
columns.append( grids.MulticolFilterColumn( "Search",
- cols_to_filter=[ columns[0], columns[1], columns[6] ],
+ cols_to_filter=[ columns[0], columns[1] ],
key="free-text-search",
visible=False,
filterable="standard" ) )
details: http://www.bx.psu.edu/hg/galaxy/rev/49a7de2c4707
changeset: 3547:49a7de2c4707
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Mar 18 22:41:15 2010 -0400
description:
Fix tag filtering bug for published item grids.
diffstat:
templates/display_base.mako | 2 +-
templates/grid_base.mako | 2 +-
templates/history/view.mako | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diffs (36 lines):
diff -r be396d94ca5a -r 49a7de2c4707 templates/display_base.mako
--- a/templates/display_base.mako Thu Mar 18 16:02:09 2010 -0400
+++ b/templates/display_base.mako Thu Mar 18 22:41:15 2010 -0400
@@ -43,7 +43,7 @@
<% controller_name = get_controller_name( item ) %>
var href = '${h.url_for ( controller='/' + controller_name , action='list_published')}';
href = href + "?f-tags=" + tag_name;
- if (tag_value != null && tag_value != "")
+ if (tag_value != undefined && tag_value != "")
href = href + ":" + tag_value;
self.location = href;
}
diff -r be396d94ca5a -r 49a7de2c4707 templates/grid_base.mako
--- a/templates/grid_base.mako Thu Mar 18 16:02:09 2010 -0400
+++ b/templates/grid_base.mako Thu Mar 18 22:41:15 2010 -0400
@@ -345,7 +345,7 @@
// Add tag to grid filter.
function add_tag_to_grid_filter(tag_name, tag_value) {
// Put tag name and value together.
- var tag = tag_name + (tag_value !== null && tag_value != "" ? ":" + tag_value : "");
+ var tag = tag_name + (tag_value !== undefined && tag_value != "" ? ":" + tag_value : "");
$('#more-search-options').show('fast');
add_filter_condition("tags", tag, true);
}
diff -r be396d94ca5a -r 49a7de2c4707 templates/history/view.mako
--- a/templates/history/view.mako Thu Mar 18 16:02:09 2010 -0400
+++ b/templates/history/view.mako Thu Mar 18 22:41:15 2010 -0400
@@ -258,7 +258,7 @@
{
var href = '${h.url_for( controller='/history', action='list_published')}';
href = href + "?f-tags=" + tag_name;
- if (tag_value != null && tag_value != "")
+ if (tag_value != undefined && tag_value != "")
href = href + ":" + tag_value;
self.location = href;
}
details: http://www.bx.psu.edu/hg/galaxy/rev/be396d94ca5a
changeset: 3546:be396d94ca5a
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Thu Mar 18 16:02:09 2010 -0400
description:
Eliminate the pop-up menus for folders and datasets if the library is deleted.
diffstat:
templates/library/common/browse_library.mako | 130 +++++++++++++-------------
1 files changed, 67 insertions(+), 63 deletions(-)
diffs (187 lines):
diff -r 2590120aed68 -r be396d94ca5a templates/library/common/browse_library.mako
--- a/templates/library/common/browse_library.mako Thu Mar 18 12:57:29 2010 -0400
+++ b/templates/library/common/browse_library.mako Thu Mar 18 16:02:09 2010 -0400
@@ -208,38 +208,40 @@
%if ldda.library_dataset.deleted:
</span>
%endif
- <a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a>
- <div popupmenu="dataset-${ldda.id}-popup">
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ( cntrller == 'library_admin' or can_modify ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
- %else:
- <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">View information</a>
- %endif
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ( ( cntrller == 'library_admin' or can_modify ) and not info_association ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='ldda', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add template</a>
- %endif
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ( ( cntrller == 'library_admin' or can_modify ) and info_association ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='ldda', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
- <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='ldda', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Delete template</a>
- %endif
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ( cntrller == 'library_admin' or can_manage ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_permissions', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
- %endif
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ( cntrller == 'library_admin' or can_modify ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), replace_id=trans.security.encode_id( library_dataset.id ), show_deleted=show_deleted )}">Upload a new version of this dataset</a>
- %endif
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ldda.has_data:
- <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='import_to_history', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
- <a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels )}">Download this dataset</a>
- %endif
- %if cntrller in [ 'library_admin', 'requests_admin' ]:
- %if not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.deleted:
- <a class="action-button" confirm="Click OK to delete dataset '${ldda.name}'." href="${h.url_for( controller='library_admin', action='delete_library_item', library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Delete this dataset</a>
- %elif not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.purged and ldda.library_dataset.deleted:
- <a class="action-button" href="${h.url_for( controller='library_admin', action='undelete_library_item', library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Undelete this dataset</a>
+ %if not library.deleted:
+ <a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a>
+ <div popupmenu="dataset-${ldda.id}-popup">
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ( cntrller == 'library_admin' or can_modify ):
+ <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
+ %else:
+ <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">View information</a>
%endif
- %endif
- </div>
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ( ( cntrller == 'library_admin' or can_modify ) and not info_association ):
+ <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='ldda', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add template</a>
+ %endif
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ( ( cntrller == 'library_admin' or can_modify ) and info_association ):
+ <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='ldda', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='ldda', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Delete template</a>
+ %endif
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ( cntrller == 'library_admin' or can_manage ):
+ <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_permissions', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
+ %endif
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ( cntrller == 'library_admin' or can_modify ):
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), replace_id=trans.security.encode_id( library_dataset.id ), show_deleted=show_deleted )}">Upload a new version of this dataset</a>
+ %endif
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ldda.has_data:
+ <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='import_to_history', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels )}">Download this dataset</a>
+ %endif
+ %if cntrller in [ 'library_admin', 'requests_admin' ]:
+ %if not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.deleted:
+ <a class="action-button" confirm="Click OK to delete dataset '${ldda.name}'." href="${h.url_for( controller='library_admin', action='delete_library_item', library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Delete this dataset</a>
+ %elif not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.purged and ldda.library_dataset.deleted:
+ <a class="action-button" href="${h.url_for( controller='library_admin', action='undelete_library_item', library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Undelete this dataset</a>
+ %endif
+ %endif
+ </div>
+ %endif
</td>
<td id="libraryItemInfo">${render_library_item_info( ldda )}</td>
<td>${uploaded_by}</td>
@@ -252,7 +254,7 @@
%endif
</%def>
-<%def name="render_folder( cntrller, folder, folder_pad, created_ldda_ids, library_id, hidden_folder_ids, tracked_datasets, show_deleted=False, parent=None, row_counter=None, root_folder=False )">
+<%def name="render_folder( cntrller, folder, folder_pad, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=False, parent=None, row_counter=None, root_folder=False )">
<%
from galaxy.web.controllers.library_common import active_folders, active_folders_and_lddas, activatable_folders_and_lddas, branch_deleted
@@ -300,40 +302,42 @@
%if folder.deleted:
<span class="libraryItem-error">
%endif
- <a href="${h.url_for( controller='library_common', action='folder_info', cntrller=cntrller, use_panels=use_panels, id=trans.security.encode_id( folder.id ), library_id=library_id, show_deleted=show_deleted )}">${folder.name}</a>
+ <a href="${h.url_for( controller='library_common', action='folder_info', cntrller=cntrller, use_panels=use_panels, id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), show_deleted=show_deleted )}">${folder.name}</a>
%if folder.description:
<i>- ${folder.description}</i>
%endif
%if folder.deleted:
</span>
%endif
- <a id="folder_img-${folder.id}-popup" class="popup-arrow" style="display: none;">▼</a>
- <div popupmenu="folder_img-${folder.id}-popup">
- %if not branch_deleted( folder ) and ( cntrller == 'library_admin' or can_add ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add datasets</a>
- <a class="action-button" href="${h.url_for( controller='library_common', action='create_folder', cntrller=cntrller, parent_id=trans.security.encode_id( folder.id ), library_id=library_id, use_panels=use_panels, show_deleted=show_deleted )}">Add sub-folder</a>
- %endif
- %if not branch_deleted( folder ) and ( cntrller == 'library_admin' or can_modify ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='folder_info', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=library_id, use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
- %endif
- %if not branch_deleted( folder ) and ( ( cntrller == 'library_admin' or can_modify ) and not info_association ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='folder', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add template</a>
- %endif
- %if not branch_deleted( folder ) and ( ( cntrller == 'library_admin' or can_modify ) and info_association ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='folder', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
- <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='folder', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Delete template</a>
- %endif
- %if not branch_deleted( folder ) and ( cntrller == 'library_admin' or can_manage ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='folder_permissions', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=library_id, use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
- %endif
- %if cntrller in [ 'library_admin', 'requests_admin' ]:
- %if not library.deleted and not folder.deleted:
- <a class="action-button" confirm="Click OK to delete the folder '${folder.name}.'" href="${h.url_for( controller='library_admin', action='delete_library_item', library_id=library_id, item_id=trans.security.encode_id( folder.id ), item_type='folder', show_deleted=show_deleted )}">Delete this folder</a>
- %elif not library.deleted and folder.deleted and not folder.purged:
- <a class="action-button" href="${h.url_for( controller='library_admin', action='undelete_library_item', library_id=library_id, item_id=trans.security.encode_id( folder.id ), item_type='folder', show_deleted=show_deleted )}">Undelete this folder</a>
+ %if not library.deleted:
+ <a id="folder_img-${folder.id}-popup" class="popup-arrow" style="display: none;">▼</a>
+ <div popupmenu="folder_img-${folder.id}-popup">
+ %if not branch_deleted( folder ) and ( cntrller == 'library_admin' or can_add ):
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add datasets</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='create_folder', cntrller=cntrller, parent_id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add sub-folder</a>
%endif
- %endif
- </div>
+ %if not branch_deleted( folder ) and ( cntrller == 'library_admin' or can_modify ):
+ <a class="action-button" href="${h.url_for( controller='library_common', action='folder_info', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
+ %endif
+ %if not branch_deleted( folder ) and ( ( cntrller == 'library_admin' or can_modify ) and not info_association ):
+ <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='folder', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add template</a>
+ %endif
+ %if not branch_deleted( folder ) and ( ( cntrller == 'library_admin' or can_modify ) and info_association ):
+ <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='folder', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='folder', library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Delete template</a>
+ %endif
+ %if not branch_deleted( folder ) and ( cntrller == 'library_admin' or can_manage ):
+ <a class="action-button" href="${h.url_for( controller='library_common', action='folder_permissions', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
+ %endif
+ %if cntrller in [ 'library_admin', 'requests_admin' ]:
+ %if not library.deleted and not folder.deleted:
+ <a class="action-button" confirm="Click OK to delete the folder '${folder.name}.'" href="${h.url_for( controller='library_admin', action='delete_library_item', library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( folder.id ), item_type='folder', show_deleted=show_deleted )}">Delete this folder</a>
+ %elif not library.deleted and folder.deleted and not folder.purged:
+ <a class="action-button" href="${h.url_for( controller='library_admin', action='undelete_library_item', library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( folder.id ), item_type='folder', show_deleted=show_deleted )}">Undelete this folder</a>
+ %endif
+ %endif
+ </div>
+ %endif
</div>
<td colspan="3"></td>
</tr>
@@ -345,7 +349,7 @@
%if cntrller == 'library':
<% sub_folders = active_folders( trans, folder ) %>
%for sub_folder in sub_folders:
- ${render_folder( cntrller, sub_folder, pad, created_ldda_ids, library_id, hidden_folder_ids, tracked_datasets, show_deleted=show_deleted, parent=my_row, row_counter=row_counter, root_folder=False )}
+ ${render_folder( cntrller, sub_folder, pad, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=show_deleted, parent=my_row, row_counter=row_counter, root_folder=False )}
%endfor
%for library_dataset in folder.active_library_datasets:
<%
@@ -365,7 +369,7 @@
sub_folders, lddas = active_folders_and_lddas( trans, folder )
%>
%for sub_folder in sub_folders:
- ${render_folder( cntrller, sub_folder, pad, created_ldda_ids, library_id, [], tracked_datasets, show_deleted=show_deleted, parent=my_row, row_counter=row_counter, root_folder=False )}
+ ${render_folder( cntrller, sub_folder, pad, created_ldda_ids, library, [], tracked_datasets, show_deleted=show_deleted, parent=my_row, row_counter=row_counter, root_folder=False )}
%endfor
%for ldda in lddas:
<%
@@ -470,12 +474,12 @@
</tr>
<% row_counter = RowCounter() %>
%if cntrller in [ 'library', 'requests' ]:
- ${self.render_folder( 'library', library.root_folder, 0, created_ldda_ids, trans.security.encode_id( library.id ), hidden_folder_ids, tracked_datasets, show_deleted=show_deleted, parent=None, row_counter=row_counter, root_folder=True )}
+ ${self.render_folder( 'library', library.root_folder, 0, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=show_deleted, parent=None, row_counter=row_counter, root_folder=True )}
%if not library.deleted:
${render_actions_on_multiple_items()}
%endif
%elif cntrller in [ 'library_admin', 'requests_admin' ]:
- ${self.render_folder( 'library_admin', library.root_folder, 0, created_ldda_ids, trans.security.encode_id( library.id ), [], tracked_datasets, show_deleted=show_deleted, parent=None, row_counter=row_counter, root_folder=True )}
+ ${self.render_folder( 'library_admin', library.root_folder, 0, created_ldda_ids, library, [], tracked_datasets, show_deleted=show_deleted, parent=None, row_counter=row_counter, root_folder=True )}
%if not library.deleted and not show_deleted:
${render_actions_on_multiple_items()}
%endif
Hi,
I'm trying to create a tool with several select parameters
whose choices are determined by the previous parameter(s). I have a toy
example with three parameters: Category, Food, and Prep method.
Depending on the chosen Category, only certain Foods are shown, which in
turn determines which Prep methods are available.
I'm trying to use the <options> tag with the "from_file" attribute. I
have one column per parameter and one row per valid combination.
My problem is that the front end can get into a state where certain
valid combinations are unselectable. In the example below, I can
never select "Fruit", "Kiwi", and "Raw". When I change the 1st select,
the values for the 2nd select are loaded, but the values for the 3rd
select cannot be updated. This is particularly problematic if there is
only one choice for the 2nd select, because I cannot get its "onchange"
handler to fire (so the 3rd select is always empty).
In my case, it would be nice if the first value of the 2nd select were
chosen as a default, and the 3rd select populated accordingly.
Any help would be appreciated. Perhaps I'm not using these features as
intended, but it seemed like a natural application.
Thanks,
Josh
My data file looks like:
Meat Chicken Fried
Meat Chicken Grilled
Meat Beef Grilled
Meat Beef Stir-fried
Veg Carrot Boiled
Veg Fennel Raw
Fruit Kiwi Raw
And my tool's <inputs> section is:
<inputs>
<param name="paramA" type="select" label="Category">
<options from_file="test_select_options.txt">
<column name="value" index="0"/>
<column name="name" index="0"/>
<filter type="unique_value" name="unique" column="0"/>
</options>
</param>
<param name="paramB" type="select" label="Food" >
<options from_file="test_select_options.txt">
<column name="value" index="1"/>
<column name="name" index="1"/>
<filter type="unique_value" name="unique" column="1"/>
<filter type="param_value" ref="paramA" name="A" column="0"/>
</options>
</param>
<param name="paramC" type="select" label="How prepared">
<options from_file="test_select_options.txt">
<column name="value" index="2"/>
<column name="name" index="2"/>
<filter type="param_value" ref="paramA" name="A" column="0"/>
<filter type="param_value" ref="paramB" name="B" column="1"/>
</options>
</param>
</inputs>
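To make it concrete, here is a minimal standalone sketch (plain Python, outside of Galaxy) that reads the same test_select_options.txt file and prints which Food and Prep values should be reachable for each Category. It only illustrates the combinations the cascading selects are expected to expose; nothing here uses the Galaxy filter code itself.
from collections import defaultdict
# Columns in test_select_options.txt: Category, Food, Prep (whitespace-separated).
combos = defaultdict(lambda: defaultdict(set))
with open("test_select_options.txt") as handle:
    for line in handle:
        fields = line.split()
        if len(fields) < 3:
            continue
        category, food, prep = fields[:3]
        combos[category][food].add(prep)
# Print every reachable Category -> Food -> Prep combination.
for category in sorted(combos):
    for food in sorted(combos[category]):
        print "%s -> %s -> %s" % (category, food, ", ".join(sorted(combos[category][food])))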
Hi Galaxy team,
Some time ago I added a binary data type to my Galaxy development machine. I simply defined the input for a tool as the standard binary data type. Recently I upgraded my Galaxy instance, and afterwards I could no longer upload binary data successfully :(.
So, I looked at the docs to properly define a new binary data type specific to that tool. I created a Python class, wrote a sniffer and listed the new stuff in datatypes_conf.xml. To my surprise that did not work... After some digging I found that for a binary datatype I also had to hack the data upload tool in two places (see below). The word "hack" is maybe a bit too much for simply copying the lines for the already supported Bam and Sff data types, but having to change code to support a new datatype sounds like a bad plan to me. Would it be possible to make support for a new binary data type completely handled by config files and maybe a new Python class, without hacking code, just as it is for non-binary data types?
Cheers,
Pi
-------------------
def check_bam( temp_name ):
return Bam().sniff( temp_name )
def check_sff( temp_name ):
return Sff().sniff( temp_name )
#
# Added mod for Thermo Finnigan RAW files START
#
def check_raw( temp_name ):
return RAW().sniff( temp_name )
#
# Added mod for Thermo Finnigan RAW files END
#
.....
# Is dataset content supported sniffable binary?
elif check_bam( dataset.path ):
ext = 'bam'
data_type = 'bam'
elif check_sff( dataset.path ):
ext = 'sff'
data_type = 'sff'
#
# Added mod for Thermo Finnigan RAW files START
#
elif check_raw( dataset.path ):
ext = 'raw'
data_type = 'raw'
#
# Added mod for Thermo Finnigan RAW files END
#
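For reference, a rough sketch of what such a RAW datatype class might look like; the base class and sniff() signature follow the existing Bam/Sff classes in lib/galaxy/datatypes/binary.py, and the magic-byte check below is only a placeholder, not the real Thermo Finnigan signature.
from galaxy.datatypes.binary import Binary
class RAW( Binary ):
    """Thermo Finnigan RAW mass-spectrometry data (sketch)."""
    file_ext = 'raw'
    def sniff( self, filename ):
        # Placeholder check: look for a vendor-specific marker in the first
        # few bytes. Replace MAGIC with the real signature for your files.
        MAGIC = '\x01\xa1'
        try:
            header = open( filename, 'rb' ).read( len( MAGIC ) )
            return header == MAGIC
        except Exception:
            return False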
Feature Request: customize name of history item in a collected primary dataset
by Lee, Joshua 23 Mar '10
Hi,
I have a tool that produces a variable number of outputs, which are
written to the tmp database path with the specially formatted name
"%s_%s_%s_%s_%s" % ( 'primary', output1.id, name, 'visible', file_type
). In lib/galaxy/tools/__init__.py, the history item's "designation" is
set to "name" and the item's "name" is set to output1.name. This
potentially creates many items with the same name, which must be
differentiated either through meta data or by examining the contents of
the file. I would like to request a means to customize the name of the
collected dataset.
A simple but hack-y method would be to append another token to the name
of the file. If this token exists, it would be used as the history
item's name, e.g. "primary_123_NAME123_visible_txt_dbkey_NEWNAME". This
would also require that the optional DBKEY token be validated for
non-blank values.
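To illustrate (the helper names here are hypothetical, just to show how the extra token could ride along in the file name):
def make_primary_filename( output_id, designation, file_type, dbkey='?', new_name=None ):
    # primary_<id>_<designation>_visible_<ext>_<dbkey>[_<new name>]
    parts = [ 'primary', str( output_id ), designation, 'visible', file_type, dbkey ]
    if new_name is not None:
        parts.append( new_name )
    return '_'.join( parts )
def parse_primary_filename( filename ):
    # Note: this naive split assumes the designation itself contains no underscores.
    parts = filename.split( '_' )
    fields = dict( zip( [ 'prefix', 'id', 'designation', 'visibility', 'file_type', 'dbkey' ], parts ) )
    if len( parts ) > 6:
        # The optional trailing token becomes the history item's name.
        fields[ 'name' ] = parts[ 6 ]
    return fields
# make_primary_filename( 123, 'NAME123', 'txt', dbkey='hg18', new_name='NEWNAME' )
# -> 'primary_123_NAME123_visible_txt_hg18_NEWNAME'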
Josh
23 Mar '10
Hi,
What is the order of the parameters/format for a csv file that I can
use to import a sequencing request sample?
Thanks,
Natalie
Hello Emeric,
Please send your questions to the galaxy mailing lists ( galaxy-bugs(a)bx.psu.edu, galaxy-dev(a)bx.psu.edu, galaxy-user(a)bx.psu.edu ) instead of private email addresses, as you will be much more likely to get answers in a timely fashion.
Given the context of your question, I assume you want to point another UCSC-like "get data" tool to your mirror. If that is the case, simply copy the ~/tools/data_source/ucsc_tablebrowser.xml file to a different file name (e.g., my_mirror.xml), and edit the following line in it to point to your mirror:
<inputs action="http://genome.ucsc.edu/cgi-bin/hgTables" check_values="false" method="get">
Next, add an entry for it in your tool_conf.xml file and restart your Galaxy instance.
That's all you should need.
On Mar 23, 2010, at 10:22 AM, Emeric Dubois wrote:
> Hello Greg
>
> I have installed a partial mirror of UCSC and Galaxy on my server.
> I want to use my UCSC mirror to get data and display it in my Galaxy installation.
> Is it possible to do that? How?
> Please, can you help me?
>
> Thanks
>
> Emeric
>
> --
> *****************************************
>
> Emeric Dubois
> Plateforme MGX
> Institut de Génomique Fonctionnelle
> UMR 5203 CNRS – U 661 INSERM – Universite de Montpellier
> 141 rue de la cardonille
> 34094 Montpellier Cedex 05, France
> Tel: 04 67 14 29 32
>
> Emeric.Dubois(a)igf.cnrs.fr
>
> *****************************************
>
>
> --
> This message has been checked by MailScanner
> for viruses or spam and nothing
> suspicious was found.
>
Greg Von Kuster
Galaxy Development Team
greg(a)bx.psu.edu
Hi,
I know that I have had the data transfer functionality working in the
past, but it's currently failing, ending with the following:
127.0.0.1 - - [23/Mar/2010:08:37:50 -0400] "GET
/user/create?username=nsfox1%40example.org&confirm=testuser&messagetype=error&msg=User+with+that+email+already+exists&password=testuser&email=nsfox1%40example.org HTTP/1.1" 200 - "-"
"Python-urllib/2.5"
ERROR:root:Traceback (most recent call last):
File "scripts/galaxy_messaging/server/data_transfer.py", line 166,
in add_to_library
raise DataTransferException("The "+self.datatx_email+" user could
not login to Galaxy")
DataTransferException: 'The nsfox1(a)example.org user could not login to Galaxy'
ERROR:root:FATAL ERROR.The nsfox1(a)example.org user could not login to Galaxy
DEBUG:root:[[u'data/ATC_AA', u'Error'], [u'data/ATC_AB', u'Error'],
[u'data/CRA_AC', u'Error'], [u'data/CRA_AE', u'In progress']]
DEBUG:root:######################
[u'data/CRA_AE', u'Error.The nsfox1(a)example.org user could not login
to Galaxy']
I re-downloaded the Galaxy instance again but I'm still having this
issue. The user I'm using here is an admin user with permission to add
to the data library, and when I log in to Galaxy manually it works.
Any suggestions as to what might be wrong would be great.
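In case it helps to narrow this down, something like the quick urllib snippet below could be used to test the login outside of data_transfer.py (the log shows the script uses Python-urllib; the /user/login form field names here are guesses and may need adjusting for your instance):
import urllib
import urllib2
base_url = 'http://localhost:8080'   # adjust to the Galaxy instance being used
params = urllib.urlencode( { 'email': 'nsfox1@example.org',
                             'password': 'testuser',
                             'login_button': 'Login' } )
response = urllib2.urlopen( '%s/user/login' % base_url, params )
# A 200 response does not necessarily mean the login worked, so inspect the
# returned page for an error message.
print response.info()
print response.read()[:500]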
Thanks,
Natalie
Hi,
the following path:
database/reports/compiled_templates
can be added to the ".hgignore" file to keep non-relevant compiled Python files out of "hg st".
-gordon
Hello,
A small feature requested by our users:
The ability to set the axis ranges in the plotting tool (xy_plot.xml).
The attached patch adds this ability as an optional parameter in the tool.
Note1: if the input data goes beyond the user's suggested range, the range is extended.
Note2: the patch contains CR/LF because it appears the 'xy_plot.xml' tool has Windows-style newlines.
-gordon
Hi,
I'm getting an error "AttributeError: 'StateColumn' object has no
attribute 'get_filter'" if I search anything on the sequencing request
page. Searching specifically within the Name search or Description
search does work though. Is this a known bug or did I possibly miss
setting up something in my Galaxy Instance?
Also, that search, which is currently not working for me, only
searches the information on the sequencing requests page, right? I'd like
to request being able to search for something within a request itself,
or to have some way of quickly seeing the sample information for a
request without browsing to each one.
Any information is appreciated.
Thanks,
Natalie
18 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/2590120aed68
changeset: 3545:2590120aed68
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Thu Mar 18 12:57:29 2010 -0400
description:
Style and functional fixes for embedded items.
diffstat:
static/june_2007_style/blue/embed_item.css | 4 ++--
static/june_2007_style/embed_item.css.tmpl | 4 ++--
templates/display_base.mako | 18 ++++++++++++++++++
templates/history/display.mako | 7 +------
templates/history/view.mako | 2 +-
templates/page/display.mako | 10 +++++++++-
templates/workflow/display.mako | 8 +-------
7 files changed, 34 insertions(+), 19 deletions(-)
diffs (147 lines):
diff -r cb9b4a967ff2 -r 2590120aed68 static/june_2007_style/blue/embed_item.css
--- a/static/june_2007_style/blue/embed_item.css Wed Mar 17 20:59:47 2010 -0400
+++ b/static/june_2007_style/blue/embed_item.css Thu Mar 18 12:57:29 2010 -0400
@@ -4,8 +4,8 @@
.embedded-item.history p{background:#C1C9E5 no-repeat 2px 2px;margin-top:0;margin-bottom:0;}
.embedded-item.dataset{background-color:#CFC}
.embedded-item.dataset p{background:#CFC no-repeat 2px 2px;margin-top:0;margin-bottom:0;}
-.embedded-item.workflow{background-color:#EBD9B2}
-.embedded-item.workflow p{background:#EBD9B2 no-repeat 2px 2px;margin-top:0;margin-bottom:0;}
+.embedded-item.workflow{background-color:#FBDDB3}
+.embedded-item.workflow p{background:#FBDDB3 no-repeat 2px 2px;margin-top:0;margin-bottom:0;}
.embedded-item.placeholder{}
.embedded-item .item-content{max-height:25em;overflow:auto;display:none;}
.embedded-item .title{vertical-align:top;text-align:center;font-weight:bold;}
diff -r cb9b4a967ff2 -r 2590120aed68 static/june_2007_style/embed_item.css.tmpl
--- a/static/june_2007_style/embed_item.css.tmpl Wed Mar 17 20:59:47 2010 -0400
+++ b/static/june_2007_style/embed_item.css.tmpl Thu Mar 18 12:57:29 2010 -0400
@@ -32,11 +32,11 @@
}
.embedded-item.workflow {
- background-color:#EBD9B2
+ background-color:#FBDDB3
}
.embedded-item.workflow p {
- background:#EBD9B2 no-repeat 2px 2px;
+ background:#FBDDB3 no-repeat 2px 2px;
margin-top:0;
margin-bottom:0;
}
diff -r cb9b4a967ff2 -r 2590120aed68 templates/display_base.mako
--- a/templates/display_base.mako Wed Mar 17 20:59:47 2010 -0400
+++ b/templates/display_base.mako Thu Mar 18 12:57:29 2010 -0400
@@ -92,6 +92,24 @@
border: 2px solid #DDDDDD;
border-top: 4px solid #DDDDDD;
}
+
+ ## Make sure that history items and workflow steps do not get too long.
+ .historyItemContainer, .toolForm {
+ max-width: 500px;
+ }
+
+ ## Space out tool forms in workflows.
+ div.toolForm{
+ margin-top: 10px;
+ margin-bottom: 10px;
+ }
+
+ ## Add border to history item container.
+ .historyItemContainer {
+ padding-right: 3px;
+ border-right-style: solid;
+ border-right-color: #66AA66;
+ }
</style>
</%def>
diff -r cb9b4a967ff2 -r 2590120aed68 templates/history/display.mako
--- a/templates/history/display.mako Wed Mar 17 20:59:47 2010 -0400
+++ b/templates/history/display.mako Thu Mar 18 12:57:29 2010 -0400
@@ -208,11 +208,6 @@
${parent.stylesheets()}
${h.css( "history" )}
<style type="text/css">
- .visible-right-border {
- padding-right: 3px;
- border-right-style: solid;
- border-right-color: #66AA66;
- }
.historyItemBody {
display: none;
}
@@ -252,7 +247,7 @@
%else:
## Render requested datasets, ordered from newest to oldest, including annotations.
<table class="annotated-item">
- <tr><th>Dataset</th><th class="annotation">Description/Notes</th></tr>
+ <tr><th>Dataset</th><th class="annotation">Annotation</th></tr>
%for data in datasets:
<tr>
%if data.visible:
diff -r cb9b4a967ff2 -r 2590120aed68 templates/history/view.mako
--- a/templates/history/view.mako Wed Mar 17 20:59:47 2010 -0400
+++ b/templates/history/view.mako Thu Mar 18 12:57:29 2010 -0400
@@ -269,7 +269,7 @@
${parent.stylesheets()}
${h.css( "history", "autocomplete_tagging" )}
<style type="text/css">
- .visible-right-border {
+ .historyItemContainer {
padding-right: 3px;
border-right-style: solid;
border-right-color: #66AA66;
diff -r cb9b4a967ff2 -r 2590120aed68 templates/page/display.mako
--- a/templates/page/display.mako Wed Mar 17 20:59:47 2010 -0400
+++ b/templates/page/display.mako Thu Mar 18 12:57:29 2010 -0400
@@ -47,6 +47,15 @@
container.find(".item-content").html(item_content).show("fast");
container.find(".toggle-expand").hide();
container.find(".toggle-contract").show();
+
+ // Init needed for history items.
+ setupHistoryItem( container.find("div.historyItemWrapper") );
+ container.find( "div.historyItemBody:visible" ).each( function() {
+ if ( $.browser.mozilla ) {
+ $(this).find( "pre.peek" ).css( "overflow", "hidden" );
+ }
+ $(this).hide();
+ });
}
});
else
@@ -218,7 +227,6 @@
${h.css( "base", "history", "autocomplete_tagging" )}
<style type="text/css">
.toggle-contract { display: none; }
- .item-content { overflow: auto; }
.embedded-item h4 {
margin: 0px;
}
diff -r cb9b4a967ff2 -r 2590120aed68 templates/workflow/display.mako
--- a/templates/workflow/display.mako Wed Mar 17 20:59:47 2010 -0400
+++ b/templates/workflow/display.mako Thu Mar 18 12:57:29 2010 -0400
@@ -8,12 +8,6 @@
<%def name="stylesheets()">
${parent.stylesheets()}
${h.css( "workflow" )}
- <style type="text/css">
- div.toolForm{
- margin-top: 10px;
- margin-bottom: 10px;
- }
- </style>
</%def>
<%def name="do_inputs( inputs, values, prefix, step, other_values=None )">
@@ -87,7 +81,7 @@
trans.get_history( create=True )
%>
<table class="annotated-item">
- <tr><th>Step</th><th class="annotation">Description/Notes</th></tr>
+ <tr><th>Step</th><th class="annotation">Annotation</th></tr>
%for i, step in enumerate( steps ):
<tr><td>
%if step.type == 'tool' or step.type is None:
18 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/cb9b4a967ff2
changeset: 3544:cb9b4a967ff2
user: rc
date: Wed Mar 17 20:59:47 2010 -0400
description:
lims: workflow field now appears as a link to workflow run page
diffstat:
templates/admin/requests/show_request.mako | 7 ++++++-
1 files changed, 6 insertions(+), 1 deletions(-)
diffs (17 lines):
diff -r 1d98e3705f35 -r cb9b4a967ff2 templates/admin/requests/show_request.mako
--- a/templates/admin/requests/show_request.mako Wed Mar 17 17:10:24 2010 -0400
+++ b/templates/admin/requests/show_request.mako Wed Mar 17 20:59:47 2010 -0400
@@ -311,7 +311,12 @@
%for field_index, field in fields_dict.items():
<td>
%if sample_values[field_index]:
- ${sample_values[field_index]}
+ %if field['type'] == 'WorkflowField':
+ <% workflow = trans.sa_session.query( trans.app.model.StoredWorkflow ).get( int(sample_values[field_index]) ) %>
+ <a href="${h.url_for( controller='workflow', action='run', id=trans.security.encode_id(workflow.id) )}">${workflow.name}</a>
+ %else:
+ ${sample_values[field_index]}
+ %endif
%else:
<i>None</i>
%endif
18 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/1d98e3705f35
changeset: 3543:1d98e3705f35
user: rc
date: Wed Mar 17 17:10:24 2010 -0400
description:
lims: added workflow field type to form_builder
diffstat:
lib/galaxy/model/__init__.py | 4 ++++
lib/galaxy/model/mapping.py | 4 +++-
lib/galaxy/web/form_builder.py | 30 +++++++++++++++++++++++++++++-
templates/admin/requests/show_request.mako | 10 ++++++++++
4 files changed, 46 insertions(+), 2 deletions(-)
diffs (95 lines):
diff -r 032aae80bbb0 -r 1d98e3705f35 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Wed Mar 17 16:40:36 2010 -0400
+++ b/lib/galaxy/model/__init__.py Wed Mar 17 17:10:24 2010 -0400
@@ -1319,6 +1319,10 @@
field_widget.user = user
field_widget.value = value
field_widget.params = params
+ elif field['type'] == 'WorkflowField':
+ field_widget.user = user
+ field_widget.value = value
+ field_widget.params = params
elif field[ 'type' ] == 'SelectField':
for option in field[ 'selectlist' ]:
if option == value:
diff -r 032aae80bbb0 -r 1d98e3705f35 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Wed Mar 17 16:40:36 2010 -0400
+++ b/lib/galaxy/model/mapping.py Wed Mar 17 17:10:24 2010 -0400
@@ -1304,7 +1304,9 @@
assign_mapper( context, StoredWorkflow, StoredWorkflow.table,
- properties=dict( user=relation( User ),
+ properties=dict( user=relation( User,
+ primaryjoin=( User.table.c.id == StoredWorkflow.table.c.user_id ),
+ backref='stored_workflows' ),
workflows=relation( Workflow, backref='stored_workflow',
cascade="all, delete-orphan",
primaryjoin=( StoredWorkflow.table.c.id == Workflow.table.c.stored_workflow_id ) ),
diff -r 032aae80bbb0 -r 1d98e3705f35 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py Wed Mar 17 16:40:36 2010 -0400
+++ b/lib/galaxy/web/form_builder.py Wed Mar 17 17:10:24 2010 -0400
@@ -12,7 +12,7 @@
raise TypeError( "Abstract Method" )
@staticmethod
def form_field_types():
- return ['TextField', 'TextArea', 'SelectField', 'CheckboxField', 'AddressField']
+ return ['TextField', 'TextArea', 'SelectField', 'CheckboxField', 'AddressField', 'WorkflowField']
class TextField(BaseField):
"""
@@ -433,6 +433,34 @@
else:
self.select_address.add_option('Add a new address', 'new')
return self.select_address.get_html()+address_html
+
+
+class WorkflowField(BaseField):
+ def __init__(self, name, user=None, value=None, params=None):
+ self.name = name
+ self.user = user
+ self.value = value
+ self.select_workflow = None
+ self.params = params
+ def get_html(self):
+ from galaxy import util
+ add_ids = ['none']
+ if self.user:
+ for a in self.user.stored_workflows:
+ add_ids.append(str(a.id))
+ self.select_workflow = SelectField(self.name)
+ if self.value == 'none':
+ self.select_workflow.add_option('Select one', 'none', selected=True)
+ else:
+ self.select_workflow.add_option('Select one', 'none')
+ if self.user:
+ for a in self.user.stored_workflows:
+ if not a.deleted:
+ if self.value == str(a.id):
+ self.select_workflow.add_option(a.name, str(a.id), selected=True)
+ else:
+ self.select_workflow.add_option(a.name, str(a.id))
+ return self.select_workflow.get_html()
def get_suite():
diff -r 032aae80bbb0 -r 1d98e3705f35 templates/admin/requests/show_request.mako
--- a/templates/admin/requests/show_request.mako Wed Mar 17 16:40:36 2010 -0400
+++ b/templates/admin/requests/show_request.mako Wed Mar 17 17:10:24 2010 -0400
@@ -284,6 +284,16 @@
%endif
%endfor
</select>
+ %elif field['type'] == 'WorkflowField':
+ <select name="sample_${index}_field_${field_index}">
+ %for option_index, option in enumerate(request.user.stored_workflows):
+ %if option == sample_values[field_index]:
+ <option value="${option.id}" selected>${option.name}</option>
+ %else:
+ <option value="${option.id}">${option.name}</option>
+ %endif
+ %endfor
+ </select>
%elif field['type'] == 'CheckboxField':
<input type="checkbox" name="sample_${index}_field_${field_index}" value="Yes"/>
%endif
18 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/032aae80bbb0
changeset: 3542:032aae80bbb0
user: Kelly Vincent <kpvincent(a)bx.psu.edu>
date: Wed Mar 17 16:40:36 2010 -0400
description:
First pass at PerM. Also made cosmetic changes to BWA.
diffstat:
buildbot_setup.sh | 2 +
test-data/perm_in1.fastqsanger | 76 ++++
test-data/perm_in2.fastqsanger | 76 ++++
test-data/perm_in3.fastqsanger | 396 +++++++++++++++++++++++
test-data/perm_in4.fastqcssanger | 576 ++++++++++++++++++++++++++++++++++
test-data/perm_in5.fastqcssanger | 48 ++
test-data/perm_in6.fastqcssanger | 48 ++
test-data/perm_out1.sam | 33 +
test-data/perm_out2.sam | 98 +++++
test-data/perm_out3.fastqsanger | 4 +
test-data/perm_out4.sam | 53 +++
test-data/perm_out5.sam | 9 +
tool-data/perm_base_index.loc.sample | 27 +
tool-data/perm_color_index.loc.sample | 27 +
tool_conf.xml.sample | 3 +-
tools/sr_mapping/PerM.xml | 368 +++++++++++++++++++++
tools/sr_mapping/bwa_wrapper.xml | 5 +-
17 files changed, 1845 insertions(+), 4 deletions(-)
diffs (1949 lines):
diff -r 137d93848139 -r 032aae80bbb0 buildbot_setup.sh
--- a/buildbot_setup.sh Tue Mar 16 18:54:23 2010 -0400
+++ b/buildbot_setup.sh Wed Mar 17 16:40:36 2010 -0400
@@ -42,6 +42,8 @@
/galaxy/data/location/maf_index.loc
/galaxy/data/location/maf_pairwise.loc
/galaxy/data/location/microbes/microbial_data.loc
+/galaxy/data/location/perm_base_index.loc
+/galaxy/data/location/perm_color_index.loc
/galaxy/data/location/phastOdds.loc
/galaxy/data/location/quality_scores.loc
/galaxy/data/location/regions.loc
diff -r 137d93848139 -r 032aae80bbb0 test-data/perm_in1.fastqsanger
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/perm_in1.fastqsanger Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,76 @@
+@HWI-EAS91_1_30788AAXX:1:1:1761:343/1
+TTTATCGCTTCCATGACGCAGAAGTTAACACTTTCGGATATTTCTGATGA
++/1
+IIIIIII""IIIIIIIIIII?I0IIIIHIIIGIIIII0II?I""IIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1578:331/1
+TGTCAAAAACTGACGCGTTGGATGAGGAGAAGTGGCTTAATATGCTTGGC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1647:512/1
+TACTGAACAATCCGTACGTTTCCAGACCGCTTTGGCCTCTATTAAGCTCA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1570:620/1
+GAGTAACAAAGTaaaGTTTGGAccGTTTTTGTCTCGTGCTCGTCGCTGCG
++/1
+IIIIIII""IIIIIIIIIIIIIBIIIIIIIIIIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1599:330/1
+AGAAGAAAACGTGCGTCAAAAATTACGTGCaGAAGGAGTGATGTAATGTC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIII<III@II"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1652:270/1
+AGCGTAAAGGCGCTCGTCTTTGGTATGTAGGacTTTGCATTGTTTAATTG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIII6II"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1746:351/1
+CTCATCGTCACGTTTATGGTGAACAGTGGATTAAGTTCATGAAGGATGGT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1582:633/1
+CCGCTTCCTCCTGAGACTGAGCTTTCTCGCCAAATGACGACTTCTACCAC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1598:534/1
+GCGCTCTAATCTCTGGGCATCTGGCTATGATGTTGATGGAACTGACCAAA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1572:324/1
+AAGGTGCTTaaaTTCgtGGGTCCTGAGCTGGCGACCCTGTTTTGTATGGC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIII+7I05I"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1749:466/1
+TTGCAGTGGAATAGTCAGGTTAAATTTAATGTGACCGTTTATCGCAATCT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1629:446/1
+AGGTTATAACGCCGAAGCGGTAAAAATTTTAATTTTTGCCGCTGAGGGGT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1763:587/1
+AAGCTACATCGTCAACGTTATATTTTGATAGTTTGACGGTTAATGCTGGT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1616:363/1
+TGTTTATCCTTTGAATGGTCGCCATGATGGTGGTTATTATACCGTCAAGG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIDIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1768:715/1
+ACCCTGATGAGGCCGCCCCTAGTTTTGTTTCTGGTGCTATGGCTAAAGCT
++/1
+IIIIIII""IIIIIIIIIIIIIDIIIIIIIIIIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1572:270/1
+TGAGATGCTTGCTTATCAACAGAAGGAGTCTACTGCTCGCGTTGCGTCTA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1785:1272/1
+TATTTTTCATGGTATTGATAAAGCTGTTGCCGATACTTGGAACAATTTCT
++/1
+III""""""IIIIIIIII""FI"IIII""II+ICI"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1673:514/1
+ATTATTTTGACTTTGAGCGTATCGAGGCTCTTAAACCTGCTATTGAGGCT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIII1"IIIIIII000IIII
+@HWI-EAS91_1_30788AAXX:1:1:1634:330/1
+GGATATTCGCGATGAGTATAATTACCCCAAAAAGAAAGGTATTAAGGATG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIII8II"IIIIIII000IIII
diff -r 137d93848139 -r 032aae80bbb0 test-data/perm_in2.fastqsanger
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/perm_in2.fastqsanger Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,76 @@
+@HWI-EAS91_1_30788AAXX:1:1:1761:343/2
+TTGATAAAGCAGGAATTACTACTGCTTGTTTACGAATTAAATCGAAGTGG
++/2
+IIIIIII""IIIIIII0II?I""IIIIIIIIIIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1578:331/2
+AAGGACTGGTTTAGATATGAGTCACATTTTGTTCATGGTAGAGATTCTCT
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1647:512/2
+TTCAGGCTTCTGCCGTTTTGGATTTAACCGAAGATGATTTCGATTTTCTG
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1570:620/2
+CTTGCGTTTATGGTACGCTGGACTTTGTGGGATACCCTCGCTTTCCTGCT
++/2
+IIIIIII""IIIIIIIIIIIIIBIIIIIIIIIIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1599:330/2
+TTCTGGCGCTCGCCCTGGTCGTCCGCAGCCGTTGCGAGGTACTAAAGGCA
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIII<III@II?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1652:270/2
+GCTTCGGCCCCTTACTTGAGGATAAATTATGTCTAATATTCAAACTGGCG
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIII6II?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1746:351/2
+CCACTCCTCTCCCGACTGTGTGTGTGTGTGTTTATATTGACCATGCCGCT
++/2
+IIIIIII""IIIIIIIIIIIIIIIIII?I0IIIIHIIIGIIIIIIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1582:633/2
+TCTATTGACATTATGGGTCTGCAAGCTGCTTTTTTTTTTTTTTTTTTTTT
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1598:534/2
+TTAGGCCAGTTTTCTGGTCGTGTTCAACAGACCTATAAACATTCTGTGCC
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1572:324/2
+GCCGCCGCGTGAAATTTCTATGAAGGATGTTTTCCGTTCTGGTGATTCGT
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIII+7I05I?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1749:466/2
+GCCGACCACTCGCGATTCAATCATGACTTCGTGATAAAAGATTGAGTGTG
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1629:446/2
+CAAGCGAAGCGCGGTAGGTTTTCTGCTTAGGAGTTTAATCATGTTTCAGA
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1763:587/2
+CATTGCATTCAGATGGATACATCTGTCAACGCCGCTAATCAGGTTGTTTC
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1616:363/2
+GTGACTATTGACGTCCTTCCCCGTACGCCGGGCAATAAtGTTTATGTTGG
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIDIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1768:715/2
+AGGACTTCTTGAAGGTACGTTGCAGGCTGGCACTTCTGCCGTTTCTGATA
++/2
+IIIIIII""IIIIIIIIIIIIIDIIIIIIIIIIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1572:270/2
+ACACCAATCTTTCCAAGCAACAGCAGGTTTCCGAGATTATGCGCCAAATG
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1785:1272/2
+GACGGTAAAGCTGATGGTATTGGCTCTAATTTGTCTAGGAAATAACCGTC
++/2
+III""""""IIIIIIIII""FI"IIII""II+ICI?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1673:514/2
+GCATTTCTACTCTTTCTCAATCCCCAATGCTTGGCTTCCATAAGCAGATG
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIII1?I0IIIIHIIIGIII
+@HWI-EAS91_1_30788AAXX:1:1:1634:330/2
+TCAAGATTGCTGGAGGCCTCCACTATGAAATCGCGTAGAGGCTTTaCTAT
++/2
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIII8II?I0IIIIHIIIGIII
diff -r 137d93848139 -r 032aae80bbb0 test-data/perm_in3.fastqsanger
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/perm_in3.fastqsanger Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,396 @@
+@HWI-EAS91_1_30788AAXX:1:1:1513:715/1
+GTTTTTTGGGCATAGATGTTTAGTTGTGGTAGTCAG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIDI?II-+I
+@HWI-EAS91_1_30788AAXX:1:1:1698:516/1
+GTTGTTAGGGAGAGGAGTTGAACCTCTGAGTGTAAA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIII5IIIII9I
+@HWI-EAS91_1_30788AAXX:1:1:1491:637/1
+GCTAGCAGGATGGATCCGGCAATTGGGGCTTCTACA
++/1
+IIIIIII""IIIIIIIIIIIIFIIIIIIIIIIIABD
+@HWI-EAS91_1_30788AAXX:1:1:1711:249/1
+GGAAGTAGGGGCCTGCGTTCAGGCGTTCTGTTTGGT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1634:211/1
+GAAGCAGGGGCTTGATACTGACACTTCGTCGACGTA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIII9IIDF
+@HWI-EAS91_1_30788AAXX:1:1:1218:141/1
+GTTAAATATTGGGAGTGGGGGGGGGGGGGAGTTTTGT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIII1IIII+I
+@HWI-EAS91_1_30788AAXX:1:1:1398:854/1
+GTGAAGAGGAGGGGATTTATTAGTACGGGAAGGGTG
++/1
+IIIIIII""IIIIIBIIIIIIIIIIIIIIA=IIIII
+@HWI-EAS91_1_30788AAXX:1:1:1310:991/1
+GAATAGTGGTAGTATTATTCCTTCTAGGCATAGGAG
++/1
+IIIIIII""IIIIIIIIII4IIIIIIDII:IEI2:I
+@HWI-EAS91_1_30788AAXX:1:1:1716:413/1
+GATCCAAGGCTTTATCAACACCTATTCTGATTCTTC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1630:59/1
+GGAGCGGGGGGTTGGTAAGGTTGGGGTCGAGTATGA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIII;IIHIIF
+@HWI-EAS91_1_30788AAXX:1:1:1601:805/1
+GAAAACAGGAAAACAATCCAGTCACTTACCCTATGC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIII@III
+@HWI-EAS91_1_30788AAXX:1:1:1663:724/1
+GTTTGCCGGCGCCATCCTACGCTCCATTCCCAACAA
++/1
+IIIIIII""IIII8IIIIIIHIIII6IIIII1CI=3
+@HWI-EAS91_1_30788AAXX:1:1:1454:975/1
+GCTAGGCGGGAGTGGTAAAAGGCTCAGAAGAAGCCA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIEIG;IIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1461:255/1
+GTACACCGGCGCCTGAGCCCTACTAATAACTCTCAT
++/1
+IIIIIII""IIIIII9IIIIIIEI(II9.I4III,I
+@HWI-EAS91_1_30788AAXX:1:1:1775:764/1
+GCATCCCGGTAGATCTAATTTTCTAAATCTGTCAAC
++/1
+IIIIIII""III@IIII+IIIIII8H8IIIIIIICI
+@HWI-EAS91_1_30788AAXX:1:1:1269:520/1
+GGAGTATGGAATAAGTGATTTTAGATCGGTTTGTCG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1303:1162/1
+GAGCAAGGGCAGGAGGAGGAGTCCTAGGATGTCTTT
++/1
+IIIIIII""IIIIFII4*IGIAI(IAII49',3I6I
+@HWI-EAS91_1_30788AAXX:1:1:1090:409/1
+GTTTGTTGGGAATGGAGCGTAGGATGGCGTAGGCAA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIII:IIA8I
+@HWI-EAS91_1_30788AAXX:1:1:1336:1000/1
+GGTAAATGGGAAATATTAAGTTTCTGTTTCTAGATC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIII9II
+@HWI-EAS91_1_30788AAXX:1:1:1199:1376/1
+GTTTTCTGGAAAACCTTCACCTATTTATGGGGGTTT
++/1
+IIIIIII""IIIIIIIIIIIII;III3IIG&:/III
+@HWI-EAS91_1_30788AAXX:1:1:1598:1148/1
+GATCAATGGTTTGGATCAATAAGTGATTATATATTT
++/1
+IIIIIII""IIIIIDIIIIII?IIICII=IHIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1723:1459/1
+GAAACCCGGACGTTTGGATGGGCCCGGAGCGAGGAT
++/1
+IIIIIII""IIIIIIIIDIIIIIIIII9HII-II=I
+@HWI-EAS91_1_30788AAXX:1:1:1442:1346/1
+TATCAAGGGGCTGCTTCGAATCCGAAGTGGTGGCTG
++/1
+IIIIIII""IIIIIDIIIII1I(I4II<?<-II*,&
+@HWI-EAS91_1_30788AAXX:1:1:850:117/1
+GTATGACGGTAAAGAAAATTATTACGAATGCATGGG
++/1
+IIIIIII""IIIIIIEIEIIIIIIIIEBIDD9I;:?
+@HWI-EAS91_1_30788AAXX:1:1:795:325/1
+GGGTACTGGGAAGTGGAATGGTGTGAGTCCAAGTTT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIII65IIII
+@HWI-EAS91_1_30788AAXX:1:1:873:453/1
+GAGTAGGGGGATTGCTAGAGTTACTTCGTATGAGAT
++/1
+IIIIIII""IIIIIIIIIIIIIIIII@IIIIIII=I
+@HWI-EAS91_1_30788AAXX:1:1:1285:1334/1
+GATATGGGGTCTGGAATAGGATTGCGCTGTTATCCC
++/1
+IIIIIII""IIIIIBIIGIIIIIIIIII8IIIII8I
+@HWI-EAS91_1_30788AAXX:1:1:905:406/1
+GAAAAGTGGTAGGCTATATGCAACTTCGCAAAGGAC
++/1
+IIIIIII""IIIIIIIIBIIIIBIIII=I@96D2*I
+@HWI-EAS91_1_30788AAXX:1:1:1774:595/1
+TTTAGGTGGGATGTGGGGATCATGTAGGAGTCAAAG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIIEI
+@HWI-EAS91_1_30788AAXX:1:1:1694:931/1
+TTAAATTGGCATTAGAATTGAGTAGTTTTTAGGTAA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIH,III
+@HWI-EAS91_1_30788AAXX:1:1:1092:1151/1
+GAGAGTTGGCTGATTTAGGCGCCCAGGGATAGCGTC
++/1
+IIIIIII""IIIIIIIIIII5III>IIIII6@III5
+@HWI-EAS91_1_30788AAXX:1:1:803:557/1
+GGTGTGTGGGCGCTTCATGGCCTGATTCAATTAAGC
++/1
+IIIIIII""IIIIIIIIIIIIIII<IIII?III406
+@HWI-EAS91_1_30788AAXX:1:1:1315:1200/1
+TAGTTTTGGGGGGTTTTCTTCAAAACCTTCACCTAT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIII;I
+@HWI-EAS91_1_30788AAXX:1:1:765:358/1
+GGTGTTTGGATGAGGACGGCTACGATTACTAGGGCT
++/1
+IIIIIII""IIIIIIIIIIIIDIIAII4@I0D='1I
+@HWI-EAS91_1_30788AAXX:1:1:1425:1167/1
+GACCTTAGGTGTAGGACATGGTGTAATTCGGTAGCA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIII(%II+IG
+@HWI-EAS91_1_30788AAXX:1:1:1775:635/1
+AATAGCCGGATAGCTAGAAGTAAGGTTGAGTTAAAG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIIBI
+@HWI-EAS91_1_30788AAXX:1:1:1433:749/1
+GACTATGGGTGCTATTATACATGCTAGTCATAGGAA
++/1
+IIIIIII""IIIIIIIIIIIFIIIIICIIIIB/I9E
+@HWI-EAS91_1_30788AAXX:1:1:798:247/1
+GGAGATTGGAAAGTAGTATGCTTAGGGTAAGGGTGA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1249:1238/1
+TATTGTTGGGGTAGCGAAAGAGGCGAATAGATTTTC
++/1
+IIIIIII""IIIIIIIIIIIBIIIICDIII7IIII@
+@HWI-EAS91_1_30788AAXX:1:1:1043:1104/1
+GGAAGGGGGAATAGGAGGGCAATTTCTAAGTCGAAT
++/1
+II&IIII""IIIIIIDIIIII;IFI<I0II.IIIC,
+@HWI-EAS91_1_30788AAXX:1:1:1655:1058/1
+AAAAATCGGCTTTACAATTATATTCGTAGGGGTAAA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIFIII6AIIIGI
+@HWI-EAS91_1_30788AAXX:1:1:1764:1403/1
+TGATGAAGGGTTTGAGGGGGCTGGTAGGTCAATAAA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIBIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1632:34/1
+AGGATAGGGGCTATCCGTTGGTCTTAGGAACCAAAA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIFIIIH:6I
+@HWI-EAS91_1_30788AAXX:1:1:1367:1208/1
+GACACGTGGCACTTCCAATCATACTATCCAGCATCC
++/1
+IIIIIII""IIIIIIII:IIII8IIEIIII-IAIII
+@HWI-EAS91_1_30788AAXX:1:1:1778:108/1
+AATACACGGACCATATCAACAGCATTAAACCTTCAT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIII@I
+@HWI-EAS91_1_30788AAXX:1:1:1152:1115/1
+GTAGTATGGCGTAGCCTCCTAGTTTTAGTAGAATGG
++/1
+7IIIIII""IIIIIIIIIII.68IIIDII=7IIIA'
+@HWI-EAS91_1_30788AAXX:1:1:1165:1121/1
+ATACTCAGGTCATTCTAGTCCTTTTTGGGTTCATTC
++/1
+IIIIIII""IIIIIIIIIIIIIIIII22?IIIIIIA
+@HWI-EAS91_1_30788AAXX:1:1:1500:55/1
+ATAGAGAGGAGTGCAACTAAGAGTGGGAGGGAACCT
++/1
+IIIIIII""IIIIIIIIIIFIIIIIIICIII<IIII
+@HWI-EAS91_1_30788AAXX:1:1:1142:1196/1
+TCATTTTGGTACCACTCGCAAGCACCATCGAAAACA
++/1
+IIIIIII""IIIIIIIIIIIIIIIII@I=I79I6II
+@HWI-EAS91_1_30788AAXX:1:1:1693:757/1
+AAATTATGGGTTAAACCCCTATATACCTCTATGGCC
++/1
+IIIIIII""IIIIIIIIIIIIIBIDIIIII0II-IG
+@HWI-EAS91_1_30788AAXX:1:1:1364:1053/1
+TTATTCAGGTGGGTATGAATCCTGATAGTGGGGGGA
++/1
+IIIIIII""IIIIIIIIIIIIIIIII?.I31G&4=5
+@HWI-EAS91_1_30788AAXX:1:1:1450:1443/1
+GTAAAAAGGTGCTCCAAGGCCTATTCATCACAATTT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIEIB8III
+@HWI-EAS91_1_30788AAXX:1:1:1701:1298/1
+TAGTAGAGGCCGCGTCCTACGTGAATGAAGAGGCAG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIII8II;IIIGI
+@HWI-EAS91_1_30788AAXX:1:1:1729:856/1
+TCCTATTGGTTCAATACTGAAACCAAGCACTACCCG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIFIIIII@IIIH
+@HWI-EAS91_1_30788AAXX:1:1:1748:933/1
+TGAGCTCGGGGCTTCATCTTCTTATTCACAGTAGGA
++/1
+IIIIIII""IIIIIICIIIIIIIIIIIIII@II5@F
+@HWI-EAS91_1_30788AAXX:1:1:1662:1296/1
+TTAGTTAGGAATAGAATTACACATGCAAGTATCCGC
++/1
+IIIIIII""IIIIIIIIIIIIIHIIIEI5IIIII4I
+@HWI-EAS91_1_30788AAXX:1:1:1393:345/1
+ATTTACAGGAGAATTGAGTAGTTTTTAGGTAAATTT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIE+IIEIIII
+@HWI-EAS91_1_30788AAXX:1:1:943:780/1
+AGGCGTTGGGTTTGGTTGCCTCAGCGGGTGATGATA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIII;II<IH
+@HWI-EAS91_1_30788AAXX:1:1:1639:968/1
+AGCTCACGGAAAATAGCAGCATCATCCTCCCCACAC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIII8I:I
+@HWI-EAS91_1_30788AAXX:1:1:589:1114/1
+GTCATTCGGGATTAGTTGAGGTTAATTCTACTGTAG
++/1
+IIIIIII""IIIIIIIIIII1IIIIIIIIIII&II)
+@HWI-EAS91_1_30788AAXX:1:1:763:408/1
+GCCCCACGGCTGGTGTTGACAACATGACTACTGCCA
++/1
+IIIIIII""IIIIIIIIIIIIII=I*5II<II'II(
+@HWI-EAS91_1_30788AAXX:1:1:950:352/1
+ACGCATAGGCAACATGAAATACCATCTCATCCATAG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIFIIIIIGI
+@HWI-EAS91_1_30788AAXX:1:1:1164:103/1
+TTAACTAGGACATTCACCAAACCATTAAAGTATAGG
++/1
+IIIIIII""IIIIIIIIIIIAII=IIB58I=<I;II
+@HWI-EAS91_1_30788AAXX:1:1:1078:1202/1
+TAGGGCTGGGCATAGTGGGGTATCTAATCCCAGTTT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIEIII
+@HWI-EAS91_1_30788AAXX:1:1:1617:490/1
+AATCCTTGGTAACCGCATCGGGGATATCGGCTTCAT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIII:(D
+@HWI-EAS91_1_30788AAXX:1:1:843:504/1
+ACAACATGGAATACCATCTCATCCATAGGATCTTTT
++/1
+IIIIIII""IIIIIIIIAIIIIII>IIHI=IIIIII
+@HWI-EAS91_1_30788AAXX:1:1:942:318/1
+TTCATACGGGCCATGTCCAGCCTAGCTGTCTACTCA
++/1
+IIIIIII""IIIIIIIIIIHIIIIIIIIIII<IIIF
+@HWI-EAS91_1_30788AAXX:1:1:1719:283/1
+TTTTGTTGGCCGAGGTCACCCCAACCGAAATTGCTG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIII3=2IGIIII
+@HWI-EAS91_1_30788AAXX:1:1:1517:961/1
+TGTCACTGGTCCATATTAATATCTTCCTAGCATTCA
++/1
+IIIIIII""IIIII=III9I@IIIIIIIBIIEIII>
+@HWI-EAS91_1_30788AAXX:1:1:1482:345/1
+AGGATGTGGGTTAATAGCCCTATAGCTAGAAGTAAG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIII?IAHIIAAI
+@HWI-EAS91_1_30788AAXX:1:1:1697:533/1
+ATGAGGTGGATTAGGAGGTGTCCGGCGGTAATGTTA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIII1>IIIII
+@HWI-EAS91_1_30788AAXX:1:1:1159:1225/1
+TTTCTACGGCAAGGGACGCCCATTTTCCTCATCCCC
++/1
+IIIIIII""IIIII=I7IIII3IIIIIIII/ID@II
+@HWI-EAS91_1_30788AAXX:1:1:982:332/1
+AAGGATTGGATCCCCTCCTCCTGCGGGGTCGAAGAA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIII?<I7@
+@HWI-EAS91_1_30788AAXX:1:1:1073:668/1
+AGCATTTGGCAACCCCTACCTGCCAGAACTCTACTC
++/1
+IIIIIII""IIIIIIIIIIIIIII>I4@IIIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1386:1163/1
+AGTTGGCGGAAAAGCAGCCATCAATTAAGAAAGCGT
++/1
+IIIIIII""IIIIIIIIIICII=GIIIII@IGIIII
+@HWI-EAS91_1_30788AAXX:1:1:1196:344/1
+ATTGAAGGGAGTTGATTAGGGTATTTAGCTGTTAAC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII7
+@HWI-EAS91_1_30788AAXX:1:1:1551:1269/1
+ATTACCAGGATCCTAATAGGAGCCAATATCACCATC
++/1
+IIIIIII""IIIIIIIIGIIIIIIIIIIII@II4II
+@HWI-EAS91_1_30788AAXX:1:1:928:468/1
+AGGAGATGGAAATTTTAACTTGGCGCTATAGAGAAA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIC<IHIII
+@HWI-EAS91_1_30788AAXX:1:1:763:835/1
+GCTCAGAGGAAGCCAGAGAAGAAGAAGACTTCTGAG
++/1
+IIIIIII""IIIIIIIHIEFIIFI<IIIIIIDII:I
+@HWI-EAS91_1_30788AAXX:1:1:1363:330/1
+TAGCCTCGGAGTTTTAGTAGAATGGCTGCTAGCACT
++/1
+IIIIIII""IIIIIIII@IIIIIIIIII5I,5C-75
+@HWI-EAS91_1_30788AAXX:1:1:1001:837/1
+ATACTATGGCTGTGAGGAATAATCATAACTAGTTCC
++/1
+IIIIIII""IIIII2IIIIIIIIIIIFIII:IIIII
+@HWI-EAS91_1_30788AAXX:1:1:1189:616/1
+TTTGATAGGGTAAAACATAGAGGCTCAAACCCTCTT
++/1
+IIIIIII""IIIIIIIIIIIIIIIIICIIIIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1212:634/1
+ATCAACAGGTATTCTGATTCTTCGGACACCCCGAAG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIAIIIIF(.I
+@HWI-EAS91_1_30788AAXX:1:1:1271:421/1
+ACACGACGGCACCTAATGACCCACCAAACCCACGCT
++/1
+IIIIIII""IIIIIIIIIIIIIIII>C:IIIDI<II
+@HWI-EAS91_1_30788AAXX:1:1:1582:328/1
+AGCATTAGGCTTTTAAGTTAAAGATTGAGGGTTCAA
++/1
+IIIIIII""IIIIII,IIIB;BI<IIIIII+IIIBI
+@HWI-EAS91_1_30788AAXX:1:1:1133:687/1
+AGAGAGCGGATTAGGAATACGATTATTAGTGTGTGG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIII4III
+@HWI-EAS91_1_30788AAXX:1:1:1231:662/1
+TCACTCTGGAGAACATATAAAACCAACATAACCTCC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIII?ICCIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1693:1140/1
+ATTGATAGGATGGGGGTTAGGGGGAGGAGTAGGGAG
++/1
+IIIIIII""IIIIIIIIIIIIIII:II5II7IIA-I
+@HWI-EAS91_1_30788AAXX:1:1:867:279/1
+AAGCATTGGACTGTAAATCTAAAGACAGGGGTTGGA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1095:605/1
+AAGCAAGGGACTGAAAATGCCTAGATGAGTATTCTT
++/1
+IIIIIII""IIIIIIIIIIIIIIIEIIIIIIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1650:1185/1
+ACCCCAGGGAACCCTCTCAGCACTCCCCCTCATATT
++/1
+IIIIIII""IIIIIIIIIIII6IIIIIIIII5I-II
+@HWI-EAS91_1_30788AAXX:1:1:799:192/1
+AATGTTAGGGGTTAGCCGCACGGCTAGGGCTACAGG
++/1
+IIIIIII""IIIIIIIIII8IIIEIIIII<I::%II
+@HWI-EAS91_1_30788AAXX:1:1:1082:719/1
+TCTTGAGGGTCTATGGTGCTGGTATGGGTTAATTTA
++/1
+IIIIIII""IIIIIIA8III>I92I3+3IIE0III<
+@HWI-EAS91_1_30788AAXX:1:1:1746:1180/1
+AAAATTAGGGAGAAGTAATCTAGTTTGAAGCTTAGG
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIFIIIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:606:460/1
+TTAATTTGGATTATAATAACACTCACAATATTCATA
++/1
+IIIIIII""IIIIIIIIIIIIIIIIII?I6IIIII6
+@HWI-EAS91_1_30788AAXX:1:1:1059:362/1
+ATCGGTAGGCTCGTAGCTTCAGTATCATTGGTGGCC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIII2)</II
+@HWI-EAS91_1_30788AAXX:1:1:1483:1161/1
+ATATGTGGGGGGTGGGGATGAGTGCTAGGATCAGGA
++/1
+IIIIIII""IIIIIIIIIIIIIIIHIAIIFIIIIIH
+@HWI-EAS91_1_30788AAXX:1:1:1273:600/1
+TACTGAGGGGTATCCTGAGGTATGGGTGTCTAATAC
++/1
+IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIIII
+@HWI-EAS91_1_30788AAXX:1:1:1190:1283/1
+TTTCTGGGGTACAAGACCAGGGTAATGTGCGATATA
++/1
+IIIIIII""IIIIIIIIIIIIIIIAIIIII-?IIIE
diff -r 137d93848139 -r 032aae80bbb0 test-data/perm_in4.fastqcssanger
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/perm_in4.fastqcssanger Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,576 @@
+@1831_573_1004
+T00030133312212111300011021310132222
++
+%><C&&9952+C>5<.?<79,=42<292:<(9/-7
+@1831_573_1050
+T03330322230322112131010221102122113
++
+);@@17?@=>7??@A8?==@4A?A4)&+.'&+'1,
+@1831_573_1067
+T00023032023103330112220321200200002
++
+.++#%(',%/$,+&1#&),)&+'*'*%#$&#%('$
+@1831_573_1219
+T11211130300300301021212330201121310
++
+@@@=4/+)5)408?'665>*/5?<61';<3,:,5-
+@1831_573_1242
+T02132003121011302100130302112221121
++
+>>>::>17=A5?@@=;7A=;2.60>82<8=74+;;
+@1831_573_1333
+T00200312330110101013212313222303112
++
+)='@#%3=.>)/34*117,,/6-4+.9742456<)
+@1831_573_1362
+T21203131001102231121211101111321131
++
+CB@@?@@?@C@BA?@>@<@8A@?@'.8'?17:,+?
+@1831_573_1448
+T23101211223113320132212331313312022
++
+77=A)?,.@A@@:@@<=/7)@<-#8662%9613&+
+@1831_573_1490
+T31312310323301210002210123101021011
++
+@@@@?,@??@$><=>+%/*>*1,)?26&12'9%7.
+@1831_573_1523
+T10322001220012223202202222001230222
++
+(,//*,#8''.6''6'2&27/80)%�'%#*,2'
+@1831_573_1578
+T21202302100010020121100311022120111
++
+@?=@<@C@?@@??=>@;7<<8<694)3511(+1<.
+@1831_573_1647
+T10222233301013033120132223202022123
++
+=?@@<@@???;A==@@;9?@?3=;6.4/;6:=/;4
+@1831_573_1684
+T13310013212312012302121010221231123
++
+7>1:8<@39@@??9=;@@<>?@@<;<?7>76?9>?
+@1831_573_1769
+T33220123030232212032021032302233131
++
+754749&&767%/7;3$-7;3#,3//#,45/#&06
+@1831_573_1853
+T11000012111222211310103212122102331
++
+64./B@@72B+/4?@?7?+@9/+99.')2<2&)2&
+@1831_573_1943
+T20300123032210232001222122001132111
++
+(>/*1,),@-)'0*>5'$/?6(a)/.&?8/(')A43,
+@1831_573_1977
+T22212302221310332321002303112011311
++
+@@?6@8@@9A@=4633A7?9?<3:+7>'@%@?%32
+@1831_574_109
+T13122332123301331032220222133301033
++
+07651#4817>@65<2:84716=788<1995.7?6
+@1831_574_148
+T01200113123030012202302312200010231
++
+<;->3;;3>//>0;;7=,=4-4/+>029<9<624;
+@1831_574_185
+T21123333211302300321312212102123121
++
+@<4;%%@7>#*>97%6;+8$#<07%1372$%<54#
+@1831_574_243
+T30221011230013102201033131203302330
++
+#@@@95@=9?*><45$<@<4%@9?7#=?>7&>?97
+@1831_574_257
+T00301133110002100302003000000102301
++
+%>>B>)@?>B@B?@1%*%,#+00.'(+&5&%#$3&
+@1831_574_293
+T23213210003000103010211331300320130
++
+&')7'.*%#&$'/.$##$.$&*)+*$#8%&%#&&%
+@1831_574_389
+T21032213032101122333230212301312020
++
+))$>/>8):2@:213;;.1)@3%64%+)<7)+$92
+@1831_574_575
+T33313322100212102033032123311211302
++
+7?@>=?@>8@@@=@6*%&<='5@<<',0<=4*5/<
+@1831_574_592
+T33103330110123102223122023103310330
++
+73-5)19,,<@>4067<2.-864346;9<1/;212
+@1831_574_617
+T20021031221222021210021322200223211
++
+@>@?;>?=@==@>@=5?@+@:@40@><:0)')64-
+@1831_574_725
+T32010020322130330333010031120313210
++
+9'>?52+5=;4$6;<8.1<47*9+?7(+5;-).)4
+@1831_574_734
+T31132301200020012302210322213222222
++
+.-&.$2&<*(-%):/)%5:)/%7+,9034*A.?70
+@1831_574_824
+T30212100033032123311211302122020013
++
+;@@@;;),+@<-<7;<&89??&29?<$2:A<);8?
+@1831_574_959
+T11212130220131221111002020123311211
++
+7A=;+?@9;/<@;<8#&&/*%(%.0$0$2)+1$-1
+@1831_574_1062
+T30112230030300221001032033012211012
++
+4@821<//:136/8<1/5.427.;>288/0<1$)/
+@1831_574_1092
+T02013221200031031212200000111130310
++
+<8?.,98)03%#,,,(,++$'69'#9'7#';6.%2
+@1831_574_1103
+T20313113203302010303131123021310121
++
+@??>?@=>@@A<>?=@==<:?<;58'&&):+35'1
+@1831_574_1116
+T21011310123202303021021112021231011
++
+)(a)++,)/)?./)%<)2>.==:8?&;44&/&,)/*4
+@1831_574_1194
+T23303101033322220312200222013013312
++
+:5>;,??9@9@?@1@@=@3=@>@4?@@94?75/,6
+@1831_574_1204
+T21330132231321322010303023221203200
++
+AA5A5B@5:;@@,.9?A1?#.?;3),>82%)='7*
+@1831_574_1306
+T10332133020311023221213100301001220
++
+02.7&%''.$&#'%##$#'#%')')$'$%$,)'.1
+@1831_574_1387
+T12301331310032132101301303230121111
++
+-?A??:@?@@9?@@?9C;:>&).,,);&'<&7(/'
+@1831_574_1431
+T12011023331022213001123111301312011
++
+>7?>9BA897;+8*=6#6.+5&-#26$0.,5&'$1
+@1831_574_1560
+T32212313302203320020222113111011111
++
+=))&65<4=>99@:597(1&;+&(%&4#,#&&&*$
+@1831_574_1591
+T23202101330322130221230222201123202
++
+==<A><?@78@<=;@&@6>8:?,><&&<7>8(8+9
+@1831_574_1624
+T20122200222132200313011102302210332
++
+#<6.&(,7-+))7..'/;43.)927.(<85&13-)
+@1831_574_1826
+T13012312120112021233030302313201111
++
++@<<,))459<?.+57'-'&6?1)(;1;0&%4,#$
+@1831_574_1903
+T30232100103132133321330310210101221
++
+<956?@>;7@@@8:@>8><=9>9>@2=71?1:6;/
+@1831_574_1961
+T02333101331223303300200011100032200
++
+6>-07-@>(2'@<#064@%5%@@6.&<<(9)=:47
+@1831_575_54
+T13331330322230200102132110132013200
++
+>=@@68<2;<8;?@<>+9>7,,6972&57:4&9<8
+@1831_575_80
+T33133322233322221003332230323312313
++
+*?5><@?/=5%(&.5)+#/0'/'>#$/'37#()6&
+@1831_575_192
+T30013012111133003301010212123302011
++
+0:8<,9(.=/)#%8$,4'<,:)3/.'.5<,>-;7'
+@1831_575_197
+T33312113010133020301131330001310032
++
+,-:?463@A:88?6?437<=.2&1.;@67.4@%2,
+@1831_575_223
+T10121010002202131221210302100121020
++
+@?>?8?@:?;>+$'%&,+#$.--#%$1'&#-7$&'
+@1831_575_420
+T31110103220000101310112112001020212
++
+82?.>?(-''2+..@),$%1@;')1)?+)7.')..
+@1831_575_434
+T30312132120223101113223301211113311
++
+<@@><@B@>=@@?>><@?;)@?;6.?=4,'';/3)
+@1831_575_444
+T33022120112320220100202132332113320
++
+<2@@;<.=?@<+=:A<<@?<</>@><(<8>4&5;>
+@1831_575_459
+T31330310210101223330110231120131100
++
+?@@C@?A@@A@??A@@'6@@B'=;C@'4<=9,@1.
+@1831_575_506
+T31210200111210121332321310110132301
++
+8?)2*5@3+.7=?A:8@,/&):@37/<89'+4*)*
+@1831_575_569
+T10210201321323001012232322323002203
++
+/'9-,@29+479%&>9#&08=>7#9>,69/+'*3)
+@1831_575_622
+T01100031122111023002323113231210111
++
+@A8B=@@?<@?<>>=?96<497)<4/<':4+;74+
+@1831_575_644
+T11332003221203131231202200030110130
++
+<<8>.>&,+)8.10':5(/-62&*.=/%(.<&&18
+@1831_575_663
+T32210013303112103322311101322021210
++
+@@@@@A</;A>@6>0792<5966:?;25+:><1,+
+@1831_575_681
+T23131132033020103031013233200101021
++
+.72?.)25;96-14'*.03'#4#++$)/8+%/((&
+@1831_575_711
+T03032331231101231020121210002332121
++
+4%+:2(#'1+.)&*45-#,<A-%4;>+))<;##*&
+@1831_575_730
+T31010102200110302123032330331011111
++
+,%8<%-')5,*1&=#)(1*1)->7$.,4'4'&'*&
+@1831_575_904
+T20111213300020123200333321131121211
++
+?<><.689@@7@<54:4=@>69=:+6-=42;8<:#
+@1831_575_938
+T13103102220022130222233301013033120
++
+;:@@:?@;=8;>=;;:=<93<4.;<6:2<9;4)87
+@1831_575_970
+T23201311301023133303023011202220221
++
+=@=@=;@@><@@:9<:=?;>@=?9>>?@:2===<5
+@1831_575_991
+T33312212031111111012212120321121210
++
+@>>;7???/4<:::,#&*%'$--5-#(7)&&,-)%
+@1831_575_1138
+T23320002011320012120333103233301321
++
+<=0;%1936+?96?,?5<:2>@6@<:;4>.7;*:1
+@1831_575_1157
+T13121323330203331222022230133102321
++
+@B@B>?A?A@?9'?;?&8?@@6;<@=+<:99+965
+@1831_575_1180
+T32003310122102323303101123331133110
++
+/<B>5?':/78?;1;55:997@@>94=7<186->7
+@1831_575_1283
+T02232200301300220130032321323131333
++
+%C@A54><?A78@<?2>:#>=?@2>80:'?69.'7
+@1831_575_1302
+T01201303312333123130200123201013021
++
+&12>/#2'&-2&%'7+$%*#$/1(&&12/$,2,,*
+@1831_575_1310
+T31332131312021303211310220101211133
++
+6<<>663?99@(@7=11@:@@/7??<@<;>'3-7;
+@1831_575_1321
+T11001010233200122122022023000203212
++
+0;B@?@>=A@<A?>@>?6>@5>'4@9?)7@7?/89
+@1831_575_1373
+T21213011223311001221321132013121220
++
+A@@@>@;=9<@??7@>@:@;@;17?630/7=4,2<
+@1831_575_1419
+T33222200303001021230212332001013020
++
+>//3->&-.->%11/@&/&7:<,-1=,+2<6'%&<
+@1831_575_1436
+T13210313021212303321202113301220331
++
+*2@=;39*3<*4<:>52>>9'7:<='0=>1'136<
+@1831_575_1442
+T33132010022331132101132123132020222
++
+8>58<?7>+1B6-/9<<:5?>A:1@?539.@4/1>
+@1831_575_1454
+T11131130011012021120222231313211113
++
+24=?8=6589=0:A=:?>6<;@/,4+-6+/'.,(,
+@1831_575_1500
+T11010000223111301132313011130103021
++
+2?@@?A@?8<6<:985=99999==B3<5<<,7?>9
+@1831_575_1535
+T21312012030320112110211013300131121
++
+@B=@<@@B?:A@@=>@?68@=?@5?7;:6<<;>5<
+@1831_575_1724
+T33123002323300220213232301000010010
++
+9<B.+@),2*.%)))4%2@;7#%(%+$8))85%&5
+@1831_575_1829
+T21033321320111321230233302313101021
++
+22(*'%.3$+7)@&%$'3*+*#/#/*+0.=&#)+0
+@1831_575_1898
+T31330110303103131001110300102101330
++
+@B>@@&1/))'40)%#8/.%#8$((#;4'$'63,,
+@1831_575_1964
+T22010201103202213200201301300232123
++
+1>39)@2<2/@+9?2=&)>>@*62=5&2<42.'?+
+@1831_576_32
+T13012100120333032211330300332022110
++
++<>?>?=7<2)522;><<@40@>704<>5=23@+&
+@1831_576_74
+T30103313210232220102021223012112100
++
+8=@@?@>(??<B5?@@@9<@><+><;@';>6961?
+@1831_576_86
+T10320000121033022010011030032211310
++
+7811)2:*.++5</:3+43924*))/:,6&29)2/
+@1831_576_89
+T02132333203332020020220033002121120
++
+2)69+,'.4=-,>/>(*$#)3030*'(,%)2##$,
+@1831_576_266
+T30322223101312011300311121221333223
++
+<@@?=;<?7=@>9,>@1$&&89$/:>7'3178%&6
+@1831_576_327
+T22112331301313021321001332120332130
++
+<@?@6@@;<@?><@>==3@:==<3@@>53<9><6=
+@1831_576_331
+T32012133301311223023011232112333030
++
+;-2)+(*.*1/;5%.-9&#/1'+($*$##()%/$.
+@1831_576_387
+T00101211032031120300200222001230022
++
+/8).$5#1#*%.$##.*#$%##-%,+,1#&%.%))
+@1831_576_406
+T00223133010210122221320212103132011
++
+<4<>%%14:*4656)&<251&2+3#&19,6&4>5(
+@1831_576_449
+T31312001121222231100020132132100220
++
+(+'&'.,,$/+.)$$8&%#+?&,#)-&###7,+#*
+@1831_576_519
+T03011321130130133213131202130321131
++
+==46<97@>2/6?;2<4A881>9121+<1/4.9+7
+@1831_576_603
+T21003032313302312320131221001330311
++
+@@46=@C.??<A79@@;-<@@>29B?>55<B7598
+@1831_576_655
+T02001023130302322122200313123123102
++
+=@AA9@@<6*>@@5/<@>9'=;>7+@?9>/9;+,%
+@1831_576_677
+T13330131023320301031013230210103022
++
+6290&/*0#&'&,.2'#&*$&('#-%($*#%$)#%
+@1831_576_718
+T31232113331022231333313223132231213
++
+$4###$0###%(#########,####%####$###
+@1831_576_722
+T31230320322120231333030031100313200
++
+(*,-,##.)*&(*1%*(%(-2#+)-#.&-#%%$')
+@1831_576_754
+T30221231132103120112331303112133020
++
+.4//#(.$)'',>($<,##%((,#5?#0*%1*
+@1831_576_815
+T23022113203032010120310102321001031
++
+<3>@?9>@?B=>=;>A???=>:25=4.25?6<57.
+@1831_576_882
+T13230020122320223230022031020110122
++
+?26?8?@:4>@>96??<<=5'1<>9846=<9<1>8
+@1831_576_898
+T10230132312121033222231132231233213
++
+=@@C2?C>?<>@886B?;?.??87=B<8<15??=.
+@1831_576_923
+T21322010320202013210121223010123122
++
+76.51=;.699<96>;;49<;;11;<@59:9=647
+@1831_576_930
+T21322103230123110323102012021020013
++
+2#*0.2&.:((#'14'##-)#%$$2%#$/1&#%/#
+@1831_576_1019
+T22032121213231032210312001103122312
++
+2<$>$,1,&++&@,.)'+/+#9'69/6'2(+-'9-
+@1831_576_1068
+T00020232013101330112220321203220211
++
+:>><=4?;@7=??9?;9>5@9?:8@:=5';7;'#4
+@1831_576_1131
+T10233122200222132200313011102302210
++
+104@#@/@,4>9?2+?1571@',>=;(759;*92<
+@1831_576_1168
+T31013300131121323122002113301002010
++
+>@CC6@@2?9=>7?;76<;467@;9,0%26'',4$
+@1831_576_1207
+T21001132013000122220301213221213010
++
+%$>2,(&?4?(,@:<&,@>?$&:8A%%=0.%,597
+@1831_576_1289
+T03021210023110200323310302013121203
++
+@9<>1B@@;7@@663==28,5':8<<,.=2>>.50
+@1831_576_1329
+T01100302102020113003022000120002100
++
+<>&0,870/A@@/5.;=;:'&@3'&$$%,+#($&2
+@1831_576_1367
+T12231310311233110031222013332011023
++
+??@@<>@@=@<@<=@>@??9:?<=>=<8;59@787
+@1831_576_1416
+T33021233100123120313103133211203221
++
++';?..1.<@'.=+6.5?7<0-?7;(%=>56.98@
+@1831_576_1461
+T32022221221112233100210223002100100
++
+#1&74#(#(&##$#,'###%#%%#*#&%%##%%%'
+@1831_576_1605
+T30232100103132133321330310210101221
++
+6<@C@@>1;@:;<<@@9@???9<3?5-21=4877,
+@1831_576_1664
+T31212101001312110320301201002011120
++
+@@?>?@=A:@@>@='=<>=).>=9*8)(7#/++;)
+@1831_576_1671
+T22313332300211322113223102231322313
++
+@@?@5>@@8&8?25#&)&5&&)15&&,&4%&,&6%
+@1831_576_1729
+T11233312313010012320101302101023030
++
+@?@?<?<@>A?/==?>?7>?@8<?@>99;><+0=>
+@1831_576_1880
+T13032121323320213301001310130212003
++
+0515/792,:,7/%/05,%$):+#8%2(1754))3
+@1831_576_1982
+T00032312310201201333221212000011030
++
+,@@?@,A><@9@=?@9B=8<6@@6@2<A?>.7<+@
+@1831_576_1987
+T30022313313231221213220132001011320
++
+)##()(#&##)%)/-#%$11#%3>'##&$,#$$'+
+@1831_576_2014
+T31123201010100321122111102113021003
++
+@@@9,B29-5>'?,+?79+/A';'2@'5&/9,6&1
+@1831_576_2028
+T20131211210311112023201213120201100
++
+5<>9>5'2(&707.8#&&39,0%7/#(#,*%&5*+
+@1831_577_40
+T11111212330120012020200031313303003
++
+92/4('.')(<*?#$)%&<,/39<(.2,+<=@611
+@1831_577_119
+T33111010021103320103213121313000102
++
+9>?C11)-1/)#;#/,850*+.+$$5550+%-.40
+@1831_577_133
+T33213323012231300122223032223331322
++
+/4@>>?=1?:+>@07@@><>@*<+1@15)96'2$)
+@1831_577_255
+T00332022110020300332022020202002232
++
+7<=87@>69%**#&#-+$.#&&#+$-+%&%.,%5#
+@1831_577_281
+T03032301231212301013112222111210000
++
+7-:B;8@=;>7@>4?=?+659?;5<7?;9@8(>:?
+@1831_577_288
+T01031120221303100221230021013201130
++
+5;/79??&=B::298*6.7/+4&21,7,6?.7#'6
+@1831_577_322
+T12003213220230103303201000130312202
++
+9=;>4506;255464-<#7+194&2<?65968)7/
+@1831_577_362
+T31203302330110131230331210121110220
++
+62B?:@?@<?958=,3:,90:&'-99,6<5.($+6
+@1831_577_382
+T32312123033111120321303230201332100
++
+12957/:1))=76(*24;,3+:<.&.&-=1=2/5*
+@1831_577_464
+T13020221011130013102221333131203302
++
+3=<7/<826)>#.'&4204+5#/041.7*91&756
+@1831_577_488
+T13200302330322110200323132101120301
++
+:48.,*>6566<?8=<=<2>6;94>;=9>@8924@
+@1831_577_511
+T32232133031023313331312220133230333
++
+#7/+&$:<7%6,$$%'%/+)#$7((&*3>16'0/+
+@1831_577_545
+T00112131333222303222210031322103233
++
+>><<9?<>?A<481@<@8==@76/61<95.5988-
+@1831_577_559
+T32321101303233120102011130022122002
++
+.<@;??@>@?/2<.@1=>12=61/;=?.&2+92)'
+@1831_577_562
+T32331101301233110121000220031120031
++
+#((&*&0%)1%)#($2-,***%/-,,))&,-.1'*
+@1831_577_637
+T22113312122202103031023120301031110
++
+8-,4#>:-6+:8,&(5;3=0>7=68&1/9&'?;4,
+@1831_577_641
+T13031301101121223221212020032131113
++
+&47;/':A;;5?:72,(=),#*?+.#&7$8#%7/'
+@1831_577_692
+T01122320200330103121202301211100220
++
+@B,@?@B(@A?@+@@>@+?=>@'=<@<=<9=?75<
diff -r 137d93848139 -r 032aae80bbb0 test-data/perm_in5.fastqcssanger
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/perm_in5.fastqcssanger Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,48 @@
+@853_60_741/1
+G31030031122312021111010130200021003210121320000223
++
+==9:>>=;><9;=<<>;><>>;?<=?:>?><=>?9<>=<9=;<?=>>><9
+@853_111_1583/1
+G21011211200202112131111130201231220231231220200322
++
+:A<A>>>;=<;=<@A;?<<@<>?@><99A=<7=;=:6;<<:9::<;89=<
+@853_111_1628/1
+G10102021032020010121032020232110130223102122302200
++
+?;<<>;=<=>?8;>9>;<><=9;<;=<5<><;8=>?978<=8;;8<8:;:
+@853_276_1541/1
+G03222011023120123332203020012312001121123311001231
++
+=<><;A<<>@<@<<?:<:<>=:<==;?<<<<:>=A?;==;==;<@;;8:<
+@853_394_432/1
+G13312311132110330003300222221112131200021120330332
++
+@=>@A<?>>==?A?<??=@?>?=?><?=?=:@?=?==A=<;<<@=<<>=5
+@853_460_1442/1
+G02101122101002202021313212112232110310010223211122
++
+>??:?>@>A=?=:>A>>9?><==A>>>=?>;==:>>>;:;==:<;>;9<=
+@865_1106_310/1
+G01222230300213322221202031001001030210000110220102
++
+;6?;=>=:9>:=<:<>:<;<>;?;>>;<<<>9><>:9===?<6<::5:;;
+@869_1532_1255/1
+G21022233110003122233210021301222000112122113330022
++
+=;8:?@=?;;9:8;=>;5A?;<8><<=:9><;9<=8;96>8<5==:<98;
+@889_1337_1562/1
+G32320230101133233031203331023110123123110321101011
++
+==?@=@??@?=@>==;?=>=;;?>=<;==<=:A;<<<=7=8:;==6<>=;
+@891_252_1590/1
+G21001002321200302231203001233020201230020220002300
++
+<A>9<=9A;A><>>>;>@?A>9=;@>@>?<><@>@@<;=;8:7>:;:5=<
+@892_582_183/1
+G01101032230201131100203303123121133333011110033000
++
+;><:><=>;<<>:=>?>>;<=;:9><<:=:>;:;>;:=9;;=799<7699
+@932_1836_1806/1
+G32331100031222332100213012220001121221133300220331
++
+;:<;<;<><;>9@=<=<:>A:8<<=<=?<:@?<=?<;>9;<6<><<::5>
diff -r 137d93848139 -r 032aae80bbb0 test-data/perm_in6.fastqcssanger
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/perm_in6.fastqcssanger Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,48 @@
+@853_60_741/2
+T30321031112011000112320211202221320221313213231311
++
+99?;>=;?>:>=>:<>>?85>=<;:><995@::<7@<=88==>;=?>>9<
+@853_111_1583/2
+T10022103103000201030021103111120021021121130210022
++
+?;<<>;=<=>?8;>9>;<><=9;<;=<5<><;8=>?978<=8;;8<8:;:
+@853_111_1628/2
+T33002021100103231100332001320020101002002311222011
++
+><@A>=>;>?A<A:<;???@==>;?>>?8?<8;:@?=<;6>6<;;9=?<5
+@853_276_1541/2
+T30303002200001133331103221321130031333013232132322
++
+A?:5A<?:=A>A8@8AA:@:<@97<@@@>A7?@6?@;8?A7@986;5;?=
+@853_394_432/2
+T30030000333322020222202211233333002103312202000110
++
+:<:=>:?><==>=><<=><:;>=;:;9;><9<><@8;:;>=;:7:>7:7=
+@853_460_1442/2
+T11130211103112102212221200202022130303003210122101
++
+>A=:>@@==>@>=<<>?9>>=;9<<==:<<;>8;;;<<<:<:7=95>:;9
+@865_1106_310/2
+T33223200002331021122331020030003301230300213231311
++
+?@@=?A>A>==:=;=????A<;;>A<<?=>?<@<=@9:>9>:56A<;;9;
+@869_1532_1255/2
+T03103330303100300112231121121003120312113022230331
++
+@A=><>9=<@>;<;<==?;?<A=?<=<<:A>7>9:;:<=7:8@6><>96<
+@889_1337_1562/2
+T10031203121130222303312100203132322201123110000101
++
+=A;A@>AA@>?>A@>A:>@:<>>=5;;=;<;88?=>5=;899:9<<;>98
+@891_252_1590/2
+T03210330303300030311113131021332033110001211222031
++
+8;8?;><::;9;:>;><:>9=8;68:<?<;=@===<=>9=:>A9=89;==
+@892_582_183/2
+T20322002021213100112021203203010031223003101212213
++
+@?=?@>A?>>>??>A;@>?==>>@<>??><<>>::=?:=?=;6<>=<>=<
+@932_1836_1806/2
+T03233010131101303202311220122201011323220123130010
++
+AA<?A@AAAA>?A>A@=>@>A>??A@?>>@>@=>A9?>:?>8@@=>>=>@
diff -r 137d93848139 -r 032aae80bbb0 test-data/perm_out1.sam
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/perm_out1.sam Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,33 @@
+HWI-EAS91_1_30788AAXX:1:1:1761:343/1 65 PHIX174 5 255 50M = 69 64 TTTATCGCTTCCATGACGCAGAAGTTAACACTTTCGGATATTTCTGATGA IIIIIII""IIIIIIIIIII?I0IIIIHIIIGIIIII0II?I""IIIIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1761:343/1 129 PHIX174 69 255 50M = 5 64 TTGATAAAGCAGGAATTACTACTGCTTGTTTACGAATTAAATCGAAGTGG IIIIIII""IIIIIII0II?I""IIIIIIIIIIII?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1578:331/1 65 PHIX174 209 255 50M = 268 59 TGTCAAAAACTGACGCGTTGGATGAGGAGAAGTGGCTTAATATGCTTGGC IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1578:331/1 129 PHIX174 268 255 50M = 209 59 AAGGACTGGTTTAGATATGAGTCACATTTTGTTCATGGTAGAGATTCTCT IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1647:512/1 65 PHIX174 401 255 50M = 451 50 TACTGAACAATCCGTACGTTTCCAGACCGCTTTGGCCTCTATTAAGCTCA IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1647:512/1 129 PHIX174 451 255 50M = 401 50 TTCAGGCTTCTGCCGTTTTGGATTTAACCGAAGATGATTTCGATTTTCTG IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1570:620/2 0 PHIX174 559 255 50M * 0 0 CTTGCGTTTATGGTACGCTGGACTTTGTGGGATACCCTCGCTTTCCTGCT IIIIIII""IIIIIIIIIIIIIBIIIIIIIIIIII?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1599:330/1 65 PHIX174 803 255 50M = 868 65 AGAAGAAAACGTGCGTCAAAAATTACGTGCAGAAGGAGTGATGTAATGTC IIIIIII""IIIIIIIIIIIIIIIIIII<III@II"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1599:330/1 129 PHIX174 868 255 50M = 803 65 TTCTGGCGCTCGCCCTGGTCGTCCGCAGCCGTTGCGAGGTACTAAAGGCA IIIIIII""IIIIIIIIIIIIIIIIIII<III@II?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1652:270/2 0 PHIX174 973 255 50M * 0 0 GCTTCGGCCCCTTACTTGAGGATAAATTATGTCTAATATTCAAACTGGCG IIIIIII""IIIIIIIIIIIIIIIIIIIIIII6II?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1746:351/1 0 PHIX174 1218 255 50M * 0 0 CTCATCGTCACGTTTATGGTGAACAGTGGATTAAGTTCATGAAGGATGGT IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1582:633/1 0 PHIX174 1517 255 50M * 0 0 CCGCTTCCTCCTGAGACTGAGCTTTCTCGCCAAATGACGACTTCTACCAC IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1598:534/1 65 PHIX174 1717 255 50M = 1772 55 GCGCTCTAATCTCTGGGCATCTGGCTATGATGTTGATGGAACTGACCAAA IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1598:534/1 129 PHIX174 1772 255 50M = 1717 55 TTAGGCCAGTTTTCTGGTCGTGTTCAACAGACCTATAAACATTCTGTGCC IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1572:324/2 0 PHIX174 1972 255 50M * 0 0 GCCGCCGCGTGAAATTTCTATGAAGGATGTTTTCCGTTCTGGTGATTCGT IIIIIII""IIIIIIIIIIIIIIIIIIII+7I05I?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1749:466/1 65 PHIX174 2201 255 50M = 2251 50 TTGCAGTGGAATAGTCAGGTTAAATTTAATGTGACCGTTTATCGCAATCT IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1749:466/1 129 PHIX174 2251 255 50M = 2201 50 GCCGACCACTCGCGATTCAATCATGACTTCGTGATAAAAGATTGAGTGTG IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1629:446/1 65 PHIX174 2301 255 50M = 2355 54 AGGTTATAACGCCGAAGCGGTAAAAATTTTAATTTTTGCCGCTGAGGGGT IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1629:446/1 129 PHIX174 2355 255 50M = 2301 54 CAAGCGAAGCGCGGTAGGTTTTCTGCTTAGGAGTTTAATCATGTTTCAGA IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1763:587/1 65 PHIX174 2501 255 50M = 2566 65 AAGCTACATCGTCAACGTTATATTTTGATAGTTTGACGGTTAATGCTGGT IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1763:587/1 129 PHIX174 2566 255 50M = 2501 65 CATTGCATTCAGATGGATACATCTGTCAACGCCGCTAATCAGGTTGTTTC IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1616:363/1 65 PHIX174 2718 255 50M = 2773 55 TGTTTATCCTTTGAATGGTCGCCATGATGGTGGTTATTATACCGTCAAGG IIIIIII""IIIIIIIIIIIIIIIIIIIIIIDIII"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1616:363/1 129 PHIX174 2773 255 50M = 2718 55 GTGACTATTGACGTCCTTCCCCGTACGCCGGGCAATAATGTTTATGTTGG IIIIIII""IIIIIIIIIIIIIIIIIIIIIIDIII?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1768:715/1 65 PHIX174 3118 255 50M = 3173 55 ACCCTGATGAGGCCGCCCCTAGTTTTGTTTCTGGTGCTATGGCTAAAGCT IIIIIII""IIIIIIIIIIIIIDIIIIIIIIIIII"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1768:715/1 129 PHIX174 3173 255 50M = 3118 55 AGGACTTCTTGAAGGTACGTTGCAGGCTGGCACTTCTGCCGTTTCTGATA IIIIIII""IIIIIIIIIIIIIDIIIIIIIIIIII?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1572:270/1 65 PHIX174 3518 255 50M = 3577 59 TGAGATGCTTGCTTATCAACAGAAGGAGTCTACTGCTCGCGTTGCGTCTA IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1572:270/1 129 PHIX174 3577 255 50M = 3518 59 ACACCAATCTTTCCAAGCAACAGCAGGTTTCCGAGATTATGCGCCAAATG IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIII?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1785:1272/1 65 PHIX174 3818 255 50M = 3873 55 TATTTTTCATGGTATTGATAAAGCTGTTGCCGATACTTGGAACAATTTCT III""""""IIIIIIIII""FI"IIII""II+ICI"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1785:1272/1 129 PHIX174 3873 255 50M = 3818 55 GACGGTAAAGCTGATGGTATTGGCTCTAATTTGTCTAGGAAATAACCGTC III""""""IIIIIIIII""FI"IIII""II+ICI?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1673:514/1 65 PHIX174 4018 255 50M = 4072 54 ATTATTTTGACTTTGAGCGTATCGAGGCTCTTAAACCTGCTATTGAGGCT IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIII1"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1673:514/1 129 PHIX174 4072 255 50M = 4018 54 GCATTTCTACTCTTTCTCAATCCCCAATGCTTGGCTTCCATAAGCAGATG IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIII1?I0IIIIHIIIGIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1634:330/1 65 PHIX174 4418 255 50M = 4473 55 GGATATTCGCGATGAGTATAATTACCCCAAAAAGAAAGGTATTAAGGATG IIIIIII""IIIIIIIIIIIIIIIIIIIIIII8II"IIIIIII000IIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1634:330/1 129 PHIX174 4473 255 50M = 4418 55 TCAAGATTGCTGGAGGCCTCCACTATGAAATCGCGTAGAGGCTTTACTAT IIIIIII""IIIIIIIIIIIIIIIIIIIIIII8II?I0IIIIHIIIGIII NM:i:0
diff -r 137d93848139 -r 032aae80bbb0 test-data/perm_out2.sam
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/perm_out2.sam Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,98 @@
+HWI-EAS91_1_30788AAXX:1:1:1513:715/1 16 chrM 9563 255 36M * 0 0 CTGACTACCACAACTAAACATCTATGCCCAAAAAAC I+-II?IDIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1698:516/1 16 chrM 2735 255 36M * 0 0 TTTACACTCAGAGGTTCAACTCCTCTCCCTAACAAC I9IIIII5IIIIIIIIIIIIIIIIIII""IIIIIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1491:637/1 16 chrM 10864 255 36M * 0 0 TGTAGAAGCCCCAATTGCCGGATCCATCCTGCTAGC DBAIIIIIIIIIIIFIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1711:249/1 16 chrM 10617 255 36M * 0 0 ACCAAACAGAACGCCTGAACGCAGGCCCCTACTTCC IIIIIIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1634:211/1 0 chrM 9350 255 36M * 0 0 GAAGCAGGGGCTTGATACTGACACTTCGTCGACGTA IIIIIII""IIIIIIIIIIIIIIIIIIIIII9IIDF NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1398:854/1 16 chrM 3921 255 36M * 0 0 CACCCTTCCCGTACTAATAAATCCCCTCCTCTTCAC IIIII=AIIIIIIIIIIIIIIBIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1310:991/1 16 chrM 10002 255 36M * 0 0 CTCCTATGCCTAGAAGGAATAATACTACCACTATTC I:2IEI:IIDIIIIII4IIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1716:413/1 0 chrM 6040 255 36M * 0 0 GATCCAAGGCTTTATCAACACCTATTCTGATTCTTC IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1630:59/1 16 chrM 12387 255 36M * 0 0 TCATACTCGACCCCAACCTTACCAACCCCCCGCTCC FIIHII;IIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1601:805/1 0 chrM 12584 255 36M * 0 0 GAAAACAGGAAAACAATCCAGTCACTTACCCTATGC IIIIIII""IIIIIIIIIIIIIIIIIIIIIII@III NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1663:724/1 0 chrM 15012 255 36M * 0 0 GTTTGCCGGCGCCATCCTACGCTCCATTCCCAACAA IIIIIII""IIII8IIIIIIHIIII6IIIII1CI=3 NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1454:975/1 16 chrM 8929 255 36M * 0 0 TGGCTTCTTCTGAGCCTTTTACCACTCCCGCCTAGC IIIIIII;GIEIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1461:255/1 0 chrM 12988 255 36M * 0 0 GTACACCGGCGCCTGAGCCCTACTAATAACTCTCAT IIIIIII""IIIIII9IIIIIIEI(II9.I4III,I NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1775:764/1 0 chrM 16613 255 36M * 0 0 GCATCCCGGTAGATCTAATTTTCTAAATCTGTCAAC IIIIIII""III@IIII+IIIIII8H8IIIIIIICI NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1269:520/1 16 chrM 11036 255 36M * 0 0 CGACAAACCGATCTAAAATCACTTATTCCATACTCC IIIIIIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1303:1162/1 16 chrM 14866 255 36M * 0 0 AAAGACATCCTAGGACTCCTCCTCCTGCCCTTGCTC I6I3,'94IIAI(IAIGI*4IIFIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1090:409/1 16 chrM 15014 255 36M * 0 0 TTGCCTACGCCATCCTACGCTCCATTCCCAACAAAC I8AII:IIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1336:1000/1 16 chrM 11616 255 36M * 0 0 GATCTAGAAACAGAAACTTAATATTTCCCATTTACC II9IIIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1199:1376/1 16 chrM 14023 255 36M * 0 0 AAACCCCCATAAATAGGTGAAGGTTTTCCAGAAAAC III/:&GII3III;IIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1598:1148/1 16 chrM 2334 255 36M * 0 0 AAATATATAATCACTTATTGATCCAAACCATTGATC IIIIIHI=IICIII?IIIIIIDIIIII""IIIIIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1723:1459/1 16 chrM 15787 255 36M * 0 0 ATCCTCGCTCCGGGCCCATCCAAACGTCCGGGTTTC I=II-IIH9IIIIIIIIIDIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1442:1346/1 16 chrM 9331 255 36M * 0 0 CAGCCACCACTTCGGATTCGAAGCAGCCCCTTGATA &,*II-<?<II4I(I1IIIIIDIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:850:117/1 16 chrM 5540 255 36M * 0 0 CCCATGCATTCGTAATAATTTTCTTTACCGTCATAC ?:;I9DDIBEIIIIIIIIEIEIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:795:325/1 16 chrM 4249 255 36M * 0 0 AAACTTGGACTCACACCATTCCACTTCCCAGTACCC IIII56IIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:873:453/1 16 chrM 3188 255 36M * 0 0 ATCTCATACGAAGTAACTCTAGCAATCCCCCTACTC I=IIIIIII@IIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1285:1334/1 16 chrM 2389 255 36M * 0 0 GGGATAACAGCGCAATCCTATTCCAGACCCCATATC I8IIIII8IIIIIIIIIIGIIBIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:905:406/1 0 chrM 2844 255 36M * 0 0 GAAAAGTGGTAGGCTATATGCAACTTCGCAAAGGAC IIIIIII""IIIIIIIIBIIIIBIIII=I@96D2*I NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1774:595/1 16 chrM 7398 255 36M * 0 0 CTTTGACTCCTACATGATCCCCACATCCCACCTAAA IEIIIIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1694:931/1 16 chrM 1503 255 36M * 0 0 TTACCTAAAAACTACTCAATTCTAATGCCAATTTAA III,HIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1092:1151/1 16 chrM 7564 255 36M * 0 0 GACGCTATCCCTGGGCGCCTAAATCAGCCAACTCTC 5III@6IIIII>III5IIIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:803:557/1 16 chrM 893 255 36M * 0 0 GCTTAATTGAATCAGGCCATGAAGCGCCCACACACC 604III?IIII<IIIIIIIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1315:1200/1 16 chrM 14035 255 36M * 0 0 ATAGGTGAAGGTTTTGAAGAAAACCCCCCAAAACTA I;IIIIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:765:358/1 16 chrM 11086 255 36M * 0 0 AGCCCTAGTAATCGTAGCCGTCCTCATCCAAACACC I1'=D0I@4IIAIIDIIIIIIIIIIII""IIIIIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1425:1167/1 16 chrM 3841 255 36M * 0 0 TGCTACCGAATTACACCATGTCCTACACCTAAGGTC GI+II%(IIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1775:635/1 16 chrM 8752 255 36M * 0 0 CTTTAACTCAACCTTACTTCTAGCTATCCGGCTATT IBIIIIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1433:749/1 16 chrM 10784 255 36M * 0 0 TTCCTATGACTAGCATGTATAATAGCACCCATAGTC E9I/BIIIICIIIIIFIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:798:247/1 16 chrM 13560 255 36M * 0 0 TCACCCTTACCCTAAGCATACTACTTTCCAATCTCC IIIIIIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1249:1238/1 16 chrM 7971 255 36M * 0 0 GAAAATCTATTCGCCTCTTTCGCTACCCCAACAATA @IIII7IIIDCIIIIBIIIIIIIIIII""IIIIIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1043:1104/1 16 chrM 9689 255 36M * 0 0 ATTCGACTTAGAAATTGCCCTCCTATTCCCCCTTCC ,CIII.II0I<IFI;IIIIIDIIIIII""IIII&II NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1655:1058/1 0 chrM 6591 255 36M * 0 0 AAAAATCGGCTTTACAATTATATTCGTAGGGGTAAA IIIIIII""IIIIIIIIIIIIIIIIFIII6AIIIGI NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1764:1403/1 16 chrM 14239 255 36M * 0 0 TTTATTGACCTACCAGCCCCCTCAAACCCTTCATCA IIIIIBIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1632:34/1 0 chrM 11721 255 36M * 0 0 AGGATAGGGGCTATCCGTTGGTCTTAGGAACCAAAA IIIIIII""IIIIIIIIIIIIIIIIIIIFIIIH:6I NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1367:1208/1 0 chrM 3678 255 36M * 0 0 GACACGTGGCACTTCCAATCATACTATCCAGCATCC IIIIIII""IIIIIIII:IIII8IIEIIII-IAIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1778:108/1 0 chrM 11457 255 36M * 0 0 AATACACGGACCATATCAACAGCATTAAACCTTCAT IIIIIII""IIIIIIIIIIIIIIIIIIIIIIIII@I NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1152:1115/1 16 chrM 10902 255 36M * 0 0 CCATTCTACTAAAACTAGGAGGCTACGCCATACTAC 'AIII7=IIDIII86.IIIIIIIIIII""IIIIII7 NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1165:1121/1 16 chrM 9810 255 36M * 0 0 GAATGAACCCAAAAAGGACTAGAATGACCTGAGTAT AIIIIII?22IIIIIIIIIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1500:55/1 16 chrM 10669 255 36M * 0 0 AGGTTCCCTCCCACTCTTAGTTGCACTCCTCTCTAT IIII<IIICIIIIIIIFIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1142:1196/1 0 chrM 15275 255 36M * 0 0 TCATTTTGGTACCACTCGCAAGCACCATCGAAAACA IIIIIII""IIIIIIIIIIIIIIIII@I=I79I6II NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1693:757/1 0 chrM 7018 255 36M * 0 0 AAATTATGGGTTAAACCCCTATATACCTCTATGGCC IIIIIII""IIIIIIIIIIIIIBIDIIIII0II-IG NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1364:1053/1 16 chrM 4697 255 36M * 0 0 TCCCCCCACTATCAGGATTCATACCCACCTGAATAA 5=4&G13I.?IIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1450:1443/1 0 chrM 9111 255 36M * 0 0 GTAAAAAGGTGCTCCAAGGCCTATTCATCACAATTT IIIIIII""IIIIIIIIIIIIIIIIIIIIEIB8III NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1701:1298/1 16 chrM 14463 255 36M * 0 0 CTGCCTCTTCATTCACGTAGGACGCGGCCTCTACTA IGIII;II8IIIIIIIIIIIIIIIIII""IIIIIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1729:856/1 0 chrM 10731 255 36M * 0 0 TCCTATTGGTTCAATACTGAAACCAAGCACTACCCG IIIIIII""IIIIIIIIIIIIIIIIFIIIII@IIIH NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1748:933/1 0 chrM 6379 255 36M * 0 0 TGAGCTCGGGGCTTCATCTTCTTATTCACAGTAGGA IIIIIII""IIIIIICIIIIIIIIIIIIII@II5@F NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1662:1296/1 0 chrM 96 255 36M * 0 0 TTAGTTAGGAATAGAATTACACATGCAAGTATCCGC IIIIIII""IIIIIIIIIIIIIHIIIEI5IIIII4I NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1393:345/1 16 chrM 1499 255 36M * 0 0 AAATTTACCTAAAAACTACTCAATTCTCCTGTAAAT IIIIEII+EIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:943:780/1 16 chrM 10597 255 36M * 0 0 TATCATCACCCGCTGAGGCAACCAAACCCAACGCCT HI<II;IIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1639:968/1 0 chrM 4742 255 36M * 0 0 AGCTCACGGAAAATAGCAGCATCATCCTCCCCACAC IIIIIII""IIIIIIIIIIIIIIIIIIIIIII8I:I NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:589:1114/1 16 chrM 6809 255 36M * 0 0 CTACAGTAGAATTAACCTCAACTAATCCCGAATGAC )II&IIIIIIIIIII1IIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:763:408/1 0 chrM 10391 255 36M * 0 0 GCCCCACGGCTGGTGTTGACAACATGACTACTGCCA IIIIIII""IIIIIIIIIIIIII=I*5II<II'II( NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:950:352/1 0 chrM 6695 255 36M * 0 0 ACGCATAGGCAACATGAAATACCATCTCATCCATAG IIIIIII""IIIIIIIIIIIIIIIIIIIFIIIIIGI NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1164:103/1 0 chrM 1159 255 36M * 0 0 TTAACTAGGACATTCACCAAACCATTAAAGTATAGG IIIIIII""IIIIIIIIIIIAII=IIB58I=<I;II NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1078:1202/1 16 chrM 492 255 36M * 0 0 AAACTGGGATTAGATACCCCACTATGCCCAGCCCTA IIIEIIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1617:490/1 0 chrM 12295 255 36M * 0 0 AATCCTTGGTAACCGCATCGGGGATATCGGCTTCAT IIIIIII""IIIIIIIIIIIIIIIIIIIIIIII:(D NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:843:504/1 0 chrM 6703 255 36M * 0 0 ACAACATGGAATACCATCTCATCCATAGGATCTTTT IIIIIII""IIIIIIIIAIIIIII>IIHI=IIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:942:318/1 0 chrM 3080 255 36M * 0 0 TTCATACGGGCCATGTCCAGCCTAGCTGTCTACTCA IIIIIII""IIIIIIIIIIHIIIIIIIIIII<IIIF NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1719:283/1 16 chrM 2261 255 36M * 0 0 CAGCAATTTCGGTTGGGGTGACCTCGGCCAACAAAA IIIIGI2=3IIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1517:961/1 0 chrM 9916 255 36M * 0 0 TGTCACTGGTCCATATTAATATCTTCCTAGCATTCA IIIIIII""IIIII=III9I@IIIIIIIBIIEIII> NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1482:345/1 16 chrM 8764 255 36M * 0 0 CTTACTTCTAGCTATAGGGCTATTAACCCACATCCT IAAIIHAI?IIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1697:533/1 16 chrM 8450 255 36M * 0 0 TAACATTACCGCCGGACACCTCCTAATCCACCTCAT IIIII>1IIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1159:1225/1 0 chrM 8346 255 36M * 0 0 TTTCTACGGCAAGGGACGCCCATTTTCCTCATCCCC IIIIIII""IIIII=I7IIII3IIIIIIII/ID@II NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:982:332/1 16 chrM 6016 255 36M * 0 0 TTCTTCGACCCCGCAGGAGGAGGGGATCCAATCCTT @7I<?IIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:0
+HWI-EAS91_1_30788AAXX:1:1:1073:668/1 0 chrM 3502 255 36M * 0 0 AGCATTTGGCAACCCCTACCTGCCAGAACTCTACTC IIIIIII""IIIIIIIIIIIIIII>I4@IIIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1386:1163/1 0 chrM 1616 255 36M * 0 0 AGTTGGCGGAAAAGCAGCCATCAATTAAGAAAGCGT IIIIIII""IIIIIIIIIICII=GIIIII@IGIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1196:344/1 16 chrM 5155 255 36M * 0 0 GTTAACAGCTAAATACCCTAATCAACTCCCTTCAAT 7IIIIIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1551:1269/1 0 chrM 11375 255 36M * 0 0 ATTACCAGGATCCTAATAGGAGCCAATATCACCATC IIIIIII""IIIIIIIIGIIIIIIIIIIII@II4II NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:928:468/1 0 chrM 1192 255 36M * 0 0 AGGAGATGGAAATTTTAACTTGGCGCTATAGAGAAA IIIIIII""IIIIIIIIIIIIIIIIIIIIC<IHIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:763:835/1 16 chrM 8908 255 36M * 0 0 CTCAGAAGTCTTCTTCTTCTCTGGCTTCCTCTGAGC I:IIDIIIIII<IFIIFEIHIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1363:330/1 16 chrM 10891 255 36M * 0 0 AGTGCTAGCAGCCATTCTACTAAAACTCCGAGGCTA 57-C5,I5IIIIIIIIII@IIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1001:837/1 0 chrM 10991 255 36M * 0 0 ATACTATGGCTGTGAGGAATAATCATAACTAGTTCC IIIIIII""IIIII2IIIIIIIIIIIFIII:IIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1189:616/1 0 chrM 3753 255 36M * 0 0 TTTGATAGGGTAAAACATAGAGGCTCAAACCCTCTT IIIIIII""IIIIIIIIIIIIIIIIICIIIIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1212:634/1 0 chrM 6053 255 36M * 0 0 ATCAACAGGTATTCTGATTCTTCGGACACCCCGAAG IIIIIII""IIIIIIIIIIIIIIIIIIAIIIIF(.I NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1271:421/1 0 chrM 8630 255 36M * 0 0 ACACGACGGCACCTAATGACCCACCAAACCCACGCT IIIIIII""IIIIIIIIIIIIIIII>C:IIIDI<II NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1582:328/1 0 chrM 7752 255 36M * 0 0 AGCATTAGGCTTTTAAGTTAAAGATTGAGGGTTCAA IIIIIII""IIIIII,IIIB;BI<IIIIII+IIIBI NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1133:687/1 16 chrM 7122 255 36M * 0 0 CCACACACTAATAATCGTATTCCTAATCCGCTCTCT III4IIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1231:662/1 0 chrM 13783 255 36M * 0 0 TCACTCTGGAGAACATATAAAACCAACATAACCTCC IIIIIII""IIIIIIIIIIIIIIIIII?ICCIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1693:1140/1 16 chrM 4932 255 36M * 0 0 CTCCCTACTCCTCCCCCTAACCCCCATCCTATCAAT I-AII7II5II:IIIIIIIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:867:279/1 16 chrM 5309 255 36M * 0 0 TCCAACCCCTGTCTTTAGATTTACAGTCCAATGCTT IIIIIIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1095:605/1 0 chrM 23 255 36M * 0 0 AAGCAAGGGACTGAAAATGCCTAGATGAGTATTCTT IIIIIII""IIIIIIIIIIIIIIIEIIIIIIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1650:1185/1 0 chrM 14956 255 36M * 0 0 ACCCCAGGGAACCCTCTCAGCACTCCCCCTCATATT IIIIIII""IIIIIIIIIIII6IIIIIIIII5I-II NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:799:192/1 16 chrM 8421 255 36M * 0 0 CCTGTAGCCCTAGCCGTGCGGCTAACCCCTAACATT II%::I<IIIIIEIII8IIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1082:719/1 16 chrM 7191 255 36M * 0 0 TAAATTAACCCATACCAGCACCATAGACCCTCAAGA <III0EII3+3I29I>III8AIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1746:1180/1 16 chrM 12013 255 36M * 0 0 CCTAAGCTTCAAACTAGATTACTTCTCCCTAATTTT IIIIIIIIFIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:606:460/1 0 chrM 4552 255 36M * 0 0 TTAATTTGGATTATAATAACACTCACAATATTCATA IIIIIII""IIIIIIIIIIIIIIIIII?I6IIIII6 NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1059:362/1 16 chrM 7348 255 36M * 0 0 GGCCACCAATGATACTGAAGCTACGAGCCTACCGAT II/<)2IIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1483:1161/1 16 chrM 15080 255 36M * 0 0 TCCTGATCCTAGCACTCATCCCCACCCCCCACATAT HIIIIIFIIAIHIIIIIIIIIIIIIII""IIIIIII NM:i:1
+HWI-EAS91_1_30788AAXX:1:1:1273:600/1 16 chrM 13855 255 36M * 0 0 GTATTAGACACCCATACCTCAGGATACCCCTCAGTA IIIIIIIIIIIIIIIIIIIIIIIIIII""IIIIIII NM:i:2
+HWI-EAS91_1_30788AAXX:1:1:1190:1283/1 16 chrM 15338 255 36M * 0 0 TATATCGCACATTACCCTGGTCTTGTACCCCAGAAA EIII?-IIIIIAIIIIIIIIIIIIIII""IIIIIII NM:i:2
diff -r 137d93848139 -r 032aae80bbb0 test-data/perm_out3.fastqsanger
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/perm_out3.fastqsanger Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,4 @@
+@HWI-EAS91_1_30788AAXX:1:1:1218:141/1
+GTTAAATATTGGGAGTGGGGGGGGGGGGGAGTTTTG
++
+IIIIIII""IIIIIIIIIIIIIIIIIIII1IIII+I
diff -r 137d93848139 -r 032aae80bbb0 test-data/perm_out4.sam
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/perm_out4.sam Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,53 @@
+1831_573_1219 16 phiX 2201 255 35M * 0 0 TTGCAGTGGAATAGTCAGGTTAAATTTAATGTGAC -A@EE>NVA7FQZSC8GRJK<EVGC<==39BP\__ NM:i:0 CS:Z:G1211130300300301021212330201121310 CQ:Z:@@@=4/+)5)408?'665>*/5?<61';<3,:,5-
+1831_573_1242 0 phiX 647 255 35M * 0 0 TCATCCCGTCAACATTCAAACGGCCTGTCTCATCA [[WSWNGS]US^_\WQW]WL?CEMUIMSTSJ*1U; NM:i:1 CS:Z:T2132003121011302100130302112221121 CQ:Z:>>>::>17=A5?@@=;7A=;2.60>82<8=74+;;
+1831_573_1647 16 phiX 1117 255 35M * 0 0 CGACTCCTTCGAGATGGACGCCGTTGGCGCTCTCC 4NIKVOPIBACPWOQ^^WSZ_\Y][Y]]^_[[_^[ NM:i:0 CS:Z:G0222233301013033120132223202022123 CQ:Z:=?@@<@@???;A==@@;9?@?3=;6.4/;6:=/;4
+1831_573_1684 16 phiX 3695 255 35M * 0 0 TAGTGCTGAGGTTGACTTAGTTCATCAGCAAACGC ?\VWTLTTUZVV[_^\Y[_ZWUW]^_XKR[SQJNT NM:i:0 CS:Z:G3310013212312012302121010221231123 CQ:Z:7>1:8<@39@@??9=;@@<>?@@<;<?7>76?9>?
+1831_574_109 0 phiX 3728 255 35M * 0 0 GCAGAATCAGCGGTATGGCTCTTCTCATATTGGCG 07651#4817>@65<2:84716=788<1995.7?6 NM:i:1 CS:Z:G3122332123301331032220222133301033 CQ:Z:07651#4817>@65<2:84716=788<1995.7?6
+1831_574_148 16 phiX 2368 255 35M * 0 0 GTAGGTTTTCTGCTTAGGAGTTTAATCATGTTTCA ;NEGQTTTJAMH9B@@PHHSQUJML=LPMUMPJGV NM:i:0 CS:Z:T1200113123030012202302312200010231 CQ:Z:<;->3;;3>//>0;;7=,=4-4/+>029<9<624;
+1831_574_575 16 phiX 4005 255 35M * 0 0 GAATGTCACGCTGATTATTTTGACTTTGAGCGTAT <JC>=PXK;2BW[T;CXA"&?U\\__WU]^[Z]^U NM:i:1 CS:Z:A3313322100212102033032123311211302 CQ:Z:7?@>=?@>8@@@=@6*%&<='5@<<',0<=4*5/<
+1831_574_592 16 phiX 2949 255 35M * 0 0 GGCGGTATTGCTTCTGCTCTTGCTGGTGGCGCCAT 2BBLI?LTSPIFFIMD:?MRLECQ][G7DI9=A?I NM:i:0 CS:Z:A3103330110123102223122023103310330 CQ:Z:73-5)19,,<@>4067<2.-864346;9<1/;212
+1831_574_824 16 phiX 3996 255 35M * 0 0 TACCCTTCTGAATGTCACGCTGATTATTTTGACTT ?VRCD\ZK5?ZWJ7D]WP=AVQRHH[J64CUZ__Z NM:i:0 CS:Z:A0212100033032123311211302122020013 CQ:Z:;@@@;;),+@<-<7;<&89??&29?<$2:A<);8?
+1831_574_1194 0 phiX 1135 255 35M * 0 0 CGCCGTTGGCGCTCTCCGTCTTTCTCCATTGCGTC NRXFJ]WXXX^^PP_\\RO\]]SR^_XLRUKC:A6 NM:i:0 CS:Z:C3303101033322220312200222013013312 CQ:Z::5>;,??9@9@?@1@@=@3=@>@4?@@94?75/,6
+1831_574_1591 16 phiX 5117 255 35M * 0 0 GAAGCTGTTCAGAATCAGAATGAGCCGCAACTTCG 9CB??UTRA!7YIJXQUSUEEZWX[WNV^ZY^\XY NM:i:1 CS:Z:C3202101330322130221230222201123202 CQ:Z:==<A><?@78@<=;@&@6>8:?,><&&<7>8(8+9
+1831_574_1624 16 phiX 4057 255 35M * 0 0 CTATTGAGGCTTGTGGCATTTCTACTCTTTCTCAA )5?C6:LSC5DHJA6@FNI54;D?137CB3-3CQ: NM:i:1 CS:Z:C0122200222132200313011102302210332 CQ:Z:#<6.&(,7-+))7..'/;43.)927.(<85&13-)
+1831_574_1903 0 phiX 5208 255 35M * 0 0 AAGCTGGGTTACGACGCGACGCCGTTCAACCAGAC TMJT^]XQV__WQY]UUYXUVVV]QNSGOOJOPI/ NM:i:1 CS:Z:A0232100103132133321330310210101221 CQ:Z:<956?@>;7@@@8:@>8><=9>9>@2=71?1:6;/
+1831_575_54 16 phiX 3586 255 35M * 0 0 TTTCCAAGCAACAGCAGGTTTCCGAGATTATGCGC 8ST4/MPK:7HONA7BTVCHY[^YRSVLMSMU_\Z NM:i:1 CS:Z:G3331330322230200102132110132013200 CQ:Z:>=@@68<2;<8;?@<>+9>7,,6972&57:4&9<8
+1831_575_197 16 phiX 4860 255 35M * 0 0 AGCTTGCAAAATACGTGGCCTTATGGTTACAGTAT ,=.<SADLUZH>67?JXRIFRTTVOQZ`RHIRXF8 NM:i:1 CS:Z:A3312113010133020301131330001310032 CQ:Z:,-:?463@A:88?6?437<=.2&1.;@67.4@%2,
+1831_575_444 16 phiX 5097 255 35M * 0 0 AAGCTGTCGCTACTTCCCAAGAAGCTGTTCAGAAT >XO0/QUSCCY]]LJWZ^[W\ZVGF[^[JIVZ_QM NM:i:1 CS:Z:A3022120112320220100202132332113320 CQ:Z:<2@@;<.=?@<+=:A<<@?<</>@><(<8>4&5;>
+1831_575_622 0 phiX 2484 255 35M * 0 0 TGTTTTACAGACACCTAAAGCTACATCGTCAACGT `XY^\_^Z[^ZY[Z[WNQOLO?DOBJB@M>EQ$!+ NM:i:1 CS:Z:T1100031122111023002323113231210111 CQ:Z:@A8B=@@?<@?<>>=?96<497)<4/<':4+;74+
+1831_575_938 16 phiX 1132 255 35M * 0 0 GGACGCCGTTGGCGCTCTCCGTCTTTCTCCATTGC 7N@<NSTMKOQVHAONKTXVTUWZXRTWZ^XY_YT NM:i:0 CS:Z:G3103102220022130222233301013033120 CQ:Z:;:@@:?@;=8;>=;;:=<93<4.;<6:2<9;4)87
+1831_575_970 16 phiX 231 255 35M * 0 0 TGAGGAGAAGTGGCTTAATATGCTTGGCACGTTCG 5PXYYNKY^\[VW[\]XY[VUTRY_[Y]_ZW\\\\ NM:i:0 CS:Z:C3201311301023133303023011202220221 CQ:Z:=@=@=;@@><@@:9<:=?;>@=?9>>?@:2===<5
+1831_575_1138 0 phiX 1912 255 35M * 0 0 CGCTAAAGGTGCTTTGACTTATACCGATATTGCTG XLJ7-IKH@IWNTJJSPUKO]UU[UTNQKDQDCJ1 NM:i:1 CS:Z:C3320002011320012120333103233301321 CQ:Z:<=0;%1936+?96?,?5<:2>@6@<:;4>.7;*:1
+1831_575_1180 0 phiX 4168 255 35M * 0 0 AGGGCGTTGAGTTCGATAATGGTGATATGTATGTT J]_RSE@HENVYKKOINRQOV_]VLPSRLHMBJT7 NM:i:0 CS:Z:A2003310122102323303101123331133110 CQ:Z:/<B>5?':/78?;1;55:997@@>94=7<186->7
+1831_575_1302 16 phiX 5201 255 35M * 0 0 ACTTACCAAGCTGGGTTACGACGCGACGCCGTTCA *57==/2(a)B6+-8?,!,.(.A=+*7>2,841LOB6 NM:i:1 CS:Z:T1201303312333123130200123201013021 CQ:Z:&12>/#2'&-2&%'7+$%*#$/1(&&12/$,2,,*
+1831_575_1310 16 phiX 1611 255 35M * 0 0 ATACTGACCAAGAACGTGATTACTTCATGCAGCGT ;QC!!DXV[[Z]UEN_YYPAMSVGGXQWQHKSYWQ NM:i:1 CS:Z:A1332131312021303211310220101211133 CQ:Z:6<<>663?99@(@7=11@:@@/7??<@<;>'3-7;
+1831_575_1321 0 phiX 2663 255 35M * 0 0 GTTTGGTTCGCTTTGAGTCTTCTTCGGTTCCGACT 0;B@?@>=A@<A?>@>?6>@5>'4@9?)7@7?/89 NM:i:1 CS:Z:G1001010233200122122022023000203212 CQ:Z:0;B@?@>=A@<A?>@>?6>@5>'4@9?)7@7?/89
+1831_575_1373 16 phiX 4220 255 35M * 0 0 TTCTGACGTTCGTGATGAGTTTGTATCTGTTACTG <M=?PSE>BHTUGKZZZYY]]VU]^[TUWZ]]__` NM:i:0 CS:Z:C1213011223311001221321132013121220 CQ:Z:A@@@>@;=9<@??7@>@:@;@;17?630/7=4,2<
+1831_575_1436 0 phiX 3459 255 35M * 0 0 GCTGGCATTCAGTCGGCGACTTCACGCCAGAATAC ;Q\WMKB<NE=OUWRFO[V?=PUXC6LZN77CHQ< NM:i:0 CS:Z:G3210313021212303321202113301220331 CQ:Z:*2@=;39*3<*4<:>52>>9'7:<='0=>1'136<
+1831_575_1442 0 phiX 3316 255 35M * 0 0 ATGCTTGGGAGCGTGCTGGTGCTGATGCTTCCTCT URLSZUTH;RWB;GTWUNS\^ZJP^SGKFMSB?N> NM:i:0 CS:Z:A3132010022331132101132123132020222 CQ:Z:8>58<?7>+1B6-/9<<:5?>A:1@?539.@4/1>
+1831_575_1500 16 phiX 4268 255 35M * 0 0 TGAATTGGCACAATGCTACAATGTGCTCCCCCAAC 9V\UBGWPPNT^YUQQQQUQLPRUQQSV^`_^_^P NM:i:0 CS:Z:G1010000223111301132313011130103021 CQ:Z:2?@@?A@?8<6<:985=99999==B3<5<<,7?>9
+1831_575_1535 16 phiX 1224 255 35M * 0 0 GTCACGTTTATGGTGAACAGTGGATTAAGTTCATG <PRXVWQOTQUST^[\WMT^]Z\_`ZX`a_[[\^a NM:i:0 CS:Z:C1312012030320112110211013300131121 CQ:Z:@B=@<@@B?:A@@=>@?68@=?@5?7;:6<<;>5<
+1831_576_86 16 phiX 2980 255 35M * 0 0 CCATGTCTAAATTGTTTGGAGGCGGTCAAAAAGCC /@:AJ7;AEH712=EJKF>=LHJP?587CK:9AHN NM:i:0 CS:Z:G0320000121033022010011030032211310 CQ:Z:7811)2:*.++5</:3+43924*))/:,6&29)2/
+1831_576_266 16 phiX 279 255 35M * 0 0 TAGATATGAGTCACATTTTGTTCATGGTAGAGATT 6;*<NGC9=TWH2<P=+#.P]IDV]\SUZVW[^_[ NM:i:1 CS:Z:A0322223101312011300311121221333223 CQ:Z:<@@?=;<?7=@>9,>@1$&&89$/:>7'3178%&6
+1831_576_603 16 phiX 1575 255 35M * 0 0 ACATTATGGGTCTGCAAGCTGCTTATGCTAATTTG 8PMKX]PIR\`ZJO]_[HGZ_XOW\Z]LPb\RIS_ NM:i:0 CS:Z:C1003032313302312320131221001330311 CQ:Z:@@46=@C.??<A79@@;-<@@>29B?>55<B7598
+1831_576_655 16 phiX 3291 255 35M * 0 0 CTTGCTGCTGCATTTCCTGAGCTTAATGCTTGGGA %06ESGLVW^JATXW73V][JCT_]G?Q[_XYa`\ NM:i:1 CS:Z:T2001023130302322122200313123123102 CQ:Z:=@AA9@@<6*>@@5/<@>9'=;>7+@?9>/9;+,%
+1831_576_815 0 phiX 5185 255 35M * 0 0 CGGAGTGCTTAATCCAACTTACCAAGCTGGGTTAC NP]^WV]^`^ZZWX^_]][ZWKFQPA?FSTQPKD. NM:i:0 CS:Z:C3022113203032010120310102321001031 CQ:Z:<3>@?9>@?B=>=;>A???=>:25=4.25?6<57.
+1831_576_882 0 phiX 5052 255 35M * 0 0 GCTAAAGGTCTAGGAGCTAAAGAATGGAACAACTC PGTVV^YMQ]]VNT]ZWXQ;7LYVPKIRXTTLNU8 NM:i:0 CS:Z:G3230020122320223230022031020110122 CQ:Z:?26?8?@:4>@>96??<<=5'1<>9846=<9<1>8
+1831_576_898 0 phiX 517 255 35M * 0 0 GGATTGCTACTGACCGCTCTCGTGCTCGTCGCTGC \_bTPa`\ZY]WOMW`YYLL]VNS^]SSLES][J. NM:i:0 CS:Z:G0230132312121033222231132231233213 CQ:Z:=@@C2?C>?<>@886B?;?.??87=B<8<15??=.
+1831_576_923 0 phiX 1049 255 35M * 0 0 CATCTTGGCTTCCTTGCTGGTCAGATTGGTCGTCT LCBEMWHCNQTTNSXUNLTVUKAKV[TMRRURIJ7 NM:i:0 CS:Z:C1322010320202013210121223010123122 CQ:Z:76.51=;.699<96>;;49<;;11;<@59:9=647
+1831_576_1131 16 phiX 4060 255 35M * 0 0 TTGAGGCTTGTGGCATTTCTACTCTTTCTCAATCC <MJBDSMK>BWZI2FPGKEOI<PWVQ?KNN>>SC@ NM:i:1 CS:Z:G0233122200222132200313011102302210 CQ:Z:104@#@/@,4>9?2+?1571@',>=;(759;*92<
+1831_576_1289 16 phiX 1898 255 35M * 0 0 ATTCAGTACCTTAACGCTAAAGGTGCTTTGACTTA 0DBK[ONJ9GWSQ@;@CINYOHKU_VQZ_aRNYTX NM:i:0 CS:Z:T3021210023110200323310302013121203 CQ:Z:@9<>1B@@;7@@663==28,5':8<<,.=2>>.50
+1831_576_1367 0 phiX 1838 255 35M * 0 0 GAGCATGGCACTATGTTTACTCTTGCGCTTGTTCG ]^_[Y]_\\[[[X\]]^]WRXZXZZXSROMXVNN7 NM:i:0 CS:Z:G2231310311233110031222013332011023 CQ:Z:??@@<>@@=@<@<=@>@??9:?<=>=<8;59@787
+1831_576_1416 0 phiX 4521 255 35M * 0 0 ATTCAGCGTTTGATGAATGCAATGCGACAGGCTCA 1AYL;>>I[F4JG@CBSURK<KUQB,AZRJCFPW@ NM:i:0 CS:Z:A3021233100123120313103133211203221 CQ:Z:+';?..1.<@'.=+6.5?7<0-?7;(%=>56.98@
+1831_576_1605 0 phiX 5208 255 35M * 0 0 AAGCTGGGTTACGACGCGACGCCGTTCAACCAGAC Q[bb_]NKZYTVW[_XX^]]WTNQSA>BMPKNMB, NM:i:1 CS:Z:A0232100103132133321330310210101221 CQ:Z:6<@C@@>1;@:;<<@@9@???9<3?5-21=4877,
+1831_576_1729 16 phiX 5193 255 35M * 0 0 TTAATCCAACTTACCAAGCTGGGTTACGACGCGAC >ZL:FYXSQV]^ZSW^\TU\\[YKM_^][ZZZ^^^ NM:i:0 CS:Z:G1233312313010012320101302101023030 CQ:Z:@?@?<?<@>A?/==?>?7>?@8<?@>99;><+0=>
+1831_576_1982 16 phiX 5023 255 35M * 0 0 TTAACAAAAAGTCAGATATGGACCTTGCTGCTAAA @JFRDK\_\MQUU_UQST^ZX^[\XX[Y^LK^^_K NM:i:0 CS:Z:T0032312310201201333221212000011030 CQ:Z:,@@?@,A><@9@=?@9B=8<6@@6@2<A?>.7<+@
+1831_577_133 16 phiX 1713 255 35M * 0 0 TCATGCGCTCTAATCTCTGGGCATCTGGCTATGAT )&/8<NA=EPP;FEI]YY]_VFO]HDXOM[\[]SB NM:i:1 CS:Z:A3213323012231300122223032223331322 CQ:Z:/4@>>?=1?:+>@07@@><>@*<+1@15)96'2$)
+1831_577_281 16 phiX 4576 255 35M * 0 0 TTTTTGACACTCTCACGTTGGCTGACGACCGATTA ?XWE?WXSYURPOYWMJ@I[[RQ]VTXW\WR\[FC NM:i:0 CS:Z:T3032301231212301013112222111210000 CQ:Z:7-:B;8@=;>7@>4?=?+659?;5<7?;9@8(>:?
+1831_577_322 16 phiX 457 255 35M * 0 0 CTTCTGCCGTTTTGGATTTAACCGAAGATGATTTC /E?@MNMJTZM79LI;A5:H@IIHIFLPEDHQXWU NM:i:1 CS:Z:G2003213220230103303201000130312202 CQ:Z:9=;>4506;255464-<#7+194&2<?65968)7/
+1831_577_362 0 phiX 2152 255 35M * 0 0 ACTTATTCGCCACCATGATTATGACCAGTGTTTCC GS`XY^^[ZWMLTH>LEDHI?,3EQDAQPB5%(@6 NM:i:1 CS:Z:A1203302330110131230331210121110220 CQ:Z:62B?:@?@<?958=,3:,90:&'-99,6<5.($+6
+1831_577_382 16 phiX 4685 255 35M * 0 0 GGGTCGCAAGGCTAATGATTCACACGCCGACTGCT *>C@NMMI2333IUD=>FNE;1=LSE19JHEKMJB NM:i:0 CS:Z:A2312123033111120321303230201332100 CQ:Z:12957/:1))=76(*24;,3+:<.&.&-=1=2/5*
+1831_577_488 16 phiX 4947 255 35M * 0 0 TGGCCTGTTGATGCTAAAGGTGAGCCGCTTAAAGC @SEJPW]VUWXQLSPSOMXXXTVZQKJJSG59EKM NM:i:0 CS:Z:G3200302330322110200323132101120301 CQ:Z::48.,*>6566<?8=<=<2>6;94>;=9>@8924@
+1831_577_545 0 phiX 1710 255 35M * 0 0 TTGTCATGCGCTCTAATCTCTGGGCATCTGGCTAT [YWTWZY\_\OKHP[[WTY\VLDDFLTMBBMPOD- NM:i:0 CS:Z:T0112131333222303222210031322103233 CQ:Z:>><<9?<>?A<481@<@8==@76/61<95.5988-
+1831_577_637 0 phiX 4246 255 35M * 0 0 CTGTTACTGAGAAGTTAATGGATGAATTGGCACAA D8?2<WFB@DQC1-<OMOLMTSRM=6?G>,EYN?, NM:i:1 CS:Z:C2113312122202103031023120301031110 CQ:Z:8-,4#>:-6+:8,&(5;3=0>7=68&1/9&'?;4,
+1831_577_692 0 phiX 3904 255 35M * 0 0 TGTCTAGGAAATAACCGTCAGGATTGACACCCTCC aMK^^aIG`_^JJ_]]JI[Z]FCX[[XXTU[UKP< NM:i:0 CS:Z:T1122320200330103121202301211100220 CQ:Z:@B,@?@B(@A?@+@@>@+?=>@'=<@<=<9=?75<
diff -r 137d93848139 -r 032aae80bbb0 test-data/perm_out5.sam
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/perm_out5.sam Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,9 @@
+865_1106_310/2 16 chrM 16577 255 50M * 0 0 ACATGCTACTTTAATCAATAAAATTTCCATAGACAGGTATCCCCCTAGAT ;SSUV\VJNWVVWRX\X[[Z\Z[ZW\^XUV\_]]][WWVVYZ^^^_[\_^ NM:i:2 CS:Z:A3223200002331021122331020030003301230300213231311 CQ:Z:?@@=?A>A>==:=;=????A<;;>A<<?=>?<@<=@9:>9>:56A<;;9;
+869_1532_1255/1 113 chrM 3727 255 50M = 1979 1748 AGAAATATGTCTGACAAAAGAGTTACTTTGATAGAGTAAAACATAGAGGT ;RPTUVYQPSUSNSRTXTSVYVRVXWYUSVY_UOXZWRQRSUY[\^XQRW NM:i:3 CS:Z:A1022233110003122233210021301222000112122113330022 CQ:Z:=;8:?@=?;;9:8;=>;5A?;<8><<=:9><;9<=8;96>8<5==:<98;
+869_1532_1255/1 177 chrM 1979 255 50M = 3727 1748 GTATTAGAGGCACTGCCTGCCCAGTGACATCTGTTTAAACGGCCGCGGTA <QNVYYSUWQPSXUTTRVTT^ZUWXXZ[]\ZYY[YXVVVX][XUVYYZ]` NM:i:0 CS:Z:T3103330303100300112231121121003120312113022230331 CQ:Z:@A=><>9=<@>;<;<==?;?<A=?<=<<:A>7>9:;:<=7:8@6><>96<
+889_1337_1562/2 16 chrM 1951 255 50M * 0 0 ACCAAAAACATCACCTCTAGCATTCCCAGTATTAGAGGCACTGCCTGCCC 8PVXVWTRRQPRWQRZ[VORVVWWUOQZ[YUY]WZ^]`^\\]`a^]`[[] NM:i:2 CS:Z:G0031203121130222303312100203132322201123110000101 CQ:Z:=A;A@>AA@>?>A@>A:>@:<>>=5;;=;<;88?=>5=;899:9<<;>98
+891_252_1590/1 65 chrM 15434 255 50M = 97 15337 ACCCAAAGCTGAAATTCTACTTAAACTATTCCTTGATTTCCTCCCCTAAA \^VTXUY[[^YY[[XX]^_^VUWZ]]]\ZYY[]]_[VWWRQPTWTTNQX< NM:i:2 CS:Z:A1001002321200302231203001233020201230020220002300 CQ:Z:<A>9<=9A;A><>>>;>@?A>9=;@>@>?<><@>@@<;=;8:7>:;:5=<
+891_252_1590/1 129 chrM 97 255 50M = 15434 15337 TAGTTATTAATAAAATTACACATGCAAGTATCCGCACCCCAGTGAGAATG RRVYXYUSTSSTWXXYUWVUTRPMQUZZVW\\YYXXZVUVW^YUTPSWY= NM:i:2 CS:Z:T3210330303300030311113131021332033110001211222031 CQ:Z:8;8?;><::;9;:>;><:>9=8;68:<?<;=@===<=>9=:>A9=89;==
+892_582_183/1 16 chrM 14089 255 50M * 0 0 AAAATAAACACAATATATGTCATCATTATTCCCACGTGGAATCTAACCAC 9QNLRTQOSWUSUVTXXTTXWVVUWYVRTWXVX[\\ZVWYWVXZXYWUYX NM:i:0 CS:Z:G1101032230201131100203303123121133333011110033000 CQ:Z:;><:><=>;<<>:=>?>>;<=;:9><<:=:>;:;>;:=9;;=799<7699
+932_1836_1806/1 113 chrM 3723 255 50M = 2434 1289 CATAAGAAATATGTCTGACAAAAGAGTTACTTTGATAGAGTAAAACATAG >RNSUWYYQQVSVXVZ[XZ^YUZ[XXXWSQZ^WUXXX\XVXVYYVVVVUT NM:i:2 CS:Z:C2331100031222332100213012220001121221133300220331 CQ:Z:;:<;<;<><;>9@=<=<:>A:8<<=<=?<:@?<=?<;>9;<6<><<::5>
+932_1836_1806/1 177 chrM 2434 255 50M = 3723 1289 GGTTTACGACCTCGATGTTGGATCAAGACATCCTAATGGTGCAACCGCTA @]ZZ[Z\_WU\XW\WY^Z\]]#!\^`_]\^^]]Z\`^^_\^aaa``_Z\a NM:i:1 CS:Z:T3233010131101303202311220122201011323220123130010 CQ:Z:AA<?A@AAAA>?A>A@=>@>A>??A@?>>@>@=>A9?>:?>8@@=>>=>@
diff -r 137d93848139 -r 032aae80bbb0 tool-data/perm_base_index.loc.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool-data/perm_base_index.loc.sample Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,27 @@
+#This is a sample file distributed with Galaxy that enables tools to
+#use a directory of PerM indexed sequences data files. You will need
+#to create these data files and then create a perm_base_index.loc file
+#similar to this one (store it in this directory) that points to
+#the directories in which those files are stored. The perm_base_index.loc
+#file has this format (white space characters are TAB characters):
+#
+#<build_seed_readlength> <file_base>
+#
+#Because each PerM index is built with a specific seed and a specific read
+#length, this needs to be specified so the user can choose the appropriate
+#one. So, for example, if you had phiX indexed with seed F3 and read length
+#50, and stored in /depot/data/galaxy/phiX/perm_index/,
+#then the perm_base_index.loc entry would look something like this:
+#
+#phiX_F3_50 /depot/data/galaxy/phiX/perm_index/phiX_base_F3_50.index
+#
+#and your /depot/data/galaxy/phiX/perm_index/ directory
+#would contain the file phiX_base_F3_50.index:
+#
+#Your perm_base_index.loc file should include an entry per line for each
+#index set you have stored. For example:
+#
+#phiX_F3_50 /depot/data/galaxy/phiX/perm_index/phiX_base_F3_50.index
+#phiX_F4_50 /depot/data/galaxy/phiX/perm_index/phiX_base_F3_50.index
+#hg19_F3_50 /depot/data/galaxy/hg19/perm_index/hg19_base_F3_50.index
+#hg19_F4_50 /depot/data/galaxy/hg19/perm_index/hg19_base_F3_50.index
diff -r 137d93848139 -r 032aae80bbb0 tool-data/perm_color_index.loc.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool-data/perm_color_index.loc.sample Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,27 @@
+#This is a sample file distributed with Galaxy that enables tools to
+#use a directory of PerM indexed sequences data files. You will need
+#to create these data files and then create a perm_color_index.loc file
+#similar to this one (store it in this directory) that points to
+#the directories in which those files are stored. The perm_color_index.loc
+#file has this format (white space characters are TAB characters):
+#
+#<build_seed_readlength> <file_base>
+#
+#Because each PerM index is built with a specific seed and a specific read
+#length, this needs to be specified so the user can choose the appropriate
+#one. So, for example, if you had phiX indexed with seed F3 and read length
+#50, and stored in /depot/data/galaxy/phiX/perm_index/,
+#then the perm_color_index.loc entry would look something like this:
+#
+#phiX_F3_50 /depot/data/galaxy/phiX/perm_index/phiX_color_F3_50.index
+#
+#and your /depot/data/galaxy/phiX/perm_index/ directory
+#would contain the file phiX_color_F3_50.index:
+#
+#Your perm_color_index.loc file should include an entry per line for each
+#index set you have stored. For example:
+#
+#phiX_F3_50 /depot/data/galaxy/phiX/perm_index/phiX_color_F3_50.index
+#phiX_F4_50 /depot/data/galaxy/phiX/perm_index/phiX_color_F3_50.index
+#hg19_F3_50 /depot/data/galaxy/hg19/perm_index/hg19_color_F3_50.index
+#hg19_F4_50 /depot/data/galaxy/hg19/perm_index/hg19_color_F3_50.index
diff -r 137d93848139 -r 032aae80bbb0 tool_conf.xml.sample
--- a/tool_conf.xml.sample Tue Mar 16 18:54:23 2010 -0400
+++ b/tool_conf.xml.sample Wed Mar 17 16:40:36 2010 -0400
@@ -60,7 +60,7 @@
<tool file="filters/joiner.xml" />
<tool file="filters/compare.xml"/>
<tool file="new_operations/subtract_query.xml"/>
- <tool file="stats/grouping.xml" />
+ <tool file="stats/grouping.xml" />
</section>
<section name="Convert Formats" id="convert">
<tool file="filters/axt_to_concat_fasta.xml" />
@@ -215,6 +215,7 @@
<tool file="sr_mapping/bwa_wrapper.xml" />
<tool file="metag_tools/megablast_wrapper.xml" />
<tool file="metag_tools/megablast_xml_parser.xml" />
+ <tool file="sr_mapping/PerM.xml" />
</section>
<section name="NGS: SAM Tools" id="samtools">
<tool file="samtools/sam_bitwise_flag_filter.xml" />
diff -r 137d93848139 -r 032aae80bbb0 tools/sr_mapping/PerM.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/sr_mapping/PerM.xml Wed Mar 17 16:40:36 2010 -0400
@@ -0,0 +1,368 @@
+<tool id="PerM" name="Map with PerM" version="1.0.0">
+ <description>for SOLiD and Illumina</description>
+ <!-- works with PerM version 0.2.6 -->
+ <command>
+PerM
+#if $s.sourceOfRef.refSource == "history":
+ $s.sourceOfRef.ref
+#else:
+ $s.sourceOfRef.index.value
+#end if
+#if $s.mate.singleOrPairs == "single":
+ $s.mate.reads
+#else:
+ -1 $s.mate.reads1 -2 $s.mate.reads2
+ -U $s.mate.upperbound
+ -L $s.mate.lowerbound
+ $s.mate.excludeAmbiguousPairs
+#end if
+#if $s.space == "color":
+ --readFormat "csfastq"
+#else:
+ --readFormat "fastq"
+#end if
+#if $int($str($valAlign)) >= 0:
+ -v $valAlign
+#end if
+#if $align.options == "full":
+ --seed $align.seed
+ -$align.alignments
+ #if $str($align.delimiter) != "None":
+ --delimiter $align.delimiter
+ #end if
+ -T $align.sTrimL
+ $align.includeReadsWN
+ $align.statsOnly
+ $align.ignoreQS
+#end if
+#if $str($bUnmappedRead) == "true" and $s.space == "color":
+ -u $unmappedReadOutCS
+#elif $str($bUnmappedRead) == "true" and $s.space == "base":
+ -u $unmappedReadOut
+#end if
+-o $output --outputFormat sam --noSamHeader | tr '\r' '\n' | tr -cd "[:print:]\t\n " | grep "Reads\|Sub0\|Pairs\|single" | sed 's/.*Reads:,//' | sed 's/\/.*dat,_ Sub0/Sub0/'
+ </command>
+ <inputs>
+ <conditional name="s">
+ <param name="space" label="Is your data color space (SOLiD) or base space (Illumina)?" type="select">
+ <option value="color">Color space</option>
+ <option value="base">Base space</option>
+ </param>
+ <when value="color">
+ <conditional name="sourceOfRef">
+ <param name="refSource" label="Will you provide your own reference file from the history or use a built-in index?" type="select">
+ <option value="indexed">Built-in index</option>
+ <option value="history">Fasta file from history</option>
+ </param>
+ <when value="indexed">
+ <param name="index" type="select" label="Select a reference genome (with seed and read length)" help="if your genome of interest is not listed - contact Galaxy team">
+ <options from_file="perm_color_index.loc">
+ <column name="value" index="1" />
+ <column name="name" index="0" />
+ </options>
+ </param>
+ </when>
+ <when value="history">
+ <param name="ref" format="fasta" type="data" label="Reference" />
+ </when>
+ </conditional>
+ <conditional name="mate">
+ <param name="singleOrPairs" label="Mate-paired?" type="select">
+ <option value="single">Single-end</option>
+ <option value="paired">Mate pairs</option>
+ </param>
+ <when value="single">
+ <param format="fastqcssanger" name="reads" type="data" label="Reads" />
+ </when>
+ <when value="paired">
+ <param name="reads1" format="fastqcssanger" label="Forward FASTQ file" type="data" />
+ <param name="reads2" format="fastqcssanger" label="Reverse FASTQ file" type="data" />
+ <param label="Upperbound of pairs separation (-U)" name="upperbound" type="integer" size="8" value="100000" />
+ <param label="Lowerbound of pairs separation (-L)" name="lowerbound" type="integer" size="8" value="0" />
+ <param label="Exclude ambiguous pairs (-e)" name="excludeAmbiguousPairs" type="boolean" checked="false" truevalue="-e" falsevalue="" />
+ </when>
+ </conditional>
+ </when>
+ <when value="base">
+ <conditional name="sourceOfRef">
+ <param name="refSource" label="Will you provide your own reference file from the history or use a built-in index?" type="select">
+ <option value="indexed">Built-in index</option>
+ <option value="history">Fasta file from history</option>
+ </param>
+ <when value="indexed">
+ <param name="index" type="select" label="Select a reference genome with seed and read length" help="if your genome of interest is not listed - contact Galaxy team">
+ <options from_file="perm_base_index.loc">
+ <column name="value" index="1" />
+ <column name="name" index="0" />
+ </options>
+ </param>
+ </when>
+ <when value="history">
+ <param name="ref" format="fasta" type="data" label="Reference" />
+ </when>
+ </conditional>
+ <conditional name="mate">
+ <param name="singleOrPairs" label="Mate-paired?" type="select">
+ <option value="single">Single-end</option>
+ <option value="paired">Mate pairs</option>
+ </param>
+ <when value="single">
+ <param format="fastqsanger" name="reads" type="data" label="Reads" />
+ </when>
+ <when value="paired">
+ <param name="reads1" format="fastqsanger" label="Forward FASTQ file" type="data" />
+ <param name="reads2" format="fastqsanger" label="Reverse FASTQ file" type="data" />
+ <param label="Upperbound of pairs separation (-U)" name="upperbound" type="integer" size="8" value="100000" />
+ <param label="Lowerbound of pairs separation (-L)" name="lowerbound" type="integer" size="8" value="0" />
+ <param label="Exclude ambiguous pairs (-e)" name="excludeAmbiguousPairs" type="boolean" checked="false" truevalue="-e" falsevalue="" />
+ </when>
+ </conditional>
+ </when>
+ </conditional>
+ <param label="Maximum number of mismatches permitted in one end of full read (-v)" name="valAlign" type="integer" size="5" value="2" />
+ <conditional name="align">
+ <param help="Use default setting or specify full parameters list" label="PerM settings to use" name="options" type="select">
+ <option value="preSet">Commonly used</option>
+ <option value="full">Full parameter list</option>
+ </param>
+ <when value="preSet"/>
+ <when value="full">
+ <param label="Whether or not to report all valid alignments per read (-A/-B/-E)" name="alignments" type="select">
+ <option value="A">Report all valid alignments</option>
+ <option value="B">Report the best alignments in terms of number of mismatches</option>
+ <option value="E">Report only uniquely mapped reads</option>
+ </param>
+ <param label="Choose the seed full sensitive to different number of mismatches (--seed)" name="seed" type="select" >
+ <option value="F2">2 mismatches</option>
+ <option value="S11">1 SNP + 1 color error</option>
+ <option value="F3">3 mismatches</option>
+ <option value="F4">4 mismatches</option>
+ </param>
+ <param label="Choose the delimiter to identify read name (--delimiter)" name="delimiter" type="select">
+ <option value="None">Tab/Space/Comma</option>
+ <option value=":">Colon</option>
+ <option value="_">Underscore</option>
+ </param>
+ <param label="Use the first n bases of each read for alignment (-T)" name="sTrimL" type="integer" size="5" value="50" />
+ <param name="includeReadsWN" type="boolean" checked="true" truevalue="--includeReadsWN" falsevalue="" label="Include reads with 'N' or '.' by encoding '.' as 3, 'N' as 'A' (--includeReadsWN)" />
+ <param name="statsOnly" type="boolean" checked="false" truevalue="--statsOnly" falsevalue="" label="output mapping stats only. Don't output alignments (--statsOnly)" />
+ <param name="ignoreQS" type="boolean" checked="false" truevalue="--ignoreQS" falsevalue="" label="Ignore quality scores (--ignoreQS)" />
+ </when>
+ </conditional> <!-- options -->
+ <param name="bUnmappedRead" type="select" label="Output the unmapped reads (-u)">
+ <option value="true">Yes</option>
+ <option value="false">No</option>
+ </param>
+ </inputs>
+ <outputs>
+ <data name="output" format="sam"/>
+ <data name="unmappedReadOut" format="fastqsanger">
+ <filter>bUnmappedRead == "true" and s["space"] == "base"</filter>
+ </data>
+ <data name="unmappedReadOutCS" format="fastqcssanger">
+ <filter>bUnmappedRead == "true" and s["space"] == "color"</filter>
+ </data>
+ </outputs>
+ <tests>
+ <test>
+ <!--
+ PerM command:
+ PerM test-data/phiX.fasta 50 +seed F3 -m -s phiX_F3_50.index +readFormat .fastq
+ PerM phiX_F3_50.index -1 test-data/perm_in1.fastqsanger -2 test-data/perm_in2.fastqsanger -U 100000 -L 0 -e +readFormat .fastq -v 0 +seed F2 -A -T 50 +includeReadsWN -o perm_out1.sam +outputFormat sam +noSamHeader | tr '\r' '\n' | tr -cd "[:print:]\t\n " | grep "Reads\|Sub0\|Pairs\|single" | sed 's/.*Reads:,//' | sed 's/\/.*dat,_ Sub0/Sub0/'
+ You need to replace the + with 2 dashes.
+ -->
+ <param name="space" value="base" />
+ <param name="refSource" value="indexed" />
+ <param name="index" value="phiX_F3_50" />
+ <param name="singleOrPairs" value="paired" />
+ <param name="reads1" value="perm_in1.fastqsanger" ftype="fastqsanger" />
+ <param name="reads2" value="perm_in2.fastqsanger" ftype="fastqsanger" />
+ <param name="upperbound" value="100000" />
+ <param name="lowerbound" value="0" />
+ <param name="excludeAmbiguousPairs" value="true" />
+ <param name="valAlign" value="0" />
+ <param name="options" value="full" />
+ <param name="alignments" value="A" />
+ <param name="seed" value="F2" />
+ <param name="delimiter" value="None" />
+ <param name="sTrimL" value="50" />
+ <param name="includeReadsWN" value="true" />
+ <param name="statsOnly" value="false" />
+ <param name="ignoreQS" value="false" />
+ <param name="bUnmappedRead" value="false" />
+ <output name="output" file="perm_out1.sam" ftype="sam" />
+ </test>
+ <test>
+ <!--
+ PerM command:
+ PerM test-data/chr_m.fasta test-data/perm_in3.fastqsanger +readFormat .fastq -v 2 -u perm_out3.fastqsanger -o perm_out2.sam +outputFormat sam +noSamHeader | tr '\r' '\n' | tr -cd "[:print:]\t\n " | grep "Reads\|Sub0\|Pairs\|single" | sed 's/.*Reads:,//' | sed 's/\/.*dat,_ Sub0/Sub0/'
+ You need to replace the + with 2 dashes.
+ -->
+ <param name="space" value="base" />
+ <param name="refSource" value="history" />
+ <param name="ref" value="chr_m.fasta" ftype="fasta" />
+ <param name="singleOrPairs" value="single" />
+ <param name="reads" value="perm_in3.fastqsanger" ftype="fastqsanger" />
+ <param name="valAlign" value="2" />
+ <param name="options" value="preSet" />
+ <param name="bUnmappedRead" value="true" />
+ <output name="output" file="perm_out2.sam" ftype="sam" />
+ <output name="unmappedReadOut" file="perm_out3.fastqsanger" ftype="fastqsanger" />
+ </test>
+ <test>
+ <!--
+ PerM command:
+ PerM test-data/phiX.fasta test-data/perm_in4.fastqcssanger +readFormat .csfastq -v 1 -o perm_out4.sam +outputFormat sam +noSamHeader | tr '\r' '\n' | tr -cd "[:print:]\t\n " | grep "Reads\|Sub0\|Pairs\|single" | sed 's/.*Reads:,//' | sed 's/\/.*dat,_ Sub0/Sub0/'
+ You need to replace the + with 2 dashes.
+ -->
+ <param name="space" value="color" />
+ <param name="refSource" value="history" />
+ <param name="ref" value="phiX.fasta" ftype="fasta" />
+ <param name="singleOrPairs" value="single" />
+ <param name="reads" value="perm_in4.fastqcssanger" ftype="fastqcssanger" />
+ <param name="valAlign" value="1" />
+ <param name="options" value="preSet" />
+ <param name="bUnmappedRead" value="false" />
+ <output name="output" file="perm_out4.sam" ftype="sam" />
+ </test>
+ <test>
+ <!--
+ PerM command:
+ PerM equCab2.fasta 50 +seed F4 -m -s equCab2_F3_50.index +readFormat .csfastq
+ PerM equCab2_F3_50.index -1 test-data/perm_in5.fastqcssanger -2 test-data/perm_in6.fastqcssanger -U 90000 -L 10000 +readFormat .csfastq -v 3 -o perm_out5.sam +outputFormat sam +noSamHeader | tr '\r' '\n' | tr -cd "[:print:]\t\n " | grep "Reads\|Sub0\|Pairs\|single" | sed 's/.*Reads:,//' | sed 's/\/.*dat,_ Sub0/Sub0/'
+ You need to replace the + with 2 dashes.
+      equCab2.fasta needs to be supplied.
+ -->
+ <param name="space" value="color" />
+ <param name="refSource" value="indexed" />
+ <param name="index" value="equCab2_chrM_F3_50" />
+ <param name="singleOrPairs" value="paired" />
+ <param name="reads1" value="perm_in5.fastqcssanger" ftype="fastqcssanger" />
+ <param name="reads2" value="perm_in6.fastqcssanger" ftype="fastqcssanger" />
+ <param name="upperbound" value="90000" />
+ <param name="lowerbound" value="10000" />
+ <param name="excludeAmbiguousPairs" value="false" />
+ <param name="valAlign" value="3" />
+ <param name="options" value="preSet" />
+ <param name="bUnmappedRead" value="false" />
+ <output name="output" file="perm_out5.sam" ftype="sam" />
+ </test>
+ </tests>
+ <help>
+**What it does**
+
+PerM is a short-read aligner designed to map long SOLiD reads to a whole genome or transcriptome at very high speed. PerM can be fully sensitive to alignments with up to four mismatches and highly sensitive to a higher number of mismatches.
+
+**Development team**
+
+PerM is developed by Ting Chen's group, Center of Excellence in Genomic Sciences at the University of Southern California. If you have any questions, please email yanghoch at usc.edu or check the `project page`__.
+
+ .. __: http://code.google.com/p/perm/
+
+**Citation**
+
+PerM: Efficient mapping of short sequencing reads with periodic full sensitive spaced seeds. Bioinformatics, 2009, 25 (19): 2514-2521.
+
+**Input**
+
+The inputs are one or more read files and a reference. Users can either select a reference that is already indexed within Galaxy or upload their own reference.
+
+An uploaded reference file should be in fasta format. Multiple sequences, such as individual transcripts, can be placed in the same file, each preceded by a header line that starts with the ">" character.
+
+Read files must be in either fastqsanger or fastqcssanger format to be used with PerM. However, several possible starting formats can be converted to one of those two: fastq (any type), color-space fastq, fasta, csfasta, or csfasta+qualsolid.
+
+An uploaded base-space fastq file MUST be checked/transformed with FASTQGroomer tools in Galaxy to be converted to the fastqsanger format (this is true even if the original file is in Sanger format).
+
+Uploaded fasta and csfasta files without quality scores can be transformed to fastqsanger by the FASTQGroomer, with pseudo quality scores added.
+
+An uploaded csfasta + qual pair can also be transformed into fastqcssanger by solid2fastq.
+
+**Outputs**
+
+The output mapping result is in SAM format, and has the following columns::
+
+ Column Description
+ -------- --------------------------------------------------------
+ 1 QNAME Query (pair) NAME
+ 2 FLAG bitwise FLAG
+ 3 RNAME Reference sequence NAME
+ 4 POS 1-based leftmost POSition/coordinate of clipped sequence
+ 5 MAPQ MAPping Quality (Phred-scaled)
+ 6 CIGAR extended CIGAR string
+ 7 MRNM Mate Reference sequence NaMe ('=' if same as RNAME)
+ 8 MPOS 1-based Mate POSition
+ 9 ISIZE Inferred insert SIZE
+ 10 SEQ query SEQuence on the same strand as the reference
+ 11 QUAL query QUALity (ASCII-33 gives the Phred base quality)
+ 12 OPT variable OPTional fields in the format TAG:VTYPE:VALUE
+ 12.1 NM Number of mismatches (SOLiD-specific)
+ 12.2 CS Reads in color space (SOLiD-specific)
+ 12.3 CQ Base quality in color space (SOLiD-specific)
+
+The flags are as follows::
+
+ Flag Description
+ ------ -------------------------------------
+ 0x0001 the read is paired in sequencing
+ 0x0002 the read is mapped in a proper pair
+ 0x0004 the query sequence itself is unmapped
+ 0x0008 the mate is unmapped
+ 0x0010 strand of the query (1 for reverse)
+ 0x0020 strand of the mate
+ 0x0040 the read is the first read in a pair
+ 0x0080 the read is the second read in a pair
+ 0x0100 the alignment is not primary
+
+Here is some sample output::
+
+ Qname FLAG Rname POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL NM CS CQ
+ 491_28_332_F3 16 ref-1 282734 255 35M * 0 0 AGTCAAACTCCGAATGCCAATGACTTATCCTTAGG #%%%%%%%!!%%%!!%%%%%%%%!!%%%%%%%%%% NM:i:3 CS:Z:C0230202330012130103100230121001212 CQ:Z:###################################
+ 491_28_332_F3 16 ref-1 269436 255 35M * 0 0 AGTCAAACTCCGAATGCCAATGACTTATCCTTAGG #%%%%%%%!!%%%!!%%%%%%%%!!%%%%%%%%%% NM:i:3 CS:Z:C0230202330012130103100230121001212 CQ:Z:###################################
+
+The user can request an optional output containing the unmapped reads in fastqsanger or fastqcssanger format. The default is to produce it.
+
+**PerM parameter list**
+
+Below is a list of PerM command-line options. Not all of them are relevant to Galaxy's implementation, but they are included for completeness.
+
+The command for single-end::
+
+ PerM [ref_or_index] [read] [options]
+
+The command for paired-end::
+
+ PerM [ref_or_index] -1 [read1] -2 [read2] [options]
+
+The command-line options::
+
+ -A Output all alignments within the given mismatch threshold, end-to-end.
+ -B Output best alignments in terms of mismatches in the given mismatch threshold. [Default]
+ -E Output only the uniquely mapped reads in the given mismatch threshold.
+ -m Create the reference index, without reusing the saved index.
+ -s PATH Save the reference index to accelerate the mapping in the future. If PATH is not specified, the default path will be used.
+ -v INT Where INT is the number of mismatches allowed in one end. [Default=2]
+ -T INT Where INT is the length to truncate reads to, so 30 means use only the first 30 bases (signals). Leave blank if the full read is meant to be used.
+ -o PATH Where PATH is the output file for the mapping of one read set. PerM's output is in .mapping or .sam format, determined by the extension of PATH. Ex: -o out.sam will output in SAM format; -o out.mapping will output in .mapping format.
+ -d PATH Where PATH is the directory for multiple read sets.
+ -u PATH Print the fastq file of those unmapped reads to the file in PATH.
+ --noSamHeader Print no SAM header so it is convenient to concatenate multiple SAM output files.
+ --includeReadsWN Encodes N or "." with A or 3, respectively.
+ --statsOnly Output the mapping statistics in stdout only, without saving alignments to files.
+ --ignoreQS Ignore the quality scores in fastq or QUAL files.
+ --seed {F2 | S11 | F3 | F4} Specify the seed pattern, each of which has a specific full sensitivity. See the algorithm page (link below) for seed patterns that balance sensitivity and running time.
+ --readFormat {fasta | fastq | csfasta | csfastq} Read input in the specified format instead of guessing from the file extension.
+ --delimiter CHAR A character used as the delimiter to separate the read id from the additional info in lines beginning with ">" in fasta or csfasta.
+
+Paired reads options::
+
+ -e Exclude ambiguous pairs.
+ -L INT Mate-pair separation lower bound.
+ -U INT Mate-pair separation upper bound.
+ -1 PATH The forward reads file path.
+ -2 PATH The reverse reads file path.
+
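+As an illustration only (the reference and read file names below are hypothetical), a paired-end colorspace run with at most two mismatches per end, the F3 seed, SAM output, and a file of unmapped reads might look like::
+
+    PerM hg18.fa -1 sample_1.csfasta -2 sample_2.csfasta -v 2 --seed F3 -o sample.sam -u unmapped.fastq
+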
+See the PerM `algorithm page`__ for information on algorithms and seeds.
+
+ .. __: http://code.google.com/p/perm/wiki/Algorithms
+ </help>
+</tool>
diff -r 137d93848139 -r 032aae80bbb0 tools/sr_mapping/bwa_wrapper.xml
--- a/tools/sr_mapping/bwa_wrapper.xml Tue Mar 16 18:54:23 2010 -0400
+++ b/tools/sr_mapping/bwa_wrapper.xml Wed Mar 17 16:40:36 2010 -0400
@@ -110,8 +110,8 @@
BWA commands:
cp test-data/phiX.fasta phiX.fasta
bwa index -a is phiX.fasta
- bwa aln -n 0.04 -o 1 -e -1 -d 16 -i 5 -k 2 -t 4 -M 3 -O 11 -E 4 -R -N phiX.fasta test-data/bwa_wrapper_in1.fastq > bwa_wrapper_out1.sai
- bwa samse phiX.fasta bwa_wrapper_out1.sai test-data/bwa_wrapper_in1.fastq >> bwa_wrapper_out2.sam
+ bwa aln -n 0.04 -o 1 -e -1 -d 16 -i 5 -k 2 -t 4 -M 3 -O 11 -E 4 -R -N phiX.fasta test-data/bwa_wrapper_in1.fastq > bwa_wrapper_out2.sai
+ bwa samse phiX.fasta bwa_wrapper_out2.sai test-data/bwa_wrapper_in1.fastq >> bwa_wrapper_out2.sam
phiX.fasta is the prefix for the reference
remove the comment lines (beginning with '@') from the resulting sam file
-->
@@ -222,7 +222,6 @@
Flag Description
------ -------------------------------------
- Flag Description
0x0001 the read is paired in sequencing
0x0002 the read is mapped in a proper pair
0x0004 the query sequence itself is unmapped
details: http://www.bx.psu.edu/hg/galaxy/rev/137d93848139
changeset: 3541:137d93848139
user: Kanwei Li <kanwei(a)gmail.com>
date: Tue Mar 16 18:54:23 2010 -0400
description:
trackster:
- Use new array_tree summary structure. Feature tracks now display as intensity graphs at higher levels, and switch to detail levels when they fit the screen well
- Create array_tree indices for bed format to support the above (bam still in progress)
- Other fixes and improvements, including new icons
diffstat:
datatypes_conf.xml.sample | 2 +
lib/galaxy/datatypes/binary.py | 2 +-
lib/galaxy/datatypes/converters/bam_to_array_tree_converter.py | 45 +
lib/galaxy/datatypes/converters/bam_to_array_tree_converter.xml | 15 +
lib/galaxy/datatypes/converters/bed_to_array_tree_converter.py | 29 +
lib/galaxy/datatypes/converters/bed_to_array_tree_converter.xml | 14 +
lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py | 12 +-
lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.xml | 2 +-
lib/galaxy/datatypes/interval.py | 4 +-
lib/galaxy/visualization/tracks/data/array_tree.py | 85 ++-
lib/galaxy/web/controllers/tracks.py | 73 +-
static/scripts/trackster.js | 242 ++++++---
static/trackster.css | 8 +-
templates/tracks/browser.mako | 27 +-
14 files changed, 401 insertions(+), 159 deletions(-)
diffs (1036 lines):
diff -r 861756e85b16 -r 137d93848139 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Tue Mar 16 16:03:28 2010 -0400
+++ b/datatypes_conf.xml.sample Tue Mar 16 18:54:23 2010 -0400
@@ -5,12 +5,14 @@
<datatype extension="axt" type="galaxy.datatypes.sequence:Axt" display_in_upload="true"/>
<datatype extension="bam" type="galaxy.datatypes.binary:Bam" mimetype="application/octet-stream" display_in_upload="true">
<converter file="bam_to_bai.xml" target_datatype="bai"/>
+ <converter file="bam_to_array_tree_converter.xml" target_datatype="array_tree"/>
<display file="ucsc/bam.xml" />
</datatype>
<datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true">
<converter file="bed_to_gff_converter.xml" target_datatype="gff"/>
<converter file="interval_to_coverage.xml" target_datatype="coverage"/>
<converter file="bed_to_interval_index_converter.xml" target_datatype="interval_index"/>
+ <converter file="bed_to_array_tree_converter.xml" target_datatype="array_tree"/>
<converter file="bed_to_genetrack_converter.xml" target_datatype="genetrack"/>
<!-- <display file="ucsc/interval_as_bed.xml" /> -->
<display file="genetrack.xml" />
diff -r 861756e85b16 -r 137d93848139 lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py Tue Mar 16 16:03:28 2010 -0400
+++ b/lib/galaxy/datatypes/binary.py Tue Mar 16 18:54:23 2010 -0400
@@ -139,7 +139,7 @@
except:
return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
def get_track_type( self ):
- return "ReadTrack", "bai"
+ return "ReadTrack", ["bai", "array_tree"]
class Binseq( Binary ):
"""Class describing a zip archive of binary sequence files"""
diff -r 861756e85b16 -r 137d93848139 lib/galaxy/datatypes/converters/bam_to_array_tree_converter.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/converters/bam_to_array_tree_converter.py Tue Mar 16 18:54:23 2010 -0400
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+from __future__ import division
+
+import sys
+from galaxy import eggs
+import pkg_resources; pkg_resources.require( "bx-python" ); pkg_resources.require( "pysam" )
+
+from pysam import csamtools
+from bx.arrays.array_tree import *
+
+BLOCK_SIZE = 1000
+
+class BamReader:
+ def __init__( self, input_fname, index_fname ):
+ self.bamfile = csamtools.Samfile( filename=input_fname, mode='rb', index_filename=index_fname )
+ self.iterator = self.bamfile.fetch()
+
+ def __iter__( self ):
+ return self
+
+ def __next__( self ):
+ while True:
+ read = self.iterator.next()
+ return read.rname, read.mpos, read.pos + read.rlen, None, mapq
+
+
+def main():
+
+ input_fname = sys.argv[1]
+ index_fname = sys.argv[2]
+ out_fname = sys.argv[3]
+
+ reader = BamReader( input_fname, index_fname )
+
+ # Fill array from reader
+ d = array_tree_dict_from_reader( reader, {}, block_size = BLOCK_SIZE )
+
+ for array_tree in d.itervalues():
+ array_tree.root.build_summary()
+
+ FileArrayTreeDict.dict_to_file( d, open( out_fname, "w" ), no_leaves=True )
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff -r 861756e85b16 -r 137d93848139 lib/galaxy/datatypes/converters/bam_to_array_tree_converter.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/converters/bam_to_array_tree_converter.xml Tue Mar 16 18:54:23 2010 -0400
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_bam_to_array_tree_0" name="Convert BAM to Array Tree" version="1.0.0">
+<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+ <command interpreter="python">bam_to_array_tree_converter.py $input1 $output1</command>
+ <inputs>
+ <page>
+ <param format="bam" name="input1" type="data" label="Choose BAM file"/>
+ <param format="bai" name="index" type="data" label="BAM index file"/>
+ </page>
+ </inputs>
+ <outputs>
+ <data format="array_tree" name="output1"/>
+ </outputs>
+ <help>
+ </help>
+</tool>
diff -r 861756e85b16 -r 137d93848139 lib/galaxy/datatypes/converters/bed_to_array_tree_converter.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/converters/bed_to_array_tree_converter.py Tue Mar 16 18:54:23 2010 -0400
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+from __future__ import division
+
+import sys
+from galaxy import eggs
+import pkg_resources; pkg_resources.require( "bx-python" )
+from bx.arrays.array_tree import *
+from bx.arrays.bed import BedReader
+
+BLOCK_SIZE = 1000
+
+def main():
+
+ input_fname = sys.argv[1]
+ out_fname = sys.argv[2]
+
+ reader = BedReader( open( input_fname ) )
+
+ # Fill array from reader
+ d = array_tree_dict_from_reader( reader, {}, block_size = BLOCK_SIZE )
+
+ for array_tree in d.itervalues():
+ array_tree.root.build_summary()
+
+ FileArrayTreeDict.dict_to_file( d, open( out_fname, "w" ), no_leaves=True )
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff -r 861756e85b16 -r 137d93848139 lib/galaxy/datatypes/converters/bed_to_array_tree_converter.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/converters/bed_to_array_tree_converter.xml Tue Mar 16 18:54:23 2010 -0400
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_bed_to_array_tree_0" name="Convert BED to Array Tree" version="1.0.0">
+<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+ <command interpreter="python">bed_to_array_tree_converter.py $input1 $output1</command>
+ <inputs>
+ <page>
+ <param format="bed" name="input1" type="data" label="Choose BED file"/>
+ </page>
+ </inputs>
+ <outputs>
+ <data format="array_tree" name="output1"/>
+ </outputs>
+ <help>
+ </help>
+</tool>
diff -r 861756e85b16 -r 137d93848139 lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py
--- a/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py Tue Mar 16 16:03:28 2010 -0400
+++ b/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py Tue Mar 16 18:54:23 2010 -0400
@@ -6,7 +6,7 @@
from galaxy import eggs
import pkg_resources; pkg_resources.require( "bx-python" )
from bx.arrays.array_tree import *
-from bx.arrays.wiggle import IntervalReader
+from bx.arrays.wiggle import WiggleReader
BLOCK_SIZE = 100
@@ -15,17 +15,15 @@
input_fname = sys.argv[1]
out_fname = sys.argv[2]
- reader = IntervalReader( open( input_fname ) )
+ reader = WiggleReader( open( input_fname ) )
- # Fill array from wiggle
- d = array_tree_dict_from_wiggle_reader( reader, {}, block_size = BLOCK_SIZE )
+ # Fill array from reader
+ d = array_tree_dict_from_reader( reader, {}, block_size = BLOCK_SIZE )
for array_tree in d.itervalues():
array_tree.root.build_summary()
- f = open( out_fname, "w" )
- FileArrayTreeDict.dict_to_file( d, f )
- f.close()
+ FileArrayTreeDict.dict_to_file( d, open( out_fname, "w" ) )
if __name__ == "__main__":
main()
\ No newline at end of file
diff -r 861756e85b16 -r 137d93848139 lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.xml
--- a/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.xml Tue Mar 16 16:03:28 2010 -0400
+++ b/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.xml Tue Mar 16 18:54:23 2010 -0400
@@ -1,4 +1,4 @@
-<tool id="INDEXER_Wiggle_0" name="Index Wiggle for Track Viewer">
+<tool id="CONVERTER_Wiggle_0" name="Index Wiggle for Track Viewer">
<!-- Used internally to generate track indexes -->
<command interpreter="python">wiggle_to_array_tree_converter.py $input $output</command>
<inputs>
diff -r 861756e85b16 -r 137d93848139 lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py Tue Mar 16 16:03:28 2010 -0400
+++ b/lib/galaxy/datatypes/interval.py Tue Mar 16 18:54:23 2010 -0400
@@ -508,7 +508,7 @@
except: return False
def get_track_type( self ):
- return "FeatureTrack", "interval_index"
+ return "FeatureTrack", ["interval_index", "array_tree"]
class BedStrict( Bed ):
"""Tab delimited data in strict BED format - no non-standard columns allowed"""
@@ -959,7 +959,7 @@
resolution = max( resolution, 1 )
return resolution
def get_track_type( self ):
- return "LineTrack", "array_tree"
+ return "LineTrack", ["array_tree"]
class CustomTrack ( Tabular ):
"""UCSC CustomTrack"""
diff -r 861756e85b16 -r 137d93848139 lib/galaxy/visualization/tracks/data/array_tree.py
--- a/lib/galaxy/visualization/tracks/data/array_tree.py Tue Mar 16 16:03:28 2010 -0400
+++ b/lib/galaxy/visualization/tracks/data/array_tree.py Tue Mar 16 18:54:23 2010 -0400
@@ -7,14 +7,21 @@
from bx.arrays.array_tree import FileArrayTreeDict
except:
pass
-from math import floor, ceil, log
+from math import floor, ceil, log, pow
+import logging
+logger = logging.getLogger(__name__)
# Maybe this should be included in the datatype itself, so users can add their
# own types to the browser as long as they return the right format of data?
+SUMMARIZE_N = 200
+
class ArrayTreeDataProvider( object ):
def __init__( self, dataset, original_dataset ):
self.dataset = dataset
+
+ # def calc_resolution(self, start, end, density):
+ # return pow( 10, ceil( log( (end - start) / density , 10 ) ) )
def get_stats( self, chrom ):
f = open( self.dataset.file_name )
@@ -26,8 +33,26 @@
return "no data"
root_summary = chrom_array_tree.get_summary( 0, chrom_array_tree.levels )
+
+ level = chrom_array_tree.levels - 1
+ desired_summary = chrom_array_tree.get_summary( 0, level )
+ bs = chrom_array_tree.block_size ** level
+
+ frequencies = map(int, desired_summary.frequencies)
+ out = [ (i * bs, freq) for i, freq in enumerate(frequencies) ]
+
f.close()
- return { 'max': float( max(root_summary.maxs) ), 'min': float( min(root_summary.mins) ) }
+ return { 'max': float( max(root_summary.maxs) ), \
+ 'min': float( min(root_summary.mins) ), \
+ 'frequencies': out, \
+ 'total_frequency': sum(root_summary.frequencies) }
+
+ # Return None instead of NaN to pass jQuery 1.4's strict JSON
+ def float_nan(self, n):
+ if n != n: # NaN != NaN
+ return None
+ else:
+ return float(n)
def get_data( self, chrom, start, end, **kwargs ):
f = open( self.dataset.file_name )
@@ -44,28 +69,54 @@
start = int( start )
end = int( end )
resolution = max(1, ceil(float(kwargs['resolution'])))
-
- level = int( floor( log( resolution, block_size ) ) )
+
+ level = int( ceil( log( resolution, block_size ) ) )
level = max( level, 0 )
stepsize = block_size ** level
- step1 = stepsize * block_size
# Is the requested level valid?
assert 0 <= level <= chrom_array_tree.levels
- results = []
- for block_start in range( start, end, stepsize * block_size ):
- # print block_start
- # Return either data point or a summary depending on the level
- indexes = range( block_start, block_start + stepsize * block_size, stepsize )
- if level > 0:
- s = chrom_array_tree.get_summary( block_start, level )
- if s is not None:
- results.extend( zip( indexes, map( float, s.sums / s.counts ) ) )
+ if "frequencies" in kwargs:
+ if level <= 0:
+ # Low level enough to always display features
+ f.close()
+ return None
else:
- v = chrom_array_tree.get_leaf( block_start )
- if v is not None:
- results.extend( zip( indexes, map( float, v ) ) )
+ # Round to nearest bin
+ bin_start = start // (stepsize * block_size) * (stepsize * block_size)
+
+ indexes = range( bin_start, (bin_start + stepsize * block_size), stepsize )
+ summary = chrom_array_tree.get_summary( bin_start, level )
+ if summary:
+ results = zip( indexes, map( int, summary.frequencies ) )
+ filtered = filter(lambda tup: tup[0] >= start and tup[0] <= end, results)
+ sums = 0
+ max_f = 0
+ for tup in filtered:
+ sums += tup[1]
+ max_f = max(max_f, tup[1])
+
+ if max_f > 10000:
+ f.close()
+ return filtered, int(sums), float(sums)/len(filtered)
+ f.close()
+ return None
+
+ else:
+ results = []
+ for block_start in range( start, end, stepsize * block_size ):
+ # print block_start
+ # Return either data point or a summary depending on the level
+ indexes = range( block_start, block_start + stepsize * block_size, stepsize )
+ if level > 0:
+ s = chrom_array_tree.get_summary( block_start, level )
+ if s:
+ results.extend( zip( indexes, map( self.float_nan, s.sums / s.counts ) ) )
+ else:
+ l = chrom_array_tree.get_leaf( block_start )
+ if l:
+ results.extend( zip( indexes, map( self.float_nan, l ) ) )
f.close()
return results
diff -r 861756e85b16 -r 137d93848139 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Tue Mar 16 16:03:28 2010 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Tue Mar 16 18:54:23 2010 -0400
@@ -98,11 +98,10 @@
hda_query = trans.sa_session.query( model.HistoryDatasetAssociation )
dataset = hda_query.get( dataset_id )
- track_type, indexer = dataset.datatype.get_track_type()
+ track_type, _ = dataset.datatype.get_track_type()
track = {
"track_type": track_type,
- "indexer": indexer,
"name": dataset.name,
"dataset_id": dataset.id,
"prefs": {},
@@ -134,10 +133,9 @@
except KeyError:
prefs = {}
dataset = hda_query.get( dataset_id )
- track_type, indexer = dataset.datatype.get_track_type()
+ track_type, _ = dataset.datatype.get_track_type()
tracks.append( {
"track_type": track_type,
- "indexer": indexer,
"name": dataset.name,
"dataset_id": dataset.id,
"prefs": simplejson.dumps(prefs),
@@ -187,41 +185,50 @@
return manifest
@web.json
- def data( self, trans, dataset_id, indexer, chrom, low, high, **kwargs ):
+ def data( self, trans, dataset_id, chrom, low, high, **kwargs ):
"""
Called by the browser to request a block of data
"""
- # Load the requested dataset
dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dataset_id )
- # No dataset for that id
if not dataset or not chrom:
return messages.NO_DATA
- # Dataset is in error state, can't display
if dataset.state == trans.app.model.Job.states.ERROR:
return messages.ERROR
- # Dataset is still being generated
if dataset.state != trans.app.model.Job.states.OK:
return messages.PENDING
- # Determine what to return based on the type of track being drawn.
- converted_dataset_type = indexer
- converted_dataset = self.__dataset_as_type( trans, dataset, converted_dataset_type )
- if not converted_dataset:
- # No converter
- return messages.NO_CONVERTER
- # Need to check states again for the converted version
- if converted_dataset.state == model.Dataset.states.ERROR:
- return messages.ERROR
- if converted_dataset.state != model.Dataset.states.OK:
- return messages.PENDING
- # We have a dataset in the right format that is ready to use, wrap in
- # a data provider that knows how to access it
- data_provider = dataset_type_to_data_provider[ converted_dataset_type ]( converted_dataset, dataset )
+
+ track_type, indexes = dataset.datatype.get_track_type()
+ converted = dict([ (index, self.__dataset_as_type( trans, dataset, index )) for index in indexes ])
- # Return stats if we need them
- if 'stats' in kwargs: return data_provider.get_stats( chrom )
+ for index, converted_dataset in converted.iteritems():
+ if not converted_dataset:
+ return messages.NO_CONVERTER
- # Get the requested chunk of data
- return data_provider.get_data( chrom, low, high, **kwargs )
+ # Need to check states again for the converted version
+ if converted_dataset.state == model.Dataset.states.ERROR:
+ return messages.ERROR
+ if converted_dataset.state != model.Dataset.states.OK:
+ return messages.PENDING
+
+ if len(converted) > 1:
+ # Have to choose between array_tree and other provider
+ array_tree = ArrayTreeDataProvider( converted['array_tree'], dataset )
+ freqs = array_tree.get_data( chrom, low, high, frequencies=True, **kwargs )
+ if freqs is not None:
+ frequencies, sums, avg_f = freqs
+ return { "dataset_type": "array_tree", "data": frequencies, "sums": sums, "avg_f": avg_f }
+ dataset_type = "interval_index"
+ else:
+ dataset_type = converted.keys()[0]
+
+ data_provider = dataset_type_to_data_provider[ dataset_type ]( converted[dataset_type], dataset )
+
+ if 'stats' in kwargs:
+ data = data_provider.get_stats( chrom )
+ else:
+ data = data_provider.get_data( chrom, low, high, **kwargs )
+
+ return { "dataset_type": dataset_type, "data": data }
def __dataset_as_type( self, trans, dataset, type ):
"""
@@ -240,12 +247,11 @@
# See if converted dataset already exists
converted_datasets = [c for c in dataset.get_converted_files_by_type( type ) if c != None]
if converted_datasets:
- for d in converted_datasets:
- if d.state != 'error':
- return d
- else:
- return None
-
+ if converted_datasets[0].state != 'error':
+ return converted_datasets[0]
+ else:
+ return None
+
# Conversion is possible but hasn't been done yet, run converter here
# FIXME: this is largely duplicated from DefaultToolAction
assoc = model.ImplicitlyConvertedDatasetAssociation( parent = dataset, file_type = type, metadata_safe = False )
@@ -285,7 +291,6 @@
for track in decoded_payload:
tracks.append( { "dataset_id": str(track['dataset_id']),
"name": track['name'],
- "indexer": track['indexer'],
"track_type": track['track_type'],
"prefs": track['prefs']
} )
diff -r 861756e85b16 -r 137d93848139 static/scripts/trackster.js
--- a/static/scripts/trackster.js Tue Mar 16 16:03:28 2010 -0400
+++ b/static/scripts/trackster.js Tue Mar 16 18:54:23 2010 -0400
@@ -6,6 +6,7 @@
var DENSITY = 1000,
FEATURE_LEVELS = 10,
DATA_ERROR = "There was an error in indexing this dataset.",
+ DATA_NOCONVERTER = "A converter for this dataset is not installed. Please check your datatypes_conf.xml file.",
DATA_NONE = "No data for this chrom/contig.",
DATA_PENDING = "Currently indexing... please wait",
DATA_LOADING = "Loading data...",
@@ -77,6 +78,18 @@
}
});
+var Drawer = function() {};
+$.extend( Drawer.prototype, {
+ intensity: function(ctx, max, data) {
+
+
+ },
+
+});
+
+drawer = new Drawer();
+
+
var View = function( chrom, title, vis_id, dbkey ) {
this.vis_id = vis_id;
this.dbkey = dbkey;
@@ -124,6 +137,13 @@
}
}
},
+ reset: function() {
+ this.low = this.max_low;
+ this.high = this.max_high;
+ this.center = this.center = (this.max_high - this.max_low) / 2;
+ this.zoom_level = 0;
+ $(".yaxislabel").remove();
+ },
redraw: function(nodraw) {
this.span = this.max_high - this.max_low;
var span = this.span / Math.pow(this.zoom_factor, this.zoom_level),
@@ -156,20 +176,22 @@
$("#high").val( commatize(this.high) );
if (!nodraw) {
for ( var i = 0, len = this.tracks.length; i < len; i++ ) {
- this.tracks[i].draw();
+ if (this.tracks[i].enabled) {
+ this.tracks[i].draw();
+ }
}
for ( var i = 0, len = this.label_tracks.length; i < len; i++ ) {
this.label_tracks[i].draw();
}
}
},
- zoom_in: function ( point ) {
+ zoom_in: function ( point, container ) {
if (this.max_high === 0 || this.high - this.low < 30) {
return;
}
if ( point ) {
- this.center = point / $(document).width() * (this.high - this.low) + this.low;
+ this.center = point / container.width() * (this.high - this.low) + this.low;
}
this.zoom_level += 1;
this.redraw();
@@ -201,6 +223,7 @@
},
init_each: function(params, success_fn) {
var track = this;
+ track.enabled = false;
track.data_queue = {};
track.tile_cache.clear();
track.data_cache.clear();
@@ -209,21 +232,25 @@
track.container_div.removeClass("nodata error pending");
if (track.view.chrom) {
- $.getJSON( data_url, params, function ( data ) {
- if (!data || data == "error") {
+ $.getJSON( data_url, params, function (result) {
+ if (!result || result === "error") {
track.container_div.addClass("error");
track.content_div.text(DATA_ERROR);
- } else if (data.length === 0 || data == "no data") {
+ } else if (result === "no converter") {
+ track.container_div.addClass("error");
+ track.content_div.text(DATA_NOCONVERTER);
+ } else if ( (result.data && result.data.length === 0) || result === "no data") {
track.container_div.addClass("nodata");
track.content_div.text(DATA_NONE);
- } else if (data == "pending") {
+ } else if (result === "pending") {
track.container_div.addClass("pending");
track.content_div.text(DATA_PENDING);
setTimeout(function() { track.init(); }, 5000);
} else {
track.content_div.text("");
track.content_div.css( "height", track.height_px + "px" );
- success_fn(data);
+ track.enabled = true;
+ success_fn(result);
track.draw();
}
});
@@ -318,12 +345,11 @@
}
});
-var LineTrack = function ( name, dataset_id, indexer, prefs ) {
+var LineTrack = function ( name, dataset_id, prefs ) {
this.track_type = "LineTrack";
Track.call( this, name, $("#viewport") );
TiledTrack.call( this );
- this.indexer = indexer;
this.height_px = 100;
this.container_div.addClass( "line-track" );
this.dataset_id = dataset_id;
@@ -339,9 +365,9 @@
track_id = track.view.tracks.indexOf(track);
track.vertical_range = undefined;
- this.init_each({ stats: true, indexer: track.indexer,
- chrom: track.view.chrom, low: null, high: null,
- dataset_id: track.dataset_id }, function(data) {
+ this.init_each({ stats: true, chrom: track.view.chrom, low: null, high: null,
+ dataset_id: track.dataset_id }, function(result) {
+ data = result.data;
if ( isNaN(parseFloat(track.prefs.min_value)) || isNaN(parseFloat(track.prefs.max_value)) ) {
track.prefs.min_value = data.min;
track.prefs.max_value = data.max;
@@ -350,6 +376,7 @@
$('#track_' + track_id + '_maxval').val(track.prefs.max_value);
}
track.vertical_range = track.prefs.max_value - track.prefs.min_value;
+ track.total_frequency = data.total_frequency;
// Draw y-axis labels if necessary
$('#linetrack_' + track_id + '_minval').remove();
@@ -373,17 +400,29 @@
if (!track.data_queue[key]) {
track.data_queue[key] = true;
- $.getJSON( data_url, { "indexer": this.indexer, "chrom": this.view.chrom,
+ /*$.getJSON( data_url, { "chrom": this.view.chrom,
"low": low, "high": high, "dataset_id": this.dataset_id,
- "resolution": this.view.resolution, }, function (data) {
+ "resolution": this.view.resolution }, function (data) {
track.data_cache.set(key, data);
delete track.data_queue[key];
track.draw();
+ });*/
+ $.ajax({ 'url': data_url, 'dataType': 'json', 'data': { "chrom": this.view.chrom,
+ "low": low, "high": high, "dataset_id": this.dataset_id,
+ "resolution": this.view.resolution },
+ success: function (result) {
+ data = result.data;
+ track.data_cache.set(key, data);
+ delete track.data_queue[key];
+ track.draw();
+ }, error: function(r, t, e) {
+ console.log(r, t, e);
+ }
});
}
},
draw_tile: function( resolution, tile_index, parent_element, w_scale ) {
- if (this.vertical_range === undefined) { // We don't have the necessary information yet
+ if (this.vertical_range === undefined) {
return;
}
@@ -398,6 +437,7 @@
}
var data = this.data_cache.get(key);
+
canvas.css( {
position: "absolute",
top: 0,
@@ -411,31 +451,56 @@
min_value = this.prefs.min_value,
max_value = this.prefs.max_value,
vertical_range = this.vertical_range,
+ total_frequency = this.total_frequency,
height_px = this.height_px;
ctx.beginPath();
- for ( var i = 0; i < data.length - 1; i++ ) {
+
+ // for intensity, calculate delta x in pixels to for width of box
+ var delta_x_px = Math.ceil((data[1][0] - data[0][0]) * w_scale);
+ var mode = "line";
+
+ for ( var i = 0; i < data.length; i++ ) {
var x = data[i][0] - tile_low;
var y = data[i][1];
- // Missing data causes us to stop drawing
- if ( isNaN( y ) ) {
- in_path = false;
- } else {
- // Translate
+
+ if ( mode == "intensity" ) {
+ // DRAW INTENSITY
+ if (y === null) {
+ continue;
+ }
x = x * w_scale;
- // console.log(y, this.min_value, this.vertical_range, (y - this.min_value) / this.vertical_range * this.height_px);
if (y <= min_value) {
y = min_value;
} else if (y >= max_value) {
y = max_value;
}
- y = Math.round( height_px - (y - min_value) / vertical_range * height_px );
- // console.log(canvas.get(0).height, canvas.get(0).width);
- if ( in_path ) {
- ctx.lineTo( x, y );
+ y = Math.floor( (y - min_value) / vertical_range * 255 );
+ ctx.fillStyle = "rgb(" +y+ "," +y+ "," +y+ ")";
+ ctx.fillRect(x, 0, delta_x_px, 30);
+ }
+ else {
+ // Missing data causes us to stop drawing
+ if (y === null) {
+ in_path = false;
+ continue;
} else {
- ctx.moveTo( x, y );
- in_path = true;
+ // Translate
+ x = x * w_scale;
+ // console.log(y, this.min_value, this.vertical_range, (y - this.min_value) / this.vertical_range * this.height_px);
+ if (y <= min_value) {
+ y = min_value;
+ } else if (y >= max_value) {
+ y = max_value;
+ }
+ y = Math.round( height_px - (y - min_value) / vertical_range * height_px );
+ // console.log(canvas.get(0).height, canvas.get(0).width);
+ if ( in_path ) {
+ ctx.lineTo( x, y );
+ } else {
+ ctx.moveTo( x, y );
+ in_path = true;
+ }
}
}
}
@@ -471,12 +536,11 @@
}
});
-var FeatureTrack = function ( name, dataset_id, indexer, prefs ) {
+var FeatureTrack = function ( name, dataset_id, prefs ) {
this.track_type = "FeatureTrack";
Track.call( this, name, $("#viewport") );
TiledTrack.call( this );
- this.indexer = indexer;
this.height_px = 100;
this.container_div.addClass( "feature-track" );
this.dataset_id = dataset_id;
@@ -500,13 +564,16 @@
};
$.extend( FeatureTrack.prototype, TiledTrack.prototype, {
init: function() {
- var track = this;
- this.init_each({ indexer: track.indexer, low: track.view.max_low,
+ var track = this,
+ key = track.view.max_low + '_' + track.view.max_high;
+ this.init_each({ low: track.view.max_low,
high: track.view.max_high, dataset_id: track.dataset_id,
- chrom: track.view.chrom }, function (data) {
- track.values = data;
- track.calc_slots();
- track.slots = track.zo_slots;
+ chrom: track.view.chrom, resolution: this.view.resolution }, function (result) {
+ track.data_cache.set(key, result);
+ // track.values = result;
+ // track.calc_slots();
+ // track.slots = track.zo_slots;
+ track.draw();
});
},
get_data: function( low, high ) {
@@ -515,10 +582,10 @@
if (!track.data_queue[key]) {
track.data_queue[key] = true;
- $.getJSON( data_url, { indexer: track.indexer, chrom: track.view.chrom,
+ $.getJSON( data_url, { chrom: track.view.chrom,
low: low, high: high, dataset_id: track.dataset_id,
- include_blocks: true }, function (data) {
- track.data_cache.set(key, data);
+ include_blocks: true, resolution: this.view.resolution }, function (result) {
+ track.data_cache.set(key, result);
// console.log("datacache", track.data_cache.get(key));
delete track.data_queue[key];
track.draw();
@@ -612,52 +679,48 @@
},
draw_tile: function( resolution, tile_index, parent_element, w_scale ) {
- if (!this.values) {
- return;
- }
var tile_low = tile_index * DENSITY * resolution,
tile_high = ( tile_index + 1 ) * DENSITY * resolution,
tile_span = DENSITY * resolution;
// console.log("drawing " + tile_index);
- // Once we zoom in enough, show name labels
var data, slots, required_height;
- if (w_scale > this.show_labels_scale) {
- if (!this.showing_details) {
- this.showing_details = true;
+
+ /*for (var k in this.data_cache.obj_cache) {
+ var k_split = k.split("_"), k_low = k_split[0], k_high = k_split[1];
+ if (k_low <= tile_low && k_high >= tile_high) {
+ data = this.data_cache.get(k);
+ break;
}
- for (var k in this.data_cache.obj_cache) {
- var k_split = k.split("_"), k_low = k_split[0], k_high = k_split[1];
- if (k_low <= tile_low && k_high >= tile_high) {
- data = this.data_cache.get(k);
- break;
- }
- }
- if (!data) {
- this.data_queue[ [tile_low, tile_high] ] = true;
- this.get_data(tile_low, tile_high);
- return;
- }
- // Calculate new slots incrementally for this new chunk of data and update height if necessary
- required_height = this.incremental_slots( this.view.zoom_res, data ) * this.vertical_detail_px + 15;
+ }*/
+
+ // var k = this.view.low + '_' + this.view.high;
+ var k = tile_low + '_' + tile_high;
+ var data = this.data_cache.get(k);
+
+ if (!data) {
+ this.data_queue[ [tile_low, tile_high] ] = true;
+ this.get_data(tile_low, tile_high);
+ return;
+ }
+
+ if (data.dataset_type == "array_tree") {
+ required_height = 30;
+ // Blah
+ } else {
+ // Calculate new slots incrementally for this new chunk of data and update height if necessary
+ required_height = this.incremental_slots( this.view.zoom_res, data.data ) * this.vertical_detail_px + 15;
// console.log(required_height);
slots = this.inc_slots[this.view.zoom_res];
- } else {
- if (this.showing_details) {
- this.showing_details = false;
- }
- required_height = this.height_px;
- slots = this.zo_slots;
- data = this.values;
}
-
+
// console.log(tile_low, tile_high, tile_length, w_scale);
var width = Math.ceil( tile_span * w_scale ),
new_canvas = $("<canvas class='tile'></canvas>"),
label_color = this.prefs.label_color,
block_color = this.prefs.block_color,
left_offset = this.left_offset,
- showing_details = this.showing_details,
- y_scale = (this.showing_details ? this.vertical_detail_px : this.vertical_nodetail_px);
+ // showing_details = this.showing_details,
+ y_scale = this.vertical_detail_px;
new_canvas.css({
position: "absolute",
@@ -671,7 +734,30 @@
ctx.fillStyle = this.prefs.block_color;
ctx.font = this.default_font;
ctx.textAlign = "right";
-
+ var min_color = 150;
+
+ if (data.dataset_type == "array_tree") {
+ var points = data.data;
+ var sums = data.sums;
+ var avg_f = data.avg_f;
+ var delta_x_px = Math.ceil((points[1][0] - points[0][0]) * w_scale);
+
+ for ( var i = 0, len = points.length; i < len; i++ ) {
+ var x = Math.ceil( (points[i][0] - tile_low) * w_scale );
+ var y = points[i][1];
+
+ if (!y) {
+ continue;
+ }
+ y = Math.floor( min_color + (y - avg_f)/sums * min_color );
+ ctx.fillStyle = "rgb(" +y+ "," +y+ "," +y+ ")";
+ ctx.fillRect(x + left_offset, 0, delta_x_px, 20);
+ }
+ parent_element.append( new_canvas );
+ return new_canvas;
+ }
+
+ var data = data.data;
var j = 0;
for (var i = 0, len = data.length; i < len; i++) {
var feature = data[i];
@@ -685,10 +771,10 @@
thick_start = Math.floor( Math.max(0, (feature.thick_start - tile_low) * w_scale) );
thick_end = Math.ceil( Math.min(width, (feature.thick_end - tile_low) * w_scale) );
}
- if (!showing_details) {
+ // if (!showing_details) {
// Non-detail levels
- ctx.fillRect(f_start + left_offset, y_center + 5, f_end - f_start, 1);
- } else {
+ // ctx.fillRect(f_start + left_offset, y_center + 5, f_end - f_start, 1);
+ // } else {
// Showing labels, blocks, details
if (feature.start > tile_low) {
ctx.fillStyle = label_color;
@@ -743,7 +829,7 @@
ctx.fillStyle = prefs.block_color;
}
}
- }
+ // }
j++;
}
}
@@ -772,12 +858,12 @@
}
});
-var ReadTrack = function ( name, dataset_id, indexer, prefs ) {
+var ReadTrack = function ( name, dataset_id, prefs ) {
this.track_type = "ReadTrack";
this.tile_cache = new Cache(CACHED_TILES_FEATURE);
Track.call( this, name, $("#viewport") );
TiledTrack.call( this );
- FeatureTrack.call( this, name, dataset_id, indexer, prefs );
+ FeatureTrack.call( this, name, dataset_id, prefs );
};
$.extend( ReadTrack.prototype, TiledTrack.prototype, FeatureTrack.prototype, {
diff -r 861756e85b16 -r 137d93848139 static/trackster.css
--- a/static/trackster.css Tue Mar 16 16:03:28 2010 -0400
+++ b/static/trackster.css Tue Mar 16 18:54:23 2010 -0400
@@ -30,13 +30,7 @@
}
#nav-controls a {
- color: white;
- padding: 0.1em 0.4em;
- margin: 0 0;
- text-decoration: none;
- background: black;
- -webkit-border-radius: 1em;
- -moz-border-radius: 1em;
+ padding: 5px 0.4em;
}
#overview {
diff -r 861756e85b16 -r 137d93848139 templates/tracks/browser.mako
--- a/templates/tracks/browser.mako Tue Mar 16 16:03:28 2010 -0400
+++ b/templates/tracks/browser.mako Tue Mar 16 18:54:23 2010 -0400
@@ -51,8 +51,12 @@
</select>
<input id="low" size="12" />:<input id="high" size="12" />
<input type="hidden" name="id" value="${config.get('vis_id', '')}" />
- <a href="#" onclick="javascript:view.zoom_in();view.redraw();">+</a>
- <a href="#" onclick="javascript:view.zoom_out();view.redraw();">-</a>
+ <a href="#" onclick="javascript:view.zoom_in();view.redraw();">
+ <img src="${h.url_for('/static/images/fugue/magnifier-zoom.png')}" />
+ </a>
+ <a href="#" onclick="javascript:view.zoom_out();view.redraw();">
+ <img src="${h.url_for('/static/images/fugue/magnifier-zoom-out.png')}" />
+ </a>
</form>
<div id="debug" style="float: right"></div>
</div>
@@ -91,7 +95,7 @@
view = new View( "${config.get('chrom')}", "${config.get('title') | h}", "${config.get('vis_id')}", "${config.get('dbkey')}" );
%for track in config.get('tracks'):
view.add_track(
- new ${track["track_type"]}( "${track['name'] | h}", ${track['dataset_id']}, "${track['indexer']}", ${track['prefs']} )
+ new ${track["track_type"]}( "${track['name'] | h}", ${track['dataset_id']}, ${track['prefs']} )
);
%endfor
init();
@@ -131,7 +135,7 @@
$("#content").bind("mousewheel", function( e, delta ) {
if (delta > 0) {
- view.zoom_in(e.pageX);
+ view.zoom_in(e.pageX, $("#viewport-container"));
} else {
view.zoom_out();
}
@@ -139,7 +143,7 @@
});
$("#content").bind("dblclick", function( e ) {
- view.zoom_in(e.pageX);
+ view.zoom_in(e.pageX, $("#viewport-container"));
});
// To let the overview box be draggable
@@ -210,13 +214,13 @@
var td = track_data;
switch(track_data.track_type) {
case "LineTrack":
- new_track = new LineTrack( track_data.name, track_data.dataset_id, track_data.indexer, track_data.prefs );
+ new_track = new LineTrack( track_data.name, track_data.dataset_id, track_data.prefs );
break;
case "FeatureTrack":
- new_track = new FeatureTrack( track_data.name, track_data.dataset_id, track_data.indexer, track_data.prefs );
+ new_track = new FeatureTrack( track_data.name, track_data.dataset_id, track_data.prefs );
break;
case "ReadTrack":
- new_track = new ReadTrack( track_data.name, track_data.dataset_id, track_data.indexer, track_data.prefs );
+ new_track = new ReadTrack( track_data.name, track_data.dataset_id, track_data.prefs );
break;
}
view.add_track(new_track);
@@ -245,7 +249,6 @@
payload.push( {
"track_type": track.track_type,
- "indexer": track.indexer,
"name": track.name,
"dataset_id": track.dataset_id,
"prefs": track.prefs
@@ -286,6 +289,7 @@
return v.chrom === view.chrom;
})[0];
view.max_high = found.len;
+ view.reset();
view.redraw(true);
for (var track_id in view.tracks) {
@@ -307,13 +311,12 @@
del_icon = $('<a href="#" class="icon-button delete" />'),
edit_icon = $('<a href="#" class="icon-button edit" />'),
body = $('<div class="historyItemBody"></div>'),
- checkbox = $('<input type="checkbox" checked="checked"></input>').attr("id", "track_" + track_id + "title"),
li = $('<li class="sortable"></li>').attr("id", "track_" + track_id),
div = $('<div class="historyItemContainer historyItem"></div>'),
- editable = $('<div style="display:none"></div>');
+ editable = $('<div style="display:none"></div>').attr("id", "track_" + track_id + "_editable");
edit_icon.bind("click", function() {
- editable.toggle();
+ $("#track_" + track_id + "_editable").toggle();
});
del_icon.bind("click", function() {
details: http://www.bx.psu.edu/hg/galaxy/rev/861756e85b16
changeset: 3540:861756e85b16
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Mar 16 16:03:28 2010 -0400
description:
Fix for editing library templates, add a new functional test to cover the scenario, and some code cleanup.
diffstat:
lib/galaxy/web/controllers/library_admin.py | 2 +-
lib/galaxy/web/controllers/library_common.py | 1 -
templates/library/common/browse_library.mako | 10 +-
test/base/test_db_util.py | 9 ---
test/base/twilltestcase.py | 20 +++++-
test/functional/test_library_features.py | 84 +++++++++++++++++++--------
6 files changed, 82 insertions(+), 44 deletions(-)
diffs (366 lines):
diff -r 4a670e8f4248 -r 861756e85b16 lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Tue Mar 16 15:32:05 2010 -0400
+++ b/lib/galaxy/web/controllers/library_admin.py Tue Mar 16 16:03:28 2010 -0400
@@ -34,7 +34,7 @@
template='/admin/library/grid.mako'
default_sort_key = "name"
columns = [
- NameColumn( "Library Name",
+ NameColumn( "Name",
key="name",
model_class=model.Library,
link=( lambda library: dict( operation="browse", id=library.id ) ),
diff -r 4a670e8f4248 -r 861756e85b16 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Tue Mar 16 15:32:05 2010 -0400
+++ b/lib/galaxy/web/controllers/library_common.py Tue Mar 16 16:03:28 2010 -0400
@@ -1616,7 +1616,6 @@
response_redirect=web.url_for( controller='library_common',
action='edit_template',
cntrller=cntrller,
- use_panels=use_panels,
item_type=item_type,
library_id=library_id,
folder_id=folder_id,
diff -r 4a670e8f4248 -r 861756e85b16 templates/library/common/browse_library.mako
--- a/templates/library/common/browse_library.mako Tue Mar 16 15:32:05 2010 -0400
+++ b/templates/library/common/browse_library.mako Tue Mar 16 16:03:28 2010 -0400
@@ -445,11 +445,11 @@
%endif
%if cntrller == 'library_admin':
<a class="action-button" confirm="Click OK to delete the library named '${library.name}'." href="${h.url_for( controller='library_admin', action='delete_library_item', library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library.id ), item_type='library' )}">Delete this data library</a>
- %endif
- %if show_deleted:
- <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=False )}">Hide deleted items</a>
- %else:
- <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=True )}">Show deleted items</a>
+ %if show_deleted:
+ <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=False )}">Hide deleted items</a>
+ %else:
+ <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=True )}">Show deleted items</a>
+ %endif
%endif
%elif cntrller == 'library_admin' and not library.purged:
<a class="action-button" href="${h.url_for( controller='library_admin', action='undelete_library_item', library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library.id ), item_type='library', use_panels=use_panels )}">Undelete this data library</a>
diff -r 4a670e8f4248 -r 861756e85b16 test/base/test_db_util.py
--- a/test/base/test_db_util.py Tue Mar 16 15:32:05 2010 -0400
+++ b/test/base/test_db_util.py Tue Mar 16 16:03:28 2010 -0400
@@ -112,12 +112,3 @@
.all()
def refresh( obj ):
sa_session.refresh( obj )
-def set_library_permissions( in_list ):
- permissions_in = []
- permissions_out = []
- for k, v in galaxy.model.Library.permitted_actions.items():
- if k in in_list:
- permissions_in.append( k )
- else:
- permissions_out.append( k )
- return permissions_in, permissions_out
diff -r 4a670e8f4248 -r 861756e85b16 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Mar 16 15:32:05 2010 -0400
+++ b/test/base/twilltestcase.py Tue Mar 16 16:03:28 2010 -0400
@@ -1630,7 +1630,6 @@
pass
def create_library( self, name='Library One', description='This is Library One', synopsis='Synopsis for Library One' ):
"""Create a new library"""
- self.home()
self.visit_url( "%s/library_admin/create_library" % self.url )
self.check_page_for_string( 'Create a new data library' )
tc.fv( "1", "name", name )
@@ -1640,10 +1639,25 @@
check_str = "The new library named '%s' has been created" % name
self.check_page_for_string( check_str )
self.home()
+ def edit_template( self, cntrller, item_type, library_id, folder_id='', ldda_id='', action='add_field',
+ field_name='', field_name_value='', field_help='', field_help_value='',
+ field_type='', field_type_value='' ):
+ """Edit the form fields defining a library template"""
+ self.visit_url( "%s/library_common/edit_template?cntrller=%s&item_type=%s&library_id=%s" % \
+ ( self.url, cntrller, item_type, library_id ) )
+ self.check_page_for_string( "Edit form definition" )
+ if action == 'add_field':
+ tc.submit( 'add_field_button' )
+ tc.fv( '1', field_name, field_name_value )
+ tc.fv( '1', field_help, field_help_value )
+ tc.fv( '1', field_type, field_type_value )
+ tc.submit( 'save_changes_button' )
+ self.check_page_for_string( "The template for this data library has been updated with your changes." )
+ if action == 'add_field':
+ self.check_page_for_string( field_name_value )
def library_info( self, cntrller, library_id, library_name, new_name='', new_description='', new_synopsis='',
ele_1_field_name='', ele_1_contents='', ele_2_field_name='', ele_2_contents='', check_str1='' ):
"""Edit information about a library, optionally using an existing template with up to 2 elements"""
- self.home()
self.visit_url( "%s/library_common/library_info?cntrller=%s&id=%s" % ( self.url, cntrller, library_id ) )
if check_str1:
self.check_page_for_string( check_str1 )
@@ -1866,7 +1880,7 @@
self.visit_url( url )
self.check_page_for_string( 'Upload a directory of files' )
if check_str1:
- self.check_page_for_strin( check_str1 )
+ self.check_page_for_string( check_str1 )
tc.fv( "1", "folder_id", folder_id )
tc.fv( "1", "file_type", file_type )
tc.fv( "1", "dbkey", dbkey )
diff -r 4a670e8f4248 -r 861756e85b16 test/functional/test_library_features.py
--- a/test/functional/test_library_features.py Tue Mar 16 15:32:05 2010 -0400
+++ b/test/functional/test_library_features.py Tue Mar 16 16:03:28 2010 -0400
@@ -9,21 +9,29 @@
global regular_user1
regular_user1 = get_user( 'test1(a)bx.psu.edu' )
assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+ global regular_user1_private_role
+ regular_user1_private_role = get_private_role( regular_user1 )
self.logout()
self.login( email='test2(a)bx.psu.edu' )
global regular_user2
regular_user2 = get_user( 'test2(a)bx.psu.edu' )
assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+ global regular_user2_private_role
+ regular_user2_private_role = get_private_role( regular_user2 )
self.logout()
self.login( email='test3(a)bx.psu.edu' )
global regular_user3
regular_user3 = get_user( 'test3(a)bx.psu.edu' )
assert regular_user3 is not None, 'Problem retrieving user with email "test3(a)bx.psu.edu" from the database'
+ global regular_user3_private_role
+ regular_user3_private_role = get_private_role( regular_user3 )
self.logout()
self.login( email='test(a)bx.psu.edu' )
global admin_user
admin_user = get_user( 'test(a)bx.psu.edu' )
assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+ global admin_user_private_role
+ admin_user_private_role = get_private_role( admin_user )
def test_005_create_library( self ):
"""Testing creating a new library, then renaming it"""
# Logged in as admin_user
@@ -100,7 +108,6 @@
self.security.encode_id( library_one.id ),
library_one.name,
check_str1=contents )
- contents = '%s library contents' % form_one_field_label
contents_edited = contents + ' edited'
# Edit the contents and then save them
self.library_info( 'library_admin',
@@ -113,7 +120,25 @@
self.security.encode_id( library_one.id ),
library_one.name,
check_str1=contents_edited )
- def test_020_add_public_dataset_to_root_folder( self ):
+ def test_020_edit_template_admin_view( self ):
+ """Test editing the layout of a template from the Admin view"""
+ # Since there is only 1 field currently in the form definition, we'll add another
+ field_name = 'field_name_1'
+ field_name_value = 'Field 1'
+ field_help = 'field_helptext_1'
+ field_help_value = 'Field 1 help'
+ field_type = 'field_type_1'
+ field_type_value = 'TextArea'
+ self.edit_template( 'library_admin',
+ 'library',
+ self.security.encode_id( library_one.id ),
+ field_name=field_name,
+ field_name_value=field_name_value,
+ field_help=field_help,
+ field_help_value=field_help_value,
+ field_type=field_type,
+ field_type_value=field_type_value )
+ def test_025_add_public_dataset_to_root_folder( self ):
"""Testing adding a public dataset to the root folder, making sure library template is inherited"""
# Logged in as admin_user
message = 'Testing adding a public dataset to the root folder'
@@ -145,7 +170,7 @@
self.security.encode_id( ldda_one.id ),
ldda_one.name,
check_str1=template_contents )
- def test_025_add_new_folder_to_root_folder( self ):
+ def test_030_add_new_folder_to_root_folder( self ):
"""Testing adding a folder to a library root folder"""
# logged in as admin_user
root_folder = library_one.root_folder
@@ -177,7 +202,7 @@
self.security.encode_id( library_one.id ),
field_name=form_one_field_name,
contents=template_contents )
- def test_030_add_subfolder_to_folder( self ):
+ def test_035_add_subfolder_to_folder( self ):
"""Testing adding a folder to a library folder"""
# logged in as admin_user
name = "Folder One's Subfolder"
@@ -208,7 +233,7 @@
self.security.encode_id( library_one.id ),
field_name=form_one_field_name,
contents=template_contents )
- def test_035_add_2nd_new_folder_to_root_folder( self ):
+ def test_040_add_2nd_new_folder_to_root_folder( self ):
"""Testing adding a 2nd folder to a library root folder"""
# logged in as admin_user
root_folder = library_one.root_folder
@@ -226,7 +251,7 @@
self.security.encode_id( library_one.id ),
check_str1=name,
check_str2=description )
- def test_040_add_public_dataset_to_root_folders_2nd_subfolder( self ):
+ def test_045_add_public_dataset_to_root_folders_2nd_subfolder( self ):
"""Testing adding a public dataset to the root folder's 2nd sub-folder"""
# Logged in as admin_user
message = "Testing adding a public dataset to the folder named %s" % folder_two.name
@@ -258,7 +283,7 @@
self.security.encode_id( ldda_two.id ),
ldda_two.name,
check_str1=template_contents )
- def test_045_add_2nd_public_dataset_to_root_folders_2nd_subfolder( self ):
+ def test_050_add_2nd_public_dataset_to_root_folders_2nd_subfolder( self ):
"""Testing adding a 2nd public dataset to the root folder's 2nd sub-folder"""
# Logged in as admin_user
message = "Testing adding a 2nd public dataset to the folder named %s" % folder_two.name
@@ -290,7 +315,7 @@
self.security.encode_id( ldda_three.id ),
ldda_three.name,
check_str1=template_contents )
- def test_050_copy_dataset_from_history_to_subfolder( self ):
+ def test_055_copy_dataset_from_history_to_subfolder( self ):
"""Testing copying a dataset from the current history to a subfolder"""
# logged in as admin_user
self.new_history()
@@ -314,7 +339,7 @@
ldda_four.name,
check_str1=form_one_field_name,
not_displayed=contents )
- def test_055_editing_dataset_attribute_info( self ):
+ def test_060_editing_dataset_attribute_info( self ):
"""Testing editing a library dataset's attribute information"""
# logged in as admin_user
new_ldda_name = '4.bed ( version 1 )'
@@ -335,7 +360,7 @@
ldda_four.name,
check_str1=form_one_field_name,
not_displayed=contents )
- def test_060_uploading_new_dataset_version( self ):
+ def test_065_uploading_new_dataset_version( self ):
"""Testing uploading a new version of a library dataset"""
# logged in as admin_user
message = 'Testing uploading a new version of a dataset'
@@ -384,22 +409,32 @@
self.browse_library( 'library_admin',
self.security.encode_id( library_one.id ),
not_displayed=ldda_four.name )
- def test_065_upload_directory_of_files_from_libraries_view( self ):
+ def test_070_upload_directory_of_files_from_libraries_view( self ):
"""Testing uploading a directory of files to a root folder from the Data Libraries view"""
# logged in as admin_user
# admin_user will not have the option to upload a directory of files from the
# Libraries view since a sub-directory named the same as their email is not contained
- # in the configured user_library_import_dir. However, since members of role_one have
- # the LIBRARY_ADD permission, we can test this feature as regular_user1 or regular_user3
+ # in the configured user_library_import_dir ( in the test_data directory, only regular_user1
+ # and regular_user3 have directories ). We'll need to give these 2 user LIBRARY_ADD permission
+ # on library_one to test this feature.
+ permissions_in = [ 'LIBRARY_ADD' ]
+ permissions_out = [ 'LIBRARY_ACCESS', 'LIBRARY_MODIFY', 'LIBRARY_MANAGE' ]
+ role_ids = '%s,%s' % ( str( regular_user1_private_role.id ), str( regular_user3_private_role.id ) )
+ # Role one members are: admin_user, regular_user1, regular_user3. Each of these users will be permitted for
+ # LIBRARY_ACCESS, LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE on this library and it's contents.
+ self.library_permissions( self.security.encode_id( library_one.id ),
+ library_one.name,
+ role_ids,
+ permissions_in,
+ permissions_out )
self.logout()
+ # Now that we have permissions set on the library, we can proceed to test uploading files
self.login( email=regular_user1.email )
message = 'Uploaded all files in test-data/users/test1...'
# Since regular_user1 does not have any sub-directories contained within her configured
# user_library_import_dir, the only option in her server_dir select list will be the
# directory named the same as her email
check_str_after_submit = "Added 1 datasets to the library '%s' (each is selected)." % library_one.root_folder.name
- # TODO: gvk( 3/12/10 )this is broken, so commenting until I have time to discover why...
- """
self.upload_directory_of_files( 'library',
self.security.encode_id( library_one.id ),
self.security.encode_id( library_one.root_folder.id ),
@@ -426,8 +461,7 @@
self.security.encode_id( library_one.id ),
check_str1=regular_user3.email,
check_str2=message )
- """
- def test_070_download_archive_of_library_files( self ):
+ def test_075_download_archive_of_library_files( self ):
"""Testing downloading an archive of files from the library"""
# logged in as regular_user3
self.logout()
@@ -439,7 +473,7 @@
format=format )
self.check_archive_contents( archive, ( ldda_one, ldda_two ) )
os.remove( archive )
- def test_075_mark_dataset_deleted( self ):
+ def test_080_mark_dataset_deleted( self ):
"""Testing marking a library dataset as deleted"""
# Logged in as admin_user
self.delete_library_item( self.security.encode_id( library_one.id ),
@@ -449,7 +483,7 @@
self.browse_library( 'library_admin',
self.security.encode_id( library_one.id ),
not_displayed=ldda_two.name )
- def test_080_display_and_hide_deleted_dataset( self ):
+ def test_085_display_and_hide_deleted_dataset( self ):
"""Testing displaying and hiding a deleted library dataset"""
# Logged in as admin_user
self.browse_library( 'library_admin',
@@ -459,7 +493,7 @@
self.browse_library( 'library_admin',
self.security.encode_id( library_one.id ),
not_displayed=ldda_two.name )
- def test_085_mark_folder_deleted( self ):
+ def test_090_mark_folder_deleted( self ):
"""Testing marking a library folder as deleted"""
# Logged in as admin_user
self.delete_library_item( self.security.encode_id( library_one.id ),
@@ -469,7 +503,7 @@
self.browse_library( 'library_admin',
self.security.encode_id( library_one.id ),
not_displayed=folder_two.name )
- def test_090_mark_folder_undeleted( self ):
+ def test_095_mark_folder_undeleted( self ):
"""Testing marking a library folder as undeleted"""
# Logged in as admin_user
self.undelete_library_item( self.security.encode_id( library_one.id ),
@@ -482,7 +516,7 @@
self.security.encode_id( library_one.id ),
check_str1=folder_two.name,
not_displayed=ldda_two.name )
- def test_095_mark_library_deleted( self ):
+ def test_100_mark_library_deleted( self ):
"""Testing marking a library as deleted"""
# Logged in as admin_user
# First mark folder_two as deleted to further test state saving when we undelete the library
@@ -496,7 +530,7 @@
item_type='library' )
self.browse_libraries_admin( not_displayed1=library_one.name )
self.browse_libraries_admin( deleted=True, check_str1=library_one.name )
- def test_100_mark_library_undeleted( self ):
+ def test_105_mark_library_undeleted( self ):
"""Testing marking a library as undeleted"""
# Logged in as admin_user
self.undelete_library_item( self.security.encode_id( library_one.id ),
@@ -508,7 +542,7 @@
self.security.encode_id( library_one.id ),
check_str1=library_one.name,
not_displayed=folder_two.name )
- def test_105_purge_library( self ):
+ def test_110_purge_library( self ):
"""Testing purging a library"""
# Logged in as admin_user
self.delete_library_item( self.security.encode_id( library_one.id ),
@@ -547,7 +581,7 @@
raise AssertionError( 'The library_dataset id %s named "%s" has not been marked as deleted.' % \
( str( library_dataset.id ), library_dataset.name ) )
check_folder( library_one.root_folder )
- def test_110_no_library_template( self ):
+ def test_115_no_library_template( self ):
"""Test library features when library has no template"""
# Logged in as admin_user
name = "library features Library Two"
details: http://www.bx.psu.edu/hg/galaxy/rev/4a670e8f4248
changeset: 3539:4a670e8f4248
user: fubar: ross Lazarus at gmail period com
date: Tue Mar 16 15:32:05 2010 -0400
description:
Fix for 'add' action in act_on_multiple_datasets - and a warning message if any bogus action supplied - fixes issue #297
diffstat:
lib/galaxy/web/controllers/library_common.py | 7 +++++--
1 files changed, 5 insertions(+), 2 deletions(-)
diffs (31 lines):
diff -r aad7b294ffca -r 4a670e8f4248 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Tue Mar 16 15:09:22 2010 -0400
+++ b/lib/galaxy/web/controllers/library_common.py Tue Mar 16 15:32:05 2010 -0400
@@ -1257,7 +1257,7 @@
messagetype = 'error'
else:
ldda_ids = util.listify( ldda_ids )
- if action == 'import_to_history':
+ if action == 'import_to_history' or action == 'add':
history = trans.get_history()
if history is None:
# Must be a bot sending a request without having a history.
@@ -1308,7 +1308,7 @@
trans.sa_session.add( ld )
trans.sa_session.flush()
msg = "The selected datasets have been removed from this data library"
- else:
+ elif action in ['zip','tgz','tbz']:
error = False
killme = string.punctuation + string.whitespace
trantab = string.maketrans(killme,'_'*len(killme))
@@ -1416,6 +1416,9 @@
archive.wsgi_status = trans.response.wsgi_status()
archive.wsgi_headeritems = trans.response.wsgi_headeritems()
return archive.stream
+ else: # unknown action
+ msg = '### unknown action = %s in act_on_multiple_datasets' % action
+
return trans.response.send_redirect( web.url_for( controller='library_common',
action='browse_library',
cntrller=cntrller,
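For readers skimming the diff, the shape of the fix is a name-based dispatch that now treats 'add' like 'import_to_history' and warns on anything unrecognized. A minimal sketch of that pattern follows; only the action names come from the diff, everything else is illustrative rather than Galaxy's actual controller code:

# Minimal sketch of the dispatch pattern in the fix above; only the action
# names are taken from the diff, the rest is illustrative.
def act_on_multiple_datasets( action, ldda_ids ):
    if action in ( 'import_to_history', 'add' ):
        return 'Imported %d datasets into the current history' % len( ldda_ids )
    elif action == 'delete':
        return 'The selected datasets have been removed from this data library'
    elif action in ( 'zip', 'tgz', 'tbz' ):
        return 'Archived %d datasets as %s' % ( len( ldda_ids ), action )
    else:
        # Previously a bogus action fell through to the archive branch;
        # now it produces an explicit warning message instead.
        return '### unknown action = %s in act_on_multiple_datasets' % action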
details: http://www.bx.psu.edu/hg/galaxy/rev/aad7b294ffca
changeset: 3538:aad7b294ffca
user: Kanwei Li <kanwei(a)gmail.com>
date: Tue Mar 16 15:09:22 2010 -0400
description:
Add magnifier icons for trackster
diffstat:
static/images/fugue/magnifier-zoom-out.png | 0
static/images/fugue/magnifier-zoom.png | 0
2 files changed, 0 insertions(+), 0 deletions(-)
diffs (4 lines):
diff -r 2fa5488a9b3e -r aad7b294ffca static/images/fugue/magnifier-zoom-out.png
Binary file static/images/fugue/magnifier-zoom-out.png has changed
diff -r 2fa5488a9b3e -r aad7b294ffca static/images/fugue/magnifier-zoom.png
Binary file static/images/fugue/magnifier-zoom.png has changed
details: http://www.bx.psu.edu/hg/galaxy/rev/2fa5488a9b3e
changeset: 3537:2fa5488a9b3e
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Mar 16 14:07:42 2010 -0400
description:
Monkeypatch pkg_resources to put eggs at the beginning of the path instead of the end
diffstat:
lib/galaxy/__init__.py | 31 +++++++++++++++++++++++++++++++
1 files changed, 31 insertions(+), 0 deletions(-)
diffs (38 lines):
diff -r d776beca95f8 -r 2fa5488a9b3e lib/galaxy/__init__.py
--- a/lib/galaxy/__init__.py Tue Mar 16 12:00:10 2010 -0400
+++ b/lib/galaxy/__init__.py Tue Mar 16 14:07:42 2010 -0400
@@ -61,3 +61,34 @@
except:
pkg_resources._compatible_platforms = pkg_resources.compatible_platforms
pkg_resources.compatible_platforms = _compatible_platforms
+
+# patch to insert eggs at the beginning of sys.path instead of at the end
+def _insert_on(self, path, loc = None):
+ """Insert self.location in path before its nearest parent directory"""
+
+ loc = loc or self.location
+ if not loc:
+ return
+
+ nloc = pkg_resources._normalize_cached(loc)
+ npath= [(p and pkg_resources._normalize_cached(p) or p) for p in path]
+
+ if path is sys.path:
+ self.check_version_conflict()
+ path.insert(0, loc)
+
+ # remove dups
+ while 1:
+ try:
+ np = npath.index(nloc, 1)
+ except ValueError:
+ break
+ else:
+ del npath[np], path[np]
+
+ return
+try:
+ assert pkg_resources.Distribution._insert_on
+except:
+ pkg_resources.Distribution._insert_on = pkg_resources.Distribution.insert_on
+ pkg_resources.Distribution.insert_on = _insert_on
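The patch uses the same guard-then-swap idiom as the earlier compatible_platforms patch in this file: stash the original method under a private name, install the replacement, and use the stashed name as a sentinel so the patch is only applied once. A stripped-down sketch of the idiom, with an illustrative replacement body rather than the full patch above:

# Sketch of the guarded monkeypatch idiom (illustrative replacement body only;
# the real patch above prepends the egg location to sys.path and removes dups).
import pkg_resources

def _patched_insert_on( self, path, loc=None ):
    loc = loc or self.location
    if loc:
        path.insert( 0, loc )

try:
    # If the stashed original already exists, the patch was applied; do nothing.
    pkg_resources.Distribution._insert_on
except AttributeError:
    pkg_resources.Distribution._insert_on = pkg_resources.Distribution.insert_on
    pkg_resources.Distribution.insert_on = _patched_insert_on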
details: http://www.bx.psu.edu/hg/galaxy/rev/15fd40238cef
changeset: 3535:15fd40238cef
user: rc
date: Tue Mar 16 09:50:13 2010 -0400
description:
Fixes user info functional tests broken in changeset 3529:96ec861b4b6e
diffstat:
test/base/twilltestcase.py | 23 ++++++++++++-----------
1 files changed, 12 insertions(+), 11 deletions(-)
diffs (48 lines):
diff -r 447c059a096a -r 15fd40238cef test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Mon Mar 15 15:06:14 2010 -0400
+++ b/test/base/twilltestcase.py Tue Mar 16 09:50:13 2010 -0400
@@ -806,17 +806,18 @@
self.visit_page( "user/create?user_info_select=%i&admin_view=False" % user_info_form_id )
else:
self.visit_page( "user/create?admin_view=False" )
+ print self.write_temp_file( self.last_page() )
self.check_page_for_string( "Create account" )
- tc.fv( "1", "email", email )
- tc.fv( "1", "password", password )
- tc.fv( "1", "confirm", password )
- tc.fv( "1", "username", username )
+ tc.fv( "2", "email", email )
+ tc.fv( "2", "password", password )
+ tc.fv( "2", "confirm", password )
+ tc.fv( "2", "username", username )
if user_info_forms == 'multiple':
self.check_page_for_string( "User type" )
for index, info_value in enumerate(user_info_values):
- tc.fv( "1", "field_%i" % index, info_value )
+ tc.fv( "2", "field_%i" % index, info_value )
tc.submit( "create_user_button" )
- self.check_page_for_string( "now logged in as %s" % email )
+ self.check_page_for_string( "ogged in as %s" % email )
def create_user_with_info_as_admin( self, email, password, username, user_info_forms, user_info_form_id, user_info_values ):
'''
This method registers a new user and also provides use info as an admin
@@ -827,14 +828,14 @@
else:
self.visit_page( "admin/users?operation=create" )
self.check_page_for_string( "Create account" )
- tc.fv( "1", "email", email )
- tc.fv( "1", "password", password )
- tc.fv( "1", "confirm", password )
- tc.fv( "1", "username", username )
+ tc.fv( "2", "email", email )
+ tc.fv( "2", "password", password )
+ tc.fv( "2", "confirm", password )
+ tc.fv( "2", "username", username )
if user_info_forms == 'multiple':
self.check_page_for_string( "User type" )
for index, info_value in enumerate(user_info_values):
- tc.fv( "1", "field_%i" % index, info_value )
+ tc.fv( "2", "field_%i" % index, info_value )
tc.submit( "create_user_button" )
self.check_page_for_string( "Created new user account (%s)" % email )
def edit_login_info( self, new_email, new_username ):
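For context, twill's tc.fv( form, field, value ) addresses forms by their 1-based position on the page, so the user-info work in changeset 3529 pushed the registration form from position 1 to position 2; the fix simply updates that index. A tiny, hedged example of the idiom (the URL and field values below are invented):

# Illustrative twill usage showing why the form index matters:
# tc.fv() targets forms by 1-based position on the page.
from twill import commands as tc

tc.go( 'http://localhost:8080/user/create' )    # hypothetical Galaxy URL
tc.fv( '2', 'email', 'someone@example.org' )    # '2' = second form on the page
tc.fv( '2', 'password', 'secret' )
tc.fv( '2', 'confirm', 'secret' )
tc.submit( 'create_user_button' )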
details: http://www.bx.psu.edu/hg/galaxy/rev/d776beca95f8
changeset: 3536:d776beca95f8
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Tue Mar 16 12:00:10 2010 -0400
description:
Fixes to item embedding in pages. In particular, fixed the editor color changes that were occurring when moving the cursor through embedded items.
diffstat:
static/june_2007_style/blue/embed_item.css | 16 ++++++++++------
static/june_2007_style/embed_item.css.tmpl | 25 ++++++++++++++++++++++++-
static/wymeditor/iframe/galaxy/wymiframe.css | 12 +++++++++---
templates/embed_base.mako | 2 +-
templates/page/editor.mako | 11 ++++++-----
templates/workflow/embed.mako | 1 +
6 files changed, 51 insertions(+), 16 deletions(-)
diffs (177 lines):
diff -r 15fd40238cef -r d776beca95f8 static/june_2007_style/blue/embed_item.css
--- a/static/june_2007_style/blue/embed_item.css Tue Mar 16 09:50:13 2010 -0400
+++ b/static/june_2007_style/blue/embed_item.css Tue Mar 16 12:00:10 2010 -0400
@@ -1,10 +1,14 @@
-.embedded-item{margin-left:auto;margin-right:auto;width:90%;padding: 0.5em;-moz-border-radius:0.5em;-webkit-border-radius:0.5em;border-radius:0.5em;}
-.embedded-item.history{background-color:#C1C9E5}
-.embedded-item.dataset{background-color:#CFC}
-.embedded-item.workflow{background-color:#EBD9B2}
+.embedded-item{margin-left:auto;margin-right:auto;width:90%;-moz-border-radius:0.5em;-webkit-border-radius:0.5em;border-radius:0.5em;}
+.embedded-item.display {padding: 0.5em;}
+.embedded-item.history{background-color:#C1C9E5}
+.embedded-item.history p{background:#C1C9E5 no-repeat 2px 2px;margin-top:0;margin-bottom:0;}
+.embedded-item.dataset{background-color:#CFC}
+.embedded-item.dataset p{background:#CFC no-repeat 2px 2px;margin-top:0;margin-bottom:0;}
+.embedded-item.workflow{background-color:#EBD9B2}
+.embedded-item.workflow p{background:#EBD9B2 no-repeat 2px 2px;margin-top:0;margin-bottom:0;}
.embedded-item.placeholder{}
-.embedded-item .item-content{max-height:25em;overflow:auto;}
-.embedded-item .title{vertical-align:top;text-align:center;}
+.embedded-item .item-content{max-height:25em;overflow:auto;display:none;}
+.embedded-item .title{vertical-align:top;text-align:center;font-weight:bold;}
.embedded-item.placeholder .content{padding: 1em 1em;font-style:italic;text-align:center;}
table.annotated-item{width:100%;border-collapse:collapse;}
table.annotated-item td,th{padding:0;}
diff -r 15fd40238cef -r d776beca95f8 static/june_2007_style/embed_item.css.tmpl
--- a/static/june_2007_style/embed_item.css.tmpl Tue Mar 16 09:50:13 2010 -0400
+++ b/static/june_2007_style/embed_item.css.tmpl Tue Mar 16 12:00:10 2010 -0400
@@ -1,35 +1,58 @@
+
.embedded-item {
margin-left:auto;
margin-right:auto;
width:90%;
- padding: 0.5em;
-moz-border-radius:0.5em;
-webkit-border-radius:0.5em;
border-radius:0.5em;
}
+.embedded-item.display {
+ padding: 0.5em;
+}
+
.embedded-item.history {
background-color:#C1C9E5
}
+.embedded-item.history p {
+ background:#C1C9E5 no-repeat 2px 2px;
+ margin-top:0;
+ margin-bottom:0;
+}
+
.embedded-item.dataset {
background-color:#CFC
}
+.embedded-item.dataset p {
+ background:#CFC no-repeat 2px 2px;
+ margin-top:0;margin-bottom:0;
+}
+
.embedded-item.workflow {
background-color:#EBD9B2
}
+.embedded-item.workflow p {
+ background:#EBD9B2 no-repeat 2px 2px;
+ margin-top:0;
+ margin-bottom:0;
+}
+
.embedded-item.placeholder{}
.embedded-item .item-content {
max-height: 25em;
overflow: auto;
+ display: none;
}
.embedded-item .title {
vertical-align:top;
text-align:center;
+ font-weight: bold;
}
.embedded-item.placeholder .content {
diff -r 15fd40238cef -r d776beca95f8 static/wymeditor/iframe/galaxy/wymiframe.css
--- a/static/wymeditor/iframe/galaxy/wymiframe.css Tue Mar 16 09:50:13 2010 -0400
+++ b/static/wymeditor/iframe/galaxy/wymiframe.css Tue Mar 16 12:00:10 2010 -0400
@@ -1,3 +1,7 @@
+/*
+ * JG: edited to remove '!important' from rules so that embedded Galaxy items can have unique background colors.
+ */
+
/*
* WYMeditor : what you see is What You Mean web-based editor
* Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
@@ -33,9 +37,9 @@
ol,
table,
blockquote,
- pre { background: #FFFFFF no-repeat 2px 2px !important;
- padding:8px 5px 5px !important;
- margin:10px !important; }
+ pre { background: #FFFFFF no-repeat 2px 2px; /* JG removed: !important */
+ padding:8px 5px 5px; /* JG removed: !important */
+ margin:10px} /* JG removed: !important */
td { background: #F0F4F8; }
th { background: #ffffcc; }
ul,
@@ -52,6 +56,7 @@
td { height: 1.6em; }
/* labels */
+/* JG commented out:
p { background-image: url(lbl-p.png); }
h1 { background-image: url(lbl-h1.png); }
h2 { background-image: url(lbl-h2.png); }
@@ -61,6 +66,7 @@
h6 { background-image: url(lbl-h6.png); }
blockquote{ background-image: url(lbl-blockquote.png); }
pre { background-image: url(lbl-pre.png); }
+*/
/* specific HTML elements */
caption { text-align: left; }
diff -r 15fd40238cef -r d776beca95f8 templates/embed_base.mako
--- a/templates/embed_base.mako Tue Mar 16 09:50:13 2010 -0400
+++ b/templates/embed_base.mako Tue Mar 16 12:00:10 2010 -0400
@@ -6,7 +6,7 @@
<%namespace file="/display_common.mako" import="*" />
## HTML structure.
-<div class='embedded-item ${get_class_display_name( item.__class__ ).lower()}'>
+<div class='embedded-item display ${get_class_display_name( item.__class__ ).lower()}'>
<div class='title'>
${self.render_title( item )}
<hr/>
diff -r 15fd40238cef -r d776beca95f8 templates/page/editor.mako
--- a/templates/page/editor.mako Tue Mar 16 09:50:13 2010 -0400
+++ b/templates/page/editor.mako Tue Mar 16 12:00:10 2010 -0400
@@ -459,17 +459,18 @@
// all non-standard attributes when it returns its content (e.g. it will not return an element attribute of the form
// item_class='History').
var item_embed_html =
- "<p> \
+ "\
<div id='" + item_info.iclass + "-" + item_id + "' class='embedded-item " + item_info.singular.toLowerCase() +
" placeholder'> \
- <div class='title'> Embedded Galaxy " + item_info.singular + " '" + item_name + "'</div> \
- <div class='content'> \
+ <p class='title'>Embedded Galaxy " + item_info.singular + " '" + item_name + "'</p> \
+ <p class='content'> \
[Do not edit this block; Galaxy will fill it in with the annotated " +
item_info.singular.toLowerCase() + " when it is displayed.]</div> \
- </div> \
- </div></p>";
+ </p> \
+ </div><p></p>";
// Insert embedded representation into document.
+ // TODO: maybe try replace() instead to handle indenting?
wym.insert(item_embed_html);
});
hide_modal();
diff -r 15fd40238cef -r d776beca95f8 templates/workflow/embed.mako
--- a/templates/workflow/embed.mako Tue Mar 16 09:50:13 2010 -0400
+++ b/templates/workflow/embed.mako Tue Mar 16 12:00:10 2010 -0400
@@ -4,6 +4,7 @@
%>
<%def name="render_summary_content( workflow, steps )">
+
## <ul>
## <% num_steps = len ( steps ) %>
## <li>${num_steps} step${iff( num_steps != 1, "s", "" )}
details: http://www.bx.psu.edu/hg/galaxy/rev/ac60740712de
changeset: 3533:ac60740712de
user: Nate Coraor <nate(a)bx.psu.edu>
date: Mon Mar 15 14:35:35 2010 -0400
description:
Add numpy egg, update bx_python egg (now with 100% more numpy) and include code that allows scrambling to depend on other eggs existing and being importable.
diffstat:
eggs.ini | 6 +-
lib/galaxy/eggs/dist.py | 5 +
lib/galaxy/eggs/scramble.py | 12 ++-
lib/pkg_resources.py | 148 ++++++++++++++++++++++------------
scripts/dist-scramble.py | 13 +++
scripts/scramble.py | 12 ++-
scripts/scramble/lib/scramble_lib.py | 36 +++-----
scripts/scramble/scripts/generic.py | 1 +
scripts/test_dist_egg.py | 53 ++++++++++++
9 files changed, 207 insertions(+), 79 deletions(-)
diffs (592 lines):
diff -r cfb0776875c0 -r ac60740712de eggs.ini
--- a/eggs.ini Mon Mar 15 14:05:25 2010 -0400
+++ b/eggs.ini Mon Mar 15 14:35:35 2010 -0400
@@ -16,6 +16,7 @@
Cheetah = 2.2.2
DRMAA_python = 0.2
MySQL_python = 1.2.3c1
+numpy = 1.3.0
pbs_python = 2.9.4
psycopg2 = 2.0.13
pycrypto = 2.0.1
@@ -59,7 +60,7 @@
psycopg2 = _8.4.2_static
pysqlite = _3.6.17_static
MySQL_python = _5.1.41_static
-bx_python = _dev_3b9d30e47619
+bx_python = _dev_f74aec067563
GeneTrack = _dev_48da9e998f0caf01c5be731e926f4b0481f658f0
SQLAlchemy = _dev_r6498
pysam = _kanwei_90e03180969d
@@ -70,3 +71,6 @@
MySQL_python = mysql-5.1.41
psycopg2 = postgresql-8.4.2
pysqlite = sqlite-amalgamation-3_6_17
+
+[dependencies]
+bx_python = numpy
diff -r cfb0776875c0 -r ac60740712de lib/galaxy/eggs/dist.py
--- a/lib/galaxy/eggs/dist.py Mon Mar 15 14:05:25 2010 -0400
+++ b/lib/galaxy/eggs/dist.py Mon Mar 15 14:35:35 2010 -0400
@@ -72,6 +72,10 @@
sources = self.config.get( 'source', name ).split()
except:
sources = []
+ try:
+ dependencies = self.config.get( 'dependencies', name ).split()
+ except:
+ dependencies = []
if full_platform:
platforms = self.platforms
else:
@@ -83,4 +87,5 @@
host_info = self.hosts[platform].split()
egg.build_host, egg.python = host_info[:2]
egg.sources = sources
+ egg.dependencies = dependencies
self.eggs[name].append( egg )
diff -r cfb0776875c0 -r ac60740712de lib/galaxy/eggs/scramble.py
--- a/lib/galaxy/eggs/scramble.py Mon Mar 15 14:05:25 2010 -0400
+++ b/lib/galaxy/eggs/scramble.py Mon Mar 15 14:35:35 2010 -0400
@@ -3,7 +3,7 @@
"""
import os, sys, shutil, tempfile, subprocess, urlparse, urllib
-from __init__ import Egg, Crate, URLRetriever, galaxy_dir, py, unpack_zipfile
+from __init__ import Egg, Crate, URLRetriever, galaxy_dir, py, unpack_zipfile, EggNotFetchable
from distutils.sysconfig import get_config_var
import tarfile, zipfile, zlib
@@ -37,6 +37,7 @@
def __init__( self, *args, **kwargs ):
Egg.__init__( self, *args, **kwargs )
self.sources = []
+ self.dependencies = []
self.buildpath = None
self.source_path = None
self.py = py
@@ -184,6 +185,11 @@
tagfile = open( os.path.join( self.buildpath, ".galaxy_tag" ), "w" )
tagfile.write( self.tag + '\n' )
tagfile.close()
+ if self.dependencies:
+ depfile = open( os.path.join( self.buildpath, ".galaxy_deps" ), "w" )
+ for dependency in self.dependencies:
+ depfile.write( dependency + '\n' )
+ depfile.close()
def run_scramble_script( self ):
log.warning( "%s(): Beginning build" % sys._getframe().f_code.co_name )
# subprocessed to sterilize the env
@@ -211,6 +217,10 @@
egg.sources = self.config.get( "source", egg.name ).split()
except:
egg.sources = []
+ try:
+ egg.dependencies = self.config.get( "dependencies", egg.name ).split()
+ except:
+ egg.dependencies = []
def parse_egg_section( self, *args, **kwargs ):
kwargs['egg_class'] = ScrambleEgg
Crate.parse_egg_section( self, *args, **kwargs )
diff -r cfb0776875c0 -r ac60740712de lib/pkg_resources.py
--- a/lib/pkg_resources.py Mon Mar 15 14:05:25 2010 -0400
+++ b/lib/pkg_resources.py Mon Mar 15 14:35:35 2010 -0400
@@ -13,26 +13,67 @@
method.
"""
-import sys, os, zipimport, time, re, imp, new
+import sys, os, zipimport, time, re, imp
try:
frozenset
except NameError:
from sets import ImmutableSet as frozenset
-from os import utime, rename, unlink # capture these to bypass sandboxing
+# capture these to bypass sandboxing
+from os import utime, rename, unlink, mkdir
from os import open as os_open
-
-
-
-
-
-
-
-
-
-
-
+from os.path import isdir, split
+
+
+def _bypass_ensure_directory(name, mode=0777):
+ # Sandbox-bypassing version of ensure_directory()
+ dirname, filename = split(name)
+ if dirname and filename and not isdir(dirname):
+ _bypass_ensure_directory(dirname)
+ mkdir(dirname, mode)
+
+
+
+
+
+
+
+_state_vars = {}
+
+def _declare_state(vartype, **kw):
+ g = globals()
+ for name, val in kw.iteritems():
+ g[name] = val
+ _state_vars[name] = vartype
+
+def __getstate__():
+ state = {}
+ g = globals()
+ for k, v in _state_vars.iteritems():
+ state[k] = g['_sget_'+v](g[k])
+ return state
+
+def __setstate__(state):
+ g = globals()
+ for k, v in state.iteritems():
+ g['_sset_'+_state_vars[k]](k, g[k], v)
+ return state
+
+def _sget_dict(val):
+ return val.copy()
+
+def _sset_dict(key, ob, state):
+ ob.clear()
+ ob.update(state)
+
+def _sget_object(val):
+ return val.__getstate__()
+
+def _sset_object(key, ob, state):
+ ob.__setstate__(state)
+
+_sget_none = _sset_none = lambda *args: None
@@ -164,14 +205,8 @@
def _macosx_vers(_cache=[]):
if not _cache:
- info = os.popen('/usr/bin/sw_vers').read().splitlines()
- for line in info:
- key, value = line.split(None, 1)
- if key == 'ProductVersion:':
- _cache.append(value.strip().split("."))
- break
- else:
- raise ValueError, "What?!"
+ from platform import mac_ver
+ _cache.append(mac_ver()[0].split('.'))
return _cache[0]
def _macosx_arch(machine):
@@ -203,6 +238,12 @@
+
+
+
+
+
+
def compatible_platforms(provided,required):
"""Can code for the `provided` platform run on the `required` platform?
@@ -387,7 +428,7 @@
def add_entry(self, entry):
"""Add a path item to ``.entries``, finding any distributions on it
- ``find_distributions(entry,False)`` is used to find distributions
+ ``find_distributions(entry, True)`` is used to find distributions
corresponding to the path entry, and they are added. `entry` is
always appended to ``.entries``, even if it is already present.
(This is because ``sys.path`` can contain the same value more than
@@ -622,7 +663,6 @@
activated to fulfill the requirements; all relevant distributions are
included, even if they were already activated in this working set.
"""
-
needed = self.resolve(parse_requirements(requirements))
for dist in needed:
@@ -630,7 +670,6 @@
return needed
-
def subscribe(self, callback):
"""Invoke `callback` for all distributions (including existing ones)"""
if callback in self.callbacks:
@@ -639,19 +678,21 @@
for dist in self:
callback(dist)
-
def _added_new(self, dist):
for callback in self.callbacks:
callback(dist)
-
-
-
-
-
-
-
-
+ def __getstate__(self):
+ return (
+ self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
+ self.callbacks[:]
+ )
+
+ def __setstate__(self, (entries, keys, by_key, callbacks)):
+ self.entries = entries[:]
+ self.entry_keys = keys.copy()
+ self.by_key = by_key.copy()
+ self.callbacks = callbacks[:]
class Environment(object):
@@ -916,7 +957,7 @@
extract_path = self.extraction_path or get_default_cache()
target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
try:
- ensure_directory(target_path)
+ _bypass_ensure_directory(target_path)
except:
self.extraction_error()
@@ -1188,7 +1229,9 @@
)
def _fn(self, base, resource_name):
- return os.path.join(base, *resource_name.split('/'))
+ if resource_name:
+ return os.path.join(base, *resource_name.split('/'))
+ return base
def _get(self, path):
if hasattr(self.loader, 'get_data'):
@@ -1226,8 +1269,6 @@
-
-
class DefaultProvider(EggProvider):
"""Provides access to package resources in the filesystem"""
@@ -1597,7 +1638,7 @@
-_distribution_finders = {}
+_declare_state('dict', _distribution_finders = {})
def register_finder(importer_type, distribution_finder):
"""Register `distribution_finder` to find distributions in sys.path items
@@ -1646,7 +1687,7 @@
"""Yield distributions accessible on a sys.path directory"""
path_item = _normalize_cached(path_item)
- if os.path.isdir(path_item):
+ if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
if path_item.lower().endswith('.egg'):
# unpacked egg
yield Distribution.from_filename(
@@ -1679,8 +1720,8 @@
break
register_finder(ImpWrapper,find_on_path)
-_namespace_handlers = {}
-_namespace_packages = {}
+_declare_state('dict', _namespace_handlers = {})
+_declare_state('dict', _namespace_packages = {})
def register_namespace_handler(importer_type, namespace_handler):
"""Register `namespace_handler` to declare namespace packages
@@ -1709,7 +1750,7 @@
return None
module = sys.modules.get(packageName)
if module is None:
- module = sys.modules[packageName] = new.module(packageName)
+ module = sys.modules[packageName] = imp.new_module(packageName)
module.__path__ = []; _set_parent_ns(packageName)
elif not hasattr(module,'__path__'):
raise TypeError("Not a package:", packageName)
@@ -1862,7 +1903,7 @@
The algorithm assumes that strings like "-" and any alpha string that
alphabetically follows "final" represents a "patch level". So, "2.4-1"
is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
- considered newer than "2.4-1", whic in turn is newer than "2.4".
+ considered newer than "2.4-1", which in turn is newer than "2.4".
Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
come before "final" alphabetically) are assumed to be pre-release versions,
@@ -1871,7 +1912,8 @@
Finally, to handle miscellaneous cases, the strings "pre", "preview", and
"rc" are treated as if they were "c", i.e. as though they were release
candidates, and therefore are not as new as a version string that does not
- contain them.
+ contain them, and "dev" is replaced with an '@' so that it sorts lower than
+ than any other pre-release tag.
"""
parts = []
for part in _parse_version_parts(s.lower()):
@@ -2219,12 +2261,9 @@
if not loc:
return
- if path is sys.path:
- self.check_version_conflict()
-
nloc = _normalize_cached(loc)
bdir = os.path.dirname(nloc)
- npath= map(_normalize_cached, path)
+ npath= [(p and _normalize_cached(p) or p) for p in path]
bp = None
for p, item in enumerate(npath):
@@ -2232,10 +2271,14 @@
break
elif item==bdir and self.precedence==EGG_DIST:
# if it's an .egg, give it precedence over its directory
+ if path is sys.path:
+ self.check_version_conflict()
path.insert(p, loc)
npath.insert(p, nloc)
break
else:
+ if path is sys.path:
+ self.check_version_conflict()
path.append(loc)
return
@@ -2252,7 +2295,6 @@
return
-
def check_version_conflict(self):
if self.key=='setuptools':
return # ignore the inevitable setuptools self-conflicts :(
@@ -2266,7 +2308,7 @@
continue
fn = getattr(sys.modules[modname], '__file__', None)
- if fn and normalize_path(fn).startswith(loc):
+ if fn and (normalize_path(fn).startswith(loc) or fn.startswith(loc)):
continue
issue_warning(
"Module %s was already imported from %s, but %s is being added"
@@ -2443,7 +2485,7 @@
def __contains__(self,item):
if isinstance(item,Distribution):
- if item.key <> self.key: return False
+ if item.key != self.key: return False
if self.index: item = item.parsed_version # only get if we need it
elif isinstance(item,basestring):
item = parse_version(item)
@@ -2540,7 +2582,7 @@
os.open = old_open # and then put it back
-# Set up global resource manager
+# Set up global resource manager (deliberately not state-saved)
_manager = ResourceManager()
def _initialize(g):
for name in dir(_manager):
@@ -2549,7 +2591,7 @@
_initialize(globals())
# Prepare the master working set and make the ``require()`` API available
-working_set = WorkingSet()
+_declare_state('object', working_set = WorkingSet())
try:
# Does the main program list any requirements?
from __main__ import __requires__
diff -r cfb0776875c0 -r ac60740712de scripts/dist-scramble.py
--- a/scripts/dist-scramble.py Mon Mar 15 14:05:25 2010 -0400
+++ b/scripts/dist-scramble.py Mon Mar 15 14:35:35 2010 -0400
@@ -32,6 +32,19 @@
failed = []
for egg in eggs:
try:
+ for dependency in egg.dependencies:
+ print "Checking %s dependency: %s" % ( egg.name, dependency )
+ # this could be in a better data structure...
+ dep = filter( lambda x: x.platform == egg.platform, c[dependency] )[0]
+ dep.resolve()
+ except EggNotFetchable, e:
+ degg = e.eggs[0]
+ print "%s build dependency %s %s %s couldn't be" % ( egg.name, degg.name, degg.version, degg.platform )
+ print "downloaded automatically. There isn't really a graceful"
+ print "way to handle this when dist-scrambling."
+ failed.append( egg.platform )
+ continue
+ try:
egg.scramble()
except ScrambleFailure:
failed.append( egg.platform )
diff -r cfb0776875c0 -r ac60740712de scripts/scramble.py
--- a/scripts/scramble.py Mon Mar 15 14:05:25 2010 -0400
+++ b/scripts/scramble.py Mon Mar 15 14:35:35 2010 -0400
@@ -14,7 +14,7 @@
lib = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "..", "lib" ) )
sys.path.append( lib )
-from galaxy.eggs.scramble import ScrambleCrate, ScrambleFailure
+from galaxy.eggs.scramble import ScrambleCrate, ScrambleFailure, EggNotFetchable
c = ScrambleCrate()
@@ -31,6 +31,16 @@
except:
print "error: %s not in eggs.ini" % name
sys.exit( 1 )
+ for dependency in egg.dependencies:
+ print "Checking %s dependency: %s" % ( egg.name, dependency )
+ try:
+ c[dependency].require()
+ except EggNotFetchable, e:
+ degg = e.eggs[0]
+ print "%s build dependency %s %s couldn't be downloaded" % ( egg.name, degg.name, degg.version )
+ print "automatically. You can try building it by hand with:"
+ print " python scripts/scramble.py %s" % degg.name
+ sys.exit( 1 )
egg.scramble()
sys.exit( 0 )
except ScrambleFailure, e:
diff -r cfb0776875c0 -r ac60740712de scripts/scramble/lib/scramble_lib.py
--- a/scripts/scramble/lib/scramble_lib.py Mon Mar 15 14:05:25 2010 -0400
+++ b/scripts/scramble/lib/scramble_lib.py Mon Mar 15 14:35:35 2010 -0400
@@ -21,6 +21,15 @@
except:
return None
+def get_deps():
+ try:
+ depf = open( '.galaxy_deps', 'r' )
+ except:
+ return []
+ c = eggs.Crate()
+ for dep in depf:
+ c[dep.strip()].require()
+
def clean( extra_dirs=[] ):
for dir in [ 'build', 'dist' ] + extra_dirs:
try:
@@ -127,29 +136,10 @@
else:
return 'gcc'
-# Monkeypatch pkg_resources for better ABI recognition
-def _get_platform():
- plat = distutils.util._get_platform()
- if sys.version_info[:2] == ( 2, 5 ) and \
- ( ( os.uname()[-1] in ( 'i386', 'ppc' ) and sys.platform == 'darwin' and os.path.abspath( sys.prefix ).startswith( '/System' ) ) or \
- ( sys.platform == 'darwin' and get_config_vars().get('UNIVERSALSDK', '').strip() ) ):
- plat = 'macosx-10.3-fat'
- if sys.platform == "sunos5" and not (plat.endswith('_32') or plat.endswith('_64')):
- if sys.maxint > 2**31:
- plat += '_64'
- else:
- plat += '_32'
- if not (plat.endswith('-ucs2') or plat.endswith('-ucs4')):
- if sys.maxunicode > 2**16:
- plat += '-ucs4'
- else:
- plat += '-ucs2'
- return plat
-try:
- assert distutil.util._get_platform
-except:
- distutils.util._get_platform = distutils.util.get_platform
- distutils.util.get_platform = _get_platform
+# get galaxy eggs lib
+galaxy_lib = os.path.abspath( os.path.join( os.path.dirname( __file__ ), '..', '..', '..', 'lib' ) )
+sys.path.insert( 0, galaxy_lib )
+from galaxy import eggs
# get setuptools
from ez_setup import use_setuptools
diff -r cfb0776875c0 -r ac60740712de scripts/scramble/scripts/generic.py
--- a/scripts/scramble/scripts/generic.py Mon Mar 15 14:05:25 2010 -0400
+++ b/scripts/scramble/scripts/generic.py Mon Mar 15 14:35:35 2010 -0400
@@ -9,6 +9,7 @@
from scramble_lib import *
tag = get_tag() # get the tag
+get_deps() # require any dependent eggs
clean() # clean up any existing stuff (could happen if you run scramble.py by hand)
# reset args for distutils
diff -r cfb0776875c0 -r ac60740712de scripts/test_dist_egg.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/test_dist_egg.py Mon Mar 15 14:35:35 2010 -0400
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+"""
+A crude script for minimal "testing" of dist eggs (require and import). It may
+not work on all zipped eggs. It may be easiest to just customize this script
+for whatever egg you want to test.
+
+usage: test_dist_egg.py <egg_name>
+"""
+import os, sys, logging, subprocess
+
+try:
+ assert sys.argv[1]
+except:
+ print __doc__
+ sys.exit( 1 )
+
+lib = os.path.abspath( os.path.join( os.path.dirname( __file__ ), '..', 'lib' ) )
+sys.path.insert( 0, lib )
+
+if sys.argv[1].endswith( '.egg' ):
+
+ egg = sys.argv[1]
+ egg_name = os.path.basename( egg ).split( '-' )[0]
+ sys.path.insert( 0, egg )
+
+ import pkg_resources
+ pkg_resources.require( egg_name )
+ provider = pkg_resources.get_provider( egg_name )
+ importables = provider.get_metadata('top_level.txt').splitlines()
+
+ for importable in importables:
+ mod = __import__( importable )
+ assert os.path.dirname( mod.__path__[0] ) == os.path.dirname( provider.module_path )
+ print "OK"
+
+ sys.exit( 0 )
+
+else:
+
+ build_dir = os.path.join( os.path.dirname( os.path.abspath( __file__ ) ), 'scramble', 'build' )
+ if os.path.exists( build_dir ):
+ raise Exception( 'Build dir must be removed before testing: %s' % build_dir )
+
+ name = sys.argv[1]
+
+ from galaxy.eggs.dist import DistScrambleCrate
+
+ c = DistScrambleCrate()
+
+ for egg in c[name]:
+ print 'Checking %s %s for %s on %s' % ( name, egg.version, egg.platform, egg.build_host )
+ p = subprocess.Popen( 'ssh %s %s %s %s %s' % ( egg.build_host, egg.python, os.path.abspath( __file__ ), egg.distribution.location, egg.platform ), shell=True )
+ p.wait()
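The new [dependencies] section is ordinary ConfigParser data keyed by egg name, so each scramble script can look up its egg's build dependencies and require them before building. A hedged sketch of that lookup (the file name and egg name are examples only; the real scripts go through the Crate/ScrambleCrate classes shown above):

# Hedged sketch: reading the [dependencies] section and resolving each
# dependency before building an egg (names here are examples only).
import ConfigParser

config = ConfigParser.ConfigParser()
config.read( 'eggs.ini' )

def dependencies_for( egg_name ):
    try:
        return config.get( 'dependencies', egg_name ).split()
    except ( ConfigParser.NoSectionError, ConfigParser.NoOptionError ):
        return []

for dependency in dependencies_for( 'bx_python' ):
    print "Checking bx_python dependency: %s" % dependency
    # the real scripts call crate[dependency].require() or .resolve() here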
details: http://www.bx.psu.edu/hg/galaxy/rev/447c059a096a
changeset: 3534:447c059a096a
user: Nate Coraor <nate(a)bx.psu.edu>
date: Mon Mar 15 15:06:14 2010 -0400
description:
Fix a bug in removing conflicts from the eggs directory
diffstat:
lib/galaxy/eggs/__init__.py | 4 ++--
1 files changed, 2 insertions(+), 2 deletions(-)
diffs (21 lines):
diff -r ac60740712de -r 447c059a096a lib/galaxy/eggs/__init__.py
--- a/lib/galaxy/eggs/__init__.py Mon Mar 15 14:35:35 2010 -0400
+++ b/lib/galaxy/eggs/__init__.py Mon Mar 15 15:06:14 2010 -0400
@@ -2,7 +2,7 @@
Manage Galaxy eggs
"""
-import os, sys, glob, urllib, urllib2, ConfigParser, HTMLParser, zipimport, zipfile
+import os, sys, shutil, glob, urllib, urllib2, ConfigParser, HTMLParser, zipimport, zipfile
import logging
log = logging.getLogger( __name__ )
@@ -281,7 +281,7 @@
for egg in eggs:
try:
egg.resolve()
- except:
+ except EggNotFetchable:
missing.append( egg )
if missing:
raise EggNotFetchable( missing )
details: http://www.bx.psu.edu/hg/galaxy/rev/cfb0776875c0
changeset: 3532:cfb0776875c0
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Mon Mar 15 14:05:25 2010 -0400
description:
Fix migration script's default value for 'deleted' column in visualization table. Because visualizations are not yet in use, the effects are small and a manual fix is easy, so another migration script is not needed to fix this problem; the fix is largely for reference purposes.
diffstat:
lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py | 4 +---
1 files changed, 1 insertions(+), 3 deletions(-)
diffs (15 lines):
diff -r 3c124a19ae94 -r cfb0776875c0 lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py
--- a/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py Mon Mar 15 12:40:30 2010 -0400
+++ b/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py Mon Mar 15 14:05:25 2010 -0400
@@ -54,10 +54,8 @@
log.debug( "Creating visualization_user_share_association table failed: %s" % str( e ) )
# Get default boolean value 'false' so that columns can be initialized.
- if migrate_engine.name == 'mysql':
+ if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
default_false = "0"
- elif migrate_engine.name == 'sqlite':
- default_false = "'false'"
elif migrate_engine.name == 'postgres':
default_false = "false"
details: http://www.bx.psu.edu/hg/galaxy/rev/3c124a19ae94
changeset: 3531:3c124a19ae94
user: rc
date: Mon Mar 15 12:40:30 2010 -0400
description:
lims: automatic update of sample state when the state is changed in the db (using scanner etc)
diffstat:
lib/galaxy/web/controllers/requests_admin.py | 21 ++++++
templates/admin/requests/show_request.mako | 87 ++++++++++++++++++++++++---
templates/requests/sample_datasets.mako | 7 ++
templates/requests/sample_state.mako | 5 +
4 files changed, 109 insertions(+), 11 deletions(-)
diffs (175 lines):
diff -r 6d079d53f9db -r 3c124a19ae94 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Mon Mar 15 11:31:14 2010 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Mon Mar 15 12:40:30 2010 -0400
@@ -231,6 +231,27 @@
def index( self, trans ):
return trans.fill_template( "/admin/requests/index.mako" )
+ @web.json
+ def sample_state_updates( self, trans, ids=None, states=None ):
+ # Avoid caching
+ trans.response.headers['Pragma'] = 'no-cache'
+ trans.response.headers['Expires'] = '0'
+ # Create new HTML for any that have changed
+ rval = {}
+ if ids is not None and states is not None:
+ ids = map( int, ids.split( "," ) )
+ states = states.split( "," )
+ for id, state in zip( ids, states ):
+ sample = trans.sa_session.query( self.app.model.Sample ).get( id )
+ if sample.current_state().name != state:
+ rval[id] = {
+ "state": sample.current_state().name,
+ "datasets": len(sample.dataset_files),
+ "html_state": unicode( trans.fill_template( "requests/sample_state.mako", sample=sample ), 'utf-8' ),
+ "html_datasets": unicode( trans.fill_template( "requests/sample_datasets.mako", trans=trans, sample=sample ), 'utf-8' )
+ }
+ return rval
+
@web.expose
@web.require_admin
def list( self, trans, **kwd ):
diff -r 6d079d53f9db -r 3c124a19ae94 templates/admin/requests/show_request.mako
--- a/templates/admin/requests/show_request.mako Mon Mar 15 11:31:14 2010 -0400
+++ b/templates/admin/requests/show_request.mako Mon Mar 15 12:40:30 2010 -0400
@@ -1,5 +1,7 @@
<%inherit file="/base.mako"/>
<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/requests/sample_state.mako" import="render_sample_state" />
+<%namespace file="/requests/sample_datasets.mako" import="render_sample_datasets" />
%if msg:
${render_msg( msg, messagetype )}
@@ -42,6 +44,54 @@
});
</script>
+<script type="text/javascript">
+ // Looks for changes in sample states using an async request. Keeps
+ // calling itself (via setTimeout) until all samples are in a terminal
+ // state.
+ var updater = function ( sample_states ) {
+ // Check if there are any items left to track
+ var empty = true;
+ for ( i in sample_states ) {
+ empty = false;
+ break;
+ }
+ if ( ! empty ) {
+ setTimeout( function() { updater_callback( sample_states ) }, 1000 );
+ }
+ };
+ var updater_callback = function ( sample_states ) {
+ // Build request data
+ var ids = []
+ var states = []
+ $.each( sample_states, function ( id, state ) {
+ ids.push( id );
+ states.push( state );
+ });
+ // Make ajax call
+ $.ajax( {
+ type: "POST",
+ url: "${h.url_for( controller='requests_admin', action='sample_state_updates' )}",
+ dataType: "json",
+ data: { ids: ids.join( "," ), states: states.join( "," ) },
+ success : function ( data ) {
+ $.each( data, function( id, val ) {
+ // Replace HTML
+ var cell1 = $("#sampleState-" + id);
+ cell1.html( val.html_state );
+ var cell2 = $("#sampleDatasets-" + id);
+ cell2.html( val.html_datasets );
+ sample_states[ parseInt(id) ] = val.state;
+ });
+ updater( sample_states );
+ },
+ error: function() {
+ // Just retry, like the old method, should try to be smarter
+ updater( sample_states );
+ }
+ });
+ };
+</script>
+
<style type="text/css">
.msg_head {
padding: 0px 0px;
@@ -162,15 +212,21 @@
%if sample_index in range(len(request.samples)):
<td>${info['name']}</td>
<td>${info['barcode']}</td>
- <td>
- %if sample:
- %if sample.request.unsubmitted():
- Unsubmitted
- %else:
- <a href="${h.url_for( controller='requests_admin', action='show_events', sample_id=sample.id)}">${sample.current_state().name}</a>
- %endif
- %endif
- </td>
+ %if sample.request.unsubmitted():
+ <td>Unsubmitted</td>
+ %else:
+ <td id="sampleState-${sample.id}">${render_sample_state( sample )}</td>
+ %endif
+
+## <td>
+## %if sample:
+## %if sample.request.unsubmitted():
+## Unsubmitted
+## %else:
+## <a href="${h.url_for( controller='requests_admin', action='show_events', sample_id=sample.id)}">${sample.current_state().name}</a>
+## %endif
+## %endif
+## </td>
%if info['library']:
<td><a href="${h.url_for( controller='library_common', action='browse_library', cntrller='library', id=trans.security.encode_id( info['library'].id ) )}">${info['library'].name}</a></td>
%else:
@@ -182,8 +238,9 @@
<td></td>
%endif
%if request.submitted() or request.complete():
- <td>
- <a href="${h.url_for( controller='requests_admin', action='show_datatx_page', sample_id=trans.security.encode_id(sample.id) )}">${len(sample.dataset_files)}</a>
+ <td id="sampleDatasets-${sample.id}">
+ ${render_sample_datasets( sample )}
+## <a href="${h.url_for( controller='requests_admin', action='show_datatx_page', sample_id=trans.security.encode_id(sample.id) )}">${len(sample.dataset_files)}</a>
</td>
%endif
@@ -358,6 +415,14 @@
<label>There are no samples.</label>
%endif
</div>
+
+ %if request.samples and request.submitted():
+ <script type="text/javascript">
+ // Updater
+ updater({${ ",".join( [ '"%s" : "%s"' % ( s.id, s.current_state().name ) for s in request.samples ] ) }});
+ </script>
+ %endif
+
%if edit_mode == 'False':
<table class="grid">
<tbody>
diff -r 6d079d53f9db -r 3c124a19ae94 templates/requests/sample_datasets.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/requests/sample_datasets.mako Mon Mar 15 12:40:30 2010 -0400
@@ -0,0 +1,7 @@
+<%def name="render_sample_datasets( sample )">
+ <a href="${h.url_for(controller='requests_admin', action='show_datatx_page', sample_id=trans.security.encode_id(sample.id))}">${sample.transferred_dataset_files()}</a>
+</%def>
+
+
+
+${render_sample_datasets( sample )}
diff -r 6d079d53f9db -r 3c124a19ae94 templates/requests/sample_state.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/requests/sample_state.mako Mon Mar 15 12:40:30 2010 -0400
@@ -0,0 +1,5 @@
+<%def name="render_sample_state( sample )">
+ <a href="${h.url_for( controller='requests_admin', action='show_events', sample_id=sample.id)}">${sample.current_state().name}</a>
+</%def>
+
+${render_sample_state( sample )}
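The moving parts here are a page-side updater that polls with the ids and states it currently shows, and a JSON controller method that answers only for samples whose state has changed, sending back freshly rendered cell HTML. A rough sketch of that exchange; the ids, states and markup below are invented for illustration:

# What the updater posts (comma-separated, as built by updater_callback):
request_data = { 'ids': '12,13,14', 'states': 'New,Running,Running' }

# What sample_state_updates returns: one entry per sample whose state changed,
# with new HTML for the state and dataset cells, keyed by sample id.
response = {
    13: { 'state': 'Complete',
          'datasets': 4,
          'html_state': '<a href="...">Complete</a>',
          'html_datasets': '<a href="...">4</a>' },
}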
details: http://www.bx.psu.edu/hg/galaxy/rev/6d079d53f9db
changeset: 3530:6d079d53f9db
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Mon Mar 15 11:31:14 2010 -0400
description:
Add functionality for inline editing of dataset tags and annotation in history panel (icons are currently opposite save/rerun icons). Various style and template fixes as well.
diffstat:
lib/galaxy/web/base/controller.py | 11 +-
lib/galaxy/web/controllers/dataset.py | 31 ++++-
lib/galaxy/web/controllers/page.py | 2 +-
lib/galaxy/web/controllers/tag.py | 19 ++-
lib/galaxy/web/framework/helpers/grids.py | 4 +-
static/june_2007_style/autocomplete_tagging.css.tmpl | 5 +
static/june_2007_style/base.css.tmpl | 9 +-
static/june_2007_style/blue/autocomplete_tagging.css | 3 +-
static/june_2007_style/blue/base.css | 3 +-
templates/dataset/display.mako | 2 +-
templates/dataset/embed.mako | 6 +-
templates/display_base.mako | 2 +-
templates/embed_base.mako | 20 ++-
templates/grid_base_async.mako | 2 +-
templates/root/history.mako | 110 ++++++++++++++++--
templates/root/history_common.mako | 18 ++-
templates/tagging_common.mako | 6 +-
17 files changed, 203 insertions(+), 50 deletions(-)
diffs (553 lines):
diff -r 96ec861b4b6e -r 6d079d53f9db lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py Sun Mar 14 11:49:44 2010 -0400
+++ b/lib/galaxy/web/base/controller.py Mon Mar 15 11:31:14 2010 -0400
@@ -62,7 +62,7 @@
def get_item_annotation_obj( self, db_session, user, item ):
""" Returns a user's annotation object for an item. """
- # Get annotation association. TODO: we could replace this eval() with a long if/else stmt, but this is more general without sacrificing
+ # Get annotation association.
try:
annotation_assoc_class = eval( "model.%sAnnotationAssociation" % item.__class__.__name__ )
except:
@@ -126,7 +126,7 @@
class UsesHistoryDatasetAssociation:
""" Mixin for controllers that use HistoryDatasetAssociation objects. """
- def get_dataset( self, trans, dataset_id, check_accessible=True ):
+ def get_dataset( self, trans, dataset_id, check_ownership=True, check_accessible=False ):
""" Get an HDA object by id. """
# DEPRECATION: We still support unencoded ids for backward compatibility
try:
@@ -136,6 +136,13 @@
data = trans.sa_session.query( model.HistoryDatasetAssociation ).get( dataset_id )
if not data:
raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid dataset id: %s." % str( dataset_id ) )
+ if check_ownership:
+ # Verify ownership.
+ user = trans.get_user()
+ if not user:
+ error( "Must be logged in to manage Galaxy items" )
+ if data.history.user != user:
+ error( "%s is not owned by current user" % data.__class__.__name__ )
if check_accessible:
current_user_roles = trans.get_current_user_roles()
if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
diff -r 96ec861b4b6e -r 6d079d53f9db lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Sun Mar 14 11:49:44 2010 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Mon Mar 15 11:31:14 2010 -0400
@@ -5,6 +5,7 @@
from galaxy import util, datatypes, jobs, web, model
from cgi import escape, FieldStorage
from galaxy.datatypes.display_applications.util import encode_dataset_user, decode_dataset_user
+from galaxy.util.sanitize_html import sanitize_html
from email.MIMEText import MIMEText
import pkg_resources;
@@ -444,16 +445,14 @@
@web.require_login( "use Galaxy datasets" )
def get_name_and_link_async( self, trans, id=None ):
""" Returns dataset's name and link. """
- dataset = self.get_dataset( trans, id )
+ dataset = self.get_dataset( trans, id, False, True )
return_dict = { "name" : dataset.name, "link" : url_for( action="display_by_username_and_slug", username=dataset.history.user.username, slug=trans.security.encode_id( dataset.id ) ) }
return return_dict
@web.expose
def get_embed_html_async( self, trans, id ):
""" Returns HTML for embedding a dataset in a page. """
-
- # TODO: user should be able to embed any item he has access to. see display_by_username_and_slug for security code.
- dataset = self.get_dataset( trans, id )
+ dataset = self.get_dataset( trans, id, False, True )
if dataset:
return "Embedded Dataset '%s'" % dataset.name
@@ -466,7 +465,7 @@
@web.expose
def display_by_username_and_slug( self, trans, username, slug, preview=True ):
""" Display dataset by username and slug; because datasets do not yet have slugs, the slug is the dataset's id. """
- dataset = self.get_dataset( trans, slug )
+ dataset = self.get_dataset( trans, slug, False, True )
if dataset:
truncated, dataset_data = self.get_data( dataset, preview )
dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )
@@ -478,7 +477,7 @@
def get_item_content_async( self, trans, id ):
""" Returns item content in HTML format. """
- dataset = self.get_dataset( trans, id )
+ dataset = self.get_dataset( trans, id, False, True )
if dataset is None:
raise web.httpexceptions.HTTPNotFound()
truncated, dataset_data = self.get_data( dataset, preview=True )
@@ -486,6 +485,24 @@
dataset.annotation = self.get_item_annotation_str( trans.sa_session, trans.get_user(), dataset )
return trans.stream_template_mako( "/dataset/item_content.mako", item=dataset, item_data=dataset_data, truncated=truncated )
+ @web.expose
+ def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
+ dataset = self.get_dataset( trans, id, False, True )
+ if not dataset:
+ web.httpexceptions.HTTPNotFound()
+ if dataset and new_annotation:
+ # Sanitize annotation before adding it.
+ new_annotation = sanitize_html( new_annotation, 'utf-8', 'text/html' )
+ self.add_item_annotation( trans, dataset, new_annotation )
+ trans.sa_session.flush()
+ return new_annotation
+
+ @web.expose
+ def get_annotation_async( self, trans, id ):
+ dataset = self.get_dataset( trans, id, False, True )
+ if not dataset:
+ web.httpexceptions.HTTPNotFound()
+ return self.get_item_annotation_str( trans.sa_session, trans.get_user(), dataset )
@web.expose
def display_at( self, trans, dataset_id, filename=None, **kwd ):
@@ -704,7 +721,7 @@
if user != history.user:
error_msg = error_msg + "You do not have permission to add datasets to %i requested histories. " % ( len( target_histories ) )
for dataset_id in dataset_ids:
- data = self.get_dataset( trans, dataset_id )
+ data = self.get_dataset( trans, dataset_id, False, True )
if data is None:
error_msg = error_msg + "You tried to copy a dataset that does not exist or that you do not have access to. "
invalid_datasets += 1
diff -r 96ec861b4b6e -r 6d079d53f9db lib/galaxy/web/controllers/page.py
--- a/lib/galaxy/web/controllers/page.py Sun Mar 14 11:49:44 2010 -0400
+++ b/lib/galaxy/web/controllers/page.py Mon Mar 15 11:31:14 2010 -0400
@@ -674,7 +674,7 @@
datasets = self.get_history_datasets( trans, history )
return trans.fill_template( "history/embed.mako", item=history, item_data=datasets )
elif item_class == model.HistoryDatasetAssociation:
- dataset = self.get_dataset( trans, item_id )
+ dataset = self.get_dataset( trans, item_id, False, True )
dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )
if dataset:
data = self.get_data( dataset )
diff -r 96ec861b4b6e -r 6d079d53f9db lib/galaxy/web/controllers/tag.py
--- a/lib/galaxy/web/controllers/tag.py Sun Mar 14 11:49:44 2010 -0400
+++ b/lib/galaxy/web/controllers/tag.py Mon Mar 15 11:31:14 2010 -0400
@@ -11,12 +11,21 @@
def __init__(self, app):
BaseController.__init__(self, app)
-
- # Set up tag handler to recognize the following items: History, HistoryDatasetAssociation, Page, ...
self.tag_handler = TagHandler()
+
+ @web.expose
+ @web.require_login( "edit item tags" )
+ def get_tagging_elt_async( self, trans, item_id, item_class, elt_context="" ):
+ """ Returns HTML for editing an item's tags. """
+ item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
+ if not item:
+ return trans.show_error_message( "No item of class %s with id % " % ( item_class, item_id ) )
+ user = trans.get_user()
+ return trans.fill_template( "/tagging_common.mako", tag_type="individual", user=trans.get_user(), tagged_item=item, elt_context=elt_context,
+ in_form=False, input_size="22", tag_click_fn="default_tag_click_fn", use_toggle_link=False )
@web.expose
- @web.require_login( "Add tag to an item." )
+ @web.require_login( "add tag to an item" )
def add_tag_async( self, trans, item_id=None, item_class=None, new_tag=None, context=None ):
""" Add tag to an item. """
@@ -28,10 +37,10 @@
# Log.
params = dict( item_id=item.id, item_class=item_class, tag=new_tag)
- trans.log_action( user, unicode( "tag"), context, params )
+ trans.log_action( user, unicode( "tag" ), context, params )
@web.expose
- @web.require_login( "Remove tag from an item." )
+ @web.require_login( "remove tag from an item" )
def remove_tag_async( self, trans, item_id=None, item_class=None, tag_name=None, context=None ):
""" Remove tag from an item. """
diff -r 96ec861b4b6e -r 6d079d53f9db lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py Sun Mar 14 11:49:44 2010 -0400
+++ b/lib/galaxy/web/framework/helpers/grids.py Mon Mar 15 11:31:14 2010 -0400
@@ -389,7 +389,7 @@
self.grid_name = grid_name
def get_value( self, trans, grid, item ):
return trans.fill_template( "/tagging_common.mako", tag_type="community", trans=trans, user=trans.get_user(), tagged_item=item, elt_context=self.grid_name,
- in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter" )
+ in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter", use_toggle_link=True )
def filter( self, db_session, user, query, column_filter ):
""" Modify query to filter model_class by tag. Multiple filters are ANDed. """
if column_filter == "All":
@@ -418,7 +418,7 @@
""" Column that supports individual tags. """
def get_value( self, trans, grid, item ):
return trans.fill_template( "/tagging_common.mako", tag_type="individual", trans=trans, user=trans.get_user(), tagged_item=item, elt_context=self.grid_name,
- in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter" )
+ in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter", use_toggle_link=True )
def get_filter( self, user, column_filter ):
# Parse filter to extract multiple tags.
tag_handler = TagHandler()
diff -r 96ec861b4b6e -r 6d079d53f9db static/june_2007_style/autocomplete_tagging.css.tmpl
--- a/static/june_2007_style/autocomplete_tagging.css.tmpl Sun Mar 14 11:49:44 2010 -0400
+++ b/static/june_2007_style/autocomplete_tagging.css.tmpl Mon Mar 15 11:31:14 2010 -0400
@@ -74,6 +74,11 @@
cursor: pointer;
}
+.individual-tag-area:hover
+{
+ border:dotted #999999 1px;
+}
+
.active-tag-area {
background-color: white;
}
diff -r 96ec861b4b6e -r 6d079d53f9db static/june_2007_style/base.css.tmpl
--- a/static/june_2007_style/base.css.tmpl Sun Mar 14 11:49:44 2010 -0400
+++ b/static/june_2007_style/base.css.tmpl Mon Mar 15 11:31:14 2010 -0400
@@ -835,8 +835,7 @@
cursor:pointer;
}
-.editable-text:hover{
- background-image:url();
- background-repeat:no-repeat;
- background-position:right;
-}
+.editable-text:hover {
+ cursor: text;
+ border: dotted #999999 1px;
+}
\ No newline at end of file
diff -r 96ec861b4b6e -r 6d079d53f9db static/june_2007_style/blue/autocomplete_tagging.css
--- a/static/june_2007_style/blue/autocomplete_tagging.css Sun Mar 14 11:49:44 2010 -0400
+++ b/static/june_2007_style/blue/autocomplete_tagging.css Mon Mar 15 11:31:14 2010 -0400
@@ -7,7 +7,8 @@
.ac_over{background-color:#0A246A;color:white;}
.ac_header{font-style:normal;color:gray;border-bottom:0.1em solid gray;}
.tag-area{width:100%;}
-.individual-tag-area{border:solid 1px #eee;cursor:pointer;}
+.individual-tag-area{cursor:pointer;}
+.individual-tag-area:hover{border:dotted #999999 1px;}
.active-tag-area{background-color:white;}
.toggle-link{font-weight:normal;padding:0.3em;margin-bottom:1em;width:100%;padding:0.2em 0em 0.2em 0em;}
.tag-button{width:auto;color:#444;text-decoration:none;display:inline-block;cursor:pointer;margin:0.2em;border:solid #bbb 1px;padding:0.1em 0.5em 0.1em 0.5em;-moz-border-radius:.5em;-webkit-border-radius:.5em;border-radius:.5em;background:#eee;}
diff -r 96ec861b4b6e -r 6d079d53f9db static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css Sun Mar 14 11:49:44 2010 -0400
+++ b/static/june_2007_style/blue/base.css Mon Mar 15 11:31:14 2010 -0400
@@ -145,4 +145,5 @@
.tipsy-east{background-position:right center;}
.tipsy-west{background-position:left center;}
.editable-text{cursor:pointer;}
-.editable-text:hover{background-image:url();background-repeat:no-repeat;background-position:right;}
+.editable-text:hover{cursor: text;border: dotted #999999 1px;}
+
diff -r 96ec861b4b6e -r 6d079d53f9db templates/dataset/display.mako
--- a/templates/dataset/display.mako Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/dataset/display.mako Mon Mar 15 11:31:14 2010 -0400
@@ -22,7 +22,7 @@
</%def>
<%def name="render_item_links( data )">
- ## Provide links to save data and TODO: import dataset.
+ ## Provide links to save data and import dataset.
<a href="${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( data.id ), to_ext=data.ext )}" class="icon-button disk tooltip" title="Save dataset"></a>
<a href="${h.url_for( controller='/dataset', action='imp', dataset_id=trans.security.encode_id( data.id ) )}" class="icon-button import tooltip" title="Import dataset"></a>
</%def>
diff -r 96ec861b4b6e -r 6d079d53f9db templates/dataset/embed.mako
--- a/templates/dataset/embed.mako Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/dataset/embed.mako Mon Mar 15 11:31:14 2010 -0400
@@ -3,8 +3,12 @@
from galaxy.web.framework.helpers import iff
%>
-<%def name="render_item_specific_title_links( dataset )">
+<%def name="render_item_links( dataset )">
<a href="${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( dataset.id ), to_ext=dataset.ext )}" title="Save dataset" class="icon-button disk tooltip"></a>
+ ## Links for importing and viewing an item.
+ <a href="${h.url_for( controller='/dataset', action='imp', dataset_id=trans.security.encode_id( item.id ) )}" title="Import dataset" class="icon-button import tooltip"></a>
+ <a class="icon-button go-to-full-screen tooltip" href="${h.url_for( controller='/dataset', action='display_by_username_and_slug', username=dataset.history.user.username, slug=trans.security.encode_id( dataset.id ) )}" title="Go to dataset"></a>
+
</%def>
<%def name="render_summary_content( dataset, data )">
diff -r 96ec861b4b6e -r 6d079d53f9db templates/display_base.mako
--- a/templates/display_base.mako Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/display_base.mako Mon Mar 15 11:31:14 2010 -0400
@@ -25,7 +25,7 @@
self.has_left_panel=False
self.has_right_panel=True
self.message_box_visible=False
- self.active_view="user"
+ self.active_view=""
self.overlay_visible=False
%>
</%def>
diff -r 96ec861b4b6e -r 6d079d53f9db templates/embed_base.mako
--- a/templates/embed_base.mako Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/embed_base.mako Mon Mar 15 11:31:14 2010 -0400
@@ -18,8 +18,19 @@
</div>
</div>
-## Render item-specific title links.
-<%def name="render_item_specific_title_links( item )">
+## Render item links.
+<%def name="render_item_links( item )">
+ <%
+ item_display_name = get_class_display_name( item.__class__ ).lower()
+ item_controller = "/%s" % get_controller_name( item )
+ item_user = get_item_user( item )
+ item_slug = get_item_slug( item )
+ display_href = h.url_for( controller=item_controller, action='display_by_username_and_slug', username=item_user.username, slug=item_slug )
+ %>
+
+ ## Links for importing and viewing an item.
+ <a href="${h.url_for( controller=item_controller, action='imp', id=trans.security.encode_id( item.id ) )}" title="Import ${item_display_name}" class="icon-button import tooltip"></a>
+ <a class="icon-button go-to-full-screen tooltip" href="${display_href}" title="Go to ${item_display_name}"></a>
</%def>
<%def name="render_title( item )">
@@ -36,10 +47,7 @@
<a class="toggle-contract icon-button tooltip" href="${display_href}" title="Hide ${item_display_name} content"></a>
</div>
<div style="float: right">
- ${self.render_item_specific_title_links( item )}
- ## Links applicable for all items.
- <a href="${h.url_for( controller=item_controller, action='imp', id=trans.security.encode_id( item.id ) )}" title="Import ${item_display_name}" class="icon-button import tooltip"></a>
- <a class="icon-button go-to-full-screen tooltip" href="${display_href}" title="Go to ${item_display_name}"></a>
+ ${self.render_item_links( item )}
</div>
<h4><a class="toggle-embed tooltip" href="${display_href}" title="Show or hide ${item_display_name} content">Galaxy ${get_class_display_name( item.__class__ )} | ${get_item_name( item )}</a></h4>
%if hasattr( item, "annotation") and item.annotation:
diff -r 96ec861b4b6e -r 6d079d53f9db templates/grid_base_async.mako
--- a/templates/grid_base_async.mako Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/grid_base_async.mako Mon Mar 15 11:31:14 2010 -0400
@@ -13,4 +13,4 @@
*****
${num_pages}
*****
-${render_message( grid )}
\ No newline at end of file
+${render_message( message, message_type )}
\ No newline at end of file
diff -r 96ec861b4b6e -r 6d079d53f9db templates/root/history.mako
--- a/templates/root/history.mako Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/root/history.mako Mon Mar 15 11:31:14 2010 -0400
@@ -42,10 +42,10 @@
$.jStore.remove("history_expand_state");
}).show();
- // Rename management.
+ // History rename functionality.
async_save_text("history-name-container", "history-name", "${h.url_for( controller="/history", action="rename_async", id=trans.security.encode_id(history.id) )}", "new_name", 18);
- // Tag management.
+ // History tagging functionality.
var historyTagArea = $('#history-tag-area');
$('#history-tag').click( function()
{
@@ -57,7 +57,7 @@
return false;
});
- // Annotation management.
+ // History annotation functionality.
var historyAnnotationArea = $('#history-annotation-area');
$('#history-annotate').click( function() {
if ( historyAnnotationArea.is( ":hidden" ) ) {
@@ -104,7 +104,8 @@
})
}
}
-// Add show/hide link and delete link to a history item
+// (a) Add show/hide link and delete link to a history item;
+// (b) handle tagging and annotation using jquery.
function setupHistoryItem( query ) {
query.each( function() {
var id = this.id;
@@ -180,6 +181,99 @@
return false;
});
});
+
+ // Tag handling.
+ $(this).find( "a.icon-button.tags").each( function()
+ {
+ // Use links parameters but custom URL as ajax URL.
+ $(this).click( function() {
+ // Get tag area, tag element.
+ var history_item = $(this).parents(".historyItem");
+ var tag_area = history_item.find(".tag-area");
+ var tag_elt = history_item.find(".tag-elt");
+
+ // Show or hide tag area; if showing tag area and it's empty, fill it.
+ if ( tag_area.is( ":hidden" ) )
+ {
+ if (tag_elt.html() == "" )
+ {
+ // Need to fill tag element.
+ var href_parms = $(this).attr("href").split("?")[1];
+ var ajax_url = "${h.url_for( controller='tag', action='get_tagging_elt_async' )}?" + href_parms;
+ $.ajax({
+ url: ajax_url,
+ error: function() { alert( "Tagging failed" ) },
+ success: function(tag_elt_html) {
+ tag_elt.html(tag_elt_html);
+ tag_elt.find(".tooltip").tipsy( { gravity: 's' } );
+ tag_area.slideDown("fast");
+ }
+ });
+ }
+ else
+ {
+ // Tag element is filled; show.
+ tag_area.slideDown("fast");
+ }
+ }
+ else
+ {
+ // Hide.
+ tag_area.slideUp("fast");
+ }
+ return false;
+ });
+ });
+
+ // Annotation handling.
+ $(this).find( "a.icon-button.annotate").each( function()
+ {
+ // Use links parameters but custom URL as ajax URL.
+ $(this).click( function() {
+ // Get tag area, tag element.
+ var history_item = $(this).parents(".historyItem");
+ var annotation_area = history_item.find(".annotation-area");
+ var annotation_elt = history_item.find(".annotation-elt");
+
+ // Show or hide annotation area; if showing annotation area and it's empty, fill it.
+ if ( annotation_area.is( ":hidden" ) )
+ {
+ if (annotation_elt.html() == "" )
+ {
+ // Need to fill annotation element.
+ var href_parms = $(this).attr("href").split("?")[1];
+ var ajax_url = "${h.url_for( controller='dataset', action='get_annotation_async' )}?" + href_parms;
+ $.ajax({
+ url: ajax_url,
+ error: function() { alert( "Annotations failed" ) },
+ success: function(annotation) {
+ if (annotation == "")
+ annotation = "<i>Describe or add notes to dataset</i>";
+ annotation_elt.html(annotation);
+ annotation_area.find(".tooltip").tipsy( { gravity: 's' } );
+ async_save_text(
+ annotation_elt.attr("id"), annotation_elt.attr("id"),
+ "${h.url_for( controller="/dataset", action="annotate_async")}?" + href_parms,
+ "new_annotation", 18, true, 4);
+ annotation_area.slideDown("fast");
+ }
+ });
+ }
+ else
+ {
+ // Annotation element is filled; show.
+ annotation_area.slideDown("fast");
+ }
+ }
+ else
+ {
+ // Hide.
+ annotation_area.slideUp("fast");
+ }
+ return false;
+ });
+ });
+
});
};
// Looks for changes in dataset state using an async request. Keeps
@@ -279,13 +373,6 @@
padding: 3px;
margin: -4px;
}
-.editable-text:hover {
- cursor: text;
- border: dotted #999999 1px;
-}
-.tag-area {
- border: none;
-}
</style>
<noscript>
@@ -299,7 +386,6 @@
</head>
<body class="historyPage">
-
<div id="top-links" class="historyLinks">
<a title="${_('refresh')}" class="icon-button arrow-circle tooltip" href="${h.url_for('history', show_deleted=show_deleted)}"></a>
diff -r 96ec861b4b6e -r 6d079d53f9db templates/root/history_common.mako
--- a/templates/root/history_common.mako Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/root/history_common.mako Mon Mar 15 11:31:14 2010 -0400
@@ -86,10 +86,26 @@
</div>
<div class="info">${_('Info: ')}${data.display_info()}</div>
<div>
+ <% dataset_id=trans.security.encode_id( data.id ) %>
%if data.has_data:
- <a href="${h.url_for( controller='dataset', action='display', dataset_id=trans.security.encode_id( data.id ), to_ext=data.ext )}" title="Save" class="icon-button disk tooltip"></a>
+ <a href="${h.url_for( controller='dataset', action='display', dataset_id=dataset_id, to_ext=data.ext )}" title="Save" class="icon-button disk tooltip"></a>
%if user_owns_dataset:
<a href="${h.url_for( controller='tool_runner', action='rerun', id=data.id )}" target="galaxy_main" title="Run this job again" class="icon-button arrow-circle tooltip"></a>
+ %if trans.user:
+ <div style="float: right">
+ <a href="${h.url_for( controller='tag', action='retag', item_class=data.__class__.__name__, item_id=dataset_id )}" target="galaxy_main" title="Edit dataset tags" class="icon-button tags tooltip"></a>
+ <a href="${h.url_for( controller='dataset', action='annotate', id=dataset_id )}" target="galaxy_main" title="Edit dataset annotation" class="icon-button annotate tooltip"></a>
+ </div>
+ <div style="clear: both"></div>
+ <div class="tag-area" style="display: none">
+ <strong>Tags:</strong>
+ <div class="tag-elt"></div>
+ </div>
+ <div id="${dataset_id}-annotation-area" class="annotation-area" style="display: none">
+ <strong>Annotation:</strong>
+ <div id="${dataset_id}-annotation-elt" style="margin: 1px 0px 1px 0px" class="annotation-elt tooltip editable-text" title="Edit dataset annotation"></div>
+ </div>
+ %endif
%endif
<div style="clear: both"></div>
%for display_app in data.datatype.get_display_types():
diff -r 96ec861b4b6e -r 6d079d53f9db templates/tagging_common.mako
--- a/templates/tagging_common.mako Sun Mar 14 11:49:44 2010 -0400
+++ b/templates/tagging_common.mako Mon Mar 15 11:31:14 2010 -0400
@@ -13,7 +13,7 @@
## Render a tagging element if there is a tagged_item.
%if tagged_item is not None:
%if tag_type == "individual":
- ${render_individual_tagging_element(user=user, tagged_item=tagged_item, elt_context=elt_context, in_form=in_form, input_size=input_size, tag_click_fn=tag_click_fn)}
+ ${render_individual_tagging_element( user=user, tagged_item=tagged_item, elt_context=elt_context, in_form=in_form, input_size=input_size, tag_click_fn=tag_click_fn, use_toggle_link=use_toggle_link )}
%elif tag_type == "community":
${render_community_tagging_element(tagged_item=tagged_item, elt_context=elt_context, tag_click_fn=tag_click_fn)}
%endif
@@ -123,7 +123,7 @@
//
// Set up autocomplete tagger.
//
-
+
//
// Default function get text to display on the toggle link.
//
@@ -193,7 +193,7 @@
ajax_delete_tag_url: "${h.url_for( controller='/tag', action='remove_tag_async', item_id=tagged_item_id, item_class=tagged_item.__class__.__name__, context=elt_context )}",
delete_tag_img: "${h.url_for('/static/images/delete_tag_icon_gray.png')}",
delete_tag_img_rollover: "${h.url_for('/static/images/delete_tag_icon_white.png')}",
- use_toggle_link: ${iff( use_toggle_link, 'true', 'false' )},
+ use_toggle_link: ${iff( use_toggle_link, 'true', 'false' )}
};
$('#${elt_id}').autocomplete_tagging(options);
18 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/96ec861b4b6e
changeset: 3529:96ec861b4b6e
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Sun Mar 14 11:49:44 2010 -0400
description:
Make the login/register/logout sequence more user-friendly. Specific changes: (a) use panels so that the Galaxy masthead is always available; (b) provide links to guide users past login; (c) enable 'user' to be an active view; (d) update functional tests.
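For readers skimming the diff below, here is a minimal, self-contained sketch of the decorator pattern this changeset extends: require_login() gains a use_panels flag that is forwarded to the error page so the masthead stays visible for users who are not logged in. The FakeTrans and WorkflowController classes and the show_error_message signature are stand-ins modeled on the diff, not the actual Galaxy code.

# Simplified sketch of a login-required decorator that forwards a use_panels flag.
from functools import wraps

def require_login(verb="perform this action", use_panels=False):
    def argcatcher(func):
        @wraps(func)
        def decorator(self, trans, *args, **kwargs):
            if trans.get_user():
                return func(self, trans, *args, **kwargs)
            # No user: render an error page, optionally wrapped in the panel layout.
            return trans.show_error_message(
                "You must be logged in to %s." % verb, use_panels=use_panels)
        return decorator
    return argcatcher

# Hypothetical stand-ins, only so the sketch runs on its own.
class FakeTrans:
    def __init__(self, user=None):
        self.user = user
    def get_user(self):
        return self.user
    def show_error_message(self, message, use_panels=False):
        return "[panels=%s] %s" % (use_panels, message)

class WorkflowController:
    @require_login("use Galaxy workflows", use_panels=True)
    def list(self, trans):
        return "workflow list for %s" % trans.get_user()

if __name__ == "__main__":
    print(WorkflowController().list(FakeTrans()))           # error page, panels on
    print(WorkflowController().list(FakeTrans("someone")))  # normal page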
diffstat:
lib/galaxy/web/controllers/dataset.py | 4 +-
lib/galaxy/web/controllers/user.py | 28 ++-
lib/galaxy/web/controllers/visualization.py | 2 +-
lib/galaxy/web/controllers/workflow.py | 8 +-
lib/galaxy/web/framework/__init__.py | 26 +-
templates/base_panels.mako | 14 +-
templates/display_base.mako | 12 +-
templates/form.mako | 115 ++++++++++------
templates/history/list_published.mako | 2 +-
templates/message.mako | 3 +-
templates/page/list_published.mako | 2 +-
templates/user/register.mako | 162 ++++++++++++----------
templates/visualization/list_published.mako | 2 +-
templates/workflow/list.mako | 193 +++++++++++++++------------
templates/workflow/list_published.mako | 2 +-
test/base/twilltestcase.py | 11 +-
16 files changed, 329 insertions(+), 257 deletions(-)
diffs (900 lines):
diff -r 48e83411aa91 -r 96ec861b4b6e lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Fri Mar 12 16:11:26 2010 -0500
+++ b/lib/galaxy/web/controllers/dataset.py Sun Mar 14 11:49:44 2010 -0400
@@ -436,8 +436,8 @@
# Do import.
cur_history = trans.get_history( create=True )
status, message = self._copy_datasets( trans, [ dataset_id ], [ cur_history ] )
- message = message + "<br>You can <a href='%s'>start using the dataset</a> or %s." % ( url_for('/'), referer_message )
- return trans.show_message( message, type=status )
+ message = "Dataset imported. <br>You can <a href='%s'>start using the dataset</a> or %s." % ( url_for('/'), referer_message )
+ return trans.show_message( message, type=status, use_panels=True )
@web.expose
@web.json
diff -r 48e83411aa91 -r 96ec861b4b6e lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Fri Mar 12 16:11:26 2010 -0500
+++ b/lib/galaxy/web/controllers/user.py Sun Mar 14 11:49:44 2010 -0400
@@ -115,9 +115,14 @@
you share publicly. Usernames must be at least
four characters in length and contain only lowercase
letters, numbers, and the '-' character.""" ) )
+
@web.expose
- def login( self, trans, email='', password='' ):
+ def login( self, trans, email='', password='', referer='', use_panels='True' ):
email_error = password_error = None
+
+ # Convert use_panels to Boolean.
+ use_panels = use_panels in [ 'True', 'true', 't', 'T' ]
+
# Attempt login
if trans.app.config.require_login:
refresh_frames = [ 'masthead', 'history', 'tools' ]
@@ -136,21 +141,23 @@
else:
trans.handle_user_login( user )
trans.log_event( "User logged in" )
- msg = "Now logged in as " + user.email + "."
+ msg = "You are now logged in as %s.<br>You can <a href='%s'>go back to the page you were visiting</a> or <a href='%s'>go to the Galaxy homepage</a>." % ( user.email, referer, url_for( '/' ) )
if trans.app.config.require_login:
msg += ' <a href="%s">Click here</a> to continue to the front page.' % web.url_for( '/static/welcome.html' )
- return trans.show_ok_message( msg, refresh_frames=refresh_frames )
+ return trans.show_ok_message( msg, refresh_frames=refresh_frames, use_panels=use_panels, active_view="user" )
form = web.FormBuilder( web.url_for(), "Login", submit_text="Login" ) \
.add_text( "email", "Email address", value=email, error=email_error ) \
.add_password( "password", "Password", value='', error=password_error,
- help="<a href='%s'>Forgot password? Reset here</a>" % web.url_for( action='reset_password' ) )
+ help="<a href='%s'>Forgot password? Reset here</a>" % web.url_for( action='reset_password' ) ) \
+ .add_input( "hidden", "referer", "referer", value=trans.request.referer, use_label=False )
if trans.app.config.require_login:
if trans.app.config.allow_user_creation:
- return trans.show_form( form, header = require_login_creation_template % web.url_for( action = 'create' ) )
+ return trans.show_form( form, header = require_login_creation_template % web.url_for( action = 'create' ), use_panels=use_panels, active_view="user" )
else:
- return trans.show_form( form, header = require_login_nocreation_template )
+ return trans.show_form( form, header = require_login_nocreation_template, use_panels=use_panels, active_view="user" )
else:
- return trans.show_form( form )
+ return trans.show_form( form, use_panels=use_panels, active_view="user" )
+
@web.expose
def logout( self, trans ):
if trans.app.config.require_login:
@@ -160,10 +167,11 @@
# Since logging an event requires a session, we'll log prior to ending the session
trans.log_event( "User logged out" )
trans.handle_user_logout()
- msg = "You are no longer logged in."
+ msg = "You have been logged out.<br>You can <a href='%s'>go back to the page you were visiting</a> or <a href='%s'>go to the Galaxy homepage</a>." % ( trans.request.referer, url_for( '/' ) )
if trans.app.config.require_login:
msg += ' <a href="%s">Click here</a> to return to the login page.' % web.url_for( controller='user', action='login' )
- return trans.show_ok_message( msg, refresh_frames=refresh_frames )
+ return trans.show_ok_message( msg, refresh_frames=refresh_frames, use_panels=True, active_view="user" )
+
@web.expose
def create( self, trans, **kwd ):
params = util.Params( kwd )
@@ -217,7 +225,7 @@
trans.log_event( "User created a new account" )
trans.log_event( "User logged in" )
# subscribe user to email list
- return trans.show_ok_message( "Now logged in as " + user.email, refresh_frames=refresh_frames )
+ return trans.show_ok_message( "Now logged in as %s.<br><a href='%s'>Return to the Galaxy start page.</a>" % ( user.email, url_for( '/' ) ), refresh_frames=refresh_frames, use_panels=True )
else:
trans.response.send_redirect( web.url_for( controller='admin',
action='users',
diff -r 48e83411aa91 -r 96ec861b4b6e lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py Fri Mar 12 16:11:26 2010 -0500
+++ b/lib/galaxy/web/controllers/visualization.py Sun Mar 14 11:49:44 2010 -0400
@@ -75,7 +75,7 @@
return trans.fill_template( "visualization/list_published.mako", grid=grid )
@web.expose
- @web.require_login("use Galaxy visualizations")
+ @web.require_login( "use Galaxy visualizations", use_panels=True )
def list( self, trans, *args, **kwargs ):
# Handle operation
if 'operation' in kwargs and 'id' in kwargs:
diff -r 48e83411aa91 -r 96ec861b4b6e lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py Fri Mar 12 16:11:26 2010 -0500
+++ b/lib/galaxy/web/controllers/workflow.py Sun Mar 14 11:49:44 2010 -0400
@@ -85,7 +85,7 @@
@web.expose
def index( self, trans ):
- return trans.fill_template( "workflow/index.mako" )
+ return self.list( trans )
@web.expose
@web.require_login( "use Galaxy workflows" )
@@ -102,7 +102,7 @@
return self.stored_list_grid( trans, **kwargs )
@web.expose
- @web.require_login( "use Galaxy workflows" )
+ @web.require_login( "use Galaxy workflows", use_panels=True )
def list( self, trans ):
"""
Render workflow main page (management of existing workflows)
@@ -276,7 +276,7 @@
item=stored )
@web.expose
- @web.require_login( "use Galaxy workflows" )
+ @web.require_login( "to import a workflow", use_panels=True )
def imp( self, trans, id, **kwargs ):
# Set referer message.
referer = trans.request.referer
@@ -284,7 +284,7 @@
referer_message = "<a href='%s'>return to the previous page</a>" % referer
else:
referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
-
+
# Do import.
session = trans.sa_session
stored = self.get_stored_workflow( trans, id, check_ownership=False )
diff -r 48e83411aa91 -r 96ec861b4b6e lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Fri Mar 12 16:11:26 2010 -0500
+++ b/lib/galaxy/web/framework/__init__.py Sun Mar 14 11:49:44 2010 -0400
@@ -65,15 +65,15 @@
decorator.exposed = True
return decorator
-def require_login( verb="perform this action" ):
+def require_login( verb="perform this action", use_panels=False ):
def argcatcher( func ):
def decorator( self, trans, *args, **kwargs ):
if trans.get_user():
return func( self, trans, *args, **kwargs )
else:
return trans.show_error_message(
- "You must be <a target='galaxy_main' href='%s'>logged in</a> to %s</div>"
- % ( url_for( controller='user', action='login' ), verb ) )
+ "You must be <a target='_top' href='%s'>logged in</a> to %s</div>."
+ % ( url_for( controller='user', action='login' ), verb ), use_panels=use_panels )
return decorator
return argcatcher
@@ -561,7 +561,7 @@
context.
"""
return self.template_context['message']
- def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False ):
+ def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False, active_view="" ):
"""
Convenience method for displaying a simple page with a single message.
@@ -571,28 +571,28 @@
`refresh_frames`: names of frames in the interface that should be
refreshed when the message is displayed
"""
- return self.fill_template( "message.mako", message_type=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels )
- def show_error_message( self, message, refresh_frames=[], use_panels=False ):
+ return self.fill_template( "message.mako", message_type=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels, active_view=active_view )
+ def show_error_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
"""
Convenience method for displaying an error message. See `show_message`.
"""
- return self.show_message( message, 'error', refresh_frames, use_panels=use_panels )
- def show_ok_message( self, message, refresh_frames=[], use_panels=False ):
+ return self.show_message( message, 'error', refresh_frames, use_panels=use_panels, active_view=active_view )
+ def show_ok_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
"""
Convenience method for displaying an ok message. See `show_message`.
"""
- return self.show_message( message, 'done', refresh_frames, use_panels=use_panels )
- def show_warn_message( self, message, refresh_frames=[], use_panels=False ):
+ return self.show_message( message, 'done', refresh_frames, use_panels=use_panels, active_view=active_view )
+ def show_warn_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
"""
Convenience method for displaying an warn message. See `show_message`.
"""
- return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels )
- def show_form( self, form, header=None, template="form.mako" ):
+ return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels, active_view=active_view )
+ def show_form( self, form, header=None, template="form.mako", use_panels=False, active_view="" ):
"""
Convenience method for displaying a simple page with a single HTML
form.
"""
- return self.fill_template( template, form=form, header=header )
+ return self.fill_template( template, form=form, header=header, use_panels=use_panels, active_view=active_view )
def fill_template(self, filename, **kwargs):
"""
Fill in a template, putting any keyword arguments on the context.
diff -r 48e83411aa91 -r 96ec861b4b6e templates/base_panels.mako
--- a/templates/base_panels.mako Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/base_panels.mako Sun Mar 14 11:49:44 2010 -0400
@@ -227,7 +227,13 @@
</div>
</td>
- <td class="tab">
+ ## User tab.
+ <%
+ cls = "tab"
+ if self.active_view == 'user':
+ cls += " active"
+ %>
+ <td class="${cls}">
<a>User</a>
<%
if trans.user:
@@ -241,9 +247,9 @@
%>
<div class="submenu">
<ul class="loggedout-only" style="${style1}">
- <li><a target="galaxy_main" href="${h.url_for( controller='/user', action='login' )}">Login</a></li>
+ <li><a href="${h.url_for( controller='/user', action='login' )}">Login</a></li>
%if app.config.allow_user_creation:
- <li><a target="galaxy_main" href="${h.url_for( controller='/user', action='create' )}">Register</a></li>
+ <li><a href="${h.url_for( controller='/user', action='create' )}">Register</a></li>
%endif
</ul>
<ul class="loggedin-only" style="${style2}">
@@ -259,7 +265,7 @@
logout_target = ""
logout_url = h.url_for( controller='/root', action='index', m_c='user', m_a='logout' )
else:
- logout_target = "galaxy_main"
+ logout_target = ""
logout_url = h.url_for( controller='/user', action='logout' )
%>
<li><a target="${logout_target}" href="${logout_url}">Logout</a></li>
diff -r 48e83411aa91 -r 96ec861b4b6e templates/display_base.mako
--- a/templates/display_base.mako Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/display_base.mako Sun Mar 14 11:49:44 2010 -0400
@@ -216,11 +216,13 @@
%endif
</div>
## Individual tags.
- <p>
- <div>
- Yours:
- ${render_individual_tagging_element( user=trans.get_user(), tagged_item=item, elt_context='view.mako', use_toggle_link=False, tag_click_fn='community_tag_click' )}
- </div>
+ %if trans.get_user():
+ <p>
+ <div>
+ Yours:
+ ${render_individual_tagging_element( user=trans.get_user(), tagged_item=item, elt_context='view.mako', use_toggle_link=False, tag_click_fn='community_tag_click' )}
+ </div>
+ %endif
</div>
</div>
</div>
diff -r 48e83411aa91 -r 96ec861b4b6e templates/form.mako
--- a/templates/form.mako Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/form.mako Sun Mar 14 11:49:44 2010 -0400
@@ -1,54 +1,83 @@
+<%!
+ def inherit(context):
+ if context.get('use_panels') is True:
+ print "here"
+ return '/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
<% _=n_ %>
-<%inherit file="/base.mako"/>
+
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.active_view=active_view
+ self.message_box_visible=False
+%>
+</%def>
+
+
<%def name="title()">${form.title}</%def>
<%def name="javascripts()">
-${parent.javascripts()}
-<script type="text/javascript">
-$(function(){
- $("input:text:first").focus();
-})
-</script>
+ ${parent.javascripts()}
+ <script type="text/javascript">
+ $(function(){
+ $("input:text:first").focus();
+ })
+ </script>
</%def>
-%if header:
- ${header}
-%endif
+<%def name="center_panel()">
+ ${render_form( )}
+</%def>
-<div class="form">
- <div class="form-title">${form.title}</div>
- <div class="form-body">
- <form name="${form.name}" action="${form.action}" method="post" >
- %for input in form.inputs:
- <%
- cls = "form-row"
- if input.error:
- cls += " form-row-error"
- %>
- <div class="${cls}">
- %if input.use_label:
- <label>
- ${_(input.label)}:
- </label>
- %endif
- <div class="form-row-input">
- <input type="${input.type}" name="${input.name}" value="${input.value}" size="40">
- </div>
- %if input.error:
- <div class="form-row-error-message">${input.error}</div>
- %endif
- %if input.help:
- <div class="toolParamHelp" style="clear: both;">
- ${input.help}
- </div>
- %endif
+<%def name="body()">
+ ${render_form( )}
+</%def>
+
+<%def name="render_form()">
+ %if header:
+ ${header}
+ %endif
- <div style="clear: both"></div>
+ <div class="form" style="margin: 1em">
+ <div class="form-title">${form.title}</div>
+ <div class="form-body">
+ <form name="${form.name}" action="${form.action}" method="post" >
+ %for input in form.inputs:
+ <%
+ cls = "form-row"
+ if input.error:
+ cls += " form-row-error"
+ %>
+ <div class="${cls}">
+ %if input.use_label:
+ <label>
+ ${_(input.label)}:
+ </label>
+ %endif
+ <div class="form-row-input">
+ <input type="${input.type}" name="${input.name}" value="${input.value}" size="40">
+ </div>
+ %if input.error:
+ <div class="form-row-error-message">${input.error}</div>
+ %endif
+ %if input.help:
+ <div class="toolParamHelp" style="clear: both;">
+ ${input.help}
+ </div>
+ %endif
- </div>
- %endfor
- <div class="form-row"><input type="submit" value="${form.submit_text}"></div>
+ <div style="clear: both"></div>
- </form>
+ </div>
+ %endfor
+ <div class="form-row"><input type="submit" value="${form.submit_text}"></div>
+
+ </form>
+ </div>
</div>
-</div>
+</%def>
\ No newline at end of file
diff -r 48e83411aa91 -r 96ec861b4b6e templates/history/list_published.mako
--- a/templates/history/list_published.mako Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/history/list_published.mako Sun Mar 14 11:49:44 2010 -0400
@@ -10,7 +10,7 @@
</%def>
<%def name="title()">
- Galaxy :: Published Histories
+ Galaxy | Published Histories
</%def>
<%def name="stylesheets()">
diff -r 48e83411aa91 -r 96ec861b4b6e templates/message.mako
--- a/templates/message.mako Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/message.mako Sun Mar 14 11:49:44 2010 -0400
@@ -1,6 +1,6 @@
<%!
def inherit(context):
- if context.get('use_panels'):
+ if context.get('use_panels') is True:
return '/base_panels.mako'
else:
return '/base.mako'
@@ -69,7 +69,6 @@
${render_large_message( message, message_type )}
</%def>
-## Render the grid's basic elements. Each of these elements can be subclassed.
<%def name="body()">
${render_large_message( message, message_type )}
</%def>
diff -r 48e83411aa91 -r 96ec861b4b6e templates/page/list_published.mako
--- a/templates/page/list_published.mako Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/page/list_published.mako Sun Mar 14 11:49:44 2010 -0400
@@ -10,7 +10,7 @@
</%def>
<%def name="title()">
- Galaxy :: Published Pages
+ Galaxy | Published Pages
</%def>
<%def name="stylesheets()">
diff -r 48e83411aa91 -r 96ec861b4b6e templates/user/register.mako
--- a/templates/user/register.mako Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/user/register.mako Sun Mar 14 11:49:44 2010 -0400
@@ -1,87 +1,99 @@
-<%inherit file="/base.mako"/>
+<%inherit file="/base_panels.mako"/>
<%namespace file="/message.mako" import="render_msg" />
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.active_view="user"
+ self.message_box_visible=False
+%>
+</%def>
-%if msg:
- ${render_msg( msg, messagetype )}
-%endif
-
-
-
-<script type="text/javascript">
-$( function() {
- $( "select[refresh_on_change='true']").change( function() {
- var refresh = false;
- var refresh_on_change_values = $( this )[0].attributes.getNamedItem( 'refresh_on_change_values' )
- if ( refresh_on_change_values ) {
- refresh_on_change_values = refresh_on_change_values.value.split( ',' );
- var last_selected_value = $( this )[0].attributes.getNamedItem( 'last_selected_value' );
- for( i= 0; i < refresh_on_change_values.length; i++ ) {
- if ( $( this )[0].value == refresh_on_change_values[i] || ( last_selected_value && last_selected_value.value == refresh_on_change_values[i] ) ){
- refresh = true;
- break;
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ <script type="text/javascript">
+ $( function() {
+ $( "select[refresh_on_change='true']").change( function() {
+ var refresh = false;
+ var refresh_on_change_values = $( this )[0].attributes.getNamedItem( 'refresh_on_change_values' )
+ if ( refresh_on_change_values ) {
+ refresh_on_change_values = refresh_on_change_values.value.split( ',' );
+ var last_selected_value = $( this )[0].attributes.getNamedItem( 'last_selected_value' );
+ for( i= 0; i < refresh_on_change_values.length; i++ ) {
+ if ( $( this )[0].value == refresh_on_change_values[i] || ( last_selected_value && last_selected_value.value == refresh_on_change_values[i] ) ){
+ refresh = true;
+ break;
+ }
}
}
- }
- else {
- refresh = true;
- }
- if ( refresh ){
- $( "#registration" ).submit();
- }
+ else {
+ refresh = true;
+ }
+ if ( refresh ){
+ $( "#registration" ).submit();
+ }
+ });
});
-});
-</script>
+ </script>
-<div class="toolForm">
- <form name="registration" id="registration" action="${h.url_for( controller='user', action='create', admin_view=admin_view )}" method="post" >
- <div class="toolFormTitle">Create account</div>
- <div class="form-row">
- <label>Email</label>
- ${login_info[ 'Email' ].get_html()}
- </div>
- <div class="form-row">
- <label>Password</label>
- ${login_info[ 'Password' ].get_html()}
- </div>
- <div class="form-row">
- <label>Confirm</label>
- ${login_info[ 'Confirm' ].get_html()}
- </div>
- <div class="form-row">
- <label>Public Username</label>
- ${login_info[ 'Public Username' ].get_html()}
- <div class="toolParamHelp" style="clear: both;">
- Optional
+</%def>
+
+<%def name="center_panel()">
+ %if msg:
+ ${render_msg( msg, messagetype )}
+ %endif
+
+ <div class="toolForm" style="margin: 1em">
+ <form name="registration" id="registration" action="${h.url_for( controller='user', action='create', admin_view=admin_view )}" method="post" >
+ <div class="toolFormTitle">Create account</div>
+ <div class="form-row">
+ <label>Email</label>
+ ${login_info[ 'Email' ].get_html()}
</div>
- </div>
- <div class="form-row">
- <label>Subscribe To Mailing List</label>
- ${login_info[ 'Subscribe To Mailing List' ].get_html()}
- </div>
- %if user_info_select:
<div class="form-row">
- <label>User type</label>
- ${user_info_select.get_html()}
+ <label>Password</label>
+ ${login_info[ 'Password' ].get_html()}
</div>
- %endif
- %if user_info_form:
- %for field in widgets:
+ <div class="form-row">
+ <label>Confirm Password</label>
+ ${login_info[ 'Confirm' ].get_html()}
+ </div>
+ <div class="form-row">
+ <label>Public Username</label>
+ ${login_info[ 'Public Username' ].get_html()}
+ <div class="toolParamHelp" style="clear: both;">
+ When you share or publish items, this name is shown as the author.
+ </div>
+ </div>
+ <div class="form-row">
+ <label>Subscribe To Mailing List</label>
+ ${login_info[ 'Subscribe To Mailing List' ].get_html()}
+ </div>
+ %if user_info_select:
<div class="form-row">
- <label>${field['label']}</label>
- ${field['widget'].get_html()}
- <div class="toolParamHelp" style="clear: both;">
- ${field['helptext']}
+ <label>User type</label>
+ ${user_info_select.get_html()}
+ </div>
+ %endif
+ %if user_info_form:
+ %for field in widgets:
+ <div class="form-row">
+ <label>${field['label']}</label>
+ ${field['widget'].get_html()}
+ <div class="toolParamHelp" style="clear: both;">
+ ${field['helptext']}
+ </div>
+ <div style="clear: both"></div>
</div>
- <div style="clear: both"></div>
- </div>
- %endfor
- %if not user_info_select:
- <input type="hidden" name="user_info_select" value="${user_info_form.id}"/>
- %endif
- %endif
- <div class="form-row">
- <input type="submit" name="create_user_button" value="Submit">
- </div>
- </form>
-</div>
\ No newline at end of file
+ %endfor
+ %if not user_info_select:
+ <input type="hidden" name="user_info_select" value="${user_info_form.id}"/>
+ %endif
+ %endif
+ <div class="form-row">
+ <input type="submit" name="create_user_button" value="Submit">
+ </div>
+ </form>
+ </div>
+</%def>
\ No newline at end of file
diff -r 48e83411aa91 -r 96ec861b4b6e templates/visualization/list_published.mako
--- a/templates/visualization/list_published.mako Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/visualization/list_published.mako Sun Mar 14 11:49:44 2010 -0400
@@ -10,7 +10,7 @@
</%def>
<%def name="title()">
- Galaxy :: Published Visualizations
+ Galaxy | Published Visualizations
</%def>
<%def name="stylesheets()">
diff -r 48e83411aa91 -r 96ec861b4b6e templates/workflow/list.mako
--- a/templates/workflow/list.mako Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/workflow/list.mako Sun Mar 14 11:49:44 2010 -0400
@@ -1,103 +1,118 @@
-<%inherit file="/base.mako"/>
+<%inherit file="/base_panels.mako"/>
+
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.active_view="workflow"
+ self.message_box_visible=False
+%>
+</%def>
<%def name="title()">Workflow home</%def>
-%if message:
-<%
- try:
- messagetype
- except:
- messagetype = "done"
-%>
-<p />
-<div class="${messagetype}message">
- ${message}
-</div>
-%endif
+<%def name="center_panel()">
+ <div style="overflow: auto; height: 100%;">
+ <div class="page-container" style="padding: 10px;">
+ %if message:
+ <%
+ try:
+ messagetype
+ except:
+ messagetype = "done"
+ %>
+ <p />
+ <div class="${messagetype}message">
+ ${message}
+ </div>
+ %endif
-<h2>Your workflows</h2>
+ <h2>Your workflows</h2>
-<ul class="manage-table-actions">
- <li>
- <a class="action-button" href="${h.url_for( action='create' )}">
- <img src="${h.url_for('/static/images/silk/add.png')}" />
- <span>Create new workflow</span>
- </a>
- </li>
-</ul>
+ <ul class="manage-table-actions">
+ <li>
+ <a class="action-button" href="${h.url_for( action='create' )}">
+ <img src="${h.url_for('/static/images/silk/add.png')}" />
+ <span>Create new workflow</span>
+ </a>
+ </li>
+ </ul>
-%if workflows:
- <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" style="width:100%;">
- <tr class="header">
- <th>Name</th>
- <th># of Steps</th>
- ## <th>Last Updated</th>
- <th></th>
- </tr>
- %for i, workflow in enumerate( workflows ):
- <tr>
- <td>
- <div class="menubutton" style="float: left;" id="wf-${i}-popup">
- ${workflow.name | h}
- </div>
- </td>
- <td>${len(workflow.latest_workflow.steps)}</td>
- ## <td>${str(workflow.update_time)[:19]}</td>
- <td>
- <div popupmenu="wf-${i}-popup">
- <a class="action-button" href="${h.url_for( action='editor', id=trans.security.encode_id(workflow.id) )}" target="_parent">Edit</a>
- <a class="action-button" href="${h.url_for( controller='root', action='index', workflow_id=trans.security.encode_id(workflow.id) )}" target="_parent">Run</a>
- <a class="action-button" href="${h.url_for( action='sharing', id=trans.security.encode_id(workflow.id) )}">Share or Publish</a>
- <a class="action-button" href="${h.url_for( action='clone', id=trans.security.encode_id(workflow.id) )}">Clone</a>
- <a class="action-button" href="${h.url_for( action='rename', id=trans.security.encode_id(workflow.id) )}">Rename</a>
- <a class="action-button" confirm="Are you sure you want to delete workflow '${workflow.name}'?" href="${h.url_for( action='delete', id=trans.security.encode_id(workflow.id) )}">Delete</a>
- </div>
- </td>
- </tr>
- %endfor
- </table>
-%else:
+ %if workflows:
+ <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" style="width:100%;">
+ <tr class="header">
+ <th>Name</th>
+ <th># of Steps</th>
+ ## <th>Last Updated</th>
+ <th></th>
+ </tr>
+ %for i, workflow in enumerate( workflows ):
+ <tr>
+ <td>
+ <div class="menubutton" style="float: left;" id="wf-${i}-popup">
+ ${workflow.name | h}
+ </div>
+ </td>
+ <td>${len(workflow.latest_workflow.steps)}</td>
+ ## <td>${str(workflow.update_time)[:19]}</td>
+ <td>
+ <div popupmenu="wf-${i}-popup">
+ <a class="action-button" href="${h.url_for( action='editor', id=trans.security.encode_id(workflow.id) )}" target="_parent">Edit</a>
+ <a class="action-button" href="${h.url_for( controller='root', action='index', workflow_id=trans.security.encode_id(workflow.id) )}" target="_parent">Run</a>
+ <a class="action-button" href="${h.url_for( action='sharing', id=trans.security.encode_id(workflow.id) )}">Share or Publish</a>
+ <a class="action-button" href="${h.url_for( action='clone', id=trans.security.encode_id(workflow.id) )}">Clone</a>
+ <a class="action-button" href="${h.url_for( action='rename', id=trans.security.encode_id(workflow.id) )}">Rename</a>
+ <a class="action-button" confirm="Are you sure you want to delete workflow '${workflow.name}'?" href="${h.url_for( action='delete', id=trans.security.encode_id(workflow.id) )}">Delete</a>
+ </div>
+ </td>
+ </tr>
+ %endfor
+ </table>
+ %else:
- You have no workflows.
+ You have no workflows.
-%endif
+ %endif
-<h2>Workflows shared with you by others</h2>
+ <h2>Workflows shared with you by others</h2>
-%if shared_by_others:
- <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
- <tr class="header">
- <th>Name</th>
- <th>Owner</th>
- <th># of Steps</th>
- <th></th>
- </tr>
- %for i, association in enumerate( shared_by_others ):
- <% workflow = association.stored_workflow %>
- <tr>
- <td>
- <a class="menubutton" id="shared-${i}-popup" href="${h.url_for( action='run', id=trans.security.encode_id(workflow.id) )}">${workflow.name | h}</a>
- </td>
- <td>${workflow.user.email}</td>
- <td>${len(workflow.latest_workflow.steps)}</td>
- <td>
- <div popupmenu="shared-${i}-popup">
- <a class="action-button" href="${h.url_for( action='display_by_username_and_slug', username=workflow.user.username, slug=workflow.slug)}" target="_top">View</a>
- <a class="action-button" href="${h.url_for( action='run', id=trans.security.encode_id(workflow.id) )}">Run</a>
- <a class="action-button" href="${h.url_for( action='clone', id=trans.security.encode_id(workflow.id) )}">Clone</a>
- </div>
- </td>
- </tr>
- %endfor
- </table>
-%else:
+ %if shared_by_others:
+ <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+ <tr class="header">
+ <th>Name</th>
+ <th>Owner</th>
+ <th># of Steps</th>
+ <th></th>
+ </tr>
+ %for i, association in enumerate( shared_by_others ):
+ <% workflow = association.stored_workflow %>
+ <tr>
+ <td>
+ <a class="menubutton" id="shared-${i}-popup" href="${h.url_for( action='run', id=trans.security.encode_id(workflow.id) )}">${workflow.name | h}</a>
+ </td>
+ <td>${workflow.user.email}</td>
+ <td>${len(workflow.latest_workflow.steps)}</td>
+ <td>
+ <div popupmenu="shared-${i}-popup">
+ <a class="action-button" href="${h.url_for( action='display_by_username_and_slug', username=workflow.user.username, slug=workflow.slug)}" target="_top">View</a>
+ <a class="action-button" href="${h.url_for( action='run', id=trans.security.encode_id(workflow.id) )}">Run</a>
+ <a class="action-button" href="${h.url_for( action='clone', id=trans.security.encode_id(workflow.id) )}">Clone</a>
+ </div>
+ </td>
+ </tr>
+ %endfor
+ </table>
+ %else:
- No workflows have been shared with you.
+ No workflows have been shared with you.
-%endif
+ %endif
-<h2>Other options</h2>
+ <h2>Other options</h2>
-<a class="action-button" href="${h.url_for( action='configure_menu' )}">
- <span>Configure your workflow menu</span>
-</a>
\ No newline at end of file
+ <a class="action-button" href="${h.url_for( action='configure_menu' )}">
+ <span>Configure your workflow menu</span>
+ </a>
+ </div>
+ </div>
+</%def>
\ No newline at end of file
diff -r 48e83411aa91 -r 96ec861b4b6e templates/workflow/list_published.mako
--- a/templates/workflow/list_published.mako Fri Mar 12 16:11:26 2010 -0500
+++ b/templates/workflow/list_published.mako Sun Mar 14 11:49:44 2010 -0400
@@ -10,7 +10,7 @@
</%def>
<%def name="title()">
- Galaxy :: Published Workflows
+ Galaxy | Published Workflows
</%def>
<%def name="stylesheets()">
diff -r 48e83411aa91 -r 96ec861b4b6e test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Fri Mar 12 16:11:26 2010 -0500
+++ b/test/base/twilltestcase.py Sun Mar 14 11:49:44 2010 -0400
@@ -791,7 +791,7 @@
self.home()
# Create user, setting username to email.
self.visit_page( "user/create?email=%s&username=%s&password=%s&confirm=%s&create_user_button=Submit" % ( email, email, password, password ) )
- self.check_page_for_string( "Now logged in as %s" %email )
+ self.check_page_for_string( "now logged in as %s" %email )
self.home()
# Make sure a new private role was created for the user
self.visit_page( "user/set_default_permissions" )
@@ -816,7 +816,7 @@
for index, info_value in enumerate(user_info_values):
tc.fv( "1", "field_%i" % index, info_value )
tc.submit( "create_user_button" )
- self.check_page_for_string( "Now logged in as %s" % email )
+ self.check_page_for_string( "now logged in as %s" % email )
def create_user_with_info_as_admin( self, email, password, username, user_info_forms, user_info_form_id, user_info_values ):
'''
This method registers a new user and also provides use info as an admin
@@ -906,16 +906,17 @@
self.create( email=email, password=password )
except:
self.home()
- self.visit_url( "%s/user/login" % self.url )
+ # HACK: don't use panels because late_javascripts() messes up the twill browser and it can't find form fields (and hence user can't be logged in).
+ self.visit_url( "%s/user/login?use_panels=False" % self.url )
tc.fv( '1', 'email', email )
tc.fv( '1', 'password', password )
tc.submit( 'Login' )
- self.check_page_for_string( "Now logged in as %s" %email )
+ self.check_page_for_string( "now logged in as %s" %email )
self.home()
def logout( self ):
self.home()
self.visit_page( "user/logout" )
- self.check_page_for_string( "You are no longer logged in" )
+ self.check_page_for_string( "You have been logged out" )
self.home()
# Functions associated with browsers, cookies, HTML forms and page visits
18 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/48e83411aa91
changeset: 3528:48e83411aa91
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Mar 12 16:11:26 2010 -0500
description:
New separate functional test scripts for testing admin features, library features, library security, and data security. These use a new test_db_util module for all db interaction (other functional test scripts can be enhanced to use this). There is also a lot of code cleanup in the functional tests. In twilltestcase, library- and security-related methods were cleaned up to mirror the names in the recently merged library code.
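As a rough illustration of the kind of thin query helpers the new test_db_util module centralizes, here is a self-contained sketch: functional tests call small functions such as get_user() or flush() instead of repeating SQLAlchemy boilerplate. The User model, the in-memory SQLite engine, and the modern (1.4+) SQLAlchemy imports are assumptions for the sketch, not the actual module.

# Sketch of a small db-helper module for functional tests.
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = "galaxy_user"
    id = Column(Integer, primary_key=True)
    email = Column(String, unique=True)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
sa_session = sessionmaker(bind=engine)()

def flush(obj):
    """Add an object to the session and flush it, mirroring the helper style."""
    sa_session.add(obj)
    sa_session.flush()

def get_user(email):
    """Return the first user with the given email, or None."""
    return sa_session.query(User).filter(User.email == email).first()

if __name__ == "__main__":
    flush(User(email="test@example.org"))
    print(get_user("test@example.org").email)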
diffstat:
lib/galaxy/web/controllers/library_common.py | 7 +-
test/base/test_db_util.py | 123 +
test/base/twilltestcase.py | 342 ++-
test/functional/test_admin_features.py | 422 ++++
test/functional/test_data_security.py | 196 ++
test/functional/test_library_features.py | 606 ++++++
test/functional/test_library_security.py | 603 ++++++
test/functional/test_security_and_libraries.py | 2141 ------------------------
8 files changed, 2167 insertions(+), 2273 deletions(-)
diffs (truncated from 4641 to 3000 lines):
diff -r e39c9a2a0b4c -r 48e83411aa91 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Fri Mar 12 14:27:04 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Fri Mar 12 16:11:26 2010 -0500
@@ -155,7 +155,7 @@
library.root_folder.description = new_description
trans.sa_session.add_all( ( library, library.root_folder ) )
trans.sa_session.flush()
- msg = "Library '%s' has been renamed to '%s'" % ( old_name, new_name )
+ msg = "The information has been updated."
return trans.response.send_redirect( web.url_for( controller='library_common',
action='library_info',
cntrller=cntrller,
@@ -313,7 +313,7 @@
folder.description = new_description
trans.sa_session.add( folder )
trans.sa_session.flush()
- msg = "Folder '%s' has been renamed to '%s'" % ( old_name, new_name )
+ msg = "The information has been updated."
messagetype='done'
else:
msg = "You are not authorized to edit this folder"
@@ -698,7 +698,6 @@
trans.app.security_agent.derive_roles_from_access( trans, trans.app.security.decode_id( library_id ), cntrller, library=True, **vars )
if error:
messagetype = 'error'
-
trans.response.send_redirect( web.url_for( controller='library_common',
action='upload_library_dataset',
cntrller=cntrller,
@@ -1171,7 +1170,7 @@
library_dataset.info = new_info
trans.sa_session.add( library_dataset )
trans.sa_session.flush()
- msg = "Dataset '%s' has been renamed to '%s'" % ( old_name, new_name )
+ msg = "The information has been updated."
messagetype = 'done'
else:
msg = "You are not authorized to change the attributes of this dataset"
diff -r e39c9a2a0b4c -r 48e83411aa91 test/base/test_db_util.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/base/test_db_util.py Fri Mar 12 16:11:26 2010 -0500
@@ -0,0 +1,123 @@
+import galaxy.model
+from galaxy.model.orm import *
+from galaxy.model.mapping import context as sa_session
+from base.twilltestcase import *
+import sys
+
+def flush( obj ):
+ sa_session.add( obj )
+ sa_session.flush()
+def get_all_histories_for_user( user ):
+ return sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.user_id==user.id,
+ galaxy.model.History.table.c.deleted==False ) ) \
+ .all()
+def get_dataset_permissions_by_dataset( dataset ):
+ return sa_session.query( galaxy.model.DatasetPermissions ) \
+ .filter( galaxy.model.DatasetPermissions.table.c.dataset_id==dataset.id ) \
+ .all()
+def get_dataset_permissions_by_role( role ):
+ return sa_session.query( galaxy.model.DatasetPermissions ) \
+ .filter( galaxy.model.DatasetPermissions.table.c.role_id == role.id ) \
+ .first()
+def get_default_history_permissions_by_history( history ):
+ return sa_session.query( galaxy.model.DefaultHistoryPermissions ) \
+ .filter( galaxy.model.DefaultHistoryPermissions.table.c.history_id==history.id ) \
+ .all()
+def get_default_history_permissions_by_role( role ):
+ return sa_session.query( galaxy.model.DefaultHistoryPermissions ) \
+ .filter( galaxy.model.DefaultHistoryPermissions.table.c.role_id == role.id ) \
+ .all()
+def get_default_user_permissions_by_role( role ):
+ return sa_session.query( galaxy.model.DefaultUserPermissions ) \
+ .filter( galaxy.model.DefaultUserPermissions.table.c.role_id == role.id ) \
+ .all()
+def get_default_user_permissions_by_user( user ):
+ return sa_session.query( galaxy.model.DefaultUserPermissions ) \
+ .filter( galaxy.model.DefaultUserPermissions.table.c.user_id==user.id ) \
+ .all()
+def get_form( name ):
+ fdc_list = sa_session.query( galaxy.model.FormDefinitionCurrent ) \
+ .filter( galaxy.model.FormDefinitionCurrent.table.c.deleted == False ) \
+ .order_by( galaxy.model.FormDefinitionCurrent.table.c.create_time.desc() )
+ for fdc in fdc_list:
+ sa_session.refresh( fdc )
+ sa_session.refresh( fdc.latest_form )
+ if fdc.latest_form.name == name:
+ return fdc.latest_form
+ return None
+def get_folder( parent_id, name, description ):
+ return sa_session.query( galaxy.model.LibraryFolder ) \
+ .filter( and_( galaxy.model.LibraryFolder.table.c.parent_id==parent_id,
+ galaxy.model.LibraryFolder.table.c.name==name,
+ galaxy.model.LibraryFolder.table.c.description==description ) ) \
+ .first()
+def get_group_by_name( name ):
+ return sa_session.query( galaxy.model.Group ).filter( galaxy.model.Group.table.c.name==name ).first()
+def get_group_role_associations_by_group( group ):
+ return sa_session.query( galaxy.model.GroupRoleAssociation ) \
+ .filter( galaxy.model.GroupRoleAssociation.table.c.group_id == group.id ) \
+ .all()
+def get_group_role_associations_by_role( role ):
+ return sa_session.query( galaxy.model.GroupRoleAssociation ) \
+ .filter( galaxy.model.GroupRoleAssociation.table.c.role_id == role.id ) \
+ .all()
+def get_latest_dataset():
+ return sa_session.query( galaxy.model.Dataset ) \
+ .order_by( desc( galaxy.model.Dataset.table.c.create_time ) ) \
+ .first()
+def get_latest_hda():
+ return sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+def get_latest_history_for_user( user ):
+ return sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+def get_latest_ldda():
+ return sa_session.query( galaxy.model.LibraryDatasetDatasetAssociation ) \
+ .order_by( desc( galaxy.model.LibraryDatasetDatasetAssociation.table.c.create_time ) ) \
+ .first()
+def get_latest_lddas( limit ):
+ return sa_session.query( galaxy.model.LibraryDatasetDatasetAssociation ) \
+ .order_by( desc( galaxy.model.LibraryDatasetDatasetAssociation.table.c.update_time ) ) \
+ .limit( limit )
+def get_library( name, description, synopsis ):
+ return sa_session.query( galaxy.model.Library ) \
+ .filter( and_( galaxy.model.Library.table.c.name==name,
+ galaxy.model.Library.table.c.description==description,
+ galaxy.model.Library.table.c.synopsis==synopsis,
+ galaxy.model.Library.table.c.deleted==False ) ) \
+ .first()
+def get_private_role( user ):
+ for role in user.all_roles():
+ if role.name == user.email and role.description == 'Private Role for %s' % user.email:
+ return role
+ raise AssertionError( "Private role not found for user '%s'" % user.email )
+def get_role_by_name( name ):
+ return sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name==name ).first()
+def get_user( email ):
+ return sa_session.query( galaxy.model.User ) \
+ .filter( galaxy.model.User.table.c.email==email ) \
+ .first()
+def get_user_group_associations_by_group( group ):
+ return sa_session.query( galaxy.model.UserGroupAssociation ) \
+ .filter( galaxy.model.UserGroupAssociation.table.c.group_id == group.id ) \
+ .all()
+def get_user_role_associations_by_role( role ):
+ return sa_session.query( galaxy.model.UserRoleAssociation ) \
+ .filter( galaxy.model.UserRoleAssociation.table.c.role_id == role.id ) \
+ .all()
+def refresh( obj ):
+ sa_session.refresh( obj )
+def set_library_permissions( in_list ):
+ permissions_in = []
+ permissions_out = []
+ for k, v in galaxy.model.Library.permitted_actions.items():
+ if k in in_list:
+ permissions_in.append( k )
+ else:
+ permissions_out.append( k )
+ return permissions_in, permissions_out
diff -r e39c9a2a0b4c -r 48e83411aa91 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Fri Mar 12 14:27:04 2010 -0500
+++ b/test/base/twilltestcase.py Fri Mar 12 16:11:26 2010 -0500
@@ -535,21 +535,55 @@
if check_str:
self.check_page_for_string( check_str )
self.home()
- def edit_hda_attribute_info( self, hda_id, new_name='', new_info='', new_dbkey='', new_startcol='' ):
+ def edit_hda_attribute_info( self, hda_id, new_name='', new_info='', new_dbkey='', new_startcol='',
+ check_str1='', check_str2='', check_str3='', check_str4='',
+ not_displayed1='', not_displayed2='', not_displayed3='' ):
"""Edit history_dataset_association attribute information"""
self.home()
self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ submit_required = False
self.check_page_for_string( 'Edit Attributes' )
if new_name:
tc.fv( 'edit_attributes', 'name', new_name )
+ submit_required = True
if new_info:
tc.fv( 'edit_attributes', 'info', new_info )
+ submit_required = True
if new_dbkey:
tc.fv( 'edit_attributes', 'dbkey', new_dbkey )
+ submit_required = True
if new_startcol:
tc.fv( 'edit_attributes', 'startCol', new_startcol )
- tc.submit( 'save' )
- self.check_page_for_string( 'Attributes updated' )
+ submit_required = True
+ if submit_required:
+ tc.submit( 'save' )
+ self.check_page_for_string( 'Attributes updated' )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
+ if check_str2:
+ self.check_page_for_string( check_str2 )
+ if check_str3:
+ self.check_page_for_string( check_str3 )
+ if check_str4:
+ self.check_page_for_string( check_str4 )
+ if not_displayed1:
+ try:
+ self.check_page_for_string( not_displayed1 )
+ raise AssertionError, "String (%s) incorrectly displayed on Edit Attributes page." % not_displayed
+ except:
+ pass
+ if not_displayed2:
+ try:
+ self.check_page_for_string( not_displayed2 )
+ raise AssertionError, "String (%s) incorrectly displayed on Edit Attributes page." % not_displayed
+ except:
+ pass
+ if not_displayed3:
+ try:
+ self.check_page_for_string( not_displayed3 )
+ raise AssertionError, "String (%s) incorrectly displayed on Edit Attributes page." % not_displayed
+ except:
+ pass
self.home()
def auto_detect_metadata( self, hda_id ):
"""Auto-detect history_dataset_association metadata"""
@@ -1164,12 +1198,10 @@
check_str = "Purged 1 users"
self.check_page_for_string( check_str )
self.home()
- def associate_roles_and_groups_with_user( self, user_id, email,
- in_role_ids=[], out_role_ids=[],
- in_group_ids=[], out_group_ids=[],
- check_str='' ):
+ def manage_roles_and_groups_for_user( self, user_id, in_role_ids=[], out_role_ids=[],
+ in_group_ids=[], out_group_ids=[], check_str='' ):
self.home()
- url = "%s/admin/manage_roles_and_groups_for_user?id=%s&user_roles_groups_edit_button=Save" % ( self.url, user_id )
+ url = "%s/admin/manage_roles_and_groups_for_user?id=%s" % ( self.url, user_id )
if in_role_ids:
url += "&in_roles=%s" % ','.join( in_role_ids )
if out_role_ids:
@@ -1178,12 +1210,18 @@
url += "&in_groups=%s" % ','.join( in_group_ids )
if out_group_ids:
url += "&out_groups=%s" % ','.join( out_group_ids )
+ if in_role_ids or out_role_ids or in_group_ids or out_group_ids:
+ url += "&user_roles_groups_edit_button=Save"
self.visit_url( url )
if check_str:
self.check_page_for_string( check_str )
self.home()
# Tests associated with roles
+ def browse_roles( self, check_str1='' ):
+ self.visit_url( '%s/admin/roles' % self.url )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
def create_role( self,
name='Role One',
description="This is Role One",
@@ -1280,6 +1318,10 @@
self.visit_url( "%s/admin/groups" % self.url )
self.check_page_for_string( name )
self.home()
+ def browse_groups( self, check_str1='' ):
+ self.visit_url( '%s/admin/groups' % self.url )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
def rename_group( self, group_id, name='Group One Renamed' ):
"""Rename a group"""
self.home()
@@ -1532,6 +1574,58 @@
self.check_page_for_string( 'Address <b>%s</b> has been added' % address_dict[ 'short_desc' ] )
# Library stuff
+ def add_library_template( self, cntrller, item_type, library_id, form_id, form_name, folder_id=None, ldda_id=None ):
+ """Add a new info template to a library item"""
+ self.home()
+ if item_type == 'library':
+ url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s" % \
+ ( self.url, cntrller, item_type, library_id )
+ elif item_type == 'folder':
+ url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s&folder_id=%s" % \
+ ( self.url, cntrller, item_type, library_id, folder_id )
+ elif item_type == 'ldda':
+ url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s&folder_id=%s&ldda_id=%s" % \
+ ( self.url, cntrller, item_type, library_id, folder_id, ldda_id )
+ self.visit_url( url )
+ self.check_page_for_string ( "Select a template for the" )
+ tc.fv( '1', 'form_id', form_id )
+ tc.fv( '1', 'inherit', '1' )
+ tc.submit( 'add_template_button' )
+ self.check_page_for_string = 'A template based on the form "%s" has been added to this' % form_name
+ self.home()
+ def browse_libraries_admin( self, deleted=False, check_str1='', check_str2='', not_displayed1='' ):
+ self.visit_url( '%s/library_admin/browse_libraries?sort=name&f-description=All&f-name=All&f-deleted=%s' % ( self.url, str( deleted ) ) )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
+ if check_str2:
+ self.check_page_for_string( check_str2 )
+ if not_displayed1:
+ try:
+ self.check_page_for_string( not_displayed1 )
+ raise AssertionError, "String (%s) incorrectly displayed when browing library." % not_displayed1
+ except:
+ pass
+ def browse_libraries_regular_user( self, check_str1='', check_str2='' ):
+ self.visit_url( '%s/library/browse_libraries' % self.url )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
+ if check_str2:
+ self.check_page_for_string( check_str2 )
+ def browse_library( self, cntrller, id, show_deleted=False,
+ check_str1='', check_str2='', check_str3='', not_displayed='', not_displayed2='' ):
+ self.visit_url( '%s/library_common/browse_library?cntrller=%s&id=%s&show_deleted=%s' % ( self.url, cntrller, id, str( show_deleted ) ) )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
+ if check_str2:
+ self.check_page_for_string( check_str2 )
+ if check_str3:
+ self.check_page_for_string( check_str3 )
+ if not_displayed:
+ try:
+ self.check_page_for_string( not_displayed )
+ raise AssertionError, "String (%s) incorrectly displayed when browing library." % not_displayed
+ except:
+ pass
def create_library( self, name='Library One', description='This is Library One', synopsis='Synopsis for Library One' ):
"""Create a new library"""
self.home()
@@ -1544,6 +1638,28 @@
check_str = "The new library named '%s' has been created" % name
self.check_page_for_string( check_str )
self.home()
+ def library_info( self, cntrller, library_id, library_name, new_name='', new_description='', new_synopsis='',
+ ele_1_field_name='', ele_1_contents='', ele_2_field_name='', ele_2_contents='', check_str1='' ):
+ """Edit information about a library, optionally using an existing template with up to 2 elements"""
+ self.home()
+ self.visit_url( "%s/library_common/library_info?cntrller=%s&id=%s" % ( self.url, cntrller, library_id ) )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
+ if new_name and new_description and new_synopsis:
+ tc.fv( '1', 'name', new_name )
+ tc.fv( '1', 'description', new_description )
+ tc.fv( '1', 'synopsis', new_synopsis )
+ tc.submit( 'library_info_button' )
+ self.check_page_for_string( "The information has been updated." )
+ # If there is a template, then there are 2 forms on this page and the template is the 2nd form
+ if ele_1_field_name and ele_1_contents and ele_2_field_name and ele_2_contents:
+ tc.fv( '2', ele_1_field_name, ele_1_contents )
+ tc.fv( '2', ele_2_field_name, ele_2_contents )
+ tc.submit( 'edit_info_button' )
+ elif ele_1_field_name and ele_1_contents:
+ tc.fv( '2', ele_1_field_name, ele_1_contents )
+ tc.submit( 'edit_info_button' )
+ self.home()
def library_permissions( self, library_id, library_name, role_ids_str, permissions_in, permissions_out, cntrller='library_admin' ):
# role_ids_str must be a comma-separated string of role ids
url = "library_common/library_permissions?id=%s&cntrller=%slibrary_admin&update_roles_button=Save" % ( library_id, cntrller )
@@ -1558,46 +1674,8 @@
check_str = "Permissions updated for library '%s'" % library_name
self.check_page_for_string( check_str )
self.home()
- def rename_library( self, library_id, old_name, name='Library One Renamed', description='This is Library One Re-described',
- synopsis='This is the new synopsis for Library One ', controller='library_admin' ):
- """Rename a library"""
- self.home()
- self.visit_url( "%s/library_common/library_info?id=%s&cntrller=%s" % ( self.url, library_id, controller ) )
- self.check_page_for_string( old_name )
- # Since twill barfs on the form submisson, we ar forced to simulate it
- url = "%s/library_common/library_info?id=%s&cntrller=%s&library_info_button=Save&description=%s&name=%s&synopsis=%s" % \
- ( self.url, library_id, controller, description.replace( ' ', '+' ), name.replace( ' ', '+' ), synopsis.replace( ' ', '+' ) )
- self.home()
- self.visit_url( url )
- check_str = "Library '%s' has been renamed to '%s'" % ( old_name, name )
- self.check_page_for_string( check_str )
- self.home()
- def add_template( self, cntrller, item_type, library_id, form_id, form_name, folder_id=None, ldda_id=None ):
- """Add a new info template to a library item"""
- self.home()
- if item_type == 'library':
- url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s" % ( self.url, cntrller, item_type, library_id )
- elif item_type == 'folder':
- url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s&folder_id=%s" % ( self.url, cntrller, item_type, library_id, folder_id )
- elif item_type == 'ldda':
- url = "%s/library_common/add_template?cntrller=%s&item_type=%s&library_id=%s&folder_id=%s&ldda_id=%s" % ( self.url, cntrller, item_type, library_id, folder_id, ldda_id )
- self.visit_url( url )
- self.check_page_for_string ( "Select a template for the" )
- tc.fv( '1', 'form_id', form_id )
- tc.fv( '1', 'inherit', '1' )
- tc.submit( 'add_template_button' )
- self.check_page_for_string = 'A template based on the form "%s" has been added to this' % form_name
- self.home()
- def library_info( self, library_id, library_name, ele_1_field_name, ele_1_contents, ele_2_field_name, ele_2_contents, controller='library_admin' ):
- """Add information to a library using an existing template with 2 elements"""
- self.home()
- self.visit_url( "%s/library_common/library_info?id=%s&cntrller=%s" % ( self.url, library_id, controller ) )
- check_str = 'Other information about library %s' % library_name
- self.check_page_for_string( check_str )
- tc.fv( '2', ele_1_field_name, ele_1_contents )
- tc.fv( '2', ele_2_field_name, ele_2_contents )
- tc.submit( 'create_new_info_button' )
- self.home()
+
+ # Library folder stuff
def add_folder( self, controller, library_id, folder_id, name='Folder One', description='This is Folder One' ):
"""Create a new folder"""
self.home()
@@ -1609,27 +1687,40 @@
check_str = "The new folder named '%s' has been added to the data library." % name
self.check_page_for_string( check_str )
self.home()
- def folder_info( self, controller, folder_id, library_id, name, new_name, description, contents='', field_name='' ):
+ def folder_info( self, cntrller, folder_id, library_id, name='', new_name='', description='',
+ field_name='', contents='', check_str1='', check_str2='', not_displayed='' ):
"""Add information to a library using an existing template with 2 elements"""
self.home()
self.visit_url( "%s/library_common/folder_info?cntrller=%s&id=%s&library_id=%s" % \
- ( self.url, controller, folder_id, library_id) )
+ ( self.url, cntrller, folder_id, library_id ) )
# Twill cannot handle the following call for some reason - it's buggy
# self.check_page_for_string( "Edit folder name and description" )
- tc.fv( '1', "name", new_name )
- tc.fv( '1', "description", description )
- tc.submit( 'rename_folder_button' )
- # Twill cannot handle the following call for some reason - it's buggy
- # check_str = "Folder '%s' has been renamed to '%s'" % ( name, new_name )
- # self.check_page_for_string( check_str )
- if contents and field_name:
+ if name and new_name and description:
+ tc.fv( '1', "name", new_name )
+ tc.fv( '1', "description", description )
+ tc.submit( 'rename_folder_button' )
+ # Twill barfs on this, so keep it commented...
+ #self.check_page_for_string( "The information has been updated." )
+ if field_name and contents:
# We have an information template associated with the folder, so
# there are 2 forms on this page and the template is the 2nd form
tc.fv( '2', field_name, contents )
tc.submit( 'edit_info_button' )
- # Twill cannot handle the following call for some reason - it's buggy
- # self.check_page_for_string( 'The information has been updated.' )
+ # Twill barfs on this, so keep it commented...
+ #self.check_page_for_string( 'The information has been updated.' )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
+ if check_str2:
+ self.check_page_for_string( check_str2 )
+ if not_displayed:
+ try:
+ self.check_page_for_string( not_displayed )
+ except AssertionError:
+ pass
+ else:
+ raise AssertionError( "String (%s) should not have been displayed on folder info page." % not_displayed )
self.home()
+
+ # Library dataset stuff
def add_library_dataset( self, cntrller, filename, library_id, folder_id, folder_name,
file_type='auto', dbkey='hg18', roles=[], message='', root=False,
template_field_name1='', template_field_contents1='', show_deleted='False',
@@ -1638,7 +1729,7 @@
filename = self.get_filename( filename )
self.home()
self.visit_url( "%s/library_common/upload_library_dataset?cntrller=%s&library_id=%s&folder_id=%s&upload_option=%s&message=%s" % \
- ( self.url, cntrller, library_id, folder_id, upload_option, message ) )
+ ( self.url, cntrller, library_id, folder_id, upload_option, message.replace( ' ', '+' ) ) )
self.check_page_for_string( 'Upload files' )
tc.fv( "1", "library_id", library_id )
tc.fv( "1", "folder_id", folder_id )
@@ -1659,68 +1750,77 @@
check_str = "Added 1 datasets to the folder '%s' (each is selected)." % folder_name
self.library_wait( library_id )
self.home()
- def set_library_dataset_permissions( self, cntrller, library_id, folder_id, ldda_id, ldda_name, role_ids_str, permissions_in, permissions_out ):
+ def ldda_permissions( self, cntrller, library_id, folder_id, id, role_ids_str,
+ permissions_in=[], permissions_out=[], check_str1='' ):
# role_ids_str must be a comma-separated string of role ids
- url = "library_common/ldda_permissions?cntrller=%s&library_id=%s&folder_id=%s&id=%s&update_roles_button=Save" % \
- ( cntrller, library_id, folder_id, ldda_id )
+ url = "%s/library_common/ldda_permissions?cntrller=%s&library_id=%s&folder_id=%s&id=%s" % \
+ ( self.url, cntrller, library_id, folder_id, id )
for po in permissions_out:
key = '%s_out' % po
url ="%s&%s=%s" % ( url, key, role_ids_str )
for pi in permissions_in:
key = '%s_in' % pi
url ="%s&%s=%s" % ( url, key, role_ids_str )
- self.home()
- self.visit_url( "%s/%s" % ( self.url, url ) )
- check_str = "Permissions have been updated on 1 datasets"
+ if permissions_in or permissions_out:
+ url += "&update_roles_button=Save"
+ self.visit_url( url )
+ if check_str1:
+ check_str = check_str1
+ else:
+ check_str = "Permissions have been updated on 1 datasets"
self.check_page_for_string( check_str )
self.home()
- def edit_ldda_template_element_info( self, library_id, folder_id, ldda_id, ldda_name, ele_1_field_name,
- ele_1_contents, ele_2_field_name, ele_2_contents, ele_1_help='', ele_2_help='',
- ele_3_field_name='', ele_3_contents='', ele_3_help='' ):
- """Edit library_dataset_dataset_association template element information"""
- self.home()
- self.visit_url( "%s/library_common/ldda_edit_info?cntrller=library_admin&library_id=%s&folder_id=%s&id=%s" % \
- ( self.url, library_id, folder_id, ldda_id ) )
+ def ldda_edit_info( self, cntrller, library_id, folder_id, ldda_id, ldda_name, new_ldda_name='',
+ ele_1_field_name='', ele_1_contents='', ele_1_help='',
+ ele_2_field_name='', ele_2_contents='', ele_2_help='',
+ ele_3_field_name='', ele_3_contents='', ele_3_help='',
+ check_str1='', check_str2='', check_str3='', not_displayed='' ):
+ """Edit library_dataset_dataset_association information, optionally template element information"""
+ self.visit_url( "%s/library_common/ldda_edit_info?cntrller=%s&library_id=%s&folder_id=%s&id=%s" % \
+ ( self.url, cntrller, library_id, folder_id, ldda_id ) )
check_str = 'Edit attributes of %s' % ldda_name
self.check_page_for_string( check_str )
- ele_1_contents = ele_1_contents.replace( '+', ' ' )
- ele_2_contents = ele_2_contents.replace( '+', ' ' )
- tc.fv( '4', ele_1_field_name, ele_1_contents )
- tc.fv( '4', ele_2_field_name, ele_2_contents.replace( '+', ' ' ) )
+ if new_ldda_name:
+ tc.fv( '1', 'name', new_ldda_name )
+ tc.submit( 'save' )
+ check_str = 'Attributes updated for library dataset %s' % new_ldda_name
+ self.check_page_for_string( check_str )
+ # There are 4 forms on this page and the template is the 4th form
+ if ele_1_field_name and ele_1_contents:
+ ele_1_contents = ele_1_contents.replace( '+', ' ' )
+ tc.fv( '4', ele_1_field_name, ele_1_contents )
+ if ele_2_field_name and ele_2_contents:
+ ele_2_contents = ele_2_contents.replace( '+', ' ' )
+ tc.fv( '4', ele_2_field_name, ele_2_contents )
if ele_3_field_name and ele_3_contents:
ele_3_contents = ele_3_contents.replace( '+', ' ' )
tc.fv( '4', ele_3_field_name, ele_3_contents )
- tc.submit( 'edit_info_button' )
- self.check_page_for_string( 'This is the latest version of this library dataset' )
- self.check_page_for_string( 'The information has been updated.' )
- self.check_page_for_string( ele_1_contents )
- self.check_page_for_string( ele_2_contents )
- if ele_3_field_name and ele_3_contents:
+ if ele_1_field_name:
+ tc.submit( 'edit_info_button' )
+ self.check_page_for_string( 'This is the latest version of this library dataset' )
+ self.check_page_for_string( 'The information has been updated.' )
+ self.check_page_for_string( ele_1_contents )
+ if ele_2_field_name:
+ self.check_page_for_string( ele_2_contents )
+ if ele_3_field_name:
self.check_page_for_string( ele_3_contents )
if ele_1_help:
check_str = ele_1_help.replace( '+', ' ' )
self.check_page_for_string( check_str )
- self.check_page_for_string( ele_2_contents )
if ele_2_help:
check_str = ele_2_help.replace( '+', ' ' )
self.check_page_for_string( check_str )
if ele_3_help:
check_str = ele_3_help.replace( '+', ' ' )
self.check_page_for_string( check_str )
- self.home()
- def edit_ldda_attribute_info( self, cntrller, library_id, folder_id, ldda_id, ldda_name, new_ldda_name ):
- """Edit library_dataset_dataset_association attribute information"""
- self.home()
- self.visit_url( "%s/library_common/ldda_edit_info?cntrller=%s&library_id=%s&folder_id=%s&id=%s" % \
- ( self.url, cntrller, library_id, folder_id, ldda_id ) )
- check_str = 'Edit attributes of %s' % ldda_name
- self.check_page_for_string( check_str )
- tc.fv( '1', 'name', new_ldda_name )
- tc.submit( 'save' )
- check_str = 'Attributes updated for library dataset %s' % new_ldda_name
- self.check_page_for_string( check_str )
- check_str = 'Edit attributes of %s' % new_ldda_name
- self.check_page_for_string( check_str )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
+ if not_displayed:
+ try:
+ self.check_page_for_string( not_displayed )
+ except AssertionError:
+ pass
+ else:
+ raise AssertionError( "String (%s) should not have been displayed on ldda Edit Attributes page." % not_displayed )
self.home()
def upload_new_dataset_version( self, cntrller, filename, library_id, folder_id, folder_name, library_dataset_id, ldda_name, file_type='auto',
dbkey='hg18', message='', template_field_name1='', template_field_contents1='' ):
@@ -1755,19 +1855,21 @@
check_str = "Added 1 datasets to the folder '%s' (each is selected)." % folder_name
self.check_page_for_string( check_str )
self.home()
- def add_dir_of_files_from_admin_view( self, library_id, folder_id, file_type='auto', dbkey='hg18', roles_tuple=[],
- message='', check_str_after_submit='', template_field_name1='', template_field_contents1='' ):
+ def upload_directory_of_files( self, cntrller, library_id, folder_id, server_dir, file_type='auto', dbkey='hg18', roles_tuple=[],
+ message='', check_str1='', check_str_after_submit='', template_field_name1='', template_field_contents1='' ):
"""Add a directory of datasets to a folder"""
# roles is a list of tuples: [ ( role_id, role_description ) ]
- self.home()
- self.visit_url( "%s/library_common/upload_library_dataset?cntrller=library_admin&upload_option=upload_directory&library_id=%s&folder_id=%s" % \
- ( self.url, library_id, folder_id ) )
+ url = "%s/library_common/upload_library_dataset?cntrller=%s&library_id=%s&folder_id=%s&upload_option=upload_directory" % \
+ ( self.url, cntrller, library_id, folder_id )
+ self.visit_url( url )
self.check_page_for_string( 'Upload a directory of files' )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
tc.fv( "1", "folder_id", folder_id )
tc.fv( "1", "file_type", file_type )
tc.fv( "1", "dbkey", dbkey )
- tc.fv( "1", "message", message.replace( '+', ' ' ) )
- tc.fv( "1", "server_dir", "library" )
+ tc.fv( "1", "message", message )
+ tc.fv( "1", "server_dir", server_dir )
for role_tuple in roles_tuple:
tc.fv( "1", "roles", role_tuple[1] ) # role_tuple[1] is the role name
# Add template field contents, if any...
@@ -1778,29 +1880,13 @@
self.check_page_for_string( check_str_after_submit )
self.library_wait( library_id )
self.home()
- def add_dir_of_files_from_libraries_view( self, library_id, folder_id, selected_dir, file_type='auto', dbkey='hg18', roles_tuple=[],
- message='', check_str_after_submit='', template_field_name1='', template_field_contents1='' ):
- """Add a directory of datasets to a folder"""
- # roles is a list of tuples: [ ( role_id, role_description ) ]
- self.home()
- self.visit_url( "%s/library_common/upload_library_dataset?cntrller=library&upload_option=upload_directory&library_id=%s&folder_id=%s" % \
- ( self.url, library_id, folder_id ) )
- self.check_page_for_string( 'Upload a directory of files' )
- tc.fv( "1", "folder_id", folder_id )
- tc.fv( "1", "file_type", file_type )
- tc.fv( "1", "dbkey", dbkey )
- tc.fv( "1", "message", message.replace( '+', ' ' ) )
- tc.fv( "1", "server_dir", selected_dir )
- for role_tuple in roles_tuple:
- tc.fv( "1", "roles", role_tuple[1] ) # role_tuple[1] is the role name
- # Add template field contents, if any...
- if template_field_name1:
- tc.fv( "1", template_field_name1, template_field_contents1 )
- tc.submit( "runtool_btn" )
- if check_str_after_submit:
- self.check_page_for_string( check_str_after_submit )
- self.library_wait( library_id, cntrller='library' )
- self.home()
+ def act_on_multiple_datasets( self, cntrller, library_id, do_action, ldda_ids='', check_str1='' ):
+ # Can't use the ~/library_admin/libraries form as twill barfs on it so we'll simulate the form submission
+ # by going directly to the form action
+ self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=%s&library_id=%s&ldda_ids=%s&do_action=%s' \
+ % ( self.url, cntrller, library_id, ldda_ids, do_action ) )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
def download_archive_of_library_files( self, cntrller, library_id, ldda_ids, format ):
self.home()
# Here it would be ideal to have twill set form values and submit the form, but
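For reference, the four not_displayed checks added above all express the same intent: assert that a given string is absent from the rendered page. A minimal standalone sketch of that idea, assuming only that the page body is available as a string; the helper name assert_string_not_in_page is illustrative and not part of this changeset:

    def assert_string_not_in_page( page_text, patt ):
        # Fail only when the unwanted string IS present in the rendered page.
        if patt and patt in page_text:
            raise AssertionError( "String (%s) incorrectly displayed." % patt )

    # Example usage against a captured page body:
    page_text = "<html><body>library features Library1</body></html>"
    assert_string_not_in_page( page_text, "Deleted Library" )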
diff -r e39c9a2a0b4c -r 48e83411aa91 test/functional/test_admin_features.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/test_admin_features.py Fri Mar 12 16:11:26 2010 -0500
@@ -0,0 +1,422 @@
+from base.twilltestcase import *
+from base.test_db_util import *
+
+class TestAdminFeatures( TwillTestCase ):
+ def test_000_initiate_users( self ):
+ """Ensuring all required user accounts exist"""
+ self.logout()
+ self.login( email='test1(a)bx.psu.edu' )
+ global regular_user1
+ regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+ assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+ self.logout()
+ self.login( email='test2(a)bx.psu.edu' )
+ global regular_user2
+ regular_user2 = get_user( 'test2(a)bx.psu.edu' )
+ assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
+ global admin_user
+ admin_user = get_user( 'test(a)bx.psu.edu' )
+ assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+ def test_005_create_new_user_account_as_admin( self ):
+ """Testing creating a new user account as admin"""
+ # Logged in as admin_user
+ email = 'test3(a)bx.psu.edu'
+ password = 'testuser'
+ previously_created = self.create_new_account_as_admin( email=email, password=password )
+ # Get the user object for later tests
+ global regular_user3
+ regular_user3 = get_user( email )
+ assert regular_user3 is not None, 'Problem retrieving user with email "%s" from the database' % email
+ global regular_user3_private_role
+ regular_user3_private_role = get_private_role( regular_user3 )
+ # Make sure DefaultUserPermissions were created
+ if not regular_user3.default_permissions:
+ raise AssertionError( 'No DefaultUserPermissions were created for user %s when the admin created the account' % email )
+ # Make sure a private role was created for the user
+ if not regular_user3.roles:
+ raise AssertionError( 'No UserRoleAssociations were created for user %s when the admin created the account' % email )
+ if not previously_created and len( regular_user3.roles ) != 1:
+ raise AssertionError( '%d UserRoleAssociations were created for user %s when the admin created the account ( should have been 1 )' \
+ % ( len( regular_user3.roles ), regular_user3.email ) )
+ for ura in regular_user3.roles:
+ role = sa_session.query( galaxy.model.Role ).get( ura.role_id )
+ if not previously_created and role.type != 'private':
+ raise AssertionError( 'Role created for user %s when the admin created the account is not private, type is %s' \
+ % ( email, str( role.type ) ) )
+ if not previously_created:
+ # Make sure a history was not created ( previous test runs may have left deleted histories )
+ histories = get_all_histories_for_user( regular_user3 )
+ if histories:
+ raise AssertionError( 'Histories were incorrectly created for user %s when the admin created the account' % email )
+ # Make sure the user was not associated with any groups
+ if regular_user3.groups:
+ raise AssertionError( 'Groups were incorrectly associated with user %s when the admin created the account' % email )
+ def test_010_reset_password_as_admin( self ):
+ """Testing reseting a user password as admin"""
+ self.reset_password_as_admin( user_id=self.security.encode_id( regular_user3.id ), password='testreset' )
+ def test_015_login_after_password_reset( self ):
+ """Testing logging in after an admin reset a password - tests DefaultHistoryPermissions for accounts created by an admin"""
+ # logged in as admin_user
+ self.logout()
+ self.login( email=regular_user3.email, password='testreset' )
+ # Make sure a History and HistoryDefaultPermissions exist for the user
+ latest_history = get_latest_history_for_user( regular_user3 )
+ if not latest_history.user_id == regular_user3.id:
+ raise AssertionError( 'A history was not created for user %s when they logged in' % regular_user3.email )
+ if not latest_history.default_permissions:
+ raise AssertionError( 'No DefaultHistoryPermissions were created for history id %d when it was created' % latest_history.id )
+ dhps = get_default_history_permissions_by_history( latest_history )
+ if len( dhps ) > 1:
+ raise AssertionError( 'More than 1 DefaultHistoryPermissions were created for history id %d when it was created' % latest_history.id )
+ dhp = dhps[0]
+ if not dhp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+ raise AssertionError( 'The DefaultHistoryPermission.action for history id %d is "%s", but it should be "manage permissions"' \
+ % ( latest_history.id, dhp.action ) )
+ # Upload a file to create a HistoryDatasetAssociation
+ self.upload_file( '1.bed' )
+ latest_dataset = get_latest_dataset()
+ for dp in latest_dataset.actions:
+ # Should only have 1 DatasetPermissions
+ if dp.action != galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+ raise AssertionError( 'The DatasetPermissions for dataset id %d is %s ( should have been %s )' \
+ % ( latest_dataset.id,
+ dp.action,
+ galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
+ self.logout()
+ # Reset the password to the default for later tests
+ self.login( email='test(a)bx.psu.edu' )
+ self.reset_password_as_admin( user_id=self.security.encode_id( regular_user3.id ), password='testuser' )
+ def test_020_mark_user_deleted( self ):
+ """Testing marking a user account as deleted"""
+ # Logged in as admin_user
+ self.mark_user_deleted( user_id=self.security.encode_id( regular_user3.id ), email=regular_user3.email )
+ if not regular_user3.active_histories:
+ raise AssertionError( 'HistoryDatasetAssociations for regular_user3 were incorrectly deleted when the user was marked deleted' )
+ def test_025_undelete_user( self ):
+ """Testing undeleting a user account"""
+ # Logged in as admin_user
+ self.undelete_user( user_id=self.security.encode_id( regular_user3.id ), email=regular_user3.email )
+ def test_030_create_role( self ):
+ """Testing creating new role with 3 members ( and a new group named the same ), then renaming the role"""
+ # Logged in as admin_user
+ name = 'Role One'
+ description = "This is Role Ones description"
+ user_ids=[ str( admin_user.id ), str( regular_user1.id ), str( regular_user3.id ) ]
+ self.create_role( name=name,
+ description=description,
+ in_user_ids=user_ids,
+ in_group_ids=[],
+ create_group_for_role='yes',
+ private_role=admin_user.email )
+ # Get the role object for later tests
+ global role_one
+ role_one = sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name==name ).first()
+ assert role_one is not None, 'Problem retrieving role named "Role One" from the database'
+ # Make sure UserRoleAssociations are correct
+ if len( role_one.users ) != len( user_ids ):
+ raise AssertionError( '%d UserRoleAssociations were created for role id %d when it was created ( should have been %d )' \
+ % ( len( role_one.users ), role_one.id, len( user_ids ) ) )
+ # Each of the following users should now have 2 role associations, their private role and role_one
+ for user in [ admin_user, regular_user1, regular_user3 ]:
+ refresh( user )
+ if len( user.roles ) != 2:
+ raise AssertionError( '%d UserRoleAssociations are associated with user %s ( should be 2 )' \
+ % ( len( user.roles ), user.email ) )
+ # Make sure the group was created
+ self.visit_url( '%s/admin/groups' % self.url )
+ self.check_page_for_string( name )
+ global group_zero
+ group_zero = get_group_by_name( name )
+ # Rename the role
+ rename = "Role One's been Renamed"
+ new_description="This is Role One's Re-described"
+ self.rename_role( self.security.encode_id( role_one.id ), name=rename, description=new_description )
+ self.visit_url( '%s/admin/roles' % self.url )
+ self.check_page_for_string( rename )
+ self.check_page_for_string( new_description )
+ # Reset the role back to the original name and description
+ self.rename_role( self.security.encode_id( role_one.id ), name=name, description=description )
+ def test_035_create_group( self ):
+ """Testing creating new group with 3 members and 1 associated role, then renaming it"""
+ # Logged in as admin_user
+ name = "Group One's Name"
+ user_ids=[ str( admin_user.id ), str( regular_user1.id ), str( regular_user3.id ) ]
+ role_ids=[ str( role_one.id ) ]
+ self.create_group( name=name, in_user_ids=user_ids, in_role_ids=role_ids )
+ # Get the group object for later tests
+ global group_one
+ group_one = get_group_by_name( name )
+ assert group_one is not None, 'Problem retrieving group named "Group One" from the database'
+ # Make sure UserGroupAssociations are correct
+ if len( group_one.users ) != len( user_ids ):
+ raise AssertionError( '%d UserGroupAssociations were created for group id %d when it was created ( should have been %d )' \
+ % ( len( group_one.users ), group_one.id, len( user_ids ) ) )
+ # Each user should now have 1 group association, group_one
+ for user in [ admin_user, regular_user1, regular_user3 ]:
+ refresh( user )
+ if len( user.groups ) != 1:
+ raise AssertionError( '%d UserGroupAssociations are associated with user %s ( should be 1 )' % ( len( user.groups ), user.email ) )
+ # Make sure GroupRoleAssociations are correct
+ if len( group_one.roles ) != len( role_ids ):
+ raise AssertionError( '%d GroupRoleAssociations were created for group id %d when it was created ( should have been %d )' \
+ % ( len( group_one.roles ), group_one.id, len( role_ids ) ) )
+ # Rename the group
+ rename = "Group One's been Renamed"
+ self.rename_group( self.security.encode_id( group_one.id ), name=rename )
+ self.home()
+ self.visit_url( '%s/admin/groups' % self.url )
+ self.check_page_for_string( rename )
+ # Reset the group back to the original name
+ self.rename_group( self.security.encode_id( group_one.id ), name=name )
+ def test_040_add_members_and_role_to_group( self ):
+ """Testing editing user membership and role associations of an existing group"""
+ # Logged in as admin_user
+ name = 'Group Two'
+ self.create_group( name=name, in_user_ids=[], in_role_ids=[] )
+ # Get the group object for later tests
+ global group_two
+ group_two = get_group_by_name( name )
+ assert group_two is not None, 'Problem retrieving group named "Group Two" from the database'
+ # group_two should have no associations
+ if group_two.users:
+ raise AssertionError( '%d UserGroupAssociations were created for group id %d when it was created ( should have been 0 )' \
+ % ( len( group_two.users ), group_two.id ) )
+ if group_two.roles:
+ raise AssertionError( '%d GroupRoleAssociations were created for group id %d when it was created ( should have been 0 )' \
+ % ( len( group_two.roles ), group_two.id ) )
+ user_ids = [ str( regular_user1.id ) ]
+ role_ids = [ str( role_one.id ) ]
+ self.associate_users_and_roles_with_group( self.security.encode_id( group_two.id ),
+ group_two.name,
+ user_ids=user_ids,
+ role_ids=role_ids )
+ def test_045_create_role_with_user_and_group_associations( self ):
+ """Testing creating a role with user and group associations"""
+ # Logged in as admin_user
+ # NOTE: To get this to work with twill, all select lists on the ~/admin/role page must contain at least
+ # 1 option value or twill throws an exception, which is: ParseError: OPTION outside of SELECT
+ # Due to this bug in twill, after creating the role we bypass the page and visit the URL
+ # directly in the associate_users_and_groups_with_role() method.
+ name = 'Role Two'
+ description = 'This is Role Two'
+ user_ids=[ str( admin_user.id ) ]
+ group_ids=[ str( group_two.id ) ]
+ private_role=admin_user.email
+ # Create the role
+ self.create_role( name=name,
+ description=description,
+ in_user_ids=user_ids,
+ in_group_ids=group_ids,
+ private_role=private_role )
+ # Get the role object for later tests
+ global role_two
+ role_two = get_role_by_name( name )
+ assert role_two is not None, 'Problem retrieving role named "Role Two" from the database'
+ # Make sure UserRoleAssociations are correct
+ if len( role_two.users ) != len( user_ids ):
+ raise AssertionError( '%d UserRoleAssociations were created for role id %d when it was created with %d members' \
+ % ( len( role_two.users ), role_two.id, len( user_ids ) ) )
+ # admin_user should now have 3 role associations, private role, role_one, role_two
+ refresh( admin_user )
+ if len( admin_user.roles ) != 3:
+ raise AssertionError( '%d UserRoleAssociations are associated with user %s ( should be 3 )' % ( len( admin_user.roles ), admin_user.email ) )
+ # Make sure GroupRoleAssociations are correct
+ refresh( role_two )
+ if len( role_two.groups ) != len( group_ids ):
+ raise AssertionError( '%d GroupRoleAssociations were created for role id %d when it was created ( should have been %d )' \
+ % ( len( role_two.groups ), role_two.id, len( group_ids ) ) )
+ # group_two should now be associated with 2 roles: role_one, role_two
+ refresh( group_two )
+ if len( group_two.roles ) != 2:
+ raise AssertionError( '%d GroupRoleAssociations are associated with group id %d ( should be 2 )' % ( len( group_two.roles ), group_two.id ) )
+ def test_050_change_user_role_associations( self ):
+ """Testing changing roles associated with a user"""
+ # Logged in as admin_user
+ # Create a new role with no associations
+ name = 'Role Three'
+ description = 'This is Role Three'
+ user_ids=[]
+ group_ids=[]
+ private_role=admin_user.email
+ self.create_role( name=name,
+ description=description,
+ in_user_ids=user_ids,
+ in_group_ids=group_ids,
+ private_role=private_role )
+ # Get the role object for later tests
+ global role_three
+ role_three = get_role_by_name( name )
+ assert role_three is not None, 'Problem retrieving role named "Role Three" from the database'
+ # Associate the role with a user
+ refresh( admin_user )
+ role_ids = []
+ for ura in admin_user.non_private_roles:
+ role_ids.append( str( ura.role_id ) )
+ role_ids.append( str( role_three.id ) )
+ group_ids = []
+ for uga in admin_user.groups:
+ group_ids.append( str( uga.group_id ) )
+ check_str = "User '%s' has been updated with %d associated roles and %d associated groups" % \
+ ( admin_user.email, len( role_ids ), len( group_ids ) )
+ self.manage_roles_and_groups_for_user( self.security.encode_id( admin_user.id ),
+ in_role_ids=role_ids,
+ in_group_ids=group_ids,
+ check_str=check_str )
+ refresh( admin_user )
+ # admin_user should now be associated with 4 roles: private, role_one, role_two, role_three
+ if len( admin_user.roles ) != 4:
+ raise AssertionError( '%d UserRoleAssociations are associated with %s ( should be 4 )' % \
+ ( len( admin_user.roles ), admin_user.email ) )
+ def test_055_mark_group_deleted( self ):
+ """Testing marking a group as deleted"""
+ # Logged in as admin_user
+ self.browse_groups( check_str1=group_two.name )
+ self.mark_group_deleted( self.security.encode_id( group_two.id ), group_two.name )
+ refresh( group_two )
+ if not group_two.deleted:
+ raise AssertionError( '%s was not correctly marked as deleted.' % group_two.name )
+ # Deleting a group should not delete any associations
+ if not group_two.members:
+ raise AssertionError( '%s incorrectly lost all members when it was marked as deleted.' % group_two.name )
+ if not group_two.roles:
+ raise AssertionError( '%s incorrectly lost all role associations when it was marked as deleted.' % group_two.name )
+ def test_060_undelete_group( self ):
+ """Testing undeleting a deleted group"""
+ # Logged in as admin_user
+ self.undelete_group( self.security.encode_id( group_two.id ), group_two.name )
+ refresh( group_two )
+ if group_two.deleted:
+ raise AssertionError( '%s was not correctly marked as not deleted.' % group_two.name )
+ def test_065_mark_role_deleted( self ):
+ """Testing marking a role as deleted"""
+ # Logged in as admin_user
+ self.home()
+ self.browse_roles( check_str1=role_two.name )
+ self.mark_role_deleted( self.security.encode_id( role_two.id ), role_two.name )
+ refresh( role_two )
+ if not role_two.deleted:
+ raise AssertionError( '%s was not correctly marked as deleted.' % role_two.name )
+ # Deleting a role should not delete any associations
+ if not role_two.users:
+ raise AssertionError( '%s incorrectly lost all user associations when it was marked as deleted.' % role_two.name )
+ if not role_two.groups:
+ raise AssertionError( '%s incorrectly lost all group associations when it was marked as deleted.' % role_two.name )
+ def test_070_undelete_role( self ):
+ """Testing undeleting a deleted role"""
+ # Logged in as admin_user
+ self.undelete_role( self.security.encode_id( role_two.id ), role_two.name )
+ def test_075_purge_user( self ):
+ """Testing purging a user account"""
+ # Logged in as admin_user
+ self.mark_user_deleted( user_id=self.security.encode_id( regular_user3.id ), email=regular_user3.email )
+ refresh( regular_user3 )
+ self.purge_user( self.security.encode_id( regular_user3.id ), regular_user3.email )
+ refresh( regular_user3 )
+ if not regular_user3.purged:
+ raise AssertionError( 'User %s was not marked as purged.' % regular_user3.email )
+ # Make sure DefaultUserPermissions deleted EXCEPT FOR THE PRIVATE ROLE
+ if len( regular_user3.default_permissions ) != 1:
+ raise AssertionError( 'DefaultUserPermissions for user %s were not deleted.' % regular_user3.email )
+ for dup in regular_user3.default_permissions:
+ role = sa_session.query( galaxy.model.Role ).get( dup.role_id )
+ if role.type != 'private':
+ raise AssertionError( 'DefaultUserPermissions for user %s are not related with the private role.' % regular_user3.email )
+ # Make sure History deleted
+ for history in regular_user3.histories:
+ refresh( history )
+ if not history.deleted:
+ raise AssertionError( 'User %s has active history id %d after their account was marked as purged.' % ( regular_user3.email, history.id ) )
+ # NOTE: Not all hdas / datasets will be deleted at the time a history is deleted - the cleanup_datasets.py script
+ # is responsible for this.
+ # Make sure UserGroupAssociations deleted
+ if regular_user3.groups:
+ raise AssertionError( 'User %s still has UserGroupAssociations after their account was marked as purged.' % regular_user3.email )
+ # Make sure UserRoleAssociations deleted EXCEPT FOR THE PRIVATE ROLE
+ if len( regular_user3.roles ) != 1:
+ raise AssertionError( 'UserRoleAssociations for user %s were not deleted.' % regular_user3.email )
+ for ura in regular_user3.roles:
+ role = sa_session.query( galaxy.model.Role ).get( ura.role_id )
+ if role.type != 'private':
+ raise AssertionError( 'UserRoleAssociations for user %s are not related with the private role.' % regular_user3.email )
+ def test_080_manually_unpurge_user( self ):
+ """Testing manually un-purging a user account"""
+ # Logged in as admin_user
+ # Reset the user for later test runs. The user's private Role and DefaultUserPermissions for that role
+ # should have been preserved, so all we need to do is reset purged and deleted.
+ # TODO: If we decide to implement the GUI feature for un-purging a user, replace this with a method call
+ regular_user3.purged = False
+ regular_user3.deleted = False
+ flush( regular_user3 )
+ def test_085_purge_group( self ):
+ """Testing purging a group"""
+ # Logged in as admin_user
+ self.mark_group_deleted( self.security.encode_id( group_two.id ), group_two.name )
+ self.purge_group( self.security.encode_id( group_two.id ), group_two.name )
+ # Make sure there are no UserGroupAssociations
+ if get_user_group_associations_by_group( group_two ):
+ raise AssertionError( "Purging the group did not delete the UserGroupAssociations for group_id '%s'" % group_two.id )
+ # Make sure there are no GroupRoleAssociations
+ if get_group_role_associations_by_group( group_two ):
+ raise AssertionError( "Purging the group did not delete the GroupRoleAssociations for group_id '%s'" % group_two.id )
+ # Undelete the group for later test runs
+ self.undelete_group( self.security.encode_id( group_two.id ), group_two.name )
+ def test_090_purge_role( self ):
+ """Testing purging a role"""
+ # Logged in as admin_user
+ self.mark_role_deleted( self.security.encode_id( role_two.id ), role_two.name )
+ self.purge_role( self.security.encode_id( role_two.id ), role_two.name )
+ # Make sure there are no UserRoleAssociations
+ if get_user_role_associations_by_role( role_two ):
+ raise AssertionError( "Purging the role did not delete the UserRoleAssociations for role_id '%s'" % role_two.id )
+ # Make sure there are no DefaultUserPermissions associated with the Role
+ if get_default_user_permissions_by_role( role_two ):
+ raise AssertionError( "Purging the role did not delete the DefaultUserPermissions for role_id '%s'" % role_two.id )
+ # Make sure there are no DefaultHistoryPermissions associated with the Role
+ if get_default_history_permissions_by_role( role_two ):
+ raise AssertionError( "Purging the role did not delete the DefaultHistoryPermissions for role_id '%s'" % role_two.id )
+ # Make sure there are no GroupRoleAssociations
+ if get_group_role_associations_by_role( role_two ):
+ raise AssertionError( "Purging the role did not delete the GroupRoleAssociations for role_id '%s'" % role_two.id )
+ # Make sure there are no DatasetPermissions
+ if get_dataset_permissions_by_role( role_two ):
+ raise AssertionError( "Purging the role did not delete the DatasetPermissions for role_id '%s'" % role_two.id )
+ def test_095_manually_unpurge_role( self ):
+ """Testing manually un-purging a role"""
+ # Logged in as admin_user
+ # Manually unpurge, then undelete the role for later test runs
+ # TODO: If we decide to implement the GUI feature for un-purging a role, replace this with a method call
+ role_two.purged = False
+ flush( role_two )
+ self.undelete_role( self.security.encode_id( role_two.id ), role_two.name )
+ def test_999_reset_data_for_later_test_runs( self ):
+ """Reseting data to enable later test runs to pass"""
+ # Logged in as admin_user
+ ##################
+ # Eliminate all non-private roles
+ ##################
+ for role in [ role_one, role_two, role_three ]:
+ self.mark_role_deleted( self.security.encode_id( role.id ), role.name )
+ self.purge_role( self.security.encode_id( role.id ), role.name )
+ # Manually delete the role from the database
+ refresh( role )
+ sa_session.delete( role )
+ sa_session.flush()
+ ##################
+ # Eliminate all groups
+ ##################
+ for group in [ group_zero, group_one, group_two ]:
+ self.mark_group_deleted( self.security.encode_id( group.id ), group.name )
+ self.purge_group( self.security.encode_id( group.id ), group.name )
+ # Manually delete the group from the database
+ refresh( group )
+ sa_session.delete( group )
+ sa_session.flush()
+ ##################
+ # Make sure all users are associated only with their private roles
+ ##################
+ for user in [ admin_user, regular_user1, regular_user2, regular_user3 ]:
+ refresh( user )
+ if len( user.roles ) != 1:
+ raise AssertionError( '%d UserRoleAssociations are associated with %s ( should be 1 )' % ( len( user.roles ), user.email ) )
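The get_user(), get_role_by_name() and get_group_by_name() helpers used throughout these tests come from base.test_db_util, which is not part of this changeset. Judging from the inline role query in test_030 above, they are presumably thin SQLAlchemy lookups along these lines (a sketch only, not the real module):

    import galaxy.model
    from base.test_db_util import sa_session

    def get_role_by_name( name ):
        # Same shape as the inline role query in test_030 above.
        return sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name == name ).first()

    def get_group_by_name( name ):
        return sa_session.query( galaxy.model.Group ).filter( galaxy.model.Group.table.c.name == name ).first()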
diff -r e39c9a2a0b4c -r 48e83411aa91 test/functional/test_data_security.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/test_data_security.py Fri Mar 12 16:11:26 2010 -0500
@@ -0,0 +1,196 @@
+from base.twilltestcase import *
+from base.test_db_util import *
+
+class TestDataSecurity( TwillTestCase ):
+ def test_000_initiate_users( self ):
+ """Ensuring all required user accounts exist"""
+ self.logout()
+ self.login( email='test1(a)bx.psu.edu' )
+ global regular_user1
+ regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+ assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+ global regular_user1_private_role
+ regular_user1_private_role = get_private_role( regular_user1 )
+ self.logout()
+ self.login( email='test2(a)bx.psu.edu' )
+ global regular_user2
+ regular_user2 = get_user( 'test2(a)bx.psu.edu' )
+ assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+ global regular_user2_private_role
+ regular_user2_private_role = get_private_role( regular_user2 )
+ self.logout()
+ self.login( email='test3(a)bx.psu.edu' )
+ global regular_user3
+ regular_user3 = get_user( 'test3(a)bx.psu.edu' )
+ assert regular_user3 is not None, 'Problem retrieving user with email "test3(a)bx.psu.edu" from the database'
+ global regular_user3_private_role
+ regular_user3_private_role = get_private_role( regular_user3 )
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
+ global admin_user
+ admin_user = get_user( 'test(a)bx.psu.edu' )
+ assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+ global admin_user_private_role
+ admin_user_private_role = get_private_role( admin_user )
+ def test_005_default_permissions( self ):
+ """Testing initial settings for DefaultUserPermissions and DefaultHistoryPermissions"""
+ # Logged in as admin_user
+ # Make sure DefaultUserPermissions are correct
+ dups = get_default_user_permissions_by_user( admin_user )
+ if len( dups ) > 1:
+ raise AssertionError( '%d DefaultUserPermissions associated with user %s ( should be 1 )' \
+ % ( len( admin_user.default_permissions ), admin_user.email ) )
+ dup = dups[0]
+ if not dup.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+ raise AssertionError( 'The DefaultUserPermission.action for user "%s" is "%s", but it should be "%s"' \
+ % ( admin_user.email, dup.action, galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
+ # Make sure DefaultHistoryPermissions are correct
+ latest_history = get_latest_history_for_user( admin_user )
+ dhps = get_default_history_permissions_by_history( latest_history )
+ if len( dhps ) > 1:
+ raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d when it was created ( should have been 1 )' \
+ % ( len( latest_history.default_permissions ), latest_history.id ) )
+ dhp = dhps[0]
+ if not dhp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+ raise AssertionError( 'The DefaultHistoryPermission.action for history id %d is "%s", but it should be "%s"' \
+ % ( latest_history.id, dhp.action, galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
+ self.manage_roles_and_groups_for_user( self.security.encode_id( admin_user.id ),
+ check_str=admin_user.email )
+ # Try deleting the admin_user's private role
+ check_str = "You cannot eliminate a user's private role association."
+ self.manage_roles_and_groups_for_user( self.security.encode_id( admin_user.id ),
+ out_role_ids=str( admin_user_private_role.id ),
+ check_str=check_str )
+ def test_010_private_role_creation_and_default_history_permissions( self ):
+ """Testing private role creation and changing DefaultHistoryPermissions for new histories"""
+ # Logged in as admin_user
+ self.logout()
+ # Some of the history related tests here are similar to some tests in the
+ # test_history_functions.py script, so we could potentially eliminate 1 or 2 of them.
+ self.login( email='test1(a)bx.psu.edu' )
+ global regular_user1
+ regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+ assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+ # Add a dataset to the history
+ self.upload_file( '1.bed' )
+ latest_dataset = get_latest_dataset()
+ # Make sure DatasetPermissions are correct - default is 'manage permissions'
+ dps = get_dataset_permissions_by_dataset( latest_dataset )
+ if len( dps ) > 1:
+ raise AssertionError( '%d DatasetPermissions were created for dataset id %d when it was created ( should have been 1 )' \
+ % ( len( dps ), latest_dataset.id ) )
+ dp = dps[0]
+ if not dp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+ raise AssertionError( 'The DatasetPermissions.action for dataset id %d is "%s", but it should be "manage permissions"' \
+ % ( latest_dataset.id, dp.action ) )
+ # Change DefaultHistoryPermissions for regular_user1
+ permissions_in = []
+ actions_in = []
+ for key, value in galaxy.model.Dataset.permitted_actions.items():
+ # Setting the 'access' permission with the private role makes this dataset private
+ permissions_in.append( key )
+ actions_in.append( value.action )
+ # Sort actions for later comparison
+ actions_in.sort()
+ self.user_set_default_permissions( permissions_in=permissions_in, role_id=str( regular_user1_private_role.id ) )
+ # Make sure the default permissions are changed for new histories
+ self.new_history()
+ # logged in as regular_user1
+ latest_history = get_latest_history_for_user( regular_user1 )
+ if len( latest_history.default_permissions ) != len( actions_in ):
+ raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d, should have been %d' % \
+ ( len( latest_history.default_permissions ), latest_history.id, len( actions_in ) ) )
+ dhps = []
+ for dhp in latest_history.default_permissions:
+ dhps.append( dhp.action )
+ # Sort permissions for later comparison
+ dhps.sort()
+ for key, value in galaxy.model.Dataset.permitted_actions.items():
+ if value.action not in dhps:
+ raise AssertionError( '%s not in history id %d default_permissions after they were changed' % ( value.action, latest_history.id ) )
+ # Add a dataset to the history
+ self.upload_file( '1.bed' )
+ latest_dataset = get_latest_dataset()
+ # Make sure DatasetPermissions are correct
+ if len( latest_dataset.actions ) != len( latest_history.default_permissions ):
+ raise AssertionError( '%d DatasetPermissions were created for dataset id %d when it was created ( should have been %d )' % \
+ ( len( latest_dataset.actions ), latest_dataset.id, len( latest_history.default_permissions ) ) )
+ dps = []
+ for dp in latest_dataset.actions:
+ dps.append( dp.action )
+ # Sort actions for later comparison
+ dps.sort()
+ # Compare DatasetPermissions with permissions_in - should be the same
+ if dps != actions_in:
+ raise AssertionError( 'DatasetPermissions "%s" for dataset id %d differ from changed default permissions "%s"' \
+ % ( str( dps ), latest_dataset.id, str( actions_in ) ) )
+ # Compare DefaultHistoryPermissions and DatasetPermissions - should be the same
+ if dps != dhps:
+ raise AssertionError( 'DatasetPermissions "%s" for dataset id %d differ from DefaultHistoryPermissions "%s" for history id %d' \
+ % ( str( dps ), latest_dataset.id, str( dhps ), latest_history.id ) )
+ def test_015_change_default_permissions_for_current_history( self ):
+ """Testing changing DefaultHistoryPermissions for the current history"""
+ # Logged in as regular_user1
+ self.logout()
+ self.login( email=regular_user2.email )
+ latest_history = get_latest_history_for_user( regular_user2 )
+ self.upload_file( '1.bed' )
+ latest_dataset = get_latest_dataset()
+ permissions_in = [ 'DATASET_MANAGE_PERMISSIONS' ]
+ # Make sure these are in sorted order for later comparison
+ actions_in = [ 'manage permissions' ]
+ permissions_out = [ 'DATASET_ACCESS' ]
+ actions_out = [ 'access' ]
+ # Change DefaultHistoryPermissions for the current history
+ self.history_set_default_permissions( permissions_out=permissions_out, permissions_in=permissions_in, role_id=str( regular_user2_private_role.id ) )
+ if len( latest_history.default_permissions ) != len( actions_in ):
+ raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d, should have been %d' \
+ % ( len( latest_history.default_permissions ), latest_history.id, len( permissions_in ) ) )
+ # Make sure DefaultHistoryPermissions were correctly changed for the current history
+ dhps = []
+ for dhp in latest_history.default_permissions:
+ dhps.append( dhp.action )
+ # Sort permissions for later comparison
+ dhps.sort()
+ # Compare DefaultHistoryPermissions and actions_in - should be the same
+ if dhps != actions_in:
+ raise AssertionError( 'DefaultHistoryPermissions "%s" for history id %d differ from actions "%s" passed for changing' \
+ % ( str( dhps ), latest_history.id, str( actions_in ) ) )
+ # Make sure DatasetPermissions are correct
+ if len( latest_dataset.actions ) != len( latest_history.default_permissions ):
+ raise AssertionError( '%d DatasetPermissions were created for dataset id %d when it was created ( should have been %d )' \
+ % ( len( latest_dataset.actions ), latest_dataset.id, len( latest_history.default_permissions ) ) )
+ dps = []
+ for dp in latest_dataset.actions:
+ dps.append( dp.action )
+ # Sort actions for comparison
+ dps.sort()
+ # Compare DatasetPermissions and DefaultHistoryPermissions - should be the same
+ if dps != dhps:
+ raise AssertionError( 'DatasetPermissions "%s" for dataset id %d differ from DefaultHistoryPermissions "%s"' \
+ % ( str( dps ), latest_dataset.id, str( dhps ) ) )
+ def test_999_reset_data_for_later_test_runs( self ):
+ """Reseting data to enable later test runs to pass"""
+ # Logged in as regular_user2
+ self.logout()
+ self.login( email=admin_user.email )
+ ##################
+ # Make sure all users are associated only with their private roles
+ ##################
+ for user in [ admin_user, regular_user1, regular_user2, regular_user3 ]:
+ refresh( user )
+ if len( user.roles ) != 1:
+ raise AssertionError( '%d UserRoleAssociations are associated with %s ( should be 1 )' % ( len( user.roles ), user.email ) )
+ #####################
+ # Reset DefaultHistoryPermissions for regular_user1
+ #####################
+ self.logout()
+ self.login( email=regular_user1.email )
+ # Change DefaultHistoryPermissions for regular_user1 back to the default
+ permissions_in = [ 'DATASET_MANAGE_PERMISSIONS' ]
+ permissions_out = [ 'DATASET_ACCESS' ]
+ self.user_set_default_permissions( permissions_in=permissions_in,
+ permissions_out=permissions_out,
+ role_id=str( regular_user1_private_role.id ) )
+ self.logout()
+ self.login( email=admin_user.email )
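Both permission tests above build parallel key/action lists from galaxy.model.Dataset.permitted_actions and compare them after sorting. A self-contained sketch of that pairing, using only the two literal key/action pairs that appear in test_015 (the plain dict below stands in for the real permitted_actions Bunch):

    # Stand-in for galaxy.model.Dataset.permitted_actions: keys are the form field
    # names posted to Galaxy, values are the action strings stored with
    # DefaultHistoryPermissions / DatasetPermissions rows.
    permitted_actions = {
        'DATASET_MANAGE_PERMISSIONS': 'manage permissions',
        'DATASET_ACCESS': 'access',
    }
    permissions_in = []
    actions_in = []
    for key, action in permitted_actions.items():
        permissions_in.append( key )
        actions_in.append( action )
    # Sorted so the list can be compared directly against the sorted dhp.action /
    # dp.action values pulled back from the database.
    actions_in.sort()
    assert actions_in == [ 'access', 'manage permissions' ]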
diff -r e39c9a2a0b4c -r 48e83411aa91 test/functional/test_library_features.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/test_library_features.py Fri Mar 12 16:11:26 2010 -0500
@@ -0,0 +1,606 @@
+from base.twilltestcase import *
+from base.test_db_util import *
+
+class TestLibraryFeatures( TwillTestCase ):
+ def test_000_initiate_users( self ):
+ """Ensuring all required user accounts exist"""
+ self.logout()
+ self.login( email='test1(a)bx.psu.edu' )
+ global regular_user1
+ regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+ assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+ self.logout()
+ self.login( email='test2(a)bx.psu.edu' )
+ global regular_user2
+ regular_user2 = get_user( 'test2(a)bx.psu.edu' )
+ assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+ self.logout()
+ self.login( email='test3(a)bx.psu.edu' )
+ global regular_user3
+ regular_user3 = get_user( 'test3(a)bx.psu.edu' )
+ assert regular_user3 is not None, 'Problem retrieving user with email "test3(a)bx.psu.edu" from the database'
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
+ global admin_user
+ admin_user = get_user( 'test(a)bx.psu.edu' )
+ assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+ def test_005_create_library( self ):
+ """Testing creating a new library, then renaming it"""
+ # Logged in as admin_user
+ name = "library features Library1"
+ description = "library features Library1 description"
+ synopsis = "library features Library1 synopsis"
+ self.create_library( name=name, description=description, synopsis=synopsis )
+ self.browse_libraries_admin( check_str1=name, check_str2=description )
+ # Get the library object for later tests
+ global library_one
+ library_one = get_library( name, description, synopsis )
+ assert library_one is not None, 'Problem retrieving library named "%s" from the database' % name
+ # Rename the library
+ new_name = "library features Library1 new name"
+ new_description = "library features Library1 new description"
+ new_synopsis = "library features Library1 new synopsis"
+ self.library_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ library_one.name,
+ new_name=new_name,
+ new_description=new_description,
+ new_synopsis=new_synopsis )
+ self.browse_libraries_admin( check_str1=new_name, check_str2=new_description )
+ # Reset the library back to the original name and description
+ self.library_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ library_one.name,
+ new_name=name,
+ new_description=description,
+ new_synopsis=synopsis )
+ refresh( library_one )
+ def test_010_library_template_features( self ):
+ """Testing adding a template to a library, then filling in the contents"""
+ # Logged in as admin_user
+ form_name = 'Library template Form One'
+ form_desc = 'This is Form One'
+ form_type = galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+ # Create form for library template
+ self.create_form( name=form_name, desc=form_desc, formtype=form_type )
+ global form_one
+ form_one = get_form( form_name )
+ assert form_one is not None, 'Problem retrieving form named (%s) from the database' % form_name
+ # Add new template based on the form to the library
+ template_name = 'Library Template 1'
+ self.add_library_template( 'library_admin',
+ 'library',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( form_one.id ),
+ form_one.name )
+ # Make sure the template fields are displayed on the library information page
+ field_dict = form_one.fields[ 0 ]
+ global form_one_field_label
+ form_one_field_label = '%s' % str( field_dict.get( 'label', 'Field 0' ) )
+ global form_one_field_help
+ form_one_field_help = '%s' % str( field_dict.get( 'helptext', 'Field 0 help' ) )
+ global form_one_field_required
+ form_one_field_required = '%s' % str( field_dict.get( 'required', 'optional' ) ).capitalize()
+ # Add information to the library using the template
+ global form_one_field_name
+ form_one_field_name = 'field_0'
+ contents = '%s library contents' % form_one_field_label
+ self.library_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ library_one.name,
+ ele_1_field_name=form_one_field_name,
+ ele_1_contents=contents )
+ def test_015_edit_template_contents_admin_view( self ):
+ """Test editing template contents from the Admin view"""
+ # Logged in as admin_user
+ # Make sure the template contents from the previous method were correctly saved.
+ # Twill barfs if this test is run in the previous method.
+ contents = '%s library contents' % form_one_field_label
+ self.library_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ library_one.name,
+ check_str1=contents )
+ contents = '%s library contents' % form_one_field_label
+ contents_edited = contents + ' edited'
+ # Edit the contents and then save them
+ self.library_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ library_one.name,
+ ele_1_field_name=form_one_field_name,
+ ele_1_contents=contents_edited )
+ # Make sure the template contents were correctly saved
+ self.library_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ library_one.name,
+ check_str1=contents_edited )
+ def test_020_add_public_dataset_to_root_folder( self ):
+ """Testing adding a public dataset to the root folder, making sure library template is inherited"""
+ # Logged in as admin_user
+ message = 'Testing adding a public dataset to the root folder'
+ # The template should be inherited to the library dataset upload form.
+ template_contents = "%s contents for root folder 1.bed" % form_one_field_label
+ self.add_library_dataset( 'library_admin',
+ '1.bed',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( library_one.root_folder.id ),
+ library_one.root_folder.name,
+ file_type='bed',
+ dbkey='hg18',
+ message=message.replace( ' ', '+' ),
+ root=True,
+ template_field_name1=form_one_field_name,
+ template_field_contents1=template_contents )
+ global ldda_one
+ ldda_one = get_latest_ldda()
+ assert ldda_one is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_one from the database'
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ check_str1='1.bed',
+ check_str2=message,
+ check_str3=admin_user.email )
+ # Make sure the library template contents were correctly saved
+ self.ldda_edit_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( library_one.root_folder.id ),
+ self.security.encode_id( ldda_one.id ),
+ ldda_one.name,
+ check_str1=template_contents )
+ def test_025_add_new_folder_to_root_folder( self ):
+ """Testing adding a folder to a library root folder"""
+ # logged in as admin_user
+ root_folder = library_one.root_folder
+ name = "Root Folder's Folder One"
+ description = "This is the root folder's Folder One"
+ self.add_folder( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( root_folder.id ),
+ name=name,
+ description=description )
+ global folder_one
+ folder_one = get_folder( root_folder.id, name, description )
+ assert folder_one is not None, 'Problem retrieving library folder named "%s" from the database' % name
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ check_str1=name,
+ check_str2=description )
+ # Make sure the template was inherited, but the contents were not
+ contents = '%s library contents' % form_one_field_label
+ self.folder_info( 'library_admin',
+ self.security.encode_id( folder_one.id ),
+ self.security.encode_id( library_one.id ),
+ check_str1=form_one_field_name,
+ not_displayed=contents )
+ # Add contents to the inherited template
+ template_contents = "%s contents for Folder One" % form_one_field_label
+ self.folder_info( 'library_admin',
+ self.security.encode_id( folder_one.id ),
+ self.security.encode_id( library_one.id ),
+ field_name=form_one_field_name,
+ contents=template_contents )
+ def test_030_add_subfolder_to_folder( self ):
+ """Testing adding a folder to a library folder"""
+ # logged in as admin_user
+ name = "Folder One's Subfolder"
+ description = "This is the Folder One's subfolder"
+ self.add_folder( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_one.id ),
+ name=name,
+ description=description )
+ global subfolder_one
+ subfolder_one = get_folder( folder_one.id, name, description )
+ assert subfolder_one is not None, 'Problem retrieving library folder named "%s" from the database' % name
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ check_str1=name,
+ check_str2=description )
+ # Make sure the template was inherited, but the contents were not
+ contents = '%s library contents' % form_one_field_label
+ self.folder_info( 'library_admin',
+ self.security.encode_id( subfolder_one.id ),
+ self.security.encode_id( library_one.id ),
+ check_str1=form_one_field_name,
+ not_displayed=contents )
+ # Add contents to the inherited template
+ template_contents = "%s contents for Folder One" % form_one_field_label
+ self.folder_info( 'library_admin',
+ self.security.encode_id( subfolder_one.id ),
+ self.security.encode_id( library_one.id ),
+ field_name=form_one_field_name,
+ contents=template_contents )
+ def test_035_add_2nd_new_folder_to_root_folder( self ):
+ """Testing adding a 2nd folder to a library root folder"""
+ # logged in as admin_user
+ root_folder = library_one.root_folder
+ name = "Folder Two"
+ description = "This is the root folder's Folder Two"
+ self.add_folder( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( root_folder.id ),
+ name=name,
+ description=description )
+ global folder_two
+ folder_two = get_folder( root_folder.id, name, description )
+ assert folder_two is not None, 'Problem retrieving library folder named "%s" from the database' % name
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ check_str1=name,
+ check_str2=description )
+ def test_040_add_public_dataset_to_root_folders_2nd_subfolder( self ):
+ """Testing adding a public dataset to the root folder's 2nd sub-folder"""
+ # Logged in as admin_user
+ message = "Testing adding a public dataset to the folder named %s" % folder_two.name
+ # The form_one template should be inherited to the library dataset upload form.
+ template_contents = "%s contents for %s 2.bed" % ( form_one_field_label, folder_two.name )
+ self.add_library_dataset( 'library_admin',
+ '2.bed',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_two.id ),
+ folder_two.name,
+ file_type='bed',
+ dbkey='hg18',
+ message=message.replace( ' ', '+' ),
+ root=False,
+ template_field_name1=form_one_field_name,
+ template_field_contents1=template_contents )
+ global ldda_two
+ ldda_two = get_latest_ldda()
+ assert ldda_two is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_two from the database'
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ check_str1='2.bed',
+ check_str2=message,
+ check_str3=admin_user.email )
+ # Make sure the library template contents were correctly saved
+ self.ldda_edit_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_two.id ),
+ self.security.encode_id( ldda_two.id ),
+ ldda_two.name,
+ check_str1=template_contents )
+ def test_045_add_2nd_public_dataset_to_root_folders_2nd_subfolder( self ):
+ """Testing adding a 2nd public dataset to the root folder's 2nd sub-folder"""
+ # Logged in as admin_user
+ message = "Testing adding a 2nd public dataset to the folder named %s" % folder_two.name
+ # The form_one template should be inherited to the library dataset upload form.
+ template_contents = "%s contents for %s 3.bed" % ( form_one_field_label, folder_two.name )
+ self.add_library_dataset( 'library_admin',
+ '3.bed',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_two.id ),
+ folder_two.name,
+ file_type='bed',
+ dbkey='hg18',
+ message=message.replace( ' ', '+' ),
+ root=False,
+ template_field_name1=form_one_field_name,
+ template_field_contents1=template_contents )
+ global ldda_three
+ ldda_three = get_latest_ldda()
+ assert ldda_three is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_three from the database'
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ check_str1='3.bed',
+ check_str2=message,
+ check_str3=admin_user.email )
+ # Make sure the library template contents were correctly saved
+ self.ldda_edit_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_two.id ),
+ self.security.encode_id( ldda_three.id ),
+ ldda_three.name,
+ check_str1=template_contents )
+ def test_050_copy_dataset_from_history_to_subfolder( self ):
+ """Testing copying a dataset from the current history to a subfolder"""
+ # logged in as admin_user
+ self.new_history()
+ self.upload_file( "4.bed" )
+ latest_hda = get_latest_hda()
+ self.add_history_datasets_to_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( subfolder_one.id ),
+ subfolder_one.name,
+ self.security.encode_id( latest_hda.id ),
+ root=False )
+ global ldda_four
+ ldda_four = get_latest_ldda()
+ assert ldda_four is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_four from the database'
+ # Make sure the correct template was inherited but the contents were not inherited
+ contents = "%s contents for Folder One's Subfolder" % form_one_field_label
+ self.ldda_edit_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( subfolder_one.id ),
+ self.security.encode_id( ldda_four.id ),
+ ldda_four.name,
+ check_str1=form_one_field_name,
+ not_displayed=contents )
+ def test_055_editing_dataset_attribute_info( self ):
+ """Testing editing a library dataset's attribute information"""
+ # logged in as admin_user
+ new_ldda_name = '4.bed ( version 1 )'
+ self.ldda_edit_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( subfolder_one.id ),
+ self.security.encode_id( ldda_four.id ),
+ ldda_four.name,
+ new_ldda_name=new_ldda_name )
+ refresh( ldda_four )
+ self.browse_library( 'library_admin', self.security.encode_id( library_one.id ), check_str1=new_ldda_name )
+ # Make sure the correct template was inherited but the contents were not inherited
+ contents = "%s contents for Folder One's Subfolder" % form_one_field_label
+ self.ldda_edit_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( subfolder_one.id ),
+ self.security.encode_id( ldda_four.id ),
+ ldda_four.name,
+ check_str1=form_one_field_name,
+ not_displayed=contents )
+ def test_060_uploading_new_dataset_version( self ):
+ """Testing uploading a new version of a library dataset"""
+ # logged in as admin_user
+ message = 'Testing uploading a new version of a dataset'
+ # The form_one template should be inherited to the library dataset upload form.
+ template_contents = "%s contents for %s new version of 4.bed" % ( form_one_field_label, folder_one.name )
+ self.upload_new_dataset_version( 'library_admin',
+ '4.bed',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( subfolder_one.id ),
+ subfolder_one.name,
+ self.security.encode_id( ldda_four.library_dataset.id ),
+ ldda_four.name,
+ file_type='auto',
+ dbkey='hg18',
+ message=message.replace( ' ', '+' ),
+ template_field_name1=form_one_field_name,
+ template_field_contents1=template_contents )
+ global ldda_four_version_two
+ ldda_four_version_two = get_latest_ldda()
+ assert ldda_four_version_two is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_four_version_two from the database'
+ # Make sure the correct template was inherited, but does not include any contents
+ contents = "%s contents for Folder One's Subfolder" % form_one_field_label
+ self.ldda_edit_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( subfolder_one.id ),
+ self.security.encode_id( ldda_four_version_two.id ),
+ ldda_four_version_two.name,
+ check_str1='This is the latest version of this library dataset',
+ not_displayed=contents )
+ # Fill in the template contents
+ self.ldda_edit_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( subfolder_one.id ),
+ self.security.encode_id( ldda_four_version_two.id ),
+ ldda_four_version_two.name,
+ ele_1_field_name=form_one_field_name,
+ ele_1_contents=template_contents )
+ # Check the previous version
+ self.ldda_edit_info( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( subfolder_one.id ),
+ self.security.encode_id( ldda_four.id ),
+ ldda_four.name,
+ check_str1='This is an expired version of this library dataset' )
+ # Make sure ldda_four is no longer displayed in the library
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ not_displayed=ldda_four.name )
+ def test_065_upload_directory_of_files_from_libraries_view( self ):
+ """Testing uploading a directory of files to a root folder from the Data Libraries view"""
+ # logged in as admin_user
+ # admin_user will not have the option to upload a directory of files from the
+ # Libraries view since a sub-directory named the same as their email is not contained
+ # in the configured user_library_import_dir. However, since members of role_one have
+ # the LIBRARY_ADD permission, we can test this feature as regular_user1 or regular_user3
+ self.logout()
+ self.login( email=regular_user1.email )
+ message = 'Uploaded all files in test-data/users/test1...'
+ # Since regular_user1 does not have any sub-directories contained within her configured
+ # user_library_import_dir, the only option in her server_dir select list will be the
+ # directory named the same as her email
+ check_str_after_submit = "Added 1 datasets to the library '%s' (each is selected)." % library_one.root_folder.name
+ # TODO: gvk ( 3/12/10 ) this is broken, so commenting out until I have time to discover why...
+ """
+ self.upload_directory_of_files( 'library',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( library_one.root_folder.id ),
+ server_dir=regular_user1.email,
+ message=message,
+ check_str_after_submit=check_str_after_submit )
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ check_str1=regular_user1.email,
+ check_str2=message )
+ self.logout()
+ self.login( regular_user3.email )
+ message = 'Uploaded all files in test-data/users/test3.../run1'
+ # Since regular_user3 has a subdirectory contained within her configured user_library_import_dir,
+ # she will have a "None" option in her server_dir select list
+ self.upload_directory_of_files( 'library',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( library_one.root_folder.id ),
+ server_dir='run1',
+ message=message,
+ check_str1='<option>None</option>',
+ check_str_after_submit=check_str_after_submit )
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ check_str1=regular_user3.email,
+ check_str2=message )
+ """
+ def test_070_download_archive_of_library_files( self ):
+ """Testing downloading an archive of files from the library"""
+ # logged in as regular_user3
+ self.logout()
+ self.login( email=admin_user.email )
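+ # Download ldda_one and ldda_two together in each supported archive format and confirm both files are present in the archive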
+ for format in ( 'tbz', 'tgz', 'zip' ):
+ archive = self.download_archive_of_library_files( cntrller='library',
+ library_id=self.security.encode_id( library_one.id ),
+ ldda_ids=[ self.security.encode_id( ldda_one.id ), self.security.encode_id( ldda_two.id ) ],
+ format=format )
+ self.check_archive_contents( archive, ( ldda_one, ldda_two ) )
+ os.remove( archive )
+ def test_075_mark_dataset_deleted( self ):
+ """Testing marking a library dataset as deleted"""
+ # Logged in as admin_user
+ self.delete_library_item( self.security.encode_id( library_one.id ),
+ self.security.encode_id( ldda_two.library_dataset.id ),
+ ldda_two.name,
+ item_type='library_dataset' )
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ not_displayed=ldda_two.name )
+ def test_080_display_and_hide_deleted_dataset( self ):
+ """Testing displaying and hiding a deleted library dataset"""
+ # Logged in as admin_user
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ show_deleted=True,
+ check_str1=ldda_two.name )
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ not_displayed=ldda_two.name )
+ def test_085_mark_folder_deleted( self ):
+ """Testing marking a library folder as deleted"""
+ # Logged in as admin_user
+ self.delete_library_item( self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_two.id ),
+ folder_two.name,
+ item_type='folder' )
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ not_displayed=folder_two.name )
+ def test_090_mark_folder_undeleted( self ):
+ """Testing marking a library folder as undeleted"""
+ # Logged in as admin_user
+ self.undelete_library_item( self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_two.id ),
+ folder_two.name,
+ item_type='folder' )
+ # 2.bed was deleted before the folder was deleted, so state should have been saved. In order
+ # for 2.bed to be displayed, it would itself have to be marked undeleted.
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ check_str1=folder_two.name,
+ not_displayed=ldda_two.name )
+ def test_095_mark_library_deleted( self ):
+ """Testing marking a library as deleted"""
+ # Logged in as admin_user
+ # First mark folder_two as deleted to further test state saving when we undelete the library
+ self.delete_library_item( self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_two.id ),
+ folder_two.name,
+ item_type='folder' )
+ self.delete_library_item( self.security.encode_id( library_one.id ),
+ self.security.encode_id( library_one.id ),
+ library_one.name,
+ item_type='library' )
+ self.browse_libraries_admin( not_displayed1=library_one.name )
+ self.browse_libraries_admin( deleted=True, check_str1=library_one.name )
+ def test_100_mark_library_undeleted( self ):
+ """Testing marking a library as undeleted"""
+ # Logged in as admin_user
+ self.undelete_library_item( self.security.encode_id( library_one.id ),
+ self.security.encode_id( library_one.id ),
+ library_one.name,
+ item_type='library' )
+ self.browse_libraries_admin( check_str1=library_one.name )
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ check_str1=library_one.name,
+ not_displayed=folder_two.name )
+ def test_105_purge_library( self ):
+ """Testing purging a library"""
+ # Logged in as admin_user
+ self.delete_library_item( self.security.encode_id( library_one.id ),
+ self.security.encode_id( library_one.id ),
+ library_one.name,
+ item_type='library' )
+ self.purge_library( self.security.encode_id( library_one.id ), library_one.name )
+ # Make sure the library was purged
+ refresh( library_one )
+ if not ( library_one.deleted and library_one.purged ):
+ raise AssertionError( 'The library id %s named "%s" has not been marked as deleted and purged.' % ( str( library_one.id ), library_one.name ) )
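+ # check_folder() recursively walks the folder tree, confirming that every sub-folder was purged and that each library_dataset, its ldda and its underlying dataset were marked deleted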
+ def check_folder( library_folder ):
+ for folder in library_folder.folders:
+ refresh( folder )
+ # Make sure all of the library_folders are purged
+ if not folder.purged:
+ raise AssertionError( 'The library_folder id %s named "%s" has not been marked purged.' % ( str( folder.id ), folder.name ) )
+ check_folder( folder )
+ # Make sure all of the LibraryDatasets and associated objects are deleted
+ refresh( library_folder )
+ for library_dataset in library_folder.datasets:
+ refresh( library_dataset )
+ ldda = library_dataset.library_dataset_dataset_association
+ if ldda:
+ refresh( ldda )
+ if not ldda.deleted:
+ raise AssertionError( 'The library_dataset_dataset_association id %s named "%s" has not been marked as deleted.' % \
+ ( str( ldda.id ), ldda.name ) )
+ # Make sure all of the datasets have been deleted
+ dataset = ldda.dataset
+ refresh( dataset )
+ if not dataset.deleted:
+ raise AssertionError( 'The dataset with id "%s" has not been marked as deleted when it should have been.' % \
+ str( ldda.dataset.id ) )
+ if not library_dataset.deleted:
+ raise AssertionError( 'The library_dataset id %s named "%s" has not been marked as deleted.' % \
+ ( str( library_dataset.id ), library_dataset.name ) )
+ check_folder( library_one.root_folder )
+ def test_110_no_library_template( self ):
+ """Test library features when library has no template"""
+ # Logged in as admin_user
+ name = "library features Library Two"
+ description = "library features This is Library Two"
+ synopsis = "library features Library Two synopsis"
+ # Create a library, adding no template
+ self.create_library( name=name, description=description, synopsis=synopsis )
+ self.browse_libraries_admin( check_str1=name, check_str2=description )
+ global library_two
+ library_two = get_library( name, description, synopsis )
+ assert library_two is not None, 'Problem retrieving library named "%s" from the database' % name
+ # Add a dataset to the library
+ self.add_library_dataset( 'library_admin',
+ '3.bed',
+ self.security.encode_id( library_two.id ),
+ self.security.encode_id( library_two.root_folder.id ),
+ library_two.root_folder.name,
+ file_type='bed',
+ dbkey='hg18',
+ message='',
+ root=True )
+ ldda_three = get_latest_ldda()
+ assert ldda_three is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_three from the database'
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_two.id ),
+ check_str1='3.bed',
+ check_str2=admin_user.email )
+ # TODO: add a functional test to cover adding a library dataset via url_paste here...
+ # TODO: Add a functional test to cover checking the space_to_tab checkbox here...
+ # Delete and purge the library
+ self.delete_library_item( self.security.encode_id( library_two.id ),
+ self.security.encode_id( library_two.id ),
+ library_two.name,
+ item_type='library' )
+ self.purge_library( self.security.encode_id( library_two.id ), library_two.name )
+ self.home()
+ def test_999_reset_data_for_later_test_runs( self ):
+ """Reseting data to enable later test runs to pass"""
+ # Logged in as admin_user
+ ##################
+ # Purge all libraries
+ ##################
+ for library in [ library_one, library_two ]:
+ self.delete_library_item( self.security.encode_id( library.id ),
+ self.security.encode_id( library.id ),
+ library.name,
+ item_type='library' )
+ self.purge_library( self.security.encode_id( library.id ), library.name )
+ ##################
+ # Make sure all users are associated only with their private roles
+ ##################
+ for user in [ admin_user, regular_user1, regular_user2, regular_user3 ]:
+ refresh( user )
+ if len( user.roles) != 1:
+ raise AssertionError( '%d UserRoleAssociations are associated with %s ( should be 1 )' % ( len( user.roles ), user.email ) )
+ self.logout()
diff -r e39c9a2a0b4c -r 48e83411aa91 test/functional/test_library_security.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/test_library_security.py Fri Mar 12 16:11:26 2010 -0500
@@ -0,0 +1,603 @@
+from base.twilltestcase import *
+from base.test_db_util import *
+
+class TestLibrarySecurity( TwillTestCase ):
+ def test_000_initiate_users( self ):
+ """Ensuring all required user accounts exist"""
+ self.logout()
+ self.login( email='test1(a)bx.psu.edu' )
+ global regular_user1
+ regular_user1 = get_user( 'test1(a)bx.psu.edu' )
+ assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+ global regular_user1_private_role
+ regular_user1_private_role = get_private_role( regular_user1 )
+ self.logout()
+ self.login( email='test2(a)bx.psu.edu' )
+ global regular_user2
+ regular_user2 = get_user( 'test2(a)bx.psu.edu' )
+ assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+ global regular_user2_private_role
+ regular_user2_private_role = get_private_role( regular_user2 )
+ self.logout()
+ self.login( email='test3(a)bx.psu.edu' )
+ global regular_user3
+ regular_user3 = get_user( 'test3(a)bx.psu.edu' )
+ assert regular_user3 is not None, 'Problem retrieving user with email "test3(a)bx.psu.edu" from the database'
+ global regular_user3_private_role
+ regular_user3_private_role = get_private_role( regular_user3 )
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
+ global admin_user
+ admin_user = get_user( 'test(a)bx.psu.edu' )
+ assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+ global admin_user_private_role
+ admin_user_private_role = get_private_role( admin_user )
+ def test_005_create_required_groups_and_roles( self ):
+ """Testing creating all required groups and roles for this script"""
+ # Logged in as admin_user
+ # Create role_one
+ name = 'library security Role One'
+ description = "library security This is Role One's description"
+ user_ids = [ str( admin_user.id ), str( regular_user1.id ), str( regular_user3.id ) ]
+ self.create_role( name=name,
+ description=description,
+ in_user_ids=user_ids,
+ in_group_ids=[],
+ create_group_for_role='no',
+ private_role=admin_user.email )
+ # Get the role object for later tests
+ global role_one
+ role_one = get_role_by_name( name )
+ # Create group_one
+ name = 'Group One'
+ self.create_group( name=name, in_user_ids=[ str( regular_user1.id ) ], in_role_ids=[ str( role_one.id ) ] )
+ # Get the group object for later tests
+ global group_one
+ group_one = get_group_by_name( name )
+ assert group_one is not None, 'Problem retrieving group named "Group One" from the database'
+ # NOTE: To get this to work with twill, all select lists on the ~/admin/role page must contain at least
+ # 1 option value or twill throws an exception, which is: ParseError: OPTION outside of SELECT
+ # Due to this bug in twill, after we create the role we bypass the page and visit the URL directly in the
+ # associate_users_and_groups_with_role() method.
+ #
+ #create role_two
+ name = 'library security Role Two'
+ description = 'library security This is Role Two'
+ user_ids = [ str( admin_user.id ) ]
+ group_ids = [ str( group_one.id ) ]
+ private_role = admin_user.email
+ self.create_role( name=name,
+ description=description,
+ in_user_ids=user_ids,
+ in_group_ids=group_ids,
+ private_role=private_role )
+ # Get the role object for later tests
+ global role_two
+ role_two = get_role_by_name( name )
+ assert role_two is not None, 'Problem retrieving role named "Role Two" from the database'
+ def test_010_create_library( self ):
+ """Testing creating a new library, then renaming it"""
+ # Logged in as admin_user
+ name = "library security Library1"
+ description = "library security Library1 description"
+ synopsis = "library security Library1 synopsis"
+ self.create_library( name=name, description=description, synopsis=synopsis )
+ # Get the library object for later tests
+ global library_one
+ library_one = get_library( name, description, synopsis )
+ assert library_one is not None, 'Problem retrieving library named "%s" from the database' % name
+ # Make sure library_one is public
+ assert 'access library' not in [ a.action for a in library_one.actions ], 'Library %s is not public when first created' % library_one.name
+ # Set permissions on the library, sort for later testing.
+ permissions_in = [ k for k, v in galaxy.model.Library.permitted_actions.items() ]
+ permissions_out = []
+ # Role one members are: admin_user, regular_user1, regular_user3. Each of these users will be permitted for
+ # LIBRARY_ACCESS, LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE on this library and its contents.
+ self.library_permissions( self.security.encode_id( library_one.id ),
+ library_one.name,
+ str( role_one.id ),
+ permissions_in,
+ permissions_out )
+ # Make sure the library is accessible by admin_user
+ self.visit_url( '%s/library/browse_libraries' % self.url )
+ self.check_page_for_string( library_one.name )
+ # Make sure the library is not accessible by regular_user2 since regular_user2 does not have Role1.
+ self.logout()
+ self.login( email=regular_user2.email )
+ self.visit_url( '%s/library/browse_libraries' % self.url )
+ try:
+ self.check_page_for_string( library_one.name )
+ raise AssertionError, 'Library %s is accessible by %s when it should be restricted' % ( library_one.name, regular_user2.email )
+ except:
+ pass
+ self.logout()
+ self.login( email=admin_user.email )
+ def test_015_add_new_folder_to_root_folder( self ):
+ """Testing adding a folder to a library root folder"""
+ # logged in as admin_user
+ root_folder = library_one.root_folder
+ name = "Root Folder's Folder One"
+ description = "This is the root folder's Folder One"
+ self.add_folder( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( root_folder.id ),
+ name=name,
+ description=description )
+ global folder_one
+ folder_one = get_folder( root_folder.id, name, description )
+ assert folder_one is not None, 'Problem retrieving library folder named "%s" from the database' % name
+ def test_020_add_dataset_with_private_role_restriction_to_folder( self ):
+ """Testing adding a dataset with a private role restriction to a folder"""
+ # Logged in as admin_user
+ #
+ # Keep in mind that LIBRARY_ACCESS = "Role One" on the whole library
+ #
+ # Add a dataset restricted by the following:
+ # DATASET_MANAGE_PERMISSIONS = "test(a)bx.psu.edu" via DefaultUserPermissions
+ # DATASET_ACCESS = "regular_user1" private role via this test method
+ # LIBRARY_ADD = "Role One" via inheritance from parent folder
+ # LIBRARY_MODIFY = "Role One" via inheritance from parent folder
+ # LIBRARY_MANAGE = "Role One" via inheritance from parent folder
+ # "Role One" members are: test(a)bx.psu.edu, test1(a)bx.psu.edu, test3(a)bx.psu.edu
+ # This means that only user test1(a)bx.psu.edu can see the dataset from the Libraries view
+ message = 'This is a test of the fourth dataset uploaded'
+ self.add_library_dataset( 'library_admin',
+ '1.bed',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_one.id ),
+ folder_one.name,
+ file_type='bed',
+ dbkey='hg18',
+ roles=[ str( regular_user1_private_role.id ) ],
+ message=message.replace( ' ', '+' ),
+ root=False )
+ global ldda_one
+ ldda_one = get_latest_ldda()
+ assert ldda_one is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_one from the database'
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ check_str1='1.bed',
+ check_str2=message,
+ check_str3=admin_user.email )
+ def test_025_accessing_dataset_with_private_role_restriction( self ):
+ """Testing accessing a dataset with a private role restriction"""
+ # Logged in as admin_user
+ #
+ # Keep in mind that LIBRARY_ACCESS = "Role One" on the whole library
+ # Role one members are: admin_user, regular_user1, regular_user3. Each of these users will be permitted for
+ # LIBRARY_ACCESS, LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE on this library and its contents.
+ #
+ # Legitimate roles displayed on the permission form are as follows:
+ # 'Role One' since the LIBRARY_ACCESS permission is associated with Role One. # Role one members are: admin_user, regular_user1, regular_user3.
+ # 'test(a)bx.psu.edu' ( admin_user's private role ) since admin_user has Role One
+ # 'Role Two' since admin_user has Role Two
+ # 'Role Three' since admin_user has Role Three
+ # 'test1(a)bx.psu.edu' ( regular_user1's private role ) since regular_user1 has Role One
+ # 'test3(a)bx.psu.edu' ( regular_user3's private role ) since regular_user3 has Role One
+ #
+ # admin_user should not be able to see 1.bed from the analysis view's access libraries
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ not_displayed=folder_one.name,
+ not_displayed2='1.bed' )
+ self.logout()
+ # regular_user1 should be able to see 1.bed from the analysis view's access libraries
+ # since it was associated with regular_user1's private role
+ self.login( email=regular_user1.email )
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ check_str1=folder_one.name,
+ check_str2='1.bed' )
+ self.logout()
+ # regular_user2 should not be able to see the library since they do not have
+ # Role One which is associated with the LIBRARY_ACCESS permission
+ self.login( email=regular_user2.email )
+ self.browse_libraries_regular_user( check_str1="You are not authorized to access any libraries" )
+ self.logout()
+ # regular_user3 should not be able to see 1.bed from the analysis view's access libraries
+ self.login( email=regular_user3.email )
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ not_displayed=folder_one.name,
+ not_displayed2='1.bed' )
+ self.logout()
+ self.login( email=admin_user.email )
+ def test_030_change_dataset_access_permission( self ):
+ """Testing changing the access permission on a dataset with a private role restriction"""
+ # Logged in as admin_user
+ # We need admin_user to be able to access 1.bed
+ permissions_in = [ k for k, v in galaxy.model.Dataset.permitted_actions.items() ]
+ for k, v in galaxy.model.Library.permitted_actions.items():
+ if k != 'LIBRARY_ACCESS':
+ permissions_in.append( k )
+ permissions_out = []
+ # Attempt to associate multiple roles with the library dataset, with one of the
+ # roles being private.
+ role_ids_str = '%s,%s' % ( str( role_one.id ), str( admin_user_private_role.id ) )
+ check_str = "At least 1 user must have every role associated with accessing datasets. "
+ check_str += "Since you are associating more than 1 role, no private roles are allowed."
+ self.ldda_permissions( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_one.id ),
+ self.security.encode_id( ldda_one.id ),
+ role_ids_str,
+ permissions_in,
+ permissions_out,
+ check_str1=check_str )
+ role_ids_str = str( role_one.id )
+ self.ldda_permissions( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_one.id ),
+ self.security.encode_id( ldda_one.id ),
+ role_ids_str,
+ permissions_in,
+ permissions_out )
+ # admin_user should now be able to see 1.bed from the analysis view's access libraries
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ check_str1=ldda_one.name )
+ def test_035_add_dataset_with_role_associated_with_group_and_users( self ):
+ """Testing adding a dataset with a role that is associated with a group and users"""
+ # Logged in as admin_user
+ # Add a dataset restricted by role_two, which is currently associated as follows:
+ # groups: group_one
+ # users: test(a)bx.psu.edu, test1(a)bx.psu.edu via group_one
+ #
+ # We first need to make library_one public
+ permissions_in = []
+ for k, v in galaxy.model.Library.permitted_actions.items():
+ if k != 'LIBRARY_ACCESS':
+ permissions_in.append( k )
+ permissions_out = []
+ # Role one members are: admin_user, regular_user1, regular_user3. Each of these users will now be permitted for
+ # LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE on this library and its contents. The library will be public from
+ # this point on.
+ self.library_permissions( self.security.encode_id( library_one.id ),
+ library_one.name,
+ str( role_one.id ),
+ permissions_in,
+ permissions_out )
+ refresh( library_one )
+ message = 'Testing adding a dataset with a role that is associated with a group and users'
+ self.add_library_dataset( 'library_admin',
+ '2.bed',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_one.id ),
+ folder_one.name,
+ file_type='bed',
+ dbkey='hg17',
+ roles=[ str( role_two.id ) ],
+ message=message.replace( ' ', '+' ),
+ root=False )
+ global ldda_two
+ ldda_two = get_latest_ldda()
+ assert ldda_two is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_two from the database'
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ check_str1='2.bed',
+ check_str2=message,
+ check_str3=admin_user.email )
+ def test_040_accessing_dataset_with_role_associated_with_group_and_users( self ):
+ """Testing accessing a dataset with a role that is associated with a group and users"""
+ # Logged in as admin_user
+ # admin_user should be able to see 2.bed since she is associated with role_two
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ check_str1='2.bed',
+ check_str2=admin_user.email )
+ self.logout()
+ # regular_user1 should be able to see 2.bed since she is associated with group_one
+ self.login( email = 'test1(a)bx.psu.edu' )
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ check_str1=folder_one.name,
+ check_str2='2.bed',
+ check_str3=admin_user.email )
+ # Check the permissions on the dataset 2.bed - they are as follows:
+ # DATASET_MANAGE_PERMISSIONS = test(a)bx.psu.edu
+ # DATASET_ACCESS = Role Two
+ # Role Two associations: test(a)bx.psu.edu and Group One
+ # Group One members: Role One, Role Two, test1(a)bx.psu.edu
+ # Role One associations: test(a)bx.psu.edu, test1(a)bx.psu.edu, test3(a)bx.psu.edu
+ # LIBRARY_ADD = Role One
+ # Role One associations: test(a)bx.psu.edu, test1(a)bx.psu.edu, test3(a)bx.psu.edu
+ # LIBRARY_MODIFY = Role One
+ # Role One associations: test(a)bx.psu.edu, test1(a)bx.psu.edu, test3(a)bx.psu.edu
+ # LIBRARY_MANAGE = Role One
+ # Role One associations: test(a)bx.psu.edu, test1(a)bx.psu.edu, test3(a)bx.psu.edu
+ self.ldda_edit_info( 'library',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_one.id ),
+ self.security.encode_id( ldda_two.id ),
+ ldda_two.name,
+ check_str1='2.bed',
+ check_str2='This is the latest version of this library dataset',
+ check_str3='Edit attributes of 2.bed' )
+ self.act_on_multiple_datasets( 'library',
+ self.security.encode_id( library_one.id ),
+ 'import_to_history',
+ ldda_ids=self.security.encode_id( ldda_two.id ),
+ check_str1='1 dataset(s) have been imported into your history' )
+ self.logout()
+ # regular_user2 should not be able to see 2.bed
+ self.login( email = 'test2(a)bx.psu.edu' )
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ not_displayed=folder_one.name,
+ not_displayed2='2.bed' )
+
+ self.logout()
+ # regular_user3 should be able to see folder_one ( even though it does not contain any datasets that she
+ # can access ) since she has Role One, and Role One has all library permissions ( see above ).
+ self.login( email = 'test3(a)bx.psu.edu' )
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ check_str1=folder_one.name,
+ not_displayed='2.bed' )
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
+ def test_045_upload_directory_of_files_from_admin_view( self ):
+ """Testing uploading a directory of files to a root folder from the Admin view"""
+ # logged in as admin_user
+ message = 'This is a test for uploading a directory of files'
+ check_str_after_submit="Added 3 datasets to the library '%s' (each is selected)." % library_one.root_folder.name
+ self.upload_directory_of_files( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( library_one.root_folder.id ),
+ server_dir='library',
+ message=message,
+ check_str_after_submit=check_str_after_submit )
+ self.browse_library( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ check_str1=admin_user.email,
+ check_str2=message )
+ def test_050_change_permissions_on_datasets_uploaded_from_library_dir( self ):
+ """Testing changing the permissions on datasets uploaded from a directory from the Admin view"""
+ # logged in as admin_user
+ # It would be nice if twill functioned such that the above test resulted in a
+ # form with the uploaded datasets selected, but it does not ( they're not checked ),
+ # so we'll have to simulate this behavior ( not ideal ) for the 'edit' action. We
+ # first need to get the ldda.id for the 3 new datasets
+ latest_3_lddas = get_latest_lddas( 3 )
+ ldda_ids = ''
+ for ldda in latest_3_lddas:
+ ldda_ids += '%s,' % self.security.encode_id( ldda.id )
+ ldda_ids = ldda_ids.rstrip( ',' )
+ # Set permissions
+ self.ldda_permissions( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_one.id ),
+ ldda_ids,
+ str( role_one.id ),
+ permissions_in=[ 'DATASET_ACCESS', 'LIBRARY_MANAGE' ],
+ check_str1='Permissions have been updated on 3 datasets' )
+ # Make sure the permissions have been correctly updated for the 3 datasets. Permissions should
+ # be all of the above on any of the 3 datasets that are imported into a history.
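+ # check_edit_page() imports each ldda into the current history and verifies the expected strings on the resulting HDA's edit attributes page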
+ def check_edit_page( lddas, check_str1='', check_str2='', check_str3='', check_str4='',
+ not_displayed1='', not_displayed2='', not_displayed3='' ):
+ for ldda in lddas:
+ # Import each library dataset into our history
+ self.act_on_multiple_datasets( 'library',
+ self.security.encode_id( library_one.id ),
+ 'import_to_history',
+ ldda_ids=self.security.encode_id( ldda.id ) )
+ # Determine the new HistoryDatasetAssociation id created when the library dataset was imported into our history
+ last_hda_created = get_latest_hda()
+ self.edit_hda_attribute_info( str( last_hda_created.id ),
+ check_str1=check_str1,
+ check_str2=check_str2,
+ check_str3=check_str3,
+ check_str4=check_str4 )
+ # admin_user is associated with role_one, so should have all permissions on imported datasets
+ check_edit_page( latest_3_lddas,
+ check_str1='Manage dataset permissions on',
+ check_str2='Role members can manage the roles associated with permissions on this dataset',
+ check_str3='Role members can import this dataset into their history for analysis' )
+ self.logout()
+ # regular_user1 is associated with role_one, so should have all permissions on imported datasets
+ self.login( email='test1(a)bx.psu.edu' )
+ check_edit_page( latest_3_lddas )
+ self.logout()
+ # Since regular_user2 is not associated with role_one, she should not have
+ # access to any of the 3 datasets, so she will not see folder_one on the libraries page
+ self.login( email='test2(a)bx.psu.edu' )
+ self.browse_library( 'library',
+ self.security.encode_id( library_one.id ),
+ not_displayed=folder_one.name )
+ self.logout()
+ # regular_user3 is associated with role_one, so should have all permissions on imported datasets
+ self.login( email='test3(a)bx.psu.edu' )
+ check_edit_page( latest_3_lddas )
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
+ # Change the permissions and test again
+ self.ldda_permissions( 'library_admin',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( folder_one.id ),
+ ldda_ids,
+ str( role_one.id ),
+ permissions_in=[ 'DATASET_ACCESS' ],
+ check_str1='Permissions have been updated on 3 datasets' )
+ check_edit_page( latest_3_lddas,
+ check_str1='View Permissions',
+ not_displayed1='Manage dataset permissions on',
+ not_displayed2='Role members can manage roles associated with permissions on this library item',
+ not_displayed3='Role members can import this dataset into their history for analysis' )
+ def test_055_library_permissions( self ):
+ """Test library permissions"""
+ # Logged in as admin_user
+ form_name = 'Library template Form One'
+ form_desc = 'This is Form One'
+ form_type = galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+ # Create form for library template
+ self.create_form( name=form_name, desc=form_desc, formtype=form_type )
+ global form_one
+ form_one = get_form( form_name )
+ assert form_one is not None, 'Problem retrieving form named (%s) from the database' % form_name
+ # Make sure the template fields are displayed on the library information page
+ field_dict = form_one.fields[ 0 ]
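+ # Capture the label, help text and required flag of the template's first field; the label is used below when building template contents strings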
+ global form_one_field_label
+ form_one_field_label = '%s' % str( field_dict.get( 'label', 'Field 0' ) )
+ global form_one_field_help
+ form_one_field_help = '%s' % str( field_dict.get( 'helptext', 'Field 0 help' ) )
+ global form_one_field_required
+ form_one_field_required = '%s' % str( field_dict.get( 'required', 'optional' ) ).capitalize()
+ # Add information to the library using the template
+ global form_one_field_name
+ form_one_field_name = 'field_0'
+ # Create a library, adding no template
+ name = "library security Library Two"
+ description = "library security This is Library Two"
+ synopsis = "library security Library Two synopsis"
+ self.create_library( name=name, description=description, synopsis=synopsis )
+ self.browse_libraries_admin( check_str1=name, check_str2=description )
+ global library_two
+ library_two = get_library( name, description, synopsis )
+ assert library_two is not None, 'Problem retrieving library named "%s" from the database' % name
+ # Set library permissions for regular_user1 and regular_user2. Each of these users will be permitted to
+ # LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE for library items.
+ permissions_in = [ k for k, v in galaxy.model.Library.permitted_actions.items() ]
+ permissions_out = []
+ role_ids_str = '%s,%s' % ( str( regular_user1_private_role.id ), str( regular_user2_private_role.id ) )
+ self.library_permissions( self.security.encode_id( library_two.id ),
+ library_two.name,
+ role_ids_str,
+ permissions_in,
+ permissions_out )
+ self.logout()
+ # Login as regular_user1 and make sure they can see the library
+ self.login( email=regular_user1.email )
+ self.browse_libraries_regular_user( check_str1=name )
+ self.logout()
+ # Login as regular_user2 and make sure they can see the library
+ self.login( email=regular_user2.email )
+ self.browse_libraries_regular_user( check_str1=name )
+ # Add a dataset to the library
+ message = 'Testing adding 1.bed to Library Two root folder'
+ self.add_library_dataset( 'library',
+ '1.bed',
+ self.security.encode_id( library_two.id ),
+ self.security.encode_id( library_two.root_folder.id ),
+ library_two.root_folder.name,
+ file_type='bed',
+ dbkey='hg18',
+ message=message,
+ root=True )
+ # Add a folder to the library
+ name = "Root Folder's Folder X"
+ description = "This is the root folder's Folder X"
+ self.add_folder( 'library',
+ self.security.encode_id( library_two.id ),
+ self.security.encode_id( library_two.root_folder.id ),
+ name=name,
+ description=description )
+ global folder_x
+ folder_x = get_folder( library_two.root_folder.id, name, description )
+ # Add an information template to the folder
+ template_name = 'Folder Template 1'
+ self.add_library_template( 'library',
+ 'folder',
+ self.security.encode_id( library_one.id ),
+ self.security.encode_id( form_one.id ),
+ form_one.name,
+ folder_id=self.security.encode_id( folder_x.id ) )
+ # Modify the folder's information
+ contents = '%s folder contents' % form_one_field_label
+ new_name = "Root Folder's Folder Y"
+ new_description = "This is the root folder's Folder Y"
+ self.folder_info( 'library',
+ self.security.encode_id( folder_x.id ),
+ self.security.encode_id( library_two.id ),
+ name,
+ new_name,
+ new_description,
+ contents=contents,
+ field_name=form_one_field_name )
+ # Twill barfs when self.check_page_for_string() is called after dealing with an information template,
+ # the exception is: TypeError: 'str' object is not callable
+ # The work-around is to end this method so any such calls are made in the next method.
+ def test_060_template_features_and_permissions( self ):
+ """Test library template and more permissions behavior from the Data Libraries view"""
+ # Logged in as regular_user2
+ refresh( folder_x )
+ # Add a dataset to the folder
+ message = 'Testing adding 2.bed to Library Three root folder'
+ self.add_library_dataset( 'library',
+ '2.bed',
+ self.security.encode_id( library_two.id ),
+ self.security.encode_id( folder_x.id ),
+ folder_x.name,
+ file_type='bed',
+ dbkey='hg18',
+ message=message.replace( ' ', '+' ),
+ root=False )
+ global ldda_x
+ ldda_x = get_latest_ldda()
+ assert ldda_x is not None, 'Problem retrieving ldda_x from the database'
+ # Add an information template to the library
+ template_name = 'Library Template 3'
+ self.add_library_template( 'library',
+ 'library',
+ self.security.encode_id( library_two.id ),
+ self.security.encode_id( form_one.id ),
+ form_one.name )
+ # Add information to the library using the template
+ contents = '%s library contents' % form_one_field_label
+ self.visit_url( '%s/library_common/library_info?cntrller=library&id=%s' % ( self.url, self.security.encode_id( library_two.id ) ) )
+ # There are 2 forms on this page and the template is the 2nd form
+ tc.fv( '2', form_one_field_name, contents )
+ tc.submit( 'edit_info_button' )
+ # For some reason, the following check:
+ # self.check_page_for_string ( 'The information has been updated.' )
+ # ...throws the following exception - I have no idea why!
+ # TypeError: 'str' object is not callable
+ # The work-around is to not make ANY self.check_page_for_string() calls until the next method
+ def test_065_permissions_as_different_regular_user( self ):
+ """Test library template and more permissions behavior from the Data Libraries view as a different user"""
+ # Logged in as regular_user2
+ self.logout()
+ self.login( email=regular_user1.email )
+ self.browse_library( 'library',
+ self.security.encode_id( library_two.id ),
+ check_str1=ldda_x.name )
+ def test_999_reset_data_for_later_test_runs( self ):
+ """Reseting data to enable later test runs to pass"""
+ # Logged in as regular_user1
+ self.logout()
+ self.login( email=admin_user.email )
+ ##################
+ # Purge all libraries
+ ##################
+ for library in [ library_one, library_two ]:
+ self.delete_library_item( self.security.encode_id( library.id ),
+ self.security.encode_id( library.id ),
+ library.name,
+ item_type='library' )
+ self.purge_library( self.security.encode_id( library.id ), library.name )
+ ##################
+ # Eliminate all non-private roles
+ ##################
+ for role in [ role_one, role_two ]:
+ self.mark_role_deleted( self.security.encode_id( role.id ), role.name )
+ self.purge_role( self.security.encode_id( role.id ), role.name )
+ # Manually delete the role from the database
+ refresh( role )
+ sa_session.delete( role )
+ sa_session.flush()
+ ##################
+ # Eliminate all groups
+ ##################
+ for group in [ group_one ]:
+ self.mark_group_deleted( self.security.encode_id( group.id ), group.name )
+ self.purge_group( self.security.encode_id( group.id ), group.name )
+ # Manually delete the group from the database
+ refresh( group )
+ sa_session.delete( group )
+ sa_session.flush()
+ ##################
+ # Make sure all users are associated only with their private roles
+ ##################
+ for user in [ admin_user, regular_user1, regular_user2, regular_user3 ]:
+ refresh( user )
+ if len( user.roles) != 1:
+ raise AssertionError( '%d UserRoleAssociations are associated with %s ( should be 1 )' % ( len( user.roles ), user.email ) )
diff -r e39c9a2a0b4c -r 48e83411aa91 test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Fri Mar 12 14:27:04 2010 -0500
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,2141 +0,0 @@
-import galaxy.model
-from galaxy.model.orm import *
-from galaxy.model.mapping import context as sa_session
-from base.twilltestcase import *
-
-not_logged_in_security_msg = 'You must be logged in as an administrator to access this feature.'
-logged_in_security_msg = 'You must be an administrator to access this feature.'
-
-import sys
-class TestSecurityAndLibraries( TwillTestCase ):
- def test_000_admin_features_when_not_logged_in( self ):
- """Testing admin_features when not logged in"""
- self.logout()
- self.visit_url( "%s/admin" % self.url )
- self.check_page_for_string( not_logged_in_security_msg )
- self.visit_url( "%s/admin/reload_tool?tool_id=upload1" % self.url )
- self.check_page_for_string( not_logged_in_security_msg )
- self.visit_url( "%s/admin/roles" % self.url )
- self.check_page_for_string( not_logged_in_security_msg )
- self.visit_url( "%s/admin/create_role" % self.url )
- self.check_page_for_string( not_logged_in_security_msg )
- self.visit_url( "%s/admin/create_role" % self.url )
- self.check_page_for_string( not_logged_in_security_msg )
- self.visit_url( "%s/admin/manage_users_and_groups_for_role" % self.url )
- self.check_page_for_string( not_logged_in_security_msg )
- self.visit_url( "%s/admin/groups" % self.url )
- self.check_page_for_string( not_logged_in_security_msg )
- self.visit_url( "%s/admin/create_group" % self.url )
- self.check_page_for_string( not_logged_in_security_msg )
- self.check_page_for_string( not_logged_in_security_msg )
- self.visit_url( "%s/admin/users" % self.url )
- self.check_page_for_string( not_logged_in_security_msg )
- def test_005_login_as_admin_user( self ):
- """Testing logging in as an admin user test(a)bx.psu.edu - tests initial settings for DefaultUserPermissions and DefaultHistoryPermissions"""
- self.login( email='test(a)bx.psu.edu' ) # test(a)bx.psu.edu is configured as our admin user
- self.visit_page( "admin" )
- self.check_page_for_string( 'Administration' )
- global admin_user
- admin_user = sa_session.query( galaxy.model.User ) \
- .filter( galaxy.model.User.table.c.email=='test(a)bx.psu.edu' ) \
- .first()
- assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
- # Get the admin user's private role for later use
- global admin_user_private_role
- admin_user_private_role = None
- for role in admin_user.all_roles():
- if role.name == admin_user.email and role.description == 'Private Role for %s' % admin_user.email:
- admin_user_private_role = role
- break
- if not admin_user_private_role:
- raise AssertionError( "Private role not found for user '%s'" % admin_user.email )
- # Make sure DefaultUserPermissions are correct
- if len( admin_user.default_permissions ) > 1:
- raise AssertionError( '%d DefaultUserPermissions associated with user %s ( should be 1 )' \
- % ( len( admin_user.default_permissions ), admin_user.email ) )
- dup = sa_session.query( galaxy.model.DefaultUserPermissions ) \
- .filter( galaxy.model.DefaultUserPermissions.table.c.user_id==admin_user.id ) \
- .first()
- if not dup.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
- raise AssertionError( 'The DefaultUserPermission.action for user "%s" is "%s", but it should be "%s"' \
- % ( admin_user.email, dup.action, galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
- # Make sure DefaultHistoryPermissions are correct
- # Logged in as admin_user
- latest_history = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
- if len( latest_history.default_permissions ) > 1:
- raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d when it was created ( should have been 1 )' \
- % ( len( latest_history.default_permissions ), latest_history.id ) )
- dhp = sa_session.query( galaxy.model.DefaultHistoryPermissions ) \
- .filter( galaxy.model.DefaultHistoryPermissions.table.c.history_id==latest_history.id ) \
- .first()
- if not dhp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
- raise AssertionError( 'The DefaultHistoryPermission.action for history id %d is "%s", but it should be "%s"' \
- % ( latest_history.id, dhp.action, galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
- self.home()
- self.visit_url( "%s/admin/manage_roles_and_groups_for_user?id=%s" % ( self.url, self.security.encode_id( admin_user.id ) ) )
- self.check_page_for_string( admin_user.email )
- # Try deleting the admin_user's private role
- check_str = "You cannot eliminate a user's private role association."
- self.associate_roles_and_groups_with_user( self.security.encode_id( admin_user.id ), admin_user.email,
- out_role_ids=str( admin_user_private_role.id ),
- check_str=check_str )
- self.logout()
- def test_010_login_as_regular_user1( self ):
- """Testing logging in as regular user test1(a)bx.psu.edu - tests private role creation and changing DefaultHistoryPermissions for new histories"""
- # Some of the history related tests here are similar to some tests in the
- # test_history_functions.py script, so we could potentially eliminate 1 or 2 of them.
- self.login( email='test1(a)bx.psu.edu' ) # test1(a)bx.psu.edu is not an admin user
- global regular_user1
- regular_user1 = sa_session.query( galaxy.model.User ) \
- .filter( galaxy.model.User.table.c.email=='test1(a)bx.psu.edu' ) \
- .first()
- assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
- self.visit_page( "admin" )
- self.check_page_for_string( logged_in_security_msg )
- # Make sure a private role exists for regular_user1
- private_role = None
- for role in regular_user1.all_roles():
- if role.name == regular_user1.email and role.description == 'Private Role for %s' % regular_user1.email:
- private_role = role
- break
- if not private_role:
- raise AssertionError( "Private role not found for user '%s'" % regular_user1.email )
- global regular_user1_private_role
- regular_user1_private_role = private_role
- # Add a dataset to the history
- self.upload_file( '1.bed' )
- latest_dataset = sa_session.query( galaxy.model.Dataset ) \
- .order_by( desc( galaxy.model.Dataset.table.c.create_time ) ) \
- .first()
- # Make sure DatasetPermissions is correct - default is 'manage permissions'
- if len( latest_dataset.actions ) > 1:
- actions = [ a.action for a in latest_dataset.actions ]
- raise AssertionError( '%d DatasetPermissions (%s) were created for dataset id %d when it was created ( should have been 1 )' \
- % ( len( latest_dataset.actions ), str( actions ), latest_dataset.id ) )
- dp = sa_session.query( galaxy.model.DatasetPermissions ) \
- .filter( galaxy.model.DatasetPermissions.table.c.dataset_id==latest_dataset.id ) \
- .first()
- if not dp.action:
- raise AssertionError( 'The Dataset id %d has no associated DatasetPermissions when is should have "manage permissions".' \
- % latest_dataset.id )
- elif not dp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
- raise AssertionError( 'The DatasetPermissions.action for dataset id %d is "%s", but it should be "manage permissions"' \
- % ( latest_dataset.id, dp.action ) )
- # Change DefaultHistoryPermissions for regular_user1
- permissions_in = []
- actions_in = []
- for key, value in galaxy.model.Dataset.permitted_actions.items():
- # NOTE: setting the 'access' permission with the private role makes this dataset private
- permissions_in.append( key )
- actions_in.append( value.action )
- # Sort actions for later comparison
- actions_in.sort()
- role_id = str( private_role.id )
- self.user_set_default_permissions( permissions_in=permissions_in, role_id=role_id )
- # Make sure the default permissions are changed for new histories
- self.new_history()
- # logged in as regular_user1
- latest_history = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==regular_user1.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
- if len( latest_history.default_permissions ) != len( galaxy.model.Dataset.permitted_actions.items() ):
- raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d, should have been %d' % \
- ( len( latest_history.default_permissions ), latest_history.id, len( galaxy.model.Dataset.permitted_actions.items() ) ) )
- dhps = []
- for dhp in latest_history.default_permissions:
- dhps.append( dhp.action )
- # Sort permissions for later comparison
- dhps.sort()
- for key, value in galaxy.model.Dataset.permitted_actions.items():
- if value.action not in dhps:
- raise AssertionError( '%s not in history id %d default_permissions after they were changed' % ( value.action, latest_history.id ) )
- # Add a dataset to the history
- self.upload_file( '1.bed' )
- latest_dataset = sa_session.query( galaxy.model.Dataset ).order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
- # Make sure DatasetPermissionss are correct
- if len( latest_dataset.actions ) != len( latest_history.default_permissions ):
- raise AssertionError( '%d DatasetPermissionss were created for dataset id %d when it was created ( should have been %d )' % \
- ( len( latest_dataset.actions ), latest_dataset.id, len( latest_history.default_permissions ) ) )
- dps = []
- for dp in latest_dataset.actions:
- dps.append( dp.action )
- # Sort actions for later comparison
- dps.sort()
- # Compare DatasetPermissions with permissions_in - should be the same
- if dps != actions_in:
- raise AssertionError( 'DatasetPermissionss "%s" for dataset id %d differ from changed default permissions "%s"' \
- % ( str( dps ), latest_dataset.id, str( actions_in ) ) )
- # Compare DefaultHistoryPermissions and DatasetPermissionss - should be the same
- if dps != dhps:
- raise AssertionError( 'DatasetPermissionss "%s" for dataset id %d differ from DefaultHistoryPermissions "%s" for history id %d' \
- % ( str( dps ), latest_dataset.id, str( dhps ), latest_history.id ) )
- self.logout()
-
- def test_015_login_as_regular_user2( self ):
- """Testing logging in as regular user test2(a)bx.psu.edu - tests changing DefaultHistoryPermissions for the current history"""
- email = 'test2(a)bx.psu.edu'
- self.login( email=email ) # This will not be an admin user
- global regular_user2
- regular_user2 = sa_session.query( galaxy.model.User ) \
- .filter( galaxy.model.User.table.c.email==email ) \
- .first()
- assert regular_user2 is not None, 'Problem retrieving user with email "%s" from the database' % email
- # Logged in as regular_user2
- latest_history = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==regular_user2.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
- self.upload_file( '1.bed' )
- latest_dataset = sa_session.query( galaxy.model.Dataset ).order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
- permissions_in = [ 'DATASET_MANAGE_PERMISSIONS' ]
- # Make sure these are in sorted order for later comparison
- actions_in = [ 'manage permissions' ]
- permissions_out = [ 'DATASET_ACCESS' ]
- actions_out = [ 'access' ]
- global regular_user2_private_role
- regular_user2_private_role = None
- for role in regular_user2.all_roles():
- if role.name == regular_user2.email and role.description == 'Private Role for %s' % regular_user2.email:
- regular_user2_private_role = role
- break
- if not regular_user2_private_role:
- raise AssertionError( "Private role not found for user '%s'" % regular_user2.email )
- role_id = str( regular_user2_private_role.id )
- # Change DefaultHistoryPermissions for the current history
- self.history_set_default_permissions( permissions_out=permissions_out, permissions_in=permissions_in, role_id=role_id )
- if len( latest_history.default_permissions ) != len( actions_in ):
- raise AssertionError( '%d DefaultHistoryPermissions were created for history id %d, should have been %d' \
- % ( len( latest_history.default_permissions ), latest_history.id, len( permissions_in ) ) )
- # Make sure DefaultHistoryPermissions were correctly changed for the current history
- dhps = []
- for dhp in latest_history.default_permissions:
- dhps.append( dhp.action )
- # Sort permissions for later comparison
- dhps.sort()
- # Compare DefaultHistoryPermissions and actions_in - should be the same
- if dhps != actions_in:
- raise AssertionError( 'DefaultHistoryPermissions "%s" for history id %d differ from actions "%s" passed for changing' \
- % ( str( dhps ), latest_history.id, str( actions_in ) ) )
- # Make sure DatasetPermissionss are correct
- if len( latest_dataset.actions ) != len( latest_history.default_permissions ):
- raise AssertionError( '%d DatasetPermissionss were created for dataset id %d when it was created ( should have been %d )' \
- % ( len( latest_dataset.actions ), latest_dataset.id, len( latest_history.default_permissions ) ) )
- dps = []
- for dp in latest_dataset.actions:
- dps.append( dp.action )
- # Sort actions for comparison
- dps.sort()
- # Compare DatasetPermissionss and DefaultHistoryPermissions - should be the same
- if dps != dhps:
- raise AssertionError( 'DatasetPermissionss "%s" for dataset id %d differ from DefaultHistoryPermissions "%s"' \
- % ( str( dps ), latest_dataset.id, str( dhps ) ) )
- self.logout()
- def test_020_create_new_user_account_as_admin( self ):
- """Testing creating a new user account as admin"""
- self.login( email=admin_user.email )
- email = 'test3(a)bx.psu.edu'
- password = 'testuser'
- previously_created = self.create_new_account_as_admin( email=email, password=password )
- # Get the user object for later tests
- global regular_user3
- regular_user3 = sa_session.query( galaxy.model.User ).filter( galaxy.model.User.table.c.email==email ).first()
- assert regular_user3 is not None, 'Problem retrieving user with email "%s" from the database' % email
- global regular_user3_private_role
- regular_user3_private_role = None
- for role in regular_user3.all_roles():
- if role.name == regular_user3.email and role.description == 'Private Role for %s' % regular_user3.email:
- regular_user3_private_role = role
- break
- if not regular_user3_private_role:
- raise AssertionError( "Private role not found for user '%s'" % regular_user3.email )
- # Make sure DefaultUserPermissions were created
- if not regular_user3.default_permissions:
- raise AssertionError( 'No DefaultUserPermissions were created for user %s when the admin created the account' % email )
- # Make sure a private role was created for the user
- if not regular_user3.roles:
- raise AssertionError( 'No UserRoleAssociations were created for user %s when the admin created the account' % email )
- if not previously_created and len( regular_user3.roles ) != 1:
- raise AssertionError( '%d UserRoleAssociations were created for user %s when the admin created the account ( should have been 1 )' \
- % ( len( regular_user3.roles ), regular_user3.email ) )
- for ura in regular_user3.roles:
- role = sa_session.query( galaxy.model.Role ).get( ura.role_id )
- if not previously_created and role.type != 'private':
- raise AssertionError( 'Role created for user %s when the admin created the account is not private, type is %s' \
- % ( regular_user3.email, str( role.type ) ) )
- if not previously_created:
- # Make sure a history was not created ( previous test runs may have left deleted histories )
- histories = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.user_id==regular_user3.id,
- galaxy.model.History.table.c.deleted==False ) ) \
- .all()
- if histories:
- raise AssertionError( 'Histories were incorrectly created for user %s when the admin created the account' % email )
- # Make sure the user was not associated with any groups
- if regular_user3.groups:
- raise AssertionError( 'Groups were incorrectly associated with user %s when the admin created the account' % email )
- def test_025_reset_password_as_admin( self ):
- """Testing reseting a user password as admin"""
- email = 'test3(a)bx.psu.edu'
- self.reset_password_as_admin( user_id=self.security.encode_id( regular_user3.id ), password='testreset' )
- self.logout()
- def test_030_login_after_password_reset( self ):
- """Testing logging in after an admin reset a password - tests DefaultHistoryPermissions for accounts created by an admin"""
- self.login( email='test3(a)bx.psu.edu', password='testreset' )
- # Make sure a History and HistoryDefaultPermissions exist for the user
- # Logged in as regular_user3
- latest_history = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==regular_user3.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
- if not latest_history.user_id == regular_user3.id:
- raise AssertionError( 'A history was not created for user %s when he logged in' % regular_user3.email )
- if not latest_history.default_permissions:
- raise AssertionError( 'No DefaultHistoryPermissions were created for history id %d when it was created' % latest_history.id )
- if len( latest_history.default_permissions ) > 1:
- raise AssertionError( 'More than 1 DefaultHistoryPermissions were created for history id %d when it was created' % latest_history.id )
- dhp = sa_session.query( galaxy.model.DefaultHistoryPermissions ) \
- .filter( galaxy.model.DefaultHistoryPermissions.table.c.history_id==latest_history.id ) \
- .first()
- if not dhp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
- raise AssertionError( 'The DefaultHistoryPermission.action for history id %d is "%s", but it should be "manage permissions"' \
- % ( latest_history.id, dhp.action ) )
- # Upload a file to create a HistoryDatasetAssociation
- self.upload_file( '1.bed' )
- latest_dataset = sa_session.query( galaxy.model.Dataset ).order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
- for dp in latest_dataset.actions:
- # Should only have 1 DatasetPermissions
- if dp.action != galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
- raise AssertionError( 'The DatasetPermissions for dataset id %d is %s ( should have been %s )' \
- % ( latest_dataset.id,
- dp.action,
- galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action ) )
- self.logout()
- # Reset the password to the default for later tests
- self.login( email='test(a)bx.psu.edu' )
- self.reset_password_as_admin( user_id=self.security.encode_id( regular_user3.id ), password='testuser' )
- def test_035_mark_user_deleted( self ):
- """Testing marking a user account as deleted"""
- # Logged in as admin_user
- self.mark_user_deleted( user_id=self.security.encode_id( regular_user3.id ), email=regular_user3.email )
- # Deleting a user should not delete any associations
- sa_session.refresh( regular_user3 )
- if not regular_user3.active_histories:
- raise AssertionError( 'HistoryDatasetAssociations for regular_user3 were incorrectly deleted when the user was marked deleted' )
- def test_040_undelete_user( self ):
- """Testing undeleting a user account"""
- # Logged in as admin_user
- self.undelete_user( user_id=self.security.encode_id( regular_user3.id ), email=regular_user3.email )
- def test_045_create_role( self ):
- """Testing creating new role with 3 members ( and a new group named the same ), then renaming the role"""
- # Logged in as admin_user
- name = 'Role One'
- description = "This is Role Ones description"
- user_ids=[ str( admin_user.id ), str( regular_user1.id ), str( regular_user3.id ) ]
- self.create_role( name=name,
- description=description,
- in_user_ids=user_ids,
- in_group_ids=[],
- create_group_for_role='yes',
- private_role=admin_user.email )
- # Get the role object for later tests
- global role_one
- role_one = sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name==name ).first()
- assert role_one is not None, 'Problem retrieving role named "Role One" from the database'
- # Make sure UserRoleAssociations are correct
- if len( role_one.users ) != len( user_ids ):
- raise AssertionError( '%d UserRoleAssociations were created for role id %d when it was created ( should have been %d )' \
- % ( len( role_one.users ), role_one.id, len( user_ids ) ) )
- # Each of the following users should now have 2 role associations, their private role and role_one
- for user in [ admin_user, regular_user1, regular_user3 ]:
- sa_session.refresh( user )
- if len( user.roles ) != 2:
- raise AssertionError( '%d UserRoleAssociations are associated with user %s ( should be 2 )' \
- % ( len( user.roles ), user.email ) )
- # Make sure the group was created
- self.home()
- self.visit_page( 'admin/groups' )
- self.check_page_for_string( name )
- global group_zero
- group_zero = sa_session.query( galaxy.model.Group ).filter( galaxy.model.Group.table.c.name==name ).first()
- # Rename the role
- rename = "Role One's been Renamed"
- new_description="This is Role One's Re-described"
- self.rename_role( self.security.encode_id( role_one.id ), name=rename, description=new_description )
- self.home()
- self.visit_page( 'admin/roles' )
- self.check_page_for_string( rename )
- self.check_page_for_string( new_description )
- # Reset the role back to the original name and description
- self.rename_role( self.security.encode_id( role_one.id ), name=name, description=description )
- def test_050_create_group( self ):
- """Testing creating new group with 3 members and 1 associated role, then renaming it"""
- # Logged in as admin_user
- name = "Group One's Name"
- user_ids=[ str( admin_user.id ), str( regular_user1.id ), str( regular_user3.id ) ]
- role_ids=[ str( role_one.id ) ]
- self.create_group( name=name, in_user_ids=user_ids, in_role_ids=role_ids )
- # Get the group object for later tests
- global group_one
- group_one = sa_session.query( galaxy.model.Group ).filter( galaxy.model.Group.table.c.name==name ).first()
- assert group_one is not None, 'Problem retrieving group named "Group One" from the database'
- # Make sure UserGroupAssociations are correct
- if len( group_one.users ) != len( user_ids ):
- raise AssertionError( '%d UserGroupAssociations were created for group id %d when it was created ( should have been %d )' \
- % ( len( group_one.users ), group_one.id, len( user_ids ) ) )
- # Each user should now have 1 group association, group_one
- for user in [ admin_user, regular_user1, regular_user3 ]:
- sa_session.refresh( user )
- if len( user.groups ) != 1:
- raise AssertionError( '%d UserGroupAssociations are associated with user %s ( should be 1 )' % ( len( user.groups ), user.email ) )
- # Make sure GroupRoleAssociations are correct
- if len( group_one.roles ) != len( role_ids ):
- raise AssertionError( '%d GroupRoleAssociations were created for group id %d when it was created ( should have been %d )' \
- % ( len( group_one.roles ), group_one.id, len( role_ids ) ) )
- # Rename the group
- rename = "Group One's been Renamed"
- self.rename_group( self.security.encode_id( group_one.id ), name=rename, )
- self.home()
- self.visit_page( 'admin/groups' )
- self.check_page_for_string( rename )
- # Reset the group back to the original name
- self.rename_group( self.security.encode_id( group_one.id ), name=name )
- def test_055_add_members_and_role_to_group( self ):
- """Testing editing user membership and role associations of an existing group"""
- # Logged in as admin_user
- name = 'Group Two'
- self.create_group( name=name, in_user_ids=[], in_role_ids=[] )
- # Get the group object for later tests
- global group_two
- group_two = sa_session.query( galaxy.model.Group ).filter( galaxy.model.Group.table.c.name==name ).first()
- assert group_two is not None, 'Problem retrieving group named "Group Two" from the database'
- # group_two should have no associations
- if group_two.users:
- raise AssertionError( '%d UserGroupAssociations were created for group id %d when it was created ( should have been 0 )' \
- % ( len( group_two.users ), group_two.id ) )
- if group_two.roles:
- raise AssertionError( '%d GroupRoleAssociations were created for group id %d when it was created ( should have been 0 )' \
- % ( len( group_two.roles ), group_two.id ) )
- user_ids = [ str( regular_user1.id ) ]
- role_ids = [ str( role_one.id ) ]
- self.associate_users_and_roles_with_group( self.security.encode_id( group_two.id ),
- group_two.name,
- user_ids=user_ids,
- role_ids=role_ids )
- def test_060_create_role_with_user_and_group_associations( self ):
- """Testing creating a role with user and group associations"""
- # Logged in as admin_user
- # NOTE: To get this to work with twill, all select lists on the ~/admin/role page must contain at least
- # 1 option value or twill throws an exception, which is: ParseError: OPTION outside of SELECT
- # Due to this bug in twill, we create the role, we bypass the page and visit the URL in the
- # associate_users_and_groups_with_role() method.
- name = 'Role Two'
- description = 'This is Role Two'
- user_ids=[ str( admin_user.id ) ]
- group_ids=[ str( group_two.id ) ]
- private_role=admin_user.email
- # Create the role
- self.create_role( name=name,
- description=description,
- in_user_ids=user_ids,
- in_group_ids=group_ids,
- private_role=private_role )
- # Get the role object for later tests
- global role_two
- role_two = sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name==name ).first()
- assert role_two is not None, 'Problem retrieving role named "Role Two" from the database'
- # Make sure UserRoleAssociations are correct
- if len( role_two.users ) != len( user_ids ):
- raise AssertionError( '%d UserRoleAssociations were created for role id %d when it was created with %d members' \
- % ( len( role_two.users ), role_two.id, len( user_ids ) ) )
- # admin_user should now have 3 role associations, private role, role_one, role_two
- sa_session.refresh( admin_user )
- if len( admin_user.roles ) != 3:
- raise AssertionError( '%d UserRoleAssociations are associated with user %s ( should be 3 )' % ( len( admin_user.roles ), admin_user.email ) )
- # Make sure GroupRoleAssociations are correct
- sa_session.refresh( role_two )
- if len( role_two.groups ) != len( group_ids ):
- raise AssertionError( '%d GroupRoleAssociations were created for role id %d when it was created ( should have been %d )' \
- % ( len( role_two.groups ), role_two.id, len( group_ids ) ) )
- # group_two should now be associated with 2 roles: role_one, role_two
- sa_session.refresh( group_two )
- if len( group_two.roles ) != 2:
- raise AssertionError( '%d GroupRoleAssociations are associated with group id %d ( should be 2 )' % ( len( group_two.roles ), group_two.id ) )
- def test_065_change_user_role_associations( self ):
- """Testing changing roles associated with a user"""
- # Logged in as admin_user
- # Create a new role with no associations
- name = 'Role Three'
- description = 'This is Role Three'
- user_ids=[]
- group_ids=[]
- private_role=admin_user.email
- self.create_role( name=name,
- description=description,
- in_user_ids=user_ids,
- in_group_ids=group_ids,
- private_role=private_role )
- # Get the role object for later tests
- global role_three
- role_three = sa_session.query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name==name ).first()
- assert role_three is not None, 'Problem retrieving role named "Role Three" from the database'
- # Associate the role with a user
- sa_session.refresh( admin_user )
- role_ids = []
- for ura in admin_user.non_private_roles:
- role_ids.append( str( ura.role_id ) )
- role_ids.append( str( role_three.id ) )
- group_ids = []
- for uga in admin_user.groups:
- group_ids.append( str( uga.group_id ) )
- check_str = "User '%s' has been updated with %d associated roles and %d associated groups" % ( admin_user.email, len( role_ids ), len( group_ids ) )
- self.associate_roles_and_groups_with_user( self.security.encode_id( admin_user.id ),
- str( admin_user.email ),
details: http://www.bx.psu.edu/hg/galaxy/rev/e39c9a2a0b4c
changeset: 3527:e39c9a2a0b4c
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Fri Mar 12 14:27:04 2010 -0500
description:
UI Improvements:
-Make it possible to show messages in frames.
-Improve import interactions by showing messages in frames and providing useful links.
-Add action icons to display framework.
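In short, each import action now builds a fallback link from the HTTP referer (or from Galaxy's start page when no referer is available) and renders its message inside the panel layout via the new use_panels flag. A minimal sketch of that pattern, using only names that appear in the diff below ( trans.request.referer, url_for, trans.show_error_message ); the surrounding controller class is assumed:

def imp( self, trans, dataset_id=None, **kwd ):
    # Build a fallback link: the referring page if there is one, Galaxy's start page otherwise.
    referer = trans.request.referer
    if referer:
        referer_message = "<a href='%s'>return to the previous page</a>" % referer
    else:
        referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
    if not dataset_id:
        # use_panels=True renders the message inside the full panel layout rather than a bare page.
        return trans.show_error_message(
            "You must specify a dataset to import. You can %s." % referer_message, use_panels=True )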
diffstat:
lib/galaxy/web/controllers/dataset.py | 16 +++++++++---
lib/galaxy/web/controllers/history.py | 35 ++++++++++++++++++---------
lib/galaxy/web/controllers/workflow.py | 22 +++++++++++++----
lib/galaxy/web/framework/__init__.py | 16 ++++++------
templates/dataset/display.mako | 10 ++++---
templates/display_base.mako | 7 +++--
templates/display_common.mako | 10 ++++++++
templates/grid_base.mako | 13 +--------
templates/grid_base_async.mako | 3 +-
templates/history/display.mako | 11 +--------
templates/message.mako | 36 +++++++++++++++++++++++++++-
templates/page/select_items_grid_async.mako | 3 +-
templates/workflow/display.mako | 6 +----
13 files changed, 122 insertions(+), 66 deletions(-)
diffs (411 lines):
diff -r f8c305ba23cb -r e39c9a2a0b4c lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Fri Mar 12 12:43:31 2010 -0500
+++ b/lib/galaxy/web/controllers/dataset.py Fri Mar 12 14:27:04 2010 -0500
@@ -418,17 +418,25 @@
return self.stored_list_grid( trans, status=status, message=message, **kwargs )
@web.expose
- def imp( self, trans, id=None, **kwd ):
+ def imp( self, trans, dataset_id=None, **kwd ):
""" Import another user's dataset via a shared URL; dataset is added to user's current history. """
msg = ""
+ # Set referer message.
+ referer = trans.request.referer
+ if referer is not "":
+ referer_message = "<a href='%s'>return to the previous page</a>" % referer
+ else:
+ referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
+
# Error checking.
- if not id:
- return trans.show_error_message( "You must specify an ID for a dataset to import." )
+ if not dataset_id:
+ return trans.show_error_message( "You must specify a dataset to import. You can %s." % referer_message, use_panels=True )
# Do import.
cur_history = trans.get_history( create=True )
- status, message = self._copy_datasets( trans, [ id ], [ cur_history ] )
+ status, message = self._copy_datasets( trans, [ dataset_id ], [ cur_history ] )
+ message = message + "<br>You can <a href='%s'>start using the dataset</a> or %s." % ( url_for('/'), referer_message )
return trans.show_message( message, type=status )
@web.expose
diff -r f8c305ba23cb -r e39c9a2a0b4c lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Fri Mar 12 12:43:31 2010 -0500
+++ b/lib/galaxy/web/controllers/history.py Fri Mar 12 14:27:04 2010 -0500
@@ -505,16 +505,27 @@
msg = ""
user = trans.get_user()
user_history = trans.get_history()
+ # Set referer message
+ if 'referer' in kwd:
+ referer = kwd['referer']
+ else:
+ referer = trans.request.referer
+ if referer is not "":
+ referer_message = "<a href='%s'>return to the previous page</a>" % referer
+ else:
+ referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
+
+ # Do import.
if not id:
- return trans.show_error_message( "You must specify a history you want to import." )
+ return trans.show_error_message( "You must specify a history you want to import.<br>You can %s." % referer_message, use_panels=True )
import_history = self.get_history( trans, id, check_ownership=False )
if not import_history:
- return trans.show_error_message( "The specified history does not exist.")
+ return trans.show_error_message( "The specified history does not exist.<br>You can %s." % referer_message, use_panels=True )
if not import_history.importable:
- error( "The owner of this history has disabled imports via this link." )
+ return trans.show_error_message( "The owner of this history has disabled imports via this link.<br>You can %s." % referer_message, use_panels=True )
if user:
if import_history.user_id == user.id:
- return trans.show_error_message( "You cannot import your own history." )
+ return trans.show_error_message( "You cannot import your own history.<br>You can %s." % referer_message, use_panels=True )
new_history = import_history.copy( target_user=user )
new_history.name = "imported: " + new_history.name
new_history.user_id = user.id
@@ -530,9 +541,9 @@
trans.sa_session.flush()
if not user_history.datasets:
trans.set_history( new_history )
- return trans.show_ok_message( """
- History "%s" has been imported. Click <a href="%s">here</a>
- to begin.""" % ( new_history.name, web.url_for( '/' ) ) )
+ return trans.show_ok_message(
+ message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
+ % ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
elif not user_history or not user_history.datasets or confirm:
new_history = import_history.copy()
new_history.name = "imported: " + new_history.name
@@ -548,13 +559,13 @@
trans.sa_session.add( new_history )
trans.sa_session.flush()
trans.set_history( new_history )
- return trans.show_ok_message( """
- History "%s" has been imported. Click <a href="%s">here</a>
- to begin.""" % ( new_history.name, web.url_for( '/' ) ) )
+ return trans.show_ok_message(
+ message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
+ % ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
return trans.show_warn_message( """
Warning! If you import this history, you will lose your current
- history. Click <a href="%s">here</a> to confirm.
- """ % web.url_for( id=id, confirm=True ) )
+ history. <br>You can <a href="%s">continue and import this history</a> or %s.
+ """ % ( web.url_for( id=id, confirm=True, referer=trans.request.referer ), referer_message ), use_panels=True )
@web.expose
def view( self, trans, id=None ):
diff -r f8c305ba23cb -r e39c9a2a0b4c lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py Fri Mar 12 12:43:31 2010 -0500
+++ b/lib/galaxy/web/controllers/workflow.py Fri Mar 12 14:27:04 2010 -0500
@@ -278,18 +278,28 @@
@web.expose
@web.require_login( "use Galaxy workflows" )
def imp( self, trans, id, **kwargs ):
+ # Set referer message.
+ referer = trans.request.referer
+ if referer is not "":
+ referer_message = "<a href='%s'>return to the previous page</a>" % referer
+ else:
+ referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
+
+ # Do import.
session = trans.sa_session
stored = self.get_stored_workflow( trans, id, check_ownership=False )
if stored.importable == False:
- error( "The owner of this workflow has disabled imports via this link" )
+ return trans.show_error_message( "The owner of this workflow has disabled imports via this link.<br>You can %s" % referer_message, use_panels=True )
elif stored.user == trans.user:
- error( "You are already the owner of this workflow, can't import" )
+ return trans.show_error_message( "You can't import this workflow because you own it.<br>You can %s" % referer_message, use_panels=True )
elif stored.deleted:
- error( "This workflow has been deleted, can't import" )
+ return trans.show_error_message( "You can't import this workflow because it has been deleted.<br>You can %s" % referer_message, use_panels=True )
elif session.query( model.StoredWorkflowUserShareAssociation ) \
.filter_by( user=trans.user, stored_workflow=stored ).count() > 0:
- error( "This workflow is already shared with you" )
+ # TODO: this is only reasonable as long as import creates a sharing relation.
+ return trans.show_error_message( "You can't import this workflow because it is already shared with you.<br>You can %s" % referer_message, use_panels=True )
else:
+ # TODO: Shouldn't an import provide a copy of a workflow?
share = model.StoredWorkflowUserShareAssociation()
share.stored_workflow = stored
share.user = trans.user
@@ -297,7 +307,9 @@
session.add( share )
session.flush()
# Redirect to load galaxy frames.
- return trans.response.send_redirect( url_for( controller='workflow' ) )
+ return trans.show_ok_message(
+ message="""Workflow "%s" has been imported. <br>You can <a href="%s">start using this workflow</a> or %s."""
+ % ( stored.name, web.url_for( controller='workflow' ), referer_message ), use_panels=True )
@web.expose
@web.require_login( "use Galaxy workflows" )
diff -r f8c305ba23cb -r e39c9a2a0b4c lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Fri Mar 12 12:43:31 2010 -0500
+++ b/lib/galaxy/web/framework/__init__.py Fri Mar 12 14:27:04 2010 -0500
@@ -561,7 +561,7 @@
context.
"""
return self.template_context['message']
- def show_message( self, message, type='info', refresh_frames=[], cont=None ):
+ def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False ):
"""
Convenience method for displaying a simple page with a single message.
@@ -571,22 +571,22 @@
`refresh_frames`: names of frames in the interface that should be
refreshed when the message is displayed
"""
- return self.fill_template( "message.mako", message_type=type, message=message, refresh_frames=refresh_frames, cont=cont )
- def show_error_message( self, message, refresh_frames=[] ):
+ return self.fill_template( "message.mako", message_type=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels )
+ def show_error_message( self, message, refresh_frames=[], use_panels=False ):
"""
Convenience method for displaying an error message. See `show_message`.
"""
- return self.show_message( message, 'error', refresh_frames )
- def show_ok_message( self, message, refresh_frames=[] ):
+ return self.show_message( message, 'error', refresh_frames, use_panels=use_panels )
+ def show_ok_message( self, message, refresh_frames=[], use_panels=False ):
"""
Convenience method for displaying an ok message. See `show_message`.
"""
- return self.show_message( message, 'done', refresh_frames )
- def show_warn_message( self, message, refresh_frames=[] ):
+ return self.show_message( message, 'done', refresh_frames, use_panels=use_panels )
+ def show_warn_message( self, message, refresh_frames=[], use_panels=False ):
"""
Convenience method for displaying an warn message. See `show_message`.
"""
- return self.show_message( message, 'warning', refresh_frames )
+ return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels )
def show_form( self, form, header=None, template="form.mako" ):
"""
Convenience method for displaying a simple page with a single HTML
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/dataset/display.mako
--- a/templates/dataset/display.mako Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/dataset/display.mako Fri Mar 12 14:27:04 2010 -0500
@@ -23,7 +23,8 @@
<%def name="render_item_links( data )">
## Provide links to save data and TODO: import dataset.
- <a href="${h.url_for( controller='dataset', action='display', dataset_id=trans.security.encode_id( data.id ), to_ext=data.ext )}">save</a>
+ <a href="${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( data.id ), to_ext=data.ext )}" class="icon-button disk tooltip" title="Save dataset"></a>
+ <a href="${h.url_for( controller='/dataset', action='imp', dataset_id=trans.security.encode_id( data.id ) )}" class="icon-button import tooltip" title="Import dataset"></a>
</%def>
<%def name="render_item( data, data_to_render )">
@@ -49,11 +50,12 @@
<div class="unified-panel-body">
<div style="overflow: auto; height: 100%;">
<div class="page-body">
- <div style="padding: 0px 0px 5px 0px">
+ <div style="float: right">
${self.render_item_links( item )}
</div>
-
- ${self.render_item_header( item )}
+ <div>
+ ${self.render_item_header( item )}
+ </div>
${self.render_item( item, item_data )}
</div>
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/display_base.mako
--- a/templates/display_base.mako Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/display_base.mako Fri Mar 12 14:27:04 2010 -0500
@@ -156,11 +156,12 @@
<div class="unified-panel-body">
<div style="overflow: auto; height: 100%;">
<div class="page-body">
- <div style="padding: 0px 0px 5px 0px">
+ <div style="float: right">
${self.render_item_links( item )}
</div>
-
- ${self.render_item_header( item )}
+ <div>
+ ${self.render_item_header( item )}
+ </div>
${self.render_item( item, item_data )}
</div>
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/display_common.mako
--- a/templates/display_common.mako Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/display_common.mako Fri Mar 12 14:27:04 2010 -0500
@@ -128,3 +128,13 @@
%endif
</%def>
+## Render message.
+<%def name="render_message( message, message_type )">
+ %if message:
+ <p>
+ <div class="${message_type}message transient-message">${util.restore_text( message )}</div>
+ <div style="clear: both"></div>
+ </p>
+ %endif
+</%def>
+
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/grid_base.mako
--- a/templates/grid_base.mako Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/grid_base.mako Fri Mar 12 14:27:04 2010 -0500
@@ -9,6 +9,7 @@
return '/base.mako'
%>
<%inherit file="${inherit(context)}"/>
+<%namespace file="/display_common.mako" import="render_message" />
<%def name="init()">
<%
@@ -663,23 +664,13 @@
<tr>
<td width="75%">${self.render_grid_header( grid )}</td>
<td></td>
- <td width="25%" id="grid-message" valign="top">${self.render_grid_message( grid )}</td>
+ <td width="25%" id="grid-message" valign="top">${render_message( message, message_type )}</td>
</tr>
</table>
${self.render_grid_table( grid )}
</%def>
-## Render grid message.
-<%def name="render_grid_message( grid )">
- %if message:
- <p>
- <div class="${message_type}message transient-message">${util.restore_text( message )}</div>
- <div style="clear: both"></div>
- </p>
- %endif
-</%def>
-
## Render grid header.
<%def name="render_grid_header( grid, render_title=True)">
<div class="grid-header">
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/grid_base_async.mako
--- a/templates/grid_base_async.mako Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/grid_base_async.mako Fri Mar 12 14:27:04 2010 -0500
@@ -1,4 +1,5 @@
<%namespace file="./grid_base.mako" import="*" />
+<%namespace file="/display_common.mako" import="render_message" />
<%
# Set flag to indicate whether grid has operations that operate on multiple items.
@@ -12,4 +13,4 @@
*****
${num_pages}
*****
-${render_grid_message( grid )}
\ No newline at end of file
+${render_message( grid )}
\ No newline at end of file
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/history/display.mako
--- a/templates/history/display.mako Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/history/display.mako Fri Mar 12 14:27:04 2010 -0500
@@ -236,16 +236,7 @@
</%def>
<%def name="render_item_links( history )">
- %if history.user != trans.get_user():
- <a href="${h.url_for( controller='/history', action='imp', id=trans.security.encode_id(history.id) )}">import and start using history</a>
- %else:
- ## TODO: add tooltip to indicate why this link is disabled.
- import and start using history
- %endif
- ##<a href="${self.get_history_link( history )}">${_('refresh')}</a>
- %if show_deleted:
- | <a href="${h.url_for('history', show_deleted=False)}">${_('hide deleted')}</a>
- %endif
+ <a href="${h.url_for( controller='/history', action='imp', id=trans.security.encode_id(history.id) )}" class="icon-button import tooltip" title="Import history"></a>
</%def>
<%def name="render_item( history, datasets )">
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/message.mako
--- a/templates/message.mako Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/message.mako Fri Mar 12 14:27:04 2010 -0500
@@ -1,5 +1,21 @@
+<%!
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
<% _=n_ %>
-<%inherit file="/base.mako"/>
+
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.active_view=active_view
+ self.message_box_visible=False
+%>
+</%def>
<%def name="javascripts()">
${parent.javascripts()}
@@ -45,7 +61,23 @@
</script>
</%def>
-<div class="${message_type}messagelarge">${_(message)}</div>
+##
+## Override methods from base.mako and base_panels.mako
+##
+
+<%def name="center_panel()">
+ ${render_large_message( message, message_type )}
+</%def>
+
+## Render the grid's basic elements. Each of these elements can be subclassed.
+<%def name="body()">
+ ${render_large_message( message, message_type )}
+</%def>
+
+## Render large message.
+<%def name="render_large_message( message, message_type )">
+ <div class="${message_type}messagelarge" style="margin: 1em">${_(message)}</div>
+</%def>
## Render a message
<%def name="render_msg( msg, messagetype='done' )">
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/page/select_items_grid_async.mako
--- a/templates/page/select_items_grid_async.mako Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/page/select_items_grid_async.mako Fri Mar 12 14:27:04 2010 -0500
@@ -1,8 +1,9 @@
<%namespace file="../grid_base.mako" import="*" />
+<%namespace file="/display_common.mako" import="render_message" />
## Always show item checkboxes so that users can select histories.
${render_grid_table_body_contents( grid, show_item_checkboxes=True )}
*****
${num_pages}
*****
-${render_grid_message( grid )}
\ No newline at end of file
+${render_message( message, message_type )}
\ No newline at end of file
diff -r f8c305ba23cb -r e39c9a2a0b4c templates/workflow/display.mako
--- a/templates/workflow/display.mako Fri Mar 12 12:43:31 2010 -0500
+++ b/templates/workflow/display.mako Fri Mar 12 14:27:04 2010 -0500
@@ -77,11 +77,7 @@
<%def name="render_item_links( workflow )">
- %if workflow.user != trans.get_user():
- <a href="${h.url_for( controller='/workflow', action='imp', id=trans.security.encode_id(workflow.id) )}">import and start using workflow</a>
- %else:
- import and start using workflow
- %endif
+ <a href="${h.url_for( controller='/workflow', action='imp', id=trans.security.encode_id(workflow.id) )}" class="icon-button import tooltip" title="Import workflow"></a>
</%def>
<%def name="render_item( workflow, steps )">
details: http://www.bx.psu.edu/hg/galaxy/rev/f8c305ba23cb
changeset: 3526:f8c305ba23cb
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Fri Mar 12 12:43:31 2010 -0500
description:
When preparing Display Applications make manual click refresh link a non-javascript action.
diffstat:
templates/dataset/display_application/display.mako | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (11 lines):
diff -r 90fa86a7b4e3 -r f8c305ba23cb templates/dataset/display_application/display.mako
--- a/templates/dataset/display_application/display.mako Fri Mar 12 12:17:36 2010 -0500
+++ b/templates/dataset/display_application/display.mako Fri Mar 12 12:43:31 2010 -0500
@@ -7,6 +7,6 @@
%if refresh:
<%def name="metas()"><meta http-equiv="refresh" content="10" /></%def>
<p>
-This page will <a href="javascript:location.reload(true);">refresh</a> after 10 seconds.
+This page will <a href="${trans.request.url}">refresh</a> after 10 seconds.
</p>
%endif
details: http://www.bx.psu.edu/hg/galaxy/rev/90fa86a7b4e3
changeset: 3525:90fa86a7b4e3
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Fri Mar 12 12:17:36 2010 -0500
description:
Tweaks for flow control in Display applications:
Display Applications now use trans.response.send_redirect instead of javascript when redirecting users to prepared display applications.
Display Applications now use a <meta> refresh instead of a javascript based refresh when notifying users that the display application is being prepared.
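Put another way, once the display link is prepared the controller answers with a plain HTTP redirect, and the waiting page reloads through a <meta> tag, so neither step depends on JavaScript. A minimal sketch of the redirect side, using only calls that appear in the diff below; the method signature here is simplified for illustration and is not the real one:

def display_application( self, trans, app_action=None, display_link=None, **kwd ):
    # Hypothetical slimmed-down handler signature, for illustration only.
    if app_action is None:
        # The link is prepared: answer with a server-side redirect rather than
        # a page that forwards the browser via JavaScript.
        return trans.response.send_redirect( display_link.display_url() )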
diffstat:
lib/galaxy/web/controllers/dataset.py | 3 ++-
templates/dataset/display_application/display.mako | 9 ++-------
templates/dataset/display_application/launch_display.mako | 15 ---------------
3 files changed, 4 insertions(+), 23 deletions(-)
diffs (55 lines):
diff -r 5f967426f33f -r 90fa86a7b4e3 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Fri Mar 12 12:03:25 2010 -0500
+++ b/lib/galaxy/web/controllers/dataset.py Fri Mar 12 12:17:36 2010 -0500
@@ -557,7 +557,8 @@
trans.response.headers[ 'Content-Length' ] = content_length
return rval
elif app_action == None:
- return trans.fill_template_mako( "dataset/display_application/launch_display.mako", display_link = display_link )
+ #redirect user to url generated by display link
+ return trans.response.send_redirect( display_link.display_url() )
else:
msg.append( ( 'Invalid action provided: %s' % app_action, 'error' ) )
else:
diff -r 5f967426f33f -r 90fa86a7b4e3 templates/dataset/display_application/display.mako
--- a/templates/dataset/display_application/display.mako Fri Mar 12 12:03:25 2010 -0500
+++ b/templates/dataset/display_application/display.mako Fri Mar 12 12:17:36 2010 -0500
@@ -1,17 +1,12 @@
<%inherit file="/base.mako"/>
<%namespace file="/message.mako" import="render_msg" />
<%def name="title()">Display Application: ${display_link.link.display_application.name} ${display_link.link.name}</%def>
-<% refresh_rate = 10 %>
-%if refresh:
-<script type="text/javascript">
- setTimeout( "location.reload(true);", ${ refresh_rate * 1000 } );
-</script>
-%endif
%for message, message_type in msg:
${render_msg( message, message_type )}
%endfor
%if refresh:
+<%def name="metas()"><meta http-equiv="refresh" content="10" /></%def>
<p>
-This page will <a href="javascript:location.reload(true);">refresh</a> after ${refresh_rate} seconds.
+This page will <a href="javascript:location.reload(true);">refresh</a> after 10 seconds.
</p>
%endif
diff -r 5f967426f33f -r 90fa86a7b4e3 templates/dataset/display_application/launch_display.mako
--- a/templates/dataset/display_application/launch_display.mako Fri Mar 12 12:03:25 2010 -0500
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-<%inherit file="/base.mako"/>
-<%def name="title()">Launching Display Application: ${display_link.link.display_application.name} ${display_link.link.name}</%def>
-
-<script type="text/javascript">
- location.href = '${display_link.display_url()}';
-</script>
-<p>
-All data has been prepared for the external display application: ${display_link.link.display_application.name} ${display_link.link.name}.
-</p>
-<p>
-You are now being automatically forwarded to the external application.
-</p>
-<p>
-Click <a href="${display_link.display_url()}">here</a> if this redirect has failed.
-</p>
details: http://www.bx.psu.edu/hg/galaxy/rev/5f967426f33f
changeset: 3524:5f967426f33f
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Fri Mar 12 12:03:25 2010 -0500
description:
Add 'metas()' %def to base.mako, to allow subclassing templates to declare additional <meta/> tags.
diffstat:
templates/base.mako | 3 +++
1 files changed, 3 insertions(+), 0 deletions(-)
diffs (17 lines):
diff -r c3eccab29814 -r 5f967426f33f templates/base.mako
--- a/templates/base.mako Fri Mar 12 09:37:22 2010 -0500
+++ b/templates/base.mako Fri Mar 12 12:03:25 2010 -0500
@@ -5,6 +5,7 @@
<head>
<title>${self.title()}</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+${self.metas()}
${self.stylesheets()}
${self.javascripts()}
</head>
@@ -30,3 +31,5 @@
${h.js( "jquery", "galaxy.base" )}
</%def>
+## Additional metas can be defined by templates inheriting from this one.
+<%def name="metas()"></%def>
details: http://www.bx.psu.edu/hg/galaxy/rev/c3eccab29814
changeset: 3523:c3eccab29814
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Fri Mar 12 09:37:22 2010 -0500
description:
Make visualizations sharable, publishable, taggable, and annotate-able. Plumbing code is in place, but UI code needs work; in particular, viewing a shared/published visualization is empty and annotations are available only via edit attributes.
Some code tidying: refactoring, removing unused code.
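For orientation, sharing a visualization with another user now comes down to creating one of the new association rows. A minimal hypothetical sketch, assuming the same share-association pattern the workflow import code above uses for StoredWorkflowUserShareAssociation; the visualization, other_user, and trans objects are assumed to be in scope:

# Hypothetical sketch: record that `visualization` is shared with `other_user`,
# mirroring the share-association pattern used for workflows above.
share = model.VisualizationUserShareAssociation()
share.visualization = visualization
share.user = other_user
trans.sa_session.add( share )
trans.sa_session.flush()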
diffstat:
lib/galaxy/model/__init__.py | 16 +-
lib/galaxy/model/mapping.py | 73 +-
lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py | 220 ++++++
lib/galaxy/tags/tag_handler.py | 1 +
lib/galaxy/web/base/controller.py | 39 +-
lib/galaxy/web/buildapp.py | 1 +
lib/galaxy/web/controllers/history.py | 56 +-
lib/galaxy/web/controllers/page.py | 21 +-
lib/galaxy/web/controllers/tracks.py | 4 +-
lib/galaxy/web/controllers/visualization.py | 322 +++++++++-
lib/galaxy/web/controllers/workflow.py | 6 +-
templates/base_panels.mako | 2 +-
templates/display_common.mako | 7 +-
templates/page/create.mako | 3 +-
templates/panels.mako | 2 -
templates/visualization/create.mako | 14 +
templates/visualization/display.mako | 19 +
templates/visualization/list.mako | 52 +
templates/visualization/list_published.mako | 36 +
19 files changed, 773 insertions(+), 121 deletions(-)
diffs (1317 lines):
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/model/__init__.py Fri Mar 12 09:37:22 2010 -0500
@@ -1527,6 +1527,11 @@
self.title = None
self.config = None
+class VisualizationUserShareAssociation( object ):
+ def __init__( self ):
+ self.visualization = None
+ self.user = None
+
class Tag ( object ):
def __init__( self, id=None, type=None, parent_id=None, name=None ):
self.id = id
@@ -1558,16 +1563,16 @@
class PageTagAssociation ( ItemTagAssociation ):
pass
-
-class WorkflowTagAssociation ( ItemTagAssociation ):
- pass
-
+
class WorkflowStepTagAssociation ( ItemTagAssociation ):
pass
class StoredWorkflowTagAssociation ( ItemTagAssociation ):
pass
+class VisualizationTagAssociation ( ItemTagAssociation ):
+ pass
+
class HistoryAnnotationAssociation( object ):
pass
@@ -1583,6 +1588,9 @@
class PageAnnotationAssociation( object ):
pass
+class VisualizationAnnotationAssociation( object ):
+ pass
+
class UserPreference ( object ):
def __init__( self, name=None, value=None ):
self.name = name
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/model/mapping.py Fri Mar 12 09:37:22 2010 -0500
@@ -80,7 +80,7 @@
Column( "genome_build", TrimmedString( 40 ) ),
Column( "importable", Boolean, default=False ),
Column( "slug", TEXT, index=True ),
- Column( "published", Boolean, index=True ) )
+ Column( "published", Boolean, index=True, default=False ) )
HistoryUserShareAssociation.table = Table( "history_user_share_association", metadata,
Column( "id", Integer, primary_key=True ),
@@ -521,7 +521,7 @@
Column( "deleted", Boolean, default=False ),
Column( "importable", Boolean, default=False ),
Column( "slug", TEXT, index=True ),
- Column( "published", Boolean, index=True )
+ Column( "published", Boolean, index=True, default=False )
)
Workflow.table = Table( "workflow", metadata,
@@ -721,7 +721,11 @@
Column( "latest_revision_id", Integer,
ForeignKey( "visualization_revision.id", use_alter=True, name='visualization_latest_revision_id_fk' ), index=True ),
Column( "title", TEXT ),
- Column( "type", TEXT )
+ Column( "type", TEXT ),
+ Column( "deleted", Boolean, default=False, index=True ),
+ Column( "importable", Boolean, default=False, index=True ),
+ Column( "slug", TEXT, index=True ),
+ Column( "published", Boolean, default=False, index=True )
)
VisualizationRevision.table = Table( "visualization_revision", metadata,
@@ -733,6 +737,12 @@
Column( "config", JSONType )
)
+VisualizationUserShareAssociation.table = Table( "visualization_user_share_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
+ )
+
# Tagging tables.
Tag.table = Table( "tag", metadata,
@@ -768,16 +778,7 @@
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
Column( "user_value", TrimmedString(255), index=True) )
-
-WorkflowTagAssociation.table = Table( "workflow_tag_association", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True ),
- Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
- Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
- Column( "user_tname", Unicode(255), index=True),
- Column( "value", Unicode(255), index=True),
- Column( "user_value", Unicode(255), index=True) )
-
+
StoredWorkflowTagAssociation.table = Table( "stored_workflow_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
@@ -805,6 +806,15 @@
Column( "value", Unicode(255), index=True),
Column( "user_value", Unicode(255), index=True) )
+VisualizationTagAssociation.table = Table( "visualization_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
# Annotation tables.
HistoryAnnotationAssociation.table = Table( "history_annotation_association", metadata,
@@ -836,6 +846,12 @@
Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "annotation", TEXT, index=True) )
+
+VisualizationAnnotationAssociation.table = Table( "visualization_annotation_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+ Column( "annotation", TEXT, index=True) )
# User tables.
@@ -1271,8 +1287,7 @@
properties=dict( steps=relation( WorkflowStep, backref='workflow',
order_by=asc(WorkflowStep.table.c.order_index),
cascade="all, delete-orphan",
- lazy=False ),
- tags=relation(WorkflowTagAssociation, order_by=WorkflowTagAssociation.table.c.id, backref="workflows")
+ lazy=False )
) )
assign_mapper( context, WorkflowStep, WorkflowStep.table,
@@ -1359,8 +1374,20 @@
primaryjoin=( Visualization.table.c.id == VisualizationRevision.table.c.visualization_id ) ),
latest_revision=relation( VisualizationRevision, post_update=True,
primaryjoin=( Visualization.table.c.latest_revision_id == VisualizationRevision.table.c.id ),
- lazy=False )
+ lazy=False ),
+ tags=relation( VisualizationTagAssociation, order_by=VisualizationTagAssociation.table.c.id, backref="visualizations" ),
+ annotations=relation( VisualizationAnnotationAssociation, order_by=VisualizationAnnotationAssociation.table.c.id, backref="visualizations" )
) )
+
+# Set up proxy so that
+# Visualization.users_shared_with_dot_users
+# returns a list of User objects for users that a visualization is shared with.
+Visualization.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
+
+assign_mapper( context, VisualizationUserShareAssociation, VisualizationUserShareAssociation.table,
+ properties=dict( user=relation( User, backref='visualizations_shared_by_others' ),
+ visualization=relation( Visualization, backref='users_shared_with' )
+ ) )
assign_mapper( context, Tag, Tag.table,
properties=dict( children=relation(Tag, backref=backref( 'parent', remote_side=[Tag.table.c.id] ) )
@@ -1381,19 +1408,19 @@
assign_mapper( context, PageTagAssociation, PageTagAssociation.table,
properties=dict( tag=relation(Tag, backref="tagged_pages"), user=relation( User ) )
)
-
-assign_mapper( context, WorkflowTagAssociation, WorkflowTagAssociation.table,
- properties=dict( tag=relation(Tag, backref="tagged_workflows"), user=relation( User ) )
- )
assign_mapper( context, StoredWorkflowTagAssociation, StoredWorkflowTagAssociation.table,
- properties=dict( tag=relation(Tag, backref="tagged_stored_workflows"), user=relation( User ) )
+ properties=dict( tag=relation(Tag, backref="tagged_workflows"), user=relation( User ) )
)
assign_mapper( context, WorkflowStepTagAssociation, WorkflowStepTagAssociation.table,
properties=dict( tag=relation(Tag, backref="tagged_workflow_steps"), user=relation( User ) )
)
+assign_mapper( context, VisualizationTagAssociation, VisualizationTagAssociation.table,
+ properties=dict( tag=relation(Tag, backref="tagged_visualizations"), user=relation( User ) )
+ )
+
assign_mapper( context, HistoryAnnotationAssociation, HistoryAnnotationAssociation.table,
properties=dict( history=relation( History ), user=relation( User ) )
)
@@ -1414,6 +1441,10 @@
properties=dict( page=relation( Page ), user=relation( User ) )
)
+assign_mapper( context, VisualizationAnnotationAssociation, VisualizationAnnotationAssociation.table,
+ properties=dict( visualization=relation( Visualization ), user=relation( User ) )
+ )
+
assign_mapper( context, UserPreference, UserPreference.table,
properties = {}
)
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py Fri Mar 12 09:37:22 2010 -0500
@@ -0,0 +1,220 @@
+"""
+Migration script to create tables and columns for sharing visualizations.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+# Sharing visualizations.
+
+VisualizationUserShareAssociation_table = Table( "visualization_user_share_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
+ )
+
+# Tagging visualizations.
+
+VisualizationTagAssociation_table = Table( "visualization_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+ Column( "user_tname", Unicode(255), index=True),
+ Column( "value", Unicode(255), index=True),
+ Column( "user_value", Unicode(255), index=True) )
+
+# Annotating visualizations.
+
+VisualizationAnnotationAssociation_table = Table( "visualization_annotation_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+ Column( "annotation", TEXT, index=False ) )
+
+Visualiation_table = Table( "visualization", metadata, autoload=True )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+
+ # Create visualization_user_share_association table.
+ try:
+ VisualizationUserShareAssociation_table.create()
+ except Exception, e:
+ print "Creating visualization_user_share_association table failed: %s" % str( e )
+ log.debug( "Creating visualization_user_share_association table failed: %s" % str( e ) )
+
+ # Get default boolean value 'false' so that columns can be initialized.
+ if migrate_engine.name == 'mysql':
+ default_false = "0"
+ elif migrate_engine.name == 'sqlite':
+ default_false = "'false'"
+ elif migrate_engine.name == 'postgres':
+ default_false = "false"
+
+ # Add columns & create indices for supporting sharing to visualization table.
+ deleted_column = Column( "deleted", Boolean, default=False, index=True )
+ importable_column = Column( "importable", Boolean, default=False, index=True )
+ slug_column = Column( "slug", TEXT, index=True )
+ published_column = Column( "published", Boolean, index=True )
+
+ try:
+ # Add column.
+ deleted_column.create( Visualiation_table )
+ assert deleted_column is Visualiation_table.c.deleted
+
+ # Fill column with default value.
+ cmd = "UPDATE visualization SET deleted = %s" % default_false
+ db_session.execute( cmd )
+ except Exception, e:
+ print "Adding deleted column to visualization table failed: %s" % str( e )
+ log.debug( "Adding deleted column to visualization table failed: %s" % str( e ) )
+
+ try:
+ i = Index( "ix_visualization_deleted", Visualiation_table.c.deleted )
+ i.create()
+ except Exception, e:
+ print "Adding index 'ix_visualization_deleted' failed: %s" % str( e )
+ log.debug( "Adding index 'ix_visualization_deleted' failed: %s" % str( e ) )
+
+ try:
+ # Add column.
+ importable_column.create( Visualiation_table )
+ assert importable_column is Visualiation_table.c.importable
+
+ # Fill column with default value.
+ cmd = "UPDATE visualization SET importable = %s" % default_false
+ db_session.execute( cmd )
+ except Exception, e:
+ print "Adding importable column to visualization table failed: %s" % str( e )
+ log.debug( "Adding importable column to visualization table failed: %s" % str( e ) )
+
+ i = Index( "ix_visualization_importable", Visualiation_table.c.importable )
+ try:
+ i.create()
+ except Exception, e:
+ print "Adding index 'ix_visualization_importable' failed: %s" % str( e )
+ log.debug( "Adding index 'ix_visualization_importable' failed: %s" % str( e ) )
+
+ try:
+ slug_column.create( Visualiation_table )
+ assert slug_column is Visualiation_table.c.slug
+ except Exception, e:
+ print "Adding slug column to visualization table failed: %s" % str( e )
+ log.debug( "Adding slug column to visualization table failed: %s" % str( e ) )
+
+ try:
+ if migrate_engine.name == 'mysql':
+ # Have to create index manually.
+ cmd = "CREATE INDEX ix_visualization_slug ON visualization ( slug ( 100 ) )"
+ db_session.execute( cmd )
+ else:
+ i = Index( "ix_visualization_slug", Visualiation_table.c.slug )
+ i.create()
+ except Exception, e:
+ print "Adding index 'ix_visualization_slug' failed: %s" % str( e )
+ log.debug( "Adding index 'ix_visualization_slug' failed: %s" % str( e ) )
+
+ try:
+ # Add column.
+ published_column.create( Visualization_table )
+ assert published_column is Visualization_table.c.published
+
+ # Fill column with default value.
+ cmd = "UPDATE visualization SET published = %s" % default_false
+ db_session.execute( cmd )
+ except Exception, e:
+ print "Adding published column to visualization table failed: %s" % str( e )
+ log.debug( "Adding published column to visualization table failed: %s" % str( e ) )
+
+ i = Index( "ix_visualization_published", Visualiation_table.c.published )
+ try:
+ i.create()
+ except Exception, e:
+ print "Adding index 'ix_visualization_published' failed: %s" % str( e )
+ log.debug( "Adding index 'ix_visualization_published' failed: %s" % str( e ) )
+
+ # Create visualization_tag_association table.
+ try:
+ VisualizationTagAssociation_table.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Creating visualization_tag_association table failed: %s" % str( e ) )
+
+ # Create visualization_annotation_association table.
+ try:
+ VisualizationAnnotationAssociation_table.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Creating visualization_annotation_association table failed: %s" % str( e ) )
+
+ # Need to create index for visualization annotation manually to deal with errors.
+ try:
+ if migrate_engine.name == 'mysql':
+ # Have to create index manually.
+ cmd = "CREATE INDEX ix_visualization_annotation_association_annotation ON visualization_annotation_association ( annotation ( 100 ) )"
+ db_session.execute( cmd )
+ else:
+ i = Index( "ix_visualization_annotation_association_annotation", VisualizationAnnotationAssociation_table.c.annotation )
+ i.create()
+ except Exception, e:
+ print "Adding index 'ix_visualization_annotation_association_annotation' failed: %s" % str( e )
+ log.debug( "Adding index 'ix_visualization_annotation_association_annotation' failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+
+ # Drop visualization_user_share_association table.
+ try:
+ VisualizationUserShareAssociation_table.drop()
+ except Exception, e:
+ print str(e)
+ log.debug( "Dropping visualization_user_share_association table failed: %s" % str( e ) )
+
+ # Drop columns for supporting sharing from visualization table.
+ try:
+ Visualization_table.c.deleted.drop()
+ except Exception, e:
+ print "Dropping deleted column from visualization table failed: %s" % str( e )
+ log.debug( "Dropping deleted column from visualization table failed: %s" % str( e ) )
+
+ try:
+ Visualization_table.c.importable.drop()
+ except Exception, e:
+ print "Dropping importable column from visualization table failed: %s" % str( e )
+ log.debug( "Dropping importable column from visualization table failed: %s" % str( e ) )
+
+ try:
+ Visualization_table.c.slug.drop()
+ except Exception, e:
+ print "Dropping slug column from visualization table failed: %s" % str( e )
+ log.debug( "Dropping slug column from visualization table failed: %s" % str( e ) )
+
+ try:
+ Visualization_table.c.published.drop()
+ except Exception, e:
+ print "Dropping published column from visualization table failed: %s" % str( e )
+ log.debug( "Dropping published column from visualization table failed: %s" % str( e ) )
+
+ # Drop visualization_tag_association table.
+ try:
+ VisualizationTagAssociation_table.drop()
+ except Exception, e:
+ print str(e)
+ log.debug( "Dropping visualization_tag_association table failed: %s" % str( e ) )
+
+ # Drop visualization_annotation_association table.
+ try:
+ VisualizationAnnotationAssociation_table.drop()
+ except Exception, e:
+ print str(e)
+ log.debug( "Dropping visualization_annotation_association table failed: %s" % str( e ) )
\ No newline at end of file
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/tags/tag_handler.py
--- a/lib/galaxy/tags/tag_handler.py Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/tags/tag_handler.py Fri Mar 12 09:37:22 2010 -0500
@@ -34,6 +34,7 @@
ItemTagAssocInfo( model.HistoryDatasetAssociation, model.HistoryDatasetAssociationTagAssociation, model.HistoryDatasetAssociationTagAssociation.table.c.history_dataset_association_id )
item_tag_assoc_info["Page"] = ItemTagAssocInfo( model.Page, model.PageTagAssociation, model.PageTagAssociation.table.c.page_id )
item_tag_assoc_info["StoredWorkflow"] = ItemTagAssocInfo( model.StoredWorkflow, model.StoredWorkflowTagAssociation, model.StoredWorkflowTagAssociation.table.c.stored_workflow_id )
+ item_tag_assoc_info["Visualization"] = ItemTagAssocInfo( model.Visualization, model.VisualizationTagAssociation, model.VisualizationTagAssociation.table.c.visualization_id )
def get_tag_assoc_class(self, item_class):
""" Returns tag association class for item class. """
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/base/controller.py Fri Mar 12 09:37:22 2010 -0500
@@ -16,6 +16,9 @@
# States for passing messages
SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
+
+# RE that tests for valid slug.
+VALID_SLUG_RE = re.compile( "^[a-z0-9\-]+$" )
class BaseController( object ):
"""
@@ -40,6 +43,8 @@
item_class = model.Page
elif class_name == 'StoredWorkflow':
item_class = model.StoredWorkflow
+ elif class_name == 'Visualization':
+ item_class = model.Visualization
else:
item_class = None
return item_class
@@ -76,6 +81,8 @@
annotation_assoc = annotation_assoc.filter_by( workflow_step=item )
elif item.__class__ == model.Page:
annotation_assoc = annotation_assoc.filter_by( page=item )
+ elif item.__class__ == model.Visualization:
+ annotation_assoc = annotation_assoc.filter_by( visualization=item )
return annotation_assoc.first()
def add_item_annotation( self, trans, item, annotation ):
@@ -153,6 +160,19 @@
truncated = False
return truncated, dataset_data
+class UsesVisualization( SharableItemSecurity ):
+ """ Mixin for controllers that use Visualization objects. """
+
+ def get_visualization( self, trans, id, check_ownership=True, check_accessible=False ):
+ """ Get a Visualization from the database by id, verifying ownership. """
+ # Load visualization from database
+ id = trans.security.decode_id( id )
+ visualization = trans.sa_session.query( model.Visualization ).get( id )
+ if not visualization:
+ error( "Visualization not found" )
+ else:
+ return self.security_check( trans.get_user(), visualization, check_ownership, check_accessible )
+
class UsesStoredWorkflow( SharableItemSecurity ):
""" Mixin for controllers that use StoredWorkflow objects. """
@@ -240,6 +260,12 @@
pass
@web.expose
+ @web.require_login( "share Galaxy items" )
+ def share( self, trans, id=None, email="", **kwd ):
+ """ Handle sharing an item with a particular user. """
+ pass
+
+ @web.expose
def display_by_username_and_slug( self, trans, username, slug ):
""" Display item by username and slug. """
pass
@@ -262,13 +288,18 @@
def _make_item_accessible( self, sa_session, item ):
""" Makes item accessible--viewable and importable--and sets item's slug. Does not flush/commit changes, however. Item must have name, user, importable, and slug attributes. """
item.importable = True
- self.set_item_slug( sa_session, item )
+ self.create_item_slug( sa_session, item )
- def set_item_slug( self, sa_session, item ):
- """ Set item slug. Slug is unique among user's importable items for item's class. Returns true if item's slug was set; false otherwise. """
+ def create_item_slug( self, sa_session, item ):
+ """ Create item slug. Slug is unique among user's importable items for item's class. Returns true if item's slug was set; false otherwise. """
if item.slug is None or item.slug == "":
+ # Item can have either a name or a title.
+ if hasattr( item, 'name' ):
+ item_name = item.name
+ elif hasattr( item, 'title' ):
+ item_name = item.title
# Replace whitespace with '-'
- slug_base = re.sub( "\s+", "-", item.name.lower() )
+ slug_base = re.sub( "\s+", "-", item_name.lower() )
# Remove all non-alphanumeric characters.
slug_base = re.sub( "[^a-zA-Z0-9\-]", "", slug_base )
# Remove trailing '-'.
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/buildapp.py Fri Mar 12 09:37:22 2010 -0500
@@ -79,6 +79,7 @@
webapp.add_route( '/u/:username/p/:slug', controller='page', action='display_by_username_and_slug' )
webapp.add_route( '/u/:username/h/:slug', controller='history', action='display_by_username_and_slug' )
webapp.add_route( '/u/:username/w/:slug', controller='workflow', action='display_by_username_and_slug' )
+ webapp.add_route( '/u/:username/v/:slug', controller='visualization', action='display_by_username_and_slug' )
webapp.finalize_config()
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/controllers/history.py Fri Mar 12 09:37:22 2010 -0500
@@ -440,7 +440,7 @@
""" Returns history's name and link. """
history = self.get_history( trans, id, False )
- if self.set_item_slug( trans.sa_session, history ):
+ if self.create_item_slug( trans.sa_session, history ):
trans.sa_session.flush()
return_dict = { "name" : history.name, "link" : url_for( action="display_by_username_and_slug", username=history.user.username, slug=history.slug ) }
return return_dict
@@ -652,58 +652,6 @@
session.flush()
return trans.fill_template( "/sharing_base.mako", item=history )
-
- ## TODO: remove this method when history sharing has been verified to work correctly with new sharing() method.
- @web.expose
- @web.require_login( "share histories with other users" )
- def sharing_old( self, trans, histories=[], id=None, **kwd ):
- """Performs sharing of histories among users."""
- # histories looks like: [ historyX, historyY ]
- params = util.Params( kwd )
- msg = util.restore_text ( params.get( 'msg', '' ) )
- if id:
- ids = util.listify( id )
- if ids:
- histories = [ self.get_history( trans, history_id ) for history_id in ids ]
- for history in histories:
- trans.sa_session.add( history )
- if params.get( 'enable_import_via_link', False ):
- self._make_item_accessible( trans.sa_session, history )
- trans.sa_session.flush()
- elif params.get( 'disable_import_via_link', False ):
- history.importable = False
- trans.sa_session.flush()
- elif params.get( 'unshare_user', False ):
- user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( kwd[ 'unshare_user' ] ) )
- if not user:
- msg = 'History (%s) does not seem to be shared with user (%s)' % ( history.name, user.email )
- return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='error' )
- husas = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ).filter_by( user=user, history=history ).all()
- if husas:
- for husa in husas:
- trans.sa_session.delete( husa )
- trans.sa_session.flush()
- histories = []
- # Get all histories that have been shared with others
- husas = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .join( "history" ) \
- .filter( and_( trans.app.model.History.user == trans.user,
- trans.app.model.History.deleted == False ) ) \
- .order_by( trans.app.model.History.table.c.name )
- for husa in husas:
- history = husa.history
- if history not in histories:
- histories.append( history )
- # Get all histories that are importable
- importables = trans.sa_session.query( trans.app.model.History ) \
- .filter_by( user=trans.user, importable=True, deleted=False ) \
- .order_by( trans.app.model.History.table.c.name )
- for importable in importables:
- if importable not in histories:
- histories.append( importable )
- # Sort the list of histories by history.name
- histories.sort( key=operator.attrgetter( 'name') )
- return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='done' )
@web.expose
@web.require_login( "share histories with other users" )
@@ -975,7 +923,7 @@
share.history = history
share.user = send_to_user
trans.sa_session.add( share )
- self.set_item_slug( trans.sa_session, history )
+ self.create_item_slug( trans.sa_session, history )
trans.sa_session.flush()
if history not in shared_histories:
shared_histories.append( history )
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/controllers/page.py
--- a/lib/galaxy/web/controllers/page.py Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/controllers/page.py Fri Mar 12 09:37:22 2010 -0500
@@ -4,10 +4,6 @@
from galaxy.util.odict import odict
from galaxy.util.json import from_json_string
-import re
-
-VALID_SLUG_RE = re.compile( "^[a-z0-9\-]+$" )
-
def format_bool( b ):
if b:
return "yes"
@@ -45,8 +41,8 @@
]
operations = [
grids.DisplayByUsernameAndSlugGridOperation( "View", allow_multiple=False ),
+ grids.GridOperation( "Edit content", allow_multiple=False, url_args=dict( action='edit_content') ),
grids.GridOperation( "Edit attributes", allow_multiple=False, url_args=dict( action='edit') ),
- grids.GridOperation( "Edit content", allow_multiple=False, url_args=dict( action='edit_content') ),
grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
grids.GridOperation( "Delete", confirm="Are you sure you want to delete this page?" ),
]
@@ -62,7 +58,7 @@
default_sort_key = "-update_time"
default_filter = dict( title="All", username="All" )
columns = [
- grids.PublicURLColumn( "Title", key="title", model_class=model.Page, filterable="advanced"),
+ grids.PublicURLColumn( "Title", key="title", model_class=model.Page, filterable="advanced" ),
grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_class=model.Page, model_annotation_association_class=model.PageAnnotationAssociation, filterable="advanced" ),
grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced", sortable=False ),
grids.CommunityTagsColumn( "Community Tags", "tags", model.Page, model.PageTagAssociation, filterable="advanced", grid_name="PageAllPublishedGrid" ),
@@ -356,10 +352,10 @@
template="page/create.mako" )
@web.expose
- @web.require_login( "create pages" )
+ @web.require_login( "edit pages" )
def edit( self, trans, id, page_title="", page_slug="", page_annotation="" ):
"""
- Create a new page
+ Edit a page's attributes.
"""
encoded_id = id
id = trans.security.decode_id( id )
@@ -456,6 +452,7 @@
@web.expose
@web.require_login( "use Galaxy pages" )
def share( self, trans, id, email="" ):
+ """ Handle sharing with an individual user. """
msg = mtype = None
page = trans.sa_session.query( model.Page ).get( trans.security.decode_id( id ) )
if email:
@@ -468,18 +465,18 @@
msg = ( "User '%s' does not exist" % email )
elif other == trans.get_user():
mtype = "error"
- msg = ( "You cannot share a workflow with yourself" )
+ msg = ( "You cannot share a page with yourself" )
elif trans.sa_session.query( model.PageUserShareAssociation ) \
.filter_by( user=other, page=page ).count() > 0:
mtype = "error"
- msg = ( "Workflow already shared with '%s'" % email )
+ msg = ( "Page already shared with '%s'" % email )
else:
share = model.PageUserShareAssociation()
share.page = page
share.user = other
session = trans.sa_session
session.add( share )
- self.set_item_slug( session, page )
+ self.create_item_slug( session, page )
session.flush()
trans.set_message( "Page '%s' shared with user '%s'" % ( page.title, other.email ) )
return trans.response.send_redirect( url_for( controller='page', action='sharing', id=id ) )
@@ -609,7 +606,7 @@
""" Returns page's name and link. """
page = self.get_page( trans, id )
- if self.set_item_slug( trans.sa_session, page ):
+ if self.create_item_slug( trans.sa_session, page ):
trans.sa_session.flush()
return_dict = { "name" : page.title, "link" : url_for( action="display_by_username_and_slug", username=page.user.username, slug=page.slug ) }
return return_dict
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/controllers/tracks.py Fri Mar 12 09:37:22 2010 -0500
@@ -262,7 +262,9 @@
@web.json
def save( self, trans, **kwargs ):
session = trans.sa_session
- vis_id = kwargs['vis_id'].strip('"')
+ vis_id = "undefined"
+ if 'vis_id' in kwargs:
+ vis_id = kwargs['vis_id'].strip('"')
dbkey = kwargs['dbkey']
if vis_id == "undefined": # new vis
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/controllers/visualization.py Fri Mar 12 09:37:22 2010 -0500
@@ -1,35 +1,325 @@
from galaxy.web.base.controller import *
-from galaxy.web.framework.helpers import time_ago, grids
+from galaxy.web.framework.helpers import time_ago, grids, iff
from galaxy.util.sanitize_html import sanitize_html
class VisualizationListGrid( grids.Grid ):
# Grid definition
- title = "Visualizations"
+ title = "Saved Visualizations"
model_class = model.Visualization
default_sort_key = "-update_time"
+ default_filter = dict( title="All", deleted="False", tags="All", sharing="All" )
columns = [
- grids.GridColumn( "Title", key="title", attach_popup=True,
+ grids.TextColumn( "Title", key="title", model_class=model.Visualization, attach_popup=True,
link=( lambda item: dict( controller="tracks", action="browser", id=item.id ) ) ),
- grids.GridColumn( "Type", key="type" ),
+ grids.TextColumn( "Type", key="type", model_class=model.Visualization ),
+ grids.IndividualTagsColumn( "Tags", "tags", model.Visualization, model.VisualizationTagAssociation, filterable="advanced", grid_name="VisualizationListGrid" ),
+ grids.SharingStatusColumn( "Sharing", key="sharing", model_class=model.Visualization, filterable="advanced", sortable=False ),
grids.GridColumn( "Created", key="create_time", format=time_ago ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
- ]
- ## global_actions = [
- ## grids.GridAction( "Add new page", dict( action='create' ) )
- ## ]
+ ]
+ columns.append(
+ grids.MulticolFilterColumn(
+ "Search",
+ cols_to_filter=[ columns[0], columns[2] ],
+ key="free-text-search", visible=False, filterable="standard" )
+ )
operations = [
- grids.GridOperation( "View", allow_multiple=False, url_args=dict( controller="tracks", action='browser' ) ),
+ grids.GridOperation( "Edit content", allow_multiple=False, url_args=dict( controller='tracks', action='browser' ) ),
+ grids.GridOperation( "Edit attributes", allow_multiple=False, url_args=dict( action='edit') ),
+ grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
+ grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ), async_compatible=True, confirm="Are you sure you want to delete this visualization?" ),
]
def apply_default_filter( self, trans, query, **kwargs ):
- return query.filter_by( user=trans.user )
+ return query.filter_by( user=trans.user, deleted=False )
+
+class VisualizationAllPublishedGrid( grids.Grid ):
+ # Grid definition
+ use_panels = True
+ use_async = True
+ title = "Published Visualizations"
+ model_class = model.Visualization
+ default_sort_key = "-update_time"
+ default_filter = dict( title="All", username="All" )
+ columns = [
+ grids.PublicURLColumn( "Title", key="title", model_class=model.Visualization, filterable="advanced" ),
+ grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_class=model.Visualization, model_annotation_association_class=model.VisualizationAnnotationAssociation, filterable="advanced" ),
+ grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced", sortable=False ),
+ grids.CommunityTagsColumn( "Community Tags", "tags", model.Visualization, model.VisualizationTagAssociation, filterable="advanced", grid_name="VisualizationAllPublishedGrid" ),
+ grids.GridColumn( "Last Updated", key="update_time", format=time_ago )
+ ]
+ columns.append(
+ grids.MulticolFilterColumn(
+ "Search",
+ cols_to_filter=[ columns[0], columns[1], columns[2], columns[3] ],
+ key="free-text-search", visible=False, filterable="standard" )
+ )
+ def build_initial_query( self, session ):
+ # Join so that searching visualization.user makes sense.
+ return session.query( self.model_class ).join( model.User.table )
+ def apply_default_filter( self, trans, query, **kwargs ):
+ return query.filter( self.model_class.deleted==False ).filter( self.model_class.published==True )
-class VisualizationController( BaseController ):
- list_grid = VisualizationListGrid()
+
+class VisualizationController( BaseController, Sharable, UsesAnnotations, UsesVisualization ):
+ _user_list_grid = VisualizationListGrid()
+ _published_list_grid = VisualizationAllPublishedGrid()
+
@web.expose
- @web.require_login()
- def index( self, trans, *args, **kwargs ):
- return trans.fill_template( "panels.mako", grid=self.list_grid( trans, *args, **kwargs ), active_view='visualization', main_url=url_for( action='list' ) )
+ def list_published( self, trans, *args, **kwargs ):
+ grid = self._published_list_grid( trans, **kwargs )
+ if 'async' in kwargs:
+ return grid
+ else:
+ # Render grid wrapped in panels
+ return trans.fill_template( "visualization/list_published.mako", grid=grid )
+
+ @web.expose
+ @web.require_login("use Galaxy visualizations")
+ def list( self, trans, *args, **kwargs ):
+ # Handle operation
+ if 'operation' in kwargs and 'id' in kwargs:
+ session = trans.sa_session
+ operation = kwargs['operation'].lower()
+ ids = util.listify( kwargs['id'] )
+ for id in ids:
+ item = session.query( model.Visualization ).get( trans.security.decode_id( id ) )
+ if operation == "delete":
+ item.deleted = True
+ if operation == "share or publish":
+ return self.sharing( trans, **kwargs )
+ session.flush()
+
+ # Build list of visualizations shared with user.
+ shared_by_others = trans.sa_session \
+ .query( model.VisualizationUserShareAssociation ) \
+ .filter_by( user=trans.get_user() ) \
+ .join( model.Visualization.table ) \
+ .filter( model.Visualization.deleted == False ) \
+ .order_by( desc( model.Visualization.update_time ) ) \
+ .all()
+
+ return trans.fill_template( "visualization/list.mako", grid=self._user_list_grid( trans, *args, **kwargs ), shared_by_others=shared_by_others )
+
+ @web.expose
+ @web.require_login( "modify Galaxy visualizations" )
+ def set_slug_async( self, trans, id, new_slug ):
+ """ Set item slug asynchronously. """
+ visualization = self.get_visualization( trans, id )
+ if visualization:
+ visualization.slug = new_slug
+ trans.sa_session.flush()
+ return visualization.slug
+
+ @web.expose
+ @web.require_login( "share Galaxy visualizations" )
+ def sharing( self, trans, id, **kwargs ):
+ """ Handle visualization sharing. """
+
+ # Get session and visualization.
+ session = trans.sa_session
+ visualization = trans.sa_session.query( model.Visualization ).get( trans.security.decode_id( id ) )
+
+ # Do operation on visualization.
+ if 'make_accessible_via_link' in kwargs:
+ self._make_item_accessible( trans.sa_session, visualization )
+ elif 'make_accessible_and_publish' in kwargs:
+ self._make_item_accessible( trans.sa_session, visualization )
+ visualization.published = True
+ elif 'publish' in kwargs:
+ visualization.published = True
+ elif 'disable_link_access' in kwargs:
+ visualization.importable = False
+ elif 'unpublish' in kwargs:
+ visualization.published = False
+ elif 'disable_link_access_and_unpublish' in kwargs:
+ visualization.importable = visualization.published = False
+ elif 'unshare_user' in kwargs:
+ user = session.query( model.User ).get( trans.security.decode_id( kwargs['unshare_user' ] ) )
+ if not user:
+ error( "User not found for provided id" )
+ association = session.query( model.VisualizationUserShareAssociation ) \
+ .filter_by( user=user, visualization=visualization ).one()
+ session.delete( association )
+
+ session.flush()
+
+ return trans.fill_template( "/sharing_base.mako", item=visualization )
+
+ @web.expose
+ @web.require_login( "share Galaxy visualizations" )
+ def share( self, trans, id=None, email="", **kwd ):
+ """ Handle sharing a visualization with a particular user. """
+ msg = mtype = None
+ visualization = trans.sa_session.query( model.Visualization ).get( trans.security.decode_id( id ) )
+ if email:
+ other = trans.sa_session.query( model.User ) \
+ .filter( and_( model.User.table.c.email==email,
+ model.User.table.c.deleted==False ) ) \
+ .first()
+ if not other:
+ mtype = "error"
+ msg = ( "User '%s' does not exist" % email )
+ elif other == trans.get_user():
+ mtype = "error"
+ msg = ( "You cannot share a visualization with yourself" )
+ elif trans.sa_session.query( model.VisualizationUserShareAssociation ) \
+ .filter_by( user=other, visualization=visualization ).count() > 0:
+ mtype = "error"
+ msg = ( "Visualization already shared with '%s'" % email )
+ else:
+ share = model.VisualizationUserShareAssociation()
+ share.visualization = visualization
+ share.user = other
+ session = trans.sa_session
+ session.add( share )
+ self.create_item_slug( session, visualization )
+ session.flush()
+ trans.set_message( "Visualization '%s' shared with user '%s'" % ( visualization.title, other.email ) )
+ return trans.response.send_redirect( url_for( action='sharing', id=id ) )
+ return trans.fill_template( "/share_base.mako",
+ message = msg,
+ messagetype = mtype,
+ item=visualization,
+ email=email )
+
+
+ @web.expose
+ def display_by_username_and_slug( self, trans, username, slug ):
+ """ Display visualization based on a username and slug. """
+
+ # Get visualization.
+ session = trans.sa_session
+ user = session.query( model.User ).filter_by( username=username ).first()
+ visualization = trans.sa_session.query( model.Visualization ).filter_by( user=user, slug=slug, deleted=False ).first()
+ if visualization is None:
+ raise web.httpexceptions.HTTPNotFound()
+ # Security check raises error if user cannot access visualization.
+ self.security_check( trans.get_user(), visualization, False, True)
+ return trans.fill_template_mako( "visualization/display.mako", item=visualization, item_data=None, content_only=True )
+
+ @web.expose
+ @web.json
+ @web.require_login( "get item name and link" )
+ def get_name_and_link_async( self, trans, id=None ):
+ """ Returns visualization's name and link. """
+ visualization = self.get_visualization( trans, id )
+
+ if self.create_item_slug( trans.sa_session, visualization ):
+ trans.sa_session.flush()
+ return_dict = { "name" : visualization.title, "link" : url_for( action="display_by_username_and_slug", username=visualization.user.username, slug=visualization.slug ) }
+ return return_dict
+
+ @web.expose
+ @web.require_login("get item content asynchronously")
+ def get_item_content_async( self, trans, id ):
+ """ Returns item content in HTML format. """
+ pass
+
+ @web.expose
+ @web.require_login( "create visualizations" )
+ def create( self, trans, visualization_title="", visualization_slug="", visualization_annotation="" ):
+ """
+ Create a new visualization
+ """
+ user = trans.get_user()
+ visualization_title_err = visualization_slug_err = visualization_annotation_err = ""
+ if trans.request.method == "POST":
+ if not visualization_title:
+ visualization_title_err = "visualization name is required"
+ elif not visualization_slug:
+ visualization_slug_err = "visualization id is required"
+ elif not VALID_SLUG_RE.match( visualization_slug ):
+ visualization_slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
+ elif trans.sa_session.query( model.Visualization ).filter_by( user=user, slug=visualization_slug, deleted=False ).first():
+ visualization_slug_err = "visualization id must be unique"
+ else:
+ # Create the new stored visualization
+ visualization = model.Visualization()
+ visualization.title = visualization_title
+ visualization.slug = visualization_slug
+ visualization_annotation = sanitize_html( visualization_annotation, 'utf-8', 'text/html' )
+ self.add_item_annotation( trans, visualization, visualization_annotation )
+ visualization.user = user
+ # And the first (empty) visualization revision
+ visualization_revision = model.VisualizationRevision()
+ visualization_revision.title = visualization_title
+ visualization_revision.visualization = visualization
+ visualization.latest_revision = visualization_revision
+ visualization_revision.content = ""
+ # Persist
+ session = trans.sa_session
+ session.add( visualization )
+ session.flush()
+ # Display the management visualization
+ ## trans.set_message( "Visualization '%s' created" % visualization.title )
+ return trans.response.send_redirect( web.url_for( action='list' ) )
+ return trans.show_form(
+ web.FormBuilder( web.url_for(), "Create new visualization", submit_text="Submit" )
+ .add_text( "visualization_title", "Visualization title", value=visualization_title, error=visualization_title_err )
+ .add_text( "visualization_slug", "Visualization identifier", value=visualization_slug, error=visualization_slug_err,
+ help="""A unique identifier that will be used for
+ public links to this visualization. A default is generated
+ from the visualization title, but can be edited. This field
+ must contain only lowercase letters, numbers, and
+ the '-' character.""" )
+ .add_text( "visualization_annotation", "Visualization annotation", value=visualization_annotation, error=visualization_annotation_err,
+ help="A description of the visualization; annotation is shown alongside published visualizations."),
+ template="visualization/create.mako" )
+
+ @web.expose
+ @web.require_login( "edit visualizations" )
+ def edit( self, trans, id, visualization_title="", visualization_slug="", visualization_annotation="" ):
+ """
+ Edit a visualization's attributes.
+ """
+ encoded_id = id
+ id = trans.security.decode_id( id )
+ session = trans.sa_session
+ visualization = session.query( model.Visualization ).get( id )
+ user = trans.user
+ assert visualization.user == user
+ visualization_title_err = visualization_slug_err = visualization_annotation_err = ""
+ if trans.request.method == "POST":
+ if not visualization_title:
+ visualization_title_err = "Visualization name is required"
+ elif not visualization_slug:
+ visualization_slug_err = "Visualization id is required"
+ elif not VALID_SLUG_RE.match( visualization_slug ):
+ visualization_slug_err = "Visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
+ elif visualization_slug != visualization.slug and trans.sa_session.query( model.Visualization ).filter_by( user=user, slug=visualization_slug, deleted=False ).first():
+ visualization_slug_err = "Visualization id must be unique"
+ elif not visualization_annotation:
+ visualization_annotation_err = "Visualization annotation is required"
+ else:
+ visualization.title = visualization_title
+ visualization.slug = visualization_slug
+ visualization_annotation = sanitize_html( visualization_annotation, 'utf-8', 'text/html' )
+ self.add_item_annotation( trans, visualization, visualization_annotation )
+ session.flush()
+ # Redirect to visualization list.
+ return trans.response.send_redirect( web.url_for( action='list' ) )
+ else:
+ visualization_title = visualization.title
+ # Create slug if it's not already set.
+ if visualization.slug is None:
+ self.create_item_slug( trans.sa_session, visualization )
+ visualization_slug = visualization.slug
+ visualization_annotation = self.get_item_annotation_str( trans.sa_session, trans.get_user(), visualization )
+ if not visualization_annotation:
+ visualization_annotation = ""
+ return trans.show_form(
+ web.FormBuilder( web.url_for( id=encoded_id ), "Edit visualization attributes", submit_text="Submit" )
+ .add_text( "visualization_title", "Visualization title", value=visualization_title, error=visualization_title_err )
+ .add_text( "visualization_slug", "Visualization identifier", value=visualization_slug, error=visualization_slug_err,
+ help="""A unique identifier that will be used for
+ public links to this visualization. A default is generated
+ from the visualization title, but can be edited. This field
+ must contain only lowercase letters, numbers, and
+ the '-' character.""" )
+ .add_text( "visualization_annotation", "Visualization annotation", value=visualization_annotation, error=visualization_annotation_err,
+ help="A description of the visualization; annotation is shown alongside published visualizations."),
+ template="visualization/create.mako" )
# @web.expose
# @web.require_login()
@@ -42,6 +332,6 @@
# # Build grid
# grid = self.list( trans, *args, **kwargs )
# # Render grid wrapped in panels
- # return trans.fill_template( "page/index.mako", grid=grid )
+ # return trans.fill_template( "visualization/index.mako", grid=grid )
\ No newline at end of file
diff -r 582fd1777763 -r c3eccab29814 lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py Thu Mar 11 15:54:13 2010 -0500
+++ b/lib/galaxy/web/controllers/workflow.py Fri Mar 12 09:37:22 2010 -0500
@@ -123,7 +123,7 @@
# Legacy issue: all shared workflows must have slugs.
slug_set = False
for workflow_assoc in shared_by_others:
- slug_set = self.set_item_slug( trans.sa_session, workflow_assoc.stored_workflow )
+ slug_set = self.create_item_slug( trans.sa_session, workflow_assoc.stored_workflow )
if slug_set:
trans.sa_session.flush()
@@ -224,7 +224,7 @@
share.user = other
session = trans.sa_session
session.add( share )
- self.set_item_slug( session, stored )
+ self.create_item_slug( session, stored )
session.flush()
trans.set_message( "Workflow '%s' shared with user '%s'" % ( stored.name, other.email ) )
return trans.response.send_redirect( url_for( controller='workflow', action='sharing', id=id ) )
@@ -401,7 +401,7 @@
""" Returns workflow's name and link. """
stored = self.get_stored_workflow( trans, id )
- if self.set_item_slug( trans.sa_session, stored ):
+ if self.create_item_slug( trans.sa_session, stored ):
trans.sa_session.flush()
return_dict = { "name" : stored.name, "link" : url_for( action="display_by_username_and_slug", username=stored.user.username, slug=stored.slug ) }
return return_dict
diff -r 582fd1777763 -r c3eccab29814 templates/base_panels.mako
--- a/templates/base_panels.mako Thu Mar 11 15:54:13 2010 -0500
+++ b/templates/base_panels.mako Fri Mar 12 09:37:22 2010 -0500
@@ -207,7 +207,7 @@
<ul>
<li><a href="${h.url_for( controller='/tracks', action='index' )}">Build track browser</a></li>
<li><hr style="color: inherit; background-color: gray"/></li>
- <li><a href="${h.url_for( controller='/visualization', action='index' )}">Stored visualizations</a></li>
+ <li><a href="${h.url_for( controller='/visualization', action='list' )}">Stored visualizations</a></li>
</ul>
</div>
</td>
diff -r 582fd1777763 -r c3eccab29814 templates/display_common.mako
--- a/templates/display_common.mako Thu Mar 11 15:54:13 2010 -0500
+++ b/templates/display_common.mako Fri Mar 12 09:37:22 2010 -0500
@@ -18,8 +18,11 @@
<%def name="get_item_name( item )">
<%
+ # Start with exceptions, end with default.
if type( item ) is model.Page:
return item.title
+ elif type( item ) is model.Visualization:
+ return item.title
if hasattr( item, 'get_display_name'):
return item.get_display_name()
return item.name
@@ -29,7 +32,7 @@
## Get plural display name for a class.
<%def name="get_class_plural_display_name( a_class )">
<%
- ## Start with exceptions, end with default.
+ # Start with exceptions, end with default.
if a_class is model.History:
return "Histories"
elif a_class is model.FormDefinitionCurrent:
@@ -89,6 +92,8 @@
return "dataset"
elif isinstance( item, model.Page ):
return "page"
+ elif isinstance( item, model.Visualization ):
+ return "visualization"
%>
</%def>
diff -r 582fd1777763 -r c3eccab29814 templates/page/create.mako
--- a/templates/page/create.mako Thu Mar 11 15:54:13 2010 -0500
+++ b/templates/page/create.mako Fri Mar 12 09:37:22 2010 -0500
@@ -8,8 +8,7 @@
var page_slug = $("input[name=page_slug]");
page_name.keyup(function(){
page_slug.val( $(this).val().replace(/\s+/g,'-').replace(/[^a-zA-Z0-9\-]/g,'').toLowerCase() )
- });
-
+ });
})
</script>
</%def>
\ No newline at end of file
diff -r 582fd1777763 -r c3eccab29814 templates/panels.mako
--- a/templates/panels.mako Thu Mar 11 15:54:13 2010 -0500
+++ b/templates/panels.mako Fri Mar 12 09:37:22 2010 -0500
@@ -10,11 +10,9 @@
</%def>
<%def name="center_panel()">
-
<div style="overflow: auto; height: 100%;">
<div style="padding: 10px">
${grid}
</div>
</div>
-
</%def>
diff -r 582fd1777763 -r c3eccab29814 templates/visualization/create.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/visualization/create.mako Fri Mar 12 09:37:22 2010 -0500
@@ -0,0 +1,14 @@
+<%inherit file="/form.mako"/>
+
+<%def name="javascripts()">
+${parent.javascripts()}
+<script type="text/javascript">
+$(function(){
+ var visualization_name = $("input[name=visualization_title]");
+ var visualization_slug = $("input[name=visualization_slug]");
+ visualization_name.keyup(function(){
+ visualization_slug.val( $(this).val().replace(/\s+/g,'-').replace(/[^a-zA-Z0-9\-]/g,'').toLowerCase() )
+ });
+})
+</script>
+</%def>
\ No newline at end of file
diff -r 582fd1777763 -r c3eccab29814 templates/visualization/display.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/visualization/display.mako Fri Mar 12 09:37:22 2010 -0500
@@ -0,0 +1,19 @@
+<%inherit file="/display_base.mako"/>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ## Need visualization JS.
+</%def>
+
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ## Need visualization CSS.
+</%def>
+
+<%def name="render_item_links( visualization )">
+ ## TODO
+</%def>
+
+<%def name="render_item( visualization, visualization_data )">
+ ## TODO
+</%def>
\ No newline at end of file
diff -r 582fd1777763 -r c3eccab29814 templates/visualization/list.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/visualization/list.mako Fri Mar 12 09:37:22 2010 -0500
@@ -0,0 +1,52 @@
+<%inherit file="/base_panels.mako"/>
+
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.active_view="visualization"
+ self.message_box_visible=False
+%>
+</%def>
+
+<%def name="center_panel()">
+
+ <div style="overflow: auto; height: 100%;">
+ <div class="page-container" style="padding: 10px;">
+ ${grid}
+
+ <br><br>
+ <h2>Visualizations shared with you by others</h2>
+
+ %if shared_by_others:
+ <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+ <tr class="header">
+ <th>Title</th>
+ <th>Owner</th>
+ <th></th>
+ </tr>
+ %for i, association in enumerate( shared_by_others ):
+ <% visualization = association.visualization %>
+ <tr>
+ <td>
+ <a class="menubutton" id="shared-${i}-popup" href="${h.url_for( action='display_by_username_and_slug', username=visualization.user.username, slug=visualization.slug)}">${visualization.title}</a>
+ </td>
+ <td>${visualization.user.username}</td>
+ <td>
+ <div popupmenu="shared-${i}-popup">
+ <a class="action-button" href="${h.url_for( action='display_by_username_and_slug', username=visualization.user.username, slug=visualization.slug)}" target="_top">View</a>
+ </div>
+ </td>
+ </tr>
+ %endfor
+ </table>
+ %else:
+
+ No visualizations have been shared with you.
+
+ %endif
+
+ </div>
+ </div>
+
+</%def>
diff -r 582fd1777763 -r c3eccab29814 templates/visualization/list_published.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/visualization/list_published.mako Fri Mar 12 09:37:22 2010 -0500
@@ -0,0 +1,36 @@
+<%inherit file="/base_panels.mako"/>
+
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.active_view="page"
+ self.message_box_visible=False
+%>
+</%def>
+
+<%def name="title()">
+ Galaxy :: Published Visualizations
+</%def>
+
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ <style>
+ .grid td {
+ min-width: 100px;
+ }
+ </style>
+</%def>
+
+<%def name="center_panel()">
+
+ ## <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${h.url_for( controller="page", action="list" )}"> </iframe>
+
+ <div style="overflow: auto; height: 100%;">
+ <div class="page-container" style="padding: 10px;">
+ ${unicode( grid, 'utf-8' )}
+ </div>
+ </div>
+
+
+</%def>
18 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/582fd1777763
changeset: 3522:582fd1777763
user: Kelly Vincent <kpvincent(a)bx.psu.edu>
date: Thu Mar 11 15:54:13 2010 -0500
description:
Modified BWA output files so last two tests will pass
diffstat:
test-data/bwa_wrapper_out2.sam | 2 +-
test-data/bwa_wrapper_out3.sam | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diffs (14 lines):
diff -r f13d85256124 -r 582fd1777763 test-data/bwa_wrapper_out2.sam
--- a/test-data/bwa_wrapper_out2.sam Thu Mar 11 14:35:36 2010 -0500
+++ b/test-data/bwa_wrapper_out2.sam Thu Mar 11 15:54:13 2010 -0500
@@ -1,1 +1,1 @@
-081017-and-081020:1:1:1715:1759 16 phiX 322 25 36M * 0 0 GATATTTTAAAGGAGCGTGGATTACTATCTGAGTCC B&&I13A$G$*%$IIIIIII9(.+5$IIIIIII#II XT:A:U NM:i:2 X0:i:1 XM:i:2 XO:i:0 XG:i:0 MD:Z:2C8A24
+081017-and-081020:1:1:1715:1759 16 phiX 322 25 36M * 0 0 GATATTTTAAAGGAGCGTGGATTACTATCTGAGTCC B&&I13A$G$*%$IIIIIII9(.+5$IIIIIII#II XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:2C8A24
diff -r f13d85256124 -r 582fd1777763 test-data/bwa_wrapper_out3.sam
--- a/test-data/bwa_wrapper_out3.sam Thu Mar 11 14:35:36 2010 -0500
+++ b/test-data/bwa_wrapper_out3.sam Thu Mar 11 15:54:13 2010 -0500
@@ -1,2 +1,2 @@
-081017-and-081020:1:1:1715:1759 113 PHIX174 322 25 18M = 340 18 GATATTTTAAAGGAGCGT B&&I13A$G$*%$IIIII XT:A:U NM:i:2 SM:i:25 AM:i:25 X0:i:1 XM:i:2 XO:i:0 XG:i:0 MD:Z:2C8A6
-081017-and-081020:1:1:1715:1759 177 PHIX174 340 37 18M = 322 -18 GGATTACTATCTGAGTCC II9(.+5$IIIIIII#II XT:A:U NM:i:0 SM:i:37 AM:i:25 X0:i:1 XM:i:0 XO:i:0 XG:i:0 MD:Z:18
+081017-and-081020:1:1:1715:1759 113 PHIX174 322 25 18M = 340 18 GATATTTTAAAGGAGCGT B&&I13A$G$*%$IIIII XT:A:U NM:i:2 SM:i:25 AM:i:25 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:2C8A6
+081017-and-081020:1:1:1715:1759 177 PHIX174 340 37 18M = 322 -18 GGATTACTATCTGAGTCC II9(.+5$IIIIIII#II XT:A:U NM:i:0 SM:i:37 AM:i:25 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:18
18 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/f13d85256124
changeset: 3521:f13d85256124
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Thu Mar 11 14:35:36 2010 -0500
description:
Display Application framework enhancements.
Add the ability for display applications to be populated dynamically based upon the content of (e.g. tabular) files.
Display application links can be filtered by various attributes, including e.g. dataset dbkey matching from field in a file or an attribute matching a Galaxy application configuration setting.
Param and Data URL values can now be generated dynamically, allowing, e.g., unique base filenames to be created and used.
See updated xml configurations in /display_applications/ for examples of syntax.
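For orientation, a condensed sketch of the new syntax follows, trimmed from the ucsc/bam.xml configuration included in the diff below; the full file also defines a template param for the track line and a second dynamic_links block driven by the bx sites file.

<display id="ucsc_bam" version="1.0.0" name="display at UCSC">
  <!-- One link is generated per non-comment line of the build sites file -->
  <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
    <!-- Columns of each line become named parameters; 'builds' is split into a list -->
    <dynamic_param name="site_id" value="0"/>
    <dynamic_param name="ucsc_link" value="1"/>
    <dynamic_param name="builds" value="2" split="True" separator="," />
    <!-- Links are filtered by a Galaxy config setting and by the dataset's dbkey -->
    <filter>${site_id in $APP.config.ucsc_display_sites}</filter>
    <filter>${dataset.dbkey in $builds}</filter>
    <!-- url and params are defined as usual; DATASET_HASH yields a unique base filename -->
    <url>${ucsc_link}db=${qp($bam_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
    <param type="data" name="bam_file" url="galaxy_${DATASET_HASH}.bam" strip_https="True" />
  </dynamic_links>
</display>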
diffstat:
datatypes_conf.xml.sample | 9 +-
display_applications/ucsc/bam.xml | 34 +++++-
display_applications/ucsc/interval_as_bed.xml | 63 +++++++++++-
lib/galaxy/datatypes/data.py | 9 +
lib/galaxy/datatypes/display_applications/application.py | 78 ++++++++++++++-
lib/galaxy/datatypes/display_applications/parameters.py | 13 +-
lib/galaxy/datatypes/registry.py | 34 ++++--
lib/galaxy/model/__init__.py | 3 +
templates/root/history_common.mako | 2 +-
tool-data/shared/bx/bx_build_sites.txt | 2 +-
10 files changed, 205 insertions(+), 42 deletions(-)
diffs (468 lines):
diff -r 262b16c8e277 -r f13d85256124 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Thu Mar 11 13:51:53 2010 -0500
+++ b/datatypes_conf.xml.sample Thu Mar 11 14:35:36 2010 -0500
@@ -15,10 +15,7 @@
<!-- <display file="ucsc/interval_as_bed.xml" /> -->
<display file="genetrack.xml" />
</datatype>
- <datatype extension="bedstrict" type="galaxy.datatypes.interval:BedStrict">
- <display file="ucsc/interval_as_bed.xml" />
- <display file="genetrack.xml" />
- </datatype>
+ <datatype extension="bedstrict" type="galaxy.datatypes.interval:BedStrict" />
<datatype extension="binseq.zip" type="galaxy.datatypes.binary:Binseq" mimetype="application/zip" display_in_upload="true"/>
<datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true">
<!-- no converters yet -->
@@ -51,8 +48,8 @@
<converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
<converter file="interval_to_bedstrict_converter.xml" target_datatype="bedstrict"/>
<indexer file="interval_awk.xml" />
- <!-- <display file="ucsc/interval_as_bed.xml" /> -->
- <display file="genetrack.xml" />
+ <!-- <display file="ucsc/interval_as_bed.xml" inherit="True" /> -->
+ <display file="genetrack.xml" inherit="True"/>
</datatype>
<datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/>
<datatype extension="laj" type="galaxy.datatypes.images:Laj"/>
diff -r 262b16c8e277 -r f13d85256124 display_applications/ucsc/bam.xml
--- a/display_applications/ucsc/bam.xml Thu Mar 11 13:51:53 2010 -0500
+++ b/display_applications/ucsc/bam.xml Thu Mar 11 14:35:36 2010 -0500
@@ -1,8 +1,32 @@
<display id="ucsc_bam" version="1.0.0" name="display at UCSC">
- <link id="main" name="main">
- <url>http://genome.ucsc.edu/cgi-bin/hgTracks?db=${qp($bam_file.dbkey)}&hgt.c…</url>
- <param type="data" name="bam_file" url="galaxy.bam" strip_https="True" />
- <param type="data" name="bai_file" url="galaxy.bam.bai" metadata="bam_index" strip_https="True" /><!-- UCSC expects index file to exist as bam_file_name.bai -->
+ <!-- Load links from file: one line to one link -->
+ <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <!-- Define parameters by column from file, allow splitting on builds -->
+ <dynamic_param name="site_id" value="0"/>
+ <dynamic_param name="ucsc_link" value="1"/>
+ <dynamic_param name="builds" value="2" split="True" separator="," />
+ <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+ <filter>${site_id in $APP.config.ucsc_display_sites}</filter>
+ <filter>${dataset.dbkey in $builds}</filter>
+ <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+ <url>${ucsc_link}db=${qp($bam_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
+ <param type="data" name="bam_file" url="galaxy_${DATASET_HASH}.bam" strip_https="True" />
+ <param type="data" name="bai_file" url="galaxy_${DATASET_HASH}.bam.bai" metadata="bam_index" strip_https="True" /><!-- UCSC expects index file to exist as bam_file_name.bai -->
<param type="template" name="track" viewable="True" strip_https="True">track type=bam name="${bam_file.name}" bigDataUrl=${bam_file.url} db=${bam_file.dbkey}</param>
- </link>
+ </dynamic_links>
+ <!-- Load links from file: one line to one link -->
+ <dynamic_links from_file="tool-data/shared/bx/bx_build_sites.txt" skip_startswith="#" id="3" name="3">
+ <!-- Define parameters by column from file, allow splitting on builds -->
+ <dynamic_param name="site_id" value="0"/>
+ <dynamic_param name="ucsc_link" value="1"/>
+ <dynamic_param name="builds" value="2" split="True" separator="," />
+ <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+ <filter>${site_id in $APP.config.bx_display_sites}</filter>
+ <filter>${dataset.dbkey in $builds}</filter>
+ <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+ <url>${ucsc_link}db=${qp($bam_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
+ <param type="data" name="bam_file" url="galaxy_${DATASET_HASH}.bam" strip_https="True" />
+ <param type="data" name="bai_file" url="galaxy_${DATASET_HASH}.bam.bai" metadata="bam_index" strip_https="True" /><!-- UCSC expects index file to exist as bam_file_name.bai -->
+ <param type="template" name="track" viewable="True" strip_https="True">track type=bam name="${bam_file.name}" bigDataUrl=${bam_file.url} db=${bam_file.dbkey}</param>
+ </dynamic_links>
</display>
diff -r 262b16c8e277 -r f13d85256124 display_applications/ucsc/interval_as_bed.xml
--- a/display_applications/ucsc/interval_as_bed.xml Thu Mar 11 13:51:53 2010 -0500
+++ b/display_applications/ucsc/interval_as_bed.xml Thu Mar 11 14:35:36 2010 -0500
@@ -1,7 +1,16 @@
-<display id="ucsc_interval_as_bed" version="1.0.0" name="display at UCSC">
- <link id="main" name="main">
- <url>http://genome.ucsc.edu/cgi-bin/hgTracks?db=${qp($bed_file.dbkey)}&posit…</url>
- <param type="data" name="bed_file" url="galaxy.bed" format="bedstrict"/> <!-- Galaxy allows BED files to contain non-standard fields beyond the first 3 columns, UCSC does not: force use of converter which will make strict BED6+ file -->
+<display id="ucsc_interval_as_bed" version="1.0.0" name="display at UCSC" inherit="True">
+ <!-- Load links from file: one line to one link -->
+ <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <!-- Define parameters by column from file, allow splitting on builds -->
+ <dynamic_param name="site_id" value="0"/>
+ <dynamic_param name="ucsc_link" value="1"/>
+ <dynamic_param name="builds" value="2" split="True" separator="," />
+ <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+ <filter>${site_id in $APP.config.ucsc_display_sites}</filter>
+ <filter>${dataset.dbkey in $builds}</filter>
+ <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+ <url>${ucsc_link}db=${qp($bed_file.dbkey)}&position=${position.qp}&hgt.customText=${bed_file.qp}</url>
+ <param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict"/> <!-- Galaxy allows BED files to contain non-standard fields beyond the first 3 columns, UCSC does not: force use of converter which will make strict BED6+ file -->
<param type="template" name="position" strip="True" >
#set line_count = 0
#set chrom = None
@@ -33,5 +42,49 @@
:-
#end if
</param>
- </link>
+ </dynamic_links>
+ <!-- Load links from file: one line to one link -->
+ <dynamic_links from_file="tool-data/shared/bx/bx_build_sites.txt" skip_startswith="#" id="3" name="3">
+ <!-- Define parameters by column from file, allow splitting on builds -->
+ <dynamic_param name="site_id" value="0"/>
+ <dynamic_param name="ucsc_link" value="1"/>
+ <dynamic_param name="builds" value="2" split="True" separator="," />
+ <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+ <filter>${site_id in $APP.config.bx_display_sites}</filter>
+ <filter>${dataset.dbkey in $builds}</filter>
+ <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+ <url>${ucsc_link}db=${qp($bed_file.dbkey)}&position=${position.qp}&hgt.customText=${bed_file.qp}</url>
+ <param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict"/> <!-- Galaxy allows BED files to contain non-standard fields beyond the first 3 columns, UCSC does not: force use of converter which will make strict BED6+ file -->
+ <param type="template" name="position" strip="True" >
+#set line_count = 0
+#set chrom = None
+#set start = float( 'inf' )
+#set end = 0
+#for $line in open( $bed_file.file_name ):
+ #if $line_count > 10: ##10 max lines to check for view port
+ #break
+ #end if
+ #if not $line.startswith( "#" ):
+ #set $fields = $line.split( "\t" )
+ #try:
+ #if len( $fields ) >= max( $bed_file.metadata.startCol, $bed_file.metadata.endCol, $bed_file.metadata.chromCol ):
+ #if $chrom is None or $fields[ $bed_file.metadata.chromCol - 1 ] == $chrom:
+ #set chrom = $fields[ $bed_file.metadata.chromCol - 1 ]
+ #set start = min( $start, int( $fields[ $bed_file.metadata.startCol - 1 ] ) )
+ #set end = max( $end, int( $fields[ $bed_file.metadata.endCol - 1 ] ) )
+ #end if
+ #end if
+ #except:
+ #pass
+ #end try
+ #end if
+ #set line_count += 1
+#end for
+#if $chrom is not None:
+${chrom}:${start}-${end + 1}
+#else:
+:-
+#end if
+ </param>
+ </dynamic_links>
</display>
diff -r 262b16c8e277 -r f13d85256124 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Thu Mar 11 13:51:53 2010 -0500
+++ b/lib/galaxy/datatypes/data.py Thu Mar 11 14:35:36 2010 -0500
@@ -205,6 +205,15 @@
"""New style display applications"""
assert display_application.id not in self.display_applications, 'Attempted to add a display application twice'
self.display_applications[ display_application.id ] = display_application
+ def get_display_application( self, key, default = None ):
+ return self.display_applications.get( key, default )
+ def get_display_applications_by_dataset( self, dataset, trans ):
+ rval = odict()
+ for key, value in self.display_applications.iteritems():
+ value = value.filter_by_dataset( dataset, trans )
+ if value.links:
+ rval[key] = value
+ return rval
def get_display_types(self):
"""Returns display types available"""
return self.supported_display_apps.keys()
diff -r 262b16c8e277 -r f13d85256124 lib/galaxy/datatypes/display_applications/application.py
--- a/lib/galaxy/datatypes/display_applications/application.py Thu Mar 11 13:51:53 2010 -0500
+++ b/lib/galaxy/datatypes/display_applications/application.py Thu Mar 11 14:35:36 2010 -0500
@@ -1,24 +1,27 @@
#Contains objects for using external display applications
-from galaxy.util import parse_xml
+from galaxy.util import parse_xml, string_as_bool
from galaxy.util.odict import odict
from galaxy.util.template import fill_template
from galaxy.web import url_for
from parameters import DisplayApplicationParameter, DEFAULT_DATASET_NAME
from urllib import quote_plus
from util import encode_dataset_user
+from copy import deepcopy
#Any basic functions that we want to provide as a basic part of parameter dict should be added to this dict
BASE_PARAMS = { 'qp': quote_plus, 'url_for':url_for } #url_for has route memory...
class DisplayApplicationLink( object ):
@classmethod
- def from_elem( cls, elem, display_application ):
+ def from_elem( cls, elem, display_application, other_values = None ):
rval = DisplayApplicationLink( display_application )
rval.id = elem.get( 'id', None )
assert rval.id, 'Link elements require a id.'
rval.name = elem.get( 'name', rval.id )
rval.url = elem.find( 'url' )
assert rval.url is not None, 'A url element must be provided for link elements.'
+ rval.other_values = other_values
+ rval.filters = elem.findall( 'filter' )
for param_elem in elem.findall( 'param' ):
param = DisplayApplicationParameter.from_elem( param_elem, rval )
assert param, 'Unable to load parameter from element: %s' % param_elem
@@ -36,13 +39,19 @@
dataset_hash, user_hash = encode_dataset_user( trans, data, None )
return url_for( controller = '/dataset', action = "display_application", dataset_id = dataset_hash, user_id = user_hash, app_name = self.display_application.id, link_name = self.id, app_action = None )
def get_inital_values( self, data, trans ):
- rval = odict( { 'BASE_URL': trans.request.base, 'APP': trans.app } ) #trans automatically appears as a response, need to add properties of trans that we want here
+ if self.other_values:
+ rval = odict( self.other_values )
+ else:
+ rval = odict()
+ rval.update( { 'BASE_URL': trans.request.base, 'APP': trans.app } ) #trans automatically appears as a response, need to add properties of trans that we want here
for key, value in BASE_PARAMS.iteritems(): #add helper functions/variables
rval[ key ] = value
rval[ DEFAULT_DATASET_NAME ] = data #always have the display dataset name available
return rval
def build_parameter_dict( self, data, dataset_hash, user_hash, trans ):
other_values = self.get_inital_values( data, trans )
+ other_values[ 'DATASET_HASH' ] = dataset_hash
+ other_values[ 'USER_HASH' ] = user_hash
for name, param in self.parameters.iteritems():
assert name not in other_values, "The display parameter '%s' has been defined more than once." % name
if param.ready( other_values ):
@@ -51,6 +60,51 @@
other_values[ name ] = None
return False, other_values #need to stop here, next params may need this value
return True, other_values #we built other_values, lets provide it as well, or else we will likely regenerate it in the next step
+ def filter_by_dataset( self, data, trans ):
+ context = self.get_inital_values( data, trans )
+ for filter_elem in self.filters:
+ if fill_template( filter_elem.text, context = context ) != filter_elem.get( 'value', 'True' ):
+ return False
+ return True
+
+class DynamicDisplayApplicationBuilder( object ):
+ @classmethod
+ def __init__( self, elem, display_application ):
+ rval = []
+ filename = elem.get( 'from_file', None )
+ assert filename is not None, 'Filename and id attributes required for dynamic_links'
+ skip_startswith = elem.get( 'skip_startswith', None )
+ separator = elem.get( 'separator', '\t' )
+ id_col = int( elem.get( 'id', None ) )
+ name_col = int( elem.get( 'name', id_col ) )
+ dynamic_params = {}
+ max_col = max( id_col, name_col )
+ for dynamic_param in elem.findall( 'dynamic_param' ):
+ name = dynamic_param.get( 'name' )
+ value = int( dynamic_param.get( 'value' ) )
+ split = string_as_bool( dynamic_param.get( 'split', False ) )
+ param_separator = dynamic_param.get( 'separator', ',' )
+ max_col = max( max_col, value )
+ dynamic_params[name] = { 'column': value, 'split': split, 'separator': param_separator }
+ for line in open( filename ):
+ if not skip_startswith or not line.startswith( skip_startswith ):
+ line = line.rstrip( '\n\r' )
+ fields = line.split( separator )
+ if len( fields ) >= max_col:
+ new_elem = deepcopy( elem )
+ new_elem.set( 'id', fields[id_col] )
+ new_elem.set( 'name', fields[name_col] )
+ dynamic_values = {}
+ for key, attributes in dynamic_params.iteritems():
+ value = fields[ attributes[ 'column' ] ]
+ if attributes['split']:
+ value = value.split( attributes['separator'] )
+ dynamic_values[key] = value
+ #now populate
+ rval.append( DisplayApplicationLink.from_elem( new_elem, display_application, other_values = dynamic_values ) )
+ self.links = rval
+ def __iter__( self ):
+ return iter( self.links )
class PopulatedDisplayApplicationLink( object ):
def __init__( self, display_application_link, data, dataset_hash, user_hash, trans ):
@@ -84,9 +138,11 @@
def display_url( self ):
assert self.display_ready(), 'Display is not yet ready, cannot generate display link'
return fill_template( self.link.url.text, context = self.parameters )
- def get_param_name_by_url( self, name ):
- assert name in self.link.url_param_name_map, "Unknown URL parameter name provided: %s" % name
- return self.link.url_param_name_map[ name ]
+ def get_param_name_by_url( self, url ):
+ for name, parameter in self.link.parameters.iteritems():
+ if parameter.build_url( self.parameters ) == url:
+ return name
+ raise ValueError( "Unknown URL parameter name provided: %s" % url )
class DisplayApplication( object ):
@classmethod
@@ -103,6 +159,9 @@
link = DisplayApplicationLink.from_elem( link_elem, rval )
if link:
rval.links[ link.id ] = link
+ for dynamic_links in elem.findall( 'dynamic_links' ):
+ for link in DynamicDisplayApplicationBuilder( dynamic_links, rval ):
+ rval.links[ link.id ] = link
return rval
def __init__( self, display_id, name, datatypes_registry, version = None ):
self.id = display_id
@@ -115,4 +174,9 @@
def get_link( self, link_name, data, dataset_hash, user_hash, trans ):
#returns a link object with data knowledge to generate links
return PopulatedDisplayApplicationLink( self.links[ link_name ], data, dataset_hash, user_hash, trans )
-
+ def filter_by_dataset( self, data, trans ):
+ filtered = DisplayApplication( self.id, self.name, self.datatypes_registry, version = self.version )
+ for link_name, link_value in self.links.iteritems():
+ if link_value.filter_by_dataset( data, trans ):
+ filtered.links[link_name] = link_value
+ return filtered
diff -r 262b16c8e277 -r f13d85256124 lib/galaxy/datatypes/display_applications/parameters.py
--- a/lib/galaxy/datatypes/display_applications/parameters.py Thu Mar 11 13:51:53 2010 -0500
+++ b/lib/galaxy/datatypes/display_applications/parameters.py Thu Mar 11 14:35:36 2010 -0500
@@ -35,6 +35,8 @@
return True
def is_preparing( self, other_values ):
return False
+ def build_url( self, other_values ):
+ return fill_template( self.url, context = other_values )
class DisplayApplicationDataParameter( DisplayApplicationParameter ):
""" Parameter that returns a file_name containing the requested content """
@@ -141,15 +143,16 @@
self.trans = trans
self._dataset_hash = dataset_hash
self._user_hash = user_hash
+ self._url = self.parameter.build_url( self.other_values )
def __str__( self ):
return str( self.value )
def mime_type( self ):
if self.parameter.mime_type is not None:
return self.parameter.mime_type
if self.parameter.guess_mime_type:
- mime, encoding = mimetypes.guess_type( self.parameter.url )
+ mime, encoding = mimetypes.guess_type( self._url )
if not mime:
- mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( self.parameter.url )[ -1 ], None )
+ mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( self._url )[ -1 ], None )
if mime:
return mime
return 'text/plain'
@@ -158,7 +161,7 @@
base_url = self.trans.request.base
if self.parameter.strip_https and base_url[ : 5].lower() == 'https':
base_url = "http%s" % base_url[ 5: ]
- return "%s%s" % ( base_url, url_for( controller = '/dataset', action = "display_application", dataset_id = self._dataset_hash, user_id = self._user_hash, app_name = self.parameter.link.display_application.id, link_name = self.parameter.link.id, app_action = self.action_name, action_param = self.parameter.url ) )
+ return "%s%s" % ( base_url, url_for( controller = '/dataset', action = "display_application", dataset_id = self._dataset_hash, user_id = self._user_hash, app_name = self.parameter.link.display_application.id, link_name = self.parameter.link.id, app_action = self.action_name, action_param = self._url ) )
@property
def action_name( self ):
return self.ACTION_NAME
@@ -178,9 +181,9 @@
if self.parameter.mime_type is not None:
return self.parameter.mime_type
if self.parameter.guess_mime_type:
- mime, encoding = mimetypes.guess_type( self.parameter.url )
+ mime, encoding = mimetypes.guess_type( self._url )
if not mime:
- mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( self.parameter.url )[ -1 ], None )
+ mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( self._url )[ -1 ], None )
if mime:
return mime
return self.other_values[ DEFAULT_DATASET_NAME ].get_mime()
diff -r 262b16c8e277 -r f13d85256124 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Thu Mar 11 13:51:53 2010 -0500
+++ b/lib/galaxy/datatypes/registry.py Thu Mar 11 14:35:36 2010 -0500
@@ -25,6 +25,7 @@
self.sniff_order = []
self.upload_file_formats = []
self.display_applications = odict() #map a display application id to a display application
+ inherit_display_application_by_class = []
if root_dir and config:
# Parse datatypes_conf.xml
tree = galaxy.util.parse_xml( config )
@@ -42,11 +43,11 @@
for elem in registration.findall( 'datatype' ):
try:
extension = elem.get( 'extension', None )
- type = elem.get( 'type', None )
+ dtype = elem.get( 'type', None )
mimetype = elem.get( 'mimetype', None )
display_in_upload = elem.get( 'display_in_upload', False )
- if extension and type:
- fields = type.split( ':' )
+ if extension and dtype:
+ fields = dtype.split( ':' )
datatype_module = fields[0]
datatype_class = fields[1]
fields = datatype_module.split( '.' )
@@ -85,27 +86,36 @@
for display_app in elem.findall( 'display' ):
display_file = display_app.get( 'file', None )
assert display_file is not None, "A file must be specified for a datatype display tag."
+ inherit = galaxy.util.string_as_bool( display_app.get( 'inherit', 'False' ) )
display_app = DisplayApplication.from_file( os.path.join( self.display_applications_path, display_file ), self )
if display_app:
if display_app.id in self.display_applications:
#if we already loaded this display application, we'll use the first one again
display_app = self.display_applications[ display_app.id ]
- self.log.debug( "Loaded display application '%s' for datatype '%s'" % ( display_app.id, extension ) )
+ self.log.debug( "Loaded display application '%s' for datatype '%s', inherit=%s" % ( display_app.id, extension, inherit ) )
self.display_applications[ display_app.id ] = display_app #Display app by id
self.datatypes_by_extension[ extension ].add_display_application( display_app )
-
+ if inherit and ( self.datatypes_by_extension[extension], display_app ) not in inherit_display_application_by_class:
+ #subclass inheritance will need to wait until all datatypes have been loaded
+ inherit_display_application_by_class.append( ( self.datatypes_by_extension[extension], display_app ) )
except Exception, e:
self.log.warning( 'Error loading datatype "%s", problem: %s' % ( extension, str( e ) ) )
+ # Handle display_application subclass inheritance here:
+ for ext, d_type1 in self.datatypes_by_extension.iteritems():
+ for d_type2, display_app in inherit_display_application_by_class:
+ current_app = d_type1.get_display_application( display_app.id, None )
+ if current_app is None and isinstance( d_type1, type( d_type2 ) ):
+ d_type1.add_display_application( display_app )
# Load datatype sniffers from the config
sniff_order = []
sniffers = root.find( 'sniffers' )
for elem in sniffers.findall( 'sniffer' ):
- type = elem.get( 'type', None )
- if type:
- sniff_order.append( type )
- for type in sniff_order:
+ dtype = elem.get( 'type', None )
+ if dtype:
+ sniff_order.append( dtype )
+ for dtype in sniff_order:
try:
- fields = type.split( ":" )
+ fields = dtype.split( ":" )
datatype_module = fields[0]
datatype_class = fields[1]
fields = datatype_module.split( "." )
@@ -120,9 +130,9 @@
break
if not included:
self.sniff_order.append( aclass )
- self.log.debug( 'Loaded sniffer for datatype: %s' % type )
+ self.log.debug( 'Loaded sniffer for datatype: %s' % dtype )
except Exception, exc:
- self.log.warning( 'Error appending datatype %s to sniff_order, problem: %s' % ( type, str( exc ) ) )
+ self.log.warning( 'Error appending datatype %s to sniff_order, problem: %s' % ( dtype, str( exc ) ) )
#default values
if len(self.datatypes_by_extension) < 1:
self.datatypes_by_extension = {
diff -r 262b16c8e277 -r f13d85256124 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Mar 11 13:51:53 2010 -0500
+++ b/lib/galaxy/model/__init__.py Thu Mar 11 14:35:36 2010 -0500
@@ -637,6 +637,9 @@
return ( None, None )
return get_source( self )
+ def get_display_applications( self, trans ):
+ return self.datatype.get_display_applications_by_dataset( self, trans )
+
class HistoryDatasetAssociation( DatasetInstance ):
def __init__( self,
hid = None,
diff -r 262b16c8e277 -r f13d85256124 templates/root/history_common.mako
--- a/templates/root/history_common.mako Thu Mar 11 13:51:53 2010 -0500
+++ b/templates/root/history_common.mako Thu Mar 11 14:35:36 2010 -0500
@@ -102,7 +102,7 @@
%endif
%endfor
%endif
- %for display_app in data.datatype.display_applications.itervalues():
+ %for display_app in data.get_display_applications( trans ).itervalues():
| ${display_app.name}
%for link_app in display_app.links.itervalues():
<a target="${link_app.url.get( 'target_frame', '_blank' )}" href="${link_app.get_display_url( data, trans )}">${_(link_app.name)}</a>
diff -r 262b16c8e277 -r f13d85256124 tool-data/shared/bx/bx_build_sites.txt
--- a/tool-data/shared/bx/bx_build_sites.txt Thu Mar 11 13:51:53 2010 -0500
+++ b/tool-data/shared/bx/bx_build_sites.txt Thu Mar 11 14:35:36 2010 -0500
@@ -1,1 +1,1 @@
-main http://main.genome-browser.bx.psu.edu/cgi-bin/hgTracks? hg18,hg19,mm8,mm9
+main http://main.genome-browser.bx.psu.edu/cgi-bin/hgTracks? hg18,hg19,mm8,mm9 bx-main
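The dynamic_links support added above builds one display link per row of a tab-separated file. A rough standalone sketch of that from_file parsing (hypothetical file name and column layout, not the actual Galaxy API):

# simplified sketch of what DynamicDisplayApplicationBuilder does with 'from_file'
links = []
for line in open( 'display_sites.tsv' ):            # hypothetical path
    if line.startswith( '#' ):                      # honours the 'skip_startswith' attribute
        continue
    fields = line.rstrip( '\n\r' ).split( '\t' )    # the 'separator' attribute, tab by default
    if len( fields ) >= 2:
        # column 0 -> link id, column 1 -> link name (the 'id' and 'name' attributes)
        links.append( { 'id': fields[0], 'name': fields[1] } )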
details: http://www.bx.psu.edu/hg/galaxy/rev/262b16c8e277
changeset: 3520:262b16c8e277
user: fubar: ross Lazarus at gmail period com
date: Thu Mar 11 13:51:53 2010 -0500
description:
remove bogus string.translate
diffstat:
lib/galaxy/web/controllers/library_common.py | 2 --
1 files changed, 0 insertions(+), 2 deletions(-)
diffs (12 lines):
diff -r 39e4b119bdf4 -r 262b16c8e277 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Thu Mar 11 13:41:40 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Thu Mar 11 13:51:53 2010 -0500
@@ -1357,8 +1357,6 @@
path = os.path.join( parent_folder.name, path )
parent_folder = parent_folder.parent
path += ldda.name
- if path > '':
- path = path.translate(trantab)
while path in seen:
path += '_'
seen.append( path )
details: http://www.bx.psu.edu/hg/galaxy/rev/39e4b119bdf4
changeset: 3519:39e4b119bdf4
user: fubar: ross Lazarus at gmail period com
date: Thu Mar 11 13:41:40 2010 -0500
description:
merge with 3518:0c9e154e9176
diffstat:
lib/galaxy/datatypes/sniff.py | 39 ++++++++++++++++++++++++---------------
tools/data_source/upload.py | 24 +++++++++++++++++++-----
2 files changed, 43 insertions(+), 20 deletions(-)
diffs (140 lines):
diff -r e98117dd6054 -r 39e4b119bdf4 lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py Thu Mar 11 13:37:49 2010 -0500
+++ b/lib/galaxy/datatypes/sniff.py Thu Mar 11 13:41:40 2010 -0500
@@ -70,7 +70,7 @@
f.close()
return False
-def convert_newlines( fname ):
+def convert_newlines( fname, in_place=True ):
"""
Converts in place a file from universal line endings
to Posix line endings.
@@ -78,7 +78,7 @@
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\r3 4")
>>> convert_newlines(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1 2\\n3 4\\n'
"""
@@ -87,18 +87,21 @@
for i, line in enumerate( file( fname, "U" ) ):
fp.write( "%s\n" % line.rstrip( "\r\n" ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
-def sep2tabs(fname, patt="\\s+"):
+def sep2tabs( fname, in_place=True, patt="\\s+" ):
"""
Transforms in place a 'sep' separated file to a tab separated one
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\n3 4\\n")
>>> sep2tabs(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1\\t2\\n3\\t4\\n'
"""
@@ -110,11 +113,14 @@
elems = regexp.split( line )
fp.write( "%s\n" % '\t'.join( elems ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
-def convert_newlines_sep2tabs( fname, patt="\\s+" ):
+def convert_newlines_sep2tabs( fname, in_place=True, patt="\\s+" ):
"""
Combines above methods: convert_newlines() and sep2tabs()
so that files do not need to be read twice
@@ -122,7 +128,7 @@
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\r3 4")
>>> convert_newlines_sep2tabs(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1\\t2\\n3\\t4\\n'
"""
@@ -134,9 +140,12 @@
elems = regexp.split( line )
fp.write( "%s\n" % '\t'.join( elems ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
def get_headers( fname, sep, count=60, is_multi_byte=False ):
"""
diff -r e98117dd6054 -r 39e4b119bdf4 tools/data_source/upload.py
--- a/tools/data_source/upload.py Thu Mar 11 13:37:49 2010 -0500
+++ b/tools/data_source/upload.py Thu Mar 11 13:41:40 2010 -0500
@@ -138,6 +138,7 @@
def add_file( dataset, json_file, output_path ):
data_type = None
line_count = None
+ converted_path = None
if dataset.type == 'url':
try:
@@ -239,10 +240,15 @@
file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
return
if data_type != 'binary' and data_type != 'zip':
- if dataset.space_to_tab:
- line_count = sniff.convert_newlines_sep2tabs( dataset.path )
- else:
- line_count = sniff.convert_newlines( dataset.path )
+ # don't convert newlines on data we're only going to symlink
+ if not dataset.get( 'link_data_only', False ):
+ in_place = True
+ if dataset.type in ( 'server_dir', 'path_paste' ):
+ in_place = False
+ if dataset.space_to_tab:
+ line_count, converted_path = sniff.convert_newlines_sep2tabs( dataset.path, in_place=in_place )
+ else:
+ line_count, converted_path = sniff.convert_newlines( dataset.path, in_place=in_place )
if dataset.file_type == 'auto':
ext = sniff.guess_ext( dataset.path )
else:
@@ -257,7 +263,15 @@
if dataset.get( 'link_data_only', False ):
pass # data will remain in place
elif dataset.type in ( 'server_dir', 'path_paste' ):
- shutil.copy( dataset.path, output_path )
+ if converted_path is not None:
+ shutil.copy( converted_path, output_path )
+ try:
+ os.remove( converted_path )
+ except:
+ pass
+ else:
+ # this should not happen, but it's here just in case
+ shutil.copy( dataset.path, output_path )
else:
shutil.move( dataset.path, output_path )
# Write the job info
details: http://www.bx.psu.edu/hg/galaxy/rev/0c9e154e9176
changeset: 3517:0c9e154e9176
user: Nate Coraor <nate(a)bx.psu.edu>
date: Thu Mar 11 11:56:20 2010 -0500
description:
Modify the newline conversion methods in sniff so converting in place is optional. This is necessary to fix a bug that occurs if using the 'server_dir' or 'path_paste' library upload methods: previously, they would modify the file to be imported in-place if permissions allowed (probably not what the admin wanted) or fail if permissions did not allow. New functionality is to return the converted tempfile if 'server_dir' or 'path_paste' methods are used. Also, no newline conversion will be done if the administrator uses the symlink checkbox.
diffstat:
lib/galaxy/datatypes/sniff.py | 39 ++++++++++++++++++++++++---------------
tools/data_source/upload.py | 24 +++++++++++++++++++-----
2 files changed, 43 insertions(+), 20 deletions(-)
diffs (140 lines):
diff -r 2e97ae04856d -r 0c9e154e9176 lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py Thu Mar 11 11:17:11 2010 -0500
+++ b/lib/galaxy/datatypes/sniff.py Thu Mar 11 11:56:20 2010 -0500
@@ -70,7 +70,7 @@
f.close()
return False
-def convert_newlines( fname ):
+def convert_newlines( fname, in_place=True ):
"""
Converts in place a file from universal line endings
to Posix line endings.
@@ -78,7 +78,7 @@
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\r3 4")
>>> convert_newlines(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1 2\\n3 4\\n'
"""
@@ -87,18 +87,21 @@
for i, line in enumerate( file( fname, "U" ) ):
fp.write( "%s\n" % line.rstrip( "\r\n" ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
-def sep2tabs(fname, patt="\\s+"):
+def sep2tabs( fname, in_place=True, patt="\\s+" ):
"""
Transforms in place a 'sep' separated file to a tab separated one
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\n3 4\\n")
>>> sep2tabs(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1\\t2\\n3\\t4\\n'
"""
@@ -110,11 +113,14 @@
elems = regexp.split( line )
fp.write( "%s\n" % '\t'.join( elems ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
-def convert_newlines_sep2tabs( fname, patt="\\s+" ):
+def convert_newlines_sep2tabs( fname, in_place=True, patt="\\s+" ):
"""
Combines above methods: convert_newlines() and sep2tabs()
so that files do not need to be read twice
@@ -122,7 +128,7 @@
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\r3 4")
>>> convert_newlines_sep2tabs(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1\\t2\\n3\\t4\\n'
"""
@@ -134,9 +140,12 @@
elems = regexp.split( line )
fp.write( "%s\n" % '\t'.join( elems ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
def get_headers( fname, sep, count=60, is_multi_byte=False ):
"""
diff -r 2e97ae04856d -r 0c9e154e9176 tools/data_source/upload.py
--- a/tools/data_source/upload.py Thu Mar 11 11:17:11 2010 -0500
+++ b/tools/data_source/upload.py Thu Mar 11 11:56:20 2010 -0500
@@ -138,6 +138,7 @@
def add_file( dataset, json_file, output_path ):
data_type = None
line_count = None
+ converted_path = None
if dataset.type == 'url':
try:
@@ -239,10 +240,15 @@
file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
return
if data_type != 'binary' and data_type != 'zip':
- if dataset.space_to_tab:
- line_count = sniff.convert_newlines_sep2tabs( dataset.path )
- else:
- line_count = sniff.convert_newlines( dataset.path )
+ # don't convert newlines on data we're only going to symlink
+ if not dataset.get( 'link_data_only', False ):
+ in_place = True
+ if dataset.type in ( 'server_dir', 'path_paste' ):
+ in_place = False
+ if dataset.space_to_tab:
+ line_count, converted_path = sniff.convert_newlines_sep2tabs( dataset.path, in_place=in_place )
+ else:
+ line_count, converted_path = sniff.convert_newlines( dataset.path, in_place=in_place )
if dataset.file_type == 'auto':
ext = sniff.guess_ext( dataset.path )
else:
@@ -257,7 +263,15 @@
if dataset.get( 'link_data_only', False ):
pass # data will remain in place
elif dataset.type in ( 'server_dir', 'path_paste' ):
- shutil.copy( dataset.path, output_path )
+ if converted_path is not None:
+ shutil.copy( converted_path, output_path )
+ try:
+ os.remove( converted_path )
+ except:
+ pass
+ else:
+ # this should not happen, but it's here just in case
+ shutil.copy( dataset.path, output_path )
else:
shutil.move( dataset.path, output_path )
# Write the job info
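A minimal usage sketch of the new calling convention (hypothetical paths; convert_newlines is the function patched above):

from galaxy.datatypes import sniff
import shutil, os

line_count, converted_path = sniff.convert_newlines( '/imports/reads.txt', in_place=False )
if converted_path is not None:
    # the source file stays untouched; copy the converted tempfile into place, then clean up
    shutil.copy( converted_path, '/galaxy/files/dataset_1.dat' )
    os.remove( converted_path )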
details: http://www.bx.psu.edu/hg/galaxy/rev/e98117dd6054
changeset: 3518:e98117dd6054
user: fubar: ross Lazarus at gmail period com
date: Thu Mar 11 13:37:49 2010 -0500
description:
fix for string.translate on empty path in creating an archive of library composite datatypes
diffstat:
lib/galaxy/web/controllers/library_common.py | 6 ++++--
1 files changed, 4 insertions(+), 2 deletions(-)
diffs (23 lines):
diff -r 2e97ae04856d -r e98117dd6054 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Thu Mar 11 11:17:11 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Thu Mar 11 13:37:49 2010 -0500
@@ -1357,7 +1357,8 @@
path = os.path.join( parent_folder.name, path )
parent_folder = parent_folder.parent
path += ldda.name
- path = path.translate(trantab)
+ if path > '':
+ path = path.translate(trantab)
while path in seen:
path += '_'
seen.append( path )
@@ -1377,7 +1378,8 @@
flist = glob.glob(os.path.join(ldda.dataset.extra_files_path,'*.*')) # glob returns full paths
for fpath in flist:
efp,fname = os.path.split(fpath)
- fname = fname.translate(trantab)
+ if fname > '':
+ fname = fname.translate(trantab)
try:
archive.add( fpath,fname )
except IOError:
details: http://www.bx.psu.edu/hg/galaxy/rev/2e97ae04856d
changeset: 3516:2e97ae04856d
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Thu Mar 11 11:17:11 2010 -0500
description:
Fix for implicit datatype conversion.
diffstat:
lib/galaxy/datatypes/data.py | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r 6fa986527398 -r 2e97ae04856d lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Wed Mar 10 23:59:25 2010 -0500
+++ b/lib/galaxy/datatypes/data.py Thu Mar 11 11:17:11 2010 -0500
@@ -257,7 +257,7 @@
break
params[input_name] = original_dataset
#Run converter, job is dispatched through Queue
- converted_dataset = converter.execute( trans, incoming = params, set_output_hid = visible )
+ converted_dataset = converter.execute( trans, incoming = params, set_output_hid = visible )[1]
if len(params) > 0:
trans.log_event( "Converter params: %s" % (str(params)), tool_id=converter.id )
if not visible:
details: http://www.bx.psu.edu/hg/galaxy/rev/6fa986527398
changeset: 3515:6fa986527398
user: fubar: ross Lazarus at gmail period com
date: Wed Mar 10 23:59:25 2010 -0500
description:
merge
details: http://www.bx.psu.edu/hg/galaxy/rev/87da8cd1f091
changeset: 3513:87da8cd1f091
user: fubar: ross Lazarus at gmail period com
date: Wed Mar 10 20:59:11 2010 -0500
description:
Remove old Lmap from datatypes_conf.xml.sample - now deprecated from genetics.py
Causing buildbot to barf.
diffstat:
datatypes_conf.xml.sample | 1 -
lib/galaxy/web/controllers/library_common.py | 3 ++-
run_functional_tests.sh | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diffs (43 lines):
diff -r 53ddb4b728f7 -r 87da8cd1f091 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Wed Mar 10 19:48:28 2010 -0500
+++ b/datatypes_conf.xml.sample Wed Mar 10 20:59:11 2010 -0500
@@ -194,7 +194,6 @@
<!-- genome graphs ucsc file - first col is always marker then numeric values to plot -->
<datatype extension="gg" type="galaxy.datatypes.genetics:GenomeGraphs"/>
<!-- part of linkage format pedigree -->
- <datatype extension="lmap" type="galaxy.datatypes.genetics:Lmap" display_in_upload="true"/>
<datatype extension="malist" type="galaxy.datatypes.genetics:MAlist" display_in_upload="true"/>
<!-- linkage format pedigree (separate .map file) -->
<datatype extension="lped" type="galaxy.datatypes.genetics:Lped" display_in_upload="true">
diff -r 53ddb4b728f7 -r 87da8cd1f091 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Wed Mar 10 19:48:28 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Wed Mar 10 20:59:11 2010 -0500
@@ -1111,7 +1111,7 @@
# is composite - must return a zip of contents and the html file itself - ugh - should be reversible at upload!
# use act_on_multiple_datasets( self, trans, cntrller, library_id, ldda_ids='', **kwd ) since it does what we need
kwd['do_action'] = 'zip'
- return self.act_on_multiple_datasets( trans, cntrller, library_id, ldda_ids=id, **kwd )
+ return self.act_on_multiple_datasets( trans, cntrller, library_id, ldda_ids=[id,], **kwd )
else:
mime = trans.app.datatypes_registry.get_mimetype_by_extension( ldda.extension.lower() )
trans.response.set_content_type( mime )
@@ -1258,6 +1258,7 @@
messagetype = 'error'
else:
ldda_ids = util.listify( ldda_ids )
+ log.debug('## act on multiple got %s' % ldda_ids)
if action == 'import_to_history':
history = trans.get_history()
if history is None:
diff -r 53ddb4b728f7 -r 87da8cd1f091 run_functional_tests.sh
--- a/run_functional_tests.sh Wed Mar 10 19:48:28 2010 -0500
+++ b/run_functional_tests.sh Wed Mar 10 20:59:11 2010 -0500
@@ -1,7 +1,7 @@
#!/bin/sh
# A good place to look for nose info: http://somethingaboutorange.com/mrl/projects/nose/
-
+export PATH=/usr/local/bin:$PATH
rm -f run_functional_tests.log
if [ ! $1 ]; then
details: http://www.bx.psu.edu/hg/galaxy/rev/06dcf56688ea
changeset: 3514:06dcf56688ea
user: fubar: ross Lazarus at gmail period com
date: Wed Mar 10 23:58:35 2010 -0500
description:
Fixes for downloading library archives - correct extensions help a lot and removing spaces from ldda.name helps too.
diffstat:
lib/galaxy/web/controllers/library_common.py | 15 +++++++++------
1 files changed, 9 insertions(+), 6 deletions(-)
diffs (63 lines):
diff -r 87da8cd1f091 -r 06dcf56688ea lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Wed Mar 10 20:59:11 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Wed Mar 10 23:58:35 2010 -0500
@@ -1,4 +1,4 @@
-import os, os.path, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile, copy, glob
+import os, os.path, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile, copy, glob, string
from galaxy.web.base.controller import *
from galaxy import util, jobs
from galaxy.datatypes import sniff
@@ -1258,7 +1258,6 @@
messagetype = 'error'
else:
ldda_ids = util.listify( ldda_ids )
- log.debug('## act on multiple got %s' % ldda_ids)
if action == 'import_to_history':
history = trans.get_history()
if history is None:
@@ -1312,6 +1311,8 @@
msg = "The selected datasets have been removed from this data library"
else:
error = False
+ killme = string.punctuation + string.whitespace
+ trantab = string.maketrans(killme,'_'*len(killme))
try:
outext = 'zip'
if action == 'zip':
@@ -1325,10 +1326,10 @@
archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
elif action == 'tgz':
archive = util.streamball.StreamBall( 'w|gz' )
- outext = 'gz'
+ outext = 'tgz'
elif action == 'tbz':
archive = util.streamball.StreamBall( 'w|bz2' )
- outext = 'bz2'
+ outext = 'tbz2'
except (OSError, zipfile.BadZipFile):
error = True
log.exception( "Unable to create archive for download" )
@@ -1356,12 +1357,13 @@
path = os.path.join( parent_folder.name, path )
parent_folder = parent_folder.parent
path += ldda.name
+ path = path.translate(trantab)
while path in seen:
path += '_'
seen.append( path )
+ zpath = os.path.split(path)[-1] # comes as base_name/fname
+ outfname,zpathext = os.path.splitext(zpath)
if is_composite: # need to add all the components from the extra_files_path to the zip
- zpath = os.path.split(path)[-1] # comes as base_name/fname
- outfname,zpathext = os.path.splitext(zpath)
if zpathext == '':
zpath = '%s.html' % zpath # fake the real nature of the html file
try:
@@ -1375,6 +1377,7 @@
flist = glob.glob(os.path.join(ldda.dataset.extra_files_path,'*.*')) # glob returns full paths
for fpath in flist:
efp,fname = os.path.split(fpath)
+ fname = fname.translate(trantab)
try:
archive.add( fpath,fname )
except IOError:
details: http://www.bx.psu.edu/hg/galaxy/rev/53ddb4b728f7
changeset: 3512:53ddb4b728f7
user: fubar: ross Lazarus at gmail period com
date: Wed Mar 10 19:48:28 2010 -0500
description:
Reverted missing python version check for zipfile setup
Changed download archive names for both libraries and for history items
so they're related to the content - uses data.name rather than one fixed string for all
diffstat:
lib/galaxy/web/controllers/dataset.py | 25 ++++++++++++++++---------
lib/galaxy/web/controllers/library_common.py | 13 ++++++++-----
2 files changed, 24 insertions(+), 14 deletions(-)
diffs (153 lines):
diff -r 2af472aa0844 -r 53ddb4b728f7 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Wed Mar 10 17:10:43 2010 -0500
+++ b/lib/galaxy/web/controllers/dataset.py Wed Mar 10 19:48:28 2010 -0500
@@ -1,4 +1,4 @@
-import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib, tempfile, zipfile, glob
+import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib, tempfile, zipfile, glob, sys
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, iff, grids
@@ -11,6 +11,11 @@
pkg_resources.require( "Paste" )
import paste.httpexceptions
+if sys.version_info[:2] < ( 2, 6 ):
+ zipfile.BadZipFile = zipfile.error
+if sys.version_info[:2] < ( 2, 5 ):
+ zipfile.LargeZipFile = zipfile.error
+
tmpd = tempfile.mkdtemp()
comptypes=[]
ziptype = '32'
@@ -204,6 +209,9 @@
def archive_composite_dataset( self, trans, data=None, **kwd ):
# save a composite object into a compressed archive for downloading
params = util.Params( kwd )
+ valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ outfname = data.name[0:150]
+ outfname = ''.join(c in valid_chars and c or '_' for c in outfname)
if (params.do_action == None):
params.do_action = 'zip' # default
msg = util.restore_text( params.get( 'msg', '' ) )
@@ -230,7 +238,7 @@
except (OSError, zipfile.BadZipFile):
error = True
log.exception( "Unable to create archive for download" )
- msg = "Unable to create archive for %s for download, please report this error" % data.name
+ msg = "Unable to create archive for %s for download, please report this error" % outfname
messagetype = 'error'
if not error:
current_user_roles = trans.get_current_user_roles()
@@ -239,7 +247,7 @@
fname = os.path.split(path)[-1]
basename = data.metadata.base_name
efp = data.extra_files_path
- htmlname = os.path.splitext(data.name)[0]
+ htmlname = os.path.splitext(outfname)[0]
if not htmlname.endswith(ext):
htmlname = '%s_%s' % (htmlname,ext)
archname = '%s.html' % htmlname # fake the real nature of the html file
@@ -276,14 +284,14 @@
messagetype = 'error'
if not error:
trans.response.set_content_type( "application/x-zip-compressed" )
- trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyCompositeObject.zip"
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.zip" % outfname
return tmpfh
else:
trans.response.set_content_type( "application/x-tar" )
outext = 'tgz'
if params.do_action == 'tbz':
outext = 'tbz'
- trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % outext
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext)
archive.wsgi_status = trans.response.wsgi_status()
archive.wsgi_headeritems = trans.response.wsgi_headeritems()
return archive.stream
@@ -294,7 +302,8 @@
@web.expose
def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd):
"""Catches the dataset id and displays file contents as directed"""
-
+ composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
+ composite_extensions.append('html') # for archiving composite datatypes
# DEPRECATION: We still support unencoded ids for backward compatibility
try:
dataset_id = int( dataset_id )
@@ -329,8 +338,6 @@
trans.log_event( "Display dataset id: %s" % str( dataset_id ) )
if to_ext: # Saving the file
- composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
- composite_extensions.append('html')
if data.ext in composite_extensions:
return self.archive_composite_dataset( trans, data, **kwd )
else:
@@ -340,7 +347,7 @@
valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
fname = data.name
fname = ''.join(c in valid_chars and c or '_' for c in fname)[0:150]
- trans.response.headers["Content-Disposition"] = "attachment; filename=GalaxyHistoryItem-%s-[%s]%s" % (data.hid, fname, to_ext)
+ trans.response.headers["Content-Disposition"] = "attachment; filename=Galaxy%s-[%s]%s" % (data.hid, fname, to_ext)
return open( data.file_name )
if os.path.exists( data.file_name ):
max_peek_size = 1000000 # 1 MB
diff -r 2af472aa0844 -r 53ddb4b728f7 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Wed Mar 10 17:10:43 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Wed Mar 10 19:48:28 2010 -0500
@@ -1312,6 +1312,7 @@
else:
error = False
try:
+ outext = 'zip'
if action == 'zip':
# Can't use mkstemp - the file must not exist first
tmpd = tempfile.mkdtemp()
@@ -1323,8 +1324,10 @@
archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
elif action == 'tgz':
archive = util.streamball.StreamBall( 'w|gz' )
+ outext = 'gz'
elif action == 'tbz':
archive = util.streamball.StreamBall( 'w|bz2' )
+ outext = 'bz2'
except (OSError, zipfile.BadZipFile):
error = True
log.exception( "Unable to create archive for download" )
@@ -1357,11 +1360,11 @@
seen.append( path )
if is_composite: # need to add all the components from the extra_files_path to the zip
zpath = os.path.split(path)[-1] # comes as base_name/fname
- zpathext = os.path.splitext(zpath)[-1]
+ outfname,zpathext = os.path.splitext(zpath)
if zpathext == '':
zpath = '%s.html' % zpath # fake the real nature of the html file
try:
- archive.add(ldda.dataset.file_name,zpath)
+ archive.add(ldda.dataset.file_name,zpath) # add the primary of a composite set
except IOError:
error = True
log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name)
@@ -1375,7 +1378,7 @@
archive.add( fpath,fname )
except IOError:
error = True
- log.exception( "Unable to add %s to temporary library download archive" % fname)
+ log.exception( "Unable to add %s to temporary library download archive %s" % (fname,outfname))
msg = "Unable to create archive for download, please report this error"
messagetype = 'error'
continue
@@ -1402,11 +1405,11 @@
messagetype = 'error'
if not error:
trans.response.set_content_type( "application/x-zip-compressed" )
- trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % action
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext)
return tmpfh
else:
trans.response.set_content_type( "application/x-tar" )
- trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % action
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext)
archive.wsgi_status = trans.response.wsgi_status()
archive.wsgi_headeritems = trans.response.wsgi_headeritems()
return archive.stream
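A standalone illustration of the two name-sanitisation idioms used in the changesets above (hypothetical input names, not part of either patch):

import string

killme = string.punctuation + string.whitespace
trantab = string.maketrans( killme, '_' * len( killme ) )
print 'my library/data set.txt'.translate( trantab )           # -> my_library_data_set_txt

valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
name = 'Galaxy history item?.tabular'
print ''.join( c in valid_chars and c or '_' for c in name )   # -> Galaxy_history_item_.tabular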
Hi,
I was wondering if there's an easy way to disable the pop-up balloon tooltips (on the save, rerun, view, delete icons) ?
The new beautiful icons (or the old text labels) are informative enough - and the constantly appearing/disappearing black tooltip balloons (when you move the mouse over datasets) are a bit annoying.
Alternatively, is it possible to add a short delay before showing the tooltip (like in all regular GUI client-side applications) ?
Thanks,
-gordon
Hi Galaxy team,
When working with histories with many items I sometimes get a little lost: which is the history item whose content is displayed in the middle section of my Galaxy window? (The output from different tools usually is different enough to prevent confusion, but when you process multiple samples in the same history it can get confusing.) Would it be possible to highlight the history item whose content is displayed in the center of a Galaxy window? Some other visual clue in the middle section of the Galaxy window, like a small heading or title bar with the name/tag of the displayed history item, would probably also do the trick....
Cheers,
Pi
---------------------------------------------------------------
Biomolecular Mass Spectrometry & Proteomics group
Utrecht University
Visiting address:
H.R. Kruyt building room O607
Padualaan 8
3584 CH Utrecht
The Netherlands
Mail address:
P.O. box 80.082
3508 TB Utrecht
The Netherlands
phone: +31 6 143 66 783
email: pieter.neerincx(a)gmail.com
skype: pieter.online
---------------------------------------------------------------
Hi Galaxy team,
As my local instance grows, it becomes more difficult each time to upgrade. I now use diff on the various config files to see what changes I should lift over to the new install. (I don't update, but leave my current setup untouched as a backup, create a new install, and if that works switch to the new setup using a symlink.) Would it be possible to use a special directory with config files for site-specific customizations? I'm thinking of something like Apache's "conf.d" directory or good ol' SRS's "sites" directory. This would allow us to leave the original Galaxy tools + datatypes config files untouched and list our local additions/mods in separate config files, making upgrades a bit easier...
Cheers,
Pi
---------------------------------------------------------------
Biomolecular Mass Spectrometry & Proteomics group
Utrecht University
Visiting address:
H.R. Kruyt building room O607
Padualaan 8
3584 CH Utrecht
The Netherlands
Mail address:
P.O. box 80.082
3508 TB Utrecht
The Netherlands
phone: +31 6 143 66 783
email: pieter.neerincx(a)gmail.com
skype: pieter.online
---------------------------------------------------------------
Please help! Our install of galaxy is completely unable to load any data into libraries now!
URL: http://galaxy.jgi-psf.org/library_common/act_on_multiple_datasets?library_i…
File '/house/groupdirs/mep/galaxy/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/exceptions/errormiddleware.py', line 143 in __call__
app_iter = self.application(environ, start_response)
File '/house/groupdirs/mep/galaxy/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/debug/prints.py', line 98 in __call__
environ, self.app)
File '/house/groupdirs/mep/galaxy/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/wsgilib.py', line 539 in intercept_output
app_iter = application(environ, replacement_start_response)
File '/house/groupdirs/mep/galaxy/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/recursive.py', line 80 in __call__
return self.application(environ, start_response)
File '/house/groupdirs/mep/galaxy/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/httpexceptions.py', line 632 in __call__
return self.application(environ, start_response)
File '/house/groupdirs/mep/galaxy/galaxy-dist/lib/galaxy/web/framework/base.py', line 125 in __call__
body = method( trans, **kwargs )
File '/house/groupdirs/mep/galaxy/galaxy-dist/lib/galaxy/web/controllers/library_common.py', line 1384 in act_on_multiple_datasets
archive.add( ldda.dataset.file_name, path )
UnboundLocalError: local variable 'archive' referenced before assignment
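This is the generic Python failure mode when a local name is used on a code path where it was never assigned; in act_on_multiple_datasets, 'archive' is only bound inside the zip/tgz/tbz branches of a try block. A minimal illustration (hypothetical code, not the Galaxy controller itself):

def make_archive():
    try:
        archive = open( '/no/such/path' )   # raises IOError before 'archive' is bound
    except IOError:
        pass
    archive.write( 'x' )                    # UnboundLocalError: 'archive' referenced before assignment

make_archive()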
I tried adding a new library to see if I could see it, but I get an error when symlinking or copying from filesystem paths:
Miscellaneous information:
Traceback (most recent call last): File "/house/groupdirs/mep/galaxy/galaxy-dist/tools/data_source/upload.py", line 326, in __main__() File "/house/groupdirs/mep/galaxy/galaxy-dist/tools/data_source/upload.py", line 318, in __main__ a
error
I could upload, but I still get the first error
Thanks in advance for any assistance!
Ed Kirton
I have a perl script that splits a text file with genomic information
(forward strand and reverse strand) into two separate files.
One file for the forward strand and one file for the reverse strand.
I have written a wrapper so that Galaxy can execute this script.
When my script outputs one file, this file is listed as a history item.
When I let my script output the two files, only one history item gets
created.
What I need to know is what extra variables I need to output from
the .xml file so that Galaxy finds the two output files and puts both
files in as history items.
I have been looking at the interval2maf .py and .xml files, but I just can't
figure out how this works. It seems that the history_id is important,
the species is a list array, and for every species a file gets created in
the database/tmp directory; when the file is created the tmp dir is
deleted.
Adding an extra file from the .py into this tmp dir does not end up in
the history items, and renaming the files seems to end up with no history
items at all.
Cheers,
//Michel
Hi,
I've tried to install galaxy on my Ubuntu Linux amd64 2.6.27-11-generic, with Python 2.5.2 (r252:60911) and sqlalchemy 0.5.8 (as reported by >>> sqlalchemy.__version__).
I ran $ hg clone http://bitbucket.org/galaxy/galaxy-central/ 5 days ago,
and I got this error after calling % sh run.sh:
Traceback (most recent call last):
File "/home/tan/galaxy_dist/lib/galaxy/web/buildapp.py", line 61, in app_factory
app = UniverseApplication( global_conf = global_conf, **kwargs )
File "/home/tan/galaxy_dist/lib/galaxy/app.py", line 28, in __init__
create_or_verify_database( db_url, self.config.database_engine_options )
File "/home/tan/galaxy_dist/lib/galaxy/model/migrate/check.py", line 66, in create_or_verify_database
db_schema = schema.ControlledSchema( engine, migrate_repository )
File "/home/tan/galaxy_dist/eggs/sqlalchemy_migrate-0.5.4-py2.5.egg/migrate/versioning/schema.py", line 24, in __init__
self._load()
File "/home/tan/galaxy_dist/eggs/sqlalchemy_migrate-0.5.4-py2.5.egg/migrate/versioning/schema.py", line 36, in _load
self.table = Table(tname, self.meta, autoload=True)
File "/var/lib/python-support/python2.5/sqlalchemy/schema.py", line 110, in __call__
return type.__call__(self, name, metadata, *args, **kwargs)
File "/var/lib/python-support/python2.5/sqlalchemy/schema.py", line 226, in __init__
_bind_or_error(metadata).reflecttable(self, include_columns=include_columns)
File "/var/lib/python-support/python2.5/sqlalchemy/engine/base.py", line 1275, in reflecttable
self.dialect.reflecttable(conn, table, include_columns)
File "/var/lib/python-support/python2.5/sqlalchemy/databases/sqlite.py", line 343, in reflecttable
raise exceptions.NoSuchTableError(table.name)
NoSuchTableError: migrate_version
/var/lib/python-support/python2.5/sqlalchemy/__init__ says ver 0.4.6
Thanks,
Patrick
__________________________________________________________________
Connect with friends from any web browser - no download required. Try the new Yahoo! Canada Messenger for the Web BETA at http://ca.messenger.yahoo.com/webmessengerpromo.php
Hi,
Is it possible to have textFields pre-set within a form or somewhere
so that when someone goes and creates the sequence request certain
fields are already set?
Thanks,
Natalie
Hi Freerk,
Can you be more specific about the problems that you're seeing? Also, please continue to cc galaxy-dev as you're more likely to get help with your issues since it's going to a larger audience.
Thanks,
J.
On Mar 11, 2010, at 10:42 AM, Dijk, F van wrote:
> Hi Jeremy,
>
> Thanks for the fast reply. The first issue is solved, but a new one occurred during the second step. When I migrate the sqlite database to SQL and try to import it into the MySQL database, some errors occur.
> MySQL is complaining about the syntax used in the SQL dump.
> We are using the following MySQL version:
> mysql Ver 14.12 Distrib 5.0.75, for debian-linux-gnu (x86_64) using readline 5.2
> I also read the info on the link you provided in your answer, but this won't solve this problem I think. So, do you have a solution?
>
> Sincerely,
>
>
> Freerk van Dijk
>
>
> Van: Jeremy Goecks [mailto:jeremy.goecks@emory.edu]
> Verzonden: donderdag 11 maart 2010 15:25
> Aan: Dijk, F van
> CC: galaxy-dev(a)bx.psu.edu
> Onderwerp: Re: [galaxy-dev] Database migration problem
>
> Hi Freerk,
>
>> When we change the databaseconnection to "mysql:///galaxy?unix_socket=/var/run/mysqld/mysqld.sock" in the universe_wsgi.ini file and run Galaxy we receive an error like:
>>
>> "Access denied for user 'root'@'localhost' (using password: NO)") None None
>>
>> So is there an option somewhere which we also have to change to make the connection work?
>>
> You need to specify a username and password to access your database. Try this URL:
>
> mysql://user:password@localhost/galaxy?unix_socket=/var/lib/mysql/mysql.sock
> (exchanging 'user' and 'password' according to your settings)
>
>> And question two:
>> Do we need to migrate the content from the sqlite database to the MySQL database by hand or does this happen automatically?
>>
>
> You need to do the migration manually. Kanwei provided a nice solution to this problem a while back:
>
> http://lists.bx.psu.edu/pipermail/galaxy-dev/2010-February/001996.html
>
> --
> What I would suggest is to dump the sqlite database to standard sql,
> and then creating a new instance of galaxy that has mysql as the
> configured database. When you run the setup and run script, you'll
> have a freshly instantiated galaxy with empty mysql tables. Then you
> can just import the standard sql file into the mysql database.
> --
>
> Good luck and let us know if you have any more problems.
>
> J.
>
> The contents of this message are confidential and only intended for the eyes of the addressee(s). Others than the addressee(s) are not allowed to use this message, to make it public or to distribute or multiply this message in any way. The UMCG cannot be held responsible for incomplete reception or delay of this transferred message.
>
Hi,
I am trying to install galaxy on my macbook with python version 2.5.2,
but I got error messages when I run sh run.sh. Could you please help me
with this?
Best regards,
Lin
Traceback (most recent call last):
File "./scripts/paster.py", line 34, in <module>
command.run()
File "/Users/linjiang/galaxy_dist/eggs/PasteScript-1.7.3-py2.5.egg/
paste/script/command.py", line 84, in run
invoke(command, command_name, options, args[1:])
File "/Users/linjiang/galaxy_dist/eggs/PasteScript-1.7.3-py2.5.egg/
paste/script/command.py", line 123, in invoke
exit_code = runner.run(args)
File "/Users/linjiang/galaxy_dist/eggs/PasteScript-1.7.3-py2.5.egg/
paste/script/command.py", line 218, in run
result = self.command()
File "/Users/linjiang/galaxy_dist/eggs/PasteScript-1.7.3-py2.5.egg/
paste/script/serve.py", line 276, in command
relative_to=base, global_conf=vars)
File "/Users/linjiang/galaxy_dist/eggs/PasteScript-1.7.3-py2.5.egg/
paste/script/serve.py", line 313, in loadapp
**kw)
File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/
paste/deploy/loadwsgi.py", line 204, in loadapp
return loadobj(APP, uri, name=name, **kw)
File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/
paste/deploy/loadwsgi.py", line 224, in loadobj
global_conf=global_conf)
File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/
paste/deploy/loadwsgi.py", line 248, in loadcontext
global_conf=global_conf)
File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/
paste/deploy/loadwsgi.py", line 278, in _loadconfig
return loader.get_context(object_type, name, global_conf)
File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/
paste/deploy/loadwsgi.py", line 413, in get_context
section)
File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/
paste/deploy/loadwsgi.py", line 458, in _context_from_explicit
value = import_string(found_expr)
File "/Users/linjiang/galaxy_dist/eggs/PasteDeploy-1.3.3-py2.5.egg/
paste/deploy/loadwsgi.py", line 18, in import_string
return pkg_resources.EntryPoint.parse("x="+s).load(False)
File "/Users/linjiang/galaxy_dist/lib/pkg_resources.py", line 1912,
in load
entry = __import__(self.module_name, globals(),globals(),
['__name__'])
File "/Users/linjiang/galaxy_dist/lib/galaxy/web/__init__.py", line
5, in <module>
from framework import expose, json, require_login, require_admin,
url_for, error, form, FormBuilder
File "/Users/linjiang/galaxy_dist/lib/galaxy/web/framework/
__init__.py", line 29, in <module>
from babel.support import Translations
File "/Users/linjiang/galaxy_dist/eggs/Babel-0.9.4-py2.5.egg/babel/
support.py", line 29, in <module>
from babel.dates import format_date, format_datetime, format_time,
LC_TIME
File "/Users/linjiang/galaxy_dist/eggs/Babel-0.9.4-py2.5.egg/babel/
dates.py", line 34, in <module>
LC_TIME = default_locale('LC_TIME')
File "/Users/linjiang/galaxy_dist/eggs/Babel-0.9.4-py2.5.egg/babel/
core.py", line 642, in default_locale
return '_'.join(filter(None, parse_locale(locale)))
File "/Users/linjiang/galaxy_dist/eggs/Babel-0.9.4-py2.5.egg/babel/
core.py", line 763, in parse_locale
raise ValueError('expected only letters, got %r' % lang)
ValueError: expected only letters, got 'utf-8'
--
Lin Jiang, Ph.D student
Dept. of Medical Biochemistry and Microbiology
Uppsala University
Husargatan 3
Biomedical Centre D11:3 307d
Box 582 IMBIM Husdjursgenetik
S-752 37 Uppsala
Sweden
Workphone: +46 (0) 18 471 4383
Lin.Jiang(a)imbim.uu.se
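For reference, the failure above comes from Babel trying to parse one of the locale environment variables (LC_TIME, LC_ALL, LANG, ...); a value that is only a codeset, such as 'UTF-8', is not a parseable locale, while a full value like 'en_US.UTF-8' is. A minimal illustration (assuming Babel 0.9.x):

from babel.core import parse_locale

parse_locale( 'en_US.UTF-8' )   # fine - the codeset after '.' is ignored
parse_locale( 'utf-8' )         # ValueError: expected only letters, got 'utf-8'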
Hi,
We have two parallel installs of Galaxy, one which runs with python2.4
and the other with 2.6. The 2.6 install is working fine (other than the issues
I've posted); however, the 2.4 install won't start as it can't find DRMAA_python.
I have scrambled the egg and it does exist:
www-galaxy@ge-002: galaxy_live> ls -l
/homes/www-galaxy/galaxy_live/eggs/DRMAA_python-0.2-py2.4-linux-x86_64-ucs4.egg
-rw-r--r-- 1 www-galaxy barton 62374 Mar 5 14:22
/homes/www-galaxy/galaxy_live/eggs/DRMAA_python-0.2-py2.4-linux-x86_64-ucs4.egg
What am I missing?
Cheers,
Chris
--
Dr Chris Cole
Senior Bioinformatics Research Officer
School of Life Sciences Research
University of Dundee
Dow Street
Dundee
DD1 5EH
Scotland, UK
url: http://network.nature.com/profile/drchriscole
e-mail: chris(a)compbio.dundee.ac.uk
Tel: +44 (0)1382 388 721
The University of Dundee is a registered Scottish charity, No: SC015096
Hi, let me lead in with how impressed I am at the professional packaging of
galaxy -- dropped right in. Amazing!
Now on to the problem, I'm sure we're just missing something here on our end,
but we're seeing an error when running the histogram tool which says hist wants
a vector not a matrix.
An error occurred running this job: Error in hist.default(list(68, 71,
62, 75, 58, 60, 67, 68, 71, 69), xlab = "V1", :
'x' must be numeric
Looking at
http://bitbucket.org/galaxy/galaxy-central/src/tip/tools/plotting/histogram…
and its history, it's clear that's always been the case back to revision
0, but when we try from the console we get rpy complaining that a matrix isn't
a vector, while a plain vector works.
By hand with a matrix:
$ python
Python 2.6.4 (r264:75706, Mar 9 2010, 10:00:44)
[GCC 4.2.1 (Apple Inc. build 5646) (dot 1)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
>>> import sys
>>> from rpy import *
>>> matrix = []
>>> vals = ["23","14","32","25","12","9","35","18","24"]
>>> for i in vals:
... row = [];row.append(float(i));matrix.append(row)
...
>>> matrix
[[23.0], [14.0], [32.0], [25.0], [12.0], [9.0], [35.0], [18.0], [24.0]]
>>> a = array(matrix)
>>> r.pdf("histtest.pdf", 8, 8)
>>> title = "Histogram Test";xlab="Count";breaks="Sturges"
>>> r.hist( a, probability=True, main=title, xlab=xlab, breaks=breaks )
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
rpy.RPy_RException: Error in hist.default(list(23, 14, 32, 25, 12, 9,
35, 18, 24), xlab = "Count", :
'x' must be numeric
>>> a
array([[ 23.],
[ 14.],
[ 32.],
[ 25.],
[ 12.],
[ 9.],
[ 35.],
[ 18.],
[ 24.]])
However, when we build a vector instead of rows R/rpy is happy:
>>> v = []
>>> for i in vals:
... v.append(float(i))
...
>>> v
[23.0, 14.0, 32.0, 25.0, 12.0, 9.0, 35.0, 18.0, 24.0]
>>> r.hist(v, probability=True, main=title, xlab=xlab, breaks=breaks )
{'density': [0.022222217777778667, 0.044444444444444446,
0.02222222222222223, 0.066666666666666666, 0.0, 0.044444444444444446],
'equidist': True, 'breaks': [5.0, 10.0, 15.0, 20.0, 25.0, 30.0, 35.0],
'intensities': [0.022222217777778667, 0.044444444444444446,
0.02222222222222223, 0.066666666666666666, 0.0, 0.044444444444444446],
'counts': [1, 2, 1, 3, 0, 2], 'xname': 'c(23, 14, 32, 25, 12, 9, 35, 18,
24)', 'mids': [7.5, 12.5, 17.5, 22.5, 27.5, 32.5]}
>>>
>>> a = array(v)
>>> r.hist( a, probability=True, main=title, xlab=xlab, breaks=breaks )
{'density': [0.022222217777778667, 0.044444444444444446,
0.02222222222222223, 0.066666666666666666, 0.0, 0.044444444444444446],
'equidist': True, 'breaks': [5.0, 10.0, 15.0, 20.0, 25.0, 30.0, 35.0],
'intensities': [0.022222217777778667, 0.044444444444444446,
0.02222222222222223, 0.066666666666666666, 0.0, 0.044444444444444446],
'counts': [1, 2, 1, 3, 0, 2], 'xname': 'c(23, 14, 32, 25, 12, 9, 35, 18,
24)', 'mids': [7.5, 12.5, 17.5, 22.5, 27.5, 32.5]}
The relevant versions are:
Galaxy: 297d8c9c5eb0 (galaxy-dist from a few weeks ago)
R: 2.10.1
rpy: 1.0.3 (slightly tweaked for two-digit R minor-version strings, sigh)
So after all that, how does your histogram tool manage to pass a matrix to
something that wants a vector, and can ours work that way too, please? :)
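For what it's worth, the workaround we can sketch on our side (not necessarily how the Galaxy tool itself does it) is to flatten the one-column matrix into a plain list before handing it to r.hist, which rpy then converts into an R numeric vector:
# sketch: collapse the one-column matrix into a flat vector before r.hist
from rpy import r
matrix = [[23.0], [14.0], [32.0], [25.0], [12.0], [9.0], [35.0], [18.0], [24.0]]
vector = [row[0] for row in matrix]
r.pdf("histtest.pdf", 8, 8)
r.hist(vector, probability=True, main="Histogram Test", xlab="Count", breaks="Sturges")
r.dev_off()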
Thanks,
--
Ry4an Brase 612-626-6575
University of Minnesota Supercomputing Institute
for Advanced Computational Research http://www.msi.umn.edu
1
0
Hi
I just noticed (after a history refresh) that when I try to use a GFF3
file to extract genomic DNA, a job is created that converts the GFF to
BED. Using the GFF file directly gives the error I detailed earlier (see
below), but using the BED file works fine and I get the genomic sequence
I want.
If I first convert the GFF to a BED file everything is also fine (as
expected from the above).
So, it looks as though the problem is actually associated with the
process of GFF-->BED-->extract genomic and not the actual extraction of
sequences.
Has anyone else seen this behaviour?
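For anyone wanting to reproduce the workaround outside Galaxy, here is a rough sketch of a naive GFF3-to-BED conversion (assumes plain nine-column GFF3; GFF3 coordinates are 1-based inclusive, BED is 0-based half-open):
# rough sketch: naive GFF3 -> BED conversion (assumes simple 9-column GFF3)
def gff3_to_bed(gff_path, bed_path):
    out = open(bed_path, 'w')
    for line in open(gff_path):
        if not line.strip() or line.startswith('#'):
            continue
        fields = line.rstrip('\n').split('\t')
        chrom, source, feature, start, end, score, strand = fields[:7]
        if score == '.':
            score = '0'
        # GFF3 is 1-based inclusive; BED is 0-based half-open
        out.write('\t'.join([chrom, str(int(start) - 1), end, feature, score, strand]) + '\n')
    out.close()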
Thanks
Nathaniel
-------- Original Message --------
Subject: Adding custom genome
Date: Thu, 11 Mar 2010 14:24:13 +0100
From: Nathaniel Street <nathaniel.street(a)plantphys.umu.se>
To: galaxy-dev(a)lists.bx.psu.edu
Hi
I'm trying to add a custom genome to a local galaxy install. So far I
have done this:
1) Create a fasta file per scaffold (it's an unfinished genome)
2) Create a nib file for each of those scaffold fasta files
3)Add an entry to build.txt
nis Test genome (nis)
4)Add a line to alignseq.loc
seq nis /data/nib/nis
5)Add a line to faseq.loc
nis /data/sequences/nis
Is there more that I need to do to be able to extract sequences using
the Extract Genomic DNA tool?
I tried to use the tool by uploading a GFF3 file, extracting a small
part of that for testing and then using that small part to extract
genomic DNA. When I try this I get an error message returned
AttributeError: 'tuple' object has no attribute 'iteritems'
And the text version of the Traceback gives this
URL: http://XXX:XXX:XX:XX:8080/tool_runner/index
File '/home/nat/work/software/galaxy-dist/eggs/WebError-0.8a-py2.6.egg/weberror/evalexception/middleware.py', line 364 in respond
app_iter = self.application(environ, detect_start_response)
File '/home/nat/work/software/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/debug/prints.py', line 98 in __call__
environ, self.app)
File '/home/nat/work/software/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/wsgilib.py', line 539 in intercept_output
app_iter = application(environ, replacement_start_response)
File '/home/nat/work/software/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/recursive.py', line 80 in __call__
return self.application(environ, start_response)
File '/home/nat/work/software/galaxy-dist/eggs/Paste-1.6-py2.6.egg/paste/httpexceptions.py', line 632 in __call__
return self.application(environ, start_response)
File '/home/nat/work/software/galaxy-dist/lib/galaxy/web/framework/base.py', line 125 in __call__
body = method( trans, **kwargs )
File '/home/nat/work/software/galaxy-dist/lib/galaxy/web/controllers/tool_runner.py', line 53 in index
template, vars = tool.handle_input( trans, params.__dict__ )
File '/home/nat/work/software/galaxy-dist/lib/galaxy/tools/__init__.py', line 807 in handle_input
_, out_data = self.execute( trans, incoming=params )
File '/home/nat/work/software/galaxy-dist/lib/galaxy/tools/__init__.py', line 1079 in execute
return self.tool_action.execute( self, trans, incoming=incoming, set_output_hid=set_output_hid )
File '/home/nat/work/software/galaxy-dist/lib/galaxy/tools/actions/__init__.py', line 140 in execute
inp_data = self.collect_input_datasets( tool, incoming, trans )
File '/home/nat/work/software/galaxy-dist/lib/galaxy/tools/actions/__init__.py', line 101 in collect_input_datasets
tool.visit_inputs( param_values, visitor )
File '/home/nat/work/software/galaxy-dist/lib/galaxy/tools/__init__.py', line 754 in visit_inputs
callback( "", input, value[input.name] )
File '/home/nat/work/software/galaxy-dist/lib/galaxy/tools/actions/__init__.py', line 85 in visitor
input_datasets[ prefix + input.name ] = process_dataset( value )
File '/home/nat/work/software/galaxy-dist/lib/galaxy/tools/actions/__init__.py', line 47 in process_dataset
new_data = data.datatype.convert_dataset( trans, data, target_ext, return_output = True, visible = False ).values()[0]
File '/home/nat/work/software/galaxy-dist/lib/galaxy/datatypes/data.py', line 264 in convert_dataset
for name, value in converted_dataset.iteritems():
AttributeError: 'tuple' object has no attribute 'iteritems'
Can anyone tell me what I'm doing wrong? Do I need more than only the
nib files?
Many thanks
Nathaniel
--
Nathaniel Street
Umeå Plant Science Centre
Department of Plant Physiology
University of Umeå
SE-901 87 Umeå
SWEDEN
email: nathaniel.street(a)plantphys.umu.se
tel: +46-90-786 5473
fax: +46-90-786 6676
www.popgenie.org
--
Nathaniel Street
Umeå Plant Science Centre
Department of Plant Physiology
University of Umeå
SE-901 87 Umeå
SWEDEN
email: nathaniel.street(a)plantphys.umu.se
tel: +46-90-786 5473
fax: +46-90-786 6676
www.popgenie.org
2
1

11 Mar '10
Dear Galaxy Team,
I need to run a Galaxy tool on a cluster, and it needs to consume more
memory than the default. Any idea how I can manage this? I am
using 'sge'.
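What I think I need is something like the following in universe_wsgi.ini, assuming the SGE runner accepts native qsub options appended to the tool runner URL (the exact URL syntax may differ by Galaxy version, so the Cluster page on the Galaxy wiki is worth checking) and assuming our queue enforces memory with h_vmem (some use mem_free instead), but I have not found the exact syntax documented:
# universe_wsgi.ini sketch; 'my_big_tool' and the resource name are placeholders
[galaxy:tool_runners]
my_big_tool = sge:///-l h_vmem=8G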
Many thanks, Vipin
2
1
Hi,
I am sending this mail again because I didn't receive a reply the last time.
We have a local installation of Galaxy running on our system. We are
trying to migrate the contents from the sqlite database to our own MySQL
database.
When we change the database connection to
"mysql:///galaxy?unix_socket=/var/run/mysqld/mysqld.sock" in the
universe_wsgi.ini file and run Galaxy, we receive an error like:
"Access denied for user 'root'@'localhost' (using password: NO)") None
None
So is there an option somewhere which we also have to change to make the
connection work?
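We suspect the URL simply carries no credentials, so MySQL falls back to root with no password. Something like the following (with galaxyuser/secret as placeholders for an account granted rights on the galaxy database) is what we expect the connection string should look like, if that is indeed the issue:
database_connection = mysql://galaxyuser:secret@localhost/galaxy?unix_socket=/var/run/mysqld/mysqld.sock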
And question two:
Do we need to migrate the content from the sqlite database to the MySQL
database by hand or does this happen automatically?
I hope to receive an answer soon.
Sincerely,
Freerk van Dijk
The contents of this message are confidential and only intended for the eyes of the addressee(s). Others than the addressee(s) are not allowed to use this message, to make it public or to distribute or multiply this message in any way. The UMCG cannot be held responsible for incomplete reception or delay of this transferred message.
2
1

11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/44e9b5ca9cf0
changeset: 3511:44e9b5ca9cf0
user: fubar: ross Lazarus at gmail period com
date: Wed Mar 10 21:01:20 2010 -0500
description:
Remove deprecated rgenetics Lmap datatype from datatypes_conf.xml.sample
Was causing buildbot problems..
diffstat:
datatypes_conf.xml.sample | 1 -
1 files changed, 0 insertions(+), 1 deletions(-)
diffs (11 lines):
diff -r 2af472aa0844 -r 44e9b5ca9cf0 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Wed Mar 10 17:10:43 2010 -0500
+++ b/datatypes_conf.xml.sample Wed Mar 10 21:01:20 2010 -0500
@@ -194,7 +194,6 @@
<!-- genome graphs ucsc file - first col is always marker then numeric values to plot -->
<datatype extension="gg" type="galaxy.datatypes.genetics:GenomeGraphs"/>
<!-- part of linkage format pedigree -->
- <datatype extension="lmap" type="galaxy.datatypes.genetics:Lmap" display_in_upload="true"/>
<datatype extension="malist" type="galaxy.datatypes.genetics:MAlist" display_in_upload="true"/>
<!-- linkage format pedigree (separate .map file) -->
<datatype extension="lped" type="galaxy.datatypes.genetics:Lped" display_in_upload="true">
1
0

11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/2af472aa0844
changeset: 3510:2af472aa0844
user: fubar: ross Lazarus at gmail period com
date: Wed Mar 10 17:10:43 2010 -0500
description:
Revert the twilltestcase upload_composite code needed for lped/pbed upload testing
diffstat:
test/base/twilltestcase.py | 54 ++++++++++++++++++++++++++++++++++++++++
test/functional/test_get_data.py | 6 ++--
test/functional/test_toolbox.py | 1 +
3 files changed, 58 insertions(+), 3 deletions(-)
diffs (100 lines):
diff -r 26c40d8e8fdc -r 2af472aa0844 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Mar 10 16:23:53 2010 -0500
+++ b/test/base/twilltestcase.py Wed Mar 10 17:10:43 2010 -0500
@@ -196,6 +196,60 @@
# Wait for upload processing to finish (TODO: this should be done in each test case instead)
self.wait()
+ def upload_composite_datatype_file( self, ftype, ped_file='', map_file='', bim_file='', bed_file='',
+ fped_file='',fphe_file='',pphe_file='',fam_file='',pheno_file='',eset_file='',malist_file='',
+ affybatch_file='', dbkey='unspecified (?)', base_name='rgenetics' ):
+ """Tests uploading either of 2 different composite data types ( lped and pbed )"""
+ self.visit_url( "%s/tool_runner/index?tool_id=upload1" % self.url )
+ # Handle refresh_on_change
+ self.refresh_form( "file_type", ftype )
+ tc.fv( "1", "dbkey", dbkey )
+ tc.fv( "1", "files_metadata|base_name", base_name )
+ if ftype == 'lped':
+ # lped data types include a ped_file and a map_file
+ ped_file = self.get_filename( ped_file )
+ tc.formfile( "1", "files_0|file_data", ped_file )
+ map_file = self.get_filename( map_file )
+ tc.formfile( "1", "files_1|file_data", map_file )
+ elif ftype == 'pbed':
+ # pbed data types include a bim_file, a bed_file and a fam_file
+ bim_file = self.get_filename( bim_file )
+ tc.formfile( "1", "files_0|file_data", bim_file )
+ bed_file = self.get_filename( bed_file )
+ tc.formfile( "1", "files_1|file_data", bed_file )
+ fam_file = self.get_filename( fam_file )
+ tc.formfile( "1", "files_2|file_data", fam_file )
+ elif ftype == 'pphe':
+ # pphe data types include a phe_file
+ pphe_file = self.get_filename( pphe_file )
+ tc.formfile( "1", "files_0|file_data", pphe_file )
+ elif ftype == 'fped':
+ # fped data types include an fped_file only
+ fped_file = self.get_filename( fped_file )
+ tc.formfile( "1", "files_0|file_data", fped_file )
+ elif ftype == 'eset':
+ # eset data types include a eset_file, a pheno_file
+ eset_file = self.get_filename( eset_file )
+ tc.formfile( "1", "files_0|file_data", eset_file )
+ pheno_file = self.get_filename( pheno_file )
+ tc.formfile( "1", "files_1|file_data", pheno_file )
+ elif ftype == 'affybatch':
+ # affybatch data types include an affybatch_file, and a pheno_file
+ affybatch_file = self.get_filename( affybatch_file )
+ tc.formfile( "1", "files_0|file_data", affybatch_file )
+ pheno_file = self.get_filename( pheno_file )
+ tc.formfile( "1", "files_1|file_data", pheno_file )
+ else:
+ raise AssertionError, "Unsupported composite data type (%s) received, currently only %s data types are supported."\
+ % (ftype,','.join(self.composite_extensions))
+ tc.submit( "runtool_btn" )
+ self.check_page_for_string( 'The following job has been succesfully added to the queue:' )
+ check_str = base_name #'Uploaded Composite Dataset (%s)' % ftype
+ self.check_page_for_string( check_str )
+ # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+ self.wait()
+ self.check_history_for_string( check_str )
+
# Functions associated with histories
def check_history_for_errors( self ):
"""Raises an exception if there are errors in a history"""
diff -r 26c40d8e8fdc -r 2af472aa0844 test/functional/test_get_data.py
--- a/test/functional/test_get_data.py Wed Mar 10 16:23:53 2010 -0500
+++ b/test/functional/test_get_data.py Wed Mar 10 17:10:43 2010 -0500
@@ -183,7 +183,7 @@
# We'll test against the resulting ped file and map file for correctness
self.verify_composite_datatype_file_content( 'rgenetics.ped', str( hda.id ) )
self.verify_composite_datatype_file_content( 'rgenetics.map', str( hda.id ) )
- self.check_history_for_string( "Uploaded Composite Dataset (lped)" )
+ self.check_history_for_string( "rgenetics" )
self.delete_history( id=self.security.encode_id( history.id ) )
def test_0060_upload_file( self ):
"""Test uploading pbed composite datatype file, manually setting the file format"""
@@ -205,7 +205,7 @@
self.verify_composite_datatype_file_content( 'rgenetics.bim', str( hda.id ) )
self.verify_composite_datatype_file_content( 'rgenetics.bed', str( hda.id ) )
self.verify_composite_datatype_file_content( 'rgenetics.fam', str( hda.id ) )
- self.check_history_for_string( "Uploaded Composite Dataset (pbed)" )
+ self.check_history_for_string( "rgenetics" )
self.delete_history( id=self.security.encode_id( history.id ) )
def test_0065_upload_file( self ):
"""Test uploading asian_chars_1.txt, NOT setting the file format"""
@@ -576,4 +576,4 @@
self.check_history_for_string( 'hello world' )
self.delete_history( id=self.security.encode_id( history.id ) )
def test_9999_clean_up( self ):
- self.logout()
\ No newline at end of file
+ self.logout()
diff -r 26c40d8e8fdc -r 2af472aa0844 test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py Wed Mar 10 16:23:53 2010 -0500
+++ b/test/functional/test_toolbox.py Wed Mar 10 17:10:43 2010 -0500
@@ -161,3 +161,4 @@
m.__doc__ = "%s ( %s ) > %s" % ( tool.name, tool.id, testdef.name )
d['test_tool_%06d' % j] = m
G[ n ] = new.classobj( n, s, d )
+
1
0

11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/26c40d8e8fdc
changeset: 3509:26c40d8e8fdc
user: gua110
date: Wed Mar 10 16:23:53 2010 -0500
description:
Updated functional tests for "multivariate statistics" tools, by shortening the number of decimal places displayed, so that they pass on all test platforms.
diffstat:
test-data/cca_out1.tabular | 30 +-
test-data/cca_out2.pdf | 4 +-
test-data/kcca_out1.tabular | 602 +++++++++++++++++++-------------------
test-data/kcca_out2.tabular | 602 +++++++++++++++++++-------------------
test-data/kpca_out1.tabular | 602 +++++++++++++++++++-------------------
test-data/kpca_out2.pdf | 4 +-
test-data/kpca_out3.tabular | 602 +++++++++++++++++++-------------------
test-data/kpca_out4.pdf | 4 +-
test-data/pca_out1.tabular | 312 ++++++++++----------
test-data/pca_out2.pdf | 4 +-
test-data/pca_out3.tabular | 312 ++++++++++----------
test-data/pca_out4.pdf | 4 +-
tools/multivariate_stats/cca.py | 18 +-
tools/multivariate_stats/kcca.py | 6 +-
tools/multivariate_stats/kpca.py | 6 +-
tools/multivariate_stats/pca.py | 19 +-
16 files changed, 1568 insertions(+), 1563 deletions(-)
diffs (truncated from 3327 to 3000 lines):
diff -r 07a608852925 -r 26c40d8e8fdc test-data/cca_out1.tabular
--- a/test-data/cca_out1.tabular Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/cca_out1.tabular Wed Mar 10 16:23:53 2010 -0500
@@ -1,22 +1,22 @@
#Component 1 2
-#Correlation 0.940897180432 0.131074795925
-#F-statistic 144.410560578 2.5696974623
-#p-value 6.21285598619e-68 0.111075110551
+#Correlation 0.9409 0.1311
+#F-statistic 144.4 2.57
+#p-value 6.213e-68 0.1111
#X-Coefficients 1 2
-c3 1.50661351834 -3.37790409332
-c4 -0.537226204038 3.65944099051
+c3 1.507 -3.378
+c4 -0.5372 3.659
#Y-Coefficients 1 2
-c1 6.35046749378 3.37940792566
-c2 -2.6597206473 6.66976562808
+c1 6.35 3.379
+c2 -2.66 6.67
#X-Loadings 1 2
-c3 0.989395177676 0.145248691528
-c4 0.913276653253 0.407339851504
+c3 0.9894 0.1452
+c4 0.9133 0.4073
#Y-Loadings 1 2
-c1 0.928869265104 0.370407732566
-c2 -0.469775462051 0.882785939656
+c1 0.9289 0.3704
+c2 -0.4698 0.8828
#X-CrossLoadings 1 2
-c3 0.930919133009 0.0190384426004
-c4 0.859299428 0.0533919879079
+c3 0.9309 0.01904
+c4 0.8593 0.05339
#Y-CrossLoadings 1 2
-c1 0.873970472527 0.0485511179549
-c2 -0.44201040768 0.115710986885
+c1 0.874 0.04855
+c2 -0.442 0.1157
diff -r 07a608852925 -r 26c40d8e8fdc test-data/cca_out2.pdf
--- a/test-data/cca_out2.pdf Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/cca_out2.pdf Wed Mar 10 16:23:53 2010 -0500
@@ -2,8 +2,8 @@
%âãÏÓ\r
1 0 obj
<<
-/CreationDate (D:20100303132536)
-/ModDate (D:20100303132536)
+/CreationDate (D:20100310155029)
+/ModDate (D:20100310155029)
/Title (R Graphics Output)
/Producer (R 2.10.0)
/Creator (R)
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kcca_out1.tabular
--- a/test-data/kcca_out1.tabular Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kcca_out1.tabular Wed Mar 10 16:23:53 2010 -0500
@@ -1,304 +1,304 @@
#Component 1 2 3 4
-#Correlation -0.999499965799 0.999499965799 -0.937388978867 0.937388978867
+#Correlation -0.9995 0.9995 -0.9374 0.9374
#Estimated X-coefficients 1 2 3 4
-1 -0.00159226306681 -0.00159226306681 0.0347922336733 0.0347922336734
-2 -0.00110978716164 -0.00110978716146 0.0325550634964 0.0325550634964
-3 0.000104871380431 0.00010487138039 0.0183484731624 0.0183484731623
-4 0.00102415734634 0.00102415734636 -0.00485228064778 -0.00485228064775
-5 -0.000169999236028 -0.000169999236004 0.0524192345792 0.0524192345792
-6 0.00230258587505 0.00230258587502 0.0196794881537 0.0196794881537
-7 0.00145199861756 0.00145199861756 0.00575575672441 0.00575575672438
-8 -0.00159595387536 -0.00159595387541 0.0382833844726 0.0382833844726
-9 0.00360877029314 0.00360877029312 -0.0750970801906 -0.0750970801906
-10 -0.00136083597277 -0.00136083597278 0.0318384011561 0.0318384011561
-11 -0.000806991906868 -0.000806991906869 0.00139328931528 0.00139328931529
-12 -0.000200007372161 -0.000200007372182 0.0392128558972 0.0392128558972
-13 -0.000376620449557 -0.000376620449546 0.0293209660811 0.0293209660811
-14 0.00424812920862 0.00424812920862 -0.131314655808 -0.131314655808
-15 0.00440751040841 0.00440751040844 -0.0215875677083 -0.0215875677083
-16 0.0158137737287 0.0158137737287 0.0413483506506 0.0413483506506
-17 0.00230258096229 0.00230258096233 0.0196792569231 0.0196792569231
-18 -0.00159229337587 -0.00159229337587 0.034791794765 0.034791794765
-19 0.000470908151429 0.000470908151435 -0.033768454116 -0.033768454116
-20 0.00175754173735 0.00175754173739 0.0594708458718 0.0594708458718
-21 -0.00334320757685 -0.00334320757684 -0.0210157518991 -0.0210157518991
-22 0.000356998265944 0.00035699826596 0.0506142212928 0.0506142212928
-23 0.00287793787049 0.00287793787051 0.0198921560768 0.0198921560768
-24 -0.00253287592115 -0.00253287592111 0.0229011408536 0.0229011408536
-25 -0.000199966741261 -0.000199966741259 0.039213190751 0.0392131907509
-26 -0.00177266177845 -0.00177266177847 0.0283910522008 0.0283910522008
-27 -0.00159590123482 -0.00159590123485 0.038282588671 0.038282588671
-28 -0.00208498848365 -0.00208498848366 0.0204255781063 0.0204255781063
-29 -0.00267192971883 -0.00267192971883 0.0138389269867 0.0138389269867
-30 0.000104980469185 0.000104980469188 0.0183490300234 0.0183490300234
-31 -0.000627859471217 -0.000627859471211 0.0286050882842 0.0286050882842
-32 -0.00334315632058 -0.0033431563206 -0.021015317998 -0.021015317998
-33 0.00737949966525 0.00737949966526 0.0757911137154 0.0757911137153
-34 0.00926681020655 0.00926681020653 0.0362748843231 0.0362748843231
-35 -0.00136084775243 -0.00136084775244 0.0318385553343 0.0318385553343
-36 -0.00208708693783 -0.00208708693783 0.029318413035 0.029318413035
-37 -0.00291824596158 -0.0029182459616 -0.0316400169446 -0.0316400169446
-38 -0.00136084705751 -0.00136084705752 0.0318383985846 0.0318383985845
-39 0.00317922708464 0.00317922708461 -0.0784504607557 -0.0784504607557
-40 -0.00217929118556 -0.00217929118555 0.0282052840325 0.0282052840325
-41 -0.00100896730901 -0.00100896730901 0.0448689392189 0.0448689392188
-42 0.0088378418176 0.00883784181758 0.0414569436219 0.0414569436219
-43 0.00286479080033 0.00286479080034 -0.0775226035577 -0.0775226035577
-44 -0.00100892420468 -0.00100892420468 0.044868959418 0.044868959418
-45 0.00175754329615 0.00175754329613 0.0594711681396 0.0594711681395
-46 -0.000376629073915 -0.000376629073921 0.0293209668801 0.0293209668801
-47 0.00175754920557 0.00175754920562 0.0594710191761 0.0594710191761
-48 0.000960826589906 0.000960826589907 -0.00320856169229 -0.00320856169234
-49 -0.000526936996666 -0.000526936996691 0.0192804658813 0.0192804658813
-50 -0.00194953734422 -0.0019495373442 0.0329782808594 0.0329782808594
-51 6.55505160483e-05 6.55505160251e-05 0.0963097412008 0.0963097412008
-52 -0.0011718760537 -0.00117187605367 -0.0201469456847 -0.0201469456848
-53 0.000176671254579 0.000176671254581 0.0847986697478 0.0847986697478
-54 0.00296310911687 0.00296310911688 0.0293431942735 0.0293431942735
-55 0.000607776190472 0.000607776190486 0.0117139207005 0.0117139207005
-56 -0.00261157098019 -0.00261157098018 -0.0654067243745 -0.0654067243745
-57 -0.0015146048768 -0.0015146048768 -0.0376121620644 -0.0376121620644
-58 0.00404504063063 0.00404504063063 0.092945109105 0.092945109105
-59 0.00036877721416 0.000368777214162 0.0295027716538 0.0295027716538
-60 -0.00104700002988 -0.00104700002991 0.0224831294608 0.0224831294608
-61 0.0106423484729 0.0106423484729 0.152385267998 0.152385267998
-62 -0.0031520185592 -0.00315201855917 -0.0846778020825 -0.0846778020825
-63 0.00557212171653 0.00557212171651 0.0129291757862 0.0129291757862
-64 -0.00189259304055 -0.00189259304053 -0.0699730283655 -0.0699730283655
-65 -0.00329284250094 -0.00329284250094 -0.059850140447 -0.059850140447
-66 -4.19255339958e-05 -4.19255339884e-05 0.0484549594796 0.0484549594797
-67 -0.00372238588335 -0.00372238588336 -0.0632035000244 -0.0632035000244
-68 -0.00164700987437 -0.00164700987437 -0.0637729313267 -0.0637729313267
-69 0.00650587912972 0.00650587912971 0.0346722693281 0.0346722693281
-70 0.000134953294294 0.000134953294323 -0.0187985221915 -0.0187985221915
-71 -0.00346646664488 -0.00346646664488 -0.0837494602393 -0.0837494602393
-72 -0.00129113838679 -0.00129113838678 -0.0638064857952 -0.0638064857951
-73 0.00251974364983 0.00251974364985 0.0022055760206 0.00220557602061
-74 -0.00129116036811 -0.0012911603681 -0.0638066193927 -0.0638066193927
-75 -0.000427852329099 -0.000427852329122 -0.0177210266231 -0.0177210266231
-76 -6.07104681603e-05 -6.07104681845e-05 0.0261490639915 0.0261490639915
-77 0.00140452555267 0.00140452555267 0.0790608976409 0.0790608976408
-78 0.000209327997368 0.000209327997393 0.0491700187535 0.0491700187535
-79 -0.00233986780648 -0.00233986780648 -0.0783320881243 -0.0783320881243
-80 -0.000900328222947 -0.00090032822295 -0.0446524406999 -0.0446524406999
-81 0.00147282851191 0.00147282851192 0.0122725620319 0.0122725620319
-82 0.00147281209976 0.00147281209977 0.0122725086224 0.0122725086224
-83 -0.00164700173013 -0.00164700173014 -0.06377323849 -0.06377323849
-84 -0.000967708203349 -0.000967708203351 -0.0631482116068 -0.0631482116068
-85 -0.00351986557339 -0.00351986557338 -0.030908097987 -0.030908097987
-86 -0.00259272827972 -0.00259272827972 -0.0717927951437 -0.0717927951437
-87 -4.18965141628e-05 -4.18965141522e-05 0.0484552570532 0.0484552570532
-88 0.00530734252965 0.00530734252963 0.0352616047044 0.0352616047044
-89 -0.00372239312775 -0.00372239312776 -0.0632034676432 -0.0632034676432
-90 0.000175478311049 0.000175478311042 -0.00371336493864 -0.00371336493865
-91 -0.000939577619534 -0.000939577619522 -0.0178449710765 -0.0178449710765
-92 -0.00232205533945 -0.00232205533946 -0.0733261558013 -0.0733261558013
-93 -0.00070634156182 -0.000706341561824 -0.052035469948 -0.052035469948
-94 0.00487236705406 0.00487236705406 0.105852225573 0.105852225573
-95 -0.00192078284287 -0.00192078284288 -0.0446670554592 -0.0446670554592
-96 -0.00364258343827 -0.00364258343827 -0.0749260068443 -0.0749260068443
-97 -0.00321303286671 -0.0032130328667 -0.0715723152411 -0.0715723152411
-98 -0.00140604417103 -0.00140604417105 -0.0565884530031 -0.0565884530031
-99 0.00150148065434 0.00150148065431 0.0627184287393 0.0627184287394
-100 -0.00261158477 -0.00261158476999 -0.0654066442111 -0.0654066442112
-101 -0.00151458006047 -0.00151458006047 -0.0376119582238 -0.0376119582238
-102 -0.00164700343889 -0.00164700343888 -0.06377288888 -0.06377288888
-103 0.000251068468294 0.000251068468298 0.0975435120003 0.0975435120003
-104 -0.000908080899935 -0.000908080899925 -0.0388460712661 -0.0388460712661
-105 -0.000423216718942 -0.000423216718959 0.00219435777697 0.00219435777695
-106 0.000255624978855 0.00025562497886 -0.0480116282141 -0.0480116282141
-107 0.00274773787057 0.00274773787055 0.0769590274139 0.0769590274139
-108 0.000341726654612 0.000341726654597 0.0766246057863 0.0766246057863
-109 0.00406672119039 0.0040667211904 0.0935756884052 0.0935756884052
-110 0.00168012039808 0.00168012039807 0.114599686992 0.114599686992
-111 -0.000737702734906 -0.000737702734914 0.0031222049116 0.00312220491159
-112 0.000944239770407 0.000944239770391 -0.00253804367075 -0.00253804367075
-113 0.000373533380611 0.000373533380627 0.0695413493313 0.0695413493313
-114 0.000214764783387 0.000214764783394 -0.0305207662625 -0.0305207662625
-115 -0.0024176574971 -0.00241765749712 -0.0727895122943 -0.0727895122943
-116 -0.00117184106569 -0.00117184106569 -0.020147379235 -0.020147379235
-117 -0.000423245890376 -0.000423245890374 0.00219411724102 0.002194117241
-118 0.00511278621358 0.00511278621362 -0.0711192328408 -0.0711192328408
-119 0.00374158288488 0.00374158288488 -0.0820025184858 -0.0820025184857
-120 0.00557210528707 0.00557210528706 0.0129285972769 0.0129285972769
-121 0.000113440914253 0.000113440914255 0.0864426484813 0.0864426484813
-122 -0.00269141225358 -0.00269141225358 -0.0536835150794 -0.0536835150794
-123 0.00203032906275 0.00203032906276 -0.102756314558 -0.102756314558
-124 0.000463985316019 0.00046398531602 -0.0236637080927 -0.0236637080927
-125 3.23398695696e-05 3.23398695946e-05 0.0537583523329 0.0537583523329
-126 -0.000236944823046 -0.000236944823052 0.0914993805259 0.0914993805259
-127 -0.000804521731675 -0.000804521731666 -0.0504218395017 -0.0504218395017
-128 -0.00232209010852 -0.00232209010854 -0.0733257347061 -0.0733257347061
-129 0.000173591516244 0.000173591516248 -0.0115550211641 -0.0115550211641
-130 7.75013058308e-05 7.7501305819e-05 0.0905707020934 0.0905707020934
-131 0.000856346120183 0.000856346120186 0.0542528201863 0.0542528201863
-132 0.00833835823432 0.0083383582343 -0.232668169832 -0.232668169832
-133 0.000173619999677 0.000173619999659 -0.0115549698037 -0.0115549698037
-134 -0.000306631445279 -0.000306631445295 -0.0326803614331 -0.032680361433
-135 0.000420140644556 0.00042014064454 -0.0430529283201 -0.0430529283201
-136 0.0009993072384 0.000999307238389 -0.112276057196 -0.112276057196
-137 -0.00116097843462 -0.00116097843463 -0.0323074368066 -0.0323074368066
-138 -0.00110862543672 -0.0011086254367 -0.0217901323169 -0.0217901323169
-139 -0.0027693146479 -0.00276931464791 -0.0816839889734 -0.0816839889734
-140 0.000176676195733 0.000176676195739 0.0847993817539 0.0847993817538
-141 -4.19162508702e-05 -4.19162509088e-05 0.048455131876 0.048455131876
-142 0.000176690917474 0.000176690917456 0.0847992182528 0.0847992182528
-143 -0.00164700027723 -0.00164700027725 -0.0637729771005 -0.0637729771005
-144 5.90748259444e-05 5.90748259518e-05 0.0704687213602 0.0704687213602
-145 3.23787858251e-05 3.23787858432e-05 0.0537581665472 0.0537581665472
-146 0.000209342022695 0.000209342022689 0.0491697329809 0.0491697329809
-147 0.00251978363427 0.00251978363426 0.00220560706463 0.00220560706461
-148 -0.000423180575963 -0.000423180576007 0.00219444456054 0.00219444456053
-149 -0.00165893244585 -0.00165893244587 -0.0500493239232 -0.0500493239232
-150 -0.00315200955113 -0.00315200955116 -0.0846780840986 -0.0846780840986
+1 -0.001592 -0.001592 0.03479 0.03479
+2 -0.00111 -0.00111 0.03256 0.03256
+3 0.0001049 0.0001049 0.01835 0.01835
+4 0.001024 0.001024 -0.004852 -0.004852
+5 -0.00017 -0.00017 0.05242 0.05242
+6 0.002303 0.002303 0.01968 0.01968
+7 0.001452 0.001452 0.005756 0.005756
+8 -0.001596 -0.001596 0.03828 0.03828
+9 0.003609 0.003609 -0.0751 -0.0751
+10 -0.001361 -0.001361 0.03184 0.03184
+11 -0.000807 -0.000807 0.001393 0.001393
+12 -0.0002 -0.0002 0.03921 0.03921
+13 -0.0003766 -0.0003766 0.02932 0.02932
+14 0.004248 0.004248 -0.1313 -0.1313
+15 0.004408 0.004408 -0.02159 -0.02159
+16 0.01581 0.01581 0.04135 0.04135
+17 0.002303 0.002303 0.01968 0.01968
+18 -0.001592 -0.001592 0.03479 0.03479
+19 0.0004709 0.0004709 -0.03377 -0.03377
+20 0.001758 0.001758 0.05947 0.05947
+21 -0.003343 -0.003343 -0.02102 -0.02102
+22 0.000357 0.000357 0.05061 0.05061
+23 0.002878 0.002878 0.01989 0.01989
+24 -0.002533 -0.002533 0.0229 0.0229
+25 -0.0002 -0.0002 0.03921 0.03921
+26 -0.001773 -0.001773 0.02839 0.02839
+27 -0.001596 -0.001596 0.03828 0.03828
+28 -0.002085 -0.002085 0.02043 0.02043
+29 -0.002672 -0.002672 0.01384 0.01384
+30 0.000105 0.000105 0.01835 0.01835
+31 -0.0006279 -0.0006279 0.02861 0.02861
+32 -0.003343 -0.003343 -0.02102 -0.02102
+33 0.007379 0.007379 0.07579 0.07579
+34 0.009267 0.009267 0.03627 0.03627
+35 -0.001361 -0.001361 0.03184 0.03184
+36 -0.002087 -0.002087 0.02932 0.02932
+37 -0.002918 -0.002918 -0.03164 -0.03164
+38 -0.001361 -0.001361 0.03184 0.03184
+39 0.003179 0.003179 -0.07845 -0.07845
+40 -0.002179 -0.002179 0.02821 0.02821
+41 -0.001009 -0.001009 0.04487 0.04487
+42 0.008838 0.008838 0.04146 0.04146
+43 0.002865 0.002865 -0.07752 -0.07752
+44 -0.001009 -0.001009 0.04487 0.04487
+45 0.001758 0.001758 0.05947 0.05947
+46 -0.0003766 -0.0003766 0.02932 0.02932
+47 0.001758 0.001758 0.05947 0.05947
+48 0.0009608 0.0009608 -0.003209 -0.003209
+49 -0.0005269 -0.0005269 0.01928 0.01928
+50 -0.00195 -0.00195 0.03298 0.03298
+51 6.555e-05 6.555e-05 0.09631 0.09631
+52 -0.001172 -0.001172 -0.02015 -0.02015
+53 0.0001767 0.0001767 0.0848 0.0848
+54 0.002963 0.002963 0.02934 0.02934
+55 0.0006078 0.0006078 0.01171 0.01171
+56 -0.002612 -0.002612 -0.06541 -0.06541
+57 -0.001515 -0.001515 -0.03761 -0.03761
+58 0.004045 0.004045 0.09295 0.09295
+59 0.0003688 0.0003688 0.0295 0.0295
+60 -0.001047 -0.001047 0.02248 0.02248
+61 0.01064 0.01064 0.1524 0.1524
+62 -0.003152 -0.003152 -0.08468 -0.08468
+63 0.005572 0.005572 0.01293 0.01293
+64 -0.001893 -0.001893 -0.06997 -0.06997
+65 -0.003293 -0.003293 -0.05985 -0.05985
+66 -4.193e-05 -4.193e-05 0.04845 0.04845
+67 -0.003722 -0.003722 -0.0632 -0.0632
+68 -0.001647 -0.001647 -0.06377 -0.06377
+69 0.006506 0.006506 0.03467 0.03467
+70 0.000135 0.000135 -0.0188 -0.0188
+71 -0.003466 -0.003466 -0.08375 -0.08375
+72 -0.001291 -0.001291 -0.06381 -0.06381
+73 0.00252 0.00252 0.002206 0.002206
+74 -0.001291 -0.001291 -0.06381 -0.06381
+75 -0.0004279 -0.0004279 -0.01772 -0.01772
+76 -6.071e-05 -6.071e-05 0.02615 0.02615
+77 0.001405 0.001405 0.07906 0.07906
+78 0.0002093 0.0002093 0.04917 0.04917
+79 -0.00234 -0.00234 -0.07833 -0.07833
+80 -0.0009003 -0.0009003 -0.04465 -0.04465
+81 0.001473 0.001473 0.01227 0.01227
+82 0.001473 0.001473 0.01227 0.01227
+83 -0.001647 -0.001647 -0.06377 -0.06377
+84 -0.0009677 -0.0009677 -0.06315 -0.06315
+85 -0.00352 -0.00352 -0.03091 -0.03091
+86 -0.002593 -0.002593 -0.07179 -0.07179
+87 -4.19e-05 -4.19e-05 0.04846 0.04846
+88 0.005307 0.005307 0.03526 0.03526
+89 -0.003722 -0.003722 -0.0632 -0.0632
+90 0.0001755 0.0001755 -0.003713 -0.003713
+91 -0.0009396 -0.0009396 -0.01784 -0.01784
+92 -0.002322 -0.002322 -0.07333 -0.07333
+93 -0.0007063 -0.0007063 -0.05204 -0.05204
+94 0.004872 0.004872 0.1059 0.1059
+95 -0.001921 -0.001921 -0.04467 -0.04467
+96 -0.003643 -0.003643 -0.07493 -0.07493
+97 -0.003213 -0.003213 -0.07157 -0.07157
+98 -0.001406 -0.001406 -0.05659 -0.05659
+99 0.001501 0.001501 0.06272 0.06272
+100 -0.002612 -0.002612 -0.06541 -0.06541
+101 -0.001515 -0.001515 -0.03761 -0.03761
+102 -0.001647 -0.001647 -0.06377 -0.06377
+103 0.0002511 0.0002511 0.09754 0.09754
+104 -0.0009081 -0.0009081 -0.03885 -0.03885
+105 -0.0004232 -0.0004232 0.002194 0.002194
+106 0.0002556 0.0002556 -0.04801 -0.04801
+107 0.002748 0.002748 0.07696 0.07696
+108 0.0003417 0.0003417 0.07662 0.07662
+109 0.004067 0.004067 0.09358 0.09358
+110 0.00168 0.00168 0.1146 0.1146
+111 -0.0007377 -0.0007377 0.003122 0.003122
+112 0.0009442 0.0009442 -0.002538 -0.002538
+113 0.0003735 0.0003735 0.06954 0.06954
+114 0.0002148 0.0002148 -0.03052 -0.03052
+115 -0.002418 -0.002418 -0.07279 -0.07279
+116 -0.001172 -0.001172 -0.02015 -0.02015
+117 -0.0004232 -0.0004232 0.002194 0.002194
+118 0.005113 0.005113 -0.07112 -0.07112
+119 0.003742 0.003742 -0.082 -0.082
+120 0.005572 0.005572 0.01293 0.01293
+121 0.0001134 0.0001134 0.08644 0.08644
+122 -0.002691 -0.002691 -0.05368 -0.05368
+123 0.00203 0.00203 -0.1028 -0.1028
+124 0.000464 0.000464 -0.02366 -0.02366
+125 3.234e-05 3.234e-05 0.05376 0.05376
+126 -0.0002369 -0.0002369 0.0915 0.0915
+127 -0.0008045 -0.0008045 -0.05042 -0.05042
+128 -0.002322 -0.002322 -0.07333 -0.07333
+129 0.0001736 0.0001736 -0.01156 -0.01156
+130 7.75e-05 7.75e-05 0.09057 0.09057
+131 0.0008563 0.0008563 0.05425 0.05425
+132 0.008338 0.008338 -0.2327 -0.2327
+133 0.0001736 0.0001736 -0.01155 -0.01155
+134 -0.0003066 -0.0003066 -0.03268 -0.03268
+135 0.0004201 0.0004201 -0.04305 -0.04305
+136 0.0009993 0.0009993 -0.1123 -0.1123
+137 -0.001161 -0.001161 -0.03231 -0.03231
+138 -0.001109 -0.001109 -0.02179 -0.02179
+139 -0.002769 -0.002769 -0.08168 -0.08168
+140 0.0001767 0.0001767 0.0848 0.0848
+141 -4.192e-05 -4.192e-05 0.04846 0.04846
+142 0.0001767 0.0001767 0.0848 0.0848
+143 -0.001647 -0.001647 -0.06377 -0.06377
+144 5.907e-05 5.907e-05 0.07047 0.07047
+145 3.238e-05 3.238e-05 0.05376 0.05376
+146 0.0002093 0.0002093 0.04917 0.04917
+147 0.00252 0.00252 0.002206 0.002206
+148 -0.0004232 -0.0004232 0.002194 0.002194
+149 -0.001659 -0.001659 -0.05005 -0.05005
+150 -0.003152 -0.003152 -0.08468 -0.08468
#Estimated Y-coefficients 1 2 3 4
-1 -0.00207802379602 0.00207802379604 0.0147525748618 -0.0147525748618
-2 -0.00207803770129 0.00207803770129 0.014754894517 -0.0147548945171
-3 -0.00186125958712 0.00186125958712 0.00430906718496 -0.00430906718501
-4 -0.00204203652249 0.00204203652248 0.00946170484263 -0.00946170484263
-5 -0.00207799868272 0.00207799868272 0.0147524900087 -0.0147524900087
-6 0.00466745517136 -0.00466745517131 -0.00473638718146 0.00473638718143
-7 0.00127661314181 -0.0012766131418 0.0308096049073 -0.0308096049073
-8 -0.00204202966462 0.0020420296646 0.00946178027838 -0.00946178027836
-9 -0.00207798104461 0.00207798104462 0.0147522000925 -0.0147522000924
-10 -0.00592375302166 0.00592375302167 -0.0133800658341 0.0133800658341
-11 -0.00204201746389 0.0020420174639 0.00946180738045 -0.00946180738041
-12 -0.00184159719676 0.00184159719675 -0.00664656658428 0.00664656658427
-13 -0.00595967357466 0.00595967357467 -0.00808941808643 0.00808941808643
-14 -0.00418938935089 0.0041893893509 -0.108608995338 0.108608995338
-15 -0.0013012394729 0.00130123947293 -0.0272679367976 0.0272679367976
-16 0.00418518885797 -0.00418518885798 0.0339388340086 -0.0339388340086
-17 0.00436596427266 -0.00436596427265 0.0287864784987 -0.0287864784987
-18 0.00127661524729 -0.00127661524728 0.0308095328996 -0.0308095328996
-19 0.00179485276965 -0.00179485276965 -0.013157096566 0.013157096566
-20 0.00131259459476 -0.00131259459477 0.0255187095653 -0.0255187095653
-21 -0.00155977265386 0.00155977265387 -0.0292141997607 0.0292141997607
-22 0.0041852046722 -0.0041852046722 0.0339387026427 -0.0339387026427
-23 0.00120614277783 -0.00120614277786 -0.17724731125 0.17724731125
-24 0.00710764806912 -0.00710764806913 -0.00432447811811 0.00432447811812
-25 -0.0010444934481 0.00104449344812 -0.0793995548327 0.0793995548327
-26 -0.00184159033127 0.00184159033127 -0.00664665686254 0.00664665686252
-27 0.00438565231829 -0.00438565231831 0.0178306826236 -0.0178306826236
-28 -0.00204202477243 0.00204202477243 0.0094617038311 -0.00946170383111
-29 -0.0020779828495 0.0020779828495 0.0147525280146 -0.0147525280146
-30 -0.00184157136867 0.00184157136867 -0.00664674784976 0.00664674784975
-31 -0.0018415799055 0.00184157990549 -0.00664671809202 0.00664671809201
-32 0.0041851833166 -0.0041851833166 0.0339389281678 -0.0339389281679
-33 -0.00592375365141 0.00592375365141 -0.013380052398 0.0133800523981
-34 -0.00207798401128 0.00207798401127 0.0147525387324 -0.0147525387324
-35 -0.00592374684747 0.00592374684747 -0.0133800953506 0.0133800953506
-36 -0.00130125322065 0.00130125322063 -0.0272678229064 0.0272678229064
-37 -0.00186126364156 0.00186126364158 0.00430983712957 -0.00430983712957
-38 -0.0059237521318 0.00592375213181 -0.013380153675 0.013380153675
-39 -0.00186126331287 0.00186126331284 0.00430983539031 -0.00430983539032
-40 -0.00204201942989 0.00204201942987 0.00946167601189 -0.00946167601188
-41 0.00149334720855 -0.00149334720853 0.0203666076652 -0.0203666076652
-42 0.00149334486802 -0.00149334486802 0.0203666572745 -0.0203666572745
-43 -0.00186125654471 0.00186125654469 0.00430985522631 -0.00430985522632
-44 0.00888546279299 -0.00888546279299 0.0107444363185 -0.0107444363185
-45 0.00518271680498 -0.005182716805 -0.0549225278324 0.0549225278324
-46 0.00127661752013 -0.00127661752012 0.0308093216206 -0.0308093216206
-47 -0.00184157347647 0.00184157347649 -0.00664647143535 0.00664647143536
-48 -0.00207797495723 0.00207797495723 0.0147525398855 -0.0147525398855
-49 -0.00204202136105 0.00204202136103 0.00946156002567 -0.00946156002569
-50 -0.00207797529532 0.00207797529531 0.0147525447432 -0.0147525447431
-51 0.000173374914166 -0.000173374914135 0.0183900180943 -0.0183900180943
-52 0.000334232451934 -0.000334232451922 -0.0231810555233 0.0231810555233
-53 0.000845589590767 -0.000845589590778 0.0130254684027 -0.0130254684028
-54 -0.00050620465143 0.000506204651423 -0.00673528987195 0.00673528987195
-55 0.000459214630557 -0.000459214630566 -0.0149813117293 0.0149813117293
-56 -0.000615457853335 0.000615457853327 0.0299232782723 -0.0299232782723
-57 0.000879971359249 -0.00087997135925 -0.0255719364993 0.0255719364993
-58 0.00260036335542 -0.0026003633554 -0.0195491080562 0.0195491080562
-59 -0.000490443228571 0.000490443228557 0.0381226217921 -0.0381226217921
-60 0.000342085855682 -0.000342085855703 -0.0444375409715 0.0444375409715
-61 0.000316768459576 -0.000316768459588 0.0237688603721 -0.0237688603721
-62 0.00016878609145 -0.000168786091454 -0.0447717783554 0.0447717783554
-63 -0.00277577058767 0.00277577058768 0.0897601662258 -0.0897601662258
-64 0.000173360052024 -0.000173360052007 0.0183900476597 -0.0183900476597
-65 0.00165683452716 -0.00165683452713 -0.0548302244871 0.0548302244871
-66 -0.000187042401834 0.000187042401845 -0.00623951407567 0.00623951407569
-67 0.000334221183017 -0.000334221183032 -0.0231809127501 0.0231809127501
-68 -0.00296633524758 0.0029663352476 0.0976503457326 -0.0976503457326
-69 0.000334209374616 -0.000334209374652 -0.0231808821748 0.0231808821748
-70 -0.00157894710211 0.0015789471021 0.0483095137533 -0.0483095137533
-71 0.00107261772161 -0.0010726177216 -0.038389379181 0.038389379181
-72 -0.000506194774864 0.000506194774861 -0.00673520248594 0.00673520248594
-73 0.000845585488371 -0.000845585488368 0.0130257485428 -0.0130257485428
-74 -0.00099649083861 0.000996490838618 0.0784164003274 -0.0784164003274
-75 -0.000780572502124 0.000780572502132 0.015267209426 -0.015267209426
-76 -0.000187049589314 0.000187049589319 -0.0062394904277 0.0062394904277
-77 0.000303631239276 -0.000303631239272 0.0278134201998 -0.0278134201999
-78 0.00138186467032 -0.0013818646703 -0.0107230643199 0.0107230643199
-79 0.000334201978329 -0.00033420197834 -0.0231807067633 0.0231807067632
-80 0.000316783844032 -0.000316783844047 0.0237685795838 -0.0237685795838
-81 -0.00111921630232 0.00111921630232 0.0375207814487 -0.0375207814487
-82 -0.0013854108222 0.00138541082219 0.0569766380778 -0.0569766380778
-83 -0.000827746868009 0.000827746867994 0.0155883232139 -0.0155883232139
-84 0.0013709210119 -0.00137092101189 0.0122186887524 -0.0122186887524
-85 0.000334212989212 -0.000334212989235 -0.0231806937032 0.0231806937032
-86 0.000622483770067 -0.000622483770066 -0.0426415468143 0.0426415468143
-87 0.000591717557469 -0.000591717557463 -0.00611123603734 0.00611123603733
-88 -0.000718358722683 0.000718358722686 0.0223631657984 -0.0223631657984
-89 -0.000696756836574 0.000696756836581 0.00115505660589 -0.00115505660589
-90 -0.000506204477782 0.000506204477782 -0.00673516408525 0.00673516408525
-91 -0.00135690159436 0.00135690159436 0.0537867933428 -0.0537867933428
-92 4.08718730297e-05 -4.08718730283e-05 0.00951997394088 -0.00951997394087
-93 -0.00114474880537 0.00114474880538 0.0246887231278 -0.0246887231278
-94 0.00260039073663 -0.00260039073661 -0.0195489870951 0.0195489870951
-95 -0.000780885311649 0.000780885311643 0.00833259826846 -0.00833259826847
-96 -0.00141938985277 0.00141938985275 0.0397560748719 -0.0397560748719
-97 -0.000780881620895 0.000780881620912 0.00833252194557 -0.00833252194556
-98 -0.000780566605387 0.000780566605416 0.0152673550777 -0.0152673550777
-99 0.00760602648383 -0.00760602648382 -0.129536312408 0.129536312408
-100 -0.000696760328124 0.000696760328164 0.00115497725961 -0.00115497725962
-101 -0.00678600258327 0.00678600258326 -0.0465649501276 0.0465649501276
-102 0.00111985118299 -0.00111985118299 -0.0133311713893 0.0133311713893
-103 0.00179128299969 -0.00179128299967 -0.0117629965412 0.0117629965413
-104 0.00245919021666 -0.00245919021666 0.00824989787101 -0.00824989787102
-105 -8.76492503355e-05 8.76492503354e-05 -0.000496254333627 0.000496254333633
-106 0.000761967432587 -0.000761967432566 -7.83367381654e-05 7.83367381754e-05
-107 0.000752341381348 -0.00075234138135 -0.0565467602809 0.0565467602809
-108 0.00493978422624 -0.00493978422623 -0.0423953299002 0.0423953299002
-109 0.00324788826973 -0.00324788826971 -0.00283722024642 0.00283722024643
-110 -0.00637676293374 0.00637676293375 -0.0562338429038 0.0562338429038
-111 0.000503159820755 -0.000503159820738 -0.0127616264245 0.0127616264245
-112 0.00140440261799 -0.00140440261802 0.000816319075632 -0.000816319075634
-113 0.000240534267342 -0.000240534267329 0.00957268947576 -0.00957268947577
-114 0.000384206839893 -0.000384206839888 -0.0217977011189 0.0217977011189
-115 -0.00634724290611 0.0063472429061 -0.0162945308629 0.016294530863
-116 -0.00354841350688 0.0035484135069 0.00558402036707 -0.00558402036708
-117 0.00215492172491 -0.00215492172492 0.00963144402194 -0.00963144402194
-118 -0.00275396275316 0.00275396275316 0.044837028746 -0.044837028746
-119 -0.0119814923701 0.01198149237 0.193515094085 -0.193515094085
-120 0.000963734153542 -0.00096373415354 0.0226428351944 -0.0226428351944
-121 -0.00244037523199 0.00244037523198 0.00546591341746 -0.00546591341744
-122 0.000266029608572 -0.00026602960856 -0.0314149983787 0.0314149983787
-123 -0.000348614897638 0.000348614897651 0.0397566415853 -0.0397566415853
-124 0.00119624036114 -0.00119624036116 -0.0286752098822 0.0286752098822
-125 0.000911513554691 -0.000911513554674 0.0039503682424 -0.00395036824241
-126 0.00416496074481 -0.00416496074481 -0.021645721723 0.021645721723
-127 0.00107259886096 -0.00107259886096 -0.0383893990267 0.0383893990268
-128 0.00119624052347 -0.00119624052348 -0.0286751556142 0.0286751556142
-129 0.00054481896186 -0.000544818961844 0.00819147865674 -0.00819147865675
-130 0.00318546200452 -0.00318546200451 0.0194033902589 -0.0194033902589
-131 0.00426070161194 -0.00426070161192 -0.0346242888528 0.0346242888528
-132 0.00376292316169 -0.00376292316171 -0.0418071757284 0.0418071757283
-133 -0.00087631493066 0.000876314930647 0.0105907484353 -0.0105907484353
-134 0.0010826505927 -0.00108265059272 0.0316795216603 -0.0316795216603
-135 0.00169020541014 -0.00169020541014 0.0744521027106 -0.0744521027107
-136 -0.000692087598833 0.000692087598854 -0.0298567664795 0.0298567664795
-137 -0.00532133931133 0.00532133931132 0.00197677995068 -0.00197677995068
-138 0.00215491723505 -0.00215491723504 0.00963119640396 -0.00963119640397
-139 0.00107260919521 -0.00107260919522 -0.0383891865417 0.0383891865417
-140 -3.59016028262e-06 3.59016027307e-06 0.00811701973019 -0.00811701973017
-141 -0.00532134139947 0.00532134139946 0.00197683432762 -0.00197683432763
-142 -0.00383298415331 0.00383298415332 -0.00856391328718 0.00856391328718
-143 0.00111984317293 -0.00111984317294 -0.0133312669044 0.0133312669044
-144 -0.0015606294529 0.00156062945289 -0.0102470797367 0.0102470797367
-145 -0.00812507234928 0.00812507234927 -0.0209114480533 0.0209114480533
-146 -0.00370288186472 0.00370288186472 -0.000652009420983 0.000652009420984
-147 0.0010009233634 -0.00100092336337 -0.0223679412346 0.0223679412346
-148 0.00063323482203 -0.000633234822026 -0.0048486309278 0.00484863092779
-149 -0.00335548443538 0.00335548443542 0.00963297901935 -0.00963297901936
-150 0.00143331899859 -0.00143331899856 -0.0100217011426 0.0100217011426
+1 -0.002078 0.002078 0.01475 -0.01475
+2 -0.002078 0.002078 0.01475 -0.01475
+3 -0.001861 0.001861 0.004309 -0.004309
+4 -0.002042 0.002042 0.009462 -0.009462
+5 -0.002078 0.002078 0.01475 -0.01475
+6 0.004667 -0.004667 -0.004736 0.004736
+7 0.001277 -0.001277 0.03081 -0.03081
+8 -0.002042 0.002042 0.009462 -0.009462
+9 -0.002078 0.002078 0.01475 -0.01475
+10 -0.005924 0.005924 -0.01338 0.01338
+11 -0.002042 0.002042 0.009462 -0.009462
+12 -0.001842 0.001842 -0.006647 0.006647
+13 -0.00596 0.00596 -0.008089 0.008089
+14 -0.004189 0.004189 -0.1086 0.1086
+15 -0.001301 0.001301 -0.02727 0.02727
+16 0.004185 -0.004185 0.03394 -0.03394
+17 0.004366 -0.004366 0.02879 -0.02879
+18 0.001277 -0.001277 0.03081 -0.03081
+19 0.001795 -0.001795 -0.01316 0.01316
+20 0.001313 -0.001313 0.02552 -0.02552
+21 -0.00156 0.00156 -0.02921 0.02921
+22 0.004185 -0.004185 0.03394 -0.03394
+23 0.001206 -0.001206 -0.1772 0.1772
+24 0.007108 -0.007108 -0.004324 0.004324
+25 -0.001044 0.001044 -0.0794 0.0794
+26 -0.001842 0.001842 -0.006647 0.006647
+27 0.004386 -0.004386 0.01783 -0.01783
+28 -0.002042 0.002042 0.009462 -0.009462
+29 -0.002078 0.002078 0.01475 -0.01475
+30 -0.001842 0.001842 -0.006647 0.006647
+31 -0.001842 0.001842 -0.006647 0.006647
+32 0.004185 -0.004185 0.03394 -0.03394
+33 -0.005924 0.005924 -0.01338 0.01338
+34 -0.002078 0.002078 0.01475 -0.01475
+35 -0.005924 0.005924 -0.01338 0.01338
+36 -0.001301 0.001301 -0.02727 0.02727
+37 -0.001861 0.001861 0.00431 -0.00431
+38 -0.005924 0.005924 -0.01338 0.01338
+39 -0.001861 0.001861 0.00431 -0.00431
+40 -0.002042 0.002042 0.009462 -0.009462
+41 0.001493 -0.001493 0.02037 -0.02037
+42 0.001493 -0.001493 0.02037 -0.02037
+43 -0.001861 0.001861 0.00431 -0.00431
+44 0.008885 -0.008885 0.01074 -0.01074
+45 0.005183 -0.005183 -0.05492 0.05492
+46 0.001277 -0.001277 0.03081 -0.03081
+47 -0.001842 0.001842 -0.006646 0.006646
+48 -0.002078 0.002078 0.01475 -0.01475
+49 -0.002042 0.002042 0.009462 -0.009462
+50 -0.002078 0.002078 0.01475 -0.01475
+51 0.0001734 -0.0001734 0.01839 -0.01839
+52 0.0003342 -0.0003342 -0.02318 0.02318
+53 0.0008456 -0.0008456 0.01303 -0.01303
+54 -0.0005062 0.0005062 -0.006735 0.006735
+55 0.0004592 -0.0004592 -0.01498 0.01498
+56 -0.0006155 0.0006155 0.02992 -0.02992
+57 0.00088 -0.00088 -0.02557 0.02557
+58 0.0026 -0.0026 -0.01955 0.01955
+59 -0.0004904 0.0004904 0.03812 -0.03812
+60 0.0003421 -0.0003421 -0.04444 0.04444
+61 0.0003168 -0.0003168 0.02377 -0.02377
+62 0.0001688 -0.0001688 -0.04477 0.04477
+63 -0.002776 0.002776 0.08976 -0.08976
+64 0.0001734 -0.0001734 0.01839 -0.01839
+65 0.001657 -0.001657 -0.05483 0.05483
+66 -0.000187 0.000187 -0.00624 0.00624
+67 0.0003342 -0.0003342 -0.02318 0.02318
+68 -0.002966 0.002966 0.09765 -0.09765
+69 0.0003342 -0.0003342 -0.02318 0.02318
+70 -0.001579 0.001579 0.04831 -0.04831
+71 0.001073 -0.001073 -0.03839 0.03839
+72 -0.0005062 0.0005062 -0.006735 0.006735
+73 0.0008456 -0.0008456 0.01303 -0.01303
+74 -0.0009965 0.0009965 0.07842 -0.07842
+75 -0.0007806 0.0007806 0.01527 -0.01527
+76 -0.000187 0.000187 -0.006239 0.006239
+77 0.0003036 -0.0003036 0.02781 -0.02781
+78 0.001382 -0.001382 -0.01072 0.01072
+79 0.0003342 -0.0003342 -0.02318 0.02318
+80 0.0003168 -0.0003168 0.02377 -0.02377
+81 -0.001119 0.001119 0.03752 -0.03752
+82 -0.001385 0.001385 0.05698 -0.05698
+83 -0.0008277 0.0008277 0.01559 -0.01559
+84 0.001371 -0.001371 0.01222 -0.01222
+85 0.0003342 -0.0003342 -0.02318 0.02318
+86 0.0006225 -0.0006225 -0.04264 0.04264
+87 0.0005917 -0.0005917 -0.006111 0.006111
+88 -0.0007184 0.0007184 0.02236 -0.02236
+89 -0.0006968 0.0006968 0.001155 -0.001155
+90 -0.0005062 0.0005062 -0.006735 0.006735
+91 -0.001357 0.001357 0.05379 -0.05379
+92 4.087e-05 -4.087e-05 0.00952 -0.00952
+93 -0.001145 0.001145 0.02469 -0.02469
+94 0.0026 -0.0026 -0.01955 0.01955
+95 -0.0007809 0.0007809 0.008333 -0.008333
+96 -0.001419 0.001419 0.03976 -0.03976
+97 -0.0007809 0.0007809 0.008333 -0.008333
+98 -0.0007806 0.0007806 0.01527 -0.01527
+99 0.007606 -0.007606 -0.1295 0.1295
+100 -0.0006968 0.0006968 0.001155 -0.001155
+101 -0.006786 0.006786 -0.04656 0.04656
+102 0.00112 -0.00112 -0.01333 0.01333
+103 0.001791 -0.001791 -0.01176 0.01176
+104 0.002459 -0.002459 0.00825 -0.00825
+105 -8.765e-05 8.765e-05 -0.0004963 0.0004963
+106 0.000762 -0.000762 -7.834e-05 7.834e-05
+107 0.0007523 -0.0007523 -0.05655 0.05655
+108 0.00494 -0.00494 -0.0424 0.0424
+109 0.003248 -0.003248 -0.002837 0.002837
+110 -0.006377 0.006377 -0.05623 0.05623
+111 0.0005032 -0.0005032 -0.01276 0.01276
+112 0.001404 -0.001404 0.0008163 -0.0008163
+113 0.0002405 -0.0002405 0.009573 -0.009573
+114 0.0003842 -0.0003842 -0.0218 0.0218
+115 -0.006347 0.006347 -0.01629 0.01629
+116 -0.003548 0.003548 0.005584 -0.005584
+117 0.002155 -0.002155 0.009631 -0.009631
+118 -0.002754 0.002754 0.04484 -0.04484
+119 -0.01198 0.01198 0.1935 -0.1935
+120 0.0009637 -0.0009637 0.02264 -0.02264
+121 -0.00244 0.00244 0.005466 -0.005466
+122 0.000266 -0.000266 -0.03141 0.03141
+123 -0.0003486 0.0003486 0.03976 -0.03976
+124 0.001196 -0.001196 -0.02868 0.02868
+125 0.0009115 -0.0009115 0.00395 -0.00395
+126 0.004165 -0.004165 -0.02165 0.02165
+127 0.001073 -0.001073 -0.03839 0.03839
+128 0.001196 -0.001196 -0.02868 0.02868
+129 0.0005448 -0.0005448 0.008191 -0.008191
+130 0.003185 -0.003185 0.0194 -0.0194
+131 0.004261 -0.004261 -0.03462 0.03462
+132 0.003763 -0.003763 -0.04181 0.04181
+133 -0.0008763 0.0008763 0.01059 -0.01059
+134 0.001083 -0.001083 0.03168 -0.03168
+135 0.00169 -0.00169 0.07445 -0.07445
+136 -0.0006921 0.0006921 -0.02986 0.02986
+137 -0.005321 0.005321 0.001977 -0.001977
+138 0.002155 -0.002155 0.009631 -0.009631
+139 0.001073 -0.001073 -0.03839 0.03839
+140 -3.59e-06 3.59e-06 0.008117 -0.008117
+141 -0.005321 0.005321 0.001977 -0.001977
+142 -0.003833 0.003833 -0.008564 0.008564
+143 0.00112 -0.00112 -0.01333 0.01333
+144 -0.001561 0.001561 -0.01025 0.01025
+145 -0.008125 0.008125 -0.02091 0.02091
+146 -0.003703 0.003703 -0.000652 0.000652
+147 0.001001 -0.001001 -0.02237 0.02237
+148 0.0006332 -0.0006332 -0.004849 0.004849
+149 -0.003355 0.003355 0.009633 -0.009633
+150 0.001433 -0.001433 -0.01002 0.01002
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kcca_out2.tabular
--- a/test-data/kcca_out2.tabular Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kcca_out2.tabular Wed Mar 10 16:23:53 2010 -0500
@@ -1,304 +1,304 @@
#Component 1 2
-#Correlation -0.997722247293 0.997722247293
+#Correlation -0.9977 0.9977
#Estimated X-coefficients 1 2
-1 -0.00122731840743 0.00122731840742
-2 -0.00122731106403 0.00122731106387
-3 0.00166794295518 -0.00166794295525
-4 -0.00173362374287 0.00173362374291
-5 -0.00122731862766 0.00122731862769
-6 -0.0029554785628 0.00295547856271
-7 -0.00461498736385 0.00461498736384
-8 -0.00173362629843 0.00173362629836
-9 -0.00122732331673 0.00122732331672
-10 0.00688097994625 -0.00688097994624
-11 -0.00173362826184 0.00173362826186
-12 -8.37004866976e-05 8.37004867417e-05
-13 0.00635554576927 -0.00635554576925
-14 0.00873026392029 -0.00873026392028
-15 0.00652732389235 -0.00652732389232
-16 -0.00603047472024 0.00603047472021
-17 0.00873631760763 -0.00873631760765
-18 -0.00461499433646 0.00461499433645
-19 -0.00219576033584 0.00219576033585
-20 -0.00691698230553 0.00691698230556
-21 0.00292177636345 -0.00292177636347
-22 -0.00603047506996 0.00603047506995
-23 0.0172373160862 -0.0172373160862
-24 0.00249397988781 -0.00249397988776
-25 0.00798852789597 -0.00798852789598
-26 -8.37043942033e-05 8.37043942473e-05
-27 -0.00630705042003 0.00630705042003
-28 -0.00173363075689 0.0017336307569
-29 -0.00122732018654 0.00122732018653
-30 -8.3706566709e-05 8.37065667174e-05
-31 -8.37037029634e-05 8.37037029813e-05
-32 -0.00603046924565 0.00603046924565
-33 0.00688097728833 -0.00688097728833
-34 -0.00122732731419 0.00122732731425
-35 0.00688098087012 -0.00688098087015
-36 0.00652731714093 -0.00652731714093
-37 0.00166793992756 -0.00166793992754
-38 0.00688097839506 -0.00688097839508
-39 0.00166794141434 -0.00166794141431
-40 -0.00173363131319 0.00173363131319
-41 0.0014004109735 -0.00140041097347
-42 0.00140041664328 -0.00140041664323
-43 0.00166794191139 -0.00166794191136
-44 0.0155877231838 -0.0155877231838
-45 0.00840352861092 -0.0084035286109
-46 -0.0046149948871 0.00461499488714
-47 -8.37036024121e-05 8.37036023937e-05
-48 -0.00122732358385 0.00122732358381
-49 -0.00173363170928 0.00173363170926
-50 -0.00122732256951 0.0012273225695
-51 0.000660960845406 -0.000660960845404
-52 -0.00213814090606 0.00213814090606
-53 -0.00172474025037 0.00172474025036
-54 -1.15192134321e-05 1.15192134284e-05
-55 -0.00270262833397 0.00270262833397
-56 0.00215511291935 -0.00215511291935
-57 -0.00300187218663 0.00300187218663
-58 0.00185267145989 -0.0018526714599
-59 0.00472457250519 -0.00472457250517
-60 0.00384665232521 -0.0038466523252
-61 -0.00310019835939 0.0031001983594
-62 0.00258744962442 -0.00258744962443
-63 0.00641438900071 -0.00641438900071
-64 0.00066096075849 -0.000660960758496
-65 0.00671559094757 -0.00671559094758
-66 -0.00232160450111 0.00232160450112
-67 -0.00213814191545 0.00213814191544
-68 0.0114627849428 -0.0114627849428
-69 -0.0021381429419 0.00213814294188
-70 0.000641012727381 -0.00064101272739
-71 0.00112493603698 -0.00112493603698
-72 -1.15272295775e-05 1.15272295687e-05
-73 -0.00172473744183 0.00172473744183
-74 0.018276550116 -0.018276550116
-75 -0.00125666892339 0.00125666892339
-76 -0.00232160381613 0.00232160381614
-77 0.00219982317236 -0.00219982317237
-78 -0.00586026163233 0.00586026163236
-79 -0.00213814041066 0.00213814041065
-80 -0.0031001930441 0.00310019304412
-81 0.00032247134769 -0.000322471347699
-82 -0.00253960255175 0.00253960255175
-83 0.000580298776161 -0.000580298776172
-84 -0.00339887362339 0.00339887362339
-85 -0.00213814304507 0.00213814304505
-86 0.00141644720267 -0.00141644720266
-87 -0.00275665838117 0.00275665838116
-88 7.299837356e-05 -7.29983735588e-05
-89 -0.00122133112354 0.00122133112352
-90 -1.1520732577e-05 1.15207325665e-05
-91 0.00623668821962 -0.00623668821961
-92 -0.000750488786068 0.000750488786081
-93 -0.000134743806793 0.000134743806781
-94 0.00185266774503 -0.00185266774504
-95 -0.00168681443561 0.00168681443562
-96 0.00104551800392 -0.00104551800392
-97 -0.00168681378149 0.00168681378151
-98 -0.00125666731119 0.0012566673112
-99 0.0273373188851 -0.0273373188851
-100 -0.00122132449275 0.00122132449276
-101 0.0164080376037 -0.0164080376037
-102 -0.00584347307691 0.00584347307691
-103 -0.0175645745896 0.0175645745896
-104 -0.000812522970572 0.000812522970572
-105 -0.00635860059631 0.0063586005963
-106 0.00275366392106 -0.00275366392106
-107 0.00954602258661 -0.00954602258661
-108 0.0155278096117 -0.0155278096117
-109 -0.00306825657773 0.00306825657775
-110 0.0112597051463 -0.0112597051463
-111 -0.00398796328134 0.00398796328133
-112 -0.00441274112723 0.00441274112722
-113 -0.00171496236972 0.00171496236972
-114 0.000877789735939 -0.000877789735948
-115 0.0273970544333 -0.0273970544333
-116 -0.00131825030571 0.0013182503057
-117 -0.000930125218004 0.000930125218002
-118 0.0155089924497 -0.0155089924497
-119 0.0118140403622 -0.0118140403622
-120 -0.000801552123165 0.00080155212317
-121 0.00284932184324 -0.00284932184324
-122 0.00962061241972 -0.00962061241972
-123 0.0158935928536 -0.0158935928536
-124 -0.0032912758955 0.00329127589549
-125 -0.00449164711626 0.00449164711627
-126 -0.00365891492729 0.0036589149273
-127 0.00112493995903 -0.00112493995902
-128 -0.0032912756797 0.00329127567971
-129 -0.00180788589755 0.00180788589754
-130 0.0223816193407 -0.0223816193407
-131 -0.0186465081581 0.0186465081581
-132 -0.0165897014 0.0165897014
-133 -9.15600724877e-05 9.15600724931e-05
-134 0.000396631463788 -0.000396631463796
-135 0.0366828617501 -0.0366828617501
-136 -0.0207881503725 0.0207881503725
-137 0.00716867549967 -0.00716867549967
-138 -0.000930124083137 0.000930124083139
-139 0.00112493800574 -0.00112493800573
-140 -0.00317568145256 0.00317568145257
-141 0.00716867883327 -0.00716867883327
-142 0.0151224437075 -0.0151224437075
-143 -0.00584347370931 0.00584347370932
-144 -0.00593952985175 0.00593952985175
-145 0.0182854382585 -0.0182854382585
-146 0.00354375934258 -0.00354375934259
-147 -0.00361285051891 0.00361285051892
-148 -0.0055083733504 0.00550837335042
-149 -0.00145063403334 0.00145063403336
-150 -0.00625027498836 0.00625027498837
+1 -0.001227 0.001227
+2 -0.001227 0.001227
+3 0.001668 -0.001668
+4 -0.001734 0.001734
+5 -0.001227 0.001227
+6 -0.002955 0.002955
+7 -0.004615 0.004615
+8 -0.001734 0.001734
+9 -0.001227 0.001227
+10 0.006881 -0.006881
+11 -0.001734 0.001734
+12 -8.37e-05 8.37e-05
+13 0.006356 -0.006356
+14 0.00873 -0.00873
+15 0.006527 -0.006527
+16 -0.00603 0.00603
+17 0.008736 -0.008736
+18 -0.004615 0.004615
+19 -0.002196 0.002196
+20 -0.006917 0.006917
+21 0.002922 -0.002922
+22 -0.00603 0.00603
+23 0.01724 -0.01724
+24 0.002494 -0.002494
+25 0.007989 -0.007989
+26 -8.37e-05 8.37e-05
+27 -0.006307 0.006307
+28 -0.001734 0.001734
+29 -0.001227 0.001227
+30 -8.371e-05 8.371e-05
+31 -8.37e-05 8.37e-05
+32 -0.00603 0.00603
+33 0.006881 -0.006881
+34 -0.001227 0.001227
+35 0.006881 -0.006881
+36 0.006527 -0.006527
+37 0.001668 -0.001668
+38 0.006881 -0.006881
+39 0.001668 -0.001668
+40 -0.001734 0.001734
+41 0.0014 -0.0014
+42 0.0014 -0.0014
+43 0.001668 -0.001668
+44 0.01559 -0.01559
+45 0.008404 -0.008404
+46 -0.004615 0.004615
+47 -8.37e-05 8.37e-05
+48 -0.001227 0.001227
+49 -0.001734 0.001734
+50 -0.001227 0.001227
+51 0.000661 -0.000661
+52 -0.002138 0.002138
+53 -0.001725 0.001725
+54 -1.152e-05 1.152e-05
+55 -0.002703 0.002703
+56 0.002155 -0.002155
+57 -0.003002 0.003002
+58 0.001853 -0.001853
+59 0.004725 -0.004725
+60 0.003847 -0.003847
+61 -0.0031 0.0031
+62 0.002587 -0.002587
+63 0.006414 -0.006414
+64 0.000661 -0.000661
+65 0.006716 -0.006716
+66 -0.002322 0.002322
+67 -0.002138 0.002138
+68 0.01146 -0.01146
+69 -0.002138 0.002138
+70 0.000641 -0.000641
+71 0.001125 -0.001125
+72 -1.153e-05 1.153e-05
+73 -0.001725 0.001725
+74 0.01828 -0.01828
+75 -0.001257 0.001257
+76 -0.002322 0.002322
+77 0.0022 -0.0022
+78 -0.00586 0.00586
+79 -0.002138 0.002138
+80 -0.0031 0.0031
+81 0.0003225 -0.0003225
+82 -0.00254 0.00254
+83 0.0005803 -0.0005803
+84 -0.003399 0.003399
+85 -0.002138 0.002138
+86 0.001416 -0.001416
+87 -0.002757 0.002757
+88 7.3e-05 -7.3e-05
+89 -0.001221 0.001221
+90 -1.152e-05 1.152e-05
+91 0.006237 -0.006237
+92 -0.0007505 0.0007505
+93 -0.0001347 0.0001347
+94 0.001853 -0.001853
+95 -0.001687 0.001687
+96 0.001046 -0.001046
+97 -0.001687 0.001687
+98 -0.001257 0.001257
+99 0.02734 -0.02734
+100 -0.001221 0.001221
+101 0.01641 -0.01641
+102 -0.005843 0.005843
+103 -0.01756 0.01756
+104 -0.0008125 0.0008125
+105 -0.006359 0.006359
+106 0.002754 -0.002754
+107 0.009546 -0.009546
+108 0.01553 -0.01553
+109 -0.003068 0.003068
+110 0.01126 -0.01126
+111 -0.003988 0.003988
+112 -0.004413 0.004413
+113 -0.001715 0.001715
+114 0.0008778 -0.0008778
+115 0.0274 -0.0274
+116 -0.001318 0.001318
+117 -0.0009301 0.0009301
+118 0.01551 -0.01551
+119 0.01181 -0.01181
+120 -0.0008016 0.0008016
+121 0.002849 -0.002849
+122 0.009621 -0.009621
+123 0.01589 -0.01589
+124 -0.003291 0.003291
+125 -0.004492 0.004492
+126 -0.003659 0.003659
+127 0.001125 -0.001125
+128 -0.003291 0.003291
+129 -0.001808 0.001808
+130 0.02238 -0.02238
+131 -0.01865 0.01865
+132 -0.01659 0.01659
+133 -9.156e-05 9.156e-05
+134 0.0003966 -0.0003966
+135 0.03668 -0.03668
+136 -0.02079 0.02079
+137 0.007169 -0.007169
+138 -0.0009301 0.0009301
+139 0.001125 -0.001125
+140 -0.003176 0.003176
+141 0.007169 -0.007169
+142 0.01512 -0.01512
+143 -0.005843 0.005843
+144 -0.00594 0.00594
+145 0.01829 -0.01829
+146 0.003544 -0.003544
+147 -0.003613 0.003613
+148 -0.005508 0.005508
+149 -0.001451 0.001451
+150 -0.00625 0.00625
#Estimated Y-coefficients 1 2
-1 0.000952236730115 0.000952236730136
-2 0.00753180911842 0.00753180911838
-3 -0.00516801028579 -0.00516801028581
-4 -0.00882498990477 -0.00882498990477
-5 -0.00140542873921 -0.00140542873922
-6 0.00330266798272 0.00330266798272
-7 -0.0081387911267 -0.00813879112671
-8 0.00152443564077 0.00152443564076
-9 -0.00771870201025 -0.00771870201025
-10 0.00547713010073 0.00547713010074
-11 0.003270659849 0.003270659849
-12 -0.00306812817496 -0.00306812817497
-13 0.00445886261671 0.00445886261673
-14 -0.00353335505412 -0.00353335505412
-15 0.0085173158937 0.0085173158937
-16 -0.0383961776234 -0.0383961776234
-17 0.00330267922582 0.00330267922583
-18 0.000952240516451 0.000952240516456
-19 0.00818906645105 0.00818906645104
-20 -0.00309614228051 -0.00309614228051
-21 -0.000971621477879 -0.000971621477885
-22 -0.00165164343654 -0.00165164343652
-23 0.00604384541776 0.00604384541777
-24 0.00341647364957 0.00341647364958
-25 -0.00306812439373 -0.00306812439374
-26 0.00830847900819 0.00830847900821
-27 0.00152443887844 0.00152443887844
-28 0.00132401884933 0.00132401884932
-29 0.00179979352006 0.00179979352008
-30 -0.00516800670429 -0.0051680067043
-31 0.00208562867941 0.00208562867942
-32 -0.000971619066246 -0.000971619066261
-33 -0.0113128074854 -0.0113128074854
-34 -0.00870056701442 -0.0087005670144
-35 0.00547713027423 0.00547713027424
-36 0.00495542862463 0.00495542862463
-37 -0.000337423424473 -0.000337423424464
-38 0.00547712655395 0.00547712655396
-39 -0.010973258652 -0.010973258652
-40 0.0022008011203 0.0022008011203
-41 -5.46008492518e-05 -5.46008492437e-05
-42 -0.0234396971996 -0.0234396971996
-43 -0.0134511606022 -0.0134511606023
-44 -5.45996121933e-05 -5.45996122011e-05
-45 -0.00309614158163 -0.00309614158163
-46 0.00445886019725 0.00445886019722
-47 -0.00309614420386 -0.00309614420386
-48 -0.0104579849693 -0.0104579849693
-49 0.00168842769973 0.00168842769972
-50 0.00322357131135 0.00322357131133
-51 0.00202630582019 0.00202630582019
-52 -0.000691700383038 -0.000691700383018
-53 -0.00104045859173 -0.00104045859172
-54 -0.00344346850677 -0.00344346850675
-55 0.000998125320535 0.000998125320523
-56 -0.00199381905541 -0.0019938190554
-57 -0.00106665569072 -0.00106665569075
-58 0.00650984277058 0.00650984277059
-59 -0.00127555360175 -0.00127555360178
-60 0.0074404524885 0.00744045248845
-61 -0.0317908788312 -0.0317908788312
-62 -0.0039999913858 -0.00399999138579
-63 -0.0115243240423 -0.0115243240423
-64 0.00176831166006 0.00176831166007
-65 -0.00440085578754 -0.00440085578751
-66 -0.00192225561804 -0.00192225561805
-67 -0.00603442682678 -0.00603442682675
-68 0.00152674197776 0.00152674197778
-69 -0.0154861222259 -0.0154861222259
-70 0.00112666408174 0.00112666408172
-71 -0.0063441217448 -0.0063441217448
-72 0.00416182160539 0.0041618216054
-73 0.00285475184215 0.00285475184214
-74 0.00416181855219 0.00416181855219
-75 0.00220354621189 0.00220354621189
-76 -0.00115406490584 -0.00115406490584
-77 -0.0083901924992 -0.00839019249917
-78 -0.00238535825306 -0.0023853582531
-79 0.000206755320136 0.000206755320148
-80 0.00158982739788 0.00158982739788
-81 -1.37720111221e-05 -1.37720111046e-05
-82 -1.3770694678e-05 -1.37706946744e-05
-83 0.00152674594702 0.00152674594698
-84 0.00488108634371 0.00488108634369
-85 -0.00220091184671 -0.0022009118467
-86 -0.00318012733342 -0.0031801273334
-87 -0.00192225857826 -0.00192225857825
-88 -0.00911898267779 -0.00911898267778
-89 -0.00603441933504 -0.00603441933506
-90 0.00142092462435 0.00142092462434
-91 0.00128331390667 0.00128331390668
-92 -0.00057432105445 -0.000574321054431
-93 0.00290994036196 0.00290994036199
-94 0.00227437971826 0.00227437971826
-95 -0.000409584211176 -0.000409584211157
-96 -0.0062564186313 -0.00625641863131
-97 -0.00430727169536 -0.00430727169537
-98 0.00274042717244 0.00274042717247
-99 0.00952367801675 0.00952367801678
-100 -0.00199382288663 -0.00199382288664
-101 -0.00106665068897 -0.00106665068897
-102 0.00152674543824 0.00152674543823
-103 0.00174787415807 0.00174787415805
-104 0.00290750496381 0.00290750496381
-105 9.66708166621e-05 9.6670816661e-05
-106 -0.00101935419571 -0.00101935419568
-107 0.0100871271044 0.0100871271044
-108 0.00352958176992 0.00352958176992
-109 -0.019066105421 -0.0190661054211
-110 0.00240693112447 0.00240693112447
-111 -0.00090513595859 -0.00090513595857
-112 0.0035205054436 0.0035205054436
-113 -0.00310599240981 -0.00310599240982
-114 0.00169430085813 0.00169430085813
-115 -0.000733363285101 -0.000733363285101
-116 -0.000691698262239 -0.000691698262241
-117 9.66698551435e-05 9.66698551349e-05
-118 0.0171457397785 0.0171457397785
-119 0.00625881933768 0.00625881933768
-120 -0.0115243221165 -0.0115243221165
-121 -0.000457609674661 -0.000457609674665
-122 -0.0023718905732 -0.00237189057318
-123 -0.00299238147421 -0.00299238147421
-124 0.00547391509592 0.00547391509591
-125 -0.00200127266833 -0.00200127266834
-126 0.00971289811654 0.00971289811655
-127 0.00481618871657 0.00481618871656
-128 -0.000574322474196 -0.000574322474171
-129 0.00323239431475 0.00323239431476
-130 0.00567244833234 0.00567244833233
-131 -0.00143721256674 -0.00143721256671
-132 -0.0474284189227 -0.0474284189227
-133 0.00323239591076 0.00323239591076
-134 0.00453050823172 0.00453050823173
-135 0.00662134137135 0.00662134137136
-136 -0.0222253238714 -0.0222253238714
-137 -0.000472180108221 -0.000472180108238
-138 -8.96030271952e-05 -8.96030271771e-05
-139 -0.00223279969811 -0.00223279969812
-140 -0.00104045712364 -0.00104045712363
-141 -0.00192225343433 -0.00192225343433
-142 -0.0010404587415 -0.00104045874147
-143 0.00152674421725 0.00152674421723
-144 -0.00165763647411 -0.00165763647409
-145 -0.00200127181815 -0.00200127181816
-146 -0.00238535855193 -0.00238535855193
-147 0.00285475011985 0.00285475011986
-148 9.66702716109e-05 9.66702716107e-05
-149 -0.00109079514933 -0.00109079514929
-150 -0.00399998975976 -0.00399998975976
+1 0.0009522 0.0009522
+2 0.007532 0.007532
+3 -0.005168 -0.005168
+4 -0.008825 -0.008825
+5 -0.001405 -0.001405
+6 0.003303 0.003303
+7 -0.008139 -0.008139
+8 0.001524 0.001524
+9 -0.007719 -0.007719
+10 0.005477 0.005477
+11 0.003271 0.003271
+12 -0.003068 -0.003068
+13 0.004459 0.004459
+14 -0.003533 -0.003533
+15 0.008517 0.008517
+16 -0.0384 -0.0384
+17 0.003303 0.003303
+18 0.0009522 0.0009522
+19 0.008189 0.008189
+20 -0.003096 -0.003096
+21 -0.0009716 -0.0009716
+22 -0.001652 -0.001652
+23 0.006044 0.006044
+24 0.003416 0.003416
+25 -0.003068 -0.003068
+26 0.008308 0.008308
+27 0.001524 0.001524
+28 0.001324 0.001324
+29 0.0018 0.0018
+30 -0.005168 -0.005168
+31 0.002086 0.002086
+32 -0.0009716 -0.0009716
+33 -0.01131 -0.01131
+34 -0.008701 -0.008701
+35 0.005477 0.005477
+36 0.004955 0.004955
+37 -0.0003374 -0.0003374
+38 0.005477 0.005477
+39 -0.01097 -0.01097
+40 0.002201 0.002201
+41 -5.46e-05 -5.46e-05
+42 -0.02344 -0.02344
+43 -0.01345 -0.01345
+44 -5.46e-05 -5.46e-05
+45 -0.003096 -0.003096
+46 0.004459 0.004459
+47 -0.003096 -0.003096
+48 -0.01046 -0.01046
+49 0.001688 0.001688
+50 0.003224 0.003224
+51 0.002026 0.002026
+52 -0.0006917 -0.0006917
+53 -0.00104 -0.00104
+54 -0.003443 -0.003443
+55 0.0009981 0.0009981
+56 -0.001994 -0.001994
+57 -0.001067 -0.001067
+58 0.00651 0.00651
+59 -0.001276 -0.001276
+60 0.00744 0.00744
+61 -0.03179 -0.03179
+62 -0.004 -0.004
+63 -0.01152 -0.01152
+64 0.001768 0.001768
+65 -0.004401 -0.004401
+66 -0.001922 -0.001922
+67 -0.006034 -0.006034
+68 0.001527 0.001527
+69 -0.01549 -0.01549
+70 0.001127 0.001127
+71 -0.006344 -0.006344
+72 0.004162 0.004162
+73 0.002855 0.002855
+74 0.004162 0.004162
+75 0.002204 0.002204
+76 -0.001154 -0.001154
+77 -0.00839 -0.00839
+78 -0.002385 -0.002385
+79 0.0002068 0.0002068
+80 0.00159 0.00159
+81 -1.377e-05 -1.377e-05
+82 -1.377e-05 -1.377e-05
+83 0.001527 0.001527
+84 0.004881 0.004881
+85 -0.002201 -0.002201
+86 -0.00318 -0.00318
+87 -0.001922 -0.001922
+88 -0.009119 -0.009119
+89 -0.006034 -0.006034
+90 0.001421 0.001421
+91 0.001283 0.001283
+92 -0.0005743 -0.0005743
+93 0.00291 0.00291
+94 0.002274 0.002274
+95 -0.0004096 -0.0004096
+96 -0.006256 -0.006256
+97 -0.004307 -0.004307
+98 0.00274 0.00274
+99 0.009524 0.009524
+100 -0.001994 -0.001994
+101 -0.001067 -0.001067
+102 0.001527 0.001527
+103 0.001748 0.001748
+104 0.002908 0.002908
+105 9.667e-05 9.667e-05
+106 -0.001019 -0.001019
+107 0.01009 0.01009
+108 0.00353 0.00353
+109 -0.01907 -0.01907
+110 0.002407 0.002407
+111 -0.0009051 -0.0009051
+112 0.003521 0.003521
+113 -0.003106 -0.003106
+114 0.001694 0.001694
+115 -0.0007334 -0.0007334
+116 -0.0006917 -0.0006917
+117 9.667e-05 9.667e-05
+118 0.01715 0.01715
+119 0.006259 0.006259
+120 -0.01152 -0.01152
+121 -0.0004576 -0.0004576
+122 -0.002372 -0.002372
+123 -0.002992 -0.002992
+124 0.005474 0.005474
+125 -0.002001 -0.002001
+126 0.009713 0.009713
+127 0.004816 0.004816
+128 -0.0005743 -0.0005743
+129 0.003232 0.003232
+130 0.005672 0.005672
+131 -0.001437 -0.001437
+132 -0.04743 -0.04743
+133 0.003232 0.003232
+134 0.004531 0.004531
+135 0.006621 0.006621
+136 -0.02223 -0.02223
+137 -0.0004722 -0.0004722
+138 -8.96e-05 -8.96e-05
+139 -0.002233 -0.002233
+140 -0.00104 -0.00104
+141 -0.001922 -0.001922
+142 -0.00104 -0.00104
+143 0.001527 0.001527
+144 -0.001658 -0.001658
+145 -0.002001 -0.002001
+146 -0.002385 -0.002385
+147 0.002855 0.002855
+148 9.667e-05 9.667e-05
+149 -0.001091 -0.001091
+150 -0.004 -0.004
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kpca_out1.tabular
--- a/test-data/kpca_out1.tabular Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kpca_out1.tabular Wed Mar 10 16:23:53 2010 -0500
@@ -1,304 +1,304 @@
#Component 1 2
-#Eigenvalue 748.53295257 31.7564711908
+#Eigenvalue 748.5 31.76
#Principal component vectors 1 2
-1 -0.00355307349951 0.010798043393
-2 -0.00372171398464 -0.00319202227241
-3 -0.00388666963315 -0.00306208266383
-4 -0.0037995051228 -0.00750936595439
-5 -0.00359988454628 0.0106843456635
-6 -0.00299499814368 0.0244145455947
-7 -0.00381378035242 -0.00182771020035
-8 -0.00354042321831 0.00630539436577
-9 -0.00400175554745 -0.0145202364622
-10 -0.0036630352299 -0.00136819012831
-11 -0.00326674025957 0.0213381031254
-12 -0.00357519624231 0.00181491746835
-13 -0.00380730480961 -0.0049118993488
-14 -0.0042807390398 -0.013520126312
-15 -0.00325109413176 0.0386362582597
-16 -0.0029222183254 0.0439372659369
-17 -0.00334909683853 0.025845377936
-18 -0.00352067050475 0.0105349072573
-19 -0.00284736799589 0.0295161649048
-20 -0.00340343226564 0.0164339289456
-21 -0.00312848751113 0.0141934462825
-22 -0.00338156462338 0.0140621875533
-23 -0.00412447090326 0.00340207509083
-24 -0.00322710660152 0.00486270310359
-25 -0.00332920046569 0.000741686459317
-26 -0.00350562773687 -0.0018911475654
-27 -0.00339310720123 0.00542360368742
-28 -0.00341115136262 0.0126295090519
-29 -0.00350586794258 0.0109084171675
-30 -0.00365520087074 -0.00403408864917
-31 -0.0036097322482 -0.00395326488411
-32 -0.00323422448999 0.0143754764524
-33 -0.00337735967561 0.0257295115702
-34 -0.00323624470121 0.035204794488
-35 -0.0036630352299 -0.00136819012831
-36 -0.00380098698319 0.00332108429984
-37 -0.00340101962525 0.0199919567158
-38 -0.0036630352299 -0.00136819012831
-39 -0.00406364785654 -0.0124621360523
-40 -0.00348192497209 0.00842215059195
-41 -0.00365954855054 0.00871751918805
-42 -0.00405650169482 -0.0228354383139
-43 -0.00404399701786 -0.00889238369142
-44 -0.003314992587 0.00689235987806
-45 -0.00302208085492 0.014691962046
-46 -0.00374682975461 -0.0054083973616
-47 -0.00335195787406 0.0163610385692
-48 -0.00386473126531 -0.0053404692461
-49 -0.00332787397638 0.0190663593005
-50 -0.00363243277483 0.00464859342961
-51 0.00213134716649 0.0235348223035
-52 0.00126833388577 0.010770856327
-53 0.00231973604021 0.0165043816266
-54 -0.00054456516814 -0.0221796154179
-55 0.00139079918371 0.00286317141153
-56 0.000352174603738 -0.0119723346983
-57 0.00150292456851 0.0088321679366
-58 -0.00187873890345 -0.0252928947697
-59 0.0014246176887 0.00830441949209
-60 -0.000783999496101 -0.0195987047495
-61 -0.00168806505916 -0.0318750199005
-62 0.000332005325527 -0.00227272707421
-63 -0.000240109205398 -0.0136362166478
-64 0.00107889762034 -0.00371369901933
-65 -0.000755702559702 -0.00554349815991
-66 0.00137471857777 0.0164559631848
-67 0.000400237612268 -0.0105937514639
-68 -0.000186605633993 -0.00791711329626
-69 0.000781260386547 -0.0156695687823
-70 -0.000602293691986 -0.014916217386
-71 0.00128053061726 -0.00382898708379
-72 0.000141070576797 -0.000177704692482
-73 0.0014804796511 -0.0105311838
-74 0.00095326423819 -0.0050250570579
-75 0.000831513901556 0.00625180962049
-76 0.00124204236488 0.0117902480983
-77 0.00192892696243 0.00828195017884
-78 0.00230888433286 0.00765437064913
-79 0.000772988980957 -0.00466004256634
-80 -0.000972934470601 -0.00794203589665
-81 -0.000825239851136 -0.0180890001892
-82 -0.000975175708257 -0.017046945113
-83 -0.000328157175199 -0.00733223273401
-84 0.00152809458981 -0.014337289403
-85 0.000204561491838 -0.0145038337473
-86 0.000958564114974 0.00611562699814
-87 0.00182495005699 0.0135671536294
-88 0.000691799295473 -0.00991154906392
-89 -0.000164814642312 -0.00683729147812
-90 -0.000495108434506 -0.018272246356
-91 -5.75283291151e-05 -0.0187391570922
-92 0.000980006758757 -0.000691467878134
-93 -0.000237878724736 -0.0101121125697
-94 -0.00182103731256 -0.02537083053
-95 -0.000124416007346 -0.0137765920579
-96 1.28307436801e-06 -0.00511559296079
-97 2.13006652241e-05 -0.00765449401728
-98 0.000630107936427 0.00193944394238
-99 -0.00195781990797 -0.0185721711052
-100 -0.000121047850397 -0.00903979441776
-101 0.00380328631922 -0.00735042696528
-102 0.00146195943076 -0.0196360714253
-103 0.00431818384817 0.00623068278311
-104 0.00270535358616 -0.00919789069736
-105 0.00348128902639 -0.00661174582865
-106 0.00608948395467 0.0105925795597
-107 -0.000316164072773 -0.0340356815797
-108 0.00498054497252 0.00589466060194
-109 0.00332851194261 -0.0119530548351
-110 0.00521718108616 0.019838433964
-111 0.0024425365186 0.0056190945732
-112 0.00239304071946 -0.00943633793421
-113 0.0033517411382 0.00333352355283
-114 0.00121571788624 -0.0252729457662
-115 0.00174247582859 -0.0198739526029
-116 0.00276824324416 0.00012671005862
-117 0.00283087224731 -0.00171497869775
-118 0.00677564081585 0.0333579774127
-119 0.00669433449169 -0.00213577965515
-120 0.00119992099901 -0.0235626863995
-121 0.0039514012311 0.00753951675886
-122 0.00107394296724 -0.0201437000923
-123 0.00625053915694 0.00718047948235
-124 0.00169144697539 -0.00751769568719
-125 0.00362530843477 0.00652532644077
-126 0.00449337826627 0.0143122037644
-127 0.00148231189251 -0.00651360693069
-128 0.00156493567407 -0.00495452682527
-129 0.00295099547041 -0.0109308371774
-130 0.0040009541229 0.0122602980101
-131 0.00482086296995 0.0071695428205
-132 0.00641618457442 0.0431443194121
-133 0.00300528343518 -0.0114433590101
-134 0.00183409020836 -0.00555386556062
-135 0.00216811021196 -0.0179461401821
-136 0.00553489980813 0.0172257235932
-137 0.00319828572246 -0.000592991026508
-138 0.00274692788031 -0.00152565423339
-139 0.00132810381823 -0.00620179238822
-140 0.00335981997382 0.00898416237223
-141 0.00357910201795 0.00100409451945
-142 0.00303686395111 0.0107216156308
-143 0.00146195943076 -0.0196360714253
-144 0.00412744981738 0.00329510730094
-145 0.00385310530472 0.0043919488708
-146 0.00291206809269 0.00286898102218
-147 0.00181332919273 -0.0132702604229
-148 0.00251823082733 -2.87503486175e-06
-149 0.00274135416487 -0.000415299029875
-150 0.00161105855831 -0.0107445157278
+1 -0.003553 0.0108
+2 -0.003722 -0.003192
+3 -0.003887 -0.003062
+4 -0.0038 -0.007509
+5 -0.0036 0.01068
+6 -0.002995 0.02441
+7 -0.003814 -0.001828
+8 -0.00354 0.006305
+9 -0.004002 -0.01452
+10 -0.003663 -0.001368
+11 -0.003267 0.02134
+12 -0.003575 0.001815
+13 -0.003807 -0.004912
+14 -0.004281 -0.01352
+15 -0.003251 0.03864
+16 -0.002922 0.04394
+17 -0.003349 0.02585
+18 -0.003521 0.01053
+19 -0.002847 0.02952
+20 -0.003403 0.01643
+21 -0.003128 0.01419
+22 -0.003382 0.01406
+23 -0.004124 0.003402
+24 -0.003227 0.004863
+25 -0.003329 0.0007417
+26 -0.003506 -0.001891
+27 -0.003393 0.005424
+28 -0.003411 0.01263
+29 -0.003506 0.01091
+30 -0.003655 -0.004034
+31 -0.00361 -0.003953
+32 -0.003234 0.01438
+33 -0.003377 0.02573
+34 -0.003236 0.0352
+35 -0.003663 -0.001368
+36 -0.003801 0.003321
+37 -0.003401 0.01999
+38 -0.003663 -0.001368
+39 -0.004064 -0.01246
+40 -0.003482 0.008422
+41 -0.00366 0.008718
+42 -0.004057 -0.02284
+43 -0.004044 -0.008892
+44 -0.003315 0.006892
+45 -0.003022 0.01469
+46 -0.003747 -0.005408
+47 -0.003352 0.01636
+48 -0.003865 -0.00534
+49 -0.003328 0.01907
+50 -0.003632 0.004649
+51 0.002131 0.02353
+52 0.001268 0.01077
+53 0.00232 0.0165
+54 -0.0005446 -0.02218
+55 0.001391 0.002863
+56 0.0003522 -0.01197
+57 0.001503 0.008832
+58 -0.001879 -0.02529
+59 0.001425 0.008304
+60 -0.000784 -0.0196
+61 -0.001688 -0.03188
+62 0.000332 -0.002273
+63 -0.0002401 -0.01364
+64 0.001079 -0.003714
+65 -0.0007557 -0.005543
+66 0.001375 0.01646
+67 0.0004002 -0.01059
+68 -0.0001866 -0.007917
+69 0.0007813 -0.01567
+70 -0.0006023 -0.01492
+71 0.001281 -0.003829
+72 0.0001411 -0.0001777
+73 0.00148 -0.01053
+74 0.0009533 -0.005025
+75 0.0008315 0.006252
+76 0.001242 0.01179
+77 0.001929 0.008282
+78 0.002309 0.007654
+79 0.000773 -0.00466
+80 -0.0009729 -0.007942
+81 -0.0008252 -0.01809
+82 -0.0009752 -0.01705
+83 -0.0003282 -0.007332
+84 0.001528 -0.01434
+85 0.0002046 -0.0145
+86 0.0009586 0.006116
+87 0.001825 0.01357
+88 0.0006918 -0.009912
+89 -0.0001648 -0.006837
+90 -0.0004951 -0.01827
+91 -5.753e-05 -0.01874
+92 0.00098 -0.0006915
+93 -0.0002379 -0.01011
+94 -0.001821 -0.02537
+95 -0.0001244 -0.01378
+96 1.283e-06 -0.005116
+97 2.13e-05 -0.007654
+98 0.0006301 0.001939
+99 -0.001958 -0.01857
+100 -0.000121 -0.00904
+101 0.003803 -0.00735
+102 0.001462 -0.01964
+103 0.004318 0.006231
+104 0.002705 -0.009198
+105 0.003481 -0.006612
+106 0.006089 0.01059
+107 -0.0003162 -0.03404
+108 0.004981 0.005895
+109 0.003329 -0.01195
+110 0.005217 0.01984
+111 0.002443 0.005619
+112 0.002393 -0.009436
+113 0.003352 0.003334
+114 0.001216 -0.02527
+115 0.001742 -0.01987
+116 0.002768 0.0001267
+117 0.002831 -0.001715
+118 0.006776 0.03336
+119 0.006694 -0.002136
+120 0.0012 -0.02356
+121 0.003951 0.00754
+122 0.001074 -0.02014
+123 0.006251 0.00718
+124 0.001691 -0.007518
+125 0.003625 0.006525
+126 0.004493 0.01431
+127 0.001482 -0.006514
+128 0.001565 -0.004955
+129 0.002951 -0.01093
+130 0.004001 0.01226
+131 0.004821 0.00717
+132 0.006416 0.04314
+133 0.003005 -0.01144
+134 0.001834 -0.005554
+135 0.002168 -0.01795
+136 0.005535 0.01723
+137 0.003198 -0.000593
+138 0.002747 -0.001526
+139 0.001328 -0.006202
+140 0.00336 0.008984
+141 0.003579 0.001004
+142 0.003037 0.01072
+143 0.001462 -0.01964
+144 0.004127 0.003295
+145 0.003853 0.004392
+146 0.002912 0.002869
+147 0.001813 -0.01327
+148 0.002518 -2.875e-06
+149 0.002741 -0.0004153
+150 0.001611 -0.01074
#Rotated values 1 2
-1 -398.938889593 51.4361630892
-2 -417.873833632 -15.2051045001
-3 -436.395044425 -14.5861409847
-4 -426.608218182 -35.7706445387
-5 -404.194831251 50.8945672883
-6 -336.278220515 116.297972072
-7 -428.211040147 -8.70624394838
-8 -397.518516743 30.0355611785
-9 -449.31688431 -69.1667206344
-10 -411.285386401 -6.51733355896
-11 -366.789409766 101.643428575
-12 -401.422829891 8.64530614458
-13 -427.483966571 -23.3976885243
-14 -480.641134897 -64.4027252584
-15 -365.03266343 184.042683351
-16 -328.106506675 209.293877989
-17 -376.036401748 123.113699976
-18 -395.300683193 50.1827218221
-19 -319.702315953 140.599386069
-20 -382.137180401 78.282538667
-21 -351.266399068 67.6100651954
-22 -379.681882777 66.9848180875
-23 -463.095357451 16.2056849416
-24 -362.339344904 23.1633436528
-25 -373.802438142 3.53300170168
-26 -393.611682074 -9.00842597671
-27 -380.977882758 25.8351771375
-28 -383.003880169 60.1602960542
-29 -393.638652357 51.9619253275
-30 -410.405745001 -19.2162629953
-31 -405.30053066 -18.8312613603
-32 -363.138541016 68.4771605722
-33 -379.209751481 122.56177394
-34 -363.365370216 167.69700629
-35 -411.285386401 -6.51733355896
-36 -426.774601382 15.8198876835
-37 -381.866289275 95.2310996238
-38 -411.285386401 -6.51733355896
-39 -456.266149239 -59.3630196781
-40 -390.950336998 40.1186673956
-41 -410.893902242 41.5256470425
-42 -455.463778609 -108.775940841
-43 -454.059754194 -42.3586089771
-44 -372.207178335 32.8315541856
-45 -339.319065786 69.9847304172
-46 -420.69383085 -25.7627422503
-47 -376.357638654 77.9353274961
-48 -433.931805737 -25.4391686638
-49 -373.653499999 90.8220434758
-50 -407.849344494 22.1434384987
-51 239.307538123 112.107435969
-52 142.408456254 51.3066582975
-53 260.459830104 78.618137947
-54 -61.1437459762 -105.651947706
-55 156.158852912 13.6386330667
-56 39.5421443935 -57.0298652899
-57 168.748284713 42.0717729946
-58 -210.944696776 -120.481962613
-59 159.955992721 39.5578587535
-60 -88.0274186445 -93.3578554128
-61 -189.53584843 -151.835722676
-62 37.2775389879 -10.8260687785
-63 -26.9594478684 -64.955718169
-64 121.138563191 -17.6900963878
-65 -84.8502402418 -26.4062909417
-66 154.353323395 78.3874981192
-67 44.9386562461 -50.4630244749
-68 -20.9520699268 -37.712937046
-69 87.7198715803 -74.6415314411
-70 -67.6255013365 -71.0529641542
-71 143.77790457 -18.2392677024
-72 15.8393963056 -0.846491092088
-73 166.228170669 -50.1649852426
-74 107.032454219 -23.9367119537
-75 93.3623333752 29.7803118155
-76 139.45644579 56.16250111
-77 216.579809171 39.4508268386
-78 259.241401023 36.4613701504
-79 86.791158633 -22.1979761258
-80 -109.24102679 -37.8316551222
-81 -92.6578833524 -86.1664220066
-82 -109.492672826 -81.2026232057
-83 -36.8454688889 -34.9268756372
-84 171.574373268 -68.295257682
-85 22.9681526201 -69.0885867828
-86 107.627524081 29.131609887
-87 204.905288168 64.6267385057
-88 77.6751853839 -47.2133733456
-89 -18.5053786255 -32.5692374772
-90 -55.5907467484 -87.0393097495
-91 -6.45927750734 -89.2634253508
-92 110.035102901 -3.29378696269
-93 -26.709009627 -48.1687517244
-94 -204.465965447 -120.853207322
-95 -13.9694221989 -65.6243923192
-96 0.144063516758 -24.3679770725
-97 2.39163747478 -36.4619578109
-98 70.7484831137 9.23848435236
-99 -219.823907447 -88.4679924977
-100 -13.591245729 -43.0607956497
-101 427.032770699 -35.0135433244
-102 164.148721387 -93.5358504774
-103 484.845435841 29.6796747451
-104 303.756946139 -43.813882642
-105 390.878933052 -31.4948573892
-106 683.726910633 50.4574421437
-107 -35.4988840335 -162.127971232
-108 559.215305053 28.0790429377
-109 373.72513081 -56.9380262268
-110 585.784794378 94.499798497
-111 274.247860804 26.7663922398
-112 268.690475304 -44.9497190631
-113 376.333303564 15.8791417004
-114 136.500734832 -120.386936119
-115 195.645086513 -94.6689904923
-116 310.818193348 0.603579648922
-117 317.850174244 -8.16925074119
-118 760.768563816 158.899747303
-119 751.639494384 -10.1737237633
-120 134.727061236 -112.240165774
-121 443.663104546 35.9142670118
-122 120.582255024 -95.9539247487
-123 701.810179545 34.2040034726
-124 189.915569791 -35.8103229767
-125 407.049423998 31.083201169
-126 504.5162551 68.1757629779
-127 166.43389463 -31.0273756263
-128 175.710888105 -23.6007432586
-129 331.337602873 -52.0687223872
-130 449.226900407 58.4015700824
-131 541.286218925 34.1519070045
-132 720.408837559 205.516700468
-133 337.433052457 -54.5101051094
-134 205.931543841 -26.455675751
-135 243.435290768 -85.4859125517
-136 621.458234334 82.0542292543
-137 359.103338249 -2.82469536745
-138 308.424905512 -7.26740920646
-139 149.119420857 -29.5420561961
-140 377.240394766 42.795794032
-141 401.861370157 4.78297480198
-142 340.978910982 51.0721016846
-143 164.148721387 -93.5358504774
-144 463.429829758 15.6961470109
-145 432.626443546 20.920919668
-146 326.966839126 13.6663069767
-147 203.600498192 -63.212496422
-148 282.746813465 -0.0136951442637
-149 307.799089061 -1.97826475167
-150 180.889562913 -51.1811856252
+1 -398.9 51.44
+2 -417.9 -15.21
+3 -436.4 -14.59
+4 -426.6 -35.77
+5 -404.2 50.89
+6 -336.3 116.3
+7 -428.2 -8.706
+8 -397.5 30.04
+9 -449.3 -69.17
+10 -411.3 -6.517
+11 -366.8 101.6
+12 -401.4 8.645
+13 -427.5 -23.4
+14 -480.6 -64.4
+15 -365 184
+16 -328.1 209.3
+17 -376 123.1
+18 -395.3 50.18
+19 -319.7 140.6
+20 -382.1 78.28
+21 -351.3 67.61
+22 -379.7 66.98
+23 -463.1 16.21
+24 -362.3 23.16
+25 -373.8 3.533
+26 -393.6 -9.008
+27 -381 25.84
+28 -383 60.16
+29 -393.6 51.96
+30 -410.4 -19.22
+31 -405.3 -18.83
+32 -363.1 68.48
+33 -379.2 122.6
+34 -363.4 167.7
+35 -411.3 -6.517
+36 -426.8 15.82
+37 -381.9 95.23
+38 -411.3 -6.517
+39 -456.3 -59.36
+40 -391 40.12
+41 -410.9 41.53
+42 -455.5 -108.8
+43 -454.1 -42.36
+44 -372.2 32.83
+45 -339.3 69.98
+46 -420.7 -25.76
+47 -376.4 77.94
+48 -433.9 -25.44
+49 -373.7 90.82
+50 -407.8 22.14
+51 239.3 112.1
+52 142.4 51.31
+53 260.5 78.62
+54 -61.14 -105.7
+55 156.2 13.64
+56 39.54 -57.03
+57 168.7 42.07
+58 -210.9 -120.5
+59 160 39.56
+60 -88.03 -93.36
+61 -189.5 -151.8
+62 37.28 -10.83
+63 -26.96 -64.96
+64 121.1 -17.69
+65 -84.85 -26.41
+66 154.4 78.39
+67 44.94 -50.46
+68 -20.95 -37.71
+69 87.72 -74.64
+70 -67.63 -71.05
+71 143.8 -18.24
+72 15.84 -0.8465
+73 166.2 -50.16
+74 107 -23.94
+75 93.36 29.78
+76 139.5 56.16
+77 216.6 39.45
+78 259.2 36.46
+79 86.79 -22.2
+80 -109.2 -37.83
+81 -92.66 -86.17
+82 -109.5 -81.2
+83 -36.85 -34.93
+84 171.6 -68.3
+85 22.97 -69.09
+86 107.6 29.13
+87 204.9 64.63
+88 77.68 -47.21
+89 -18.51 -32.57
+90 -55.59 -87.04
+91 -6.459 -89.26
+92 110 -3.294
+93 -26.71 -48.17
+94 -204.5 -120.9
+95 -13.97 -65.62
+96 0.1441 -24.37
+97 2.392 -36.46
+98 70.75 9.238
+99 -219.8 -88.47
+100 -13.59 -43.06
+101 427 -35.01
+102 164.1 -93.54
+103 484.8 29.68
+104 303.8 -43.81
+105 390.9 -31.49
+106 683.7 50.46
+107 -35.5 -162.1
+108 559.2 28.08
+109 373.7 -56.94
+110 585.8 94.5
+111 274.2 26.77
+112 268.7 -44.95
+113 376.3 15.88
+114 136.5 -120.4
+115 195.6 -94.67
+116 310.8 0.6036
+117 317.9 -8.169
+118 760.8 158.9
+119 751.6 -10.17
+120 134.7 -112.2
+121 443.7 35.91
+122 120.6 -95.95
+123 701.8 34.2
+124 189.9 -35.81
+125 407 31.08
+126 504.5 68.18
+127 166.4 -31.03
+128 175.7 -23.6
+129 331.3 -52.07
+130 449.2 58.4
+131 541.3 34.15
+132 720.4 205.5
+133 337.4 -54.51
+134 205.9 -26.46
+135 243.4 -85.49
+136 621.5 82.05
+137 359.1 -2.825
+138 308.4 -7.267
+139 149.1 -29.54
+140 377.2 42.8
+141 401.9 4.783
+142 341 51.07
+143 164.1 -93.54
+144 463.4 15.7
+145 432.6 20.92
+146 327 13.67
+147 203.6 -63.21
+148 282.7 -0.0137
+149 307.8 -1.978
+150 180.9 -51.18
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kpca_out2.pdf
--- a/test-data/kpca_out2.pdf Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kpca_out2.pdf Wed Mar 10 16:23:53 2010 -0500
@@ -2,8 +2,8 @@
%âãÏÓ\r
1 0 obj
<<
-/CreationDate (D:20100303115833)
-/ModDate (D:20100303115833)
+/CreationDate (D:20100310152519)
+/ModDate (D:20100310152519)
/Title (R Graphics Output)
/Producer (R 2.10.0)
/Creator (R)
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kpca_out3.tabular
--- a/test-data/kpca_out3.tabular Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kpca_out3.tabular Wed Mar 10 16:23:53 2010 -0500
@@ -1,304 +1,304 @@
#Component 1
-#Eigenvalue 0.383633782425
+#Eigenvalue 0.3836
#Principal component vectors 1
-1 -0.185843051028
-2 -0.181428165807
-3 -0.185859249771
-4 -0.180608592797
-5 -0.185779114773
-6 -0.173450096143
-7 -0.184654978426
-8 -0.183342755691
-9 -0.179634326545
-10 -0.181320357007
-11 -0.183283707685
-12 -0.180538178566
-13 -0.182036533193
-14 -0.185344590367
-15 -0.184526727477
-16 -0.170434338287
-17 -0.183277063273
-18 -0.184927028233
-19 -0.175718857154
-20 -0.181680491185
-21 -0.177181023283
-22 -0.180973935981
-23 -0.18879919219
-24 -0.171685597374
-25 -0.16885668022
-26 -0.176210994722
-27 -0.177901230872
-28 -0.183637458976
-29 -0.185586660267
-30 -0.178962452299
-31 -0.177730530774
-32 -0.180917047631
-33 -0.179323102414
-34 -0.178700887193
-35 -0.181320357007
-36 -0.187026472694
-37 -0.187484289185
-38 -0.181320357007
-39 -0.183195868488
-40 -0.183342755691
-41 -0.186668209958
-42 -0.164131657602
-43 -0.185859249771
-44 -0.174314649845
-45 -0.165903096322
-46 -0.180461981351
-47 -0.180029913254
-48 -0.184124311064
-49 -0.183283707685
-50 -0.185012459583
-51 0.0942363984412
-52 0.0852035041935
-53 0.10768188396
-54 0.0475694649417
-55 0.0943527981382
-56 0.0825748819852
-57 0.0979972750157
-58 -0.0233654549559
-59 0.0882288472796
-60 0.0416564831403
-61 -0.00243777000151
-62 0.0665074398542
-63 0.0371663691142
-64 0.0968217773028
-65 0.00993115739351
-66 0.0768633607048
-67 0.0872586274324
-68 0.0436063386747
-69 0.0862347466212
-70 0.0321343717223
-71 0.108058085177
-72 0.0458355513695
-73 0.108266881631
-74 0.0911783500891
-75 0.0683993163465
-76 0.0778280589161
-77 0.102242260903
-78 0.116337023057
-79 0.0879507719057
-80 -0.00751313350151
-81 0.0242445751445
-82 0.0120027700758
-83 0.0346227542783
-84 0.118261876355
-85 0.0872586274324
-86 0.0849092505391
-87 0.098013797976
-88 0.0756882831679
-89 0.0522329673214
-90 0.0473615456199
-91 0.0732645705878
-92 0.0905493705978
-93 0.0435140522343
-94 -0.0221940320837
-95 0.0621646602878
-96 0.0565885154519
-97 0.0609576609235
-98 0.0683993163465
-99 -0.0478191910469
-100 0.0539084973987
-101 0.116079702219
-102 0.122090948168
-103 0.125794088091
-104 0.128509617495
-105 0.12652479603
-106 0.102504931737
-107 0.0929811106109
-108 0.116032789314
-109 0.124684918317
-110 0.108672337594
-111 0.120990051431
-112 0.126248324837
-113 0.128639151235
-114 0.118532439899
-115 0.12178749141
-116 0.124484067654
-117 0.127958285183
-118 0.0862556895231
-119 0.0821350202959
-120 0.108242711844
-121 0.125331989407
-122 0.1167560565
-123 0.0979484407405
-124 0.11460521934
-125 0.125894432778
-126 0.123158494779
-127 0.11059361124
-128 0.114095357757
-129 0.128690994227
-130 0.125636948314
-131 0.121892068306
-132 0.100713002067
-133 0.12806059698
-134 0.116484703738
-135 0.121891129935
-136 0.119070960893
-137 0.122085746848
-138 0.127403650206
-139 0.109872871363
-140 0.127473736894
-141 0.125371169881
-142 0.121785772566
-143 0.122090948168
-144 0.122665736125
-145 0.121132693034
-146 0.124391104206
-147 0.117888635141
-148 0.124958557781
-149 0.122870371594
-150 0.120935324954
+1 -0.1858
+2 -0.1814
+3 -0.1859
+4 -0.1806
+5 -0.1858
+6 -0.1735
+7 -0.1847
+8 -0.1833
+9 -0.1796
+10 -0.1813
+11 -0.1833
+12 -0.1805
+13 -0.182
+14 -0.1853
+15 -0.1845
+16 -0.1704
+17 -0.1833
+18 -0.1849
+19 -0.1757
+20 -0.1817
+21 -0.1772
+22 -0.181
+23 -0.1888
+24 -0.1717
+25 -0.1689
+26 -0.1762
+27 -0.1779
+28 -0.1836
+29 -0.1856
+30 -0.179
+31 -0.1777
+32 -0.1809
+33 -0.1793
+34 -0.1787
+35 -0.1813
+36 -0.187
+37 -0.1875
+38 -0.1813
+39 -0.1832
+40 -0.1833
+41 -0.1867
+42 -0.1641
+43 -0.1859
+44 -0.1743
+45 -0.1659
+46 -0.1805
+47 -0.18
+48 -0.1841
+49 -0.1833
+50 -0.185
+51 0.09424
+52 0.0852
+53 0.1077
+54 0.04757
+55 0.09435
+56 0.08257
+57 0.098
+58 -0.02337
+59 0.08823
+60 0.04166
+61 -0.002438
+62 0.06651
+63 0.03717
+64 0.09682
+65 0.009931
+66 0.07686
+67 0.08726
+68 0.04361
+69 0.08623
+70 0.03213
+71 0.1081
+72 0.04584
+73 0.1083
+74 0.09118
+75 0.0684
+76 0.07783
+77 0.1022
+78 0.1163
+79 0.08795
+80 -0.007513
+81 0.02424
+82 0.012
+83 0.03462
+84 0.1183
+85 0.08726
+86 0.08491
+87 0.09801
+88 0.07569
+89 0.05223
+90 0.04736
+91 0.07326
+92 0.09055
+93 0.04351
+94 -0.02219
+95 0.06216
+96 0.05659
+97 0.06096
+98 0.0684
+99 -0.04782
+100 0.05391
+101 0.1161
+102 0.1221
+103 0.1258
+104 0.1285
+105 0.1265
+106 0.1025
+107 0.09298
+108 0.116
+109 0.1247
+110 0.1087
+111 0.121
+112 0.1262
+113 0.1286
+114 0.1185
+115 0.1218
+116 0.1245
+117 0.128
+118 0.08626
+119 0.08214
+120 0.1082
+121 0.1253
+122 0.1168
+123 0.09795
+124 0.1146
+125 0.1259
+126 0.1232
+127 0.1106
+128 0.1141
+129 0.1287
+130 0.1256
+131 0.1219
+132 0.1007
+133 0.1281
+134 0.1165
+135 0.1219
+136 0.1191
+137 0.1221
+138 0.1274
+139 0.1099
+140 0.1275
+141 0.1254
+142 0.1218
+143 0.1221
+144 0.1227
+145 0.1211
+146 0.1244
+147 0.1179
+148 0.125
+149 0.1229
+150 0.1209
#Rotated values 1
-1 -10.6943508905
-2 -10.440296023
-3 -10.6952830482
-4 -10.393133639
-5 -10.6906716744
-6 -9.98119746679
-7 -10.6259831726
-8 -10.5504712269
-9 -10.3370694219
-10 -10.4340921584
-11 -10.5470733054
-12 -10.3890816473
-13 -10.4753045652
-14 -10.6656669382
-15 -10.618602963
-16 -9.80765547784
-17 -10.5466909523
-18 -10.641638297
-19 -10.111753472
-20 -10.4548161039
-21 -10.1958939204
-22 -10.4141573371
-23 -10.8644622328
-24 -9.87965926626
-25 -9.71686903807
-26 -10.1400735615
-27 -10.2373383146
-28 -10.5674299473
-29 -10.6795968669
-30 -10.2984063731
-31 -10.227515366
-32 -10.4108836932
-33 -10.3191600083
-34 -10.2833545914
-35 -10.4340921584
-36 -10.76245097
-37 -10.7887960508
-38 -10.4340921584
-39 -10.5420185929
-40 -10.5504712269
-41 -10.7418347167
-42 -9.44496729322
-43 -10.6952830482
-44 -10.0309482678
-45 -9.54690485367
-46 -10.3846968734
-47 -10.3598334857
-48 -10.5954458835
-49 -10.5470733054
-50 -10.6465544498
-51 5.42283989641
-52 4.90304138844
-53 6.19656126634
-54 2.73738806452
-55 5.42953812482
-56 4.75177714639
-57 5.63925979224
-58 -1.34456667942
-59 5.07713496012
-60 2.39712512844
-61 -0.140281638954
-62 3.8271751066
-63 2.13874121434
-64 5.57161569716
-65 0.571489121209
-66 4.42310726956
-67 5.02130359366
-68 2.50932969652
-69 4.96238430341
-70 1.84917458545
-71 6.2182097907
-72 2.63760989121
-73 6.2302249967
-74 5.24686429798
-75 3.93604326679
-76 4.47862089311
-77 5.88353779109
-78 6.69462182871
-79 5.061133094
-80 -0.432343773457
-81 1.39515570989
-82 0.690700212563
-83 1.99236872726
-84 6.80538764143
-85 5.02130359366
-86 4.88610854207
-87 5.6402106071
-88 4.35548735354
-89 3.00574962312
-90 2.72542333314
-91 4.21601464985
-92 5.21066963079
-93 2.50401906709
-94 -1.27715707133
-95 3.5772695639
-96 3.25638993369
-97 3.50781270418
-98 3.93604326679
-99 -2.75175857007
-100 3.10216811428
-101 6.67981428373
-102 7.02573183683
-103 7.23882927317
-104 7.39509459563
-105 7.28087791073
-106 5.89865320193
-107 5.35060427365
-108 6.67711467748
-109 7.17500202376
-110 6.25355698739
-111 6.96238065996
-112 7.26496835728
-113 7.4025486234
-114 6.82095723878
-115 7.00826939727
-116 7.16344405884
-117 7.36336814062
-118 4.96358946411
-119 4.72646527585
-120 6.22883414471
-121 7.21223777328
-122 6.71873513643
-123 5.63644962058
-124 6.59496506713
-125 7.24460361494
-126 7.08716387847
-127 6.36411680878
-128 6.56562504802
-129 7.40553193189
-130 7.2297866541
-131 7.01428728179
-132 5.79553648833
-133 7.36925567984
-134 6.70312012344
-135 7.01423328312
-136 6.85194646563
-137 7.0254325265
-138 7.33145163347
-139 6.322641784
-140 7.33548477667
-141 7.21449241625
-142 7.00817048627
-143 7.02573183683
-144 7.05880804852
-145 6.97058898059
-146 7.158094471
-147 6.78390945061
-148 7.19074862515
-149 7.0705838104
-150 6.95923142115
+1 -10.69
+2 -10.44
+3 -10.7
+4 -10.39
+5 -10.69
+6 -9.981
+7 -10.63
+8 -10.55
+9 -10.34
+10 -10.43
+11 -10.55
+12 -10.39
+13 -10.48
+14 -10.67
+15 -10.62
+16 -9.808
+17 -10.55
+18 -10.64
+19 -10.11
+20 -10.45
+21 -10.2
+22 -10.41
+23 -10.86
+24 -9.88
+25 -9.717
+26 -10.14
+27 -10.24
+28 -10.57
+29 -10.68
+30 -10.3
+31 -10.23
+32 -10.41
+33 -10.32
+34 -10.28
+35 -10.43
+36 -10.76
+37 -10.79
+38 -10.43
+39 -10.54
+40 -10.55
+41 -10.74
+42 -9.445
+43 -10.7
+44 -10.03
+45 -9.547
+46 -10.38
+47 -10.36
+48 -10.6
+49 -10.55
+50 -10.65
+51 5.423
+52 4.903
+53 6.197
+54 2.737
+55 5.43
+56 4.752
+57 5.639
+58 -1.345
+59 5.077
+60 2.397
+61 -0.1403
+62 3.827
+63 2.139
+64 5.572
+65 0.5715
+66 4.423
+67 5.021
+68 2.509
+69 4.962
+70 1.849
+71 6.218
+72 2.638
+73 6.23
+74 5.247
+75 3.936
+76 4.479
+77 5.884
+78 6.695
+79 5.061
+80 -0.4323
+81 1.395
+82 0.6907
+83 1.992
+84 6.805
+85 5.021
+86 4.886
+87 5.64
+88 4.355
+89 3.006
+90 2.725
+91 4.216
+92 5.211
+93 2.504
+94 -1.277
+95 3.577
+96 3.256
+97 3.508
+98 3.936
+99 -2.752
+100 3.102
+101 6.68
+102 7.026
+103 7.239
+104 7.395
+105 7.281
+106 5.899
+107 5.351
+108 6.677
+109 7.175
+110 6.254
+111 6.962
+112 7.265
+113 7.403
+114 6.821
+115 7.008
+116 7.163
+117 7.363
+118 4.964
+119 4.726
+120 6.229
+121 7.212
+122 6.719
+123 5.636
+124 6.595
+125 7.245
+126 7.087
+127 6.364
+128 6.566
+129 7.406
+130 7.23
+131 7.014
+132 5.796
+133 7.369
+134 6.703
+135 7.014
+136 6.852
+137 7.025
+138 7.331
+139 6.323
+140 7.335
+141 7.214
+142 7.008
+143 7.026
+144 7.059
+145 6.971
+146 7.158
+147 6.784
+148 7.191
+149 7.071
+150 6.959
diff -r 07a608852925 -r 26c40d8e8fdc test-data/kpca_out4.pdf
--- a/test-data/kpca_out4.pdf Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/kpca_out4.pdf Wed Mar 10 16:23:53 2010 -0500
@@ -2,8 +2,8 @@
%âãÏÓ\r
1 0 obj
<<
-/CreationDate (D:20100303121837)
-/ModDate (D:20100303121837)
+/CreationDate (D:20100310154313)
+/ModDate (D:20100310154313)
/Title (R Graphics Output)
/Producer (R 2.10.0)
/Creator (R)
diff -r 07a608852925 -r 26c40d8e8fdc test-data/pca_out1.tabular
--- a/test-data/pca_out1.tabular Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/pca_out1.tabular Wed Mar 10 16:23:53 2010 -0500
@@ -1,159 +1,159 @@
#Component 1 2 3 4
-#Std. deviation 0.959802547771 0.143553847861 0.383866224491 1.70611197867
-#Proportion of variance explained 0.230305232677 0.00515192680891 0.0368383195763 0.727704520938
+#Std. deviation 0.9598 0.1436 0.3839 1.706
+#Proportion of variance explained 0.2303 0.005152 0.03684 0.7277
#Loadings 1 2 3 4
-c1 0.522371620408 -0.37231836335 0.721016809062 0.2619955869
-c2 -0.263354915314 -0.925556494147 -0.242032877214 -0.124134810063
-c3 0.581254005598 -0.0210947768412 -0.140892258488 -0.80115426908
-c4 0.565611049883 -0.0654157690789 -0.633801403356 0.523546271604
+c1 0.5224 -0.3723 0.721 0.262
+c2 -0.2634 -0.9256 -0.242 -0.1241
+c3 0.5813 -0.02109 -0.1409 -0.8012
+c4 0.5656 -0.06542 -0.6338 0.5235
#Scores 1 2 3 4
-1 -2.26454172839 -0.505703902774 0.121943347782 0.0230733234546
-2 -2.08642550062 0.655404729369 0.227250832399 0.103208244417
-3 -2.36795044906 0.318477310847 -0.051479623645 0.0278252250225
-4 -2.30419716115 0.575367712533 -0.0988604444374 -0.0663114622137
-5 -2.38877749351 -0.674767396703 -0.0214278489731 -0.0373972870003
-6 -2.07053680718 -1.51854855989 -0.0306842582807 0.00439877494217
-7 -2.44571133963 -0.0745626750069 -0.342197636214 -0.0380965668019
-8 -2.23384185871 -0.247613931752 0.0825744644549 -0.0255051622663
-9 -2.34195767646 1.09514636258 -0.153562398695 -0.0267938291099
-10 -2.18867575528 0.448629048338 0.246559522389 -0.0399073034932
-11 -2.16348655627 -1.07059557604 0.264009373347 0.0153011376811
-12 -2.32737775413 -0.158587454659 -0.100165615627 -0.134554258442
-13 -2.22408272205 0.70911815803 0.223214513993 0.00263061381638
-14 -2.63971626084 0.938281982108 -0.189570030151 -0.0194220182895
-15 -2.1922915085 -1.88997851232 0.469480094636 0.192782042174
-16 -2.25146521458 -2.72237107657 -0.032603796659 0.0471257043405
-17 -2.20275048008 -1.51375028255 0.00136349158139 0.186631581003
-18 -2.19017916379 -0.514304308221 0.0386155948801 0.091905506725
-19 -1.89407429302 -1.43111070694 0.370742833992 0.0595280967873
-20 -2.33994907042 -1.15803342899 -0.137417718926 -0.0398281841641
-21 -1.91455639496 -0.43046516328 0.416006874756 0.0103602240245
-22 -2.20464539753 -0.952457317289 -0.164738346381 0.0577291622309
-23 -2.77416979371 -0.489517026503 -0.338836384343 0.0178537297394
-24 -1.82041156334 -0.106750792835 -0.0400614724262 0.150345594969
-25 -2.22821749946 -0.162186162666 -0.124201428024 -0.271228862988
-26 -1.95702400734 0.607892567484 0.298591028776 0.0438372887169
-27 -2.05206331128 -0.266014311982 -0.092092978814 0.0666010027592
-28 -2.16819365335 -0.552016495324 0.201295481624 0.0092605692697
-29 -2.14030596328 -0.336640408845 0.265314544537 0.0835439339095
-30 -2.26879019439 0.314878602841 -0.0755154360416 -0.108849379523
-31 -2.14455442928 0.483942096769 0.0678557607133 -0.0483787690683
-32 -1.8319381022 -0.445266835503 0.265375243884 0.239140993596
-33 -2.60820286766 -1.82847519273 -0.0514195181545 -0.231922592748
-34 -2.4379508591 -2.18539161563 0.079349754887 -0.0510210290964
-35 -2.18867575528 0.448629048338 0.246559522389 -0.0399073034932
-36 -2.21111989681 0.18433781054 0.218624527745 0.168619768529
-37 -2.04441651932 -0.684956426295 0.47941157048 0.195613314291
-38 -2.18867575528 0.448629048338 0.246559522389 -0.0399073034932
-39 -2.43595220297 0.882169414776 -0.201557586676 -0.00996079071925
-40 -2.17054720188 -0.292726954966 0.169938535763 0.00624028506401
-41 -2.28652723884 -0.467991715671 -0.0407365389625 0.10571826091
-42 -1.87170722354 2.32769160611 0.194528609858 0.291692981754
-43 -2.55783441954 0.45381638049 -0.313571837569 -0.0674111169685
-44 -1.96427929035 -0.497391640018 -0.314755610064 0.175540206175
-45 -2.13337283292 -1.17143211177 -0.252793221689 -0.153228806955
-46 -2.07535759284 0.691917347136 0.0565590081895 0.140294980357
-47 -2.3812582168 -1.15063259287 -0.0621019034895 -0.15421856895
-48 -2.39819168766 0.362390764726 -0.146855632419 -0.0494784238231
-49 -2.22678121309 -1.02548255282 0.176645302039 -0.0164443096492
-50 -2.20595416865 -0.0322378452738 0.146593527367 0.0487782023735
-51 1.1039936528 -0.863112445851 0.685555107794 0.0349778613463
-52 0.732481440009 -0.598635573342 0.0940668019749 0.00445376366529
-53 1.24210950532 -0.614822449612 0.5548465341 0.00967335738053
-54 0.397307282823 1.75816894744 0.0185694823534 0.0673978468494
-55 1.0725939482 0.21175790268 0.397447437604 0.105541661979
-56 0.384458146169 0.591062468623 -0.126797689592 -0.240528081689
-57 0.748715075952 -0.778698611389 -0.148656022613 -0.0783010665497
-58 -0.497863387798 1.84886877128 -0.255555249978 -0.0393891394616
-59 0.926222367533 -0.0303308267834 0.595459889269 -0.0291024203562
-60 0.00496802558432 1.0294011124 -0.542867048794 -0.0283487628543
-61 -0.12469746138 2.65806267796 0.0398134481856 0.0161405573367
-62 0.438730117781 0.0588812850215 -0.206703491276 0.0398514578087
-63 0.551633981423 1.77258156486 0.761380223046 0.0483536968144
-64 0.717165066237 0.185434314505 0.0672998423614 -0.164555675253
-65 -0.0372583829657 0.432795098714 -0.198061449157 0.109025121493
-66 0.875890535941 -0.509998151059 0.503505831713 0.105141287026
-67 0.34800640198 0.190621646657 -0.492831517597 -0.192059488728
-68 0.153392544569 0.790725456234 0.29860451573 -0.204321214984
-69 1.21530320922 1.63335564452 0.479409913826 0.228214500251
-70 0.156941175644 1.30310327017 0.168586746037 -0.050413197095
-71 0.73825610437 -0.402470381619 -0.616772625667 -0.0844515277208
-72 0.472369682345 0.416608222444 0.262718282968 0.114244715208
-73 1.22798821408 0.94091479253 0.366704858932 -0.00844834785365
-74 0.629381045315 0.416811642542 0.289962473611 -0.273494878669
-75 0.700472799208 0.0634939276517 0.444767559049 0.0440812895288
-76 0.873536987401 -0.250708610702 0.472148885852 0.10212100282
-77 1.25422219052 0.0826200998131 0.726843529499 0.0408294176689
-78 1.35823984739 -0.32882026627 0.261458073662 0.06701379087
-79 0.662126137566 0.224346070943 -0.0873681069173 -0.0363525362821
-80 -0.0472815133199 1.05721241261 0.315319194662 0.0660077099012
-81 0.12153420888 1.56359237987 0.145241737641 -0.0078752797855
-82 0.0141182260514 1.57339235465 0.236581428008 -0.0311492615406
-83 0.236010837327 0.775923784012 0.147972884858 0.0244595545867
-84 1.05669142809 0.636901283769 -0.106753233719 -0.183419235854
-85 0.22141708833 0.280847693086 -0.667559660213 -0.255550383389
-86 0.431783160741 -0.855136920218 -0.450731487053 -0.111146168635
-87 1.04941335522 -0.522197264513 0.396142266415 0.0372988657504
-88 1.03587821032 1.39246648439 0.685434302956 0.139128619431
-89 0.067067599873 0.212620734893 -0.294128261931 -0.147491049208
-90 0.275425066252 1.32981591316 -0.09344476854 0.00994752060019
-91 0.272335066262 1.11944152412 -0.0981718909473 -0.269842631856
-92 0.623170539726 -0.0275426333023 0.0193046543801 -0.147722636862
-93 0.330005363837 0.988900731819 0.195968072839 0.00762651619612
-94 -0.373627622687 2.0179322652 -0.112184053224 0.0210814709933
-95 0.282944342955 0.853950716987 -0.134118823056 -0.106873761349
-96 0.089053110319 0.174908547791 -0.131448375187 -0.230135986663
-97 0.224356783209 0.380484659487 -0.158769002642 -0.132578640268
-98 0.573883485558 0.15371997408 0.270039416433 -0.0194096051318
-99 -0.457012872505 1.53946451026 -0.196126173314 0.20088337989
-100 0.252244473269 0.595860745965 -0.0947499397296 -0.0582952756283
-101 1.84767259431 -0.871696661773 -1.00276098578 -0.0510680368143
-102 1.15318980825 0.701326113857 -0.53146463504 -0.0404135807034
-103 2.20634949713 -0.554470104935 0.205495910098 0.0593004996357
-104 1.43868540233 0.0500105222689 -0.163390463819 -0.235759861148
-105 1.86789070256 -0.291192801761 -0.394004333187 -0.0167817995605
-106 2.75419670883 -0.788432206355 0.58623270438 -0.100879674319
-107 0.358374474842 1.56009458398 -0.990999895323 -0.132987437876
-108 2.30300589815 -0.409516695222 0.654166687004 -0.237212798451
-109 2.00173529922 0.723865359313 0.39407044827 -0.0849938223583
-110 2.2675545991 -1.92144299147 -0.392517657812 0.102907298269
-111 1.3659094292 -0.693948039804 -0.283279516018 0.107010918256
-112 1.59906458565 0.4282488359 -0.0233040821221 0.058942700248
-113 1.88425185375 -0.41433275795 -0.0245485539642 0.146296963706
-114 1.25308651438 1.16739133525 -0.582130270891 0.0996816830012
-115 1.46406152299 0.444147569479 -1.00411052499 0.275022172524
-116 1.59180930264 -0.677035371602 -0.636650720962 0.190645617706
-117 1.47128018947 -0.253192471967 -0.0366575091837 -0.155435928096
-118 2.43737848219 -2.55675734149 0.134200081747 -0.275661550231
-119 3.30914118268 0.00236132010208 0.706933959275 0.0467561875045
-120 1.25398098669 1.71758384427 0.264622083882 -0.063067401986
-121 2.04049625966 -0.907398765015 -0.231878114284 0.167140048297
-122 0.97391511447 0.571174376366 -0.829503781073 0.0273189478123
-123 2.89806443584 -0.397791359172 0.860926842018 -0.126074285525
-124 1.32919369132 0.486760541904 0.0047073493335 0.140597875708
-125 1.70424070852 -1.01414842484 -0.295957876544 -0.0627403760289
-126 1.95772766179 -1.00333451543 0.422817051752 -0.21845913061
-127 1.17190450799 0.318896617311 -0.130651909956 0.125685466769
-128 1.01978105282 -0.0655429630966 -0.338042169623 -0.00906850832619
-129 1.78600886125 0.193272799857 -0.270002525769 0.0312072991185
-130 1.86477791271 -0.555381531579 0.71751068338 -0.207556767871
-131 2.43549739142 -0.246654468069 0.730234005788 -0.0167936016953
-132 2.31608241196 -2.62618386902 0.499619542563 -0.213160417565
-133 1.86037142585 0.18467239441 -0.35333027867 0.100039482389
-134 1.11127172568 0.29598610243 0.182659607661 -0.185740240258
-135 1.19746915512 0.817167741914 0.163213781512 -0.488403999516
-136 2.80094940373 -0.844748193785 0.547000957212 0.296321147128
-137 1.58015524852 -1.07247449613 -0.943392608462 0.0336074228517
-138 1.34704442435 -0.422255965896 -0.180028705939 -0.215906538551
-139 0.923432977766 -0.0192303705469 -0.417394303466 0.00474424585873
-140 1.85355198407 -0.672422728972 0.0148203293627 0.194875449427
-141 2.01615720068 -0.610397037555 -0.42591494689 0.246764701547
-142 1.9031168586 -0.686024831859 -0.127799364044 0.469214420513
-143 1.15318980825 0.701326113857 -0.53146463504 -0.0404135807034
-144 2.04330843929 -0.864684880472 -0.335266060523 0.0442781979364
-145 2.00169096693 -1.04855004662 -0.62926888815 0.212588357053
-146 1.87052207146 -0.382821837623 -0.254532318679 0.388890487462
-147 1.55849189072 0.905313601407 0.0253819098598 0.221322183713
-148 1.520845064 -0.266794574854 -0.17927720259 0.11890304299
-149 1.37639119064 -1.01636192879 -0.931405051938 0.0241461952814
-150 0.959298575616 0.0222839446606 -0.52879418717 -0.163675806017
+1 -2.265 -0.5057 0.1219 0.02307
+2 -2.086 0.6554 0.2273 0.1032
+3 -2.368 0.3185 -0.05148 0.02783
+4 -2.304 0.5754 -0.09886 -0.06631
+5 -2.389 -0.6748 -0.02143 -0.0374
+6 -2.071 -1.519 -0.03068 0.004399
+7 -2.446 -0.07456 -0.3422 -0.0381
+8 -2.234 -0.2476 0.08257 -0.02551
+9 -2.342 1.095 -0.1536 -0.02679
+10 -2.189 0.4486 0.2466 -0.03991
+11 -2.163 -1.071 0.264 0.0153
+12 -2.327 -0.1586 -0.1002 -0.1346
+13 -2.224 0.7091 0.2232 0.002631
+14 -2.64 0.9383 -0.1896 -0.01942
+15 -2.192 -1.89 0.4695 0.1928
+16 -2.251 -2.722 -0.0326 0.04713
+17 -2.203 -1.514 0.001363 0.1866
+18 -2.19 -0.5143 0.03862 0.09191
+19 -1.894 -1.431 0.3707 0.05953
+20 -2.34 -1.158 -0.1374 -0.03983
+21 -1.915 -0.4305 0.416 0.01036
+22 -2.205 -0.9525 -0.1647 0.05773
+23 -2.774 -0.4895 -0.3388 0.01785
+24 -1.82 -0.1068 -0.04006 0.1503
+25 -2.228 -0.1622 -0.1242 -0.2712
+26 -1.957 0.6079 0.2986 0.04384
+27 -2.052 -0.266 -0.09209 0.0666
+28 -2.168 -0.552 0.2013 0.009261
+29 -2.14 -0.3366 0.2653 0.08354
+30 -2.269 0.3149 -0.07552 -0.1088
+31 -2.145 0.4839 0.06786 -0.04838
+32 -1.832 -0.4453 0.2654 0.2391
+33 -2.608 -1.828 -0.05142 -0.2319
+34 -2.438 -2.185 0.07935 -0.05102
+35 -2.189 0.4486 0.2466 -0.03991
+36 -2.211 0.1843 0.2186 0.1686
+37 -2.044 -0.685 0.4794 0.1956
+38 -2.189 0.4486 0.2466 -0.03991
+39 -2.436 0.8822 -0.2016 -0.009961
+40 -2.171 -0.2927 0.1699 0.00624
+41 -2.287 -0.468 -0.04074 0.1057
+42 -1.872 2.328 0.1945 0.2917
+43 -2.558 0.4538 -0.3136 -0.06741
+44 -1.964 -0.4974 -0.3148 0.1755
+45 -2.133 -1.171 -0.2528 -0.1532
+46 -2.075 0.6919 0.05656 0.1403
+47 -2.381 -1.151 -0.0621 -0.1542
+48 -2.398 0.3624 -0.1469 -0.04948
+49 -2.227 -1.025 0.1766 -0.01644
+50 -2.206 -0.03224 0.1466 0.04878
+51 1.104 -0.8631 0.6856 0.03498
+52 0.7325 -0.5986 0.09407 0.004454
+53 1.242 -0.6148 0.5548 0.009673
+54 0.3973 1.758 0.01857 0.0674
+55 1.073 0.2118 0.3974 0.1055
+56 0.3845 0.5911 -0.1268 -0.2405
+57 0.7487 -0.7787 -0.1487 -0.0783
+58 -0.4979 1.849 -0.2556 -0.03939
+59 0.9262 -0.03033 0.5955 -0.0291
+60 0.004968 1.029 -0.5429 -0.02835
+61 -0.1247 2.658 0.03981 0.01614
+62 0.4387 0.05888 -0.2067 0.03985
+63 0.5516 1.773 0.7614 0.04835
+64 0.7172 0.1854 0.0673 -0.1646
+65 -0.03726 0.4328 -0.1981 0.109
+66 0.8759 -0.51 0.5035 0.1051
+67 0.348 0.1906 -0.4928 -0.1921
+68 0.1534 0.7907 0.2986 -0.2043
+69 1.215 1.633 0.4794 0.2282
+70 0.1569 1.303 0.1686 -0.05041
+71 0.7383 -0.4025 -0.6168 -0.08445
+72 0.4724 0.4166 0.2627 0.1142
+73 1.228 0.9409 0.3667 -0.008448
+74 0.6294 0.4168 0.29 -0.2735
+75 0.7005 0.06349 0.4448 0.04408
+76 0.8735 -0.2507 0.4721 0.1021
+77 1.254 0.08262 0.7268 0.04083
+78 1.358 -0.3288 0.2615 0.06701
+79 0.6621 0.2243 -0.08737 -0.03635
+80 -0.04728 1.057 0.3153 0.06601
+81 0.1215 1.564 0.1452 -0.007875
+82 0.01412 1.573 0.2366 -0.03115
+83 0.236 0.7759 0.148 0.02446
+84 1.057 0.6369 -0.1068 -0.1834
+85 0.2214 0.2808 -0.6676 -0.2556
+86 0.4318 -0.8551 -0.4507 -0.1111
+87 1.049 -0.5222 0.3961 0.0373
+88 1.036 1.392 0.6854 0.1391
+89 0.06707 0.2126 -0.2941 -0.1475
+90 0.2754 1.33 -0.09344 0.009948
+91 0.2723 1.119 -0.09817 -0.2698
+92 0.6232 -0.02754 0.0193 -0.1477
+93 0.33 0.9889 0.196 0.007627
+94 -0.3736 2.018 -0.1122 0.02108
+95 0.2829 0.854 -0.1341 -0.1069
+96 0.08905 0.1749 -0.1314 -0.2301
+97 0.2244 0.3805 -0.1588 -0.1326
+98 0.5739 0.1537 0.27 -0.01941
+99 -0.457 1.539 -0.1961 0.2009
+100 0.2522 0.5959 -0.09475 -0.0583
+101 1.848 -0.8717 -1.003 -0.05107
+102 1.153 0.7013 -0.5315 -0.04041
+103 2.206 -0.5545 0.2055 0.0593
+104 1.439 0.05001 -0.1634 -0.2358
+105 1.868 -0.2912 -0.394 -0.01678
+106 2.754 -0.7884 0.5862 -0.1009
+107 0.3584 1.56 -0.991 -0.133
+108 2.303 -0.4095 0.6542 -0.2372
+109 2.002 0.7239 0.3941 -0.08499
+110 2.268 -1.921 -0.3925 0.1029
+111 1.366 -0.6939 -0.2833 0.107
+112 1.599 0.4282 -0.0233 0.05894
+113 1.884 -0.4143 -0.02455 0.1463
+114 1.253 1.167 -0.5821 0.09968
+115 1.464 0.4441 -1.004 0.275
+116 1.592 -0.677 -0.6367 0.1906
+117 1.471 -0.2532 -0.03666 -0.1554
+118 2.437 -2.557 0.1342 -0.2757
+119 3.309 0.002361 0.7069 0.04676
+120 1.254 1.718 0.2646 -0.06307
+121 2.04 -0.9074 -0.2319 0.1671
+122 0.9739 0.5712 -0.8295 0.02732
+123 2.898 -0.3978 0.8609 -0.1261
+124 1.329 0.4868 0.004707 0.1406
+125 1.704 -1.014 -0.296 -0.06274
+126 1.958 -1.003 0.4228 -0.2185
+127 1.172 0.3189 -0.1307 0.1257
+128 1.02 -0.06554 -0.338 -0.009069
+129 1.786 0.1933 -0.27 0.03121
+130 1.865 -0.5554 0.7175 -0.2076
+131 2.435 -0.2467 0.7302 -0.01679
+132 2.316 -2.626 0.4996 -0.2132
+133 1.86 0.1847 -0.3533 0.1
+134 1.111 0.296 0.1827 -0.1857
+135 1.197 0.8172 0.1632 -0.4884
+136 2.801 -0.8447 0.547 0.2963
+137 1.58 -1.072 -0.9434 0.03361
+138 1.347 -0.4223 -0.18 -0.2159
+139 0.9234 -0.01923 -0.4174 0.004744
+140 1.854 -0.6724 0.01482 0.1949
+141 2.016 -0.6104 -0.4259 0.2468
+142 1.903 -0.686 -0.1278 0.4692
+143 1.153 0.7013 -0.5315 -0.04041
+144 2.043 -0.8647 -0.3353 0.04428
+145 2.002 -1.049 -0.6293 0.2126
+146 1.871 -0.3828 -0.2545 0.3889
+147 1.558 0.9053 0.02538 0.2213
+148 1.521 -0.2668 -0.1793 0.1189
+149 1.376 -1.016 -0.9314 0.02415
+150 0.9593 0.02228 -0.5288 -0.1637
diff -r 07a608852925 -r 26c40d8e8fdc test-data/pca_out2.pdf
--- a/test-data/pca_out2.pdf Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/pca_out2.pdf Wed Mar 10 16:23:53 2010 -0500
@@ -2,8 +2,8 @@
%âãÏÓ\r
1 0 obj
<<
-/CreationDate (D:20100303130628)
-/ModDate (D:20100303130628)
+/CreationDate (D:20100310155829)
+/ModDate (D:20100310155829)
/Title (R Graphics Output)
/Producer (R 2.10.0)
/Creator (R)
diff -r 07a608852925 -r 26c40d8e8fdc test-data/pca_out3.tabular
--- a/test-data/pca_out3.tabular Wed Mar 10 16:11:59 2010 -0500
+++ b/test-data/pca_out3.tabular Wed Mar 10 16:23:53 2010 -0500
@@ -1,159 +1,159 @@
#Component 1 2 3 4
-#Std. deviation 0.490539105967 0.153379073796 0.279285544512 2.04857881547
-#Proportion of variance explained 0.0530155678505 0.00518308545019 0.017185139525 0.924616207174
+#Std. deviation 0.4905 0.1534 0.2793 2.049
+#Proportion of variance explained 0.05302 0.005183 0.01719 0.9246
#Loadings 1 2 3 4
-c1 0.361589677381 -0.656539883286 0.580997279828 0.317254547169
-c2 -0.0822688898922 -0.729712371326 -0.596418087938 -0.324094352418
-c3 0.856572105291 0.175767403429 -0.072524075487 -0.47971898733
-c4 0.358843926248 0.074706470135 -0.549060910727 0.751120560381
+c1 0.3616 -0.6565 0.581 0.3173
+c2 -0.08227 -0.7297 -0.5964 -0.3241
+c3 0.8566 0.1758 -0.07252 -0.4797
+c4 0.3588 0.07471 -0.5491 0.7511
#Scores 1 2 3 4
-1 -2.6842071251 -0.326607314764 0.021511837002 0.00100615724154
-2 -2.71539061563 0.169556847556 0.203521425005 0.0996024240168
-3 -2.88981953962 0.137345609605 -0.024709240999 0.0193045428325
-4 -2.74643719731 0.311124315752 -0.0376719752853 -0.0759552741085
-5 -2.72859298183 -0.333924563568 -0.0962296997746 -0.0631287327171
-6 -2.27989736101 -0.747782713225 -0.174325619016 -0.0271468036979
-7 -2.82089068218 0.0821045110247 -0.264251085191 -0.0500996250629
-8 -2.62648199332 -0.17040534896 0.0158015102643 -0.0462817609665
-9 -2.88795856534 0.570798026332 -0.0273354061145 -0.0266154143257
-10 -2.67384468672 0.106691703753 0.191533299736 -0.0558909659961
-11 -2.50652678934 -0.651935013673 0.0692749958139 -0.0166082478245
-12 -2.61314271827 -0.0215206319603 -0.10765035325 -0.157704569133
-13 -2.787433976 0.227740188871 0.200327788095 -0.00723508673812
-14 -3.22520044627 0.503279909485 -0.0684136291723 -0.0219466641234
-15 -2.64354321694 -1.18619489941 0.14450570401 0.156980961517
-16 -2.3838693238 -1.34475434456 -0.28373066394 0.00192618170967
-17 -2.62252620313 -0.818089674597 -0.145315988822 0.164740791234
-18 -2.64832273248 -0.319136667751 -0.0333942540707 0.0761182132796
-19 -2.19907796143 -0.879244088092 0.114521464798 0.0253269396564
-20 -2.58734618892 -0.520473638806 -0.219572088001 -0.0690819911788
-21 -2.31053170131 -0.397867821589 0.233695607098 -0.0153237395651
-22 -2.5432349073 -0.44003175466 -0.21483637028 0.0384395001011
-23 -3.2158576949 -0.141615571626 -0.299618981511 0.00185704334745
-24 -2.30312853766 -0.10552267843 -0.0456800412745 0.14724549964
-25 -2.35617108668 0.0312095890683 -0.129407575896 -0.301620265332
-26 -2.50791722684 0.139056339913 0.247116337891 0.0353840812677
-27 -2.46905599755 -0.13788731459 -0.10126307943 0.0559704523767
-28 -2.56239094684 -0.37468456275 0.072359157436 -0.0152402867746
-29 -2.63982126838 -0.31929006596 0.139253373779 0.0651410472002
-30 -2.63284790803 0.190075830634 -0.046466463645 -0.124611153366
-31 -2.5884620513 0.197393079438 0.0712750731315 -0.0604762634078
-32 -2.41007733712 -0.418080008248 0.13838824005 0.230844169977
-33 -2.6476366734 -0.81998263256 -0.230585604254 -0.284808954263
-34 -2.59715947708 -1.10002192801 -0.163581912624 -0.0989580705836
-35 -2.67384468672 0.106691703753 0.191533299736 -0.0558909659961
-36 -2.86699984693 -0.0771930957236 0.156842350498 0.162452805716
-37 -2.62522846468 -0.606800008422 0.261163156482 0.175879874842
-38 -2.67384468672 0.106691703753 0.191533299736 -0.0558909659961
-39 -2.98184266485 0.480250048856 -0.0797248073596 -0.0110529508345
-40 -2.59032302559 -0.236059337289 0.0739012382471 -0.0145563062497
-41 -2.77013891075 -0.271059419765 -0.0842415745048 0.0923646572958
-42 -2.85221108157 0.93286536747 0.340961491107 0.322650606613
-43 -2.99829644283 0.334307574591 -0.199008424947 -0.0758718213181
-44 -2.40551410128 -0.195917257696 -0.270717070369 0.173785129211
-45 -2.20883295418 -0.442696030421 -0.303487809268 -0.185857530073
-46 -2.71566519075 0.242681482898 0.0905156059501 0.142989025338
-47 -2.53757337101 -0.510367545477 -0.171918404477 -0.19216594595
-48 -2.84032129683 0.220576338276 -0.0900613765304 -0.0603928106173
-49 -2.54268575708 -0.586281025344 0.0111752678312 -0.0483337025413
-50 -2.70391231486 -0.115010852171 0.0826957266068 0.0340995730083
-51 1.28479458785 -0.685439186133 0.406129553077 0.0192901168553
-52 0.932410752983 -0.319198089834 0.0171299092051 -6.75794170965e-06
-53 1.46406132278 -0.504189832972 0.338260727718 -0.000857644047617
-54 0.180967206348 0.825603943576 0.177082856393 0.0957844483716
-55 1.08713448721 -0.0753903892888 0.306544464814 0.113384539009
-56 0.640436749523 0.41732348297 -0.0411887693536 -0.242671312069
-57 1.09522370994 -0.283891210938 -0.170022533741 -0.0849733893283
-58 -0.751467140648 1.00110751297 -0.0156721942382 -0.0165105921546
-59 1.04329778071 -0.228956908777 0.414814566149 -0.0375235535918
-60 -0.0101900707279 0.720574866702 -0.283437246254 -0.00594570197509
-61 -0.511086195895 1.26249195386 0.266489953822 0.0489088060634
-62 0.511098060683 0.102284105046 -0.132327890475 0.0501005351566
-63 0.262335756153 0.547893298025 0.691941578319 0.0614849890834
-64 0.984044545169 0.124360420222 0.0621574276135 -0.169010669871
-65 -0.174864001966 0.25181557108 -0.093658638192 0.12494088657
-66 0.927572942033 -0.468236205043 0.313229400569 0.100438884146
-67 0.659592789056 0.35197629106 -0.328384297069 -0.188991525193
-68 0.23454058626 0.331921829362 0.270280670836 -0.211984995292
-69 0.942361707399 0.54182225815 0.497348541178 0.260636685043
-70 0.0432464003287 0.581489446612 0.232963556483 -0.0395611807383
-71 1.11624072375 0.0842140138784 -0.459844226573 -0.0772135596107
-72 0.35678656783 0.0668238279414 0.227472180321 0.124090000464
-73 1.29646885029 0.327561519795 0.347513212584 0.00324623910204
-74 0.920502648909 0.182390363328 0.231611418553 -0.286825346705
-75 0.714008213643 -0.150379153148 0.320372332829 0.0429412331735
-76 0.899640863284 -0.329610979582 0.31477148138 0.101122864671
-77 1.33104141885 -0.244669520602 0.521244924738 0.0375050496558
-78 1.55739627207 -0.267392584813 0.164638490852 0.0703530951036
-79 0.812455548998 0.162331574879 -0.0363435763445 -0.0296802710837
-80 -0.307334755663 0.365086612766 0.315337196939 0.0765303776306
-81 -0.0703428889493 0.702537931731 0.241758044842 0.00909469851961
-82 -0.191884492103 0.677490544374 0.303916543464 -0.0180454587855
-83 0.134994950451 0.311709642703 0.174973303788 0.0341829142499
-84 1.37873698278 0.421205138215 -0.0154804951217 -0.17758073696
-85 0.58727485358 0.483284267717 -0.444583753035 -0.252442434627
-86 0.807205496677 -0.195053963771 -0.389458711386 -0.116615391255
-87 1.22042896624 -0.408035337001 0.23656608685 0.0316352439847
-88 0.812867790369 0.370678998319 0.612871050061 0.157700491174
-89 0.24519516169 0.266728035662 -0.189562484729 -0.147328042337
-90 0.164513428369 0.679661469311 0.0577992388057 0.030965577888
-91 0.463030988871 0.669526546536 0.0240538908898 -0.268443508324
-92 0.890160445651 0.0338124427468 0.00976802636843 -0.15344820638
-93 0.22887904997 0.402257620179 0.227362705033 0.0186204507587
-94 -0.707081283921 1.00842476178 0.102069342538 0.047624297804
-95 0.355533039187 0.503218487403 -0.0178894658965 -0.0980716353447
-96 0.331126947333 0.211180140663 -0.0838090732225 -0.238686542391
-97 0.375238228947 0.291622024809 -0.0790733555014 -0.131165051111
-98 0.641690278167 -0.0190711764913 0.204172876864 -0.0205096762602
-99 -0.908463333123 0.751568725169 0.00773658450698 0.233558634274
-100 0.297807907407 0.347016521599 -0.0121791391589 -0.0507837171367
-101 2.53172698044 0.0118422366403 -0.758458651528 -0.0325995685145
-102 1.41407222517 0.574925055912 -0.296398224305 -0.0156954782795
-103 2.61648460828 -0.341935286987 0.112141370554 0.0659560495264
-104 1.97081494591 0.181125694705 -0.10653914865 -0.236858624882
-105 2.34975798395 0.0418825496548 -0.284110680866 -0.00131272400361
-106 3.39687992068 -0.54716804623 0.351873157627 -0.11121996802
-107 0.519383245085 1.19135168905 -0.546685531125 -0.0987984199258
-108 2.93200509699 -0.352377006181 0.423691278337 -0.255407368844
-109 2.31967279387 0.245548170607 0.349922183359 -0.0762628625132
-110 2.91813423364 -0.780380629372 -0.421738933614 0.107729319479
-111 1.66193494702 -0.242038401038 -0.242815263468 0.119447584568
-112 1.80234045266 0.216154606627 0.037695328494 0.0787134525557
-113 2.16537886295 -0.215280283373 -0.0331481831991 0.162667280308
-114 1.34459421751 0.776415425177 -0.282868018224 0.140481892258
-115 1.58526729931 0.539307053847 -0.630570488462 0.327455366669
-116 1.90474357821 -0.118818990983 -0.480138079766 0.217114500499
-117 1.94924878186 -0.0407302594278 -0.0427290939294 -0.157845251957
-118 3.48876537966 -1.17154454426 -0.129320083362 -0.311629837933
-119 3.79468686121 -0.253265570973 0.516970715994 0.056451643541
-120 1.29832982457 0.761013936522 0.344887047469 -0.0426737180561
-121 2.42816725902 -0.376781971254 -0.218649070047 0.183854179151
-122 1.19809737227 0.605578961765 -0.51264076504 0.0595000305491
-123 3.4992654843 -0.456773466964 0.576910186722 -0.137759597591
-124 1.38766825018 0.20403098657 0.0635113217786 0.163763536733
-125 2.27585364931 -0.333386525757 -0.284678152661 -0.0622302776004
-126 2.6141938307 -0.558366950279 0.208423346619 -0.240445433088
-127 1.25762518293 0.179136997423 -0.0469778074492 0.147600545507
-128 1.29066964774 0.116425251829 -0.231613560568 0.00308432157359
-129 2.12285398051 0.210854884549 -0.153515885091 0.052612433191

11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/07a608852925
changeset: 3508:07a608852925
user: fubar: ross Lazarus at gmail period com
date: Wed Mar 10 16:11:59 2010 -0500
description:
Remove redundant composite_extensions - not needed with new composite output test framework
diffstat:
test/base/twilltestcase.py | 2 --
1 files changed, 0 insertions(+), 2 deletions(-)
diffs (12 lines):
diff -r 70930ea26347 -r 07a608852925 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Mar 10 16:09:37 2010 -0500
+++ b/test/base/twilltestcase.py Wed Mar 10 16:11:59 2010 -0500
@@ -23,8 +23,6 @@
log = logging.getLogger( __name__ )
class TwillTestCase( unittest.TestCase ):
- composite_extensions = ['html','lped','pbed','fped','pphe','eigenstratgeno','eset','affybatch','malist','test-data' ]
-
def setUp( self ):
# Security helper
details: http://www.bx.psu.edu/hg/galaxy/rev/70930ea26347
changeset: 3507:70930ea26347
user: fubar: ross Lazarus at gmail period com
date: Wed Mar 10 16:09:37 2010 -0500
description:
Cleanup genetics.py
Allow download of composite objects from libraries so content is in the archive
Allow download of composite objects from histories ditto (current default is zip)
Small fix to missing param in download from library
Small fix to twilltestcase when testing a tool with hidden form fields
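The library/history download change above amounts to writing the primary (fake HTML) file plus everything under the dataset's extra_files_path into a single archive. A minimal standalone sketch of that idea, with illustrative names only (the real code is the archive_composite_dataset method added to lib/galaxy/web/controllers/dataset.py in the diff below):

import os, zipfile

def zip_composite_dataset( primary_file, extra_files_path, archive_path ):
    # Bundle the primary HTML file and all of the component files kept
    # in extra_files_path into one zip archive for download.
    archive = zipfile.ZipFile( archive_path, 'w', zipfile.ZIP_DEFLATED )
    try:
        archive.write( primary_file, os.path.basename( primary_file ) )
        for fname in os.listdir( extra_files_path ):
            archive.write( os.path.join( extra_files_path, fname ), fname )
    finally:
        archive.close()
    return archive_path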
diffstat:
lib/galaxy/datatypes/genetics.py | 468 ++++++++++++++------------
lib/galaxy/tools/parameters/grouping.py | 10 +-
lib/galaxy/web/controllers/dataset.py | 142 +++++++-
lib/galaxy/web/controllers/library_common.py | 1 +
test/base/twilltestcase.py | 57 +--
5 files changed, 414 insertions(+), 264 deletions(-)
diffs (1190 lines):
diff -r 9701e5ee128d -r 70930ea26347 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Wed Mar 10 14:25:34 2010 -0500
+++ b/lib/galaxy/datatypes/genetics.py Wed Mar 10 16:09:37 2010 -0500
@@ -1,5 +1,6 @@
"""
rgenetics datatypes
+Use at your peril
Ross Lazarus
for the rgenetics and galaxy projects
@@ -10,6 +11,7 @@
ross lazarus for rgenetics
august 20 2007
"""
+
import logging, os, sys, time, tempfile, shutil, string, glob
import data
from galaxy import util
@@ -24,186 +26,172 @@
from galaxy.datatypes.interval import Interval
from galaxy.util.hash_util import *
-log = logging.getLogger(__name__)
+gal_Log = logging.getLogger(__name__)
+verbose = False
-class GenomeGraphs(Interval):
+class GenomeGraphs( Tabular ):
+ """
+ Tab delimited data containing a marker id and any number of numeric values
+ """
- """gg version viewable at ucsc of Gff format"""
- file_ext = "gg"
- column_names = [ 'Seqname', 'Source', 'Feature', 'Start', 'End', 'Score', 'Strand', 'Frame', 'Group' ]
+ MetadataElement( name="markerCol", default=1, desc="Marker ID column", param=metadata.ColumnParameter )
+ MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True )
+ MetadataElement( name="column_types", default=[], desc="Column types", readonly=True, visible=False )
+ file_ext = 'gg'
- """Add metadata elements"""
- MetadataElement( name="columns", default=9, desc="Number of columns", readonly=True, visible=False )
- MetadataElement( name="column_types", default=['str','str','str','int','int','int','str','str','str'], param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False )
- MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
- MetadataElement( name="startCol", default=4, desc="Start column", param=metadata.ColumnParameter )
- MetadataElement( name="endCol", default=5, desc="End column", param=metadata.ColumnParameter )
- MetadataElement( name="strandCol", desc="Strand column (click box & select)", param=metadata.ColumnParameter, optional=True, no_value=0 )
- ###do we need to repeat these? they are the same as should be inherited from interval type
+ def __init__(self, **kwd):
+ """
+ Initialize gg datatype, by adding UCSC display apps
+ """
+ Tabular.__init__(self, **kwd)
+ self.add_display_app ( 'ucsc', 'Genome Graph', 'as_ucsc_display_file', 'ucsc_links' )
+
- def __init__(self, **kwd):
- """Initialize datatype, by adding GBrowse display app"""
- Interval.__init__(self, **kwd)
- self.add_display_app ( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
+ def set_meta(self,dataset,**kwd):
+ Tabular.set_meta( self, dataset, **kwd)
+ dataset.metadata.markerCol = 1
+ header = file(dataset.file_name,'r').readlines()[0].strip().split('\t')
+ dataset.metadata.columns = len(header)
+ t = ['numeric' for x in header]
+ t[0] = 'string'
+ dataset.metadata.column_types = t
+ return True
+
def as_ucsc_display_file( self, dataset, **kwd ):
- return open( dataset.file_name )
- def set_meta( self, dataset, overwrite = True, **kwd ):
- i = 0
- for i, line in enumerate( file ( dataset.file_name ) ):
- line = line.rstrip('\r\n')
- if line and not line.startswith( '#' ):
- elems = line.split( '\t' )
- if len(elems) == 9:
- try:
- int( elems[3] )
- int( elems[4] )
- break
- except:
- pass
- Interval.set_meta( self, dataset, overwrite = overwrite, skip = i )
+ """
+ Returns file
+ """
+ return file(dataset.file_name,'r')
+
+ def ucsc_links( self, dataset, type, app, base_url ):
+ """
+ from the ever-helpful angie hinrichs angie(a)soe.ucsc.edu
+ a genome graphs call looks like this
+ http://genome.ucsc.edu/cgi-bin/hgGenome?clade=mammal&org=Human&db=hg18&hgGe…
+ &hgGenome_dataSetDescription=test&hgGenome_formatType=best%20guess&hgGenome_markerType=best%20guess
+ &hgGenome_columnLabels=best%20guess&hgGenome_maxVal=&hgGenome_labelVals=
+ &hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=http://galaxy.esphealth.org/datasets/333/display/index
+ &hgGenome_doSubmitUpload=submit
+ Galaxy gives this for an interval file
+ http://genome.ucsc.edu/cgi-bin/hgTracks?db=hg18&position=chr1:1-1000&hgt.cu…
+ http%3A%2F%2Fgalaxy.esphealth.org%2Fdisplay_as%3Fid%3D339%26display_app%3Ducsc
+ """
+ ret_val = []
+ ggtail = 'hgGenome_doSubmitUpload=submit'
+ if not dataset.dbkey:
+ dataset.dbkey = 'hg18' # punt!
+ if dataset.has_data:
+ for site_name, site_url in util.get_ucsc_by_build(dataset.dbkey):
+ if site_name in app.config.ucsc_display_sites:
+ site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
+ internal_url = "%s" % url_for( controller='dataset',
+ dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
+ if base_url.startswith( 'https://' ):
+ base_url = base_url.replace( 'https', 'http', 1 )
+ display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type)
+ display_url = urllib.quote_plus( display_url )
+ # was display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
+ #redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
+ sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
+ #sl.append("&hgt.customText=%s")
+ sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
+ sl.append("&hgGenome_formatType=best guess&hgGenome_markerType=best guess")
+ sl.append("&hgGenome_columnLabels=first row&hgGenome_maxVal=&hgGenome_labelVals=")
+ sl.append("&hgGenome_doSubmitUpload=submit")
+ sl.append("&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=%s" % display_url)
+ s = ''.join(sl)
+ s = urllib.quote_plus(s)
+ redirect_url = s
+ log.debug('## rg gg ucsc rdurl=%s; s = %s' % (redirect_url,s))
+ link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
+ ret_val.append( (site_name, link) )
+ return ret_val
+
def make_html_table( self, dataset, skipchars=[] ):
- """Create HTML table, used for displaying peek"""
+ """
+ Create HTML table, used for displaying peek
+ """
+ npeek = 5
out = ['<table cellspacing="0" cellpadding="3">']
- comments = []
+ f = open(dataset.file_name,'r')
+ d = [f.next() for x in range(npeek)]
+ hasheader = 0
+ try:
+ test = ['%f' % x for x in d[0][1:]] # first is name - see if starts all numerics
+ except:
+ hasheader = 1
try:
# Generate column header
out.append( '<tr>' )
- for i, name in enumerate( self.column_names ):
- out.append( '<th>%s.%s</th>' % ( str( i+1 ), name ) )
- out.append( self.make_html_peek_rows( dataset, skipchars=skipchars ) )
+ if hasheader:
+ for i, name in enumerate(d[0].split() ):
+ out.append( '<th>%s.%s</th>' % ( str( i+1 ), name ) )
+ d.pop(0)
+ out.append('</tr>')
+ for row in d:
+ out.append('<tr>')
+ out.append(''.join(['<td>%s</td>' % x for x in row.split()]))
+ out.append('</tr>')
out.append( '</table>' )
out = "".join( out )
except Exception, exc:
out = "Can't create peek %s" % exc
return out
- def get_estimated_display_viewport( self, dataset ):
+
+ def validate( self, dataset ):
"""
- Return a chrom, start, stop tuple for viewing a file. There are slight differences between gff 2 and gff 3
- formats. This function should correctly handle both...
+ Validate a gg file - all numeric after header row
"""
- if True or (dataset.has_data() and dataset.state == dataset.states.OK):
- try:
- seqid = ''
- start = 2147483647 # Maximum value of a signed 32 bit integer ( 2**31 - 1 )
- stop = 0
- for i, line in enumerate( file( dataset.file_name ) ):
- if i == 0: # track stuff there
- continue
- line = line.rstrip( '\r\n' )
- if not line:
- continue
- if not line.startswith( '#' ):
- elems = line.split( '\t' )
- if not seqid:
- # We can only set the viewport for a single chromosome
- seqid = elems[0]
- if seqid == elems[0]:
- # Make sure we have not spanned chromosomes
- start = min( start, int( elems[3] ) )
- stop = max( stop, int( elems[4] ) )
- else:
- # We've spanned a chromosome
- break
- if i > 10: # span 10 features
- break
- except:
- seqid, start, stop = ( '', '', '' )
- return ( seqid, str( start ), str( stop ) )
- else:
- return ( '', '', '' )
- def gbrowse_links( self, dataset, type, app, base_url ):
- ret_val = []
- if dataset.has_data:
- viewport_tuple = self.get_estimated_display_viewport( dataset )
- seqid = viewport_tuple[0]
- start = viewport_tuple[1]
- stop = viewport_tuple[2]
- if seqid and start and stop:
- for site_name, site_url in util.get_gbrowse_sites_by_build( dataset.dbkey ):
- if site_name in app.config.gbrowse_display_sites:
- link = "%s?start=%s&stop=%s&ref=%s&dbkey=%s" % ( site_url, start, stop, seqid, dataset.dbkey )
- ret_val.append( ( site_name, link ) )
- return ret_val
- def ucsc_links( self, dataset, type, app, base_url ):
- ret_val = []
- if dataset.has_data:
- viewport_tuple = self.get_estimated_display_viewport(dataset)
- if viewport_tuple:
- chrom = viewport_tuple[0]
- start = viewport_tuple[1]
- stop = viewport_tuple[2]
- if start == '' or int(start) < 1:
- start='1'
- if stop == '' or int(stop) <= start:
- stop = '%d' % (int(start) + 10000)
- for site_name, site_url in util.get_ucsc_by_build(dataset.dbkey):
- if site_name in app.config.ucsc_display_sites:
- # HACK: UCSC doesn't support https, so force http even
- # if our URL scheme is https. Making this work
- # requires additional hackery in your upstream proxy.
- # If UCSC ever supports https, remove this hack.
- internal_url = "%s" % url_for( controller='dataset',
- dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
- if base_url.startswith( 'https://' ):
- base_url = base_url.replace( 'https', 'http', 1 )
- display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type) )
- redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
- link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
- ret_val.append( (site_name, link) )
- else:
- log.debug('@@@ gg ucsc_links - no viewport_tuple')
- return ret_val
+ errors = list()
+ infile = open(dataset.file_name, "r")
+ header= infile.next() # header
+ for i,row in enumerate(infile):
+ ll = row.strip().split('\t')[1:] # first is alpha feature identifier
+ badvals = []
+ for j,x in enumerate(ll):
+ try:
+ x = float(x)
+ except:
+ badval.append('col%d:%s' % (j+1,x))
+ if len(badvals) > 0:
+ errors.append('row %d, %s' % (' '.join(badvals)))
+ return errors
+
def sniff( self, filename ):
"""
- Determines whether the file is in gff format
-
- GFF lines have nine required fields that must be tab-separated.
+ Determines whether the file is in gg format
"""
f = open(filename,'r')
- headers = f.readline().split
- if headers[0].lower() == 'track':
- headers = f.readline.split()
+ headers = f.readline().split()
+ rows = [f.readline().split()[1:] for x in range(3)] # small sample
#headers = get_headers( filename, '\t' )
- try:
- if len(headers) < 2:
- return False
- for hdr in headers:
- if hdr and hdr[0].startswith( '##gff-version' ) and hdr[0].find( '2' ) < 0:
- return False
- if hdr and hdr[0] and not hdr[0].startswith( '#' ):
- if len(hdr) != 9:
- return False
- try:
- int( hdr[3] )
- int( hdr[4] )
- except:
- return False
- if hdr[5] != '.':
- try:
- score = int(hdr[5])
- except:
- return False
- if (score < 0 or score > 1000):
- return False
- if hdr[6] not in data.valid_strand:
- return False
- return True
- except:
- return False
+ for row in rows:
+ try:
+ nums = [float(x) for x in row] # first col has been removed
+ except:
+ return false
+ return true
+
class rgTabList(Tabular):
- """
+ """
for sampleid and for featureid lists of exclusions or inclusions in the clean tool
featureid subsets on statistical criteria -> specialized display such as gg
"""
file_ext = "rgTList"
+
def __init__(self, **kwd):
- """Initialize featurelistt datatype"""
+ """
+ Initialize featurelistt datatype
+ """
Tabular.__init__( self, **kwd )
self.column_names = []
+
def make_html_table( self, dataset, skipchars=[] ):
- """Create HTML table, used for displaying peek"""
+ """
+ Create HTML table, used for displaying peek
+ """
out = ['<table cellspacing="0" cellpadding="3">']
comments = []
try:
@@ -222,8 +210,9 @@
out = "Can't create peek %s" % exc
return out
+
class rgSampleList(rgTabList):
- """
+ """
for sampleid exclusions or inclusions in the clean tool
output from QC eg excess het, gender error, ibd pair member,eigen outlier,excess mendel errors,...
since they can be uploaded, should be flexible
@@ -240,9 +229,8 @@
self.column_names[0] = 'FID'
self.column_names[1] = 'IID'
# this is what Plink wants as at 2009
+
def sniff(self,filename):
- """
- """
infile = open(dataset.file_name, "r")
header= infile.next() # header
if header[0] == 'FID' and header[1] == 'IID':
@@ -264,12 +252,17 @@
rgTabList.__init__( self, **kwd )
for i,s in enumerate(['#FeatureId', 'Chr', 'Genpos', 'Mappos']):
self.column_names[i] = s
+
class Rgenetics(Html):
"""
- class to use for rgenetics
+ base class to use for rgenetics datatypes
+ derived from html - composite datatype elements
+ stored in extra files path
"""
- MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="rgenetics", readonly=True, set_in_upload=True)
+
+ MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="rgenetics",
+ readonly=True, set_in_upload=True)
composite_type = 'auto_primary_file'
allow_datatype_change = False
@@ -279,16 +272,22 @@
rval = ['<html><head><title>Rgenetics Galaxy Composite Dataset </title></head><p/>']
rval.append('<div>This composite dataset is composed of the following files:<p/><ul>')
for composite_name, composite_file in self.get_composite_files( dataset = dataset ).iteritems():
+ fn = composite_name
opt_text = ''
if composite_file.optional:
opt_text = ' (optional)'
- rval.append( '<li><a href="%s" type="application/binary">%s</a>%s' % ( composite_name, composite_name, opt_text ) )
+ if composite_file.get('description'):
+ rval.append( '<li><a href="%s" type="application/binary">%s (%s)</a>%s</li>' % ( fn, fn, composite_file.get('description'), opt_text ) )
+ else:
+ rval.append( '<li><a href="%s" type="application/binary">%s</a>%s</li>' % ( fn, fn, opt_text ) )
rval.append( '</ul></div></html>' )
return "\n".join( rval )
+
def regenerate_primary_file(self,dataset):
"""
cannot do this until we are setting metadata
"""
+ guessmt = {'.log':'text/plain','.ped':'text/plain', '.map':'text/plain','.out':'text/plain','.in':'text/plain'}
def fix(oldpath,newbase):
old,e = os.path.splitext(oldpath)
head,rest = os.path.split(old)
@@ -301,44 +300,45 @@
efp = dataset.extra_files_path
flist = os.listdir(efp)
proper_base = bn
- rval = ['<html><head><title>Files for Composite Dataset %s</title></head><p/>Comprises the following files:<p/><ul>' % (bn)]
+ rval = ['<html><head><title>Files for Composite Dataset %s</title></head><p/>Composite %s contains the following files:<p/><ul>' % (dataset.name,dataset.name)]
for i,fname in enumerate(flist):
newpath = fix(os.path.join(efp,fname),proper_base)
sfname = os.path.split(newpath)[-1]
- rval.append( '<li><a href="%s">%s</a>' % ( sfname, sfname ) )
+ f,e = os.path.splitext(fname)
+ mt = guessmt.get(e,'application/binary')
+ rval.append( '<li><a href="%s" mimetype="%s">%s</a></li>' % ( sfname, mt, sfname) )
rval.append( '</ul></html>' )
f = file(dataset.file_name,'w')
f.write("\n".join( rval ))
f.write('\n')
f.close()
+
def set_meta( self, dataset, **kwd ):
+
"""
for lped/pbed eg
+
"""
+ Html.set_meta( self, dataset, **kwd )
if kwd.get('overwrite') == False:
- #log.debug('@@@ rgenetics set_meta called with overwrite = False')
+ if verbose:
+ gal_Log.debug('@@@ rgenetics set_meta called with overwrite = False')
return True
try:
efp = dataset.extra_files_path
except:
- #log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0], dataset.name))
+ if verbose:
+ gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0], dataset.name))
return False
try:
flist = os.listdir(efp)
except:
- #log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0],dataset.name))
+ if verbose: gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0],dataset.name))
return False
if len(flist) == 0:
- #log.debug('@@@rgenetics set_meta failed - %s efp %s is empty?' % (dataset.name,efp))
+ if verbose:
+ gal_Log.debug('@@@rgenetics set_meta failed - %s efp %s is empty?' % (dataset.name,efp))
return False
- bn = None
- for f in flist:
- n,e = os.path.splitext(f)[0]
- if (not bn) and e in ('.ped','.map','.bim','.fam'):
- bn = n
- dataset.metadata.base_name = bn
- if not bn:
- bn = '?'
self.regenerate_primary_file(dataset)
if not dataset.info:
dataset.info = 'Galaxy genotype datatype object'
@@ -346,22 +346,23 @@
dataset.blurb = 'Composite file - Rgenetics Galaxy toolkit'
return True
+
class SNPMatrix(Rgenetics):
"""
- fake class to distinguish different species of Rgenetics data collections
+ BioC SNPMatrix Rgenetics data collections
"""
file_ext="snpmatrix"
- def set_peek( self, dataset, is_multi_byte=False ):
+ def set_peek( self, dataset, **kwd ):
if not dataset.dataset.purged:
dataset.peek = "Binary RGenetics file"
dataset.blurb = data.nice_size( dataset.get_size() )
else:
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
+
def sniff(self,filename):
- """
- need to check the file header hex code
+ """ need to check the file header hex code
"""
infile = open(dataset.file_name, "b")
head = infile.read(16)
@@ -371,9 +372,10 @@
else:
return True
+
class Lped(Rgenetics):
"""
- fake class to distinguish different species of Rgenetics data collections
+ linkage pedigree (ped,map) Rgenetics data collections
"""
file_ext="lped"
@@ -382,25 +384,24 @@
self.add_composite_file( '%s.ped', description = 'Pedigree File', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.map', description = 'Map File', substitute_name_with_metadata = 'base_name', is_binary = True )
+
class Pphe(Rgenetics):
"""
- fake class to distinguish different species of Rgenetics data collections
+ Plink phenotype file - header must have FID\tIID... Rgenetics data collections
"""
file_ext="pphe"
def __init__( self, **kwd ):
Rgenetics.__init__(self, **kwd)
- self.add_composite_file( '%s.pphe', description = 'Plink Phenotype File', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.pphe', description = 'Plink Phenotype File', substitute_name_with_metadata = 'base_name', is_binary = True )
-class Lmap(Rgenetics):
- """
- fake class to distinguish different species of Rgenetics data collections
- """
- file_ext="lmap"
+
+
class Fphe(Rgenetics):
"""
- fake class to distinguish different species of Rgenetics data collections
+ fbat pedigree file - mad format with ! as first char on header row
+ Rgenetics data collections
"""
file_ext="fphe"
@@ -410,7 +411,7 @@
class Phe(Rgenetics):
"""
- fake class to distinguish different species of Rgenetics data collections
+ Phenotype file
"""
file_ext="phe"
@@ -418,9 +419,12 @@
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.phe', description = 'Phenotype File', substitute_name_with_metadata = 'base_name' )
+
+
class Fped(Rgenetics):
"""
- fake class to distinguish different species of Rgenetics data collections
+ FBAT pedigree format - single file, map is header row of rs numbers. Strange.
+ Rgenetics data collections
"""
file_ext="fped"
@@ -428,9 +432,10 @@
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.fped', description = 'FBAT format pedfile', substitute_name_with_metadata = 'base_name' )
+
class Pbed(Rgenetics):
"""
- fake class to distinguish different species of Rgenetics data collections
+ Plink Binary compressed 2bit/geno Rgenetics data collections
"""
file_ext="pbed"
@@ -442,7 +447,9 @@
class Eigenstratgeno(Rgenetics):
"""
- fake class to distinguish different species of Rgenetics data collections
+ Eigenstrat format - may be able to get rid of this
+ if we move to shellfish
+ Rgenetics data collections
"""
file_ext="eigenstratgeno"
@@ -451,10 +458,13 @@
self.add_composite_file( '%s.eigenstratgeno', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.ind', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name', is_binary = True )
+
+
class Eigenstratpca(Rgenetics):
"""
- fake class to distinguish different species of Rgenetics data collections
+ Eigenstrat PCA file for case control adjustment
+ Rgenetics data collections
"""
file_ext="eigenstratpca"
@@ -462,18 +472,21 @@
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.eigenstratpca', description = 'Eigenstrat PCA file', substitute_name_with_metadata = 'base_name' )
+
class Snptest(Rgenetics):
"""
- fake class to distinguish different species of Rgenetics data collections
+ BioC snptest Rgenetics data collections
"""
file_ext="snptest"
+
class Pheno(Tabular):
"""
base class for pheno files
"""
file_ext = 'pheno'
+
class RexpBase( Html ):
"""
base class for BioC data structures in Galaxy
@@ -492,16 +505,19 @@
composite_type = 'auto_primary_file'
allow_datatype_change = False
+
def __init__( self, **kwd ):
Html.__init__(self,**kwd)
self.add_composite_file( '%s.pheno', description = 'Phenodata tab text file',
substitute_name_with_metadata = 'base_name', is_binary=True)
+
def generate_primary_file( self, dataset = None ):
- """
+ """
This is called only at upload to write the html file
cannot rename the datasets here - they come with the default unfortunately
"""
return '<html><head></head><body>AutoGenerated Primary File for Composite Dataset</body></html>'
+
def get_phecols(self, phenolist=[], maxConc=20):
"""
sept 2009: cannot use whitespace to split - make a more complex structure here
@@ -527,7 +543,7 @@
else:
for col,code in enumerate(row): # keep column order correct
if col >= totcols:
- log.warning('### get_phecols error in pheno file - row %d col %d (%s) longer than header %s' % (nrows, col, row, head))
+ gal_Log.warning('### get_phecols error in pheno file - row %d col %d (%s) longer than header %s' % (nrows, col, row, head))
else:
concordance[col].setdefault(code,0) # first one is zero
concordance[col][code] += 1
@@ -573,6 +589,8 @@
res = [('no usable phenotype columns found',[('?',0),]),]
return res
+
+
def get_pheno(self,dataset):
"""
expects a .pheno file in the extra_files_dir - ugh
@@ -591,12 +609,12 @@
else:
p = []
return '\n'.join(p)
- def set_peek( self, dataset, is_multi_byte=False ):
+
+ def set_peek( self, dataset, **kwd ):
"""
expects a .pheno file in the extra_files_dir - ugh
note that R is wierd and does not include the row.name in
- the header. why?
- """
+ the header. why?"""
if not dataset.dataset.purged:
pp = os.path.join(dataset.extra_files_path,'%s.pheno' % dataset.metadata.base_name)
try:
@@ -608,14 +626,18 @@
else:
dataset.peek = 'file does not exist\n'
dataset.blurb = 'file purged from disk'
+
def get_peek( self, dataset ):
- """expects a .pheno file in the extra_files_dir - ugh"""
+ """
+ expects a .pheno file in the extra_files_dir - ugh
+ """
pp = os.path.join(dataset.extra_files_path,'%s.pheno' % dataset.metadata.base_name)
try:
p = file(pp,'r').readlines()
except:
p = ['##failed to find %s' % pp]
return ''.join(p[:5])
+
def get_file_peek(self,filename):
"""
can't really peek at a filename - need the extra_files_path and such?
@@ -626,8 +648,10 @@
except:
pass
return ''.join(h[:5])
+
def regenerate_primary_file(self,dataset):
- """cannot do this until we are setting metadata
+ """
+ cannot do this until we are setting metadata
"""
bn = dataset.metadata.base_name
flist = os.listdir(dataset.extra_files_path)
@@ -640,28 +664,34 @@
f.write("\n".join( rval ))
f.write('\n')
f.close()
+
def init_meta( self, dataset, copy_from=None ):
- """Add metadata elements"""
if copy_from:
dataset.metadata = copy_from.metadata
+
def set_meta( self, dataset, **kwd ):
+
"""
NOTE we apply the tabular machinary to the phenodata extracted
from a BioC eSet or affybatch.
+
"""
+ Html.set_meta(self, dataset, **kwd)
try:
flist = os.listdir(dataset.extra_files_path)
except:
- #log.debug('@@@rexpression set_meta failed - no dataset?')
+ if verbose:
+ gal_Log.debug('@@@rexpression set_meta failed - no dataset?')
return False
- bn = None
- for f in flist:
- n = os.path.splitext(f)[0]
- if not bn:
- bn = n
- dataset.metadata.base_name = bn
+ bn = dataset.metadata.base_name
+ if not bn:
+ for f in flist:
+ n = os.path.splitext(f)[0]
+ bn = n
+ dataset.metadata.base_name = bn
if not bn:
bn = '?'
+ dataset.metadata.base_name = bn
pn = '%s.pheno' % (bn)
pp = os.path.join(dataset.extra_files_path,pn)
dataset.metadata.pheno_path=pp
@@ -680,7 +710,7 @@
dataset.metadata.column_names = []
dataset.metadata.columns = 0
dataset.peek = 'No pheno file found'
- if len(pf) > 1:
+ if pf and len(pf) > 1:
dataset.metadata.pheCols = self.get_phecols(phenolist=pf)
else:
dataset.metadata.pheCols = [('','No useable phenotypes found',False),]
@@ -690,8 +720,11 @@
if not dataset.blurb:
dataset.blurb = 'R loadable BioC expression object for the Rexpression Galaxy toolkit'
return True
+
def make_html_table( self, pp='nothing supplied from peek\n'):
- """Create HTML table, used for displaying peek"""
+ """
+ Create HTML table, used for displaying peek
+ """
out = ['<table cellspacing="0" cellpadding="3">',]
p = pp.split('\n')
try:
@@ -712,25 +745,37 @@
except Exception, exc:
out = "Can't create html table %s" % str( exc )
return out
+
def display_peek( self, dataset ):
- """Returns formatted html of peek"""
+ """
+ Returns formatted html of peek
+ """
out=self.make_html_table(dataset.peek)
return out
+
def get_mime(self):
- """Returns the mime type of the datatype"""
+ """
+ Returns the mime type of the datatype
+ """
return 'text/html'
+
class Affybatch( RexpBase ):
- """derived class for BioC data structures in Galaxy """
+ """
+ derived class for BioC data structures in Galaxy
+ """
+
file_ext = "affybatch"
def __init__( self, **kwd ):
RexpBase.__init__(self, **kwd)
self.add_composite_file( '%s.affybatch', description = 'AffyBatch R object saved to file',
substitute_name_with_metadata = 'base_name', is_binary=True )
-
+
class Eset( RexpBase ):
- """derived class for BioC data structures in Galaxy """
+ """
+ derived class for BioC data structures in Galaxy
+ """
file_ext = "eset"
def __init__( self, **kwd ):
@@ -738,8 +783,11 @@
self.add_composite_file( '%s.eset', description = 'ESet R object saved to file',
substitute_name_with_metadata = 'base_name', is_binary = True )
+
class MAlist( RexpBase ):
- """derived class for BioC data structures in Galaxy """
+ """
+ derived class for BioC data structures in Galaxy
+ """
file_ext = "malist"
def __init__( self, **kwd ):
@@ -747,6 +795,8 @@
self.add_composite_file( '%s.malist', description = 'MAlist R object saved to file',
substitute_name_with_metadata = 'base_name', is_binary = True )
+
if __name__ == '__main__':
import doctest, sys
doctest.testmod(sys.modules[__name__])
+
diff -r 9701e5ee128d -r 70930ea26347 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py Wed Mar 10 14:25:34 2010 -0500
+++ b/lib/galaxy/tools/parameters/grouping.py Wed Mar 10 16:09:37 2010 -0500
@@ -99,6 +99,10 @@
self.default_file_type = 'txt'
self.file_type_to_ext = { 'auto':self.default_file_type }
self.metadata_ref = 'files_metadata'
+ def get_file_base_name( self, context ):
+ log.debug('### uploadDataset get base name context = %s' % str(context))
+ fd = context.get('files_metadata|base_name','?')
+ return fd
def get_file_type( self, context ):
return context.get( self.file_type_name, self.default_file_type )
def get_datatype_ext( self, trans, context ):
@@ -291,15 +295,13 @@
temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file( dataset ) ), prefix='upload_auto_primary_file' )
dataset.primary_file = temp_name
dataset.space_to_tab = False
- dataset.precreated_name = dataset.name = 'Uploaded Composite Dataset (%s)' % ( file_type )
+ dataset.precreated_name = dataset.name = dataset.metadata['base_name'] # was 'Uploaded Composite Dataset (%s)' % ( file_type )
else:
file_bunch, warnings = get_one_filename( groups_incoming[ 0 ] )
- if dataset.datatype.composite_type:
- precreated_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
writable_files_offset = 1
dataset.primary_file = file_bunch.path
dataset.space_to_tab = file_bunch.space_to_tab
- dataset.precreated_name = file_bunch.precreated_name
+ dataset.precreated_name = dataset.metadata['base_name'] # file_bunch.precreated_name
dataset.name = file_bunch.precreated_name
dataset.warnings.extend( file_bunch.warnings )
if dataset.primary_file is None:#remove this before finish, this should create an empty dataset
diff -r 9701e5ee128d -r 70930ea26347 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Wed Mar 10 14:25:34 2010 -0500
+++ b/lib/galaxy/web/controllers/dataset.py Wed Mar 10 16:09:37 2010 -0500
@@ -1,4 +1,4 @@
-import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib
+import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib, tempfile, zipfile, glob
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, iff, grids
@@ -7,11 +7,30 @@
from galaxy.datatypes.display_applications.util import encode_dataset_user, decode_dataset_user
from email.MIMEText import MIMEText
-
import pkg_resources;
pkg_resources.require( "Paste" )
import paste.httpexceptions
+tmpd = tempfile.mkdtemp()
+comptypes=[]
+ziptype = '32'
+tmpf = os.path.join( tmpd, 'compression_test.zip' )
+try:
+ archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+ archive.close()
+ comptypes.append( 'zip' )
+ ziptype = '64'
+except RuntimeError:
+ log.exception( "Compression error when testing zip compression. This option will be disabled for library downloads." )
+except (TypeError, zipfile.LargeZipFile): # ZIP64 is only in Python2.5+. Remove TypeError when 2.4 support is dropped
+ log.warning( 'Max zip file size is 2GB, ZIP64 not supported' )
+ comptypes.append( 'zip' )
+try:
+ os.unlink( tmpf )
+except OSError:
+ pass
+os.rmdir( tmpd )
+
log = logging.getLogger( __name__ )
error_report_template = """
@@ -182,6 +201,97 @@
return 'This link may not be followed from within Galaxy.'
@web.expose
+ def archive_composite_dataset( self, trans, data=None, **kwd ):
+ # save a composite object into a compressed archive for downloading
+ params = util.Params( kwd )
+ if (params.do_action == None):
+ params.do_action = 'zip' # default
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
+ if not data:
+ msg = "You must select at least one dataset"
+ messagetype = 'error'
+ else:
+ error = False
+ try:
+ if (params.do_action == 'zip'):
+ # Can't use mkstemp - the file must not exist first
+ tmpd = tempfile.mkdtemp()
+ tmpf = os.path.join( tmpd, 'library_download.' + params.do_action )
+ if ziptype == '64':
+ archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+ else:
+ archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED )
+ archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
+ elif params.do_action == 'tgz':
+ archive = util.streamball.StreamBall( 'w|gz' )
+ elif params.do_action == 'tbz':
+ archive = util.streamball.StreamBall( 'w|bz2' )
+ except (OSError, zipfile.BadZipFile):
+ error = True
+ log.exception( "Unable to create archive for download" )
+ msg = "Unable to create archive for %s for download, please report this error" % data.name
+ messagetype = 'error'
+ if not error:
+ current_user_roles = trans.get_current_user_roles()
+ ext = data.extension
+ path = data.file_name
+ fname = os.path.split(path)[-1]
+ basename = data.metadata.base_name
+ efp = data.extra_files_path
+ htmlname = os.path.splitext(data.name)[0]
+ if not htmlname.endswith(ext):
+ htmlname = '%s_%s' % (htmlname,ext)
+ archname = '%s.html' % htmlname # fake the real nature of the html file
+ try:
+ archive.add(data.file_name,archname)
+ except IOError:
+ error = True
+ log.exception( "Unable to add composite parent %s to temporary library download archive" % data.file_name)
+ msg = "Unable to create archive for download, please report this error"
+ messagetype = 'error'
+ flist = glob.glob(os.path.join(efp,'*.*')) # glob returns full paths
+ for fpath in flist:
+ efp,fname = os.path.split(fpath)
+ try:
+ archive.add( fpath,fname )
+ except IOError:
+ error = True
+ log.exception( "Unable to add %s to temporary library download archive" % fname)
+ msg = "Unable to create archive for download, please report this error"
+ messagetype = 'error'
+ continue
+ if not error:
+ if params.do_action == 'zip':
+ archive.close()
+ tmpfh = open( tmpf )
+ # clean up now
+ try:
+ os.unlink( tmpf )
+ os.rmdir( tmpd )
+ except OSError:
+ error = True
+ msg = "Unable to remove temporary library download archive and directory"
+ log.exception( msg )
+ messagetype = 'error'
+ if not error:
+ trans.response.set_content_type( "application/x-zip-compressed" )
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyCompositeObject.zip"
+ return tmpfh
+ else:
+ trans.response.set_content_type( "application/x-tar" )
+ outext = 'tgz'
+ if params.do_action == 'tbz':
+ outext = 'tbz'
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % outext
+ archive.wsgi_status = trans.response.wsgi_status()
+ archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+ return archive.stream
+ return trans.show_error_message( msg )
+
+
+
+ @web.expose
def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd):
"""Catches the dataset id and displays file contents as directed"""
@@ -219,15 +329,19 @@
trans.log_event( "Display dataset id: %s" % str( dataset_id ) )
if to_ext: # Saving the file
- trans.response.headers['Content-Length'] = int( os.stat( data.file_name ).st_size )
- if to_ext[0] != ".":
- to_ext = "." + to_ext
- valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
- fname = data.name
- fname = ''.join(c in valid_chars and c or '_' for c in fname)[0:150]
- trans.response.headers["Content-Disposition"] = "attachment; filename=GalaxyHistoryItem-%s-[%s]%s" % (data.hid, fname, to_ext)
- return open( data.file_name )
-
+ composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
+ composite_extensions.append('html')
+ if data.ext in composite_extensions:
+ return self.archive_composite_dataset( trans, data, **kwd )
+ else:
+ trans.response.headers['Content-Length'] = int( os.stat( data.file_name ).st_size )
+ if to_ext[0] != ".":
+ to_ext = "." + to_ext
+ valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ fname = data.name
+ fname = ''.join(c in valid_chars and c or '_' for c in fname)[0:150]
+ trans.response.headers["Content-Disposition"] = "attachment; filename=GalaxyHistoryItem-%s-[%s]%s" % (data.hid, fname, to_ext)
+ return open( data.file_name )
if os.path.exists( data.file_name ):
max_peek_size = 1000000 # 1 MB
if preview and os.stat( data.file_name ).st_size > max_peek_size:
@@ -367,7 +481,10 @@
raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
if 'display_url' not in kwd or 'redirect_url' not in kwd:
return trans.show_error_message( 'Invalid parameters specified for "display at" link, please contact a Galaxy administrator' )
- redirect_url = kwd['redirect_url'] % urllib.quote_plus( kwd['display_url'] )
+ try:
+ redirect_url = kwd['redirect_url'] % urllib.quote_plus( kwd['display_url'] )
+ except:
+ redirect_url = kwd['redirect_url'] # not all will need custom text
current_user_roles = trans.get_current_user_roles()
if trans.app.security_agent.dataset_is_public( data.dataset ):
return trans.response.send_redirect( redirect_url ) # anon access already permitted by rbac
@@ -591,4 +708,3 @@
status = SUCCESS
message = done_msg
return status, message
-
\ No newline at end of file
diff -r 9701e5ee128d -r 70930ea26347 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Wed Mar 10 14:25:34 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Wed Mar 10 16:09:37 2010 -0500
@@ -1098,6 +1098,7 @@
def download_dataset_from_folder( self, trans, cntrller, id, library_id=None, **kwd ):
"""Catches the dataset id and displays file contents as directed"""
show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+ params = util.Params( kwd )
use_panels = util.string_as_bool( params.get( 'use_panels', False ) )
ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( id ) )
if not ldda.dataset:
diff -r 9701e5ee128d -r 70930ea26347 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Mar 10 14:25:34 2010 -0500
+++ b/test/base/twilltestcase.py Wed Mar 10 16:09:37 2010 -0500
@@ -23,6 +23,8 @@
log = logging.getLogger( __name__ )
class TwillTestCase( unittest.TestCase ):
+ composite_extensions = ['html','lped','pbed','fped','pphe','eigenstratgeno','eset','affybatch','malist','test-data' ]
+
def setUp( self ):
# Security helper
@@ -61,9 +63,10 @@
else:
files_differ = True
if files_differ:
- allowed_diff_count = attributes.get( 'lines_diff', 0 )
+ allowed_diff_count = int(attributes.get( 'lines_diff', 0 ))
diff = list( difflib.unified_diff( local_file, history_data, "local_file", "history_data" ) )
diff_lines = get_lines_diff( diff )
+ log.debug('## files diff on %s and %s lines_diff=%d, found diff = %d' % (file1,file2,allowed_diff_count,diff_lines))
if diff_lines > allowed_diff_count:
diff_slice = diff[0:40]
#FIXME: This pdf stuff is rather special cased and has not been updated to consider lines_diff
@@ -75,7 +78,7 @@
# PDF files contain creation dates, modification dates, ids and descriptions that change with each
# new file, so we need to handle these differences. As long as the rest of the PDF file does
# not differ we're ok.
- valid_diff_strs = [ 'description', 'createdate', 'creationdate', 'moddate', 'id' ]
+ valid_diff_strs = [ 'description', 'createdate', 'creationdate', 'moddate', 'id', 'producer', 'creator' ]
valid_diff = False
for line in diff_slice:
# Make sure to lower case strings before checking.
@@ -109,7 +112,7 @@
attributes = {}
if attributes.get( 'sort', False ):
history_data.sort()
- lines_diff = attributes.get( 'lines_diff', 0 )
+ lines_diff = int(attributes.get( 'lines_diff', 0 ))
line_diff_count = 0
diffs = []
for i in range( len( history_data ) ):
@@ -194,36 +197,7 @@
raise AssertionError, "Invalid hid (%s) created when pasting %s" % ( hid, url_paste )
# Wait for upload processing to finish (TODO: this should be done in each test case instead)
self.wait()
- def upload_composite_datatype_file( self, ftype, ped_file='', map_file='', bim_file='', bed_file='', fam_file='', dbkey='unspecified (?)', base_name='rgenetics' ):
- """Tests uploading either of 2 different composite data types ( lped and pbed )"""
- self.visit_url( "%s/tool_runner/index?tool_id=upload1" % self.url )
- # Handle refresh_on_change
- self.refresh_form( "file_type", ftype )
- tc.fv( "1", "dbkey", dbkey )
- tc.fv( "1", "files_metadata|base_name", base_name )
- if ftype == 'lped':
- # lped data types include a ped_file and a map_file
- ped_file = self.get_filename( ped_file )
- tc.formfile( "1", "files_0|file_data", ped_file )
- map_file = self.get_filename( map_file )
- tc.formfile( "1", "files_1|file_data", map_file )
- elif ftype == 'pbed':
- # pbed data types include a bim_file, a bed_file and a fam_file
- bim_file = self.get_filename( bim_file )
- tc.formfile( "1", "files_0|file_data", bim_file )
- bed_file = self.get_filename( bed_file )
- tc.formfile( "1", "files_1|file_data", bed_file )
- fam_file = self.get_filename( fam_file )
- tc.formfile( "1", "files_2|file_data", fam_file )
- else:
- raise AssertionError, "Unsupported composite data type (%s) received, currently only lped and pbed data types are supported." % ftype
- tc.submit( "runtool_btn" )
- self.check_page_for_string( 'The following job has been succesfully added to the queue:' )
- check_str = 'Uploaded Composite Dataset (%s)' % ftype
- self.check_page_for_string( check_str )
- # Wait for upload processing to finish (TODO: this should be done in each test case instead)
- self.wait()
- self.check_history_for_string( check_str )
+
# Functions associated with histories
def check_history_for_errors( self ):
"""Raises an exception if there are errors in a history"""
@@ -672,7 +646,7 @@
def verify_composite_datatype_file_content( self, file_name, hda_id, base_name = None, attributes = None ):
local_name = self.get_filename( file_name )
if base_name is None:
- base_name = file_name
+ base_name = os.path.split(file_name)[-1]
temp_name = self.get_filename( '%s_temp' % file_name ) #This is a terrible way to generate a temp name
self.visit_url( "%s/datasets/%s/display/%s" % ( self.url, self.security.encode_id( hda_id ), base_name ) )
data = self.last_page()
@@ -915,9 +889,14 @@
# To help with debugging a tool, print out the form controls when the test fails
print "form '%s' contains the following controls ( note the values )" % f.name
control_names = []
+ hidden_control_names = [] # cannot change these, so ignore or many complex page tool tests will fail
+ hc_prefix = '<HiddenControl('
for i, control in enumerate( f.controls ):
- print "control %d: %s" % ( i, str( control ) )
- try:
+ print "control %d: %s" % ( i, str( control ) )
+ if hc_prefix in str(control):
+ hidden_control_names.append(control.name) # cannot do much with these
+ else:
+ try:
#check if a repeat element needs to be added
if control.name not in kwd and control.name.endswith( '_add' ):
#control name doesn't exist, could be repeat
@@ -946,12 +925,14 @@
# Submit for refresh
tc.submit( '___refresh_grouping___' )
return self.submit_form( form_no=form_no, button=button, **kwd )
- except Exception, e:
+ except Exception, e:
log.debug( "In submit_form, continuing, but caught exception: %s" % str( e ) )
continue
- control_names.append( control.name )
+ control_names.append( control.name )
# No refresh_on_change attribute found in current form, so process as usual
for control_name, control_value in kwd.items():
+ if control_name in hidden_control_names:
+ continue # these cannot be handled safely - cause the test to barf out
if not isinstance( control_value, list ):
control_value = [ control_value ]
try:

11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/9701e5ee128d
changeset: 3506:9701e5ee128d
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Mar 10 14:25:34 2010 -0500
description:
Now that cntrller is passed as a POST var in the upload form, it's not necessary to have the library_admin and library controller upload convenience methods. This should fix broken library tests on the production buildbot.
diffstat:
lib/galaxy/web/controllers/library.py | 3 ---
lib/galaxy/web/controllers/library_admin.py | 4 ----
lib/galaxy/web/controllers/library_common.py | 8 ++------
3 files changed, 2 insertions(+), 13 deletions(-)
diffs (39 lines):
diff -r 9efe896dbb17 -r 9701e5ee128d lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Wed Mar 10 11:51:00 2010 -0500
+++ b/lib/galaxy/web/controllers/library.py Wed Mar 10 14:25:34 2010 -0500
@@ -34,6 +34,3 @@
default_action=params.get( 'default_action', None ),
msg=msg,
messagetype=messagetype )
- @web.expose
- def upload_library_dataset( self, trans, library_id, folder_id, **kwd ):
- return trans.webapp.controllers[ 'library_common' ].upload_library_dataset( trans, 'library', library_id, folder_id, **kwd )
diff -r 9efe896dbb17 -r 9701e5ee128d lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Wed Mar 10 11:51:00 2010 -0500
+++ b/lib/galaxy/web/controllers/library_admin.py Wed Mar 10 14:25:34 2010 -0500
@@ -235,7 +235,3 @@
show_deleted=show_deleted,
msg=msg,
messagetype=status ) )
- @web.expose
- @web.require_admin
- def upload_library_dataset( self, trans, library_id, folder_id, **kwd ):
- return trans.webapp.controllers[ 'library_common' ].upload_library_dataset( trans, 'library_admin', library_id, folder_id, **kwd )
diff -r 9efe896dbb17 -r 9701e5ee128d lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Wed Mar 10 11:51:00 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Wed Mar 10 14:25:34 2010 -0500
@@ -804,12 +804,8 @@
if upload_option == 'upload_file' and trans.app.config.nginx_upload_path:
# url_for is intentionally not used on the base URL here -
# nginx_upload_path is expected to include the proxy prefix if the
- # administrator intends for it to be part of the URL. We also
- # redirect to the library or library_admin controller rather than
- # library_common because GET arguments can't be used in conjunction
- # with nginx upload (nginx can't do percent decoding without a
- # bunch of hacky rewrite rules).
- action = trans.app.config.nginx_upload_path + '?nginx_redir=' + web.url_for( controller=cntrller, action='upload_library_dataset' )
+ # administrator intends for it to be part of the URL.
+ action = trans.app.config.nginx_upload_path + '?nginx_redir=' + web.url_for( controller='library_common', action='upload_library_dataset' )
return trans.fill_template( '/library/common/upload.mako',
cntrller=cntrller,
upload_option=upload_option,
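To make the effect of this change concrete, here is a hedged sketch of the resulting form action (the fake_url_for helper and the '/_upload' path are illustrative stand-ins, not Galaxy code): because cntrller now travels in the POST body, a single library_common redirect target can serve both the library and library_admin views.

def upload_form_action( nginx_upload_path, url_for ):
    # one redirect target now serves both the library and library_admin views
    return nginx_upload_path + '?nginx_redir=' + url_for( controller='library_common',
                                                          action='upload_library_dataset' )

def fake_url_for( controller, action ):
    # stand-in for Galaxy's routing helper, purely for illustration
    return '/%s/%s' % ( controller, action )

print upload_form_action( '/_upload', fake_url_for )
# -> /_upload?nginx_redir=/library_common/upload_library_dataset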
11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/9efe896dbb17
changeset: 3505:9efe896dbb17
user: Kelly Vincent <kpvincent(a)bx.psu.edu>
date: Wed Mar 10 11:51:00 2010 -0500
description:
Fix try-except-finally usage to be compatible with Python 2.4 in several tools and twilltestcase.py. Updated the liftOver test to replace a hard-coded (incorrect) path and modified the code file to allow for the new path. Improved bowtie error handling.
diffstat:
test/base/twilltestcase.py | 70 ++++++++++++++------------
tools/extract/liftOver_wrapper.py | 17 ++++--
tools/extract/liftOver_wrapper.xml | 9 +--
tools/extract/liftOver_wrapper_code.py | 6 +-
tools/samtools/sam_pileup.py | 44 ++++++++--------
tools/sr_mapping/bowtie_wrapper.py | 72 ++++++++++++++++++---------
tools/sr_mapping/bwa_wrapper.py | 88 +++++++++++++++++----------------
7 files changed, 171 insertions(+), 135 deletions(-)
diffs (452 lines):
diff -r f84112d155c0 -r 9efe896dbb17 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Mar 10 11:28:50 2010 -0500
+++ b/test/base/twilltestcase.py Wed Mar 10 11:51:00 2010 -0500
@@ -633,24 +633,26 @@
data = self.last_page()
file( temp_name, 'wb' ).write(data)
try:
- if attributes is None:
- attributes = {}
- compare = attributes.get( 'compare', 'diff' )
- extra_files = attributes.get( 'extra_files', None )
- if compare == 'diff':
- self.files_diff( local_name, temp_name, attributes=attributes )
- elif compare == 're_match':
- self.files_re_match( local_name, temp_name, attributes=attributes )
- elif compare == 're_match_multiline':
- self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
- else:
- raise Exception, 'Unimplemented Compare type: %s' % compare
- if extra_files:
- self.verify_extra_files_content( extra_files, elem.get( 'id' ) )
- except AssertionError, err:
- errmsg = 'History item %s different than expected, difference (using %s):\n' % ( hid, compare )
- errmsg += str( err )
- raise AssertionError( errmsg )
+ # have to nest try-except in try-finally to handle 2.4
+ try:
+ if attributes is None:
+ attributes = {}
+ compare = attributes.get( 'compare', 'diff' )
+ extra_files = attributes.get( 'extra_files', None )
+ if compare == 'diff':
+ self.files_diff( local_name, temp_name, attributes=attributes )
+ elif compare == 're_match':
+ self.files_re_match( local_name, temp_name, attributes=attributes )
+ elif compare == 're_match_multiline':
+ self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
+ else:
+ raise Exception, 'Unimplemented Compare type: %s' % compare
+ if extra_files:
+ self.verify_extra_files_content( extra_files, elem.get( 'id' ) )
+ except AssertionError, err:
+ errmsg = 'History item %s different than expected, difference (using %s):\n' % ( hid, compare )
+ errmsg += str( err )
+ raise AssertionError( errmsg )
finally:
os.remove( temp_name )
@@ -676,21 +678,23 @@
data = self.last_page()
file( temp_name, 'wb' ).write( data )
try:
- if attributes is None:
- attributes = {}
- compare = attributes.get( 'compare', 'diff' )
- if compare == 'diff':
- self.files_diff( local_name, temp_name, attributes=attributes )
- elif compare == 're_match':
- self.files_re_match( local_name, temp_name, attributes=attributes )
- elif compare == 're_match_multiline':
- self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
- else:
- raise Exception, 'Unimplemented Compare type: %s' % compare
- except AssertionError, err:
- errmsg = 'Composite file (%s) of History item %s different than expected, difference (using %s):\n' % ( base_name, hda_id, compare )
- errmsg += str( err )
- raise AssertionError( errmsg )
+ # have to nest try-except in try-finally to handle 2.4
+ try:
+ if attributes is None:
+ attributes = {}
+ compare = attributes.get( 'compare', 'diff' )
+ if compare == 'diff':
+ self.files_diff( local_name, temp_name, attributes=attributes )
+ elif compare == 're_match':
+ self.files_re_match( local_name, temp_name, attributes=attributes )
+ elif compare == 're_match_multiline':
+ self.files_re_match_multiline( local_name, temp_name, attributes=attributes )
+ else:
+ raise Exception, 'Unimplemented Compare type: %s' % compare
+ except AssertionError, err:
+ errmsg = 'Composite file (%s) of History item %s different than expected, difference (using %s):\n' % ( base_name, hda_id, compare )
+ errmsg += str( err )
+ raise AssertionError( errmsg )
finally:
os.remove( temp_name )
diff -r f84112d155c0 -r 9efe896dbb17 tools/extract/liftOver_wrapper.py
--- a/tools/extract/liftOver_wrapper.py Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/extract/liftOver_wrapper.py Wed Mar 10 11:51:00 2010 -0500
@@ -4,7 +4,7 @@
Converts coordinates from one build/assembly to another using liftOver binary and mapping files downloaded from UCSC.
"""
-import sys, os, string
+import os, string, subprocess, sys
import tempfile
import re
@@ -51,15 +51,20 @@
if in_dbkey == "?":
stop_err( "Input dataset genome build unspecified, click the pencil icon in the history item to specify it." )
-
if not os.path.isfile( mapfilepath ):
stop_err( "%s mapping is not currently available." % ( mapfilepath.split('/')[-1].split('.')[0] ) )
safe_infile = safe_bed_file(infile)
-cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null 2>&1"
+cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null"
try:
- os.system( cmd_line )
-except Exception, exc:
- stop_err( "Exception caught attempting conversion: %s" % str( exc ) )
+ # have to nest try-except in try-finally to handle 2.4
+ try:
+ proc = subprocess.Popen( args=cmd_line, shell=True, stderr=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ if returncode != 0:
+ raise Exception, stderr
+ except Exception, e:
+ raise Exception, 'Exception caught attempting conversion: ' + str( e )
finally:
os.remove(safe_infile)
diff -r f84112d155c0 -r 9efe896dbb17 tools/extract/liftOver_wrapper.xml
--- a/tools/extract/liftOver_wrapper.xml Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/extract/liftOver_wrapper.xml Wed Mar 10 11:51:00 2010 -0500
@@ -23,19 +23,16 @@
<requirements>
<requirement type="binary">liftOver</requirement>
</requirements>
-
<tests>
<test>
<param name="input" value="5.bed" dbkey="hg18" ftype="bed" />
- <param name="to_dbkey" value="/galaxy/data/hg18/liftOver/hg18ToPanTro2.over.chain" />
+ <param name="to_dbkey" value="panTro2" />
<param name="minMatch" value="0.95" />
<output name="out_file1" file="5_liftover_mapped.bed"/>
<output name="out_file2" file="5_liftover_unmapped.bed"/>
</test>
</tests>
-
<help>
-
.. class:: warningmark
Make sure that the genome build of the input dataset is specified (click the pencil icon in the history item to set it if necessary).
@@ -71,6 +68,6 @@
chrX 158279 160020 AK097346 0 +
chrX 160024 169033 AK074528 0 -
-</help>
-<code file="liftOver_wrapper_code.py"/>
+ </help>
+ <code file="liftOver_wrapper_code.py" />
</tool>
diff -r f84112d155c0 -r 9efe896dbb17 tools/extract/liftOver_wrapper_code.py
--- a/tools/extract/liftOver_wrapper_code.py Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/extract/liftOver_wrapper_code.py Wed Mar 10 11:51:00 2010 -0500
@@ -1,8 +1,10 @@
+import os
+
def exec_before_job(app, inp_data, out_data, param_dict, tool):
#Assuming the path of the form liftOverDirectory/hg18ToHg17.over.chain (This is how the mapping chain files from UCSC look.)
- to_dbkey = param_dict['to_dbkey'].split('.')[0].split('To')[1]
+ #allows for . in path
+ to_dbkey = os.path.split(param_dict['to_dbkey'])[1].split('.')[0].split('To')[1]
to_dbkey = to_dbkey[0].lower()+to_dbkey[1:]
out_data['out_file1'].set_dbkey(to_dbkey)
out_data['out_file1'].name = out_data['out_file1'].name + " [ MAPPED COORDINATES ]"
out_data['out_file2'].name = out_data['out_file2'].name + " [ UNMAPPED COORDINATES ]"
-
diff -r f84112d155c0 -r 9efe896dbb17 tools/samtools/sam_pileup.py
--- a/tools/samtools/sam_pileup.py Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/samtools/sam_pileup.py Wed Mar 10 11:51:00 2010 -0500
@@ -78,30 +78,32 @@
#prepare basic pileup command
cmd = 'samtools pileup %s -f %s %s > %s'
try:
- #index reference if necessary and prepare pileup command
- if options.ref == 'indexed':
- if not os.path.exists( "%s.fai" % seqPath ):
- raise Exception, "No sequences are available for '%s', request them by reporting this error." % options.dbkey
- cmd = cmd % ( opts, seqPath, tmpf0bam_name, options.output1 )
- elif options.ref == 'history':
- os.symlink( options.ownFile, tmpf1_name )
- cmdIndex = 'samtools faidx %s' % ( tmpf1_name )
- proc = subprocess.Popen( args=cmdIndex, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
+ # have to nest try-except in try-finally to handle 2.4
+ try:
+ #index reference if necessary and prepare pileup command
+ if options.ref == 'indexed':
+ if not os.path.exists( "%s.fai" % seqPath ):
+ raise Exception, "No sequences are available for '%s', request them by reporting this error." % options.dbkey
+ cmd = cmd % ( opts, seqPath, tmpf0bam_name, options.output1 )
+ elif options.ref == 'history':
+ os.symlink( options.ownFile, tmpf1_name )
+ cmdIndex = 'samtools faidx %s' % ( tmpf1_name )
+ proc = subprocess.Popen( args=cmdIndex, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ #did index succeed?
+ if returncode != 0:
+ raise Exception, 'Error creating index file\n' + stderr
+ cmd = cmd % ( opts, tmpf1_name, tmpf0bam_name, options.output1 )
+ #perform pileup command
+ proc = subprocess.Popen( args=cmd, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
returncode = proc.wait()
+ #did it succeed?
stderr = proc.stderr.read()
- #did index succeed?
if returncode != 0:
- raise Exception, 'Error creating index file\n' + stderr
- cmd = cmd % ( opts, tmpf1_name, tmpf0bam_name, options.output1 )
- #perform pileup command
- proc = subprocess.Popen( args=cmd, shell=True, cwd=tmpDir, stderr=subprocess.PIPE )
- returncode = proc.wait()
- #did it succeed?
- stderr = proc.stderr.read()
- if returncode != 0:
- raise Exception, stderr
- except Exception, e:
- stop_err( 'Error running Samtools pileup tool\n' + str( e ) )
+ raise Exception, stderr
+ except Exception, e:
+ stop_err( 'Error running Samtools pileup tool\n' + str( e ) )
finally:
#clean up temp files
if os.path.exists( tmpDir ):
diff -r f84112d155c0 -r 9efe896dbb17 tools/sr_mapping/bowtie_wrapper.py
--- a/tools/sr_mapping/bowtie_wrapper.py Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/sr_mapping/bowtie_wrapper.py Wed Mar 10 11:51:00 2010 -0500
@@ -2,7 +2,7 @@
"""
Runs Bowtie on single-end or paired-end data.
-For use with Bowtie v. 0.12.1
+For use with Bowtie v. 0.12.3
usage: bowtie_wrapper.py [options]
-t, --threads=t: The number of threads to run
@@ -58,12 +58,12 @@
-H, --suppressHeader=H: Suppress header
"""
-import optparse, os, shutil, sys, tempfile
+import optparse, os, shutil, subprocess, sys, tempfile
def stop_err( msg ):
- sys.stderr.write( "%s\n" % msg )
+ sys.stderr.write( '%s\n' % msg )
sys.exit()
-
+
def __main__():
#Parse Command Line
parser = optparse.OptionParser()
@@ -119,6 +119,7 @@
parser.add_option( '-x', '--indexSettings', dest='index_settings', help='Whether or not indexing options are to be set' )
parser.add_option( '-H', '--suppressHeader', dest='suppressHeader', help='Suppress header' )
(options, args) = parser.parse_args()
+ stdout = ''
# make temp directory for placement of indices and copy reference file there if necessary
tmp_index_dir = tempfile.mkdtemp()
# get type of data (solid or solexa)
@@ -187,17 +188,25 @@
iseed, icutoff, colorspace )
except ValueError:
indexing_cmds = '%s' % colorspace
+ ref_file = tempfile.NamedTemporaryFile( dir=tmp_index_dir )
+ ref_file_name = ref_file.name
+ ref_file.close()
+ os.symlink( options.ref, ref_file_name )
+ cmd1 = 'bowtie-build %s -f %s %s' % ( indexing_cmds, ref_file_name, ref_file_name )
try:
- shutil.copy( options.ref, tmp_index_dir )
+ proc = subprocess.Popen( args=cmd1, shell=True, cwd=tmp_index_dir, stderr=subprocess.PIPE, stdout=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ if returncode != 0:
+ raise Exception, stderr
except Exception, e:
- stop_err( 'Error creating temp directory for indexing purposes\n' + str( e ) )
- options.ref = os.path.join( tmp_index_dir, os.path.split( options.ref )[1] )
- cmd1 = 'bowtie-build %s -f %s %s 2> /dev/null' % ( indexing_cmds, options.ref, options.ref )
- try:
- os.chdir( tmp_index_dir )
- os.system( cmd1 )
- except Exception, e:
+ # clean up temp dir
+ if os.path.exists( tmp_index_dir ):
+ shutil.rmtree( tmp_index_dir )
stop_err( 'Error indexing reference sequence\n' + str( e ) )
+ stdout += 'File indexed. '
+ else:
+ ref_file_name = options.ref
# set up aligning and generate aligning command options
# automatically set threads in both cases
if options.suppressHeader == 'true':
@@ -328,19 +337,34 @@
best, strata, offrate, seed, colorspace, snpphred, snpfrac,
keepends, options.threads, suppressHeader )
except ValueError, e:
+ # clean up temp dir
+ if os.path.exists( tmp_index_dir ):
+ shutil.rmtree( tmp_index_dir )
stop_err( 'Something is wrong with the alignment parameters and the alignment could not be run\n' + str( e ) )
- # prepare actual aligning commands
- if options.paired == 'paired':
- cmd2 = 'bowtie %s %s -1 %s -2 %s > %s 2> /dev/null' % ( aligning_cmds, options.ref, options.input1, options.input2, options.output )
- else:
- cmd2 = 'bowtie %s %s %s > %s 2> /dev/null' % ( aligning_cmds, options.ref, options.input1, options.output )
- # align
try:
- os.system( cmd2 )
- except Exception, e:
- stop_err( 'Error aligning sequence\n' + str( e ) )
- # clean up temp dir
- if os.path.exists( tmp_index_dir ):
- shutil.rmtree( tmp_index_dir )
+ # have to nest try-except in try-finally to handle 2.4
+ try:
+ # prepare actual aligning commands
+ if options.paired == 'paired':
+ cmd2 = 'bowtie %s %s -1 %s -2 %s > %s' % ( aligning_cmds, ref_file_name, options.input1, options.input2, options.output )
+ else:
+ cmd2 = 'bowtie %s %s %s > %s' % ( aligning_cmds, ref_file_name, options.input1, options.output )
+ # align
+ proc = subprocess.Popen( args=cmd2, shell=True, cwd=tmp_index_dir, stderr=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ if returncode != 0:
+ raise Exception, stderr
+ # check that there are results in the output file
+ if len( open( options.output, 'rb' ).read().strip() ) == 0:
+ raise Exception, 'The output file is empty, there may be an error with your input file or settings.'
+ except Exception, e:
+ stop_err( 'Error aligning sequence. ' + str( e ) )
+ finally:
+ # clean up temp dir
+ if os.path.exists( tmp_index_dir ):
+ shutil.rmtree( tmp_index_dir )
+ stdout += 'Sequence file aligned.\n'
+ sys.stdout.write( stdout )
if __name__=="__main__": __main__()
diff -r f84112d155c0 -r 9efe896dbb17 tools/sr_mapping/bwa_wrapper.py
--- a/tools/sr_mapping/bwa_wrapper.py Wed Mar 10 11:28:50 2010 -0500
+++ b/tools/sr_mapping/bwa_wrapper.py Wed Mar 10 11:51:00 2010 -0500
@@ -152,55 +152,57 @@
cmd3 = 'bwa samse %s %s %s %s >> %s' % ( gen_alignment_cmds, ref_file_name, tmp_align_out_name, options.fastq, options.output )
# perform alignments
try:
- # align
+ # need to nest try-except in try-finally to handle 2.4
try:
- proc = subprocess.Popen( args=cmd2, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
- returncode = proc.wait()
- stderr = proc.stderr.read()
- if returncode != 0:
- raise Exception, stderr
- except Exception, e:
- raise Exception, 'Error aligning sequence. ' + str( e )
- # and again if paired data
- try:
- if cmd2b:
- proc = subprocess.Popen( args=cmd2b, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
+ # align
+ try:
+ proc = subprocess.Popen( args=cmd2, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
returncode = proc.wait()
stderr = proc.stderr.read()
if returncode != 0:
raise Exception, stderr
+ except Exception, e:
+ raise Exception, 'Error aligning sequence. ' + str( e )
+ # and again if paired data
+ try:
+ if cmd2b:
+ proc = subprocess.Popen( args=cmd2b, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ if returncode != 0:
+ raise Exception, stderr
+ except Exception, e:
+ raise Exception, 'Error aligning second sequence. ' + str( e )
+ # generate align
+ try:
+ proc = subprocess.Popen( args=cmd3, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
+ returncode = proc.wait()
+ stderr = proc.stderr.read()
+ if returncode != 0:
+ raise Exception, stderr
+ except Exception, e:
+ raise Exception, 'Error generating alignments. ' + str( e )
+ # remove header if necessary
+ if options.suppressHeader == 'true':
+ tmp_out = tempfile.NamedTemporaryFile( dir=tmp_dir)
+ tmp_out_name = tmp_out.name
+ tmp_out.close()
+ try:
+ shutil.move( options.output, tmp_out_name )
+ except Exception, e:
+ raise Exception, 'Error moving output file before removing headers. ' + str( e )
+ fout = file( options.output, 'w' )
+ for line in file( tmp_out.name, 'r' ):
+ if not ( line.startswith( '@HD' ) or line.startswith( '@SQ' ) or line.startswith( '@RG' ) or line.startswith( '@PG' ) or line.startswith( '@CO' ) ):
+ fout.write( line )
+ fout.close()
+ # check that there are results in the output file
+ if os.path.getsize( options.output ) > 0:
+ sys.stdout.write( 'BWA run on %s-end data' % options.genAlignType )
+ else:
+ raise Exception, 'The output file is empty. You may simply have no matches, or there may be an error with your input file or settings.'
except Exception, e:
- raise Exception, 'Error aligning second sequence. ' + str( e )
- # generate align
- try:
- proc = subprocess.Popen( args=cmd3, shell=True, cwd=tmp_dir, stderr=subprocess.PIPE )
- returncode = proc.wait()
- stderr = proc.stderr.read()
- if returncode != 0:
- raise Exception, stderr
- except Exception, e:
- raise Exception, 'Error generating alignments. ' + str( e )
- # remove header if necessary
- if options.suppressHeader == 'true':
- tmp_out = tempfile.NamedTemporaryFile( dir=tmp_dir)
- tmp_out_name = tmp_out.name
- tmp_out.close()
- try:
- shutil.move( options.output, tmp_out_name )
- except Exception, e:
- raise Exception, 'Error moving output file before removing headers. ' + str( e )
- fout = file( options.output, 'w' )
- for line in file( tmp_out.name, 'r' ):
- if not ( line.startswith( '@HD' ) or line.startswith( '@SQ' ) or line.startswith( '@RG' ) or line.startswith( '@PG' ) or line.startswith( '@CO' ) ):
- fout.write( line )
- fout.close()
- # check that there are results in the output file
- if os.path.getsize( options.output ) > 0:
- sys.stdout.write( 'BWA run on %s-end data' % options.genAlignType )
- else:
- raise Exception, 'The output file is empty. You may simply have no matches, or there may be an error with your input file or settings.'
- except Exception, e:
- stop_err( 'The alignment failed.\n' + str( e ) )
+ stop_err( 'The alignment failed.\n' + str( e ) )
finally:
# clean up temp dir
if os.path.exists( tmp_index_dir ):
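The pattern repeated throughout this changeset is easiest to see in isolation. Python 2.4 does not accept the unified try/except/finally statement (it only arrived in Python 2.5), so cleanup has to be a try/finally wrapping a nested try/except; the same hunks also replace os.system with subprocess.Popen so that a non-zero exit status and the captured stderr become a real error instead of being silently discarded. A minimal, self-contained sketch of both patterns follows; the command and messages are illustrative, not taken from the Galaxy tools.

import os, subprocess, sys, tempfile

def stop_err( msg ):
    sys.stderr.write( '%s\n' % msg )
    sys.exit( 1 )

fd, out_name = tempfile.mkstemp()
os.close( fd )
cmd = 'echo hello > %s' % out_name  # illustrative command
try:
    # nest try-except inside try-finally: Python 2.4 has no combined form
    try:
        proc = subprocess.Popen( args=cmd, shell=True, stderr=subprocess.PIPE )
        returncode = proc.wait()
        stderr = proc.stderr.read()
        if returncode != 0:
            raise Exception, stderr
    except Exception, e:
        stop_err( 'Error running command\n' + str( e ) )
finally:
    # cleanup runs whether the command succeeded or failed
    os.remove( out_name )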
11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/f84112d155c0
changeset: 3504:f84112d155c0
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Wed Mar 10 11:28:50 2010 -0500
description:
Allow renaming of uploaded files in toolbox tests by including an <edit_attributes type="name" value="new dataset name" /> child tag. A rename directive is automatically assigned (no XML changes required) to uploaded composite datasets so that they can be identified uniquely.
diffstat:
lib/galaxy/tools/__init__.py | 23 +++++++++++++++++++++++
lib/galaxy/tools/test.py | 11 +++++++----
test/functional/test_toolbox.py | 16 ++++++++++++++--
3 files changed, 44 insertions(+), 6 deletions(-)
diffs (91 lines):
diff -r c73f093219aa -r f84112d155c0 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Wed Mar 10 11:24:49 2010 -0500
+++ b/lib/galaxy/tools/__init__.py Wed Mar 10 11:28:50 2010 -0500
@@ -510,6 +510,7 @@
store in `self.tests`.
"""
self.tests = []
+ composite_data_names_counter = 0 #composite datasets need a unique name: each test occurs in a fresh history, but we'll keep it unique per set of tests
for i, test_elem in enumerate( tests_elem.findall( 'test' ) ):
name = test_elem.get( 'name', 'Test-%d' % (i+1) )
maxseconds = int( test_elem.get( 'maxseconds', '120' ) )
@@ -524,6 +525,28 @@
else:
value = None
attrib['children'] = list( param_elem.getchildren() )
+ if attrib['children']:
+ #at this time, we can assume having children only occurs on DataToolParameter test items
+ #but this could change and would cause the below parsing to change based upon differences in children items
+ attrib['metadata'] = []
+ attrib['composite_data'] = []
+ attrib['edit_attributes'] = []
+ composite_data_name = None #composite datasets need to be renamed uniquely
+ for child in attrib['children']:
+ if child.tag == 'composite_data':
+ attrib['composite_data'].append( child )
+ if composite_data_name is None:
+ #generate a unique name; each test uses a fresh history
+ composite_data_name = '_COMPOSITE_RENAMED %i_' % ( composite_data_names_counter )
+ composite_data_names_counter += 1
+ elif child.tag == 'metadata':
+ attrib['metadata'].append( child )
+ elif child.tag == 'metadata':
+ attrib['metadata'].append( child )
+ elif child.tag == 'edit_attributes':
+ attrib['edit_attributes'].append( child )
+ if composite_data_name:
+ attrib['edit_attributes'].insert( 0, { 'type': 'name', 'value': composite_data_name } ) #composite datasets need implicit renaming; inserted at front of list so explicit declarations take precedence
test.add_param( attrib.pop( 'name' ), value, attrib )
for output_elem in test_elem.findall( "output" ):
attrib = dict( output_elem.attrib )
diff -r c73f093219aa -r f84112d155c0 lib/galaxy/tools/test.py
--- a/lib/galaxy/tools/test.py Wed Mar 10 11:24:49 2010 -0500
+++ b/lib/galaxy/tools/test.py Wed Mar 10 11:28:50 2010 -0500
@@ -30,12 +30,15 @@
if isinstance( input_value, grouping.Conditional ) or isinstance( input_value, grouping.Repeat ):
self.__expand_grouping_for_data_input(name, value, extra, input_name, input_value)
elif isinstance( self.tool.inputs[name], parameters.DataToolParameter ) and ( value, extra ) not in self.required_files:
- if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) == 0:
+ name_change = [ att for att in extra.get( 'edit_attributes', [] ) if att.get( 'type' ) == 'name' ]
+ if name_change:
+ name_change = name_change[-1].get( 'value' ) #only the last name change really matters
+ if value is None and not name_change:
assert self.tool.inputs[name].optional, '%s is not optional. You must provide a valid filename.' % name
else:
- self.required_files.append( ( value, extra ) )
- if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) > 0:
- value = extra.get( 'ftype' )
+ self.required_files.append( ( value, extra ) ) #these files will be uploaded
+ if name_change:
+ value = name_change #change value for select to renamed uploaded file for e.g. composite dataset
except Exception, e:
log.debug( "Error in add_param for %s: %s" % ( name, e ) )
self.inputs.append( ( name, value, extra ) )
diff -r c73f093219aa -r f84112d155c0 test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py Wed Mar 10 11:24:49 2010 -0500
+++ b/test/functional/test_toolbox.py Wed Mar 10 11:28:50 2010 -0500
@@ -33,10 +33,22 @@
# Upload any needed files
for fname, extra in testdef.required_files:
children = extra.get( 'children', [] )
- metadata = [ child for child in children if child.tag == 'metadata' ]
- composite_data = [ child for child in children if child.tag == 'composite_data' ]
+ metadata = extra.get( 'metadata', [] )
+ composite_data = extra.get( 'composite_data', [] )
self.upload_file( fname, ftype=extra.get( 'ftype', 'auto' ), dbkey=extra.get( 'dbkey', 'hg17' ), metadata = metadata, composite_data = composite_data )
print "Uploaded file: ", fname, ", ftype: ", extra.get( 'ftype', 'auto' ), ", extra: ", extra
+ #Post upload attribute editing
+ edit_attributes = extra.get( 'edit_attributes', [] )
+ #currently only renaming is supported
+ for edit_att in edit_attributes:
+ if edit_att.get( 'type', None ) == 'name':
+ new_name = edit_att.get( 'value', None )
+ assert new_name, 'You must supply the new dataset name as the value tag of the edit_attributes tag'
+ hda_id = self.get_history_as_data_list()[-1].get( 'id' )
+ self.edit_hda_attribute_info( hda_id, new_name = new_name )
+ print "Renamed uploaded file to:", new_name
+ else:
+ raise Exception( 'edit_attributes type (%s) is unimplemented' % edit_att.get( 'type', None ) )
# We need to handle the case where we've uploaded a valid compressed file since the upload
# tool will have uncompressed it on the fly.
all_inputs = {}
11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/c73f093219aa
changeset: 3503:c73f093219aa
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Wed Mar 10 11:24:49 2010 -0500
description:
Fix for deleting a list of library datasets - resolves ticket #102.
diffstat:
lib/galaxy/web/controllers/library_admin.py | 4 ++--
lib/galaxy/web/controllers/library_common.py | 14 ++++++++------
templates/library/common/browse_library.mako | 2 +-
templates/library/common/ldda_info.mako | 2 +-
test/base/twilltestcase.py | 9 ++++++---
test/functional/test_security_and_libraries.py | 6 +++---
6 files changed, 21 insertions(+), 16 deletions(-)
diffs (130 lines):
diff -r 34babf71a09f -r c73f093219aa lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Tue Mar 09 16:28:04 2010 -0500
+++ b/lib/galaxy/web/controllers/library_admin.py Wed Mar 10 11:24:49 2010 -0500
@@ -169,8 +169,8 @@
# deleted / purged contents will have the same state ). When a library or folder has been deleted for
# the amount of time defined in the cleanup_datasets.py script, the library or folder and all of its
# contents will be purged. The association between this method and the cleanup_datasets.py script
- # enables clean maintenance of libraries and library dataset disk files. This is also why the following
- # 3 objects, and not any of the associations ( the cleanup_datasets.py scipot handles everything else ).
+ # enables clean maintenance of libraries and library dataset disk files. This is also why the item_types
+ # are not any of the associations ( the cleanup_datasets.py script handles everything ).
show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
item_types = { 'library': trans.app.model.Library,
'folder': trans.app.model.LibraryFolder,
diff -r 34babf71a09f -r c73f093219aa lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Tue Mar 09 16:28:04 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Wed Mar 10 11:24:49 2010 -0500
@@ -1246,8 +1246,7 @@
messagetype=messagetype )
@web.expose
def act_on_multiple_datasets( self, trans, cntrller, library_id, ldda_ids='', **kwd ):
- # This method is used by the select list labeled "Perform action on selected datasets"
- # on the analysis library browser
+ # Perform an action on a list of library datasets.
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
@@ -1262,7 +1261,7 @@
messagetype = 'error'
else:
ldda_ids = util.listify( ldda_ids )
- if action == 'add':
+ if action == 'import_to_history':
history = trans.get_history()
if history is None:
# Must be a bot sending a request without having a history.
@@ -1306,9 +1305,12 @@
elif action == 'delete':
for ldda_id in ldda_ids:
ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( ldda_id ) )
- ldda.deleted = True
- trans.sa_session.add( ldda )
- trans.sa_session.flush()
+ # Do not delete the association, just delete the library_dataset. The
+ # cleanup_datasets.py script handles everything else.
+ ld = ldda.library_dataset
+ ld.deleted = True
+ trans.sa_session.add( ld )
+ trans.sa_session.flush()
msg = "The selected datasets have been removed from this data library"
else:
error = False
diff -r 34babf71a09f -r c73f093219aa templates/library/common/browse_library.mako
--- a/templates/library/common/browse_library.mako Tue Mar 09 16:28:04 2010 -0500
+++ b/templates/library/common/browse_library.mako Wed Mar 10 11:24:49 2010 -0500
@@ -229,7 +229,7 @@
<a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), replace_id=trans.security.encode_id( library_dataset.id ), show_deleted=show_deleted )}">Upload a new version of this dataset</a>
%endif
%if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ldda.has_data:
- <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='add', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='import_to_history', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
<a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels )}">Download this dataset</a>
%endif
%if cntrller in [ 'library_admin', 'requests_admin' ]:
diff -r 34babf71a09f -r c73f093219aa templates/library/common/ldda_info.mako
--- a/templates/library/common/ldda_info.mako Tue Mar 09 16:28:04 2010 -0500
+++ b/templates/library/common/ldda_info.mako Wed Mar 10 11:24:49 2010 -0500
@@ -57,7 +57,7 @@
<a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), replace_id=trans.security.encode_id( ldda.library_dataset.id ) )}">Upload a new version of this dataset</a>
%endif
%if cntrller=='library' and ldda.has_data:
- <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='add', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='import_to_history', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a>
<a class="action-button" href="${h.url_for( controller='library', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Download this dataset</a>
%endif
</div>
diff -r 34babf71a09f -r c73f093219aa test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Mar 09 16:28:04 2010 -0500
+++ b/test/base/twilltestcase.py Wed Mar 10 11:24:49 2010 -0500
@@ -1595,14 +1595,17 @@
self.home()
def add_library_dataset( self, cntrller, filename, library_id, folder_id, folder_name,
file_type='auto', dbkey='hg18', roles=[], message='', root=False,
- template_field_name1='', template_field_contents1='' ):
+ template_field_name1='', template_field_contents1='', show_deleted='False',
+ upload_option='upload_file' ):
"""Add a dataset to a folder"""
filename = self.get_filename( filename )
self.home()
- self.visit_url( "%s/library_common/upload_library_dataset?cntrller=%s&upload_option=upload_file&library_id=%s&folder_id=%s&message=%s" % \
- ( self.url, cntrller, library_id, folder_id, message ) )
+ self.visit_url( "%s/library_common/upload_library_dataset?cntrller=%s&library_id=%s&folder_id=%s&upload_option=%s&message=%s" % \
+ ( self.url, cntrller, library_id, folder_id, upload_option, message ) )
self.check_page_for_string( 'Upload files' )
+ tc.fv( "1", "library_id", library_id )
tc.fv( "1", "folder_id", folder_id )
+ tc.fv( "1", "show_deleted", show_deleted )
tc.formfile( "1", "files_0|file_data", filename )
tc.fv( "1", "file_type", file_type )
tc.fv( "1", "dbkey", dbkey )
diff -r 34babf71a09f -r c73f093219aa test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Tue Mar 09 16:28:04 2010 -0500
+++ b/test/functional/test_security_and_libraries.py Wed Mar 10 11:24:49 2010 -0500
@@ -1181,7 +1181,7 @@
# Test importing the restricted dataset into a history, can't use the
# ~/library_admin/libraries form as twill barfs on it so we'll simulate the form submission
# by going directly to the form action
- self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=add&ldda_ids=%s&library_id=%s' \
+ self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=import_to_history&ldda_ids=%s&library_id=%s' \
% ( self.url, self.security.encode_id( ldda_five.id ), self.security.encode_id( library_one.id ) ) )
self.check_page_for_string( '1 dataset(s) have been imported into your history' )
self.logout()
@@ -1473,7 +1473,7 @@
for ldda in lddas:
# Import each library dataset into our history
self.home()
- self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=add&ldda_ids=%s&library_id=%s' % \
+ self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&do_action=import_to_history&ldda_ids=%s&library_id=%s' % \
( self.url, self.security.encode_id( ldda.id ), self.security.encode_id( library_one.id ) ) )
# Determine the new HistoryDatasetAssociation id created when the library dataset was imported into our history
last_hda_created = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
@@ -1522,7 +1522,7 @@
# be all of the above on any of the 3 datasets that are imported into a history
for ldda in lddas:
self.home()
- self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&library_id=%s&do_action=add&ldda_ids=%s' % \
+ self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=library&library_id=%s&do_action=import_to_history&ldda_ids=%s' % \
( self.url, self.security.encode_id( library_one.id ), self.security.encode_id( ldda.id ) ) )
# Determine the new HistoryDatasetAssociation id created when the library dataset was imported into our history
last_hda_created = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/34babf71a09f
changeset: 3502:34babf71a09f
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Mar 09 16:28:04 2010 -0500
description:
Keep the state of displaying / hiding deleted library items when uploading library datasets.
diffstat:
lib/galaxy/web/controllers/library_common.py | 7 +++++++
lib/galaxy/web/controllers/tool_runner.py | 1 -
templates/base_panels.mako | 5 +++--
templates/library/common/browse_library.mako | 2 +-
templates/library/common/common.mako | 3 ++-
templates/library/common/upload.mako | 14 +++++++-------
6 files changed, 20 insertions(+), 12 deletions(-)
diffs (161 lines):
diff -r e4592fc99acc -r 34babf71a09f lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Tue Mar 09 15:25:23 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Tue Mar 09 16:28:04 2010 -0500
@@ -659,6 +659,7 @@
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
deleted = util.string_as_bool( params.get( 'deleted', False ) )
+ show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
dbkey = params.get( 'dbkey', '?' )
if isinstance( dbkey, list ):
last_used_build = dbkey[0]
@@ -705,6 +706,7 @@
folder_id=folder_id,
replace_id=replace_id,
upload_option=upload_option,
+ show_deleted=show_deleted,
msg=util.sanitize_text( msg ),
messagetype='error' ) )
@@ -758,6 +760,7 @@
id=library_id,
default_action=default_action,
created_ldda_ids=created_ldda_ids,
+ show_deleted=show_deleted,
msg=util.sanitize_text( msg ),
messagetype='done' ) )
@@ -770,6 +773,7 @@
cntrller=cntrller,
id=library_id,
created_ldda_ids=created_ldda_ids,
+ show_deleted=show_deleted,
msg=util.sanitize_text( msg ),
messagetype=messagetype ) )
# See if we have any inherited templates, but do not inherit contents.
@@ -819,6 +823,7 @@
roles=roles,
history=history,
widgets=widgets,
+ show_deleted=show_deleted,
msg=msg,
messagetype=messagetype )
def upload_dataset( self, trans, cntrller, library_id, folder_id, replace_dataset=None, **kwd ):
@@ -834,6 +839,7 @@
dataset_upload_inputs.append( input )
# Library-specific params
params = util.Params( kwd ) # is this filetoolparam safe?
+ show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
library_bunch = upload_common.handle_library_params( trans, params, folder_id, replace_dataset )
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
@@ -883,6 +889,7 @@
folder_id=folder_id,
replace_id=replace_id,
upload_option=upload_option,
+ show_deleted=show_deleted,
msg=util.sanitize_text( msg ),
messagetype='error' ) )
json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
diff -r e4592fc99acc -r 34babf71a09f lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Tue Mar 09 15:25:23 2010 -0500
+++ b/lib/galaxy/web/controllers/tool_runner.py Tue Mar 09 16:28:04 2010 -0500
@@ -155,7 +155,6 @@
permissions, in_roles, error, msg = trans.app.security_agent.derive_roles_from_access( trans, library_id, cntrller, library=True, **vars )
if error:
return [ 'error', msg ]
- permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
def create_dataset( name ):
ud = Bunch( name=name, file_type=None, dbkey=None )
if nonfile_params.get( 'folder_id', False ):
diff -r e4592fc99acc -r 34babf71a09f templates/base_panels.mako
--- a/templates/base_panels.mako Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/base_panels.mako Tue Mar 09 16:28:04 2010 -0500
@@ -131,10 +131,11 @@
$(this).ajaxSubmit( { iframe: true } );
if ( $(this).find("input[name='folder_id']").val() != undefined ) {
var library_id = $(this).find("input[name='library_id']").val();
+ var show_deleted = $(this).find("input[name='show_deleted']").val();
if ( location.pathname.indexOf( 'admin' ) != -1 ) {
- $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library_admin&id=" + library_id + "&created_ldda_ids=" + async_datasets);
+ $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library_admin&id=" + library_id + "&created_ldda_ids=" + async_datasets + "&show_deleted=" + show_deleted);
} else {
- $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library&id=" + library_id + "&created_ldda_ids=" + async_datasets);
+ $("iframe#galaxy_main").attr("src","${h.url_for( controller='library_common', action='browse_library' )}?cntrller=library&id=" + library_id + "&created_ldda_ids=" + async_datasets + "&show_deleted=" + show_deleted);
}
} else {
$("iframe#galaxy_main").attr("src","${h.url_for(controller='tool_runner', action='upload_async_message')}");
diff -r e4592fc99acc -r 34babf71a09f templates/library/common/browse_library.mako
--- a/templates/library/common/browse_library.mako Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/library/common/browse_library.mako Tue Mar 09 16:28:04 2010 -0500
@@ -404,7 +404,7 @@
<ul class="manage-table-actions">
%if not library.deleted and ( cntrller in [ 'library_admin', 'requests_admin' ] or can_add ):
- <li><a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ) )}"><span>Add datasets</span></a></li>
+ <li><a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ), use_panels=use_panels, show_deleted=show_deleted )}"><span>Add datasets</span></a></li>
<li><a class="action-button" href="${h.url_for( controller='library_common', action='create_folder', cntrller=cntrller, parent_id=trans.security.encode_id( library.root_folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add folder</a></li>
%endif
</ul>
diff -r e4592fc99acc -r 34babf71a09f templates/library/common/common.mako
--- a/templates/library/common/common.mako Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/library/common/common.mako Tue Mar 09 16:28:04 2010 -0500
@@ -87,7 +87,7 @@
%endif
</%def>
-<%def name="render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history )">
+<%def name="render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history, show_deleted )">
<% import os, os.path %>
%if upload_option in [ 'upload_file', 'upload_directory', 'upload_paths' ]:
<div class="toolForm" id="upload_library_dataset">
@@ -106,6 +106,7 @@
<input type="hidden" name="library_id" value="${library_id}"/>
<input type="hidden" name="folder_id" value="${folder_id}"/>
<input type="hidden" name="upload_option" value="${upload_option}"/>
+ <input type="hidden" name="show_deleted" value="${show_deleted}"/>
%if replace_dataset not in [ None, 'None' ]:
<input type="hidden" name="replace_id" value="${trans.security.encode_id( replace_dataset.id )}"/>
<div class="form-row">
diff -r e4592fc99acc -r 34babf71a09f templates/library/common/upload.mako
--- a/templates/library/common/upload.mako Tue Mar 09 15:25:23 2010 -0500
+++ b/templates/library/common/upload.mako Tue Mar 09 16:28:04 2010 -0500
@@ -26,26 +26,26 @@
## Don't allow multiple datasets to be uploaded when replacing a dataset with a new version
<a id="upload-librarydataset--popup" class="popup-arrow" style="display: none;">▼</a>
<div popupmenu="upload-librarydataset--popup">
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller,library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_file' )}">Upload files</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller,library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_file', show_deleted=show_deleted )}">Upload files</a>
%if cntrller == 'library_admin':
%if trans.app.config.library_import_dir and os.path.exists( trans.app.config.library_import_dir ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory' )}">Upload directory of files</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory', show_deleted=show_deleted )}">Upload directory of files</a>
%endif
%if trans.app.config.allow_library_path_paste:
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_paths' )}">Upload files from filesystem paths</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_paths', show_deleted=show_deleted )}">Upload files from filesystem paths</a>
%endif
%elif cntrller == 'library':
%if trans.app.config.user_library_import_dir and os.path.exists( os.path.join( trans.app.config.user_library_import_dir, trans.user.email ) ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory' )}">Upload directory of files</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory', show_deleted=show_deleted )}">Upload directory of files</a>
%endif
%endif
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='import_from_history' )}">Import datasets from your current history</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='import_from_history', show_deleted=show_deleted )}">Import datasets from your current history</a>
</div>
%endif
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id )}"><span>Browse this data library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
</li>
</ul>
@@ -53,4 +53,4 @@
${render_msg( msg, messagetype )}
%endif
-${render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history )}
+${render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, widgets, roles, history, show_deleted )}
11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/e4592fc99acc
changeset: 3501:e4592fc99acc
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Tue Mar 09 15:25:23 2010 -0500
description:
Add a helper script that will re.escape files; useful for creating files valid for re_match and re_match_multiline comparisons in tool tests.
diffstat:
scripts/tools/re_escape_output.py | 34 ++++++++++++++++++++++++++++++++++
1 files changed, 34 insertions(+), 0 deletions(-)
diffs (38 lines):
diff -r c67b5628f348 -r e4592fc99acc scripts/tools/re_escape_output.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/tools/re_escape_output.py Tue Mar 09 15:25:23 2010 -0500
@@ -0,0 +1,34 @@
+#! /usr/bin/python
+
+"""
+Escapes a file into a form suitable for use with tool tests using re_match or re_match_multiline (when -m/--multiline option is used)
+
+usage: re_escape_output.py [options] input_file [output_file]
+ -m: Use Multiline Matching
+"""
+
+import optparse, re
+
+def __main__():
+ #Parse Command Line
+ parser = optparse.OptionParser()
+ parser.add_option( "-m", "--multiline", action="store_true", dest="multiline", default=False, help="Use Multiline Matching")
+ ( options, args ) = parser.parse_args()
+ input = open( args[0] ,'rb' )
+ if len( args ) > 1:
+ output = open( args[1], 'wb' )
+ else:
+ if options.multiline:
+ suffix = 're_match_multiline'
+ else:
+ suffix = 're_match'
+ output = open( "%s.%s" % ( args[0], suffix ), 'wb' )
+ if options.multiline:
+ lines = [ re.escape( input.read() ) ]
+ else:
+ lines = [ "%s\n" % re.escape( line.rstrip( '\n\r' ) ) for line in input ]
+ output.writelines( lines )
+ output.close()
+
+if __name__ == "__main__":
+ __main__()
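As the docstring above states, the helper is run as re_escape_output.py [options] input_file [output_file], with -m switching to multiline escaping; when no output file is given it writes input_file.re_match or input_file.re_match_multiline next to the input. The core of the script is just re.escape, so an equivalent one-off for the multiline case looks like the sketch below (the file name is illustrative).

import re

expected = open( 'expected_output.txt', 'rb' ).read()
escaped = re.escape( expected )
open( 'expected_output.txt.re_match_multiline', 'wb' ).write( escaped )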
11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/c67b5628f348
changeset: 3500:c67b5628f348
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Mar 09 14:51:01 2010 -0500
description:
Add server_side_cursors to database_engine_option_ parsing
diffstat:
lib/galaxy/config.py | 3 ++-
1 files changed, 2 insertions(+), 1 deletions(-)
diffs (13 lines):
diff -r 478447ba0ec6 -r c67b5628f348 lib/galaxy/config.py
--- a/lib/galaxy/config.py Tue Mar 09 14:29:17 2010 -0500
+++ b/lib/galaxy/config.py Tue Mar 09 14:51:01 2010 -0500
@@ -172,7 +172,8 @@
'pool_recycle': int,
'pool_size': int,
'max_overflow': int,
- 'pool_threadlocal': string_as_bool
+ 'pool_threadlocal': string_as_bool,
+ 'server_side_cursors': string_as_bool
}
prefix = "database_engine_option_"
prefix_len = len( prefix )
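For context, keys with the database_engine_option_ prefix come from Galaxy's ini configuration (so this change presumably lets an administrator set something like database_engine_option_server_side_cursors = True), and the table above maps each known key to a coercion function. A stripped-down sketch of that parsing with only two keys is below; it is not the actual config.py code, and string_as_bool here is a simplified stand-in for the Galaxy utility of the same name.

def string_as_bool( value ):
    # simplified stand-in for galaxy.util.string_as_bool
    return str( value ).lower() in ( 'true', 'yes', 'on', '1' )

conversions = { 'pool_size': int, 'server_side_cursors': string_as_bool }
prefix = 'database_engine_option_'

def get_database_engine_options( kwargs ):
    # collect prefixed keys, coercing known ones and passing the rest through as strings
    options = {}
    for key, value in kwargs.items():
        if key.startswith( prefix ):
            key = key[ len( prefix ): ]
            if key in conversions:
                value = conversions[ key ]( value )
            options[ key ] = value
    return options

print get_database_engine_options( { 'database_engine_option_server_side_cursors': 'True',
                                     'database_engine_option_pool_size': '5' } )
# e.g. {'server_side_cursors': True, 'pool_size': 5}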
11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/821b6cbbee1c
changeset: 3498:821b6cbbee1c
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Tue Mar 09 14:28:33 2010 -0500
description:
Allow better testing of tool outputs with an unknown number of dynamically created outputs.
diffstat:
test/functional/test_toolbox.py | 11 ++++++++++-
1 files changed, 10 insertions(+), 1 deletions(-)
diffs (28 lines):
diff -r a719c6971613 -r 821b6cbbee1c test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py Tue Mar 09 14:13:05 2010 -0500
+++ b/test/functional/test_toolbox.py Tue Mar 09 14:28:33 2010 -0500
@@ -53,6 +53,11 @@
if isinstance( input_value, grouping.Repeat ):
repeat_name = input_name
break
+ #check if we need to verify number of outputs created dynamically by tool
+ if testdef.tool.force_history_refresh:
+ job_finish_by_output_count = len( self.get_history_as_data_list() )
+ else:
+ job_finish_by_output_count = False
# Do the first page
page_inputs = self.__expand_grouping(testdef.tool.inputs_by_page[0], all_inputs)
# Run the tool
@@ -65,7 +70,11 @@
print "page_inputs (%i)" % i, page_inputs
# Check the results ( handles single or multiple tool outputs ). Make sure to pass the correct hid.
# The output datasets from the tool should be in the same order as the testdef.outputs.
- data_list = self.get_history_as_data_list()
+ data_list = None
+ while data_list is None:
+ data_list = self.get_history_as_data_list()
+ if job_finish_by_output_count and len( testdef.outputs ) > ( len( data_list ) - job_finish_by_output_count ):
+ data_list = None
self.assertTrue( data_list )
elem_index = 0 - len( testdef.outputs )
for output_tuple in testdef.outputs:
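The idea in the hunk above is that when a tool sets force_history_refresh, the test cannot know up front how many datasets will appear, so it records the history size before the run and keeps re-reading the history until at least as many new items exist as the test declares outputs. A generic sketch of that wait loop follows; the callable and the poll interval are illustrative (the changeset itself simply re-requests the history without sleeping).

import time

def wait_for_new_outputs( get_history_items, baseline_count, expected_output_count, poll_seconds=1 ):
    # keep refreshing until the expected number of new datasets has shown up
    while True:
        items = get_history_items()
        if len( items ) - baseline_count >= expected_output_count:
            return items
        time.sleep( poll_seconds )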
11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/478447ba0ec6
changeset: 3499:478447ba0ec6
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Tue Mar 09 14:29:17 2010 -0500
description:
Update tool tests for MAF to interval tool
diffstat:
tools/maf/maf_to_interval.xml | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r 821b6cbbee1c -r 478447ba0ec6 tools/maf/maf_to_interval.xml
--- a/tools/maf/maf_to_interval.xml Tue Mar 09 14:28:33 2010 -0500
+++ b/tools/maf/maf_to_interval.xml Tue Mar 09 14:29:17 2010 -0500
@@ -27,7 +27,7 @@
<param name="complete_blocks" value="partial_disallowed"/>
<param name="remove_gaps" value="keep_gaps"/>
<param name="species" value="panTro1" />
- <!-- <output name="out_file1" file="maf_to_interval_out_hg17.interval"/> cannot test primary species, because we cannot leave species blank and we can only test the last item added to a history-->
+ <output name="out_file1" file="maf_to_interval_out_hg17.interval"/>
<output name="out_file1" file="maf_to_interval_out_panTro1.interval"/>
</test>
</tests>
11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/a719c6971613
changeset: 3497:a719c6971613
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Mar 09 14:13:05 2010 -0500
description:
Update psycopg2 to 2.0.13/postgresql 8.4.2
diffstat:
dist-eggs.ini | 2 +-
eggs.ini | 6 +-
scripts/scramble/scripts/psycopg2-linux.py | 1 +
scripts/scramble/scripts/psycopg2-macosx.py | 85 ++++++++++++++++++++++++++++
scripts/scramble/scripts/psycopg2-solaris.py | 1 +
scripts/scramble/scripts/psycopg2.py | 1 +
6 files changed, 92 insertions(+), 4 deletions(-)
diffs (162 lines):
diff -r 5f7ace3195b7 -r a719c6971613 dist-eggs.ini
--- a/dist-eggs.ini Tue Mar 09 13:54:30 2010 -0500
+++ b/dist-eggs.ini Tue Mar 09 14:13:05 2010 -0500
@@ -22,7 +22,7 @@
py2.4-macosx-10.3-fat-ucs2 = medeski.bx.psu.edu /usr/local/bin/python2.4
py2.5-macosx-10.3-fat-ucs2 = medeski.bx.psu.edu /usr/local/bin/python2.5
py2.6-macosx-10.3-fat-ucs2 = medeski.bx.psu.edu /usr/local/bin/python2.6
-py2.6-macosx-10.6-universal-ucs2 = lion.bx.psu.edu /usr/bin/python2.6
+py2.6-macosx-10.6-universal-ucs2 = bach.bx.psu.edu /usr/bin/python2.6
py2.4-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.4
py2.5-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.5
py2.6-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.6
diff -r 5f7ace3195b7 -r a719c6971613 eggs.ini
--- a/eggs.ini Tue Mar 09 13:54:30 2010 -0500
+++ b/eggs.ini Tue Mar 09 14:13:05 2010 -0500
@@ -17,7 +17,7 @@
DRMAA_python = 0.2
MySQL_python = 1.2.3c1
pbs_python = 2.9.4
-psycopg2 = 2.0.6
+psycopg2 = 2.0.13
pycrypto = 2.0.1
pysam = 0.1.1
pysqlite = 2.5.6
@@ -56,7 +56,7 @@
; extra version information
[tags]
-psycopg2 = _8.2.6_static
+psycopg2 = _8.4.2_static
pysqlite = _3.6.17_static
MySQL_python = _5.1.41_static
bx_python = _dev_3b9d30e47619
@@ -68,5 +68,5 @@
; the wiki page above
[source]
MySQL_python = mysql-5.1.41
-psycopg2 = postgresql-8.2.6
+psycopg2 = postgresql-8.4.2
pysqlite = sqlite-amalgamation-3_6_17
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2-linux.py
--- a/scripts/scramble/scripts/psycopg2-linux.py Tue Mar 09 13:54:30 2010 -0500
+++ b/scripts/scramble/scripts/psycopg2-linux.py Tue Mar 09 14:13:05 2010 -0500
@@ -20,6 +20,7 @@
"Configuring postgres (./configure)" )
# compile
+ run( "make ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; make ../../src/include/utils/fmgroids.h)" )
run( "make", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make)" )
run( "make", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make)" )
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2-macosx.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/scramble/scripts/psycopg2-macosx.py Tue Mar 09 14:13:05 2010 -0500
@@ -0,0 +1,85 @@
+import os, sys, shutil
+from distutils.sysconfig import get_config_var
+
+def prep_postgres( prepped, args ):
+
+ pg_version = args['version']
+ pg_srcdir = os.path.join( os.getcwd(), "postgresql-%s" % pg_version )
+
+ # set up environment
+ os.environ['CC'] = get_config_var('CC')
+ os.environ['CFLAGS'] = get_config_var('CFLAGS')
+ os.environ['LDFLAGS'] = get_config_var('LDFLAGS')
+
+ if '-fPIC' not in os.environ['CFLAGS']:
+ os.environ['CFLAGS'] += ' -fPIC'
+
+ # run configure
+ run( "./configure --prefix=%s/postgres --disable-dependency-tracking --enable-static --disable-shared --without-readline --with-thread-safety" % os.getcwd(),
+ os.path.join( os.getcwd(), "postgresql-%s" % pg_version ),
+ "Configuring postgres (./configure)" )
+
+ # compile
+ run( "make ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; make ../../src/include/utils/fmgroids.h)" )
+ run( "make all-static-lib", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make)" )
+ run( "make", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make)" )
+
+ # install
+ run( "make install-lib-static", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make install)" )
+ run( "make install", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make install)" )
+ run( "make install", os.path.join( pg_srcdir, 'src', 'include' ), "Compiling pg_config (cd src/include; make install)" )
+
+ # manually install some headers
+ run( "cp libpq-fe.h %s" % os.path.join( os.getcwd(), 'postgres', 'include' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+ run( "cp libpq-events.h %s" % os.path.join( os.getcwd(), 'postgres', 'include' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+ run( "cp libpq-int.h %s" % os.path.join( os.getcwd(), 'postgres', 'include', 'internal' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+ run( "cp pqexpbuffer.h %s" % os.path.join( os.getcwd(), 'postgres', 'include', 'internal' ), os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Installing libpq-fe.h" )
+
+ # create prepped archive
+ print "%s(): Creating prepped archive for future builds at:" % sys._getframe().f_code.co_name
+ print " ", prepped
+ compress( prepped,
+ 'postgres/bin',
+ 'postgres/include',
+ 'postgres/lib' )
+
+if __name__ == '__main__':
+
+ # change back to the build dir
+ if os.path.dirname( sys.argv[0] ) != "":
+ os.chdir( os.path.dirname( sys.argv[0] ) )
+
+ # find setuptools
+ sys.path.append( os.path.abspath( os.path.join( '..', '..', '..', 'lib' ) ) )
+ from scramble_lib import *
+
+ tag = get_tag()
+
+ pg_version = ( tag.split( "_" ) )[1]
+ pg_archive_base = os.path.join( archives, "postgresql-%s" % pg_version )
+ pg_archive = get_archive( pg_archive_base )
+ pg_archive_prepped = os.path.join( archives, "postgresql-%s-%s.tar.gz" % ( pg_version, platform_noucs ) )
+
+ # clean up any existing stuff (could happen if you run scramble.py by hand)
+ clean( [ 'postgresql-%s' % pg_version ] )
+
+ # unpack postgres
+ unpack_dep( pg_archive, pg_archive_prepped, prep_postgres, dict( version=pg_version ) )
+
+ # localize setup.cfg
+ if not os.path.exists( 'setup.cfg.orig' ):
+ shutil.copy( 'setup.cfg', 'setup.cfg.orig' )
+ f = open( 'setup.cfg', 'a' )
+ f.write( '\npg_config=postgres/bin/pg_config\n' )
+ f.close()
+
+ # tag
+ me = sys.argv[0]
+ sys.argv = [ me ]
+ if tag is not None:
+ sys.argv.append( "egg_info" )
+ sys.argv.append( "--tag-build=%s" %tag )
+ sys.argv.append( "bdist_egg" )
+
+ # go
+ execfile( "setup.py", globals(), locals() )
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2-solaris.py
--- a/scripts/scramble/scripts/psycopg2-solaris.py Tue Mar 09 13:54:30 2010 -0500
+++ b/scripts/scramble/scripts/psycopg2-solaris.py Tue Mar 09 14:13:05 2010 -0500
@@ -23,6 +23,7 @@
"Configuring postgres (./configure)" )
# compile
+ run( "gmake ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; gmake ../../src/include/utils/fmgroids.h)" )
run( "gmake", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; gmake)" )
run( "gmake", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; gmake)" )
diff -r 5f7ace3195b7 -r a719c6971613 scripts/scramble/scripts/psycopg2.py
--- a/scripts/scramble/scripts/psycopg2.py Tue Mar 09 13:54:30 2010 -0500
+++ b/scripts/scramble/scripts/psycopg2.py Tue Mar 09 14:13:05 2010 -0500
@@ -20,6 +20,7 @@
"Configuring postgres (./configure)" )
# compile
+ run( "make ../../src/include/utils/fmgroids.h", os.path.join( pg_srcdir, 'src', 'backend' ), "Compiling fmgroids.h (cd src/backend; make ../../src/include/utils/fmgroids.h)" )
run( "make", os.path.join( pg_srcdir, 'src', 'interfaces', 'libpq' ), "Compiling libpq (cd src/interfaces/libpq; make)" )
run( "make", os.path.join( pg_srcdir, 'src', 'bin', 'pg_config' ), "Compiling pg_config (cd src/bin/pg_config; make)" )
11 Mar '10
details: http://www.bx.psu.edu/hg/galaxy/rev/5f7ace3195b7
changeset: 3496:5f7ace3195b7
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Tue Mar 09 13:54:30 2010 -0500
description:
Allow uploading and use of composite files in toolbox tests. In addition to supplying each component of a composite datatype, metadata attributes (e.g. base_name in the Rgenetics datatypes) can be set.
Example Tool:
<tool id='temp' name='temp test'>
    <description>test</description>
    <command>cat ${i.extra_files_path}/${i.metadata.base_name}.fam > $out_file1</command>
    <inputs>
        <param name="i" type="data" label="RGenetics genotype data from your current history" format="pbed" />
    </inputs>
    <outputs>
        <data format="text" name="out_file1" />
    </outputs>
    <tests>
        <test>
            <param name='i' ftype='pbed'>
                <metadata name='base_name' value='rgenetics_CHANGED' />
                <composite_data value='temp/somefile1' />
                <composite_data value='temp/somefile2' />
                <composite_data value='temp/somefile3' />
            </param>
            <output name='out_file1' file='temp/somefile3' />
        </test>
    </tests>
</tool>
diffstat:
lib/galaxy/tools/__init__.py | 1 +
lib/galaxy/tools/test.py | 4 +++-
test/base/twilltestcase.py | 16 ++++++++++++----
test/functional/test_toolbox.py | 5 ++++-
4 files changed, 20 insertions(+), 6 deletions(-)
diffs (73 lines):
diff -r 3b1be99d1f62 -r 5f7ace3195b7 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Tue Mar 09 11:28:21 2010 -0500
+++ b/lib/galaxy/tools/__init__.py Tue Mar 09 13:54:30 2010 -0500
@@ -523,6 +523,7 @@
value = attrib['value']
else:
value = None
+ attrib['children'] = list( param_elem.getchildren() )
test.add_param( attrib.pop( 'name' ), value, attrib )
for output_elem in test_elem.findall( "output" ):
attrib = dict( output_elem.attrib )
diff -r 3b1be99d1f62 -r 5f7ace3195b7 lib/galaxy/tools/test.py
--- a/lib/galaxy/tools/test.py Tue Mar 09 11:28:21 2010 -0500
+++ b/lib/galaxy/tools/test.py Tue Mar 09 13:54:30 2010 -0500
@@ -30,10 +30,12 @@
if isinstance( input_value, grouping.Conditional ) or isinstance( input_value, grouping.Repeat ):
self.__expand_grouping_for_data_input(name, value, extra, input_name, input_value)
elif isinstance( self.tool.inputs[name], parameters.DataToolParameter ) and ( value, extra ) not in self.required_files:
- if value is None:
+ if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) == 0:
assert self.tool.inputs[name].optional, '%s is not optional. You must provide a valid filename.' % name
else:
self.required_files.append( ( value, extra ) )
+ if value is None and len( [ child for child in extra.get( 'children', [] ) if child.tag == 'composite_data' ] ) > 0:
+ value = extra.get( 'ftype' )
except Exception, e:
log.debug( "Error in add_param for %s: %s" % ( name, e ) )
self.inputs.append( ( name, value, extra ) )
diff -r 3b1be99d1f62 -r 5f7ace3195b7 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Mar 09 11:28:21 2010 -0500
+++ b/test/base/twilltestcase.py Tue Mar 09 13:54:30 2010 -0500
@@ -142,14 +142,22 @@
filename = os.path.join( *path )
file(filename, 'wt').write(buffer.getvalue())
- def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)' ):
+ def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)', metadata = None, composite_data = None ):
"""Uploads a file"""
- filename = self.get_filename(filename)
self.visit_url( "%s/tool_runner?tool_id=upload1" % self.url )
try:
- tc.fv("1","file_type", ftype)
+ self.refresh_form( "file_type", ftype ) #Refresh, to support composite files
tc.fv("1","dbkey", dbkey)
- tc.formfile("1","file_data", filename)
+ if metadata:
+ for elem in metadata:
+ tc.fv( "1", "files_metadata|%s" % elem.get( 'name' ), elem.get( 'value' ) )
+ if composite_data:
+ for i, composite_file in enumerate( composite_data ):
+ filename = self.get_filename( composite_file.get( 'value' ) )
+ tc.formfile( "1", "files_%i|file_data" % i, filename )
+ else:
+ filename = self.get_filename( filename )
+ tc.formfile( "1", "file_data", filename )
tc.submit("runtool_btn")
self.home()
except AssertionError, err:
diff -r 3b1be99d1f62 -r 5f7ace3195b7 test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py Tue Mar 09 11:28:21 2010 -0500
+++ b/test/functional/test_toolbox.py Tue Mar 09 13:54:30 2010 -0500
@@ -32,7 +32,10 @@
raise AssertionError("ToolTestCase.do_it failed")
# Upload any needed files
for fname, extra in testdef.required_files:
- self.upload_file( fname, ftype=extra.get( 'ftype', 'auto' ), dbkey=extra.get( 'dbkey', 'hg17' ) )
+ children = extra.get( 'children', [] )
+ metadata = [ child for child in children if child.tag == 'metadata' ]
+ composite_data = [ child for child in children if child.tag == 'composite_data' ]
+ self.upload_file( fname, ftype=extra.get( 'ftype', 'auto' ), dbkey=extra.get( 'dbkey', 'hg17' ), metadata = metadata, composite_data = composite_data )
print "Uploaded file: ", fname, ", ftype: ", extra.get( 'ftype', 'auto' ), ", extra: ", extra
# We need to handle the case where we've uploaded a valid compressed file since the upload
# tool will have uncompressed it on the fly.
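A minimal sketch (not Galaxy code, just a simplified illustration of the test.py/twilltestcase.py/test_toolbox.py changes above): the <metadata> and <composite_data> children of a test's <param> are split by tag, and upload_file then fills one files_%i|file_data field per composite component and one files_metadata|<name> field per metadata element, instead of the single file_data field used for ordinary uploads:

import xml.etree.ElementTree as ElementTree

# The <param> from the example tool's test above.
param = ElementTree.fromstring("""
<param name='i' ftype='pbed'>
  <metadata name='base_name' value='rgenetics_CHANGED' />
  <composite_data value='temp/somefile1' />
  <composite_data value='temp/somefile2' />
  <composite_data value='temp/somefile3' />
</param>
""")

children = list(param)
metadata = [child for child in children if child.tag == 'metadata']
composite_data = [child for child in children if child.tag == 'composite_data']

# Upload-form fields the functional test framework would fill for this param:
for i, composite_file in enumerate(composite_data):
    print "files_%i|file_data" % i, "->", composite_file.get('value')
for elem in metadata:
    print "files_metadata|%s" % elem.get('name'), "->", elem.get('value')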