galaxy-commits
April 2011: 1 participant, 112 discussions
2 new changesets in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/8e7c00f4352f/
changeset: r5437:8e7c00f4352f
user: peterjc
date: 2011-04-19 17:08:18
summary: Fix BLAST+ HTML output
affected #: 5 files (5 bytes)
--- a/tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml Tue Apr 19 14:45:55 2011 -0400
+++ b/tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml Tue Apr 19 16:08:18 2011 +0100
@@ -1,4 +1,4 @@
-<tool id="ncbi_blastn_wrapper" name="NCBI BLAST+ blastn" version="0.0.9">
+<tool id="ncbi_blastn_wrapper" name="NCBI BLAST+ blastn" version="0.0.10"><description>Search nucleotide database with nucleotide query sequence(s)</description><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
@@ -17,7 +17,7 @@
#if str($out_format)=="ext":
-outfmt "6 std sallseqid score nident positive gaps ppos qframe sframe qseq sseq qlen slen"
#else:
- -outfmt "$out_format"
+ -outfmt $out_format
#end if
-num_threads 8
#if $adv_opts.adv_opts_selector=="advanced":
--- a/tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml Tue Apr 19 14:45:55 2011 -0400
+++ b/tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml Tue Apr 19 16:08:18 2011 +0100
@@ -1,4 +1,4 @@
-<tool id="ncbi_blastp_wrapper" name="NCBI BLAST+ blastp" version="0.0.9">
+<tool id="ncbi_blastp_wrapper" name="NCBI BLAST+ blastp" version="0.0.10"><description>Search protein database with protein query sequence(s)</description><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
@@ -17,7 +17,7 @@
#if str($out_format)=="ext":
-outfmt "6 std sallseqid score nident positive gaps ppos qframe sframe qseq sseq qlen slen"
#else:
- -outfmt "$out_format"
+ -outfmt $out_format
#end if
-num_threads 8
#if $adv_opts.adv_opts_selector=="advanced":
--- a/tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml Tue Apr 19 14:45:55 2011 -0400
+++ b/tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml Tue Apr 19 16:08:18 2011 +0100
@@ -1,4 +1,4 @@
-<tool id="ncbi_blastx_wrapper" name="NCBI BLAST+ blastx" version="0.0.9">
+<tool id="ncbi_blastx_wrapper" name="NCBI BLAST+ blastx" version="0.0.10"><description>Search protein database with translated nucleotide query sequence(s)</description><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
@@ -16,7 +16,7 @@
#if str($out_format)=="ext":
-outfmt "6 std sallseqid score nident positive gaps ppos qframe sframe qseq sseq qlen slen"
#else:
- -outfmt "$out_format"
+ -outfmt $out_format
#end if
-num_threads 8
#if $adv_opts.adv_opts_selector=="advanced":
--- a/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml Tue Apr 19 14:45:55 2011 -0400
+++ b/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml Tue Apr 19 16:08:18 2011 +0100
@@ -1,4 +1,4 @@
-<tool id="ncbi_tblastn_wrapper" name="NCBI BLAST+ tblastn" version="0.0.9">
+<tool id="ncbi_tblastn_wrapper" name="NCBI BLAST+ tblastn" version="0.0.10"><description>Search translated nucleotide database with protein query sequence(s)</description><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
@@ -16,7 +16,7 @@
#if str($out_format)=="ext":
-outfmt "6 std sallseqid score nident positive gaps ppos qframe sframe qseq sseq qlen slen"
#else:
- -outfmt "$out_format"
+ -outfmt $out_format
#end if
-num_threads 8
#if $adv_opts.adv_opts_selector=="advanced":
--- a/tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml Tue Apr 19 14:45:55 2011 -0400
+++ b/tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml Tue Apr 19 16:08:18 2011 +0100
@@ -1,4 +1,4 @@
-<tool id="ncbi_tblastx_wrapper" name="NCBI BLAST+ tblastx" version="0.0.9">
+<tool id="ncbi_tblastx_wrapper" name="NCBI BLAST+ tblastx" version="0.0.10"><description>Search translated nucleotide database with translated nucleotide query sequence(s)</description><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
@@ -16,7 +16,7 @@
#if str($out_format)=="ext":
-outfmt "6 std sallseqid score nident positive gaps ppos qframe sframe qseq sseq qlen slen"
#else:
- -outfmt "$out_format"
+ -outfmt $out_format
#end if
-num_threads 8
#if $adv_opts.adv_opts_selector=="advanced":
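[Editorial note] The substantive change repeated across all five wrappers is dropping the quotes around $out_format. A plausible reading, given the HTML test added in the next changeset: extended values such as "0 -html" must reach the shell as two words, but the quoting made them a single malformed argument. A minimal Python sketch, using shlex to mimic the shell's word-splitting (the value is illustrative):

    import shlex

    out_format = "0 -html"  # value used by the new HTML test case

    # With the old quoting, BLAST+ saw a single argument '0 -html'
    print(shlex.split('-outfmt "%s"' % out_format))
    # ['-outfmt', '0 -html']

    # Unquoted, it becomes plain-text format 0 plus the -html flag
    print(shlex.split('-outfmt %s' % out_format))
    # ['-outfmt', '0', '-html']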
http://bitbucket.org/galaxy/galaxy-central/changeset/531f2d20469a/
changeset: r5438:531f2d20469a
user: peterjc
date: 2011-04-19 17:20:52
summary: BLAST+ HTML output test
affected #: 2 files (20.1 KB)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/tblastn_four_human_vs_rhodopsin.html Tue Apr 19 16:20:52 2011 +0100
@@ -0,0 +1,787 @@
+<HTML>
+<TITLE>BLAST Search Results</TITLE>
+<BODY BGCOLOR="#FFFFFF" LINK="#0000FF" VLINK="#660099" ALINK="#660099">
+<PRE>
+
+<b>TBLASTN 2.2.25+</b>
+
+
+<b>Query=</b> sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44
+OS=Homo sapiens GN=ERP44 PE=1 SV=1
+
+Length=406
+
+<b>Subject=</b> gi|57163782|ref|NM_001009242.1| Felis catus rhodopsin (RHO), mRNA
+
+Length=1047
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.347 0.182 0.684
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 127710
+
+
+<b>Query=</b> sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44
+OS=Homo sapiens GN=ERP44 PE=1 SV=1
+
+Length=406
+
+<b>Subject=</b> gi|2734705|gb|U59921.1|BBU59921 Bufo bufo rhodopsin mRNA, complete
+cds
+
+Length=1574
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.347 0.182 0.684
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 127710
+
+
+<b>Query=</b> sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44
+OS=Homo sapiens GN=ERP44 PE=1 SV=1
+
+Length=406
+
+<b>Subject=</b> gi|283855845|gb|GQ290303.1| Cynopterus brachyotis voucher 20020434
+rhodopsin (RHO) gene, exons 1 through 5 and partial cds
+
+Length=4301
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.347 0.182 0.684
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 127710
+
+
+<b>Query=</b> sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44
+OS=Homo sapiens GN=ERP44 PE=1 SV=1
+
+Length=406
+
+<b>Subject=</b> gi|283855822|gb|GQ290312.1| Myotis ricketti voucher GQX10 rhodopsin
+(RHO) mRNA, partial cds
+
+Length=983
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.347 0.182 0.684
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 127710
+
+
+<b>Query=</b> sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44
+OS=Homo sapiens GN=ERP44 PE=1 SV=1
+
+Length=406
+
+<b>Subject=</b> gi|18148870|dbj|AB062417.1| Synthetic construct Bos taurus gene for
+rhodopsin, complete cds
+
+Length=1047
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.347 0.182 0.684
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 127710
+
+
+<b>Query=</b> sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44
+OS=Homo sapiens GN=ERP44 PE=1 SV=1
+
+Length=406
+
+<b>Subject=</b> gi|12583664|dbj|AB043817.1| Conger myriaster conf gene for fresh
+water form rod opsin, complete cds
+
+Length=1344
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.347 0.182 0.684
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 127710
+
+
+<b>Query=</b> sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens
+GN=BMP2K PE=1 SV=2
+
+Length=1161
+
+<b>Subject=</b> gi|57163782|ref|NM_001009242.1| Felis catus rhodopsin (RHO), mRNA
+
+Length=1047
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.334 0.170 0.615
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 370988
+
+
+<b>Query=</b> sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens
+GN=BMP2K PE=1 SV=2
+
+Length=1161
+
+<b>Subject=</b> gi|2734705|gb|U59921.1|BBU59921 Bufo bufo rhodopsin mRNA, complete
+cds
+
+Length=1574
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.334 0.170 0.615
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 370988
+
+
+<b>Query=</b> sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens
+GN=BMP2K PE=1 SV=2
+
+Length=1161
+
+<b>Subject=</b> gi|283855845|gb|GQ290303.1| Cynopterus brachyotis voucher 20020434
+rhodopsin (RHO) gene, exons 1 through 5 and partial cds
+
+Length=4301
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.334 0.170 0.615
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 370988
+
+
+<b>Query=</b> sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens
+GN=BMP2K PE=1 SV=2
+
+Length=1161
+
+<b>Subject=</b> gi|283855822|gb|GQ290312.1| Myotis ricketti voucher GQX10 rhodopsin
+(RHO) mRNA, partial cds
+
+Length=983
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.334 0.170 0.615
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 370988
+
+
+<b>Query=</b> sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens
+GN=BMP2K PE=1 SV=2
+
+Length=1161
+
+<b>Subject=</b> gi|18148870|dbj|AB062417.1| Synthetic construct Bos taurus gene for
+rhodopsin, complete cds
+
+Length=1047
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.334 0.170 0.615
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 370988
+
+
+<b>Query=</b> sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens
+GN=BMP2K PE=1 SV=2
+
+Length=1161
+
+<b>Subject=</b> gi|12583664|dbj|AB043817.1| Conger myriaster conf gene for fresh
+water form rod opsin, complete cds
+
+Length=1344
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.334 0.170 0.615
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 370988
+
+
+<b>Query=</b> sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1
+SV=4
+
+Length=1382
+
+<b>Subject=</b> gi|57163782|ref|NM_001009242.1| Felis catus rhodopsin (RHO), mRNA
+
+Length=1047
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.346 0.180 0.700
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 441350
+
+
+<b>Query=</b> sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1
+SV=4
+
+Length=1382
+
+<b>Subject=</b> gi|2734705|gb|U59921.1|BBU59921 Bufo bufo rhodopsin mRNA, complete
+cds
+
+Length=1574
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.346 0.180 0.700
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 441350
+
+
+<b>Query=</b> sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1
+SV=4
+
+Length=1382
+
+<b>Subject=</b> gi|283855845|gb|GQ290303.1| Cynopterus brachyotis voucher 20020434
+rhodopsin (RHO) gene, exons 1 through 5 and partial cds
+
+Length=4301
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.346 0.180 0.700
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 441350
+
+
+<b>Query=</b> sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1
+SV=4
+
+Length=1382
+
+<b>Subject=</b> gi|283855822|gb|GQ290312.1| Myotis ricketti voucher GQX10 rhodopsin
+(RHO) mRNA, partial cds
+
+Length=983
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.346 0.180 0.700
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 441350
+
+
+<b>Query=</b> sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1
+SV=4
+
+Length=1382
+
+<b>Subject=</b> gi|18148870|dbj|AB062417.1| Synthetic construct Bos taurus gene for
+rhodopsin, complete cds
+
+Length=1047
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.346 0.180 0.700
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 441350
+
+
+<b>Query=</b> sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1
+SV=4
+
+Length=1382
+
+<b>Subject=</b> gi|12583664|dbj|AB043817.1| Conger myriaster conf gene for fresh
+water form rod opsin, complete cds
+
+Length=1344
+
+
+***** No hits found *****
+
+
+
+Lambda K H
+ 0.346 0.180 0.700
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 441350
+
+
+<b>Query=</b> sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1
+
+Length=348
+
+<b>Subject=</b> gi|57163782|ref|NM_001009242.1| Felis catus rhodopsin (RHO), mRNA
+
+Length=1047
+
+<script src="blastResult.js"></script>
+ Score = 732 bits (1689), Expect = 0.0, Method: Compositional matrix adjust.
+ Identities = 336/348 (97%), Positives = 343/348 (99%), Gaps = 0/348 (0%)
+ Frame = +1
+
+Query 1 MNGTEGPNFYVPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLY 60
+ MNGTEGPNFYVPFSN TGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLY
+Sbjct 1 MNGTEGPNFYVPFSNKTGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLY 180
+
+Query 61 VTVQHKKLRTPLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLG 120
+ VTVQHKKLRTPLNYILLNLAVADLFMV GGFT+TLYTSLHGYFVFGPTGCNLEGFFATLG
+Sbjct 181 VTVQHKKLRTPLNYILLNLAVADLFMVFGGFTTTLYTSLHGYFVFGPTGCNLEGFFATLG 360
+
+Query 121 GEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSRYIP 180
+ GEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPL GWSRYIP
+Sbjct 361 GEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLVGWSRYIP 540
+
+Query 181 EGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAAQQQES 240
+ EG+QCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMI+IFFCYGQLVFTVKEAAAQQQES
+Sbjct 541 EGMQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIVIFFCYGQLVFTVKEAAAQQQES 720
+
+Query 241 ATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAI 300
+ ATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMT+PAFFAKS++I
+Sbjct 721 ATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTLPAFFAKSSSI 900
+
+Query 301 YNPVIYIMMNKQFRNCMLTTICCGKNPLGDDEASATVSKTETSQVAPA 348
+ YNPVIYIMMNKQFRNCMLTT+CCGKNPLGDDEAS T SKTETSQVAPA
+Sbjct 901 YNPVIYIMMNKQFRNCMLTTLCCGKNPLGDDEASTTGSKTETSQVAPA 1044
+
+
+
+Lambda K H
+ 0.351 0.182 0.707
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 109230
+
+
+<b>Query=</b> sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1
+
+Length=348
+
+<b>Subject=</b> gi|2734705|gb|U59921.1|BBU59921 Bufo bufo rhodopsin mRNA, complete
+cds
+
+Length=1574
+
+<script src="blastResult.js"></script>
+ Score = 646 bits (1489), Expect = 0.0, Method: Compositional matrix adjust.
+ Identities = 290/342 (85%), Positives = 320/342 (94%), Gaps = 1/342 (0%)
+ Frame = +3
+
+Query 1 MNGTEGPNFYVPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLY 60
+ MNGTEGPNFY+P SN TGVVRSPFEYPQYYLAEPWQ+S+L AYMFLLI+LGFPINF+TLY
+Sbjct 42 MNGTEGPNFYIPMSNKTGVVRSPFEYPQYYLAEPWQYSILCAYMFLLILLGFPINFMTLY 221
+
+Query 61 VTVQHKKLRTPLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLG 120
+ VT+QHKKLRTPLNYILLNLA A+ FMVL GFT T+Y+S+ GYF+ G TGC +EGFFATLG
+Sbjct 222 VTIQHKKLRTPLNYILLNLAFANHFMVLCGFTVTMYSSMNGYFILGATGCYVEGFFATLG 401
+
+Query 121 GEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSRYIP 180
+ GEIALWSLVVLAIERYVVVCKPMSNFRF ENHA+MGVAFTW+MAL+CA PPL GWSRYIP
+Sbjct 402 GEIALWSLVVLAIERYVVVCKPMSNFRFSENHAVMGVAFTWIMALSCAVPPLLGWSRYIP 581
+
+Query 181 EGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAAQQQES 240
+ EG+QCSCG+DYYTLKPEVNNESFVIYMFVVHFTIP+IIIFFCYG+LV TVKEAAAQQQES
+Sbjct 582 EGMQCSCGVDYYTLKPEVNNESFVIYMFVVHFTIPLIIIFFCYGRLVCTVKEAAAQQQES 761
+
+Query 241 ATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAI 300
+ ATTQKAEKEVTRMVIIMV+ FLICWVPYASVAF+IF+ QGS FGPIFMT+PAFFAKS++I
+Sbjct 762 ATTQKAEKEVTRMVIIMVVFFLICWVPYASVAFFIFSNQGSEFGPIFMTVPAFFAKSSSI 941
+
+Query 301 YNPVIYIMMNKQFRNCMLTTICCGKNPLGDDEA-SATVSKTE 341
+ YNPVIYIM+NKQFRNCM+TT+CCGKNP G+D+A SA SKTE
+Sbjct 942 YNPVIYIMLNKQFRNCMITTLCCGKNPFGEDDASSAATSKTE 1067
+
+
+
+Lambda K H
+ 0.351 0.182 0.707
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 109230
+
+
+<b>Query=</b> sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1
+
+Length=348
+
+<b>Subject=</b> gi|283855845|gb|GQ290303.1| Cynopterus brachyotis voucher 20020434
+rhodopsin (RHO) gene, exons 1 through 5 and partial cds
+
+Length=4301
+
+<script src="blastResult.js"></script>
+ Score = 151 bits (342), Expect(2) = 1e-72, Method: Compositional matrix adjust.
+ Identities = 69/74 (94%), Positives = 73/74 (99%), Gaps = 0/74 (0%)
+ Frame = +3
+
+Query 239 ESATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSA 298
+ ESATTQKAEKEVTRMVIIMVIAFLICW+PYA VAFYIFTHQGSNFGPIFMT+PAFFAKS+
+Sbjct 3147 ESATTQKAEKEVTRMVIIMVIAFLICWLPYAGVAFYIFTHQGSNFGPIFMTLPAFFAKSS 3326
+
+Query 299 AIYNPVIYIMMNKQ 312
+ +IYNPVIYIMMNKQ
+Sbjct 3327 SIYNPVIYIMMNKQ 3368
+
+
+ Score = 126 bits (284), Expect(2) = 1e-72, Method: Compositional matrix adjust.
+ Identities = 54/59 (92%), Positives = 57/59 (97%), Gaps = 0/59 (0%)
+ Frame = +2
+
+Query 177 RYIPEGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAA 235
+ RYIPEG+QCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMI+IFFCYGQLVFTVKE +
+Sbjct 2855 RYIPEGMQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIVIFFCYGQLVFTVKEVRS 3031
+
+
+ Score = 229 bits (523), Expect = 1e-64, Method: Compositional matrix adjust.
+ Identities = 107/111 (97%), Positives = 109/111 (99%), Gaps = 0/111 (0%)
+ Frame = +1
+
+Query 11 VPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRT 70
+ VPFSN TGVVRSPFE+PQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRT
+Sbjct 1 VPFSNKTGVVRSPFEHPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRT 180
+
+Query 71 PLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLGG 121
+ PLNYILLNLAVADLFMV GGFT+TLYTSLHGYFVFGPTGCNLEGFFATLGG
+Sbjct 181 PLNYILLNLAVADLFMVFGGFTTTLYTSLHGYFVFGPTGCNLEGFFATLGG 333
+
+
+ Score = 122 bits (276), Expect = 1e-32, Method: Compositional matrix adjust.
+ Identities = 55/59 (94%), Positives = 56/59 (95%), Gaps = 0/59 (0%)
+ Frame = +3
+
+Query 119 LGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSR 177
+ L GEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMG+A TWVMALACAAPPL GWSR
+Sbjct 1404 LAGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGLALTWVMALACAAPPLVGWSR 1580
+
+
+ Score = 57.7 bits (125), Expect = 6e-13, Method: Compositional matrix adjust.
+ Identities = 23/26 (89%), Positives = 24/26 (93%), Gaps = 0/26 (0%)
+ Frame = +1
+
+Query 312 QFRNCMLTTICCGKNPLGDDEASATV 337
+ QFRNCMLTT+CCGKNPLGDDEAS T
+Sbjct 4222 QFRNCMLTTLCCGKNPLGDDEASTTA 4299
+
+
+
+Lambda K H
+ 0.351 0.182 0.707
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 109230
+
+
+<b>Query=</b> sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1
+
+Length=348
+
+<b>Subject=</b> gi|283855822|gb|GQ290312.1| Myotis ricketti voucher GQX10 rhodopsin
+(RHO) mRNA, partial cds
+
+Length=983
+
+<script src="blastResult.js"></script>
+ Score = 658 bits (1517), Expect = 0.0, Method: Compositional matrix adjust.
+ Identities = 310/326 (96%), Positives = 322/326 (99%), Gaps = 0/326 (0%)
+ Frame = +1
+
+Query 11 VPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRT 70
+ VPFSN TGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRT
+Sbjct 1 VPFSNKTGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRT 180
+
+Query 71 PLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLGGEIALWSLVV 130
+ PLNYILLNLAVA+LFMV GGFT+TLYTS+HGYFVFG TGCNLEGFFATLGGEIALWSLVV
+Sbjct 181 PLNYILLNLAVANLFMVFGGFTTTLYTSMHGYFVFGATGCNLEGFFATLGGEIALWSLVV 360
+
+Query 131 LAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSRYIPEGLQCSCGID 190
+ LAIERYVVVCKPMSNFRFGENHAIMG+AFTWVMALACAAPPLAGWSRYIPEG+QCSCGID
+Sbjct 361 LAIERYVVVCKPMSNFRFGENHAIMGLAFTWVMALACAAPPLAGWSRYIPEGMQCSCGID 540
+
+Query 191 YYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAAQQQESATTQKAEKEV 250
+ YYTLKPEVNNESFVIYMFVVHFTIPMI+IFFCYGQLVFTVKEAAAQQQESATTQKAEKEV
+Sbjct 541 YYTLKPEVNNESFVIYMFVVHFTIPMIVIFFCYGQLVFTVKEAAAQQQESATTQKAEKEV 720
+
+Query 251 TRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAIYNPVIYIMMN 310
+ TRMVIIMV+AFLICW+PYASVAFYIFTHQGSNFGP+FMTIPAFFAKS++IYNPVIYIMMN
+Sbjct 721 TRMVIIMVVAFLICWLPYASVAFYIFTHQGSNFGPVFMTIPAFFAKSSSIYNPVIYIMMN 900
+
+Query 311 KQFRNCMLTTICCGKNPLGDDEASAT 336
+ KQFRNCMLTT+CCGKNPLGDDEAS T
+Sbjct 901 KQFRNCMLTTLCCGKNPLGDDEASTT 978
+
+
+
+Lambda K H
+ 0.351 0.182 0.707
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 109230
+
+
+<b>Query=</b> sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1
+
+Length=348
+
+<b>Subject=</b> gi|18148870|dbj|AB062417.1| Synthetic construct Bos taurus gene for
+rhodopsin, complete cds
+
+Length=1047
+
+<script src="blastResult.js"></script>
+ Score = 711 bits (1640), Expect = 0.0, Method: Compositional matrix adjust.
+ Identities = 325/348 (94%), Positives = 337/348 (97%), Gaps = 0/348 (0%)
+ Frame = +1
+
+Query 1 MNGTEGPNFYVPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLY 60
+ MNGTEGPNFYVPFSN TGVVRSPFE PQYYLAEPWQFSMLAAYMFLLI+LGFPINFLTLY
+Sbjct 1 MNGTEGPNFYVPFSNKTGVVRSPFEAPQYYLAEPWQFSMLAAYMFLLIMLGFPINFLTLY 180
+
+Query 61 VTVQHKKLRTPLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLG 120
+ VTVQHKKLRTPLNYILLNLAVADLFMV GGFT+TLYTSLHGYFVFGPTGCNLEGFFATLG
+Sbjct 181 VTVQHKKLRTPLNYILLNLAVADLFMVFGGFTTTLYTSLHGYFVFGPTGCNLEGFFATLG 360
+
+Query 121 GEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSRYIP 180
+ GEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPL GWSRYIP
+Sbjct 361 GEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLVGWSRYIP 540
+
+Query 181 EGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAAQQQES 240
+ EG+QCSCGIDYYT E NNESFVIYMFVVHF IP+I+IFFCYGQLVFTVKEAAAQQQES
+Sbjct 541 EGMQCSCGIDYYTPHEETNNESFVIYMFVVHFIIPLIVIFFCYGQLVFTVKEAAAQQQES 720
+
+Query 241 ATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAI 300
+ ATTQKAEKEVTRMVIIMVIAFLICW+PYA VAFYIFTHQGS+FGPIFMTIPAFFAK++A+
+Sbjct 721 ATTQKAEKEVTRMVIIMVIAFLICWLPYAGVAFYIFTHQGSDFGPIFMTIPAFFAKTSAV 900
+
+Query 301 YNPVIYIMMNKQFRNCMLTTICCGKNPLGDDEASATVSKTETSQVAPA 348
+ YNPVIYIMMNKQFRNCM+TT+CCGKNPLGDDEAS TVSKTETSQVAPA
+Sbjct 901 YNPVIYIMMNKQFRNCMVTTLCCGKNPLGDDEASTTVSKTETSQVAPA 1044
+
+
+
+Lambda K H
+ 0.351 0.182 0.707
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 109230
+
+
+<b>Query=</b> sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1
+
+Length=348
+
+<b>Subject=</b> gi|12583664|dbj|AB043817.1| Conger myriaster conf gene for fresh
+water form rod opsin, complete cds
+
+Length=1344
+
+<script src="blastResult.js"></script>
+ Score = 626 bits (1444), Expect = 0.0, Method: Compositional matrix adjust.
+ Identities = 281/342 (83%), Positives = 311/342 (91%), Gaps = 1/342 (0%)
+ Frame = +2
+
+Query 1 MNGTEGPNFYVPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLY 60
+ MNGTEGPNFY+P SNATGVVRSPFEYPQYYLAEPW FS L+AYMF LI+ GFPINFLTLY
+Sbjct 23 MNGTEGPNFYIPMSNATGVVRSPFEYPQYYLAEPWAFSALSAYMFFLIIAGFPINFLTLY 202
+
+Query 61 VTVQHKKLRTPLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLG 120
+ VT++HKKLRTPLNYILLNLAVADLFMV GGFT+T+YTS+HGYFVFGPTGCN+EGFFATLG
+Sbjct 203 VTIEHKKLRTPLNYILLNLAVADLFMVFGGFTTTMYTSMHGYFVFGPTGCNIEGFFATLG 382
+
+Query 121 GEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSRYIP 180
+ GEIALW LVVLAIER++VVCKP++NFRFGE HAIMGV TW MALACA PPL GWSRYIP
+Sbjct 383 GEIALWCLVVLAIERWMVVCKPVTNFRFGESHAIMGVMVTWTMALACALPPLFGWSRYIP 562
+
+Query 181 EGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAAQQQES 240
+ EGLQCSCGIDYYT P +NNESFVIYMF HF+IP+ +I FCYG+LV TVKEAAAQQQES
+Sbjct 563 EGLQCSCGIDYYTRAPGINNESFVIYMFTCHFSIPLAVISFCYGRLVCTVKEAAAQQQES 742
+
+Query 241 ATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAI 300
+ TTQ+AE+EVTRMV+IMVI+FL+CWVPYASVA YIFTHQGS FGPIFMTIP+FFAKS+A+
+Sbjct 743 ETTQRAEREVTRMVVIMVISFLVCWVPYASVAWYIFTHQGSTFGPIFMTIPSFFAKSSAL 922
+
+Query 301 YNPVIYIMMNKQFRNCMLTTICCGKNPL-GDDEASATVSKTE 341
+ YNP+IYI MNKQFR CM+TT+CCGKNP +D ASAT SKTE
+Sbjct 923 YNPMIYICMNKQFRHCMITTLCCGKNPFEEEDGASATSSKTE 1048
+
+
+
+Lambda K H
+ 0.351 0.182 0.707
+
+Gapped
+Lambda K H
+ 0.299 0.0710 0.270
+
+Effective search space used: 109230
+
+
+
+
+Matrix: BLOSUM80
+Gap Penalties: Existence: 10, Extension: 1
+Neighboring words threshold: 14
+Window for multiple hits: 25
+</PRE>
+</BODY>
+</HTML>
--- a/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml Tue Apr 19 16:08:18 2011 +0100
+++ b/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml Tue Apr 19 16:20:52 2011 +0100
@@ -188,6 +188,21 @@
<param name="parse_deflines" value="true" /><output name="output1" file="tblastn_four_human_vs_rhodopsin.tabular" ftype="tabular" /></test>
+ <test>
+ <param name="query" value="four_human_proteins.fasta" ftype="fasta" />
+ <param name="db_opts_selector" value="file" />
+ <param name="subject" value="rhodopsin_nucs.fasta" ftype="fasta" />
+ <param name="database" value="" />
+ <param name="evalue_cutoff" value="1e-10" />
+ <param name="out_format" value="0 -html" />
+ <param name="adv_opts_selector" value="advanced" />
+ <param name="filter_query" value="false" />
+ <param name="matrix" value="BLOSUM80" />
+ <param name="max_hits" value="0" />
+ <param name="word_size" value="0" />
+ <param name="parse_deflines" value="false" />
+ <output name="output1" file="tblastn_four_human_vs_rhodopsin.html" ftype="html" />
+ </test>
</tests>
<help>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: kanwei: trackster: use simple hash to calculate unique ID for tabix
by Bitbucket 19 Apr '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/85d76cb4761f/
changeset: r5436:85d76cb4761f
user: kanwei
date: 2011-04-19 20:45:55
summary: trackster: use simple hash to calculate unique ID for tabix
affected #: 1 file (25 bytes)
--- a/lib/galaxy/visualization/tracks/data_providers.py Tue Apr 19 14:20:11 2011 -0400
+++ b/lib/galaxy/visualization/tracks/data_providers.py Tue Apr 19 14:45:55 2011 -0400
@@ -620,7 +620,8 @@
# BED dataset.
feature = line.split()
length = len(feature)
- payload = [ feature[1]+"-"+feature[2]+":"+str(count), int(feature[1]), int(feature[2]) ]
+ # Unique id is just a hash of the line
+ payload = [ hash(line), int(feature[1]), int(feature[2]) ]
if no_detail:
results.append( payload )
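[Editorial note] A reading of the diff, not from the commit message itself: the payload's first field is now just Python's built-in hash() of the raw line, replacing the start-end:count string whose count component depended on enumeration order. A minimal sketch with a hypothetical BED line:

    # Hypothetical BED line as read by the tabix data provider
    line = "chr1\t1000\t2000\tmy_feature\t0\t+"
    feature = line.split()

    # Old id: depends on 'count', i.e. on where the line appears
    old_id = feature[1] + "-" + feature[2] + ":" + str(7)  # '1000-2000:7'

    # New id: derived from the line content alone
    new_id = hash(line)

    # Caveat: CPython 2 (current for this commit) hashes str
    # deterministically; Python 3 randomizes string hashes per process
    # (PYTHONHASHSEED), so such ids would not survive a restart there.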
commit/galaxy-central: kanwei: trackster: don't show tile if no tile (fixes ReferenceTrack)
by Bitbucket 19 Apr '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/a8f476227efe/
changeset: r5435:a8f476227efe
user: kanwei
date: 2011-04-19 20:20:11
summary: trackster: don't show tile if no tile (fixes ReferenceTrack)
affected #: 1 file (113 bytes)
--- a/static/scripts/trackster.js Tue Apr 19 11:49:18 2011 -0400
+++ b/static/scripts/trackster.js Tue Apr 19 14:20:11 2011 -0400
@@ -1856,6 +1856,10 @@
// console.log( "draw_and_show_tile", resolution, tile_index, w_scale );
var tile = track.draw_tile(result, resolution, tile_index, w_scale, seq_data);
track.tile_cache.set(key, tile);
+ // Don't show if no tile
+ if (tile === undefined) {
+ return;
+ }
track.show_tile(tile, parent_element, tile_low, w_scale);
drawn_tiles[drawn_tiles.length] = tile;
};
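[Editorial note] The ordering matters here: the possibly-undefined result is cached first, so "no tile at this position" is remembered and not recomputed, and only real tiles are shown. Per the commit message, a ReferenceTrack legitimately draws no tile when zoomed out too far to render sequence. A minimal Python sketch of the same guard (hypothetical names mirroring the JavaScript):

    def draw_and_show_tile(track, tile_cache, key, *draw_args):
        tile = track.draw_tile(*draw_args)
        tile_cache[key] = tile      # cache even a missing tile
        if tile is None:            # JS: tile === undefined
            return None
        track.show_tile(tile)
        return tile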
commit/galaxy-central: dan: Add missing test file from 5407:b387de5c0439
by Bitbucket 19 Apr '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/9cf729314649/
changeset: r5434:9cf729314649
user: dan
date: 2011-04-19 17:49:18
summary: Add missing test file from 5407:b387de5c0439.
affected #: 1 file (0 bytes)
commit/galaxy-central: greg: Improved exception handling in the tool shed.
by Bitbucket 19 Apr '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/c1aeb2f33b4a/
changeset: r5433:c1aeb2f33b4a
user: greg
date: 2011-04-19 16:20:01
summary: Improved exception handling in the tool shed.
affected #: 2 files (89 bytes)
--- a/lib/galaxy/webapps/community/controllers/upload.py Tue Apr 19 09:44:28 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py Tue Apr 19 10:20:01 2011 -0400
@@ -51,7 +51,7 @@
try:
uploaded_file = urllib2.urlopen( url_paste )
except ( ValueError, urllib2.HTTPError ), e:
- message = 'An error occurred trying to retrieve the URL entered on the upload form: %s' % e
+ message = 'An error occurred trying to retrieve the URL entered on the upload form: %s' % str( e )
status = 'error'
except urllib2.URLError, e:
message = 'An error occurred trying to retrieve the URL entered on the upload form: %s' % e.reason
--- a/lib/galaxy/webapps/community/datatypes/__init__.py Tue Apr 19 09:44:28 2011 -0400
+++ b/lib/galaxy/webapps/community/datatypes/__init__.py Tue Apr 19 10:20:01 2011 -0400
@@ -65,8 +65,8 @@
# xml_files and tool_tags will only be received if we're called from the ToolSuite.verify() method.
try:
tar = tarfile.open( f.name )
- except tarfile.ReadError:
- raise DatatypeVerificationError( 'The archive is not a readable tar file.' )
+ except tarfile.ReadError, e:
+ raise DatatypeVerificationError( 'Error reading the archive, problem: %s' % str( e ) )
if not xml_files:
# Make sure we're not uploading a tool suite
if filter( lambda x: x.lower().find( 'suite_config.xml' ) >= 0, tar.getnames() ):
@@ -78,9 +78,8 @@
try:
tree = ElementTree.parse( tar.extractfile( xml_file ) )
root = tree.getroot()
- except:
- log.exception( 'fail:' )
- continue
+ except Exception, e:
+ raise DatatypeVerificationError( 'Error parsing file "%s", problem: %s' % ( str( xml_file ), str( e ) ) )
if root.tag == 'tool':
if 'id' not in root.keys():
raise DatatypeVerificationError( "Tool xml file (%s) does not include the required 'id' attribute in the <tool> tag" % str( xml_file ) )
commit/galaxy-central: james_taylor: trackster: for line painter, determine min and max from data if not provided (not actually used for trackster)
by Bitbucket 19 Apr '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/91d49371fd1e/
changeset: r5432:91d49371fd1e
user: james_taylor
date: 2011-04-19 15:44:28
summary: trackster: for line painter, determine min and max from data if not provided (not actually used for trackster)
affected #: 1 file (537 bytes)
--- a/static/scripts/trackster.js Tue Apr 19 08:24:57 2011 -0400
+++ b/static/scripts/trackster.js Tue Apr 19 09:44:28 2011 -0400
@@ -2864,6 +2864,20 @@
var LinePainter = function( data, view_start, view_end, prefs, mode ) {
Painter.call( this, data, view_start, view_end, prefs, mode );
+ if ( this.prefs.min_value === undefined ) {
+ var min_value = Infinity;
+ for (var i = 0, len = this.data.length; i < len; i++) {
+ min_value = Math.min( min_value, this.data[i][1] );
+ }
+ this.prefs.min_value = min_value;
+ }
+ if ( this.prefs.max_value === undefined ) {
+ var max_value = -Infinity;
+ for (var i = 0, len = this.data.length; i < len; i++) {
+ max_value = Math.max( max_value, this.data[i][1] );
+ }
+ this.prefs.max_value = max_value;
+ }
};
LinePainter.prototype.default_prefs = { min_value: undefined, max_value: undefined, mode: "Histogram", color: "#000", overflow_color: "#F66" };
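[Editorial note] The constructor now derives a missing bound from the data itself, one linear scan per missing pref. A minimal Python sketch of the same fallback (hypothetical names; rows are (position, value) pairs like the data the painter receives):

    def fill_missing_bounds(data, prefs):
        values = [row[1] for row in data]
        # Only compute a bound when none was supplied, as in the JS
        if prefs.get("min_value") is None:
            prefs["min_value"] = min(values) if values else float("inf")
        if prefs.get("max_value") is None:
            prefs["max_value"] = max(values) if values else float("-inf")
        return prefs

    print(fill_missing_bounds([(0, 1.5), (1, -2.0), (2, 0.25)], {}))
    # {'min_value': -2.0, 'max_value': 1.5}

(The JS initializes to Infinity / -Infinity, which is also what an empty data array would leave in place; the sketch preserves that edge case.)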
17 new changesets in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/e5f904f7a293/
changeset: r5415:e5f904f7a293
user: Rob Hooft
date: 2011-04-13 21:02:06
summary: fixed some configuration files
affected #: 4 files (2 bytes)
--- a/tools/annotation_profiler/annotation_profiler.xml Tue Apr 12 23:41:06 2011 -0400
+++ b/tools/annotation_profiler/annotation_profiler.xml Wed Apr 13 21:02:06 2011 +0200
@@ -1,4 +1,4 @@
-<tool id="Annotation_Profiler_0" name="Profile Annotations" Version="1.0.0">
+<tool id="Annotation_Profiler_0" name="Profile Annotations" version="1.0.0"><description>for a set of genomic intervals</description><command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p ${GALAXY_DATA_INDEX_DIR}/annotation_profiler/$dbkey $summary -b 3 -t $table_names</command><inputs>
--- a/tools/human_genome_variation/funDo.xml Tue Apr 12 23:41:06 2011 -0400
+++ b/tools/human_genome_variation/funDo.xml Wed Apr 13 21:02:06 2011 +0200
@@ -1,4 +1,4 @@
-<tool id="hgv_funDo" name="FunDO" Version="1.0.0">
+<tool id="hgv_funDo" name="FunDO" version="1.0.0"><description>human genes associated with disease terms</description><command interpreter="perl">
--- a/tools/rgenetics/rgRegion.xml Tue Apr 12 23:41:06 2011 -0400
+++ b/tools/rgenetics/rgRegion.xml Wed Apr 13 21:02:06 2011 +0200
@@ -22,7 +22,7 @@
</inputs>
<outputs>
- <data format="lped" name="out_file1" label="${title}.lped" metadata_source=infile />
+ <data format="lped" name="out_file1" label="${title}.lped" metadata_source="infile" /></outputs><help>
--- a/tools/visualization/GMAJ.xml Tue Apr 12 23:41:06 2011 -0400
+++ b/tools/visualization/GMAJ.xml Wed Apr 13 21:02:06 2011 +0200
@@ -1,4 +1,4 @@
-<tool id="gmaj_1" name="GMAJ" Version="2.0.1">
+<tool id="gmaj_1" name="GMAJ" version="2.0.1"><description>Multiple Alignment Viewer</description><command interpreter="python">GMAJ.py $out_file1 $maf_input $gmaj_file $filenames_file</command><inputs>
http://bitbucket.org/galaxy/galaxy-central/changeset/ff8378b13915/
changeset: r5416:ff8378b13915
user: Rob Hooft
date: 2011-04-13 22:13:23
summary: merge
affected #: 17 files (52.3 KB)
--- a/external_service_types/454_life_sciences.xml Wed Apr 13 21:02:06 2011 +0200
+++ b/external_service_types/454_life_sciences.xml Wed Apr 13 22:13:23 2011 +0200
@@ -3,7 +3,7 @@
<version>1</version>
<data_transfer_settings>
- <data_transfer type='scp' automatic_transfer='False' host='host' user_name='user_name' password='password' data_location='data_location' />
+    <data_transfer protocol='scp' automatic_transfer='False' host='host' user_name='user_name' password='password' data_location='data_location' />
</data_transfer_settings>
<run_details>
--- a/external_service_types/applied_biosystems_solid.xml Wed Apr 13 21:02:06 2011 +0200
+++ b/external_service_types/applied_biosystems_solid.xml Wed Apr 13 22:13:23 2011 +0200
@@ -3,7 +3,7 @@
<version>3</version>
<data_transfer_settings>
- <data_transfer type='scp' automatic_transfer='False' host='host' user_name='user_name' password='password' data_location='data_location' rename_dataset='rename_dataset' />
+    <data_transfer protocol='scp' automatic_transfer='False' host='host' user_name='user_name' password='password' data_location='data_location' rename_dataset='rename_dataset' />
</data_transfer_settings>
<run_details>
@@ -31,7 +31,7 @@
<field name="dataset2_name" type="text" label="Sample run output 2" description="" value="Quality file" required="True" /><field name="dataset2_datatype" type="text" label="Sample run datatype 2" description="" value="qual" required="True" /><field name="dataset3_name" type="text" label="Sample run output 3" description="" value="STATS file" required="True" />
- <field name="dataset3_datatype" type="text" label="Sample run datatype 3" description="" value="stats" required="True" />
+ <field name="dataset3_datatype" type="text" label="Sample run datatype 3" description="" value="txt" required="True" /></fields></form>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/external_service_types/pacific_biosciences_smrt_portal.xml Wed Apr 13 22:13:23 2011 +0200
@@ -0,0 +1,137 @@
+<external_service id="pacific_biosciences_smrt_portal" name="Pacific Biosciences SMRT Portal" version="1.1.0">
+ <description></description>
+ <version>1</version>
+ <data_transfer_settings>
+ <!--
+ <data_transfer protocol='scp' automatic_transfer='True' host='host' user_name='user_name' password='password' data_location='data_location' />
+ -->
+ <data_transfer protocol='http' automatic_transfer='True' />
+ </data_transfer_settings>
+ <run_details>
+ <results>
+ <dataset name="dataset2_name" datatype="dataset2_datatype"
+ url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/filtered_subreads.fa" />
+ <dataset name="dataset3_name" datatype="dataset3_datatype"
+ url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/aligned_reads.bam" />
+ <dataset name="dataset4_name" datatype="dataset4_datatype"
+ url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/alignment_summary.gff" />
+ <dataset name="dataset5_name" datatype="dataset5_datatype"
+ url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/coverage.bed" />
+ <dataset name="dataset6_name" datatype="dataset6_datatype"
+ url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/variants.bed" />
+ <dataset name="dataset7_name" datatype="dataset7_datatype"
+ url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/variants.gff.gz" />
+ </results>
+ </run_details>
+ <form type="external_service" name="Pacific Biosciences SMRT Portal Form" description="">
+ <fields>
+ <field name="host" type="text" label="Hostname or IP address" description="" value="192.168.56.101" required="True" />
+ <field name="user_name" type="text" label="User name" description="User name to log into the sequencer." value="administrator" required="True" />
+ <field name="password" type="password" label="Password" description="" value="galaxy" required="True" />
+ <field name="data_location" type="text" label="Data directory" description="" value="" required="False" />
+
+ <field name="dataset2_name" type="text" label="Filtered reads" description="" value="Filtered reads" required="True" />
+ <field name="dataset2_datatype" type="text" label="Format" description="" value="fasta" required="True" />
+
+ <field name="dataset3_name" type="text" label="Aligned reads bam" description="" value="Aligned reads" required="True" />
+ <field name="dataset3_datatype" type="text" label="Format" description="" value="bam" required="True" />
+
+ <field name="dataset4_name" type="text" label="Coverage gff" description="" value="Coverage (gff)" required="True" />
+ <field name="dataset4_datatype" type="text" label="Format" description="" value="gff" required="True" />
+
+ <field name="dataset5_name" type="text" label="Coverage bed" description="" value="Coverage (bed)" required="True" />
+ <field name="dataset5_datatype" type="text" label="Format" description="" value="bed" required="True" />
+
+ <field name="dataset6_name" type="text" label="Variants bed" description="" value="Variants (bed)" required="True" />
+ <field name="dataset6_datatype" type="text" label="Format" description="" value="bed" required="True" />
+
+ <field name="dataset7_name" type="text" label="Variants gff" description="" value="Variants (gff)" required="True" />
+ <field name="dataset7_datatype" type="text" label="Format" description="" value="gff" required="True" />
+ </fields>
+ </form>
+
+ <actions>
+ <param name="api_url" type="template">http://${fields.host}/smrtportal/api</param>
+ <param name="web_url" type="template">http://${fields.host}/smrtportal/#</param>
+ <section name="jobs" label="Job Service">
+
+ <param name="jobs_url" type="template">${api_url}/Jobs</param>
+ <param name="monitor_jobs_url" type="template">${web_url}/MonitorJobs</param>
+ <!--
+ <action type="web_api" name="jobs_list" label="List Jobs" description="">
+ <request target="galaxy_main" method="post">
+ <url>${jobs.jobs_url}</url>
+ </request>
+ <result_handler type="jquery_grid"></result_handler>
+ </action>
+ -->
+ <conditional name="job_sample" label="Sample to Job Service" ref="item">
+ <when type="item_type" name="sample" value="sample">
+ <param name="secondary_analysis_job_id" type="template">${item.run_details.run.info.content.get( 'secondary_analysis_job_id' )}</param>
+ <conditional name="valid_job_sample" ref="jobs.job_sample.sample.secondary_analysis_job_id">
+ <when type="boolean" name="valid">
+ <param name="job_sample_url" type="template">${jobs.jobs_url}/${jobs.job_sample.sample.secondary_analysis_job_id}</param>
+ <param name="monitor_job_url" type="template">${jobs.monitor_jobs_url}/DetailsOfJob/${jobs.job_sample.sample.secondary_analysis_job_id}</param>
+
+ <action type="web" name="monitor_job" label="View Job in SMRT Portal" description="">
+ <request target="_blank" method="get">
+ <url>${jobs.job_sample.sample.valid_job_sample.valid.monitor_job_url}</url>
+ </request>
+ </action>
+
+ <action type="web_api" name="job_status" label="Job status" description="">
+ <request target="galaxy_main" method="post">
+ <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Status</url>
+ </request>
+ <result_handler type="json_display"></result_handler>
+ </action>
+
+ <action type="web_api" name="job_history" label="Job history" description="">
+ <request target="galaxy_main" method="post">
+ <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/History</url>
+ </request>
+ <result_handler type="jquery_grid"></result_handler>
+ </action>
+
+ <action type="web_api" name="job_log" label="Job log" description="">
+ <request target="galaxy_main" method="post">
+ <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Log</url>
+ </request>
+ <result_handler type="display"></result_handler>
+ </action>
+
+ <action type="web_api" name="job_contents" label="Job contents" description="">
+ <request target="galaxy_main" method="post">
+ <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Contents</url>
+ </request>
+ <result_handler type="json_display"></result_handler>
+ </action>
+ <!--
+ <action type="web_api" name="job_protocol" label="Job protocol" description="">
+ <request target="galaxy_main" method="post">
+ <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Protocol</url>
+ </request>
+ <result_handler type="display"></result_handler>
+ </action>
+ -->
+ <action type="web_api" name="job_inputs" label="Job inputs" description="">
+ <request target="galaxy_main" method="post">
+ <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Inputs</url>
+ </request>
+ <result_handler type="jquery_grid"></result_handler>
+ </action>
+
+ <action type="template" name="job_smrt_view" label="Job SMRT View" description="">
+ <template>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Contents/vis.jnlp</template>
+ <result_handler type="web_redirect"></result_handler>
+ </action>
+
+ </when>
+ </conditional>
+ </when>
+ </conditional>
+
+ </section>
+ </actions>
+
+</external_service>
--- a/external_service_types/simple_unknown_sequencer.xml Wed Apr 13 21:02:06 2011 +0200
+++ b/external_service_types/simple_unknown_sequencer.xml Wed Apr 13 22:13:23 2011 +0200
@@ -7,7 +7,7 @@
<description></description>
<version></version>
<data_transfer_settings>
- <data_transfer type='scp' automatic_transfer='False' host='host' user_name='user_name' password='password' data_location='data_location'/>
+    <data_transfer protocol='scp' automatic_transfer='False' host='host' user_name='user_name' password='password' data_location='data_location'/>
</data_transfer_settings>
<form type="external_service" name="Simple unknown sequencer form" description="">
<fields>
--- a/lib/galaxy/jobs/deferred/__init__.py Wed Apr 13 21:02:06 2011 +0200
+++ b/lib/galaxy/jobs/deferred/__init__.py Wed Apr 13 22:13:23 2011 +0200
@@ -26,7 +26,6 @@
self.monitor_thread = threading.Thread( target=self.__monitor )
self.monitor_thread.start()
log.info( 'Deferred job queue started' )
-
def _load_plugins( self ):
for fname in os.listdir( os.path.dirname( __file__ ) ):
if not fname.startswith( '_' ) and fname.endswith( '.py' ):
@@ -53,7 +52,6 @@
self.plugins[obj] = plugin( self.app )
self.plugins[obj].job_states = self.job_states
log.debug( 'Loaded deferred job plugin: %s' % display_name )
-
def __check_jobs_at_startup( self ):
waiting_jobs = self.sa_session.query( model.DeferredJob ) \
.filter( model.DeferredJob.state == model.DeferredJob.states.WAITING ).all()
@@ -66,7 +64,6 @@
# Pass the job ID as opposed to the job, since the monitor thread
# needs to load it in its own threadlocal scoped session.
self.waiting_jobs.append( job.id )
-
def __monitor( self ):
while self.running:
try:
@@ -75,7 +72,6 @@
log.exception( 'Exception in monitor_step' )
self.sleeper.sleep( 1 )
log.info( 'job queue stopped' )
-
def __monitor_step( self ):
# TODO: Querying the database with this frequency is bad, we need message passing
new_jobs = self.sa_session.query( model.DeferredJob ) \
@@ -121,7 +117,6 @@
else:
new_waiting.append( job )
self.waiting_jobs = new_waiting
-
def __check_job_plugin( self, job ):
if job.plugin not in self.plugins:
log.error( 'Invalid deferred job plugin: %s' ) % job.plugin
@@ -130,15 +125,12 @@
self.sa_session.flush()
return False
return True
-
def __check_if_ready_to_run( self, job ):
return self.plugins[job.plugin].check_job( job )
-
def __fail_job( self, job ):
job.state = model.DeferredJob.states.ERROR
self.sa_session.add( job )
self.sa_session.flush()
-
def shutdown( self ):
self.running = False
self.sleeper.wake()
@@ -158,3 +150,34 @@
self.condition.acquire()
self.condition.notify()
self.condition.release()
+
+class FakeTrans( object ):
+ """A fake trans for calling the external set metadata tool"""
+ def __init__( self, app, history=None, user=None):
+ class Dummy( object ):
+ def __init__( self ):
+ self.id = None
+ self.app = app
+ self.sa_session = app.model.context.current
+ self.dummy = Dummy()
+ self.history = history
+ self.user = user
+ self.model = app.model
+ def get_galaxy_session( self ):
+ return self.dummy
+ def log_event( self, message, tool_id=None ):
+ pass
+ def get_current_user_roles( self ):
+ if self.user:
+ return self.user.all_roles()
+ else:
+ return []
+ def db_dataset_for( self, dbkey ):
+ if self.history is None:
+ return None
+ datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
+ .filter_by( deleted=False, history_id=self.history.id, extension="len" )
+ for ds in datasets:
+ if dbkey == ds.dbkey:
+ return ds
+ return None
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/jobs/deferred/data_transfer.py Wed Apr 13 22:13:23 2011 +0200
@@ -0,0 +1,377 @@
+"""
+Module for managing data transfer jobs.
+"""
+import logging, urllib2, re, shutil
+
+from galaxy import eggs
+from galaxy.util import json
+from string import Template
+from sqlalchemy import and_
+
+from galaxy.util.odict import odict
+from galaxy.workflow.modules import module_factory
+from galaxy.jobs.actions.post import ActionBox
+from galaxy.jobs.deferred import FakeTrans
+
+from galaxy.tools.parameters import visit_input_values
+from galaxy.tools.parameters.basic import DataToolParameter
+from galaxy.datatypes import sniff
+
+log = logging.getLogger( __name__ )
+
+class DataTransfer( object ):
+ check_interval = 15
+ dataset_name_re = re.compile( '(dataset\d+)_(name)' )
+ dataset_datatype_re = re.compile( '(dataset\d+)_(datatype)' )
+ def __init__( self, app ):
+ self.app = app
+ self.sa_session = app.model.context.current
+ def create_job( self, trans, **kwd ):
+ raise Exception( "Unimplemented Method" )
+ def check_job( self, job ):
+ raise Exception( "Unimplemented Method" )
+ def run_job( self, job ):
+ if job.params[ 'type' ] == 'init_transfer':
+ # TODO: don't create new downloads on restart.
+ if job.params[ 'protocol' ] in [ 'http', 'https' ]:
+ results = []
+ for result in job.params[ 'results' ].values():
+ result[ 'transfer_job' ] = self.app.transfer_manager.new( protocol=job.params[ 'protocol' ], url=result[ 'url' ] )
+ results.append( result )
+ elif job.params[ 'protocol' ] == 'scp':
+ results = []
+ result = {}
+ sample_datasets_dict = job.params[ 'sample_datasets_dict' ]
+ # sample_datasets_dict looks something like the following. The outer dictionary keys are SampleDataset ids.
+ # {'7': {'status': 'Not started', 'name': '3.bed', 'file_path': '/tmp/library/3.bed', 'sample_id': 7,
+ # 'external_service_id': 2, 'error_msg': '', 'size': '8.0K'}}
+ for sample_dataset_id, sample_dataset_info_dict in sample_datasets_dict.items():
+ result = {}
+ result[ 'transfer_job' ] = self.app.transfer_manager.new( protocol=job.params[ 'protocol' ],
+ host=job.params[ 'host' ],
+ user_name=job.params[ 'user_name' ],
+ password=job.params[ 'password' ],
+ sample_dataset_id=sample_dataset_id,
+ status=sample_dataset_info_dict[ 'status' ],
+ name=sample_dataset_info_dict[ 'name' ],
+ file_path=sample_dataset_info_dict[ 'file_path' ],
+ sample_id=sample_dataset_info_dict[ 'sample_id' ],
+ external_service_id=sample_dataset_info_dict[ 'external_service_id' ],
+ error_msg=sample_dataset_info_dict[ 'error_msg' ],
+ size=sample_dataset_info_dict[ 'size' ] )
+ results.append( result )
+ self.app.transfer_manager.run( [ r[ 'transfer_job' ] for r in results ] )
+ for result in results:
+ transfer_job = result.pop( 'transfer_job' )
+ self.create_job( None,
+ transfer_job_id=transfer_job.id,
+ result=transfer_job.params,
+ sample_id=job.params[ 'sample_id' ] )
+ # Update the state of the relevant SampleDataset
+ new_status = self.app.model.SampleDataset.transfer_status.IN_QUEUE
+ self._update_sample_dataset_status( protocol=job.params[ 'protocol' ],
+ sample_id=job.params[ 'sample_id' ],
+ result_dict=transfer_job.params,
+ new_status=new_status,
+ error_msg='' )
+ job.state = self.app.model.DeferredJob.states.OK
+ self.sa_session.add( job )
+ self.sa_session.flush()
+ # TODO: Error handling: failure executing, or errors returned from the manager
+ if job.params[ 'type' ] == 'finish_transfer':
+ protocol = job.params[ 'protocol' ]
+ # Update the state of the relevant SampleDataset
+ new_status = self.app.model.SampleDataset.transfer_status.ADD_TO_LIBRARY
+ if protocol in [ 'http', 'https' ]:
+ result_dict = job.params[ 'results' ]
+ library_dataset_name = job.params[ 'result' ][ 'name' ]
+ extension = job.params[ 'result' ][ 'datatype' ]
+ elif protocol in [ 'scp' ]:
+ # In this case, job.params will be a dictionary that contains a key named 'result'. The value
+ # of the result key is a dictionary that looks something like:
+ # {'sample_dataset_id': '8', 'status': 'Not started', 'protocol': 'scp', 'name': '3.bed',
+ # 'file_path': '/tmp/library/3.bed', 'host': '127.0.0.1', 'sample_id': 8, 'external_service_id': 2,
+ # 'password': 'galaxy', 'user_name': 'gvk', 'error_msg': '', 'size': '8.0K'}
+ result_dict = job.params[ 'result' ]
+ library_dataset_name = result_dict[ 'name' ]
+ # Determine the data format (see the relevant TODO item in the manual_data_transfer plugin)..
+ extension = sniff.guess_ext( result_dict[ 'file_path' ], sniff_order=self.app.datatypes_registry.sniff_order )
+ self._update_sample_dataset_status( protocol=job.params[ 'protocol' ],
+ sample_id=int( job.params[ 'sample_id' ] ),
+ result_dict=result_dict,
+ new_status=new_status,
+ error_msg='' )
+ sample = self.sa_session.query( self.app.model.Sample ).get( int( job.params[ 'sample_id' ] ) )
+ ld = self.app.model.LibraryDataset( folder=sample.folder, name=library_dataset_name )
+ self.sa_session.add( ld )
+ self.sa_session.flush()
+ self.app.security_agent.copy_library_permissions( sample.folder, ld )
+ ldda = self.app.model.LibraryDatasetDatasetAssociation( name = library_dataset_name,
+ extension = extension,
+ dbkey = '?',
+ library_dataset = ld,
+ create_dataset = True,
+ sa_session = self.sa_session )
+ ldda.message = 'Transferred by the Data Transfer Plugin'
+ self.sa_session.add( ldda )
+ self.sa_session.flush()
+ ldda.state = ldda.states.QUEUED # flushed in the set property
+ ld.library_dataset_dataset_association_id = ldda.id
+ self.sa_session.add( ld )
+ self.sa_session.flush()
+ try:
+ # Move the dataset from its temporary location
+ shutil.move( job.transfer_job.path, ldda.file_name )
+ ldda.init_meta()
+ for name, spec in ldda.metadata.spec.items():
+ if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
+ if spec.get( 'default' ):
+ setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
+ if self.app.config.set_metadata_externally:
+ self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( self.app.datatypes_registry.set_external_metadata_tool,
+ FakeTrans( self.app ),
+ incoming = { 'input1':ldda } )
+ else:
+ ldda.set_meta()
+ ldda.datatype.after_setting_metadata( ldda )
+ ldda.state = ldda.states.OK
+ # TODO: not sure if this flush is necessary
+ self.sa_session.add( ldda )
+ self.sa_session.flush()
+ except Exception, e:
+ log.exception( 'Failure preparing library dataset for finished transfer job (id: %s) via deferred job (id: %s):' % \
+ ( str( job.transfer_job.id ), str( job.id ) ) )
+ ldda.state = ldda.states.ERROR
+ if sample.workflow:
+ log.debug( "\n\nLogging sample mappings as: %s" % sample.workflow[ 'mappings' ] )
+ log.debug( "job.params: %s" % job.params )
+ # We have a workflow. Update all mappings to ldda's, and when the final one is done
+ # execute_workflow with either the provided history, or a new one.
+ sub_done = True
+ rep_done = False
+ for k, v in sample.workflow[ 'mappings' ].iteritems():
+ if not 'hda' in v and v[ 'ds_tag' ].startswith( 'hi|' ):
+ sample.workflow[ 'mappings' ][ k ][ 'hda' ] = self.app.security.decode_id( v[ 'ds_tag' ][3:] )
+ for key, value in sample.workflow[ 'mappings' ].iteritems():
+ if 'url' in value and value[ 'url' ] == job.params[ 'result' ][ 'url' ]:
+ # DBTODO Make sure all ds| mappings get the URL of the dataset, for linking to later.
+ # If this dataset maps to what we just finished, update the ldda id in the sample.
+ sample.workflow[ 'mappings' ][ key ][ 'ldda' ] = ldda.id
+ rep_done = True
+ # DBTODO replace the hi| mappings with the hda here. Just rip off the first three chars.
+ elif not 'ldda' in value and not 'hda' in value:
+ # We're not done if some mappings still don't have ldda or hda mappings.
+ sub_done = False
+ if sub_done and rep_done:
+ if not sample.history:
+ new_history = self.app.model.History( name="New History From %s" % sample.name, user=sample.request.user )
+ self.sa_session.add( new_history )
+ sample.history = new_history
+ self.sa_session.flush()
+ self._execute_workflow( sample )
+ # Check the workflow for substitution done-ness
+ self.sa_session.add( sample )
+ self.sa_session.flush()
+ elif sample.history:
+ # We don't have a workflow, but a history was provided.
+ # No processing, go ahead and chunk everything in the history.
+ if ldda.dataset.state in [ 'new', 'upload', 'queued', 'running', 'empty', 'discarded' ]:
+ log.error("Cannot import dataset '%s' to user history since its state is '%s'. " % ( ldda.name, ldda.dataset.state ))
+ elif ldda.dataset.state in [ 'ok', 'error' ]:
+ ldda.to_history_dataset_association( target_history=sample.history, add_to_history=True )
+ # Finished
+ job.state = self.app.model.DeferredJob.states.OK
+ self.sa_session.add( job )
+ self.sa_session.flush()
+ # Update the state of the relevant SampleDataset
+ new_status = self.app.model.SampleDataset.transfer_status.COMPLETE
+ self._update_sample_dataset_status( protocol=job.params[ 'protocol' ],
+ sample_id=int( job.params[ 'sample_id' ] ),
+ result_dict=job.params[ 'result' ],
+ new_status=new_status,
+ error_msg='' )
+ if sample.datasets and not sample.untransferred_dataset_files:
+ # Update the state of the sample to the sample's request type's final state.
+ new_state = sample.request.type.final_sample_state
+ self._update_sample_state( sample.id, new_state )
+ # Update the state of the request, if possible
+ self._update_request_state( sample.request.id )
+ def _missing_params( self, params, required_params ):
+ missing_params = filter( lambda x: x not in params, required_params )
+ if missing_params:
+ log.error( 'Job parameters missing required keys: %s' % ', '.join( missing_params ) )
+ return True
+ return False
+ def _update_sample_dataset_status( self, protocol, sample_id, result_dict, new_status, error_msg=None ):
+ # result_dict looks something like:
+ # {'url': '127.0.0.1/data/filtered_subreads.fa', 'name': 'Filtered reads'}
+ # Check if the new status is a valid transfer status
+ valid_statuses = [ v[1] for v in self.app.model.SampleDataset.transfer_status.items() ]
+ # TODO: error checking on valid new_status value
+ if protocol in [ 'http', 'https' ]:
+ sample_dataset = self.sa_session.query( self.app.model.SampleDataset ) \
+ .filter( and_( self.app.model.SampleDataset.table.c.sample_id == sample_id,
+ self.app.model.SampleDataset.table.c.name == result_dict[ 'name' ],
+ self.app.model.SampleDataset.table.c.file_path == result_dict[ 'url' ] ) ) \
+ .first()
+ elif protocol in [ 'scp' ]:
+ sample_dataset = self.sa_session.query( self.app.model.SampleDataset ).get( int( result_dict[ 'sample_dataset_id' ] ) )
+ sample_dataset.status = new_status
+ sample_dataset.error_msg = error_msg
+ self.sa_session.add( sample_dataset )
+ self.sa_session.flush()
+ def _update_sample_state( self, sample_id, new_state, comment=None ):
+ sample = self.sa_session.query( self.app.model.Sample ).get( sample_id )
+ if comment is None:
+ comment = 'Sample state set to %s' % str( new_state )
+ event = self.app.model.SampleEvent( sample, new_state, comment )
+ self.sa_session.add( event )
+ self.sa_session.flush()
+ def _update_request_state( self, request_id ):
+ request = self.sa_session.query( self.app.model.Request ).get( request_id )
+ # Make sure all the samples of the current request have the same state
+ common_state = request.samples_have_common_state
+ if not common_state:
+ # If the current request state is complete and one of its samples moved from
+ # the final sample state, then move the request state to In-progress
+ if request.is_complete:
+ message = "At least 1 sample state moved from the final sample state, so now the request's state is (%s)" % request.states.SUBMITTED
+ event = self.app.model.RequestEvent( request, request.states.SUBMITTED, message )
+ self.sa_session.add( event )
+ self.sa_session.flush()
+ else:
+ final_state = False
+ request_type_state = request.type.final_sample_state
+ if common_state.id == request_type_state.id:
+ # Since all the samples are in the final state, change the request state to 'Complete'
+ comment = "All samples of this sequencing request are in the final sample state (%s). " % request_type_state.name
+ state = request.states.COMPLETE
+ final_state = True
+ else:
+ comment = "All samples of this sequencing request are in the (%s) sample state. " % common_state.name
+ state = request.states.SUBMITTED
+ event = self.app.model.RequestEvent( request, state, comment )
+ self.sa_session.add( event )
+ self.sa_session.flush()
+ # TODO: handle email notification if it is configured to be sent when the samples are in this state.
+ def _execute_workflow( self, sample):
+ for key, value in sample.workflow['mappings'].iteritems():
+ if 'hda' not in value and 'ldda' in value:
+ # If HDA is already here, it's an external input, we're not copying anything.
+ ldda = self.sa_session.query( self.app.model.LibraryDatasetDatasetAssociation ).get( value['ldda'] )
+ if ldda.dataset.state in [ 'new', 'upload', 'queued', 'running', 'empty', 'discarded' ]:
+ log.error("Cannot import dataset '%s' to user history since its state is '%s'. " % ( ldda.name, ldda.dataset.state ))
+ elif ldda.dataset.state in [ 'ok', 'error' ]:
+ hda = ldda.to_history_dataset_association( target_history=sample.history, add_to_history=True )
+ sample.workflow['mappings'][key]['hda'] = hda.id
+ self.sa_session.add( sample )
+ self.sa_session.flush()
+ import copy
+ # Mapping keys are serialized as strings, so rebuild the mappings dict with
+ # integer step ids to match the step.id lookups below (the previous approach
+ # left the stale string keys in place alongside the new integer keys).
+ workflow_dict = copy.deepcopy( sample.workflow )
+ workflow_dict['mappings'] = dict( ( int( k ), v ) for k, v in workflow_dict['mappings'].items() )
+ fk_trans = FakeTrans(self.app, history = sample.history, user=sample.request.user)
+ workflow = self.sa_session.query(self.app.model.Workflow).get(workflow_dict['id'])
+ if not workflow:
+ log.error("Workflow mapping failure.")
+ return
+ if len( workflow.steps ) == 0:
+ log.error( "Workflow cannot be run because it does not have any steps" )
+ return
+ if workflow.has_cycles:
+ log.error( "Workflow cannot be run because it contains cycles" )
+ return
+ if workflow.has_errors:
+ log.error( "Workflow cannot be run because of validation errors in some steps" )
+ return
+ # Build the state for each step
+ errors = {}
+ has_upgrade_messages = False
+ has_errors = False
+ # Build a fake dictionary prior to execution.
+ # Prepare each step
+ for step in workflow.steps:
+ step.upgrade_messages = {}
+ # Construct modules
+ if step.type == 'tool' or step.type is None:
+ # Restore the tool state for the step
+ step.module = module_factory.from_workflow_step( fk_trans, step )
+ # Fix any missing parameters
+ step.upgrade_messages = step.module.check_and_update_state()
+ if step.upgrade_messages:
+ has_upgrade_messages = True
+ # Any connected input needs to have value DummyDataset (these
+ # are not persisted so we need to do it every time)
+ step.module.add_dummy_datasets( connections=step.input_connections )
+ # Store state with the step
+ step.state = step.module.state
+ # Error dict
+ if step.tool_errors:
+ has_errors = True
+ errors[step.id] = step.tool_errors
+ else:
+ ## Handle non-tool steps (e.g. input dataset modules)
+ step.module = module_factory.from_workflow_step( fk_trans, step )
+ step.state = step.module.get_runtime_state()
+ # Connections by input name
+ step.input_connections_by_name = dict( ( conn.input_name, conn ) for conn in step.input_connections )
+ for step in workflow.steps:
+ step.upgrade_messages = {}
+ # Connections by input name
+ step.input_connections_by_name = \
+ dict( ( conn.input_name, conn ) for conn in step.input_connections )
+ # Extract just the arguments for this step by prefix
+ step_errors = None
+ if step.type == 'tool' or step.type is None:
+ module = module_factory.from_workflow_step( fk_trans, step )
+ # Fix any missing parameters
+ step.upgrade_messages = module.check_and_update_state()
+ if step.upgrade_messages:
+ has_upgrade_messages = True
+ # Any connected input needs to have value DummyDataset (these
+ # are not persisted so we need to do it every time)
+ module.add_dummy_datasets( connections=step.input_connections )
+ # Get the tool
+ tool = module.tool
+ # Get the state
+ step.state = state = module.state
+ # Get old errors
+ old_errors = state.inputs.pop( "__errors__", {} )
+ if step_errors:
+ errors[step.id] = state.inputs["__errors__"] = step_errors
+ # Run each step, connecting outputs to inputs
+ workflow_invocation = self.app.model.WorkflowInvocation()
+ workflow_invocation.workflow = workflow
+ outputs = odict()
+ for i, step in enumerate( workflow.steps ):
+ job = None
+ if step.type == 'tool' or step.type is None:
+ tool = self.app.toolbox.tools_by_id[ step.tool_id ]
+ def callback( input, value, prefixed_name, prefixed_label ):
+ if isinstance( input, DataToolParameter ):
+ if prefixed_name in step.input_connections_by_name:
+ conn = step.input_connections_by_name[ prefixed_name ]
+ return outputs[ conn.output_step.id ][ conn.output_name ]
+ visit_input_values( tool.inputs, step.state.inputs, callback )
+ job, out_data = tool.execute( fk_trans, step.state.inputs, history=sample.history)
+ outputs[ step.id ] = out_data
+ # No replacement parameters are available in this deferred context, so pass
+ # an empty replacement dict to immediate post job actions.
+ replacement_dict = {}
+ for pja in step.post_job_actions:
+ if pja.action_type in ActionBox.immediate_actions:
+ ActionBox.execute(self.app, self.sa_session, pja, job, replacement_dict)
+ else:
+ job.add_post_job_action(pja)
+ else:
+ job, out_data = step.module.execute( fk_trans, step.state)
+ outputs[ step.id ] = out_data
+ if step.id in workflow_dict['mappings']:
+ data = self.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( workflow_dict['mappings'][ step.id ]['hda'] )
+ outputs[ step.id ]['output'] = data
+ workflow_invocation_step = self.app.model.WorkflowInvocationStep()
+ workflow_invocation_step.workflow_invocation = workflow_invocation
+ workflow_invocation_step.workflow_step = step
+ workflow_invocation_step.job = job
+ self.sa_session.add( workflow_invocation )
+ self.sa_session.flush()
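Both transfer plugins added below follow the same deferred job plugin contract: create_job() persists a params dict describing the work, and check_job() answers INVALID, READY, or WAIT until the transfer can be finished. A minimal sketch of that contract (class name and params are illustrative only, not part of this changeset):

    class ExampleTransferPlugin( DataTransfer ):
        def create_job( self, trans, **kwd ):
            # Persist everything needed to resume this work later.
            params = { 'type': 'init_transfer', 'sample_id': kwd[ 'sample' ].id }
            deferred_job = self.app.model.DeferredJob( state=self.app.model.DeferredJob.states.NEW,
                                                       plugin='ExampleTransferPlugin',
                                                       params=params )
            self.sa_session.add( deferred_job )
            self.sa_session.flush()
        def check_job( self, job ):
            if self._missing_params( job.params, [ 'type' ] ):
                return self.job_states.INVALID
            # READY hands the job to the plugin's run_job(); WAIT re-polls it later.
            return self.job_states.WAIT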
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/jobs/deferred/manual_data_transfer.py Wed Apr 13 22:13:23 2011 +0200
@@ -0,0 +1,103 @@
+"""
+Generic module for managing manual data transfer jobs using Galaxy's built-in file browser.
+This module can be used by various external services that are configured to transfer data manually.
+"""
+import logging, urllib2, re, shutil
+from data_transfer import *
+
+log = logging.getLogger( __name__ )
+
+__all__ = [ 'ManualDataTransferPlugin' ]
+
+class ManualDataTransferPlugin( DataTransfer ):
+ def __init__( self, app ):
+ super( ManualDataTransferPlugin, self ).__init__( app )
+ def create_job( self, trans, **kwd ):
+ if 'sample' in kwd and 'sample_datasets' in kwd and 'external_service' in kwd and 'external_service_type' in kwd:
+ sample = kwd[ 'sample' ]
+ sample_datasets = kwd[ 'sample_datasets' ]
+ external_service = kwd[ 'external_service' ]
+ external_service_type = kwd[ 'external_service_type' ]
+ # TODO: is there a better way to store the protocol?
+ protocol = external_service_type.data_transfer.keys()[0]
+ host = external_service.form_values.content[ 'host' ]
+ user_name = external_service.form_values.content[ 'user_name' ]
+ password = external_service.form_values.content[ 'password' ]
+ # TODO: In the future, we may want to implement a way for the user to associate a selected file with one of
+ # the run outputs configured in the <run_details><results> section of the external service config file. The
+ # following was a first pass at implementing something (the datatype was included in the sample_dataset_dict),
+ # but without a way for the user to associate stuff it's useless. However, allowing the user this ability may
+ # open a can of worms, so maybe we shouldn't do it???
+ #
+ #for run_result_file_name, run_result_file_datatype in external_service_type.run_details[ 'results' ].items():
+ # # external_service_type.run_details[ 'results' ] looks something like: {'dataset1_name': 'dataset1_datatype'}
+ # if run_result_file_datatype in external_service.form_values.content:
+ # datatype = external_service.form_values.content[ run_result_file_datatype ]
+ #
+ # When the transfer is automatic (the process used in the SMRT Portal plugin), the datasets and datatypes
+ # can be matched up to those configured in the <run_details><results> settings in the external service type config
+ # (e.g., pacific_biosciences_smrt_portal.xml). However, that's a bit trickier here since the user is manually
+ # selecting files for transfer.
+ sample_datasets_dict = {}
+ for sample_dataset in sample_datasets:
+ sample_dataset_id = sample_dataset.id
+ sample_dataset_dict = dict( sample_id = sample_dataset.sample.id,
+ name = sample_dataset.name,
+ file_path = sample_dataset.file_path,
+ status = sample_dataset.status,
+ error_msg = sample_dataset.error_msg,
+ size = sample_dataset.size,
+ external_service_id = sample_dataset.external_service.id )
+ sample_datasets_dict[ sample_dataset_id ] = sample_dataset_dict
+ params = { 'type' : 'init_transfer',
+ 'sample_id' : sample.id,
+ 'sample_datasets_dict' : sample_datasets_dict,
+ 'protocol' : protocol,
+ 'host' : host,
+ 'user_name' : user_name,
+ 'password' : password }
+ elif 'transfer_job_id' in kwd:
+ params = { 'type' : 'finish_transfer',
+ 'protocol' : kwd[ 'result' ][ 'protocol' ],
+ 'sample_id' : kwd[ 'sample_id' ],
+ 'result' : kwd[ 'result' ],
+ 'transfer_job_id' : kwd[ 'transfer_job_id' ] }
+ else:
+ log.error( 'No job was created because kwd does not include "sample", "sample_datasets", "external_service" and "external_service_type", or "transfer_job_id".' )
+ return
+ deferred_job = self.app.model.DeferredJob( state=self.app.model.DeferredJob.states.NEW,
+ plugin='ManualDataTransferPlugin',
+ params=params )
+ self.sa_session.add( deferred_job )
+ self.sa_session.flush()
+ log.debug( 'Created a deferred job in the ManualDataTransferPlugin of type: %s' % params[ 'type' ] )
+ # TODO: error reporting to caller (if possible?)
+ def check_job( self, job ):
+ if self._missing_params( job.params, [ 'type' ] ):
+ return self.job_states.INVALID
+ if job.params[ 'type' ] == 'init_transfer':
+ if job.params[ 'protocol' ] in [ 'http', 'https' ]:
+ raise Exception( "Manual data transfer is not yet supported for http(s)." )
+ elif job.params[ 'protocol' ] == 'scp':
+ if self._missing_params( job.params, [ 'protocol', 'host', 'user_name', 'password', 'sample_id', 'sample_datasets_dict' ] ):
+ return self.job_states.INVALID
+ # TODO: what kind of checks do we need here?
+ return self.job_states.READY
+ return self.job_states.WAIT
+ if job.params[ 'type' ] == 'finish_transfer':
+ if self._missing_params( job.params, [ 'transfer_job_id' ] ):
+ return self.job_states.INVALID
+ # Get the TransferJob object and add it to the DeferredJob so we only look it up once.
+ if not hasattr( job, 'transfer_job' ):
+ job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
+ state = self.app.transfer_manager.get_state( job.transfer_job )
+ if not state:
+ log.error( 'No state for transfer job id: %s' % job.transfer_job.id )
+ return self.job_states.WAIT
+ if state[ 'state' ] in self.app.model.TransferJob.terminal_states:
+ return self.job_states.READY
+ log.debug( "Checked on finish transfer job %s, not done yet." % job.id )
+ return self.job_states.WAIT
+ else:
+ log.error( 'Unknown job type for ManualDataTransferPlugin: %s' % str( job.params[ 'type' ] ) )
+ return self.job_states.INVALID
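For reference, the params dict stored for an 'init_transfer' job in this plugin ends up shaped roughly as follows (all values are illustrative):

    params = { 'type' : 'init_transfer',
               'sample_id' : 7,
               'protocol' : 'scp',
               'host' : 'sequencer.example.org',
               'user_name' : 'galaxy',
               'password' : 'secret',
               'sample_datasets_dict' : { 12 : { 'sample_id' : 7,
                                                 'name' : 'Filtered reads',
                                                 'file_path' : '/data/filtered_subreads.fa',
                                                 'status' : 'Not started',
                                                 'error_msg' : '',
                                                 'size' : '1.2G',
                                                 'external_service_id' : 3 } } }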
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py Wed Apr 13 22:13:23 2011 +0200
@@ -0,0 +1,113 @@
+"""
+Module for managing jobs in Pacific Bioscience's SMRT Portal and automatically transferring files
+produced by SMRT Portal.
+"""
+import logging, urllib2, re, shutil
+# Template and json are used below; import them explicitly in case the
+# data_transfer module does not re-export them.
+from string import Template
+from galaxy.util import json
+from data_transfer import *
+
+log = logging.getLogger( __name__ )
+
+__all__ = [ 'SMRTPortalPlugin' ]
+
+class SMRTPortalPlugin( DataTransfer ):
+ api_path = '/smrtportal/api'
+ # Regexes for the dataset name/datatype keys stored in the external service
+ # form values (e.g. 'dataset1_name'); assumed from the usage in create_job().
+ dataset_name_re = re.compile( r'(dataset\d+)_(name)' )
+ dataset_datatype_re = re.compile( r'(dataset\d+)_(datatype)' )
+ def __init__( self, app ):
+ super( SMRTPortalPlugin, self ).__init__( app )
+ def create_job( self, trans, **kwd ):
+ if 'secondary_analysis_job_id' in kwd:
+ sample = kwd[ 'sample' ]
+ smrt_job_id = kwd[ 'secondary_analysis_job_id' ]
+ external_service = sample.request.type.get_external_service( 'pacific_biosciences_smrt_portal' )
+ smrt_host = external_service.form_values.content[ 'host' ]
+ external_service_type_id = external_service.external_service_type_id
+ external_service_type = self.app.external_service_types.all_external_service_types[ external_service_type_id ]
+ results = {}
+ for k, v in external_service.form_values.content.items():
+ match = self.dataset_name_re.match( k ) or self.dataset_datatype_re.match( k )
+ if match:
+ id, field = match.groups()
+ if id in results:
+ results[ id ][ field ] = v
+ else:
+ results[ id ] = { field : v }
+ for id, attrs in results.items():
+ url_template = external_service_type.run_details[ 'results_urls' ].get( id + '_name' )
+ url = Template( url_template ).substitute( host = smrt_host, secondary_analysis_job_id = kwd[ 'secondary_analysis_job_id' ] )
+ results[ id ][ 'url' ] = url
+ if sample.workflow:
+ # DBTODO Make sure all ds| mappings get the URL of the dataset, for linking to later.
+ for k, v in sample.workflow[ 'mappings' ].iteritems():
+ if 'ds|%s' % id in v.values():
+ sample.workflow['mappings'][k]['url'] = url
+ self.sa_session.add(sample)
+ self.sa_session.flush()
+ params = { 'type' : 'init_transfer',
+ 'sample_id' : sample.id,
+ 'results' : results,
+ 'smrt_host' : smrt_host,
+ 'smrt_job_id' : smrt_job_id }
+ # Create a new SampleDataset for each run result dataset
+ self._associate_untransferred_datasets_with_sample( sample, external_service, results )
+ elif 'transfer_job_id' in kwd:
+ params = { 'type' : 'finish_transfer',
+ 'sample_id' : kwd[ 'sample_id' ],
+ 'result' : kwd[ 'result' ],
+ 'transfer_job_id' : kwd[ 'transfer_job_id' ] }
+ else:
+ log.error( 'No job was created because kwd does not include "secondary_analysis_job_id" or "transfer_job_id".' )
+ return
+ deferred_job = self.app.model.DeferredJob( state=self.app.model.DeferredJob.states.NEW,
+ plugin='SMRTPortalPlugin',
+ params=params )
+ self.sa_session.add( deferred_job )
+ self.sa_session.flush()
+ log.debug( 'Created a deferred job in the SMRTPortalPlugin of type: %s' % params[ 'type' ] )
+ # TODO: error reporting to caller (if possible?)
+ def check_job( self, job ):
+ if self._missing_params( job.params, [ 'type' ] ):
+ return self.job_states.INVALID
+ if job.params[ 'type' ] == 'init_transfer':
+ if self._missing_params( job.params, [ 'smrt_host', 'smrt_job_id' ] ):
+ return self.job_states.INVALID
+ url = 'http://' + job.params[ 'smrt_host' ] + self.api_path + '/Jobs/' + job.params[ 'smrt_job_id' ] + '/Status'
+ r = urllib2.urlopen( url )
+ status = json.from_json_string( r.read() )
+ # TODO: error handling: unexpected json or bad response, bad url, etc.
+ if status[ 'Code' ] == 'Completed':
+ log.debug( "SMRT Portal job '%s' is Completed. Initiating transfer." % job.params[ 'smrt_job_id' ] )
+ return self.job_states.READY
+ return self.job_states.WAIT
+ if job.params[ 'type' ] == 'finish_transfer':
+ if self._missing_params( job.params, [ 'transfer_job_id' ] ):
+ return self.job_states.INVALID
+ # Get the TransferJob object and add it to the DeferredJob so we only look it up once.
+ if not hasattr( job, 'transfer_job' ):
+ job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
+ state = self.app.transfer_manager.get_state( job.transfer_job )
+ if not state:
+ log.error( 'No state for transfer job id: %s' % job.transfer_job.id )
+ return self.job_states.WAIT
+ if state[ 'state' ] in self.app.model.TransferJob.terminal_states:
+ return self.job_states.READY
+ log.debug( "Checked on finish transfer job %s, not done yet." % job.id )
+ return self.job_states.WAIT
+ else:
+ log.error( 'Unknown job type for SMRTPortalPlugin: %s' % str( job.params[ 'type' ] ) )
+ return self.job_states.INVALID
+ def _associate_untransferred_datasets_with_sample( self, sample, external_service, results_dict ):
+ # results_dict looks something like:
+ # {'dataset2': {'datatype': 'fasta', 'url': '127.0.0.1:8080/data/filtered_subreads.fa', 'name': 'Filtered reads'} }
+ for key, val in results_dict.items():
+ file_path = val[ 'url' ]
+ status = self.app.model.SampleDataset.transfer_status.NOT_STARTED
+ name = val[ 'name' ]
+ size = 'unknown'
+ sample_dataset = self.app.model.SampleDataset( sample=sample,
+ file_path=file_path,
+ status=status,
+ name=name,
+ error_msg='',
+ size=size,
+ external_service=external_service )
+ self.sa_session.add( sample_dataset )
+ self.sa_session.flush()
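The status check in check_job() boils down to a single REST call against the SMRT Portal API. A standalone sketch of that call (host and job id are illustrative, and the json helper is assumed to live in galaxy.util):

    import urllib2
    from galaxy.util import json

    def smrt_job_completed( smrt_host, smrt_job_id ):
        # GET /smrtportal/api/Jobs/<id>/Status and test the 'Code' field.
        url = 'http://' + smrt_host + '/smrtportal/api/Jobs/' + str( smrt_job_id ) + '/Status'
        status = json.from_json_string( urllib2.urlopen( url ).read() )
        return status[ 'Code' ] == 'Completed'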
--- a/lib/galaxy/jobs/transfer_manager.py Wed Apr 13 21:02:06 2011 +0200
+++ b/lib/galaxy/jobs/transfer_manager.py Wed Apr 13 22:13:23 2011 +0200
@@ -24,11 +24,20 @@
self.restarter = threading.Thread( target=self.__restarter )
self.restarter.start()
def new( self, path=None, **kwd ):
- if 'url' not in kwd:
- raise Exception( 'Missing required parameter "url".' )
- # try: except JSON:
- transfer_job = self.app.model.TransferJob( state=self.app.model.TransferJob.states.NEW,
- params=kwd )
+ if 'protocol' not in kwd:
+ raise Exception( 'Missing required parameter "protocol".' )
+ protocol = kwd[ 'protocol' ]
+ if protocol in [ 'http', 'https' ]:
+ if 'url' not in kwd:
+ raise Exception( 'Missing required parameter "url".' )
+ transfer_job = self.app.model.TransferJob( state=self.app.model.TransferJob.states.NEW, params=kwd )
+ elif protocol == 'scp':
+ # TODO: add more checks here?
+ if 'sample_dataset_id' not in kwd:
+ raise Exception( 'Missing required parameter "sample_dataset_id".' )
+ if 'file_path' not in kwd:
+ raise Exception( 'Missing required parameter "file_path".' )
+ transfer_job = self.app.model.TransferJob( state=self.app.model.TransferJob.states.NEW, params=kwd )
+ else:
+ raise Exception( 'Unsupported protocol: %s' % protocol )
self.sa_session.add( transfer_job )
self.sa_session.flush()
return transfer_job
@@ -48,6 +57,8 @@
self.sa_session.add_all( transfer_jobs )
self.sa_session.flush()
for tj in transfer_jobs:
+ params_dict = tj.params
+ protocol = params_dict[ 'protocol' ]
# The transfer script should daemonize fairly quickly - if this is
# not the case, this process will need to be moved to a
# non-blocking method.
@@ -101,7 +112,7 @@
if tj_state['state'] in self.app.model.TransferJob.terminal_states:
log.debug( 'Transfer job %s is in terminal state: %s' % ( tj_state['transfer_job_id'], tj_state['state'] ) )
elif tj_state['state'] == self.app.model.TransferJob.states.PROGRESS and 'percent' in tj_state:
- log.debug( 'Transfer job %s is %s%% complete' % ( tj_state['transfer_job_id'], tj_state['percent'] ) )
+ log.debug( 'Transfer job %s is %s%% complete' % ( tj_state[ 'transfer_job_id' ], tj_state[ 'percent' ] ) )
if len( rval ) == 1:
return rval[0]
return rval
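With this change, callers of TransferManager.new() must always pass a protocol, and the required keyword arguments then depend on it. A usage sketch (all values illustrative):

    # http(s): a url is required.
    tj = app.transfer_manager.new( protocol='http',
                                   url='http://127.0.0.1/data/filtered_subreads.fa' )
    # scp: the SampleDataset id and remote file path are required; host,
    # user_name and password travel in the same params dict for transfer.py.
    tj = app.transfer_manager.new( protocol='scp',
                                   sample_dataset_id=42,
                                   host='sequencer.example.org',
                                   user_name='galaxy',
                                   password='secret',
                                   file_path='/data/filtered_subreads.fa' )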
--- a/lib/galaxy/model/__init__.py Wed Apr 13 21:02:06 2011 +0200
+++ b/lib/galaxy/model/__init__.py Wed Apr 13 22:13:23 2011 +0200
@@ -1796,7 +1796,9 @@
self.comment = comment
class ExternalService( object ):
- data_transfer_types = Bunch( SCP = 'scp' )
+ data_transfer_protocol = Bunch( HTTP = 'http',
+ HTTPS = 'https',
+ SCP = 'scp' )
def __init__( self, name=None, description=None, external_service_type_id=None, version=None, form_definition_id=None, form_values_id=None, deleted=None ):
self.name = name
self.description = description
@@ -1812,8 +1814,8 @@
trans.app.external_service_types.reload( self.external_service_type_id )
self.data_transfer = {}
external_service_type = self.get_external_service_type( trans )
- for data_transfer_type, data_transfer_obj in external_service_type.data_transfer.items():
- if data_transfer_type == self.data_transfer_types.SCP:
+ for data_transfer_protocol, data_transfer_obj in external_service_type.data_transfer.items():
+ if data_transfer_protocol == self.data_transfer_protocol.SCP:
scp_configs = {}
automatic_transfer = data_transfer_obj.config.get( 'automatic_transfer', 'false' )
scp_configs[ 'automatic_transfer' ] = util.string_as_bool( automatic_transfer )
@@ -1822,7 +1824,7 @@
scp_configs[ 'password' ] = self.form_values.content.get( data_transfer_obj.config.get( 'password', '' ), '' )
scp_configs[ 'data_location' ] = self.form_values.content.get( data_transfer_obj.config.get( 'data_location', '' ), '' )
scp_configs[ 'rename_dataset' ] = self.form_values.content.get( data_transfer_obj.config.get( 'rename_dataset', '' ), '' )
- self.data_transfer[ self.data_transfer_types.SCP ] = scp_configs
+ self.data_transfer[ self.data_transfer_protocol.SCP ] = scp_configs
def populate_actions( self, trans, item, param_dict=None ):
return self.get_external_service_type( trans ).actions.populate( self, item, param_dict=param_dict )
@@ -1992,14 +1994,14 @@
def print_ticks( d ):
pass
error_msg = 'Error encountered in determining the file size of %s on the external_service.' % filepath
- if not scp_configs['host'] or not scp_configs['user_name'] or not scp_configs['password']:
+ if not scp_configs[ 'host' ] or not scp_configs[ 'user_name' ] or not scp_configs[ 'password' ]:
return error_msg
login_str = '%s@%s' % ( scp_configs['user_name'], scp_configs['host'] )
cmd = 'ssh %s "du -sh \'%s\'"' % ( login_str, filepath )
try:
output = pexpect.run( cmd,
- events={ '.ssword:*': scp_configs['password']+'\r\n',
- pexpect.TIMEOUT:print_ticks},
+ events={ '.ssword:*' : scp_configs['password'] + '\r\n',
+ pexpect.TIMEOUT : print_ticks },
timeout=10 )
except Exception, e:
return error_msg
@@ -2013,7 +2015,7 @@
def run_details( self ):
# self.runs is a list of SampleRunAssociations ordered descending on update_time.
if self.runs:
- # Always use the lates run details template, self.runs[0] is a SampleRunAssociation
+ # Always use the latest run details template, self.runs[0] is a SampleRunAssociation
return self.runs[0]
# Inherit this sample's RequestType run details, if one exists.
return self.request.type.run_details
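After load_data_transfer_settings() runs, the scp entry of the data_transfer dict looks roughly like this (keys come from the code above; values are illustrative):

    scp_configs = { 'automatic_transfer' : False,
                    'host' : 'sequencer.example.org',
                    'user_name' : 'galaxy',
                    'password' : 'secret',
                    'data_location' : '/data/runs',
                    'rename_dataset' : 'experiment_name' }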
--- a/lib/galaxy/sample_tracking/data_transfer.py Wed Apr 13 21:02:06 2011 +0200
+++ b/lib/galaxy/sample_tracking/data_transfer.py Wed Apr 13 22:13:23 2011 +0200
@@ -14,11 +14,11 @@
# TODO: The 'automatic_transfer' setting is for future use. If set to True, we will need to
# ensure the sample has an associated destination data library before it moves to a certain state
# ( e.g., Run started ).
- self.config['automatic_transfer'] = elem.get( 'automatic_transfer' )
- self.config['host'] = elem.get( 'host' )
- self.config['user_name'] = elem.get( 'user_name' )
- self.config['password'] = elem.get( 'password' )
- self.config['data_location'] = elem.get( 'data_location' )
+ self.config[ 'automatic_transfer' ] = elem.get( 'automatic_transfer' )
+ self.config[ 'host' ] = elem.get( 'host' )
+ self.config[ 'user_name' ] = elem.get( 'user_name' )
+ self.config[ 'password' ] = elem.get( 'password' )
+ self.config[ 'data_location' ] = elem.get( 'data_location' )
# 'rename_dataset' is optional and it may not be defined in all external types
# It is only used is AB SOLiD external service type for now
rename_dataset = elem.get( 'rename_dataset', None )
--- a/lib/galaxy/sample_tracking/external_service_types.py Wed Apr 13 21:02:06 2011 +0200
+++ b/lib/galaxy/sample_tracking/external_service_types.py Wed Apr 13 22:13:23 2011 +0200
@@ -86,17 +86,18 @@
data_transfer_settings_elem = root.find( 'data_transfer_settings' )
# Currently only data transfer using scp is supported.
for data_transfer_elem in data_transfer_settings_elem.findall( "data_transfer" ):
- if data_transfer_elem.get( 'type' ) == model.ExternalService.data_transfer_types.SCP:
- scp_data_transfer = data_transfer_factories[ model.ExternalService.data_transfer_types.SCP ]
+ if data_transfer_elem.get( 'protocol' ) == model.ExternalService.data_transfer_protocol.SCP:
+ scp_data_transfer = data_transfer_factories[ model.ExternalService.data_transfer_protocol.SCP ]
scp_data_transfer.parse( self.config_file, data_transfer_elem )
- self.data_transfer[ model.ExternalService.data_transfer_types.SCP ] = scp_data_transfer
+ self.data_transfer[ model.ExternalService.data_transfer_protocol.SCP ] = scp_data_transfer
def parse_run_details( self, root ):
self.run_details = {}
run_details_elem = root.find( 'run_details' )
if run_details_elem:
results_elem = run_details_elem.find( 'results' )
if results_elem:
- # get the list of resulting datatypes
+ # Get the list of resulting datatypes
+ # TODO: the 'results_urls' attribute is only useful if the transfer protocol is http(s), so check if that is the case.
self.run_details[ 'results' ], self.run_details[ 'results_urls' ] = self.parse_run_details_results( results_elem )
def parse_run_details_results( self, root ):
datatypes_dict = {}
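A hypothetical <data_transfer_settings> fragment matching the renamed attribute (note it is now 'protocol', not 'type'; the attribute values name form fields, as in the model code above):

    <data_transfer_settings>
        <data_transfer protocol="scp"
                       automatic_transfer="false"
                       host="host"
                       user_name="user_name"
                       password="password"
                       data_location="data_location"/>
    </data_transfer_settings>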
--- a/lib/galaxy/web/controllers/external_service.py Wed Apr 13 21:02:06 2011 +0200
+++ b/lib/galaxy/web/controllers/external_service.py Wed Apr 13 22:13:23 2011 +0200
@@ -176,7 +176,6 @@
trans.sa_session.add( external_service )
trans.sa_session.add( external_service.form_values )
trans.sa_session.flush()
- external_service.load_data_transfer_settings( trans )
else:
# We're saving a newly created external_service
external_service_type = self.get_external_service_type( trans, external_service_type_id )
--- a/lib/galaxy/web/controllers/requests_admin.py Wed Apr 13 21:02:06 2011 +0200
+++ b/lib/galaxy/web/controllers/requests_admin.py Wed Apr 13 22:13:23 2011 +0200
@@ -351,22 +351,11 @@
request_id = kwd.get( 'request_id', None )
external_service_id = kwd.get( 'external_service_id', None )
files = []
- def handle_error( **kwd ):
- kwd[ 'status' ] = 'error'
- return trans.response.send_redirect( web.url_for( controller='requests_admin',
- action='select_datasets_to_transfer',
- **kwd ) )
- try:
- request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
- except:
- return invalid_id_redirect( trans, 'requests_admin', request_id )
- try:
- external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
- except:
- return invalid_id_redirect( trans, 'requests_admin', external_service_id, 'external_service', action='browse_external_services' )
- # load the data transfer settings
+ request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+ external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
+ # Load the data transfer settings
external_service.load_data_transfer_settings( trans )
- scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_types.SCP ]
+ scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_protocol.SCP ]
selected_datasets_to_transfer = util.restore_text( params.get( 'selected_datasets_to_transfer', '' ) )
if selected_datasets_to_transfer:
selected_datasets_to_transfer = selected_datasets_to_transfer.split(',')
@@ -383,10 +372,13 @@
if params.get( 'select_datasets_to_transfer_button', False ):
# Get the sample that was sequenced to produce these datasets.
if sample_id == 'none':
+ del kwd[ 'select_datasets_to_transfer_button' ]
message = 'Select the sample that was sequenced to produce the datasets you want to transfer.'
kwd[ 'message' ] = message
- del kwd[ 'select_datasets_to_transfer_button' ]
- handle_error( **kwd )
+ kwd[ 'status' ] = 'error'
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='select_datasets_to_transfer',
+ **kwd ) )
if not sample.library:
# Display an error if a sample has been selected that
# has not yet been associated with a destination library.
@@ -399,7 +391,7 @@
status=status,
message=message ) )
# Save the sample datasets
- sample_dataset_file_names = self.__save_sample_datasets( trans, sample, selected_datasets_to_transfer, external_service )
+ sample_dataset_file_names = self.__create_sample_datasets( trans, sample, selected_datasets_to_transfer, external_service )
if sample_dataset_file_names:
message = 'Datasets (%s) have been selected for sample (%s)' % \
( str( sample_dataset_file_names )[1:-1].replace( "'", "" ), sample.name )
@@ -426,24 +418,31 @@
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- request = trans.sa_session.query( trans.model.Request ).get( int( id ) )
- external_service = trans.sa_session.query( trans.model.ExternalService ).get( int( external_service_id ) )
+ request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+ external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
external_service.load_data_transfer_settings( trans )
- scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_types.SCP ]
- cmd = 'ssh %s@%s "ls -oghp \'%s\'"' % ( scp_configs['user_name'],
- scp_configs['host'],
+ scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_protocol.SCP ]
+ cmd = 'ssh %s@%s "ls -oghp \'%s\'"' % ( scp_configs[ 'user_name' ],
+ scp_configs[ 'host' ],
folder_path )
+ # TODO: this currently requires rsh / ssh keys to be set. If they are not, the process
+ # hangs. Add an event that handles the authentication message if keys are not set - the
+ # message is something like: "Are you sure you want to continue connecting (yes/no)."
output = pexpect.run( cmd,
- events={ '.ssword:*' : scp_configs[ 'password'] + '\r\n', pexpect.TIMEOUT : print_ticks },
+ events={ '.ssword:*' : scp_configs[ 'password' ] + '\r\n',
+ pexpect.TIMEOUT : print_ticks },
timeout=10 )
+ if 'Password:\r\n' in output:
+ # Strip the password prompt echoed by ssh from the output
+ output = output.replace( 'Password:\r\n', '' )
return unicode( output.replace( '\n', '<br/>' ) )
@web.json
def open_folder( self, trans, request_id, external_service_id, key ):
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- request = trans.sa_session.query( trans.model.Request ).get( int( request_id ) )
- external_service = trans.sa_session.query( trans.model.ExternalService ).get( int( external_service_id ) )
+ request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+ external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
folder_path = key
files_list = self.__get_files( trans, request, external_service, folder_path )
folder_contents = []
@@ -451,49 +450,57 @@
is_folder = False
if filename[-1] == os.sep:
is_folder = True
- full_path = os.path.join(folder_path, filename)
- node = {"title": filename,
- "isFolder": is_folder,
- "isLazy": is_folder,
- "tooltip": full_path,
- "key": full_path
- }
- folder_contents.append(node)
+ full_path = os.path.join( folder_path, filename )
+ node = { "title": filename,
+ "isFolder": is_folder,
+ "isLazy": is_folder,
+ "tooltip": full_path,
+ "key": full_path }
+ folder_contents.append( node )
return folder_contents
def __get_files( self, trans, request, external_service, folder_path ):
# Retrieves the filenames to be transferred from the remote host.
ok = True
external_service.load_data_transfer_settings( trans )
- scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_types.SCP ]
+ scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_protocol.SCP ]
if not scp_configs[ 'host' ] or not scp_configs[ 'user_name' ] or not scp_configs[ 'password' ]:
status = 'error'
message = "Error in external service login information."
ok = False
def print_ticks( d ):
pass
- cmd = 'ssh %s@%s "ls -p \'%s\'"' % ( scp_configs['user_name'], scp_configs['host'], folder_path )
+ cmd = 'ssh %s@%s "ls -p \'%s\'"' % ( scp_configs[ 'user_name' ], scp_configs[ 'host' ], folder_path )
+ # TODO: this currently requires rsh / ssh keys to be set. If they are not, the process
+ # hangs. Add an event that handles the authentication message if keys are not set - the
+ # message is something like: "Are you sure you want to continue connecting (yes/no)."
output = pexpect.run( cmd,
- events={ '.ssword:*' : scp_configs['password'] + '\r\n', pexpect.TIMEOUT : print_ticks },
+ events={ '.ssword:*' : scp_configs[ 'password' ] + '\r\n',
+ pexpect.TIMEOUT : print_ticks },
timeout=10 )
if 'No such file or directory' in output:
status = 'error'
message = "No folder named (%s) exists on the external service." % folder_path
ok = False
if ok:
- return output.splitlines()
+ if 'Password:' in output:
+ # Strip the password prompt echoed by ssh from the output
+ output_as_list = output.splitlines()
+ output_as_list.remove( 'Password:' )
+ else:
+ output_as_list = output.splitlines()
+ return output_as_list
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='select_datasets_to_transfer',
request_id=trans.security.encode_id( request.id ),
status=status,
message=message ) )
- def __save_sample_datasets( self, trans, sample, selected_datasets_to_transfer, external_service ):
+ def __create_sample_datasets( self, trans, sample, selected_datasets_to_transfer, external_service ):
external_service.load_data_transfer_settings( trans )
- scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_types.SCP ]
+ scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_protocol.SCP ]
sample_dataset_file_names = []
if selected_datasets_to_transfer:
for filepath in selected_datasets_to_transfer:
- # FIXME: handle folder selection
- # ignore folders for now
+ # FIXME: handle folder selection - ignore folders for now
if filepath[-1] != os.sep:
name = self.__rename_dataset( sample, filepath.split( '/' )[-1], scp_configs )
status = trans.app.model.SampleDataset.transfer_status.NOT_STARTED
@@ -522,25 +529,26 @@
else:
new_name = name
return util.sanitize_for_filename( new_name )
- def __check_library_add_permission( self, trans, target_library, target_folder ):
+ def __ensure_library_add_permission( self, trans, target_library, target_folder ):
"""
- Checks if the current admin user had ADD_LIBRARY permission on the target library
- and the target folder, if not provide the permissions.
+ Ensures the current admin user has ADD_LIBRARY permission on the target data library and folder.
"""
current_user_roles = trans.user.all_roles()
current_user_private_role = trans.app.security_agent.get_private_user_role( trans.user )
- # Make sure this user has LIBRARY_ADD permissions on the target library and folder.
- # If not, give them permission.
+ flush_needed = False
if not trans.app.security_agent.can_add_library_item( current_user_roles, target_library ):
lp = trans.model.LibraryPermissions( trans.app.security_agent.permitted_actions.LIBRARY_ADD.action,
target_library,
current_user_private_role )
trans.sa_session.add( lp )
+ flush_needed = True
if not trans.app.security_agent.can_add_library_item( current_user_roles, target_folder ):
lfp = trans.model.LibraryFolderPermissions( trans.app.security_agent.permitted_actions.LIBRARY_ADD.action,
target_folder,
current_user_private_role )
trans.sa_session.add( lfp )
+ flush_needed = True
+ if flush_needed:
trans.sa_session.flush()
def __create_data_transfer_messages( self, trans, sample, selected_sample_datasets ):
"""
@@ -587,7 +595,7 @@
messages = []
for external_service, dataset_elem in dataset_elements.items():
external_service.load_data_transfer_settings( trans )
- scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_types.SCP ]
+ scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_protocol.SCP ]
# Check data transfer settings
err_msg = self.__validate_data_transfer_settings( trans, sample.request.type, scp_configs )
if err_msg:
@@ -596,16 +604,16 @@
sample_id=trans.security.encode_id( sample.id ),
status='error',
message=err_msg ) )
- message = xml % dict( GALAXY_HOST=trans.request.host,
- API_KEY=trans.user.api_keys[0].key,
- DATA_HOST=scp_configs[ 'host' ],
- DATA_USER=scp_configs[ 'user_name' ],
- DATA_PASSWORD=scp_configs[ 'password' ],
- REQUEST_ID=str( sample.request.id ),
- SAMPLE_ID=str( sample.id ),
- LIBRARY_ID=str( sample.library.id ),
- FOLDER_ID=str( sample.folder.id ),
- DATASETS=dataset_elem )
+ message = xml % dict( GALAXY_HOST=trans.request.host,
+ API_KEY=trans.user.api_keys[0].key,
+ DATA_HOST=scp_configs[ 'host' ],
+ DATA_USER=scp_configs[ 'user_name' ],
+ DATA_PASSWORD=scp_configs[ 'password' ],
+ REQUEST_ID=str( sample.request.id ),
+ SAMPLE_ID=str( sample.id ),
+ LIBRARY_ID=str( sample.library.id ),
+ FOLDER_ID=str( sample.folder.id ),
+ DATASETS=dataset_elem )
messages.append( message.replace( '\n', '' ).replace( '\r', '' ) )
return messages
def __validate_data_transfer_settings( self, trans, request_type, scp_configs ):
@@ -620,10 +628,10 @@
err_msg += "The 'enable_api = True' setting is not correctly set in the Galaxy config file. "
if not trans.user.api_keys:
err_msg += "Set your API Key in your User Preferences to transfer datasets. "
- # check if library_import_dir is set
+ # Check if library_import_dir is set
if not trans.app.config.library_import_dir:
err_msg = "'The library_import_dir' setting is not correctly set in the Galaxy config file. "
- # check the RabbitMQ server settings in the config file
+ # Check the RabbitMQ server settings in the config file
for k, v in trans.app.config.amqp.items():
if not v:
err_msg += 'Set RabbitMQ server settings in the "galaxy_amqp" section of the Galaxy config file, specifically "%s" is not set.' % k
@@ -632,60 +640,79 @@
@web.expose
@web.require_admin
def initiate_data_transfer( self, trans, sample_id, sample_datasets=[], sample_dataset_id='' ):
- '''
- Initiate the transfer of the datasets from the external service to the target Galaxy data library:
- - The admin user must have LIBRARY_ADD permission for the target library and folder
- - Create an XML message encapsulating all the data transfer information and send it
- to the message queue (RabbitMQ broker).
- '''
+ # Initiate the transfer of the datasets from the external service to the target Galaxy data library.
+ # The admin user must have LIBRARY_ADD permission for the target library and folder
try:
sample = trans.sa_session.query( trans.model.Sample ).get( trans.security.decode_id( sample_id ) )
except:
return invalid_id_redirect( trans, 'requests_admin', sample_id, 'sample' )
- err_msg = ''
- # Make sure the current user has LIBRARY_ADD
- # permission on the target library and folder.
- self.__check_library_add_permission( trans, sample.library, sample.folder )
+ message = ""
+ status = "done"
+ # Make sure the current admin user has LIBRARY_ADD permission on the target data library and folder.
+ self.__ensure_library_add_permission( trans, sample.library, sample.folder )
if sample_dataset_id and not sample_datasets:
# Either a list of SampleDataset objects or a comma-separated string of
# encoded SampleDataset ids can be received. If the latter, parse the
- # sample_dataset_id to build the list of sample_datasets.
+ # sample_dataset_id string to build the list of sample_datasets.
id_list = util.listify( sample_dataset_id )
for sample_dataset_id in id_list:
sample_dataset = trans.sa_session.query( trans.model.SampleDataset ).get( trans.security.decode_id( sample_dataset_id ) )
sample_datasets.append( sample_dataset )
- # Create the message
- messages = self.__create_data_transfer_messages( trans, sample, sample_datasets )
- # Send the messages
- for message in messages:
- try:
- conn = amqp.Connection( host=trans.app.config.amqp[ 'host' ] + ":" + trans.app.config.amqp[ 'port' ],
- userid=trans.app.config.amqp[ 'userid' ],
- password=trans.app.config.amqp[ 'password' ],
- virtual_host=trans.app.config.amqp[ 'virtual_host' ],
- insist=False )
- chan = conn.channel()
- msg = amqp.Message( message,
- content_type='text/plain',
- application_headers={ 'msg_type': 'data_transfer' } )
- msg.properties[ "delivery_mode" ] = 2
- chan.basic_publish( msg,
- exchange=trans.app.config.amqp[ 'exchange' ],
- routing_key=trans.app.config.amqp[ 'routing_key' ] )
- chan.close()
- conn.close()
- except Exception, e:
- err_msg = "Error sending the data transfer message to the Galaxy AMQP message queue:<br/>%s" % str(e)
- if not err_msg:
- err_msg = "%i datasets have been queued for transfer from the external service." % len( sample_datasets )
- status = "done"
+ if trans.app.config.enable_beta_job_managers:
+ # For now, assume that all SampleDatasets use the same external service ( this may not be optimal ).
+ if sample_datasets:
+ external_service_type_id = sample_datasets[0].external_service.external_service_type_id
+ # Here external_service_type_id will be something like '454_life_sciences'
+ external_service = sample.request.type.get_external_service( external_service_type_id )
+ external_service_type = external_service.get_external_service_type( trans )
+ external_service.load_data_transfer_settings( trans )
+ # For now only scp is supported.
+ scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_protocol.SCP ]
+ if not scp_configs[ 'automatic_transfer' ]:
+ deferred_plugin = 'ManualDataTransferPlugin'
+ else:
+ raise Exception( "Automatic data transfer using scp is not yet suppored." )
+ trans.app.job_manager.deferred_job_queue.plugins[ deferred_plugin ].create_job( trans,
+ sample=sample,
+ sample_datasets=sample_datasets,
+ external_service=external_service,
+ external_service_type=external_service_type )
else:
- status = 'error'
+ # TODO: Using RabbitMq for now, but eliminate this entire block when we replace RabbitMq with Galaxy's
+ # own messaging engine. We're holding off on using the new way to transfer files manually until we
+ # implement a Galaxy-proprietary messaging engine because the deferred job plugins currently perform
+ # constant db hits to check for deferred jobs that are not in a finished state.
+ # Create the message
+ messages = self.__create_data_transfer_messages( trans, sample, sample_datasets )
+ # Send the messages
+ for rmq_msg in messages:
+ try:
+ conn = amqp.Connection( host=trans.app.config.amqp[ 'host' ] + ":" + trans.app.config.amqp[ 'port' ],
+ userid=trans.app.config.amqp[ 'userid' ],
+ password=trans.app.config.amqp[ 'password' ],
+ virtual_host=trans.app.config.amqp[ 'virtual_host' ],
+ insist=False )
+ chan = conn.channel()
+ msg = amqp.Message( rmq_msg,
+ content_type='text/plain',
+ application_headers={ 'msg_type': 'data_transfer' } )
+ msg.properties[ "delivery_mode" ] = 2
+ chan.basic_publish( msg,
+ exchange=trans.app.config.amqp[ 'exchange' ],
+ routing_key=trans.app.config.amqp[ 'routing_key' ] )
+ chan.close()
+ conn.close()
+ except Exception, e:
+ message = "Error sending the data transfer message to the Galaxy AMQP message queue:<br/>%s" % str(e)
+ status = "error"
+ if not message:
+ message = "%i datasets have been queued for transfer from the external service." % len( sample_datasets )
+ status = "done"
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='manage_datasets',
sample_id=trans.security.encode_id( sample.id ),
- status=status,
- message=err_msg ) )
+ message=message,
+ status=status ) )
@web.expose
def update_sample_dataset_status(self, trans, cntrller, sample_dataset_ids, new_status, error_msg=None ):
# check if the new status is a valid transfer status
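The pexpect pattern used throughout this controller, answering any '...ssword:' prompt and swallowing timeouts while the remote command runs, can be isolated as a small helper. A sketch, not part of this changeset:

    import pexpect

    def run_ssh_command( cmd, password, timeout=10 ):
        def print_ticks( d ):
            pass
        return pexpect.run( cmd,
                            events={ '.ssword:*' : password + '\r\n',
                                     pexpect.TIMEOUT : print_ticks },
                            timeout=timeout )

    # e.g. output = run_ssh_command( 'ssh galaxy@host "ls -p \'/data\'"', 'secret' )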
--- a/scripts/galaxy_messaging/server/amqp_consumer.py Wed Apr 13 21:02:06 2011 +0200
+++ b/scripts/galaxy_messaging/server/amqp_consumer.py Wed Apr 13 22:13:23 2011 +0200
@@ -3,7 +3,7 @@
Galaxy uses AMQ protocol to receive messages from external sources like
bar code scanners. Galaxy has been tested against RabbitMQ AMQP implementation.
For Galaxy to receive messages from a message queue the RabbitMQ server has
-to be set up with a user account and other parameters listed in the [galaxy:amq]
+to be set up with a user account and other parameters listed in the [galaxy_amqp]
section in the universe_wsgi.ini config file
Once the RabbitMQ server has been setup and started with the given parameters,
this script can be run to receive messages and update the Galaxy database accordingly
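For reference, the [galaxy_amqp] section this docstring points at needs at least the keys read by the data transfer code (all values illustrative):

    [galaxy_amqp]
    host = 127.0.0.1
    port = 5672
    userid = galaxy
    password = secret
    virtual_host = galaxy_messaging_engine
    exchange = galaxy_exchange
    routing_key = bar_code_scanner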
--- a/scripts/transfer.py Wed Apr 13 21:02:06 2011 +0200
+++ b/scripts/transfer.py Wed Apr 13 22:13:23 2011 +0200
@@ -4,22 +4,17 @@
Manager (galaxy.jobs.transfer_manager) and should not normally be invoked by
hand.
"""
-
-import os, sys, optparse, ConfigParser, socket, SocketServer, threading, logging, random
-
-import urllib2, tempfile
-
-import time
-
-log = logging.getLogger( __name__ )
-log.setLevel( logging.INFO )
-handler = logging.StreamHandler( sys.stdout )
-log.addHandler( handler )
+import os, sys, optparse, ConfigParser, socket, SocketServer, threading, logging, random, urllib2, tempfile, time
galaxy_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), '..' ) )
sys.path.insert( 0, os.path.abspath( os.path.join( galaxy_root, 'lib' ) ) )
from galaxy import eggs
+
+import pkg_resources
+pkg_resources.require( "pexpect" )
+import pexpect
+
eggs.require( "SQLAlchemy >= 0.4" )
from sqlalchemy import *
@@ -32,6 +27,11 @@
eggs.require( 'python_daemon' )
from daemon import DaemonContext
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+log.addHandler( handler )
+
debug = False
slow = False
@@ -49,7 +49,7 @@
def parse( self ):
self.opts, args = self.parser.parse_args()
if len( args ) != 1:
- log.error( 'usage: transfer.py [options] <transfer job id>' )
+ log.error( 'usage: transfer.py <transfer job id>' )
sys.exit( 1 )
try:
self.transfer_job_id = int( args[0] )
@@ -138,57 +138,55 @@
self.result = result
def transfer( app, transfer_job_id ):
-
transfer_job = app.get_transfer_job( transfer_job_id )
if transfer_job is None:
log.error( 'Invalid transfer job ID: %s' % transfer_job_id )
return False
-
port_range = app.config.get( 'app:main', 'transfer_worker_port_range' )
try:
port_range = [ int( p ) for p in port_range.split( '-' ) ]
except Exception, e:
log.error( 'Invalid port range set in transfer_worker_port_range: %s: %s' % ( port_range, str( e ) ) )
return False
-
- protocol = transfer_job.params['url'].split( '://' )[0]
- if protocol not in ( 'http', 'https' ):
+ protocol = transfer_job.params[ 'protocol' ]
+ if protocol not in ( 'http', 'https', 'scp' ):
log.error( 'Unsupported protocol: %s' % protocol )
return False
-
state_result = StateResult( result = dict( state = transfer_job.states.RUNNING, info='Transfer process starting up.' ) )
-
listener_server = ListenerServer( range( port_range[0], port_range[1] + 1 ), ListenerRequestHandler, app, transfer_job, state_result )
-
# daemonize here (if desired)
if not debug:
daemon_context = DaemonContext( files_preserve=[ listener_server.fileno() ], working_directory=os.getcwd() )
daemon_context.open()
-
# If this fails, it'll never be detected. Hopefully it won't fail since it succeeded once.
app.connect_database() # daemon closed the database fd
transfer_job = app.get_transfer_job( transfer_job_id )
-
listener_thread = threading.Thread( target=listener_server.serve_forever )
listener_thread.setDaemon( True )
listener_thread.start()
-
# Store this process' pid so unhandled deaths can be handled by the restarter
transfer_job.pid = os.getpid()
app.sa_session.add( transfer_job )
app.sa_session.flush()
-
terminal_state = None
- if protocol in ( 'http', 'https' ):
- for state in http_transfer( transfer_job ):
- state_result.result = state
- if state['state'] in transfer_job.terminal_states:
- terminal_state = state
+ if protocol in [ 'http', 'https' ]:
+ for transfer_result_dict in http_transfer( transfer_job ):
+ state_result.result = transfer_result_dict
+ if transfer_result_dict[ 'state' ] in transfer_job.terminal_states:
+ terminal_state = transfer_result_dict
+ elif protocol in [ 'scp' ]:
+ # Transfer the file using scp
+ transfer_result_dict = scp_transfer( transfer_job )
+ # Handle the state of the transfer
+ state = transfer_result_dict[ 'state' ]
+ state_result.result = transfer_result_dict
+ if state in transfer_job.terminal_states:
+ terminal_state = transfer_result_dict
if terminal_state is not None:
- transfer_job.state = terminal_state['state']
- for name in ( 'info', 'path' ):
+ transfer_job.state = terminal_state[ 'state' ]
+ for name in [ 'info', 'path' ]:
if name in terminal_state:
- transfer_job.__setattr__( name, terminal_state[name] )
+ transfer_job.__setattr__( name, terminal_state[ name ] )
else:
transfer_job.state = transfer_job.states.ERROR
transfer_job.info = 'Unknown error encountered by transfer worker.'
@@ -197,9 +195,7 @@
return True
def http_transfer( transfer_job ):
- """
- "Plugin" for handling http(s) transfers.
- """
+ """Plugin" for handling http(s) transfers."""
url = transfer_job.params['url']
try:
f = urllib2.urlopen( url )
@@ -243,16 +239,41 @@
return
return
+def scp_transfer( transfer_job ):
+ """Plugin" for handling scp transfers using pexpect"""
+ def print_ticks( d ):
+ pass
+ host = transfer_job.params[ 'host' ]
+ user_name = transfer_job.params[ 'user_name' ]
+ password = transfer_job.params[ 'password' ]
+ file_path = transfer_job.params[ 'file_path' ]
+ try:
+ fh, fn = tempfile.mkstemp()
+ except Exception, e:
+ return dict( state = transfer_job.states.ERROR, info = 'Unable to create temporary file for transfer: %s' % str( e ) )
+ try:
+ # TODO: add the ability to determine progress of the copy here like we do in the http_transfer above.
+ cmd = "scp %s@%s:'%s' '%s'" % ( user_name,
+ host,
+ file_path.replace( ' ', '\ ' ),
+ fn )
+ output = pexpect.run( cmd,
+ events={ '.ssword:*': password + '\r\n',
+ pexpect.TIMEOUT: print_ticks },
+ timeout=10 )
+ return dict( state = transfer_job.states.DONE, path = fn )
+ except Exception, e:
+ return dict( state = transfer_job.states.ERROR, info = 'Error during file transfer: %s' % str( e ) )
+
if __name__ == '__main__':
-
arg_handler = ArgHandler()
arg_handler.parse()
app = GalaxyApp( arg_handler.opts.config )
- log.debug( 'Initiating transfer' )
+ log.debug( 'Initiating transfer...' )
if transfer( app, arg_handler.transfer_job_id ):
log.debug( 'Finished' )
else:
- log.error( 'Error in transfer process' )
+ log.error( 'Error in transfer process...' )
sys.exit( 1 )
sys.exit( 0 )
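Each transfer "plugin" in this script communicates through small state dicts, and the daemon persists the terminal one onto the TransferJob. The two terminal shapes, taken from http_transfer() and scp_transfer() above:

    dict( state=transfer_job.states.DONE, path=local_path )       # success: record the downloaded file's path
    dict( state=transfer_job.states.ERROR, info=error_message )   # failure: keep only an informational message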
--- a/templates/admin/requests/select_datasets_to_transfer.mako Wed Apr 13 21:02:06 2011 +0200
+++ b/templates/admin/requests/select_datasets_to_transfer.mako Wed Apr 13 22:13:23 2011 +0200
@@ -23,56 +23,60 @@
minExpandLevel: 0, // 1: root node is not collapsible
persist: false,
checkbox: true,
- selectMode: 3,
+ selectMode: 3,
onPostInit: function(isReloading, isError) {
-// alert("reloading: "+isReloading+", error:"+isError);
- logMsg("onPostInit(%o, %o) - %o", isReloading, isError, this);
- // Re-fire onActivate, so the text is updated
- this.reactivate();
+ //alert("reloading: "+isReloading+", error:"+isError);
+ logMsg("onPostInit(%o, %o) - %o", isReloading, isError, this);
+ // Re-fire onActivate, so the text is updated
+ this.reactivate();
},
fx: { height: "toggle", duration: 200 },
- // initAjax is hard to fake, so we pass the children as object array:
- initAjax: {url: "${h.url_for( controller='requests_admin', action='open_folder' )}",
- dataType: "json",
- data: { request_id: "${request.id}", external_service_id: "${external_service.id}", key: "${scp_configs['data_location']}" },
- },
- onLazyRead: function(dtnode){
- dtnode.appendAjax({
- url: "${h.url_for( controller='requests_admin', action='open_folder' )}",
- dataType: "json",
- data: { request_id: "${request.id}", external_service_id: "${external_service.id}", key: dtnode.data.key },
+ // initAjax is hard to fake, so we pass the children as object array:
+ initAjax: {url: "${h.url_for( controller='requests_admin', action='open_folder' )}",
+ dataType: "json",
+ data: { request_id: "${trans.security.encode_id( request.id )}", external_service_id: "${trans.security.encode_id( external_service.id )}", key: "${scp_configs['data_location']}" },
+ },
+ onLazyRead: function(dtnode){
+ dtnode.appendAjax({
+ url: "${h.url_for( controller='requests_admin', action='open_folder' )}",
+ dataType: "json",
+ data: { request_id: "${trans.security.encode_id( request.id )}", external_service_id: "${trans.security.encode_id( external_service.id )}", key: dtnode.data.key },
+ });
+ },
+ onSelect: function(select, dtnode) {
+ // Display list of selected nodes
+ var selNodes = dtnode.tree.getSelectedNodes();
+ // convert to title/key array
+ var selKeys = $.map(selNodes, function(node){
+ return node.data.key;
+ });
+ document.select_datasets_to_transfer.selected_datasets_to_transfer.value = selKeys.join(",")
+ },
+ onActivate: function(dtnode) {
+ var cell = $("#file_details");
+ var selected_value;
+ if (dtnode.data.key == 'root') {
+ selected_value = "${scp_configs['data_location']}/";
+ } else {
+ selected_value = dtnode.data.key;
+ };
+ if (selected_value.charAt(selected_value.length-1) != '/') {
+ // Make ajax call
+ $.ajax( {
+ type: "POST",
+ url: "${h.url_for( controller='requests_admin', action='get_file_details' )}",
+ dataType: "json",
+ data: { request_id: "${trans.security.encode_id(request.id)}", external_service_id: "${trans.security.encode_id(external_service.id)}", folder_path: selected_value },
+ success : function ( data ) {
+ cell.html( '<label>'+data+'</label>' )
+ }
});
- },
- onSelect: function(select, dtnode) {
- // Display list of selected nodes
- var selNodes = dtnode.tree.getSelectedNodes();
- // convert to title/key array
- var selKeys = $.map(selNodes, function(node){
- return node.data.key;
- });
- document.select_datasets_to_transfer.selected_datasets_to_transfer.value = selKeys.join(",")
- },
- onActivate: function(dtnode) {
- var cell = $("#file_details");
- var selected_value = dtnode.data.key
- if(selected_value.charAt(selected_value.length-1) != '/') {
- // Make ajax call
- $.ajax( {
- type: "POST",
- url: "${h.url_for( controller='requests_admin', action='get_file_details' )}",
- dataType: "json",
- data: { request_id: "${request.id}", external_service_id: "${external_service.id}", folder_path: dtnode.data.key },
- success : function ( data ) {
- cell.html( '<label>'+data+'</label>' )
- }
- });
} else {
- cell.html( '' )
- }
- },
- });
+ cell.html( '' );
+ };
+ },
+ });
});
-
</script><%
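The substantive change in this template is that raw database ids in the AJAX payloads are replaced with trans.security.encode_id(...), so the browser never sees (or can enumerate) real row ids. A toy sketch of the round trip; the XOR cipher below is purely illustrative and is not Galaxy's actual id cipher:

SECRET = 0x5A5A5A5A

def encode_id(db_id):
    # Obscure the integer id before sending it to the browser.
    return format(db_id ^ SECRET, 'x')

def decode_id(encoded):
    # Recover the original id from a request parameter.
    return int(encoded, 16) ^ SECRET

assert decode_id(encode_id(42)) == 42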
http://bitbucket.org/galaxy/galaxy-central/changeset/67815f37e073/
changeset: r5417:67815f37e073
user: Rob Hooft
date: 2011-04-13 22:36:57
summary: * Two scripts:
* Extract "section" headers from the toolbox XML file, and put them into the tool XML;
prepare the tool XML for associating "tags" with the tool.
* Build a toolbox XML file based on augmented tool XML files with section identification.
affected #: 2 files (4.5 KB)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/build_toolbox.py Wed Apr 13 22:36:57 2011 +0200
@@ -0,0 +1,106 @@
+import os
+import sys
+from xml.etree import ElementTree as ET
+
+# Todo: Keep order by "prioritizing" tools in sections
+# Todo: Labels (as lower level sections?)
+
+def prettify(elem):
+ from xml.dom import minidom
+ rough_string = ET.tostring(elem, 'utf-8')
+ repaired = minidom.parseString(rough_string)
+ return repaired.toprettyxml(indent=' ')
+
+# Build a list of all toolconf xml files in the tools directory
+def getfnl(startdir):
+ filenamelist = []
+ for root, dirs, files in os.walk(startdir):
+ for fn in files:
+ fullfn = os.path.join(root, fn)
+ if fn.endswith('toolconf.xml'):
+ filenamelist.append(fullfn)
+ elif fn.endswith('.xml'):
+ try:
+ doc = ET.parse(fullfn)
+ except:
+ print "An OOPS on", fullfn
+ raise
+ rootelement = doc.getroot()
+ if rootelement.tag == 'tool':
+ if rootelement.findall('section'):
+ filenamelist.append(fullfn)
+ return filenamelist
+
+class ToolSections(object):
+ def __init__(self):
+ self.tools = {'':[]}
+ self.sections = [''] # Empty section first
+
+ def add(self, el, sectionelement):
+ if sectionelement is not None:
+ section = str(sectionelement.text)
+ section = section.strip()
+ else:
+ section = ''
+ if not self.tools.has_key(section):
+ self.sections.append(section)
+ self.tools[section]= []
+ self.tools[section].append(el)
+
+# Analyze all the toolconf xml files given in the filenamelist (fnl)
+# Build a list of all sections
+def scanfiles(fnl):
+ ts = ToolSections()
+ for fn in fnl: # specialized toolconf.xml files.
+ doc = ET.parse(fn)
+ root = doc.getroot()
+
+ if root.tag == 'tool':
+ tools = [root]
+ else:
+ tools = doc.findall('tool')
+
+ for tool in tools:
+ if tool.attrib.has_key('file'):
+ fileattrib = os.path.join(os.getcwd(),
+ os.path.dirname(fn),
+ tool.attrib['file'])
+ else: # It must be the current file
+ fileattrib = os.path.join(os.getcwd(), fn)
+ attrib = {'file': fileattrib}
+ tags = tool.find('tags')
+ if tags:
+ tagra = []
+ for tag in tags.findall('tag'):
+ tagra.append(tag.text)
+ attrib['tags'] = ",".join(tagra)
+ toolelement = ET.Element('tool', attrib)
+ ts.add(toolelement, tool.find('section'))
+ return ts
+
+def assemble():
+ fnl = getfnl('tools')
+ fnl.sort()
+
+ ts = scanfiles(fnl)
+
+ toolbox = ET.Element('toolbox')
+
+ sectionnumber = 0
+ for section in ts.sections:
+ if section:
+ sectionnumber += 1
+ ident = "section%d" % sectionnumber
+ sectionelement = ET.SubElement(toolbox,'section', {'name': section,
+ 'id': ident})
+ puttoolsin = sectionelement
+ else:
+ puttoolsin = toolbox
+ for tool in ts.tools[section]:
+ attrib = tool.attrib
+ toolelement = ET.SubElement(puttoolsin, 'tool', attrib)
+
+ print prettify(toolbox)
+
+if __name__ == "__main__":
+ assemble()
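Before moving to the companion script, a standalone sketch of the grouping that ToolSections performs: tools whose XML carries a <section> element are bucketed under that section name, and everything else lands in the leading '' bucket, which is later emitted at the toolbox root. The inline XML strings stand in for the on-disk tool files:

from xml.etree import ElementTree as ET

tool_xmls = [
    '<tool id="upload"><section>Get Data</section></tool>',
    '<tool id="orphan"/>',
]

tools = {'': []}
sections = ['']  # empty section stays first
for raw in tool_xmls:
    root = ET.fromstring(raw)
    el = root.find('section')
    section = el.text.strip() if el is not None else ''
    if section not in tools:
        sections.append(section)
        tools[section] = []
    tools[section].append(root.attrib['id'])

print(sections)  # ['', 'Get Data']
print(tools)     # {'': ['orphan'], 'Get Data': ['upload']}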
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/extract_toolbox_sections.py Wed Apr 13 22:36:57 2011 +0200
@@ -0,0 +1,43 @@
+import os
+import sys
+from xml.etree import ElementTree as ET
+
+# Todo: ""
+
+def main():
+ doc = ET.parse("tool_conf.xml")
+ root = doc.getroot()
+
+ for section in root.findall("section"):
+ sectionname = section.attrib['name']
+ for tool in section.findall("tool"):
+ upgradeFile(tool, sectionname)
+ for tool in root.findall("tool"):
+ upgradeFile(tool, "")
+
+def upgradeFile(tool, sectionname):
+ toolfile = tool.attrib["file"]
+ realtoolfile = os.path.join(os.getcwd(), "tools", toolfile)
+ toolxmlfile = ET.parse(realtoolfile)
+ localroot = toolxmlfile.getroot()
+
+ for existingsectionelement in localroot.findall("section"):
+ localroot.remove(existingsectionelement)
+
+ for existingtagselement in localroot.findall("tags"):
+ localroot.remove(existingtagselement)
+
+ sectionelement = ET.Element("section")
+ sectionelement.text = sectionname
+ sectionelement.tail = "\n "
+ localroot.insert(0, sectionelement)
+
+ tagselement = ET.Element("tags")
+ tagselement.tail = "\n "
+ localroot.insert(1,tagselement)
+
+ toolxmlfile.write(realtoolfile)
+
+
+if __name__ == "__main__":
+ main()
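And a sketch of the rewrite step in upgradeFile: any stale <section> or <tags> children are dropped, then fresh ones are inserted at the top of the tool element. This is done in memory here, whereas the real script writes the result back over the original file:

from xml.etree import ElementTree as ET

root = ET.fromstring('<tool id="upload"><section>stale</section><command/></tool>')
for old in root.findall('section') + root.findall('tags'):
    root.remove(old)

section = ET.Element('section')
section.text = 'Get Data'
root.insert(0, section)        # section first
root.insert(1, ET.Element('tags'))  # then an empty tags element

print(ET.tostring(root).decode())
# <tool id="upload"><section>Get Data</section><tags /><command /></tool>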
http://bitbucket.org/galaxy/galaxy-central/changeset/49286ea452d8/
changeset: r5418:49286ea452d8
user: natefoo
date: 2011-04-13 23:19:24
summary: Unfinished preliminary support for tagging tools. Requires enable_tool_tags = True in universe_wsgi.ini.
affected #: 8 files (8.6 KB)
--- a/lib/galaxy/model/__init__.py Wed Apr 13 22:36:57 2011 +0200
+++ b/lib/galaxy/model/__init__.py Wed Apr 13 23:19:24 2011 +0200
@@ -2235,6 +2235,16 @@
class VisualizationTagAssociation ( ItemTagAssociation ):
pass
+class ToolTagAssociation( ItemTagAssociation ):
+ def __init__( self, id=None, user=None, tool_id=None, tag_id=None, user_tname=None, value=None ):
+ self.id = id
+ self.user = user
+ self.tool_id = tool_id
+ self.tag_id = tag_id
+ self.user_tname = user_tname
+ self.value = None
+ self.user_value = None
+
# Item annotation classes.
class HistoryAnnotationAssociation( object ):
--- a/lib/galaxy/model/mapping.py Wed Apr 13 22:36:57 2011 +0200
+++ b/lib/galaxy/model/mapping.py Wed Apr 13 23:19:24 2011 +0200
@@ -851,6 +851,15 @@
Column( "value", TrimmedString(255), index=True),
Column( "user_value", TrimmedString(255), index=True) )
+ToolTagAssociation.table = Table( "tool_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "tool_id", TrimmedString(255), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
# Annotation tables.
HistoryAnnotationAssociation.table = Table( "history_annotation_association", metadata,
@@ -1575,6 +1584,10 @@
properties=dict( tag=relation(Tag, backref="tagged_visualizations"), user=relation( User ) )
)
+assign_mapper( context, ToolTagAssociation, ToolTagAssociation.table,
+ properties=dict( tag=relation(Tag, backref="tagged_tools"), user=relation( User ) )
+ )
+
# Annotation tables.
assign_mapper( context, HistoryAnnotationAssociation, HistoryAnnotationAssociation.table,
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py Wed Apr 13 23:19:24 2011 +0200
@@ -0,0 +1,49 @@
+"""
+Migration script to create table for storing tool tag associations.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+from galaxy.model.custom_types import *
+
+import datetime
+now = datetime.datetime.utcnow
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+# Table to add
+
+ToolTagAssociation_table = Table( "tool_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "tool_id", TrimmedString(255), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+
+ # Create tool_tag_association table
+ try:
+ ToolTagAssociation_table.create()
+ except Exception, e:
+ log.error( "Creating tool_tag_association table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+
+ # Drop tool_tag_association table
+ try:
+ ToolTagAssociation_table.drop()
+ except Exception, e:
+ log.error( "Dropping tool_tag_association table failed: %s" % str( e ) )
--- a/lib/galaxy/tags/tag_handler.py Wed Apr 13 22:36:57 2011 +0200
+++ b/lib/galaxy/tags/tag_handler.py Wed Apr 13 23:19:24 2011 +0200
@@ -58,6 +58,15 @@
tag_id = row[0]
community_tags.append( self.get_tag_by_id( trans, tag_id ) )
return community_tags
+ def get_tool_tags( self, trans ):
+ result_set = trans.sa_session.execute( select( columns=[ trans.app.model.ToolTagAssociation.table.c.tag_id ],
+ from_obj=trans.app.model.ToolTagAssociation.table ).distinct() )
+
+ tags = []
+ for row in result_set:
+ tag_id = row[0]
+ tags.append( self.get_tag_by_id( trans, tag_id ) )
+ return tags
def remove_item_tag( self, trans, user, item, tag_name ):
"""Remove a tag from an item."""
# Get item tag association.
--- a/lib/galaxy/tools/__init__.py Wed Apr 13 22:36:57 2011 +0200
+++ b/lib/galaxy/tools/__init__.py Wed Apr 13 23:19:24 2011 +0200
@@ -75,6 +75,27 @@
try:
path = elem.get( "file" )
tool = self.load_tool( os.path.join( self.tool_root_dir, path ) )
+ if self.app.config.get_bool( 'enable_tool_tags', False ):
+ tag_names = elem.get( "tags", "" ).split( "," )
+ for tag_name in tag_names:
+ if tag_name == '':
+ continue
+ tag = self.sa_session.query( self.app.model.Tag ).filter_by( name=tag_name ).first()
+ if not tag:
+ tag = self.app.model.Tag( name=tag_name )
+ self.sa_session.add( tag )
+ self.sa_session.flush()
+ tta = self.app.model.ToolTagAssociation( tool_id=tool.id, tag_id=tag.id )
+ self.sa_session.add( tta )
+ self.sa_session.flush()
+ else:
+ for tagged_tool in tag.tagged_tools:
+ if tagged_tool.tool_id == tool.id:
+ break
+ else:
+ tta = self.app.model.ToolTagAssociation( tool_id=tool.id, tag_id=tag.id )
+ self.sa_session.add( tta )
+ self.sa_session.flush()
self.tools_by_id[ tool.id ] = tool
key = 'tool_' + tool.id
panel_dict[ key ] = tool
@@ -181,6 +202,13 @@
if self.app.config.use_tool_dependencies:
self.dependency_manager = DependencyManager( [ self.app.config.tool_dependency_dir ] )
+ @property
+ def sa_session( self ):
+ """
+ Returns a SQLAlchemy session
+ """
+ return self.app.model.context
+
class ToolSection( object ):
"""
A group of tools with similar type/purpose that will be displayed as a
--- a/lib/galaxy/web/controllers/root.py Wed Apr 13 22:36:57 2011 +0200
+++ b/lib/galaxy/web/controllers/root.py Wed Apr 13 23:19:24 2011 +0200
@@ -53,6 +53,15 @@
trans.log_action( trans.get_user(), "tool_search.search", "", { "query" : query } )
return trans.app.toolbox_search.search( query )
+ @web.json
+ def tool_tag_search( self, trans, query ):
+ tag = trans.sa_session.query( trans.app.model.Tag ).filter_by( name=query ).first()
+ tool_ids = []
+ for tagged_tool in tag.tagged_tools:
+ if tagged_tool.tool_id not in tool_ids:
+ tool_ids.append( tagged_tool.tool_id )
+ return tool_ids
+
@web.expose
def tool_help( self, trans, id ):
"""Return help page for tool identified by 'id' if available"""
--- a/templates/root/tool_menu.mako Wed Apr 13 22:36:57 2011 +0200
+++ b/templates/root/tool_menu.mako Wed Apr 13 23:19:24 2011 +0200
@@ -2,6 +2,8 @@
import re
%>
+<%namespace file="/tagging_common.mako" import="render_tool_tagging_elements" />
+
## Render a tool
<%def name="render_tool( tool, section )">
%if not tool.hidden:
@@ -55,9 +57,10 @@
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" /><link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" /><link href="${h.url_for('/static/style/tool_menu.css')}" rel="stylesheet" type="text/css" />
+ <link href="${h.url_for('/static/style/autocomplete_tagging.css')}" rel="stylesheet" type="text/css" />
##<script type="text/javascript" src="${h.url_for('/static/scripts/jquery.js')}"></script>
- ${h.js( "jquery", "galaxy.base", "json2" )}
+ ${h.js( "jquery", "galaxy.base", "json2", "autocomplete_tagging" )}
<script type="text/javascript">
// Set up GalaxyAsync object.
@@ -228,7 +231,62 @@
}
}
});
+
}
+
+ function tool_tag_click(tag_name, tag_value) {
+ $.get("${h.url_for( controller='root', action='tool_tag_search' )}", { query: tag_name }, function (data) {
+ // Show live-search if results and search-term aren't empty
+ //$("#search-no-results").hide();
+ // Hide all tool sections.
+ $(".toolSectionWrapper").hide();
+ // This hides all tools but not workflows link (which is in a .toolTitle div).
+ $(".toolSectionWrapper").find(".toolTitle").hide();
+ if ( data.length !== 0 ) {
+ // Map tool ids to element ids and join them.
+ var s = $.map( data, function( n, i ) { return ".link-" + n.toLowerCase().replace(/[^a-z0-9_]/g,'_'); } ).join( ", " );
+
+ // First pass to show matching tools and their parents.
+ $(s).each( function() {
+ // Add class to denote match.
+ $(this).parent().addClass("search_match");
+ if ($(this).parents("#recently_used_wrapper").length === 0) {
+ // Default behavior.
+ $(this).parent().show().parent().parent().show().parent().show();
+ } else if ($(this).parents(".user_pref_visible").length !== 0) {
+ // RU menu is visible, so filter it as normal.
+ $(this).parent().show().parent().parent().show().parent().show();
+ } else {
+ // RU menu is not visible, so set up classes and visibility so that if menu shown matching is
+ // already in place.
+ $(this).parent().show();
+ }
+ });
+
+ // Hide labels that have no visible children.
+ $(".toolPanelLabel").each( function() {
+ var this_label = $(this);
+ var next = this_label.next();
+ var no_visible_tools = true;
+ // Look through tools following label and, if none are visible, hide label.
+ while (next.length !== 0 && next.hasClass("toolTitle")) {
+ if (next.is(":visible")) {
+ no_visible_tools = false;
+ break;
+ } else {
+ next = next.next();
+ }
+ }
+ if (no_visible_tools) {
+ this_label.hide();
+ }
+ });
+ } else {
+ $("#search-no-results").show();
+ }
+ }, "json" );
+ }
+
</script></head>
@@ -247,6 +305,10 @@
display = "none"
%><div id="tool-search" style="padding-bottom: 5px; position: relative; display: ${display}; width: 100%">
+ %if trans.app.config.get_bool( 'enable_tool_tags', False ):
+ Available Tags:
+ ${render_tool_tagging_elements()}
+ %endif
<input type="text" name="query" value="search tools" id="tool-search-query" autocomplete="off" style="width: 100%; font-style:italic; font-size: inherit"/><img src="${h.url_for('/static/images/loading_small_white_bg.gif')}" id="search-spinner" style="display: none; position: absolute; right: 0; top: 5px;"/></div>
--- a/templates/tagging_common.mako Wed Apr 13 22:36:57 2011 +0200
+++ b/templates/tagging_common.mako Wed Apr 13 23:19:24 2011 +0200
@@ -84,6 +84,20 @@
</div></%def>
+## Render tool tagging elements
+<%def name="render_tool_tagging_elements()">
+ <%
+ elt_id = int ( floor ( random()*maxint ) )
+ tags = trans.app.tag_handler.get_tool_tags( trans )
+ %>
+ ${self.render_tagging_element_html(elt_id=elt_id, \
+ tags=tags, \
+ editable=False )}
+ <script type="text/javascript">
+ init_tag_click_function($('#${elt_id}'), tool_tag_click);
+ </script>
+</%def>
+
## Render community tagging element.
<%def name="render_community_tagging_element(tagged_item=None, elt_context=None, use_toggle_link=False, tag_click_fn='default_tag_click_fn')">
## Build HTML.
@@ -213,4 +227,4 @@
}
</style></noscript>
-</%def>
\ No newline at end of file
+</%def>
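A detail in the toolbox-loading code above that trips people up: the dedupe uses Python's for/else, where the else body runs only when the loop completes without hitting break. Reduced to plain objects (no SQLAlchemy), the idiom looks like this:

class Assoc(object):
    def __init__(self, tool_id, tag_id):
        self.tool_id, self.tag_id = tool_id, tag_id

existing = [Assoc('blastn', 1)]

def ensure_association(tool_id, tag_id):
    for assoc in existing:
        if assoc.tool_id == tool_id and assoc.tag_id == tag_id:
            break  # already associated: do nothing
    else:  # loop finished without break: association is missing
        existing.append(Assoc(tool_id, tag_id))

ensure_association('blastn', 1)  # duplicate, skipped
ensure_association('blastp', 1)  # new, appended
print(len(existing))  # 2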
http://bitbucket.org/galaxy/galaxy-central/changeset/8635dd6e3857/
changeset: r5419:8635dd6e3857
user: Rob Hooft
date: 2011-04-14 10:02:10
summary: * Special-cased the "off" tag to leave a tool out of the toolbox.
affected #: 1 file (174 bytes)
--- a/scripts/build_toolbox.py Wed Apr 13 23:19:24 2011 +0200
+++ b/scripts/build_toolbox.py Thu Apr 14 10:02:10 2011 +0200
@@ -4,6 +4,8 @@
# Todo: Keep order by "prioritizing" tools in sections
# Todo: Labels (as lower level sections?)
+# Todo: Some tools are switched "off" by default: it must be possible to "off"
+# a tool without having to remove it?
def prettify(elem):
from xml.dom import minidom
@@ -70,12 +72,13 @@
attrib = {'file': fileattrib}
tags = tool.find('tags')
if tags:
- tagra = []
+ tagarray = []
for tag in tags.findall('tag'):
- tagra.append(tag.text)
- attrib['tags'] = ",".join(tagra)
+ tagarray.append(tag.text)
+ attrib['tags'] = ",".join(tagarray)
toolelement = ET.Element('tool', attrib)
- ts.add(toolelement, tool.find('section'))
+ if not 'off' in tagarray:
+ ts.add(toolelement, tool.find('section'))
return ts
def assemble():
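In other words, tagging a tool 'off' now excludes it from the generated toolbox without deleting its config file. Stripped down, with plain dicts standing in for the parsed XML, the filter reduces to:

tools = [
    {'file': 'blastn.xml', 'tags': ['ncbi', 'blast']},
    {'file': 'legacy.xml', 'tags': ['off']},
]
active = [t for t in tools if 'off' not in t['tags']]
print([t['file'] for t in active])  # ['blastn.xml']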
http://bitbucket.org/galaxy/galaxy-central/changeset/47b7d7e22189/
changeset: r5420:47b7d7e22189
user: natefoo
date: 2011-04-14 10:53:22
summary: More functionality for tool tags; still needs to be refactored to remove JavaScript duplicated from the tool search.
affected #: 5 files (2.4 KB)
--- a/lib/galaxy/web/controllers/root.py Thu Apr 14 10:02:10 2011 +0200
+++ b/lib/galaxy/web/controllers/root.py Thu Apr 14 10:53:22 2011 +0200
@@ -54,12 +54,14 @@
return trans.app.toolbox_search.search( query )
@web.json
- def tool_tag_search( self, trans, query ):
- tag = trans.sa_session.query( trans.app.model.Tag ).filter_by( name=query ).first()
+ def tool_tag_search( self, trans, **kwd ):
+ query = util.listify( kwd.get( 'query[]', [] ) )
+ tags = trans.sa_session.query( trans.app.model.Tag ).filter( trans.app.model.Tag.name.in_( query ) ).all()
tool_ids = []
- for tagged_tool in tag.tagged_tools:
- if tagged_tool.tool_id not in tool_ids:
- tool_ids.append( tagged_tool.tool_id )
+ for tagged_tool_il in [ tag.tagged_tools for tag in tags ]:
+ for tagged_tool in tagged_tool_il:
+ if tagged_tool.tool_id not in tool_ids:
+ tool_ids.append( tagged_tool.tool_id )
return tool_ids
@web.expose
--- a/static/june_2007_style/autocomplete_tagging.css.tmpl Thu Apr 14 10:02:10 2011 +0200
+++ b/static/june_2007_style/autocomplete_tagging.css.tmpl Thu Apr 14 10:53:22 2011 +0200
@@ -139,4 +139,9 @@
{
margin-left: 0.3em;
-}
\ No newline at end of file
+}
+
+.active-tag-name
+{
+ font-weight: bold;
+}
--- a/static/june_2007_style/blue/autocomplete_tagging.css Thu Apr 14 10:02:10 2011 +0200
+++ b/static/june_2007_style/blue/autocomplete_tagging.css Thu Apr 14 10:53:22 2011 +0200
@@ -18,3 +18,4 @@
.add-tag-button:hover{cursor:pointer;}
.tag-input{vertical-align:bottom;border:none;outline:none;resize:none;}
.delete-tag-img{margin-left:0.3em;}
+.active-tag-name{font-weight:bold;}
--- a/templates/root/tool_menu.mako Thu Apr 14 10:02:10 2011 +0200
+++ b/templates/root/tool_menu.mako Thu Apr 14 10:53:22 2011 +0200
@@ -23,9 +23,9 @@
## ${tool.description.replace( '[[', '<a href="link" target="galaxy_main">' % $tool.id ).replace( "]]", "</a>" )
<% tool_id = re.sub( '[^a-z0-9_]', '_', tool.id.lower() ) %>
%if tool.name:
- <a class="link-${tool_id}" href="${link}" target=${tool.target} minsizehint="${tool.uihints.get( 'minwidth', -1 )}">${_(tool.name)}</a> ${tool.description}
+ <a class="link-${tool_id} tool-link" href="${link}" target=${tool.target} minsizehint="${tool.uihints.get( 'minwidth', -1 )}">${_(tool.name)}</a> ${tool.description}
%else:
- <a class="link-${tool_id}" href="${link}" target=${tool.target} minsizehint="${tool.uihints.get( 'minwidth', -1 )}">${tool.description}</a>
+ <a class="link-${tool_id} tool-link" href="${link}" target=${tool.target} minsizehint="${tool.uihints.get( 'minwidth', -1 )}">${tool.description}</a>
%endif
</div>
%endif
@@ -234,10 +234,52 @@
}
+ var current_tags = new Array();
function tool_tag_click(tag_name, tag_value) {
- $.get("${h.url_for( controller='root', action='tool_tag_search' )}", { query: tag_name }, function (data) {
+ var add = true;
+ for ( var i = 0 ; i < current_tags.length ; i++ ) {
+ if ( current_tags[i] == tag_name ) {
+ current_tags.splice( i, 1 );
+ add = false;
+ }
+ }
+ if ( add ) {
+ current_tags.push( tag_name );
+ $("span.tag-name").each( function() {
+ if ( $(this).text() == tag_name ) {
+ $(this).addClass("active-tag-name");
+ $(this).append("<img class='delete-tag-img' src='${h.url_for('/static/images/delete_tag_icon_gray.png')}'/>")
+ }
+ });
+ } else {
+ $("span.tag-name").each( function() {
+ if ( $(this).text() == tag_name ) {
+ $(this).removeClass("active-tag-name");
+ $(this).text(tag_name);
+ }
+ });
+ }
+ if ( current_tags.length == 0 ) {
+ $("#search-no-results").hide();
+ $(".tool-link").each( function() {
+ $(this).parent().removeClass("search_match");
+ if ($(this).parents("#recently_used_wrapper").length === 0) {
+ // Default behavior.
+ $(this).parent().show().parent().parent().hide().parent().show();
+ } else if ($(this).parents(".user_pref_visible").length !== 0) {
+ // RU menu is visible, so filter it as normal.
+ $(this).parent().show().parent().parent().show().parent().show();
+ } else {
+ // RU menu is not visible, so set up classes and visibility so that if menu shown matching is
+ // already in place.
+ $(this).parent().show();
+ }
+ });
+ return;
+ }
+ $.get("${h.url_for( controller='root', action='tool_tag_search' )}", { query: current_tags }, function (data) {
// Show live-search if results and search-term aren't empty
- //$("#search-no-results").hide();
+ $("#search-no-results").hide();
// Hide all tool sections.
$(".toolSectionWrapper").hide();
// This hides all tools but not workflows link (which is in a .toolTitle div).
@@ -306,7 +348,7 @@
%><div id="tool-search" style="padding-bottom: 5px; position: relative; display: ${display}; width: 100%">
%if trans.app.config.get_bool( 'enable_tool_tags', False ):
- Available Tags:
+ <b>Tags:</b>
${render_tool_tagging_elements()}
%endif
<input type="text" name="query" value="search tools" id="tool-search-query" autocomplete="off" style="width: 100%; font-style:italic; font-size: inherit"/>
--- a/templates/tagging_common.mako Thu Apr 14 10:02:10 2011 +0200
+++ b/templates/tagging_common.mako Thu Apr 14 10:53:22 2011 +0200
@@ -92,7 +92,8 @@
%>
${self.render_tagging_element_html(elt_id=elt_id, \
tags=tags, \
- editable=False )}
+ editable=False, \
+ use_toggle_link=False )}
<script type="text/javascript">
init_tag_click_function($('#${elt_id}'), tool_tag_click);
</script>
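The reworked tool_tag_search now accepts a list of tag names (sent as query[] by jQuery) and unions the tool ids of every matching tag, deduplicating along the way. Stripped of the ORM, the lookup is the following, where the tag-to-tools mapping is invented sample data:

tagged_tools = {
    'genomics': ['blastn', 'bowtie'],
    'alignment': ['bowtie', 'bwa'],
}

def tool_tag_search(query):
    tool_ids = []
    for tag in query:
        for tool_id in tagged_tools.get(tag, []):
            if tool_id not in tool_ids:  # dedupe across tags
                tool_ids.append(tool_id)
    return tool_ids

print(tool_tag_search(['genomics', 'alignment']))
# ['blastn', 'bowtie', 'bwa']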
http://bitbucket.org/galaxy/galaxy-central/changeset/2c121a33b4d1/
changeset: r5421:2c121a33b4d1
user: natefoo
date: 2011-04-14 12:44:25
summary: Fix for searching tools and then selecting tags. Also refactoring to remove duplicated JavaScript.
affected #: 4 files (3.7 KB)
--- a/lib/galaxy/web/controllers/root.py Thu Apr 14 10:53:22 2011 +0200
+++ b/lib/galaxy/web/controllers/root.py Thu Apr 14 12:44:25 2011 +0200
@@ -49,20 +49,24 @@
return trans.fill_template('/root/tool_menu.mako', toolbox=toolbox, recent_tools=recent_tools )
@web.json
- def tool_search( self, trans, query ):
- trans.log_action( trans.get_user(), "tool_search.search", "", { "query" : query } )
- return trans.app.toolbox_search.search( query )
-
- @web.json
- def tool_tag_search( self, trans, **kwd ):
- query = util.listify( kwd.get( 'query[]', [] ) )
- tags = trans.sa_session.query( trans.app.model.Tag ).filter( trans.app.model.Tag.name.in_( query ) ).all()
- tool_ids = []
- for tagged_tool_il in [ tag.tagged_tools for tag in tags ]:
- for tagged_tool in tagged_tool_il:
- if tagged_tool.tool_id not in tool_ids:
- tool_ids.append( tagged_tool.tool_id )
- return tool_ids
+ def tool_search( self, trans, **kwd ):
+ query = kwd.get( 'query', '' )
+ tags = util.listify( kwd.get( 'tags[]', [] ) )
+ trans.log_action( trans.get_user(), "tool_search.search", "", { "query" : query, "tags" : tags } )
+ results = []
+ if tags:
+ tags = trans.sa_session.query( trans.app.model.Tag ).filter( trans.app.model.Tag.name.in_( tags ) ).all()
+ for tagged_tool_il in [ tag.tagged_tools for tag in tags ]:
+ for tagged_tool in tagged_tool_il:
+ if tagged_tool.tool_id not in results:
+ results.append( tagged_tool.tool_id )
+ if len( query ) > 3:
+ search_results = trans.app.toolbox_search.search( query )
+ if 'tags[]' in kwd:
+ results = filter( lambda x: x in results, search_results )
+ else:
+ results = search_results
+ return results
@web.expose
def tool_help( self, trans, id ):
--- a/static/june_2007_style/autocomplete_tagging.css.tmpl Thu Apr 14 10:53:22 2011 +0200
+++ b/static/june_2007_style/autocomplete_tagging.css.tmpl Thu Apr 14 12:44:25 2011 +0200
@@ -71,11 +71,12 @@
.individual-tag-area
{
cursor: pointer;
+ border:1px dotted transparent;
}
.individual-tag-area:hover
{
- border:dotted #999999 1px;
+ border:1px dotted #999999;
}
.active-tag-area {
--- a/static/june_2007_style/blue/autocomplete_tagging.css Thu Apr 14 10:53:22 2011 +0200
+++ b/static/june_2007_style/blue/autocomplete_tagging.css Thu Apr 14 12:44:25 2011 +0200
@@ -7,8 +7,8 @@
.ac_over{background-color:#0A246A;color:white;}
.ac_header{font-style:normal;color:gray;border-bottom:0.1em solid gray;}
.tag-area{width:100%;}
-.individual-tag-area{cursor:pointer;}
-.individual-tag-area:hover{border:dotted #999999 1px;}
+.individual-tag-area{cursor:pointer;border:1px dotted transparent;}
+.individual-tag-area:hover{border:1px dotted #999999;}
.active-tag-area{background-color:white;}
.toggle-link{font-weight:normal;padding:0.3em;margin-bottom:1em;width:100%;padding:0.2em 0em 0.2em 0em;}
.tag-button{width:auto;color:#444;text-decoration:none;display:inline-block;cursor:pointer;margin:0.2em;border:solid #bbb 1px;padding:0.1em 0.5em 0.1em 0.5em;-moz-border-radius:.5em;-webkit-border-radius:.5em;border-radius:.5em;background:#eee;}
--- a/templates/root/tool_menu.mako Thu Apr 14 10:53:22 2011 +0200
+++ b/templates/root/tool_menu.mako Thu Apr 14 12:44:25 2011 +0200
@@ -114,9 +114,16 @@
// Remove italics.
$(this).css("font-style", "normal");
+ // Don't search if the search value is < 3 chars, but clear the search if there was a previous query
+ if ( this.value.length < 3 && this.lastValue && this.lastValue.length >= 3 ) {
+ reset_tool_search(false);
+ // Re-apply tags
+ if ( current_tags.length > 0 ) {
+ $.get("${h.url_for( controller='root', action='tool_search' )}", { query: "", tags: current_tags }, function (data) {
+ apply_search_results(data);
+ }, "json" );
+ }
// Don't update if same value as last time
- if ( this.value.length < 3 ) {
- reset_tool_search(false);
} else if ( this.value !== this.lastValue ) {
// Add class to denote that searching is active.
$(this).addClass("search_active");
@@ -130,56 +137,8 @@
// Start a new ajax-request in X ms
$("#search-spinner").show();
this.timer = setTimeout(function () {
- $.get("${h.url_for( controller='root', action='tool_search' )}", { query: q }, function (data) {
- // input.removeClass(config.loadingClass);
- // Show live-search if results and search-term aren't empty
- $("#search-no-results").hide();
- // Hide all tool sections.
- $(".toolSectionWrapper").hide();
- // This hides all tools but not workflows link (which is in a .toolTitle div).
- $(".toolSectionWrapper").find(".toolTitle").hide();
- if ( data.length !== 0 ) {
- // Map tool ids to element ids and join them.
- var s = $.map( data, function( n, i ) { return ".link-" + n.toLowerCase().replace(/[^a-z0-9_]/g,'_'); } ).join( ", " );
-
- // First pass to show matching tools and their parents.
- $(s).each( function() {
- // Add class to denote match.
- $(this).parent().addClass("search_match");
- if ($(this).parents("#recently_used_wrapper").length === 0) {
- // Default behavior.
- $(this).parent().show().parent().parent().show().parent().show();
- } else if ($(this).parents(".user_pref_visible").length !== 0) {
- // RU menu is visible, so filter it as normal.
- $(this).parent().show().parent().parent().show().parent().show();
- } else {
- // RU menu is not visible, so set up classes and visibility so that if menu shown matching is
- // already in place.
- $(this).parent().show();
- }
- });
-
- // Hide labels that have no visible children.
- $(".toolPanelLabel").each( function() {
- var this_label = $(this);
- var next = this_label.next();
- var no_visible_tools = true;
- // Look through tools following label and, if none are visible, hide label.
- while (next.length !== 0 && next.hasClass("toolTitle")) {
- if (next.is(":visible")) {
- no_visible_tools = false;
- break;
- } else {
- next = next.next();
- }
- }
- if (no_visible_tools) {
- this_label.hide();
- }
- });
- } else {
- $("#search-no-results").show();
- }
+ $.get("${h.url_for( controller='root', action='tool_search' )}", { query: q, tags: current_tags }, function (data) {
+ apply_search_results(data);
$("#search-spinner").hide();
}, "json" );
}, 200 );
@@ -188,6 +147,58 @@
});
});
+ var apply_search_results = function (data) {
+ // input.removeClass(config.loadingClass);
+ // Show live-search if results and search-term aren't empty
+ $("#search-no-results").hide();
+ // Hide all tool sections.
+ $(".toolSectionWrapper").hide();
+ // This hides all tools but not workflows link (which is in a .toolTitle div).
+ $(".toolSectionWrapper").find(".toolTitle").hide();
+ if ( data.length !== 0 ) {
+ // Map tool ids to element ids and join them.
+ var s = $.map( data, function( n, i ) { return ".link-" + n.toLowerCase().replace(/[^a-z0-9_]/g,'_'); } ).join( ", " );
+
+ // First pass to show matching tools and their parents.
+ $(s).each( function() {
+ // Add class to denote match.
+ $(this).parent().addClass("search_match");
+ if ($(this).parents("#recently_used_wrapper").length === 0) {
+ // Default behavior.
+ $(this).parent().show().parent().parent().show().parent().show();
+ } else if ($(this).parents(".user_pref_visible").length !== 0) {
+ // RU menu is visible, so filter it as normal.
+ $(this).parent().show().parent().parent().show().parent().show();
+ } else {
+ // RU menu is not visible, so set up classes and visibility so that if menu shown matching is
+ // already in place.
+ $(this).parent().show();
+ }
+ });
+
+ // Hide labels that have no visible children.
+ $(".toolPanelLabel").each( function() {
+ var this_label = $(this);
+ var next = this_label.next();
+ var no_visible_tools = true;
+ // Look through tools following label and, if none are visible, hide label.
+ while (next.length !== 0 && next.hasClass("toolTitle")) {
+ if (next.is(":visible")) {
+ no_visible_tools = false;
+ break;
+ } else {
+ next = next.next();
+ }
+ }
+ if (no_visible_tools) {
+ this_label.hide();
+ }
+ });
+ } else {
+ $("#search-no-results").show();
+ }
+ }
+
// Update recently used tools menu. Function inserts a new item and removes the last item.
function update_recently_used() {
$.ajax({
@@ -262,70 +273,18 @@
if ( current_tags.length == 0 ) {
$("#search-no-results").hide();
$(".tool-link").each( function() {
- $(this).parent().removeClass("search_match");
- if ($(this).parents("#recently_used_wrapper").length === 0) {
- // Default behavior.
- $(this).parent().show().parent().parent().hide().parent().show();
- } else if ($(this).parents(".user_pref_visible").length !== 0) {
- // RU menu is visible, so filter it as normal.
- $(this).parent().show().parent().parent().show().parent().show();
- } else {
- // RU menu is not visible, so set up classes and visibility so that if menu shown matching is
- // already in place.
- $(this).parent().show();
- }
+ reset_tool_search(false);
});
return;
}
- $.get("${h.url_for( controller='root', action='tool_tag_search' )}", { query: current_tags }, function (data) {
- // Show live-search if results and search-term aren't empty
- $("#search-no-results").hide();
- // Hide all tool sections.
- $(".toolSectionWrapper").hide();
- // This hides all tools but not workflows link (which is in a .toolTitle div).
- $(".toolSectionWrapper").find(".toolTitle").hide();
- if ( data.length !== 0 ) {
- // Map tool ids to element ids and join them.
- var s = $.map( data, function( n, i ) { return ".link-" + n.toLowerCase().replace(/[^a-z0-9_]/g,'_'); } ).join( ", " );
-
- // First pass to show matching tools and their parents.
- $(s).each( function() {
- // Add class to denote match.
- $(this).parent().addClass("search_match");
- if ($(this).parents("#recently_used_wrapper").length === 0) {
- // Default behavior.
- $(this).parent().show().parent().parent().show().parent().show();
- } else if ($(this).parents(".user_pref_visible").length !== 0) {
- // RU menu is visible, so filter it as normal.
- $(this).parent().show().parent().parent().show().parent().show();
- } else {
- // RU menu is not visible, so set up classes and visibility so that if menu shown matching is
- // already in place.
- $(this).parent().show();
- }
- });
-
- // Hide labels that have no visible children.
- $(".toolPanelLabel").each( function() {
- var this_label = $(this);
- var next = this_label.next();
- var no_visible_tools = true;
- // Look through tools following label and, if none are visible, hide label.
- while (next.length !== 0 && next.hasClass("toolTitle")) {
- if (next.is(":visible")) {
- no_visible_tools = false;
- break;
- } else {
- next = next.next();
- }
- }
- if (no_visible_tools) {
- this_label.hide();
- }
- });
- } else {
- $("#search-no-results").show();
- }
+ var q = $("input#tool-search-query").val();
+ if ( q == "search tools" ) {
+ q = "";
+ } else if ( q.length > 0 ) {
+ q = q + '*';
+ }
+ $.get("${h.url_for( controller='root', action='tool_search' )}", { query: q, tags: current_tags }, function (data) {
+ apply_search_results(data);
}, "json" );
}
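With tags and text search merged into a single tool_search action, the interesting case is when both are supplied: the text-search hits are filtered down to those that also carry a selected tag, mirroring the filter(lambda x: x in results, search_results) call above. A minimal sketch:

def combined_search(tag_hits, text_hits, have_tags):
    # With tags active, a tool must match the text query AND a tag.
    if have_tags:
        return [t for t in text_hits if t in tag_hits]
    return text_hits

print(combined_search(['blastn', 'bwa'], ['blastn', 'tophat'], True))
# ['blastn']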
http://bitbucket.org/galaxy/galaxy-central/changeset/1de0b4fbb5a7/
changeset: r5422:1de0b4fbb5a7
user: natefoo
date: 2011-04-14 13:54:46
summary: Persist tool tag selections for logged-in users.
affected #: 2 files (1.2 KB)
--- a/lib/galaxy/web/controllers/root.py Thu Apr 14 12:44:25 2011 +0200
+++ b/lib/galaxy/web/controllers/root.py Thu Apr 14 13:54:46 2011 +0200
@@ -60,7 +60,13 @@
for tagged_tool in tagged_tool_il:
if tagged_tool.tool_id not in results:
results.append( tagged_tool.tool_id )
- if len( query ) > 3:
+ if trans.user:
+ trans.user.preferences['selected_tool_tags'] = ','.join( [ tag.name for tag in tags ] )
+ trans.sa_session.flush()
+ elif trans.user:
+ trans.user.preferences['selected_tool_tags'] = ''
+ trans.sa_session.flush()
+ if len( query ) > 2:
search_results = trans.app.toolbox_search.search( query )
if 'tags[]' in kwd:
results = filter( lambda x: x in results, search_results )
--- a/templates/root/tool_menu.mako Thu Apr 14 12:44:25 2011 +0200
+++ b/templates/root/tool_menu.mako Thu Apr 14 13:54:46 2011 +0200
@@ -145,6 +145,22 @@
}
this.lastValue = this.value;
});
+
+ // Apply stored tags
+ %if trans.user and trans.user.preferences.get( 'selected_tool_tags', '' ):
+ current_tags = "${trans.user.preferences['selected_tool_tags']}".split(",")
+ $.get("${h.url_for( controller='root', action='tool_search' )}", { query: "", tags: current_tags }, function (data) {
+ apply_search_results(data);
+ }, "json" );
+ $("span.tag-name").each( function() {
+ for ( var i in current_tags ) {
+ if ( $(this).text() == current_tags[i] ) {
+ $(this).addClass("active-tag-name");
+ $(this).append("<img class='delete-tag-img' src='${h.url_for('/static/images/delete_tag_icon_gray.png')}'/>")
+ }
+ }
+ });
+ %endif
});
var apply_search_results = function (data) {
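One caveat: as committed, the "elif trans.user" branch can never run after "if trans.user", so the preference-clearing path looks unreachable; presumably the elif was meant to pair with the earlier "if tags". A hedged sketch of the apparent intent, with plain dicts rather than Galaxy's user model:

def persist_selected_tags(user, tags):
    # Save the selection when tags are active; an empty list clears it,
    # since ','.join([]) is the empty string.
    if user is None:
        return
    user['preferences']['selected_tool_tags'] = ','.join(tags)

user = {'preferences': {}}
persist_selected_tags(user, ['genomics'])
print(user['preferences'])  # {'selected_tool_tags': 'genomics'}
persist_selected_tags(user, [])
print(user['preferences'])  # {'selected_tool_tags': ''}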
http://bitbucket.org/galaxy/galaxy-central/changeset/43f944000c2d/
changeset: r5423:43f944000c2d
user: Rob Hooft
date: 2011-04-14 14:07:05
summary: * Restructured to use toolboxposition elements instead of section elements. Now
handles order and labels.
affected #: 1 file (2.7 KB)
--- a/scripts/build_toolbox.py Thu Apr 14 12:44:25 2011 +0200
+++ b/scripts/build_toolbox.py Thu Apr 14 14:07:05 2011 +0200
@@ -4,8 +4,6 @@
# Todo: Keep order by "prioritizing" tools in sections
# Todo: Labels (as lower level sections?)
-# Todo: Some tools are switched "off" by default: it must be possible to "off"
-# a tool without having to remove it?
def prettify(elem):
from xml.dom import minidom
@@ -14,7 +12,7 @@
return repaired.toprettyxml(indent=' ')
# Build a list of all toolconf xml files in the tools directory
-def getfnl(startdir):
+def getfilenamelist(startdir):
filenamelist = []
for root, dirs, files in os.walk(startdir):
for fn in files:
@@ -28,82 +26,144 @@
print "An OOPS on", fullfn
raise
rootelement = doc.getroot()
+ # Only interpret those 'tool' XML files that have
+ # the 'toolboxposition' element.
if rootelement.tag == 'tool':
- if rootelement.findall('section'):
+ if rootelement.findall('toolboxposition'):
filenamelist.append(fullfn)
+ else:
+ print "DBG> tool config does not have a <section>:", fullfn
return filenamelist
-class ToolSections(object):
+class ToolBox(object):
def __init__(self):
- self.tools = {'':[]}
- self.sections = [''] # Empty section first
+ from collections import defaultdict
+ self.tools = defaultdict(list)
+ self.sectionorders = {}
- def add(self, el, sectionelement):
- if sectionelement is not None:
- section = str(sectionelement.text)
- section = section.strip()
+ def add(self, toolelement, toolboxpositionelement):
+ section = toolboxpositionelement.attrib.get('section','')
+ label = toolboxpositionelement.attrib.get('label','')
+ order = int(toolboxpositionelement.attrib.get('order', '0'))
+ sectionorder = int(toolboxpositionelement.attrib.get('sectionorder', '0'))
+
+ # If this is the first time we encounter the section, store its order
+ # number. If we have seen it before, ignore the given order and use
+ # the stored one instead
+ if not self.sectionorders.has_key(section):
+ self.sectionorders[section] = sectionorder
else:
- section = ''
- if not self.tools.has_key(section):
- self.sections.append(section)
- self.tools[section]= []
- self.tools[section].append(el)
+ sectionorder = self.sectionorders[section]
-# Analyze all the toolconf xml files given in the filenamelist (fnl)
+ # Sortorder: add intelligent mix to the front
+ self.tools[("%05d-%s"%(sectionorder,section), label, order, section)].append(toolelement)
+
+ def addElementsTo(self, rootelement):
+ toolkeys = self.tools.keys()
+ toolkeys.sort()
+
+ # Initialize the loop: IDs to zero, current section and label to ''
+ currentsection = ''
+ sectionnumber = 0
+ currentlabel = ''
+ labelnumber = 0
+ for toolkey in toolkeys:
+ section = toolkey[3]
+ # If we change sections, add the new section to the XML tree,
+ # and start adding stuff to the new section. If the new section
+ # is '', start adding stuff to the root again.
+ if currentsection != section:
+ currentsection = section
+ # Start the section with empty label
+ currentlabel = ''
+ if section:
+ sectionnumber += 1
+ attrib = {'name': section,
+ 'id': "section%d"% sectionnumber}
+ sectionelement = ET.Element('section', attrib)
+ rootelement.append(sectionelement)
+ currentelement = sectionelement
+ else:
+ currentelement = rootelement
+ label = toolkey[1]
+
+ # If we change labels, add the new label to the XML tree
+ if currentlabel != label:
+ currentlabel = label
+ if label:
+ labelnumber += 1
+ attrib = {'text': label,
+ 'id': "label%d"% labelnumber}
+ labelelement = ET.Element('label', attrib)
+ currentelement.append(labelelement)
+
+ # Add the tools that are in this place
+ for toolelement in self.tools[toolkey]:
+ currentelement.append(toolelement)
+
+# Analyze all the toolconf xml files given in the filenamelist
# Build a list of all sections
-def scanfiles(fnl):
- ts = ToolSections()
- for fn in fnl: # specialized toolconf.xml files.
+def scanfiles(filenamelist):
+ # Build an empty tool box
+ toolbox = ToolBox()
+
+ # Read each of the files in the list
+ for fn in filenamelist:
doc = ET.parse(fn)
root = doc.getroot()
if root.tag == 'tool':
- tools = [root]
+ toolelements = [root]
else:
- tools = doc.findall('tool')
+ toolelements = doc.findall('tool')
- for tool in tools:
- if tool.attrib.has_key('file'):
+ for toolelement in toolelements:
+ # Figure out where the tool XML file is, absolute path.
+ if toolelement.attrib.has_key('file'):
+ # It is mentioned, we need to make it absolute
fileattrib = os.path.join(os.getcwd(),
os.path.dirname(fn),
- tool.attrib['file'])
- else: # It must be the current file
+ toolelement.attrib['file'])
+ else:
+ # It is the current file
fileattrib = os.path.join(os.getcwd(), fn)
+
+ # Store the file in the attributes of the new tool element
attrib = {'file': fileattrib}
- tags = tool.find('tags')
+
+ # Add the tags into the attributes
+ tags = toolelement.find('tags')
if tags:
tagarray = []
for tag in tags.findall('tag'):
tagarray.append(tag.text)
attrib['tags'] = ",".join(tagarray)
- toolelement = ET.Element('tool', attrib)
- if not 'off' in tagarray:
- ts.add(toolelement, tool.find('section'))
- return ts
+ else:
+ print "DBG> No tags in",fn
+
+ # Build the tool element
+ newtoolelement = ET.Element('tool', attrib)
+ toolboxpositionelements = toolelement.findall('toolboxposition')
+ if not toolboxpositionelements:
+ print "DBG> %s has no toolboxposition" % fn
+ else:
+ for toolboxpositionelement in toolboxpositionelements:
+ toolbox.add(newtoolelement, toolboxpositionelement)
+ return toolbox
def assemble():
- fnl = getfnl('tools')
- fnl.sort()
+ filenamelist = []
+ for directorytree in ['tools']:
+ filenamelist.extend(getfilenamelist('tools'))
+ filenamelist.sort()
- ts = scanfiles(fnl)
+ toolbox = scanfiles(filenamelist)
- toolbox = ET.Element('toolbox')
+ toolboxelement = ET.Element('toolbox')
- sectionnumber = 0
- for section in ts.sections:
- if section:
- sectionnumber += 1
- ident = "section%d" % sectionnumber
- sectionelement = ET.SubElement(toolbox,'section', {'name': section,
- 'id': ident})
- puttoolsin = sectionelement
- else:
- puttoolsin = toolbox
- for tool in ts.tools[section]:
- attrib = tool.attrib
- toolelement = ET.SubElement(puttoolsin, 'tool', attrib)
-
- print prettify(toolbox)
+ toolbox.addElementsTo(toolboxelement)
+
+ print prettify(toolboxelement)
if __name__ == "__main__":
assemble()
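The ordering trick in ToolBox.add is worth spelling out: the numeric section order is zero-padded into a string key, so plain tuple sorting yields sections in numeric order, then labels, then per-tool order. For example:

entries = [
    (("%05d-%s" % (2, "NGS"), "", 10, "NGS"), "bowtie"),
    (("%05d-%s" % (1, "Get Data"), "", 5, "Get Data"), "upload"),
    (("%05d-%s" % (2, "NGS"), "", 2, "NGS"), "bwa"),
]
for key, tool in sorted(entries):
    print("%s %s" % (key[3], tool))
# Get Data upload
# NGS bwa
# NGS bowtie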
http://bitbucket.org/galaxy/galaxy-central/changeset/5ca5403767d5/
changeset: r5424:5ca5403767d5
user: Rob Hooft
date: 2011-04-14 14:07:36
summary: * Remove TODOs that have been done
affected #: 1 file (98 bytes)
--- a/scripts/build_toolbox.py Thu Apr 14 14:07:05 2011 +0200
+++ b/scripts/build_toolbox.py Thu Apr 14 14:07:36 2011 +0200
@@ -2,9 +2,6 @@
import sys
from xml.etree import ElementTree as ET
-# Todo: Keep order by "prioritizing" tools in sections
-# Todo: Labels (as lower level sections?)
-
def prettify(elem):
from xml.dom import minidom
rough_string = ET.tostring(elem, 'utf-8')
http://bitbucket.org/galaxy/galaxy-central/changeset/06eb4747244c/
changeset: r5425:06eb4747244c
user: Rob Hooft
date: 2011-04-14 14:09:54
summary: * merge
affected #: 2 files (1.2 KB)
--- a/lib/galaxy/web/controllers/root.py Thu Apr 14 14:07:36 2011 +0200
+++ b/lib/galaxy/web/controllers/root.py Thu Apr 14 14:09:54 2011 +0200
@@ -60,7 +60,13 @@
for tagged_tool in tagged_tool_il:
if tagged_tool.tool_id not in results:
results.append( tagged_tool.tool_id )
- if len( query ) > 3:
+ if trans.user:
+ trans.user.preferences['selected_tool_tags'] = ','.join( [ tag.name for tag in tags ] )
+ trans.sa_session.flush()
+ elif trans.user:
+ trans.user.preferences['selected_tool_tags'] = ''
+ trans.sa_session.flush()
+ if len( query ) > 2:
search_results = trans.app.toolbox_search.search( query )
if 'tags[]' in kwd:
results = filter( lambda x: x in results, search_results )
--- a/templates/root/tool_menu.mako Thu Apr 14 14:07:36 2011 +0200
+++ b/templates/root/tool_menu.mako Thu Apr 14 14:09:54 2011 +0200
@@ -145,6 +145,22 @@
}
this.lastValue = this.value;
});
+
+ // Apply stored tags
+ %if trans.user and trans.user.preferences.get( 'selected_tool_tags', '' ):
+ current_tags = "${trans.user.preferences['selected_tool_tags']}".split(",")
+ $.get("${h.url_for( controller='root', action='tool_search' )}", { query: "", tags: current_tags }, function (data) {
+ apply_search_results(data);
+ }, "json" );
+ $("span.tag-name").each( function() {
+ for ( var i in current_tags ) {
+ if ( $(this).text() == current_tags[i] ) {
+ $(this).addClass("active-tag-name");
+ $(this).append("<img class='delete-tag-img' src='${h.url_for('/static/images/delete_tag_icon_gray.png')}'/>")
+ }
+ }
+ });
+ %endif
});
var apply_search_results = function (data) {
http://bitbucket.org/galaxy/galaxy-central/changeset/5222e72f1b66/
changeset: r5426:5222e72f1b66
user: Freek de Bruijn
date: 2011-04-14 14:20:48
summary: Alex and Freek: clear the tool tag associations in init_tools before adding these associations.
affected #: 1 file (247 bytes)
--- a/lib/galaxy/tools/__init__.py Thu Apr 14 14:09:54 2011 +0200
+++ b/lib/galaxy/tools/__init__.py Thu Apr 14 14:20:48 2011 +0200
@@ -130,6 +130,9 @@
key = 'section_' + section.id
panel_dict[ key ] = section
+ log.info("removing all tool tag associations (" + str( self.sa_session.query( self.app.model.ToolTagAssociation ).count() ) + ")")
+ self.sa_session.query( self.app.model.ToolTagAssociation ).delete()
+ self.sa_session.flush()
log.info("parsing the tool configuration")
tree = util.parse_xml( config_filename )
root = tree.getroot()
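Deleting every ToolTagAssociation row up front makes toolbox startup idempotent: the associations are rebuilt from the config on each load, so renamed or removed tags cannot leave stale rows behind. The shape of the pattern, with a list standing in for the ORM session:

associations = [('old_tool', 'old_tag')]

def init_tools(config):
    del associations[:]                    # bulk delete, like query(...).delete()
    for tool_id, tags in config.items():   # re-create from the parsed config
        for tag in tags:
            associations.append((tool_id, tag))

init_tools({'blastn': ['ncbi'], 'bwa': ['alignment']})
print(sorted(associations))  # [('blastn', 'ncbi'), ('bwa', 'alignment')]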
http://bitbucket.org/galaxy/galaxy-central/changeset/b9d54c4f2692/
changeset: r5427:b9d54c4f2692
user: natefoo
date: 2011-04-14 14:23:15
summary: Fix the spinner in the tool search box when tags are enabled.
affected #: 2 files (144 bytes)
--- a/static/june_2007_style/autocomplete_tagging.css.tmpl Thu Apr 14 14:20:48 2011 +0200
+++ b/static/june_2007_style/autocomplete_tagging.css.tmpl Thu Apr 14 14:23:15 2011 +0200
@@ -71,12 +71,12 @@
.individual-tag-area
{
cursor: pointer;
- border:1px dotted transparent;
+ border: 1px dotted transparent;
}
.individual-tag-area:hover
{
- border:1px dotted #999999;
+ border: 1px dotted #999999;
}
.active-tag-area {
--- a/templates/root/tool_menu.mako Thu Apr 14 14:20:48 2011 +0200
+++ b/templates/root/tool_menu.mako Thu Apr 14 14:23:15 2011 +0200
@@ -321,11 +321,13 @@
else:
display = "none"
%>
- <div id="tool-search" style="padding-bottom: 5px; position: relative; display: ${display}; width: 100%">
- %if trans.app.config.get_bool( 'enable_tool_tags', False ):
+ %if trans.app.config.get_bool( 'enable_tool_tags', False ):
+ <div id="tool-tags" style="padding-bottom: 5px; position: relative; display: ${display}; width: 100%"><b>Tags:</b>
${render_tool_tagging_elements()}
- %endif
+ </div>
+ %endif
+ <div id="tool-search" style="padding-bottom: 5px; position: relative; display: ${display}; width: 100%"><input type="text" name="query" value="search tools" id="tool-search-query" autocomplete="off" style="width: 100%; font-style:italic; font-size: inherit"/><img src="${h.url_for('/static/images/loading_small_white_bg.gif')}" id="search-spinner" style="display: none; position: absolute; right: 0; top: 5px;"/></div>
http://bitbucket.org/galaxy/galaxy-central/changeset/0e1bcc194103/
changeset: r5428:0e1bcc194103
user: vdt...(a)hotmail.com
date: 2011-04-14 15:09:00
summary: Introduction of toolboxposition elements
affected #: 1 file (4.2 KB)
--- a/scripts/extract_toolbox_sections.py Thu Apr 14 14:23:15 2011 +0200
+++ b/scripts/extract_toolbox_sections.py Thu Apr 14 15:09:00 2011 +0200
@@ -1,43 +1,141 @@
import os
import sys
from xml.etree import ElementTree as ET
+from collections import defaultdict
# Todo: ""
+# execute from galaxy root dir
+
+tooldict = defaultdict(list)
def main():
doc = ET.parse("tool_conf.xml")
root = doc.getroot()
- for section in root.findall("section"):
- sectionname = section.attrib['name']
- for tool in section.findall("tool"):
- upgradeFile(tool, sectionname)
- for tool in root.findall("tool"):
- upgradeFile(tool, "")
+
+ # index range 1-1000, current sections/tools divided between 250-750
+ sectionindex = 250
+ sectionfactor = int( 500 / len( root.getchildren() ) )
+
+ for rootchild in root.getchildren():
+ currentsectionlabel = ""
+ if ( rootchild.tag == "section" ):
+ sectionname = rootchild.attrib['name']
+ # per section tool index range 1-1000, current labels/tools
+ # divided between 20 and 750
+ toolindex = 250
+ toolfactor = int( 500 / len( rootchild.getchildren() ) )
+ currentlabel = ""
+ for sectionchild in rootchild.getchildren():
+ if ( sectionchild.tag == "tool" ):
+ addToToolDict(sectionchild, sectionname, sectionindex, toolindex, currentlabel)
+ toolindex += toolfactor
+ elif ( sectionchild.tag == "label" ):
+ currentlabel = sectionchild.attrib["text"]
+ sectionindex += sectionfactor
+ elif ( rootchild.tag == "tool" ):
+ addToToolDict(rootchild, "", sectionindex, None, currentsectionlabel)
+ sectionindex += sectionfactor
+ elif ( rootchild.tag == "label" ):
+ currentsectionlabel = rootchild.attrib["text"]
+ sectionindex += sectionfactor
+
+
+ # scan galaxy root tools dir for tool-specific xmls
+ toolconffilelist = getfnl( os.path.join(os.getcwd(), "tools" ) )
+
+ # foreach tool xml:
+ # check if the tags element exists in the tool xml (as child of <tool>)
+ # if not, add empty tags element for later use
+ # if this tool is in the above tooldict, add the toolboxposition element to the tool xml
+ # if not, then nothing.
+ for toolconffile in toolconffilelist:
+ hastags = False
+ hastoolboxpos = False
+
+ #parse tool config file into a document structure as defined by the ElementTree
+ tooldoc = ET.parse(toolconffile)
+ # get the root element of the toolconfig file
+ tooldocroot = tooldoc.getroot()
+ #check tags element, set flag
+ tagselement = tooldocroot.find("tags")
+ if (tagselement):
+ hastags = True
+ # check if toolboxposition element already exists in this tooconfig file
+ toolboxposelement = tooldocroot.find("toolboxposition")
+ if ( toolboxposelement ):
+ hastoolboxpos = True
+
+ if ( not ( hastags and hastoolboxpos ) ):
+ original = open( toolconffile, 'r' )
+ contents = original.readlines()
+ original.close()
+
+ # the new elements will be added directly below the root tool element
+ addelementsatposition = 1
+ # but what's on the first line? Root or not?
+ if ( contents[0].startswith("<?") ):
+ addelementsatposition = 2
+ newelements = []
+ if ( not hastoolboxpos ):
+ if ( toolconffile in tooldict ):
+ for attributes in tooldict[toolconffile]:
+ # create toolboxposition element
+ sectionelement = ET.Element("toolboxposition")
+ sectionelement.attrib = attributes
+ sectionelement.tail = "\n "
+ newelements.append( ET.tostring(sectionelement, 'utf-8') )
-def upgradeFile(tool, sectionname):
+ if ( not hastags ):
+ # create empty tags element
+ newelements.append( "<tags/>\n " )
+
+ contents = (
+ contents[ 0:addelementsatposition ] +
+ newelements +
+ contents[ addelementsatposition: ] )
+
+ # add .new for testing/safety purposes :P
+ newtoolconffile = open ( toolconffile, 'w' )
+ newtoolconffile.writelines( contents )
+ newtoolconffile.close()
+
+
+def addToToolDict(tool, sectionname, sectionindex, toolindex, currentlabel):
toolfile = tool.attrib["file"]
realtoolfile = os.path.join(os.getcwd(), "tools", toolfile)
toolxmlfile = ET.parse(realtoolfile)
localroot = toolxmlfile.getroot()
+
+ # define attributes for the toolboxposition xml-tag
+ attribdict = {}
+ if ( sectionname ):
+ attribdict[ "section" ] = sectionname
+ if ( currentlabel ):
+ attribdict[ "label" ] = currentlabel
+ if ( sectionindex ):
+ attribdict[ "sectionorder" ] = str(sectionindex)
+ if ( toolindex ):
+ attribdict[ "order" ] = str(toolindex)
+ tooldict[ realtoolfile ].append(attribdict)
- for existingsectionelement in localroot.findall("section"):
- localroot.remove(existingsectionelement)
-
- for existingtagselement in localroot.findall("tags"):
- localroot.remove(existingtagselement)
-
- sectionelement = ET.Element("section")
- sectionelement.text = sectionname
- sectionelement.tail = "\n "
- localroot.insert(0, sectionelement)
-
- tagselement = ET.Element("tags")
- tagselement.tail = "\n "
- localroot.insert(1,tagselement)
-
- toolxmlfile.write(realtoolfile)
-
+# Build a list of all toolconf xml files in the tools directory
+def getfnl(startdir):
+ filenamelist = []
+ for root, dirs, files in os.walk(startdir):
+ for fn in files:
+ fullfn = os.path.join(root, fn)
+ if fn.endswith('.xml'):
+ try:
+ doc = ET.parse(fullfn)
+ except:
+ print "Oops, bad xml in: ", fullfn
+ raise
+ rootelement = doc.getroot()
+ # here we check if this xml file actually is a tool conf xml!
+ if rootelement.tag == 'tool':
+ filenamelist.append(fullfn)
+ return filenamelist
if __name__ == "__main__":
main()
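The index scheme deserves a note: existing sections and tools are assigned order numbers spread across 250-750 of a nominal 1-1000 range, leaving headroom to slot new entries before, between, or after the current ones without renumbering. A sketch of the spacing (the function name is invented):

def spaced_indexes(n, lo=250, span=500):
    # Spread n existing entries evenly across [lo, lo + span).
    step = span // n
    return [lo + i * step for i in range(n)]

print(spaced_indexes(4))  # [250, 375, 500, 625]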
http://bitbucket.org/galaxy/galaxy-central/changeset/6fd7478fd42d/
changeset: r5429:6fd7478fd42d
user: natefoo
date: 2011-04-19 14:21:54
summary: Merge changes from the NBIC Galaxy Hackathon. Includes the addition of scripts which automatically generate the tool_conf.xml from tool configuration files. Tool configuration files also support optional syntax which specifies their placement in sections, as well as what tags the tool should be associated with. Also includes support for associating tags with tool IDs and filtering tools by tag in the tool menu.
affected #: 16 files (21.0 KB)
--- a/lib/galaxy/model/__init__.py Tue Apr 19 00:07:36 2011 -0400
+++ b/lib/galaxy/model/__init__.py Tue Apr 19 08:21:54 2011 -0400
@@ -2272,6 +2272,16 @@
class VisualizationTagAssociation ( ItemTagAssociation ):
pass
+class ToolTagAssociation( ItemTagAssociation ):
+ def __init__( self, id=None, user=None, tool_id=None, tag_id=None, user_tname=None, value=None ):
+ self.id = id
+ self.user = user
+ self.tool_id = tool_id
+ self.tag_id = tag_id
+ self.user_tname = user_tname
+ self.value = None
+ self.user_value = None
+
# Item annotation classes.
class HistoryAnnotationAssociation( object ):
--- a/lib/galaxy/model/mapping.py Tue Apr 19 00:07:36 2011 -0400
+++ b/lib/galaxy/model/mapping.py Tue Apr 19 08:21:54 2011 -0400
@@ -851,6 +851,15 @@
Column( "value", TrimmedString(255), index=True),
Column( "user_value", TrimmedString(255), index=True) )
+ToolTagAssociation.table = Table( "tool_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "tool_id", TrimmedString(255), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
# Annotation tables.
HistoryAnnotationAssociation.table = Table( "history_annotation_association", metadata,
@@ -1575,6 +1584,10 @@
properties=dict( tag=relation(Tag, backref="tagged_visualizations"), user=relation( User ) )
)
+assign_mapper( context, ToolTagAssociation, ToolTagAssociation.table,
+ properties=dict( tag=relation(Tag, backref="tagged_tools"), user=relation( User ) )
+ )
+
# Annotation tables.
assign_mapper( context, HistoryAnnotationAssociation, HistoryAnnotationAssociation.table,
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py Tue Apr 19 08:21:54 2011 -0400
@@ -0,0 +1,49 @@
+"""
+Migration script to create table for storing tool tag associations.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+from galaxy.model.custom_types import *
+
+import datetime
+now = datetime.datetime.utcnow
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+# Table to add
+
+ToolTagAssociation_table = Table( "tool_tag_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "tool_id", TrimmedString(255), index=True ),
+ Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+ Column( "user_tname", TrimmedString(255), index=True),
+ Column( "value", TrimmedString(255), index=True),
+ Column( "user_value", TrimmedString(255), index=True) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+
+ # Create tool_tag_association table
+ try:
+ ToolTagAssociation_table.create()
+ except Exception, e:
+ log.error( "Creating tool_tag_association table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+
+ # Drop tool_tag_association table
+ try:
+ ToolTagAssociation_table.drop()
+ except Exception, e:
+ log.error( "Dropping tool_tag_association table failed: %s" % str( e ) )
--- a/lib/galaxy/tags/tag_handler.py Tue Apr 19 00:07:36 2011 -0400
+++ b/lib/galaxy/tags/tag_handler.py Tue Apr 19 08:21:54 2011 -0400
@@ -58,6 +58,15 @@
tag_id = row[0]
community_tags.append( self.get_tag_by_id( trans, tag_id ) )
return community_tags
+ def get_tool_tags( self, trans ):
+ result_set = trans.sa_session.execute( select( columns=[ trans.app.model.ToolTagAssociation.table.c.tag_id ],
+ from_obj=trans.app.model.ToolTagAssociation.table ).distinct() )
+
+ tags = []
+ for row in result_set:
+ tag_id = row[0]
+ tags.append( self.get_tag_by_id( trans, tag_id ) )
+ return tags
def remove_item_tag( self, trans, user, item, tag_name ):
"""Remove a tag from an item."""
# Get item tag association.
--- a/lib/galaxy/tools/__init__.py Tue Apr 19 00:07:36 2011 -0400
+++ b/lib/galaxy/tools/__init__.py Tue Apr 19 08:21:54 2011 -0400
@@ -75,6 +75,27 @@
try:
path = elem.get( "file" )
tool = self.load_tool( os.path.join( self.tool_root_dir, path ) )
+ if self.app.config.get_bool( 'enable_tool_tags', False ):
+ tag_names = elem.get( "tags", "" ).split( "," )
+ for tag_name in tag_names:
+ if tag_name == '':
+ continue
+ tag = self.sa_session.query( self.app.model.Tag ).filter_by( name=tag_name ).first()
+ if not tag:
+ tag = self.app.model.Tag( name=tag_name )
+ self.sa_session.add( tag )
+ self.sa_session.flush()
+ tta = self.app.model.ToolTagAssociation( tool_id=tool.id, tag_id=tag.id )
+ self.sa_session.add( tta )
+ self.sa_session.flush()
+ else:
+ for tagged_tool in tag.tagged_tools:
+ if tagged_tool.tool_id == tool.id:
+ break
+ else:
+ tta = self.app.model.ToolTagAssociation( tool_id=tool.id, tag_id=tag.id )
+ self.sa_session.add( tta )
+ self.sa_session.flush()
self.tools_by_id[ tool.id ] = tool
key = 'tool_' + tool.id
panel_dict[ key ] = tool
@@ -109,6 +130,9 @@
key = 'section_' + section.id
panel_dict[ key ] = section
+ log.info("removing all tool tag associations (" + str( self.sa_session.query( self.app.model.ToolTagAssociation ).count() ) + ")")
+ self.sa_session.query( self.app.model.ToolTagAssociation ).delete()
+ self.sa_session.flush()
log.info("parsing the tool configuration")
tree = util.parse_xml( config_filename )
root = tree.getroot()
@@ -181,6 +205,13 @@
if self.app.config.use_tool_dependencies:
self.dependency_manager = DependencyManager( [ self.app.config.tool_dependency_dir ] )
+ @property
+ def sa_session( self ):
+ """
+ Returns a SQLAlchemy session
+ """
+ return self.app.model.context
+
class ToolSection( object ):
"""
A group of tools with similar type/purpose that will be displayed as a
--- a/lib/galaxy/web/controllers/root.py Tue Apr 19 00:07:36 2011 -0400
+++ b/lib/galaxy/web/controllers/root.py Tue Apr 19 08:21:54 2011 -0400
@@ -49,9 +49,30 @@
return trans.fill_template('/root/tool_menu.mako', toolbox=toolbox, recent_tools=recent_tools )
@web.json
- def tool_search( self, trans, query ):
- trans.log_action( trans.get_user(), "tool_search.search", "", { "query" : query } )
- return trans.app.toolbox_search.search( query )
+ def tool_search( self, trans, **kwd ):
+ query = kwd.get( 'query', '' )
+ tags = util.listify( kwd.get( 'tags[]', [] ) )
+ trans.log_action( trans.get_user(), "tool_search.search", "", { "query" : query, "tags" : tags } )
+ results = []
+ if tags:
+ tags = trans.sa_session.query( trans.app.model.Tag ).filter( trans.app.model.Tag.name.in_( tags ) ).all()
+ for tagged_tool_il in [ tag.tagged_tools for tag in tags ]:
+ for tagged_tool in tagged_tool_il:
+ if tagged_tool.tool_id not in results:
+ results.append( tagged_tool.tool_id )
+ if trans.user:
+ trans.user.preferences['selected_tool_tags'] = ','.join( [ tag.name for tag in tags ] )
+ trans.sa_session.flush()
+ elif trans.user:
+ trans.user.preferences['selected_tool_tags'] = ''
+ trans.sa_session.flush()
+ if len( query ) > 2:
+ search_results = trans.app.toolbox_search.search( query )
+ if 'tags[]' in kwd:
+ results = filter( lambda x: x in results, search_results )
+ else:
+ results = search_results
+ return results
@web.expose
def tool_help( self, trans, id ):
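The tool_search change above merges two result sets: tool ids collected from the selected tags and, when the query is at least three characters long, hits from the toolbox full-text search; when both are present, the tag set filters the search hits. A standalone sketch of that combination logic follows, with illustrative names standing in for the ORM query and toolbox_search.search():

    def search_tools(query, tags_selected, tag_matched_ids, search_results):
        # tag_matched_ids: tool ids gathered from the selected tags' associations.
        # search_results: tool ids returned by the toolbox full-text search.
        results = list(tag_matched_ids)
        if len(query) > 2:
            if tags_selected:
                # Keep only full-text hits that also carry a selected tag.
                results = filter(lambda t: t in results, search_results)
            else:
                results = search_results
        return results

    print search_tools("map", True,
                       ["tool_a", "tool_b"],
                       ["tool_a", "tool_c"])
    # -> ['tool_a']
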
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/build_toolbox.py Tue Apr 19 08:21:54 2011 -0400
@@ -0,0 +1,166 @@
+import os
+import sys
+from xml.etree import ElementTree as ET
+
+def prettify(elem):
+ from xml.dom import minidom
+ rough_string = ET.tostring(elem, 'utf-8')
+ repaired = minidom.parseString(rough_string)
+ return repaired.toprettyxml(indent=' ')
+
+# Build a list of all toolconf xml files in the tools directory
+def getfilenamelist(startdir):
+ filenamelist = []
+ for root, dirs, files in os.walk(startdir):
+ for fn in files:
+ fullfn = os.path.join(root, fn)
+ if fn.endswith('toolconf.xml'):
+ filenamelist.append(fullfn)
+ elif fn.endswith('.xml'):
+ try:
+ doc = ET.parse(fullfn)
+ except:
+ print "An OOPS on", fullfn
+ raise
+ rootelement = doc.getroot()
+ # Only interpret those 'tool' XML files that have
+ # the 'toolboxposition' element.
+ if rootelement.tag == 'tool':
+ if rootelement.findall('toolboxposition'):
+ filenamelist.append(fullfn)
+ else:
+ print "DBG> tool config does not have a <section>:", fullfn
+ return filenamelist
+
+class ToolBox(object):
+ def __init__(self):
+ from collections import defaultdict
+ self.tools = defaultdict(list)
+ self.sectionorders = {}
+
+ def add(self, toolelement, toolboxpositionelement):
+ section = toolboxpositionelement.attrib.get('section','')
+ label = toolboxpositionelement.attrib.get('label','')
+ order = int(toolboxpositionelement.attrib.get('order', '0'))
+ sectionorder = int(toolboxpositionelement.attrib.get('sectionorder', '0'))
+
+ # If this is the first time we encounter the section, store its order
+ # number. If we have seen it before, ignore the given order and use
+ # the stored one instead
+ if not self.sectionorders.has_key(section):
+ self.sectionorders[section] = sectionorder
+ else:
+ sectionorder = self.sectionorders[section]
+
+ # Sortorder: add intelligent mix to the front
+ self.tools[("%05d-%s"%(sectionorder,section), label, order, section)].append(toolelement)
+
+ def addElementsTo(self, rootelement):
+ toolkeys = self.tools.keys()
+ toolkeys.sort()
+
+ # Initialize the loop: IDs to zero, current section and label to ''
+ currentsection = ''
+ sectionnumber = 0
+ currentlabel = ''
+ labelnumber = 0
+ for toolkey in toolkeys:
+ section = toolkey[3]
+ # If we change sections, add the new section to the XML tree,
+ # and start adding stuff to the new section. If the new section
+ # is '', start adding stuff to the root again.
+ if currentsection != section:
+ currentsection = section
+ # Start the section with empty label
+ currentlabel = ''
+ if section:
+ sectionnumber += 1
+ attrib = {'name': section,
+ 'id': "section%d"% sectionnumber}
+ sectionelement = ET.Element('section', attrib)
+ rootelement.append(sectionelement)
+ currentelement = sectionelement
+ else:
+ currentelement = rootelement
+ label = toolkey[1]
+
+ # If we change labels, add the new label to the XML tree
+ if currentlabel != label:
+ currentlabel = label
+ if label:
+ labelnumber += 1
+ attrib = {'text': label,
+ 'id': "label%d"% labelnumber}
+ labelelement = ET.Element('label', attrib)
+ currentelement.append(labelelement)
+
+ # Add the tools that are in this place
+ for toolelement in self.tools[toolkey]:
+ currentelement.append(toolelement)
+
+# Analyze all the toolconf xml files given in the filenamelist
+# Build a list of all sections
+def scanfiles(filenamelist):
+ # Build an empty tool box
+ toolbox = ToolBox()
+
+ # Read each of the files in the list
+ for fn in filenamelist:
+ doc = ET.parse(fn)
+ root = doc.getroot()
+
+ if root.tag == 'tool':
+ toolelements = [root]
+ else:
+ toolelements = doc.findall('tool')
+
+ for toolelement in toolelements:
+ # Figure out where the tool XML file is, absolute path.
+ if toolelement.attrib.has_key('file'):
+ # It is mentioned, we need to make it absolute
+ fileattrib = os.path.join(os.getcwd(),
+ os.path.dirname(fn),
+ toolelement.attrib['file'])
+ else:
+ # It is the current file
+ fileattrib = os.path.join(os.getcwd(), fn)
+
+ # Store the file in the attributes of the new tool element
+ attrib = {'file': fileattrib}
+
+ # Add the tags into the attributes
+ tags = toolelement.find('tags')
+ if tags:
+ tagarray = []
+ for tag in tags.findall('tag'):
+ tagarray.append(tag.text)
+ attrib['tags'] = ",".join(tagarray)
+ else:
+ print "DBG> No tags in",fn
+
+ # Build the tool element
+ newtoolelement = ET.Element('tool', attrib)
+ toolboxpositionelements = toolelement.findall('toolboxposition')
+ if not toolboxpositionelements:
+ print "DBG> %s has no toolboxposition" % fn
+ else:
+ for toolboxpositionelement in toolboxpositionelements:
+ toolbox.add(newtoolelement, toolboxpositionelement)
+ return toolbox
+
+def assemble():
+ filenamelist = []
+ for directorytree in ['tools']:
+ filenamelist.extend(getfilenamelist('tools'))
+ filenamelist.sort()
+
+ toolbox = scanfiles(filenamelist)
+
+ toolboxelement = ET.Element('toolbox')
+
+ toolbox.addElementsTo(toolboxelement)
+
+ print prettify(toolboxelement)
+
+if __name__ == "__main__":
+ assemble()
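As a usage note: assemble() walks the tools/ directory, sorts the collected files, and prints the prettified <toolbox> XML to stdout, so the generated configuration can presumably be captured by running python scripts/build_toolbox.py from the Galaxy root and redirecting stdout to a tool_conf.xml of your choosing (the redirect target is illustrative).
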
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/extract_toolbox_sections.py Tue Apr 19 08:21:54 2011 -0400
@@ -0,0 +1,141 @@
+import os
+import sys
+from xml.etree import ElementTree as ET
+from collections import defaultdict
+
+# Todo: ""
+# execute from galaxy root dir
+
+tooldict = defaultdict(list)
+
+def main():
+ doc = ET.parse("tool_conf.xml")
+ root = doc.getroot()
+
+
+ # index range 1-1000, current sections/tools divided between 250-750
+ sectionindex = 250
+ sectionfactor = int( 500 / len( root.getchildren() ) )
+
+ for rootchild in root.getchildren():
+ currentsectionlabel = ""
+ if ( rootchild.tag == "section" ):
+ sectionname = rootchild.attrib['name']
+ # per section tool index range 1-1000, current labels/tools
+ # divided between 20 and 750
+ toolindex = 250
+ toolfactor = int( 500 / len( rootchild.getchildren() ) )
+ currentlabel = ""
+ for sectionchild in rootchild.getchildren():
+ if ( sectionchild.tag == "tool" ):
+ addToToolDict(sectionchild, sectionname, sectionindex, toolindex, currentlabel)
+ toolindex += toolfactor
+ elif ( sectionchild.tag == "label" ):
+ currentlabel = sectionchild.attrib["text"]
+ sectionindex += sectionfactor
+ elif ( rootchild.tag == "tool" ):
+ addToToolDict(rootchild, "", sectionindex, None, currentsectionlabel)
+ sectionindex += sectionfactor
+ elif ( rootchild.tag == "label" ):
+ currentsectionlabel = rootchild.attrib["text"]
+ sectionindex += sectionfactor
+
+
+ # scan galaxy root tools dir for tool-specific xmls
+ toolconffilelist = getfnl( os.path.join(os.getcwd(), "tools" ) )
+
+ # foreach tool xml:
+ # check if the tags element exists in the tool xml (as child of <tool>)
+ # if not, add empty tags element for later use
+ # if this tool is in the above tooldict, add the toolboxposition element to the tool xml
+ # if not, then nothing.
+ for toolconffile in toolconffilelist:
+ hastags = False
+ hastoolboxpos = False
+
+ #parse tool config file into a document structure as defined by the ElementTree
+ tooldoc = ET.parse(toolconffile)
+ # get the root element of the toolconfig file
+ tooldocroot = tooldoc.getroot()
+ #check tags element, set flag
+ tagselement = tooldocroot.find("tags")
+ if (tagselement):
+ hastags = True
+ # check if toolboxposition element already exists in this toolconfig file
+ toolboxposelement = tooldocroot.find("toolboxposition")
+ if ( toolboxposelement ):
+ hastoolboxpos = True
+
+ if ( not ( hastags and hastoolboxpos ) ):
+ original = open( toolconffile, 'r' )
+ contents = original.readlines()
+ original.close()
+
+ # the new elements will be added directly below the root tool element
+ addelementsatposition = 1
+ # but what's on the first line? Root or not?
+ if ( contents[0].startswith("<?") ):
+ addelementsatposition = 2
+ newelements = []
+ if ( not hastoolboxpos ):
+ if ( toolconffile in tooldict ):
+ for attributes in tooldict[toolconffile]:
+ # create toolboxposition element
+ sectionelement = ET.Element("toolboxposition")
+ sectionelement.attrib = attributes
+ sectionelement.tail = "\n "
+ newelements.append( ET.tostring(sectionelement, 'utf-8') )
+
+ if ( not hastags ):
+ # create empty tags element
+ newelements.append( "<tags/>\n " )
+
+ contents = (
+ contents[ 0:addelementsatposition ] +
+ newelements +
+ contents[ addelementsatposition: ] )
+
+ # add .new for testing/safety purposes :P
+ newtoolconffile = open ( toolconffile, 'w' )
+ newtoolconffile.writelines( contents )
+ newtoolconffile.close()
+
+
+def addToToolDict(tool, sectionname, sectionindex, toolindex, currentlabel):
+ toolfile = tool.attrib["file"]
+ realtoolfile = os.path.join(os.getcwd(), "tools", toolfile)
+ toolxmlfile = ET.parse(realtoolfile)
+ localroot = toolxmlfile.getroot()
+
+ # define attributes for the toolboxposition xml-tag
+ attribdict = {}
+ if ( sectionname ):
+ attribdict[ "section" ] = sectionname
+ if ( currentlabel ):
+ attribdict[ "label" ] = currentlabel
+ if ( sectionindex ):
+ attribdict[ "sectionorder" ] = str(sectionindex)
+ if ( toolindex ):
+ attribdict[ "order" ] = str(toolindex)
+ tooldict[ realtoolfile ].append(attribdict)
+
+# Build a list of all toolconf xml files in the tools directory
+def getfnl(startdir):
+ filenamelist = []
+ for root, dirs, files in os.walk(startdir):
+ for fn in files:
+ fullfn = os.path.join(root, fn)
+ if fn.endswith('.xml'):
+ try:
+ doc = ET.parse(fullfn)
+ except:
+ print "Oops, bad xml in: ", fullfn
+ raise
+ rootelement = doc.getroot()
+ # here we check if this xml file actually is a tool conf xml!
+ if rootelement.tag == 'tool':
+ filenamelist.append(fullfn)
+ return filenamelist
+
+if __name__ == "__main__":
+ main()
--- a/static/june_2007_style/autocomplete_tagging.css.tmpl Tue Apr 19 00:07:36 2011 -0400
+++ b/static/june_2007_style/autocomplete_tagging.css.tmpl Tue Apr 19 08:21:54 2011 -0400
@@ -71,11 +71,12 @@
.individual-tag-area
{
cursor: pointer;
+ border: 1px dotted transparent;
}
.individual-tag-area:hover
{
- border:dotted #999999 1px;
+ border: 1px dotted #999999;
}
.active-tag-area {
@@ -139,4 +140,9 @@
{
margin-left: 0.3em;
-}
\ No newline at end of file
+}
+
+.active-tag-name
+{
+ font-weight: bold;
+}
--- a/static/june_2007_style/blue/autocomplete_tagging.css Tue Apr 19 00:07:36 2011 -0400
+++ b/static/june_2007_style/blue/autocomplete_tagging.css Tue Apr 19 08:21:54 2011 -0400
@@ -7,8 +7,8 @@
.ac_over{background-color:#0A246A;color:white;}
.ac_header{font-style:normal;color:gray;border-bottom:0.1em solid gray;}
.tag-area{width:100%;}
-.individual-tag-area{cursor:pointer;}
-.individual-tag-area:hover{border:dotted #999999 1px;}
+.individual-tag-area{cursor:pointer;border:1px dotted transparent;}
+.individual-tag-area:hover{border:1px dotted #999999;}
.active-tag-area{background-color:white;}
.toggle-link{font-weight:normal;padding:0.3em;margin-bottom:1em;width:100%;padding:0.2em 0em 0.2em 0em;}
.tag-button{width:auto;color:#444;text-decoration:none;display:inline-block;cursor:pointer;margin:0.2em;border:solid #bbb 1px;padding:0.1em 0.5em 0.1em 0.5em;-moz-border-radius:.5em;-webkit-border-radius:.5em;border-radius:.5em;background:#eee;}
@@ -18,3 +18,4 @@
.add-tag-button:hover{cursor:pointer;}
.tag-input{vertical-align:bottom;border:none;outline:none;resize:none;}
.delete-tag-img{margin-left:0.3em;}
+.active-tag-name{font-weight:bold;}
--- a/templates/root/tool_menu.mako Tue Apr 19 00:07:36 2011 -0400
+++ b/templates/root/tool_menu.mako Tue Apr 19 08:21:54 2011 -0400
@@ -2,6 +2,8 @@
import re
%>
+<%namespace file="/tagging_common.mako" import="render_tool_tagging_elements" />
+
## Render a tool
<%def name="render_tool( tool, section )">
%if not tool.hidden:
@@ -21,9 +23,9 @@
## ${tool.description.replace( '[[', '<a href="link" target="galaxy_main">' % $tool.id ).replace( "]]", "</a>" )
<% tool_id = re.sub( '[^a-z0-9_]', '_', tool.id.lower() ) %>
%if tool.name:
- <a class="link-${tool_id}" href="${link}" target=${tool.target} minsizehint="${tool.uihints.get( 'minwidth', -1 )}">${_(tool.name)}</a> ${tool.description}
+ <a class="link-${tool_id} tool-link" href="${link}" target=${tool.target} minsizehint="${tool.uihints.get( 'minwidth', -1 )}">${_(tool.name)}</a> ${tool.description}
%else:
- <a class="link-${tool_id}" href="${link}" target=${tool.target} minsizehint="${tool.uihints.get( 'minwidth', -1 )}">${tool.description}</a>
+ <a class="link-${tool_id} tool-link" href="${link}" target=${tool.target} minsizehint="${tool.uihints.get( 'minwidth', -1 )}">${tool.description}</a>
%endif
</div>
%endif
@@ -55,9 +57,10 @@
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" /><link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" /><link href="${h.url_for('/static/style/tool_menu.css')}" rel="stylesheet" type="text/css" />
+ <link href="${h.url_for('/static/style/autocomplete_tagging.css')}" rel="stylesheet" type="text/css" />
##<script type="text/javascript" src="${h.url_for('/static/scripts/jquery.js')}"></script>
- ${h.js( "jquery", "galaxy.base", "json2" )}
+ ${h.js( "jquery", "galaxy.base", "json2", "autocomplete_tagging" )}
<script type="text/javascript">
// Set up GalaxyAsync object.
@@ -111,9 +114,16 @@
// Remove italics.
$(this).css("font-style", "normal");
+ // Don't search if the search value is < 3 chars, but clear the search if there was a previous query
+ if ( this.value.length < 3 && this.lastValue && this.lastValue.length >= 3 ) {
+ reset_tool_search(false);
+ // Re-apply tags
+ if ( current_tags.length > 0 ) {
+ $.get("${h.url_for( controller='root', action='tool_search' )}", { query: "", tags: current_tags }, function (data) {
+ apply_search_results(data);
+ }, "json" );
+ }
// Don't update if same value as last time
- if ( this.value.length < 3 ) {
- reset_tool_search(false);
} else if ( this.value !== this.lastValue ) {
// Add class to denote that searching is active.
$(this).addClass("search_active");
@@ -127,64 +137,84 @@
// Start a new ajax-request in X ms
$("#search-spinner").show();
this.timer = setTimeout(function () {
- $.get("${h.url_for( controller='root', action='tool_search' )}", { query: q }, function (data) {
- // input.removeClass(config.loadingClass);
- // Show live-search if results and search-term aren't empty
- $("#search-no-results").hide();
- // Hide all tool sections.
- $(".toolSectionWrapper").hide();
- // This hides all tools but not workflows link (which is in a .toolTitle div).
- $(".toolSectionWrapper").find(".toolTitle").hide();
- if ( data.length !== 0 ) {
- // Map tool ids to element ids and join them.
- var s = $.map( data, function( n, i ) { return ".link-" + n.toLowerCase().replace(/[^a-z0-9_]/g,'_'); } ).join( ", " );
-
- // First pass to show matching tools and their parents.
- $(s).each( function() {
- // Add class to denote match.
- $(this).parent().addClass("search_match");
- if ($(this).parents("#recently_used_wrapper").length === 0) {
- // Default behavior.
- $(this).parent().show().parent().parent().show().parent().show();
- } else if ($(this).parents(".user_pref_visible").length !== 0) {
- // RU menu is visible, so filter it as normal.
- $(this).parent().show().parent().parent().show().parent().show();
- } else {
- // RU menu is not visible, so set up classes and visibility so that if the menu is shown,
- // matching is already in place.
- $(this).parent().show();
- }
- });
-
- // Hide labels that have no visible children.
- $(".toolPanelLabel").each( function() {
- var this_label = $(this);
- var next = this_label.next();
- var no_visible_tools = true;
- // Look through tools following label and, if none are visible, hide label.
- while (next.length !== 0 && next.hasClass("toolTitle")) {
- if (next.is(":visible")) {
- no_visible_tools = false;
- break;
- } else {
- next = next.next();
- }
- }
- if (no_visible_tools) {
- this_label.hide();
- }
- });
- } else {
- $("#search-no-results").show();
- }
+ $.get("${h.url_for( controller='root', action='tool_search' )}", { query: q, tags: current_tags }, function (data) {
+ apply_search_results(data);
$("#search-spinner").hide();
}, "json" );
}, 200 );
}
this.lastValue = this.value;
});
+
+ // Apply stored tags
+ %if trans.user and trans.user.preferences.get( 'selected_tool_tags', '' ):
+ current_tags = "${trans.user.preferences['selected_tool_tags']}".split(",")
+ $.get("${h.url_for( controller='root', action='tool_search' )}", { query: "", tags: current_tags }, function (data) {
+ apply_search_results(data);
+ }, "json" );
+ $("span.tag-name").each( function() {
+ for ( var i in current_tags ) {
+ if ( $(this).text() == current_tags[i] ) {
+ $(this).addClass("active-tag-name");
+ $(this).append("<img class='delete-tag-img' src='${h.url_for('/static/images/delete_tag_icon_gray.png')}'/>")
+ }
+ }
+ });
+ %endif
});
+ var apply_search_results = function (data) {
+ // input.removeClass(config.loadingClass);
+ // Show live-search if results and search-term aren't empty
+ $("#search-no-results").hide();
+ // Hide all tool sections.
+ $(".toolSectionWrapper").hide();
+ // This hides all tools but not workflows link (which is in a .toolTitle div).
+ $(".toolSectionWrapper").find(".toolTitle").hide();
+ if ( data.length !== 0 ) {
+ // Map tool ids to element ids and join them.
+ var s = $.map( data, function( n, i ) { return ".link-" + n.toLowerCase().replace(/[^a-z0-9_]/g,'_'); } ).join( ", " );
+
+ // First pass to show matching tools and their parents.
+ $(s).each( function() {
+ // Add class to denote match.
+ $(this).parent().addClass("search_match");
+ if ($(this).parents("#recently_used_wrapper").length === 0) {
+ // Default behavior.
+ $(this).parent().show().parent().parent().show().parent().show();
+ } else if ($(this).parents(".user_pref_visible").length !== 0) {
+ // RU menu is visible, so filter it as normal.
+ $(this).parent().show().parent().parent().show().parent().show();
+ } else {
+ // RU menu is not visible, so set up classes and visibility so that if the menu is shown,
+ // matching is already in place.
+ $(this).parent().show();
+ }
+ });
+
+ // Hide labels that have no visible children.
+ $(".toolPanelLabel").each( function() {
+ var this_label = $(this);
+ var next = this_label.next();
+ var no_visible_tools = true;
+ // Look through tools following label and, if none are visible, hide label.
+ while (next.length !== 0 && next.hasClass("toolTitle")) {
+ if (next.is(":visible")) {
+ no_visible_tools = false;
+ break;
+ } else {
+ next = next.next();
+ }
+ }
+ if (no_visible_tools) {
+ this_label.hide();
+ }
+ });
+ } else {
+ $("#search-no-results").show();
+ }
+ }
+
// Update recently used tools menu. Function inserts a new item and removes the last item.
function update_recently_used() {
$.ajax({
@@ -228,7 +258,52 @@
}
}
});
+
}
+
+ var current_tags = new Array();
+ function tool_tag_click(tag_name, tag_value) {
+ var add = true;
+ for ( var i = 0 ; i < current_tags.length ; i++ ) {
+ if ( current_tags[i] == tag_name ) {
+ current_tags.splice( i, 1 );
+ add = false;
+ }
+ }
+ if ( add ) {
+ current_tags.push( tag_name );
+ $("span.tag-name").each( function() {
+ if ( $(this).text() == tag_name ) {
+ $(this).addClass("active-tag-name");
+ $(this).append("<img class='delete-tag-img' src='${h.url_for('/static/images/delete_tag_icon_gray.png')}'/>")
+ }
+ });
+ } else {
+ $("span.tag-name").each( function() {
+ if ( $(this).text() == tag_name ) {
+ $(this).removeClass("active-tag-name");
+ $(this).text(tag_name);
+ }
+ });
+ }
+ if ( current_tags.length == 0 ) {
+ $("#search-no-results").hide();
+ $(".tool-link").each( function() {
+ reset_tool_search(false);
+ });
+ return;
+ }
+ var q = $("input#tool-search-query").val();
+ if ( q == "search tools" ) {
+ q = "";
+ } else if ( q.length > 0 ) {
+ q = q + '*';
+ }
+ $.get("${h.url_for( controller='root', action='tool_search' )}", { query: q, tags: current_tags }, function (data) {
+ apply_search_results(data);
+ }, "json" );
+ }
+
</script></head>
@@ -246,6 +321,12 @@
else:
display = "none"
%>
+ %if trans.app.config.get_bool( 'enable_tool_tags', False ):
+ <div id="tool-tags" style="padding-bottom: 5px; position: relative; display: ${display}; width: 100%">
+ <b>Tags:</b>
+ ${render_tool_tagging_elements()}
+ </div>
+ %endif
<div id="tool-search" style="padding-bottom: 5px; position: relative; display: ${display}; width: 100%"><input type="text" name="query" value="search tools" id="tool-search-query" autocomplete="off" style="width: 100%; font-style:italic; font-size: inherit"/><img src="${h.url_for('/static/images/loading_small_white_bg.gif')}" id="search-spinner" style="display: none; position: absolute; right: 0; top: 5px;"/>
--- a/templates/tagging_common.mako Tue Apr 19 00:07:36 2011 -0400
+++ b/templates/tagging_common.mako Tue Apr 19 08:21:54 2011 -0400
@@ -84,6 +84,21 @@
</div></%def>
+## Render tool tagging elements
+<%def name="render_tool_tagging_elements()">
+ <%
+ elt_id = int ( floor ( random()*maxint ) )
+ tags = trans.app.tag_handler.get_tool_tags( trans )
+ %>
+ ${self.render_tagging_element_html(elt_id=elt_id, \
+ tags=tags, \
+ editable=False, \
+ use_toggle_link=False )}
+ <script type="text/javascript">
+ init_tag_click_function($('#${elt_id}'), tool_tag_click);
+ </script>
+</%def>
+
## Render community tagging element.
<%def name="render_community_tagging_element(tagged_item=None, elt_context=None, use_toggle_link=False, tag_click_fn='default_tag_click_fn')">
## Build HTML.
@@ -213,4 +228,4 @@
}
</style></noscript>
-</%def>
\ No newline at end of file
+</%def>
--- a/tools/annotation_profiler/annotation_profiler.xml Tue Apr 19 00:07:36 2011 -0400
+++ b/tools/annotation_profiler/annotation_profiler.xml Tue Apr 19 08:21:54 2011 -0400
@@ -1,4 +1,4 @@
-<tool id="Annotation_Profiler_0" name="Profile Annotations" Version="1.0.0">
+<tool id="Annotation_Profiler_0" name="Profile Annotations" version="1.0.0"><description>for a set of genomic intervals</description><command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p ${GALAXY_DATA_INDEX_DIR}/annotation_profiler/$dbkey $summary -b 3 -t $table_names</command><inputs>
--- a/tools/human_genome_variation/funDo.xml Tue Apr 19 00:07:36 2011 -0400
+++ b/tools/human_genome_variation/funDo.xml Tue Apr 19 08:21:54 2011 -0400
@@ -1,4 +1,4 @@
-<tool id="hgv_funDo" name="FunDO" Version="1.0.0">
+<tool id="hgv_funDo" name="FunDO" version="1.0.0"><description>human genes associated with disease terms</description><command interpreter="perl">
--- a/tools/rgenetics/rgRegion.xml Tue Apr 19 00:07:36 2011 -0400
+++ b/tools/rgenetics/rgRegion.xml Tue Apr 19 08:21:54 2011 -0400
@@ -22,7 +22,7 @@
</inputs><outputs>
- <data format="lped" name="out_file1" label="${title}.lped" metadata_source=infile />
+ <data format="lped" name="out_file1" label="${title}.lped" metadata_source="infile" /></outputs><help>
--- a/tools/visualization/GMAJ.xml Tue Apr 19 00:07:36 2011 -0400
+++ b/tools/visualization/GMAJ.xml Tue Apr 19 08:21:54 2011 -0400
@@ -1,4 +1,4 @@
-<tool id="gmaj_1" name="GMAJ" Version="2.0.1">
+<tool id="gmaj_1" name="GMAJ" version="2.0.1"><description>Multiple Alignment Viewer</description><command interpreter="python">GMAJ.py $out_file1 $maf_input $gmaj_file $filenames_file</command><inputs>
http://bitbucket.org/galaxy/galaxy-central/changeset/623a4ec141f1/
changeset: r5430:623a4ec141f1
user: natefoo
date: 2011-04-19 14:22:48
summary: Make all tool tag related functionality conditional on 'enable_tool_tags = True' in the Galaxy configuration file.
affected #: 1 file (78 bytes)
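The diff below guards the startup-time tag cleanup with the same get_bool() check already used when loading tools. A minimal sketch of that gating pattern; the Config class here is a stand-in for Galaxy's configuration object, of which the diff only shows the get_bool(name, default) call:

    class Config(object):
        # Stand-in for app.config; only the get_bool call from the diff is modeled.
        def __init__(self, options):
            self.options = options
        def get_bool(self, key, default):
            if key in self.options:
                return str(self.options[key]).lower() in ('true', 'yes', '1')
            return default

    config = Config({'enable_tool_tags': 'True'})
    if config.get_bool('enable_tool_tags', False):
        print "clearing and rebuilding tool_tag_association rows"
    else:
        print "tool tags disabled; skipping tag bookkeeping"

Together with r5431 below, enabling the feature amounts to uncommenting enable_tool_tags in universe_wsgi.ini and setting it to True.
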
--- a/lib/galaxy/tools/__init__.py Tue Apr 19 08:21:54 2011 -0400
+++ b/lib/galaxy/tools/__init__.py Tue Apr 19 08:22:48 2011 -0400
@@ -130,9 +130,10 @@
key = 'section_' + section.id
panel_dict[ key ] = section
- log.info("removing all tool tag associations (" + str( self.sa_session.query( self.app.model.ToolTagAssociation ).count() ) + ")")
- self.sa_session.query( self.app.model.ToolTagAssociation ).delete()
- self.sa_session.flush()
+ if self.app.config.get_bool( 'enable_tool_tags', False ):
+ log.info("removing all tool tag associations (" + str( self.sa_session.query( self.app.model.ToolTagAssociation ).count() ) + ")")
+ self.sa_session.query( self.app.model.ToolTagAssociation ).delete()
+ self.sa_session.flush()
log.info("parsing the tool configuration")
tree = util.parse_xml( config_filename )
root = tree.getroot()
http://bitbucket.org/galaxy/galaxy-central/changeset/17ec7f2cc139/
changeset: r5431:17ec7f2cc139
user: natefoo
date: 2011-04-19 14:24:57
summary: Add enable_tool_tags to the sample configuration file.
affected #: 1 file (196 bytes)
--- a/universe_wsgi.ini.sample Tue Apr 19 08:22:48 2011 -0400
+++ b/universe_wsgi.ini.sample Tue Apr 19 08:24:57 2011 -0400
@@ -421,6 +421,11 @@
# Enable the (experimental! beta!) Web API. Documentation forthcoming.
#enable_api = False
+# Enable tool tags (associating tools with tags). This has its own option
+# since its implementation has a few performance implications on startup for
+# large servers.
+#enable_tool_tags = False
+
# Enable Galaxy's "Upload via FTP" interface. You'll need to install and
# configure an FTP server (we've used ProFTPd since it can use Galaxy's
# database for authentication) and set the following two options.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/2de8ac3adb53/
changeset: r5414:2de8ac3adb53
user: kanwei
date: 2011-04-19 06:07:36
summary: New pysam for solaris fixes
affected #: 1 file (0 bytes)
--- a/eggs.ini Mon Apr 18 18:07:52 2011 -0400
+++ b/eggs.ini Tue Apr 19 00:07:36 2011 -0400
@@ -21,7 +21,7 @@
pbs_python = 4.1.0
psycopg2 = 2.0.13
pycrypto = 2.0.1
-pysam = 0.4.1
+pysam = 0.4.2
pysqlite = 2.5.6
python_lzo = 1.08_2.03_static
simplejson = 2.1.1
@@ -69,7 +69,7 @@
bx_python = _494c2d1d68b3
GeneTrack = _dev_48da9e998f0caf01c5be731e926f4b0481f658f0
SQLAlchemy = _dev_r6498
-pysam = _kanwei_595e4f94f935
+pysam = _kanwei_6255075f4401
; dependency source urls, necessary for scrambling. for an explanation, see
; the wiki page above
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/fd714d78e05e/
changeset: r5413:fd714d78e05e
user: kanwei
date: 2011-04-19 00:07:52
summary: trackster:
- Use tabix (through pysam) as the indexer for feature formats. Bed support included in this commit
- Add bigBed format support (same interface as bigWig)
- Improve implicit converter error handling
affected #: 11 files (3.2 KB)
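The heart of the tabix change is the new TabixDataProvider, which opens a bgzip-compressed BED file together with its tabix index and iterates raw lines within a genomic window. A minimal sketch using the same pysam calls as the diff; the file paths are placeholders:

    from pysam import ctabix

    # Placeholder paths: a bgzip-compressed BED file and its tabix index,
    # corresponding to the bgzip/tabix converters wired up below.
    tabix = ctabix.Tabixfile("data.bed.gz", index_filename="data.bed.gz.tbi")

    chrom = "chr1"
    if chrom not in tabix.contigs and chrom[3:] in tabix.contigs:
        # Tolerate contig names stored without the 'chr' prefix.
        chrom = chrom[3:]

    for line in tabix.fetch(reference=chrom, start=0, end=100000):
        feature = line.split()
        # BED columns 2 and 3 are the interval bounds.
        print feature[0], int(feature[1]), int(feature[2])
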
--- a/datatypes_conf.xml.sample Mon Apr 18 17:02:32 2011 -0400
+++ b/datatypes_conf.xml.sample Mon Apr 18 18:07:52 2011 -0400
@@ -14,7 +14,8 @@
<datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true"><converter file="bed_to_gff_converter.xml" target_datatype="gff"/><converter file="interval_to_coverage.xml" target_datatype="coverage"/>
- <converter file="bed_to_interval_index_converter.xml" target_datatype="interval_index"/>
+ <converter file="bed_to_bgzip_converter.xml" target_datatype="bgzip"/>
+ <converter file="bed_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/><converter file="bed_to_summary_tree_converter.xml" target_datatype="summary_tree"/><!-- <display file="ucsc/interval_as_bed.xml" /> --><display file="genetrack.xml" />
@@ -131,6 +132,7 @@
<datatype extension="summary_tree" type="galaxy.datatypes.data:Data" /><datatype extension="interval_index" type="galaxy.datatypes.data:Data" /><datatype extension="tabix" type="galaxy.datatypes.data:Data" />
+ <datatype extension="bgzip" type="galaxy.datatypes.data:Data" /><!-- Start EMBOSS tools --><datatype extension="acedb" type="galaxy.datatypes.data:Text"/><datatype extension="asn1" type="galaxy.datatypes.data:Text"/>
--- a/eggs.ini Mon Apr 18 17:02:32 2011 -0400
+++ b/eggs.ini Mon Apr 18 18:07:52 2011 -0400
@@ -69,7 +69,7 @@
bx_python = _494c2d1d68b3
GeneTrack = _dev_48da9e998f0caf01c5be731e926f4b0481f658f0
SQLAlchemy = _dev_r6498
-pysam = _kanwei_ae2bd50d9945
+pysam = _kanwei_595e4f94f935
; dependency source urls, necessary for scrambling. for an explanation, see
; the wiki page above
--- a/lib/galaxy/datatypes/binary.py Mon Apr 18 17:02:32 2011 -0400
+++ b/lib/galaxy/datatypes/binary.py Mon Apr 18 18:07:52 2011 -0400
@@ -288,3 +288,7 @@
Binary.__init__( self, **kwd )
self._magic = 0x8789F2EB
self._name = "BigBed"
+
+ def get_track_type( self ):
+ return "LineTrack", {"data_standalone": "bigbed"}
+
--- a/lib/galaxy/datatypes/interval.py Mon Apr 18 17:02:32 2011 -0400
+++ b/lib/galaxy/datatypes/interval.py Mon Apr 18 18:07:52 2011 -0400
@@ -533,7 +533,7 @@
except: return False
def get_track_type( self ):
- return "FeatureTrack", {"data": "interval_index", "index": "summary_tree"}
+ return "FeatureTrack", {"data": "tabix", "index": "summary_tree"}
class BedStrict( Bed ):
"""Tab delimited data in strict BED format - no non-standard columns allowed"""
--- a/lib/galaxy/model/__init__.py Mon Apr 18 17:02:32 2011 -0400
+++ b/lib/galaxy/model/__init__.py Mon Apr 18 18:07:52 2011 -0400
@@ -21,6 +21,18 @@
datatypes_registry = galaxy.datatypes.registry.Registry() #Default Value Required for unit tests
+class NoConverterException(Exception):
+ def __init__(self, value):
+ self.value = value
+ def __str__(self):
+ return repr(self.value)
+
+class ConverterDependencyException(Exception):
+ def __init__(self, value):
+ self.value = value
+ def __str__(self):
+ return repr(self.value)
+
def set_datatypes_registry( d_registry ):
"""
Set up datatypes_registry
@@ -709,15 +721,33 @@
if not assoc.deleted and assoc.type == file_type:
return assoc.dataset
return None
+ def get_converted_dataset_deps(self, trans, target_ext):
+ """
+ Returns dict of { "dependency" => HDA }
+ """
+ converted_dataset = self.get_converted_files_by_type( target_ext )
+ # List of string of dependencies
+ try:
+ depends_list = trans.app.datatypes_registry.converter_deps[self.extension][target_ext]
+ except KeyError:
+ depends_list = []
+ return dict([ (dep, self.get_converted_dataset(trans, dep)) for dep in depends_list ])
def get_converted_dataset(self, trans, target_ext):
"""
- Return converted dataset(s) if they exist. If not converted yet, do so and return None (the first time).
- If unconvertible, raise exception.
+ Return converted dataset(s) if they exist, along with a dict of dependencies.
+ If not converted yet, do so and return None (the first time). If unconvertible, raise exception.
"""
# See if we can convert the dataset
if target_ext not in self.get_converter_types():
raise ValueError("Conversion from '%s' to '%s' not possible", self.extension, target_ext)
+ deps = {}
+ # List of string of dependencies
+ try:
+ depends_list = trans.app.datatypes_registry.converter_deps[self.extension][target_ext]
+ except KeyError:
+ depends_list = []
+
# See if converted dataset already exists
converted_dataset = self.get_converted_files_by_type( target_ext )
if converted_dataset:
@@ -725,20 +755,22 @@
# Conversion is possible but hasn't been done yet, run converter.
# Check if we have dependencies
- deps = {}
+
try:
- fail_dependencies = False
- depends_on = trans.app.datatypes_registry.converter_deps[self.extension][target_ext]
- for dependency in depends_on:
+ for dependency in depends_list:
dep_dataset = self.get_converted_dataset(trans, dependency)
- if dep_dataset is None or dep_dataset.state != trans.app.model.Job.states.OK:
- fail_dependencies = True
- else:
- deps[dependency] = dep_dataset
- if fail_dependencies:
- return None
+ if dep_dataset is None:
+ # None means converter is running first time
+ return None
+ elif dep_dataset.state == trans.app.model.Job.states.ERROR:
+ raise ConverterDependencyException("A dependency (%s) was in an error state." % dependency)
+ elif dep_dataset.state != trans.app.model.Job.states.OK:
+ # Pending
+ return None
+
+ deps[dependency] = dep_dataset
except ValueError:
- raise ValueError("A dependency could not be converted.")
+ raise NoConverterException("A dependency (%s) is missing a converter." % dependency)
except KeyError:
pass # No deps
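The dependency resolution above distinguishes three outcomes per dependency: still running (return None so the caller retries later), failed (raise ConverterDependencyException), and ready (collect it into deps). A standalone sketch of that decision ladder; everything except the exception name is illustrative:

    class ConverterDependencyException(Exception):
        pass

    OK, ERROR, RUNNING = "ok", "error", "running"

    def resolve_deps(dep_states):
        # dep_states: dependency name -> job state, or None if not started yet.
        deps = {}
        for dependency, state in dep_states.items():
            if state is None:
                # Converter was just kicked off; caller should retry later.
                return None
            elif state == ERROR:
                raise ConverterDependencyException(
                    "A dependency (%s) was in an error state." % dependency)
            elif state != OK:
                return None  # still pending
            deps[dependency] = state
        return deps

    print resolve_deps({"bgzip": OK})       # {'bgzip': 'ok'}
    print resolve_deps({"bgzip": RUNNING})  # None
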
--- a/lib/galaxy/visualization/tracks/data_providers.py Mon Apr 18 17:02:32 2011 -0400
+++ b/lib/galaxy/visualization/tracks/data_providers.py Mon Apr 18 18:07:52 2011 -0400
@@ -21,7 +21,7 @@
from galaxy.datatypes.interval import Bed, Gff, Gtf
from galaxy.datatypes.util.gff_util import parse_gff_attributes
-from pysam import csamtools
+from pysam import csamtools, ctabix
MAX_VALS = 5000 # only display first MAX_VALS features
ERROR_MAX_VALS = "Only the first " + str(MAX_VALS) + " %s in this tile are displayed."
@@ -45,10 +45,11 @@
"""
col_name_data_attr_mapping = {}
- def __init__( self, converted_dataset=None, original_dataset=None ):
+ def __init__( self, converted_dataset=None, original_dataset=None, dependencies=None ):
""" Create basic data provider. """
self.converted_dataset = converted_dataset
self.original_dataset = original_dataset
+ self.dependencies = dependencies
def write_data_to_file( self, chrom, start, end, filename ):
"""
@@ -419,36 +420,27 @@
f.close()
return results
-class BigWigDataProvider( TracksDataProvider ):
+class BBIDataProvider( TracksDataProvider ):
"""
- BigWig data provider for the Galaxy track browser.
+ BBI data provider for the Galaxy track browser.
"""
- def _get_dataset( self ):
- if self.converted_dataset is not None:
- f = open( self.converted_dataset.file_name )
- else:
- f = open( self.original_dataset.file_name )
- return f
-
def valid_chroms( self ):
# No way to return this info as of now
return None
def has_data( self, chrom ):
- f = self._get_dataset()
- bw = BigWigFile(file=f)
- all_dat = bw.query(chrom, 0, 2147483647, 1)
+ f, bbi = self._get_dataset()
+ all_dat = bbi.query(chrom, 0, 2147483647, 1)
f.close()
return all_dat is not None
def get_data( self, chrom, start, end, **kwargs ):
# Bigwig has the possibility of it being a standalone bigwig file, in which case we use
# original_dataset, or coming from wig->bigwig conversion in which we use converted_dataset
- f = self._get_dataset()
- bw = BigWigFile(file=f)
+ f, bbi = self._get_dataset()
if 'stats' in kwargs:
- all_dat = bw.query(chrom, 0, 2147483647, 1)
+ all_dat = bbi.query(chrom, 0, 2147483647, 1)
f.close()
if all_dat is None:
return None
@@ -464,7 +456,7 @@
if (end - start) < num_points:
num_points = end - start
- data = bw.query(chrom, start, end, num_points)
+ data = bbi.query(chrom, start, end, num_points)
f.close()
pos = start
@@ -477,6 +469,20 @@
return result
+class BigBedDataProvider( BBIDataProvider ):
+ def _get_dataset( self ):
+ # Nothing converts to bigBed so we don't consider converted dataset
+ f = open( self.original_dataset.file_name )
+ return f, BigBedFile(file=f)
+
+class BigWigDataProvider (BBIDataProvider ):
+ def _get_dataset( self ):
+ if self.converted_dataset is not None:
+ f = open( self.converted_dataset.file_name )
+ else:
+ f = open( self.original_dataset.file_name )
+ return f, BigWigFile(file=f)
+
class IntervalIndexDataProvider( TracksDataProvider ):
"""
Interval index data provider for the Galaxy track browser.
@@ -557,10 +563,25 @@
return filters
+class TabixDataProvider( IntervalIndexDataProvider ):
+ """
+ Tabix index data provider for the Galaxy track browser.
+
+ Payload format: [ uid (offset), start, end, name, strand, thick_start, thick_end, blocks ]
+ """
+
def get_data( self, chrom, start, end, **kwargs ):
+ if end >= 2<<29:
+ end = (2<<29 - 1) # Tabix-enforced maximum
start, end = int(start), int(end)
- source = open( self.original_dataset.file_name )
- index = Indexes( self.converted_dataset.file_name )
+
+ # {'bgzip': (<galaxy.model.HistoryDatasetAssociation object at 0x85fbe90>, {})}
+ bgzip_fname = self.dependencies['bgzip'].file_name
+
+ # if os.path.getsize(self.converted_dataset.file_name) == 0:
+ # return { 'kind': messages.ERROR, 'message': "Tabix converted size was 0, meaning the input file had invalid values." }
+ tabix = ctabix.Tabixfile(bgzip_fname, index_filename=self.converted_dataset.file_name)
+
results = []
count = 0
message = None
@@ -569,7 +590,7 @@
# characters (e.g. 'chr') and see if that works. This enables the
# provider to handle chrom names defined as chrXXX and as XXX.
chrom = str(chrom)
- if chrom not in index.indexes and chrom[3:] in index.indexes:
+ if chrom not in tabix.contigs and ("chr" + chrom[3:]) in tabix.contigs:
chrom = chrom[3:]
#
@@ -581,12 +602,12 @@
#
filter_cols = from_json_string( kwargs.get( "filter_cols", "[]" ) )
no_detail = ( "no_detail" in kwargs )
- for start, end, offset in index.find(chrom, start, end):
+
+ for line in tabix.fetch(reference=chrom, start=start, end=end):
if count >= MAX_VALS:
message = ERROR_MAX_VALS % "features"
break
count += 1
- source.seek( offset )
# TODO: can we use column metadata to fill out payload?
# TODO: use function to set payload data
if isinstance( self.original_dataset.datatype, Gff ):
@@ -597,35 +618,38 @@
payload.insert( 0, offset )
elif isinstance( self.original_dataset.datatype, Bed ):
# BED dataset.
- payload = [ offset, start, end ]
- if not no_detail:
- feature = source.readline().split()
- length = len(feature)
-
- # Simpler way to add stuff, but type casting is not done.
- # Name, score, strand, thick start, thick end.
- #end = min( len( feature ), 8 )
- #payload.extend( feature[ 3:end ] )
-
- # Name, strand, thick start, thick end.
- if length >= 4:
- payload.append(feature[3])
- if length >= 6:
- payload.append(feature[5])
- if length >= 8:
- payload.append(int(feature[6]))
- payload.append(int(feature[7]))
+ feature = line.split()
+ length = len(feature)
+ payload = [ feature[1]+"-"+feature[2]+":"+str(count), int(feature[1]), int(feature[2]) ]
+
+ if no_detail:
+ results.append( payload )
+ continue
+
+ # Simpler way to add stuff, but type casting is not done.
+ # Name, score, strand, thick start, thick end.
+ #end = min( len( feature ), 8 )
+ #payload.extend( feature[ 3:end ] )
+
+ # Name, strand, thick start, thick end.
+ if length >= 4:
+ payload.append(feature[3])
+ if length >= 6:
+ payload.append(feature[5])
+ if length >= 8:
+ payload.append(int(feature[6]))
+ payload.append(int(feature[7]))
- # Blocks.
- if length >= 12:
- block_sizes = [ int(n) for n in feature[10].split(',') if n != '']
- block_starts = [ int(n) for n in feature[11].split(',') if n != '' ]
- blocks = zip( block_sizes, block_starts )
- payload.append( [ ( start + block[1], start + block[1] + block[0] ) for block in blocks ] )
-
- # Score (filter data)
- if length >= 5 and filter_cols and filter_cols[0] == "Score":
- payload.append( float(feature[4]) )
+ # Blocks.
+ if length >= 12:
+ block_sizes = [ int(n) for n in feature[10].split(',') if n != '']
+ block_starts = [ int(n) for n in feature[11].split(',') if n != '' ]
+ blocks = zip( block_sizes, block_starts )
+ payload.append( [ ( int(feature[1]) + block[1], int(feature[1]) + block[1] + block[0] ) for block in blocks ] )
+
+ # Score (filter data)
+ if length >= 5 and filter_cols and filter_cols[0] == "Score":
+ payload.append( float(feature[4]) )
results.append( payload )
@@ -671,10 +695,12 @@
# is original dataset type. TODO: This needs to be more flexible.
dataset_type_name_to_data_provider = {
"array_tree": ArrayTreeDataProvider,
+ "tabix": TabixDataProvider,
"interval_index": { "vcf": VcfDataProvider, "default" : IntervalIndexDataProvider },
"bai": BamDataProvider,
"summary_tree": SummaryTreeDataProvider,
- "bigwig": BigWigDataProvider
+ "bigwig": BigWigDataProvider,
+ "bigbed": BigBedDataProvider
}
dataset_type_to_data_provider = {
--- a/lib/galaxy/web/controllers/tracks.py Mon Apr 18 17:02:32 2011 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Mon Apr 18 18:07:52 2011 -0400
@@ -14,7 +14,7 @@
from galaxy.web.framework.helpers import time_ago, grids
from galaxy.util.bunch import Bunch
from galaxy.datatypes.interval import Gff
-
+from galaxy.model import NoConverterException, ConverterDependencyException
from galaxy.visualization.tracks.data_providers import *
from galaxy.visualization.tracks.visual_analytics import get_tool_def, get_dataset_job
@@ -458,7 +458,8 @@
# Check for data in the genome window.
if data_sources.get( 'index' ):
tracks_dataset_type = data_sources['index']['name']
- indexer = get_data_provider( tracks_dataset_type )( dataset.get_converted_dataset( trans, tracks_dataset_type ), dataset )
+ converted_dataset = dataset.get_converted_dataset( trans, tracks_dataset_type )
+ indexer = get_data_provider( tracks_dataset_type )( converted_dataset, dataset )
if not indexer.has_data( chrom ):
return messages.NO_DATA
valid_chroms = indexer.valid_chroms()
@@ -505,7 +506,8 @@
#
# Have to choose between indexer and data provider
tracks_dataset_type = data_sources['index']['name']
- indexer = get_data_provider( tracks_dataset_type )( dataset.get_converted_dataset( trans, tracks_dataset_type ), dataset )
+ converted_dataset = dataset.get_converted_dataset( trans, tracks_dataset_type )
+ indexer = get_data_provider( tracks_dataset_type )( converted_dataset, dataset )
summary = indexer.get_summary( chrom, low, high, **kwargs )
if summary is None:
return { 'dataset_type': tracks_dataset_type, 'data': None }
@@ -525,7 +527,9 @@
else:
tracks_dataset_type = data_sources['data']['name']
data_provider_class = get_data_provider( name=tracks_dataset_type, original_dataset=dataset )
- data_provider = data_provider_class( dataset.get_converted_dataset(trans, tracks_dataset_type), dataset )
+ converted_dataset = dataset.get_converted_dataset( trans, tracks_dataset_type )
+ deps = dataset.get_converted_dataset_deps( trans, tracks_dataset_type )
+ data_provider = data_provider_class( converted_dataset=converted_dataset, original_dataset=dataset, dependencies=deps )
# Get and return data from data_provider.
data = data_provider.get_data( chrom, low, high, **kwargs )
@@ -724,6 +728,7 @@
track_type, data_sources = input_dataset.datatype.get_track_type()
data_source = data_sources[ 'data' ]
converted_dataset = input_dataset.get_converted_dataset( trans, data_source )
+ deps = input_dataset.get_converted_dataset_deps( trans, data_source )
#
# Create new HDA for input dataset's subset.
@@ -742,7 +747,8 @@
# Write subset of data to new dataset
data_provider_class = get_data_provider( original_dataset=input_dataset )
data_provider = data_provider_class( original_dataset=input_dataset,
- converted_dataset=converted_dataset )
+ converted_dataset=converted_dataset,
+ deps=deps )
data_provider.write_data_to_file( chrom, low, high, new_dataset.file_name )
# TODO: size not working.
@@ -828,9 +834,11 @@
# necessary.
try:
converted_dataset = dataset.get_converted_dataset( trans, target_type )
- except ValueError:
+ except NoConverterException:
return messages.NO_CONVERTER
-
+ except ConverterDependencyException, dep_error:
+ return { 'kind': messages.ERROR, 'message': dep_error.value }
+
# Check dataset state and return any messages.
msg = None
if converted_dataset and converted_dataset.state == model.Dataset.states.ERROR:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/3b4b5a2d9f10/
changeset: r5412:3b4b5a2d9f10
user: kanwei
date: 2011-04-18 23:02:32
summary: trackster: Small UI tweaks
affected #: 3 files (240 bytes)
--- a/lib/galaxy/web/controllers/tracks.py Mon Apr 18 16:43:35 2011 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Mon Apr 18 17:02:32 2011 -0400
@@ -60,7 +60,7 @@
datasets_param = "f-history"
columns = [
NameColumn( "History Name", key="name", filterable="standard" ),
- grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+ grids.GridColumn( "Last Updated", key="update_time", format=time_ago, visible=False ),
DbKeyPlaceholderColumn( "Dbkey", key="dbkey", model_class=model.HistoryDatasetAssociation, visible=False )
]
num_rows_per_page = 10
--- a/templates/grid_base.mako Mon Apr 18 16:43:35 2011 -0400
+++ b/templates/grid_base.mako Mon Apr 18 17:02:32 2011 -0400
@@ -631,9 +631,11 @@
}
.page-link a, .inactive-link {
padding: 0px 7px 0px 7px;
+ color: #555;
}
.inactive-link, .current-filter {
- font-style: italic;
+ font-weight: bold;
+ color: #000;
}
.submit-image {
background: url(${h.url_for('/static/images/fugue/magnifier-left.png')}) no-repeat right transparent;
@@ -976,6 +978,9 @@
min_page = 1
%>
Page:
+ % if min_page > 1:
+ <span class='page-link'><a href="${url( page=1 )}" page_num="1">1</a></span> ...
+ % endif
%for page_index in range(min_page, max_page + 1):
%if page_index == cur_page_num:
<span class='page-link inactive-link' id="page-link-${page_index}">${page_index}</span>
@@ -984,16 +989,14 @@
<span class='page-link' id="page-link-${page_index}"><a href="${url( args )}" page_num='${page_index}'>${page_index}</a></span>
%endif
%endfor
- %if max_page != num_pages:
+ %if max_page < num_pages:
...
+ <span class='page-link'><a href="${url( page=num_pages )}" page_num="${num_pages}">${num_pages}</a></span>
%endif
</span>
- ## First, last, all links.
- |
- <span class='page-link'><a href="${url( page=1 )}" page_num="1">First</a></span>
- <span class='page-link'><a href="${url( page=num_pages )}" page_num="${num_pages}">Last</a></span>
- <span class='page-link' id='show-all-link-span'><a href="${url( page='all' )}" page_num="all">All</a></span>
+ ## Show all link
+ <span class='page-link' id='show-all-link-span'> | <a href="${url( page='all' )}" page_num="all">Show All</a></span></td></tr>
%endif
--- a/templates/tracks/history_select_grid.mako Mon Apr 18 16:43:35 2011 -0400
+++ b/templates/tracks/history_select_grid.mako Mon Apr 18 17:02:32 2011 -0400
@@ -28,6 +28,9 @@
});
</script><style>
+ .dialog-box .body {
+ overflow-x: hidden;
+ }
.addtracktab {
margin: 0px 5px;
padding: 5px;
@@ -36,6 +39,7 @@
text-align: center;
float: left;
background-color: #ccc;
+ border: 1px solid #ccc;
border-bottom: 0px;
-webkit-border-top-left-radius: 10px;
-webkit-border-top-right-radius: 10px;
Repository URL: https://bitbucket.org/galaxy/galaxy-central/