details:   http://www.bx.psu.edu/hg/galaxy/rev/37406d8ad116
changeset: 3098:37406d8ad116
user:      Enis Afgan <afgane@gmail.com>
date:      Tue Nov 17 16:16:26 2009 -0500
description: merge

diffstat:

 datatypes_conf.xml.sample                              |  429 +++++++------
 dist-eggs.ini                                          |    9 +-
 eggs.ini                                               |   13 +-
 lib/galaxy/datatypes/binary.py                         |  156 +++++
 lib/galaxy/datatypes/data.py                           |   93 +-
 lib/galaxy/datatypes/genetics.py                       |  180 ++---
 lib/galaxy/datatypes/images.py                         |  120 ---
 lib/galaxy/datatypes/metadata.py                       |    4 +-
 lib/galaxy/datatypes/registry.py                       |   16 +-
 lib/galaxy/datatypes/tracks.py                         |    9 +-
 lib/galaxy/jobs/__init__.py                            |   22 +-
 lib/galaxy/jobs/runners/local.py                       |    4 +-
 lib/galaxy/jobs/runners/pbs.py                         |   17 +-
 lib/galaxy/tools/actions/metadata.py                   |    1 +
 lib/galaxy/web/controllers/dataset.py                  |   61 +-
 lib/galaxy/web/controllers/forms.py                    |  252 +++++---
 lib/galaxy/web/controllers/history.py                  |   34 +-
 lib/galaxy/web/controllers/library_common.py           |    2 +-
 lib/galaxy/web/controllers/page.py                     |   60 +-
 lib/galaxy/web/controllers/requests.py                 |  302 +++++----
 lib/galaxy/web/controllers/requests_admin.py           |  663 +++++++++++++-------
 lib/galaxy/web/controllers/user.py                     |    2 -
 lib/galaxy/web/framework/__init__.py                   |   10 +-
 lib/galaxy/web/framework/helpers/grids.py              |    8 +-
 run.sh                                                 |   19 +
 scripts/check_eggs.py                                  |   14 +-
 scripts/check_python.py                                |   19 +-
 scripts/cleanup_datasets/cleanup_datasets.py           |    4 +-
 scripts/scramble/scripts/pysqlite.py                   |   44 +-
 scripts/set_metadata.py                                |   15 +-
 static/scripts/galaxy.base.js                          |   20 +-
 templates/admin/forms/edit_form.mako                   |    2 +-
 templates/admin/forms/grid.mako                        |    1 +
 templates/admin/forms/manage_forms.mako                |   76 --
 templates/admin/forms/show_form_read_only.mako         |    4 +-
 templates/admin/requests/create_request_type.mako      |   92 +-
 templates/admin/requests/grid.mako                     |  218 +-------
 templates/admin/requests/manage_request_types.mako     |   69 +--
 templates/admin/requests/show_request.mako             |    2 +-
 templates/admin/requests/view_request_type.mako        |   70 +-
 templates/dataset/errors.mako                          |   17 +-
 templates/grid_base.mako                               |    4 +-
 templates/grid_base_async.mako                         |  698 ++++++++++++++++++++++
 templates/grid_body_async.mako                         |    5 +
 templates/grid_common_async.mako                       |  155 +++++
 templates/mobile/history/detail.mako                   |    2 +-
 templates/requests/grid.mako                           |  218 +-------
 templates/requests/show_request.mako                   |    2 +-
 templates/root/history_common.mako                     |    4 +-
 templates/tagging_common.mako                          |   13 -
 templates/tool_form.mako                               |   16 +-
 test/base/twilltestcase.py                             |   43 +-
 test/functional/test_forms_and_requests.py             |   44 +-
 test/functional/test_get_data.py                       |  620 ++++++++++++++++---
 test/functional/test_history_functions.py              |    6 +
 test/functional/test_sniffing_and_metadata_settings.py |  262 --------
 test/functional/test_user_info.py                      |    9 +-
 tools/data_source/upload.py                            |   45 +-
 tools/data_source/upload.xml                           |    6 +
 tools/extract/extract_genomic_dna.xml                  |    4 +-
 tools/extract/liftOver_wrapper.xml                     |    2 +-
 61 files changed, 3055 insertions(+), 2256 deletions(-)

diffs (truncated from 7153 to 3000 lines):

diff -r 555afd0bf457 -r 37406d8ad116 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample	Tue Nov 17 16:14:54 2009 -0500
+++ b/datatypes_conf.xml.sample	Tue Nov 17 16:16:26 2009 -0500
@@ -1,211 +1,224 @@
 <?xml version="1.0"?>
 <datatypes>
-  <registration converters_path="lib/galaxy/datatypes/converters">
-    <datatype extension="ab1" type="galaxy.datatypes.images:Ab1" mimetype="application/octet-stream" display_in_upload="true"/>
-    <datatype extension="axt" type="galaxy.datatypes.sequence:Axt" display_in_upload="true"/>
-    <datatype extension="bam" type="galaxy.datatypes.images:Bam"
mimetype="application/octet-stream"/> - <datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true"> - <converter file="bed_to_gff_converter.xml" target_datatype="gff"/> - <converter file="interval_to_coverage.xml" target_datatype="coverage"/> - <converter file="bed_to_interval_index_converter.xml" target_datatype="interval_index"/> - </datatype> - <datatype extension="binseq.zip" type="galaxy.datatypes.images:Binseq" mimetype="application/zip" display_in_upload="true"/> - <datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true"> - <!-- no converters yet --> - </datatype> - <datatype extension="coverage" type="galaxy.datatypes.coverage:LastzCoverage" display_in_upload="true"> - <indexer file="coverage.xml" /> - </datatype> - <datatype extension="customtrack" type="galaxy.datatypes.interval:CustomTrack"/> - <datatype extension="csfasta" type="galaxy.datatypes.sequence:csFasta" display_in_upload="true"/> - <datatype extension="data" type="galaxy.datatypes.data:Data" mimetype="application/octet-stream"/> - <datatype extension="fasta" type="galaxy.datatypes.sequence:Fasta" display_in_upload="true"> - <converter file="fasta_to_tabular_converter.xml" target_datatype="tabular"/> - </datatype> - <datatype extension="fastq" type="galaxy.datatypes.sequence:Fastq" display_in_upload="true"/> - <datatype extension="fastqsanger" type="galaxy.datatypes.sequence:FastqSanger" display_in_upload="true"/> - <datatype extension="genetrack" type="galaxy.datatypes.tracks:GeneTrack"/> - <datatype extension="gff" type="galaxy.datatypes.interval:Gff" display_in_upload="true"> - <converter file="gff_to_bed_converter.xml" target_datatype="bed"/> - </datatype> - <datatype extension="gff3" type="galaxy.datatypes.interval:Gff3" display_in_upload="true"/> - <datatype extension="gif" type="galaxy.datatypes.images:Image" mimetype="image/gif"/> - <datatype extension="gmaj.zip" type="galaxy.datatypes.images:Gmaj" mimetype="application/zip"/> - <datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/> - <datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true"> - <converter file="interval_to_bed_converter.xml" target_datatype="bed"/> - <indexer file="interval_awk.xml" /> - </datatype> - <datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/> - <datatype extension="laj" type="galaxy.datatypes.images:Laj"/> - <datatype extension="lav" type="galaxy.datatypes.sequence:Lav" display_in_upload="true"/> - <datatype extension="maf" type="galaxy.datatypes.sequence:Maf" display_in_upload="true"> - <converter file="maf_to_fasta_converter.xml" target_datatype="fasta"/> - <converter file="maf_to_interval_converter.xml" target_datatype="interval"/> - </datatype> - <datatype extension="pdf" type="galaxy.datatypes.images:Image" mimetype="application/pdf"/> - <datatype extension="png" type="galaxy.datatypes.images:Image" mimetype="image/png"/> - <datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/> - <datatype extension="qualsolid" type="galaxy.datatypes.qualityscore:QualityScoreSOLiD" display_in_upload="true"/> - <datatype extension="qual454" type="galaxy.datatypes.qualityscore:QualityScore454" display_in_upload="true"/> - <datatype extension="sam" type="galaxy.datatypes.tabular:Sam" display_in_upload="true"/> - <datatype extension="scf" type="galaxy.datatypes.images:Scf" mimetype="application/octet-stream" 
display_in_upload="true"/> - <datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/> - <datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/> - <datatype extension="txt" type="galaxy.datatypes.data:Text" display_in_upload="true"/> - <datatype extension="blastxml" type="galaxy.datatypes.xml:BlastXml" display_in_upload="true"/> - <datatype extension="txtseq.zip" type="galaxy.datatypes.images:Txtseq" mimetype="application/zip" display_in_upload="true"/> - <datatype extension="wig" type="galaxy.datatypes.interval:Wiggle" display_in_upload="true"> - <converter file="wiggle_to_array_tree_converter.xml" target_datatype="array_tree"/> - </datatype> - <datatype extension="array_tree" type="galaxy.datatypes.data:Data" /> - <datatype extension="interval_index" type="galaxy.datatypes.data:Data" /> - <!-- EMBOSS TOOLS --> - <datatype extension="acedb" type="galaxy.datatypes.data:Text"/> - <datatype extension="asn1" type="galaxy.datatypes.data:Text"/> - <datatype extension="btwisted" type="galaxy.datatypes.data:Text"/> - <datatype extension="cai" type="galaxy.datatypes.data:Text"/> - <datatype extension="charge" type="galaxy.datatypes.data:Text"/> - <datatype extension="checktrans" type="galaxy.datatypes.data:Text"/> - <datatype extension="chips" type="galaxy.datatypes.data:Text"/> - <datatype extension="clustal" type="galaxy.datatypes.data:Text"/> - <datatype extension="codata" type="galaxy.datatypes.data:Text"/> - <datatype extension="codcmp" type="galaxy.datatypes.data:Text"/> - <datatype extension="coderet" type="galaxy.datatypes.data:Text"/> - <datatype extension="compseq" type="galaxy.datatypes.data:Text"/> - <datatype extension="cpgplot" type="galaxy.datatypes.data:Text"/> - <datatype extension="cpgreport" type="galaxy.datatypes.data:Text"/> - <datatype extension="cusp" type="galaxy.datatypes.data:Text"/> - <datatype extension="cut" type="galaxy.datatypes.data:Text"/> - <datatype extension="dan" type="galaxy.datatypes.data:Text"/> - <datatype extension="dbmotif" type="galaxy.datatypes.data:Text"/> - <datatype extension="diffseq" type="galaxy.datatypes.data:Text"/> - <datatype extension="digest" type="galaxy.datatypes.data:Text"/> - <datatype extension="dreg" type="galaxy.datatypes.data:Text"/> - <datatype extension="einverted" type="galaxy.datatypes.data:Text"/> - <datatype extension="embl" type="galaxy.datatypes.data:Text"/> - <datatype extension="epestfind" type="galaxy.datatypes.data:Text"/> - <datatype extension="equicktandem" type="galaxy.datatypes.data:Text"/> - <datatype extension="est2genome" type="galaxy.datatypes.data:Text"/> - <datatype extension="etandem" type="galaxy.datatypes.data:Text"/> - <datatype extension="excel" type="galaxy.datatypes.data:Text"/> - <datatype extension="feattable" type="galaxy.datatypes.data:Text"/> - <datatype extension="fitch" type="galaxy.datatypes.data:Text"/> - <datatype extension="freak" type="galaxy.datatypes.data:Text"/> - <datatype extension="fuzznuc" type="galaxy.datatypes.data:Text"/> - <datatype extension="fuzzpro" type="galaxy.datatypes.data:Text"/> - <datatype extension="fuzztran" type="galaxy.datatypes.data:Text"/> - <datatype extension="garnier" type="galaxy.datatypes.data:Text"/> - <datatype extension="gcg" type="galaxy.datatypes.data:Text"/> - <datatype extension="geecee" type="galaxy.datatypes.data:Text"/> - <datatype extension="genbank" type="galaxy.datatypes.data:Text"/> - <datatype extension="helixturnhelix" type="galaxy.datatypes.data:Text"/> - <datatype 
extension="hennig86" type="galaxy.datatypes.data:Text"/> - <datatype extension="hmoment" type="galaxy.datatypes.data:Text"/> - <datatype extension="ig" type="galaxy.datatypes.data:Text"/> - <datatype extension="isochore" type="galaxy.datatypes.data:Text"/> - <datatype extension="jackknifer" type="galaxy.datatypes.data:Text"/> - <datatype extension="jackknifernon" type="galaxy.datatypes.data:Text"/> - <datatype extension="markx10" type="galaxy.datatypes.data:Text"/> - <datatype extension="markx1" type="galaxy.datatypes.data:Text"/> - <datatype extension="markx0" type="galaxy.datatypes.data:Text"/> - <datatype extension="markx3" type="galaxy.datatypes.data:Text"/> - <datatype extension="markx2" type="galaxy.datatypes.data:Text"/> - <datatype extension="match" type="galaxy.datatypes.data:Text"/> - <datatype extension="mega" type="galaxy.datatypes.data:Text"/> - <datatype extension="meganon" type="galaxy.datatypes.data:Text"/> - <datatype extension="motif" type="galaxy.datatypes.data:Text"/> - <datatype extension="msf" type="galaxy.datatypes.data:Text"/> - <datatype extension="nametable" type="galaxy.datatypes.data:Text"/> - <datatype extension="ncbi" type="galaxy.datatypes.data:Text"/> - <datatype extension="needle" type="galaxy.datatypes.data:Text"/> - <datatype extension="newcpgreport" type="galaxy.datatypes.data:Text"/> - <datatype extension="newcpgseek" type="galaxy.datatypes.data:Text"/> - <datatype extension="nexus" type="galaxy.datatypes.data:Text"/> - <datatype extension="nexusnon" type="galaxy.datatypes.data:Text"/> - <datatype extension="noreturn" type="galaxy.datatypes.data:Text"/> - <datatype extension="pair" type="galaxy.datatypes.data:Text"/> - <datatype extension="palindrome" type="galaxy.datatypes.data:Text"/> - <datatype extension="pepcoil" type="galaxy.datatypes.data:Text"/> - <datatype extension="pepinfo" type="galaxy.datatypes.data:Text"/> - <datatype extension="pepstats" type="galaxy.datatypes.data:Text"/> - <datatype extension="phylip" type="galaxy.datatypes.data:Text"/> - <datatype extension="phylipnon" type="galaxy.datatypes.data:Text"/> - <datatype extension="pir" type="galaxy.datatypes.data:Text"/> - <datatype extension="polydot" type="galaxy.datatypes.data:Text"/> - <datatype extension="preg" type="galaxy.datatypes.data:Text"/> - <datatype extension="prettyseq" type="galaxy.datatypes.data:Text"/> - <datatype extension="primersearch" type="galaxy.datatypes.data:Text"/> - <datatype extension="regions" type="galaxy.datatypes.data:Text"/> - <datatype extension="score" type="galaxy.datatypes.data:Text"/> - <datatype extension="selex" type="galaxy.datatypes.data:Text"/> - <datatype extension="seqtable" type="galaxy.datatypes.data:Text"/> - <datatype extension="showfeat" type="galaxy.datatypes.data:Text"/> - <datatype extension="showorf" type="galaxy.datatypes.data:Text"/> - <datatype extension="simple" type="galaxy.datatypes.data:Text"/> - <datatype extension="sixpack" type="galaxy.datatypes.data:Text"/> - <datatype extension="srs" type="galaxy.datatypes.data:Text"/> - <datatype extension="srspair" type="galaxy.datatypes.data:Text"/> - <datatype extension="staden" type="galaxy.datatypes.data:Text"/> - <datatype extension="strider" type="galaxy.datatypes.data:Text"/> - <datatype extension="supermatcher" type="galaxy.datatypes.data:Text"/> - <datatype extension="swiss" type="galaxy.datatypes.data:Text"/> - <datatype extension="syco" type="galaxy.datatypes.data:Text"/> - <datatype extension="table" type="galaxy.datatypes.data:Text"/> - <datatype extension="textsearch" 
type="galaxy.datatypes.data:Text"/> - <datatype extension="vectorstrip" type="galaxy.datatypes.data:Text"/> - <datatype extension="wobble" type="galaxy.datatypes.data:Text"/> - <datatype extension="wordcount" type="galaxy.datatypes.data:Text"/> - <datatype extension="tagseq" type="galaxy.datatypes.data:Text"/> - <!-- Start RGenetics Datatypes --> - <!-- genome graphs ucsc file - first col is always marker then numeric values to plot --> - <datatype extension="gg" type="galaxy.datatypes.genetics:GenomeGraphs"/> - <datatype extension="rgenetics" type="galaxy.datatypes.genetics:Rgenetics"/> - <!-- linkage format pedigree (separate .map file) --> - <datatype extension="lped" type="galaxy.datatypes.genetics:Lped" display_in_upload="true"/> - <!-- plink compressed file - has bed extension unfortunately --> - <datatype extension="pbed" type="galaxy.datatypes.genetics:Pbed" display_in_upload="true"/> - <!-- eigenstrat pedigree input file --> - <datatype extension="eigenstratgeno" type="galaxy.datatypes.genetics:Eigenstratgeno"/> - <!-- eigenstrat pca output file for adjusted eigenQTL eg --> - <datatype extension="eigenstratpca" type="galaxy.datatypes.genetics:Eigenstratpca"/> - <!-- fbat/pbat format pedigree (header row of marker names) --> - <datatype extension="fped" type="galaxy.datatypes.genetics:Fped"/> - <!-- part of linkage format pedigree --> - <datatype extension="lmap" type="galaxy.datatypes.genetics:Lmap"/> - <!-- phenotype file - fbat format --> - <datatype extension="fphe" type="galaxy.datatypes.genetics:Fphe"/> - <!-- phenotype file - plink format --> - <datatype extension="pphe" type="galaxy.datatypes.genetics:Pphe"/> - <datatype extension="snptest" type="galaxy.datatypes.genetics:Snptest"/> - <datatype extension="snpmatrix" type="galaxy.datatypes.genetics:SNPMatrix"/> - <datatype extension="xls" type="galaxy.datatypes.tabular:Tabular"/> - <!-- End RGenetics Datatypes --> - </registration> - <sniffers> - <!-- - The order in which Galaxy attempts to determine data types is - important because some formats are much more loosely defined - than others. The following list should be the most rigidly - defined format first, followed by next-most rigidly defined, - and so on. 
- --> - <sniffer type="galaxy.datatypes.xml:BlastXml"/> - <sniffer type="galaxy.datatypes.sequence:Maf"/> - <sniffer type="galaxy.datatypes.sequence:Lav"/> - <sniffer type="galaxy.datatypes.sequence:csFasta"/> - <sniffer type="galaxy.datatypes.qualityscore:QualityScoreSOLiD"/> - <sniffer type="galaxy.datatypes.qualityscore:QualityScore454"/> - <sniffer type="galaxy.datatypes.sequence:Fasta"/> - <sniffer type="galaxy.datatypes.sequence:Fastq"/> - <sniffer type="galaxy.datatypes.interval:Wiggle"/> - <sniffer type="galaxy.datatypes.images:Html"/> - <sniffer type="galaxy.datatypes.sequence:Axt"/> - <sniffer type="galaxy.datatypes.interval:Bed"/> - <sniffer type="galaxy.datatypes.interval:CustomTrack"/> - <sniffer type="galaxy.datatypes.interval:Gff"/> - <sniffer type="galaxy.datatypes.interval:Gff3"/> - <sniffer type="galaxy.datatypes.interval:Interval"/> - <sniffer type="galaxy.datatypes.tabular:Sam"/> - </sniffers> + <registration converters_path="lib/galaxy/datatypes/converters"> + <datatype extension="ab1" type="galaxy.datatypes.binary:Ab1" mimetype="application/octet-stream" display_in_upload="true"/> + <datatype extension="axt" type="galaxy.datatypes.sequence:Axt" display_in_upload="true"/> + <datatype extension="bam" type="galaxy.datatypes.binary:Bam" mimetype="application/octet-stream"/> + <datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true"> + <converter file="bed_to_gff_converter.xml" target_datatype="gff"/> + <converter file="interval_to_coverage.xml" target_datatype="coverage"/> + <converter file="bed_to_interval_index_converter.xml" target_datatype="interval_index"/> + </datatype> + <datatype extension="binseq.zip" type="galaxy.datatypes.binary:Binseq" mimetype="application/zip" display_in_upload="true"/> + <datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true"> + <!-- no converters yet --> + </datatype> + <datatype extension="coverage" type="galaxy.datatypes.coverage:LastzCoverage" display_in_upload="true"> + <indexer file="coverage.xml" /> + </datatype> + <datatype extension="customtrack" type="galaxy.datatypes.interval:CustomTrack"/> + <datatype extension="csfasta" type="galaxy.datatypes.sequence:csFasta" display_in_upload="true"/> + <datatype extension="data" type="galaxy.datatypes.data:Data" mimetype="application/octet-stream"/> + <datatype extension="fasta" type="galaxy.datatypes.sequence:Fasta" display_in_upload="true"> + <converter file="fasta_to_tabular_converter.xml" target_datatype="tabular"/> + </datatype> + <datatype extension="fastq" type="galaxy.datatypes.sequence:Fastq" display_in_upload="true"/> + <datatype extension="fastqsanger" type="galaxy.datatypes.sequence:FastqSanger" display_in_upload="true"/> + <datatype extension="genetrack" type="galaxy.datatypes.tracks:GeneTrack"/> + <datatype extension="gff" type="galaxy.datatypes.interval:Gff" display_in_upload="true"> + <converter file="gff_to_bed_converter.xml" target_datatype="bed"/> + </datatype> + <datatype extension="gff3" type="galaxy.datatypes.interval:Gff3" display_in_upload="true"/> + <datatype extension="gif" type="galaxy.datatypes.images:Image" mimetype="image/gif"/> + <datatype extension="gmaj.zip" type="galaxy.datatypes.images:Gmaj" mimetype="application/zip"/> + <datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/> + <datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true"> + <converter file="interval_to_bed_converter.xml" target_datatype="bed"/> + <indexer 
file="interval_awk.xml" /> + </datatype> + <datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/> + <datatype extension="laj" type="galaxy.datatypes.images:Laj"/> + <datatype extension="lav" type="galaxy.datatypes.sequence:Lav" display_in_upload="true"/> + <datatype extension="maf" type="galaxy.datatypes.sequence:Maf" display_in_upload="true"> + <converter file="maf_to_fasta_converter.xml" target_datatype="fasta"/> + <converter file="maf_to_interval_converter.xml" target_datatype="interval"/> + </datatype> + <datatype extension="pdf" type="galaxy.datatypes.images:Image" mimetype="application/pdf"/> + <datatype extension="png" type="galaxy.datatypes.images:Image" mimetype="image/png"/> + <datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/> + <datatype extension="qualsolid" type="galaxy.datatypes.qualityscore:QualityScoreSOLiD" display_in_upload="true"/> + <datatype extension="qual454" type="galaxy.datatypes.qualityscore:QualityScore454" display_in_upload="true"/> + <datatype extension="sam" type="galaxy.datatypes.tabular:Sam" display_in_upload="true"/> + <datatype extension="scf" type="galaxy.datatypes.binary:Scf" mimetype="application/octet-stream" display_in_upload="true"/> + <datatype extension="sff" type="galaxy.datatypes.binary:Sff" mimetype="application/octet-stream" display_in_upload="true"/> + <datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/> + <datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/> + <datatype extension="txt" type="galaxy.datatypes.data:Text" display_in_upload="true"/> + <datatype extension="blastxml" type="galaxy.datatypes.xml:BlastXml" display_in_upload="true"/> + <datatype extension="txtseq.zip" type="galaxy.datatypes.data:Txtseq" mimetype="application/zip" display_in_upload="true"/> + <datatype extension="wig" type="galaxy.datatypes.interval:Wiggle" display_in_upload="true"> + <converter file="wiggle_to_array_tree_converter.xml" target_datatype="array_tree"/> + </datatype> + <datatype extension="array_tree" type="galaxy.datatypes.data:Data" /> + <datatype extension="interval_index" type="galaxy.datatypes.data:Data" /> + <!-- Start EMBOSS tools --> + <datatype extension="acedb" type="galaxy.datatypes.data:Text"/> + <datatype extension="asn1" type="galaxy.datatypes.data:Text"/> + <datatype extension="btwisted" type="galaxy.datatypes.data:Text"/> + <datatype extension="cai" type="galaxy.datatypes.data:Text"/> + <datatype extension="charge" type="galaxy.datatypes.data:Text"/> + <datatype extension="checktrans" type="galaxy.datatypes.data:Text"/> + <datatype extension="chips" type="galaxy.datatypes.data:Text"/> + <datatype extension="clustal" type="galaxy.datatypes.data:Text"/> + <datatype extension="codata" type="galaxy.datatypes.data:Text"/> + <datatype extension="codcmp" type="galaxy.datatypes.data:Text"/> + <datatype extension="coderet" type="galaxy.datatypes.data:Text"/> + <datatype extension="compseq" type="galaxy.datatypes.data:Text"/> + <datatype extension="cpgplot" type="galaxy.datatypes.data:Text"/> + <datatype extension="cpgreport" type="galaxy.datatypes.data:Text"/> + <datatype extension="cusp" type="galaxy.datatypes.data:Text"/> + <datatype extension="cut" type="galaxy.datatypes.data:Text"/> + <datatype extension="dan" type="galaxy.datatypes.data:Text"/> + <datatype extension="dbmotif" type="galaxy.datatypes.data:Text"/> + <datatype extension="diffseq" 
type="galaxy.datatypes.data:Text"/> + <datatype extension="digest" type="galaxy.datatypes.data:Text"/> + <datatype extension="dreg" type="galaxy.datatypes.data:Text"/> + <datatype extension="einverted" type="galaxy.datatypes.data:Text"/> + <datatype extension="embl" type="galaxy.datatypes.data:Text"/> + <datatype extension="epestfind" type="galaxy.datatypes.data:Text"/> + <datatype extension="equicktandem" type="galaxy.datatypes.data:Text"/> + <datatype extension="est2genome" type="galaxy.datatypes.data:Text"/> + <datatype extension="etandem" type="galaxy.datatypes.data:Text"/> + <datatype extension="excel" type="galaxy.datatypes.data:Text"/> + <datatype extension="feattable" type="galaxy.datatypes.data:Text"/> + <datatype extension="fitch" type="galaxy.datatypes.data:Text"/> + <datatype extension="freak" type="galaxy.datatypes.data:Text"/> + <datatype extension="fuzznuc" type="galaxy.datatypes.data:Text"/> + <datatype extension="fuzzpro" type="galaxy.datatypes.data:Text"/> + <datatype extension="fuzztran" type="galaxy.datatypes.data:Text"/> + <datatype extension="garnier" type="galaxy.datatypes.data:Text"/> + <datatype extension="gcg" type="galaxy.datatypes.data:Text"/> + <datatype extension="geecee" type="galaxy.datatypes.data:Text"/> + <datatype extension="genbank" type="galaxy.datatypes.data:Text"/> + <datatype extension="helixturnhelix" type="galaxy.datatypes.data:Text"/> + <datatype extension="hennig86" type="galaxy.datatypes.data:Text"/> + <datatype extension="hmoment" type="galaxy.datatypes.data:Text"/> + <datatype extension="ig" type="galaxy.datatypes.data:Text"/> + <datatype extension="isochore" type="galaxy.datatypes.data:Text"/> + <datatype extension="jackknifer" type="galaxy.datatypes.data:Text"/> + <datatype extension="jackknifernon" type="galaxy.datatypes.data:Text"/> + <datatype extension="markx10" type="galaxy.datatypes.data:Text"/> + <datatype extension="markx1" type="galaxy.datatypes.data:Text"/> + <datatype extension="markx0" type="galaxy.datatypes.data:Text"/> + <datatype extension="markx3" type="galaxy.datatypes.data:Text"/> + <datatype extension="markx2" type="galaxy.datatypes.data:Text"/> + <datatype extension="match" type="galaxy.datatypes.data:Text"/> + <datatype extension="mega" type="galaxy.datatypes.data:Text"/> + <datatype extension="meganon" type="galaxy.datatypes.data:Text"/> + <datatype extension="motif" type="galaxy.datatypes.data:Text"/> + <datatype extension="msf" type="galaxy.datatypes.data:Text"/> + <datatype extension="nametable" type="galaxy.datatypes.data:Text"/> + <datatype extension="ncbi" type="galaxy.datatypes.data:Text"/> + <datatype extension="needle" type="galaxy.datatypes.data:Text"/> + <datatype extension="newcpgreport" type="galaxy.datatypes.data:Text"/> + <datatype extension="newcpgseek" type="galaxy.datatypes.data:Text"/> + <datatype extension="nexus" type="galaxy.datatypes.data:Text"/> + <datatype extension="nexusnon" type="galaxy.datatypes.data:Text"/> + <datatype extension="noreturn" type="galaxy.datatypes.data:Text"/> + <datatype extension="pair" type="galaxy.datatypes.data:Text"/> + <datatype extension="palindrome" type="galaxy.datatypes.data:Text"/> + <datatype extension="pepcoil" type="galaxy.datatypes.data:Text"/> + <datatype extension="pepinfo" type="galaxy.datatypes.data:Text"/> + <datatype extension="pepstats" type="galaxy.datatypes.data:Text"/> + <datatype extension="phylip" type="galaxy.datatypes.data:Text"/> + <datatype extension="phylipnon" type="galaxy.datatypes.data:Text"/> + <datatype extension="pir" 
type="galaxy.datatypes.data:Text"/> + <datatype extension="polydot" type="galaxy.datatypes.data:Text"/> + <datatype extension="preg" type="galaxy.datatypes.data:Text"/> + <datatype extension="prettyseq" type="galaxy.datatypes.data:Text"/> + <datatype extension="primersearch" type="galaxy.datatypes.data:Text"/> + <datatype extension="regions" type="galaxy.datatypes.data:Text"/> + <datatype extension="score" type="galaxy.datatypes.data:Text"/> + <datatype extension="selex" type="galaxy.datatypes.data:Text"/> + <datatype extension="seqtable" type="galaxy.datatypes.data:Text"/> + <datatype extension="showfeat" type="galaxy.datatypes.data:Text"/> + <datatype extension="showorf" type="galaxy.datatypes.data:Text"/> + <datatype extension="simple" type="galaxy.datatypes.data:Text"/> + <datatype extension="sixpack" type="galaxy.datatypes.data:Text"/> + <datatype extension="srs" type="galaxy.datatypes.data:Text"/> + <datatype extension="srspair" type="galaxy.datatypes.data:Text"/> + <datatype extension="staden" type="galaxy.datatypes.data:Text"/> + <datatype extension="strider" type="galaxy.datatypes.data:Text"/> + <datatype extension="supermatcher" type="galaxy.datatypes.data:Text"/> + <datatype extension="swiss" type="galaxy.datatypes.data:Text"/> + <datatype extension="syco" type="galaxy.datatypes.data:Text"/> + <datatype extension="table" type="galaxy.datatypes.data:Text"/> + <datatype extension="textsearch" type="galaxy.datatypes.data:Text"/> + <datatype extension="vectorstrip" type="galaxy.datatypes.data:Text"/> + <datatype extension="wobble" type="galaxy.datatypes.data:Text"/> + <datatype extension="wordcount" type="galaxy.datatypes.data:Text"/> + <datatype extension="tagseq" type="galaxy.datatypes.data:Text"/> + <!-- End EMBOSS tools --> + <!-- Start RGenetics Datatypes --> + <datatype extension="affybatch" type="galaxy.datatypes.genetics:Affybatch" display_in_upload="true"/> + <!-- eigenstrat pedigree input file --> + <datatype extension="eigenstratgeno" type="galaxy.datatypes.genetics:Eigenstratgeno"/> + <!-- eigenstrat pca output file for adjusted eigenQTL eg --> + <datatype extension="eigenstratpca" type="galaxy.datatypes.genetics:Eigenstratpca"/> + <datatype extension="eset" type="galaxy.datatypes.genetics:Eset" display_in_upload="true" /> + <!-- fbat/pbat format pedigree (header row of marker names) --> + <datatype extension="fped" type="galaxy.datatypes.genetics:Fped" display_in_upload="true"/> + <!-- phenotype file - fbat format --> + <datatype extension="fphe" type="galaxy.datatypes.genetics:Fphe" display_in_upload="true" mimetype="text/html"/> + <!-- genome graphs ucsc file - first col is always marker then numeric values to plot --> + <datatype extension="gg" type="galaxy.datatypes.genetics:GenomeGraphs"/> + <!-- part of linkage format pedigree --> + <datatype extension="lmap" type="galaxy.datatypes.genetics:Lmap" display_in_upload="true"/> + <datatype extension="malist" type="galaxy.datatypes.genetics:MAlist" display_in_upload="true"/> + <!-- linkage format pedigree (separate .map file) --> + <datatype extension="lped" type="galaxy.datatypes.genetics:Lped" display_in_upload="true"> + <converter file="lped_to_fped_converter.xml" target_datatype="fped"/> + <converter file="lped_to_pbed_converter.xml" target_datatype="pbed"/> + </datatype> + <!-- plink compressed file - has bed extension unfortunately --> + <datatype extension="pbed" type="galaxy.datatypes.genetics:Pbed" display_in_upload="true"> + <converter file="pbed_to_lped_converter.xml" target_datatype="lped"/> + </datatype> + 
<datatype extension="pheno" type="galaxy.datatypes.genetics:Pheno"/> + <!-- phenotype file - plink format --> + <datatype extension="pphe" type="galaxy.datatypes.genetics:Pphe" display_in_upload="true" mimetype="text/html"/> + <datatype extension="rexpbase" type="galaxy.datatypes.genetics:RexpBase"/> + <datatype extension="rgenetics" type="galaxy.datatypes.genetics:Rgenetics"/> + <datatype extension="snptest" type="galaxy.datatypes.genetics:Snptest" display_in_upload="true"/> + <datatype extension="snpmatrix" type="galaxy.datatypes.genetics:SNPMatrix" display_in_upload="true"/> + <datatype extension="xls" type="galaxy.datatypes.tabular:Tabular"/> + <!-- End RGenetics Datatypes --> + </registration> + <sniffers> + <!-- + The order in which Galaxy attempts to determine data types is + important because some formats are much more loosely defined + than others. The following list should be the most rigidly + defined format first, followed by next-most rigidly defined, + and so on. + --> + <sniffer type="galaxy.datatypes.binary:Sff"/> + <sniffer type="galaxy.datatypes.xml:BlastXml"/> + <sniffer type="galaxy.datatypes.sequence:Maf"/> + <sniffer type="galaxy.datatypes.sequence:Lav"/> + <sniffer type="galaxy.datatypes.sequence:csFasta"/> + <sniffer type="galaxy.datatypes.qualityscore:QualityScoreSOLiD"/> + <sniffer type="galaxy.datatypes.qualityscore:QualityScore454"/> + <sniffer type="galaxy.datatypes.sequence:Fasta"/> + <sniffer type="galaxy.datatypes.sequence:Fastq"/> + <sniffer type="galaxy.datatypes.interval:Wiggle"/> + <sniffer type="galaxy.datatypes.images:Html"/> + <sniffer type="galaxy.datatypes.sequence:Axt"/> + <sniffer type="galaxy.datatypes.interval:Bed"/> + <sniffer type="galaxy.datatypes.interval:CustomTrack"/> + <sniffer type="galaxy.datatypes.interval:Gff"/> + <sniffer type="galaxy.datatypes.interval:Gff3"/> + <sniffer type="galaxy.datatypes.interval:Interval"/> + <sniffer type="galaxy.datatypes.tabular:Sam"/> + </sniffers> </datatypes> diff -r 555afd0bf457 -r 37406d8ad116 dist-eggs.ini --- a/dist-eggs.ini Tue Nov 17 16:14:54 2009 -0500 +++ b/dist-eggs.ini Tue Nov 17 16:16:26 2009 -0500 @@ -23,6 +23,9 @@ py2.5-macosx-10.3-fat-ucs2 = medeski.bx.psu.edu /usr/local/bin/python2.5 py2.6-macosx-10.3-fat-ucs2 = medeski.bx.psu.edu /usr/local/bin/python2.6 py2.5-macosx-10.5-i386-ucs2 = lion.bx.psu.edu /usr/bin/python2.5 +py2.4-solaris-2.10-i86pc-ucs2 = thumper.bx.psu.edu /depot/projects/pythons/solaris-2.10-i86pc-ucs2/bin/python2.4 +py2.5-solaris-2.10-i86pc-ucs2 = thumper.bx.psu.edu /depot/projects/pythons/solaris-2.10-i86pc-ucs2/bin/python2.5 +py2.6-solaris-2.10-i86pc-ucs2 = thumper.bx.psu.edu /depot/projects/pythons/solaris-2.10-i86pc-ucs2/bin/python2.6 py2.4-solaris-2.11-i86pc-ucs2 = victory.bx.psu.edu /depot/projects/pythons/solaris-2.11-i86pc-ucs2/bin/python2.4 py2.5-solaris-2.11-i86pc-ucs2 = victory.bx.psu.edu /depot/projects/pythons/solaris-2.11-i86pc-ucs2/bin/python2.5 py2.6-solaris-2.11-i86pc-ucs2 = victory.bx.psu.edu /depot/projects/pythons/solaris-2.11-i86pc-ucs2/bin/python2.6 @@ -47,9 +50,9 @@ py2.5-macosx = py2.5-macosx-10.3-fat-ucs2 py2.5-macosx-10.5-i386-ucs2 py2.6-macosx = py2.6-macosx-10.3-fat-ucs2 macosx = py2.4-macosx py2.5-macosx py2.6-macosx -py2.4-solaris = py2.4-solaris-2.11-i86pc-ucs2 py2.4-solaris-2.10-sun4u-ucs2 -py2.5-solaris = py2.5-solaris-2.11-i86pc-ucs2 py2.5-solaris-2.10-sun4u-ucs2 -py2.6-solaris = py2.6-solaris-2.11-i86pc-ucs2 py2.6-solaris-2.10-sun4u-ucs2 +py2.4-solaris = py2.4-solaris-2.10-i86pc-ucs2 py2.4-solaris-2.11-i86pc-ucs2 
py2.4-solaris-2.10-sun4u-ucs2 +py2.5-solaris = py2.5-solaris-2.10-i86pc-ucs2 py2.5-solaris-2.11-i86pc-ucs2 py2.5-solaris-2.10-sun4u-ucs2 +py2.6-solaris = py2.6-solaris-2.10-i86pc-ucs2 py2.6-solaris-2.11-i86pc-ucs2 py2.6-solaris-2.10-sun4u-ucs2 solaris = py2.4-solaris py2.5-solaris py2.6-solaris py2.4-all = py2.4-linux py2.4-macosx py2.4-solaris py2.5-all = py2.5-linux py2.5-macosx py2.5-solaris diff -r 555afd0bf457 -r 37406d8ad116 eggs.ini --- a/eggs.ini Tue Nov 17 16:14:54 2009 -0500 +++ b/eggs.ini Tue Nov 17 16:16:26 2009 -0500 @@ -19,7 +19,7 @@ pbs_python = 2.9.4 psycopg2 = 2.0.6 pycrypto = 2.0.1 -pysqlite = 2.3.5 +pysqlite = 2.5.6 python_lzo = 1.08 threadframe = 0.2 guppy = 0.1.8 @@ -57,11 +57,12 @@ ; extra version information [tags] psycopg2 = _8.2.6_static -pysqlite = _3.5.4_static +pysqlite = _static MySQL_python = _5.0.67_static python_lzo = _static bx_python = _dev_r4bf1f32e6b76 -GeneTrack = _dev_raa786e9fc131d998e532a1aef39d108850c9e93d +GeneTrack = _dev_e380f21c704218622155b9d230a44b3c9c452524 +SQLAlchemy = _dev_r6498 ; nose = .dev_r7156749efc58 ; source location, necessary for scrambling @@ -73,7 +74,7 @@ pbs_python = http://ftp.sara.nl/pub/outgoing/pbs_python-2.9.4.tar.gz psycopg2 = http://initd.org/pub/software/psycopg/PSYCOPG-2-0/psycopg2-2.0.6.tar.gz ftp://ftp-archives.postgresql.org/pub/source/v8.2.6/postgresql-8.2.6.tar.bz2 pycrypto = http://www.amk.ca/files/python/crypto/pycrypto-2.0.1.tar.gz -pysqlite = http://initd.org/pub/software/pysqlite/releases/2.3/2.3.5/pysqlite-2.3.5.tar... http://www.sqlite.org/sqlite-source-3_5_4.zip +pysqlite = http://pypi.python.org/packages/source/p/pysqlite/pysqlite-2.5.6.tar.gz python_lzo = http://www.oberhumer.com/opensource/lzo/download/LZO-v1/python-lzo-1.08.tar.... http://www.oberhumer.com/opensource/lzo/download/LZO-v1/lzo-1.08.tar.gz threadframe = http://www.majid.info/python/threadframe/threadframe-0.2.tar.gz guppy = http://pypi.python.org/packages/source/g/guppy/guppy-0.1.8.tar.gz @@ -82,7 +83,7 @@ decorator = http://pypi.python.org/packages/source/d/decorator/decorator-3.1.2.tar.gz docutils = http://downloads.sourceforge.net/docutils/docutils-0.4.tar.gz elementtree = http://effbot.org/downloads/elementtree-1.2.6-20050316.tar.gz -GeneTrack = http://github.com/ialbert/genetrack-central/tarball/aa786e9fc131d998e532a1ae... +GeneTrack = http://github.com/ialbert/genetrack-central/tarball/e380f21c704218622155b9d2... lrucache = http://evan.prodromou.name/lrucache/lrucache-0.2.tar.gz Mako = http://www.makotemplates.org/downloads/Mako-0.2.5.tar.gz nose = http://pypi.python.org/packages/source/n/nose/nose-0.11.1.tar.gz @@ -93,7 +94,7 @@ PSI = http://pypi.python.org/packages/source/P/PSI/PSI-0.3b1.1.tar.gz Routes = http://pypi.python.org/packages/source/R/Routes/Routes-1.11.tar.gz simplejson = http://cheeseshop.python.org/packages/source/s/simplejson/simplejson-1.5.tar... -SQLAlchemy = http://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-0.5.6.tar.gz +SQLAlchemy = http://dist.g2.bx.psu.edu/SQLAlchemy-0.5.6_r6498.tar.bz2 sqlalchemy_migrate = http://pypi.python.org/packages/source/s/sqlalchemy-migrate/sqlalchemy-migra... 
Tempita = http://pypi.python.org/packages/source/T/Tempita/Tempita-0.1.tar.gz
twill = http://darcs.idyll.org/~t/projects/twill-0.9.tar.gz

diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/datatypes/binary.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/binary.py	Tue Nov 17 16:16:26 2009 -0500
@@ -0,0 +1,156 @@
+"""
+Binary classes
+"""
+
+import data, logging, binascii
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes import metadata
+from galaxy.datatypes.sniff import *
+from urllib import urlencode, quote_plus
+import zipfile
+import os, subprocess, tempfile
+
+log = logging.getLogger(__name__)
+
+sniffable_binary_formats = [ 'sff' ]
+# Currently these supported binary data types must be manually set on upload
+unsniffable_binary_formats = [ 'ab1', 'scf' ]
+
+class Binary( data.Data ):
+    """Binary data"""
+    def set_peek( self, dataset ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = 'binary data'
+            dataset.blurb = 'data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+class Ab1( Binary ):
+    """Class describing an ab1 binary sequence file"""
+    file_ext = "ab1"
+    def set_peek( self, dataset ):
+        if not dataset.dataset.purged:
+            export_url = "/history_add_to?" + urlencode( {'history_id':dataset.history_id,'ext':'ab1','name':'ab1 sequence','info':'Sequence file','dbkey':dataset.dbkey} )
+            dataset.peek = "Binary ab1 sequence file"
+            dataset.blurb = data.nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Binary ab1 sequence file (%s)" % ( data.nice_size( dataset.get_size() ) )
+
+class Bam( Binary ):
+    """Class describing a BAM binary file"""
+    file_ext = "bam"
+    MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
+    def init_meta( self, dataset, copy_from=None ):
+        Binary.init_meta( self, dataset, copy_from=copy_from )
+    def set_meta( self, dataset, overwrite = True, **kwd ):
+        """
+        Sets index for BAM file.
+        """
+        index_file = dataset.metadata.bam_index
+        if not index_file:
+            index_file = dataset.metadata.spec['bam_index'].param.new_file( dataset = dataset )
+        tmp_dir = tempfile.gettempdir()
+        tmpf1 = tempfile.NamedTemporaryFile( dir=tmp_dir )
+        tmpf1bai = '%s.bai' % tmpf1.name
+        try:
+            os.system( 'cd %s' % tmp_dir )
+            os.system( 'cp %s %s' % ( dataset.file_name, tmpf1.name ) )
+            os.system( 'samtools index %s' % tmpf1.name )
+            os.system( 'cp %s %s' % ( tmpf1bai, index_file.file_name ) )
+        except Exception, ex:
+            sys.stderr.write( 'There was a problem creating the index for the BAM file\n%s\n' + str( ex ) )
+        tmpf1.close()
+        if os.path.exists( tmpf1bai ):
+            os.remove( tmpf1bai )
+        dataset.metadata.bam_index = index_file
+    def set_peek( self, dataset ):
+        if not dataset.dataset.purged:
+            export_url = "/history_add_to?" + urlencode( {'history_id':dataset.history_id,'ext':'bam','name':'bam alignments','info':'Alignments file','dbkey':dataset.dbkey} )
+            dataset.peek = "Binary bam alignments file"
+            dataset.blurb = data.nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
+    def get_mime( self ):
+        """Returns the mime type of the datatype"""
+        return 'application/octet-stream'
+
+class Binseq( Binary ):
+    """Class describing a zip archive of binary sequence files"""
+    file_ext = "binseq.zip"
+    def set_peek( self, dataset ):
+        if not dataset.dataset.purged:
+            zip_file = zipfile.ZipFile( dataset.file_name, "r" )
+            num_files = len( zip_file.namelist() )
+            dataset.peek = "Archive of %s binary sequence files" % ( str( num_files ) )
+            dataset.blurb = data.nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Binary sequence file archive (%s)" % ( data.nice_size( dataset.get_size() ) )
+    def get_mime( self ):
+        """Returns the mime type of the datatype"""
+        return 'application/zip'
+
+class Scf( Binary ):
+    """Class describing an scf binary sequence file"""
+    file_ext = "scf"
+    def set_peek( self, dataset ):
+        if not dataset.dataset.purged:
+            export_url = "/history_add_to?" + urlencode({'history_id':dataset.history_id,'ext':'scf','name':'scf sequence','info':'Sequence file','dbkey':dataset.dbkey})
+            dataset.peek = "Binary scf sequence file"
+            dataset.blurb = data.nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Binary scf sequence file (%s)" % ( data.nice_size( dataset.get_size() ) )
+
+class Sff( Binary ):
+    """ Standard Flowgram Format (SFF) """
+    file_ext = "sff"
+    def __init__( self, **kwd ):
+        Binary.__init__( self, **kwd )
+    def sniff( self, filename ):
+        # The first 4 bytes of any sff file is '.sff', and the file is binary. For details
+        # about the format, see http://www.ncbi.nlm.nih.gov/Traces/trace.cgi?cmd=show&f=formats&m=doc&s=format
+        try:
+            header = open( filename ).read(4)
+            if binascii.b2a_hex( header ) == binascii.hexlify( '.sff' ):
+                return True
+            return False
+        except Exception, e:
+            return False
+    def set_peek( self, dataset ):
+        if not dataset.dataset.purged:
+            export_url = "/history_add_to?"
+ urlencode( {'history_id':dataset.history_id,'ext':'sff','name':'sff file','info':'sff file','dbkey':dataset.dbkey} ) + dataset.peek = "Binary sff file" + dataset.blurb = data.nice_size( dataset.get_size() ) + else: + dataset.peek = 'file does not exist' + dataset.blurb = 'file purged from disk' + def display_peek( self, dataset ): + try: + return dataset.peek + except: + return "Binary sff file (%s)" % ( data.nice_size( dataset.get_size() ) ) diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/datatypes/data.py --- a/lib/galaxy/datatypes/data.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/datatypes/data.py Tue Nov 17 16:16:26 2009 -0500 @@ -1,4 +1,4 @@ -import logging, os, sys, time, tempfile, binascii +import logging, os, sys, time, tempfile from galaxy import util from galaxy.util.odict import odict from galaxy.util.bunch import Bunch @@ -40,20 +40,18 @@ """ __metaclass__ = DataMeta - - """Add metadata elements""" + # Add metadata elements MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.DBKeyParameter, multiple=False, no_value="?" ) - - """Stores the set of display applications, and viewing methods, supported by this datatype """ + # Stores the set of display applications, and viewing methods, supported by this datatype supported_display_apps = {} - - """If False, the peek is regenerated whenever a dataset of this type is copied""" + # If False, the peek is regenerated whenever a dataset of this type is copied copy_safe_peek = True - - is_binary = True #The dataset contains binary data --> do not space_to_tab or convert newlines, etc. Allow binary file uploads of this type when True. - - allow_datatype_change = True #Allow user to change between this datatype and others. If False, this datatype cannot be changed from or into. - + # The dataset contains binary data --> do not space_to_tab or convert newlines, etc. + # Allow binary file uploads of this type when True. + is_binary = True + # Allow user to change between this datatype and others. If False, this datatype + # cannot be changed from or into. + allow_datatype_change = True #Composite datatypes composite_type = None composite_files = odict() @@ -162,6 +160,11 @@ info = info.replace( '\r', '<br/>' ) if info.find( '\n' ) >= 0: info = info.replace( '\n', '<br/>' ) + + # Convert to unicode to display non-ascii characters. + if type( info ) is not unicode: + info = unicode( info, 'utf-8') + return info except: return "info unavailable" @@ -270,8 +273,6 @@ def add_composite_file( self, name, **kwds ): #self.composite_files = self.composite_files.copy() self.composite_files[ name ] = self.__new_composite_file( name, **kwds ) - - def __substitute_composite_key( self, key, composite_file, dataset = None ): if composite_file.substitute_name_with_metadata: if dataset: @@ -303,7 +304,6 @@ return files def generate_auto_primary_file( self, dataset = None ): raise Exception( "generate_auto_primary_file is not implemented for this datatype." 
) - @property def has_resolution(self): return False @@ -364,23 +364,37 @@ dataset.peek = 'file does not exist' dataset.blurb = 'file purged from disk' -class Binary( Data ): - """Binary data""" +class Txtseq( Data ): + """Class describing a zip archive of text sequence files""" + file_ext = "txtseq.zip" def set_peek( self, dataset ): - """Set the peek and blurb text""" if not dataset.dataset.purged: - dataset.peek = 'binary data' - dataset.blurb = 'data' + zip_file = zipfile.ZipFile( dataset.file_name, "r" ) + num_files = len( zip_file.namelist() ) + dataset.peek = "Archive of %s text sequence files" % ( str( num_files ) ) + dataset.blurb = data.nice_size( dataset.get_size() ) else: dataset.peek = 'file does not exist' dataset.blurb = 'file purged from disk' + def display_peek(self, dataset): + try: + return dataset.peek + except: + return "Text sequence file archive (%s)" % ( data.nice_size( dataset.get_size() ) ) + def get_mime(self): + """Returns the mime type of the datatype""" + return 'application/zip' + +class Newick( Text ): + pass + +# ------------- Utility methods -------------- def get_test_fname( fname ): """Returns test data filename""" path, name = os.path.split(__file__) full_path = os.path.join( path, 'test', fname ) return full_path - def nice_size(size): """ Returns a readably formatted string with the size @@ -406,7 +420,6 @@ out = "%.1f %s" % (size, word) return out return '??? bytes' - def get_file_peek( file_name, is_multi_byte=False, WIDTH=256, LINE_COUNT=5 ): """ Returns the first LINE_COUNT lines wrapped to WIDTH @@ -443,7 +456,6 @@ else: text = unicode( '\n'.join( lines ), 'utf-8' ) return text - def get_line_count(file_name): """Returns the number of lines in a file that are neither null nor comments""" count = 0 @@ -452,38 +464,3 @@ if line and line[0] != '#': count += 1 return count - -class Newick( Text ): - pass - -class Sff( Binary ): - """ Standard Flowgram Format (SFF) """ - file_ext = "sff" - def __init__( self, **kwd ): - Binary.__init__(self, **kwd) - def init_meta( self, dataset, copy_from=None ): - Binary.init_meta( self, dataset, copy_from=copy_from ) - def sniff( self, filename ): - ''' - The first 4 bytes of any sff file is '.sff' - - >>> fname = get_test_fname( '1.sff' ) - >>> Sff().sniff( fname ) - True - ''' - header = open( filename ).read(4) - if binascii.b2a_hex( header ) == binascii.hexlify( '.sff' ): - return True - return False - def set_peek( self, dataset ): - if not dataset.dataset.purged: - dataset.peek = "Binary sff file" - dataset.blurb = nice_size( dataset.get_size() ) - else: - dataset.peek = 'file does not exist' - dataset.blurb = 'file purged from disk' - def display_peek(self, dataset): - try: - return dataset.peek - except: - return "sff file (%s)" % ( nice_size( dataset.get_size() ) ) diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/datatypes/genetics.py --- a/lib/galaxy/datatypes/genetics.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/datatypes/genetics.py Tue Nov 17 16:16:26 2009 -0500 @@ -1,6 +1,5 @@ """ rgenetics datatypes -Use at your peril Ross Lazarus for the rgenetics and galaxy projects @@ -11,7 +10,6 @@ ross lazarus for rgenetics august 20 2007 """ - import logging, os, sys, time, tempfile, shutil, string, glob import data from galaxy import util @@ -26,8 +24,7 @@ from galaxy.datatypes.interval import Interval from galaxy.util.hash_util import * -gal_Log = logging.getLogger(__name__) -verbose = False +log = logging.getLogger(__name__) class GenomeGraphs(Interval): @@ -48,10 +45,8 @@ """Initialize datatype, by 
adding GBrowse display app""" Interval.__init__(self, **kwd) self.add_display_app ( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' ) - def as_ucsc_display_file( self, dataset, **kwd ): return open( dataset.file_name ) - def set_meta( self, dataset, overwrite = True, **kwd ): i = 0 for i, line in enumerate( file ( dataset.file_name ) ): @@ -66,7 +61,6 @@ except: pass Interval.set_meta( self, dataset, overwrite = overwrite, skip = i ) - def make_html_table( self, dataset, skipchars=[] ): """Create HTML table, used for displaying peek""" out = ['<table cellspacing="0" cellpadding="3">'] @@ -82,7 +76,6 @@ except Exception, exc: out = "Can't create peek %s" % exc return out - def get_estimated_display_viewport( self, dataset ): """ Return a chrom, start, stop tuple for viewing a file. There are slight differences between gff 2 and gff 3 @@ -118,7 +111,6 @@ return ( seqid, str( start ), str( stop ) ) else: return ( '', '', '' ) - def gbrowse_links( self, dataset, type, app, base_url ): ret_val = [] if dataset.has_data: @@ -132,7 +124,6 @@ link = "%s?start=%s&stop=%s&ref=%s&dbkey=%s" % ( site_url, start, stop, seqid, dataset.dbkey ) ret_val.append( ( site_name, link ) ) return ret_val - def ucsc_links( self, dataset, type, app, base_url ): ret_val = [] if dataset.has_data: @@ -160,10 +151,8 @@ link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url ) ret_val.append( (site_name, link) ) else: - gal_Log.debug('@@@ gg ucsc_links - no viewport_tuple') + log.debug('@@@ gg ucsc_links - no viewport_tuple') return ret_val - - def sniff( self, filename ): """ Determines whether the file is in gff format @@ -202,20 +191,17 @@ except: return False - - class rgTabList(Tabular): - """ for sampleid and for featureid lists of exclusions or inclusions in the clean tool + """ + for sampleid and for featureid lists of exclusions or inclusions in the clean tool featureid subsets on statistical criteria -> specialized display such as gg """ file_ext = "rgTList" - def __init__(self, **kwd): """Initialize featurelistt datatype""" Tabular.__init__( self, **kwd ) self.column_names = [] - def make_html_table( self, dataset, skipchars=[] ): """Create HTML table, used for displaying peek""" out = ['<table cellspacing="0" cellpadding="3">'] @@ -236,23 +222,24 @@ out = "Can't create peek %s" % exc return out - class rgSampleList(rgTabList): - """ for sampleid exclusions or inclusions in the clean tool - output from QC eg excess het, gender error, ibd pair member,eigen outlier,excess mendel errors,... - since they can be uploaded, should be flexible - but they are persistent at least - same infrastructure for expression? + """ + for sampleid exclusions or inclusions in the clean tool + output from QC eg excess het, gender error, ibd pair member,eigen outlier,excess mendel errors,... + since they can be uploaded, should be flexible + but they are persistent at least + same infrastructure for expression? """ file_ext = "rgSList" def __init__(self, **kwd): - """Initialize samplelist datatype""" + """ + Initialize samplelist datatype + """ rgTabList.__init__( self, **kwd ) self.column_names[0] = 'FID' self.column_names[1] = 'IID' # this is what Plink wants as at 2009 - def sniff(self,filename): """ """ @@ -264,10 +251,11 @@ return False class rgFeatureList( rgTabList ): - """ for featureid lists of exclusions or inclusions in the clean tool - output from QC eg low maf, high missingness, bad hwe in controls, excess mendel errors,... 
- featureid subsets on statistical criteria -> specialized display such as gg - same infrastructure for expression? + """ + for featureid lists of exclusions or inclusions in the clean tool + output from QC eg low maf, high missingness, bad hwe in controls, excess mendel errors,... + featureid subsets on statistical criteria -> specialized display such as gg + same infrastructure for expression? """ file_ext = "rgFList" @@ -276,26 +264,23 @@ rgTabList.__init__( self, **kwd ) for i,s in enumerate(['#FeatureId', 'Chr', 'Genpos', 'Mappos']): self.column_names[i] = s - class Rgenetics(Html): - """class to use for rgenetics""" - - MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="rgenetics", - readonly=True, set_in_upload=True) + """ + class to use for rgenetics + """ + MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="rgenetics", readonly=True, set_in_upload=True) composite_type = 'auto_primary_file' allow_datatype_change = False file_ext = 'rgenetics' - def missing_meta( self, dataset=None, **kwargs): """Checks for empty meta values""" for key, value in dataset.metadata.items(): if not value: return True return False - def generate_primary_file( self, dataset = None ): rval = ['<html><head><title>Rgenetics Galaxy Composite Dataset </title></head><p/>'] rval.append('<div>This composite dataset is composed of the following files:<p/><ul>') @@ -306,9 +291,9 @@ rval.append( '<li><a href="%s" type="application/binary">%s</a>%s' % ( composite_name, composite_name, opt_text ) ) rval.append( '</ul></div></html>' ) return "\n".join( rval ) - def regenerate_primary_file(self,dataset): - """cannot do this until we are setting metadata + """ + cannot do this until we are setting metadata """ def fix(oldpath,newbase): old,e = os.path.splitext(oldpath) @@ -332,30 +317,25 @@ f.write("\n".join( rval )) f.write('\n') f.close() - def set_meta( self, dataset, **kwd ): - - """for lped/pbed eg - + """ + for lped/pbed eg """ if kwd.get('overwrite') == False: - if verbose: - gal_Log.debug('@@@ rgenetics set_meta called with overwrite = False') + #log.debug('@@@ rgenetics set_meta called with overwrite = False') return True try: efp = dataset.extra_files_path except: - if verbose: - gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0], dataset.name)) + #log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0], dataset.name)) return False try: - flist = os.listdir(efp) - except: - if verbose: gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0],dataset.name)) + flist = os.listdir(efp) + except: + #log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0],dataset.name)) return False if len(flist) == 0: - if verbose: - gal_Log.debug('@@@rgenetics set_meta failed - %s efp %s is empty?' % (dataset.name,efp)) + #log.debug('@@@rgenetics set_meta failed - %s efp %s is empty?' 
% (dataset.name,efp)) return False bn = None for f in flist: @@ -372,9 +352,9 @@ dataset.blurb = 'Composite file - Rgenetics Galaxy toolkit' return True - class SNPMatrix(Rgenetics): - """fake class to distinguish different species of Rgenetics data collections + """ + fake class to distinguish different species of Rgenetics data collections """ file_ext="snpmatrix" @@ -385,9 +365,9 @@ else: dataset.peek = 'file does not exist' dataset.blurb = 'file purged from disk' - def sniff(self,filename): - """ need to check the file header hex code + """ + need to check the file header hex code """ infile = open(dataset.file_name, "b") head = infile.read(16) @@ -397,9 +377,9 @@ else: return True - class Lped(Rgenetics): - """fake class to distinguish different species of Rgenetics data collections + """ + fake class to distinguish different species of Rgenetics data collections """ file_ext="lped" @@ -408,9 +388,9 @@ self.add_composite_file( '%s.ped', description = 'Pedigree File', substitute_name_with_metadata = 'base_name', is_binary = True ) self.add_composite_file( '%s.map', description = 'Map File', substitute_name_with_metadata = 'base_name', is_binary = True ) - class Pphe(Rgenetics): - """fake class to distinguish different species of Rgenetics data collections + """ + fake class to distinguish different species of Rgenetics data collections """ file_ext="pphe" @@ -418,14 +398,15 @@ Rgenetics.__init__(self, **kwd) self.add_composite_file( '%s.pphe', description = 'Plink Phenotype File', substitute_name_with_metadata = 'base_name' ) - class Lmap(Rgenetics): - """fake class to distinguish different species of Rgenetics data collections + """ + fake class to distinguish different species of Rgenetics data collections """ file_ext="lmap" class Fphe(Rgenetics): - """fake class to distinguish different species of Rgenetics data collections + """ + fake class to distinguish different species of Rgenetics data collections """ file_ext="fphe" @@ -434,7 +415,8 @@ self.add_composite_file( '%s.fphe', description = 'FBAT Phenotype File', substitute_name_with_metadata = 'base_name' ) class Phe(Rgenetics): - """fake class to distinguish different species of Rgenetics data collections + """ + fake class to distinguish different species of Rgenetics data collections """ file_ext="phe" @@ -442,10 +424,9 @@ Rgenetics.__init__(self, **kwd) self.add_composite_file( '%s.phe', description = 'Phenotype File', substitute_name_with_metadata = 'base_name' ) - - class Fped(Rgenetics): - """fake class to distinguish different species of Rgenetics data collections + """ + fake class to distinguish different species of Rgenetics data collections """ file_ext="fped" @@ -453,9 +434,9 @@ Rgenetics.__init__(self, **kwd) self.add_composite_file( '%s.fped', description = 'FBAT format pedfile', substitute_name_with_metadata = 'base_name' ) - class Pbed(Rgenetics): - """fake class to distinguish different species of Rgenetics data collections + """ + fake class to distinguish different species of Rgenetics data collections """ file_ext="pbed" @@ -466,7 +447,8 @@ self.add_composite_file( '%s.fam', substitute_name_with_metadata = 'base_name', is_binary = True ) class Eigenstratgeno(Rgenetics): - """fake class to distinguish different species of Rgenetics data collections + """ + fake class to distinguish different species of Rgenetics data collections """ file_ext="eigenstratgeno" @@ -475,11 +457,10 @@ self.add_composite_file( '%s.eigenstratgeno', substitute_name_with_metadata = 'base_name', is_binary = True ) 
self.add_composite_file( '%s.ind', substitute_name_with_metadata = 'base_name', is_binary = True ) self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name', is_binary = True ) - - class Eigenstratpca(Rgenetics): - """fake class to distinguish different species of Rgenetics data collections + """ + fake class to distinguish different species of Rgenetics data collections """ file_ext="eigenstratpca" @@ -487,22 +468,21 @@ Rgenetics.__init__(self, **kwd) self.add_composite_file( '%s.eigenstratpca', description = 'Eigenstrat PCA file', substitute_name_with_metadata = 'base_name' ) - class Snptest(Rgenetics): - """fake class to distinguish different species of Rgenetics data collections + """ + fake class to distinguish different species of Rgenetics data collections """ file_ext="snptest" - class Pheno(Tabular): """ base class for pheno files """ file_ext = 'pheno' - class RexpBase( Html ): - """base class for BioC data structures in Galaxy + """ + base class for BioC data structures in Galaxy must be constructed with the pheno data in place since that goes into the metadata for each instance """ @@ -518,18 +498,16 @@ composite_type = 'auto_primary_file' allow_datatype_change = False - def __init__( self, **kwd ): Html.__init__(self,**kwd) self.add_composite_file( '%s.pheno', description = 'Phenodata tab text file', substitute_name_with_metadata = 'base_name', is_binary=True) - def generate_primary_file( self, dataset = None ): - """ This is called only at upload to write the html file + """ + This is called only at upload to write the html file cannot rename the datasets here - they come with the default unfortunately """ return '<html><head></head><body>AutoGenerated Primary File for Composite Dataset</body></html>' - def get_phecols(self, phenolist=[], maxConc=20): """ sept 2009: cannot use whitespace to split - make a more complex structure here @@ -555,7 +533,7 @@ else: for col,code in enumerate(row): # keep column order correct if col >= totcols: - gal_Log.warning('### get_phecols error in pheno file - row %d col %d (%s) longer than header %s' % (nrows, col, row, head)) + log.warning('### get_phecols error in pheno file - row %d col %d (%s) longer than header %s' % (nrows, col, row, head)) else: concordance[col].setdefault(code,0) # first one is zero concordance[col][code] += 1 @@ -601,10 +579,9 @@ res = [('no usable phenotype columns found',[('?',0),]),] return res - - def get_pheno(self,dataset): - """expects a .pheno file in the extra_files_dir - ugh + """ + expects a .pheno file in the extra_files_dir - ugh note that R is wierd and adds the row.name in the header so the columns are all wrong - unless you tell it not to. A file can be written as @@ -620,11 +597,12 @@ else: p = [] return '\n'.join(p) - def set_peek( self, dataset ): - """expects a .pheno file in the extra_files_dir - ugh + """ + expects a .pheno file in the extra_files_dir - ugh note that R is wierd and does not include the row.name in - the header. why?""" + the header. why? 
+ """ if not dataset.dataset.purged: pp = os.path.join(dataset.extra_files_path,'%s.pheno' % dataset.metadata.base_name) try: @@ -636,17 +614,14 @@ else: dataset.peek = 'file does not exist\n' dataset.blurb = 'file purged from disk' - def get_peek( self, dataset ): - """expects a .pheno file in the extra_files_dir - ugh - """ + """expects a .pheno file in the extra_files_dir - ugh""" pp = os.path.join(dataset.extra_files_path,'%s.pheno' % dataset.metadata.base_name) try: p = file(pp,'r').readlines() except: p = ['##failed to find %s' % pp] return ''.join(p[:5]) - def get_file_peek(self,filename): """ can't really peek at a filename - need the extra_files_path and such? @@ -657,7 +632,6 @@ except: pass return ''.join(h[:5]) - def regenerate_primary_file(self,dataset): """cannot do this until we are setting metadata """ @@ -672,24 +646,19 @@ f.write("\n".join( rval )) f.write('\n') f.close() - - """Add metadata elements""" def init_meta( self, dataset, copy_from=None ): + """Add metadata elements""" if copy_from: dataset.metadata = copy_from.metadata - def set_meta( self, dataset, **kwd ): - """ NOTE we apply the tabular machinary to the phenodata extracted from a BioC eSet or affybatch. - """ try: flist = os.listdir(dataset.extra_files_path) except: - if verbose: - gal_Log.debug('@@@rexpression set_meta failed - no dataset?') + #log.debug('@@@rexpression set_meta failed - no dataset?') return False bn = None for f in flist: @@ -727,7 +696,6 @@ if not dataset.blurb: dataset.blurb = 'R loadable BioC expression object for the Rexpression Galaxy toolkit' return True - def make_html_table( self, pp='nothing supplied from peek\n'): """Create HTML table, used for displaying peek""" out = ['<table cellspacing="0" cellpadding="3">',] @@ -750,20 +718,16 @@ except Exception, exc: out = "Can't create html table %s" % str( exc ) return out - def display_peek( self, dataset ): """Returns formatted html of peek""" out=self.make_html_table(dataset.peek) return out - def get_mime(self): """Returns the mime type of the datatype""" return 'text/html' - class Affybatch( RexpBase ): """derived class for BioC data structures in Galaxy """ - file_ext = "affybatch" def __init__( self, **kwd ): @@ -780,7 +744,6 @@ self.add_composite_file( '%s.eset', description = 'ESet R object saved to file', substitute_name_with_metadata = 'base_name', is_binary = True ) - class MAlist( RexpBase ): """derived class for BioC data structures in Galaxy """ file_ext = "malist" @@ -790,9 +753,6 @@ self.add_composite_file( '%s.malist', description = 'MAlist R object saved to file', substitute_name_with_metadata = 'base_name', is_binary = True ) - if __name__ == '__main__': import doctest, sys doctest.testmod(sys.modules[__name__]) - - diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/datatypes/images.py --- a/lib/galaxy/datatypes/images.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/datatypes/images.py Tue Nov 17 16:16:26 2009 -0500 @@ -13,82 +13,6 @@ log = logging.getLogger(__name__) -class Ab1( data.Data ): - """Class describing an ab1 binary sequence file""" - file_ext = "ab1" - def set_peek( self, dataset ): - if not dataset.dataset.purged: - export_url = "/history_add_to?" 
+ urlencode({'history_id':dataset.history_id,'ext':'ab1','name':'ab1 sequence','info':'Sequence file','dbkey':dataset.dbkey}) - dataset.peek = "Binary ab1 sequence file" - dataset.blurb = data.nice_size( dataset.get_size() ) - else: - dataset.peek = 'file does not exist' - dataset.blurb = 'file purged from disk' - def display_peek(self, dataset): - try: - return dataset.peek - except: - return "Binary ab1 sequence file (%s)" % ( data.nice_size( dataset.get_size() ) ) - -class Scf( data.Data ): - """Class describing an scf binary sequence file""" - file_ext = "scf" - def set_peek( self, dataset ): - if not dataset.dataset.purged: - export_url = "/history_add_to?" + urlencode({'history_id':dataset.history_id,'ext':'scf','name':'scf sequence','info':'Sequence file','dbkey':dataset.dbkey}) - dataset.peek = "Binary scf sequence file" - dataset.blurb = data.nice_size( dataset.get_size() ) - else: - dataset.peek = 'file does not exist' - dataset.blurb = 'file purged from disk' - def display_peek(self, dataset): - try: - return dataset.peek - except: - return "Binary scf sequence file (%s)" % ( data.nice_size( dataset.get_size() ) ) - -class Binseq( data.Data ): - """Class describing a zip archive of binary sequence files""" - file_ext = "binseq.zip" - def set_peek( self, dataset ): - if not dataset.dataset.purged: - zip_file = zipfile.ZipFile( dataset.file_name, "r" ) - num_files = len( zip_file.namelist() ) - dataset.peek = "Archive of %s binary sequence files" % ( str( num_files ) ) - dataset.blurb = data.nice_size( dataset.get_size() ) - else: - dataset.peek = 'file does not exist' - dataset.blurb = 'file purged from disk' - def display_peek(self, dataset): - try: - return dataset.peek - except: - return "Binary sequence file archive (%s)" % ( data.nice_size( dataset.get_size() ) ) - def get_mime(self): - """Returns the mime type of the datatype""" - return 'application/zip' - -class Txtseq( data.Data ): - """Class describing a zip archive of text sequence files""" - file_ext = "txtseq.zip" - def set_peek( self, dataset ): - if not dataset.dataset.purged: - zip_file = zipfile.ZipFile( dataset.file_name, "r" ) - num_files = len( zip_file.namelist() ) - dataset.peek = "Archive of %s text sequence files" % ( str( num_files ) ) - dataset.blurb = data.nice_size( dataset.get_size() ) - else: - dataset.peek = 'file does not exist' - dataset.blurb = 'file purged from disk' - def display_peek(self, dataset): - try: - return dataset.peek - except: - return "Text sequence file archive (%s)" % ( data.nice_size( dataset.get_size() ) ) - def get_mime(self): - """Returns the mime type of the datatype""" - return 'application/zip' - class Image( data.Data ): """Class describing an image""" def set_peek( self, dataset ): @@ -236,47 +160,3 @@ return dataset.peek except: return "peek unavailable" - -class Bam( data.Binary ): - """Class describing a BAM binary file""" - file_ext = "bam" - MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True ) - def init_meta( self, dataset, copy_from=None ): - data.Binary.init_meta( self, dataset, copy_from=copy_from ) - def set_meta( self, dataset, overwrite = True, **kwd ): - """ - Sets index for BAM file. 
- """ - index_file = dataset.metadata.bam_index - if not index_file: - index_file = dataset.metadata.spec['bam_index'].param.new_file( dataset = dataset ) - tmp_dir = tempfile.gettempdir() - tmpf1 = tempfile.NamedTemporaryFile(dir=tmp_dir) - tmpf1bai = '%s.bai' % tmpf1.name - try: - os.system('cd %s' % tmp_dir) - os.system('cp %s %s' % (dataset.file_name, tmpf1.name)) - os.system('samtools index %s' % tmpf1.name) - os.system('cp %s %s' % (tmpf1bai, index_file.file_name)) - except Exception, ex: - sys.stderr.write('There was a problem creating the index for the BAM file\n%s\n' + str(ex)) - tmpf1.close() - if os.path.exists(tmpf1bai): - os.remove(tmpf1bai) - dataset.metadata.bam_index = index_file - def set_peek( self, dataset ): - if not dataset.dataset.purged: - export_url = "/history_add_to?" + urlencode({'history_id':dataset.history_id,'ext':'bam','name':'bam alignments','info':'Alignments file','dbkey':dataset.dbkey}) - dataset.peek = "Binary bam alignments file" - dataset.blurb = data.nice_size( dataset.get_size() ) - else: - dataset.peek = 'file does not exist' - dataset.blurb = 'file purged from disk' - def display_peek(self, dataset): - try: - return dataset.peek - except: - return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) ) - def get_mime(self): - """Returns the mime type of the datatype""" - return 'application/octet-stream' diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/datatypes/metadata.py --- a/lib/galaxy/datatypes/metadata.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/datatypes/metadata.py Tue Nov 17 16:16:26 2009 -0500 @@ -509,7 +509,7 @@ # need to make different keys for them, since ids can overlap return "%s_%d" % ( dataset.__class__.__name__, dataset.id ) def setup_external_metadata( self, datasets, sa_session, exec_dir=None, tmp_dir=None, dataset_files_path=None, - output_fnames=None, config_root=None, datatypes_config=None, kwds={} ): + output_fnames=None, config_root=None, datatypes_config=None, job_metadata=None, kwds={} ): #fill in metadata_files_dict and return the command with args required to set metadata def __metadata_files_list_to_cmd_line( metadata_files ): def __get_filename_override(): @@ -564,7 +564,7 @@ sa_session.flush() metadata_files_list.append( metadata_files ) #return command required to build - return "%s %s %s %s %s %s" % ( os.path.join( exec_dir, 'set_metadata.sh' ), dataset_files_path, tmp_dir, config_root, datatypes_config, " ".join( map( __metadata_files_list_to_cmd_line, metadata_files_list ) ) ) + return "%s %s %s %s %s %s %s" % ( os.path.join( exec_dir, 'set_metadata.sh' ), dataset_files_path, tmp_dir, config_root, datatypes_config, job_metadata, " ".join( map( __metadata_files_list_to_cmd_line, metadata_files_list ) ) ) def external_metadata_set_successfully( self, dataset, sa_session ): metadata_files = self.get_output_filenames_by_dataset( dataset, sa_session ) diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/datatypes/registry.py --- a/lib/galaxy/datatypes/registry.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/datatypes/registry.py Tue Nov 17 16:16:26 2009 -0500 @@ -3,7 +3,7 @@ """ import os, tempfile import logging -import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo +import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo, binary import galaxy.util from galaxy.util.odict import odict @@ -109,11 +109,11 @@ #default values if len(self.datatypes_by_extension) < 1: self.datatypes_by_extension 
= { - 'ab1' : images.Ab1(), + 'ab1' : binary.Ab1(), 'axt' : sequence.Axt(), - 'bam' : images.Bam(), + 'bam' : binary.Bam(), 'bed' : interval.Bed(), - 'binseq.zip' : images.Binseq(), + 'binseq.zip' : binary.Binseq(), 'blastxml' : xml.BlastXml(), 'coverage' : coverage.LastzCoverage(), 'customtrack' : interval.CustomTrack(), @@ -132,12 +132,12 @@ 'qualsolexa' : qualityscore.QualityScoreSolexa(), 'qual454' : qualityscore.QualityScore454(), 'sam' : tabular.Sam(), - 'scf' : images.Scf(), - 'sff' : data.Sff(), + 'scf' : binary.Scf(), + 'sff' : binary.Sff(), 'tabular' : tabular.Tabular(), 'taxonomy' : tabular.Taxonomy(), 'txt' : data.Text(), - 'txtseq.zip' : images.Txtseq(), + 'txtseq.zip' : data.Txtseq(), 'wig' : interval.Wiggle() } self.mimetypes_by_extension = { @@ -174,7 +174,7 @@ # because some formats are much more flexibly defined than others. if len(self.sniff_order) < 1: self.sniff_order = [ - data.Sff(), + binary.Sff(), xml.BlastXml(), sequence.Maf(), sequence.Lav(), diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/datatypes/tracks.py --- a/lib/galaxy/datatypes/tracks.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/datatypes/tracks.py Tue Nov 17 16:16:26 2009 -0500 @@ -3,10 +3,7 @@ """ import data -import logging -import re -import binascii -from cgi import escape +import tabular, binascii, logging from galaxy.datatypes.metadata import MetadataElement from galaxy.datatypes import metadata import galaxy.model @@ -17,7 +14,7 @@ log = logging.getLogger(__name__) -class GeneTrack( data.Binary ): +class GeneTrack( tabular.Tabular ): file_ext = "genetrack" MetadataElement( name="genetrack", default="data.genetrack", desc="HDF index", readonly=True, visible=True, no_value=0 ) @@ -27,7 +24,7 @@ super( GeneTrack, self ).__init__( **kwargs ) self.add_display_app( 'genetrack', 'View in', '', 'genetrack_link' ) def get_display_links( self, dataset, type, app, base_url, target_frame='galaxy_main', **kwd ): - return data.Binary.get_display_links( self, dataset, type, app, base_url, target_frame=target_frame, **kwd ) + return data.Data.get_display_links( self, dataset, type, app, base_url, target_frame=target_frame, **kwd ) def genetrack_link( self, hda, type, app, base_url ): ret_val = [] if hda.has_data: diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/jobs/__init__.py --- a/lib/galaxy/jobs/__init__.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/jobs/__init__.py Tue Nov 17 16:16:26 2009 -0500 @@ -139,10 +139,15 @@ JobWrapper( job, None, self ).fail( 'This tool was disabled before the job completed. 
Please contact your Galaxy administrator, or' ) elif job.job_runner_name is None: log.debug( "no runner: %s is still in queued state, adding to the jobs queue" %job.id ) - self.queue.put( ( job.id, job.tool_id ) ) + if self.track_jobs_in_database: + job.state = model.Job.states.NEW + else: + self.queue.put( ( job.id, job.tool_id ) ) else: job_wrapper = JobWrapper( job, self.app.toolbox.tools_by_id[ job.tool_id ], self ) self.dispatcher.recover( job, job_wrapper ) + if self.sa_session.dirty: + self.sa_session.flush() def __monitor( self ): """ @@ -526,6 +531,7 @@ # If the tool was expected to set the extension, attempt to retrieve it if dataset.ext == 'auto': dataset.extension = context.get( 'ext', 'data' ) + dataset.init_meta( copy_from=dataset ) #if a dataset was copied, it won't appear in our dictionary: #either use the metadata from originating output dataset, or call set_meta on the copies #it would be quicker to just copy the metadata from the originating output dataset, @@ -715,14 +721,15 @@ for outfile in [ str( o ) for o in output_paths ]: sizes.append( ( outfile, os.stat( outfile ).st_size ) ) return sizes - def setup_external_metadata( self, exec_dir = None, tmp_dir = None, dataset_files_path = None, config_root = None, datatypes_config = None, **kwds ): + def setup_external_metadata( self, exec_dir = None, tmp_dir = None, dataset_files_path = None, config_root = None, datatypes_config = None, set_extension = True, **kwds ): # extension could still be 'auto' if this is the upload tool. job = self.sa_session.query( model.Job ).get( self.job_id ) - for output_dataset_assoc in job.output_datasets: - if output_dataset_assoc.dataset.ext == 'auto': - context = self.get_dataset_finish_context( dict(), output_dataset_assoc.dataset.dataset ) - output_dataset_assoc.dataset.extension = context.get( 'ext', 'data' ) - self.sa_session.flush() + if set_extension: + for output_dataset_assoc in job.output_datasets: + if output_dataset_assoc.dataset.ext == 'auto': + context = self.get_dataset_finish_context( dict(), output_dataset_assoc.dataset.dataset ) + output_dataset_assoc.dataset.extension = context.get( 'ext', 'data' ) + self.sa_session.flush() if tmp_dir is None: #this dir should should relative to the exec_dir tmp_dir = self.app.config.new_file_path @@ -739,6 +746,7 @@ dataset_files_path = dataset_files_path, config_root = config_root, datatypes_config = datatypes_config, + job_metadata = os.path.join( self.working_directory, TOOL_PROVIDED_JOB_METADATA_FILE ), **kwds ) class DefaultJobDispatcher( object ): diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/jobs/runners/local.py --- a/lib/galaxy/jobs/runners/local.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/jobs/runners/local.py Tue Nov 17 16:16:26 2009 -0500 @@ -106,7 +106,9 @@ #this is terminatable when output dataset/job is deleted #so that long running set_meta()s can be cancelled without having to reboot the server if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally and job_wrapper.output_paths: - external_metadata_script = job_wrapper.setup_external_metadata( output_fnames = job_wrapper.get_output_fnames(), kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior + external_metadata_script = job_wrapper.setup_external_metadata( output_fnames = job_wrapper.get_output_fnames(), + set_extension = True, + kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that 
was copied over in init_meta(), as per established behavior log.debug( 'executing external set_meta script for job %d: %s' % ( job_wrapper.job_id, external_metadata_script ) ) external_metadata_proc = subprocess.Popen( args = external_metadata_script, shell = True, diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/jobs/runners/pbs.py --- a/lib/galaxy/jobs/runners/pbs.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/jobs/runners/pbs.py Tue Nov 17 16:16:26 2009 -0500 @@ -29,7 +29,6 @@ fi cd %s %s -%s """ pbs_symlink_template = """#!/bin/sh @@ -178,7 +177,9 @@ pbs_queue_name = self.determine_pbs_queue( runner_url ) c = pbs.pbs_connect( pbs_server_name ) if c <= 0: - raise Exception( "Connection to PBS server for submit failed" ) + job_wrapper.fail( "Unable to queue job for execution. Resubmitting the job may succeed." ) + log.error( "Connection to PBS server for submit failed" ) + return # define job attributes ofile = "%s/%s.o" % (self.app.config.cluster_files_directory, job_wrapper.job_id) @@ -221,11 +222,15 @@ if self.app.config.pbs_stage_path != '': script = pbs_symlink_template % (job_wrapper.galaxy_lib_dir, " ".join(job_wrapper.get_input_fnames() + output_files), self.app.config.pbs_stage_path, exec_dir, command_line) else: + script = pbs_template % ( job_wrapper.galaxy_lib_dir, exec_dir, command_line ) if self.app.config.set_metadata_externally: - external_metadata_script = job_wrapper.setup_external_metadata( exec_dir = os.path.abspath( os.getcwd() ), tmp_dir = self.app.config.new_file_path, dataset_files_path = self.app.model.Dataset.file_path, output_fnames = output_fnames, kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior - else: - external_metadata_script = "" - script = pbs_template % ( job_wrapper.galaxy_lib_dir, exec_dir, command_line, external_metadata_script ) + script += "cd %s\n" % os.path.abspath( os.getcwd() ) + script += "%s\n" % job_wrapper.setup_external_metadata( exec_dir = os.path.abspath( os.getcwd() ), + tmp_dir = self.app.config.new_file_path, + dataset_files_path = self.app.model.Dataset.file_path, + output_fnames = output_fnames, + set_extension = False, + kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior job_file = "%s/%s.sh" % (self.app.config.cluster_files_directory, job_wrapper.job_id) fh = file(job_file, "w") fh.write(script) diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/tools/actions/metadata.py --- a/lib/galaxy/tools/actions/metadata.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/tools/actions/metadata.py Tue Nov 17 16:16:26 2009 -0500 @@ -41,6 +41,7 @@ output_fnames = None, config_root = None, datatypes_config = None, + job_metadata = None, kwds = { 'overwrite' : True } ) incoming[ '__SET_EXTERNAL_METADATA_COMMAND_LINE__' ] = cmd_line for name, value in tool.params_to_strings( incoming, trans.app ).iteritems(): diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/web/controllers/dataset.py --- a/lib/galaxy/web/controllers/dataset.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/web/controllers/dataset.py Tue Nov 17 16:16:26 2009 -0500 @@ -20,25 +20,38 @@ This error report was sent from the Galaxy instance hosted on the server "${host}" ----------------------------------------------------------------------------- -This is in reference to output dataset ${dataset_id}. 
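[Annotation] The reworked error-report template above is rendered with string.Template.safe_substitute(), as report_error() further down shows. A minimal standalone sketch of that mechanism -- the address and values here are placeholders, not Galaxy's:

    import string
    from email.mime.text import MIMEText

    template = string.Template( "job id: ${job_id}\ntool id: ${job_tool_id}\n" )
    # safe_substitute() leaves unknown ${placeholders} untouched instead of
    # raising KeyError, so the template can gain fields without breaking callers
    body = template.safe_substitute( job_id = 42 )
    msg = MIMEText( body )
    msg[ 'To' ] = 'galaxy-admin@example.org'   # placeholder address
    print body

That forgiving behavior is what lets the template grow fields such as ${history_view_link} without every call site having to supply them.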
+This is in reference to dataset id ${dataset_id} from history id ${history_id} +----------------------------------------------------------------------------- +You should be able to view the history containing the related history item + +${hid}: ${history_item_name} + +by logging in as a Galaxy admin user to the Galaxy instance referenced above +and pointing your browser to the following link. + +${history_view_link} ----------------------------------------------------------------------------- The user '${email}' provided the following information: + ${message} ----------------------------------------------------------------------------- job id: ${job_id} -tool id: ${tool_id} +tool id: ${job_tool_id} +----------------------------------------------------------------------------- +job command line: +${job_command_line} ----------------------------------------------------------------------------- job stderr: -${stderr} +${job_stderr} ----------------------------------------------------------------------------- job stdout: -${stdout} +${job_stdout} ----------------------------------------------------------------------------- job info: -${info} +${job_info} ----------------------------------------------------------------------------- job traceback: -${traceback} +${job_traceback} ----------------------------------------------------------------------------- (This is an automated message). """ @@ -103,41 +116,45 @@ @web.expose def errors( self, trans, id ): - dataset = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id ) - return trans.fill_template( "dataset/errors.mako", dataset=dataset ) - + hda = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id ) + return trans.fill_template( "dataset/errors.mako", hda=hda ) @web.expose def stderr( self, trans, id ): dataset = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id ) job = dataset.creating_job_associations[0].job trans.response.set_content_type( 'text/plain' ) return job.stderr - @web.expose def report_error( self, trans, id, email='', message="" ): smtp_server = trans.app.config.smtp_server if smtp_server is None: - return trans.show_error_message( "Sorry, mail is not configured for this galaxy instance" ) + return trans.show_error_message( "Mail is not configured for this galaxy instance" ) to_address = trans.app.config.error_email_to if to_address is None: - return trans.show_error_message( "Sorry, error reporting has been disabled for this galaxy instance" ) + return trans.show_error_message( "Error reporting has been disabled for this galaxy instance" ) # Get the dataset and associated job - dataset = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id ) - job = dataset.creating_job_associations[0].job + hda = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id ) + job = hda.creating_job_associations[0].job # Get the name of the server hosting the Galaxy instance from which this report originated host = trans.request.host + history_view_link = "%s/history/view?id=%s" % ( str( host ), trans.security.encode_id( hda.history_id ) ) # Build the email message msg = MIMEText( string.Template( error_report_template ) .safe_substitute( host=host, - dataset_id=dataset.id, + dataset_id=hda.dataset_id, + history_id=hda.history_id, + hid=hda.hid, + history_item_name=hda.get_display_name(), + history_view_link=history_view_link, + job_id=job.id, + job_tool_id=job.tool_id, + job_command_line=job.command_line, + job_stderr=job.stderr, + job_stdout=job.stdout, + job_info=job.info, + 
job_traceback=job.traceback, email=email, - message=message, - job_id=job.id, - tool_id=job.tool_id, - stderr=job.stderr, - stdout=job.stdout, - traceback=job.traceback, - info=job.info ) ) + message=message ) ) frm = to_address # Check email a bit email = email.strip() diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/web/controllers/forms.py --- a/lib/galaxy/web/controllers/forms.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/web/controllers/forms.py Tue Nov 17 16:16:26 2009 -0500 @@ -8,9 +8,71 @@ from elementtree.ElementTree import XML, Element from galaxy.util.odict import odict import copy +from galaxy.web.framework.helpers import time_ago, iff, grids log = logging.getLogger( __name__ ) +class FormsGrid( grids.Grid ): + # Custom column types + class NameColumn( grids.TextColumn ): + def get_value(self, trans, grid, form): + return form.latest_form.name + class DescriptionColumn( grids.TextColumn ): + def get_value(self, trans, grid, form): + return form.latest_form.desc + class TypeColumn( grids.TextColumn ): + def get_value(self, trans, grid, form): + return form.latest_form.type + class DeletedColumn( grids.GridColumn ): + def get_accepted_filters( self ): + """ Returns a list of accepted filters for this column. """ + accepted_filter_labels_and_vals = { "active" : "False", "deleted" : "True", "all": "All" } + accepted_filters = [] + for label, val in accepted_filter_labels_and_vals.items(): + args = { self.key: val } + accepted_filters.append( grids.GridColumnFilter( label, args) ) + return accepted_filters + # Grid definition + title = "Forms" + template = "admin/forms/grid.mako" + model_class = model.FormDefinitionCurrent + default_sort_key = "-create_time" + num_rows_per_page = 50 + preserve_state = True + use_paging = True + default_filter = dict( deleted="False" ) + columns = [ + NameColumn( "Name", + key="name", + model_class=model.FormDefinition, + link=( lambda item: iff( item.deleted, None, dict( operation="view", id=item.id ) ) ), + attach_popup=True, + filterable="advanced" ), + DescriptionColumn( "Description", + key='desc', + model_class=model.FormDefinition, + filterable="advanced" ), + TypeColumn( "Type" ), + DeletedColumn( "Deleted", + key="deleted", + visible=False, + filterable="advanced" ) + ] + columns.append( grids.MulticolFilterColumn( "Search", + cols_to_filter=[ columns[0], columns[1] ], + key="free-text-search", + visible=False, + filterable="standard" ) ) + operations = [ + grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: not item.deleted ) ), + grids.GridOperation( "Delete", allow_multiple=True, condition=( lambda item: not item.deleted ) ), + grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ), + ] + global_actions = [ + grids.GridAction( "Create new form", dict( controller='forms', + action='new' ) ) + ] + class Forms( BaseController ): # Empty form field empty_field = { 'label': '', @@ -20,38 +82,38 @@ 'type': BaseField.form_field_types()[0], 'selectlist': [], 'layout': 'none' } + forms_grid = FormsGrid() + @web.expose @web.require_admin - def index( self, trans, **kwd ): - params = util.Params( kwd ) - msg = util.restore_text( params.get( 'msg', '' ) ) - messagetype = params.get( 'messagetype', 'done' ) - return trans.fill_template( "/sample/index.mako", - default_action=params.get( 'default_action', None ), - msg=msg, - messagetype=messagetype ) - @web.expose - @web.require_admin - def manage( self, trans, **kwd ): - params = util.Params( kwd ) - msg = util.restore_text( params.get( 'msg', '' ) ) - 
messagetype = params.get( 'messagetype', 'done' ) - show_filter = params.get( 'show_filter', 'Active' ) - return self._show_forms_list(trans, msg, messagetype, show_filter) - def _show_forms_list(self, trans, msg, messagetype, show_filter='Active'): - all_forms = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ) - if show_filter == 'All': - forms_list = all_forms - elif show_filter == 'Deleted': - forms_list = [form for form in all_forms if form.deleted] - else: - forms_list = [form for form in all_forms if not form.deleted] - return trans.fill_template( '/admin/forms/manage_forms.mako', - fdc_list=forms_list, - all_forms=all_forms, - show_filter=show_filter, - msg=msg, - messagetype=messagetype ) + def manage( self, trans, **kwd ): + if 'operation' in kwd: + operation = kwd['operation'].lower() + if not kwd.get( 'id', None ): + return trans.response.send_redirect( web.url_for( controller='forms', + action='manage', + status='error', + message="Invalid form ID") ) + if operation == "view": + return self.__view( trans, **kwd ) + elif operation == "delete": + return self.__delete( trans, **kwd ) + elif operation == "undelete": + return self.__undelete( trans, **kwd ) + elif operation == "edit": + return self.__edit( trans, **kwd ) + return self.forms_grid( trans, **kwd ) + def __view(self, trans, **kwd): + try: + fdc = trans.sa_session.query( trans.app.model.FormDefinitionCurrent )\ + .get( trans.security.decode_id(kwd['id']) ) + except: + return trans.response.send_redirect( web.url_for( controller='forms', + action='manage', + msg='Invalid form', + messagetype='error' ) ) + return trans.fill_template( '/admin/forms/show_form_read_only.mako', + form=fdc.latest_form ) def __form_types_widget(self, trans, selected='none'): form_type_selectbox = SelectField( 'form_type_selectbox', refresh_on_change=True, @@ -86,13 +148,14 @@ self.__get_saved_form( fd ) if self.__imported_from_file: return trans.response.send_redirect( web.url_for( controller='forms', - action='edit', - show_form=True, - form_id=fd.id) ) + action='manage', + operation='edit', + id=trans.security.encode_id(fd.current.id)) ) else: return trans.response.send_redirect( web.url_for( controller='forms', - action='edit', - form_id=fd.id, + action='manage', + operation='edit', + id=trans.security.encode_id(fd.current.id), add_field_button='Add field', name=fd.name, description=fd.desc, @@ -105,35 +168,43 @@ inputs=inputs, msg=msg, messagetype=messagetype ) - @web.expose - @web.require_admin - def delete( self, trans, **kwd ): - params = util.Params( kwd ) - msg = util.restore_text( params.get( 'msg', '' ) ) - messagetype = params.get( 'messagetype', 'done' ) - fd = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( util.restore_text( params.form_id ) ) ) - fd.form_definition_current.deleted = True - trans.sa_session.add( fd.form_definition_current ) - trans.sa_session.flush() - return self._show_forms_list(trans, - msg='The form definition named %s is deleted.' 
% fd.name, - messagetype='done') - @web.expose - @web.require_admin - def undelete( self, trans, **kwd ): - params = util.Params( kwd ) - msg = util.restore_text( params.get( 'msg', '' ) ) - messagetype = params.get( 'messagetype', 'done' ) - fd = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( util.restore_text( params.form_id ) ) ) - fd.form_definition_current.deleted = False - trans.sa_session.add( fd.form_definition_current ) - trans.sa_session.flush() - return self._show_forms_list(trans, - msg='The form definition named %s is undeleted.' % fd.name, - messagetype='done') - @web.expose - @web.require_admin - def edit( self, trans, **kwd ): + def __delete( self, trans, **kwd ): + id_list = util.listify( kwd['id'] ) + delete_failed = [] + for id in id_list: + try: + fdc = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ).get( trans.security.decode_id(id) ) + except: + return trans.response.send_redirect( web.url_for( controller='forms', + action='manage', + message='Invalid form', + status='error' ) ) + fdc.deleted = True + trans.sa_session.add( fdc ) + trans.sa_session.flush() + return trans.response.send_redirect( web.url_for( controller='forms', + action='manage', + message='%i form(s) is deleted.' % len(id_list), + status='done') ) + def __undelete( self, trans, **kwd ): + id_list = util.listify( kwd['id'] ) + delete_failed = [] + for id in id_list: + try: + fdc = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ).get( trans.security.decode_id(id) ) + except: + return trans.response.send_redirect( web.url_for( controller='forms', + action='manage', + message='Invalid form', + status='error' ) ) + fdc.deleted = False + trans.sa_session.add( fdc ) + trans.sa_session.flush() + return trans.response.send_redirect( web.url_for( controller='forms', + action='manage', + message='%i form(s) is undeleted.' % len(id_list), + status='done') ) + def __edit( self, trans, **kwd ): ''' This callback method is for handling all the editing functions like renaming fields, adding/deleting fields, changing fields attributes. @@ -142,17 +213,28 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) try: - fd = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( params.get( 'form_id', None ) ) ) + fdc = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ).get( trans.security.decode_id(kwd['id']) ) except: return trans.response.send_redirect( web.url_for( controller='forms', action='manage', - msg='Invalid form', - messagetype='error' ) ) + message='Invalid form', + status='error' ) ) + fd = fdc.latest_form # - # Show the form for editing + # Save changes # - if params.get( 'show_form', False ): + if params.get( 'save_changes_button', False ): + fd_new, msg = self.__save_form( trans, fdc_id=fd.form_definition_current.id, **kwd ) + # if validation error encountered while saving the form, show the + # unsaved form, with the error message + if not fd_new: + current_form = self.__get_form( trans, **kwd ) + return self.__show( trans=trans, form=fd, current_form=current_form, + msg=msg, messagetype='error', **kwd ) + # everything went fine. form saved successfully. Show the saved form + fd = fd_new current_form = self.__get_saved_form( fd ) + msg = "The form '%s' has been updated with the changes." 
% fd.name return self.__show( trans=trans, form=fd, current_form=current_form, msg=msg, messagetype=messagetype, **kwd ) # @@ -193,31 +275,6 @@ return self.__show( trans=trans, form=fd, current_form=current_form, msg=msg, messagetype=messagetype, **kwd ) # - # Save changes - # - elif params.get( 'save_changes_button', False ): - fd_new, msg = self.__save_form( trans, fdc_id=fd.form_definition_current.id, **kwd ) - # if validation error encountered while saving the form, show the - # unsaved form, with the error message - if not fd_new: - current_form = self.__get_form( trans, **kwd ) - return self.__show( trans=trans, form=fd, current_form=current_form, - msg=msg, messagetype='error', **kwd ) - # everything went fine. form saved successfully. Show the saved form - fd = fd_new - current_form = self.__get_saved_form( fd ) - msg = "The form '%s' has been updated with the changes." % fd.name - return self.__show( trans=trans, form=fd, current_form=current_form, - msg=msg, messagetype=messagetype, **kwd ) - # - # Show form read-only - # - elif params.get( 'read_only', False ): - return trans.fill_template( '/admin/forms/show_form_read_only.mako', - form=fd, - msg=msg, - messagetype=messagetype ) - # # Add SelectField option # elif 'Add' in kwd.values(): @@ -234,6 +291,13 @@ current_form = self.__get_form( trans, **kwd ) return self.__show( trans=trans, form=fd, current_form=current_form, msg=msg, messagetype=messagetype, **kwd ) + # + # Show the form for editing + # + else: + current_form = self.__get_saved_form( fd ) + return self.__show( trans=trans, form=fd, current_form=current_form, + msg=msg, messagetype=messagetype, **kwd ) def __add_selectbox_option( self, trans, fd, msg, messagetype, **kwd ): ''' diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/web/controllers/history.py --- a/lib/galaxy/web/controllers/history.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/web/controllers/history.py Tue Nov 17 16:16:26 2009 -0500 @@ -87,7 +87,7 @@ # Grid definition title = "Saved Histories" model_class = model.History - template='/history/grid.mako' + template='/grid_base.mako' default_sort_key = "-create_time" columns = [ NameColumn( "Name", key="name", model_class=model.History, @@ -110,14 +110,14 @@ ) operations = [ - grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ) ), - grids.GridOperation( "Share", condition=( lambda item: not item.deleted ) ), - grids.GridOperation( "Unshare", condition=( lambda item: not item.deleted ) ), - grids.GridOperation( "Rename", condition=( lambda item: not item.deleted ) ), - grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ) ), - grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ), - grids.GridOperation( "Enable import via link", condition=( lambda item: item.deleted ) ), - grids.GridOperation( "Disable import via link", condition=( lambda item: item.deleted ) ) + grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=True ), + grids.GridOperation( "Share", condition=( lambda item: not item.deleted ), async_compatible=False ), + grids.GridOperation( "Unshare", condition=( lambda item: not item.deleted ), async_compatible=False ), + grids.GridOperation( "Rename", condition=( lambda item: not item.deleted ), async_compatible=False ), + grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ), async_compatible=True ), + grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ), 
async_compatible=True ), + grids.GridOperation( "Enable import via link", condition=( lambda item: item.deleted ), async_compatible=True ), + grids.GridOperation( "Disable import via link", condition=( lambda item: item.deleted ), async_compatible=True ) ] standard_filters = [ grids.GridColumnFilter( "Active", args=dict( deleted=False ) ), @@ -262,7 +262,7 @@ n_deleted += 1 status = SUCCESS if n_deleted: - message_parts.append( "Deleted %d histories. " % n_deleted ) + message_parts.append( "Deleted %d %s. " % ( n_deleted, iff( n_deleted != 1, "histories", "history" ) ) ) if deleted_current: message_parts.append( "Your active history was deleted, a new empty history is now active. " ) status = INFO @@ -290,7 +290,7 @@ status = SUCCESS message_parts = [] if n_undeleted: - message_parts.append( "Undeleted %d histories." % n_undeleted ) + message_parts.append( "Undeleted %d %s. " % ( n_undeleted, iff( n_undeleted != 1, "histories", "history" ) ) ) if n_already_purged: message_parts.append( "%d histories have already been purged and cannot be undeleted." % n_already_purged ) status = WARNING @@ -438,23 +438,20 @@ Warning! If you import this history, you will lose your current history. Click <a href="%s">here</a> to confirm. """ % web.url_for( id=id, confirm=True ) ) - @web.expose def view( self, trans, id=None ): """View a history. If a history is importable, then it is viewable by any user.""" - # Get history to view. if not id: return trans.show_error_message( "You must specify a history you want to view." ) history_to_view = get_history( trans, id, False) - # Integrity checks. if not history_to_view: - return trans.show_error_message( "The specified history does not exist.") + return trans.show_error_message( "The specified history does not exist." ) + # Admin users can view any history # TODO: Use a new flag to determine if history is viewable? - if not history_to_view.importable: - error( "The owner of this history has not published this history." ) - + if not trans.user_is_admin and not history_to_view.importable: + error( "Either you are not allowed to view this history or the owner of this history has not published it." ) # View history. 
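[Annotation] The pluralized delete/undelete messages above lean on the iff() helper imported from galaxy.web.framework.helpers. Its likely definition is nothing more than a conditional-expression shim (presumably kept because Galaxy at the time still supported Python 2.4, which predates the "b if a else c" syntax added in 2.5); a sketch:

    def iff( a, b, c ):
        # return b when a is true, otherwise c
        if a:
            return b
        return c

    assert iff( 2 != 1, "histories", "history" ) == "histories"
    assert iff( 1 != 1, "histories", "history" ) == "history"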
query = trans.sa_session.query( model.HistoryDatasetAssociation ) \ .filter( model.HistoryDatasetAssociation.history == history_to_view ) \ @@ -469,7 +466,6 @@ datasets = query.all(), user_owns_history = user_owns_history, show_deleted = False ) - @web.expose @web.require_login( "share histories with other users" ) def share( self, trans, id=None, email="", **kwd ): diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/web/controllers/library_common.py --- a/lib/galaxy/web/controllers/library_common.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/web/controllers/library_common.py Tue Nov 17 16:16:26 2009 -0500 @@ -117,7 +117,7 @@ uploaded_dataset.link_data_only = True uploaded_dataset.data.file_name = os.path.abspath( path ) trans.sa_session.add( uploaded_dataset.data ) - trans.sa_session.data.flush() + trans.sa_session.flush() return uploaded_dataset def get_server_dir_uploaded_datasets( self, trans, params, full_dir, import_dir_desc, library_bunch, err_redirect, msg ): files = [] diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/web/controllers/page.py --- a/lib/galaxy/web/controllers/page.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/web/controllers/page.py Tue Nov 17 16:16:26 2009 -0500 @@ -1,6 +1,7 @@ from galaxy.web.base.controller import * from galaxy.web.framework.helpers import time_ago, grids from galaxy.util.sanitize_html import sanitize_html +from galaxy.util.odict import odict import re @@ -69,21 +70,66 @@ ] def apply_default_filter( self, trans, query, **kwargs ): return query.filter_by( deleted=False, published=True ) - - -class NameColumn( grids.TextColumn ): - def get_value(self, trans, grid, history): - return history.get_display_name() class HistorySelectionGrid( grids.Grid ): + # Custom columns. + class NameColumn( grids.TextColumn ): + def get_value(self, trans, grid, history): + return history.get_display_name() + + class DeletedColumn( grids.GridColumn ): + def get_accepted_filters( self ): + """ Returns a list of accepted filters for this column. """ + accepted_filter_labels_and_vals = { "active" : "False", "deleted" : "True", "all": "All" } + accepted_filters = [] + for label, val in accepted_filter_labels_and_vals.items(): + args = { self.key: val } + accepted_filters.append( grids.GridColumnFilter( label, args) ) + return accepted_filters + + class SharingColumn( grids.GridColumn ): + def filter( self, db_session, query, column_filter ): + """ Modify query to filter histories by sharing status. """ + if column_filter == "All": + pass + elif column_filter: + if column_filter == "private": + query = query.filter( model.History.users_shared_with == None ) + query = query.filter( model.History.importable == False ) + elif column_filter == "shared": + query = query.filter( model.History.users_shared_with != None ) + elif column_filter == "importable": + query = query.filter( model.History.importable == True ) + return query + def get_accepted_filters( self ): + """ Returns a list of accepted filters for this column. """ + accepted_filter_labels_and_vals = odict() + accepted_filter_labels_and_vals["private"] = "private" + accepted_filter_labels_and_vals["shared"] = "shared" + accepted_filter_labels_and_vals["importable"] = "importable" + accepted_filter_labels_and_vals["all"] = "All" + accepted_filters = [] + for label, val in accepted_filter_labels_and_vals.items(): + args = { self.key: val } + accepted_filters.append( grids.GridColumnFilter( label, args) ) + return accepted_filters + # Grid definition. 
title = "Saved Histories" + template = "grid_base_async.mako" + async_template = "grid_body_async.mako" model_class = model.History + default_filter = { "deleted" : "False" , "shared" : "All" } default_sort_key = "-update_time" + use_paging = True + num_rows_per_page = 5 columns = [ NameColumn( "Name", key="name", model_class=model.History, filterable="advanced" ), grids.TagsColumn( "Tags", "tags", model.History, model.HistoryTagAssociation, filterable="advanced"), grids.GridColumn( "Last Updated", key="update_time", format=time_ago ), + # Columns that are valid for filtering but are not visible. + DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" ), + SharingColumn( "Shared", key="shared", visible=False, filterable="advanced" ), ] columns.append( grids.MulticolFilterColumn( @@ -91,6 +137,8 @@ cols_to_filter=[ columns[0], columns[1] ], key="free-text-search", visible=False, filterable="standard" ) ) + def apply_default_filter( self, trans, query, **kwargs ): + return query.filter_by( user=trans.user, purged=False ) class PageController( BaseController ): @@ -268,4 +316,4 @@ @web.require_login("select a history from saved histories") def list_histories_for_selection( self, trans, **kwargs ): # Render the list view - return self._history_selection_grid( trans, status=status, message=message, **kwargs ) \ No newline at end of file + return self._history_selection_grid( trans, **kwargs ) \ No newline at end of file diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/web/controllers/requests.py --- a/lib/galaxy/web/controllers/requests.py Tue Nov 17 16:14:54 2009 -0500 +++ b/lib/galaxy/web/controllers/requests.py Tue Nov 17 16:16:26 2009 -0500 @@ -12,57 +12,109 @@ log = logging.getLogger( __name__ ) -class RequestsListGrid( grids.Grid ): +class RequestsGrid( grids.Grid ): + # Custom column types + class NameColumn( grids.TextColumn ): + def get_value(self, trans, grid, request): + return request.name + class DescriptionColumn( grids.TextColumn ): + def get_value(self, trans, grid, request): + return request.desc + class SamplesColumn( grids.GridColumn ): + def get_value(self, trans, grid, request): + return str(len(request.samples)) + class TypeColumn( grids.TextColumn ): + def get_value(self, trans, grid, request): + return request.type.name + class LastUpdateColumn( grids.TextColumn ): + def get_value(self, trans, grid, request): + return request.update_time + class StateColumn( grids.GridColumn ): + def filter( self, db_session, query, column_filter ): + """ Modify query to filter request by state. """ + if column_filter == "All": + return query + if column_filter: + query = query.filter( model.Request.state == column_filter ) + return query + def get_accepted_filters( self ): + """ Returns a list of accepted filters for this column. """ + accepted_filter_labels_and_vals = [ model.Request.states.UNSUBMITTED, + model.Request.states.SUBMITTED, + model.Request.states.COMPLETE, + "All"] + accepted_filters = [] + for val in accepted_filter_labels_and_vals: + label = val.lower() + args = { self.key: val } + accepted_filters.append( grids.GridColumnFilter( label, args) ) + return accepted_filters + class DeletedColumn( grids.GridColumn ): + def get_accepted_filters( self ): + """ Returns a list of accepted filters for this column. 
""" + accepted_filter_labels_and_vals = { "active" : "False", "deleted" : "True", "all": "All" } + accepted_filters = [] + for label, val in accepted_filter_labels_and_vals.items(): + args = { self.key: val } + accepted_filters.append( grids.GridColumnFilter( label, args) ) + return accepted_filters + # Grid definition title = "Sequencing Requests" - template = '/requests/grid.mako' + template = 'requests/grid.mako' model_class = model.Request default_sort_key = "-create_time" - show_filter = model.Request.states.UNSUBMITTED + num_rows_per_page = 50 + preserve_state = True + use_paging = True + default_filter = dict( deleted="False", state=model.Request.states.UNSUBMITTED) columns = [ - grids.GridColumn( "Name", key="name", - link=( lambda item: iff( item.deleted, None, dict( operation="show_request", id=item.id ) ) ), - attach_popup=True ), - grids.GridColumn( "Description", key='desc'), - grids.GridColumn( "Sample(s)", method='number_of_samples', - link=( lambda item: iff( item.deleted, None, dict( operation="show_request", id=item.id ) ) ), ), - grids.GridColumn( "Type", key="request_type_id", method='get_request_type'), - grids.GridColumn( "Last update", key="update_time", format=time_ago ), - grids.GridColumn( "State", key='state'), + NameColumn( "Name", + key="name", + model_class=model.Request, + link=( lambda item: iff( item.deleted, None, dict( operation="show_request", id=item.id ) ) ), + attach_popup=True, + filterable="advanced" ), + DescriptionColumn( "Description", + key='desc', + model_class=model.Request, + filterable="advanced" ), + SamplesColumn( "Sample(s)", + link=( lambda item: iff( item.deleted, None, dict( operation="show_request", id=item.id ) ) ), ), + TypeColumn( "Type" ), + LastUpdateColumn( "Last update", + format=time_ago ), + StateColumn( "State", + key='state', + filterable="advanced"), + DeletedColumn( "Deleted", + key="deleted", + visible=True, + filterable="advanced" ) ] + columns.append( grids.MulticolFilterColumn( "Search", + cols_to_filter=[ columns[0], columns[1] ], + key="free-text-search", + visible=False, + filterable="standard" ) ) operations = [ grids.GridOperation( "Submit", allow_multiple=False, condition=( lambda item: not item.deleted and item.unsubmitted() and item.samples ) ), grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: not item.deleted and item.unsubmitted() ) ), - grids.GridOperation( "Delete", allow_multiple=False, condition=( lambda item: not item.deleted and item.unsubmitted() ) ), - grids.GridOperation( "Undelete", allow_multiple=False, condition=( lambda item: item.deleted ) ) + grids.GridOperation( "Delete", allow_multiple=True, condition=( lambda item: not item.deleted and item.unsubmitted() ) ), + grids.GridOperation( "Undelete", allow_multiple=True, condition=( lambda item: item.deleted ) ) ] - standard_filters = [ - grids.GridColumnFilter( model.Request.states.UNSUBMITTED, - args=dict( state=model.Request.states.UNSUBMITTED, deleted=False ) ), - grids.GridColumnFilter( model.Request.states.SUBMITTED, - args=dict( state=model.Request.states.SUBMITTED, deleted=False ) ), - grids.GridColumnFilter( model.Request.states.COMPLETE, args=dict( state=model.Request.states.COMPLETE, deleted=False ) ), - grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ), - grids.GridColumnFilter( "All", args={} ) + global_actions = [ + grids.GridAction( "Create new request", dict( controller='requests', + action='new', + select_request_type='True' ) ) ] - #default_filter = dict( deleted=False ) - def 
get_current_item( self, trans ):
-        return None
-    def get_request_type(self, trans, request):
-        return request.type.name
-    def apply_default_filter( self, trans, query, **kwargs ):
-        query = query.filter_by( user=trans.user )
-        if self.default_filter:
-            return query.filter_by( **self.default_filter )
-        else:
-            return query
-    def number_of_samples(self, trans, request):
-        return str(len(request.samples))
-    def get_state(self, trans, request):
-        return request.state
+    def apply_default_filter( self, trans, query, **kwd ):
+        return query.filter_by( user=trans.user )
+    def build_initial_query( self, session ):
+        return session.query( self.model_class )
 
 class Requests( BaseController ):
-    request_grid = RequestsListGrid()
+    request_grid = RequestsGrid()
     @web.expose
     @web.require_login( "create/submit sequencing requests" )
@@ -71,50 +123,43 @@
     @web.expose
     @web.require_login( "create/submit sequencing requests" )
-    def list( self, trans, **kwargs ):
+    def list( self, trans, **kwd ):
         '''
         List all requests made by the current user
         '''
-        status = message = None
-        self.request_grid.default_filter = dict(state=trans.app.model.Request.states.UNSUBMITTED,
-                                                deleted=False)
-        if 'operation' in kwargs:
-            operation = kwargs['operation'].lower()
+
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if not kwd.get( 'id', None ):
+                return trans.response.send_redirect( web.url_for( controller='requests',
+                                                                  action='list',
+                                                                  status='error',
+                                                                  message="Invalid request ID") )
             if operation == "show_request":
-                id = trans.security.decode_id(kwargs['id'])
-                return self.__show_request(trans, id, kwargs.get('add_sample', False))
+                return self.__show_request( trans, **kwd )
             elif operation == "submit":
-                id = trans.security.decode_id(kwargs['id'])
-                return self.__submit_request(trans, id)
+                return self.__submit_request( trans, **kwd )
             elif operation == "delete":
-                id = trans.security.decode_id(kwargs['id'])
-                return self.__delete_request(trans, id)
+                return self.__delete_request( trans, **kwd )
             elif operation == "undelete":
-                id = trans.security.decode_id(kwargs['id'])
-                return self.__undelete_request(trans, id)
+                return self.__undelete_request( trans, **kwd )
             elif operation == "edit":
-                id = trans.security.decode_id(kwargs['id'])
-                return self.__edit_request(trans, id)
-        if 'show_filter' in kwargs.keys():
-            if kwargs['show_filter'] == 'All':
-                self.request_grid.default_filter = {}
-            elif kwargs['show_filter'] == 'Deleted':
-                self.request_grid.default_filter = dict(deleted=True)
-            else:
-                self.request_grid.default_filter = dict(state=kwargs['show_filter'], deleted=False)
-        self.request_grid.show_filter = kwargs.get('show_filter', trans.app.model.Request.states.UNSUBMITTED)
+                return self.__edit_request( trans, **kwd )
         # Render the list view
-        return self.request_grid( trans, **kwargs )
+        return self.request_grid( trans, **kwd )
 
-    def __show_request(self, trans, id, add_sample=False):
+    def __show_request(self, trans, **kwd):
+        params = util.Params( kwd )
+        msg = util.restore_text( params.get( 'msg', '' ) )
+        messagetype = params.get( 'messagetype', 'done' )
+        add_sample = params.get('add_sample', False)
         try:
-            request = trans.sa_session.query( trans.app.model.Request ).get( id )
+            request = trans.sa_session.query( trans.app.model.Request ).get( trans.security.decode_id(kwd['id']) )
         except:
             return trans.response.send_redirect( web.url_for( controller='requests',
                                                               action='list',
                                                               status='error',
-                                                              message="Invalid request ID",
-                                                              **kwd) )
+                                                              message="Invalid request ID" ) )
         current_samples = []
         for s in request.samples:
             current_samples.append([s.name, s.values.content])
@@ -122,10 +167,11 @@
             current_samples.append(['Sample_%i' % (len(current_samples)+1),['' for field in request.type.sample_form.fields]])
         return trans.fill_template( '/requests/show_request.mako',
                                     request=request,
-                                    request_details=self.request_details(trans, id),
+                                    request_details=self.request_details(trans, request.id),
                                     current_samples = current_samples,
                                     sample_copy=self.__copy_sample(current_samples),
-                                    details='hide', edit_mode='False')
+                                    details='hide', edit_mode='False',
+                                    msg=msg, messagetype=messagetype )
     def request_details(self, trans, id):
         '''
         Shows the request details
@@ -685,7 +731,7 @@
                                                               message="Invalid request ID",
                                                               **kwd) )
         if params.get('show', False) == 'True':
-            return self.__edit_request(trans, request.id, **kwd)
+            return self.__edit_request(trans, **kwd)
         elif params.get('save_changes_request_button', False) == 'Save changes' \
             or params.get('edit_samples_button', False) == 'Edit samples':
             request_type = trans.sa_session.query( trans.app.model.RequestType ).get( int( params.select_request_type ) )
@@ -714,11 +760,11 @@
                                                               messagetype='done',
                                                               **new_kwd) )
         elif params.get('refresh', False) == 'true':
-            return self.__edit_request(trans, request.id, **kwd)
+            return self.__edit_request(trans, **kwd)
 
-    def __edit_request(self, trans, id, **kwd):
+    def __edit_request(self, trans, **kwd):
         try:
-            request = trans.sa_session.query( trans.app.model.Request ).get( id )
+            request = trans.sa_session.query( trans.app.model.Request ).get( trans.security.decode_id(kwd['id']) )
         except:
             msg = "Invalid request ID"
             log.warn( msg )
@@ -758,59 +804,61 @@
                                    msg=msg, messagetype=messagetype)
         return self.__show_request_form(trans)
-    def __delete_request(self, trans, id):
-        try:
-            request = trans.sa_session.query( trans.app.model.Request ).get( id )
-        except:
-            msg = "Invalid request ID"
-            log.warn( msg )
-            return trans.response.send_redirect( web.url_for( controller='requests',
-                                                              action='list',
-                                                              status='error',
-                                                              message=msg,
-                                                              **kwd) )
-        # change request's submitted field
-        if not request.unsubmitted():
-            return trans.response.send_redirect( web.url_for( controller='requests',
-                                                              action='list',
-                                                              status='error',
-                                                              message='This request cannot be deleted as it is already been submitted',
-                                                              **kwd) )
-        request.deleted = True
-        trans.sa_session.add( request )
-        trans.sa_session.flush()
-        kwd = {}
-        kwd['id'] = trans.security.encode_id(request.id)
+    def __delete_request(self, trans, **kwd):
+        id_list = util.listify( kwd['id'] )
+        delete_failed = []
+        for id in id_list:
+            try:
+                request = trans.sa_session.query( trans.app.model.Request ).get( trans.security.decode_id(id) )
+            except:
+                msg = "Invalid request ID"
+                log.warn( msg )
+                return trans.response.send_redirect( web.url_for( controller='requests',
+                                                                  action='list',
+                                                                  status='error',
+                                                                  message=msg,
+                                                                  **kwd) )
+            # a request cannot be deleted once it has been submitted
+            if not request.unsubmitted():
+                delete_failed.append(request.name)
+            else:
+                request.deleted = True
+                trans.sa_session.add( request )
+                trans.sa_session.flush()
+        if not len(delete_failed):
+            msg = '%i request(s) have been deleted.' % len(id_list)
+            status = 'done'
+        else:
+            msg = '%i request(s) have been deleted. %i request(s) %s could not be deleted as they have already been submitted.' % (len(id_list)-len(delete_failed),
+                                                                                                                                   len(delete_failed), str(delete_failed))
+            status = 'warning'
+        return trans.response.send_redirect( web.url_for( controller='requests',
+                                                          action='list',
+                                                          status=status,
+                                                          message=msg) )
+    def __undelete_request(self, trans, **kwd):
+        id_list = util.listify( kwd['id'] )
+        for id in id_list:
+            try:
+                request = trans.sa_session.query( trans.app.model.Request ).get( trans.security.decode_id(id) )
+            except:
+                msg = "Invalid request ID"
+                log.warn( msg )
+                return trans.response.send_redirect( web.url_for( controller='requests',
+                                                                  action='list',
+                                                                  status='error',
+                                                                  message=msg,
+                                                                  **kwd) )
+            request.deleted = False
+            trans.sa_session.add( request )
+            trans.sa_session.flush()
         return trans.response.send_redirect( web.url_for( controller='requests',
                                                           action='list',
                                                           status='done',
-                                                          message='The request <b>%s</b> has been deleted.' % request.name,
-                                                          **kwd) )
-    def __undelete_request(self, trans, id):
+                                                          message='%i request(s) have been undeleted.' % len(id_list) ) )
+    def __submit_request(self, trans, **kwd):
         try:
-            request = trans.sa_session.query( trans.app.model.Request ).get( id )
-        except:
-            msg = "Invalid request ID"
-            log.warn( msg )
-            return trans.response.send_redirect( web.url_for( controller='requests',
-                                                              action='list',
-                                                              status='error',
-                                                              message=msg,
-                                                              **kwd) )
-        # change request's submitted field
-        request.deleted = False
-        trans.sa_session.add( request )
-        trans.sa_session.flush()
-        kwd = {}
-        kwd['id'] = trans.security.encode_id(request.id)
-        return trans.response.send_redirect( web.url_for( controller='requests',
-                                                          action='list',
-                                                          status='done',
-                                                          message='The request <b>%s</b> has been undeleted.' % request.name,
-                                                          **kwd) )
-    def __submit_request(self, trans, id):
-        try:
-            request = trans.sa_session.query( trans.app.model.Request ).get( id )
+            request = trans.sa_session.query( trans.app.model.Request ).get( trans.security.decode_id(kwd['id']) )
         except:
             msg = "Invalid request ID"
             log.warn( msg )
@@ -837,14 +885,12 @@
         request.state = request.states.SUBMITTED
         trans.sa_session.add( request )
         trans.sa_session.flush()
-        kwd = {}
-        kwd['id'] = trans.security.encode_id(request.id)
-        kwd['status'] = 'done'
-        kwd['message'] = 'The request <b>%s</b> has been submitted.' % request.name
         return trans.response.send_redirect( web.url_for( controller='requests',
                                                           action='list',
-                                                          show_filter=trans.app.model.Request.states.SUBMITTED,
-                                                          **kwd) )
+                                                          id=trans.security.encode_id(request.id),
+                                                          status='done',
+                                                          message='The request <b>%s</b> has been submitted.' % request.name
+                                                          ) )
     @web.expose
     @web.require_login( "create/submit sequencing requests" )
    def show_events(self, trans, **kwd):
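
The rewritten __delete_request and __undelete_request above take **kwd instead of a single decoded id because the grid's Delete operation now sets allow_multiple=True, so the posted id parameter may be one encoded id or a list of them. A minimal, self-contained sketch of the normalization that util.listify is relied on to perform (hedged: the real helper lives in galaxy.util and may handle additional input shapes):

    def listify(value):
        # Normalize grid input: None -> [], a list -> unchanged,
        # a comma-separated string -> split, anything else -> [value].
        if not value:
            return []
        if isinstance(value, list):
            return value
        if isinstance(value, str):
            return value.split(",")
        return [value]

    assert listify(None) == []
    assert listify("9f3a") == ["9f3a"]
    assert listify("9f3a,77bc") == ["9f3a", "77bc"]
    assert listify(["9f3a", "77bc"]) == ["9f3a", "77bc"]

Normalizing once at the top of the handler keeps the per-request loop identical for the single-selection and multi-selection cases.
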
diff -r 555afd0bf457 -r 37406d8ad116 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py	Tue Nov 17 16:14:54 2009 -0500
+++ b/lib/galaxy/web/controllers/requests_admin.py	Tue Nov 17 16:16:26 2009 -0500
@@ -15,63 +15,187 @@
 # ---- Request Grid ------------------------------------------------------------
 #
-class RequestsListGrid( grids.Grid ):
+class RequestsGrid( grids.Grid ):
+    # Custom column types
+    class NameColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request):
+            return request.name
+    class DescriptionColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request):
+            return request.desc
+    class SamplesColumn( grids.GridColumn ):
+        def get_value(self, trans, grid, request):
+            return str(len(request.samples))
+    class TypeColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request):
+            return request.type.name
+    class LastUpdateColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request):
+            return request.update_time
+    class StateColumn( grids.GridColumn ):
+        def filter( self, db_session, query, column_filter ):
+            """ Modify query to filter request by state. """
+            if column_filter == "All":
+                return query
+            if column_filter:
+                query = query.filter( model.Request.state == column_filter )
+            return query
+        def get_accepted_filters( self ):
+            """ Returns a list of accepted filters for this column. """
+            accepted_filter_labels_and_vals = [ model.Request.states.UNSUBMITTED,
+                                                model.Request.states.SUBMITTED,
+                                                model.Request.states.COMPLETE,
+                                                "All"]
+            accepted_filters = []
+            for val in accepted_filter_labels_and_vals:
+                label = val.lower()
+                args = { self.key: val }
+                accepted_filters.append( grids.GridColumnFilter( label, args) )
+            return accepted_filters
+    class UserColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request):
+            return request.user.email
+    class DeletedColumn( grids.GridColumn ):
+        def get_accepted_filters( self ):
+            """ Returns a list of accepted filters for this column. """
+            accepted_filter_labels_and_vals = { "active" : "False", "deleted" : "True", "all": "All" }
+            accepted_filters = []
+            for label, val in accepted_filter_labels_and_vals.items():
+                args = { self.key: val }
+                accepted_filters.append( grids.GridColumnFilter( label, args) )
+            return accepted_filters
+    # Grid definition
     title = "Sequencing Requests"
     template = "admin/requests/grid.mako"
     model_class = model.Request
     default_sort_key = "-create_time"
-    show_filter = model.Request.states.SUBMITTED
+    num_rows_per_page = 50
+    preserve_state = True
+    use_paging = True
+    default_filter = dict( deleted="False", state=model.Request.states.SUBMITTED)
     columns = [
-        grids.GridColumn( "Name", key="name",
-                          link=( lambda item: iff( item.deleted, None, dict( operation="show_request", id=item.id ) ) ),
-                          attach_popup=True ),
-        grids.GridColumn( "Description", key="desc"),
-        grids.GridColumn( "Sample(s)", method='number_of_samples',
-                          link=( lambda item: iff( item.deleted, None, dict( operation="show_request", id=item.id ) ) ), ),
-        grids.GridColumn( "Type", key="request_type_id", method='get_request_type'),
-        grids.GridColumn( "Last update", key="update_time", format=time_ago ),
-        grids.GridColumn( "State", key='state'),
-        grids.GridColumn( "User", key="user_id", method='get_user')
-
+        NameColumn( "Name",
+                    key="name",
+                    model_class=model.Request,
+                    link=( lambda item: iff( item.deleted, None, dict( operation="show_request", id=item.id ) ) ),
+                    attach_popup=True,
+                    filterable="advanced" ),
+        DescriptionColumn( "Description",
+                           key='desc',
+                           model_class=model.Request,
+                           filterable="advanced" ),
+        SamplesColumn( "Sample(s)",
+                       link=( lambda item: iff( item.deleted, None, dict( operation="show_request", id=item.id ) ) ), ),
+        TypeColumn( "Type" ),
+        LastUpdateColumn( "Last update",
+                          format=time_ago ),
+        StateColumn( "State",
+                     key='state',
+                     filterable="advanced"),
+        UserColumn( "User",
+                    key='user.email',
+                    model_class=model.Request,
+                    filterable="advanced" ),
+        DeletedColumn( "Deleted",
+                       key="deleted",
+                       visible=True,
+                       filterable="advanced" )
     ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[1], columns[6] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
     operations = [
         grids.GridOperation( "Submit", allow_multiple=False, condition=( lambda item: not item.deleted and item.unsubmitted() and item.samples ) ),
         grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
         grids.GridOperation( "Reject", allow_multiple=False, condition=( lambda item: not item.deleted and item.submitted() ) ),
-        grids.GridOperation( "Delete", allow_multiple=False, condition=( lambda item: not item.deleted and item.unsubmitted() ) ),
+        grids.GridOperation( "Delete", allow_multiple=True, condition=( lambda item: not item.deleted and item.unsubmitted() ) ),
         grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ),
     ]
-    standard_filters = [
-        grids.GridColumnFilter( model.Request.states.UNSUBMITTED,
-                                args=dict( state=model.Request.states.UNSUBMITTED, deleted=False ) ),
-        grids.GridColumnFilter( model.Request.states.SUBMITTED,
-                                args=dict( state=model.Request.states.SUBMITTED, deleted=False ) ),
-        grids.GridColumnFilter( model.Request.states.COMPLETE, args=dict( state=model.Request.states.COMPLETE, deleted=False ) ),
-        grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
-        grids.GridColumnFilter( "All", args=dict( deleted=False ) )
+    global_actions = [
+        grids.GridAction( "Create new request", dict( controller='requests_admin',
+                                                      action='new',
+                                                      select_request_type='True' ) )
     ]
-    def get_user(self, trans, request):
-        return trans.sa_session.query( trans.app.model.User ).get( request.user_id ).email
-    def get_current_item( self, trans ):
-        return None
-    def get_request_type(self, trans, request):
-        request_type = trans.sa_session.query( trans.app.model.RequestType ).get( request.request_type_id )
-        return request_type.name
-    def number_of_samples(self, trans, request):
-        return str(len(request.samples))
-    def apply_default_filter( self, trans, query, **kwargs ):
-        if self.default_filter:
-            return query.filter_by( **self.default_filter )
-        else:
-            return query
-
+
+#
+# ---- Request Type Grid -------------------------------------------------------
+#
+class RequestTypeGrid( grids.Grid ):
+    # Custom column types
+    class NameColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request_type):
+            return request_type.name
+    class DescriptionColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request_type):
+            return request_type.desc
+    class RequestFormColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request_type):
+            return request_type.request_form.name
+    class SampleFormColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request_type):
+            return request_type.sample_form.name
+    class DeletedColumn( grids.GridColumn ):
+        def get_accepted_filters( self ):
+            """ Returns a list of accepted filters for this column. """
+            accepted_filter_labels_and_vals = { "active" : "False", "deleted" : "True", "all": "All" }
+            accepted_filters = []
+            for label, val in accepted_filter_labels_and_vals.items():
+                args = { self.key: val }
+                accepted_filters.append( grids.GridColumnFilter( label, args) )
+            return accepted_filters
+    # Grid definition
+    title = "Request Types"
+    template = "admin/requests/manage_request_types.mako"
+    model_class = model.RequestType
+    default_sort_key = "-create_time"
+    num_rows_per_page = 50
+    preserve_state = True
+    use_paging = True
+    default_filter = dict( deleted="False" )
+    columns = [
+        NameColumn( "Name",
+                    key="name",
+                    model_class=model.RequestType,
+                    link=( lambda item: iff( item.deleted, None, dict( operation="view", id=item.id ) ) ),
+                    attach_popup=True,
+                    filterable="advanced" ),
+        DescriptionColumn( "Description",
+                           key='desc',
+                           model_class=model.RequestType,
+                           filterable="advanced" ),
+        RequestFormColumn( "Request Form",
+                           link=( lambda item: iff( item.deleted, None, dict( operation="view_form", id=item.request_form.id ) ) ), ),
+        SampleFormColumn( "Sample Form",
+                          link=( lambda item: iff( item.deleted, None, dict( operation="view_form", id=item.sample_form.id ) ) ), ),
+        DeletedColumn( "Deleted",
+                       key="deleted",
+                       visible=False,
+                       filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [
+        #grids.GridOperation( "Update", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
+        grids.GridOperation( "Delete", allow_multiple=True, condition=( lambda item: not item.deleted ) ),
+        grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ),
+    ]
+    global_actions = [
+        grids.GridAction( "Create new request type", dict( controller='requests_admin',
+                                                           action='create_request_type' ) )
+    ]
 
 #
 # ---- Request Controller ------------------------------------------------------
 #
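
The custom column classes above (StateColumn, DeletedColumn) implement the two hooks a filterable grid column needs: get_accepted_filters(), which advertises label/argument pairs for the UI, and filter(), which narrows the SQLAlchemy query when a value is chosen. A rough, self-contained sketch of that contract follows; the Request stand-in, state strings, and helper names are illustrative assumptions, not Galaxy's actual API:

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Request(Base):
        # Minimal stand-in for model.Request with just the filtered columns.
        __tablename__ = "request"
        id = Column(Integer, primary_key=True)
        state = Column(String)
        deleted = Column(String)  # the grids above use the strings "True"/"False"

    class GridColumnFilter:
        # Stand-in for grids.GridColumnFilter: a UI label plus the filter args.
        def __init__(self, label, args):
            self.label, self.args = label, args

    def state_filters(key="state"):
        # Mirrors StateColumn.get_accepted_filters(): one filter per state, plus "All".
        return [GridColumnFilter(val.lower(), {key: val})
                for val in ("Unsubmitted", "Submitted", "Complete", "All")]

    def apply_state_filter(query, column_filter):
        # Mirrors StateColumn.filter(): "All" leaves the query untouched,
        # any other value becomes an equality filter on the state column.
        if column_filter in (None, "All"):
            return query
        return query.filter(Request.state == column_filter)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add_all([Request(state="Submitted", deleted="False"),
                         Request(state="Complete", deleted="False")])
        session.commit()
        print(apply_state_filter(session.query(Request), "Submitted").count())  # 1

The same pattern explains default_filter = dict( deleted="False" ) in both grids: the deleted flag is matched against the string values "False"/"True" supplied by DeletedColumn's accepted filters, with "All" again meaning no restriction.
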