galaxy-commits
Threads by month
- ----- 2025 -----
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions
galaxy-dist commit 3ea6ee14a8b7: Remove debugging statement from previous commit.
by commits-noreply@bitbucket.org 20 Nov '10
by commits-noreply@bitbucket.org 20 Nov '10
20 Nov '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User jeremy goecks <jeremy.goecks(a)emory.edu>
# Date 1289243761 18000
# Node ID 3ea6ee14a8b71cd7ac48da98a6ecdb50b9067040
# Parent c61ec9e333d5a0ade7a23b3f4b55ae3deb3aa0bb
Remove debugging statement from previous commit.
--- a/lib/galaxy/tools/util/gff_util.py
+++ b/lib/galaxy/tools/util/gff_util.py
@@ -53,7 +53,6 @@ def parse_gff_attributes( attr_str ):
pair = name_value_pair.strip().split(" ")
if len( pair ) == 1:
pair = name_value_pair.strip().split("=")
- print pair
if pair == '':
continue
name = pair[0].strip()
1
0
galaxy-dist commit 7ca26b1cc9b6: Added a sample loc file listing all fasta files and a script to generate the file given a base genome directory.
by commits-noreply@bitbucket.org 20 Nov '10
by commits-noreply@bitbucket.org 20 Nov '10
20 Nov '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Kelly Vincent <kpvincent(a)bx.psu.edu>
# Date 1289194982 18000
# Node ID 7ca26b1cc9b6fd67364a6c6af6005ceecaa3f45b
# Parent 535d276c92bcc35e563c4ce5a2351f562f451132
Added a sample loc file listing all fasta files and a script to generate the file given a base genome directory.
--- /dev/null
+++ b/tool-data/all_fasta.loc.sample
@@ -0,0 +1,18 @@
+#This file lists the locations and dbkeys of all the fasta files
+#under the "genome" directory (a directory that contains a directory
+#for each build). The script extract_fasta.py will generate the file
+#all_fasta.loc. This file has the format (white space characters are
+#TAB characters):
+#
+#<unique_build_id><dbkey><display_name><file_path>
+#
+#So, all_fasta.loc could look something like this:
+#
+#apiMel3 apiMel3 Honeybee (Apis mellifera): apiMel3 /path/to/genome/apiMel3/apiMel3.fa
+#hg19canon hg19 Human (Homo sapiens): hg19 Canonical /path/to/genome/hg19/hg19canon.fa
+#hg19full hg19 Human (Homo sapiens): hg19 Full /path/to/genome/hg19/hg19full.fa
+#
+#Your all_fasta.loc file should contain an entry for each individual
+#fasta file. So there will be multiple fasta files for each build,
+#such as with hg19 above.
+#
--- /dev/null
+++ b/scripts/loc_files/create_all_fasta_loc.py
@@ -0,0 +1,300 @@
+import optparse, os, sys
+import elementtree.ElementTree as etree
+
+"""
+Generates a loc file containing names of all the fasta files that match the
+name of the genome subdirectory they're in.
+Assumptions:
+ - fasta files should be named the same as the genome subdirectory they're
+ in, with the possible addition of a recognized variant (canon, full, etc.)
+ - for "variants" (like full, canon[ical], chrM, etc.) the naming needs to be
+ consistent and specific:
+ - <genome_name><variant>, like hg19canon, hg19full, or hg19chrM
+Normal usage:
+create_all_fasta_loc.py -f unmatching_fasta.txt -i seq
+
+usage: %prog [options]
+ -d, --data-table-xml=d: The name of the data table configuration file to get format of loc file
+ -t, --data-table=t: The name of the data table listed in the data table XML file
+ -g, --genome-dir=g: Genome directory to look in
+ -e, --exemptions=e: Comma-separated list of genome dir subdirectories to not look in
+ -i, --inspect-dirs=i: Comma-separated list of subdirectories inside genome dirs to look in (default is all)
+ -x, --fasta-exts=x: Comma-separated list of all fasta extensions to list
+ -s, --loc-sample=s: The name of the sample loc file (to copy text into top of output loc file)
+ -f, --unmatching-fasta=f: Name of file to output non-matching fasta files to, if included
+ -v, --variants=v: Comma-separated list of recognized variants of fasta file names
+ -a, --append=a: Append to existing all_fasta.loc file rather than create new
+ -p, --sample-text=p: Copy over text from all_fasta.loc.sample file (false if set to append)
+"""
+
+DEFAULT_TOOL_DATA_TABLE_CONF = 'tool_data_table_conf.xml'
+DEFAULT_ALL_FASTA_LOC_BASE = 'all_fasta'
+DEFAULT_BASE_GENOME_DIR = '/afs/bx.psu.edu/depot/data/genome'
+EXEMPTIONS = 'bin,tmp,lengths,equCab2_chrM,microbes'
+INSPECT_DIR = None
+FASTA_EXTS = '.fa,.fasta,.fna'
+VARIANTS = 'chrM,chr21,full,canon,female,male,haps,nohaps'
+
+VARIANT_EXCLUSIONS = ':full'
+
+DBKEY_DESCRIPTION_MAP = { 'AaegL1': 'Mosquito (Aedes aegypti): AaegL1',
+ 'AgamP3': 'Mosquito (Anopheles gambiae): AgamP3',
+ 'anoCar1': 'Lizard (Anolis carolinensis): anoCar1',
+ 'anoGam1': 'Mosquito (Anopheles gambiae): anoGam1',
+ 'apiMel1': 'Honeybee (Apis mellifera): apiMel1',
+ 'apiMel2': 'Honeybee (Apis mellifera): apiMel2',
+ 'apiMel3': 'Honeybee (Apis mellifera): apiMel3',
+ 'Arabidopsis_thaliana_TAIR9': '',
+ 'borEut13': 'Boreoeutherian: borEut13',
+ 'bosTau2': 'Cow (Bos taurus): bosTau2',
+ 'bosTau3': 'Cow (Bos taurus): bosTau3',
+ 'bosTau4': 'Cow (Bos taurus): bosTau4',
+ 'bosTauMd3': 'Cow (Bos taurus): bosTauMd3',
+ 'calJac1': 'Marmoset (Callithrix jacchus): calJac1',
+ 'canFam1': 'Dog (Canis lupus familiaris): canFam1',
+ 'canFam2': 'Dog (Canis lupus familiaris): canFam2',
+ 'cavPor3': 'Guinea Pig (Cavia porcellus): cavPor3',
+ 'ce2': 'Caenorhabditis elegans: ce2',
+ 'ce4': 'Caenorhabditis elegans: ce4',
+ 'ce5': 'Caenorhabditis elegans: ce5',
+ 'ce6': 'Caenorhabditis elegans: ce6',
+ 'CpipJ1': 'Mosquito (Culex quinquefasciatus): CpipJ1',
+ 'danRer2': 'Zebrafish (Danio rerio): danRer2',
+ 'danRer3': 'Zebrafish (Danio rerio): danRer3',
+ 'danRer4': 'Zebrafish (Danio rerio): danRer4',
+ 'danRer5': 'Zebrafish (Danio rerio): danRer5',
+ 'danRer6': 'Zebrafish (Danio rerio): danRer6',
+ 'dm1': 'Fruit Fly (Drosophila melanogaster): dm1',
+ 'dm2': 'Fruit Fly (Drosophila melanogaster): dm2',
+ 'dm3': 'Fruit Fly (Drosophila melanogaster): dm3',
+ 'dm4': 'Fruit Fly (Drosophila melanogaster): dm',
+ 'dp3': 'Fruit Fly (Drosophila pseudoobscura): dp3',
+ 'dp4': 'Fruit Fly (Drosophila pseudoobscura): dp4',
+ 'droAna1': 'Fruit Fly (Drosophila ananassae): droAna1',
+ 'droAna2': 'Fruit Fly (Drosophila ananassae): droAna2',
+ 'droAna3': 'Fruit Fly (Drosophila ananassae): droAna3',
+ 'droEre1': 'Fruit Fly (Drosophila erecta): droEre1',
+ 'droEre2': 'Fruit Fly (Drosophila erecta): droEre2',
+ 'droGri1': 'Fruit Fly (Drosophila grimshawi): droGri1',
+ 'droGri2': 'Fruit Fly (Drosophila grimshawi): droGri2',
+ 'droMoj1': 'Fruit Fly (Drosophila mojavensis): droMoj1',
+ 'droMoj2': 'Fruit Fly (Drosophila mojavensis): droMoj2',
+ 'droMoj3': 'Fruit Fly (Drosophila mojavensis): droMoj3',
+ 'droPer1': 'Fruit Fly (Drosophila persimilis): droPer1',
+ 'droSec1': 'Fruit Fly (Drosophila sechellia): droSec1',
+ 'droSim1': 'Fruit Fly (Drosophila simulans): droSim1',
+ 'droVir1': 'Fruit Fly (Drosophila virilis): droVir1',
+ 'droVir2': 'Fruit Fly (Drosophila virilis): droVir2',
+ 'droVir3': 'Fruit Fly (Drosophila virilis): droVir3',
+ 'droYak1': 'Fruit Fly (Drosophila yakuba): droYak1',
+ 'droYak2': 'Fruit Fly (Drosophila yakuba): droYak2',
+ 'echTel1': 'Tenrec (Echinops telfairi): echTel1',
+ 'equCab1': 'Horse (Equus caballus): equCab1',
+ 'equCab2': 'Horse (Equus caballus): equCab2',
+ 'eriEur1': 'Hedgehog (Erinaceus europaeus): eriEur1',
+ 'felCat3': 'Cat (Felis catus): felCat3',
+ 'fr1': 'Fugu (Takifugu rubripes): fr1',
+ 'fr2': 'Fugu (Takifugu rubripes): fr2',
+ 'galGal2': 'Chicken (Gallus gallus): galGal2',
+ 'galGal3': 'Chicken (Gallus gallus): galGal3',
+ 'gasAcu1': 'Stickleback (Gasterosteus aculeatus): gasAcu1',
+ 'hg16': 'Human (Homo sapiens): hg16',
+ 'hg17': 'Human (Homo sapiens): hg17',
+ 'hg18': 'Human (Homo sapiens): hg18',
+ 'hg19': 'Human (Homo sapiens): hg19',
+ 'IscaW1': 'Deer Tick (Ixodes scapularis): IscaW1',
+ 'lMaj5': 'Leishmania major: lMaj5',
+ 'mm5': 'Mouse (Mus musculus): mm5',
+ 'mm6': 'Mouse (Mus musculus): mm6',
+ 'mm7': 'Mouse (Mus musculus): mm7',
+ 'mm8': 'Mouse (Mus musculus): mm8',
+ 'mm9': 'Mouse (Mus musculus): mm9',
+ 'monDom4': 'Opossum (Monodelphis domestica): monDom4',
+ 'monDom5': 'Opossum (Monodelphis domestica): monDom5',
+ 'ornAna1': 'Platypus (Ornithorhynchus anatinus): ornAna1',
+ 'oryCun1': 'Rabbit (Oryctolagus cuniculus): oryCun1',
+ 'oryLat1': 'Medaka (Oryzias latipes): oryLat1',
+ 'oryLat2': 'Medaka (Oryzias latipes): oryLat2',
+ 'oryza_sativa_japonica_nipponbare_IRGSP4.0': 'Rice (Oryza sativa L. ssp. japonica var. Nipponbare): IRGSP4.0',
+ 'otoGar1': 'Bushbaby (Otolemur garnetti): otoGar1',
+ 'panTro1': 'Chimpanzee (Pan troglodytes): panTro1',
+ 'panTro2': 'Chimpanzee (Pan troglodytes): panTro2',
+ 'petMar1': 'Lamprey (Petromyzon marinus): petMar1',
+ 'phiX': 'phiX174 (AF176034)',
+ 'PhumU1': 'Head Louse (Pediculus humanus): PhumU1',
+ 'ponAbe2': 'Orangutan (Pongo pygmaeus abelii): ponAbe2',
+ 'pUC18': 'pUC18 (L09136)',
+ 'rheMac2': 'Rhesus Macaque (Macaca mulatta): rheMac2',
+ 'rn3': 'Rat (Rattus norvegicus): rn3',
+ 'rn4': 'Rat (Rattus norvegicus): rn4',
+ 'sacCer1': 'Yeast (Saccharomyces cerevisiae): sacCer1',
+ 'sacCer2': 'Yeast (Saccharomyces cerevisiae): sacCer2',
+ 'sorAra1': 'Common Shrew (Sorex araneus): sorAra1',
+ 'Sscrofa9.58': 'Pig (Sus scrofa): Sscrofa9.58',
+ 'strPur2': 'Purple Sea Urchin (Strongylocentrotus purpuratus): strPur2',
+ 'susScr2': 'Pig (Sus scrofa): susScr2',
+ 'taeGut1': 'Zebra Finch (Taeniopygia guttata): taeGut1',
+ 'tetNig1': 'Tetraodon (Tetraodon nigroviridis): tetNig1',
+ 'tetNig2': 'Tetraodon (Tetraodon nigroviridis): tetNig2',
+ 'tupBel1': 'Tree Shrew (Tupaia belangeri): tupBel1',
+ 'venter1': 'Human (J. Craig Venter): venter1',
+ 'xenTro2': 'Frog (Xenopus tropicalis): xenTro2'
+ }
+
+VARIANT_MAP = { 'canon': 'Canonical',
+ 'full': 'Full',
+ 'female': 'Female',
+ 'male': 'Male'
+ }
+
+# alphabetize ignoring case
+def caseless_compare( a, b ):
+ au = a.upper()
+ bu = b.upper()
+ if au > bu:
+ return 1
+ elif au == bu:
+ return 0
+ elif au < bu:
+ return -1
+
+def __main__():
+ # command line variables
+ parser = optparse.OptionParser()
+ parser.add_option( '-d', '--data-table-xml', dest='data_table_xml', type='string', default=DEFAULT_TOOL_DATA_TABLE_CONF, help='The name of the data table configuration file to get format of loc file' )
+ parser.add_option( '-t', '--data-table', dest='data_table_name', type='string', default=DEFAULT_ALL_FASTA_LOC_BASE, help='The name of the data table listed in the data table XML file' )
+ parser.add_option( '-g', '--genome_dir', dest='genome_dir', type='string', default=DEFAULT_BASE_GENOME_DIR, help='Genome directory to look in' )
+ parser.add_option( '-e', '--exemptions', dest='exemptions', type='string', default=EXEMPTIONS, help='Comma-separated list of subdirectories in genome dir to not look in' )
+ parser.add_option( '-i', '--inspect-dir', dest='inspect_dir', type='string', default=INSPECT_DIR, help='Comma-separated list of subdirectories inside genome dirs to look in (default is all)' )
+ parser.add_option( '-x', '--fasta_exts', dest='fasta_exts', type='string', default=FASTA_EXTS, help='Comma-separated list of all fasta extensions to list' )
+ parser.add_option( '-s', '--loc-sample', dest='loc_sample_name', type='string', help='The name of the sample loc file (to copy text into top of output loc file)' )
+ parser.add_option( '-f', '--unmatching-fasta', dest='unmatching_fasta', type='string', default=None, help='Name of file to output non-matching fasta files to' )
+ parser.add_option( '-v', '--variants', dest='variants', type='string', default=VARIANTS, help='Comma-separated list of recognized variants of fasta file names' )
+ parser.add_option( '-n', '--variant-exclusions', dest='variant_exclusions', type='string', default=VARIANT_EXCLUSIONS, help="List of files to exclude because they're duplicated by a variants; of the format: '<variant_to_keep_1>:<variant_to_remove_1>[,<variant_to_remove_2>[,...]][;<variant_to_keep_2>:<variant_to_remove_1>[,<variant_to_remove_2>[,...]]]'; default ':(full)' (if non-variant version present (like 'hg19'), full version (like 'hg19full') will be thrown out)" )
+ parser.add_option( '-a', '--append', dest='append', action='store_true', default=False, help='Append to existing all_fasta.loc file rather than create new' )
+ parser.add_option( '-p', '--sample-text', dest='sample_text', action='store_true', default='True', help='Copy over text from all_fasta.loc.sample file (false if set to append)' )
+ (options, args) = parser.parse_args()
+
+ exemptions = [ e.strip() for e in options.exemptions.split( ',' ) ]
+ fasta_exts = [ x.strip() for x in options.fasta_exts.split( ',' ) ]
+ variants = [ v.strip() for v in options.variants.split( ',' ) ]
+ variant_exclusions = {}
+ try:
+ for ve in options.variant_exclusions.split( ';' ):
+ v, e = ve.split( ':' )
+ variant_exclusions[ v ] = e.split( ',' )
+ except:
+ sys.stderr.write( 'Problem parsing the variant exclusion parameter (-n/--variant-exclusion). Make sure it follows the expected format\n' )
+ sys.exit( 1 )
+ if options.append:
+ sample_text = False
+ else:
+ sample_text = options.sample_text
+
+ # all paths to look in
+ if options.inspect_dir:
+ paths_to_look_in = [ os.path.join( options.genome_dir, '%s', id ) for id in options.inspect_dir.split( ',' ) ]
+ else:
+ paths_to_look_in = os.path.join( options.genome_dir, '%s' )
+
+ # say what we're looking in
+ print '\nLooking in:\n\t%s' % '\n\t'.join( [ p % '<build_name>' for p in paths_to_look_in ] )
+ poss_names = [ '<build_name>%s' % v for v in variants ]
+ print 'for files that are named %s' % ', '.join( poss_names[:-1] ),
+ if len( poss_names ) > 1:
+ print 'or %s' % poss_names[-1],
+ if len( options.fasta_exts ) == 1:
+ print 'with the extension %s.' % ', '.join( fasta_exts[:-1] )
+ else:
+ print 'with the extension %s or %s.' % ( ', '.join( fasta_exts[:-1] ), fasta_exts[-1] )
+ print '\nSkipping the following:\n\t%s' % '\n\t'.join( exemptions )
+
+ # get column names
+ col_values = []
+ loc_path = None
+ tree = etree.parse( options.data_table_xml )
+ tables = tree.getroot()
+ for table in tables.getiterator():
+ name = table.attrib.get( 'name' )
+ if name == options.data_table_name:
+ cols = None
+ for node in table.getiterator():
+ if node.tag == 'columns':
+ cols = node.text
+ elif node.tag == 'file':
+ loc_path = node.attrib.get( 'path' )
+ if cols:
+ col_values = [ col.strip() for col in cols.split( ',' ) ]
+ if not col_values or not loc_path:
+ stop_err( 'No columns can be found for this data table (%s) in %s' % ( options.data_table, options.data_table_xml ) )
+
+ # get all fasta paths under genome directory
+ fasta_locs = {}
+ unmatching_fasta_paths = []
+ genome_subdirs = [ dr for dr in os.listdir( options.genome_dir ) if dr not in exemptions ]
+ for genome_subdir in genome_subdirs:
+ possible_names = [ genome_subdir ]
+ possible_names.extend( [ '%s%s' % ( genome_subdir, v ) for v in variants ] )
+ # get paths to all fasta files
+ for path_to_look_in in paths_to_look_in:
+ for dirpath, dirnames, filenames in os.walk( path_to_look_in % genome_subdir ):
+ for fn in filenames:
+ ext = os.path.splitext( fn )[-1]
+ fasta_base = os.path.splitext( fn )[0]
+ if ext in fasta_exts:
+ if fasta_base in possible_names:
+ if fasta_base == genome_subdir:
+ name = DBKEY_DESCRIPTION_MAP[ genome_subdir ]
+ else:
+ try:
+ name = '%s %s' % ( DBKEY_DESCRIPTION_MAP[ genome_subdir ], VARIANT_MAP[ fasta_base.replace( genome_subdir, '' ) ] )
+ except KeyError:
+ name = '%s %s' % ( DBKEY_DESCRIPTION_MAP[ genome_subdir ], fasta_base.replace( genome_subdir, '' ) )
+ fasta_locs[ fasta_base ] = { 'value': fasta_base, 'dbkey': genome_subdir, 'name': name, 'path': os.path.join( dirpath, fn ) }
+ else:
+ unmatching_fasta_paths.append( os.path.join( dirpath, fn ) )
+ # remove redundant fasta files
+ if variant_exclusions.keys():
+ for k in variant_exclusions.keys():
+ leave_in = '%s%s' % ( genome_subdir, k )
+ if fasta_locs.has_key( leave_in ):
+ to_remove = [ '%s%s' % ( genome_subdir, k ) for k in variant_exclusions[ k ] ]
+ for tr in to_remove:
+ if fasta_locs.has_key( tr ):
+ del fasta_locs[ tr ]
+
+ # output results
+ print '\nThere were %s fasta files found that were not included because they did not have the expected file names.' % len( unmatching_fasta_paths )
+ print '%s fasta files were found and listed.\n' % len( fasta_locs.keys() )
+
+ # output unmatching fasta files
+ if options.unmatching_fasta and unmatching_fasta_paths:
+ open( options.unmatching_fasta, 'wb' ).write( '%s\n' % '\n'.join( unmatching_fasta_paths ) )
+
+ # output loc file
+ if options.append:
+ all_fasta_loc = open( loc_path, 'ab' )
+ else:
+ all_fasta_loc = open( loc_path, 'wb' )
+ # put sample loc file text at top of file if appropriate
+ if sample_text:
+ if options.loc_sample_name:
+ all_fasta_loc.write( '%s\n' % open( options.loc_sample_name, 'rb' ).read().strip() )
+ else:
+ all_fasta_loc.write( '%s\n' % open( '%s.sample' % loc_path, 'rb' ).read().strip() )
+ # output list of fasta files in alphabetical order
+ fasta_bases = fasta_locs.keys()
+ fasta_bases.sort( caseless_compare )
+ for fb in fasta_bases:
+ out_line = []
+ for col in col_values:
+ try:
+ out_line.append( fasta_locs[ fb ][ col ] )
+ except KeyError:
+ stop_err( 'Unexpected column (%s) encountered' % col )
+ if out_line:
+ all_fasta_loc.write( '%s\n' % '\t'.join( out_line ) )
+ # close up output loc file
+ all_fasta_loc.close()
+
+if __name__=='__main__': __main__()
1
0
galaxy-dist commit d46ccc650084: Eliminate check_user flag from tool menu and workflow methods.
by commits-noreply@bitbucket.org 20 Nov '10
by commits-noreply@bitbucket.org 20 Nov '10
20 Nov '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Greg Von Kuster <greg(a)bx.psu.edu>
# Date 1289046578 14400
# Node ID d46ccc650084224553b3b48c11df64d43d7673ef
# Parent 702f4717a8f3fe38758c54136f81935f7679c937
Eliminate check_user flag from tool menu and workflow methods.
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -1206,14 +1206,13 @@ class WorkflowController( BaseController
## % ( workflow_name, web.url_for( action='editor', id=trans.security.encode_id(stored.id) ) ) )
@web.expose
- def run( self, trans, id, check_user=True, **kwargs ):
+ def run( self, trans, id, **kwargs ):
stored = self.get_stored_workflow( trans, id, check_ownership=False )
- if check_user:
- user = trans.get_user()
- if stored.user != user:
- if trans.sa_session.query( model.StoredWorkflowUserShareAssociation ) \
- .filter_by( user=user, stored_workflow=stored ).count() == 0:
- error( "Workflow is not owned by or shared with current user" )
+ user = trans.get_user()
+ if stored.user != user:
+ if trans.sa_session.query( model.StoredWorkflowUserShareAssociation ) \
+ .filter_by( user=user, stored_workflow=stored ).count() == 0:
+ error( "Workflow is not owned by or shared with current user" )
# Get the latest revision
workflow = stored.latest_workflow
# It is possible for a workflow to have 0 steps
@@ -1343,14 +1342,13 @@ class WorkflowController( BaseController
incoming=kwargs )
@web.expose
- def tag_outputs( self, trans, id, check_user=True, **kwargs ):
+ def tag_outputs( self, trans, id, **kwargs ):
stored = self.get_stored_workflow( trans, id, check_ownership=False )
- if check_user:
- user = trans.get_user()
- if stored.user != user:
- if trans.sa_session.query( model.StoredWorkflowUserShareAssociation ) \
- .filter_by( user=user, stored_workflow=stored ).count() == 0:
- error( "Workflow is not owned by or shared with current user" )
+ user = trans.get_user()
+ if stored.user != user:
+ if trans.sa_session.query( model.StoredWorkflowUserShareAssociation ) \
+ .filter_by( user=user, stored_workflow=stored ).count() == 0:
+ error( "Workflow is not owned by or shared with current user" )
# Get the latest revision
workflow = stored.latest_workflow
# It is possible for a workflow to have 0 steps
--- a/templates/root/tool_menu.mako
+++ b/templates/root/tool_menu.mako
@@ -32,7 +32,7 @@
<div class="toolTitleNoSection">
%endif
<% encoded_id = key.lstrip( 'workflow_' ) %>
- <a id="link-${workflow.id}" href="${ h.url_for( controller='workflow', action='run', id=encoded_id, check_user=False )}" target="_parent">${_(workflow.name)}</a>
+ <a id="link-${workflow.id}" href="${ h.url_for( controller='workflow', action='run', id=encoded_id )}" target="_parent">${_(workflow.name)}</a></div></%def>
1
0
galaxy-dist commit 74620fac33c7: bug fix caused by the prev commit.
by commits-noreply@bitbucket.org 20 Nov '10
by commits-noreply@bitbucket.org 20 Nov '10
20 Nov '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User rc
# Date 1289178573 18000
# Node ID 74620fac33c74b60c0a2faa69364dea0115e8815
# Parent 328b57b1e2e53cbc9ab208379cdd3dc0864f3cc4
bug fix caused by the prev commit.
--- a/lib/galaxy/web/controllers/requests_admin.py
+++ b/lib/galaxy/web/controllers/requests_admin.py
@@ -664,6 +664,8 @@ class RequestsAdmin( BaseController, Use
if not err_msg:
message = "%i datasets have been queued for transfer from the sequencer. Click the Refresh button above to monitor the transfer status." % len( selected_sample_datasets )
status = "done"
+ else:
+ status = 'error'
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='manage_datasets',
sample_id=trans.security.encode_id( sample.id ),
1
0
galaxy-dist commit 328b57b1e2e5: sample tracking data transfer mechanism streamlined.
by commits-noreply@bitbucket.org 20 Nov '10
by commits-noreply@bitbucket.org 20 Nov '10
20 Nov '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User rc
# Date 1289177219 18000
# Node ID 328b57b1e2e53cbc9ab208379cdd3dc0864f3cc4
# Parent 4b78bad81e777cf503ac66acb5dbb01da841400f
sample tracking data transfer mechanism streamlined.
- eliminated the transfer_datasets.ini config file
- eliminated the data transfer user
- now the admin user initiating the data transfer is used as the data transfer user.
- the admin user is provided the add_library_item permission before data transfer if they don't have it
- sample update & data transfer amqp messages now includes api_key
--- a/scripts/galaxy_messaging/client/galaxy_amq.ini.sample
+++ b/scripts/galaxy_messaging/client/galaxy_amq.ini.sample
@@ -14,6 +14,7 @@
#queue = galaxy_queue
#exchange = galaxy_exchange
#routing_key = bar_code_scanner
+#api_key =
# The following section(s) 'scanner#' is for specifying the state of the
# sample this scanner represents. This state name should be one of the
--- a/scripts/galaxy_messaging/server/amqp_consumer.py
+++ b/scripts/galaxy_messaging/server/amqp_consumer.py
@@ -15,6 +15,9 @@ import optparse
import xml.dom.minidom
import subprocess
import urllib2
+
+from xml_helper import get_value, get_value_index
+
from galaxydb_interface import GalaxyDbInterface
api_path = [ os.path.join( os.getcwd(), "scripts/api" ) ]
@@ -44,77 +47,58 @@ log.addHandler(fh)
# data transfer script
data_transfer_script = os.path.join( os.getcwd(),
"scripts/galaxy_messaging/server/data_transfer.py" )
+global config
+global config_file_name
+global http_server_section
-global dbconnstr
-global webconfig
-global config
-def get_value(dom, tag_name):
- '''
- This method extracts the tag value from the xml message
- '''
- nodelist = dom.getElementsByTagName(tag_name)[0].childNodes
- rc = ""
- for node in nodelist:
- if node.nodeType == node.TEXT_NODE:
- rc = rc + node.data
- return rc
-
-def get_value_index(dom, tag_name, index):
- '''
- This method extracts the tag value from the xml message
- '''
- try:
- nodelist = dom.getElementsByTagName(tag_name)[index].childNodes
- except:
- return None
- rc = ""
- for node in nodelist:
- if node.nodeType == node.TEXT_NODE:
- rc = rc + node.data
- return rc
-
-def start_data_transfer(message):
+def start_data_transfer( message ):
# fork a new process to transfer datasets
cmd = '%s "%s" "%s" "%s"' % ( "python",
data_transfer_script,
message.body,
- sys.argv[1] ) # Galaxy config file name
+ config_file_name ) # Galaxy config file name
pid = subprocess.Popen(cmd, shell=True).pid
log.debug('Started process (%i): %s' % (pid, str(cmd)))
-def update_sample_state(message):
+def update_sample_state( message ):
dom = xml.dom.minidom.parseString(message.body)
barcode = get_value(dom, 'barcode')
state = get_value(dom, 'state')
+ api_key = get_value(dom, 'api_key')
log.debug('Barcode: ' + barcode)
log.debug('State: ' + state)
+ log.debug('api_key: ' + api_key)
+ # validate
+ if not barcode or not state or not api_key:
+ log.debug( 'Incomplete sample_state_update message received. Sample barcode, desired state and user API key is required.' )
+ return
# update the sample state in Galaxy db
- galaxydb = GalaxyDbInterface(dbconnstr)
- sample_id = galaxydb.get_sample_id(field_name='bar_code', value=barcode)
+ dbconnstr = config.get("app:main", "database_connection")
+ galaxydb = GalaxyDbInterface( dbconnstr )
+ sample_id = galaxydb.get_sample_id( field_name='bar_code', value=barcode )
if sample_id == -1:
- log.debug('Invalid barcode.')
+ log.debug( 'Invalid barcode.' )
return
galaxydb.change_state(sample_id, state)
# after updating the sample state, update request status
request_id = galaxydb.get_request_id(sample_id)
- update_request( request_id )
+ update_request( api_key, request_id )
-def update_request( request_id ):
- http_server_section = webconfig.get( "universe_wsgi_config", "http_server_section" )
+def update_request( api_key, request_id ):
encoded_request_id = api.encode_id( config.get( "app:main", "id_secret" ), request_id )
- api_key = webconfig.get( "data_transfer_user_login_info", "api_key" )
data = dict( update_type=RequestsController.update_types.REQUEST )
url = "http://%s:%s/api/requests/%s" % ( config.get(http_server_section, "host"),
config.get(http_server_section, "port"),
encoded_request_id )
log.debug( 'Updating request %i' % request_id )
try:
- api.update( api_key, url, data, return_formatted=False )
+ retval = api.update( api_key, url, data, return_formatted=False )
+ log.debug( str( retval ) )
except urllib2.URLError, e:
log.debug( 'ERROR(update_request (%s)): %s' % ( str((self.api_key, url, data)), str(e) ) )
-def recv_callback(message):
+def recv_callback( message ):
# check the meesage type.
msg_type = message.properties['application_headers'].get('msg_type')
log.debug( 'MESSAGE RECVD: ' + str( msg_type ) )
@@ -126,22 +110,25 @@ def recv_callback(message):
update_sample_state( message )
def main():
- if len(sys.argv) < 2:
- print 'Usage: python amqp_consumer.py <Galaxy configuration file>'
- return
+ parser = optparse.OptionParser()
+ parser.add_option('-c', '--config-file', help='Galaxy configuration file',
+ dest='config_file', action='store')
+ parser.add_option('-s', '--http-server-section', help='Name of the HTTP server section in the Galaxy configuration file',
+ dest='http_server_section', action='store')
+ (opts, args) = parser.parse_args()
+ log.debug( "GALAXY LISTENER PID: " + str(os.getpid()) + " - " + str( opts ) )
+ # read the Galaxy config file
+ global config_file_name
+ config_file_name = opts.config_file
global config
config = ConfigParser.ConfigParser()
- config.read( sys.argv[1] )
- global dbconnstr
- dbconnstr = config.get("app:main", "database_connection")
+ config.read( opts.config_file )
+ global http_server_section
+ http_server_section = opts.http_server_section
amqp_config = {}
for option in config.options("galaxy_amqp"):
amqp_config[option] = config.get("galaxy_amqp", option)
- log.debug("PID: " + str(os.getpid()) + ", " + str(amqp_config))
- # web server config
- global webconfig
- webconfig = ConfigParser.ConfigParser()
- webconfig.read('transfer_datasets.ini')
+ log.debug( str( amqp_config ) )
# connect
conn = amqp.Connection(host=amqp_config['host']+":"+amqp_config['port'],
userid=amqp_config['userid'],
--- /dev/null
+++ b/scripts/galaxy_messaging/server/xml_helper.py
@@ -0,0 +1,28 @@
+#======= XML helper methods ====================================================
+
+import xml.dom.minidom
+
+def get_value( dom, tag_name ):
+ '''
+ This method extracts the tag value from the xml message
+ '''
+ nodelist = dom.getElementsByTagName( tag_name )[ 0 ].childNodes
+ rc = ""
+ for node in nodelist:
+ if node.nodeType == node.TEXT_NODE:
+ rc = rc + node.data
+ return rc
+
+def get_value_index( dom, tag_name, dataset_id ):
+ '''
+ This method extracts the tag value from the xml message
+ '''
+ try:
+ nodelist = dom.getElementsByTagName( tag_name )[ dataset_id ].childNodes
+ except:
+ return None
+ rc = ""
+ for node in nodelist:
+ if node.nodeType == node.TEXT_NODE:
+ rc = rc + node.data
+ return rc
--- a/lib/galaxy/web/controllers/requests_common.py
+++ b/lib/galaxy/web/controllers/requests_common.py
@@ -655,8 +655,8 @@ class RequestsCommon( BaseController, Us
event = trans.model.RequestEvent( request, request.states.SUBMITTED, message )
trans.sa_session.add( event )
trans.sa_session.flush()
- if cntrller == 'api':
- return 200, message
+ if cntrller == 'api':
+ return 200, message
return trans.response.send_redirect( web.url_for( controller='requests_common',
action='edit_samples',
cntrller=cntrller,
--- a/scripts/galaxy_messaging/server/data_transfer.py
+++ b/scripts/galaxy_messaging/server/data_transfer.py
@@ -19,6 +19,8 @@ import urllib,urllib2, cookielib, shutil
import logging, time, datetime
import xml.dom.minidom
+from xml_helper import get_value, get_value_index
+
log = logging.getLogger( "datatx_" + str( os.getpid() ) )
log.setLevel( logging.DEBUG )
fh = logging.FileHandler( "data_transfer.log" )
@@ -36,35 +38,38 @@ new_path = [ os.path.join( os.getcwd(),
new_path.extend( sys.path[1:] ) # remove scripts/ from the path
sys.path = new_path
+from galaxy import eggs
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model import SampleDataset
from galaxy.web.api.requests import RequestsController
-from galaxy import eggs
import pkg_resources
pkg_resources.require( "pexpect" )
import pexpect
-
pkg_resources.require( "simplejson" )
import simplejson
+log.debug(str(dir(api)))
+
class DataTransfer( object ):
def __init__( self, msg, config_file ):
log.info( msg )
self.dom = xml.dom.minidom.parseString( msg )
- self.sequencer_host = self.get_value( self.dom, 'data_host' )
- self.sequencer_username = self.get_value( self.dom, 'data_user' )
- self.sequencer_password = self.get_value( self.dom, 'data_password' )
- self.request_id = self.get_value( self.dom, 'request_id' )
- self.sample_id = self.get_value( self.dom, 'sample_id' )
- self.library_id = self.get_value( self.dom, 'library_id' )
- self.folder_id = self.get_value( self.dom, 'folder_id' )
+ self.galaxy_host = get_value( self.dom, 'galaxy_host' )
+ self.api_key = get_value( self.dom, 'api_key' )
+ self.sequencer_host = get_value( self.dom, 'data_host' )
+ self.sequencer_username = get_value( self.dom, 'data_user' )
+ self.sequencer_password = get_value( self.dom, 'data_password' )
+ self.request_id = get_value( self.dom, 'request_id' )
+ self.sample_id = get_value( self.dom, 'sample_id' )
+ self.library_id = get_value( self.dom, 'library_id' )
+ self.folder_id = get_value( self.dom, 'folder_id' )
self.dataset_files = []
count=0
while True:
- dataset_id = self.get_value_index( self.dom, 'dataset_id', count )
- file = self.get_value_index( self.dom, 'file', count )
- name = self.get_value_index( self.dom, 'name', count )
+ dataset_id = get_value_index( self.dom, 'dataset_id', count )
+ file = get_value_index( self.dom, 'file', count )
+ name = get_value_index( self.dom, 'name', count )
if file:
self.dataset_files.append( dict( name=name,
dataset_id=int( dataset_id ),
@@ -72,18 +77,6 @@ class DataTransfer( object ):
else:
break
count=count+1
- try:
- # Retrieve the upload user login information from the config file
- transfer_datasets_config = ConfigParser.ConfigParser( )
- transfer_datasets_config.read( 'transfer_datasets.ini' )
- self.data_transfer_user_email = transfer_datasets_config.get( "data_transfer_user_login_info", "email" )
- self.data_transfer_user_password = transfer_datasets_config.get( "data_transfer_user_login_info", "password" )
- self.api_key = transfer_datasets_config.get( "data_transfer_user_login_info", "api_key" )
- self.http_server_section = transfer_datasets_config.get( "universe_wsgi_config", "http_server_section" )
- except:
- log.error( traceback.format_exc() )
- log.error( 'ERROR reading config values from transfer_datasets.ini.' )
- sys.exit(1)
# read config variables
config = ConfigParser.ConfigParser()
retval = config.read( config_file )
@@ -92,14 +85,6 @@ class DataTransfer( object ):
log.error( error_msg )
sys.exit(1)
try:
- self.server_host = config.get( self.http_server_section, "host" )
- except ConfigParser.NoOptionError,e:
- self.server_host = '127.0.0.1'
- try:
- self.server_port = config.get( self.http_server_section, "port" )
- except ConfigParser.NoOptionError,e:
- self.server_port = '8080'
- try:
self.config_id_secret = config.get( "app:main", "id_secret" )
except ConfigParser.NoOptionError,e:
self.config_id_secret = "USING THE DEFAULT IS NOT SECURE!"
@@ -198,9 +183,8 @@ class DataTransfer( object ):
data[ 'dbkey' ] = ''
data[ 'upload_option' ] = 'upload_directory'
data[ 'create_type' ] = 'file'
- url = "http://%s:%s/api/libraries/%s/contents" % ( self.server_host,
- self.server_port,
- api.encode_id( self.config_id_secret, self.library_id ) )
+ url = "http://%s/api/libraries/%s/contents" % ( self.galaxy_host,
+ api.encode_id( self.config_id_secret, self.library_id ) )
log.debug( str( ( self.api_key, url, data ) ) )
retval = api.submit( self.api_key, url, data, return_formatted=False )
log.debug( str( retval ) )
@@ -225,9 +209,8 @@ class DataTransfer( object ):
data[ 'sample_dataset_ids' ] = sample_dataset_ids
data[ 'new_status' ] = status
data[ 'error_msg' ] = msg
- url = "http://%s:%s/api/requests/%s" % ( self.server_host,
- self.server_port,
- api.encode_id( self.config_id_secret, self.request_id ) )
+ url = "http://%s/api/requests/%s" % ( self.galaxy_host,
+ api.encode_id( self.config_id_secret, self.request_id ) )
log.debug( str( ( self.api_key, url, data)))
retval = api.update( self.api_key, url, data, return_formatted=False )
log.debug( str( retval ) )
@@ -239,31 +222,6 @@ class DataTransfer( object ):
log.error( 'FATAL ERROR' )
sys.exit( 1 )
- def get_value( self, dom, tag_name ):
- '''
- This method extracts the tag value from the xml message
- '''
- nodelist = dom.getElementsByTagName( tag_name )[ 0 ].childNodes
- rc = ""
- for node in nodelist:
- if node.nodeType == node.TEXT_NODE:
- rc = rc + node.data
- return rc
-
- def get_value_index( self, dom, tag_name, dataset_id ):
- '''
- This method extracts the tag value from the xml message
- '''
- try:
- nodelist = dom.getElementsByTagName( tag_name )[ dataset_id ].childNodes
- except:
- return None
- rc = ""
- for node in nodelist:
- if node.nodeType == node.TEXT_NODE:
- rc = rc + node.data
- return rc
-
if __name__ == '__main__':
log.info( 'STARTING %i %s' % ( os.getpid(), str( sys.argv ) ) )
#
--- a/lib/galaxy/web/controllers/requests_admin.py
+++ b/lib/galaxy/web/controllers/requests_admin.py
@@ -109,7 +109,7 @@ class DataTransferGrid( grids.Grid ):
default_sort_key = "-create_time"
num_rows_per_page = 50
preserve_state = True
- use_paging = True
+ use_paging = False
columns = [
NameColumn( "Name",
link=( lambda item: dict( operation="view", id=item.id ) ),
@@ -299,7 +299,7 @@ class RequestsAdmin( BaseController, Use
sample=selected_sample_datasets[0].sample,
id_list=id_list )
elif operation == "transfer":
- self.__start_datatx( trans, selected_sample_datasets[0].sample, selected_sample_datasets )
+ self.initiate_data_transfer( trans, selected_sample_datasets[0].sample, selected_sample_datasets )
# Render the grid view
sample_id = params.get( 'sample_id', None )
try:
@@ -538,68 +538,36 @@ class RequestsAdmin( BaseController, Use
return sample.request.name + '_' + sample.name + '_' + name
if opt == options.EXPERIMENT_NAME:
return sample.request.name + '_' + name
- def __setup_datatx_user( self, trans, sample ):
+ def __check_library_add_permission( self, trans, target_library, target_folder ):
"""
- Sets up the datatx user:
- - Checks if the user exists, if not creates them.
- - Checks if the user had ADD_LIBRARY permission on the target library
- and the target folder, if not sets up the permissions.
+ Checks whether the current admin user has ADD_LIBRARY permission on the target library
+ and the target folder, and if not, grants the permissions.
"""
- # Retrieve the upload user login information from the config file
- config = ConfigParser.ConfigParser()
- ok = True
- try:
- config.read( 'transfer_datasets.ini' )
- except Exception, e:
- message = "Error attempting to read config file named 'transfer_datasets.ini'. Make sure this file is correct."
- ok = False
- try:
- email = config.get( "data_transfer_user_login_info", "email" )
- password = config.get( "data_transfer_user_login_info", "password" )
- except Exception, e:
- message = "The 'data_transfer_user_login_info' section is missing from the 'transfer_datasets.ini'. Make sure this file is correct."
- ok = False
- if not ok:
- status = 'error'
- return trans.response.send_redirect( web.url_for( controller='requests_admin',
- action='manage_datasets',
- sample_id=trans.security.encode_id( sample.id ),
- status=status,
- message=message ) )
- # check if the user already exists
- datatx_user = trans.sa_session.query( trans.model.User ) \
- .filter( trans.model.User.table.c.email==email ) \
- .first()
- if not datatx_user:
- # if not create the user
- datatx_user = trans.model.User( email=email, password=passsword )
- if trans.app.config.use_remote_user:
- datatx_user.external = True
- trans.sa_session.add( datatx_user )
- trans.sa_session.flush()
- trans.app.security_agent.create_private_user_role( datatx_user )
- trans.app.security_agent.user_set_default_permissions( datatx_user, history=False, dataset=False )
- datatx_user_roles = datatx_user.all_roles()
- datatx_user_private_role = trans.app.security_agent.get_private_user_role( datatx_user )
+ current_user_roles = trans.user.all_roles()
+ current_user_private_role = trans.app.security_agent.get_private_user_role( trans.user )
# Make sure this user has LIBRARY_ADD permissions on the target library and folder.
# If not, give them permission.
- if not trans.app.security_agent.can_add_library_item( datatx_user_roles, sample.library ):
+ if not trans.app.security_agent.can_add_library_item( current_user_roles, target_library ):
lp = trans.model.LibraryPermissions( trans.app.security_agent.permitted_actions.LIBRARY_ADD.action,
- sample.library,
- datatx_user_private_role )
+ target_library,
+ current_user_private_role )
trans.sa_session.add( lp )
- if not trans.app.security_agent.can_add_library_item( datatx_user_roles, sample.folder ):
+ if not trans.app.security_agent.can_add_library_item( current_user_roles, target_folder ):
lfp = trans.model.LibraryFolderPermissions( trans.app.security_agent.permitted_actions.LIBRARY_ADD.action,
- sample.folder,
- datatx_user_private_role )
+ target_folder,
+ current_user_private_role )
trans.sa_session.add( lfp )
trans.sa_session.flush()
- return datatx_user
- def __send_message( self, trans, datatx_info, sample, selected_sample_datasets ):
- """Ceates an xml message and sends it to the rabbitmq server"""
+ def __create_data_transfer_message( self, trans, sample, selected_sample_datasets ):
+ """
+ Creates an xml message to send to the rabbitmq server
+ """
+ datatx_info = sample.request.type.datatx_info
# Create the xml message based on the following template
xml = \
''' <data_transfer>
+ <galaxy_host>%(GALAXY_HOST)s</galaxy_host>
+ <api_key>%(API_KEY)s</api_key><data_host>%(DATA_HOST)s</data_host><data_user>%(DATA_USER)s</data_user><data_password>%(DATA_PASSWORD)s</data_password>
@@ -624,50 +592,77 @@ class RequestsAdmin( BaseController, Use
sample_dataset.status = trans.app.model.SampleDataset.transfer_status.IN_QUEUE
trans.sa_session.add( sample_dataset )
trans.sa_session.flush()
- data = xml % dict( DATA_HOST=datatx_info['host'],
- DATA_USER=datatx_info['username'],
- DATA_PASSWORD=datatx_info['password'],
- REQUEST_ID=str(sample.request.id),
- SAMPLE_ID=str(sample.id),
- LIBRARY_ID=str(sample.library.id),
- FOLDER_ID=str(sample.folder.id),
- DATASETS=datasets )
- # Send the message
- try:
- conn = amqp.Connection( host=trans.app.config.amqp['host'] + ":" + trans.app.config.amqp['port'],
- userid=trans.app.config.amqp['userid'],
- password=trans.app.config.amqp['password'],
- virtual_host=trans.app.config.amqp['virtual_host'],
- insist=False )
- chan = conn.channel()
- msg = amqp.Message( data.replace( '\n', '' ).replace( '\r', '' ),
- content_type='text/plain',
- application_headers={'msg_type': 'data_transfer'} )
- msg.properties["delivery_mode"] = 2
- chan.basic_publish( msg,
- exchange=trans.app.config.amqp['exchange'],
- routing_key=trans.app.config.amqp['routing_key'] )
- chan.close()
- conn.close()
- except Exception, e:
- message = "Error in sending the data transfer message to the Galaxy AMQP message queue:<br/>%s" % str(e)
- status = "error"
- return trans.response.send_redirect( web.url_for( controller='requests_admin',
- action='manage_datasets',
- sample_id=trans.security.encode_id( sample.id ),
- status=status,
- message=message) )
-
- def __start_datatx( self, trans, sample, selected_sample_datasets ):
- datatx_user = self.__setup_datatx_user( trans, sample )
- # Validate sequencer information
+ message = xml % dict( GALAXY_HOST=trans.request.host,
+ API_KEY=trans.user.api_keys[0].key,
+ DATA_HOST=datatx_info[ 'host' ],
+ DATA_USER=datatx_info[ 'username' ],
+ DATA_PASSWORD=datatx_info[ 'password' ],
+ REQUEST_ID=str( sample.request.id ),
+ SAMPLE_ID=str( sample.id ),
+ LIBRARY_ID=str( sample.library.id ),
+ FOLDER_ID=str( sample.folder.id ),
+ DATASETS=datasets )
+ return message
+ def __validate_data_transfer_settings( self, trans, sample ):
+ err_msg = ''
+ # check the sequencer login info
datatx_info = sample.request.type.datatx_info
- if not datatx_info['host'] or not datatx_info['username'] or not datatx_info['password']:
- message = "Error in sequencer login information."
- status = "error"
- else:
- self.__send_message( trans, datatx_info, sample, selected_sample_datasets )
- message = "%i datasets have been queued for transfer from the sequencer. Click the Refresh button above to see the latest transfer status." % len( selected_sample_datasets )
+ if not datatx_info[ 'host' ] \
+ or not datatx_info[ 'username' ] \
+ or not datatx_info[ 'password' ]:
+ err_msg = "Error in sequencer login information."
+ # check if web API is enabled and API key exists
+ if not trans.user.api_keys or not trans.app.config.enable_api:
+ err_msg = "Could not start data transfer as Galaxy Web API is not enabled. Enable Galaxy Web API in the Galaxy config file and create an API key."
+ # check if library_import_dir is set
+ if not trans.app.config.library_import_dir:
+ err_msg = "'library_import_dir' config variable is not set in the Galaxy config file."
+ # check the RabbitMQ server settings in the config file
+ for k, v in trans.app.config.amqp.items():
+ if not v:
+ err_msg = 'Set RabbitMQ server settings in the "galaxy_amqp" section of the Galaxy config file. %s is not set.' % k
+ break
+ return err_msg
+ def initiate_data_transfer( self, trans, sample, selected_sample_datasets ):
+ '''
+ This method initiates the transfer of the datasets from the sequencer. It
+ happens in the following steps:
+ - The current admin user needs to have ADD_LIBRARY_ITEM permission for the
+ target library and folder
+ - Create an XML message encapsulating all the data transfer info and send it
+ to the message queue (RabbitMQ broker)
+ '''
+ # check data transfer settings
+ err_msg = self.__validate_data_transfer_settings( trans, sample )
+ if not err_msg:
+ # check if the current user has add_library_item permission to the sample
+ # target library & folder
+ self.__check_library_add_permission( trans, sample.library, sample.folder )
+ # create the message
+ message = self.__create_data_transfer_message( trans,
+ sample,
+ selected_sample_datasets )
+ # Send the message
+ try:
+ conn = amqp.Connection( host=trans.app.config.amqp[ 'host' ] + ":" + trans.app.config.amqp[ 'port' ],
+ userid=trans.app.config.amqp[ 'userid' ],
+ password=trans.app.config.amqp[ 'password' ],
+ virtual_host=trans.app.config.amqp[ 'virtual_host' ],
+ insist=False )
+ chan = conn.channel()
+ msg = amqp.Message( message.replace( '\n', '' ).replace( '\r', '' ),
+ content_type='text/plain',
+ application_headers={ 'msg_type': 'data_transfer' } )
+ msg.properties[ "delivery_mode" ] = 2
+ chan.basic_publish( msg,
+ exchange=trans.app.config.amqp[ 'exchange' ],
+ routing_key=trans.app.config.amqp[ 'routing_key' ] )
+ chan.close()
+ conn.close()
+ except Exception, e:
+ err_msg = "Error in sending the data transfer message to the Galaxy AMQP message queue:<br/>%s" % str(e)
+ if not err_msg:
+ message = "%i datasets have been queued for transfer from the sequencer. Click the Refresh button above to monitor the transfer status." % len( selected_sample_datasets )
status = "done"
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='manage_datasets',
--- a/transfer_datasets.ini.sample
+++ /dev/null
@@ -1,10 +0,0 @@
-# Galaxy LIMS Transfer Datasets Configuration File
-
-[data_transfer_user_login_info]
-#email = datatx(a)bx.psu.edu
-#password = datatx
-#api_key = Generate it from the User menu in Galaxy
-
-[universe_wsgi_config]
-# The http server section name in the Galaxy config file (universe_wsgi.ini)
-#http_server_section = server:main
--- a/lib/galaxy/web/api/requests.py
+++ b/lib/galaxy/web/api/requests.py
@@ -88,7 +88,7 @@ class RequestsController( BaseController
request_id = trans.security.decode_id( id )
except TypeError:
trans.response.status = 400
- return "Malformed %s id ( %s ) specified, unable to decode." % ( update_type, str( id ) )
+ return "Malformed request id ( %s ) specified, unable to decode." % str( id )
try:
request = trans.sa_session.query( trans.app.model.Request ).get( request_id )
except:
--- a/run_galaxy_listener.sh
+++ b/run_galaxy_listener.sh
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`
-python scripts/galaxy_messaging/server/amqp_consumer.py universe_wsgi.ini 2>&1
+python scripts/galaxy_messaging/server/amqp_consumer.py --config-file=universe_wsgi.ini --http-server-section=server:main 2>&1
--- a/scripts/galaxy_messaging/client/amqp_publisher.py
+++ b/scripts/galaxy_messaging/client/amqp_publisher.py
@@ -19,13 +19,15 @@ xml = \
''' <sample><barcode>%(BARCODE)s</barcode><state>%(STATE)s</state>
+ <api_key>%(API_KEY)s</api_key></sample>'''
def handle_scan(states, amqp_config, barcode):
if states.get(barcode[:2], None):
values = dict( BARCODE=barcode[2:],
- STATE=states.get(barcode[:2]) )
+ STATE=states.get(barcode[:2]),
+ API_KEY=amqp_config['api_key'] )
print values
data = xml % values
print data
@@ -68,6 +70,10 @@ def main():
states = {}
for option in config.options("galaxy:amqp"):
amqp_config[option] = config.get("galaxy:amqp", option)
+ # abort if api_key is not set in the config file
+ if not amqp_config['api_key']:
+ print 'Error: Set the api_key config variable in the config file before starting the amqp_publisher script.'
+ sys.exit( 1 )
count = 1
while True:
section = 'scanner%i' % count
1
0
galaxy-dist commit 4187ae2fdb9e: Improved error handling in api methods.
by commits-noreply@bitbucket.org 20 Nov '10
by commits-noreply@bitbucket.org 20 Nov '10
20 Nov '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User rc
# Date 1289186302 18000
# Node ID 4187ae2fdb9e3f8b7ff5878e7517a24868b8ffca
# Parent dcc68da403f0e8d3ae0da07529ff96327ca05489
Improved error handling in api methods.
--- a/scripts/api/common.py
+++ b/scripts/api/common.py
@@ -48,6 +48,13 @@ def put( api_key, url, data ):
req.get_method = lambda: 'PUT'
return simplejson.loads( urllib2.urlopen( req ).read() )
+def __del( api_key, url, data ):
+ # Do the actual DELETE
+ url = make_url( api_key, url )
+ req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ))
+ req.get_method = lambda: 'DELETE'
+ return simplejson.loads( urllib2.urlopen( req ).read() )
+
def display( api_key, url, return_formatted=True ):
# Sends an API GET request and acts as a generic formatter for the JSON response.
@@ -89,12 +96,12 @@ def submit( api_key, url, data, return_f
try:
r = post( api_key, url, data )
except urllib2.HTTPError, e:
- print e
- print e.read( 1024 )
if return_formatted:
+ print e
+ print e.read( 1024 )
sys.exit( 1 )
else:
- return 'Error. '+ str( e )
+ return 'Error. '+ str( e.read( 1024 ) )
if not return_formatted:
return r
print 'Response'
@@ -123,30 +130,35 @@ def update( api_key, url, data, return_f
try:
r = put( api_key, url, data )
except urllib2.HTTPError, e:
- print e
- print e.read( 1024 )
- sys.exit( 1 )
+ if return_formatted:
+ print e
+ print e.read( 1024 )
+ sys.exit( 1 )
+ else:
+ return 'Error. '+ str( e.read( 1024 ) )
if not return_formatted:
return r
print 'Response'
print '--------'
- if type( r ) == list:
- # Currently the only implemented responses are lists of dicts, because
- # submission creates some number of collection elements.
- for i in r:
- if type( i ) == dict:
- if 'url' in i:
- print i.pop( 'url' )
- else:
- print '----'
- if 'name' in i:
- print ' name: %s' % i.pop( 'name' )
- for k, v in i.items():
- print ' %s: %s' % ( k, v )
- else:
- print i
- else:
- print r
+ print r
+
+def delete( api_key, url, data, return_formatted=True ):
+ # Sends an API DELETE request and acts as a generic formatter for the JSON response.
+ # 'data' will become the JSON payload read by Galaxy.
+ try:
+ r = __del( api_key, url, data )
+ except urllib2.HTTPError, e:
+ if return_formatted:
+ print e
+ print e.read( 1024 )
+ sys.exit( 1 )
+ else:
+ return 'Error. '+ str( e.read( 1024 ) )
+ if not return_formatted:
+ return r
+ print 'Response'
+ print '--------'
+ print r
# utility method to encode ID's
def encode_id( config_id_secret, obj_id ):
--- a/scripts/galaxy_messaging/server/amqp_consumer.py
+++ b/scripts/galaxy_messaging/server/amqp_consumer.py
@@ -95,7 +95,7 @@ def update_request( api_key, request_id
try:
retval = api.update( api_key, url, data, return_formatted=False )
log.debug( str( retval ) )
- except urllib2.URLError, e:
+ except Exception, e:
log.debug( 'ERROR(update_request (%s)): %s' % ( str((self.api_key, url, data)), str(e) ) )
def recv_callback( message ):
--- a/lib/galaxy/web/controllers/requests_admin.py
+++ b/lib/galaxy/web/controllers/requests_admin.py
@@ -662,7 +662,7 @@ class RequestsAdmin( BaseController, Use
except Exception, e:
err_msg = "Error in sending the data transfer message to the Galaxy AMQP message queue:<br/>%s" % str(e)
if not err_msg:
- message = "%i datasets have been queued for transfer from the sequencer. Click the Refresh button above to monitor the transfer status." % len( selected_sample_datasets )
+ err_msg = "%i datasets have been queued for transfer from the sequencer. Click the Refresh button above to monitor the transfer status." % len( selected_sample_datasets )
status = "done"
else:
status = 'error'
@@ -670,7 +670,7 @@ class RequestsAdmin( BaseController, Use
action='manage_datasets',
sample_id=trans.security.encode_id( sample.id ),
status=status,
- message=message ) )
+ message=err_msg ) )
@web.expose
def update_sample_dataset_status(self, trans, cntrller, sample_dataset_ids, new_status, error_msg=None ):
# check if the new status is a valid transfer status
1
0
galaxy-dist commit dcc68da403f0: Fix for failing rgManQQ functional test - changed R outputs. cut down size of default png - much faster...
by commits-noreply@bitbucket.org 20 Nov '10
by commits-noreply@bitbucket.org 20 Nov '10
20 Nov '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Ross Lazarus <ross.lazarus(a)gmail.com>
# Date 1289178885 18000
# Node ID dcc68da403f0e8d3ae0da07529ff96327ca05489
# Parent 74620fac33c74b60c0a2faa69364dea0115e8815
Fix for failing rgManQQ functional test - changed R outputs. cut down size of default png - much faster...
Binary file test-data/rgtestouts/rgManQQ/Allelep_manhattan.png has changed
--- a/tools/rgenetics/rgManQQ.py
+++ b/tools/rgenetics/rgManQQ.py
@@ -6,6 +6,7 @@ from rgutils import timenow, RRun, galht
progname = os.path.split(sys.argv[0])[1]
myversion = 'V000.1 March 2010'
verbose = False
+debug = False
rcode="""
# license not stated so I'm assuming LGPL is ok for my derived work?
@@ -156,7 +157,7 @@ if (plen > 0) {
mytitle = paste('p=',cname,', ',title,sep='')
myfname = chartr(' ','_',cname)
myqqplot = qq(rawd[,pvalscolumn],title=mytitle)
- ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=11,height=8,dpi=100)
+ ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
if (doreorder) {
@@ -168,7 +169,7 @@ if (plen > 0) {
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
- ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=11,height=8,dpi=100)
+ ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
else {
print(paste('chrom column =',chromcolumn,'offset column = ',offsetcolumn,
@@ -196,6 +197,8 @@ def doManQQ(input_fname,chrom_col,offset
to make them do our bidding - and save the resulting code for posterity
this can be called externally, I guess...for QC eg?
"""
+ if debug:
+ print 'doManQQ',input_fname,chrom_col,offset_col,pval_cols,title,grey,ctitle,outdir
ffd,filtered_fname = tempfile.mkstemp(prefix='rgManQQtemp')
f = open(filtered_fname,'w')
inf = open(input_fname,'r')
@@ -232,6 +235,8 @@ def doManQQ(input_fname,chrom_col,offset
pvc = [x+3 for x in range(len(pval_cols))] # 2 for offset and chrom, 1 for r offset start
pvc = 'c(%s)' % (','.join(map(str,pvc)))
rcmd = '%s%s' % (rcode,rcode2 % (filtered_fname,'1','2',pvc,title,grey))
+ if debug:
+ print 'running\n%s\n' % rcmd
rlog,flist = RRun(rcmd=rcmd,title=ctitle,outdir=outdir)
rlog.append('## R script=')
rlog.append(rcmd)
--- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.R
+++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.R
@@ -124,7 +124,7 @@ qq = function(pvector, title=NULL, spart
if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank())
qq
}
-rgqqMan = function(infile="/tmp/rgManQQtempcWfFkc",chromcolumn=1, offsetcolumn=2, pvalscolumns=c(3),
+rgqqMan = function(infile="/tmp/rgManQQtemplYC5wa",chromcolumn=1, offsetcolumn=2, pvalscolumns=c(3),
title="rgManQQtest1",grey=0) {
rawd = read.table(infile,head=T,sep='\t')
dn = names(rawd)
@@ -142,7 +142,7 @@ if (plen > 0) {
mytitle = paste('p=',cname,', ',title,sep='')
myfname = chartr(' ','_',cname)
myqqplot = qq(rawd[,pvalscolumn],title=mytitle)
- ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=11,height=8,dpi=100)
+ ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
if (doreorder) {
@@ -154,7 +154,7 @@ if (plen > 0) {
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
- ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=11,height=8,dpi=100)
+ ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
else {
print(paste('chrom column =',chromcolumn,'offset column = ',offsetcolumn,
@@ -171,4 +171,4 @@ if (plen > 0) {
rgqqMan()
# execute with defaults as substituted
-#R script autogenerated by rgenetics/rgutils.py on 18/09/2010 20:46:49
+#R script autogenerated by rgenetics/rgutils.py on 07/11/2010 20:03:37
--- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.html
+++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.html
@@ -43,7 +43,7 @@ Loading required package: grid
Loading required package: proto
-[1] "### 101 values read from /tmp/rgManQQtempcWfFkc read - now running plots"
+[1] "### 101 values read from /tmp/rgManQQtemplYC5wa read - now running plots"
[1] "## qqplot on Allelep done"
@@ -178,7 +178,7 @@ qq = function(pvector, title=NULL, spart
if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank())
qq
}
-rgqqMan = function(infile="/tmp/rgManQQtempcWfFkc",chromcolumn=1, offsetcolumn=2, pvalscolumns=c(3),
+rgqqMan = function(infile="/tmp/rgManQQtemplYC5wa",chromcolumn=1, offsetcolumn=2, pvalscolumns=c(3),
title="rgManQQtest1",grey=0) {
rawd = read.table(infile,head=T,sep='\t')
dn = names(rawd)
@@ -196,7 +196,7 @@ if (plen > 0) {
mytitle = paste('p=',cname,', ',title,sep='')
myfname = chartr(' ','_',cname)
myqqplot = qq(rawd[,pvalscolumn],title=mytitle)
- ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=11,height=8,dpi=100)
+ ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
if (doreorder) {
@@ -208,7 +208,7 @@ if (plen > 0) {
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
- ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=11,height=8,dpi=100)
+ ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
else {
print(paste('chrom column =',chromcolumn,'offset column = ',offsetcolumn,
@@ -227,6 +227,6 @@ rgqqMan()
</pre>
-<h3><a href="http://rgenetics.org">Rgenetics</a> tool rgManQQ.py run at 18/09/2010 20:48:26</h3>
+<h3><a href="http://rgenetics.org">Rgenetics</a> tool rgManQQ.py run at 07/11/2010 20:04:20</h3></div></body></html>
--- a/tools/rgenetics/rgManQQ.xml
+++ b/tools/rgenetics/rgManQQ.xml
@@ -42,7 +42,7 @@
<param name='chrom_col' value='1' /><param name='offset_col' value='2' /><param name='grey' value='0' />
- <output name='out_html' file='rgtestouts/rgManQQ/rgManQQtest1.html' ftype='html' lines_diff='40'>
+ <output name='out_html' file='rgtestouts/rgManQQ/rgManQQtest1.html' ftype='html' lines_diff='60'><extra_files type="file" name='Allelep_manhattan.png' value='rgtestouts/rgManQQ/Allelep_manhattan.png' compare="sim_size"
delta = "10000"/><extra_files type="file" name='Allelep_qqplot.png' value='rgtestouts/rgManQQ/Allelep_qqplot.png' compare="sim_size"
Binary file test-data/rgtestouts/rgManQQ/Allelep_qqplot.png has changed
1
0
galaxy-dist commit fb55bfc792b1: Trackster UI tweaks, mainly moving the navigation to the top, still needs to be compressed and cleaned up. Also, getting borders and spacing a little more even. Some intial keyboard navigation
by commits-noreply@bitbucket.org 20 Nov '10
by commits-noreply@bitbucket.org 20 Nov '10
20 Nov '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User James Taylor <james(a)jamestaylor.org>
# Date 1289163413 18000
# Node ID fb55bfc792b1d087783055a736d9fb439e5bb41c
# Parent d46ccc650084224553b3b48c11df64d43d7673ef
Trackster UI tweaks, mainly moving the navigation to the top, still needs to be compressed and cleaned up. Also, getting borders and spacing a little more even. Some intial keyboard navigation
--- a/templates/tracks/browser.mako
+++ b/templates/tracks/browser.mako
@@ -15,8 +15,8 @@
${h.css( "history", "autocomplete_tagging", "trackster", "overcast/jquery-ui-1.8.5.custom" )}
<style type="text/css">
- #center {
- overflow: auto;
+ #center, #browser-container {
+ overflow: none;
}
ul#sortable-ul {
list-style: none;
@@ -29,10 +29,7 @@
background: #eee;
}
.nav-container {
- position: fixed;
width: 100%;
- left: 0;
- bottom: 0;
}
# Styles for filters.
.filter-name {
@@ -57,6 +54,7 @@
<a id="add-track" class="panel-header-button right-float" href="javascript:void(0);">Add Tracks</a></div></div>
+<div id="browser-container" class="unified-panel-body"></div></%def>
@@ -79,7 +77,7 @@
$(function() {
%if config:
- view = new View( $("#center"), "${config.get('title') | h}", "${config.get('vis_id')}", "${config.get('dbkey')}" );
+ view = new View( $("#browser-container"), "${config.get('title') | h}", "${config.get('vis_id')}", "${config.get('dbkey')}" );
view.editor = true;
%for track in config.get('tracks'):
view.add_track(
@@ -215,6 +213,16 @@
error: function() { alert("Could not save visualization"); }
});
});
+
+ $(document).keydown( function( e ) {
+ // 39 == right arrow key
+ if ( e.which == 39 ) {
+ view.move_fraction( -0.25 );
+ // 37 == left arrow key
+ } else if ( e.which == 37 ) {
+ view.move_fraction( 0.25 );
+ }
+ });
};
});
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -103,15 +103,25 @@ var View = function( container, title, v
// Create DOM elements
var parent_element = this.container,
view = this;
- this.top_labeltrack = $("<div/>").addClass("top-labeltrack").appendTo(parent_element);
+ // Top container for things that are fixed at the top
+ this.top_container = $("<div/>").addClass("top-container").appendTo(parent_element);
+ // Content container, primary tracks are contained in here
this.content_div = $("<div/>").addClass("content").css("position", "relative").appendTo(parent_element);
+ // Bottom container for things that are fixed at the bottom
+ this.bottom_container = $("<div/>").addClass("bottom-container").appendTo(parent_element);
+ // Label track fixed at top
+ this.top_labeltrack = $("<div/>").addClass("top-labeltrack").appendTo(this.top_container);
+ // Viewport for dragging tracks in center
this.viewport_container = $("<div/>").addClass("viewport-container").addClass("viewport-container").appendTo(this.content_div);
- this.intro_div = $("<div/>").addClass("intro").text("Select a chrom from the dropdown below").hide(); // Future overlay
-
- this.nav_container = $("<div/>").addClass("nav-container").appendTo(parent_element);
- this.nav_labeltrack = $("<div/>").addClass("nav-labeltrack").appendTo(this.nav_container);
+ // Future overlay?
+ this.intro_div = $("<div/>").addClass("intro").text("Select a chrom from the dropdown below").hide();
+ // Another label track at bottom
+ this.nav_labeltrack = $("<div/>").addClass("nav-labeltrack").appendTo(this.bottom_container);
+ // Navigation at top
+ this.nav_container = $("<div/>").addClass("nav-container").prependTo(this.top_container);
this.nav = $("<div/>").addClass("nav").appendTo(this.nav_container);
- this.overview = $("<div/>").addClass("overview").appendTo(this.nav);
+ // Overview (scrollbar and overview plot) at bottom
+ this.overview = $("<div/>").addClass("overview").appendTo(this.bottom_container);
this.overview_viewport = $("<div/>").addClass("overview-viewport").appendTo(this.overview);
this.overview_close = $("<a href='javascript:void(0);'>Close Overview</a>").addClass("overview-close").hide().appendTo(this.overview_viewport);
this.overview_highlight = $("<div />").addClass("overview-highlight").hide().appendTo(this.overview_viewport);
@@ -327,6 +337,11 @@ var View = function( container, title, v
}
view.change_chrom(chrom, new_low, new_high);
},
+ move_fraction : function( fraction ) {
+ var view = this;
+ var span = view.high - view.low;
+ this.move_delta( fraction * span );
+ },
move_delta: function(delta_chrom) {
var view = this;
var current_chrom_span = view.high - view.low;
@@ -439,7 +454,7 @@ var View = function( container, title, v
this.redraw();
},
resize_window: function() {
- this.viewport_container.height( this.container.height() - this.nav_container.height() - 45 );
+ this.viewport_container.height( this.container.height() - this.top_container.height() - this.bottom_container.height() );
this.nav_container.width( this.container.width() );
this.redraw();
},
--- a/static/june_2007_style/blue/trackster.css
+++ b/static/june_2007_style/blue/trackster.css
@@ -1,7 +1,7 @@
.viewport-container{overflow-x:hidden;overflow-y:auto;}
-.nav{padding:0 0;color:#333;font-weight:bold;}
-.content{font:9px verdana;}
-.nav-controls{text-align:center;position:relative;background:#cccccc;background-image:url(panel_header_bg.png);background-position:top center;background-repeat:repeat-x;padding:2px 0;}
+.nav{padding:0 0;color:#333;font-weight:bold;background:#cccccc;background-image:url(panel_header_bg.png);background-position:bottom center;background-repeat:repeat-x;height:2.5em;border-bottom:solid #333 1px;}
+.content{font:10px verdana;}
+.nav-controls{text-align:center;position:relative;padding:2px 0;}
.nav-controls input{margin:0 5px;}
.nav-controls a{padding:0 0.4em;}
.nav-input{font-size:12px;width:30em;z-index:1000;}
@@ -9,24 +9,23 @@
.draghandle{cursor:move;float:left;background:transparent url(../images/visualization/draggable_horizontal.png) center center no-repeat;width:10px;height:12px;}
.intro{z-index:1000;margin-left:auto;margin-right:auto;color:#555;text-align:center;font-size:16px;}
.overview{width:100%;margin:0px;color:white;}
-.overview-viewport{position:relative;height:14px;background:white;border-bottom:solid gray 1px;margin:0;}
+.overview-viewport{position:relative;height:14px;background:white;margin:0;}
.overview-close{font:9px verdana;position:absolute;top:0px;right:0px;padding:5px;z-index:500;background-color:white;}
-.overview-highlight{opacity:0.5;top:0px;position:absolute;z-index:100;border-style:solid;border-color:#484848;border-width:0px 1px;}
-.overview-boxback{width:100%;bottom:0px;z-index:50;position:absolute;height:14px;background:#eee;}
-.overview-box{cursor:pointer;opacity:0.5;bottom:0px;z-index:100;position:absolute;margin-top:0px;height:14px;background:#484848 url(../images/visualization/draggable_horizontal.png) center center no-repeat;border-style:solid;border-color:#484848;border-width:0px 1px;-moz-border-radius:3px;border-radius:3px;}
-.overview-box:hover{background-color:#838383;border-color:#838383;}
+.overview-highlight{top:0px;position:absolute;z-index:100;border-style:solid;border-color:#666;border-width:0px 1px;}
+.overview-boxback{width:100%;bottom:0px;z-index:50;position:absolute;height:14px;background:#eee;border:solid #999 1px;}
+.overview-box{cursor:pointer;bottom:0px;z-index:100;position:absolute;margin-top:0px;height:14px;background:#C1C9E5 url(../images/visualization/draggable_horizontal.png) center center no-repeat;border:solid #666 1px;}
.viewport-canvas{width:100%;height:100px;}
.yaxislabel{color:#777;z-index:100;}
.line-track .track-content{border-top:1px solid #ddd;border-bottom:1px solid #ddd;}
-.track{background:white;margin-bottom:1px;}
+.track{background:white;margin-bottom:1px;padding-bottom:4px;border-bottom:#eee solid 1px;}
.track-header{text-align:left;padding:4px 0px;color:#666;}
.track-header .menubutton{margin-left:3px;}
.track-content{overflow:hidden;text-align:center;}
-.track.error{background-color:#ECB4AF;}
-.track.nodata{background-color:#ddd;}
+.track.error .track-content{background-color:#ECB4AF;}
+.track.nodata .track-content{background-color:#ddd;}
.loading{min-height:100px;}
-.label-track{}
-.label-track .label{border-left:solid #999 1px;padding:1px;display:inline-block;}
+.label-track{font-size:10px;border:none;padding:0;margin:0;height:1.3em;}
+.label-track .label{border-left:solid #999 1px;padding:1px;padding-bottom:2px;display:inline-block;}
.right-float{float:right;margin-left:5px;}
.top-labeltrack{position:relative;border-bottom:solid #999 1px;}
-.nav-labeltrack{border-top:solid #999 1px;border-bottom:solid #999 1px;}
+.nav-labeltrack{border-top:solid #999 1px;border-bottom:solid #333 1px;}
--- a/static/june_2007_style/trackster.css.tmpl
+++ b/static/june_2007_style/trackster.css.tmpl
@@ -5,17 +5,19 @@
.nav {
padding: 0 0;
color:#333;font-weight:bold;
+ background:#cccccc;
+ background-image:url(panel_header_bg.png);
+ background-position: bottom center;
+ background-repeat:repeat-x;
+ height: 2.5em;
+ border-bottom: solid #333 1px;
}
.content {
- font: 9px verdana;
+ font: 10px verdana;
}
.nav-controls {
text-align: center;
position: relative;
- background:#cccccc;
- background-image:url(panel_header_bg.png);
- background-position:top center;
- background-repeat:repeat-x;
padding: 2px 0;
}
.nav-controls input {
@@ -61,7 +63,6 @@
/* border-top: solid #666 1px;*/
/* border-bottom: solid #aaa 1px;*/
background: white;
- border-bottom: solid gray 1px;
margin: 0;
}
.overview-close {
@@ -74,12 +75,11 @@
background-color: white;
}
.overview-highlight {
- opacity: 0.5;
top: 0px;
position: absolute;
z-index: 100;
border-style: solid;
- border-color: #484848;
+ border-color: #666;
border-width: 0px 1px;
}
.overview-boxback {
@@ -89,25 +89,17 @@
position: absolute;
height: 14px;
background: #eee;
+ border: solid #999 1px;
}
.overview-box {
cursor: pointer;
- opacity: 0.5;
bottom: 0px;
z-index: 100;
position: absolute;
margin-top: 0px;
height: 14px;
- background: #484848 url(../images/visualization/draggable_horizontal.png) center center no-repeat;
- border-style: solid;
- border-color: #484848;
- border-width: 0px 1px;
- -moz-border-radius: 3px;
- border-radius: 3px;
-}
-.overview-box:hover {
- background-color: #838383;
- border-color: #838383;
+ background: #C1C9E5 url(../images/visualization/draggable_horizontal.png) center center no-repeat;
+ border: solid #666 1px;
}
.viewport-canvas {
width: 100%;
@@ -129,6 +121,8 @@
/* border-bottom: solid #DDDDDD 1px; */
background: white;
margin-bottom: 1px;
+ padding-bottom: 4px;
+ border-bottom: #eee solid 1px;
}
.track-header {
@@ -146,10 +140,10 @@
text-align: center;
}
-.track.error {
+.track.error .track-content {
background-color: #ECB4AF;
}
-.track.nodata {
+.track.nodata .track-content {
background-color: #ddd;
}
@@ -159,11 +153,16 @@
.label-track {
/* font-weight: bold; */
- /* font-size: 10px; */
+ font-size: 10px;
+ border: none;
+ padding: 0;
+ margin: 0;
+ height: 1.3em;
}
.label-track .label {
border-left: solid #999 1px;
padding: 1px;
+ padding-bottom: 2px;
display: inline-block;
}
.right-float {
@@ -178,5 +177,5 @@
.nav-labeltrack {
border-top: solid #999 1px;
- border-bottom: solid #999 1px;
+ border-bottom: solid #333 1px;
}
1
0
galaxy-dist commit 4b78bad81e77: Trackster: save current viewport when saving viz
by commits-noreply@bitbucket.org 20 Nov '10
by commits-noreply@bitbucket.org 20 Nov '10
20 Nov '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User James Taylor <james(a)jamestaylor.org>
# Date 1289169067 18000
# Node ID 4b78bad81e777cf503ac66acb5dbb01da841400f
# Parent fb55bfc792b1d087783055a736d9fb439e5bb41c
Trackster: save current viewport when saving viz
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -81,7 +81,7 @@ var Cache = function( num_elements ) {
}
});
-var View = function( container, title, vis_id, dbkey ) {
+var View = function( container, title, vis_id, dbkey, callback ) {
this.container = container;
this.vis_id = vis_id;
this.dbkey = dbkey;
@@ -95,11 +95,11 @@ var View = function( container, title, v
this.zoom_factor = 3;
this.min_separation = 30;
this.has_changes = false;
- this.init();
+ this.init( callback );
this.reset();
};
$.extend( View.prototype, {
- init: function() {
+ init: function( callback ) {
// Create DOM elements
var parent_element = this.container,
view = this;
@@ -177,6 +177,9 @@ var View = function( container, title, v
view.chrom_select.bind("change", function() {
view.change_chrom(view.chrom_select.val());
});
+ if ( callback ) {
+ callback();
+ }
},
error: function() {
alert( "Could not load chroms for this dbkey:", view.dbkey );
@@ -244,8 +247,8 @@ var View = function( container, title, v
this.drag_origin_x = e.clientX;
this.drag_origin_pos = e.clientX / view.viewport_container.width() * (view.high - view.low) + view.low;
this.drag_div = $("<div />").css( {
- "height": view.content_div.height()+30, "top": "0px", "position": "absolute",
- "background-color": "#cfc", "border": "1px solid #6a6", "opacity": 0.5, "z-index": 1000
+ "height": view.content_div.height() + view.top_labeltrack.height() + view.nav_labeltrack.height(), "top": "0px", "position": "absolute",
+ "background-color": "#ccf", "opacity": 0.5, "z-index": 1000
} ).appendTo( $(this) );
}).bind( "drag", function(e) {
var min = Math.min(e.clientX, this.drag_origin_x) - view.container.offset().left,
@@ -1216,7 +1219,7 @@ var FeatureTrack = function ( name, view
this.show_labels_scale = 0.001;
this.showing_details = false;
this.vertical_detail_px = 10;
- this.vertical_nodetail_px = 2;
+ this.vertical_nodetail_px = 3;
this.summary_draw_height = 30;
this.default_font = "9px Monaco, Lucida Console, monospace";
this.inc_slots = {};
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -1,12 +1,5 @@
"""
Support for constructing and viewing custom "track" browsers within Galaxy.
-
-Track browsers are currently transient -- nothing is stored to the database
-when a browser is created. Building a browser consists of selecting a set
-of datasets associated with the same dbkey to display. Once selected, jobs
-are started to create any necessary indexes in the background, and the user
-is redirected to the browser interface, which loads the appropriate datasets.
-
"""
import re, pkg_resources
@@ -317,7 +310,7 @@ class TracksController( BaseController,
if 'vis_id' in kwargs:
vis_id = kwargs['vis_id'].strip('"')
dbkey = kwargs['dbkey']
-
+ # Lookup or create Visualization object
if vis_id == "undefined": # new vis
vis = model.Visualization()
vis.user = trans.user
@@ -328,20 +321,30 @@ class TracksController( BaseController,
else:
decoded_id = trans.security.decode_id( vis_id )
vis = session.query( model.Visualization ).get( decoded_id )
-
+ # Decode the payload
decoded_payload = simplejson.loads( kwargs['payload'] )
+ # Create new VisualizationRevision that will be attached to the viz
vis_rev = model.VisualizationRevision()
vis_rev.visualization = vis
vis_rev.title = vis.title
vis_rev.dbkey = dbkey
+ # Tracks from payload
tracks = []
- for track in decoded_payload:
+ for track in decoded_payload['tracks']:
tracks.append( { "dataset_id": str(track['dataset_id']),
"name": track['name'],
"track_type": track['track_type'],
"prefs": track['prefs']
} )
- vis_rev.config = { "tracks": tracks }
+ # Viewport from payload
+ if 'viewport' in decoded_payload:
+ chrom = decoded_payload['viewport']['chrom']
+ start = decoded_payload['viewport']['start']
+ end = decoded_payload['viewport']['end']
+ vis_rev.config = { "tracks": tracks, "viewport": { 'chrom': chrom, 'start': start, 'end': end } }
+ else:
+ vis_rev.config = { "tracks": tracks }
+ print vis_rev.config
vis.latest_revision = vis_rev
session.add( vis_rev )
session.flush()
--- a/templates/tracks/browser.mako
+++ b/templates/tracks/browser.mako
@@ -77,7 +77,11 @@
$(function() {
%if config:
- view = new View( $("#browser-container"), "${config.get('title') | h}", "${config.get('vis_id')}", "${config.get('dbkey')}" );
+ var callback;
+ %if 'viewport' in config:
+ var callback = function() { view.change_chrom( '${config['viewport']['chrom']}', ${config['viewport']['start']}, ${config['viewport']['end']} ); }
+ %endif
+ view = new View( $("#browser-container"), "${config.get('title') | h}", "${config.get('vis_id')}", "${config.get('dbkey')}", callback );
view.editor = true;
%for track in config.get('tracks'):
view.add_track(
@@ -180,21 +184,25 @@
$("#save-button").bind("click", function(e) {
var sorted = $(".viewport-container").sortable('toArray'),
- payload = [];
+ tracks = [];
+
+ // Show saving dialog box
+ show_modal("Saving...", "<img src='${h.url_for('/static/images/yui/rel_interstitial_loading.gif')}'/>");
for (var i in sorted) {
var track_id = parseInt(sorted[i].split("track_")[1]),
track = view.tracks[track_id];
- payload.push( {
+ tracks.push( {
"track_type": track.track_type,
"name": track.name,
"dataset_id": track.dataset_id,
"prefs": track.prefs
});
}
- // Show saving dialog box
- show_modal("Saving...", "<img src='${h.url_for('/static/images/yui/rel_interstitial_loading.gif')}'/>");
+
+ var payload = { 'tracks': tracks, 'viewport': { 'chrom': view.chrom, 'start': view.low , 'end': view.high } }
+ console.log( payload );
$.ajax({
url: "${h.url_for( action='save' )}",
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -185,6 +185,9 @@ class UsesVisualization( SharableItemSec
config = { "title": visualization.title, "vis_id": trans.security.encode_id( visualization.id ),
"tracks": tracks, "chrom": "", "dbkey": visualization.dbkey }
+
+ if 'viewport' in latest_revision.config:
+ config['viewport'] = latest_revision.config['viewport']
return config
1
0
galaxy-dist commit 32e5efb7a7d5: Remove the need for setup.sh - All configuration is done in run.sh/galaxy.config.
by commits-noreply@bitbucket.org 20 Nov '10
by commits-noreply@bitbucket.org 20 Nov '10
20 Nov '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Nate Coraor <nate(a)bx.psu.edu>
# Date 1288983302 14400
# Node ID 32e5efb7a7d5606a4dc1c8808fe4888ade2892e8
# Parent eb79ab327351e4789a89748269fc34428f791463
Remove the need for setup.sh - All configuration is done in run.sh/galaxy.config.
--- a/run.sh
+++ b/run.sh
@@ -2,6 +2,53 @@
cd `dirname $0`
+python ./scripts/check_python.py
+[ $? -ne 0 ] && exit 1
+
+FROM_SAMPLE="
+ datatypes_conf.xml
+ reports_wsgi.ini
+ tool_conf.xml
+ tool_data_table_conf.xml
+ universe_wsgi.ini
+ tool-data/add_scores.loc
+ tool-data/alignseq.loc
+ tool-data/annotation_profiler_options.xml
+ tool-data/annotation_profiler_valid_builds.txt
+ tool-data/bfast_indexes.loc
+ tool-data/binned_scores.loc
+ tool-data/blastdb.loc
+ tool-data/blastdb_p.loc
+ tool-data/bowtie_indices.loc
+ tool-data/bowtie_indices_color.loc
+ tool-data/codingSnps.loc
+ tool-data/encode_datasets.loc
+ tool-data/funDo.loc
+ tool-data/lastz_seqs.loc
+ tool-data/liftOver.loc
+ tool-data/maf_index.loc
+ tool-data/maf_pairwise.loc
+ tool-data/microbial_data.loc
+ tool-data/phastOdds.loc
+ tool-data/perm_base_index.loc
+ tool-data/perm_color_index.loc
+ tool-data/quality_scores.loc
+ tool-data/regions.loc
+ tool-data/sam_fa_indices.loc
+ tool-data/sift_db.loc
+ tool-data/srma_index.loc
+ tool-data/twobit.loc
+ tool-data/shared/ucsc/builds.txt
+"
+
+# Create any missing config/location files
+for file in $FROM_SAMPLE; do
+ if [ ! -f "$file" -a -f "$file.sample" ]; then
+ echo "Initializing $file from `basename $file`.sample"
+ cp $file.sample $file
+ fi
+done
+
# explicitly attempt to fetch eggs before running
FETCH_EGGS=1
for arg in "$@"; do
@@ -21,10 +68,4 @@ if [ $FETCH_EGGS -eq 1 ]; then
fi
fi
-# Temporary: since builds.txt is now removed from source control, create it
-# from the sample if necessary
-if [ ! -f "tool-data/shared/ucsc/builds.txt" ]; then
- cp tool-data/shared/ucsc/builds.txt.sample tool-data/shared/ucsc/builds.txt
-fi
-
python ./scripts/paster.py serve universe_wsgi.ini $@
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -94,14 +94,8 @@ class Configuration( object ):
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
self.library_import_dir = kwargs.get( 'library_import_dir', None )
- if self.library_import_dir is not None and not os.path.exists( self.library_import_dir ):
- raise ConfigurationError( "library_import_dir specified in config (%s) does not exist" % self.library_import_dir )
self.user_library_import_dir = kwargs.get( 'user_library_import_dir', None )
- if self.user_library_import_dir is not None and not os.path.exists( self.user_library_import_dir ):
- raise ConfigurationError( "user_library_import_dir specified in config (%s) does not exist" % self.user_library_import_dir )
self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
- if self.ftp_upload_dir is not None and not os.path.exists( self.ftp_upload_dir ):
- os.makedirs( self.ftp_upload_dir )
self.ftp_upload_site = kwargs.get( 'ftp_upload_site', None )
self.allow_library_path_paste = kwargs.get( 'allow_library_path_paste', False )
self.disable_library_comptypes = kwargs.get( 'disable_library_comptypes', '' ).lower().split( ',' )
@@ -159,9 +153,16 @@ class Configuration( object ):
return default
def check( self ):
# Check that required directories exist
- for path in self.root, self.file_path, self.tool_path, self.tool_data_path, self.template_path, self.job_working_directory, self.cluster_files_directory:
+ for path in self.root, self.tool_path, self.tool_data_path, self.template_path:
if not os.path.isdir( path ):
raise ConfigurationError("Directory does not exist: %s" % path )
+ # Create the directories that it makes sense to create
+ for path in self.file_path, self.new_file_path, self.job_working_directory, self.cluster_files_directory, self.template_cache, self.ftp_upload_dir, self.library_import_dir, self.user_library_import_dir, self.nginx_upload_store, './static/genetrack/plots', os.path.join( self.tool_data_path, 'shared', 'jars' ):
+ if path not in [ None, False ] and not os.path.isdir( path ):
+ try:
+ os.makedirs( path )
+ except Exception, e:
+ raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
# Check that required files exist
for path in self.tool_config, self.datatypes_config:
if not os.path.isfile(path):
1
0