[hg] galaxy 2447: Missed adding chrominfo.py, due to my hardcode...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/0cf5c25d1d2b
changeset: 2447:0cf5c25d1d2b
user: ianschenck(a)Thugunit.local
date: Thu Apr 23 14:42:35 2009 -0400
description:
Missed adding chrominfo.py, due to my hardcoded paths in test environ. Should be good now?
2 file(s) affected in this change:
lib/galaxy/datatypes/chrominfo.py
tools/annotation_profiler/annotation_profiler.xml
diffs (29 lines):
diff -r 854ec7739cfd -r 0cf5c25d1d2b lib/galaxy/datatypes/chrominfo.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/chrominfo.py Thu Apr 23 14:42:35 2009 -0400
@@ -0,0 +1,14 @@
+import data
+from galaxy import util
+from galaxy.datatypes.sniff import *
+from galaxy.web import url_for
+from tabular import Tabular
+from galaxy.datatypes import metadata
+from galaxy.datatypes.metadata import MetadataElement
+
+
+class ChromInfo( Tabular ):
+ file_ext = "len"
+ MetadataElement( name="chrom", default=1, desc="Chrom column", param=metadata.ColumnParameter )
+ MetadataElement( name="length", default=2, desc="Length column", param=metadata.ColumnParameter )
+
diff -r 854ec7739cfd -r 0cf5c25d1d2b tools/annotation_profiler/annotation_profiler.xml
--- a/tools/annotation_profiler/annotation_profiler.xml Thu Apr 23 13:46:52 2009 -0400
+++ b/tools/annotation_profiler/annotation_profiler.xml Thu Apr 23 14:42:35 2009 -0400
@@ -1,6 +1,6 @@
<tool id="Annotation_Profiler_0" name="Profile Annotations" Version="1.0.0">
<description>for a set of genomic intervals</description>
- <command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p /depot/data2/galaxy/annotation_profiler/$dbkey $summary -l ${GALAXY_DATA_INDEX_DIR}/shared/ucsc/chrom/${dbkey}.len -b 3 -t $table_names</command>
+ <command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p /depot/data2/galaxy/annotation_profiler/$dbkey $summary -l ${chromInfo} -b 3 -t $table_names</command>
<inputs>
<param format="interval" name="input1" type="data" label="Choose Intervals">
<validator type="dataset_metadata_in_file" filename="annotation_profiler_valid_builds.txt" metadata_name="dbkey" metadata_column="0" message="Profiling is not currently available for this species."/>
13 years, 7 months
[hg] galaxy 2446: - Performance of indexers much improved.
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/854ec7739cfd
changeset: 2446:854ec7739cfd
user: ianschenck(a)Thugunit.local
date: Thu Apr 23 13:46:52 2009 -0400
description:
- Performance of indexers much improved.
- Indexing for tracks done in background with a visual treatment done to Trackster
- DB builds can be uploaded by a user (chromInfo/len extension).
- TODO: Add ability to change the dbkey of a dataset to any arbitrary string value.
23 file(s) affected in this change:
datatypes_conf.xml.sample
lib/galaxy/datatypes/data.py
lib/galaxy/datatypes/indexers/coverage.py
lib/galaxy/datatypes/indexers/interval.awk
lib/galaxy/datatypes/indexers/interval.py
lib/galaxy/datatypes/indexers/interval_awk.xml
lib/galaxy/datatypes/indexers/wiggle.py
lib/galaxy/datatypes/interval.py
lib/galaxy/datatypes/metadata.py
lib/galaxy/datatypes/registry.py
lib/galaxy/tools/actions/__init__.py
lib/galaxy/tools/actions/upload.py
lib/galaxy/tools/parameters/basic.py
lib/galaxy/tracks/store.py
lib/galaxy/web/controllers/root.py
lib/galaxy/web/controllers/tracks.py
lib/galaxy/web/framework/__init__.py
static/scripts/trackster.js
static/trackster.css
templates/dataset/edit_attributes.mako
templates/tracks/index.mako
tools/data_source/upload.xml
tools/new_operations/complement.xml
diffs (663 lines):
diff -r f7336991d0ee -r 854ec7739cfd datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Thu Apr 23 09:20:11 2009 -0400
+++ b/datatypes_conf.xml.sample Thu Apr 23 13:46:52 2009 -0400
@@ -8,6 +8,9 @@
<converter file="interval_to_coverage.xml" target_datatype="coverage"/>
</datatype>
<datatype extension="binseq.zip" type="galaxy.datatypes.images:Binseq" mimetype="application/zip" display_in_upload="true"/>
+ <datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true">
+ <!-- no converters yet -->
+ </datatype>
<datatype extension="coverage" type="galaxy.datatypes.coverage:LastzCoverage" display_in_upload="true">
<indexer file="coverage.xml" />
</datatype>
@@ -31,7 +34,7 @@
<datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/>
<datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true">
<converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
- <indexer file="interval.xml" />
+ <indexer file="interval_awk.xml" />
</datatype>
<datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/>
<datatype extension="laj" type="galaxy.datatypes.images:Laj"/>
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Thu Apr 23 13:46:52 2009 -0400
@@ -40,7 +40,7 @@
__metaclass__ = DataMeta
"""Add metadata elements"""
- MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.SelectParameter, multiple=False, values=util.dbnames, no_value="?" )
+ MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.DBKeyParameter, multiple=False, no_value="?" )
"""Stores the set of display applications, and viewing methods, supported by this datatype """
supported_display_apps = {}
@@ -242,7 +242,7 @@
def after_edit( self, dataset ):
"""This function is called on the dataset after metadata is edited."""
dataset.clear_associated_files( metadata_safe = True )
-
+
@property
def has_resolution(self):
return False
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/indexers/coverage.py
--- a/lib/galaxy/datatypes/indexers/coverage.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/coverage.py Thu Apr 23 13:46:52 2009 -0400
@@ -37,7 +37,7 @@
os.rename( fname+".npy", fname )
# Write average
- for window in 10, 100, 1000, 10000:
+ for window in 10, 100, 1000, 10000, 100000:
input = scores.copy()
size = len( input )
input.resize( ( ( size / window ), window ) )
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/indexers/interval.awk
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/indexers/interval.awk Thu Apr 23 13:46:52 2009 -0400
@@ -0,0 +1,43 @@
+BEGIN {
+ # from galaxy.utils
+ mapped_chars[">"] = "__gt__"
+ mapped_chars["<"] = "__lt__"
+ mapped_chars["'"] = "__sq__"
+ mapped_chars["\""] = "__dq__"
+ mapped_chars["\\["] = "__ob__"
+ mapped_chars["\\]"] = "__cb__"
+ mapped_chars["\\{"] = "__oc__"
+ mapped_chars["\\}"] = "__cc__"
+ mapped_chars["@"] = "__at__"
+ # additional, not in galaxy.utils
+ mapped_chars["/"] = "__fs__"
+ mapped_chars["^manifest\.tab$"] = "__manifest.tab__"
+}
+function escape_filename( name )
+{
+ for( char in mapped_chars ) {
+ gsub( char, mapped_chars[char], name )
+ }
+ return name
+}
+!_[$chrom]++ {
+ # close files only when we switch to a new one.
+ fn && close(fn)
+ fn = storepath "/" escape_filename($1) }
+{
+ print $0 >> fn;
+ # the || part is needed to catch 0 length chromosomes, which
+ # should never happen but...
+ if ($end > chroms[$chrom] || !chroms[$chrom])
+ chroms[$chrom] = $end }
+END {
+ fn = storepath "/manifest.tab"
+ for( x in chroms ) {
+ # add line to manifest
+ print x "\t" chroms[x] >> fn
+ chromfile = storepath "/" escape_filename(x)
+ # sort in-place
+ system( "sort -f -n -k " chrom " -k " start " -k " end " -o " chromfile " " chromfile )
+ close(chromfile)
+ }
+}
\ No newline at end of file
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/indexers/interval.py
--- a/lib/galaxy/datatypes/indexers/interval.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/interval.py Thu Apr 23 13:46:52 2009 -0400
@@ -29,13 +29,11 @@
manifest[chrom] = max(manifest.get(chrom,0),line.end)
if not lastchrom == chrom:
if current_file:
- current_file.flush()
current_file.close()
current_file = open( os.path.join( out_path, "%s" % chrom), "a" )
print >> current_file, "\t".join(line)
lastchrom = chrom
if current_file:
- current_file.flush()
current_file.close()
return manifest
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/indexers/interval_awk.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/indexers/interval_awk.xml Thu Apr 23 13:46:52 2009 -0400
@@ -0,0 +1,16 @@
+<tool id="INDEXER_Interval_0" name="Index Interval for Track Viewer">
+ <!-- Used internally to generate track indexes -->
+ <command interpreter="awk -f">interval.awk
+ chrom=${input_dataset.metadata.chromCol} start=${input_dataset.metadata.startCol}
+ end=${input_dataset.metadata.endCol} strand=${input_dataset.metadata.strandCol}
+ storepath=${store_path}
+ $input_dataset 2>&1
+ </command>
+ <inputs>
+ <page>
+ <param format="interval" name="input_dataset" type="data" label="Choose intervals"/>
+ </page>
+ </inputs>
+ <help>
+ </help>
+</tool>
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/indexers/wiggle.py
--- a/lib/galaxy/datatypes/indexers/wiggle.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/wiggle.py Thu Apr 23 13:46:52 2009 -0400
@@ -18,6 +18,8 @@
from numpy import *
import tempfile
import os
+from galaxy.tracks.store import sanitize_name
+
def write_chrom(max, out_base, instream):
@@ -35,7 +37,7 @@
os.rename( fname+".npy", fname )
# Write average
- for window in 10, 100, 1000, 10000:
+ for window in 10, 100, 1000, 10000, 100000:
input = scores.copy()
size = len( input )
input.resize( ( ( size / window ), window ) )
@@ -60,7 +62,7 @@
LEN[chrom] = max2( LEN.get(chrom,0), pos+1 )
for chrom, stream in chroms.items():
stream.seek(0)
- prefix = os.path.join(sys.argv[2], chrom)
+ prefix = os.path.join(sys.argv[2], sanitize_name(chrom))
write_chrom( LEN[chrom], prefix, stream )
manifest_file = open( os.path.join( sys.argv[2], "manifest.tab" ),"w" )
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/interval.py Thu Apr 23 13:46:52 2009 -0400
@@ -792,7 +792,7 @@
# Determine appropriate resolution to plot ~1000 points
resolution = ( 10 ** math.ceil( math.log10( range / 1000 ) ) )
# Restrict to valid range
- resolution = min( resolution, 10000 )
+ resolution = min( resolution, 100000 )
resolution = max( resolution, 1 )
# Memory map the array (don't load all the data)
data = numpy.load( data )
@@ -809,7 +809,7 @@
# Determine appropriate resolution to plot ~1000 points
resolution = math.ceil( 10 ** math.ceil( math.log10( range / 1000 ) ) )
# Restrict to valid range
- resolution = min( resolution, 10000 )
+ resolution = min( resolution, 100000 )
resolution = max( resolution, 1 )
return resolution
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/metadata.py Thu Apr 23 13:46:52 2009 -0400
@@ -289,7 +289,22 @@
if value is None: return []
if not isinstance( value, list ): return [value]
return value
-
+
+
+class DBKeyParameter( SelectParameter ):
+ def get_html_field( self, value=None, context={}, other_values={}, values=None, **kwd):
+ try:
+ values = kwd['trans'].db_builds
+ except AttributeError: pass
+ return super(DBKeyParameter, self).get_html_field( value, context, other_values, values, **kwd)
+
+ def get_html( self, value=None, context={}, other_values={}, values=None, **kwd):
+ try:
+ values = kwd['trans'].db_builds
+ except AttributeError: pass
+ return super(DBKeyParameter, self).get_html( value, context, other_values, values, **kwd)
+
+
class RangeParameter( SelectParameter ):
def __init__( self, spec ):
SelectParameter.__init__( self, spec )
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/registry.py Thu Apr 23 13:46:52 2009 -0400
@@ -3,7 +3,7 @@
"""
import os
import logging
-import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks
+import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo
import galaxy.util
from galaxy.util.odict import odict
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/tools/actions/__init__.py Thu Apr 23 13:46:52 2009 -0400
@@ -107,6 +107,7 @@
out_data = {}
# Collect any input datasets from the incoming parameters
inp_data = self.collect_input_datasets( tool, incoming, trans )
+
# Deal with input dataset names, 'dbkey' and types
input_names = []
input_ext = 'data'
@@ -119,6 +120,16 @@
data = NoneDataset( datatypes_registry = trans.app.datatypes_registry )
if data.dbkey not in [None, '?']:
input_dbkey = data.dbkey
+
+ # Collect chromInfo dataset and add as parameters to incoming
+ db_datasets = {}
+ db_dataset = trans.db_dataset_for( input_dbkey )
+ if db_dataset:
+ db_datasets[ "chromInfo" ] = db_dataset
+ incoming[ "chromInfo" ] = db_dataset.file_name
+ else:
+ incoming[ "chromInfo" ] = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % input_dbkey )
+ inp_data.update( db_datasets )
# Determine output dataset permission/roles list
existing_datasets = [ inp for inp in inp_data.values() if inp ]
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Apr 23 13:46:52 2009 -0400
@@ -21,7 +21,7 @@
def execute( self, tool, trans, incoming={}, set_output_hid = True ):
data_file = incoming['file_data']
file_type = incoming['file_type']
- dbkey = incoming['dbkey']
+ dbkey = incoming['other_dbkey'] or incoming['dbkey']
url_paste = incoming['url_paste']
is_multi_byte = False
space_to_tab = False
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Thu Apr 23 13:46:52 2009 -0400
@@ -657,7 +657,7 @@
>>> # Create a mock transcation with 'hg17' as the current build
>>> from galaxy.util.bunch import Bunch
- >>> trans = Bunch( history=Bunch( genome_build='hg17' ) )
+ >>> trans = Bunch( history=Bunch( genome_build='hg17' ), db_builds=util.dbnames )
>>> p = GenomeBuildParameter( None, XML(
... '''
@@ -692,10 +692,10 @@
"""
def get_options( self, trans, other_values ):
last_used_build = trans.history.genome_build
- for dbkey, build_name in util.dbnames:
+ for dbkey, build_name in trans.db_builds:
yield build_name, dbkey, ( dbkey == last_used_build )
def get_legal_values( self, trans, other_values ):
- return set( dbkey for dbkey, _ in util.dbnames )
+ return set( dbkey for dbkey, _ in trans.db_builds )
class ColumnListParameter( SelectToolParameter ):
"""
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/tracks/store.py
--- a/lib/galaxy/tracks/store.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/tracks/store.py Thu Apr 23 13:46:52 2009 -0400
@@ -1,5 +1,17 @@
import os
+import re
from string import Template
+from galaxy.util import sanitize_text
+
+# extra mappings/escape to keep users from traversing around the
+# filesystem and wreaking havoc
+extra_mappings = { r"/": "__fs__", r"^manifest.tab$": "__manifest.tab__" }
+
+def sanitize_name( name ):
+ name = sanitize_text( name )
+ for key, value in extra_mappings.items():
+ name = re.sub( key, value, name )
+ return name
class TemplateSubber( object ):
def __init__(self, obj):
@@ -56,7 +68,7 @@
fd.close()
def _get_object_path( self, chrom, resolution ):
- object_name = chrom
+ object_name = sanitize_name(chrom)
if resolution: object_name += "_%d" % resolution
return os.path.join( self.path, object_name )
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/web/controllers/root.py Thu Apr 23 13:46:52 2009 -0400
@@ -234,11 +234,15 @@
if spec.get("readonly"):
continue
optional = params.get("is_"+name, None)
+ other = params.get("or_"+name, None)
if optional and optional == 'true':
# optional element... == 'true' actually means it is NOT checked (and therefore omitted)
setattr(data.metadata, name, None)
else:
- setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
+ if other:
+ setattr( data.metadata, name, other )
+ else:
+ setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
data.datatype.after_edit( data )
trans.app.model.flush()
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Thu Apr 23 13:46:52 2009 -0400
@@ -1,13 +1,11 @@
-from mako import exceptions
-from mako.template import Template
-from mako.lookup import TemplateLookup
+import math
+
+import mimeparse
+from galaxy.tracks import messages
+from galaxy.util.json import to_json_string
from galaxy.web.base.controller import *
from galaxy.web.framework import simplejson
-from galaxy import web
-from galaxy.tracks import messages
-import mimeparse
-from galaxy.util.json import to_json_string
-import math
+
class MultiResponse(object):
"""
@@ -82,18 +80,19 @@
def build( self, trans, **kwargs ):
trans.session["track_sets"] = list(kwargs.keys())
trans.session.save()
- waiting = False
- for id, value in kwargs.items():
- status = self.data_handler( trans, id )
- if status == messages.PENDING:
- waiting = True
- if not waiting:
- return trans.response.send_redirect( web.url_for( controller='tracks', action='chroms', dbkey=trans.session["track_dbkey"]) )
- return trans.fill_template( 'tracks/build.mako' )
+ #waiting = False
+ #for id, value in kwargs.items():
+ # status = self.data_handler( trans, id )
+ # if status == messages.PENDING:
+ # waiting = True
+ #if not waiting:
+ return trans.response.send_redirect( web.url_for( controller='tracks/', action='index', chrom="" ) )
+ #return trans.fill_template( 'tracks/build.mako' )
@web.expose
def index(self, trans, **kwargs):
tracks = []
+ dbkey = ""
for track in trans.session["track_sets"]:
dataset = trans.app.model.HistoryDatasetAssociation.get( track )
tracks.append({
@@ -101,17 +100,23 @@
"name": dataset.name,
"id": dataset.id
})
+ dbkey = dataset.dbkey
chrom = kwargs.get("chrom","")
LEN = self.chroms_handler(trans, trans.session["track_dbkey"]).get(chrom,0)
return trans.fill_template( 'tracks/index.mako',
- tracks=tracks, chrom=chrom,
+ tracks=tracks, chrom=chrom, dbkey=dbkey,
LEN=LEN )
def chroms_handler(self, trans, dbkey ):
- db_manifest = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % dbkey )
+ db_manifest = trans.db_dataset_for( dbkey )
+ if not db_manifest:
+ db_manifest = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % dbkey )
+ else:
+ db_manifest = db_manifest.file_name
manifest = {}
if os.path.exists( db_manifest ):
for line in open( db_manifest ):
+ if line.startswith("#"): continue
line = line.rstrip("\r\n")
fields = line.split("\t")
manifest[fields[0]] = int(fields[1])
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/web/framework/__init__.py Thu Apr 23 13:46:52 2009 -0400
@@ -547,6 +547,31 @@
template = Template( source=template_string,
searchList=[context or kwargs, dict(caller=self)] )
return str(template)
+
+ @property
+ def db_builds( self ):
+ """
+ Returns the builds defined by galaxy and the builds defined by
+ the user (chromInfo in history).
+ """
+ dbnames = list()
+ datasets = self.app.model.HistoryDatasetAssociation.filter_by(deleted=False, history_id=self.history.id, extension="len").all()
+ if len(datasets) > 0:
+ dbnames.append( (util.dbnames.default_value, '--------- User Defined Builds ----------') )
+ for dataset in datasets:
+ dbnames.append( (dataset.dbkey, dataset.name) )
+ dbnames.extend( util.dbnames )
+ return dbnames
+
+ def db_dataset_for( self, dbkey ):
+ """
+ Returns the db_file dataset associated/needed by `dataset`, or `None`.
+ """
+ datasets = self.app.model.HistoryDatasetAssociation.filter_by(deleted=False, history_id=self.history.id, extension="len").all()
+ for ds in datasets:
+ if dbkey == ds.dbkey:
+ return ds
+ return None
class FormBuilder( object ):
"""
diff -r f7336991d0ee -r 854ec7739cfd static/scripts/trackster.js
--- a/static/scripts/trackster.js Thu Apr 23 09:20:11 2009 -0400
+++ b/static/scripts/trackster.js Thu Apr 23 13:46:52 2009 -0400
@@ -85,7 +85,7 @@
var resolution = Math.pow( 10, Math.ceil( Math.log( range / DENSITY ) / Math.log( 10 ) ) );
resolution = Math.max( resolution, 1 );
- resolution = Math.min( resolution, 10000 );
+ resolution = Math.min( resolution, 100000 );
var parent_element = $("<div style='position: relative;'></div>");
this.content_div.children( ":first" ).remove();
@@ -152,10 +152,20 @@
var low = position * DENSITY * resolution;
var high = ( position + 1 ) * DENSITY * resolution;
cache[resolution][position] = { state: "loading" };
- $.getJSON( "data" + this.type, { chr: this.view.chr, low: low, high: high, dataset_id: this.track.dataset_id }, function ( data ) {
- cache[resolution][position] = { state: "loaded", values: data };
- $(document).trigger( "redraw" );
- });
+ // use closure to preserve this and parameters for getJSON
+ var fetcher = function (ref) {
+ return function () {
+ $.getJSON( "data" + ref.type, { chr: ref.view.chr, low: low, high: high, dataset_id: ref.track.dataset_id }, function ( data ) {
+ if( data == "pending" ) {
+ setTimeout( fetcher, 5000 );
+ } else {
+ cache[resolution][position] = { state: "loaded", values: data };
+ }
+ $(document).trigger( "redraw" );
+ });
+ };
+ }(this);
+ fetcher();
}
return cache[resolution][position];
}
@@ -288,8 +298,11 @@
var chunk = this.cache.get( resolution, tile_index );
if ( chunk.state == "loading" ) {
- return null;
- }
+ parent_element.addClass("loading");
+ return null;
+ } else {
+ parent_element.removeClass("loading");
+ }
var values = chunk.values;
for ( var index in values ) {
diff -r f7336991d0ee -r 854ec7739cfd static/trackster.css
--- a/static/trackster.css Thu Apr 23 09:20:11 2009 -0400
+++ b/static/trackster.css Thu Apr 23 13:46:52 2009 -0400
@@ -85,7 +85,10 @@
}
.loading {
- background: #DDDDDD;
+ background-image: url("/static/images/loading_large_white_bg.gif");
+ background-position: center center;
+ background-repeat: no-repeat;
+ min-height: 100px;
}
.label-track .label {
diff -r f7336991d0ee -r 854ec7739cfd templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Thu Apr 23 09:20:11 2009 -0400
+++ b/templates/dataset/edit_attributes.mako Thu Apr 23 13:46:52 2009 -0400
@@ -46,7 +46,7 @@
${spec.desc}:
</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- ${data.metadata.get_html_by_name( name )}
+ ${data.metadata.get_html_by_name( name, trans=trans )}
</div>
<div style="clear: both"></div>
</div>
diff -r f7336991d0ee -r 854ec7739cfd templates/tracks/index.mako
--- a/templates/tracks/index.mako Thu Apr 23 09:20:11 2009 -0400
+++ b/templates/tracks/index.mako Thu Apr 23 13:46:52 2009 -0400
@@ -17,18 +17,19 @@
${parent.late_javascripts()}
<script type="text/javascript" src="/static/scripts/jquery.event.drag.js"></script>
<script type="text/javascript" src="/static/scripts/trackster.js"></script>
-<script>
+<script type="text/javascript">
- var view = new View( "${chrom}", ${LEN}, 0, ${LEN} );
+ var view = new View( "${chrom}", ${LEN}, 0, ${max(LEN,1)} );
var tracks = new TrackLayout( view );
-
+ var dbkey = "${dbkey}";
+
$(function() {
tracks.add( new LabelTrack( view, $("#viewport" ) ) );
%for track in tracks:
tracks.add( new ${track["type"]}( "${track["name"]}", view, $("#viewport" ), ${track["id"]} ) );
%endfor
-
+
$(document).bind( "redraw", function( e ) {
tracks.redraw();
});
@@ -56,9 +57,43 @@
view.high = new_high;
tracks.redraw();
});
+ tracks.redraw();
+ load_chroms();
+ });
- tracks.redraw();
- });
+ var load_chroms = function () {
+ var fetcher = function (ref) {
+ return function () {
+ $.getJSON( "chroms", { dbkey: dbkey }, function ( data ) {
+ // Hacky - check length of "object"
+ var chrom_length = 0;
+ for (key in data) chrom_length++;
+ if( chrom_length == 0 ) {
+ setTimeout( fetcher, 5000 );
+ } else {
+ var chrom_options = '';
+ for (key in data) {
+ if( key == view.chr ) {
+ chrom_options += '<option value="' + key + '" selected="true">' + key + '</option>';
+ } else {
+ chrom_options += '<option value="' + key + '">' + key + '</option>';
+ }
+ }
+ $("#chrom").html(chrom_options);
+ $("#chrom").bind( "change", function ( e ) {
+ $("#chr").submit();
+ });
+ if( view.chr == "" ) {
+ $("#chrom option:first").attr("selected", true);
+ $("#chrom").trigger( "change" );
+ }
+ }
+ });
+ };
+ }(this);
+ fetcher();
+ };
+
</script>
</%def>
@@ -79,11 +114,14 @@
<div id="nav">
<div id="nav-controls">
+ <form name="chr" id="chr" method="GET">
<a href="#" onclick="javascript:view.left(5);tracks.redraw();"><<</a>
<a href="#" onclick="javascript:view.left(2);tracks.redraw();"><</a>
-
- <span style="display: inline-block; width: 30em; text-align: center;">Viewing ${chrom}:<span id="low">0</span>-<span id="high">180857866</span></span>
-
+ <span style="display: inline-block; width: 30em; text-align: center;">Viewing
+ <select id="chrom" name="chrom">
+ <option value="">loading</option>
+ </select>
+ <span id="low">0</span>-<span id="high">180857866</span></span>
<span style="display: inline-block; width: 10em;">
<a href="#" onclick="javascript:view.zoom_in(2);tracks.redraw();">+</a>
<a href="#" onclick="javascript:view.zoom_out(2);tracks.redraw();">-</a>
@@ -91,6 +129,7 @@
<a href="#" onclick="javascript:view.right(2);tracks.redraw();">></a>
<a href="#" onclick="javascript:view.right(5);tracks.redraw();">>></a>
+ </form>
</div>
</div>
diff -r f7336991d0ee -r 854ec7739cfd tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Apr 23 09:20:11 2009 -0400
+++ b/tools/data_source/upload.xml Thu Apr 23 13:46:52 2009 -0400
@@ -21,6 +21,7 @@
</options>
</param>
<param name="dbkey" type="genomebuild" label="Genome" />
+ <param name="other_dbkey" type="text" label="Or user-defined Genome" />
</inputs>
<help>
diff -r f7336991d0ee -r 854ec7739cfd tools/new_operations/complement.xml
--- a/tools/new_operations/complement.xml Thu Apr 23 09:20:11 2009 -0400
+++ b/tools/new_operations/complement.xml Thu Apr 23 13:46:52 2009 -0400
@@ -1,6 +1,6 @@
<tool id="gops_complement_1" name="Complement">
<description>intervals of a query</description>
- <command interpreter="python">gops_complement.py $input1 $output -1 ${input1.metadata.chromCol},${input1.metadata.startCol},${input1.metadata.endCol},${input1.metadata.strandCol} -l ${GALAXY_DATA_INDEX_DIR}/shared/ucsc/chrom/${dbkey}.len $allchroms</command>
+ <command interpreter="python">gops_complement.py $input1 $output -1 ${input1.metadata.chromCol},${input1.metadata.startCol},${input1.metadata.endCol},${input1.metadata.strandCol} -l ${chromInfo} $allchroms</command>
<inputs>
<param format="interval" name="input1" type="data">
<label>Complement regions of</label>
@@ -58,4 +58,4 @@
.. image:: ../static/operation_icons/gops_complement.gif
</help>
-</tool>
\ No newline at end of file
+</tool>
13 years, 7 months
[hg] galaxy 2445: Restrict the target history list to the users ...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/c69e55c91036
changeset: 2445:c69e55c91036
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Thu Jun 11 12:06:29 2009 -0400
description:
Restrict the target history list to the user's active histories when copying datasets between histories, clean up the relevant templates ( fixes ticket # 41 ), and add functional tests for copying history items between histories.
5 file(s) affected in this change:
lib/galaxy/web/controllers/dataset.py
templates/dataset/copy_view.mako
templates/dataset/edit_attributes.mako
test/base/twilltestcase.py
test/functional/test_history_functions.py
diffs (369 lines):
diff -r 004cd81cff72 -r c69e55c91036 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Wed Jun 10 15:46:26 2009 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Thu Jun 11 12:06:29 2009 -0400
@@ -159,7 +159,8 @@
raise "Error undeleting"
@web.expose
- def copy_datasets( self, trans, source_dataset_ids = "", target_history_ids = "", new_history_name="", do_copy = False ):
+ def copy_datasets( self, trans, source_dataset_ids="", target_history_ids="", new_history_name="", do_copy=False, **kwd ):
+ params = util.Params( kwd )
user = trans.get_user()
history = trans.get_history()
create_new_history = False
@@ -202,7 +203,7 @@
error_msg = error_msg + "You do not have permission to add datasets to %i requested histories. " % ( len( target_history_ids ) - len( target_histories ) )
for data in map( trans.app.model.HistoryDatasetAssociation.get, source_dataset_ids ):
if data is None:
- error_msg = error_msg + "You tried to copy a non-existant dataset. "
+ error_msg = error_msg + "You tried to copy a dataset that does not exist. "
invalid_datasets += 1
elif data.history != history:
error_msg = error_msg + "You tried to copy a dataset which is not in your current history. "
@@ -220,8 +221,7 @@
source_datasets = history.active_datasets
target_histories = [history]
if user:
- target_histories = user.histories
-
+ target_histories = user.active_histories
return trans.fill_template( "/dataset/copy_view.mako",
source_dataset_ids = source_dataset_ids,
target_history_ids = target_history_ids,
diff -r 004cd81cff72 -r c69e55c91036 templates/dataset/copy_view.mako
--- a/templates/dataset/copy_view.mako Wed Jun 10 15:46:26 2009 -0400
+++ b/templates/dataset/copy_view.mako Thu Jun 11 12:06:29 2009 -0400
@@ -5,65 +5,67 @@
${javascripts()}
%if error_msg:
-<p>
-<div class="errormessage">${error_msg}</div>
-<div style="clear: both"></div>
-</p>
+ <p>
+ <div class="errormessage">${error_msg}</div>
+ <div style="clear: both"></div>
+ </p>
%endif
%if done_msg:
-<p>
-<div class="donemessage">${done_msg}</div>
-<div style="clear: both"></div>
-</p>
+ <p>
+ <div class="donemessage">${done_msg}</div>
+ <div style="clear: both"></div>
+ </p>
%endif
<p>
-<div class="toolForm">
- <form>
- <div style="float: left; width: 50%; padding: 0px 0px 0px 0px;">
- <div class="toolFormTitle">Source History Items</div>
- <div class="toolFormBody">
- %for data in source_datasets:
- <%
- checked = ""
- if data.id in source_dataset_ids:
- checked = " checked"
- %>
- <div class="form-row"><input type="checkbox" name="source_dataset_ids" value="${data.id}"${checked}/> ${data.hid}: ${data.name}</div>
- %endfor
- </div>
- </div>
- <div style="float: right; width: 50%; padding: 0px 0px 0px 0px;">
- <div class="toolFormTitle">Target Histories</div>
- <div class="toolFormBody">
- %for i, hist in enumerate( target_histories ):
- <%
- checked = ""
- if hist.id in target_history_ids:
- checked = " checked"
- cur_history_text = ""
- if hist == trans.get_history():
- cur_history_text = " <strong>(current history)</strong>"
- %>
- <div class="form-row"><input type="checkbox" name="target_history_ids" value="${hist.id}"${checked}/> ${i + 1}${cur_history_text}: ${hist.name}</div>
- %endfor
- %if trans.get_user():
- <%
- checked = ""
- if "create_new_history" in target_history_ids:
- checked = " checked"
- %>
- <br>
- <div class="form-row"><input type="checkbox" name="target_history_ids" value="create_new_history"${checked}/>New history named: <input type="textbox" name="new_history_name" value="${new_history_name}"/></div>
- %endif
- </div>
- </div>
- <div style="clear: both"></div>
- <div class="form-row" align="center"><input type="submit" class="primary-button" name="do_copy" value="Copy History Items"/></div>
- </form>
-</div>
+ <div class="toolForm">
+ <form>
+ <div style="float: left; width: 50%; padding: 0px 0px 0px 0px;">
+ <div class="toolFormTitle">Source History Items</div>
+ <div class="toolFormBody">
+ %for data in source_datasets:
+ <%
+ checked = ""
+ if data.id in source_dataset_ids:
+ checked = " checked"
+ %>
+ <div class="form-row"><input type="checkbox" name="source_dataset_ids" value="${data.id}"${checked}/> ${data.hid}: ${data.name}</div>
+ %endfor
+ </div>
+ </div>
+ <div style="float: right; width: 50%; padding: 0px 0px 0px 0px;">
+ <div class="toolFormTitle">Target Histories</div>
+ <div class="toolFormBody">
+ %for i, hist in enumerate( target_histories ):
+ <%
+ checked = ""
+ if hist.id in target_history_ids:
+ checked = " checked"
+ cur_history_text = ""
+ if hist == trans.get_history():
+ cur_history_text = " <strong>(current history)</strong>"
+ %>
+ <div class="form-row"><input type="checkbox" name="target_history_ids" value="${hist.id}"${checked}/> ${i + 1}${cur_history_text}: ${hist.name}</div>
+ %endfor
+ %if trans.get_user():
+ <%
+ checked = ""
+ if "create_new_history" in target_history_ids:
+ checked = " checked"
+ %>
+ <br/>
+ <div class="form-row"><input type="checkbox" name="target_history_ids" value="create_new_history"${checked}/>New history named: <input type="textbox" name="new_history_name" value="${new_history_name}"/></div>
+ %endif
+ </div>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row" align="center">
+ <input type="submit" class="primary-button" name="do_copy" value="Copy History Items"/>
+ </div>
+ </form>
+ </div>
</p>
<div style="clear: both"></div>
<p>
-<div class="infomessage">Select any number of source history items and any number of target histories and click on "Copy History Items" to add a copy of each selected dataset to each selected history.</div>
-<div style="clear: both"></div>
+ <div class="infomessage">Select any number of source history items and any number of target histories and click "Copy History Items" to add a copy of each selected history item to each selected target history.</div>
+ <div style="clear: both"></div>
</p>
diff -r 004cd81cff72 -r c69e55c91036 templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Wed Jun 10 15:46:26 2009 -0400
+++ b/templates/dataset/edit_attributes.mako Thu Jun 11 12:06:29 2009 -0400
@@ -157,8 +157,16 @@
%endif
<p/>
<div class="toolForm">
-<div class="toolFormTitle">Copy History Item</div>
-<div class="toolFormBody">
- Click <a href="${h.url_for( controller='dataset', action='copy_datasets', source_dataset_ids=data.id, target_history_ids=data.history_id )}" target="galaxy_main">here</a> to make a copy of this history item.
+ <div class="toolFormTitle">Copy History Item</div>
+ <div class="toolFormBody">
+ <form name="copy_hda" action="${h.url_for( controller='dataset', action='copy_datasets', source_dataset_ids=data.id, target_history_ids=data.history_id )}" method="post">
+ <div class="form-row">
+ <input type="submit" name="change" value="Copy history item"/>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Make a copy of this history item in your current history or any of your active histories.
+ </div>
+ </form>
+ </div>
+ </div>
</div>
-</div>
diff -r 004cd81cff72 -r c69e55c91036 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Jun 10 15:46:26 2009 -0400
+++ b/test/base/twilltestcase.py Thu Jun 11 12:06:29 2009 -0400
@@ -181,8 +181,9 @@
self.home()
def new_history( self, name=None ):
"""Creates a new, empty history"""
+ self.home()
if name:
- self.visit_url( "%s/history_new?name=%s" % ( self.url, str( name ) ) )
+ self.visit_url( "%s/history_new?name=%s" % ( self.url, name ) )
else:
self.visit_url( "%s/history_new" % self.url )
self.check_history_for_string('Your history is empty')
@@ -191,7 +192,7 @@
"""Rename an existing history"""
self.home()
self.visit_page( "history/rename?id=%s&name=%s" %( id, new_name ) )
- check_str = 'History: %s renamed to: %s' % ( old_name, new_name )
+ check_str = 'History: %s renamed to: %s' % ( old_name, urllib.unquote( new_name ) )
self.check_page_for_string( check_str )
self.home()
def set_history( self ):
@@ -330,7 +331,7 @@
self.check_page_for_string( 'Attributes updated' )
self.home()
def convert_format( self, hda_id, target_type ):
- """Auto-detect history_dataset_association metadata"""
+ """Convert format of history_dataset_association"""
self.home()
self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
self.check_page_for_string( 'This will inspect the dataset and attempt' )
@@ -339,13 +340,36 @@
self.check_page_for_string( 'The file conversion of Convert BED to GFF on data' )
self.home()
def change_datatype( self, hda_id, datatype ):
- """Auto-detect history_dataset_association metadata"""
+ """Change format of history_dataset_association"""
self.home()
self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
self.check_page_for_string( 'This will change the datatype of the existing dataset but' )
tc.fv( 'change_datatype', 'datatype', datatype )
tc.submit( 'change' )
self.check_page_for_string( 'Edit Attributes' )
+ self.home()
+ def copy_history_item( self, source_dataset_ids='', target_history_ids=[], all_target_history_ids=[], deleted_history_ids=[] ):
+ """Copy 1 or more history_dataset_associations to 1 or more histories"""
+ self.home()
+ self.visit_url( "%s/dataset/copy_datasets?source_dataset_ids=%s" % ( self.url, source_dataset_ids ) )
+ self.check_page_for_string( 'Source History Items' )
+ # Make sure all of users active histories are displayed
+ for id in all_target_history_ids:
+ self.check_page_for_string( id )
+ # Make sure only active histories are displayed
+ for id in deleted_history_ids:
+ try:
+ self.check_page_for_string( id )
+ raise AssertionError, "deleted history id %d displayed in list of target histories" % id
+ except:
+ pass
+ # Check each history to which we want to copy the item
+ for id in target_history_ids:
+ tc.fv( '1', 'target_history_ids', id )
+ tc.submit( 'do_copy' )
+ no_source_ids = len( source_dataset_ids.split( ',' ) )
+ check_str = '%d datasets copied to %d histories.' % ( no_source_ids, len( target_history_ids ) )
+ self.check_page_for_string( check_str )
self.home()
def get_dataset_ids_in_history( self ):
"""Returns the ids of datasets in a history"""
diff -r 004cd81cff72 -r c69e55c91036 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Wed Jun 10 15:46:26 2009 -0400
+++ b/test/functional/test_history_functions.py Thu Jun 11 12:06:29 2009 -0400
@@ -1,3 +1,4 @@
+import urllib
import galaxy.model
from galaxy.model.orm import *
from base.twilltestcase import *
@@ -55,7 +56,7 @@
history1 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
assert history1 is not None, "Problem retrieving history1 from database"
self.upload_file( '1.bed', dbkey='hg18' )
- self.new_history( name='history2' )
+ self.new_history( name=urllib.quote( 'history2' ) )
global history2
history2 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
assert history2 is not None, "Problem retrieving history2 from database"
@@ -96,7 +97,7 @@
assert history3 is not None, "Problem retrieving history3 from database"
if history3.deleted:
raise AssertionError, "History id %d deleted when it should not be" % latest_history.id
- self.rename_history( str( history3.id ), history3.name, new_name='history3' )
+ self.rename_history( str( history3.id ), history3.name, new_name=urllib.quote( 'history 3' ) )
def test_020_history_list( self ):
"""Testing viewing previously stored histories"""
self.view_stored_active_histories()
@@ -131,7 +132,7 @@
global history4
history4 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
assert history4 is not None, "Problem retrieving history4 from database"
- self.rename_history( str( history4.id ), history4.name, new_name='history4' )
+ self.rename_history( str( history4.id ), history4.name, new_name=urllib.quote( 'history 4' ) )
history4.refresh()
self.upload_file( '2.bed', dbkey='hg18' )
id = '%s,%s' % ( str( history3.id ), str( history4.id ) )
@@ -195,7 +196,7 @@
global history5
history5 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
assert history5 is not None, "Problem retrieving history5 from database"
- self.rename_history( str( history5.id ), history5.name, new_name='history5' )
+ self.rename_history( str( history5.id ), history5.name, new_name=urllib.quote( 'history5' ) )
history5.refresh()
# Due to the limitations of twill ( not functional with the permissions forms ), we're forced
# to do this manually. At this point, we just want to restrict the access permission on history5
@@ -377,7 +378,7 @@
action='no_share' )
def test_055_history_show_and_hide_deleted_datasets( self ):
"""Testing displaying deleted history items"""
- self.new_history( name='temp_history1' )
+ self.new_history( name=urllib.quote( 'show hide deleted datasets' ) )
self.upload_file('1.bed', dbkey='hg18')
latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
@@ -393,7 +394,7 @@
self.check_page_for_string( 'Your history is empty' )
def test_060_deleting_and_undeleting_history_items( self ):
"""Testing deleting and un-deleting history items"""
- self.new_history( name='temp_history2' )
+ self.new_history( name=urllib.quote( 'delete undelete history items' ) )
# Add a new history item
self.upload_file( '1.bed', dbkey='hg15' )
self.home()
@@ -416,8 +417,54 @@
self.visit_url( "%s/history/?show_deleted=False" % self.url )
self.check_page_for_string( '1.bed' )
self.check_page_for_string( 'hg15' )
- def test_065_reset_data_for_later_test_runs( self ):
+ def test_065_copying_history_items_between_histories( self ):
+ """Testing copying history items between histories"""
+ self.new_history( name=urllib.quote( 'copy history items' ) )
+ global history6
+ history6 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history6 is not None, "Problem retrieving history6 from database"
+ self.upload_file( '1.bed', dbkey='hg18' )
+ hda1 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda1 is not None, "Problem retrieving hda1 from database"
+ # We'll just test copying 1 hda
+ source_dataset_ids=str( hda1.id )
+ # The valid list of target histories is only the user's active histories
+ all_target_history_ids = [ str( hda.id ) for hda in admin_user.active_histories ]
+ # Since history1 and history2 have been deleted, they should not be displayed in the list of target histories
+ # on the copy_view.mako form
+ deleted_history_ids = [ str( history1.id ), str( history2.id ) ]
+ # Test copying to the current history
+ target_history_ids=[ str( history6.id ) ]
+ self.copy_history_item( source_dataset_ids=source_dataset_ids,
+ target_history_ids=target_history_ids,
+ all_target_history_ids=all_target_history_ids,
+ deleted_history_ids=deleted_history_ids )
+ history6.refresh()
+ if len( history6.datasets ) != 2:
+ raise AssertionError, "Copying hda1 to the current history failed"
+ # Test copying 1 hda to another history
+ self.new_history( name=urllib.quote( 'copy history items - 2' ) )
+ global history7
+ history7 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history7 is not None, "Problem retrieving history7 from database"
+ # Switch back to our history from which we want to copy
+ self.switch_history( id=str( history6.id ), name=history6.name )
+ target_history_ids=[ str( history7.id ) ]
+ all_target_history_ids = [ str( hda.id ) for hda in admin_user.active_histories ]
+ # Test copying to the a history that is not the current history
+ target_history_ids=[ str( history7.id ) ]
+ self.copy_history_item( source_dataset_ids=source_dataset_ids,
+ target_history_ids=target_history_ids,
+ all_target_history_ids=all_target_history_ids,
+ deleted_history_ids=deleted_history_ids )
+ # Switch to the history to which we copied
+ self.switch_history( id=str( history7.id ), name=history7.name )
+ self.check_history_for_string( hda1.name )
+ def test_070_reset_data_for_later_test_runs( self ):
"""Reseting data to enable later test runs to pass"""
self.delete_history( id=str( history3.id ) )
self.delete_history( id=str( history4.id ) )
self.delete_history( id=str( history5.id ) )
+ self.delete_history( id=str( history6.id ) )
+ self.delete_history( id=str( history7.id ) )
13 years, 7 months
How to make Galaxy link to a local installed UCSC database?
by xue dong
Hello everyone,
I am new to Galaxy, and I have an idea which may not be new to people here. I work
on Arabidopsis. Maybe you know that UCSC does not offer genome assemblies
for plants.
So I will install UCSC locally and build the genome assembly for
Arabidopsis. This work is half done now.
I also installed Galaxy. If I understand correctly, Galaxy has a link to
UCSC so the user can get data from UCSC. Since what I am interested in is
Arabidopsis information, I hope Galaxy can retrieve data from the locally
installed UCSC in the future. I do not know if it is possible to make this idea
come true; maybe some of you have done it. If yes, I would be grateful if you
could tell me how to do it.
many thanks,
Xue
13 years, 7 months
[hg] galaxy 2444: Fixes for test_get_data functional test script.
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/004cd81cff72
changeset: 2444:004cd81cff72
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Wed Jun 10 15:46:26 2009 -0400
description:
Fixes for test_get_data functional test script.
1 file(s) affected in this change:
test/functional/test_get_data.py
diffs (152 lines):
diff -r 08194c60c290 -r 004cd81cff72 test/functional/test_get_data.py
--- a/test/functional/test_get_data.py Wed Jun 10 12:22:49 2009 -0400
+++ b/test/functional/test_get_data.py Wed Jun 10 15:46:26 2009 -0400
@@ -1,87 +1,74 @@
+import galaxy.model
+from galaxy.model.orm import *
from base.twilltestcase import TwillTestCase
-""" Tests are executed in order, sorted by name"""
-
class UploadData( TwillTestCase ):
- def test_00_multi_upload( self ):
- """test_get_data.test_multi_upload: Testing multiple uploads"""
- self.login()
- self.upload_file('1.bed')
- self.verify_dataset_correctness('1.bed')
- self.upload_file('2.bed', dbkey='hg17')
- self.verify_dataset_correctness('2.bed')
- self.upload_file('3.bed', dbkey='hg17', ftype='bed')
- self.verify_dataset_correctness('3.bed')
- self.upload_file('4.bed.gz', dbkey='hg17', ftype='bed')
- self.verify_dataset_correctness('4.bed')
- self.upload_file('1.scf', ftype='scf')
- self.verify_dataset_correctness('1.scf')
- self.upload_file('1.scf.zip', ftype='binseq.zip')
- self.verify_dataset_correctness('1.scf.zip')
+ def test_000_upload_files_from_disk( self ):
+ """Test uploading data files from disk"""
+ self.logout()
+ self.login( email='tst(a)bx.psu.edu' )
+ history1 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ self.upload_file( '1.bed' )
+ hda1 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda1 is not None, "Problem retrieving hda1 from database"
+ self.verify_dataset_correctness( '1.bed', hid=str( hda1.hid ) )
+ self.upload_file( '2.bed', dbkey='hg17' )
+ hda2 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda2 is not None, "Problem retrieving hda2 from database"
+ self.verify_dataset_correctness( '2.bed', hid=str( hda2.hid ) )
+ self.upload_file( '3.bed', dbkey='hg17', ftype='bed' )
+ hda3 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda3 is not None, "Problem retrieving hda3 from database"
+ self.verify_dataset_correctness( '3.bed', hid=str( hda3.hid ) )
+ self.upload_file( '4.bed.gz', dbkey='hg17', ftype='bed' )
+ hda4 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda4 is not None, "Problem retrieving hda4 from database"
+ self.verify_dataset_correctness( '4.bed', hid=str( hda4.hid ) )
+ self.upload_file( '1.scf', ftype='scf' )
+ hda5 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda5 is not None, "Problem retrieving hda5 from database"
+ self.verify_dataset_correctness( '1.scf', hid=str( hda5.hid ) )
+ self.upload_file( '1.scf.zip', ftype='binseq.zip' )
+ hda6 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda6 is not None, "Problem retrieving hda6 from database"
+ self.verify_dataset_correctness( '1.scf.zip', hid=str( hda6.hid ) )
+ self.delete_history( id=str( history1.id ) )
+ def test_005_url_paste( self ):
+ """Test url paste behavior"""
+ # Deleting the current history should have created a new history
+ self.check_history_for_string( 'Your history is empty' )
+ history2 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
self.upload_url_paste( 'hello world' )
self.check_history_for_string( 'Pasted Entry' )
self.check_history_for_string( 'hello world' )
- self.delete_history_item( 1 )
- self.delete_history_item( 2 )
- self.delete_history_item( 3 )
- self.delete_history_item( 4 )
- self.delete_history_item( 5 )
- self.delete_history_item( 6 )
- self.delete_history_item( 7 )
self.upload_url_paste( u'hello world' )
self.check_history_for_string( 'Pasted Entry' )
self.check_history_for_string( 'hello world' )
- self.delete_history_item( 8 )
- def test_9999_clean_up( self ):
- self.delete_history()
- self.logout()
-
-class GetEncodeData( TwillTestCase ):
-
- def test_00_get_encode_data( self ):
- """test_get_data.test_get_encode_data"""
- self.login()
- self.run_tool('encode_import_chromatin_and_chromosomes1', hg17=['cc.EarlyRepSeg.20051216.bed'] )
- #hg17=[ "cc.EarlyRepSeg.20051216.bed", "cc.EarlyRepSeg.20051216.gencode_partitioned.bed", "cc.LateRepSeg.20051216.bed", "cc.LateRepSeg.20051216.gencode_partitioned.bed", "cc.MidRepSeg.20051216.bed", "cc.MidRepSeg.20051216.gencode_partitioned.bed" ] )
+ self.delete_history( id=str( history2.id ) )
+ def test_010_upload_encode_data( self ):
+ """Test uploading encode data"""
+ # Deleting the current history should have created a new history
+ self.check_history_for_string( 'Your history is empty' )
+ history3 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ self.run_tool( 'encode_import_chromatin_and_chromosomes1', hg17=['cc.EarlyRepSeg.20051216.bed'] )
self.wait()
- self.verify_dataset_correctness('cc.EarlyRepSeg.20051216.bed', hid=1)
- #self.verify_dataset_correctness('cc.EarlyRepSeg.20051216.gencode_partitioned.bed', hid=2)
- #self.verify_dataset_correctness('cc.LateRepSeg.20051216.bed', hid=3)
- #self.verify_dataset_correctness('cc.LateRepSeg.20051216.gencode_partitioned.bed', hid=4)
- #self.verify_dataset_correctness('cc.MidRepSeg.20051216.bed', hid=5)
- #self.verify_dataset_correctness('cc.MidRepSeg.20051216.gencode_partitioned.bed', hid=6)
+ hda7 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda7 is not None, "Problem retrieving hda7 from database"
+ self.verify_dataset_correctness( 'cc.EarlyRepSeg.20051216.bed', hid=str( hda7.hid ) )
self.run_tool('encode_import_gencode1', hg17=['gencode.CDS.20051206.bed'])
self.wait()
- self.verify_dataset_correctness('sc_3D_cds.bed', hid=2)
- self.delete_history_item( 1 )
- self.delete_history_item( 2 )
- def test_9999_clean_up( self ):
- self.delete_history()
+ hda8 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda8 is not None, "Problem retrieving hda8 from database"
+ self.verify_dataset_correctness( 'sc_3D_cds.bed', hid=str( hda8.hid ) )
+ self.delete_history( id=str( history3.id ) )
+ def test_015_reset_data_for_later_test_runs( self ):
+ """Reseting data to enable later test runs to pass"""
self.logout()
-
-class DataSources( TwillTestCase ):
-
- #def test_hbvar(self):
- # """Getting hybrid gene mutations from HbVar"""
- # #self.load_cookies("hbvar_cookie.txt")
- # self.clear_history()
- # self.run_tool('hbvar')
- # params = dict(
- # htyp="any hybrid gene",
- # )
- # self.submit_form(form=1, button="Submit Query", **params)
- # params = dict(
- # display_format="galaxy",
- # )
- # self.submit_form(form=1, button="Go", **params)
- # params = dict(
- # build="hg17",
- # )
- # self.submit_form(form=1, button="ok", **params);
- # """
- # TODO: Currently fails when using sqlite, although successful when
- # using Postgres. Upgrading our version of sqlite may fix this, but
- # confirmation is required.
- # """
- # self.verify_dataset_correctness('hbvar_hybrid_genes.dat')
- pass
13 years, 7 months
Round-Robin Scheduling
by Assaf Gordon
Hello,
I have another question regarding the local job scheduler:
Is it possible to limit the number of jobs *per user* ?
That is - any given user can have up to X number of jobs running
concurrently, regardless of the value of local_job_queue_workers ?
Imagine the following situation:
local_job_queue_worker = 5
job_scheduler_policy =
galaxy.jobs.schedulingpolicy.roundrobin:UserRoundRobin
Which means that at any given moment, galaxy can run only five jobs.
Now, Galaxy is completely Idle, no jobs are running.
One user starts 7 very long-running jobs (each job will take about two
hours).
If I understand correctly - since no jobs are running, 5 of the user's
job will be started immediately, even with the round-robin policy, right ?
And this means that for the next two hours, every other user which
starts a job - his/her job will be either new or limbo-running, but none
will actually be started, right ?
I think I'm experiencing this situation on my galaxy server.
Users are complaining their jobs have been running 'forever' or not even
starting for a long long time.
Close examination shows that there are running 5 jobs (all from the same
user) which have been running for three hours, and they are kind of
hogging all the worker threads.
To make a long story short -
I would like to make sure a single user can't hog Galaxy.
Is it possible with the local job runner, and if not - is it possible
with the SGE job runner ?
Thanks for reading so far,
Gordon.
13 years, 7 months
Wrappers for Sequence Alignment Programs
by SHAUN WEBB
Hi,
I am planning on integrating sequence alignment programs to our local
instance of Galaxy. I was wondering if anyone else has developed
wrappers for the following programs: Maq, Bowtie, Tophat.
Also, is there a central repository for any new tools or wrappers that
have been created by other developers? It would certainly be a useful
resource and if I need to wrap these tools myself I would be happy
share the code.
Shaun Webb
--
The University of Edinburgh is a charitable body, registered in
Scotland, with registration number SC005336.
13 years, 7 months
[hg] galaxy 2442: Fix missing split/join char in Join tool when ...
by Nate Coraor
details: http://www.bx.psu.edu/hg/galaxy/rev/46bbe610288a
changeset: 2442:46bbe610288a
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Wed Jun 10 11:58:21 2009 -0400
description:
Fix missing split/join char in Join tool when filling nonjoined columns.
1 file(s) affected in this change:
tools/filters/join.py
diffs (20 lines):
diff -r c0aa8af62124 -r 46bbe610288a tools/filters/join.py
--- a/tools/filters/join.py Tue Jun 09 15:03:04 2009 -0400
+++ b/tools/filters/join.py Wed Jun 10 11:58:21 2009 -0400
@@ -288,12 +288,14 @@
if not written and keep_unmatched:
out.write( fill_empty_columns( line1.rstrip( '\r\n' ), split, fill_options.file1_columns ) )
if fill_options:
- out.write( fill_empty_columns( "", split, fill_options.file2_columns ) )
+ if fill_options.file2_columns:
+ out.write( "%s%s" % ( split, fill_empty_columns( "", split, fill_options.file2_columns ) ) )
out.write( "\n" )
elif keep_partial:
out.write( fill_empty_columns( line1.rstrip( '\r\n' ), split, fill_options.file1_columns ) )
if fill_options:
- out.write( fill_empty_columns( "", split, fill_options.file2_columns ) )
+ if fill_options.file2_columns:
+ out.write( "%s%s" % ( split, fill_empty_columns( "", split, fill_options.file2_columns ) ) )
out.write( "\n" )
out.close()
13 years, 7 months
[hg] galaxy 2443: Remote user bug found and fixed by Morita Hide...
by Nate Coraor
details: http://www.bx.psu.edu/hg/galaxy/rev/08194c60c290
changeset: 2443:08194c60c290
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Jun 10 12:22:49 2009 -0400
description:
Remote user bug found and fixed by Morita Hideyuki
1 file(s) affected in this change:
lib/galaxy/web/framework/__init__.py
diffs (12 lines):
diff -r 46bbe610288a -r 08194c60c290 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Wed Jun 10 11:58:21 2009 -0400
+++ b/lib/galaxy/web/framework/__init__.py Wed Jun 10 12:22:49 2009 -0400
@@ -268,7 +268,7 @@
invalidate_existing_session = True
user_for_new_session = self.__get_or_create_remote_user( remote_user_email )
log.warning( "User logged in as '%s' externally, but has a cookie as '%s' invalidating session",
- remote_user_email, prev_galaxy_session.user.email )
+ remote_user_email, galaxy_session.user.email )
else:
# No session exists, get/create user for new session
user_for_new_session = self.__get_or_create_remote_user( remote_user_email )
13 years, 7 months