galaxy-dev
details: http://www.bx.psu.edu/hg/galaxy/rev/ee43bace03db
changeset: 2451:ee43bace03db
user: James Taylor <james@jamestaylor.org>
date: Thu Jun 11 15:41:57 2009 -0400
description:
Commenting out 'other dbkey' in upload
1 file(s) affected in this change:
tools/data_source/upload.xml
diffs (12 lines):
diff -r 143dde05e1df -r ee43bace03db tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Jun 11 15:37:19 2009 -0400
+++ b/tools/data_source/upload.xml Thu Jun 11 15:41:57 2009 -0400
@@ -25,7 +25,7 @@
</param>
</upload_dataset>
<param name="dbkey" type="genomebuild" label="Genome" />
- <param name="other_dbkey" type="text" label="Or user-defined Genome" />
+ <!-- <param name="other_dbkey" type="text" label="Or user-defined Genome" /> -->
</inputs>
<help>
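A quick note on the change above: because XML comments are invisible to the tool-config parser, wrapping the param in <!-- --> removes 'other_dbkey' from the upload tool's inputs without deleting the line. A minimal, self-contained sketch (illustrative only, not part of the changeset) showing the effect with Python's standard ElementTree:

import xml.etree.ElementTree as ET

# Same fragment as the patched upload.xml: the second param is commented out.
snippet = """<inputs>
  <param name="dbkey" type="genomebuild" label="Genome" />
  <!-- <param name="other_dbkey" type="text" label="Or user-defined Genome" /> -->
</inputs>"""

# ElementTree drops comments while parsing, so only 'dbkey' survives.
root = ET.fromstring(snippet)
print([p.get("name") for p in root.findall("param")])  # ['dbkey']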
details: http://www.bx.psu.edu/hg/galaxy/rev/6125f71c838a
changeset: 2449:6125f71c838a
user: James Taylor <james@jamestaylor.org>
date: Thu Jun 11 12:20:03 2009 -0400
description:
Merging Ian's trackster update with current head
11 file(s) affected in this change:
datatypes_conf.xml.sample
lib/galaxy/datatypes/data.py
lib/galaxy/datatypes/interval.py
lib/galaxy/datatypes/registry.py
lib/galaxy/tools/actions/__init__.py
lib/galaxy/tools/actions/upload.py
lib/galaxy/tools/parameters/basic.py
lib/galaxy/web/controllers/root.py
lib/galaxy/web/framework/__init__.py
templates/dataset/edit_attributes.mako
tools/data_source/upload.xml
diffs (671 lines):
diff -r c69e55c91036 -r 6125f71c838a datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Thu Jun 11 12:06:29 2009 -0400
+++ b/datatypes_conf.xml.sample Thu Jun 11 12:20:03 2009 -0400
@@ -8,6 +8,9 @@
<converter file="interval_to_coverage.xml" target_datatype="coverage"/>
</datatype>
<datatype extension="binseq.zip" type="galaxy.datatypes.images:Binseq" mimetype="application/zip" display_in_upload="true"/>
+ <datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true">
+ <!-- no converters yet -->
+ </datatype>
<datatype extension="coverage" type="galaxy.datatypes.coverage:LastzCoverage" display_in_upload="true">
<indexer file="coverage.xml" />
</datatype>
@@ -31,7 +34,7 @@
<datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/>
<datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true">
<converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
- <indexer file="interval.xml" />
+ <indexer file="interval_awk.xml" />
</datatype>
<datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/>
<datatype extension="laj" type="galaxy.datatypes.images:Laj"/>
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/chrominfo.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/chrominfo.py Thu Jun 11 12:20:03 2009 -0400
@@ -0,0 +1,14 @@
+import data
+from galaxy import util
+from galaxy.datatypes.sniff import *
+from galaxy.web import url_for
+from tabular import Tabular
+from galaxy.datatypes import metadata
+from galaxy.datatypes.metadata import MetadataElement
+
+
+class ChromInfo( Tabular ):
+ file_ext = "len"
+ MetadataElement( name="chrom", default=1, desc="Chrom column", param=metadata.ColumnParameter )
+ MetadataElement( name="length", default=2, desc="Length column", param=metadata.ColumnParameter )
+
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Thu Jun 11 12:20:03 2009 -0400
@@ -42,7 +42,7 @@
__metaclass__ = DataMeta
"""Add metadata elements"""
- MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.SelectParameter, multiple=False, values=util.dbnames, no_value="?" )
+ MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.DBKeyParameter, multiple=False, no_value="?" )
"""Stores the set of display applications, and viewing methods, supported by this datatype """
supported_display_apps = {}
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/indexers/coverage.py
--- a/lib/galaxy/datatypes/indexers/coverage.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/coverage.py Thu Jun 11 12:20:03 2009 -0400
@@ -37,7 +37,7 @@
os.rename( fname+".npy", fname )
# Write average
- for window in 10, 100, 1000, 10000:
+ for window in 10, 100, 1000, 10000, 100000:
input = scores.copy()
size = len( input )
input.resize( ( ( size / window ), window ) )
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/indexers/interval.awk
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/indexers/interval.awk Thu Jun 11 12:20:03 2009 -0400
@@ -0,0 +1,43 @@
+BEGIN {
+ # from galaxy.utils
+ mapped_chars[">"] = "__gt__"
+ mapped_chars["<"] = "__lt__"
+ mapped_chars["'"] = "__sq__"
+ mapped_chars["\""] = "__dq__"
+ mapped_chars["\\["] = "__ob__"
+ mapped_chars["\\]"] = "__cb__"
+ mapped_chars["\\{"] = "__oc__"
+ mapped_chars["\\}"] = "__cc__"
+ mapped_chars["@"] = "__at__"
+ # additional, not in galaxy.utils
+ mapped_chars["/"] = "__fs__"
+ mapped_chars["^manifest\.tab$"] = "__manifest.tab__"
+}
+function escape_filename( name )
+{
+ for( char in mapped_chars ) {
+ gsub( char, mapped_chars[char], name )
+ }
+ return name
+}
+!_[$chrom]++ {
+ # close files only when we switch to a new one.
+ fn && close(fn)
+ fn = storepath "/" escape_filename($1) }
+{
+ print $0 >> fn;
+ # the || part is needed to catch 0 length chromosomes, which
+ # should never happen but...
+ if ($end > chroms[$chrom] || !chroms[$chrom])
+ chroms[$chrom] = $end }
+END {
+ fn = storepath "/manifest.tab"
+ for( x in chroms ) {
+ # add line to manifest
+ print x "\t" chroms[x] >> fn
+ chromfile = storepath "/" escape_filename(x)
+ # sort in-place
+ system( "sort -f -n -k " chrom " -k " start " -k " end " -o " chromfile " " chromfile )
+ close(chromfile)
+ }
+}
\ No newline at end of file
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/indexers/interval.py
--- a/lib/galaxy/datatypes/indexers/interval.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/interval.py Thu Jun 11 12:20:03 2009 -0400
@@ -29,13 +29,11 @@
manifest[chrom] = max(manifest.get(chrom,0),line.end)
if not lastchrom == chrom:
if current_file:
- current_file.flush()
current_file.close()
current_file = open( os.path.join( out_path, "%s" % chrom), "a" )
print >> current_file, "\t".join(line)
lastchrom = chrom
if current_file:
- current_file.flush()
current_file.close()
return manifest
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/indexers/interval_awk.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/indexers/interval_awk.xml Thu Jun 11 12:20:03 2009 -0400
@@ -0,0 +1,16 @@
+<tool id="INDEXER_Interval_0" name="Index Interval for Track Viewer">
+ <!-- Used internally to generate track indexes -->
+ <command interpreter="awk -f">interval.awk
+ chrom=${input_dataset.metadata.chromCol} start=${input_dataset.metadata.startCol}
+ end=${input_dataset.metadata.endCol} strand=${input_dataset.metadata.strandCol}
+ storepath=${store_path}
+ $input_dataset 2>&1
+ </command>
+ <inputs>
+ <page>
+ <param format="interval" name="input_dataset" type="data" label="Choose intervals"/>
+ </page>
+ </inputs>
+ <help>
+ </help>
+</tool>
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/indexers/wiggle.py
--- a/lib/galaxy/datatypes/indexers/wiggle.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/wiggle.py Thu Jun 11 12:20:03 2009 -0400
@@ -18,6 +18,8 @@
from numpy import *
import tempfile
import os
+from galaxy.tracks.store import sanitize_name
+
def write_chrom(max, out_base, instream):
@@ -35,7 +37,7 @@
os.rename( fname+".npy", fname )
# Write average
- for window in 10, 100, 1000, 10000:
+ for window in 10, 100, 1000, 10000, 100000:
input = scores.copy()
size = len( input )
input.resize( ( ( size / window ), window ) )
@@ -60,7 +62,7 @@
LEN[chrom] = max2( LEN.get(chrom,0), pos+1 )
for chrom, stream in chroms.items():
stream.seek(0)
- prefix = os.path.join(sys.argv[2], chrom)
+ prefix = os.path.join(sys.argv[2], sanitize_name(chrom))
write_chrom( LEN[chrom], prefix, stream )
manifest_file = open( os.path.join( sys.argv[2], "manifest.tab" ),"w" )
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/interval.py Thu Jun 11 12:20:03 2009 -0400
@@ -798,7 +798,7 @@
# Determine appropriate resolution to plot ~1000 points
resolution = ( 10 ** math.ceil( math.log10( range / 1000 ) ) )
# Restrict to valid range
- resolution = min( resolution, 10000 )
+ resolution = min( resolution, 100000 )
resolution = max( resolution, 1 )
# Memory map the array (don't load all the data)
data = numpy.load( data )
@@ -815,7 +815,7 @@
# Determine appropriate resolution to plot ~1000 points
resolution = math.ceil( 10 ** math.ceil( math.log10( range / 1000 ) ) )
# Restrict to valid range
- resolution = min( resolution, 10000 )
+ resolution = min( resolution, 100000 )
resolution = max( resolution, 1 )
return resolution
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/metadata.py Thu Jun 11 12:20:03 2009 -0400
@@ -289,7 +289,22 @@
if value is None: return []
if not isinstance( value, list ): return [value]
return value
-
+
+
+class DBKeyParameter( SelectParameter ):
+ def get_html_field( self, value=None, context={}, other_values={}, values=None, **kwd):
+ try:
+ values = kwd['trans'].db_builds
+ except AttributeError: pass
+ return super(DBKeyParameter, self).get_html_field( value, context, other_values, values, **kwd)
+
+ def get_html( self, value=None, context={}, other_values={}, values=None, **kwd):
+ try:
+ values = kwd['trans'].db_builds
+ except AttributeError: pass
+ return super(DBKeyParameter, self).get_html( value, context, other_values, values, **kwd)
+
+
class RangeParameter( SelectParameter ):
def __init__( self, spec ):
SelectParameter.__init__( self, spec )
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/registry.py Thu Jun 11 12:20:03 2009 -0400
@@ -3,7 +3,7 @@
"""
import os
import logging
-import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks
+import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo
import galaxy.util
from galaxy.util.odict import odict
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/tools/actions/__init__.py Thu Jun 11 12:20:03 2009 -0400
@@ -107,6 +107,7 @@
out_data = {}
# Collect any input datasets from the incoming parameters
inp_data = self.collect_input_datasets( tool, incoming, trans )
+
# Deal with input dataset names, 'dbkey' and types
input_names = []
input_ext = 'data'
@@ -119,6 +120,16 @@
data = NoneDataset( datatypes_registry = trans.app.datatypes_registry )
if data.dbkey not in [None, '?']:
input_dbkey = data.dbkey
+
+ # Collect chromInfo dataset and add as parameters to incoming
+ db_datasets = {}
+ db_dataset = trans.db_dataset_for( input_dbkey )
+ if db_dataset:
+ db_datasets[ "chromInfo" ] = db_dataset
+ incoming[ "chromInfo" ] = db_dataset.file_name
+ else:
+ incoming[ "chromInfo" ] = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % input_dbkey )
+ inp_data.update( db_datasets )
# Determine output dataset permission/roles list
existing_datasets = [ inp for inp in inp_data.values() if inp ]
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Thu Jun 11 12:20:03 2009 -0400
@@ -668,7 +668,7 @@
>>> # Create a mock transcation with 'hg17' as the current build
>>> from galaxy.util.bunch import Bunch
- >>> trans = Bunch( history=Bunch( genome_build='hg17' ) )
+ >>> trans = Bunch( history=Bunch( genome_build='hg17' ), db_builds=util.dbnames )
>>> p = GenomeBuildParameter( None, XML(
... '''
@@ -703,10 +703,10 @@
"""
def get_options( self, trans, other_values ):
last_used_build = trans.history.genome_build
- for dbkey, build_name in util.dbnames:
+ for dbkey, build_name in trans.db_builds:
yield build_name, dbkey, ( dbkey == last_used_build )
def get_legal_values( self, trans, other_values ):
- return set( dbkey for dbkey, _ in util.dbnames )
+ return set( dbkey for dbkey, _ in trans.db_builds )
class ColumnListParameter( SelectToolParameter ):
"""
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/tracks/store.py
--- a/lib/galaxy/tracks/store.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/tracks/store.py Thu Jun 11 12:20:03 2009 -0400
@@ -1,5 +1,17 @@
import os
+import re
from string import Template
+from galaxy.util import sanitize_text
+
+# extra mappings/escape to keep users from traversing around the
+# filesystem and wreaking havoc
+extra_mappings = { r"/": "__fs__", r"^manifest.tab$": "__manifest.tab__" }
+
+def sanitize_name( name ):
+ name = sanitize_text( name )
+ for key, value in extra_mappings.items():
+ name = re.sub( key, value, name )
+ return name
class TemplateSubber( object ):
def __init__(self, obj):
@@ -56,7 +68,7 @@
fd.close()
def _get_object_path( self, chrom, resolution ):
- object_name = chrom
+ object_name = sanitize_name(chrom)
if resolution: object_name += "_%d" % resolution
return os.path.join( self.path, object_name )
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/web/controllers/root.py Thu Jun 11 12:20:03 2009 -0400
@@ -236,11 +236,15 @@
if spec.get("readonly"):
continue
optional = params.get("is_"+name, None)
+ other = params.get("or_"+name, None)
if optional and optional == 'true':
# optional element... == 'true' actually means it is NOT checked (and therefore omitted)
setattr(data.metadata, name, None)
else:
- setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
+ if other:
+ setattr( data.metadata, name, other )
+ else:
+ setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
data.datatype.after_edit( data )
trans.app.model.flush()
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Thu Jun 11 12:20:03 2009 -0400
@@ -1,13 +1,11 @@
-from mako import exceptions
-from mako.template import Template
-from mako.lookup import TemplateLookup
+import math
+
+import mimeparse
+from galaxy.tracks import messages
+from galaxy.util.json import to_json_string
from galaxy.web.base.controller import *
from galaxy.web.framework import simplejson
-from galaxy import web
-from galaxy.tracks import messages
-import mimeparse
-from galaxy.util.json import to_json_string
-import math
+
class MultiResponse(object):
"""
@@ -82,18 +80,19 @@
def build( self, trans, **kwargs ):
trans.session["track_sets"] = list(kwargs.keys())
trans.session.save()
- waiting = False
- for id, value in kwargs.items():
- status = self.data_handler( trans, id )
- if status == messages.PENDING:
- waiting = True
- if not waiting:
- return trans.response.send_redirect( web.url_for( controller='tracks', action='chroms', dbkey=trans.session["track_dbkey"]) )
- return trans.fill_template( 'tracks/build.mako' )
+ #waiting = False
+ #for id, value in kwargs.items():
+ # status = self.data_handler( trans, id )
+ # if status == messages.PENDING:
+ # waiting = True
+ #if not waiting:
+ return trans.response.send_redirect( web.url_for( controller='tracks/', action='index', chrom="" ) )
+ #return trans.fill_template( 'tracks/build.mako' )
@web.expose
def index(self, trans, **kwargs):
tracks = []
+ dbkey = ""
for track in trans.session["track_sets"]:
dataset = trans.app.model.HistoryDatasetAssociation.get( track )
tracks.append({
@@ -101,17 +100,23 @@
"name": dataset.name,
"id": dataset.id
})
+ dbkey = dataset.dbkey
chrom = kwargs.get("chrom","")
LEN = self.chroms_handler(trans, trans.session["track_dbkey"]).get(chrom,0)
return trans.fill_template( 'tracks/index.mako',
- tracks=tracks, chrom=chrom,
+ tracks=tracks, chrom=chrom, dbkey=dbkey,
LEN=LEN )
def chroms_handler(self, trans, dbkey ):
- db_manifest = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % dbkey )
+ db_manifest = trans.db_dataset_for( dbkey )
+ if not db_manifest:
+ db_manifest = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % dbkey )
+ else:
+ db_manifest = db_manifest.file_name
manifest = {}
if os.path.exists( db_manifest ):
for line in open( db_manifest ):
+ if line.startswith("#"): continue
line = line.rstrip("\r\n")
fields = line.split("\t")
manifest[fields[0]] = int(fields[1])
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/web/framework/__init__.py Thu Jun 11 12:20:03 2009 -0400
@@ -549,6 +549,31 @@
template = Template( source=template_string,
searchList=[context or kwargs, dict(caller=self)] )
return str(template)
+
+ @property
+ def db_builds( self ):
+ """
+ Returns the builds defined by galaxy and the builds defined by
+ the user (chromInfo in history).
+ """
+ dbnames = list()
+ datasets = self.app.model.HistoryDatasetAssociation.filter_by(deleted=False, history_id=self.history.id, extension="len").all()
+ if len(datasets) > 0:
+ dbnames.append( (util.dbnames.default_value, '--------- User Defined Builds ----------') )
+ for dataset in datasets:
+ dbnames.append( (dataset.dbkey, dataset.name) )
+ dbnames.extend( util.dbnames )
+ return dbnames
+
+ def db_dataset_for( self, dbkey ):
+ """
+ Returns the db_file dataset associated/needed by `dataset`, or `None`.
+ """
+ datasets = self.app.model.HistoryDatasetAssociation.filter_by(deleted=False, history_id=self.history.id, extension="len").all()
+ for ds in datasets:
+ if dbkey == ds.dbkey:
+ return ds
+ return None
class FormBuilder( object ):
"""
diff -r c69e55c91036 -r 6125f71c838a static/scripts/trackster.js
--- a/static/scripts/trackster.js Thu Jun 11 12:06:29 2009 -0400
+++ b/static/scripts/trackster.js Thu Jun 11 12:20:03 2009 -0400
@@ -85,7 +85,7 @@
var resolution = Math.pow( 10, Math.ceil( Math.log( range / DENSITY ) / Math.log( 10 ) ) );
resolution = Math.max( resolution, 1 );
- resolution = Math.min( resolution, 10000 );
+ resolution = Math.min( resolution, 100000 );
var parent_element = $("<div style='position: relative;'></div>");
this.content_div.children( ":first" ).remove();
@@ -152,10 +152,20 @@
var low = position * DENSITY * resolution;
var high = ( position + 1 ) * DENSITY * resolution;
cache[resolution][position] = { state: "loading" };
- $.getJSON( "data" + this.type, { chr: this.view.chr, low: low, high: high, dataset_id: this.track.dataset_id }, function ( data ) {
- cache[resolution][position] = { state: "loaded", values: data };
- $(document).trigger( "redraw" );
- });
+ // use closure to preserve this and parameters for getJSON
+ var fetcher = function (ref) {
+ return function () {
+ $.getJSON( "data" + ref.type, { chr: ref.view.chr, low: low, high: high, dataset_id: ref.track.dataset_id }, function ( data ) {
+ if( data == "pending" ) {
+ setTimeout( fetcher, 5000 );
+ } else {
+ cache[resolution][position] = { state: "loaded", values: data };
+ }
+ $(document).trigger( "redraw" );
+ });
+ };
+ }(this);
+ fetcher();
}
return cache[resolution][position];
}
@@ -288,8 +298,11 @@
var chunk = this.cache.get( resolution, tile_index );
if ( chunk.state == "loading" ) {
- return null;
- }
+ parent_element.addClass("loading");
+ return null;
+ } else {
+ parent_element.removeClass("loading");
+ }
var values = chunk.values;
for ( var index in values ) {
diff -r c69e55c91036 -r 6125f71c838a static/trackster.css
--- a/static/trackster.css Thu Jun 11 12:06:29 2009 -0400
+++ b/static/trackster.css Thu Jun 11 12:20:03 2009 -0400
@@ -85,7 +85,10 @@
}
.loading {
- background: #DDDDDD;
+ background-image: url("/static/images/loading_large_white_bg.gif");
+ background-position: center center;
+ background-repeat: no-repeat;
+ min-height: 100px;
}
.label-track .label {
diff -r c69e55c91036 -r 6125f71c838a templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Thu Jun 11 12:06:29 2009 -0400
+++ b/templates/dataset/edit_attributes.mako Thu Jun 11 12:20:03 2009 -0400
@@ -46,7 +46,7 @@
${spec.desc}:
</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- ${data.metadata.get_html_by_name( name )}
+ ${data.metadata.get_html_by_name( name, trans=trans )}
</div>
<div style="clear: both"></div>
</div>
diff -r c69e55c91036 -r 6125f71c838a templates/tracks/index.mako
--- a/templates/tracks/index.mako Thu Jun 11 12:06:29 2009 -0400
+++ b/templates/tracks/index.mako Thu Jun 11 12:20:03 2009 -0400
@@ -17,18 +17,19 @@
${parent.late_javascripts()}
<script type="text/javascript" src="/static/scripts/jquery.event.drag.js"></script>
<script type="text/javascript" src="/static/scripts/trackster.js"></script>
-<script>
+<script type="text/javascript">
- var view = new View( "${chrom}", ${LEN}, 0, ${LEN} );
+ var view = new View( "${chrom}", ${LEN}, 0, ${max(LEN,1)} );
var tracks = new TrackLayout( view );
-
+ var dbkey = "${dbkey}";
+
$(function() {
tracks.add( new LabelTrack( view, $("#viewport" ) ) );
%for track in tracks:
tracks.add( new ${track["type"]}( "${track["name"]}", view, $("#viewport" ), ${track["id"]} ) );
%endfor
-
+
$(document).bind( "redraw", function( e ) {
tracks.redraw();
});
@@ -56,9 +57,43 @@
view.high = new_high;
tracks.redraw();
});
+ tracks.redraw();
+ load_chroms();
+ });
- tracks.redraw();
- });
+ var load_chroms = function () {
+ var fetcher = function (ref) {
+ return function () {
+ $.getJSON( "chroms", { dbkey: dbkey }, function ( data ) {
+ // Hacky - check length of "object"
+ var chrom_length = 0;
+ for (key in data) chrom_length++;
+ if( chrom_length == 0 ) {
+ setTimeout( fetcher, 5000 );
+ } else {
+ var chrom_options = '';
+ for (key in data) {
+ if( key == view.chr ) {
+ chrom_options += '<option value="' + key + '" selected="true">' + key + '</option>';
+ } else {
+ chrom_options += '<option value="' + key + '">' + key + '</option>';
+ }
+ }
+ $("#chrom").html(chrom_options);
+ $("#chrom").bind( "change", function ( e ) {
+ $("#chr").submit();
+ });
+ if( view.chr == "" ) {
+ $("#chrom option:first").attr("selected", true);
+ $("#chrom").trigger( "change" );
+ }
+ }
+ });
+ };
+ }(this);
+ fetcher();
+ };
+
</script>
</%def>
@@ -79,11 +114,14 @@
<div id="nav">
<div id="nav-controls">
+ <form name="chr" id="chr" method="GET">
<a href="#" onclick="javascript:view.left(5);tracks.redraw();"><<</a>
<a href="#" onclick="javascript:view.left(2);tracks.redraw();"><</a>
-
- <span style="display: inline-block; width: 30em; text-align: center;">Viewing ${chrom}:<span id="low">0</span>-<span id="high">180857866</span></span>
-
+ <span style="display: inline-block; width: 30em; text-align: center;">Viewing
+ <select id="chrom" name="chrom">
+ <option value="">loading</option>
+ </select>
+ <span id="low">0</span>-<span id="high">180857866</span></span>
<span style="display: inline-block; width: 10em;">
<a href="#" onclick="javascript:view.zoom_in(2);tracks.redraw();">+</a>
<a href="#" onclick="javascript:view.zoom_out(2);tracks.redraw();">-</a>
@@ -91,6 +129,7 @@
<a href="#" onclick="javascript:view.right(2);tracks.redraw();">></a>
<a href="#" onclick="javascript:view.right(5);tracks.redraw();">>></a>
+ </form>
</div>
</div>
diff -r c69e55c91036 -r 6125f71c838a tools/annotation_profiler/annotation_profiler.xml
--- a/tools/annotation_profiler/annotation_profiler.xml Thu Jun 11 12:06:29 2009 -0400
+++ b/tools/annotation_profiler/annotation_profiler.xml Thu Jun 11 12:20:03 2009 -0400
@@ -1,6 +1,6 @@
<tool id="Annotation_Profiler_0" name="Profile Annotations" Version="1.0.0">
<description>for a set of genomic intervals</description>
- <command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p /depot/data2/galaxy/annotation_profiler/$dbkey $summary -l ${GALAXY_DATA_INDEX_DIR}/shared/ucsc/chrom/${dbkey}.len -b 3 -t $table_names</command>
+ <command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p /depot/data2/galaxy/annotation_profiler/$dbkey $summary -l ${chromInfo} -b 3 -t $table_names</command>
<inputs>
<param format="interval" name="input1" type="data" label="Choose Intervals">
<validator type="dataset_metadata_in_file" filename="annotation_profiler_valid_builds.txt" metadata_name="dbkey" metadata_column="0" message="Profiling is not currently available for this species."/>
diff -r c69e55c91036 -r 6125f71c838a tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Jun 11 12:06:29 2009 -0400
+++ b/tools/data_source/upload.xml Thu Jun 11 12:20:03 2009 -0400
@@ -25,6 +25,7 @@
</param>
</upload_dataset>
<param name="dbkey" type="genomebuild" label="Genome" />
+ <param name="other_dbkey" type="text" label="Or user-defined Genome" />
</inputs>
<help>
diff -r c69e55c91036 -r 6125f71c838a tools/new_operations/complement.xml
--- a/tools/new_operations/complement.xml Thu Jun 11 12:06:29 2009 -0400
+++ b/tools/new_operations/complement.xml Thu Jun 11 12:20:03 2009 -0400
@@ -1,6 +1,6 @@
<tool id="gops_complement_1" name="Complement">
<description>intervals of a query</description>
- <command interpreter="python">gops_complement.py $input1 $output -1 ${input1.metadata.chromCol},${input1.metadata.startCol},${input1.metadata.endCol},${input1.metadata.strandCol} -l ${GALAXY_DATA_INDEX_DIR}/shared/ucsc/chrom/${dbkey}.len $allchroms</command>
+ <command interpreter="python">gops_complement.py $input1 $output -1 ${input1.metadata.chromCol},${input1.metadata.startCol},${input1.metadata.endCol},${input1.metadata.strandCol} -l ${chromInfo} $allchroms</command>
<inputs>
<param format="interval" name="input1" type="data">
<label>Complement regions of</label>
@@ -58,4 +58,4 @@
.. image:: ../static/operation_icons/gops_complement.gif
</help>
-</tool>
\ No newline at end of file
+</tool>
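One piece of the trackster merge above worth a closer look is the filename escaping added in lib/galaxy/tracks/store.py: chromosome names become on-disk object names, so '/' and the reserved name 'manifest.tab' must be escaped to keep users from traversing the filesystem. Below is a self-contained sketch of that sanitize_name() logic; galaxy.util.sanitize_text is stubbed with a simplified stand-in (an assumption here -- the real function escapes a larger set of characters such as quotes and brackets):

import re

# Simplified stand-in for galaxy.util.sanitize_text (assumption: the real
# mapping covers more characters -- quotes, brackets, '@', etc.).
MAPPED_CHARS = {">": "__gt__", "<": "__lt__", "@": "__at__"}

def sanitize_text(text):
    for char, escape in MAPPED_CHARS.items():
        text = text.replace(char, escape)
    return text

# Extra mappings from the changeset: keep track names from traversing
# the filesystem or clobbering the manifest file.
EXTRA_MAPPINGS = {r"/": "__fs__", r"^manifest.tab$": "__manifest.tab__"}

def sanitize_name(name):
    name = sanitize_text(name)
    for pattern, escape in EXTRA_MAPPINGS.items():
        name = re.sub(pattern, escape, name)
    return name

print(sanitize_name("chr1/alt"))      # chr1__fs__alt
print(sanitize_name("manifest.tab"))  # __manifest.tab__

Note that the interval.awk indexer in the same changeset duplicates this mapping in awk (its BEGIN block adds the same "/" and manifest.tab entries on top of the galaxy.util set), so the escaping stays consistent between the awk-generated files and the Python track store.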
details: http://www.bx.psu.edu/hg/galaxy/rev/c6031c4e6546
changeset: 2448:c6031c4e6546
user: ianschenck@Thugunit.local
date: Thu Apr 23 15:11:29 2009 -0400
description:
Merge with main.
0 file(s) affected in this change:
diffs (1069 lines):
diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Apr 23 14:42:35 2009 -0400
+++ b/lib/galaxy/model/__init__.py Thu Apr 23 15:11:29 2009 -0400
@@ -229,7 +229,7 @@
return des
@property
def activatable_datasets( self ):
- return [ hda for hda in self.datasets if not hda.dataset.purged ] #this needs to be a list
+ return [ hda for hda in self.datasets if not hda.dataset.deleted ] #this needs to be a list
class UserRoleAssociation( object ):
def __init__( self, user, role ):
@@ -707,6 +707,12 @@
@property
def active_components( self ):
return list( self.active_folders ) + list( self.active_datasets )
+ @property
+ def activatable_datasets( self ):
+ return [ ld for ld in self.datasets if not ld.library_dataset_dataset_association.dataset.deleted ] #this needs to be a list
+ @property #make this a relation
+ def activatable_folders( self ):
+ return [ folder for folder in self.folders if not folder.purged ] #this needs to be a list
class LibraryDataset( object ):
# This class acts as a proxy to the currently selected LDDA
@@ -743,6 +749,14 @@
name = property( get_name, set_name )
def display_name( self ):
self.library_dataset_dataset_association.display_name()
+ def get_purged( self ):
+ return self.library_dataset_dataset_association.dataset.purged
+ def set_purged( self, purged ):
+ if purged:
+ raise Exception( "Not implemented" )
+ if not purged and self.purged:
+ raise Exception( "Cannot unpurge once purged" )
+ purged = property( get_purged, set_purged )
def get_library_item_info_templates( self, template_list=[], restrict=False ):
# If restrict is True, we'll return only those templates directly associated with this LibraryDataset
if self.library_dataset_info_template_associations:
@@ -750,7 +764,7 @@
if restrict not in [ 'True', True ]:
self.folder.get_library_item_info_templates( template_list, restrict )
return template_list
-
+
class LibraryDatasetDatasetAssociation( DatasetInstance ):
def __init__( self,
copied_from_history_dataset_association=None,
diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,74 @@
+import sys, logging, os, time
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+from migrate import migrate_engine
+from sqlalchemy import and_
+
+# load existing galaxy model, we are only changing data
+import galaxy.model
+from galaxy.model import mapping
+model = mapping.init( galaxy.model.Dataset.file_path, str( migrate_engine.url ) )
+
+def __guess_dataset_by_filename( filename ):
+ """Return a guessed dataset by filename"""
+ try:
+ fields = os.path.split( filename )
+ if fields:
+ if fields[-1].startswith( 'dataset_' ) and fields[-1].endswith( '.dat' ): #dataset_%d.dat
+ return model.Dataset.get( int( fields[-1][ len( 'dataset_' ): -len( '.dat' ) ] ) )
+ except:
+ pass #some parsing error, we can't guess Dataset
+ return None
+
+def upgrade():
+ log.debug( "Fixing a discrepancy concerning deleted shared history items." )
+ affected_items = 0
+ start_time = time.time()
+ for dataset in model.Dataset.filter( and_( model.Dataset.c.deleted == True, model.Dataset.c.purged == False ) ).all():
+ for dataset_instance in dataset.history_associations + dataset.library_associations:
+ if not dataset_instance.deleted:
+ dataset.deleted = False
+ if dataset.file_size in [ None, 0 ]:
+ dataset.set_size() #Restore filesize
+ affected_items += 1
+ break
+ galaxy.model.mapping.Session.flush()
+ log.debug( "%i items affected, and restored." % ( affected_items ) )
+ log.debug( "Time elapsed: %s" % ( time.time() - start_time ) )
+
+ #fix share before hda
+ log.debug( "Fixing a discrepancy concerning cleaning up deleted history items shared before HDAs." )
+ dataset_by_filename = {}
+ changed_associations = 0
+ start_time = time.time()
+ for dataset in model.Dataset.filter( model.Dataset.external_filename.like( '%dataset_%.dat' ) ).all():
+ if dataset.file_name in dataset_by_filename:
+ guessed_dataset = dataset_by_filename[ dataset.file_name ]
+ else:
+ guessed_dataset = __guess_dataset_by_filename( dataset.file_name )
+ if guessed_dataset and dataset.file_name != guessed_dataset.file_name:#not os.path.samefile( dataset.file_name, guessed_dataset.file_name ):
+ guessed_dataset = None
+ dataset_by_filename[ dataset.file_name ] = guessed_dataset
+
+ if guessed_dataset is not None and guessed_dataset.id != dataset.id: #could we have a self referential dataset?
+ for dataset_instance in dataset.history_associations + dataset.library_associations:
+ dataset_instance.dataset = guessed_dataset
+ changed_associations += 1
+ #mark original Dataset as deleted and purged, it is no longer in use, but do not delete file_name contents
+ dataset.deleted = True
+ dataset.external_filename = "Dataset was result of share before HDA, and has been replaced: %s mapped to Dataset %s" % ( dataset.external_filename, guessed_dataset.id )
+ dataset.purged = True #we don't really purge the file here, but we mark it as purged, since this dataset is now defunct
+ galaxy.model.mapping.Session.flush()
+ log.debug( "%i items affected, and restored." % ( changed_associations ) )
+ log.debug( "Time elapsed: %s" % ( time.time() - start_time ) )
+
+def downgrade():
+ log.debug( "Downgrade is not possible." )
+
diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py Thu Apr 23 14:42:35 2009 -0400
+++ b/lib/galaxy/util/__init__.py Thu Apr 23 15:11:29 2009 -0400
@@ -146,7 +146,7 @@
elif isinstance( value, list ):
return map(sanitize_text, value)
else:
- raise Exception, 'Unknown parameter type'
+ raise Exception, 'Unknown parameter type (%s)' % ( type( value ) )
class Params:
"""
diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Thu Apr 23 14:42:35 2009 -0400
+++ b/lib/galaxy/web/controllers/admin.py Thu Apr 23 15:11:29 2009 -0400
@@ -822,11 +822,13 @@
msg=util.sanitize_text( msg ),
messagetype='error' ) )
created_ldda_ids = params.get( 'created_ldda_ids', '' )
+ show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
return trans.fill_template( '/admin/library/browse_library.mako',
library=trans.app.model.Library.get( id ),
created_ldda_ids=created_ldda_ids,
msg=msg,
- messagetype=messagetype )
+ messagetype=messagetype,
+ show_deleted=show_deleted )
@web.expose
@web.require_admin
def library( self, trans, id=None, **kwd ):
@@ -960,7 +962,8 @@
libraries=libraries,
deleted=True,
msg=msg,
- messagetype=messagetype )
+ messagetype=messagetype,
+ show_deleted = True )
@web.expose
@web.require_admin
def undelete_library( self, trans, **kwd ):
@@ -2040,6 +2043,50 @@
id=library_id,
msg=util.sanitize_text( msg ),
messagetype=messagetype ) )
+
+ @web.expose
+ @web.require_admin
+ def delete_library_item( self, trans, library_id = None, library_item_id = None, library_item_type = None ):
+ #this action will handle deleting all types of library items in library browsing mode
+ library_item_types = { 'library': trans.app.model.Library, 'folder': trans.app.model.LibraryFolder, 'dataset': trans.app.model.LibraryDataset, }
+ if library_item_type not in library_item_types:
+ raise ValueError( 'Bad library_item_type specified: %s' % library_item_types )
+ if library_item_id is None:
+ raise ValueError( 'library_item_id not specified' )
+ library_item = library_item_types[ library_item_type ].get( int( library_item_id ) )
+ library_item.deleted = True
+ library_item.flush()
+ #need to str because unicode isn't accepted...
+ msg = str( "%s '%s' has been marked deleted" % ( library_item_type, library_item.name ) )
+ messagetype = str( "done" )
+ if library_item_type == 'library' or library_id is None:
+ return self.browse_libraries( trans, msg = msg, messagetype = messagetype )
+ else:
+ return self.browse_library( trans, id = library_id , msg = msg, messagetype = messagetype )
+
+ @web.expose
+ @web.require_admin
+ def undelete_library_item( self, trans, library_id = None, library_item_id = None, library_item_type = None ):
+ #this action will handle deleting all types of library items in library browsing mode
+ library_item_types = { 'library': trans.app.model.Library, 'folder': trans.app.model.LibraryFolder, 'dataset': trans.app.model.LibraryDataset, }
+ if library_item_type not in library_item_types:
+ raise ValueError( 'Bad library_item_type specified: %s' % library_item_types )
+ if library_item_id is None:
+ raise ValueError( 'library_item_id not specified' )
+ library_item = library_item_types[ library_item_type ].get( int( library_item_id ) )
+ if library_item.purged:
+ raise ValueError( '%s %s cannot be undeleted' % ( library_item_type, library_item.name ) )
+ library_item.deleted = False
+ library_item.flush()
+ msg = str( "%s '%s' has been undeleted" % ( library_item_type, library_item.name ) )
+ messagetype = str( "done" )
+ if library_item_type == 'library' or library_id is None:
+ return self.browse_libraries( trans, msg = msg, messagetype = messagetype )
+ else:
+ return self.browse_library( trans, id = library_id , msg = msg, messagetype = messagetype )
+
+
+
#@web.expose
#@web.require_admin
#def delete_dataset( self, trans, id=None, **kwd):
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/cleanup_datasets.py
--- a/scripts/cleanup_datasets/cleanup_datasets.py Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/cleanup_datasets.py Thu Apr 23 15:11:29 2009 -0400
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-import sys, os, time, ConfigParser
+import sys, os, time, ConfigParser, shutil
from datetime import datetime, timedelta
from time import strftime
from optparse import OptionParser
@@ -15,7 +15,7 @@
pkg_resources.require( "SQLAlchemy >= 0.4" )
-from galaxy.model.orm import *
+from galaxy.model.orm import and_, eagerload
assert sys.version_info[:2] >= ( 2, 4 )
@@ -23,271 +23,172 @@
parser = OptionParser()
parser.add_option( "-d", "--days", dest="days", action="store", type="int", help="number of days (60)", default=60 )
parser.add_option( "-r", "--remove_from_disk", action="store_true", dest="remove_from_disk", help="remove datasets from disk when purged", default=False )
- parser.add_option( "-1", "--info_delete_userless_histories", action="store_true", dest="info_delete_userless_histories", default=False, help="info about the histories and datasets that will be affected by delete_userless_histories()" )
- parser.add_option( "-2", "--delete_userless_histories", action="store_true", dest="delete_userless_histories", default=False, help="delete userless histories and datasets" )
- parser.add_option( "-3", "--info_purge_histories", action="store_true", dest="info_purge_histories", default=False, help="info about histories and datasets that will be affected by purge_histories()" )
- parser.add_option( "-4", "--purge_histories", action="store_true", dest="purge_histories", default=False, help="purge deleted histories" )
- parser.add_option( "-5", "--info_purge_datasets", action="store_true", dest="info_purge_datasets", default=False, help="info about the datasets that will be affected by purge_datasets()" )
- parser.add_option( "-6", "--purge_datasets", action="store_true", dest="purge_datasets", default=False, help="purge deleted datasets" )
+ parser.add_option( "-i", "--info_only", action="store_true", dest="info_only", help="info about the requested action", default=False )
+
+ parser.add_option( "-1", "--delete_userless_histories", action="store_true", dest="delete_userless_histories", default=False, help="delete userless histories and datasets" )
+
+ parser.add_option( "-2", "--purge_histories", action="store_true", dest="purge_histories", default=False, help="purge deleted histories" )
+
+ parser.add_option( "-3", "--purge_datasets", action="store_true", dest="purge_datasets", default=False, help="purge deleted datasets" )
+
+ parser.add_option( "-4", "--purge_libraries", action="store_true", dest="purge_libraries", default=False, help="purge deleted libraries" )
+
+ parser.add_option( "-5", "--purge_folders", action="store_true", dest="purge_folders", default=False, help="purge deleted library folders" )
+
+
( options, args ) = parser.parse_args()
ini_file = args[0]
- if not ( options.info_delete_userless_histories ^ options.delete_userless_histories ^ \
- options.info_purge_histories ^ options.purge_histories ^ \
- options.info_purge_datasets ^ options.purge_datasets ):
+ if not ( options.purge_folders ^ options.delete_userless_histories ^ \
+ options.purge_libraries ^ options.purge_histories ^ \
+ options.purge_datasets ):
parser.print_help()
sys.exit(0)
+
+ if options.remove_from_disk and options.info_only:
+ parser.error( "remove_from_disk and info_only are mutually exclusive" )
conf_parser = ConfigParser.ConfigParser( {'here':os.getcwd()} )
conf_parser.read( ini_file )
configuration = {}
for key, value in conf_parser.items( "app:main" ):
configuration[key] = value
- database_connection = configuration['database_connection']
+
+ if 'database_connection' in configuration:
+ database_connection = configuration['database_connection']
+ else:
+ database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % configuration["database_file"]
file_path = configuration['file_path']
app = CleanupDatasetsApplication( database_connection=database_connection, file_path=file_path )
- h = app.model.History
- d = app.model.Dataset
- m = app.model.MetadataFile
cutoff_time = datetime.utcnow() - timedelta( days=options.days )
now = strftime( "%Y-%m-%d %H:%M:%S" )
-
+
print "\n# %s - Handling stuff older than %i days\n" % ( now, options.days )
-
- if options.info_delete_userless_histories:
- info_delete_userless_histories( h, cutoff_time )
- elif options.delete_userless_histories:
- delete_userless_histories( h, d, cutoff_time )
- if options.info_purge_histories:
- info_purge_histories( h, d, cutoff_time )
+
+ if options.info_only:
+ print "# Displaying info only ( --info_only )\n"
+ elif options.remove_from_disk:
+ print "# Datasets will be removed from disk.\n"
+ else:
+ print "# Datasets will NOT be removed from disk.\n"
+
+ if options.delete_userless_histories:
+ delete_userless_histories( app, cutoff_time, info_only = options.info_only )
elif options.purge_histories:
- if options.remove_from_disk:
- print "# Datasets will be removed from disk...\n"
- else:
- print "# Datasets will NOT be removed from disk...\n"
- purge_histories( h, d, m, cutoff_time, options.remove_from_disk )
- elif options.info_purge_datasets:
- info_purge_datasets( d, cutoff_time )
+ purge_histories( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
elif options.purge_datasets:
- if options.remove_from_disk:
- print "# Datasets will be removed from disk...\n"
- else:
- print "# Datasets will NOT be removed from disk...\n"
- purge_datasets( d, m, cutoff_time, options.remove_from_disk )
+ purge_datasets( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+ elif options.purge_libraries:
+ purge_libraries( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+ elif options.purge_folders:
+ purge_folders( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+
sys.exit(0)
-def info_delete_userless_histories( h, cutoff_time ):
- # Provide info about the histories and datasets that will be affected if the delete_userless_histories function is executed.
+def delete_userless_histories( app, cutoff_time, info_only = False ):
+ # Deletes userless histories whose update_time value is older than the cutoff_time.
+ # The purge history script will handle marking DatasetInstances as deleted.
+ # Nothing is removed from disk yet.
history_count = 0
- dataset_count = 0
- histories = h.filter( and_( h.table.c.user_id==None,
- h.table.c.deleted==False,
- h.table.c.update_time < cutoff_time ) ) \
- .options( eagerload( 'active_datasets' ) ).all()
-
- print '# The following datasets and associated userless histories will be deleted'
+ print '# The following datasets and associated userless histories have been deleted'
start = time.clock()
+ histories = app.model.History.filter( and_( app.model.History.table.c.user_id==None,
+ app.model.History.table.c.deleted==False,
+ app.model.History.table.c.update_time < cutoff_time ) ).all()# \
for history in histories:
- for dataset_assoc in history.active_datasets:
- if not dataset_assoc.deleted:
- # This check is not necessary since 'active_datasets' are not
- # deleted, but just being cautious
- print "dataset_%d" %dataset_assoc.dataset_id
- dataset_count += 1
+ if not info_only:
+ history.deleted = True
print "%d" % history.id
history_count += 1
+ app.model.flush()
stop = time.clock()
- print "# %d histories ( including a total of %d datasets ) will be deleted\n" %( history_count, dataset_count )
- print "Elapsed time: ", stop - start, "\n"
-
-def delete_userless_histories( h, d, cutoff_time ):
- # Deletes userless histories whose update_time value is older than the cutoff_time.
- # The datasets associated with each history are also deleted. Nothing is removed from disk.
- history_count = 0
- dataset_count = 0
-
- print '# The following datasets and associated userless histories have been deleted'
- start = time.clock()
- histories = h.filter( and_( h.table.c.user_id==None,
- h.table.c.deleted==False,
- h.table.c.update_time < cutoff_time ) ) \
- .options( eagerload( 'active_datasets' ) ).all()
- for history in histories:
- for dataset_assoc in history.active_datasets:
- if not dataset_assoc.deleted:
- # Mark all datasets as deleted
- datasets = d.filter( d.table.c.id==dataset_assoc.dataset_id ).all()
- for dataset in datasets:
- if not dataset.deleted:
- dataset.deleted = True
- dataset.flush()
- # Mark the history_dataset_association as deleted
- dataset_assoc.deleted = True
- dataset_assoc.clear_associated_files()
- dataset_assoc.flush()
- print "dataset_%d" % dataset_assoc.dataset_id
- dataset_count += 1
- history.deleted = True
- history.flush()
- print "%d" % history.id
- history_count += 1
- stop = time.clock()
- print "# Deleted %d histories ( including a total of %d datasets )\n" %( history_count, dataset_count )
+ print "# Deleted %d histories.\n" % ( history_count )
print "Elapsed time: ", stop - start, "\n"
-def info_purge_histories( h, d, cutoff_time ):
- # Provide info about the histories and datasets that will be affected if the purge_histories function is executed.
+
+def purge_histories( app, cutoff_time, remove_from_disk, info_only = False ):
+ # Purges deleted histories whose update_time is older than the cutoff_time.
+ # The dataset associations of each history are also marked as deleted.
+ # The Purge Dataset method will purge each Dataset as necessary
+ # history.purged == True simply means that it can no longer be undeleted
+ # i.e. all associated datasets are marked as deleted
history_count = 0
- dataset_count = 0
- disk_space = 0
- print '# The following datasets and associated deleted histories will be purged'
+ print '# The following datasets and associated deleted histories have been purged'
start = time.clock()
- histories = h.filter( and_( h.table.c.deleted==True,
- h.table.c.purged==False,
- h.table.c.update_time < cutoff_time ) ) \
+ histories = app.model.History.filter( and_( app.model.History.table.c.deleted==True,
+ app.model.History.table.c.purged==False,
+ app.model.History.table.c.update_time < cutoff_time ) ) \
.options( eagerload( 'datasets' ) ).all()
for history in histories:
for dataset_assoc in history.datasets:
- # Datasets can only be purged if their HistoryDatasetAssociation has been deleted.
- if dataset_assoc.deleted:
- datasets = d.filter( d.table.c.id==dataset_assoc.dataset_id ).all()
- for dataset in datasets:
- if dataset.purgable and not dataset.purged:
- print "%s" % dataset.file_name
- dataset_count += 1
- try:
- disk_space += dataset.file_size
- except:
- pass
+ _purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
+ if not info_only:
+ history.purged = True
print "%d" % history.id
history_count += 1
+ app.model.flush()
stop = time.clock()
- print '# %d histories ( including a total of %d datasets ) will be purged. Freed disk space: ' %( history_count, dataset_count ), disk_space, '\n'
+ print '# Purged %d histories.' % ( history_count ), '\n'
print "Elapsed time: ", stop - start, "\n"
-def purge_histories( h, d, m, cutoff_time, remove_from_disk ):
- # Purges deleted histories whose update_time is older than the cutoff_time.
- # The datasets associated with each history are also purged.
- history_count = 0
- dataset_count = 0
- disk_space = 0
- file_size = 0
- errors = False
- print '# The following datasets and associated deleted histories have been purged'
+def purge_libraries( app, cutoff_time, remove_from_disk, info_only = False ):
+ # Purges deleted libraries whose update_time is older than the cutoff_time.
+ # The dataset associations of each library are also marked as deleted.
+ # The Purge Dataset method will purge each Dataset as necessary
+ # library.purged == True simply means that it can no longer be undeleted
+ # i.e. all associated LibraryDatasets/folders are marked as deleted
+ library_count = 0
+ print '# The following libraries and associated folders have been purged'
start = time.clock()
- histories = h.filter( and_( h.table.c.deleted==True,
- h.table.c.purged==False,
- h.table.c.update_time < cutoff_time ) ) \
- .options( eagerload( 'datasets' ) ).all()
- for history in histories:
- errors = False
- for dataset_assoc in history.datasets:
- if dataset_assoc.deleted:
- datasets = d.filter( d.table.c.id==dataset_assoc.dataset_id ).all()
- for dataset in datasets:
- if dataset.purgable and not dataset.purged:
- file_size = dataset.file_size
- dataset.deleted = True
- dataset.file_size = 0
- if remove_from_disk:
- dataset.flush()
- errmsg = purge_dataset( dataset, d, m )
- if errmsg:
- errors = True
- print errmsg
- else:
- dataset.purged = True
- dataset.flush()
- print "%s" % dataset.file_name
- # Mark all associated MetadataFiles as deleted and purged
- print "The following metadata files associated with dataset '%s' have been marked purged" % dataset.file_name
- for hda in dataset.history_associations:
- for metadata_file in m.filter( m.table.c.hda_id==hda.id ).all():
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- for lda in dataset.library_associations:
- for metadata_file in m.filter( m.table.c.lda_id==lda.id ).all():
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- dataset_count += 1
- try:
- disk_space += file_size
- except:
- pass
- if not errors:
- history.purged = True
- history.flush()
- print "%d" % history.id
- history_count += 1
+ libraries = app.model.Library.filter( and_( app.model.Library.table.c.deleted==True,
+ app.model.Library.table.c.purged==False,
+ app.model.Library.table.c.update_time < cutoff_time ) ).all()
+ for library in libraries:
+ _purge_folder( library.root_folder, app, remove_from_disk, info_only = info_only )
+ if not info_only:
+ library.purged = True
+ print "%d" % library.id
+ library_count += 1
+ app.model.flush()
stop = time.clock()
- print '# Purged %d histories ( including a total of %d datasets ). Freed disk space: ' %( history_count, dataset_count ), disk_space, '\n'
+ print '# Purged %d libraries .' % ( library_count ), '\n'
print "Elapsed time: ", stop - start, "\n"
-def info_purge_datasets( d, cutoff_time ):
- # Provide info about the datasets that will be affected if the purge_datasets function is executed.
- dataset_count = 0
- disk_space = 0
- print '# The following deleted datasets will be purged'
+def purge_folders( app, cutoff_time, remove_from_disk, info_only = False ):
+ # Purges deleted folders whose update_time is older than the cutoff_time.
+ # The dataset associations of each folder are also marked as deleted.
+ # The Purge Dataset method will purge each Dataset as necessary
+ # libraryFolder.purged == True simply means that it can no longer be undeleted
+ # i.e. all associated LibraryDatasets/folders are marked as deleted
+ folder_count = 0
+ print '# The following folders have been purged'
start = time.clock()
- datasets = d.filter( and_( d.table.c.deleted==True,
- d.table.c.purgable==True,
- d.table.c.purged==False,
- d.table.c.update_time < cutoff_time ) ).all()
- for dataset in datasets:
- print "%s" % dataset.file_name
- dataset_count += 1
- try:
- disk_space += dataset.file_size
- except:
- pass
+ folders = app.model.LibraryFolder.filter( and_( app.model.LibraryFolder.table.c.deleted==True,
+ app.model.LibraryFolder.table.c.purged==False,
+ app.model.LibraryFolder.table.c.update_time < cutoff_time ) ).all()
+ for folder in folders:
+ _purge_folder( folder, app, remove_from_disk, info_only = info_only )
+ print "%d" % folder.id
+ folder_count += 1
stop = time.clock()
- print '# %d datasets will be purged. Freed disk space: ' %dataset_count, disk_space, '\n'
+ print '# Purged %d folders.' % ( folder_count ), '\n'
print "Elapsed time: ", stop - start, "\n"
-def purge_datasets( d, m, cutoff_time, remove_from_disk ):
+def purge_datasets( app, cutoff_time, remove_from_disk, info_only = False ):
# Purges deleted datasets whose update_time is older than cutoff_time. Files may or may
# not be removed from disk.
dataset_count = 0
disk_space = 0
- file_size = 0
print '# The following deleted datasets have been purged'
start = time.clock()
- datasets = d.filter( and_( d.table.c.deleted==True,
- d.table.c.purgable==True,
- d.table.c.purged==False,
- d.table.c.update_time < cutoff_time ) ).all()
+ datasets = app.model.Dataset.filter( and_( app.model.Dataset.table.c.deleted==True,
+ app.model.Dataset.table.c.purgable==True,
+ app.model.Dataset.table.c.purged==False,
+ app.model.Dataset.table.c.update_time < cutoff_time ) ).all()
for dataset in datasets:
file_size = dataset.file_size
- if remove_from_disk:
- errmsg = purge_dataset( dataset, d, m )
- if errmsg:
- print errmsg
- else:
- dataset_count += 1
- else:
- dataset.purged = True
- dataset.file_size = 0
- dataset.flush()
- print "%s" % dataset.file_name
- # Mark all associated MetadataFiles as deleted and purged
- print "The following metadata files associated with dataset '%s' have been marked purged" % dataset.file_name
- for hda in dataset.history_associations:
- for metadata_file in m.filter( m.table.c.hda_id==hda.id ).all():
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- for lda in dataset.library_associations:
- for metadata_file in m.filter( m.table.c.lda_id==lda.id ).all():
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- dataset_count += 1
+ _purge_dataset( dataset, remove_from_disk, info_only = info_only )
+ dataset_count += 1
try:
disk_space += file_size
except:
@@ -298,68 +199,90 @@
print '# Freed disk space: ', disk_space, '\n'
print "Elapsed time: ", stop - start, "\n"
-def purge_dataset( dataset, d, m ):
- # Removes the file from disk and updates the database accordingly.
+
+def _purge_dataset_instance( dataset_instance, app, remove_from_disk, include_children = True, info_only = False ):
+ #purging a dataset instance marks the instance as deleted,
+    #and marks the dataset as deleted if it is not associated with another DatasetInstance that is not deleted
+ if not info_only:
+ dataset_instance.mark_deleted( include_children = include_children )
+ dataset_instance.clear_associated_files()
+ dataset_instance.flush()
+ dataset_instance.dataset.refresh()
+ if _dataset_is_deletable( dataset_instance.dataset ):
+ _delete_dataset( dataset_instance.dataset, app, remove_from_disk, info_only = info_only )
+ #need to purge children here
+ if include_children:
+ for child in dataset_instance.children:
+ _purge_dataset_instance( child, app, remove_from_disk, include_children = include_children, info_only = info_only )
+
+def _dataset_is_deletable( dataset ):
+ #a dataset is deletable when it no longer has any non-deleted associations
+ return not bool( dataset.active_history_associations or dataset.active_library_associations )
+
+def _delete_dataset( dataset, app, remove_from_disk, info_only = False ):
+ #marks a base dataset as deleted, hdas/ldas associated with dataset can no longer be undeleted
+    #metadata files attached to associated DatasetInstances are removed now
+ if not _dataset_is_deletable( dataset ):
+ print "# This Dataset (%i) is not deletable, associated Metadata Files will not be removed.\n" % ( dataset.id )
+ else:
+ # Mark all associated MetadataFiles as deleted and purged and remove them from disk
+ print "The following metadata files attached to associations of Dataset '%s' have been purged:" % dataset.id
+ metadata_files = []
+    #let's create a list of metadata files, then perform actions on them
+ for hda in dataset.history_associations:
+ for metadata_file in app.model.MetadataFile.filter( app.model.MetadataFile.table.c.hda_id==hda.id ).all():
+ metadata_files.append( metadata_file )
+ for lda in dataset.library_associations:
+ for metadata_file in app.model.MetadataFile.filter( app.model.MetadataFile.table.c.lda_id==lda.id ).all():
+ metadata_files.append( metadata_file )
+ for metadata_file in metadata_files:
+ if not info_only:
+ if remove_from_disk:
+ try:
+ os.unlink( metadata_file.file_name )
+ except Exception, e:
+ print "# Error, exception: %s caught attempting to purge metadata file %s\n" %( str( e ), metadata_file.file_name )
+ metadata_file.purged = True
+ metadata_file.deleted = True
+ #metadata_file.flush()
+ print "%s" % metadata_file.file_name
+ print
+ dataset.deleted = True
+ #dataset.flush()
+ app.model.flush()
+
+def _purge_dataset( dataset, remove_from_disk, info_only = False ):
if dataset.deleted:
- purgable = True
- # Remove files from disk and update the database
try:
- # See if the dataset has been shared
- if dataset.external_filename:
- # This check handles the pre-history_dataset_association approach to sharing.
- shared_data = d.filter( and_( d.table.c.external_filename==dataset.external_filename,
- d.table.c.deleted==False ) ).all()
- if shared_data:
- purgable = False
- if purgable:
- # This check handles the history_dataset_association approach to sharing.
- for shared_data in dataset.history_associations:
- # Check to see if another dataset is using this file. This happens when a user shares
- # their history with another user. In this case, a new record is created in the dataset
- # table for each dataset, but the dataset records point to the same data file on disk. So
- # if 1 of the 2 users deletes the dataset from their history but the other doesn't, we need
- # to keep the dataset on disk for the 2nd user.
- if not shared_data.deleted:
- purgable = False
- break
- if purgable:
- # This check handles the library_dataset_dataset_association approach to sharing.
- for shared_data in dataset.library_associations:
- if not shared_data.deleted:
- purgable = False
- break
- if purgable:
- dataset.purged = True
- dataset.file_size = 0
- dataset.flush()
- # Remove dataset file from disk
- os.unlink( dataset.file_name )
+ if dataset.purgable and _dataset_is_deletable( dataset ):
print "%s" % dataset.file_name
- # Mark all associated MetadataFiles as deleted and purged and remove them from disk
- print "The following metadata files associated with dataset '%s' have been purged" % dataset.file_name
- for hda in dataset.history_associations:
- for metadata_file in m.filter( m.table.c.hda_id==hda.id ).all():
- os.unlink( metadata_file.file_name() )
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- for lda in dataset.library_associations:
- for metadata_file in m.filter( m.table.c.lda_id==lda.id ).all():
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- try:
- # Remove associated extra files from disk if they exist
- os.unlink( dataset.extra_files_path )
- except:
- pass
+ if not info_only:
+ # Remove files from disk and update the database
+ if remove_from_disk:
+ os.unlink( dataset.file_name )
+ # Remove associated extra files from disk if they exist
+ if dataset.extra_files_path and os.path.exists( dataset.extra_files_path ):
+ shutil.rmtree( dataset.extra_files_path ) #we need to delete the directory and its contents; os.unlink would always fail on a directory
+ dataset.purged = True
+ else:
+ print "# This dataset (%i) is not purgable, the file (%s) will not be removed.\n" % ( dataset.id, dataset.file_name )
except Exception, exc:
- return "# Error, exception: %s caught attempting to purge %s\n" %( str( exc ), dataset.file_name )
+ print "# Error, exception: %s caught attempting to purge %s\n" %( str( exc ), dataset.file_name )
else:
- return "# Error: '%s' has not previously been deleted, so it cannot be purged\n" %dataset.file_name
- return ""
+ print "# Error: '%s' has not previously been deleted, so it cannot be purged\n" % dataset.file_name
+ print ""
+
+def _purge_folder( folder, app, remove_from_disk, info_only = False ):
+ """Purges a folder and its contents, recursively"""
+ for ld in folder.datasets:
+ ld.deleted = True
+ for ldda in [ld.library_dataset_dataset_association] + ld.expired_datasets:
+ _purge_dataset_instance( ldda, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
+ for sub_folder in folder.folders:
+ _purge_folder( sub_folder, app, remove_from_disk, info_only = info_only )
+ if not info_only:
+ folder.purged = True
+ folder.flush()
class CleanupDatasetsApplication( object ):
"""Encapsulates the state of a Universe application"""
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/delete_userless_histories.sh
--- a/scripts/cleanup_datasets/delete_userless_histories.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/delete_userless_histories.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -2 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -1 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/delete_userless_histories_main.sh
--- a/scripts/cleanup_datasets/delete_userless_histories_main.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/delete_userless_histories_main.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -2 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -1 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_datasets.sh
--- a/scripts/cleanup_datasets/purge_datasets.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_datasets.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -6 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -3 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_datasets_main.sh
--- a/scripts/cleanup_datasets/purge_datasets_main.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_datasets_main.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -6 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -3 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_folders.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_folders.sh Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -5 -r $@ >> ./scripts/cleanup_datasets/purge_folders.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_folders_main.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_folders_main.sh Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -5 -r $@ >> ./scripts/cleanup_datasets/purge_folders.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_histories.sh
--- a/scripts/cleanup_datasets/purge_histories.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_histories.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -4 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -2 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_histories_main.sh
--- a/scripts/cleanup_datasets/purge_histories_main.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_histories_main.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -4 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -2 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_libraries.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_libraries.sh Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_libraries_main.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_libraries_main.sh Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
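Taken together, the renumbered flags in these cron scripts imply the following option-to-action map inside cleanup_datasets.py (reconstructed from which wrapper script passes which flag; the exact option parsing is an assumption):

    # Assumed mapping, inferred from the wrapper scripts above:
    ACTIONS = {
        '-1': 'delete_userless_histories',  # delete_userless_histories*.sh
        '-2': 'purge_histories',            # purge_histories*.sh
        '-3': 'purge_datasets',             # purge_datasets*.sh
        '-4': 'purge_libraries',            # purge_libraries*.sh (new)
        '-5': 'purge_folders',              # purge_folders*.sh (new)
    }
    # '-d N' sets the age cutoff in days; '-r' removes files from disk
    # rather than only flagging database rows as purged.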
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/base.css.tmpl
--- a/static/june_2007_style/base.css.tmpl Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/base.css.tmpl Thu Apr 23 15:11:29 2009 -0400
@@ -563,6 +563,7 @@
color: #333;
font-size: 110%;
font-weight: bold;
+ font-style: normal;
white-space: nowrap;
position: absolute;
z-index: 20000;
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/blue/base.css Thu Apr 23 15:11:29 2009 -0400
@@ -559,6 +559,7 @@
color: #333;
font-size: 110%;
font-weight: bold;
+ font-style: normal;
white-space: nowrap;
position: absolute;
z-index: 20000;
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/blue/library.css
--- a/static/june_2007_style/blue/library.css Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/blue/library.css Thu Apr 23 15:11:29 2009 -0400
@@ -4,6 +4,10 @@
.datasetHighlighted {
background-color: #C1C9E5;
+}
+
+.libraryItemDeleted-True {
+ font-style: italic;
}
div.historyItemBody {
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/library.css.tmpl
--- a/static/june_2007_style/library.css.tmpl Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/library.css.tmpl Thu Apr 23 15:11:29 2009 -0400
@@ -4,6 +4,10 @@
.datasetHighlighted {
background-color: $menu_bg_over;
+}
+
+.libraryItemDeleted-True {
+ font-style: italic;
}
div.historyItemBody {
diff -r 0cf5c25d1d2b -r c6031c4e6546 templates/admin/library/browse_library.mako
--- a/templates/admin/library/browse_library.mako Thu Apr 23 14:42:35 2009 -0400
+++ b/templates/admin/library/browse_library.mako Thu Apr 23 15:11:29 2009 -0400
@@ -93,7 +93,7 @@
%>
%if not root_folder:
<li class="folderRow libraryOrFolderRow" style="padding-left: ${pad}px;">
- <div class="rowTitle">
+ <div class="rowTitle libraryItemDeleted-${parent.deleted}">
<img src="${h.url_for( expander )}" class="expanderIcon"/><img src="${h.url_for( folder )}" class="rowIcon"/>
${parent.name}
%if parent.description:
@@ -101,7 +101,7 @@
%endif
<a id="folder-${parent.id}-popup" class="popup-arrow" style="display: none;">▼</a>
</div>
- %if not deleted:
+ %if not parent.deleted:
<%
library_item_ids = {}
library_item_ids[ 'folder' ] = parent.id
@@ -117,10 +117,11 @@
<a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, folder_id=parent.id, new_template=True )}">Add an information template to this folder</a>
%endif
<a class="action-button" href="${h.url_for( controller='admin', action='folder', permissions=True, id=parent.id, library_id=library_id )}">Edit this folder's permissions</a>
- ## TODO: need to revamp the way folders and contained LibraryDatasets are deleted
- ##%if subfolder:
- ## <a class="action-button" confirm="Click OK to delete the folder '${parent.name}'" href="${h.url_for( action='folder', delete=True, id=parent.id, library_id=library_id )}">Remove this folder and its contents from the library</a>
- ##%endif
+ <a class="action-button" confirm="Click OK to delete the folder '${parent.name}'" href="${h.url_for( controller='admin', action='delete_library_item', library_id=library_id, library_item_id=parent.id, library_item_type='folder' )}">Remove this folder and its contents from the library</a>
+ </div>
+ %else:
+ <div popupmenu="folder-${parent.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_id=library_id, library_item_id=parent.id, library_item_type='folder' )}">Undelete this folder</a>
</div>
%endif
</li>
@@ -130,10 +131,10 @@
%else:
<ul>
%endif
- %if library.deleted:
+ %if deleted:
<%
- parent_folders = parent.folders
- parent_datasets = parent.datasets
+ parent_folders = parent.activatable_folders
+ parent_datasets = parent.activatable_datasets
%>
%else:
<%
@@ -142,7 +143,7 @@
%>
%endif
%for folder in name_sorted( parent_folders ):
- ${render_folder( folder, pad, library.deleted, created_ldda_ids, library.id )}
+ ${render_folder( folder, pad, deleted, created_ldda_ids, library.id )}
%endfor
%for library_dataset in name_sorted( parent_datasets ):
<%
@@ -182,17 +183,20 @@
<table cellspacing="0" cellpadding="0" border="0" width="100%" class="libraryTitle">
<th width="*">
<img src="${h.url_for( '/static/images/silk/resultset_bottom.png' )}" class="expanderIcon"/><img src="${h.url_for( '/static/images/silk/book_open.png' )}" class="rowIcon"/>
- ${library.name}
- %if library.description:
- <i>- ${library.description}</i>
- %endif
+ <span class="libraryItemDeleted-${library.deleted}">
+ ${library.name}
+ %if library.description:
+ <i>- ${library.description}</i>
+ %endif
+ </span>
<a id="library-${library.id}-popup" class="popup-arrow" style="display: none;">▼</a>
+ <div popupmenu="library-${library.id}-popup">
%if not library.deleted:
<%
library_item_ids = {}
library_item_ids[ 'library' ] = library.id
%>
- <div popupmenu="library-${library.id}-popup">
+
<a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, information=True )}">Edit this library's information</a>
%if library.library_info_template_associations:
<% template = library.get_library_item_info_templates( template_list=[], restrict=False )[0] %>
@@ -201,15 +205,16 @@
<a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, new_template=True )}">Add an information template to this library</a>
%endif
<a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, permissions=True )}">Edit this library's permissions</a>
- ## TODO: need to revamp the way libraries, folders, and contained LibraryDatasets are deleted
- ##<a class="action-button" confirm="Current state will not be saved, so undeleting the library will restore all of its contents. Click OK to delete the library named '${library.name}'?" href="${h.url_for( controller='admin', action='library', delete=True, id=library.id )}">Delete this library and its contents</a>
- </div>
- ##%else:
- ## <div popupmenu="library-${library.id}-popup">
- ## <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library', id=library.id )}">Undelete this library and its contents</a>
- ## <a class="action-button" href="${h.url_for( controller='admin', action='purge_library', id=library.id )}">Purge this library and its contents</a>
- ## </div>
+ <a class="action-button" confirm="Current state will not be saved, so undeleting the library will restore all of its contents. Click OK to delete the library named '${library.name}'?" href="${h.url_for( controller='admin', action='delete_library_item', library_item_type='library', library_item_id=library.id )}">Delete this library and its contents</a>
+ %if show_deleted:
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=False )}">Hide deleted library items</a>
+ %else:
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=True )}">Show deleted library items</a>
+ %endif
+ %elif not library.purged:
+ <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_item_type='library', library_item_id=library.id )}">Undelete this library</a>
%endif
+ </div>
</th>
<th width="300">Information</th>
<th width="150">Uploaded By</th>
@@ -218,7 +223,7 @@
</div>
</li>
<ul>
- ${render_folder( library.root_folder, 0, library.deleted, created_ldda_ids, library.id )}
+ ${render_folder( library.root_folder, 0, library.deleted or show_deleted, created_ldda_ids, library.id )}
</ul>
<br/>
</ul>
diff -r 0cf5c25d1d2b -r c6031c4e6546 templates/admin/library/common.mako
--- a/templates/admin/library/common.mako Thu Apr 23 14:42:35 2009 -0400
+++ b/templates/admin/library/common.mako Thu Apr 23 15:11:29 2009 -0400
@@ -1,6 +1,6 @@
<% from time import strftime %>
-<%def name="render_dataset( library_dataset, selected, library )">
+<%def name="render_dataset( library_dataset, selected, library, show_deleted = False )">
<%
## The received data must always be a LibraryDataset object, but the object id passed to methods from the drop down menu
## should be the underlying ldda id to prevent id collision ( which could happen when displaying children, which are always
@@ -27,13 +27,15 @@
%else:
<input type="checkbox" name="ldda_ids" value="${ldda.id}"/>
%endif
- <a href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, info=True )}"><b>${ldda.name[:50]}</b></a>
- %if not library.deleted:
+ <span class="libraryItemDeleted-${library_dataset.deleted}">
+ <a href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, info=True )}"><b>${ldda.name[:50]}</b></a>
+ </span>
+ <a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a>
+ %if not library_dataset.deleted:
<%
library_item_ids = {}
library_item_ids[ 'ldda' ] = ldda.id
%>
- <a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a>
<div popupmenu="dataset-${ldda.id}-popup">
<a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, edit_info=True )}">Edit this dataset's information</a>
## We're disabling the ability to add templates at the LDDA and LibraryDataset level, but will leave this here for possible future use
@@ -46,7 +48,11 @@
<a class="action-button" href="${h.url_for( controller='admin', action='download_dataset_from_folder', id=ldda.id, library_id=library.id )}">Download this dataset</a>
%endif
##TODO: need to revamp the way we remove datasets from disk.
- ##<a class="action-button" confirm="Click OK to remove dataset '${ldda.name}'?" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, delete=True )}">Remove this dataset from the library</a>
+ <a class="action-button" confirm="Click OK to remove dataset '${ldda.name}'?" href="${h.url_for( controller='admin', action='delete_library_item', library_id=library.id, library_item_id=library_dataset.id, library_item_type='dataset' )}">Remove this dataset from the library</a>
+ </div>
+ %else:
+ <div popupmenu="dataset-${ldda.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_id=library.id, library_item_id=library_dataset.id, library_item_type='dataset' )}">Undelete this dataset</a>
</div>
%endif
</td>
details: http://www.bx.psu.edu/hg/galaxy/rev/143dde05e1df
changeset: 2450:143dde05e1df
user: James Taylor <james(a)jamestaylor.org>
date: Thu Jun 11 15:37:19 2009 -0400
description:
Fixing up track browser UI. No longer uses session, cleaned up a bunch of unnecessary stuff. Still pretty limited -- coverage indexing doesn't seem to work right
13 file(s) affected in this change:
lib/galaxy/web/controllers/tracks.py
static/scripts/trackster.js
static/trackster.css
templates/base_panels.mako
templates/tracks/browser.mako
templates/tracks/build.mako
templates/tracks/chroms.mako
templates/tracks/datasets.mako
templates/tracks/dbkeys.mako
templates/tracks/debug.mako
templates/tracks/index.mako
templates/tracks/new_browser.mako
templates/tracks/view.mako
diffs (839 lines):
diff -r 6125f71c838a -r 143dde05e1df lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Thu Jun 11 12:20:03 2009 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Thu Jun 11 15:37:19 2009 -0400
@@ -1,113 +1,96 @@
+"""
+Support for constructing and viewing custom "track" browsers within Galaxy.
+
+Track browsers are currently transient -- nothing is stored to the database
+when a browser is created. Building a browser consists of selecting a set
+of datasets associated with the same dbkey to display. Once selected, jobs
+are started to create any necessary indexes in the background, and the user
+is redirected to the browser interface, which loads the appropriate datasets.
+
+Problems
+--------
+ - Assumes that the only indexing type in Galaxy is for this particular
+ application. Thus, datatypes can only have one indexer, and the presence
+ of an indexer results in assuming that datatype can be displayed as a track.
+
+"""
+
import math
-import mimeparse
from galaxy.tracks import messages
from galaxy.util.json import to_json_string
from galaxy.web.base.controller import *
from galaxy.web.framework import simplejson
-
-class MultiResponse(object):
+class TracksController( BaseController ):
"""
- Shamelessly ripped off of a django snippet.
+ Controller for track browser interface. Handles building a new browser from
+ datasets in the current history, and display of the resulting browser.
"""
- def __init__(self, handlers):
- self.handlers = handlers
-
- def __call__(self, view_func):
- def wrapper(that, trans, *args, **kwargs):
- data_resource = view_func(that, trans, *args, **kwargs)
- content_type = mimeparse.best_match(self.handlers.keys(),
- trans.request.environ['HTTP_ACCEPT'])
- response = self.handlers[content_type](data_resource, trans)
- trans.response.headers['Content-Type'] = "%s" % content_type
- return response
- return wrapper
-
- @classmethod
- def JSON( cls, data_resource, trans ):
- return simplejson.dumps( data_resource )
- class XML( object ):
- def __call__(self, data_resource, trans ):
- raise NotImplementedError( "XML MultiResponse handler is not implemented." )
-
- class AMF( object ):
- def __call__(self, data_resource, trans ):
- raise NotImplementedError( "XML MultiResponse handler is not implemented." )
-
- class HTML( object ):
- def __init__(self, template ):
- self.template = template
-
- def __call__(self, data_resource, trans ):
- return trans.fill_template( self.template, data_resource=data_resource, trans=trans )
-
-class WebRoot( BaseController ):
+ @web.expose
+ def index( self, trans ):
+ return trans.fill_template( "tracks/index.mako" )
+
+ @web.expose
+ def new_browser( self, trans, dbkey=None, dataset_ids=None, browse=None ):
+ """
+ Build a new browser from datasets in the current history. Redirects
+ to 'index' once datasets to browse have been selected.
+ """
+ session = trans.sa_session
+        # If the user clicked the submit button explicitly, try to build the browser
+ if browse and dataset_ids:
+ dataset_ids = ",".join( map( str, dataset_ids ) )
+ trans.response.send_redirect( web.url_for( controller='tracks', action='browser', chrom="", dataset_ids=dataset_ids ) )
+ return
+ # Determine the set of all dbkeys that are used in the current history
+ dbkeys = [ d.metadata.dbkey for d in trans.get_history().datasets if not d.deleted ]
+ dbkey_set = set( dbkeys )
+ # If a dbkey argument was not provided, or is no longer valid, default
+ # to the first one
+ if dbkey is None or dbkey not in dbkey_set:
+ dbkey = dbkeys[0]
+ # Find all datasets in the current history that are of that dbkey and
+ # have an indexer.
+ datasets = {}
+ for dataset in session.query( model.HistoryDatasetAssociation ).filter_by( deleted=False, history_id=trans.history.id ):
+ if dataset.metadata.dbkey == dbkey and trans.app.datatypes_registry.get_indexers_by_datatype( dataset.extension ):
+ datasets[dataset.id] = dataset.name
+ # Render the template
+ return trans.fill_template( "tracks/new_browser.mako", dbkey=dbkey, dbkey_set=dbkey_set, datasets=datasets )
@web.expose
- @MultiResponse( {'text/html': MultiResponse.HTML( "tracks/dbkeys.mako"),
- 'text/javascript':MultiResponse.JSON} )
- def dbkeys(self, trans ):
- return list(set([x.metadata.dbkey for x in trans.get_history().datasets if not x.deleted]))
-
- @web.expose
- @MultiResponse( {'text/html':MultiResponse.HTML( "tracks/chroms.mako" ),
- 'text/javascript':MultiResponse.JSON} )
- def chroms(self, trans, dbkey=None):
- return self.chroms_handler( trans, dbkey )
-
- @web.expose
- @MultiResponse( {'text/html':MultiResponse.HTML( "tracks/datasets.mako" ),
- 'text/javascript':MultiResponse.JSON} )
- def list(self, trans, dbkey=None ):
- trans.session["track_dbkey"] = dbkey
- trans.session.save()
- datasets = trans.app.model.HistoryDatasetAssociation.filter_by(deleted=False, history_id=trans.history.id).all()
- dataset_list = {}
- for dataset in datasets:
- if dataset.metadata.dbkey == dbkey and trans.app.datatypes_registry.get_indexers_by_datatype( dataset.extension ):
- dataset_list[dataset.id] = dataset.name
- return dataset_list
-
- @web.expose
- @MultiResponse( {'text/html':MultiResponse.JSON,
- 'text/javascript':MultiResponse.JSON} )
- def data(self, trans, dataset_id=None, chr="", low="", high=""):
- return self.data_handler( trans, dataset_id, chrom=chr, low=low, high=high )
-
- @web.expose
- def build( self, trans, **kwargs ):
- trans.session["track_sets"] = list(kwargs.keys())
- trans.session.save()
- #waiting = False
- #for id, value in kwargs.items():
- # status = self.data_handler( trans, id )
- # if status == messages.PENDING:
- # waiting = True
- #if not waiting:
- return trans.response.send_redirect( web.url_for( controller='tracks/', action='index', chrom="" ) )
- #return trans.fill_template( 'tracks/build.mako' )
-
- @web.expose
- def index(self, trans, **kwargs):
+ def browser(self, trans, dataset_ids, chrom=""):
+ """
+ Display browser for the datasets listed in `dataset_ids`.
+ """
tracks = []
dbkey = ""
- for track in trans.session["track_sets"]:
- dataset = trans.app.model.HistoryDatasetAssociation.get( track )
- tracks.append({
- "type": dataset.datatype.get_track_type(),
- "name": dataset.name,
- "id": dataset.id
- })
+ for dataset_id in dataset_ids.split( "," ):
+ dataset = trans.app.model.HistoryDatasetAssociation.get( dataset_id )
+ tracks.append( {
+ "type": dataset.datatype.get_track_type(),
+ "name": dataset.name,
+ "id": dataset.id
+ } )
dbkey = dataset.dbkey
- chrom = kwargs.get("chrom","")
- LEN = self.chroms_handler(trans, trans.session["track_dbkey"]).get(chrom,0)
- return trans.fill_template( 'tracks/index.mako',
- tracks=tracks, chrom=chrom, dbkey=dbkey,
+ LEN = self._chroms(trans, dbkey ).get(chrom,0)
+ return trans.fill_template( 'tracks/browser.mako',
+ dataset_ids=dataset_ids,
+ tracks=tracks,
+ chrom=chrom,
+ dbkey=dbkey,
LEN=LEN )
-
- def chroms_handler(self, trans, dbkey ):
+
+ @web.json
+ def chroms(self, trans, dbkey=None ):
+ return self._chroms( trans, dbkey )
+
+ def _chroms( self, trans, dbkey ):
+ """
+ Called by the browser to get a list of valid chromosomes and lengths
+ """
db_manifest = trans.db_dataset_for( dbkey )
if not db_manifest:
db_manifest = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % dbkey )
@@ -134,7 +117,11 @@
pass
return manifest
- def data_handler( self, trans, dataset_id, chrom="", low="", high="" ):
+ @web.json
+ def data( self, trans, dataset_id, chrom="", low="", high="" ):
+ """
+ Called by the browser to request a block of data
+ """
dataset = trans.app.model.HistoryDatasetAssociation.get( dataset_id )
if not dataset: return messages.NO_DATA
if dataset.state == trans.app.model.Job.states.ERROR:
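The @web.json data() method above is one half of a simple polling protocol: trackster.js (next diff) re-requests a tile every five seconds while an index job is still running. A sketch of the server side of that contract, keeping only the return values visible in this changeset (messages.NO_DATA and messages.PENDING; everything elided is an assumption):

    @web.json
    def data( self, trans, dataset_id, chrom="", low="", high="" ):
        dataset = trans.app.model.HistoryDatasetAssociation.get( dataset_id )
        if not dataset:
            return messages.NO_DATA   # nothing to draw for this id
        # ... while the background indexing job is unfinished, return
        # messages.PENDING so the client retries in 5 seconds; otherwise
        # read the requested window from the index and return the data ...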
diff -r 6125f71c838a -r 143dde05e1df static/scripts/trackster.js
--- a/static/scripts/trackster.js Thu Jun 11 12:20:03 2009 -0400
+++ b/static/scripts/trackster.js Thu Jun 11 15:37:19 2009 -0400
@@ -155,7 +155,7 @@
// use closure to preserve this and parameters for getJSON
var fetcher = function (ref) {
return function () {
- $.getJSON( "data" + ref.type, { chr: ref.view.chr, low: low, high: high, dataset_id: ref.track.dataset_id }, function ( data ) {
+ $.getJSON( TRACKSTER_DATA_URL + ref.type, { chrom: ref.view.chr, low: low, high: high, dataset_id: ref.track.dataset_id }, function ( data ) {
if( data == "pending" ) {
setTimeout( fetcher, 5000 );
} else {
@@ -218,6 +218,7 @@
var y1 = data[i][1];
var x2 = data[i+1][0] - tile_low;
var y2 = data[i+1][1];
+ console.log( x1, y1, x2, y2 );
// Missing data causes us to stop drawing
if ( isNaN( y1 ) || isNaN( y2 ) ) {
in_path = false;
diff -r 6125f71c838a -r 143dde05e1df static/trackster.css
--- a/static/trackster.css Thu Jun 11 12:20:03 2009 -0400
+++ b/static/trackster.css Thu Jun 11 15:37:19 2009 -0400
@@ -1,5 +1,5 @@
body {
- margin: 4em 0;
+ margin: 0 0;
padding: 0;
font-family: verdana;
font-size: 75%;
diff -r 6125f71c838a -r 143dde05e1df templates/base_panels.mako
--- a/templates/base_panels.mako Thu Jun 11 12:20:03 2009 -0400
+++ b/templates/base_panels.mako Thu Jun 11 15:37:19 2009 -0400
@@ -148,13 +148,22 @@
<span class="${cls}" style="${style}"><a target="${target}" href="${href}">${display}</a></span>
</%def>
- ## ${tab( "tracks", "View Data", h.url_for( controller='tracks', action='dbkeys' ), target="galaxy_main")}
-
${tab( "analysis", "Analyze Data", h.url_for( controller='root', action='index' ))}
${tab( "workflow", "Workflow", h.url_for( controller='workflow', action='index' ))}
- ${tab( "libraries", "Libraries", h.url_for( controller='library', action='index' ))}
+ ${tab( "libraries", "Libraries", h.url_for( controller='library', action='index' ))}
+
+ %if app.config.get_bool( 'enable_tracks', False ):
+ <span class="tab">
+ Visualization
+ <div class="submenu">
+ <ul>
+ <li><a href="${h.url_for( controller='tracks', action='index' )}">Build track browser</a></li>
+ </ul>
+ </div>
+ </span>
+ %endif
${tab( "admin", "Admin", h.url_for( controller='admin', action='index' ), extra_class="admin-only", visible=( trans.user and app.config.is_admin_user( trans.user ) ) )}
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/browser.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/tracks/browser.mako Thu Jun 11 15:37:19 2009 -0400
@@ -0,0 +1,132 @@
+<%inherit file="/base.mako"/>
+
+<%def name="stylesheets()">
+${parent.stylesheets()}
+<link rel="stylesheet" type="text/css" href="/static/trackster.css" />
+</%def>
+
+<%def name="javascripts()">
+${parent.javascripts()}
+<script type="text/javascript" src="/static/scripts/jquery.event.drag.js"></script>
+<script type="text/javascript" src="/static/scripts/trackster.js"></script>
+<script type="text/javascript">
+
+ ## HACK
+ TRACKSTER_DATA_URL = "${h.url_for( action='data' )}";
+
+ var view = new View( "${chrom}", ${LEN}, 0, ${max(LEN,1)} );
+ var tracks = new TrackLayout( view );
+ var dbkey = "${dbkey}";
+
+ $(function() {
+
+ tracks.add( new LabelTrack( view, $("#viewport" ) ) );
+ %for track in tracks:
+ tracks.add( new ${track["type"]}( "${track["name"]}", view, $("#viewport" ), ${track["id"]} ) );
+ %endfor
+
+ $(document).bind( "redraw", function( e ) {
+ tracks.redraw();
+ });
+
+ $(window).resize( function( e ) {
+ tracks.redraw();
+ });
+
+ $("#viewport").bind( "dragstart", function ( e ) {
+ this.original_low = view.low;
+ }).bind( "drag", function( e ) {
+ var move_amount = ( e.offsetX - this.offsetLeft ) / this.offsetWidth;
+ var range = view.high - view.low;
+ var move_bases = Math.round( range * move_amount );
+ var new_low = this.original_low - move_bases;
+ if ( new_low < 0 ) {
+ new_low = 0;
+ }
+ var new_high = new_low + range;
+ if ( new_high > view.length ) {
+ new_high = view.length;
+ new_low = new_high - range;
+ }
+ view.low = new_low;
+ view.high = new_high;
+ tracks.redraw();
+ });
+ tracks.redraw();
+ load_chroms();
+ });
+
+ var load_chroms = function () {
+ var fetcher = function (ref) {
+ return function () {
+ $.getJSON( "${h.url_for( action='chroms' )}", { dbkey: dbkey }, function ( data ) {
+ // Hacky - check length of "object"
+ var chrom_length = 0;
+ for (key in data) chrom_length++;
+ if( chrom_length == 0 ) {
+ setTimeout( fetcher, 5000 );
+ } else {
+ var chrom_options = '';
+ for (key in data) {
+ if( key == view.chr ) {
+ chrom_options += '<option value="' + key + '" selected="true">' + key + '</option>';
+ } else {
+ chrom_options += '<option value="' + key + '">' + key + '</option>';
+ }
+ }
+ $("#chrom").html(chrom_options);
+ $("#chrom").bind( "change", function ( e ) {
+ $("#chr").submit();
+ });
+ if( view.chr == "" ) {
+ $("#chrom option:first").attr("selected", true);
+ $("#chrom").trigger( "change" );
+ }
+ }
+ });
+ };
+ }(this);
+ fetcher();
+ };
+
+</script>
+</%def>
+
+
+<div id="content">
+
+ <div id="overview">
+ <div id="overview-viewport">
+ <div id="overview-box"></div>
+ </div>
+ </div>
+
+
+ <div id="viewport">
+ </div>
+
+</div>
+ <div id="nav">
+
+ <div id="nav-controls">
+ <form name="chr" id="chr" method="GET">
+ <input type="hidden" name="dataset_ids" value="${dataset_ids}" />
+ <a href="#" onclick="javascript:view.left(5);tracks.redraw();"><<</a>
+ <a href="#" onclick="javascript:view.left(2);tracks.redraw();"><</a>
+ <span style="display: inline-block; width: 30em; text-align: center;">Viewing
+ <select id="chrom" name="chrom">
+ <option value="">loading</option>
+ </select>
+ <span id="low">0</span>-<span id="high">180857866</span></span>
+ <span style="display: inline-block; width: 10em;">
+ <a href="#" onclick="javascript:view.zoom_in(2);tracks.redraw();">+</a>
+ <a href="#" onclick="javascript:view.zoom_out(2);tracks.redraw();">-</a>
+ </span>
+
+ <a href="#" onclick="javascript:view.right(2);tracks.redraw();">></a>
+ <a href="#" onclick="javascript:view.right(5);tracks.redraw();">>></a>
+ </form>
+ </div>
+
+ </div>
+
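The drag handler in this template converts a horizontal pixel offset into a base-pair pan, clamped to the chromosome ends. The same arithmetic, restated as a small Python sketch for clarity:

    def pan( low, high, length, frac_moved ):
        # frac_moved: drag distance as a fraction of the viewport width
        span = high - low
        new_low = max( 0, low - int( round( span * frac_moved ) ) )
        new_high = new_low + span
        if new_high > length:   # clamp to the right edge, keep the span
            new_high = length
            new_low = new_high - span
        return new_low, new_high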
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/build.mako
--- a/templates/tracks/build.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,22 +0,0 @@
-<%inherit file="/base.mako"/>
-
-<%def name="init()">
-<%
- self.active_view="view"
- self.has_left_panel=False
-%>
-</%def>
-
-<script type="text/javascript">
- setTimeout(function () {
- window.location.reload();
- }, 5000 );
-</script>
-
-<div class="donemessage">
-<p>
-Please wait while we index your tracks for viewing. You will be
-automatically redirected to choose a chromosome to view after indices
-are built.
-</p>
-</div>
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/chroms.mako
--- a/templates/tracks/chroms.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-<%inherit file="/base.mako"/>
-
-<%def name="init()">
-<%
- self.active_view="view"
- self.has_left_panel=False
-%>
-</%def>
-
-<div class="form">
- <div class="form-title">Select Chromosome/Contig/Scaffold/etc.</div>
- <div id="dbkey" class="form-body">
- <form action="/tracks/index" method="GET" target="_parent">
- <div class="form-row">
- <label for="dbkey">Chrom: </label>
- <div class="form-row-input">
- <select name="chrom" id="chrom">
- %for chrom in data_resource:
- <option value="${chrom}">${chrom}</option>
- %endfor
- </select>
- </div>
- <div style="clear: both;"></div>
- </div>
- <div class="form-row">
- <input type="submit" value="View" />
- </div>
- </form>
- </div>
-</div>
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/datasets.mako
--- a/templates/tracks/datasets.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,28 +0,0 @@
-<%inherit file="/base.mako"/>
-
-<%def name="init()">
-<%
- self.active_view="view"
- self.has_left_panel=False
-%>
-</%def>
-
-<div class="form">
- <div class="form-title">Select Datasets to View</div>
- <div id="dbkey" class="form-body">
- <form action="/tracks/build" method="GET">
- %for key,value in data_resource.items():
- <div class="form-row">
- <label for="${key}">${value}</label>
- <div class="form-row-input">
- <input type="checkbox" name="${key}" />
- </div>
- <div style="clear: both;"></div>
- </div>
- %endfor
- <div class="form-row">
- <input type="submit" value="Build..." />
- </div>
- </form>
- </div>
-</div>
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/dbkeys.mako
--- a/templates/tracks/dbkeys.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-<%inherit file="/base.mako"/>
-
-<%def name="init()">
-<%
- self.active_view="view"
- self.has_left_panel=False
-%>
-</%def>
-
-<div class="form">
- <div class="form-title">Select DBKey</div>
- <div id="dbkey" class="form-body">
- <form action="/tracks/list" method="GET">
- <div class="form-row">
- <label for="dbkey">DBKey: </label>
- <div class="form-row-input">
- <select name="dbkey" id="dbkey">
- %for dbkey in data_resource:
- <option value="${dbkey}">${dbkey}</option>
- %endfor
- </select>
- </div>
- <div style="clear: both;"></div>
- </div>
- <div class="form-row">
- <input type="submit" value="Select Datasets..."/>
- </div>
- </form>
- </div>
-</div>
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/debug.mako
--- a/templates/tracks/debug.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-${data_resource}
\ No newline at end of file
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/index.mako
--- a/templates/tracks/index.mako Thu Jun 11 12:20:03 2009 -0400
+++ b/templates/tracks/index.mako Thu Jun 11 15:37:19 2009 -0400
@@ -2,149 +2,15 @@
<%def name="init()">
<%
- self.active_view="tracks"
self.has_left_panel=False
self.has_right_panel=False
+ self.active_view="visualization"
+ self.message_box_visible=False
%>
</%def>
-<%def name="stylesheets()">
-${parent.stylesheets()}
-<link rel="stylesheet" type="text/css" href="/static/trackster.css" />
-</%def>
+<%def name="center_panel()">
-<%def name="late_javascripts()">
-${parent.late_javascripts()}
-<script type="text/javascript" src="/static/scripts/jquery.event.drag.js"></script>
-<script type="text/javascript" src="/static/scripts/trackster.js"></script>
-<script type="text/javascript">
+ <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${h.url_for( controller="tracks", action="new_browser" )}"> </iframe>
- var view = new View( "${chrom}", ${LEN}, 0, ${max(LEN,1)} );
- var tracks = new TrackLayout( view );
- var dbkey = "${dbkey}";
-
- $(function() {
-
- tracks.add( new LabelTrack( view, $("#viewport" ) ) );
- %for track in tracks:
- tracks.add( new ${track["type"]}( "${track["name"]}", view, $("#viewport" ), ${track["id"]} ) );
- %endfor
-
- $(document).bind( "redraw", function( e ) {
- tracks.redraw();
- });
-
- $(window).resize( function( e ) {
- tracks.redraw();
- });
-
- $("#viewport").bind( "dragstart", function ( e ) {
- this.original_low = view.low;
- }).bind( "drag", function( e ) {
- var move_amount = ( e.offsetX - this.offsetLeft ) / this.offsetWidth;
- var range = view.high - view.low;
- var move_bases = Math.round( range * move_amount );
- var new_low = this.original_low - move_bases;
- if ( new_low < 0 ) {
- new_low = 0;
- }
- var new_high = new_low + range;
- if ( new_high > view.length ) {
- new_high = view.length;
- new_low = new_high - range;
- }
- view.low = new_low;
- view.high = new_high;
- tracks.redraw();
- });
- tracks.redraw();
- load_chroms();
- });
-
- var load_chroms = function () {
- var fetcher = function (ref) {
- return function () {
- $.getJSON( "chroms", { dbkey: dbkey }, function ( data ) {
- // Hacky - check length of "object"
- var chrom_length = 0;
- for (key in data) chrom_length++;
- if( chrom_length == 0 ) {
- setTimeout( fetcher, 5000 );
- } else {
- var chrom_options = '';
- for (key in data) {
- if( key == view.chr ) {
- chrom_options += '<option value="' + key + '" selected="true">' + key + '</option>';
- } else {
- chrom_options += '<option value="' + key + '">' + key + '</option>';
- }
- }
- $("#chrom").html(chrom_options);
- $("#chrom").bind( "change", function ( e ) {
- $("#chr").submit();
- });
- if( view.chr == "" ) {
- $("#chrom option:first").attr("selected", true);
- $("#chrom").trigger( "change" );
- }
- }
- });
- };
- }(this);
- fetcher();
- };
-
-</script>
-</%def>
-
-<%def name="center_panel()">
-<div id="content">
-
- <div id="overview">
- <div id="overview-viewport">
- <div id="overview-box"></div>
- </div>
- </div>
-
-
- <div id="viewport">
- </div>
-
-</div>
- <div id="nav">
-
- <div id="nav-controls">
- <form name="chr" id="chr" method="GET">
- <a href="#" onclick="javascript:view.left(5);tracks.redraw();"><<</a>
- <a href="#" onclick="javascript:view.left(2);tracks.redraw();"><</a>
- <span style="display: inline-block; width: 30em; text-align: center;">Viewing
- <select id="chrom" name="chrom">
- <option value="">loading</option>
- </select>
- <span id="low">0</span>-<span id="high">180857866</span></span>
- <span style="display: inline-block; width: 10em;">
- <a href="#" onclick="javascript:view.zoom_in(2);tracks.redraw();">+</a>
- <a href="#" onclick="javascript:view.zoom_out(2);tracks.redraw();">-</a>
- </span>
-
- <a href="#" onclick="javascript:view.right(2);tracks.redraw();">></a>
- <a href="#" onclick="javascript:view.right(5);tracks.redraw();">>></a>
- </form>
- </div>
-
- </div>
-</%def>
-
-<%def name="right_panel()">
- <div class="unified-panel-header" unselectable="on">
- <div class="unified-panel-header-inner">
- <div style="float: right">
- <a class='panel-header-button' href="${h.url_for( controller='root', action='history_options' )}" target="galaxy_main"><span>Options</span></a>
- </div>
- <div class="panel-header-text">History</div>
- </div>
- </div>
- <div class="unified-panel-body" style="overflow: hidden;">
- <iframe name="galaxy_history" width="100%" height="100%" frameborder="0" style="position: absolute; margin: 0; border: 0 none; height: 100%;" src="${h.url_for( controller='root', action='history' )}"></iframe>
- </div>
-</%def>
+</%def>
\ No newline at end of file
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/new_browser.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/tracks/new_browser.mako Thu Jun 11 15:37:19 2009 -0400
@@ -0,0 +1,50 @@
+<%inherit file="/base.mako"/>
+
+<%def name="javascripts()">
+${parent.javascripts()}
+<script type="text/javascript">
+$( function() {
+ $( "select[refresh_on_change='true']").change( function() {
+ $("#form").submit();
+ });
+});
+</script>
+</%def>
+
+<div class="form">
+ <div class="form-title">Select datasets to include in browser</div>
+ <div id="dbkey" class="form-body">
+ <form id="form" method="POST">
+ <div class="form-row">
+ <label for="dbkey">Reference genome build (dbkey): </label>
+ <div class="form-row-input">
+ <select name="dbkey" id="dbkey" refresh_on_change="true">
+ %for tmp_dbkey in dbkey_set:
+ <option value="${tmp_dbkey}"
+ %if tmp_dbkey == dbkey:
+ selected="true"
+ %endif
+ >${tmp_dbkey}</option>
+ %endfor
+ </select>
+ </div>
+ <div style="clear: both;"></div>
+ </div>
+ <div class="form-row">
+ <label for="dataset_ids">Datasets to include: </label>
+ %for key,value in datasets.items():
+ <div>
+ <input type="checkbox" name="dataset_ids" value="${key}" />
+ ${value}
+ </div>
+ %endfor
+
+ <div style="clear: both;"></div>
+ </div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="browse" value="Browse"/>
+ </div>
+ </form>
+ </div>
+</div>
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/view.mako
--- a/templates/tracks/view.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,88 +0,0 @@
-<html>
-
-<head>
-
-<link rel="stylesheet" type="text/css" href="/s/css/trackster.css" />
-
-<script type="text/javascript" src="/static/scripts/jquery.js"></script>
-<script type="text/javascript" src="/static/scripts/jquery.event.drag.js"></script>
-<script type="text/javascript" src="/static/scripts/trackster.js"></script>
-<script>
-
- var view = new View( "chr5", 180857866, 0, 180857866 );
- var tracks = new TrackLayout( view );
-
- $(function() {
-
- tracks.add( new LabelTrack( view, $("#viewport" ) ) );
- tracks.add( new LineTrack( "phastCons44way", view, $("#viewport" ) ) );
- tracks.add( new FeatureTrack( "knownGene", view, $("#viewport" ) ) );
-
- $(document).bind( "redraw", function( e ) {
- tracks.redraw();
- });
-
- $(window).resize( function( e ) {
- tracks.redraw();
- });
-
- $("#viewport").bind( "dragstart", function ( e ) {
- this.original_low = view.low;
- }).bind( "drag", function( e ) {
- var move_amount = ( e.offsetX - this.offsetLeft ) / this.offsetWidth;
- var range = view.high - view.low;
- var move_bases = Math.round( range * move_amount );
- var new_low = this.original_low - move_bases;
- if ( new_low < 0 ) {
- new_low = 0;
- }
- var new_high = new_low + range;
- if ( new_high > view.length ) {
- new_high = view.length;
- new_low = new_high - range;
- }
- view.low = new_low;
- view.high = new_high;
- tracks.redraw();
- });
-
- tracks.redraw();
- });
-</script>
-<body>
-
-<div id="content">
-
- <div id="overview">
- <div id="overview-viewport">
- <div id="overview-box"></div>
- </div>
- </div>
-
- <div id="nav">
-
- <div id="nav-controls">
- <a href="#" onclick="javascript:view.left(5);tracks.redraw();"><<</a>
- <a href="#" onclick="javascript:view.left(2);tracks.redraw();"><</a>
-
- <span style="display: inline-block; width: 30em; text-align: center;">Viewing chr5:<span id="low">0</span>-<span id="high">180857866</span></span>
-
- <span style="display: inline-block; width: 10em;">
- <a href="#" onclick="javascript:view.zoom_in(2);tracks.redraw();">+</a>
- <a href="#" onclick="javascript:view.zoom_out(2);tracks.redraw();">-</a>
- </span>
-
- <a href="#" onclick="javascript:view.right(2);tracks.redraw();">></a>
- <a href="#" onclick="javascript:view.right(5);tracks.redraw();">>></a>
- </div>
-
- </div>
-
- <div id="viewport">
- </div>
-
-</div>
-
-</body>
-
-</html>
details: http://www.bx.psu.edu/hg/galaxy/rev/854ec7739cfd
changeset: 2446:854ec7739cfd
user: ianschenck(a)Thugunit.local
date: Thu Apr 23 13:46:52 2009 -0400
description:
- Performance of indexers much improved.
- Indexing for tracks is done in the background, with a visual treatment added to Trackster
- DB builds can be uploaded by a user (chromInfo/len extension).
- TODO: Add ability to change the dbkey of a dataset to any arbitrary string value.
23 file(s) affected in this change:
datatypes_conf.xml.sample
lib/galaxy/datatypes/data.py
lib/galaxy/datatypes/indexers/coverage.py
lib/galaxy/datatypes/indexers/interval.awk
lib/galaxy/datatypes/indexers/interval.py
lib/galaxy/datatypes/indexers/interval_awk.xml
lib/galaxy/datatypes/indexers/wiggle.py
lib/galaxy/datatypes/interval.py
lib/galaxy/datatypes/metadata.py
lib/galaxy/datatypes/registry.py
lib/galaxy/tools/actions/__init__.py
lib/galaxy/tools/actions/upload.py
lib/galaxy/tools/parameters/basic.py
lib/galaxy/tracks/store.py
lib/galaxy/web/controllers/root.py
lib/galaxy/web/controllers/tracks.py
lib/galaxy/web/framework/__init__.py
static/scripts/trackster.js
static/trackster.css
templates/dataset/edit_attributes.mako
templates/tracks/index.mako
tools/data_source/upload.xml
tools/new_operations/complement.xml
diffs (663 lines):
diff -r f7336991d0ee -r 854ec7739cfd datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Thu Apr 23 09:20:11 2009 -0400
+++ b/datatypes_conf.xml.sample Thu Apr 23 13:46:52 2009 -0400
@@ -8,6 +8,9 @@
<converter file="interval_to_coverage.xml" target_datatype="coverage"/>
</datatype>
<datatype extension="binseq.zip" type="galaxy.datatypes.images:Binseq" mimetype="application/zip" display_in_upload="true"/>
+ <datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true">
+ <!-- no converters yet -->
+ </datatype>
<datatype extension="coverage" type="galaxy.datatypes.coverage:LastzCoverage" display_in_upload="true">
<indexer file="coverage.xml" />
</datatype>
@@ -31,7 +34,7 @@
<datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/>
<datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true">
<converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
- <indexer file="interval.xml" />
+ <indexer file="interval_awk.xml" />
</datatype>
<datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/>
<datatype extension="laj" type="galaxy.datatypes.images:Laj"/>
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Thu Apr 23 13:46:52 2009 -0400
@@ -40,7 +40,7 @@
__metaclass__ = DataMeta
"""Add metadata elements"""
- MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.SelectParameter, multiple=False, values=util.dbnames, no_value="?" )
+ MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.DBKeyParameter, multiple=False, no_value="?" )
"""Stores the set of display applications, and viewing methods, supported by this datatype """
supported_display_apps = {}
@@ -242,7 +242,7 @@
def after_edit( self, dataset ):
"""This function is called on the dataset after metadata is edited."""
dataset.clear_associated_files( metadata_safe = True )
-
+
@property
def has_resolution(self):
return False
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/indexers/coverage.py
--- a/lib/galaxy/datatypes/indexers/coverage.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/coverage.py Thu Apr 23 13:46:52 2009 -0400
@@ -37,7 +37,7 @@
os.rename( fname+".npy", fname )
# Write average
- for window in 10, 100, 1000, 10000:
+ for window in 10, 100, 1000, 10000, 100000:
input = scores.copy()
size = len( input )
input.resize( ( ( size / window ), window ) )
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/indexers/interval.awk
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/indexers/interval.awk Thu Apr 23 13:46:52 2009 -0400
@@ -0,0 +1,43 @@
+BEGIN {
+ # from galaxy.utils
+ mapped_chars[">"] = "__gt__"
+ mapped_chars["<"] = "__lt__"
+ mapped_chars["'"] = "__sq__"
+ mapped_chars["\""] = "__dq__"
+ mapped_chars["\\["] = "__ob__"
+ mapped_chars["\\]"] = "__cb__"
+ mapped_chars["\\{"] = "__oc__"
+ mapped_chars["\\}"] = "__cc__"
+ mapped_chars["@"] = "__at__"
+ # additional, not in galaxy.utils
+ mapped_chars["/"] = "__fs__"
+ mapped_chars["^manifest\.tab$"] = "__manifest.tab__"
+}
+function escape_filename( name )
+{
+ for( char in mapped_chars ) {
+ gsub( char, mapped_chars[char], name )
+ }
+ return name
+}
+!_[$chrom]++ {
+ # close files only when we switch to a new one.
+ fn && close(fn)
+ fn = storepath "/" escape_filename($1) }
+{
+ print $0 >> fn;
+ # the || part is needed to catch 0 length chromosomes, which
+ # should never happen but...
+ if ($end > chroms[$chrom] || !chroms[$chrom])
+ chroms[$chrom] = $end }
+END {
+ fn = storepath "/manifest.tab"
+ for( x in chroms ) {
+ # add line to manifest
+ print x "\t" chroms[x] >> fn
+ chromfile = storepath "/" escape_filename(x)
+ # sort in-place
+ system( "sort -f -n -k " chrom " -k " start " -k " end " -o " chromfile " " chromfile )
+ close(chromfile)
+ }
+}
\ No newline at end of file
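The escape_filename function above mirrors the character mapping in galaxy.util, plus two track-store extras ('/' and the reserved manifest.tab name). A rough Python equivalent for reference (the awk gsub patterns are approximated; the regex-anchored manifest case becomes an exact-name check):

    MAPPED_CHARS = {
        '>': '__gt__', '<': '__lt__', "'": '__sq__', '"': '__dq__',
        '[': '__ob__', ']': '__cb__', '{': '__oc__', '}': '__cc__',
        '@': '__at__', '/': '__fs__',
    }

    def escape_filename( name ):
        # keep the per-store index file name from colliding with a chromosome
        if name == 'manifest.tab':
            return '__manifest.tab__'
        for char, repl in MAPPED_CHARS.items():
            name = name.replace( char, repl )
        return name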
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/indexers/interval.py
--- a/lib/galaxy/datatypes/indexers/interval.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/interval.py Thu Apr 23 13:46:52 2009 -0400
@@ -29,13 +29,11 @@
manifest[chrom] = max(manifest.get(chrom,0),line.end)
if not lastchrom == chrom:
if current_file:
- current_file.flush()
current_file.close()
current_file = open( os.path.join( out_path, "%s" % chrom), "a" )
print >> current_file, "\t".join(line)
lastchrom = chrom
if current_file:
- current_file.flush()
current_file.close()
return manifest
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/indexers/interval_awk.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/indexers/interval_awk.xml Thu Apr 23 13:46:52 2009 -0400
@@ -0,0 +1,16 @@
+<tool id="INDEXER_Interval_0" name="Index Interval for Track Viewer">
+ <!-- Used internally to generate track indexes -->
+ <command interpreter="awk -f">interval.awk
+ chrom=${input_dataset.metadata.chromCol} start=${input_dataset.metadata.startCol}
+ end=${input_dataset.metadata.endCol} strand=${input_dataset.metadata.strandCol}
+ storepath=${store_path}
+ $input_dataset 2>&1
+ </command>
+ <inputs>
+ <page>
+ <param format="interval" name="input_dataset" type="data" label="Choose intervals"/>
+ </page>
+ </inputs>
+ <help>
+ </help>
+</tool>
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/indexers/wiggle.py
--- a/lib/galaxy/datatypes/indexers/wiggle.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/wiggle.py Thu Apr 23 13:46:52 2009 -0400
@@ -18,6 +18,8 @@
from numpy import *
import tempfile
import os
+from galaxy.tracks.store import sanitize_name
+
def write_chrom(max, out_base, instream):
@@ -35,7 +37,7 @@
os.rename( fname+".npy", fname )
# Write average
- for window in 10, 100, 1000, 10000:
+ for window in 10, 100, 1000, 10000, 100000:
input = scores.copy()
size = len( input )
input.resize( ( ( size / window ), window ) )
@@ -60,7 +62,7 @@
LEN[chrom] = max2( LEN.get(chrom,0), pos+1 )
for chrom, stream in chroms.items():
stream.seek(0)
- prefix = os.path.join(sys.argv[2], chrom)
+ prefix = os.path.join(sys.argv[2], sanitize_name(chrom))
write_chrom( LEN[chrom], prefix, stream )
manifest_file = open( os.path.join( sys.argv[2], "manifest.tab" ),"w" )
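The wiggle indexer precomputes averaged score arrays at several window sizes; adding 100000 to the window list gives the viewer one more, coarser zoom level, matching the resolution cap raised below. The averaging itself is the numpy reshape trick, roughly as follows (array contents invented for illustration):

    # The windowed-averaging trick from wiggle.py, in isolation.
    import numpy

    scores = numpy.arange(20, dtype=float)      # stand-in per-base scores
    window = 10
    usable = (len(scores) // window) * window   # drop the ragged tail
    blocks = scores[:usable].copy()
    blocks.resize((usable // window, window))   # shape: (n_windows, window)
    print(blocks.mean(axis=1))                  # -> [ 4.5  14.5]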
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/interval.py Thu Apr 23 13:46:52 2009 -0400
@@ -792,7 +792,7 @@
# Determine appropriate resolution to plot ~1000 points
resolution = ( 10 ** math.ceil( math.log10( range / 1000 ) ) )
# Restrict to valid range
- resolution = min( resolution, 10000 )
+ resolution = min( resolution, 100000 )
resolution = max( resolution, 1 )
# Memory map the array (don't load all the data)
data = numpy.load( data )
@@ -809,7 +809,7 @@
# Determine appropriate resolution to plot ~1000 points
resolution = math.ceil( 10 ** math.ceil( math.log10( range / 1000 ) ) )
# Restrict to valid range
- resolution = min( resolution, 10000 )
+ resolution = min( resolution, 100000 )
resolution = max( resolution, 1 )
return resolution
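Both hunks above raise the resolution cap from 10000 to 100000 so the new coarsest window can actually be chosen. The formula picks the smallest power of ten that leaves roughly 1000 points to plot, then clamps it; a worked example (span values invented):

    # Worked example of the resolution formula from interval.py.
    import math

    def pick_resolution(span, cap=100000):
        resolution = 10 ** math.ceil(math.log10(span / 1000.0))
        return max(min(resolution, cap), 1)

    print(pick_resolution(2000000))    # 2000000/1000 = 2000 -> 10**4 = 10000
    print(pick_resolution(500000000))  # 10**6 would overshoot, clamped to 100000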
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/metadata.py Thu Apr 23 13:46:52 2009 -0400
@@ -289,7 +289,22 @@
if value is None: return []
if not isinstance( value, list ): return [value]
return value
-
+
+
+class DBKeyParameter( SelectParameter ):
+ def get_html_field( self, value=None, context={}, other_values={}, values=None, **kwd):
+ try:
+ values = kwd['trans'].db_builds
+ except AttributeError: pass
+ return super(DBKeyParameter, self).get_html_field( value, context, other_values, values, **kwd)
+
+ def get_html( self, value=None, context={}, other_values={}, values=None, **kwd):
+ try:
+ values = kwd['trans'].db_builds
+ except AttributeError: pass
+ return super(DBKeyParameter, self).get_html( value, context, other_values, values, **kwd)
+
+
class RangeParameter( SelectParameter ):
def __init__( self, spec ):
SelectParameter.__init__( self, spec )
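The new DBKeyParameter prefers the transaction's db_builds list (added to the web framework later in this changeset) and falls back to whatever values were passed in when the transaction does not provide one. The shape of that fallback, with stand-ins for Galaxy's classes (Base and Trans here are illustrative, not Galaxy code; the changeset itself catches only AttributeError, while this sketch also guards a missing 'trans' key):

    # Fallback pattern used by DBKeyParameter, with stubbed classes.
    class Base(object):
        def get_html_field(self, values=None, **kwd):
            return "select with options %r" % (values,)

    class DBKeyLike(Base):
        def get_html_field(self, values=None, **kwd):
            try:
                values = kwd['trans'].db_builds
            except (KeyError, AttributeError):
                pass  # no transaction, or one without db_builds
            return super(DBKeyLike, self).get_html_field(values=values, **kwd)

    class Trans(object):
        db_builds = [("hg18", "Human Mar. 2006")]

    print(DBKeyLike().get_html_field(trans=Trans()))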
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/datatypes/registry.py Thu Apr 23 13:46:52 2009 -0400
@@ -3,7 +3,7 @@
"""
import os
import logging
-import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks
+import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo
import galaxy.util
from galaxy.util.odict import odict
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/tools/actions/__init__.py Thu Apr 23 13:46:52 2009 -0400
@@ -107,6 +107,7 @@
out_data = {}
# Collect any input datasets from the incoming parameters
inp_data = self.collect_input_datasets( tool, incoming, trans )
+
# Deal with input dataset names, 'dbkey' and types
input_names = []
input_ext = 'data'
@@ -119,6 +120,16 @@
data = NoneDataset( datatypes_registry = trans.app.datatypes_registry )
if data.dbkey not in [None, '?']:
input_dbkey = data.dbkey
+
+ # Collect chromInfo dataset and add as parameters to incoming
+ db_datasets = {}
+ db_dataset = trans.db_dataset_for( input_dbkey )
+ if db_dataset:
+ db_datasets[ "chromInfo" ] = db_dataset
+ incoming[ "chromInfo" ] = db_dataset.file_name
+ else:
+ incoming[ "chromInfo" ] = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % input_dbkey )
+ inp_data.update( db_datasets )
# Determine output dataset permission/roles list
existing_datasets = [ inp for inp in inp_data.values() if inp ]
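Every tool execution now receives a chromInfo parameter: if the user's history holds a .len dataset matching the input build, that file wins; otherwise the shared UCSC chrom-length file for the build is used. The fallback in isolation (trans and its attributes are Galaxy internals, stubbed here):

    # chromInfo path selection added to the tool action, in isolation.
    import os

    def chrom_info_path(trans, input_dbkey):
        db_dataset = trans.db_dataset_for(input_dbkey)
        if db_dataset:
            return db_dataset.file_name          # user-supplied .len dataset
        return os.path.join(trans.app.config.tool_data_path,
                            'shared', 'ucsc', 'chrom',
                            "%s.len" % input_dbkey)  # stock build lengths

This is what lets tools such as gops_complement.py below switch from a hardcoded ${GALAXY_DATA_INDEX_DIR} path to plain ${chromInfo}.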
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Apr 23 13:46:52 2009 -0400
@@ -21,7 +21,7 @@
def execute( self, tool, trans, incoming={}, set_output_hid = True ):
data_file = incoming['file_data']
file_type = incoming['file_type']
- dbkey = incoming['dbkey']
+ dbkey = incoming['other_dbkey'] or incoming['dbkey']
url_paste = incoming['url_paste']
is_multi_byte = False
space_to_tab = False
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Thu Apr 23 13:46:52 2009 -0400
@@ -657,7 +657,7 @@
>>> # Create a mock transaction with 'hg17' as the current build
>>> from galaxy.util.bunch import Bunch
- >>> trans = Bunch( history=Bunch( genome_build='hg17' ) )
+ >>> trans = Bunch( history=Bunch( genome_build='hg17' ), db_builds=util.dbnames )
>>> p = GenomeBuildParameter( None, XML(
... '''
@@ -692,10 +692,10 @@
"""
def get_options( self, trans, other_values ):
last_used_build = trans.history.genome_build
- for dbkey, build_name in util.dbnames:
+ for dbkey, build_name in trans.db_builds:
yield build_name, dbkey, ( dbkey == last_used_build )
def get_legal_values( self, trans, other_values ):
- return set( dbkey for dbkey, _ in util.dbnames )
+ return set( dbkey for dbkey, _ in trans.db_builds )
class ColumnListParameter( SelectToolParameter ):
"""
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/tracks/store.py
--- a/lib/galaxy/tracks/store.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/tracks/store.py Thu Apr 23 13:46:52 2009 -0400
@@ -1,5 +1,17 @@
import os
+import re
from string import Template
+from galaxy.util import sanitize_text
+
+# extra mappings/escape to keep users from traversing around the
+# filesystem and wreaking havoc
+extra_mappings = { r"/": "__fs__", r"^manifest.tab$": "__manifest.tab__" }
+
+def sanitize_name( name ):
+ name = sanitize_text( name )
+ for key, value in extra_mappings.items():
+ name = re.sub( key, value, name )
+ return name
class TemplateSubber( object ):
def __init__(self, obj):
@@ -56,7 +68,7 @@
fd.close()
def _get_object_path( self, chrom, resolution ):
- object_name = chrom
+ object_name = sanitize_name(chrom)
if resolution: object_name += "_%d" % resolution
return os.path.join( self.path, object_name )
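sanitize_name layers two extra substitutions on top of galaxy.util.sanitize_text so a chromosome name can neither climb out of the track store directory nor overwrite manifest.tab. Approximately (sanitize_text itself is omitted, so the escaping of other characters is not shown):

    # Approximate behaviour of tracks.store.sanitize_name.
    import re

    extra_mappings = {r"/": "__fs__", r"^manifest.tab$": "__manifest.tab__"}

    def sanitize_name(name):
        for pattern, sub in extra_mappings.items():
            name = re.sub(pattern, sub, name)
        return name

    print(sanitize_name("../../etc/passwd"))  # slashes become __fs__
    print(sanitize_name("manifest.tab"))      # -> __manifest.tab__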
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/web/controllers/root.py Thu Apr 23 13:46:52 2009 -0400
@@ -234,11 +234,15 @@
if spec.get("readonly"):
continue
optional = params.get("is_"+name, None)
+ other = params.get("or_"+name, None)
if optional and optional == 'true':
# optional element... == 'true' actually means it is NOT checked (and therefore omitted)
setattr(data.metadata, name, None)
else:
- setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
+ if other:
+ setattr( data.metadata, name, other )
+ else:
+ setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
data.datatype.after_edit( data )
trans.app.model.flush()
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Thu Apr 23 13:46:52 2009 -0400
@@ -1,13 +1,11 @@
-from mako import exceptions
-from mako.template import Template
-from mako.lookup import TemplateLookup
+import math
+
+import mimeparse
+from galaxy.tracks import messages
+from galaxy.util.json import to_json_string
from galaxy.web.base.controller import *
from galaxy.web.framework import simplejson
-from galaxy import web
-from galaxy.tracks import messages
-import mimeparse
-from galaxy.util.json import to_json_string
-import math
+
class MultiResponse(object):
"""
@@ -82,18 +80,19 @@
def build( self, trans, **kwargs ):
trans.session["track_sets"] = list(kwargs.keys())
trans.session.save()
- waiting = False
- for id, value in kwargs.items():
- status = self.data_handler( trans, id )
- if status == messages.PENDING:
- waiting = True
- if not waiting:
- return trans.response.send_redirect( web.url_for( controller='tracks', action='chroms', dbkey=trans.session["track_dbkey"]) )
- return trans.fill_template( 'tracks/build.mako' )
+ #waiting = False
+ #for id, value in kwargs.items():
+ # status = self.data_handler( trans, id )
+ # if status == messages.PENDING:
+ # waiting = True
+ #if not waiting:
+ return trans.response.send_redirect( web.url_for( controller='tracks/', action='index', chrom="" ) )
+ #return trans.fill_template( 'tracks/build.mako' )
@web.expose
def index(self, trans, **kwargs):
tracks = []
+ dbkey = ""
for track in trans.session["track_sets"]:
dataset = trans.app.model.HistoryDatasetAssociation.get( track )
tracks.append({
@@ -101,17 +100,23 @@
"name": dataset.name,
"id": dataset.id
})
+ dbkey = dataset.dbkey
chrom = kwargs.get("chrom","")
LEN = self.chroms_handler(trans, trans.session["track_dbkey"]).get(chrom,0)
return trans.fill_template( 'tracks/index.mako',
- tracks=tracks, chrom=chrom,
+ tracks=tracks, chrom=chrom, dbkey=dbkey,
LEN=LEN )
def chroms_handler(self, trans, dbkey ):
- db_manifest = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % dbkey )
+ db_manifest = trans.db_dataset_for( dbkey )
+ if not db_manifest:
+ db_manifest = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % dbkey )
+ else:
+ db_manifest = db_manifest.file_name
manifest = {}
if os.path.exists( db_manifest ):
for line in open( db_manifest ):
+ if line.startswith("#"): continue
line = line.rstrip("\r\n")
fields = line.split("\t")
manifest[fields[0]] = int(fields[1])
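The chroms handler now prefers a user-supplied .len dataset over the stock file and tolerates comment lines. The manifest format it reads is simply "chrom<TAB>length"; the parsing step in isolation (path handling omitted):

    # Shape of the .len manifest parser in chroms_handler:
    # one "chrom<TAB>length" pair per line, '#' comments skipped.
    def parse_len(path):
        manifest = {}
        for line in open(path):
            if line.startswith("#"):
                continue
            fields = line.rstrip("\r\n").split("\t")
            manifest[fields[0]] = int(fields[1])
        return manifest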
diff -r f7336991d0ee -r 854ec7739cfd lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Thu Apr 23 09:20:11 2009 -0400
+++ b/lib/galaxy/web/framework/__init__.py Thu Apr 23 13:46:52 2009 -0400
@@ -547,6 +547,31 @@
template = Template( source=template_string,
searchList=[context or kwargs, dict(caller=self)] )
return str(template)
+
+ @property
+ def db_builds( self ):
+ """
+ Returns the builds defined by galaxy and the builds defined by
+ the user (chromInfo in history).
+ """
+ dbnames = list()
+ datasets = self.app.model.HistoryDatasetAssociation.filter_by(deleted=False, history_id=self.history.id, extension="len").all()
+ if len(datasets) > 0:
+ dbnames.append( (util.dbnames.default_value, '--------- User Defined Builds ----------') )
+ for dataset in datasets:
+ dbnames.append( (dataset.dbkey, dataset.name) )
+ dbnames.extend( util.dbnames )
+ return dbnames
+
+ def db_dataset_for( self, dbkey ):
+ """
+ Returns the db_file dataset associated with `dbkey`, or `None`.
+ """
+ datasets = self.app.model.HistoryDatasetAssociation.filter_by(deleted=False, history_id=self.history.id, extension="len").all()
+ for ds in datasets:
+ if dbkey == ds.dbkey:
+ return ds
+ return None
class FormBuilder( object ):
"""
diff -r f7336991d0ee -r 854ec7739cfd static/scripts/trackster.js
--- a/static/scripts/trackster.js Thu Apr 23 09:20:11 2009 -0400
+++ b/static/scripts/trackster.js Thu Apr 23 13:46:52 2009 -0400
@@ -85,7 +85,7 @@
var resolution = Math.pow( 10, Math.ceil( Math.log( range / DENSITY ) / Math.log( 10 ) ) );
resolution = Math.max( resolution, 1 );
- resolution = Math.min( resolution, 10000 );
+ resolution = Math.min( resolution, 100000 );
var parent_element = $("<div style='position: relative;'></div>");
this.content_div.children( ":first" ).remove();
@@ -152,10 +152,20 @@
var low = position * DENSITY * resolution;
var high = ( position + 1 ) * DENSITY * resolution;
cache[resolution][position] = { state: "loading" };
- $.getJSON( "data" + this.type, { chr: this.view.chr, low: low, high: high, dataset_id: this.track.dataset_id }, function ( data ) {
- cache[resolution][position] = { state: "loaded", values: data };
- $(document).trigger( "redraw" );
- });
+ // use closure to preserve this and parameters for getJSON
+ var fetcher = function (ref) {
+ return function () {
+ $.getJSON( "data" + ref.type, { chr: ref.view.chr, low: low, high: high, dataset_id: ref.track.dataset_id }, function ( data ) {
+ if( data == "pending" ) {
+ setTimeout( fetcher, 5000 );
+ } else {
+ cache[resolution][position] = { state: "loaded", values: data };
+ }
+ $(document).trigger( "redraw" );
+ });
+ };
+ }(this);
+ fetcher();
}
return cache[resolution][position];
}
@@ -288,8 +298,11 @@
var chunk = this.cache.get( resolution, tile_index );
if ( chunk.state == "loading" ) {
- return null;
- }
+ parent_element.addClass("loading");
+ return null;
+ } else {
+ parent_element.removeClass("loading");
+ }
var values = chunk.values;
for ( var index in values ) {
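The trackster change replaces a one-shot $.getJSON with a closure that re-polls every five seconds while the server still reports "pending", and tiles show a loading state instead of silently caching nothing. The retry shape, re-expressed synchronously in Python (fetch_json stands in for the AJAX call; the bound on retries is an assumption, the JS polls indefinitely):

    # Synchronous re-expression of the trackster.js polling loop.
    import time

    def poll_until_loaded(fetch_json, params, delay=5.0, max_tries=120):
        for _ in range(max_tries):
            data = fetch_json(params)
            if data != "pending":
                return {"state": "loaded", "values": data}
            time.sleep(delay)  # index still being built server-side
        raise RuntimeError("track data never became ready")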
diff -r f7336991d0ee -r 854ec7739cfd static/trackster.css
--- a/static/trackster.css Thu Apr 23 09:20:11 2009 -0400
+++ b/static/trackster.css Thu Apr 23 13:46:52 2009 -0400
@@ -85,7 +85,10 @@
}
.loading {
- background: #DDDDDD;
+ background-image: url("/static/images/loading_large_white_bg.gif");
+ background-position: center center;
+ background-repeat: no-repeat;
+ min-height: 100px;
}
.label-track .label {
diff -r f7336991d0ee -r 854ec7739cfd templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Thu Apr 23 09:20:11 2009 -0400
+++ b/templates/dataset/edit_attributes.mako Thu Apr 23 13:46:52 2009 -0400
@@ -46,7 +46,7 @@
${spec.desc}:
</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- ${data.metadata.get_html_by_name( name )}
+ ${data.metadata.get_html_by_name( name, trans=trans )}
</div>
<div style="clear: both"></div>
</div>
diff -r f7336991d0ee -r 854ec7739cfd templates/tracks/index.mako
--- a/templates/tracks/index.mako Thu Apr 23 09:20:11 2009 -0400
+++ b/templates/tracks/index.mako Thu Apr 23 13:46:52 2009 -0400
@@ -17,18 +17,19 @@
${parent.late_javascripts()}
<script type="text/javascript" src="/static/scripts/jquery.event.drag.js"></script>
<script type="text/javascript" src="/static/scripts/trackster.js"></script>
-<script>
+<script type="text/javascript">
- var view = new View( "${chrom}", ${LEN}, 0, ${LEN} );
+ var view = new View( "${chrom}", ${LEN}, 0, ${max(LEN,1)} );
var tracks = new TrackLayout( view );
-
+ var dbkey = "${dbkey}";
+
$(function() {
tracks.add( new LabelTrack( view, $("#viewport" ) ) );
%for track in tracks:
tracks.add( new ${track["type"]}( "${track["name"]}", view, $("#viewport" ), ${track["id"]} ) );
%endfor
-
+
$(document).bind( "redraw", function( e ) {
tracks.redraw();
});
@@ -56,9 +57,43 @@
view.high = new_high;
tracks.redraw();
});
+ tracks.redraw();
+ load_chroms();
+ });
- tracks.redraw();
- });
+ var load_chroms = function () {
+ var fetcher = function (ref) {
+ return function () {
+ $.getJSON( "chroms", { dbkey: dbkey }, function ( data ) {
+ // Hacky - check length of "object"
+ var chrom_length = 0;
+ for (key in data) chrom_length++;
+ if( chrom_length == 0 ) {
+ setTimeout( fetcher, 5000 );
+ } else {
+ var chrom_options = '';
+ for (key in data) {
+ if( key == view.chr ) {
+ chrom_options += '<option value="' + key + '" selected="true">' + key + '</option>';
+ } else {
+ chrom_options += '<option value="' + key + '">' + key + '</option>';
+ }
+ }
+ $("#chrom").html(chrom_options);
+ $("#chrom").bind( "change", function ( e ) {
+ $("#chr").submit();
+ });
+ if( view.chr == "" ) {
+ $("#chrom option:first").attr("selected", true);
+ $("#chrom").trigger( "change" );
+ }
+ }
+ });
+ };
+ }(this);
+ fetcher();
+ };
+
</script>
</%def>
@@ -79,11 +114,14 @@
<div id="nav">
<div id="nav-controls">
+ <form name="chr" id="chr" method="GET">
<a href="#" onclick="javascript:view.left(5);tracks.redraw();"><<</a>
<a href="#" onclick="javascript:view.left(2);tracks.redraw();"><</a>
-
- <span style="display: inline-block; width: 30em; text-align: center;">Viewing ${chrom}:<span id="low">0</span>-<span id="high">180857866</span></span>
-
+ <span style="display: inline-block; width: 30em; text-align: center;">Viewing
+ <select id="chrom" name="chrom">
+ <option value="">loading</option>
+ </select>
+ <span id="low">0</span>-<span id="high">180857866</span></span>
<span style="display: inline-block; width: 10em;">
<a href="#" onclick="javascript:view.zoom_in(2);tracks.redraw();">+</a>
<a href="#" onclick="javascript:view.zoom_out(2);tracks.redraw();">-</a>
@@ -91,6 +129,7 @@
<a href="#" onclick="javascript:view.right(2);tracks.redraw();">></a>
<a href="#" onclick="javascript:view.right(5);tracks.redraw();">>></a>
+ </form>
</div>
</div>
diff -r f7336991d0ee -r 854ec7739cfd tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Apr 23 09:20:11 2009 -0400
+++ b/tools/data_source/upload.xml Thu Apr 23 13:46:52 2009 -0400
@@ -21,6 +21,7 @@
</options>
</param>
<param name="dbkey" type="genomebuild" label="Genome" />
+ <param name="other_dbkey" type="text" label="Or user-defined Genome" />
</inputs>
<help>
diff -r f7336991d0ee -r 854ec7739cfd tools/new_operations/complement.xml
--- a/tools/new_operations/complement.xml Thu Apr 23 09:20:11 2009 -0400
+++ b/tools/new_operations/complement.xml Thu Apr 23 13:46:52 2009 -0400
@@ -1,6 +1,6 @@
<tool id="gops_complement_1" name="Complement">
<description>intervals of a query</description>
- <command interpreter="python">gops_complement.py $input1 $output -1 ${input1.metadata.chromCol},${input1.metadata.startCol},${input1.metadata.endCol},${input1.metadata.strandCol} -l ${GALAXY_DATA_INDEX_DIR}/shared/ucsc/chrom/${dbkey}.len $allchroms</command>
+ <command interpreter="python">gops_complement.py $input1 $output -1 ${input1.metadata.chromCol},${input1.metadata.startCol},${input1.metadata.endCol},${input1.metadata.strandCol} -l ${chromInfo} $allchroms</command>
<inputs>
<param format="interval" name="input1" type="data">
<label>Complement regions of</label>
@@ -58,4 +58,4 @@
.. image:: ../static/operation_icons/gops_complement.gif
</help>
-</tool>
\ No newline at end of file
+</tool>
details: http://www.bx.psu.edu/hg/galaxy/rev/0cf5c25d1d2b
changeset: 2447:0cf5c25d1d2b
user: ianschenck(a)Thugunit.local
date: Thu Apr 23 14:42:35 2009 -0400
description:
Missed adding chrominfo.py due to hardcoded paths in my test environment. Should be good now?
2 file(s) affected in this change:
lib/galaxy/datatypes/chrominfo.py
tools/annotation_profiler/annotation_profiler.xml
diffs (29 lines):
diff -r 854ec7739cfd -r 0cf5c25d1d2b lib/galaxy/datatypes/chrominfo.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/chrominfo.py Thu Apr 23 14:42:35 2009 -0400
@@ -0,0 +1,14 @@
+import data
+from galaxy import util
+from galaxy.datatypes.sniff import *
+from galaxy.web import url_for
+from tabular import Tabular
+from galaxy.datatypes import metadata
+from galaxy.datatypes.metadata import MetadataElement
+
+
+class ChromInfo( Tabular ):
+ file_ext = "len"
+ MetadataElement( name="chrom", default=1, desc="Chrom column", param=metadata.ColumnParameter )
+ MetadataElement( name="length", default=2, desc="Length column", param=metadata.ColumnParameter )
+
diff -r 854ec7739cfd -r 0cf5c25d1d2b tools/annotation_profiler/annotation_profiler.xml
--- a/tools/annotation_profiler/annotation_profiler.xml Thu Apr 23 13:46:52 2009 -0400
+++ b/tools/annotation_profiler/annotation_profiler.xml Thu Apr 23 14:42:35 2009 -0400
@@ -1,6 +1,6 @@
<tool id="Annotation_Profiler_0" name="Profile Annotations" Version="1.0.0">
<description>for a set of genomic intervals</description>
- <command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p /depot/data2/galaxy/annotation_profiler/$dbkey $summary -l ${GALAXY_DATA_INDEX_DIR}/shared/ucsc/chrom/${dbkey}.len -b 3 -t $table_names</command>
+ <command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p /depot/data2/galaxy/annotation_profiler/$dbkey $summary -l ${chromInfo} -b 3 -t $table_names</command>
<inputs>
<param format="interval" name="input1" type="data" label="Choose Intervals">
<validator type="dataset_metadata_in_file" filename="annotation_profiler_valid_builds.txt" metadata_name="dbkey" metadata_column="0" message="Profiling is not currently available for this species."/>
Hello,
I'd like to share a new tool for Galaxy:
Cross-Tabulations -
Reads a tab-delimited file and produces a cross-tabulation table of two
or more variables.
Sources + XMLs are available here:
http://hannonlab.cshl.edu/crosstab/
Screenshots of the galaxy tool:
http://hannonlab.cshl.edu/crosstab/galaxy.html
There's no limit to the number of input lines -
so this tool can create pivot tables of files with more than one million
lines (which Excel currently can't do).
The tool is free to use.
Comments are welcome,
Gordon.
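A cross-tabulation counts co-occurrences of two or more categorical columns, which is why such a tool can stream arbitrarily many lines: memory grows with the number of distinct value pairs, not with file length. A minimal sketch of the idea (column indexes invented; this is not Gordon's implementation):

    # Minimal streaming cross-tabulation of two tab-separated columns.
    import sys
    from collections import Counter

    def crosstab(lines, col_a=0, col_b=1):
        counts = Counter()
        for line in lines:
            fields = line.rstrip("\n").split("\t")
            counts[(fields[col_a], fields[col_b])] += 1
        return counts

    if __name__ == "__main__":
        for (a, b), n in sorted(crosstab(sys.stdin).items()):
            print("%s\t%s\t%d" % (a, b, n))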
details: http://www.bx.psu.edu/hg/galaxy/rev/c69e55c91036
changeset: 2445:c69e55c91036
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Thu Jun 11 12:06:29 2009 -0400
description:
Restrict the target history list to the user's active histories when copying datasets between histories, clean up the relevant templates ( fixes ticket # 41 ), and add functional tests for copying history items between histories.
5 file(s) affected in this change:
lib/galaxy/web/controllers/dataset.py
templates/dataset/copy_view.mako
templates/dataset/edit_attributes.mako
test/base/twilltestcase.py
test/functional/test_history_functions.py
diffs (369 lines):
diff -r 004cd81cff72 -r c69e55c91036 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Wed Jun 10 15:46:26 2009 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Thu Jun 11 12:06:29 2009 -0400
@@ -159,7 +159,8 @@
raise "Error undeleting"
@web.expose
- def copy_datasets( self, trans, source_dataset_ids = "", target_history_ids = "", new_history_name="", do_copy = False ):
+ def copy_datasets( self, trans, source_dataset_ids="", target_history_ids="", new_history_name="", do_copy=False, **kwd ):
+ params = util.Params( kwd )
user = trans.get_user()
history = trans.get_history()
create_new_history = False
@@ -202,7 +203,7 @@
error_msg = error_msg + "You do not have permission to add datasets to %i requested histories. " % ( len( target_history_ids ) - len( target_histories ) )
for data in map( trans.app.model.HistoryDatasetAssociation.get, source_dataset_ids ):
if data is None:
- error_msg = error_msg + "You tried to copy a non-existant dataset. "
+ error_msg = error_msg + "You tried to copy a dataset that does not exist. "
invalid_datasets += 1
elif data.history != history:
error_msg = error_msg + "You tried to copy a dataset which is not in your current history. "
@@ -220,8 +221,7 @@
source_datasets = history.active_datasets
target_histories = [history]
if user:
- target_histories = user.histories
-
+ target_histories = user.active_histories
return trans.fill_template( "/dataset/copy_view.mako",
source_dataset_ids = source_dataset_ids,
target_history_ids = target_history_ids,
diff -r 004cd81cff72 -r c69e55c91036 templates/dataset/copy_view.mako
--- a/templates/dataset/copy_view.mako Wed Jun 10 15:46:26 2009 -0400
+++ b/templates/dataset/copy_view.mako Thu Jun 11 12:06:29 2009 -0400
@@ -5,65 +5,67 @@
${javascripts()}
%if error_msg:
-<p>
-<div class="errormessage">${error_msg}</div>
-<div style="clear: both"></div>
-</p>
+ <p>
+ <div class="errormessage">${error_msg}</div>
+ <div style="clear: both"></div>
+ </p>
%endif
%if done_msg:
-<p>
-<div class="donemessage">${done_msg}</div>
-<div style="clear: both"></div>
-</p>
+ <p>
+ <div class="donemessage">${done_msg}</div>
+ <div style="clear: both"></div>
+ </p>
%endif
<p>
-<div class="toolForm">
- <form>
- <div style="float: left; width: 50%; padding: 0px 0px 0px 0px;">
- <div class="toolFormTitle">Source History Items</div>
- <div class="toolFormBody">
- %for data in source_datasets:
- <%
- checked = ""
- if data.id in source_dataset_ids:
- checked = " checked"
- %>
- <div class="form-row"><input type="checkbox" name="source_dataset_ids" value="${data.id}"${checked}/> ${data.hid}: ${data.name}</div>
- %endfor
- </div>
- </div>
- <div style="float: right; width: 50%; padding: 0px 0px 0px 0px;">
- <div class="toolFormTitle">Target Histories</div>
- <div class="toolFormBody">
- %for i, hist in enumerate( target_histories ):
- <%
- checked = ""
- if hist.id in target_history_ids:
- checked = " checked"
- cur_history_text = ""
- if hist == trans.get_history():
- cur_history_text = " <strong>(current history)</strong>"
- %>
- <div class="form-row"><input type="checkbox" name="target_history_ids" value="${hist.id}"${checked}/> ${i + 1}${cur_history_text}: ${hist.name}</div>
- %endfor
- %if trans.get_user():
- <%
- checked = ""
- if "create_new_history" in target_history_ids:
- checked = " checked"
- %>
- <br>
- <div class="form-row"><input type="checkbox" name="target_history_ids" value="create_new_history"${checked}/>New history named: <input type="textbox" name="new_history_name" value="${new_history_name}"/></div>
- %endif
- </div>
- </div>
- <div style="clear: both"></div>
- <div class="form-row" align="center"><input type="submit" class="primary-button" name="do_copy" value="Copy History Items"/></div>
- </form>
-</div>
+ <div class="toolForm">
+ <form>
+ <div style="float: left; width: 50%; padding: 0px 0px 0px 0px;">
+ <div class="toolFormTitle">Source History Items</div>
+ <div class="toolFormBody">
+ %for data in source_datasets:
+ <%
+ checked = ""
+ if data.id in source_dataset_ids:
+ checked = " checked"
+ %>
+ <div class="form-row"><input type="checkbox" name="source_dataset_ids" value="${data.id}"${checked}/> ${data.hid}: ${data.name}</div>
+ %endfor
+ </div>
+ </div>
+ <div style="float: right; width: 50%; padding: 0px 0px 0px 0px;">
+ <div class="toolFormTitle">Target Histories</div>
+ <div class="toolFormBody">
+ %for i, hist in enumerate( target_histories ):
+ <%
+ checked = ""
+ if hist.id in target_history_ids:
+ checked = " checked"
+ cur_history_text = ""
+ if hist == trans.get_history():
+ cur_history_text = " <strong>(current history)</strong>"
+ %>
+ <div class="form-row"><input type="checkbox" name="target_history_ids" value="${hist.id}"${checked}/> ${i + 1}${cur_history_text}: ${hist.name}</div>
+ %endfor
+ %if trans.get_user():
+ <%
+ checked = ""
+ if "create_new_history" in target_history_ids:
+ checked = " checked"
+ %>
+ <br/>
+ <div class="form-row"><input type="checkbox" name="target_history_ids" value="create_new_history"${checked}/>New history named: <input type="textbox" name="new_history_name" value="${new_history_name}"/></div>
+ %endif
+ </div>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row" align="center">
+ <input type="submit" class="primary-button" name="do_copy" value="Copy History Items"/>
+ </div>
+ </form>
+ </div>
</p>
<div style="clear: both"></div>
<p>
-<div class="infomessage">Select any number of source history items and any number of target histories and click on "Copy History Items" to add a copy of each selected dataset to each selected history.</div>
-<div style="clear: both"></div>
+ <div class="infomessage">Select any number of source history items and any number of target histories and click "Copy History Items" to add a copy of each selected history item to each selected target history.</div>
+ <div style="clear: both"></div>
</p>
diff -r 004cd81cff72 -r c69e55c91036 templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Wed Jun 10 15:46:26 2009 -0400
+++ b/templates/dataset/edit_attributes.mako Thu Jun 11 12:06:29 2009 -0400
@@ -157,8 +157,16 @@
%endif
<p/>
<div class="toolForm">
-<div class="toolFormTitle">Copy History Item</div>
-<div class="toolFormBody">
- Click <a href="${h.url_for( controller='dataset', action='copy_datasets', source_dataset_ids=data.id, target_history_ids=data.history_id )}" target="galaxy_main">here</a> to make a copy of this history item.
+ <div class="toolFormTitle">Copy History Item</div>
+ <div class="toolFormBody">
+ <form name="copy_hda" action="${h.url_for( controller='dataset', action='copy_datasets', source_dataset_ids=data.id, target_history_ids=data.history_id )}" method="post">
+ <div class="form-row">
+ <input type="submit" name="change" value="Copy history item"/>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Make a copy of this history item in your current history or any of your active histories.
+ </div>
+ </form>
+ </div>
+ </div>
</div>
-</div>
diff -r 004cd81cff72 -r c69e55c91036 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Jun 10 15:46:26 2009 -0400
+++ b/test/base/twilltestcase.py Thu Jun 11 12:06:29 2009 -0400
@@ -181,8 +181,9 @@
self.home()
def new_history( self, name=None ):
"""Creates a new, empty history"""
+ self.home()
if name:
- self.visit_url( "%s/history_new?name=%s" % ( self.url, str( name ) ) )
+ self.visit_url( "%s/history_new?name=%s" % ( self.url, name ) )
else:
self.visit_url( "%s/history_new" % self.url )
self.check_history_for_string('Your history is empty')
@@ -191,7 +192,7 @@
"""Rename an existing history"""
self.home()
self.visit_page( "history/rename?id=%s&name=%s" %( id, new_name ) )
- check_str = 'History: %s renamed to: %s' % ( old_name, new_name )
+ check_str = 'History: %s renamed to: %s' % ( old_name, urllib.unquote( new_name ) )
self.check_page_for_string( check_str )
self.home()
def set_history( self ):
@@ -330,7 +331,7 @@
self.check_page_for_string( 'Attributes updated' )
self.home()
def convert_format( self, hda_id, target_type ):
- """Auto-detect history_dataset_association metadata"""
+ """Convert format of history_dataset_association"""
self.home()
self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
self.check_page_for_string( 'This will inspect the dataset and attempt' )
@@ -339,13 +340,36 @@
self.check_page_for_string( 'The file conversion of Convert BED to GFF on data' )
self.home()
def change_datatype( self, hda_id, datatype ):
- """Auto-detect history_dataset_association metadata"""
+ """Change format of history_dataset_association"""
self.home()
self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
self.check_page_for_string( 'This will change the datatype of the existing dataset but' )
tc.fv( 'change_datatype', 'datatype', datatype )
tc.submit( 'change' )
self.check_page_for_string( 'Edit Attributes' )
+ self.home()
+ def copy_history_item( self, source_dataset_ids='', target_history_ids=[], all_target_history_ids=[], deleted_history_ids=[] ):
+ """Copy 1 or more history_dataset_associations to 1 or more histories"""
+ self.home()
+ self.visit_url( "%s/dataset/copy_datasets?source_dataset_ids=%s" % ( self.url, source_dataset_ids ) )
+ self.check_page_for_string( 'Source History Items' )
+ # Make sure all of users active histories are displayed
+ for id in all_target_history_ids:
+ self.check_page_for_string( id )
+ # Make sure only active histories are displayed
+ for id in deleted_history_ids:
+ try:
+ self.check_page_for_string( id )
+ raise AssertionError, "deleted history id %d displayed in list of target histories" % id
+ except:
+ pass
+ # Check each history to which we want to copy the item
+ for id in target_history_ids:
+ tc.fv( '1', 'target_history_ids', id )
+ tc.submit( 'do_copy' )
+ no_source_ids = len( source_dataset_ids.split( ',' ) )
+ check_str = '%d datasets copied to %d histories.' % ( no_source_ids, len( target_history_ids ) )
+ self.check_page_for_string( check_str )
self.home()
def get_dataset_ids_in_history( self ):
"""Returns the ids of datasets in a history"""
diff -r 004cd81cff72 -r c69e55c91036 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Wed Jun 10 15:46:26 2009 -0400
+++ b/test/functional/test_history_functions.py Thu Jun 11 12:06:29 2009 -0400
@@ -1,3 +1,4 @@
+import urllib
import galaxy.model
from galaxy.model.orm import *
from base.twilltestcase import *
@@ -55,7 +56,7 @@
history1 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
assert history1 is not None, "Problem retrieving history1 from database"
self.upload_file( '1.bed', dbkey='hg18' )
- self.new_history( name='history2' )
+ self.new_history( name=urllib.quote( 'history2' ) )
global history2
history2 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
assert history2 is not None, "Problem retrieving history2 from database"
@@ -96,7 +97,7 @@
assert history3 is not None, "Problem retrieving history3 from database"
if history3.deleted:
raise AssertionError, "History id %d deleted when it should not be" % latest_history.id
- self.rename_history( str( history3.id ), history3.name, new_name='history3' )
+ self.rename_history( str( history3.id ), history3.name, new_name=urllib.quote( 'history 3' ) )
def test_020_history_list( self ):
"""Testing viewing previously stored histories"""
self.view_stored_active_histories()
@@ -131,7 +132,7 @@
global history4
history4 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
assert history4 is not None, "Problem retrieving history4 from database"
- self.rename_history( str( history4.id ), history4.name, new_name='history4' )
+ self.rename_history( str( history4.id ), history4.name, new_name=urllib.quote( 'history 4' ) )
history4.refresh()
self.upload_file( '2.bed', dbkey='hg18' )
id = '%s,%s' % ( str( history3.id ), str( history4.id ) )
@@ -195,7 +196,7 @@
global history5
history5 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
assert history5 is not None, "Problem retrieving history5 from database"
- self.rename_history( str( history5.id ), history5.name, new_name='history5' )
+ self.rename_history( str( history5.id ), history5.name, new_name=urllib.quote( 'history5' ) )
history5.refresh()
# Due to the limitations of twill ( not functional with the permissions forms ), we're forced
# to do this manually. At this point, we just want to restrict the access permission on history5
@@ -377,7 +378,7 @@
action='no_share' )
def test_055_history_show_and_hide_deleted_datasets( self ):
"""Testing displaying deleted history items"""
- self.new_history( name='temp_history1' )
+ self.new_history( name=urllib.quote( 'show hide deleted datasets' ) )
self.upload_file('1.bed', dbkey='hg18')
latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
@@ -393,7 +394,7 @@
self.check_page_for_string( 'Your history is empty' )
def test_060_deleting_and_undeleting_history_items( self ):
"""Testing deleting and un-deleting history items"""
- self.new_history( name='temp_history2' )
+ self.new_history( name=urllib.quote( 'delete undelete history items' ) )
# Add a new history item
self.upload_file( '1.bed', dbkey='hg15' )
self.home()
@@ -416,8 +417,54 @@
self.visit_url( "%s/history/?show_deleted=False" % self.url )
self.check_page_for_string( '1.bed' )
self.check_page_for_string( 'hg15' )
- def test_065_reset_data_for_later_test_runs( self ):
+ def test_065_copying_history_items_between_histories( self ):
+ """Testing copying history items between histories"""
+ self.new_history( name=urllib.quote( 'copy history items' ) )
+ global history6
+ history6 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history6 is not None, "Problem retrieving history6 from database"
+ self.upload_file( '1.bed', dbkey='hg18' )
+ hda1 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda1 is not None, "Problem retrieving hda1 from database"
+ # We'll just test copying 1 hda
+ source_dataset_ids=str( hda1.id )
+ # The valid list of target histories is only the user's active histories
+ all_target_history_ids = [ str( hda.id ) for hda in admin_user.active_histories ]
+ # Since history1 and history2 have been deleted, they should not be displayed in the list of target histories
+ # on the copy_view.mako form
+ deleted_history_ids = [ str( history1.id ), str( history2.id ) ]
+ # Test copying to the current history
+ target_history_ids=[ str( history6.id ) ]
+ self.copy_history_item( source_dataset_ids=source_dataset_ids,
+ target_history_ids=target_history_ids,
+ all_target_history_ids=all_target_history_ids,
+ deleted_history_ids=deleted_history_ids )
+ history6.refresh()
+ if len( history6.datasets ) != 2:
+ raise AssertionError, "Copying hda1 to the current history failed"
+ # Test copying 1 hda to another history
+ self.new_history( name=urllib.quote( 'copy history items - 2' ) )
+ global history7
+ history7 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history7 is not None, "Problem retrieving history7 from database"
+ # Switch back to our history from which we want to copy
+ self.switch_history( id=str( history6.id ), name=history6.name )
+ target_history_ids=[ str( history7.id ) ]
+ all_target_history_ids = [ str( hda.id ) for hda in admin_user.active_histories ]
+ # Test copying to a history that is not the current history
+ target_history_ids=[ str( history7.id ) ]
+ self.copy_history_item( source_dataset_ids=source_dataset_ids,
+ target_history_ids=target_history_ids,
+ all_target_history_ids=all_target_history_ids,
+ deleted_history_ids=deleted_history_ids )
+ # Switch to the history to which we copied
+ self.switch_history( id=str( history7.id ), name=history7.name )
+ self.check_history_for_string( hda1.name )
+ def test_070_reset_data_for_later_test_runs( self ):
"""Reseting data to enable later test runs to pass"""
self.delete_history( id=str( history3.id ) )
self.delete_history( id=str( history4.id ) )
self.delete_history( id=str( history5.id ) )
+ self.delete_history( id=str( history6.id ) )
+ self.delete_history( id=str( history7.id ) )
Hello everyone,
I am new to Galaxy, and I have an idea which may not be new to people here. I work
on Arabidopsis. As you may know, UCSC does not offer genome assemblies
for plants, so I am installing UCSC locally and building the genome assembly for
Arabidopsis; this work is half done now.
I also installed Galaxy. If I understand correctly, Galaxy links to
UCSC so the user can get data from UCSC. Since what I am interested in is
Arabidopsis data, I hope Galaxy will be able to retrieve data from the locally
installed UCSC in the future. I do not know whether this is possible;
maybe some of you have done it already. If so, I would be grateful if you
could tell me how to set it up.
Many thanks,
Xue
details: http://www.bx.psu.edu/hg/galaxy/rev/004cd81cff72
changeset: 2444:004cd81cff72
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Wed Jun 10 15:46:26 2009 -0400
description:
Fixes for test_get_data functional test script.
1 file(s) affected in this change:
test/functional/test_get_data.py
diffs (152 lines):
diff -r 08194c60c290 -r 004cd81cff72 test/functional/test_get_data.py
--- a/test/functional/test_get_data.py Wed Jun 10 12:22:49 2009 -0400
+++ b/test/functional/test_get_data.py Wed Jun 10 15:46:26 2009 -0400
@@ -1,87 +1,74 @@
+import galaxy.model
+from galaxy.model.orm import *
from base.twilltestcase import TwillTestCase
-""" Tests are executed in order, sorted by name"""
-
class UploadData( TwillTestCase ):
- def test_00_multi_upload( self ):
- """test_get_data.test_multi_upload: Testing multiple uploads"""
- self.login()
- self.upload_file('1.bed')
- self.verify_dataset_correctness('1.bed')
- self.upload_file('2.bed', dbkey='hg17')
- self.verify_dataset_correctness('2.bed')
- self.upload_file('3.bed', dbkey='hg17', ftype='bed')
- self.verify_dataset_correctness('3.bed')
- self.upload_file('4.bed.gz', dbkey='hg17', ftype='bed')
- self.verify_dataset_correctness('4.bed')
- self.upload_file('1.scf', ftype='scf')
- self.verify_dataset_correctness('1.scf')
- self.upload_file('1.scf.zip', ftype='binseq.zip')
- self.verify_dataset_correctness('1.scf.zip')
+ def test_000_upload_files_from_disk( self ):
+ """Test uploading data files from disk"""
+ self.logout()
+ self.login( email='tst(a)bx.psu.edu' )
+ history1 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ self.upload_file( '1.bed' )
+ hda1 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda1 is not None, "Problem retrieving hda1 from database"
+ self.verify_dataset_correctness( '1.bed', hid=str( hda1.hid ) )
+ self.upload_file( '2.bed', dbkey='hg17' )
+ hda2 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda2 is not None, "Problem retrieving hda2 from database"
+ self.verify_dataset_correctness( '2.bed', hid=str( hda2.hid ) )
+ self.upload_file( '3.bed', dbkey='hg17', ftype='bed' )
+ hda3 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda3 is not None, "Problem retrieving hda3 from database"
+ self.verify_dataset_correctness( '3.bed', hid=str( hda3.hid ) )
+ self.upload_file( '4.bed.gz', dbkey='hg17', ftype='bed' )
+ hda4 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda4 is not None, "Problem retrieving hda4 from database"
+ self.verify_dataset_correctness( '4.bed', hid=str( hda4.hid ) )
+ self.upload_file( '1.scf', ftype='scf' )
+ hda5 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda5 is not None, "Problem retrieving hda5 from database"
+ self.verify_dataset_correctness( '1.scf', hid=str( hda5.hid ) )
+ self.upload_file( '1.scf.zip', ftype='binseq.zip' )
+ hda6 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda6 is not None, "Problem retrieving hda6 from database"
+ self.verify_dataset_correctness( '1.scf.zip', hid=str( hda6.hid ) )
+ self.delete_history( id=str( history1.id ) )
+ def test_005_url_paste( self ):
+ """Test url paste behavior"""
+ # Deleting the current history should have created a new history
+ self.check_history_for_string( 'Your history is empty' )
+ history2 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
self.upload_url_paste( 'hello world' )
self.check_history_for_string( 'Pasted Entry' )
self.check_history_for_string( 'hello world' )
- self.delete_history_item( 1 )
- self.delete_history_item( 2 )
- self.delete_history_item( 3 )
- self.delete_history_item( 4 )
- self.delete_history_item( 5 )
- self.delete_history_item( 6 )
- self.delete_history_item( 7 )
self.upload_url_paste( u'hello world' )
self.check_history_for_string( 'Pasted Entry' )
self.check_history_for_string( 'hello world' )
- self.delete_history_item( 8 )
- def test_9999_clean_up( self ):
- self.delete_history()
- self.logout()
-
-class GetEncodeData( TwillTestCase ):
-
- def test_00_get_encode_data( self ):
- """test_get_data.test_get_encode_data"""
- self.login()
- self.run_tool('encode_import_chromatin_and_chromosomes1', hg17=['cc.EarlyRepSeg.20051216.bed'] )
- #hg17=[ "cc.EarlyRepSeg.20051216.bed", "cc.EarlyRepSeg.20051216.gencode_partitioned.bed", "cc.LateRepSeg.20051216.bed", "cc.LateRepSeg.20051216.gencode_partitioned.bed", "cc.MidRepSeg.20051216.bed", "cc.MidRepSeg.20051216.gencode_partitioned.bed" ] )
+ self.delete_history( id=str( history2.id ) )
+ def test_010_upload_encode_data( self ):
+ """Test uploading encode data"""
+ # Deleting the current history should have created a new history
+ self.check_history_for_string( 'Your history is empty' )
+ history3 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ self.run_tool( 'encode_import_chromatin_and_chromosomes1', hg17=['cc.EarlyRepSeg.20051216.bed'] )
self.wait()
- self.verify_dataset_correctness('cc.EarlyRepSeg.20051216.bed', hid=1)
- #self.verify_dataset_correctness('cc.EarlyRepSeg.20051216.gencode_partitioned.bed', hid=2)
- #self.verify_dataset_correctness('cc.LateRepSeg.20051216.bed', hid=3)
- #self.verify_dataset_correctness('cc.LateRepSeg.20051216.gencode_partitioned.bed', hid=4)
- #self.verify_dataset_correctness('cc.MidRepSeg.20051216.bed', hid=5)
- #self.verify_dataset_correctness('cc.MidRepSeg.20051216.gencode_partitioned.bed', hid=6)
+ hda7 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda7 is not None, "Problem retrieving hda7 from database"
+ self.verify_dataset_correctness( 'cc.EarlyRepSeg.20051216.bed', hid=str( hda7.hid ) )
self.run_tool('encode_import_gencode1', hg17=['gencode.CDS.20051206.bed'])
self.wait()
- self.verify_dataset_correctness('sc_3D_cds.bed', hid=2)
- self.delete_history_item( 1 )
- self.delete_history_item( 2 )
- def test_9999_clean_up( self ):
- self.delete_history()
+ hda8 = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert hda8 is not None, "Problem retrieving hda8 from database"
+ self.verify_dataset_correctness( 'sc_3D_cds.bed', hid=str( hda8.hid ) )
+ self.delete_history( id=str( history3.id ) )
+ def test_015_reset_data_for_later_test_runs( self ):
+ """Reseting data to enable later test runs to pass"""
self.logout()
-
-class DataSources( TwillTestCase ):
-
- #def test_hbvar(self):
- # """Getting hybrid gene mutations from HbVar"""
- # #self.load_cookies("hbvar_cookie.txt")
- # self.clear_history()
- # self.run_tool('hbvar')
- # params = dict(
- # htyp="any hybrid gene",
- # )
- # self.submit_form(form=1, button="Submit Query", **params)
- # params = dict(
- # display_format="galaxy",
- # )
- # self.submit_form(form=1, button="Go", **params)
- # params = dict(
- # build="hg17",
- # )
- # self.submit_form(form=1, button="ok", **params);
- # """
- # TODO: Currently fails when using sqlite, although successful when
- # using Postgres. Upgrading our version of sqlite may fix this, but
- # confirmation is required.
- # """
- # self.verify_dataset_correctness('hbvar_hybrid_genes.dat')
- pass