galaxy-dev
Threads by month
- ----- 2025 -----
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2009 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2008 -----
- December
- November
- October
- September
- August
- 10008 discussions
07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/fdf07565eedf
changeset: 2963:fdf07565eedf
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Nov 04 12:58:48 2009 -0500
description:
Add the GeneTrack egg. Also includes some enhancements and cleanup of the eggs/fetch/scramble code.
diffstat:
dist-eggs.ini | 4 +
eggs.ini | 5 +-
lib/galaxy/config.py | 13 +++
lib/galaxy/eggs/__init__.py | 149 ++++++++++++++----------------------
scripts/fetch_eggs.py | 36 ++++----
scripts/scramble.py | 28 +++---
scripts/scramble/scripts/GeneTrack.py | 52 +++++++++++++
7 files changed, 164 insertions(+), 123 deletions(-)
diffs (490 lines):
diff -r 92d395c8614b -r fdf07565eedf dist-eggs.ini
--- a/dist-eggs.ini Wed Nov 04 12:30:36 2009 -0500
+++ b/dist-eggs.ini Wed Nov 04 12:58:48 2009 -0500
@@ -57,3 +57,7 @@
all = py2.4-all py2.5-all py2.6-all
; default hosts for platform-inspecific eggs
noplatform = py2.4-linux-i686-ucs4 py2.5-linux-i686-ucs4 py2.6-linux-i686-ucs4
+
+[ignore]
+; Don't build these eggs on these platforms:
+GeneTrack = py2.4-noplatform
diff -r 92d395c8614b -r fdf07565eedf eggs.ini
--- a/eggs.ini Wed Nov 04 12:30:36 2009 -0500
+++ b/eggs.ini Wed Nov 04 12:58:48 2009 -0500
@@ -9,7 +9,7 @@
[general]
repository = http://eggs.g2.bx.psu.edu
; these eggs must be scrambled for your local environment
-no_download = pbs_python DRMAA_python
+no_auto = pbs_python DRMAA_python
[eggs:platform]
bx_python = 0.5.0
@@ -31,6 +31,7 @@
decorator = 3.1.2
docutils = 0.4
elementtree = 1.2.6_20050316
+GeneTrack = 2.0.0_beta_1
lrucache = 0.2
;lsprof - james
Mako = 0.2.5
@@ -61,6 +62,7 @@
python_lzo = _static
flup = .dev_r2311
bx_python = _dev_r4bf1f32e6b76
+GeneTrack = _dev_raa786e9fc131d998e532a1aef39d108850c9e93d
; nose = .dev_r7156749efc58
; source location, necessary for scrambling
@@ -81,6 +83,7 @@
decorator = http://pypi.python.org/packages/source/d/decorator/decorator-3.1.2.tar.gz
docutils = http://downloads.sourceforge.net/docutils/docutils-0.4.tar.gz
elementtree = http://effbot.org/downloads/elementtree-1.2.6-20050316.tar.gz
+GeneTrack = http://github.com/ialbert/genetrack-central/tarball/aa786e9fc131d998e532a1aef39d108850c9e93d
lrucache = http://evan.prodromou.name/lrucache/lrucache-0.2.tar.gz
Mako = http://www.makotemplates.org/downloads/Mako-0.2.5.tar.gz
MyghtyUtils = http://cheeseshop.python.org/packages/source/M/MyghtyUtils/MyghtyUtils-0.52.tar.gz
diff -r 92d395c8614b -r fdf07565eedf lib/galaxy/config.py
--- a/lib/galaxy/config.py Wed Nov 04 12:30:36 2009 -0500
+++ b/lib/galaxy/config.py Wed Nov 04 12:58:48 2009 -0500
@@ -8,6 +8,9 @@
import ConfigParser
from galaxy.util import string_as_bool
+from galaxy import eggs
+import pkg_resources
+
log = logging.getLogger( __name__ )
def resolve_path( path, root ):
@@ -126,6 +129,16 @@
for path in self.tool_config, self.datatypes_config:
if not os.path.isfile(path):
raise ConfigurationError("File not found: %s" % path )
+ # Check job runners so the admin can scramble dependent egg.
+ if self.start_job_runners is not None:
+ runner_to_egg = dict( pbs = 'pbs_python', sge = 'DRMAA_python' )
+ for runner in self.start_job_runners.split( ',' ):
+ try:
+ pkg_resources.require( runner_to_egg[runner] )
+ except eggs.EggNotFetchable, e:
+ raise eggs.EggNotFetchable( 'You must scramble the %s egg to use the %s job runner. Instructions are available at:\n http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster' % ( runner_to_egg[runner], runner ) )
+ except KeyError:
+ raise Exception( 'No such job runner: %s. Please double-check the value of start_job_runners in universe_wsgi.ini' % runner )
def is_admin_user( self,user ):
"""
diff -r 92d395c8614b -r fdf07565eedf lib/galaxy/eggs/__init__.py
--- a/lib/galaxy/eggs/__init__.py Wed Nov 04 12:30:36 2009 -0500
+++ b/lib/galaxy/eggs/__init__.py Wed Nov 04 12:58:48 2009 -0500
@@ -14,11 +14,12 @@
# within tools. i don't know of any way around this. -ndc
galaxy_dir = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "..", "..", ".." ) )
-class NewEgg( Exception ):
- pass
-
class EggNotFetchable( Exception ):
- pass
+ def __init__( self, eggs ):
+ if type( eggs ) in ( list, tuple ):
+ self.eggs = eggs
+ else:
+ self.eggs = [ eggs ]
class PlatformNotSupported( Exception ):
pass
@@ -157,6 +158,14 @@
# scramble helper methods
def get_archive_path( self, url ):
return os.path.join( Egg.archive_dir, (url.rsplit( '/', 1 ))[1] )
+ def get_tld( self, names ):
+ tld = names[0].split( os.path.sep, 1 )[0]
+ for name in names:
+ try:
+ assert tld == name.split( os.path.sep, 1 )[0]
+ except:
+ raise Exception( "get_tld(): Archive contains multiple top-level directories!" )
+ return tld
def fetch_source( self ):
if not os.access( Egg.archive_dir, os.F_OK ):
os.makedirs( Egg.archive_dir )
@@ -195,7 +204,7 @@
log.warning( " %s" % self.buildpath )
def unpack_zip( self, source_path, unpack_path ):
z = zipfile.ZipFile( source_path, "r" )
- tld = ( z.namelist()[0].split( os.path.sep, 1 ) )[0]
+ tld = self.get_tld( z.namelist() )
cur = os.getcwd()
os.chdir( unpack_path )
for fn in z.namelist():
@@ -211,12 +220,12 @@
os.chdir( cur )
def unpack_tar( self, source_path, unpack_path ):
t = tarfile.open( source_path, "r" )
- tld = ( t.getnames()[0].split( os.path.sep, 1 ) )[0]
+ members = filter( lambda x: "ez_setup" not in x.name and "pax_global_header" != x.name, t.getmembers() )
+ tld = self.get_tld( [ x.name for x in members ] )
cur = os.getcwd()
os.chdir( unpack_path )
- for member in t.getmembers():
- if "ez_setup" not in member.name:
- t.extract( member )
+ for member in members:
+ t.extract( member )
t.close()
os.rename( tld, self.name )
os.chdir( cur )
@@ -265,7 +274,7 @@
self.eggs = {}
self.config = CSConfigParser()
self.repo = None
- self.no_download = []
+ self.no_auto = []
self.platform = { 'peak' : get_platform( platform=True, peak=True ), 'galaxy' : get_platform( platform=True, peak=False ) }
self.noplatform = { 'peak' : get_platform( platform=False, peak=True ), 'galaxy' : get_platform( platform=False, peak=False ) }
def parse( self ):
@@ -273,7 +282,7 @@
raise Exception( "unable to read egg config from %s" % Crate.config_file )
try:
self.repo = self.config.get( "general", "repository" )
- self.no_download = self.config.get( "general", "no_download" ).split()
+ self.no_auto = self.config.get( "general", "no_auto" ).split()
except ConfigParser.NoSectionError:
raise Exception( "eggs.ini is missing required section [general]" )
#except ConfigParser.NoOptionError:
@@ -316,19 +325,19 @@
return True
def fetch( self, ignore=[] ):
"""
- Fetch all eggs in the crate (ignoring any that you want to
- ignore). If your platform isn't available, it'll attempt to
- download all the noplatform eggs before failing.
+ Fetch all eggs in the crate (ignoring any that you want to
+ ignore). If your platform isn't available, it'll attempt to
+ download all the noplatform eggs before failing.
"""
skip_platform = False
- ignore.extend( self.no_download )
+ ignore.extend( self.no_auto )
+ missing = []
try:
f = urllib2.urlopen( "%s/%s" % ( self.repo, self.platform['galaxy'] ) )
f.close()
except urllib2.HTTPError, e:
if e.code == 404:
skip_platform = True
- missing = []
for egg in self.eggs.itervalues():
if ignore is not None:
if egg.name in ignore:
@@ -336,11 +345,18 @@
if skip_platform and egg.platform['galaxy'] == self.platform['galaxy']:
missing.append( egg.name )
continue
- egg.fetch()
+ try:
+ egg.fetch()
+ except EggNotFetchable:
+ missing.append( egg.name )
if skip_platform:
raise PlatformNotSupported( self.platform['galaxy'] )
+ if missing:
+ raise EggNotFetchable( missing )
return True
def scramble( self, ignore=None ):
+ # Crate-scrambling the no_auto eggs makes no sense
+ ignore.extend( self.no_auto )
for egg in self.eggs.itervalues():
if ignore is not None:
if egg.name in ignore:
@@ -379,21 +395,14 @@
if self.config.read( DistCrate.dist_config_file ) == []:
raise Exception( "unable to read dist egg config from %s" % DistCrate.dist_config_file )
try:
- self.hosts = self.dictize_list_of_tuples( self.config.items( "hosts" ) )
- self.groups = self.dictize_list_of_tuples( self.config.items( "groups" ) )
+ self.hosts = dict( self.config.items( "hosts" ) )
+ self.groups = dict( self.config.items( "groups" ) )
+ self.ignore = dict( self.config.items( "ignore" ) )
except ConfigParser.NoSectionError, e:
raise Exception( "eggs.ini is missing required section: %s" % e )
self.platforms = self.get_platforms( self.build_on )
self.noplatforms = self.get_platforms( 'noplatform' )
Crate.parse( self )
- def dictize_list_of_tuples( self, lot ):
- """
- Makes a list of 2-value tuples into a dict.
- """
- d = {}
- for k, v in lot:
- d[k] = v
- return d
def get_platforms( self, wanted ):
# find all the members of a group and process them
if self.groups.has_key( wanted ):
@@ -409,8 +418,8 @@
raise Exception( "unknown platform: %s" % wanted )
def parse_egg_section( self, eggs, type ):
"""
- Overrides the base class's method. Here we use the third arg
- to find out what type of egg we'll be building.
+ Overrides the base class's method. Here we use the third arg
+ to find out what type of egg we'll be building.
"""
if type == "platform":
platforms = self.platforms
@@ -418,14 +427,16 @@
platforms = self.noplatforms
for name, version in eggs:
for platform in platforms:
- # can't use the regular methods here because we're not
- # actually ON the target platform
+ # can't use the regular methods here because we're not
+ # actually ON the target platform
if type == "platform":
gplat = platform
pplat = platform.rsplit('-', 1)[0]
elif type == "noplatform":
gplat = "%s-noplatform" % platform.split('-', 1)[0]
pplat = platform.split('-', 1)[0]
+ if name in self.ignore and gplat in self.ignore[name].split():
+ continue
egg = Egg()
try:
egg.tag = self.config.get( "tags", name )
@@ -448,75 +459,31 @@
class GalaxyConfig:
config_file = os.path.join( galaxy_dir, "universe_wsgi.ini" )
+ always_conditional = ( 'GeneTrack', )
def __init__( self ):
self.config = ConfigParser.ConfigParser()
if self.config.read( GalaxyConfig.config_file ) == []:
raise Exception( "error: unable to read Galaxy config from %s" % GalaxyConfig.config_file )
# TODO: conditionals should really be handled better than this
def check_conditional( self, egg_name ):
- if egg_name == "psycopg2":
+ if egg_name == "pysqlite":
+ # SQLite is different since it can be specified in two config vars and defaults to True
try:
- if self.config.get( "app:main", "database_connection" ).startswith( "postgres://" ):
- return True
- else:
- return False
+ return self.config.get( "app:main", "database_connection" ).startswith( "sqlite://" )
+ except:
+ return True
+ else:
+ try:
+ return { "psycopg2": lambda: self.config.get( "app:main", "database_connection" ).startswith( "postgres://" ),
+ "MySQL_python": lambda: self.config.get( "app:main", "database_connection" ).startswith( "mysql://" ),
+ "DRMAA_python": lambda: "sge" in self.config.get( "app:main", "start_job_runners" ).split(","),
+ "pbs_python": lambda: "pbs" in self.config.get( "app:main", "start_job_runners" ).split(","),
+ "threadframe": lambda: self.config.get( "app:main", "use_heartbeat" ),
+ "guppy": lambda: self.config.get( "app:main", "use_memdump" ),
+ "GeneTrack": lambda: sys.version_info[:2] >= ( 2, 5 ),
+ }.get( egg_name, lambda: True )()
except:
return False
- elif egg_name == "pysqlite":
- try:
- # database connection is the sqlite alchemy dialect (not really
- # a documented usage in Galaxy, but it would work)
- if self.config.get( "app:main", "database_connection" ).startswith( "sqlite://" ):
- return True
- else:
- return False
- # database connection is unset, so sqlite is the default
- except:
- return True
- elif egg_name == "DRMAA_python":
- try:
- runners = self.config.get( "app:main", "start_job_runners" ).split(",")
- if "sge" in runners:
- return True
- else:
- return False
- except:
- return False
- elif egg_name == "pbs_python":
- try:
- runners = self.config.get( "app:main", "start_job_runners" ).split(",")
- if "pbs" in runners:
- return True
- else:
- return False
- except:
- return False
- elif egg_name == "threadframe":
- try:
- if self.config.get( "app:main", "use_heartbeat" ):
- return True
- else:
- return False
- except:
- return False
- elif egg_name == "guppy":
- try:
- if self.config.get( "app:main", "use_memdump" ):
- return True
- else:
- return False
- except:
- return False
- elif egg_name == "MySQL_python":
- try:
- if self.config.get( "app:main", "database_connection" ).startswith( "mysql://" ):
- return True
- else:
- return False
- except:
- return False
- else:
- return True
def require( pkg ):
# add the egg dirs to sys.path if they're not already there
diff -r 92d395c8614b -r fdf07565eedf scripts/fetch_eggs.py
--- a/scripts/fetch_eggs.py Wed Nov 04 12:30:36 2009 -0500
+++ b/scripts/fetch_eggs.py Wed Nov 04 12:58:48 2009 -0500
@@ -25,26 +25,28 @@
c.platform = { 'peak' : sys.argv[2].rsplit('-',1)[0], 'galaxy' : sys.argv[2] }
c.parse()
try:
+ galaxy_config = GalaxyConfig()
+ names = []
if len( sys.argv ) == 1:
- galaxy_config = GalaxyConfig()
- ignore = []
- for name in c.get_names():
- if not galaxy_config.check_conditional( name ):
- ignore.append( name )
- c.fetch( ignore=ignore )
+ names = c.get_names()
+ elif sys.argv[1] == 'all':
+ names = galaxy_config.always_conditional
else:
- if sys.argv[1] == 'all':
- c.fetch()
- else:
- egg = c.get( sys.argv[1] )
- if egg is None:
- print "error: %s not in eggs.ini" % sys.argv[1]
- sys.exit( 1 )
- egg.fetch()
+ # Fetch a specific egg
+ egg = c.get( sys.argv[1] )
+ if egg is None:
+ print "error: %s not in eggs.ini" % sys.argv[1]
+ sys.exit( 1 )
+ egg.fetch()
+ sys.exit( 0 )
+ ignore = filter( lambda x: not galaxy_config.check_conditional( x ), list( names ) )
+ c.fetch( ignore )
except EggNotFetchable, e:
- print "One of the python eggs necessary to run Galaxy couldn't be downloaded"
- print "automatically. You may want to try building it by hand with:"
- print " python scripts/scramble.py %s" % e
+ print "One or more of the python eggs necessary to run Galaxy couldn't be"
+ print "downloaded automatically. You may want to try building them by"
+ print "hand with:"
+ for egg in e.eggs:
+ print " python scripts/scramble.py %s" % egg
sys.exit( 1 )
except PlatformNotSupported, e:
print "Your platform (%s) is not supported." % e
diff -r 92d395c8614b -r fdf07565eedf scripts/scramble.py
--- a/scripts/scramble.py Wed Nov 04 12:30:36 2009 -0500
+++ b/scripts/scramble.py Wed Nov 04 12:58:48 2009 -0500
@@ -18,19 +18,19 @@
c = Crate()
c.parse()
+galaxy_config = GalaxyConfig()
+names = []
if len( sys.argv ) == 1:
- galaxy_config = GalaxyConfig()
- ignore = []
- for name in c.get_names():
- if not galaxy_config.check_conditional( name ):
- ignore.append( name )
- c.scramble( ignore=ignore )
+ names = c.get_names()
+elif sys.argv[1] == 'all':
+ names = galaxy_config.always_conditional
else:
- if sys.argv[1] == 'all':
- c.scramble()
- else:
- egg = c.get( sys.argv[1] )
- if egg is None:
- print "error: %s not in eggs.ini" % sys.argv[1]
- sys.exit( 1 )
- egg.scramble()
+# Scramble a specific egg
+ egg = c.get( sys.argv[1] )
+ if egg is None:
+ print "error: %s not in eggs.ini" % sys.argv[1]
+ sys.exit( 1 )
+ egg.scramble()
+ sys.exit( 0 )
+ignore = filter( lambda x: not galaxy_config.check_conditional( x ), list( names ) )
+c.scramble( ignore=ignore )
diff -r 92d395c8614b -r fdf07565eedf scripts/scramble/scripts/GeneTrack.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/scramble/scripts/GeneTrack.py Wed Nov 04 12:58:48 2009 -0500
@@ -0,0 +1,52 @@
+import os, sys, shutil
+
+# change back to the build dir
+if os.path.dirname( sys.argv[0] ) != "":
+ os.chdir( os.path.dirname( sys.argv[0] ) )
+
+# find setuptools
+scramble_lib = os.path.join( "..", "..", "..", "lib" )
+sys.path.append( scramble_lib )
+from ez_setup import use_setuptools
+use_setuptools( download_delay=8, to_dir=scramble_lib )
+from setuptools import *
+
+# get the tag
+if os.access( ".galaxy_tag", os.F_OK ):
+ tagfile = open( ".galaxy_tag", "r" )
+ tag = tagfile.readline().strip()
+else:
+ tag = None
+
+# in case you're running this by hand from a dirty module source dir
+for dir in [ "build", "dist" ]:
+ if os.access( dir, os.F_OK ):
+ print "scramble.py: removing dir:", dir
+ shutil.rmtree( dir )
+
+# reset args for distutils
+me = sys.argv[0]
+sys.argv = [ me ]
+sys.argv.append( "egg_info" )
+if tag is not None:
+ #sys.argv.append( "egg_info" )
+ sys.argv.append( "--tag-build=%s" %tag )
+# svn revision (if any) is handled directly in tag-build
+sys.argv.append( "--no-svn-revision" )
+sys.argv.append( "bdist_egg" )
+
+print "scramble.py: Creating setup.py for GeneTrack"
+setup_py = """from setuptools import setup, find_packages
+setup(
+ name = "GeneTrack",
+ version = "2.0.0-beta-1",
+ package_data = {'':["*.*"]},
+ packages = find_packages(),
+ zip_safe = True,
+)
+"""
+open( 'setup.py', 'w' ).write( setup_py )
+
+
+# do it
+execfile( "setup.py", globals(), locals() )
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/adfcd8bb13d1
changeset: 2961:adfcd8bb13d1
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Wed Nov 04 12:21:13 2009 -0500
description:
Quick fix for maf_utilities.src_split.
diffstat:
lib/galaxy/tools/util/maf_utilities.py | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r 91a3bbb52d7a -r adfcd8bb13d1 lib/galaxy/tools/util/maf_utilities.py
--- a/lib/galaxy/tools/util/maf_utilities.py Wed Nov 04 12:15:53 2009 -0500
+++ b/lib/galaxy/tools/util/maf_utilities.py Wed Nov 04 12:21:13 2009 -0500
@@ -23,7 +23,7 @@
fields = src.split( SRC_SPLIT_CHAR, 1 )
spec = fields.pop( 0 )
if fields:
- chrom = fields
+ chrom = fields.pop( 0 )
else:
chrom = spec
return spec, chrom
1
0
07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/92d395c8614b
changeset: 2962:92d395c8614b
user: rc
date: Wed Nov 04 12:30:36 2009 -0500
description:
Fixed a bug in user info - adding new address
diffstat:
lib/galaxy/web/controllers/user.py | 114 +++++++++++++++++++++++---------------
templates/user/edit_address.mako | 2 +-
templates/user/info.mako | 2 +-
templates/user/new_address.mako | 33 +++++++++++
4 files changed, 104 insertions(+), 47 deletions(-)
diffs (200 lines):
diff -r adfcd8bb13d1 -r 92d395c8614b lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Wed Nov 04 12:21:13 2009 -0500
+++ b/lib/galaxy/web/controllers/user.py Wed Nov 04 12:30:36 2009 -0500
@@ -272,7 +272,7 @@
user_address.country = util.restore_text(params.get('field_%i_country' % index, ''))
user_address.phone = util.restore_text(params.get('field_%i_phone' % index, ''))
user_address.flush()
- trans.user.refresh()
+ trans.sa_session.refresh( user )
values.append(int(user_address.id))
elif value == unicode('none'):
values.append('')
@@ -618,57 +618,81 @@
# User not logged in, history group must be only public
return trans.show_error_message( "You must be logged in to change your default permitted actions." )
@web.expose
- def new_address( self, trans, short_desc='', name='', institution='', address1='',
- address2='', city='', state='', postal_code='', country='', phone='' ):
- if trans.app.config.require_login:
- refresh_frames = [ 'masthead', 'history', 'tools' ]
- else:
- refresh_frames = [ 'masthead', 'history' ]
+ def new_address( self, trans, **kwd ):
+ params = util.Params( kwd )
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
+ admin_view = params.get( 'admin_view', 'False' )
+ error = ''
+ user = trans.sa_session.query( trans.app.model.User ).get( int( params.get( 'user_id', None ) ) )
if not trans.app.config.allow_user_creation and not trans.user_is_admin():
return trans.show_error_message( 'User registration is disabled. Please contact your Galaxy administrator for an account.' )
- short_desc_error = name_error = institution_error = address1_error = city_error = None
- address2_error = state_error = postal_code_error = country_error = phone_error = None
- if short_desc:
- if not len( short_desc ):
- short_desc_error = 'Enter a short description for this address'
- elif not len( name ):
- name_error = 'Enter the full name'
- elif not len( institution ):
- institution_error = 'Enter the institution associated with the user'
- elif not len ( address1 ):
- address1_error = 'Enter the address'
- elif not len( city ):
- city_error = 'Enter the city'
- elif not len( state ):
- state_error = 'Enter the state/province/region'
- elif not len( postal_code ):
- postal_code_error = 'Enter the postal code'
- elif not len( country ):
- country_error = 'Enter the country'
+ if params.get( 'save_new_address_button', None ) == 'Save':
+ if not len( util.restore_text( params.get( 'short_desc', '' ) ) ):
+ error = 'Enter a short description for this address'
+ elif not len( util.restore_text( params.get( 'name', '' ) ) ):
+ error = 'Enter the full name'
+ elif not len( util.restore_text( params.get( 'institution', '' ) ) ):
+ error = 'Enter the institution associated with the user'
+ elif not len ( util.restore_text( params.get( 'address1', '' ) ) ):
+ error = 'Enter the address'
+ elif not len( util.restore_text( params.get( 'city', '' ) ) ):
+ error = 'Enter the city'
+ elif not len( util.restore_text( params.get( 'state', '' ) ) ):
+ error = 'Enter the state/province/region'
+ elif not len( util.restore_text( params.get( 'postal_code', '' ) ) ):
+ error = 'Enter the postal code'
+ elif not len( util.restore_text( params.get( 'country', '' ) ) ):
+ error = 'Enter the country'
else:
- user_address = trans.app.model.UserAddress( user=trans.user, desc=short_desc,
- name=name, institution=institution,
- address=address1+' '+address2, city=city,
- state=state, postal_code=postal_code,
- country=country, phone=phone)
+ user_address = trans.app.model.UserAddress( user=user )
+ user_address.desc = util.restore_text( params.get( 'short_desc', '' ) )
+ user_address.name = util.restore_text( params.get( 'name', '' ) )
+ user_address.institution = util.restore_text( params.get( 'institution', '' ) )
+ user_address.address = util.restore_text( params.get( 'address1', '' ) )+' '+util.restore_text( params.get( 'address2', '' ) )
+ user_address.city = util.restore_text( params.get( 'city', '' ) )
+ user_address.state = util.restore_text( params.get( 'state', '' ) )
+ user_address.postal_code = util.restore_text( params.get( 'postal_code', '' ) )
+ user_address.country = util.restore_text( params.get( 'country', '' ) )
+ user_address.phone = util.restore_text( params.get( 'phone', '' ) )
user_address.flush()
+ msg = 'Address <b>%s</b> has been added' % user_address.desc
+ if admin_view == 'True':
+ return trans.response.send_redirect( web.url_for( controller='user',
+ action='show_info',
+ admin_view=True,
+ user_id=user.id,
+ msg=msg,
+ messagetype='done') )
return trans.response.send_redirect( web.url_for( controller='user',
action='show_info',
- msg='Address <b>%s</b> has been added' % user_address.desc,
+ msg=msg,
messagetype='done') )
-
- return trans.show_form(
- web.FormBuilder( web.url_for(), "New address", submit_text="Save" )
- .add_text( "short_desc", "Short address description", value=short_desc, error=short_desc_error )
- .add_text( "name", "Name", value=name, error=name_error )
- .add_text( "institution", "Institution", value=institution, error=institution_error )
- .add_text( "address1", "Address Line 1", value=address1, error=address1_error )
- .add_text( "address2", "Address Line 2", value=address2, error=address2_error )
- .add_text( "city", "City", value=city, error=city_error )
- .add_text( "state", "State/Province/Region", value=state, error=state_error )
- .add_text( "postal_code", "Postal Code", value=postal_code, error=postal_code_error )
- .add_text( "country", "Country", value=country, error=country_error )
- .add_text( "phone", "Phone", value=phone, error=phone_error ) )
+ else:
+ # show the address form with the current values filled in
+ # create the widgets for each address field
+ widgets = []
+ widgets.append(dict(label='Short description',
+ widget=TextField( 'short_desc', 40, '' ) ) )
+ widgets.append(dict(label='Name',
+ widget=TextField( 'name', 40, '' ) ) )
+ widgets.append(dict(label='Institution',
+ widget=TextField( 'institution', 40, '' ) ) )
+ widgets.append(dict(label='Address Line 1',
+ widget=TextField( 'address1', 40, '' ) ) )
+ widgets.append(dict(label='City',
+ widget=TextField( 'city', 40, '' ) ) )
+ widgets.append(dict(label='State',
+ widget=TextField( 'state', 40, '' ) ) )
+ widgets.append(dict(label='Postal Code',
+ widget=TextField( 'postal_code', 40, '' ) ) )
+ widgets.append(dict(label='Country',
+ widget=TextField( 'country', 40, '' ) ) )
+ widgets.append(dict(label='Phone',
+ widget=TextField( 'phone', 40, '' ) ) )
+ return trans.fill_template( 'user/new_address.mako', user=user,
+ admin_view=admin_view,
+ widgets=widgets, msg=msg, messagetype=messagetype)
@web.expose
def edit_address( self, trans, **kwd ):
params = util.Params( kwd )
diff -r adfcd8bb13d1 -r 92d395c8614b templates/user/edit_address.mako
--- a/templates/user/edit_address.mako Wed Nov 04 12:21:13 2009 -0500
+++ b/templates/user/edit_address.mako Wed Nov 04 12:30:36 2009 -0500
@@ -11,7 +11,7 @@
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='user', action='show_info')}">
+ <a class="action-button" href="${h.url_for( controller='user', action='show_info', admin_view=admin_view, user_id=user.id)}">
<span>Manage User Information</span></a>
</li>
</ul>
diff -r adfcd8bb13d1 -r 92d395c8614b templates/user/info.mako
--- a/templates/user/info.mako Wed Nov 04 12:21:13 2009 -0500
+++ b/templates/user/info.mako Wed Nov 04 12:30:36 2009 -0500
@@ -99,7 +99,7 @@
</div>
</form>
%endif
- <form name="user_info" id="user_info" action="${h.url_for( controller='user', action='new_address' )}" method="post" >
+ <form name="user_info" id="user_info" action="${h.url_for( controller='user', action='new_address', user_id=user.id, admin_view=admin_view )}" method="post" >
<div class="toolFormTitle">User Addresses</div>
<div class="toolFormBody">
%if user.addresses:
diff -r adfcd8bb13d1 -r 92d395c8614b templates/user/new_address.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/user/new_address.mako Wed Nov 04 12:30:36 2009 -0500
@@ -0,0 +1,33 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+
+%if msg:
+ ${render_msg( msg, messagetype )}
+%endif
+</br>
+</br>
+<h3>New address</h3>
+
+<ul class="manage-table-actions">
+ <li>
+ <a class="action-button" href="${h.url_for( controller='user', action='show_info', admin_view=admin_view, user_id=user.id)}">
+ <span>Manage User Information</span></a>
+ </li>
+</ul>
+<div class="toolForm">
+<form name="login_info" id="login_info" action="${h.url_for( controller='user', action='new_address', admin_view=admin_view, user_id=user.id )}" method="post" >
+ <div class="toolFormTitle">New address</div>
+ <div class="toolFormBody">
+ %for field in widgets:
+ <div class="form-row">
+ <label>${field[ 'label' ]}</label>
+ ${field[ 'widget' ].get_html()}
+ </div>
+ %endfor
+ <div class="form-row">
+ <input type="submit" name="save_new_address_button" value="Save">
+ </div>
+ </div>
+</form>
+</div>
\ No newline at end of file
1
0
07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/91a3bbb52d7a
changeset: 2960:91a3bbb52d7a
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Wed Nov 04 12:15:53 2009 -0500
description:
Fix maf_utilities.src_split. Should resolve tickets #200, #215 and #220.
Note: Do not use bx.align.src_split in Galaxy.
diffstat:
lib/galaxy/datatypes/converters/maf_to_interval_converter.py | 2 +-
lib/galaxy/datatypes/converters/maf_to_interval_converter.xml | 2 +-
lib/galaxy/tools/util/maf_utilities.py | 11 +++++++----
3 files changed, 9 insertions(+), 6 deletions(-)
diffs (49 lines):
diff -r 4bca8f8ed94d -r 91a3bbb52d7a lib/galaxy/datatypes/converters/maf_to_interval_converter.py
--- a/lib/galaxy/datatypes/converters/maf_to_interval_converter.py Wed Nov 04 11:35:50 2009 -0500
+++ b/lib/galaxy/datatypes/converters/maf_to_interval_converter.py Wed Nov 04 12:15:53 2009 -0500
@@ -21,7 +21,7 @@
for block in bx.align.maf.Reader( open( input_name, 'r' ) ):
for c in maf_utilities.iter_components_by_src_start( block, species ):
if c is not None:
- out.write( "%s\t%i\t%i\t%s\n" % ( bx.align.src_split( c.src )[-1], c.get_forward_strand_start(), c.get_forward_strand_end(), c.strand ) )
+ out.write( "%s\t%i\t%i\t%s\n" % ( maf_utilities.src_split( c.src )[-1], c.get_forward_strand_start(), c.get_forward_strand_end(), c.strand ) )
count += 1
except Exception, e:
print >> sys.stderr, "There was a problem processing your input: %s" % e
diff -r 4bca8f8ed94d -r 91a3bbb52d7a lib/galaxy/datatypes/converters/maf_to_interval_converter.xml
--- a/lib/galaxy/datatypes/converters/maf_to_interval_converter.xml Wed Nov 04 11:35:50 2009 -0500
+++ b/lib/galaxy/datatypes/converters/maf_to_interval_converter.xml Wed Nov 04 12:15:53 2009 -0500
@@ -1,4 +1,4 @@
-<tool id="CONVERTER_maf_to_interval_0" name="Convert MAF to Genomic Intervals" version="1.0.1">
+<tool id="CONVERTER_maf_to_interval_0" name="Convert MAF to Genomic Intervals" version="1.0.2">
<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
<command interpreter="python">maf_to_interval_converter.py $output1 $input1 ${input1.metadata.dbkey}</command>
<inputs>
diff -r 4bca8f8ed94d -r 91a3bbb52d7a lib/galaxy/tools/util/maf_utilities.py
--- a/lib/galaxy/tools/util/maf_utilities.py Wed Nov 04 11:35:50 2009 -0500
+++ b/lib/galaxy/tools/util/maf_utilities.py Wed Nov 04 12:15:53 2009 -0500
@@ -20,9 +20,12 @@
SRC_SPLIT_CHAR = '.'
def src_split( src ):
- spec, chrom = bx.align.maf.src_split( src )
- if None in [ spec, chrom ]:
- spec = chrom = src
+ fields = src.split( SRC_SPLIT_CHAR, 1 )
+ spec = fields.pop( 0 )
+ if fields:
+ chrom = fields
+ else:
+ chrom = spec
return spec, chrom
def src_merge( spec, chrom, contig = None ):
@@ -530,7 +533,7 @@
if suffix:
header = "%s%s" % ( header, suffix )
else:
- header = "%s%s" % ( header, bx.align.src_split( component.src )[ 0 ] )
+ header = "%s%s" % ( header, src_split( component.src )[ 0 ] )
return header
def get_attributes_from_fasta_header( header ):
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/4bca8f8ed94d
changeset: 2959:4bca8f8ed94d
user: James Taylor <james(a)jamestaylor.org>
date: Wed Nov 04 11:35:50 2009 -0500
description:
Fix tool test specification by id
diffstat:
test/functional/test_toolbox.py | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (9 lines):
diff -r 0e78d558fe57 -r 4bca8f8ed94d test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py Wed Nov 04 10:16:20 2009 -0500
+++ b/test/functional/test_toolbox.py Wed Nov 04 11:35:50 2009 -0500
@@ -128,4 +128,4 @@
for j, testdef in enumerate( tool.tests ):
name = "%s ( %s ) > %s" % ( tool.name, tool.id, testdef.name )
testcase = get_case( testdef, name )
- G[ 'testcase_%d_%d' % ( i, j ) ] = testcase
+ G[ testcase.__name__ ] = testcase
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/0e78d558fe57
changeset: 2958:0e78d558fe57
user: rc
date: Wed Nov 04 10:16:20 2009 -0500
description:
Fixed the ordering in selectboxes
Fixed a bug in editing samples in the request page
diffstat:
lib/galaxy/model/mapping.py | 3 ++-
lib/galaxy/web/controllers/forms.py | 4 +++-
lib/galaxy/web/controllers/requests.py | 9 +++++----
lib/galaxy/web/controllers/requests_admin.py | 22 ++++++++++++----------
templates/admin/requests/show_request.mako | 2 +-
templates/requests/show_request.mako | 4 ----
test/base/twilltestcase.py | 2 +-
7 files changed, 24 insertions(+), 22 deletions(-)
diffs (191 lines):
diff -r 5b2d593d9aed -r 0e78d558fe57 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Wed Nov 04 10:04:51 2009 -0500
+++ b/lib/galaxy/model/mapping.py Wed Nov 04 10:16:20 2009 -0500
@@ -652,7 +652,8 @@
primaryjoin=( Request.table.c.user_id == User.table.c.id ),
backref="requests" ),
samples=relation( Sample,
- primaryjoin=( Request.table.c.id == Sample.table.c.request_id ) ),
+ primaryjoin=( Request.table.c.id == Sample.table.c.request_id ),
+ order_by=asc(Sample.table.c.update_time) ),
folder=relation( LibraryFolder,
primaryjoin=( Request.table.c.folder_id == LibraryFolder.table.c.id ) ),
library=relation( Library,
diff -r 5b2d593d9aed -r 0e78d558fe57 lib/galaxy/web/controllers/forms.py
--- a/lib/galaxy/web/controllers/forms.py Wed Nov 04 10:04:51 2009 -0500
+++ b/lib/galaxy/web/controllers/forms.py Wed Nov 04 10:16:20 2009 -0500
@@ -60,7 +60,9 @@
form_type_selectbox.add_option('Select one', 'none', selected=True)
else:
form_type_selectbox.add_option('Select one', 'none')
- for ft in trans.app.model.FormDefinition.types.items():
+ fd_types = trans.app.model.FormDefinition.types.items()
+ fd_types.sort()
+ for ft in fd_types:
if selected == ft[1]:
form_type_selectbox.add_option(ft[1], ft[1], selected=True)
else:
diff -r 5b2d593d9aed -r 0e78d558fe57 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Wed Nov 04 10:04:51 2009 -0500
+++ b/lib/galaxy/web/controllers/requests.py Wed Nov 04 10:16:20 2009 -0500
@@ -304,8 +304,7 @@
s = trans.app.model.Sample(sample_name, '', request, form_values)
s.flush()
else:
- for index in range(len(current_samples)):
- sample_index = index
+ for sample_index in range(len(current_samples)):
sample_name = current_samples[sample_index][0]
new_sample_name = util.restore_text( params.get( 'sample_%i_name' % sample_index, '' ) )
sample_values = []
@@ -379,8 +378,10 @@
details=details,
edit_mode=edit_mode)
def __select_request_type(self, trans, rtid):
+ requesttype_list = trans.sa_session.query( trans.app.model.RequestType )\
+ .order_by( trans.app.model.RequestType.name.asc() )
rt_ids = ['none']
- for rt in trans.sa_session.query( trans.app.model.RequestType ):
+ for rt in requesttype_list:
if not rt.deleted:
rt_ids.append(str(rt.id))
select_reqtype = SelectField('select_request_type',
@@ -390,7 +391,7 @@
select_reqtype.add_option('Select one', 'none', selected=True)
else:
select_reqtype.add_option('Select one', 'none')
- for rt in trans.sa_session.query( trans.app.model.RequestType ):
+ for rt in requesttype_list:
if not rt.deleted:
if rtid == rt.id:
select_reqtype.add_option(rt.name, rt.id, selected=True)
diff -r 5b2d593d9aed -r 0e78d558fe57 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Wed Nov 04 10:04:51 2009 -0500
+++ b/lib/galaxy/web/controllers/requests_admin.py Wed Nov 04 10:16:20 2009 -0500
@@ -266,8 +266,10 @@
#---- Request Creation ----------------------------------------------------------
#
def __select_request_type(self, trans, rtid):
+ requesttype_list = trans.sa_session.query( trans.app.model.RequestType )\
+ .order_by( trans.app.model.RequestType.name.asc() )
rt_ids = ['none']
- for rt in trans.sa_session.query( trans.app.model.RequestType ):
+ for rt in requesttype_list:
if not rt.deleted:
rt_ids.append(str(rt.id))
select_reqtype = SelectField('select_request_type',
@@ -277,7 +279,7 @@
select_reqtype.add_option('Select one', 'none', selected=True)
else:
select_reqtype.add_option('Select one', 'none')
- for rt in trans.sa_session.query( trans.app.model.RequestType ):
+ for rt in requesttype_list:
if not rt.deleted:
if rtid == rt.id:
select_reqtype.add_option(rt.name, rt.id, selected=True)
@@ -799,7 +801,7 @@
# save all the new/unsaved samples entered by the user
if edit_mode == 'False':
for index in range(len(current_samples)-len(request.samples)):
- sample_index = index + len(request.samples)
+ sample_index = len(request.samples)
sample_name = util.restore_text( params.get( 'sample_%i_name' % sample_index, '' ) )
sample_values = []
for field_index in range(len(request.type.sample_form.fields)):
@@ -992,7 +994,7 @@
bar_code = util.restore_text(params.get('sample_%i_bar_code' % index, ''))
# check for empty bar code
if not bar_code.strip():
- msg = 'Please fill the bar code for sample <b>%s</b>.' % request.samples[index].name
+ msg = 'Please fill the barcode for sample <b>%s</b>.' % request.samples[index].name
break
# check all the unsaved bar codes
count = 0
@@ -1000,8 +1002,8 @@
if bar_code == util.restore_text(params.get('sample_%i_bar_code' % i, '')):
count = count + 1
if count > 1:
- msg = '''The bar code <b>%s</b> of sample <b>%s</b> already belongs
- another sample in this request. The sample bar codes must
+ msg = '''The barcode <b>%s</b> of sample <b>%s</b> belongs
+ another sample in this request. The sample barcodes must
be unique throughout the system''' % \
(bar_code, request.samples[index].name)
break
@@ -1009,7 +1011,7 @@
all_samples = trans.sa_session.query( trans.app.model.Sample )
for sample in all_samples:
if bar_code == sample.bar_code:
- msg = '''The bar code <b>%s</b> of sample <b>%s</b> already
+ msg = '''The bar code <b>%s</b> of sample <b>%s</b>
belongs another sample. The sample bar codes must be
unique throughout the system''' % \
(bar_code, request.samples[index].name)
@@ -1044,7 +1046,7 @@
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='bar_codes',
request_id=request.id,
- msg='Bar codes has been saved for this request',
+ msg='Bar codes have been saved for this request',
messagetype='done'))
def __set_request_state(self, request):
@@ -1162,8 +1164,8 @@
if params.get( 'create', False ):
return trans.fill_template( '/admin/requests/create_request_type.mako',
request_forms=get_all_forms( trans,
- filter=dict(deleted=False),
- form_type=trans.app.model.FormDefinition.types.REQUEST ),
+ filter=dict(deleted=False),
+ form_type=trans.app.model.FormDefinition.types.REQUEST ),
sample_forms=get_all_forms( trans,
filter=dict(deleted=False),
form_type=trans.app.model.FormDefinition.types.SAMPLE ),
diff -r 5b2d593d9aed -r 0e78d558fe57 templates/admin/requests/show_request.mako
--- a/templates/admin/requests/show_request.mako Wed Nov 04 10:04:51 2009 -0500
+++ b/templates/admin/requests/show_request.mako Wed Nov 04 10:16:20 2009 -0500
@@ -195,7 +195,7 @@
<div class="toolForm">
##<div class="toolFormTitle">Samples (${len(request.samples)})</div>
- <form id="edit_form" name="edit_form" action="${h.url_for( controller='requests_admin', action='show_request' )}" enctype="multipart/form-data" method="post" >
+ <form id="show_request" name="show_request" action="${h.url_for( controller='requests_admin', action='show_request', edit_mode=edit_mode )}" enctype="multipart/form-data" method="post" >
<div class="form-row">
%if current_samples:
%if not request.type.sample_form.layout:
diff -r 5b2d593d9aed -r 0e78d558fe57 templates/requests/show_request.mako
--- a/templates/requests/show_request.mako Wed Nov 04 10:04:51 2009 -0500
+++ b/templates/requests/show_request.mako Wed Nov 04 10:16:20 2009 -0500
@@ -224,20 +224,16 @@
%endif
</td>
<td>
- ##<div class="form-row">
<label>Import from csv file</label>
<input type="file" name="file_data" />
<input type="submit" name="import_samples_button" value="Import samples"/>
- ##</div>
</td>
<td>
- ##<div class="form-row">
%if current_samples:
<label>Copy from sample</label>
${sample_copy.get_html()}
%endif
<input type="submit" name="add_sample_button" value="Add New"/>
- ##</div>
</td>
</div>
</tr>
diff -r 5b2d593d9aed -r 0e78d558fe57 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Nov 04 10:04:51 2009 -0500
+++ b/test/base/twilltestcase.py Wed Nov 04 10:16:20 2009 -0500
@@ -1272,7 +1272,7 @@
for index, bar_code in enumerate(bar_codes):
tc.fv( "1", "sample_%i_bar_code" % index, bar_code )
tc.submit( "save_bar_codes" )
- self.check_page_for_string( 'Bar codes has been saved for this request' )
+ self.check_page_for_string( 'Bar codes have been saved for this request' )
def change_sample_state( self, sample_name, sample_id, new_state_id, new_state_name, comment='' ):
self.home()
self.visit_url( "%s/requests_admin/show_events?sample_id=%i" % (self.url, sample_id) )
1
0
07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/d7c66019de13
changeset: 2955:d7c66019de13
user: Kanwei Li <kanwei(a)gmail.com>
date: Tue Nov 03 19:16:01 2009 -0500
description:
trackster: much greater resolution for line tracks (might still need to tweak BLOCK_SIZE); fixed stacking issue in feature tracks for features sharing the same name
diffstat:
lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py | 4 +-
lib/galaxy/visualization/tracks/data/array_tree.py | 45 ++++++-----
lib/galaxy/visualization/tracks/data/interval_index.py | 6 +-
lib/galaxy/web/controllers/tracks.py | 6 +-
static/scripts/trackster.js | 46 +++++++----
static/trackster.css | 1 +
templates/tracks/browser.mako | 1 +
7 files changed, 65 insertions(+), 44 deletions(-)
diffs (293 lines):
diff -r 46791b5a653b -r d7c66019de13 lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py
--- a/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py Tue Nov 03 17:16:40 2009 -0500
+++ b/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py Tue Nov 03 19:16:01 2009 -0500
@@ -8,6 +8,8 @@
from bx.arrays.array_tree import *
from bx.arrays.wiggle import IntervalReader
+BLOCK_SIZE = 1000
+
def main():
input_fname = sys.argv[1]
@@ -16,7 +18,7 @@
reader = IntervalReader( open( input_fname ) )
# Fill array from wiggle
- d = array_tree_dict_from_wiggle_reader( reader, {} )
+ d = array_tree_dict_from_wiggle_reader( reader, {}, block_size = BLOCK_SIZE )
for value in d.itervalues():
value.root.build_summary()
diff -r 46791b5a653b -r d7c66019de13 lib/galaxy/visualization/tracks/data/array_tree.py
--- a/lib/galaxy/visualization/tracks/data/array_tree.py Tue Nov 03 17:16:40 2009 -0500
+++ b/lib/galaxy/visualization/tracks/data/array_tree.py Tue Nov 03 19:16:01 2009 -0500
@@ -31,11 +31,16 @@
f.close()
return { 'max': float( max(root_summary.maxs) ), 'min': float( min(root_summary.mins) ) }
- def get_data( self, chrom, start, end ):
+ def get_data( self, chrom, start, end, **kwargs ):
start = int( start )
end = int( end )
- level = int( ceil( log( end - start, BLOCK_SIZE ) ) ) - 1
-
+ resolution = max(1, ceil(float(kwargs['resolution'])))
+
+ level = int( floor( log( resolution, BLOCK_SIZE ) ) )
+ level = max( level, 0 )
+ stepsize = BLOCK_SIZE ** level
+ step1 = stepsize * BLOCK_SIZE
+
# Open the file
f = open( self.dataset.file_name )
d = FileArrayTreeDict( f )
@@ -47,22 +52,20 @@
# Is the requested level valid?
assert 0 <= level <= chrom_array_tree.levels
# Calculate the actual start/range/step of the block we're getting
- size = BLOCK_SIZE ** (level+1)
- block_start = ( start // BLOCK_SIZE ) * BLOCK_SIZE
- block_step = size // BLOCK_SIZE
- indexes = range( block_start, block_start + size, block_step )
- # Return either data point or a summary depending on the level
- if level > 0:
- s = chrom_array_tree.get_summary( start, level )
- f.close()
- if s is not None:
- return zip( indexes, map( float, s.sums / s.counts ) )
+
+ results = []
+ for block_start in range( start, end, stepsize * BLOCK_SIZE ):
+ # print block_start
+ # Return either data point or a summary depending on the level
+ indexes = range( block_start, block_start + stepsize * BLOCK_SIZE, stepsize )
+ if level > 0:
+ s = chrom_array_tree.get_summary( block_start, level )
+ if s is not None:
+ results.extend( zip( indexes, map( float, s.sums / s.counts ) ) )
else:
- return None
- else:
- v = chrom_array_tree.get_leaf( start )
- f.close()
- if v is not None:
- return zip( indexes, map( float, v ) )
- else:
- return None
\ No newline at end of file
+ v = chrom_array_tree.get_leaf( block_start )
+ if v is not None:
+ results.extend( zip( indexes, map( float, v ) ) )
+
+ f.close()
+ return results
diff -r 46791b5a653b -r d7c66019de13 lib/galaxy/visualization/tracks/data/interval_index.py
--- a/lib/galaxy/visualization/tracks/data/interval_index.py Tue Nov 03 17:16:40 2009 -0500
+++ b/lib/galaxy/visualization/tracks/data/interval_index.py Tue Nov 03 19:16:01 2009 -0500
@@ -11,17 +11,18 @@
self.original_dataset = original_dataset
self.converted_dataset = converted_dataset
- def get_data( self, chrom, start, end ):
+ def get_data( self, chrom, start, end, **kwargs ):
start, end = int(start), int(end)
chrom = str(chrom)
source = open( self.original_dataset.file_name )
index = Indexes( self.converted_dataset.file_name )
results = []
+ uid = 0
for start, end, offset in index.find(chrom, start, end):
source.seek(offset)
feature = source.readline().split()
- payload = { 'start': start, 'end': end, 'name': feature[3] }
+ payload = { 'uid': uid, 'start': start, 'end': end, 'name': feature[3] }
try:
payload['strand'] = feature[5]
except IndexError:
@@ -41,5 +42,6 @@
pass
results.append(payload)
+ uid += 1
return results
diff -r 46791b5a653b -r d7c66019de13 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Tue Nov 03 17:16:40 2009 -0500
+++ b/lib/galaxy/web/controllers/tracks.py Tue Nov 03 19:16:01 2009 -0500
@@ -184,7 +184,7 @@
return manifest
@web.json
- def data( self, trans, dataset_id, track_type, chrom, low, high, stats=False ):
+ def data( self, trans, dataset_id, track_type, chrom, low, high, **kwargs ):
"""
Called by the browser to request a block of data
"""
@@ -216,10 +216,10 @@
data_provider = dataset_type_to_data_provider[ converted_dataset_type ]( converted_dataset, dataset )
# Return stats if we need them
- if stats: return data_provider.get_stats( chrom )
+ if 'stats' in kwargs: return data_provider.get_stats( chrom )
# Get the requested chunk of data
- return data_provider.get_data( chrom, low, high )
+ return data_provider.get_data( chrom, low, high, **kwargs )
def __dataset_as_type( self, trans, dataset, type ):
"""
diff -r 46791b5a653b -r d7c66019de13 static/scripts/trackster.js
--- a/static/scripts/trackster.js Tue Nov 03 17:16:40 2009 -0500
+++ b/static/scripts/trackster.js Tue Nov 03 19:16:01 2009 -0500
@@ -1,13 +1,15 @@
/* Trackster
2009, James Taylor, Kanwei Li
*/
+var DEBUG = false;
var DENSITY = 1000,
DATA_ERROR = "There was an error in indexing this dataset.",
DATA_NONE = "No data for this chrom/contig.",
DATA_PENDING = "Currently indexing... please wait",
DATA_LOADING = "Loading data...",
- CACHED_TILES = 10,
+ CACHED_TILES_FEATURE = 10,
+ CACHED_TILES_LINE = 30,
CACHED_DATA = 20,
CONTEXT = $("<canvas></canvas>").get(0).getContext("2d"),
RIGHT_STRAND, LEFT_STRAND;
@@ -104,6 +106,9 @@
this.high = Math.ceil(high);
this.center = Math.round( this.low + (this.high - this.low) / 2 );
+ // 10^log10(range / DENSITY) Close approximation for browser window, assuming DENSITY = window width
+ this.resolution = Math.pow( 10, Math.ceil( Math.log( (this.high - this.low) / DENSITY ) / Math.LN10 ) );
+
// Overview
$("#overview-box").css( {
left: ( this.low / this.span ) * $("#overview-viewport").width(),
@@ -157,18 +162,16 @@
});
var TiledTrack = function() {
- this.tile_cache = new Cache(CACHED_TILES);
- // this.tile_cache = {};
};
$.extend( TiledTrack.prototype, Track.prototype, {
draw: function() {
var low = this.view.low,
high = this.view.high,
- range = high - low;
-
- var resolution = Math.pow( 10, Math.ceil( Math.log( range / DENSITY ) / Math.log( 10 ) ) );
- resolution = Math.max( resolution, 0.1 );
- resolution = Math.min( resolution, 1000000 );
+ range = high - low,
+ resolution = this.view.resolution;
+
+
+ if (DEBUG) { $("#debug").text(resolution); }
var parent_element = $("<div style='position: relative;'></div>");
this.content_div.children( ":first" ).remove();
@@ -187,7 +190,7 @@
// console.log("cached tile " + tile_index);
var tile_low = tile_index * DENSITY * resolution;
cached.css( {
- left: ( tile_low - this.view.low ) * w_scale
+ left: ( tile_low - low ) * w_scale
});
// Our responsibility to move the element to the new parent
parent_element.append( cached );
@@ -229,6 +232,7 @@
});
var LineTrack = function ( name, dataset_id, height ) {
+ this.tile_cache = new Cache(CACHED_TILES_LINE);
Track.call( this, name, $("#viewport") );
TiledTrack.call( this );
@@ -236,6 +240,7 @@
this.height_px = (height ? height : 100);
this.container_div.addClass( "line-track" );
this.dataset_id = dataset_id;
+ this.data_queue = {};
this.cache = new Cache(CACHED_DATA); // We need to cache some data because of
// asynchronous calls
};
@@ -282,11 +287,17 @@
low = position * DENSITY * resolution,
high = ( position + 1 ) * DENSITY * resolution,
key = resolution + "_" + position;
-
- $.getJSON( data_url, { track_type: this.track_type, chrom: this.view.chrom, low: low, high: high, dataset_id: this.dataset_id }, function ( data ) {
- track.cache[key] = data;
- $(document).trigger( "redraw" );
- });
+
+ if (!track.data_queue[key]) {
+ track.data_queue[key] = true;
+ $.getJSON( data_url, { track_type: this.track_type, chrom: this.view.chrom,
+ low: low, high: high, dataset_id: this.dataset_id,
+ resolution: this.view.resolution }, function ( data ) {
+ track.cache[key] = data;
+ delete track.data_queue[key];
+ track.draw();
+ });
+ }
},
draw_tile: function( resolution, tile_index, parent_element, w_scale ) {
if (!this.vertical_range) { // We don't have the necessary information yet
@@ -340,6 +351,7 @@
});
var FeatureTrack = function ( name, dataset_id, height ) {
+ this.tile_cache = new Cache(CACHED_TILES_FEATURE);
Track.call( this, name, $("#viewport") );
TiledTrack.call( this );
@@ -409,9 +421,9 @@
if (end_ary[j] === undefined || end_ary[j] < f_start) {
end_ary[j] = f_end;
if (include_labels) {
- this.zi_slots[feature.name] = j;
+ this.zi_slots[feature.uid] = j;
} else {
- this.zo_slots[feature.name] = j;
+ this.zo_slots[feature.uid] = j;
}
break;
}
@@ -466,7 +478,7 @@
if (feature.start <= tile_high && feature.end >= tile_low) {
var f_start = Math.floor( Math.max(0, (feature.start - tile_low) * w_scale) ),
f_end = Math.ceil( Math.min(width, (feature.end - tile_low) * w_scale) ),
- y_center = this.slots[feature.name] * this.vertical_gap;
+ y_center = this.slots[feature.uid] * this.vertical_gap;
var thickness, y_start, thick_start = null, thick_end = null;
if (feature.thick_start && feature.thick_end) {
diff -r 46791b5a653b -r d7c66019de13 static/trackster.css
--- a/static/trackster.css Tue Nov 03 17:16:40 2009 -0500
+++ b/static/trackster.css Tue Nov 03 19:16:01 2009 -0500
@@ -25,6 +25,7 @@
font-size: 100%;
}
+/*canvas{ border-right: 1px solid red; } /* debugging */
#nav {
padding: 0.5em 0;
background:#cccccc;
diff -r 46791b5a653b -r d7c66019de13 templates/tracks/browser.mako
--- a/templates/tracks/browser.mako Tue Nov 03 17:16:40 2009 -0500
+++ b/templates/tracks/browser.mako Tue Nov 03 19:16:01 2009 -0500
@@ -122,6 +122,7 @@
<a href="#" onclick="javascript:view.zoom_in();view.redraw();">+</a>
<a href="#" onclick="javascript:view.zoom_out();view.redraw();">-</a>
</form>
+ <div id="debug" style="float: right"></div>
</div>
</div>
</div>
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/d872c1e16afb
changeset: 2950:d872c1e16afb
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Nov 03 12:52:01 2009 -0500
description:
imported patch alchemy05_fixes_02
diffstat:
lib/galaxy/jobs/__init__.py | 6 +++---
lib/galaxy/model/mapping.py | 4 ++--
lib/galaxy/model/mapping_tests.py | 15 ++++++++-------
lib/galaxy/model/migrate/versions/0025_user_info.py | 4 ++--
lib/galaxy/tools/actions/__init__.py | 7 ++++---
lib/galaxy/tools/actions/metadata.py | 5 +++--
lib/galaxy/tools/actions/upload_common.py | 4 +++-
lib/galaxy/web/controllers/async.py | 6 +++---
lib/galaxy/web/controllers/dataset.py | 4 ++--
lib/galaxy/web/controllers/library.py | 6 +++---
lib/galaxy/web/controllers/library_admin.py | 12 ++++++++----
lib/galaxy/web/controllers/requests.py | 2 +-
lib/galaxy/web/controllers/root.py | 10 +++++-----
lib/galaxy/web/controllers/tool_runner.py | 2 --
lib/galaxy/web/framework/__init__.py | 23 ++++++++++++++---------
test/functional/test_forms_and_requests.py | 1 +
test/functional/test_security_and_libraries.py | 2 +-
test/functional/test_user_info.py | 3 ++-
tools/data_source/microbial_import_code.py | 6 +++---
tools/filters/lav_to_bed_code.py | 1 -
tools/maf/maf_to_bed_code.py | 4 +---
21 files changed, 69 insertions(+), 58 deletions(-)
diffs (567 lines):
diff -r 133252175425 -r d872c1e16afb lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/jobs/__init__.py Tue Nov 03 12:52:01 2009 -0500
@@ -199,7 +199,7 @@
try:
# Clear the session for each job so we get fresh states for
# job and all datasets
- self.sa_session.clear()
+ self.sa_session.expunge_all()
# Get the real job entity corresponding to the wrapper (if we
# are tracking in the database this is probably cached in
# the session from the origianl query above)
@@ -346,7 +346,7 @@
Prepare the job to run by creating the working directory and the
config files.
"""
- self.sa_session.clear() #this prevents the metadata reverting that has been seen in conjunction with the PBS job runner
+ self.sa_session.expunge_all() #this prevents the metadata reverting that has been seen in conjunction with the PBS job runner
if not os.path.exists( self.working_directory ):
os.mkdir( self.working_directory )
# Restore parameters from the database
@@ -477,7 +477,7 @@
the contents of the output files.
"""
# default post job setup
- self.sa_session.clear()
+ self.sa_session.expunge_all()
job = self.sa_session.query( model.Job ).get( self.job_id )
# if the job was deleted, don't finish it
if job.state == job.states.DELETED:
diff -r 133252175425 -r d872c1e16afb lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/model/mapping.py Tue Nov 03 12:52:01 2009 -0500
@@ -817,7 +817,7 @@
user=relation( User, backref="roles" ),
non_private_roles=relation( User,
backref="non_private_roles",
- primaryjoin=( ( User.table.c.id == UserRoleAssociation.table.c.user_id ) & ( UserRoleAssociation.table.c.role_id == Role.table.c.id ) & not_( Role.table.c.name == User.table.c.email & Role.table.c.type == 'private' ) ) ),
+ primaryjoin=( ( User.table.c.id == UserRoleAssociation.table.c.user_id ) & ( UserRoleAssociation.table.c.role_id == Role.table.c.id ) & not_( Role.table.c.name == User.table.c.email ) ) ),
role=relation( Role )
)
)
@@ -1134,7 +1134,7 @@
# Pack everything into a bunch
result = Bunch( **globals() )
result.engine = engine
- result.flush = lambda *args, **kwargs: Session.flush( *args, **kwargs )
+ # model.flush() has been removed.
result.session = Session
# For backward compatibility with "model.context.current"
result.context = Session
diff -r 133252175425 -r d872c1e16afb lib/galaxy/model/mapping_tests.py
--- a/lib/galaxy/model/mapping_tests.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/model/mapping_tests.py Tue Nov 03 12:52:01 2009 -0500
@@ -19,16 +19,17 @@
d1 = model.HistoryDatasetAssociation( extension="interval", metadata=dict(chromCol=1,startCol=2,endCol=3 ), history=h2, create_dataset=True )
#h2.queries.append( q1 )
#h2.queries.append( model.Query( "h2->q2" ) )
- model.context.current.flush()
- model.context.current.clear()
+ model.session.add_all( ( u, h1, h2, d1 ) )
+ model.session.flush()
+ model.session.expunge_all()
# Check
- users = model.context.current.query( model.User ).all()
+ users = model.session.query( model.User ).all()
assert len( users ) == 1
assert users[0].email == "james(a)foo.bar.baz"
assert users[0].password == "password"
assert len( users[0].histories ) == 1
assert users[0].histories[0].name == "History 1"
- hists = model.context.current.query( model.History ).all()
+ hists = model.session.query( model.History ).all()
assert hists[0].name == "History 1"
assert hists[1].name == ( "H" * 255 )
assert hists[0].user == users[0]
@@ -38,9 +39,9 @@
assert hists[1].datasets[0].file_name == os.path.join( "/tmp", *directory_hash_id( id ) ) + ( "/dataset_%d.dat" % id )
# Do an update and check
hists[1].name = "History 2b"
- model.context.current.flush()
- model.context.current.clear()
- hists = model.context.current.query( model.History ).all()
+ model.session.flush()
+ model.session.expunge_all()
+ hists = model.session.query( model.History ).all()
assert hists[0].name == "History 1"
assert hists[1].name == "History 2b"
# gvk TODO need to ad test for GalaxySessions, but not yet sure what they should look like.
diff -r 133252175425 -r d872c1e16afb lib/galaxy/model/migrate/versions/0025_user_info.py
--- a/lib/galaxy/model/migrate/versions/0025_user_info.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0025_user_info.py Tue Nov 03 12:52:01 2009 -0500
@@ -21,7 +21,7 @@
log.addHandler( handler )
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
def display_migration_details():
print "========================================"
@@ -59,4 +59,4 @@
except Exception, e:
log.debug( "Adding foreign key constraint 'user_form_values_id_fk' to table 'galaxy_user' failed: %s" % ( str( e ) ) )
def downgrade():
- pass
\ No newline at end of file
+ pass
diff -r 133252175425 -r d872c1e16afb lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/tools/actions/__init__.py Tue Nov 03 12:52:01 2009 -0500
@@ -235,7 +235,7 @@
# Store output
out_data[ name ] = data
# Store all changes to database
- trans.app.model.flush()
+ trans.sa_session.flush()
# Add all the top-level (non-child) datasets to the history
for name in out_data.keys():
if name not in child_dataset_names and name not in incoming: #don't add children; or already existing datasets, i.e. async created
@@ -248,7 +248,7 @@
child_dataset = out_data[ child_name ]
parent_dataset.children.append( child_dataset )
# Store data after custom code runs
- trans.app.model.flush()
+ trans.sa_session.flush()
# Create the job object
job = trans.app.model.Job()
job.session_id = trans.get_galaxy_session().id
@@ -274,7 +274,8 @@
job.add_input_dataset( name, None )
for name, dataset in out_data.iteritems():
job.add_output_dataset( name, dataset )
- trans.app.model.flush()
+ trans.sa_session.add( job )
+ trans.sa_session.flush()
# Some tools are not really executable, but jobs are still created for them ( for record keeping ).
# Examples include tools that redirect to other applications ( epigraph ). These special tools must
# include something that can be retrieved from the params ( e.g., REDIRECT_URL ) to keep the job
diff -r 133252175425 -r d872c1e16afb lib/galaxy/tools/actions/metadata.py
--- a/lib/galaxy/tools/actions/metadata.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/tools/actions/metadata.py Tue Nov 03 12:52:01 2009 -0500
@@ -26,7 +26,8 @@
job.tool_version = tool.version
except:
job.tool_version = "1.0.0"
- job.flush() #ensure job.id is available
+ trans.sa_session.add( job )
+ trans.sa_session.flush() #ensure job.id is available
#add parameters to job_parameter table
# Store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)?
@@ -49,7 +50,7 @@
#Need a special state here to show that metadata is being set and also allow the job to run
# i.e. if state was set to 'running' the set metadata job would never run, as it would wait for input (the dataset to set metadata on) to be in a ready state
dataset.state = dataset.states.SETTING_METADATA
- trans.app.model.flush()
+ trans.sa_session.flush()
# Queue the job for execution
trans.app.job_queue.put( job.id, tool )
diff -r 133252175425 -r d872c1e16afb lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/tools/actions/upload_common.py Tue Nov 03 12:52:01 2009 -0500
@@ -121,6 +121,7 @@
trans.history.add_dataset( hda, genome_build = uploaded_dataset.dbkey )
permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions )
+ trans.sa_session.flush()
return hda
def new_library_upload( trans, uploaded_dataset, library_bunch, state=None ):
@@ -291,7 +292,8 @@
for i, dataset in enumerate( data_list ):
job.add_output_dataset( 'output%i' % i, dataset )
job.state = job.states.NEW
- trans.app.model.flush()
+ trans.sa_session.add( job )
+ trans.sa_session.flush()
# Queue the job for execution
trans.app.job_queue.put( job.id, tool )
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/async.py
--- a/lib/galaxy/web/controllers/async.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/async.py Tue Nov 03 12:52:01 2009 -0500
@@ -77,7 +77,7 @@
data.state = data.blurb = jobs.JOB_ERROR
data.info = "Error -> %s" % STATUS
- trans.model.flush()
+ trans.sa_session.flush()
return "Data %s with status %s received. OK" % (data_id, STATUS)
@@ -112,7 +112,7 @@
data.flush()
open( data.file_name, 'wb' ).close() #create the file
trans.history.add_dataset( data, genome_build=GALAXY_BUILD )
- trans.model.flush()
+ trans.sa_session.flush()
trans.log_event( "Added dataset %d to history %d" %(data.id, trans.history.id ), tool_id=tool_id )
try:
@@ -132,6 +132,6 @@
data.info = str(e)
data.state = data.blurb = data.states.ERROR
- trans.model.flush()
+ trans.sa_session.flush()
return trans.fill_template('tool_executed.tmpl', out_data={}, tool=tool, config=self.app.config )
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/dataset.py Tue Nov 03 12:52:01 2009 -0500
@@ -335,7 +335,7 @@
assert topmost_parent in history.datasets, "Data does not belong to current history"
# Mark undeleted
data.mark_undeleted()
- self.app.model.flush()
+ trans.sa_session.flush()
trans.log_event( "Dataset id %s has been undeleted" % str(id) )
return True
return False
@@ -407,7 +407,7 @@
hist.add_dataset( data.copy( copy_children = True ) )
if history in target_histories:
refresh_frames = ['history']
- trans.app.model.flush()
+ trans.sa_session.flush()
done_msg = "%i datasets copied to %i histories." % ( len( source_dataset_ids ) - invalid_datasets, len( target_histories ) )
trans.sa_session.refresh( history )
elif create_new_history:
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/library.py Tue Nov 03 12:52:01 2009 -0500
@@ -421,7 +421,7 @@
if trans.app.security_agent.can_modify_library_item( user, roles, ldda ):
if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = "Data type changed for library dataset '%s'" % ldda.name
messagetype = 'done'
else:
@@ -463,7 +463,7 @@
setattr( ldda.metadata, name, spec.unwrap( params.get ( name, None ) ) )
ldda.metadata.dbkey = dbkey
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
else:
@@ -486,7 +486,7 @@
setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
ldda.datatype.set_meta( ldda )
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
else:
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/library_admin.py Tue Nov 03 12:52:01 2009 -0500
@@ -432,7 +432,7 @@
# The user clicked the Save button on the 'Change data type' form
if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = "Data type changed for library dataset '%s'" % ldda.name
return trans.fill_template( "/admin/library/ldda_edit_info.mako",
ldda=ldda,
@@ -469,7 +469,7 @@
setattr( ldda.metadata, name, spec.unwrap( params.get ( name, None ) ) )
ldda.metadata.dbkey = dbkey
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
return trans.fill_template( "/admin/library/ldda_edit_info.mako",
@@ -488,7 +488,7 @@
setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
ldda.datatype.set_meta( ldda )
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
return trans.fill_template( "/admin/library/ldda_edit_info.mako",
ldda=ldda,
@@ -674,6 +674,10 @@
replace_id = params.get( 'replace_id', None )
if replace_id not in [ None, 'None' ]:
replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( int( replace_id ) )
+            # The name is stored separately - by the time the new ldda is created,
+            # replace_dataset.name will point to the new ldda, not the one it's
+            # replacing.
+ replace_dataset_name = replace_dataset.name
if not last_used_build:
last_used_build = replace_dataset.library_dataset_dataset_association.dbkey
# Don't allow multiple datasets to be uploaded when replacing a dataset with a new version
@@ -701,7 +705,7 @@
if created_outputs:
total_added = len( created_outputs.values() )
if replace_dataset:
- msg = "Added %d dataset versions to the library dataset '%s' in the folder '%s'." % ( total_added, replace_dataset.name, folder.name )
+ msg = "Added %d dataset versions to the library dataset '%s' in the folder '%s'." % ( total_added, replace_dataset_name, folder.name )
else:
if not folder.parent:
# Libraries have the same name as their root_folder
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/requests.py Tue Nov 03 12:52:01 2009 -0500
@@ -294,7 +294,7 @@
# save all the new/unsaved samples entered by the user
if edit_mode == 'False':
for index in range(len(current_samples)-len(request.samples)):
- sample_index = index + len(request.samples)
+ sample_index = len(request.samples)
sample_name = util.restore_text( params.get( 'sample_%i_name' % sample_index, '' ) )
sample_values = []
for field_index in range(len(request.type.sample_form.fields)):
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/root.py Tue Nov 03 12:52:01 2009 -0500
@@ -277,7 +277,7 @@
if not __ok_to_edit_metadata( data.id ):
return trans.show_error_message( "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them." )
trans.app.datatypes_registry.change_datatype( data, params.datatype )
- trans.app.model.flush()
+ trans.sa_session.flush()
else:
return trans.show_error_message( "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype ) )
elif params.save:
@@ -303,7 +303,7 @@
data.datatype.after_edit( data )
else:
msg = ' (Metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata.)'
- trans.app.model.flush()
+ trans.sa_session.flush()
return trans.show_ok_message( "Attributes updated%s" % msg, refresh_frames=['history'] )
elif params.detect:
# The user clicked the Auto-detect button on the 'Edit Attributes' form
@@ -322,7 +322,7 @@
msg = 'Attributes updated'
data.set_meta()
data.datatype.after_edit( data )
- trans.app.model.flush()
+ trans.sa_session.flush()
return trans.show_ok_message( msg, refresh_frames=['history'] )
elif params.convert_data:
target_type = kwd.get("target_type", None)
@@ -383,7 +383,7 @@
if job.check_if_output_datasets_deleted():
job.mark_deleted()
self.app.job_manager.job_stop_queue.put( job.id )
- self.app.model.flush()
+ trans.sa_session.flush()
@web.expose
def delete( self, trans, id = None, show_deleted_on_refresh = False, **kwd):
@@ -432,7 +432,7 @@
for dataset in history.datasets:
dataset.deleted = True
dataset.clear_associated_files()
- self.app.model.flush()
+ trans.sa_session.flush()
trans.log_event( "History id %s cleared" % (str(history.id)) )
trans.response.send_redirect( url_for("/index" ) )
@web.expose
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/tool_runner.py Tue Nov 03 12:52:01 2009 -0500
@@ -198,8 +198,6 @@
# pasted data
datasets.append( create_dataset( 'Pasted Entry' ) )
break
- if datasets:
- trans.model.flush()
return [ d.id for d in datasets ]
@web.expose
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/framework/__init__.py Tue Nov 03 12:52:01 2009 -0500
@@ -139,7 +139,7 @@
self.__galaxy_session = NOT_SET
base.DefaultWebTransaction.__init__( self, environ )
self.setup_i18n()
- self.sa_session.clear()
+ self.sa_session.expunge_all()
self.debug = asbool( self.app.config.get( 'debug', False ) )
# Flag indicating whether we are in workflow building mode (means
# that the current history should not be used for parameter values
@@ -302,12 +302,12 @@
self.galaxy_session = galaxy_session
# Do we need to flush the session?
if galaxy_session_requires_flush:
- objects_to_flush = [ galaxy_session ]
+ sa_session.add( galaxy_session )
# FIXME: If prev_session is a proper relation this would not
# be needed.
if prev_galaxy_session:
- objects_to_flush.append( prev_galaxy_session )
- sa_session.flush( objects_to_flush )
+ sa_session.add( prev_galaxy_session )
+ sa_session.flush()
# If the old session was invalid, get a new history with our new session
if invalidate_existing_session:
self.new_history()
@@ -427,7 +427,8 @@
if not last_accessed:
# Only set default history permissions if current history is not from a previous session
self.app.security_agent.history_set_default_permissions( history, dataset=True, bypass_manage_permission=True )
- self.sa_session.flush( [ prev_galaxy_session, self.galaxy_session, history ] )
+ self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session, history ) )
+ self.sa_session.flush()
# This method is not called from the Galaxy reports, so the cookie will always be galaxysession
self.__update_session_cookie( name='galaxysession' )
def handle_user_logout( self ):
@@ -439,7 +440,8 @@
prev_galaxy_session = self.galaxy_session
prev_galaxy_session.is_valid = False
self.galaxy_session = self.__create_new_session( prev_galaxy_session )
- self.sa_session.flush( [ prev_galaxy_session, self.galaxy_session ] )
+ self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
+ self.sa_session.flush()
# This method is not called from the Galaxy reports, so the cookie will always be galaxysession
self.__update_session_cookie( name='galaxysession' )
@@ -466,7 +468,8 @@
def set_history( self, history ):
if history and not history.deleted:
self.galaxy_session.current_history = history
- self.sa_session.flush( [ self.galaxy_session ] )
+ self.sa_session.add( self.galaxy_session )
+ self.sa_session.flush()
history = property( get_history, set_history )
def new_history( self, name=None ):
"""
@@ -489,7 +492,8 @@
# Set the user's default history permissions
self.app.security_agent.history_set_default_permissions( history )
# Save
- self.sa_session.flush( [ self.galaxy_session, history ] )
+ self.sa_session.add_all( ( self.galaxy_session, history ) )
+ self.sa_session.flush()
return history
def get_user( self ):
@@ -498,7 +502,8 @@
def set_user( self, user ):
"""Set the current user."""
self.galaxy_session.user = user
- self.sa_session.flush( [ self.galaxy_session ] )
+ self.sa_session.add( self.galaxy_session )
+ self.sa_session.flush()
user = property( get_user, set_user )
def get_user_and_roles( self ):
diff -r 133252175425 -r d872c1e16afb test/functional/test_forms_and_requests.py
--- a/test/functional/test_forms_and_requests.py Tue Nov 03 12:52:00 2009 -0500
+++ b/test/functional/test_forms_and_requests.py Tue Nov 03 12:52:01 2009 -0500
@@ -29,6 +29,7 @@
.filter( galaxy.model.FormDefinitionCurrent.table.c.deleted==False ) \
.order_by( galaxy.model.FormDefinitionCurrent.table.c.create_time.desc() )
for fdc in fdc_list:
+ sa_session.refresh( fdc.latest_form )
if form_name == fdc.latest_form.name:
return fdc.latest_form
return None
diff -r 133252175425 -r d872c1e16afb test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Tue Nov 03 12:52:00 2009 -0500
+++ b/test/functional/test_security_and_libraries.py Tue Nov 03 12:52:01 2009 -0500
@@ -156,7 +156,7 @@
raise AssertionError( '%s not in history id %d default_permissions after they were changed' % ( value.action, latest_history.id ) )
# Add a dataset to the history
self.upload_file( '1.bed' )
- latest_dataset = galaxy.model.Dataset.query().order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
+ latest_dataset = sa_session.query( galaxy.model.Dataset ).order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
# Make sure DatasetPermissionss are correct
if len( latest_dataset.actions ) != len( latest_history.default_permissions ):
raise AssertionError( '%d DatasetPermissionss were created for dataset id %d when it was created ( should have been %d )' % \
diff -r 133252175425 -r d872c1e16afb test/functional/test_user_info.py
--- a/test/functional/test_user_info.py Tue Nov 03 12:52:00 2009 -0500
+++ b/test/functional/test_user_info.py Tue Nov 03 12:52:01 2009 -0500
@@ -14,6 +14,7 @@
.filter( galaxy.model.FormDefinitionCurrent.table.c.deleted==False ) \
.order_by( galaxy.model.FormDefinitionCurrent.table.c.create_time.desc() )
for fdc in fdc_list:
+ sa_session.refresh( fdc.latest_form )
if form_name == fdc.latest_form.name:
return fdc.latest_form
return None
@@ -146,4 +147,4 @@
self.visit_page('forms/manage?show_filter=Deleted')
self.check_page_for_string(form_one_latest.name)
self.logout()
-
\ No newline at end of file
+
diff -r 133252175425 -r d872c1e16afb tools/data_source/microbial_import_code.py
--- a/tools/data_source/microbial_import_code.py Tue Nov 03 12:52:00 2009 -0500
+++ b/tools/data_source/microbial_import_code.py Tue Nov 03 12:52:01 2009 -0500
@@ -123,7 +123,7 @@
data = app.datatypes_registry.change_datatype( data, file_type )
data.init_meta()
data.set_peek()
- app.model.flush()
+ data.flush()
elif fields[0] == "#NewFile":
description = fields[1]
chr = fields[2]
@@ -137,7 +137,7 @@
newdata.flush()
app.security_agent.copy_dataset_permissions( base_dataset.dataset, newdata.dataset )
history.add_dataset( newdata )
- app.model.flush()
+ history.flush()
try:
copyfile(filepath,newdata.file_name)
newdata.info = newdata.name
@@ -148,4 +148,4 @@
newdata.dbkey = dbkey
newdata.init_meta()
newdata.set_peek()
- app.model.flush()
+ newdata.flush()
diff -r 133252175425 -r d872c1e16afb tools/filters/lav_to_bed_code.py
--- a/tools/filters/lav_to_bed_code.py Tue Nov 03 12:52:00 2009 -0500
+++ b/tools/filters/lav_to_bed_code.py Tue Nov 03 12:52:01 2009 -0500
@@ -16,4 +16,3 @@
data.flush()
except:
continue
- app.model.flush()
\ No newline at end of file
diff -r 133252175425 -r d872c1e16afb tools/maf/maf_to_bed_code.py
--- a/tools/maf/maf_to_bed_code.py Tue Nov 03 12:52:00 2009 -0500
+++ b/tools/maf/maf_to_bed_code.py Tue Nov 03 12:52:01 2009 -0500
@@ -21,7 +21,6 @@
output_data.dbkey = dbkey
output_data.name = basic_name + " (" + dbkey + ")"
output_data.flush()
- app.model.flush()
output_data_list.append(output_data)
elif line.startswith("#FILE"):
fields = line.split("\t")
@@ -36,7 +35,6 @@
app.security_agent.copy_dataset_permissions( output_data.dataset, newdata.dataset )
newdata.flush()
history.flush()
- app.model.flush()
try:
move(filepath,newdata.file_name)
newdata.info = newdata.name
@@ -47,7 +45,7 @@
newdata.dbkey = dbkey
newdata.init_meta()
newdata.set_peek()
- app.model.flush()
+ newdata.flush()
output_data_list.append(newdata)
else:
new_stdout = new_stdout + line
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/5b2d593d9aed
changeset: 2957:5b2d593d9aed
user: rc
date: Wed Nov 04 10:04:51 2009 -0500
description:
AMQP messaging server and client files.
diffstat:
scripts/galaxy_messaging/amqp_consumer.py | 94 -------------
scripts/galaxy_messaging/client/amqp_publisher.py | 87 ++++++++++++
scripts/galaxy_messaging/client/galaxy_amq.ini.sample | 32 ++++
scripts/galaxy_messaging/client/report.bat.sample | 1 +
scripts/galaxy_messaging/client/scan.bat.sample | 1 +
scripts/galaxy_messaging/client/scan.sh.sample | 1 +
scripts/galaxy_messaging/client/scanner.py | 92 +++++++++++++
scripts/galaxy_messaging/client/scanner_interface.py | 76 ++++++++++
scripts/galaxy_messaging/galaxydb_interface.py | 151 ---------------------
scripts/galaxy_messaging/server/amqp_consumer.py | 94 +++++++++++++
scripts/galaxy_messaging/server/galaxydb_interface.py | 149 +++++++++++++++++++++
11 files changed, 533 insertions(+), 245 deletions(-)
diffs (829 lines):
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/amqp_consumer.py
--- a/scripts/galaxy_messaging/amqp_consumer.py Wed Nov 04 09:32:09 2009 -0500
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,94 +0,0 @@
-'''
-Galaxy Messaging with AMQP (RabbitMQ)
-Galaxy uses AMQ protocol to receive messages from external sources like
-bar code scanners. Galaxy has been tested against RabbitMQ AMQP implementation.
-For Galaxy to receive messages from a message queue the RabbitMQ server has
-to be set up with a user account and other parameters listed in the [galaxy:amq]
-section in the universe_wsgi.ini config file
-Once the RabbitMQ server has been setup and started with the given parameters,
-this script can be run to receive messages and update the Galaxy database accordingly
-'''
-
-import ConfigParser
-import sys, os
-import optparse
-import xml.dom.minidom
-from galaxydb_interface import GalaxyDbInterface
-
-assert sys.version_info[:2] >= ( 2, 4 )
-new_path = [ os.path.join( os.getcwd(), "lib" ) ]
-new_path.extend( sys.path[1:] ) # remove scripts/ from the path
-sys.path = new_path
-
-from galaxy import eggs
-import pkg_resources
-pkg_resources.require( "amqplib" )
-
-from amqplib import client_0_8 as amqp
-
-import logging
-logging.basicConfig(level=logging.DEBUG)
-log = logging.getLogger( 'GalaxyAMQP' )
-
-
-galaxy_config_file = 'universe_wsgi.ini'
-global dbconnstr
-
-def get_value(dom, tag_name):
- '''
- This method extracts the tag value from the xml message
- '''
- nodelist = dom.getElementsByTagName(tag_name)[0].childNodes
- rc = ""
- for node in nodelist:
- if node.nodeType == node.TEXT_NODE:
- rc = rc + node.data
- return rc
-
-def recv_callback(msg):
- dom = xml.dom.minidom.parseString(msg.body)
- barcode = get_value(dom, 'barcode')
- state = get_value(dom, 'state')
- log.debug('Barcode: '+barcode)
- log.debug('State: '+state)
- # update the galaxy db
- galaxy = GalaxyDbInterface(dbconnstr)
- sample_id = galaxy.get_sample_id(field_name='bar_code', value=barcode)
- if sample_id == -1:
- log.debug('Invalid barcode.')
- return
- galaxy.change_state(sample_id, state)
-
-def main():
- config = ConfigParser.ConfigParser()
- config.read(galaxy_config_file)
- global dbconnstr
- dbconnstr = config.get("app:main", "database_connection")
- amqp_config = {}
- for option in config.options("galaxy:amqp"):
- amqp_config[option] = config.get("galaxy:amqp", option)
- log.debug(str(amqp_config))
- conn = amqp.Connection(host=amqp_config['host']+":"+amqp_config['port'],
- userid=amqp_config['userid'],
- password=amqp_config['password'],
- virtual_host=amqp_config['virtual_host'],
- insist=False)
- chan = conn.channel()
- chan.queue_declare(queue=amqp_config['queue'], durable=True, exclusive=True, auto_delete=False)
- chan.exchange_declare(exchange=amqp_config['exchange'], type="direct", durable=True, auto_delete=False,)
- chan.queue_bind(queue=amqp_config['queue'],
- exchange=amqp_config['exchange'],
- routing_key=amqp_config['routing_key'])
-
- chan.basic_consume(queue=amqp_config['queue'],
- no_ack=True,
- callback=recv_callback,
- consumer_tag="testtag")
- while True:
- chan.wait()
- chan.basic_cancel("testtag")
- chan.close()
- conn.close()
-
-if __name__ == '__main__':
- main()
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/amqp_publisher.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/amqp_publisher.py Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,87 @@
+'''
+This script gets barcode data from a barcode scanner using serial communication
+and sends the state represented by the barcode scanner & the barcode string
+to the Galaxy LIMS RabbitMQ server. The message is sent in XML which has 2 tags,
+barcode & state. The state of the scanner should be set in the galaxy_amq.ini
+file as a configuration variable.
+'''
+
+from amqplib import client_0_8 as amqp
+import ConfigParser
+import sys, os
+import serial
+import array
+import time
+import optparse
+
+
+xml = \
+''' <sample>
+ <barcode>%(BARCODE)s</barcode>
+ <state>%(STATE)s</state>
+ </sample>'''
+
+
+def handle_scan(states, amqp_config, barcode):
+ if states.get(barcode[:2], None):
+ values = dict( BARCODE=barcode[2:],
+ STATE=states.get(barcode[:2]) )
+ print values
+ data = xml % values
+ print data
+ conn = amqp.Connection(host=amqp_config['host']+":"+amqp_config['port'],
+ userid=amqp_config['userid'],
+ password=amqp_config['password'],
+ virtual_host=amqp_config['virtual_host'],
+ insist=False)
+ chan = conn.channel()
+ msg = amqp.Message(data)
+ msg.properties["delivery_mode"] = 2
+ chan.basic_publish(msg,
+ exchange=amqp_config['exchange'],
+ routing_key=amqp_config['routing_key'])
+ chan.close()
+ conn.close()
+
+def recv_data(states, amqp_config, s):
+ while True:
+ bytes = s.inWaiting()
+ if bytes:
+ print '%i bytes recvd' % bytes
+ msg = s.read(bytes)
+ print msg
+ handle_scan(states, amqp_config, msg.strip())
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('-c', '--config-file', help='config file with all the AMQP config parameters',
+ dest='config_file', action='store')
+ parser.add_option('-p', '--port', help='Name of the port where the scanner is connected',
+ dest='port', action='store')
+ (opts, args) = parser.parse_args()
+ config = ConfigParser.ConfigParser()
+ config.read(opts.config_file)
+ amqp_config = {}
+ states = {}
+ for option in config.options("galaxy:amqp"):
+ amqp_config[option] = config.get("galaxy:amqp", option)
+ count = 1
+ while True:
+ section = 'scanner%i' % count
+ if config.has_section(section):
+ states[config.get(section, 'prefix')] = config.get(section, 'state')
+ count = count + 1
+ else:
+ break
+ print amqp_config
+ print states
+ s = serial.Serial(int(opts.port))
+ print 'Port %s is open: %s' %( opts.port, s.isOpen())
+ recv_data(states, amqp_config, s)
+ s.close()
+ print 'Port %s is open: %s' %( opts.port, s.isOpen())
+
+
+if __name__ == '__main__':
+ main()
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/galaxy_amq.ini.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/galaxy_amq.ini.sample Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,32 @@
+# Galaxy Message Queue
+# Galaxy uses AMQ protocol to receive messages from external sources like
+# bar code scanners. Galaxy has been tested against RabbitMQ AMQP implementation.
+# For Galaxy to receive messages from a message queue the RabbitMQ server has
+# to be set up with a user account and other parameters listed below. The 'host'
+# and 'port' fields should point to where the RabbitMQ server is running.
+
+#[galaxy:amqp]
+#host = 127.0.0.1
+#port = 5672
+#userid = galaxy
+#password = galaxy
+#virtual_host = galaxy_messaging_engine
+#queue = galaxy_queue
+#exchange = galaxy_exchange
+#routing_key = bar_code_scanner
+
+# The following section(s) 'scanner#' is for specifying the state of the
+# sample this scanner represents. This state name should be one of the
+# possible states created for this request type in Galaxy
+# If there are multiple scanners attached to this host, add as many "scanner#"
+# sections below, each with the name & prefix of the bar code scanner and
+# the state it represents
+#[scanner1]
+#name =
+#state =
+#prefix =
+
+#[scanner2]
+#name =
+#state =
+#prefix =
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/report.bat.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/report.bat.sample Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,1 @@
+python scanner.py -p 2 -c galaxy_amq.ini -r
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/scan.bat.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/scan.bat.sample Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,1 @@
+python amqp_publisher.py -p 2 -c galaxy_amq.ini
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/scan.sh.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/scan.sh.sample Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,1 @@
+python amqp_publisher.py -p 3 -c galaxy_amq.ini
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/scanner.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/scanner.py Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,92 @@
+import sys, os
+import serial
+import array
+import time
+import optparse
+import ConfigParser, logging
+from scanner_interface import ScannerInterface
+
+logging.basicConfig(level=logging.DEBUG)
+log = logging.getLogger( 'Scanner' )
+
+# command prefix: SYN M CR
+cmd = [22, 77, 13]
+response = { 6: 'ACK', 5: 'ENQ', 21: 'NAK' }
+image_scanner_report = 'RPTSCN.'
+get_prefix1 = 'PREBK2?.'
+get_prefix2 = ':4820:PREBK2?.'
+set_prefix = 'PREBK2995859.'
+clear_prefix = 'PRECA2.'
+
+def get_prefix_cmd(name):
+ return ':' + name + ':' + 'PREBK2?.'
+
+def set_prefix_cmd(name, prefix):
+ prefix_str = ''
+ for c in prefix:
+ prefix_str = prefix_str + hex(ord(c))[2:]
+ return ':' + name + ':' + 'PREBK299' + prefix_str + '!'
+
+def read_config_file(config_file):
+ config = ConfigParser.ConfigParser()
+ config.read(config_file)
+ count = 1
+ scanners_list = []
+ while True:
+ section = 'scanner%i' % count
+ if config.has_section(section):
+ scanner = dict(name=config.get(section, 'name'),
+ prefix=config.get(section, 'prefix'),
+ state=config.get(section, 'state'))
+ scanners_list.append(scanner)
+ count = count + 1
+ else:
+ return scanners_list
+
+def main():
+ usage = "python %s -p PORT -c CONFIG_FILE [ OPTION ]" % sys.argv[0]
+ parser = optparse.OptionParser(usage=usage)
+ parser.add_option('-p', '--port', help='Name of the port where the scanner is connected',
+ dest='port', action='store')
+ parser.add_option('-c', '--config-file', help='config file with all the AMQP config parameters',
+ dest='config_file', action='store')
+ parser.add_option('-r', '--report', help='scanner report',
+ dest='report', action='store_true', default=False)
+ parser.add_option('-i', '--install', help='install the scanners',
+ dest='install', action='store_true', default=False)
+ (opts, args) = parser.parse_args()
+ # validate
+ if not opts.port:
+ parser.print_help()
+ sys.exit(0)
+ if ( opts.report or opts.install ) and not opts.config_file:
+ parser.print_help()
+ sys.exit(0)
+
+ # create the scanner interface
+ si = ScannerInterface(opts.port)
+ if opts.install:
+ scanners_list = read_config_file(opts.config_file)
+ for scanner in scanners_list:
+ msg = set_prefix_cmd(scanner['name'], scanner['prefix'])
+ si.send(msg)
+ response = si.recv()
+ if not response:
+ log.error("Scanner %s could not be installed." % scanner['name'])
+ elif opts.report:
+ si.send(image_scanner_report)
+ rep = si.recv()
+ log.info(rep)
+ scanners_list = read_config_file(opts.config_file)
+ for scanner in scanners_list:
+ msg = get_prefix_cmd(scanner['name'])
+ si.send(msg)
+ response = si.recv()
+ if response:
+ log.info('PREFIX for scanner %s: %s' % (scanner['name'], chr(int(response[8:12][:2], 16))+chr(int(response[8:12][2:], 16)) ))
+ si.close()
+
+
+
+if __name__ == "__main__":
+ main()
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/scanner_interface.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/scanner_interface.py Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,76 @@
+import sys, os
+import serial
+import array
+import time
+import optparse
+import ConfigParser
+import logging
+
+logging.basicConfig(level=logging.INFO)
+log = logging.getLogger( 'ScannerInterface' )
+
+class ScannerInterface( object ):
+ cmdprefix = [22, 77, 13]
+ response = { 6: 'ACK', 5: 'ENQ', 21: 'NAK' }
+
+ def __init__( self, port ):
+ if os.name in ['posix', 'mac']:
+ self.port = port
+ elif os.name == 'nt':
+ self.port = int(port)
+ if self.port:
+ self.open()
+
+ def open(self):
+ try:
+ self.serial_conn = serial.Serial(self.port)
+ except serial.SerialException:
+ log.exception('Unable to open port: %s' % str(self.port))
+ sys.exit(1)
+ log.debug('Port %s is open: %s' %( str(self.port), self.serial_conn.isOpen() ) )
+
+ def is_open(self):
+ return self.serial_conn.isOpen()
+
+ def close(self):
+ self.serial_conn.close()
+ log.debug('Port %s is open: %s' %( str(self.port), self.serial_conn.isOpen() ) )
+
+ def send(self, msg):
+ message = self.cmdprefix + map(ord, msg)
+ byte_array = array.array('B', message)
+ log.debug('Sending message to %s: %s' % ( str(self.port), message) )
+ bytes = self.serial_conn.write( byte_array.tostring() )
+ log.debug('%i bytes out of %i bytes sent to the scanner' % ( bytes, len(message) ) )
+
+ def recv(self):
+ time.sleep(1)
+ self.serial_conn.flush()
+ nbytes = self.serial_conn.inWaiting()
+ log.debug('%i bytes received' % nbytes)
+ if nbytes:
+ msg = self.serial_conn.read(nbytes)
+ byte_array = map(ord, msg)
+ log.debug('Message received [%s]: %s' % (self.response.get(byte_array[len(byte_array)-2], byte_array[len(byte_array)-2]),
+ msg))
+ return msg
+ else:
+ log.error('Error!')
+ return None
+
+ def setup_recv(self, callback):
+ self.recv_callback = callback
+
+ def wait(self):
+ nbytes = self.serial_conn.inWaiting()
+ if nbytes:
+ msg = self.serial_conn.read(nbytes)
+ byte_array = map(ord, msg)
+ log.debug('Message received [%s]: %s' % (self.response.get(byte_array[len(byte_array)-2], byte_array[len(byte_array)-2],
+ msg)))
+ if self.recv_callback:
+ self.recv_callback(msg)
+ return
+
+
+
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/galaxydb_interface.py
--- a/scripts/galaxy_messaging/galaxydb_interface.py Wed Nov 04 09:32:09 2009 -0500
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,151 +0,0 @@
-#/usr/bin/python
-
-from datetime import datetime, timedelta
-import sys
-import optparse
-import os
-import time
-import logging
-
-assert sys.version_info[:2] >= ( 2, 4 )
-new_path = [ os.path.join( os.getcwd(), "lib" ) ]
-new_path.extend( sys.path[1:] ) # remove scripts/ from the path
-sys.path = new_path
-from galaxy import eggs
-import pkg_resources
-pkg_resources.require( "psycopg2" )
-import psycopg2
-pkg_resources.require( "SQLAlchemy >= 0.4" )
-from sqlalchemy import *
-from sqlalchemy.orm import sessionmaker
-
-logging.basicConfig(level=logging.DEBUG)
-log = logging.getLogger( 'GalaxyDbInterface' )
-
-class GalaxyDbInterface(object):
-
- def __init__(self, dbstr):
- self.dbstr = dbstr
- self.db_engine = create_engine(self.dbstr)
-# self.db_engine.echo = True
- self.metadata = MetaData(self.db_engine)
- self.session = sessionmaker(bind=self.db_engine)
- self.event_table = Table('sample_event', self.metadata, autoload=True )
- self.sample_table = Table('sample', self.metadata, autoload=True )
- self.request_table = Table('request', self.metadata, autoload=True )
- self.state_table = Table('sample_state', self.metadata, autoload=True )
-
- def get_sample_id(self, field_name='bar_code', value=None):
- if not value:
- return -1
- sample_id = -1
- if field_name =='name':
- stmt = select(columns=[self.sample_table.c.id],
- whereclause=self.sample_table.c.name==value)
- result = stmt.execute()
- sample_id = result.fetchone()[0]
- elif field_name == 'bar_code':
- stmt = select(columns=[self.sample_table.c.id],
- whereclause=self.sample_table.c.bar_code==value)
- result = stmt.execute()
- x = result.fetchone()
- if x:
- sample_id = x[0]
- log.debug('Sample ID: %i' % sample_id)
- return sample_id
- log.warning('This sample %s %s does not belong to any sample in the database.' % (field_name, value))
- return -1
-
- def current_state(self, sample_id):
- '''
- This method returns the current state of the sample for the given sample_id
- '''
- stmt = select(columns=[self.event_table.c.sample_state_id],
- whereclause=self.event_table.c.sample_id==sample_id,
- order_by=self.event_table.c.update_time.desc())
- result = stmt.execute()
- all_states = result.fetchall()
- current_state_id = all_states[0][0]
- return current_state_id
-
- def all_possible_states(self, sample_id):
- subsubquery = select(columns=[self.sample_table.c.request_id],
- whereclause=self.sample_table.c.id==sample_id)
- self.request_id = subsubquery.execute().fetchall()[0][0]
- log.debug('REQUESTID: %i' % self.request_id)
- subquery = select(columns=[self.request_table.c.request_type_id],
- whereclause=self.request_table.c.id==self.request_id)
- request_type_id = subquery.execute().fetchall()[0][0]
- log.debug('REQUESTTYPEID: %i' % request_type_id)
- query = select(columns=[self.state_table.c.id, self.state_table.c.name],
- whereclause=self.state_table.c.request_type_id==request_type_id,
- order_by=self.state_table.c.id.asc())
- states = query.execute().fetchall()
- log.debug('POSSIBLESTATES: '+ str(states))
- return states
-
- def change_state(self, sample_id, new_state=None):
- '''
- This method changes the state of the sample to the the 'new_state'
- '''
- if not new_state:
- return
- new_state_id = -1
- # find the state_id for this new state in the list of possible states
- possible_states = self.all_possible_states(sample_id)
- for state_id, state_name in possible_states:
- if new_state == state_name:
- new_state_id = state_id
- if new_state_id == -1:
- return
- log.debug('Updating sample_id %i state to %s' % (sample_id, new_state))
- d = timedelta(hours=4)
- i = self.event_table.insert()
- i.execute(update_time=datetime.now()+d,
- create_time=datetime.now()+d,
- sample_id=sample_id,
- sample_state_id=int(new_state_id),
- comment='bar code scanner')
- # if all the samples for this request are in the final state
- # then change the request state to 'Complete'
- result = select(columns=[self.sample_table.c.id],
- whereclause=self.sample_table.c.request_id==self.request_id).execute()
- sample_id_list = result.fetchall()
- request_complete = True
- for sid in sample_id_list:
- current_state_id = self.current_state(sid[0])
- if current_state_id != possible_states[-1][0]:
- request_complete = False
- break
- if request_complete:
- request_state = 'Complete'
- else:
- request_state = 'Submitted'
- log.debug('Updating request_id %i state to "%s"' % (self.request_id, request_state))
- d = timedelta(hours=4)
- i = self.request_table.update(whereclause=self.request_table.c.id==self.request_id,
- values={self.request_table.c.state: request_state})
- i.execute()
-
-
-
-if __name__ == '__main__':
- print '''This file should not be run directly. To start the Galaxy AMQP Listener:
- %sh run_galaxy_listener.sh'''
-# dbstr = 'postgres://postgres:postgres@localhost/galaxy_ft'
-#
-# parser = optparse.OptionParser()
-# parser.add_option('-n', '--name', help='name of the sample field', dest='name', \
-# action='store', default='bar_code')
-# parser.add_option('-v', '--value', help='value of the sample field', dest='value', \
-# action='store')
-# parser.add_option('-s', '--state', help='new state of the sample', dest='state', \
-# action='store')
-# (opts, args) = parser.parse_args()
-#
-# gs = GalaxyDbInterface(dbstr)
-# sample_id = gs.get_sample_id(field_name=opts.name, value=opts.value)
-# gs.change_state(sample_id, opts.state)
-
-
-
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/server/amqp_consumer.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/server/amqp_consumer.py Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,94 @@
+'''
+Galaxy Messaging with AMQP (RabbitMQ)
+Galaxy uses AMQ protocol to receive messages from external sources like
+bar code scanners. Galaxy has been tested against RabbitMQ AMQP implementation.
+For Galaxy to receive messages from a message queue the RabbitMQ server has
+to be set up with a user account and other parameters listed in the [galaxy:amq]
+section in the universe_wsgi.ini config file
+Once the RabbitMQ server has been setup and started with the given parameters,
+this script can be run to receive messages and update the Galaxy database accordingly
+'''
+
+import ConfigParser
+import sys, os
+import optparse
+import xml.dom.minidom
+from galaxydb_interface import GalaxyDbInterface
+
+assert sys.version_info[:2] >= ( 2, 4 )
+new_path = [ os.path.join( os.getcwd(), "lib" ) ]
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+sys.path = new_path
+
+from galaxy import eggs
+import pkg_resources
+pkg_resources.require( "amqplib" )
+
+from amqplib import client_0_8 as amqp
+
+import logging
+logging.basicConfig(level=logging.DEBUG)
+log = logging.getLogger( 'GalaxyAMQP' )
+
+
+galaxy_config_file = 'universe_wsgi.ini'
+global dbconnstr
+
+def get_value(dom, tag_name):
+ '''
+ This method extracts the tag value from the xml message
+ '''
+ nodelist = dom.getElementsByTagName(tag_name)[0].childNodes
+ rc = ""
+ for node in nodelist:
+ if node.nodeType == node.TEXT_NODE:
+ rc = rc + node.data
+ return rc
+
+def recv_callback(msg):
+ dom = xml.dom.minidom.parseString(msg.body)
+ barcode = get_value(dom, 'barcode')
+ state = get_value(dom, 'state')
+ log.debug('Barcode: '+barcode)
+ log.debug('State: '+state)
+ # update the galaxy db
+ galaxy = GalaxyDbInterface(dbconnstr)
+ sample_id = galaxy.get_sample_id(field_name='bar_code', value=barcode)
+ if sample_id == -1:
+ log.debug('Invalid barcode.')
+ return
+ galaxy.change_state(sample_id, state)
+
+def main():
+ config = ConfigParser.ConfigParser()
+ config.read(galaxy_config_file)
+ global dbconnstr
+ dbconnstr = config.get("app:main", "database_connection")
+ amqp_config = {}
+ for option in config.options("galaxy:amqp"):
+ amqp_config[option] = config.get("galaxy:amqp", option)
+ log.debug(str(amqp_config))
+ conn = amqp.Connection(host=amqp_config['host']+":"+amqp_config['port'],
+ userid=amqp_config['userid'],
+ password=amqp_config['password'],
+ virtual_host=amqp_config['virtual_host'],
+ insist=False)
+ chan = conn.channel()
+ chan.queue_declare(queue=amqp_config['queue'], durable=True, exclusive=True, auto_delete=False)
+ chan.exchange_declare(exchange=amqp_config['exchange'], type="direct", durable=True, auto_delete=False,)
+ chan.queue_bind(queue=amqp_config['queue'],
+ exchange=amqp_config['exchange'],
+ routing_key=amqp_config['routing_key'])
+
+ chan.basic_consume(queue=amqp_config['queue'],
+ no_ack=True,
+ callback=recv_callback,
+ consumer_tag="testtag")
+ while True:
+ chan.wait()
+ chan.basic_cancel("testtag")
+ chan.close()
+ conn.close()
+
+if __name__ == '__main__':
+ main()
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/server/galaxydb_interface.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/server/galaxydb_interface.py Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,149 @@
+#!/usr/bin/python
+
+from datetime import datetime
+import sys
+import optparse
+import os
+import time
+import logging
+
+assert sys.version_info[:2] >= ( 2, 4 )
+new_path = [ os.path.join( os.getcwd(), "lib" ) ]
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+sys.path = new_path
+from galaxy import eggs
+import pkg_resources
+pkg_resources.require( "psycopg2" )
+import psycopg2
+pkg_resources.require( "SQLAlchemy >= 0.4" )
+from sqlalchemy import *
+from sqlalchemy.orm import sessionmaker
+
+logging.basicConfig(level=logging.DEBUG)
+log = logging.getLogger( 'GalaxyDbInterface' )
+
+class GalaxyDbInterface(object):
+
+ def __init__(self, dbstr):
+ self.dbstr = dbstr
+ self.db_engine = create_engine(self.dbstr)
+# self.db_engine.echo = True
+ self.metadata = MetaData(self.db_engine)
+ self.session = sessionmaker(bind=self.db_engine)
+ self.event_table = Table('sample_event', self.metadata, autoload=True )
+ self.sample_table = Table('sample', self.metadata, autoload=True )
+ self.request_table = Table('request', self.metadata, autoload=True )
+ self.state_table = Table('sample_state', self.metadata, autoload=True )
+
+ def get_sample_id(self, field_name='bar_code', value=None):
+ if not value:
+ return -1
+ sample_id = -1
+ if field_name =='name':
+ stmt = select(columns=[self.sample_table.c.id],
+ whereclause=self.sample_table.c.name==value)
+ result = stmt.execute()
+ sample_id = result.fetchone()[0]
+ elif field_name == 'bar_code':
+ stmt = select(columns=[self.sample_table.c.id],
+ whereclause=self.sample_table.c.bar_code==value)
+ result = stmt.execute()
+ x = result.fetchone()
+ if x:
+ sample_id = x[0]
+ log.debug('Sample ID: %i' % sample_id)
+ return sample_id
+ log.warning('This sample %s %s does not belong to any sample in the database.' % (field_name, value))
+ return -1
+
+ def current_state(self, sample_id):
+ '''
+ This method returns the current state of the sample for the given sample_id
+ '''
+ stmt = select(columns=[self.event_table.c.sample_state_id],
+ whereclause=self.event_table.c.sample_id==sample_id,
+ order_by=self.event_table.c.update_time.desc())
+ result = stmt.execute()
+ all_states = result.fetchall()
+ current_state_id = all_states[0][0]
+ return current_state_id
+
+ def all_possible_states(self, sample_id):
+ subsubquery = select(columns=[self.sample_table.c.request_id],
+ whereclause=self.sample_table.c.id==sample_id)
+ self.request_id = subsubquery.execute().fetchall()[0][0]
+ log.debug('REQUESTID: %i' % self.request_id)
+ subquery = select(columns=[self.request_table.c.request_type_id],
+ whereclause=self.request_table.c.id==self.request_id)
+ request_type_id = subquery.execute().fetchall()[0][0]
+ log.debug('REQUESTTYPEID: %i' % request_type_id)
+ query = select(columns=[self.state_table.c.id, self.state_table.c.name],
+ whereclause=self.state_table.c.request_type_id==request_type_id,
+ order_by=self.state_table.c.id.asc())
+ states = query.execute().fetchall()
+ log.debug('POSSIBLESTATES: '+ str(states))
+ return states
+
+ def change_state(self, sample_id, new_state=None):
+ '''
+        This method changes the state of the sample to the 'new_state'
+ '''
+ if not new_state:
+ return
+ new_state_id = -1
+ # find the state_id for this new state in the list of possible states
+ possible_states = self.all_possible_states(sample_id)
+ for state_id, state_name in possible_states:
+ if new_state == state_name:
+ new_state_id = state_id
+ if new_state_id == -1:
+ return
+ log.debug('Updating sample_id %i state to %s' % (sample_id, new_state))
+ i = self.event_table.insert()
+ i.execute(update_time=datetime.utcnow(),
+ create_time=datetime.utcnow(),
+ sample_id=sample_id,
+ sample_state_id=int(new_state_id),
+ comment='bar code scanner')
+ # if all the samples for this request are in the final state
+ # then change the request state to 'Complete'
+ result = select(columns=[self.sample_table.c.id],
+ whereclause=self.sample_table.c.request_id==self.request_id).execute()
+ sample_id_list = result.fetchall()
+ request_complete = True
+ for sid in sample_id_list:
+ current_state_id = self.current_state(sid[0])
+ if current_state_id != possible_states[-1][0]:
+ request_complete = False
+ break
+ if request_complete:
+ request_state = 'Complete'
+ else:
+ request_state = 'Submitted'
+ log.debug('Updating request_id %i state to "%s"' % (self.request_id, request_state))
+ i = self.request_table.update(whereclause=self.request_table.c.id==self.request_id,
+ values={self.request_table.c.state: request_state})
+ i.execute()
+
+
+
+if __name__ == '__main__':
+ print '''This file should not be run directly. To start the Galaxy AMQP Listener:
+ %sh run_galaxy_listener.sh'''
+ dbstr = 'postgres://postgres:postgres@localhost/galaxy_uft'
+
+ parser = optparse.OptionParser()
+ parser.add_option('-n', '--name', help='name of the sample field', dest='name', \
+ action='store', default='bar_code')
+ parser.add_option('-v', '--value', help='value of the sample field', dest='value', \
+ action='store')
+ parser.add_option('-s', '--state', help='new state of the sample', dest='state', \
+ action='store')
+ (opts, args) = parser.parse_args()
+
+ gs = GalaxyDbInterface(dbstr)
+ sample_id = gs.get_sample_id(field_name=opts.name, value=opts.value)
+ gs.change_state(sample_id, opts.state)
+
+
+
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/2300a80d80e5
changeset: 2951:2300a80d80e5
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Nov 03 13:04:35 2009 -0500
description:
merge heads
diffstat:
lib/galaxy/tags/tag_handler.py | 8 +-
lib/galaxy/web/controllers/history.py | 145 +++++++++++++++---
lib/galaxy/web/framework/helpers/grids.py | 1 +
static/scripts/autocomplete_tagging.js | 2 +-
static/scripts/packed/autocomplete_tagging.js | 2 +-
static/scripts/packed/trackster.js | 2 +-
templates/history/grid.mako | 250 +++++++++++++++++-------------
test/base/twilltestcase.py | 18 +-
test/functional/test_history_functions.py | 2 +-
9 files changed, 280 insertions(+), 150 deletions(-)
diffs (665 lines):
diff -r d872c1e16afb -r 2300a80d80e5 lib/galaxy/tags/tag_handler.py
--- a/lib/galaxy/tags/tag_handler.py Tue Nov 03 12:52:01 2009 -0500
+++ b/lib/galaxy/tags/tag_handler.py Tue Nov 03 13:04:35 2009 -0500
@@ -3,6 +3,12 @@
class TagHandler( object ):
+ # Minimum tag length.
+ min_tag_len = 2
+
+ # Maximum tag length.
+ max_tag_len = 255
+
# Tag separator.
tag_separators = ',;'
@@ -215,7 +221,7 @@
scrubbed_name = scrubbed_name[1:]
# If name is too short or too long, return None.
- if len(scrubbed_name) < 3 or len(scrubbed_name) > 255:
+ if len(scrubbed_name) < self.min_tag_len or len(scrubbed_name) > self.max_tag_len:
return None
return scrubbed_name
diff -r d872c1e16afb -r 2300a80d80e5 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Nov 03 12:52:01 2009 -0500
+++ b/lib/galaxy/web/controllers/history.py Tue Nov 03 13:04:35 2009 -0500
@@ -5,6 +5,7 @@
from galaxy.model import History
from galaxy.model.orm import *
from galaxy.util.json import *
+from galaxy.util.odict import odict
from galaxy.tags.tag_handler import TagHandler
from sqlalchemy.sql.expression import ClauseElement
import webhelpers, logging, operator
@@ -19,12 +20,29 @@
class HistoryListGrid( grids.Grid ):
# Custom column types
class NameColumn( grids.GridColumn ):
- def __init( self, key, link, attach_popup ):
+ def __init( self, key, link, attach_popup, filterable ):
grids.GridColumn.__init__(self, key, link, attach_popup)
def get_value( self, trans, grid, history ):
return history.get_display_name()
+ def filter( self, db_session, query, column_filter ):
+ """ Modify query to filter histories by name. """
+ if column_filter == "All":
+ pass
+ elif column_filter:
+ query = query.filter( func.lower( History.name ).like( "%" + column_filter.lower() + "%" ) )
+ return query
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = odict()
+ accepted_filter_labels_and_vals["FREETEXT"] = "FREETEXT"
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.iteritems():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
+
class DatasetsByStateColumn( grids.GridColumn ):
def get_value( self, trans, grid, history ):
rval = []
@@ -48,6 +66,7 @@
if item.users_shared_with or item.importable:
return dict( operation="sharing" )
return None
+
class TagsColumn( grids.GridColumn ):
def __init__( self, col_name, key, filterable ):
grids.GridColumn.__init__(self, col_name, key=key, filterable=filterable)
@@ -61,7 +80,7 @@
return div_elt + trans.fill_template( "/tagging_common.mako", trans=trans, tagged_item=history,
elt_id = elt_id, in_form="true", input_size="20", tag_click_fn="add_tag_to_grid_filter" )
def filter( self, db_session, query, column_filter ):
- """ Modify query to include only histories with tags in column_filter. """
+ """ Modify query to filter histories by tag. """
if column_filter == "All":
pass
elif column_filter:
@@ -69,52 +88,115 @@
tag_handler = TagHandler()
raw_tags = tag_handler.parse_tags( column_filter.encode("utf-8") )
for name, value in raw_tags.items():
- tag = tag_handler.get_tag_by_name( db_session, name )
- if tag:
- query = query.filter( History.tags.any( tag_id=tag.id ) )
+ if name:
+ # Search for tag names.
+ query = query.filter( History.tags.any( func.lower( model.HistoryTagAssociation.user_tname ).like( "%" + name.lower() + "%" ) ) )
if value:
- query = query.filter( History.tags.any( value=value.lower() ) )
- else:
- # Tag doesn't exist; unclear what to do here, but the literal thing to do is add the criterion, which
- # will then yield a query that returns no results.
- query = query.filter( History.tags.any( user_tname=name ) )
+ # Search for tag values.
+ query = query.filter( History.tags.any( func.lower( model.HistoryTagAssociation.user_value ).like( "%" + value.lower() + "%" ) ) )
return query
def get_accepted_filters( self ):
- """ Returns a list of accepted filters for this column. """
- accepted_filter_labels_and_vals = { "All": "All" }
- accepted_filters = []
- for label, val in accepted_filter_labels_and_vals.items():
- args = { self.key: val }
- accepted_filters.append( grids.GridColumnFilter( label, args) )
- return accepted_filters
-
-
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = odict()
+ accepted_filter_labels_and_vals["FREETEXT"] = "FREETEXT"
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.iteritems():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
+
class DeletedColumn( grids.GridColumn ):
def get_accepted_filters( self ):
""" Returns a list of accepted filters for this column. """
- accepted_filter_labels_and_vals = { "Active" : "False", "Deleted" : "True", "All": "All" }
+ accepted_filter_labels_and_vals = { "active" : "False", "deleted" : "True", "all": "All" }
accepted_filters = []
for label, val in accepted_filter_labels_and_vals.items():
args = { self.key: val }
accepted_filters.append( grids.GridColumnFilter( label, args) )
return accepted_filters
+
+ class SharingColumn( grids.GridColumn ):
+ def filter( self, db_session, query, column_filter ):
+ """ Modify query to filter histories by sharing status. """
+ if column_filter == "All":
+ pass
+ elif column_filter:
+ if column_filter == "private":
+ query = query.filter( History.users_shared_with == None )
+ query = query.filter( History.importable == False )
+ elif column_filter == "shared":
+ query = query.filter( History.users_shared_with != None )
+ elif column_filter == "importable":
+ query = query.filter( History.importable == True )
+ return query
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = odict()
+ accepted_filter_labels_and_vals["private"] = "private"
+ accepted_filter_labels_and_vals["shared"] = "shared"
+ accepted_filter_labels_and_vals["importable"] = "importable"
+ accepted_filter_labels_and_vals["all"] = "All"
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.items():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
+
+ class FreeTextSearchColumn( grids.GridColumn ):
+ def filter( self, db_session, query, column_filter ):
+ """ Modify query to search tags and history names. """
+ if column_filter == "All":
+ pass
+ elif column_filter:
+ # Build tags filter.
+ tag_handler = TagHandler()
+ raw_tags = tag_handler.parse_tags( column_filter.encode("utf-8") )
+ tags_filter = None
+ for name, value in raw_tags.items():
+ if name:
+ # Search for tag names.
+ tags_filter = History.tags.any( func.lower( model.HistoryTagAssociation.user_tname ).like( "%" + name.lower() + "%" ) )
+ if value:
+ # Search for tag values.
+ tags_filter = and_( tags_filter, func.lower( History.tags.any( model.HistoryTagAssociation.user_value ).like( "%" + value.lower() + "%" ) ) )
+
+ # Build history name filter.
+ history_name_filter = func.lower( History.name ).like( "%" + column_filter.lower() + "%" )
+
+ # Apply filters to query.
+ if tags_filter:
+ query = query.filter( or_( tags_filter, history_name_filter ) )
+ else:
+ query = query.filter( history_name_filter )
+ return query
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = odict()
+ accepted_filter_labels_and_vals["FREETEXT"] = "FREETEXT"
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.iteritems():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
# Grid definition
- title = "Stored histories"
+ title = "Saved Histories"
model_class = model.History
template='/history/grid.mako'
default_sort_key = "-create_time"
columns = [
NameColumn( "Name", key="name",
link=( lambda history: iff( history.deleted, None, dict( operation="switch", id=history.id ) ) ),
- attach_popup=True ),
+ attach_popup=True, filterable=True ),
DatasetsByStateColumn( "Datasets (by state)", ncells=4 ),
TagsColumn( "Tags", key="tags", filterable=True),
StatusColumn( "Status", attach_popup=False ),
grids.GridColumn( "Created", key="create_time", format=time_ago ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
- # Valid for filtering but invisible
- DeletedColumn( "Status", key="deleted", visible=False, filterable=True )
+ # Columns that are valid for filtering but are not visible.
+ DeletedColumn( "Deleted", key="deleted", visible=False, filterable=True ),
+ SharingColumn( "Shared", key="shared", visible=False, filterable=True ),
+ FreeTextSearchColumn( "Search", key="free-text-search", visible=False ) # Not filterable because it's the default search.
]
operations = [
grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
@@ -131,7 +213,7 @@
grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
grids.GridColumnFilter( "All", args=dict( deleted='All' ) ),
]
- default_filter = dict( deleted="False", tags="All" )
+ default_filter = dict( name="All", deleted="False", tags="All", shared="All" )
num_rows_per_page = 50
preserve_state = False
use_paging = True
@@ -160,6 +242,7 @@
template='/history/grid.mako'
model_class = model.History
default_sort_key = "-update_time"
+ default_filter = {}
columns = [
grids.GridColumn( "Name", key="name" ),
DatasetsByStateColumn( "Datasets (by state)", ncells=4 ),
@@ -374,6 +457,18 @@
trans.sa_session.flush()
@web.expose
+ def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
+ """Return autocomplete data for history names"""
+ user = trans.get_user()
+ if not user:
+ return
+
+ ac_data = ""
+ for history in trans.sa_session.query( History ).filter_by( user=user ).filter( func.lower( History.name ) .like(q.lower() + "%") ):
+ ac_data = ac_data + history.name + "\n"
+ return ac_data
+
+ @web.expose
def imp( self, trans, id=None, confirm=False, **kwd ):
"""Import another user's history via a shared URL"""
msg = ""
diff -r d872c1e16afb -r 2300a80d80e5 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py Tue Nov 03 12:52:01 2009 -0500
+++ b/lib/galaxy/web/framework/helpers/grids.py Tue Nov 03 13:04:35 2009 -0500
@@ -183,6 +183,7 @@
query=query,
cur_page_num = page_num,
num_pages = num_pages,
+ default_filter_dict=self.default_filter,
cur_filter_dict=cur_filter_dict,
sort_key=sort_key,
encoded_sort_key=encoded_sort_key,
diff -r d872c1e16afb -r 2300a80d80e5 static/scripts/autocomplete_tagging.js
--- a/static/scripts/autocomplete_tagging.js Tue Nov 03 12:52:01 2009 -0500
+++ b/static/scripts/autocomplete_tagging.js Tue Nov 03 13:04:35 2009 -0500
@@ -309,7 +309,7 @@
new_value = new_value.replace(/^\s+|\s+$/g,"");
// Too short?
- if (new_value.length < 3)
+ if (new_value.length < 2)
return false;
//
diff -r d872c1e16afb -r 2300a80d80e5 static/scripts/packed/autocomplete_tagging.js
--- a/static/scripts/packed/autocomplete_tagging.js Tue Nov 03 12:52:01 2009 -0500
+++ b/static/scripts/packed/autocomplete_tagging.js Tue Nov 03 13:04:35 2009 -0500
@@ -1,1 +1,1 @@
-var ac_tag_area_id_gen=1;jQuery.fn.autocomplete_tagging=function(c){var e={get_toggle_link_text_fn:function(u){var w="";var v=o(u);if(v!=0){w=v+(v!=0?" Tags":" Tag")}else{w="Add tags"}return w},tag_click_fn:function(u,v){},editable:true,input_size:20,in_form:false,tags:{},use_toggle_link:true,item_id:"",add_tag_img:"",add_tag_img_rollover:"",delete_tag_img:"",ajax_autocomplete_tag_url:"",ajax_retag_url:"",ajax_delete_tag_url:"",ajax_add_tag_url:""};var p=jQuery.extend(e,c);var k="tag-area-"+(ac_tag_area_id_gen)++;var m=$("<div>").attr("id",k).addClass("tag-area");this.append(m);var o=function(u){if(u.length){return u.length}var v=0;for(element in u){v++}return v};var b=function(){var u=p.get_toggle_link_text_fn(p.tags);var v=$("<a href='/history/tags'>").text(u).addClass("toggle-link");v.click(function(){var w=(m.css("display")=="none");var x;if(w){x=function(){var y=o(p.tags);if(y==0){m.click()}}}else{x=function(){m.blur()}}m.slideToggle("fast",x);return false});return v};v
ar s=b();if(p.use_toggle_link){this.prepend(s)}var t=function(u){var v=new Array();for(key in u){v[v.length]=key+"-->"+u[key]}return"{"+v.join(",")+"}"};var a=function(v,u){return v+((u!=""&&u)?":"+u:"")};var h=function(u){return u.split(":")};var i=function(u){var v=$("<img src='"+p.add_tag_img+"' rollover='"+p.add_tag_img_rollover+"'/>").addClass("add-tag-button");v.click(function(){$(this).hide();m.click();return false});return v};var j=function(u){var v=$("<img src='"+p.delete_tag_img+"'/>").addClass("delete-tag-img");v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)});v.click(function(){var D=$(this).parent();var C=D.find(".tag-name").eq(0);var B=C.text();var z=h(B);var F=z[0];var y=z[1];var E=D.prev();D.remove();delete p.tags[F];var A=p.get_toggle_link_text_fn(p.tags);s.text(A);$.ajax({url:p.ajax_delete_tag_url,data:{tag_name:F},error:function(){p.tags[F]=y;if(E.hasClass("tag-button")){E
.after(D)}else{m.prepend(D)}var G=p.get_toggle_link_text_fn(p.tags);alert("Remove tag failed");s.text(G);v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)})},success:function(){}});return true});var w=$("<span>").text(u).addClass("tag-name");w.click(function(){tag_name_and_value=u.split(":");p.tag_click_fn(tag_name_and_value[0],tag_name_and_value[1]);return true});var x=$("<span></span>").addClass("tag-button");x.append(w);if(p.editable){x.append(v)}return x};var d=function(v){var u;if(p.in_form){u=$("<textarea id='history-tag-input' rows='1' cols='"+p.input_size+"' value='"+escape(v)+"'></textarea>")}else{u=$("<input id='history-tag-input' type='text' size='"+p.input_size+"' value='"+escape(v)+"'></input>")}u.keyup(function(D){if(D.keyCode==27){$(this).trigger("blur")}else{if((D.keyCode==13)||(D.keyCode==188)||(D.keyCode==32)){new_value=this.value;if(return_key_pressed_for_autocomplete==true)
{return_key_pressed_for_autocomplete=false;return false}if(new_value.indexOf(": ",new_value.length-2)!=-1){this.value=new_value.substring(0,new_value.length-1);return false}if((D.keyCode==188)||(D.keyCode==32)){new_value=new_value.substring(0,new_value.length-1)}new_value=new_value.replace(/^\s+|\s+$/g,"");if(new_value.length<3){return false}this.value="";var A=j(new_value);var z=m.children(".tag-button");if(z.length!=0){var E=z.slice(z.length-1);E.after(A)}else{m.prepend(A)}var y=new_value.split(":");p.tags[y[0]]=y[1];var B=p.get_toggle_link_text_fn(p.tags);s.text(B);var C=$(this);$.ajax({url:p.ajax_add_tag_url,data:{new_tag:new_value},error:function(){A.remove();delete p.tags[y[0]];var F=p.get_toggle_link_text_fn(p.tags);s.text(F);alert("Add tag failed")},success:function(){C.flushCache()}});return false}}});var w=function(A,z,y,C,B){tag_name_and_value=C.split(":");return(tag_name_and_value.length==1?tag_name_and_value[0]:tag_name_and_value[1])};var x={selectFirst:false,fo
rmatItem:w,autoFill:false,highlight:false};u.autocomplete(p.ajax_autocomplete_tag_url,x);u.addClass("tag-input");return u};for(tag_name in p.tags){var q=p.tags[tag_name];var l=a(tag_name,q);var g=j(l,s,p.tags);m.append(g)}var n=d("");var f=i(n);m.blur(function(u){r=o(p.tags);if(r!=0){f.show();n.hide();m.removeClass("active-tag-area")}else{}});if(p.editable){m.append(f);m.append(n);n.hide();m.click(function(w){var v=$(this).hasClass("active-tag-area");if($(w.target).hasClass("delete-tag-img")&&!v){return false}if($(w.target).hasClass("tag-name")&&!v){return false}$(this).addClass("active-tag-area");f.hide();n.show();n.focus();var u=function(y){var x=m.attr("id");if(($(y.target).attr("id")!=x)&&($(y.target).parents().filter(x).length==0)){m.blur();$(document).unbind("click",u)}};$(window).click(u);return false})}if(p.use_toggle_link){m.hide()}else{var r=o(p.tags);if(r==0){f.hide();n.show()}}return this.addClass("tag-element")};
\ No newline at end of file
+var ac_tag_area_id_gen=1;jQuery.fn.autocomplete_tagging=function(c){var e={get_toggle_link_text_fn:function(u){var w="";var v=o(u);if(v!=0){w=v+(v!=0?" Tags":" Tag")}else{w="Add tags"}return w},tag_click_fn:function(u,v){},editable:true,input_size:20,in_form:false,tags:{},use_toggle_link:true,item_id:"",add_tag_img:"",add_tag_img_rollover:"",delete_tag_img:"",ajax_autocomplete_tag_url:"",ajax_retag_url:"",ajax_delete_tag_url:"",ajax_add_tag_url:""};var p=jQuery.extend(e,c);var k="tag-area-"+(ac_tag_area_id_gen)++;var m=$("<div>").attr("id",k).addClass("tag-area");this.append(m);var o=function(u){if(u.length){return u.length}var v=0;for(element in u){v++}return v};var b=function(){var u=p.get_toggle_link_text_fn(p.tags);var v=$("<a href='/history/tags'>").text(u).addClass("toggle-link");v.click(function(){var w=(m.css("display")=="none");var x;if(w){x=function(){var y=o(p.tags);if(y==0){m.click()}}}else{x=function(){m.blur()}}m.slideToggle("fast",x);return false});return v};v
ar s=b();if(p.use_toggle_link){this.prepend(s)}var t=function(u){var v=new Array();for(key in u){v[v.length]=key+"-->"+u[key]}return"{"+v.join(",")+"}"};var a=function(v,u){return v+((u!=""&&u)?":"+u:"")};var h=function(u){return u.split(":")};var i=function(u){var v=$("<img src='"+p.add_tag_img+"' rollover='"+p.add_tag_img_rollover+"'/>").addClass("add-tag-button");v.click(function(){$(this).hide();m.click();return false});return v};var j=function(u){var v=$("<img src='"+p.delete_tag_img+"'/>").addClass("delete-tag-img");v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)});v.click(function(){var D=$(this).parent();var C=D.find(".tag-name").eq(0);var B=C.text();var z=h(B);var F=z[0];var y=z[1];var E=D.prev();D.remove();delete p.tags[F];var A=p.get_toggle_link_text_fn(p.tags);s.text(A);$.ajax({url:p.ajax_delete_tag_url,data:{tag_name:F},error:function(){p.tags[F]=y;if(E.hasClass("tag-button")){E
.after(D)}else{m.prepend(D)}var G=p.get_toggle_link_text_fn(p.tags);alert("Remove tag failed");s.text(G);v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)})},success:function(){}});return true});var w=$("<span>").text(u).addClass("tag-name");w.click(function(){tag_name_and_value=u.split(":");p.tag_click_fn(tag_name_and_value[0],tag_name_and_value[1]);return true});var x=$("<span></span>").addClass("tag-button");x.append(w);if(p.editable){x.append(v)}return x};var d=function(v){var u;if(p.in_form){u=$("<textarea id='history-tag-input' rows='1' cols='"+p.input_size+"' value='"+escape(v)+"'></textarea>")}else{u=$("<input id='history-tag-input' type='text' size='"+p.input_size+"' value='"+escape(v)+"'></input>")}u.keyup(function(D){if(D.keyCode==27){$(this).trigger("blur")}else{if((D.keyCode==13)||(D.keyCode==188)||(D.keyCode==32)){new_value=this.value;if(return_key_pressed_for_autocomplete==true)
{return_key_pressed_for_autocomplete=false;return false}if(new_value.indexOf(": ",new_value.length-2)!=-1){this.value=new_value.substring(0,new_value.length-1);return false}if((D.keyCode==188)||(D.keyCode==32)){new_value=new_value.substring(0,new_value.length-1)}new_value=new_value.replace(/^\s+|\s+$/g,"");if(new_value.length<2){return false}this.value="";var A=j(new_value);var z=m.children(".tag-button");if(z.length!=0){var E=z.slice(z.length-1);E.after(A)}else{m.prepend(A)}var y=new_value.split(":");p.tags[y[0]]=y[1];var B=p.get_toggle_link_text_fn(p.tags);s.text(B);var C=$(this);$.ajax({url:p.ajax_add_tag_url,data:{new_tag:new_value},error:function(){A.remove();delete p.tags[y[0]];var F=p.get_toggle_link_text_fn(p.tags);s.text(F);alert("Add tag failed")},success:function(){C.flushCache()}});return false}}});var w=function(A,z,y,C,B){tag_name_and_value=C.split(":");return(tag_name_and_value.length==1?tag_name_and_value[0]:tag_name_and_value[1])};var x={selectFirst:false,fo
rmatItem:w,autoFill:false,highlight:false};u.autocomplete(p.ajax_autocomplete_tag_url,x);u.addClass("tag-input");return u};for(tag_name in p.tags){var q=p.tags[tag_name];var l=a(tag_name,q);var g=j(l,s,p.tags);m.append(g)}var n=d("");var f=i(n);m.blur(function(u){r=o(p.tags);if(r!=0){f.show();n.hide();m.removeClass("active-tag-area")}else{}});if(p.editable){m.append(f);m.append(n);n.hide();m.click(function(w){var v=$(this).hasClass("active-tag-area");if($(w.target).hasClass("delete-tag-img")&&!v){return false}if($(w.target).hasClass("tag-name")&&!v){return false}$(this).addClass("active-tag-area");f.hide();n.show();n.focus();var u=function(y){var x=m.attr("id");if(($(y.target).attr("id")!=x)&&($(y.target).parents().filter(x).length==0)){m.blur();$(document).unbind("click",u)}};$(window).click(u);return false})}if(p.use_toggle_link){m.hide()}else{var r=o(p.tags);if(r==0){f.hide();n.show()}}return this.addClass("tag-element")};
\ No newline at end of file
diff -r d872c1e16afb -r 2300a80d80e5 static/scripts/packed/trackster.js
--- a/static/scripts/packed/trackster.js Tue Nov 03 12:52:01 2009 -0500
+++ b/static/scripts/packed/trackster.js Tue Nov 03 13:04:35 2009 -0500
@@ -1,1 +1,1 @@
-var DENSITY=1000,DATA_ERROR="There was an error in indexing this dataset.",DATA_NONE="No data for this chrom/contig.",DATA_PENDING="Currently indexing... please wait",DATA_LOADING="Loading data...",CACHED_TILES=5,CACHED_DATA=20,CONTEXT=$("<canvas></canvas>").get(0).getContext("2d"),RIGHT_STRAND,LEFT_STRAND;var right_img=new Image();right_img.src="../images/visualization/strand_right.png";right_img.onload=function(){RIGHT_STRAND=CONTEXT.createPattern(right_img,"repeat")};var left_img=new Image();left_img.src="../images/visualization/strand_left.png";left_img.onload=function(){LEFT_STRAND=CONTEXT.createPattern(left_img,"repeat")};var right_img_inv=new Image();right_img_inv.src="../images/visualization/strand_right_inv.png";right_img_inv.onload=function(){RIGHT_STRAND_INV=CONTEXT.createPattern(right_img_inv,"repeat")};var left_img_inv=new Image();left_img_inv.src="../images/visualization/strand_left_inv.png";left_img_inv.onload=function(){LEFT_STRAND_INV=CONTEXT.createPattern(l
eft_img_inv,"repeat")};function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}var Cache=function(a){this.num_elements=a;this.obj_cache={};this.key_ary=[]};$.extend(Cache.prototype,{get:function(b){var a=this.key_ary.indexOf(b);if(a!=-1){this.key_ary.splice(a,1);this.key_ary.push(b)}return this.obj_cache[b]},set:function(b,c){if(!this.obj_cache[b]){if(this.key_ary.length>=this.num_elements){var a=this.key_ary.shift();delete this.obj_cache[a]}this.key_ary.push(b)}this.obj_cache[b]=c;return c}});var View=function(b,a){this.chrom=b;this.tracks=[];this.max_low=0;this.max_high=a;this.center=(this.max_high-this.max_low)/2;this.span=this.max_high-this.max_low;this.zoom_factor=2;this.zoom_level=0};$.extend(View.prototype,{add_track:function(a){a.view=this;this.tracks.push(a);if(a.init){a.init()}},redraw:function(){var d=this.span/Math.pow(this.zoom_factor,this.zoom_level),b=this.center-(d/2),e=b+d;if(b<0){b=0;e=b+d}else{if(e>this.max_high){e
=this.max_high;b=e-d}}this.low=Math.floor(b);this.high=Math.ceil(e);this.center=Math.round(this.low+(this.high-this.low)/2);$("#overview-box").css({left:(this.low/this.span)*$("#overview-viewport").width(),width:Math.max(12,((this.high-this.low)/this.span)*$("#overview-viewport").width())}).show();$("#low").val(commatize(this.low));$("#high").val(commatize(this.high));for(var c=0,a=this.tracks.length;c<a;c++){this.tracks[c].draw()}$("#bottom-spacer").remove();$("#viewport").append('<div id="bottom-spacer" style="height: 200px;"></div>')},zoom_in:function(a){if(this.max_high===0||this.high-this.low<30){return}if(a){this.center=a/$(document).width()*(this.high-this.low)+this.low}this.zoom_level+=1;this.redraw()},zoom_out:function(){if(this.max_high===0){return}if(this.zoom_level<=0){this.zoom_level=0;return}this.zoom_level-=1;this.redraw()}});var Track=function(a,b){this.name=a;this.parent_element=b;this.make_container()};$.extend(Track.prototype,{make_container:function(){thi
s.header_div=$("<div class='track-header'>").text(this.name);this.content_div=$("<div class='track-content'>");this.container_div=$("<div class='track'></div>").append(this.header_div).append(this.content_div);this.parent_element.append(this.container_div)}});var TiledTrack=function(){this.tile_cache=new Cache(CACHED_TILES)};$.extend(TiledTrack.prototype,Track.prototype,{draw:function(){var h=this.view.low,d=this.view.high,e=d-h;var c=Math.pow(10,Math.ceil(Math.log(e/DENSITY)/Math.log(10)));c=Math.max(c,0.1);c=Math.min(c,1000000);var j=$("<div style='position: relative;'></div>");this.content_div.children(":first").remove();this.content_div.append(j);var k=this.content_div.width()/e;var g;var a=Math.floor(h/c/DENSITY);while((a*DENSITY*c)<d){var i=this.view.zoom_level+"_"+a;var b=this.tile_cache.get(i);if(b){var f=a*DENSITY*c;b.css({left:(f-this.view.low)*k});j.append(b)}else{g=this.draw_tile(c,a,j,k)}if(g){this.tile_cache.set(i,g)}a+=1}}});var LabelTrack=function(a){Track.ca
ll(this,null,a);this.container_div.addClass("label-track")};$.extend(LabelTrack.prototype,Track.prototype,{draw:function(){var c=this.view,d=c.high-c.low,g=Math.floor(Math.pow(10,Math.floor(Math.log(d)/Math.log(10)))),a=Math.floor(c.low/g)*g,e=this.content_div.width(),b=$("<div style='position: relative; height: 1.3em;'></div>");while(a<c.high){var f=(a-c.low)/d*e;b.append($("<div class='label'>"+commatize(a)+"</div>").css({position:"absolute",left:f-1}));a+=g}this.content_div.children(":first").remove();this.content_div.append(b)}});var LineTrack=function(c,b,a){Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.track_type="line";this.height_px=(a?a:100);this.container_div.addClass("line-track");this.dataset_id=b;this.cache=new Cache(CACHED_DATA)};$.extend(LineTrack.prototype,TiledTrack.prototype,{init:function(){var a=this;a.content_div.text(DATA_LOADING);$.getJSON(data_url,{stats:true,track_type:a.track_type,chrom:a.view.chrom,low:null,high:null,dataset_id:a.dat
aset_id},function(c){if(!c||c=="error"){a.container_div.addClass("error");a.content_div.text(DATA_ERROR)}else{if(c=="no data"){a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}else{if(c=="pending"){a.container_div.addClass("pending");a.content_div.text(DATA_PENDING);setTimeout(function(){a.init()},5000)}else{a.content_div.text("");a.content_div.css("height",a.height_px+"px");a.min_value=c.min;a.max_value=c.max;a.vertical_range=a.max_value-a.min_value;var d=$("<div class='yaxislabel'>"+a.min_value+"</div>");var b=$("<div class='yaxislabel'>"+a.max_value+"</div>");b.css({position:"relative",top:"35px"});b.prependTo(a.container_div);d.css({position:"relative",top:a.height_px+32+"px",});d.prependTo(a.container_div);a.draw()}}}})},get_data:function(d,b){var c=this,a=b*DENSITY*d,f=(b+1)*DENSITY*d,e=d+"_"+b;$.getJSON(data_url,{track_type:this.track_type,chrom:this.view.chrom,low:a,high:f,dataset_id:this.dataset_id},function(g){c.cache[e]=g;$(document).trigger("redra
w")})},draw_tile:function(d,a,m,o){if(!this.vertical_range){return}var h=a*DENSITY*d,b=DENSITY*d,c=$("<canvas class='tile'></canvas>"),l=d+"_"+a;if(!this.cache[l]){this.get_data(d,a);return}var g=this.cache[l];c.css({position:"absolute",top:0,left:(h-this.view.low)*o});c.get(0).width=Math.ceil(b*o);c.get(0).height=this.height_px;var n=c.get(0).getContext("2d");var e=false;n.beginPath();for(var f=0;f<g.length-1;f++){var k=g[f][0]-h;var j=g[f][1];if(isNaN(j)){e=false}else{k=k*o;j=(j-this.min_value)/this.vertical_range*this.height_px;if(e){n.lineTo(k,j)}else{n.moveTo(k,j);e=true}}}n.stroke();m.append(c);return c}});var FeatureTrack=function(c,b,a){Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.track_type="feature";this.height_px=(a?a:100);this.container_div.addClass("feature-track");this.dataset_id=b;this.zo_slots={};this.show_labels_scale=0.001;this.showing_labels=false;this.vertical_gap=10;this.base_color="#2C3143"};$.extend(FeatureTrack.prototype,TiledTrack.pro
totype,{init:function(){var a=this;a.content_div.text(DATA_LOADING);$.getJSON(data_url,{track_type:a.track_type,low:a.view.max_low,high:a.view.max_high,dataset_id:a.dataset_id,chrom:a.view.chrom},function(b){if(b=="error"){a.container_div.addClass("error");a.content_div.text(DATA_ERROR)}else{if(b.length===0||b=="no data"){a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}else{if(b=="pending"){a.container_div.addClass("pending");a.content_div.text(DATA_PENDING);setTimeout(function(){a.init()},5000)}else{a.content_div.text("");a.content_div.css("height",a.height_px+"px");a.values=b;a.calc_slots();a.slots=a.zo_slots;a.draw()}}}})},calc_slots:function(o){var c=[],b=this.container_div.width()/(this.view.high-this.view.low),g=this.show_labels_scale,a=this.view.max_high,e=this.view.max_low;if(o){this.zi_slots={}}var m=$("<canvas></canvas>").get(0).getContext("2d");for(var f=0,h=this.values.length;f<h;f++){var k,l,n=this.values[f];if(o){k=Math.floor(Math.max(e,(n.star
t-e)*g));k-=m.measureText(n.name).width;l=Math.ceil(Math.min(a,(n.end-e)*g))}else{k=Math.floor(Math.max(e,(n.start-e)*b));l=Math.ceil(Math.min(a,(n.end-e)*b))}var d=0;while(true){if(c[d]===undefined||c[d]<k){c[d]=l;if(o){this.zi_slots[n.name]=d}else{this.zo_slots[n.name]=d}break}d++}}this.height_px=c.length*this.vertical_gap+15;this.content_div.css("height",this.height_px+"px")},draw_tile:function(w,B,g,n){if(!this.values){return null}if(n>this.show_labels_scale&&!this.showing_labels){this.showing_labels=true;if(!this.zi_slots){this.calc_slots(true)}this.slots=this.zi_slots}else{if(n<=this.show_labels_scale&&this.showing_labels){this.showing_labels=false;this.slots=this.zo_slots}}var C=B*DENSITY*w,c=(B+1)*DENSITY*w,q=DENSITY*w;var u=Math.ceil(q*n),t=this.height_px,s=$("<canvas class='tile'></canvas>");s.css({position:"absolute",top:0,left:(C-this.view.low)*n});s.get(0).width=u;s.get(0).height=t;var v=s.get(0).getContext("2d");v.fillStyle=this.base_color;v.font="10px monospac
e";v.textAlign="right";var y=0;for(var z=0,A=this.values.length;z<A;z++){var f=this.values[z];if(f.start<=c&&f.end>=C){var e=Math.floor(Math.max(0,(f.start-C)*n)),h=Math.ceil(Math.min(u,(f.end-C)*n)),d=this.slots[f.name]*this.vertical_gap;var a,G,b=null,o=null;if(f.thick_start&&f.thick_end){b=Math.floor(Math.max(0,(f.thick_start-C)*n));o=Math.ceil(Math.min(u,(f.thick_end-C)*n))}if(!this.showing_labels){v.fillRect(e,d+5,h-e,1)}else{if(v.fillText){v.fillText(f.name,e-1,d+8)}var E=f.blocks;if(E){if(f.strand){if(f.strand=="+"){v.fillStyle=RIGHT_STRAND}else{if(f.strand=="-"){v.fillStyle=LEFT_STRAND}}v.fillRect(e,d,h-e,10);v.fillStyle=this.base_color}for(var x=0,F=E.length;x<F;x++){var m=E[x],l=Math.floor(Math.max(0,(m[0]-C)*n)),D=Math.ceil(Math.min(u,(m[1]-C)*n));a=5;G=3;v.fillRect(l,d+G,D-l,a);if(b&&(l<o||D>b)){a=9;G=1;var r=Math.max(l,b),p=Math.min(D,o);v.fillRect(r,d+G,p-r,a)}}}else{a=9;G=1;v.fillRect(e,d+G,h-e,a);if(f.strand){if(f.strand=="+"){v.fillStyle=RIGHT_STRAND_INV}els
e{if(f.strand=="-"){v.fillStyle=LEFT_STRAND_INV}}v.fillRect(e,d,h-e,10);v.fillStyle=this.base_color}}}y++}}g.append(s);return s}});
\ No newline at end of file
+var DENSITY=1000,DATA_ERROR="There was an error in indexing this dataset.",DATA_NONE="No data for this chrom/contig.",DATA_PENDING="Currently indexing... please wait",DATA_LOADING="Loading data...",CACHED_TILES=10,CACHED_DATA=20,CONTEXT=$("<canvas></canvas>").get(0).getContext("2d"),RIGHT_STRAND,LEFT_STRAND;var right_img=new Image();right_img.src="../images/visualization/strand_right.png";right_img.onload=function(){RIGHT_STRAND=CONTEXT.createPattern(right_img,"repeat")};var left_img=new Image();left_img.src="../images/visualization/strand_left.png";left_img.onload=function(){LEFT_STRAND=CONTEXT.createPattern(left_img,"repeat")};var right_img_inv=new Image();right_img_inv.src="../images/visualization/strand_right_inv.png";right_img_inv.onload=function(){RIGHT_STRAND_INV=CONTEXT.createPattern(right_img_inv,"repeat")};var left_img_inv=new Image();left_img_inv.src="../images/visualization/strand_left_inv.png";left_img_inv.onload=function(){LEFT_STRAND_INV=CONTEXT.createPattern(
left_img_inv,"repeat")};function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}var Cache=function(a){this.num_elements=a;this.obj_cache={};this.key_ary=[]};$.extend(Cache.prototype,{get:function(b){var a=this.key_ary.indexOf(b);if(a!=-1){this.key_ary.splice(a,1);this.key_ary.push(b)}return this.obj_cache[b]},set:function(b,c){if(!this.obj_cache[b]){if(this.key_ary.length>=this.num_elements){var a=this.key_ary.shift();delete this.obj_cache[a]}this.key_ary.push(b)}this.obj_cache[b]=c;return c}});var View=function(b,a){this.chrom=b;this.tracks=[];this.max_low=0;this.max_high=a;this.center=(this.max_high-this.max_low)/2;this.span=this.max_high-this.max_low;this.zoom_factor=2;this.zoom_level=0};$.extend(View.prototype,{add_track:function(a){a.view=this;this.tracks.push(a);if(a.init){a.init()}},redraw:function(){var d=this.span/Math.pow(this.zoom_factor,this.zoom_level),b=this.center-(d/2),e=b+d;if(b<0){b=0;e=b+d}else{if(e>this.max_high){
e=this.max_high;b=e-d}}this.low=Math.floor(b);this.high=Math.ceil(e);this.center=Math.round(this.low+(this.high-this.low)/2);$("#overview-box").css({left:(this.low/this.span)*$("#overview-viewport").width(),width:Math.max(12,((this.high-this.low)/this.span)*$("#overview-viewport").width())}).show();$("#low").val(commatize(this.low));$("#high").val(commatize(this.high));for(var c=0,a=this.tracks.length;c<a;c++){this.tracks[c].draw()}$("#bottom-spacer").remove();$("#viewport").append('<div id="bottom-spacer" style="height: 200px;"></div>')},zoom_in:function(a){if(this.max_high===0||this.high-this.low<30){return}if(a){this.center=a/$(document).width()*(this.high-this.low)+this.low}this.zoom_level+=1;this.redraw()},zoom_out:function(){if(this.max_high===0){return}if(this.zoom_level<=0){this.zoom_level=0;return}this.zoom_level-=1;this.redraw()}});var Track=function(a,b){this.name=a;this.parent_element=b;this.make_container()};$.extend(Track.prototype,{make_container:function(){th
is.header_div=$("<div class='track-header'>").text(this.name);this.content_div=$("<div class='track-content'>");this.container_div=$("<div class='track'></div>").append(this.header_div).append(this.content_div);this.parent_element.append(this.container_div)}});var TiledTrack=function(){this.tile_cache=new Cache(CACHED_TILES)};$.extend(TiledTrack.prototype,Track.prototype,{draw:function(){var h=this.view.low,d=this.view.high,e=d-h;var c=Math.pow(10,Math.ceil(Math.log(e/DENSITY)/Math.log(10)));c=Math.max(c,0.1);c=Math.min(c,1000000);var j=$("<div style='position: relative;'></div>");this.content_div.children(":first").remove();this.content_div.append(j);var k=this.content_div.width()/e;var g;var a=Math.floor(h/c/DENSITY);while((a*DENSITY*c)<d){var i=this.content_div.width()+"_"+this.view.zoom_level+"_"+a;var b=this.tile_cache.get(i);if(b){var f=a*DENSITY*c;b.css({left:(f-this.view.low)*k});j.append(b)}else{g=this.draw_tile(c,a,j,k);if(g){this.tile_cache.set(i,g)}}a+=1}}});var
LabelTrack=function(a){Track.call(this,null,a);this.container_div.addClass("label-track")};$.extend(LabelTrack.prototype,Track.prototype,{draw:function(){var c=this.view,d=c.high-c.low,g=Math.floor(Math.pow(10,Math.floor(Math.log(d)/Math.log(10)))),a=Math.floor(c.low/g)*g,e=this.content_div.width(),b=$("<div style='position: relative; height: 1.3em;'></div>");while(a<c.high){var f=(a-c.low)/d*e;b.append($("<div class='label'>"+commatize(a)+"</div>").css({position:"absolute",left:f-1}));a+=g}this.content_div.children(":first").remove();this.content_div.append(b)}});var LineTrack=function(c,b,a){Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.track_type="line";this.height_px=(a?a:100);this.container_div.addClass("line-track");this.dataset_id=b;this.cache=new Cache(CACHED_DATA)};$.extend(LineTrack.prototype,TiledTrack.prototype,{init:function(){var a=this;a.content_div.text(DATA_LOADING);$.getJSON(data_url,{stats:true,track_type:a.track_type,chrom:a.view.chrom,low:
null,high:null,dataset_id:a.dataset_id},function(c){if(!c||c=="error"){a.container_div.addClass("error");a.content_div.text(DATA_ERROR)}else{if(c=="no data"){a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}else{if(c=="pending"){a.container_div.addClass("pending");a.content_div.text(DATA_PENDING);setTimeout(function(){a.init()},5000)}else{a.content_div.text("");a.content_div.css("height",a.height_px+"px");a.min_value=c.min;a.max_value=c.max;a.vertical_range=a.max_value-a.min_value;var d=$("<div class='yaxislabel'>"+a.min_value+"</div>");var b=$("<div class='yaxislabel'>"+a.max_value+"</div>");b.css({position:"relative",top:"35px"});b.prependTo(a.container_div);d.css({position:"relative",top:a.height_px+32+"px",});d.prependTo(a.container_div);a.draw()}}}})},get_data:function(d,b){var c=this,a=b*DENSITY*d,f=(b+1)*DENSITY*d,e=d+"_"+b;$.getJSON(data_url,{track_type:this.track_type,chrom:this.view.chrom,low:a,high:f,dataset_id:this.dataset_id},function(g){c.cache[
e]=g;$(document).trigger("redraw")})},draw_tile:function(d,a,m,o){if(!this.vertical_range){return}var h=a*DENSITY*d,b=DENSITY*d,c=$("<canvas class='tile'></canvas>"),l=d+"_"+a;if(!this.cache[l]){this.get_data(d,a);return}var g=this.cache[l];c.css({position:"absolute",top:0,left:(h-this.view.low)*o});c.get(0).width=Math.ceil(b*o);c.get(0).height=this.height_px;var n=c.get(0).getContext("2d");var e=false;n.beginPath();for(var f=0;f<g.length-1;f++){var k=g[f][0]-h;var j=g[f][1];if(isNaN(j)){e=false}else{k=k*o;j=(j-this.min_value)/this.vertical_range*this.height_px;if(e){n.lineTo(k,j)}else{n.moveTo(k,j);e=true}}}n.stroke();m.append(c);return c}});var FeatureTrack=function(c,b,a){Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.track_type="feature";this.height_px=(a?a:100);this.container_div.addClass("feature-track");this.dataset_id=b;this.zo_slots={};this.show_labels_scale=0.001;this.showing_labels=false;this.vertical_gap=10;this.base_color="#2C3143"};$.extend(Featur
eTrack.prototype,TiledTrack.prototype,{init:function(){var a=this;a.content_div.text(DATA_LOADING);$.getJSON(data_url,{track_type:a.track_type,low:a.view.max_low,high:a.view.max_high,dataset_id:a.dataset_id,chrom:a.view.chrom},function(b){if(b=="error"){a.container_div.addClass("error");a.content_div.text(DATA_ERROR)}else{if(b.length===0||b=="no data"){a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}else{if(b=="pending"){a.container_div.addClass("pending");a.content_div.text(DATA_PENDING);setTimeout(function(){a.init()},5000)}else{a.content_div.text("");a.content_div.css("height",a.height_px+"px");a.values=b;a.calc_slots();a.slots=a.zo_slots;a.draw()}}}})},calc_slots:function(o){var c=[],b=this.content_div.width()/(this.view.high-this.view.low),g=this.show_labels_scale,a=this.view.max_high,e=this.view.max_low;if(o){this.zi_slots={}}var m=$("<canvas></canvas>").get(0).getContext("2d");for(var f=0,h=this.values.length;f<h;f++){var k,l,n=this.values[f];if(o){k=
Math.floor((n.start-e)*g);k-=m.measureText(n.name).width;l=Math.ceil((n.end-e)*g)}else{k=Math.floor((n.start-e)*b);l=Math.ceil((n.end-e)*b)}var d=0;while(true){if(c[d]===undefined||c[d]<k){c[d]=l;if(o){this.zi_slots[n.name]=d}else{this.zo_slots[n.name]=d}break}d++}}this.height_px=c.length*this.vertical_gap+15;this.content_div.css("height",this.height_px+"px")},draw_tile:function(w,B,g,n){if(!this.values){return null}if(n>this.show_labels_scale&&!this.showing_labels){this.showing_labels=true;if(!this.zi_slots){this.calc_slots(true)}this.slots=this.zi_slots}else{if(n<=this.show_labels_scale&&this.showing_labels){this.showing_labels=false;this.slots=this.zo_slots}}var C=B*DENSITY*w,c=(B+1)*DENSITY*w,q=DENSITY*w;var u=Math.ceil(q*n),t=this.height_px,s=$("<canvas class='tile'></canvas>");s.css({position:"absolute",top:0,left:(C-this.view.low)*n});s.get(0).width=u;s.get(0).height=t;var v=s.get(0).getContext("2d");v.fillStyle=this.base_color;v.font="10px monospace";v.textAlign="rig
ht";var y=0;for(var z=0,A=this.values.length;z<A;z++){var f=this.values[z];if(f.start<=c&&f.end>=C){var e=Math.floor(Math.max(0,(f.start-C)*n)),h=Math.ceil(Math.min(u,(f.end-C)*n)),d=this.slots[f.name]*this.vertical_gap;var a,G,b=null,o=null;if(f.thick_start&&f.thick_end){b=Math.floor(Math.max(0,(f.thick_start-C)*n));o=Math.ceil(Math.min(u,(f.thick_end-C)*n))}if(!this.showing_labels){v.fillRect(e,d+5,h-e,1)}else{if(v.fillText){v.fillText(f.name,e-1,d+8)}var E=f.blocks;if(E){if(f.strand){if(f.strand=="+"){v.fillStyle=RIGHT_STRAND}else{if(f.strand=="-"){v.fillStyle=LEFT_STRAND}}v.fillRect(e,d,h-e,10);v.fillStyle=this.base_color}for(var x=0,F=E.length;x<F;x++){var m=E[x],l=Math.floor(Math.max(0,(m[0]-C)*n)),D=Math.ceil(Math.min(u,(m[1]-C)*n));a=5;G=3;v.fillRect(l,d+G,D-l,a);if(b&&(l<o||D>b)){a=9;G=1;var r=Math.max(l,b),p=Math.min(D,o);v.fillRect(r,d+G,p-r,a)}}}else{a=9;G=1;v.fillRect(e,d+G,h-e,a);if(f.strand){if(f.strand=="+"){v.fillStyle=RIGHT_STRAND_INV}else{if(f.strand=="-")
{v.fillStyle=LEFT_STRAND_INV}}v.fillRect(e,d,h-e,10);v.fillStyle=this.base_color}}}y++}}g.append(s);return s}});
\ No newline at end of file
diff -r d872c1e16afb -r 2300a80d80e5 templates/history/grid.mako
--- a/templates/history/grid.mako Tue Nov 03 12:52:01 2009 -0500
+++ b/templates/history/grid.mako Tue Nov 03 13:04:35 2009 -0500
@@ -29,71 +29,20 @@
});
// Set up autocomplete for tag filter input.
- var t = $("#input-tag-filter");
- t.keyup( function( e )
- {
- if ( e.keyCode == 27 )
- {
- // Escape key
- $(this).trigger( "blur" );
- } else if (
- ( e.keyCode == 13 ) || // Return Key
- ( e.keyCode == 188 ) || // Comma
- ( e.keyCode == 32 ) // Space
- )
- {
- //
- // Check input.
- //
-
- new_value = this.value;
-
- // Do nothing if return key was used to autocomplete.
- if (return_key_pressed_for_autocomplete == true)
- {
- return_key_pressed_for_autocomplete = false;
- return false;
- }
-
- // Suppress space after a ":"
- if ( new_value.indexOf(": ", new_value.length - 2) != -1)
- {
- this.value = new_value.substring(0, new_value.length-1);
- return false;
- }
-
- // Remove trigger keys from input.
- if ( (e.keyCode == 188) || (e.keyCode == 32) )
- new_value = new_value.substring( 0 , new_value.length - 1 );
-
- // Trim whitespace.
- new_value = new_value.replace(/^\s+|\s+$/g,"");
-
- // Too short?
- if (new_value.length < 3)
- return false;
-
- //
- // New tag OK.
- //
- }
- });
+ var t = $("#input-tags-filter");
- // Add autocomplete to input.
- var format_item_func = function(key, row_position, num_rows, value, search_term)
- {
- tag_name_and_value = value.split(":");
- return (tag_name_and_value.length == 1 ? tag_name_and_value[0] :tag_name_and_value[1]);
- //var array = new Array(key, value, row_position, num_rows,
- //search_term ); return "\"" + array.join("*") + "\"";
- }
var autocomplete_options =
- { selectFirst: false, formatItem : format_item_func, autoFill: false, highlight: false, mustMatch: true };
+ { selectFirst: false, autoFill: false, highlight: false, mustMatch: false };
t.autocomplete("${h.url_for( controller='tag', action='tag_autocomplete_data', item_class='History' )}", autocomplete_options);
-
- $("#page-select").change(navigate_to_page);
+ // Set up autocomplete for name filter input.
+ var t2 = $("#input-name-filter");
+
+ var autocomplete_options =
+ { selectFirst: false, autoFill: false, highlight: false, mustMatch: false };
+
+ t2.autocomplete("${h.url_for( controller='history', action='name_autocomplete_data' )}", autocomplete_options);
});
## Can this be moved into base.mako?
%if refresh_frames:
@@ -125,21 +74,52 @@
%endif
%endif
+ // Filter and sort args for grid.
+ var filter_args = ${h.to_json_string(cur_filter_dict)};
+ var sort_key = "${sort_key}";
+
//
- // Add a tag to the current grid filter; this adds the tag to the filter and then issues a request to refresh the grid.
+ // Add tag to grid filter.
//
function add_tag_to_grid_filter(tag_name, tag_value)
{
- // Use tag as a filter: replace TAGNAME with tag_name and issue query.
- <%
- url_args = {}
- if "tags" in cur_filter_dict and cur_filter_dict["tags"] != "All":
- url_args["f-tags"] = cur_filter_dict["tags"].encode("utf-8") + ", TAGNAME"
- else:
- url_args["f-tags"] = "TAGNAME"
- %>
- var url_base = "${url( url_args )}";
- var url = url_base.replace("TAGNAME", tag_name);
+ // Put tag name and value together.
+ var tag = tag_name + (tag_value != null && tag_value != "" ? ":" + tag_value : "");
+ add_condition_to_grid_filter("tags", tag, true);
+ }
+
+ //
+ // Add a filter to the current grid filter; this adds the filter and then issues a request to refresh the grid.
+ //
+ function add_condition_to_grid_filter(name, value, append)
+ {
+ // Update filter arg with new condition.
+ if (append)
+ {
+ // Append value.
+ var cur_val = filter_args[name];
+ if (cur_val != "All")
+ cur_val = cur_val + ", " + value;
+ else
+ cur_val = value;
+ filter_args[name] = cur_val;
+ }
+ else
+ {
+ // Replace value.
+ filter_args[name] = value;
+ }
+
+ // Build URL with filter args, sort key.
+ var filter_arg_value_strs = new Array();
+ var i = 0;
+ for (arg in filter_args)
+ {
+ filter_arg_value_strs[i++] = "f-" + arg + "=" + filter_args[arg];
+ }
+ var filter_str = filter_arg_value_strs.join("&");
+ var url_base = "${h.url_for( controller='history', action='list')}";
+ var url = url_base + "?" + filter_str + "&sort=" + sort_key;
self.location = url;
}
@@ -154,7 +134,7 @@
var url = url_base.replace("PAGE", page_num);
self.location = url;
}
-
+
</script>
</%def>
@@ -175,47 +155,95 @@
<div class="grid-header">
<h2>${grid.title}</h2>
-
- ## Print grid filter.
- <form name="history_actions" action="javascript:add_tag_to_grid_filter($('#input-tag-filter').attr('value'))" method="get" >
- <strong>Filter: </strong>
- %for column in grid.columns:
- %if column.filterable:
- <span> by ${column.label.lower()}:</span>
- ## For now, include special case to handle tags.
- %if column.key == "tags":
- %if cur_filter_dict[column.key] != "All":
- <span class="filter" "style='font-style: italic'">
- ${cur_filter_dict[column.key]}
- </span>
- <span>|</span>
+
+ ## Search box and more options filter at top of grid.
+ <div>
+ ## Grid search. TODO: use more elegant way to get free text search column.
+ <% column = grid.columns[-1] %>
+ <% use_form = False %>
+ %for i, filter in enumerate( column.get_accepted_filters() ):
+ %if i > 0:
+ <span>|</span>
+ %endif
+ %if column.key in cur_filter_dict and cur_filter_dict[column.key] == filter.args[column.key]:
+ <span class="filter" "style='font-style: italic'">${filter.label}</span>
+ %elif filter.label == "FREETEXT":
+ <form name="history_actions"
+ action="javascript:add_condition_to_grid_filter($('#input-${column.key}-filter').attr('name'),$('#input-${column.key}-filter').attr('value'),false)"
+ method="get" >
+ ${column.label}:
+ %if column.key in cur_filter_dict and cur_filter_dict[column.key] != "All":
+ <span style="font-style: italic">${cur_filter_dict[column.key]}</span>
+ <% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
+ <a href="${url( filter_all.get_url_args() )}"><img src="${h.url_for('/static/images/delete_tag_icon_gray.png')}"/></a>
+ |
%endif
- <input id="input-tag-filter" name="f-tags" type="text" value="" size="15"/>
- <span>|</span>
- %endif
-
- ## Handle other columns.
- %for i, filter in enumerate( column.get_accepted_filters() ):
- %if i > 0:
- <span>|</span>
- %endif
- %if cur_filter_dict[column.key] == filter.args[column.key]:
- <span class="filter" "style='font-style: italic'">${filter.label}</span>
- %else:
- <span class="filter"><a href="${url( filter.get_url_args() )}">${filter.label}</a></span>
- %endif
- %endfor
- <span> </span>
+ <span><input id="input-${column.key}-filter" name="${column.key}" type="text" value="" size="15"/></span>
+ <% use_form = True %>
+ %else:
+ <span class="filter"><a href="${url( filter.get_url_args() )}">${filter.label}</a></span>
%endif
%endfor
-
- ## Link to clear all filters. TODO: this should be the default filter or an empty filter.
- <%
- args = { "deleted" : "False", "tags" : "All" }
- no_filter = GridColumnFilter("Clear Filter", args)
- %>
- <span><a href="${url( no_filter.get_url_args() )}">${no_filter.label}</a></span>
- </form>
+ | <a href="" onclick="javascript:$('#more-search-options').slideToggle('fast');return false;">Advanced Search</a>
+ %if use_form:
+ </form>
+ %endif
+ </div>
+
+ ## Advanced Search
+ <div id="more-search-options" style="display: none; padding-top: 5px">
+ <table style="border: 1px solid gray;">
+ <tr><td style="text-align: left" colspan="100">
+ Advanced Search |
+ <a href=""# onclick="javascript:$('#more-search-options').slideToggle('fast');return false;">Close</a> |
+ ## Link to clear all filters.
+ <%
+ no_filter = GridColumnFilter("Clear All", default_filter_dict)
+ %>
+ <a href="${url( no_filter.get_url_args() )}">${no_filter.label}</a>
+ </td></tr>
+ %for column in grid.columns:
+ %if column.filterable:
+ <tr>
+ ## Show div if current filter has value that is different from the default filter.
+ %if cur_filter_dict[column.key] != default_filter_dict[column.key]:
+ <script type="text/javascript">
+ $('#more-search-options').css("display", "block");
+ </script>
+ %endif
+ <td style="padding-left: 10px">${column.label.lower()}:</td>
+ <td>
+ <% use_form = False %>
+ %for i, filter in enumerate( column.get_accepted_filters() ):
+ %if i > 0:
+ <span>|</span>
+ %endif
+ %if cur_filter_dict[column.key] == filter.args[column.key]:
+ <span class="filter" style="font-style: italic">${filter.label}</span>
+ %elif filter.label == "FREETEXT":
+ <form name="history_actions" action="javascript:add_condition_to_grid_filter($('#input-${column.key}-filter').attr('name'),$('#input-${column.key}-filter').attr('value'),true)"
+ method="get" >
+ %if column.key in cur_filter_dict and cur_filter_dict[column.key] != "All":
+ <span style="font-style: italic">${cur_filter_dict[column.key]}</span>
+ <% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
+ <a href="${url( filter_all.get_url_args() )}"><img src="${h.url_for('/static/images/delete_tag_icon_gray.png')}"/></a>
+ |
+ %endif
+ <span><input id="input-${column.key}-filter" name="${column.key}" type="text" value="" size="15"/></span>
+ <% use_form = True %>
+ %else:
+ <span class="filter"><a href="${url( filter.get_url_args() )}">${filter.label}</a></span>
+ %endif
+ %endfor
+ %if use_form:
+ </form>
+ %endif
+ </td>
+ </tr>
+ %endif
+ %endfor
+ </table>
+ </div>
</div>
<form name="history_actions" action="${url()}" method="post" >
<input type="hidden" name="page" value="${cur_page_num}">
@@ -291,7 +319,7 @@
extra = ""
%>
%if href:
- <td><div class="menubutton split" style="float: left;"><a class="label" href="${href}">${v}${extra}</a> </td>
+ <td><div class="menubutton split" style="float: left;"><a class="label" href="${href}">${v}</a>${extra}</td>
%else:
<td >${v}${extra}</td>
%endif
diff -r d872c1e16afb -r 2300a80d80e5 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Nov 03 12:52:01 2009 -0500
+++ b/test/base/twilltestcase.py Tue Nov 03 13:04:35 2009 -0500
@@ -1,7 +1,7 @@
import pkg_resources
pkg_resources.require( "twill==0.9" )
-import StringIO, os, sys, random, filecmp, time, unittest, urllib, logging, difflib, zipfile, tempfile
+import StringIO, os, sys, random, filecmp, time, unittest, urllib, logging, difflib, zipfile, tempfile, re
from itertools import *
import twill
@@ -311,20 +311,20 @@
def view_stored_active_histories( self, check_str='' ):
self.home()
self.visit_page( "history/list" )
- self.check_page_for_string( 'Stored histories' )
+ self.check_page_for_string( 'Saved Histories' )
self.check_page_for_string( '<input type="checkbox" name="id" value=' )
- self.check_page_for_string( 'operation=Rename&id' )
- self.check_page_for_string( 'operation=Switch&id' )
- self.check_page_for_string( 'operation=Delete&id' )
+ self.check_page_for_string( 'operation=Rename' )
+ self.check_page_for_string( 'operation=Switch' )
+ self.check_page_for_string( 'operation=Delete' )
if check_str:
self.check_page_for_string( check_str )
self.home()
def view_stored_deleted_histories( self, check_str='' ):
self.home()
self.visit_page( "history/list?f-deleted=True" )
- self.check_page_for_string( 'Stored histories' )
+ self.check_page_for_string( 'Saved Histories' )
self.check_page_for_string( '<input type="checkbox" name="id" value=' )
- self.check_page_for_string( 'operation=Undelete&id' )
+ self.check_page_for_string( 'operation=Undelete' )
if check_str:
self.check_page_for_string( check_str )
self.home()
@@ -723,14 +723,14 @@
# Functions associated with browsers, cookies, HTML forms and page visits
def check_page_for_string( self, patt ):
- """Looks for 'patt' in the current browser page"""
+ """Looks for 'patt' in the current browser page"""
page = self.last_page()
for subpatt in patt.split():
if page.find( patt ) == -1:
fname = self.write_temp_file( page )
errmsg = "no match to '%s'\npage content written to '%s'" % ( patt, fname )
raise AssertionError( errmsg )
-
+
def write_temp_file( self, content ):
fd, fname = tempfile.mkstemp( suffix='.html', prefix='twilltestcase-' )
f = os.fdopen( fd, "w" )
diff -r d872c1e16afb -r 2300a80d80e5 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Tue Nov 03 12:52:01 2009 -0500
+++ b/test/functional/test_history_functions.py Tue Nov 03 13:04:35 2009 -0500
@@ -179,7 +179,7 @@
self.share_current_history( regular_user1.email,
check_str=history3.name )
# Check out list of histories to make sure history3 was shared
- self.view_stored_active_histories( check_str='operation=sharing">shared' )
+ self.view_stored_active_histories( check_str='operation=sharing' )
# Enable importing history3 via a URL
self.enable_import_via_link( self.security.encode_id( history3.id ),
check_str='Unshare',
1
0