galaxy-commits
Threads by month
- ----- 2025 -----
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
December 2011
- 1 participant
- 92 discussions

commit/galaxy-central: dan: Add __non_zero__ method to ToolParameterValueWrappers. Resolves #661.
by Bitbucket 09 Dec '11
by Bitbucket 09 Dec '11
09 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/25f3c2c08e97/
changeset: 25f3c2c08e97
user: dan
date: 2011-12-09 17:10:44
summary: Add __non_zero__ method to ToolParameterValueWrappers. Resolves #661.
affected #: 1 file
diff -r 8718607f7ae5554b6a43caff974a4949eb504950 -r 25f3c2c08e97dc2b8d5ea2885d5083b887c65fba lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2141,7 +2141,14 @@
def __init__( self, value ):
self.value = value
-class RawObjectWrapper( object ):
+class ToolParameterValueWrapper( object ):
+ """
+ Base class for object that Wraps a Tool Parameter and Value.
+ """
+ def __nonzero__( self ):
+ return bool( self.value )
+
+class RawObjectWrapper( ToolParameterValueWrapper ):
"""
Wraps an object so that __str__ returns module_name:class_name.
"""
@@ -2152,7 +2159,7 @@
def __getattr__( self, key ):
return getattr( self.obj, key )
-class LibraryDatasetValueWrapper( object ):
+class LibraryDatasetValueWrapper( ToolParameterValueWrapper ):
"""
Wraps an input so that __str__ gives the "param_dict" representation.
"""
@@ -2173,7 +2180,7 @@
def __getattr__( self, key ):
return getattr( self.value, key )
-class InputValueWrapper( object ):
+class InputValueWrapper( ToolParameterValueWrapper ):
"""
Wraps an input so that __str__ gives the "param_dict" representation.
"""
@@ -2186,7 +2193,7 @@
def __getattr__( self, key ):
return getattr( self.value, key )
-class SelectToolParameterWrapper( object ):
+class SelectToolParameterWrapper( ToolParameterValueWrapper ):
"""
Wraps a SelectTooParameter so that __str__ returns the selected value, but all other
attributes are accessible.
@@ -2218,7 +2225,7 @@
def __getattr__( self, key ):
return getattr( self.input, key )
-class DatasetFilenameWrapper( object ):
+class DatasetFilenameWrapper( ToolParameterValueWrapper ):
"""
Wraps a dataset so that __str__ returns the filename, but all other
attributes are accessible.
@@ -2278,6 +2285,9 @@
return self.false_path
else:
return getattr( self.dataset, key )
+
+ def __nonzero__( self ):
+ return bool( self.dataset )
def json_fix( val ):
if isinstance( val, list ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dannon: Update collect_child_datasets to work with ObjectStore.
by Bitbucket 09 Dec '11
by Bitbucket 09 Dec '11
09 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8718607f7ae5/
changeset: 8718607f7ae5
user: dannon
date: 2011-12-09 16:19:20
summary: Update collect_child_datasets to work with ObjectStore.
affected #: 1 file
diff -r 187267753315c372a684a5170d053a6b561aebb5 -r 8718607f7ae5554b6a43caff974a4949eb504950 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1890,7 +1890,7 @@
sa_session=self.sa_session )
self.app.security_agent.copy_dataset_permissions( outdata.dataset, child_dataset.dataset )
# Move data from temp location to dataset location
- shutil.move( filename, child_dataset.file_name )
+ self.app.object_store.update_from_file(child_dataset.dataset.id, filename, create=True)
self.sa_session.add( child_dataset )
self.sa_session.flush()
child_dataset.set_size()
@@ -1902,7 +1902,7 @@
job = None
for assoc in outdata.creating_job_associations:
job = assoc.job
- break
+ break
if job:
assoc = self.app.model.JobToOutputDatasetAssociation( '__new_child_file_%s|%s__' % ( name, designation ), child_dataset )
assoc.job = job
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dannon: Fix variable multiple outputs (collect_primary_datasets) to work with ObjectStore.
by Bitbucket 09 Dec '11
by Bitbucket 09 Dec '11
09 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/187267753315/
changeset: 187267753315
user: dannon
date: 2011-12-09 15:50:15
summary: Fix variable multiple outputs (collect_primary_datasets) to work with ObjectStore.
affected #: 1 file
diff -r 1da0c76f4000d6627d6ce66a7de55a6b80753661 -r 187267753315c372a684a5170d053a6b561aebb5 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1958,7 +1958,7 @@
self.sa_session.add( primary_data )
self.sa_session.flush()
# Move data from temp location to dataset location
- shutil.move( filename, primary_data.file_name )
+ self.app.object_store.update_from_file(primary_data.dataset.id, filename, create=True)
primary_data.set_size()
primary_data.name = "%s (%s)" % ( outdata.name, designation )
primary_data.info = outdata.info
@@ -1970,7 +1970,7 @@
job = None
for assoc in outdata.creating_job_associations:
job = assoc.job
- break
+ break
if job:
assoc = self.app.model.JobToOutputDatasetAssociation( '__new_primary_file_%s|%s__' % ( name, designation ), primary_data )
assoc.job = job
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

09 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1da0c76f4000/
changeset: 1da0c76f4000
user: natefoo
date: 2011-12-09 15:40:40
summary: Fix unit test broken by object store.
affected #: 2 files
diff -r 3ccece0dbc02812da682481bf1196d69bcc38d67 -r 1da0c76f4000d6627d6ce66a7de55a6b80753661 lib/galaxy/model/mapping_tests.py
--- a/lib/galaxy/model/mapping_tests.py
+++ b/lib/galaxy/model/mapping_tests.py
@@ -36,8 +36,9 @@
assert hists[0].user == users[0]
assert hists[1].user is None
assert hists[1].datasets[0].metadata.chromCol == 1
- id = hists[1].datasets[0].id
- assert hists[1].datasets[0].file_name == os.path.join( "/tmp", *directory_hash_id( id ) ) + ( "/dataset_%d.dat" % id )
+ # The filename test has moved to objecstore
+ #id = hists[1].datasets[0].id
+ #assert hists[1].datasets[0].file_name == os.path.join( "/tmp", *directory_hash_id( id ) ) + ( "/dataset_%d.dat" % id )
# Do an update and check
hists[1].name = "History 2b"
model.session.flush()
diff -r 3ccece0dbc02812da682481bf1196d69bcc38d67 -r 1da0c76f4000d6627d6ce66a7de55a6b80753661 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -200,6 +200,15 @@
"""
Standard Galaxy object store, stores objects in files under a specific
directory on disk.
+
+ >>> from galaxy.util.bunch import Bunch
+ >>> import tempfile
+ >>> file_path=tempfile.mkdtemp()
+ >>> s = DiskObjectStore(Bunch(umask=077), file_path=file_path)
+ >>> s.create(1)
+ >>> s.exists(1)
+ True
+ >>> assert s.get_filename(1) == file_path + '/000/dataset_1.dat'
"""
def __init__(self, config, file_path=None, extra_dirs=None):
super(DiskObjectStore, self).__init__()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3ccece0dbc02/
changeset: 3ccece0dbc02
user: dannon
date: 2011-12-09 13:01:31
summary: Job.fail() failure bugfix.
affected #: 1 file
diff -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 -r 3ccece0dbc02812da682481bf1196d69bcc38d67 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -500,7 +500,7 @@
# If the job was deleted, call tool specific fail actions (used for e.g. external metadata) and clean up
if self.tool:
self.tool.job_failed( self, message, exception )
- if self.app.cleanup_job == 'always' or (self.app.config.cleanup_job == 'onsuccess' and job.state == job.states.DELETED):
+ if self.app.config.cleanup_job == 'always' or (self.app.config.cleanup_job == 'onsuccess' and job.state == job.states.DELETED):
self.cleanup()
def change_state( self, state, info = False ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: natefoo: Rename HierarchicalObjectStore to DistributedObjectStore.
by Bitbucket 08 Dec '11
by Bitbucket 08 Dec '11
08 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7611d5d306bb/
changeset: 7611d5d306bb
user: natefoo
date: 2011-12-08 23:46:59
summary: Rename HierarchicalObjectStore to DistributedObjectStore.
affected #: 5 files
diff -r c6735493b09f507e02564e6c79f9fef39d7bf7fa -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 distributed_object_store_conf.xml.sample
--- /dev/null
+++ b/distributed_object_store_conf.xml.sample
@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+<backends>
+ <backend name="files1" type="disk" weight="1">
+ <files_dir path="database/files1"/>
+ <extra_dir type="temp" path="database/tmp1"/>
+ <extra_dir type="job_work" path="database/job_working_directory1"/>
+ </backend>
+ <backend name="files2" type="disk" weight="1">
+ <files_dir path="database/files2"/>
+ <extra_dir type="temp" path="database/tmp2"/>
+ <extra_dir type="job_work" path="database/job_working_directory2"/>
+ </backend>
+</backends>
diff -r c6735493b09f507e02564e6c79f9fef39d7bf7fa -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 hierarchical_object_store_conf.xml.sample
--- a/hierarchical_object_store_conf.xml.sample
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0"?>
-<backends>
- <backend name="files1" type="disk" weight="1">
- <files_dir path="database/files1"/>
- <extra_dir type="temp" path="database/tmp1"/>
- <extra_dir type="job_work" path="database/job_working_directory1"/>
- </backend>
- <backend name="files2" type="disk" weight="1">
- <files_dir path="database/files2"/>
- <extra_dir type="temp" path="database/tmp2"/>
- <extra_dir type="job_work" path="database/job_working_directory2"/>
- </backend>
-</backends>
diff -r c6735493b09f507e02564e6c79f9fef39d7bf7fa -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -156,7 +156,7 @@
self.s3_bucket = kwargs.get( 's3_bucket', None)
self.use_reduced_redundancy = kwargs.get( 'use_reduced_redundancy', False )
self.object_store_cache_size = float(kwargs.get( 'object_store_cache_size', -1 ))
- self.hierarchical_object_store_config_file = kwargs.get( 'hierarchical_object_store_config_file', None )
+ self.distributed_object_store_config_file = kwargs.get( 'distributed_object_store_config_file', None )
# Parse global_conf and save the parser
global_conf = kwargs.get( 'global_conf', None )
global_conf_parser = ConfigParser.ConfigParser()
diff -r c6735493b09f507e02564e6c79f9fef39d7bf7fa -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -854,7 +854,7 @@
return None
-class HierarchicalObjectStore(ObjectStore):
+class DistributedObjectStore(ObjectStore):
"""
ObjectStore that defers to a list of backends, for getting objects the
first store where the object exists is used, objects are created in a
@@ -862,22 +862,22 @@
"""
def __init__(self, config):
- super(HierarchicalObjectStore, self).__init__()
- assert config is not None, "hierarchical object store ('object_store = hierarchical') " \
+ super(DistributedObjectStore, self).__init__()
+ assert config is not None, "distributed object store ('object_store = distributed') " \
"requires a config file, please set one in " \
- "'hierarchical_object_store_config_file')"
- self.hierarchical_config = config
+ "'distributed_object_store_config_file')"
+ self.distributed_config = config
self.backends = {}
self.weighted_backend_names = []
random.seed()
- self.__parse_hierarchical_config(config)
+ self.__parse_distributed_config(config)
- def __parse_hierarchical_config(self, config):
- tree = util.parse_xml(self.hierarchical_config)
+ def __parse_distributed_config(self, config):
+ tree = util.parse_xml(self.distributed_config)
root = tree.getroot()
- log.debug('Loading backends for hierarchical object store from %s' % self.hierarchical_config)
+ log.debug('Loading backends for distributed object store from %s' % self.distributed_config)
for elem in [ e for e in root if e.tag == 'backend' ]:
name = elem.get('name')
weight = int(elem.get('weight', 1))
@@ -980,6 +980,16 @@
return store
return None
+class HierarchicalObjectStore(ObjectStore):
+ """
+ ObjectStore that defers to a list of backends, for getting objects the
+ first store where the object exists is used, objects are always created
+ in the first store.
+ """
+
+ def __init__(self, backends=[]):
+ super(HierarchicalObjectStore, self).__init__()
+
def build_object_store_from_config(config):
""" Depending on the configuration setting, invoke the appropriate object store
"""
@@ -990,8 +1000,10 @@
os.environ['AWS_ACCESS_KEY_ID'] = config.aws_access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = config.aws_secret_key
return S3ObjectStore(config=config)
+ elif store == 'distributed':
+ return DistributedObjectStore(config.distributed_object_store_config_file)
elif store == 'hierarchical':
- return HierarchicalObjectStore(config.hierarchical_object_store_config_file)
+ return HierarchicalObjectStore()
def convert_bytes(bytes):
""" A helper function used for pretty printing disk usage """
diff -r c6735493b09f507e02564e6c79f9fef39d7bf7fa -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -437,7 +437,7 @@
# -- Beta features
-# Object store mode (valid options are: disk, s3, hierarchical)
+# Object store mode (valid options are: disk, s3, distributed, hierarchical)
#object_store = s3
#aws_access_key = <AWS access key>
#aws_secret_key = <AWS secret key>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7f783a4889f7/
changeset: 7f783a4889f7
user: natefoo
date: 2011-12-08 23:34:19
summary: Object store bug fix (direct filesystem access in the default tool action)
affected #: 1 file
diff -r 8d668e1d51520f7663915ff5535852e47a7dd235 -r 7f783a4889f74bacf61a97a8e8d39ebda1cc9b9d lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -291,14 +291,11 @@
trans.sa_session.flush()
trans.app.security_agent.set_all_dataset_permissions( data.dataset, output_permissions )
# Create an empty file immediately
- # open( data.file_name, "w" ).close()
trans.app.object_store.create( data.id, store_name=store_name )
if not store_name_set:
# Ensure all other datasets in this job are created in the same store
store_name = trans.app.object_store.store_name( data.id )
store_name_set = True
- # Fix permissions
- util.umask_fix_perms( data.file_name, trans.app.config.umask, 0666 )
# This may not be neccesary with the new parent/child associations
data.designation = name
# Copy metadata from one of the inputs if requested.
https://bitbucket.org/galaxy/galaxy-central/changeset/95529ba09179/
changeset: 95529ba09179
user: natefoo
date: 2011-12-08 23:45:48
summary: Missing part of the last commit.
affected #: 1 file
diff -r 7f783a4889f74bacf61a97a8e8d39ebda1cc9b9d -r 95529ba09179dcd08da79f1c67151a138284e8b1 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -204,6 +204,7 @@
def __init__(self, config, file_path=None, extra_dirs=None):
super(DiskObjectStore, self).__init__()
self.file_path = file_path or config.file_path
+ self.config = config
if extra_dirs is not None:
self.extra_dirs = extra_dirs
@@ -300,6 +301,7 @@
if not dir_only:
path = os.path.join(path, alt_name if alt_name else "dataset_%s.dat" % dataset_id)
open(path, 'w').close()
+ util.umask_fix_perms( path, self.config.umask, 0666 )
def empty(self, dataset_id, **kwargs):
return os.path.getsize(self.get_filename(dataset_id, **kwargs)) > 0
https://bitbucket.org/galaxy/galaxy-central/changeset/c6735493b09f/
changeset: c6735493b09f
user: natefoo
date: 2011-12-08 23:46:05
summary: Merge.
affected #: 1 file
diff -r 95529ba09179dcd08da79f1c67151a138284e8b1 -r c6735493b09f507e02564e6c79f9fef39d7bf7fa lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -224,7 +224,7 @@
int( self.value )
except:
raise ValueError( "An integer is required" )
- elif self.value is None:
+ elif self.value is None and not self.optional:
raise ValueError( "The settings for the field named '%s' require a 'value' setting and optionally a default value which must be an integer" % self.name )
self.min = elem.get( 'min' )
self.max = elem.get( 'max' )
@@ -296,7 +296,7 @@
float( self.value )
except:
raise ValueError( "A real number is required" )
- elif self.value is None:
+ elif self.value is None and not self.optional:
raise ValueError( "The settings for this field require a 'value' setting and optionally a default value which must be a real number" )
if self.min:
try:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dan: Allow not specifying a value attribute for an optional tool parameter. Partially resolves #661.
by Bitbucket 08 Dec '11
by Bitbucket 08 Dec '11
08 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/75c27994fc9a/
changeset: 75c27994fc9a
user: dan
date: 2011-12-08 22:57:12
summary: Allow not specifying a value attribute for an optional tool parameter. Partially resolves #661.
affected #: 1 file
diff -r 8d668e1d51520f7663915ff5535852e47a7dd235 -r 75c27994fc9aa265bdecd6ad7962f0c4f51b3682 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -224,7 +224,7 @@
int( self.value )
except:
raise ValueError( "An integer is required" )
- elif self.value is None:
+ elif self.value is None and not self.optional:
raise ValueError( "The settings for the field named '%s' require a 'value' setting and optionally a default value which must be an integer" % self.name )
self.min = elem.get( 'min' )
self.max = elem.get( 'max' )
@@ -296,7 +296,7 @@
float( self.value )
except:
raise ValueError( "A real number is required" )
- elif self.value is None:
+ elif self.value is None and not self.optional:
raise ValueError( "The settings for this field require a 'value' setting and optionally a default value which must be a real number" )
if self.min:
try:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
20 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f25342f0e100/
changeset: f25342f0e100
user: afgane
date: 2011-07-05 23:48:22
summary: A very much in-progress code implementation of the ObjectStore - most of the functionality exists and works for interaction with a local file system and S3. Setting of the metadata does not work (empty files are created but never filled with content). Not sure if rerunning jobs with dependent datasets that have been deleted from cache works - some tools at least do.
affected #: 17 files
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -7,6 +7,7 @@
import galaxy.model
import galaxy.datatypes.registry
import galaxy.security
+from galaxy.objectstore import build_object_store_from_config
from galaxy.tags.tag_handler import GalaxyTagHandler
from galaxy.tools.imp_exp import load_history_imp_exp_tools
from galaxy.sample_tracking import external_service_types
@@ -30,12 +31,15 @@
# Initialize database / check for appropriate schema version
from galaxy.model.migrate.check import create_or_verify_database
create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options )
+ # Object store manager
+ self.object_store = build_object_store_from_config(self)
# Setup the database engine and ORM
from galaxy.model import mapping
self.model = mapping.init( self.config.file_path,
db_url,
self.config.database_engine_options,
- database_query_profiling_proxy = self.config.database_query_profiling_proxy )
+ database_query_profiling_proxy = self.config.database_query_profiling_proxy,
+ object_store = self.object_store )
# Security helper
self.security = security.SecurityHelper( id_secret=self.config.id_secret )
# Tag handler
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -131,6 +131,12 @@
self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
if self.nginx_upload_store:
self.nginx_upload_store = os.path.abspath( self.nginx_upload_store )
+ self.object_store = kwargs.get( 'object_store', 'disk' )
+ self.aws_access_key = kwargs.get( 'aws_access_key', None )
+ self.aws_secret_key = kwargs.get( 'aws_secret_key', None )
+ self.s3_bucket = kwargs.get( 's3_bucket', None)
+ self.use_reduced_redundancy = kwargs.get( 'use_reduced_redundancy', False )
+ self.object_store_cache_size = float(kwargs.get( 'object_store_cache_size', -1 ))
# Parse global_conf and save the parser
global_conf = kwargs.get( 'global_conf', None )
global_conf_parser = ConfigParser.ConfigParser()
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py
+++ b/lib/galaxy/datatypes/genetics.py
@@ -636,7 +636,7 @@
def set_peek( self, dataset, **kwd ):
"""
expects a .pheno file in the extra_files_dir - ugh
- note that R is wierd and does not include the row.name in
+ note that R is weird and does not include the row.name in
the header. why?"""
if not dataset.dataset.purged:
pp = os.path.join(dataset.extra_files_path,'%s.pheno' % dataset.metadata.base_name)
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -208,7 +208,7 @@
log.error( "unknown job state '%s' for job %d" % ( job_state, job.id ) )
if not self.track_jobs_in_database:
new_waiting_jobs.append( job.id )
- except Exception, e:
+ except Exception:
log.exception( "failure running job %d" % job.id )
# Update the waiting list
self.waiting_jobs = new_waiting_jobs
@@ -332,7 +332,6 @@
out_data = dict( [ ( da.name, da.dataset ) for da in job.output_datasets ] )
inp_data.update( [ ( da.name, da.dataset ) for da in job.input_library_datasets ] )
out_data.update( [ ( da.name, da.dataset ) for da in job.output_library_datasets ] )
-
# Set up output dataset association for export history jobs. Because job
# uses a Dataset rather than an HDA or LDA, it's necessary to set up a
# fake dataset association that provides the needed attributes for
@@ -428,6 +427,10 @@
dataset.dataset.set_total_size()
if dataset.ext == 'auto':
dataset.extension = 'data'
+ # Update (non-library) job output datasets through the object store
+ if dataset not in job.output_library_datasets:
+ print "====== Handing failed job's dataset '%s' with name '%s' to object store" % (dataset.id, dataset.file_name)
+ self.app.object_store.update_from_file(dataset.id, create=True)
self.sa_session.add( dataset )
self.sa_session.flush()
job.state = model.Job.states.ERROR
@@ -538,11 +541,14 @@
else:
# Security violation.
log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, self.working_directory ) )
-
dataset.blurb = 'done'
dataset.peek = 'no peek'
dataset.info = context['stdout'] + context['stderr']
dataset.set_size()
+ # Update (non-library) job output datasets through the object store
+ if dataset not in job.output_library_datasets:
+ print "===+=== Handing dataset '%s' with name '%s' to object store" % (dataset.id, dataset.file_name)
+ self.app.object_store.update_from_file(dataset.id, create=True)
if context['stderr']:
dataset.blurb = "error"
elif dataset.has_data():
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -522,6 +522,7 @@
FAILED_METADATA = 'failed_metadata' )
permitted_actions = get_permitted_actions( filter='DATASET' )
file_path = "/tmp/"
+ object_store = None # This get initialized in mapping.py (method init) by app.py
engine = None
def __init__( self, id=None, state=None, external_filename=None, extra_files_path=None, file_size=None, purgable=True ):
self.id = id
@@ -535,17 +536,14 @@
def get_file_name( self ):
if not self.external_filename:
assert self.id is not None, "ID must be set before filename used (commit the object)"
- # First try filename directly under file_path
- filename = os.path.join( self.file_path, "dataset_%d.dat" % self.id )
- # Only use that filename if it already exists (backward compatibility),
- # otherwise construct hashed path
- if not os.path.exists( filename ):
- dir = os.path.join( self.file_path, *directory_hash_id( self.id ) )
+ assert self.object_store is not None, "Object Store has not been initialized for dataset %s" % self.id
+ print "Calling get_filename 1", self.object_store
+ filename = self.object_store.get_filename( self.id )
+ # print 'getting filename: ', filename
+ if not self.object_store.exists( self.id ):
# Create directory if it does not exist
- if not os.path.exists( dir ):
- os.makedirs( dir )
- # Return filename inside hashed directory
- return os.path.abspath( os.path.join( dir, "dataset_%d.dat" % self.id ) )
+ self.object_store.create( self.id, dir_only=True )
+ return filename
else:
filename = self.external_filename
# Make filename absolute
@@ -558,15 +556,8 @@
file_name = property( get_file_name, set_file_name )
@property
def extra_files_path( self ):
- if self._extra_files_path:
- path = self._extra_files_path
- else:
- path = os.path.join( self.file_path, "dataset_%d_files" % self.id )
- #only use path directly under self.file_path if it exists
- if not os.path.exists( path ):
- path = os.path.join( os.path.join( self.file_path, *directory_hash_id( self.id ) ), "dataset_%d_files" % self.id )
- # Make path absolute
- return os.path.abspath( path )
+ print "Calling get_filename 2", self.object_store
+ return self.object_store.get_filename( self.id, dir_only=True, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id)
def get_size( self, nice_size=False ):
"""Returns the size of the data on disk"""
if self.file_size:
@@ -575,20 +566,14 @@
else:
return self.file_size
else:
- try:
- if nice_size:
- return galaxy.datatypes.data.nice_size( os.path.getsize( self.file_name ) )
- else:
- return os.path.getsize( self.file_name )
- except OSError:
- return 0
+ if nice_size:
+ return galaxy.datatypes.data.nice_size( self.object_store.size(self.id) )
+ else:
+ return self.object_store.size(self.id)
def set_size( self ):
"""Returns the size of the data on disk"""
- try:
- if not self.file_size:
- self.file_size = os.path.getsize( self.file_name )
- except OSError:
- self.file_size = 0
+ if not self.file_size:
+ self.file_size = self.object_store.size(self.id)
def get_total_size( self ):
if self.total_size is not None:
return self.total_size
@@ -603,8 +588,9 @@
if self.file_size is None:
self.set_size()
self.total_size = self.file_size or 0
- for root, dirs, files in os.walk( self.extra_files_path ):
- self.total_size += sum( [ os.path.getsize( os.path.join( root, file ) ) for file in files ] )
+ if self.object_store.exists(self.id, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id, dir_only=True):
+ for root, dirs, files in os.walk( self.extra_files_path ):
+ self.total_size += sum( [ os.path.getsize( os.path.join( root, file ) ) for file in files ] )
def has_data( self ):
"""Detects whether there is any data"""
return self.get_size() > 0
@@ -620,10 +606,7 @@
# FIXME: sqlalchemy will replace this
def _delete(self):
"""Remove the file that corresponds to this data"""
- try:
- os.remove(self.data.file_name)
- except OSError, e:
- log.critical('%s delete error %s' % (self.__class__.__name__, e))
+ self.object_store.delete(self.id)
@property
def user_can_purge( self ):
return self.purged == False \
@@ -631,9 +614,12 @@
and len( self.history_associations ) == len( self.purged_history_associations )
def full_delete( self ):
"""Remove the file and extra files, marks deleted and purged"""
- os.unlink( self.file_name )
- if os.path.exists( self.extra_files_path ):
- shutil.rmtree( self.extra_files_path )
+ # os.unlink( self.file_name )
+ self.object_store.delete(self.id)
+ if self.object_store.exists(self.id, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id, dir_only=True):
+ self.object_store.delete(self.id, entire_dir=True, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id, dir_only=True)
+ # if os.path.exists( self.extra_files_path ):
+ # shutil.rmtree( self.extra_files_path )
# TODO: purge metadata files
self.deleted = True
self.purged = True
@@ -1595,16 +1581,32 @@
@property
def file_name( self ):
assert self.id is not None, "ID must be set before filename used (commit the object)"
- path = os.path.join( Dataset.file_path, '_metadata_files', *directory_hash_id( self.id ) )
- # Create directory if it does not exist
+ # Ensure the directory structure and the metadata file object exist
try:
- os.makedirs( path )
- except OSError, e:
- # File Exists is okay, otherwise reraise
- if e.errno != errno.EEXIST:
- raise
- # Return filename inside hashed directory
- return os.path.abspath( os.path.join( path, "metadata_%d.dat" % self.id ) )
+ # self.history_dataset
+ # print "Dataset.file_path: %s, self.id: %s, self.history_dataset.dataset.object_store: %s" \
+ # % (Dataset.file_path, self.id, self.history_dataset.dataset.object_store)
+ self.history_dataset.dataset.object_store.create( self.id, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name="metadata_%d.dat" % self.id )
+ print "Calling get_filename 3", self.object_store
+ path = self.history_dataset.dataset.object_store.get_filename( self.id, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name="metadata_%d.dat" % self.id )
+ print "Created metadata file at path: %s" % path
+ self.library_dataset
+ # raise
+ return path
+ except AttributeError:
+ # In case we're not working with the history_dataset
+ # print "Caught AttributeError"
+ path = os.path.join( Dataset.file_path, '_metadata_files', *directory_hash_id( self.id ) )
+ # Create directory if it does not exist
+ try:
+ os.makedirs( path )
+ except OSError, e:
+ # File Exists is okay, otherwise reraise
+ if e.errno != errno.EEXIST:
+ raise
+ # Return filename inside hashed directory
+ return os.path.abspath( os.path.join( path, "metadata_%d.dat" % self.id ) )
+
class FormDefinition( object, APIItem ):
# The following form_builder classes are supported by the FormDefinition class.
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1709,10 +1709,12 @@
# Let this go, it could possibly work with db's we don't support
log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False ):
+def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None ):
"""Connect mappings to the database"""
# Connect dataset to the file path
Dataset.file_path = file_path
+ # Connect dataset to object store
+ Dataset.object_store = object_store
# Load the appropriate db module
load_egg_for_url( url )
# Should we use the logging proxy?
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/objectstore/__init__.py
--- /dev/null
+++ b/lib/galaxy/objectstore/__init__.py
@@ -0,0 +1,859 @@
+"""
+objectstore package, abstraction for storing blobs of data for use in Galaxy,
+all providers ensure that data can be accessed on the filesystem for running
+tools
+"""
+
+import os
+import time
+import shutil
+import logging
+import threading
+import subprocess
+import multiprocessing
+from datetime import datetime
+
+from galaxy import util
+from galaxy.jobs import Sleeper
+from galaxy.model import directory_hash_id
+from galaxy.objectstore.s3_multipart_upload import multipart_upload
+
+from boto.s3.key import Key
+from boto.s3.connection import S3Connection
+from boto.exception import S3ResponseError
+
+log = logging.getLogger( __name__ )
+logging.getLogger('boto').setLevel(logging.INFO) # Otherwise boto is quite noisy
+
+
class ObjectNotFound(Exception):
    """Raised when a requested object is not present in the object store."""
+
+
class ObjectStore(object):
    """
    ObjectStore abstract interface.

    Concrete subclasses store blobs of data keyed by a Galaxy dataset id and
    must make each object accessible as a file on the local filesystem so
    that tools can run against it.
    """
    def __init__(self):
        # Polled by background threads (e.g., cache monitors); shutdown()
        # flips it to request a cooperative stop.
        self.running = True

    def shutdown(self):
        """Signal any background worker threads to stop."""
        self.running = False

    def exists(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
        """
        Returns True if the object identified by `dataset_id` exists in this
        file store, False otherwise.

        FIELD DESCRIPTIONS (these apply to all the methods in this class):
        :type dataset_id: int
        :param dataset_id: Galaxy-assigned database ID of the dataset to be checked.

        :type dir_only: bool
        :param dir_only: If True, check only the path where the file
                         identified by `dataset_id` should be located, not the
                         dataset itself. This option applies to `extra_dir`
                         argument as well.

        :type extra_dir: string
        :param extra_dir: Append `extra_dir` to the directory structure where
                          the dataset identified by `dataset_id` should be located.
                          (e.g., 000/extra_dir/dataset_id)

        :type extra_dir_at_root: bool
        :param extra_dir_at_root: Applicable only if `extra_dir` is set.
                                  If True, the `extra_dir` argument is placed at
                                  root of the created directory structure rather
                                  than at the end (e.g., extra_dir/000/dataset_id
                                  vs. 000/extra_dir/dataset_id)

        :type alt_name: string
        :param alt_name: Use this name as the alternative name for the created
                         dataset rather than the default.
        """
        raise NotImplementedError()

    def file_ready(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
        """ A helper method that checks if a file corresponding to a dataset
        is ready and available to be used. Return True if so, False otherwise.

        Base implementation always reports ready; stores with asynchronous
        transfers (e.g., S3) override this.
        """
        return True

    def create(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
        """
        Mark the object identified by `dataset_id` as existing in the store, but
        with no content. This method will create a proper directory structure for
        the file if the directory does not already exist.
        See `exists` method for the description of the fields.
        """
        raise NotImplementedError()

    def empty(self, dataset_id, extra_dir=None, extra_dir_at_root=False, alt_name=None):
        """
        Test if the object identified by `dataset_id` has content.
        If the object does not exist raises `ObjectNotFound`.
        See `exists` method for the description of the fields.

        NOTE(review): the current Disk and S3 implementations both return
        True when the object HAS content (size > 0), i.e. the opposite of
        what the method name suggests -- confirm intent with callers.
        """
        raise NotImplementedError()

    def size(self, dataset_id, extra_dir=None, extra_dir_at_root=False, alt_name=None):
        """
        Return size of the object identified by `dataset_id`.
        If the object does not exist, return 0.
        See `exists` method for the description of the fields.
        """
        raise NotImplementedError()

    def delete(self, dataset_id, entire_dir=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
        """
        Deletes the object identified by `dataset_id`.
        See `exists` method for the description of other fields.
        :type entire_dir: bool
        :param entire_dir: If True, delete the entire directory pointed to by
                           extra_dir. For safety reasons, this option applies
                           only for and in conjunction with the extra_dir option.
        """
        raise NotImplementedError()

    def get_data(self, dataset_id, start=0, count=-1, extra_dir=None, extra_dir_at_root=False, alt_name=None):
        """
        Fetch `count` bytes of data starting at offset `start` from the
        object identified uniquely by `dataset_id`.
        If the object does not exist raises `ObjectNotFound`.
        See `exists` method for the description of other fields.

        :type start: int
        :param start: Set the position to start reading the dataset file

        :type count: int
        :param count: Read at most `count` bytes from the dataset
        """
        raise NotImplementedError()

    def get_filename(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
        """
        Get the expected filename (including the absolute path) which can be used
        to access the contents of the object uniquely identified by `dataset_id`.
        See `exists` method for the description of the fields.
        """
        raise NotImplementedError()

    def update_from_file(self, dataset_id, extra_dir=None, extra_dir_at_root=False, alt_name=None, filename=None, create=False):
        """
        Inform the store that the file associated with the object has been
        updated. If `filename` is provided, update from that file instead
        of the default.
        If the object does not exist raises `ObjectNotFound`.
        See `exists` method for the description of other fields.

        :type filename: string
        :param filename: Use file pointed to by `filename` as the source for
                         updating the dataset identified by `dataset_id`

        :type create: bool
        :param create: If True and the default dataset does not exist, create it first.
        """
        raise NotImplementedError()

    def get_object_url(self, dataset_id, extra_dir=None, extra_dir_at_root=False, alt_name=None):
        """
        If the store supports direct URL access, return a URL. Otherwise return
        None.
        Note: need to be careful not to bypass dataset security with this.
        See `exists` method for the description of the fields.
        """
        raise NotImplementedError()

    ## def get_staging_command( id ):
    ##     """
    ##     Return a shell command that can be prepended to the job script to stage the
    ##     dataset -- runs on worker nodes.
    ##
    ##     Note: not sure about the interface here. Should this return a filename, command
    ##     tuple? Is this even a good idea, seems very useful for S3, other object stores?
    ##     """
+
+
class DiskObjectStore(ObjectStore):
    """
    Standard Galaxy object store: stores objects as files under a configured
    directory (``app.config.file_path``) on local disk.
    """
    def __init__(self, app):
        super(DiskObjectStore, self).__init__()
        self.file_path = app.config.file_path

    def _get_filename(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
        """Return the absolute path for the file corresponding to `dataset_id`,
        regardless of whether the file exists.

        For backward compatibility the old-style (un-hashed) path is returned
        when something already exists there; otherwise the hashed path is used.
        """
        path = self._construct_path(dataset_id, dir_only=dir_only, extra_dir=extra_dir,
                                    extra_dir_at_root=extra_dir_at_root, alt_name=alt_name, old_style=True)
        if os.path.exists(path):
            # BUGFIX: the original omitted this return, implicitly returning
            # None whenever the old-style path existed.
            return path
        return self._construct_path(dataset_id, dir_only=dir_only, extra_dir=extra_dir,
                                    extra_dir_at_root=extra_dir_at_root, alt_name=alt_name)

    def _construct_path(self, dataset_id, old_style=False, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
        """Construct the expected absolute path for the object `dataset_id`.

        :type dir_only: bool
        :param dir_only: If True, return only the directory where the file
                         identified by `dataset_id` should be located.

        :type extra_dir: string
        :param extra_dir: Append this to the expected path
                          (e.g., /files/000/<extra_dir>/dataset_10.dat).

        :type extra_dir_at_root: bool
        :param extra_dir_at_root: Place `extra_dir` at the root of the hashed
                                  structure rather than at the end.

        :type alt_name: string
        :param alt_name: Alternative file name to use instead of the default
                         ``dataset_<id>.dat``.

        :type old_style: bool
        :param old_style: Backward compatibility: if True the composed path does
                          not include a hash id directory
                          (/files/dataset_10.dat vs. /files/000/dataset_10.dat).
        """
        if old_style:
            if extra_dir is not None:
                path = os.path.join(self.file_path, extra_dir)
            else:
                path = self.file_path
        else:
            rel_path = os.path.join(*directory_hash_id(dataset_id))
            if extra_dir is not None:
                if extra_dir_at_root:
                    rel_path = os.path.join(extra_dir, rel_path)
                else:
                    rel_path = os.path.join(rel_path, extra_dir)
            path = os.path.join(self.file_path, rel_path)
        if not dir_only:
            path = os.path.join(path, alt_name if alt_name else "dataset_%s.dat" % dataset_id)
        return os.path.abspath(path)

    def exists(self, dataset_id, **kwargs):
        """True if the object exists at either the old-style or hashed path."""
        path = self._construct_path(dataset_id, old_style=True, **kwargs)
        # For backward compatibility, check root path first; otherwise,
        # construct and check the hashed path
        if not os.path.exists(path):
            path = self._construct_path(dataset_id, **kwargs)
        return os.path.exists(path)

    def create(self, dataset_id, **kwargs):
        """Create the directory structure (and, unless dir_only, an empty file)
        for the object identified by `dataset_id`."""
        if self.exists(dataset_id, **kwargs):
            return
        # Pull out locally used fields
        extra_dir = kwargs.get('extra_dir', None)
        extra_dir_at_root = kwargs.get('extra_dir_at_root', False)
        dir_only = kwargs.get('dir_only', False)
        alt_name = kwargs.get('alt_name', None)
        # Construct hashed path
        path = os.path.join(*directory_hash_id(dataset_id))
        # Optionally append extra_dir
        if extra_dir is not None:
            if extra_dir_at_root:
                path = os.path.join(extra_dir, path)
            else:
                path = os.path.join(path, extra_dir)
        # Combine the constructed path with the root dir for all files
        path = os.path.join(self.file_path, path)
        # Create directory if it does not exist
        if not os.path.exists(path):
            os.makedirs(path)
        if not dir_only:
            path = os.path.join(path, alt_name if alt_name else "dataset_%s.dat" % dataset_id)
            open(path, 'w').close()

    def empty(self, dataset_id, **kwargs):
        # NOTE(review): despite the name, this returns True when the object HAS
        # content (size > 0); the S3 implementation behaves the same way, so the
        # polarity is preserved here -- confirm the intended semantics with callers.
        return os.path.getsize(self.get_filename(dataset_id, **kwargs)) > 0

    def size(self, dataset_id, **kwargs):
        """Size of the object in bytes; 0 when missing or unreadable."""
        if self.exists(dataset_id, **kwargs):
            try:
                return os.path.getsize(self.get_filename(dataset_id, **kwargs))
            except OSError:
                return 0
        return 0

    def delete(self, dataset_id, entire_dir=False, **kwargs):
        """Delete the object; with `entire_dir` and `extra_dir`, remove the
        whole directory tree. Returns True on success, False otherwise."""
        path = self.get_filename(dataset_id, **kwargs)
        extra_dir = kwargs.get('extra_dir', None)
        try:
            if entire_dir and extra_dir:
                shutil.rmtree(path)
                return True
            if self.exists(dataset_id, **kwargs):
                os.remove(path)
                return True
        except OSError as ex:
            log.critical('%s delete error %s' % (self._get_filename(dataset_id, **kwargs), ex))
        return False

    def get_data(self, dataset_id, start=0, count=-1, **kwargs):
        """Read `count` bytes starting at offset `start` from the object."""
        data_file = open(self.get_filename(dataset_id, **kwargs), 'r')
        try:
            data_file.seek(start)
            return data_file.read(count)
        finally:
            # Ensure the handle is closed even when seek/read fails
            data_file.close()

    def get_filename(self, dataset_id, **kwargs):
        """Absolute path of the object; raises ObjectNotFound when absent."""
        path = self._construct_path(dataset_id, old_style=True, **kwargs)
        # For backward compatibility, check root path first; otherwise,
        # construct and check the hashed path
        if os.path.exists(path):
            return path
        path = self._construct_path(dataset_id, **kwargs)
        # (removed a leftover debug print statement here)
        if os.path.exists(path):
            return path
        raise ObjectNotFound()

    def update_from_file(self, dataset_id, file_name=None, create=False, **kwargs):
        """Copy `file_name` over the stored object.

        The `create` parameter only triggers `create()` first; it is otherwise
        unused in this implementation.
        """
        if create:
            self.create(dataset_id, **kwargs)
        if file_name and self.exists(dataset_id, **kwargs):
            try:
                shutil.copy(file_name, self.get_filename(dataset_id, **kwargs))
            except IOError as ex:
                log.critical('Error copying %s to %s: %s' % (file_name,
                             self._get_filename(dataset_id, **kwargs), ex))

    def get_object_url(self, dataset_id, **kwargs):
        """Local files have no direct URL; always returns None."""
        return None
+
+
+
class CachingObjectStore(ObjectStore):
    """
    Object store that uses a directory for caching files, but defers and writes
    back to another (backend) object store.

    NOTE(review): only construction is implemented so far; all operations are
    still inherited abstract stubs.
    """
    def __init__(self, path, backend):
        # BUGFIX: the original called super(...).__init__(self, path, backend),
        # passing `self` explicitly plus arguments ObjectStore.__init__ does
        # not accept, which raised TypeError on construction.
        super(CachingObjectStore, self).__init__()
        self.staging_path = path
        self.backend = backend
+
+
+
+class S3ObjectStore(ObjectStore):
+ """
+ Object store that stores objects as items in an AWS S3 bucket. A local
+ cache exists that is used as an intermediate location for files between
+ Galaxy and S3.
+ """
+ def __init__(self, app):
+ super(S3ObjectStore, self).__init__()
+ self.app = app
+ self.staging_path = self.app.config.file_path
+ self.s3_conn = S3Connection()
+ self.bucket = self._get_bucket(self.app.config.s3_bucket)
+ self.use_rr = self.app.config.use_reduced_redundancy
+ self.cache_size = self.app.config.object_store_cache_size * 1073741824 # Convert GBs to bytes
+ self.transfer_progress = 0
+ # Clean cache only if value is set in universe_wsgi.ini
+ if self.cache_size != -1:
+ # Helper for interruptable sleep
+ self.sleeper = Sleeper()
+ self.cache_monitor_thread = threading.Thread(target=self.__cache_monitor)
+ self.cache_monitor_thread.start()
+ log.info("Cache cleaner manager started")
+
    def __cache_monitor(self):
        # Background thread: periodically walks the local cache, and when total
        # size exceeds 90% of the configured cache size, evicts least-recently
        # accessed files until enough space is freed.
        # NOTE(review): `convert_bytes` is not defined or imported anywhere in
        # this module -- presumably intended from galaxy.util; confirm before
        # relying on the log messages below.
        time.sleep(2) # Wait for things to load before starting the monitor
        while self.running:
            total_size = 0
            # Is this going to be too expensive of an operation to be done frequently?
            file_list = []
            for dirpath, dirnames, filenames in os.walk(self.staging_path):
                for f in filenames:
                    fp = os.path.join(dirpath, f)
                    file_size = os.path.getsize(fp)
                    total_size += file_size
                    # Get the time given file was last accessed (stat tuple index 7 == st_atime)
                    last_access_time = time.localtime(os.stat(fp)[7])
                    # Compose a tuple of the access time and the file path
                    file_tuple = last_access_time, fp, file_size
                    file_list.append(file_tuple)
            # Sort the file list (based on access time)
            file_list.sort()
            # Initiate cleaning once within 10% of the defined cache size?
            cache_limit = self.cache_size * 0.9
            if total_size > cache_limit:
                log.info("Initiating cache cleaning: current cache size: %s; clean until smaller than: %s" \
                    % (convert_bytes(total_size), convert_bytes(cache_limit)))
                # How much to delete? If simply deleting up to the cache-10% limit,
                # is likely to be deleting frequently and may run the risk of hitting
                # the limit - maybe delete additional #%?
                # For now, delete enough to leave at least 10% of the total cache free
                delete_this_much = total_size - cache_limit
                self.__clean_cache(file_list, delete_this_much)
            self.sleeper.sleep(30) # Test cache size every 30 seconds?
+
+ def __clean_cache(self, file_list, delete_this_much):
+ """ Keep deleting files from the file_list until the size of the deleted
+ files is greater than the value in delete_this_much parameter.
+
+ :type file_list: list
+ :param file_list: List of candidate files that can be deleted. This method
+ will start deleting files from the beginning of the list so the list
+ should be sorted accordingly. The list must contains 3-element tuples,
+ positioned as follows: position 0 holds file last accessed timestamp
+ (as time.struct_time), position 1 holds file path, and position 2 has
+ file size (e.g., (<access time>, /mnt/data/dataset_1.dat), 472394)
+
+ :type delete_this_much: int
+ :param delete_this_much: Total size of files, in bytes, that should be deleted.
+ """
+ # Keep deleting datasets from file_list until deleted_amount does not
+ # exceed delete_this_much; start deleting from the front of the file list,
+ # which assumes the oldest files come first on the list.
+ deleted_amount = 0
+ for i, f in enumerate(file_list):
+ if deleted_amount < delete_this_much:
+ deleted_amount += f[2]
+ os.remove(f[1])
+ # Debugging code for printing deleted files' stats
+ # folder, file_name = os.path.split(f[1])
+ # file_date = time.strftime("%m/%d/%y %H:%M:%S", f[0])
+ # log.debug("%s. %-25s %s, size %s (deleted %s/%s)" \
+ # % (i, file_name, convert_bytes(f[2]), file_date, \
+ # convert_bytes(deleted_amount), convert_bytes(delete_this_much)))
+ else:
+ log.debug("Cache cleaning done. Total space freed: %s" % convert_bytes(deleted_amount))
+ return
+
+ def _get_bucket(self, bucket_name):
+ """ Sometimes a handle to a bucket is not established right away so try
+ it a few times. Raise error is connection is not established. """
+ for i in range(5):
+ try:
+ bucket = self.s3_conn.get_bucket(bucket_name)
+ log.debug("Using S3 object store; got bucket '%s'" % bucket.name)
+ return bucket
+ except S3ResponseError:
+ log.debug("Could not get bucket '%s', attempt %s/5" % (bucket_name, i+1))
+ time.sleep(2)
+ # All the attempts have been exhausted and connection was not established,
+ # raise error
+ raise S3ResponseError
+
+ def _fix_permissions(self, rel_path):
+ """ Set permissions on rel_path"""
+ for basedir, dirs, files in os.walk( rel_path ):
+ util.umask_fix_perms( basedir, self.app.config.umask, 0777, self.app.config.gid )
+ for f in files:
+ path = os.path.join( basedir, f )
+ # Ignore symlinks
+ if os.path.islink( path ):
+ continue
+ util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
+
+ def _construct_path(self, dataset_id, dir_only=None, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ rel_path = os.path.join(*directory_hash_id(dataset_id))
+ if extra_dir is not None:
+ if extra_dir_at_root:
+ rel_path = os.path.join(extra_dir, rel_path)
+ else:
+ rel_path = os.path.join(rel_path, extra_dir)
+ # S3 folders are marked by having trailing '/' so add it now
+ rel_path = '%s/' % rel_path
+ if not dir_only:
+ rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % dataset_id)
+ return rel_path
+
+ def _get_cache_path(self, rel_path):
+ return os.path.abspath(os.path.join(self.staging_path, rel_path))
+
    def _get_transfer_progress(self):
        # Counter bumped by _transfer_cb during up/downloads; exposed for
        # progress reporting.
        return self.transfer_progress
+
+ def _get_size_in_s3(self, rel_path):
+ try:
+ key = self.bucket.get_key(rel_path)
+ if key:
+ return key.size
+ except S3ResponseError, ex:
+ log.error("Could not get size of key '%s' from S3: %s" % (rel_path, ex))
+ except Exception, ex:
+ log.error("Could not get reference to the key object '%s'; returning -1 for key size: %s" % (rel_path, ex))
+ return -1
+
+ def _key_exists(self, rel_path):
+ exists = False
+ try:
+ # A hackish way of testing if the rel_path is a folder vs a file
+ is_dir = rel_path[-1] == '/'
+ if is_dir:
+ rs = self.bucket.get_all_keys(prefix=rel_path)
+ if len(rs) > 0:
+ exists = True
+ else:
+ exists = False
+ else:
+ key = Key(self.bucket, rel_path)
+ exists = key.exists()
+ except S3ResponseError, ex:
+ log.error("Trouble checking existence of S3 key '%s': %s" % (rel_path, ex))
+ return False
+ print "Checking if '%s' exists in S3: %s" % (rel_path, exists)
+ if rel_path[0] == '/':
+ raise
+ return exists
+
+ def _in_cache(self, rel_path):
+ """ Check if the given dataset is in the local cache and return True if so. """
+ # log.debug("------ Checking cache for rel_path %s" % rel_path)
+ cache_path = self._get_cache_path(rel_path)
+ exists = os.path.exists(cache_path)
+ # print "Checking chache for %s; returning %s" % (cache_path, exists)
+ return exists
+ # EATODO: Part of checking if a file is in cache should be to ensure the
+ # size of the cached file matches that on S3. Once the upload tool explicitly
+ # creates, this check sould be implemented- in the mean time, it's not
+ # looking likely to be implementable reliably.
+ # if os.path.exists(cache_path):
+ # # print "***1 %s exists" % cache_path
+ # if self._key_exists(rel_path):
+ # # print "***2 %s exists in S3" % rel_path
+ # # Make sure the size in cache is available in its entirety
+ # # print "File '%s' cache size: %s, S3 size: %s" % (cache_path, os.path.getsize(cache_path), self._get_size_in_s3(rel_path))
+ # if os.path.getsize(cache_path) == self._get_size_in_s3(rel_path):
+ # # print "***2.1 %s exists in S3 and the size is the same as in cache (in_cache=True)" % rel_path
+ # exists = True
+ # else:
+ # # print "***2.2 %s exists but differs in size from cache (in_cache=False)" % cache_path
+ # exists = False
+ # else:
+ # # Although not perfect decision making, this most likely means
+ # # that the file is currently being uploaded
+ # # print "***3 %s found in cache but not in S3 (in_cache=True)" % cache_path
+ # exists = True
+ # else:
+ # # print "***4 %s does not exist (in_cache=False)" % cache_path
+ # exists = False
+ # # print "Checking cache for %s; returning %s" % (cache_path, exists)
+ # return exists
+ # # return False
+
+ def _pull_into_cache(self, rel_path):
+ # Ensure the cache directory structure exists (e.g., dataset_#_files/)
+ rel_path_dir = os.path.dirname(rel_path)
+ if not os.path.exists(self._get_cache_path(rel_path_dir)):
+ os.makedirs(self._get_cache_path(rel_path_dir))
+ # Now pull in the file
+ ok = self._download(rel_path)
+ self._fix_permissions(rel_path)
+ return ok
+
    def _transfer_cb(self, complete, total):
        # boto progress callback (invoked up to num_cb times per transfer).
        # The `complete`/`total` byte counts are ignored; progress is tracked
        # as a simple monotonically increasing counter instead.
        self.transfer_progress += 10
        # print "Dataset transfer progress: %s" % self.transfer_progress
+
+ def _download(self, rel_path):
+ try:
+ log.debug("Pulling key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path)))
+ key = self.bucket.get_key(rel_path)
+ # Test is cache is large enough to hold the new file
+ if key.size > self.cache_size:
+ log.critical("File %s is larger (%s) than the cache size (%s). Cannot download." \
+ % (rel_path, key.size, self.cache_size))
+ return False
+ # Test if 'axel' is available for parallel download and pull the key into cache
+ try:
+ ret_code = subprocess.call('axel')
+ except OSError:
+ ret_code = 127
+ if ret_code == 127:
+ self.transfer_progress = 0 # Reset transfer progress counter
+ key.get_contents_to_filename(self._get_cache_path(rel_path), cb=self._transfer_cb, num_cb=10)
+ print "(ssss) Pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
+ return True
+ else:
+ ncores = multiprocessing.cpu_count()
+ url = key.generate_url(7200)
+ ret_code = subprocess.call("axel -a -n %s '%s'" % (ncores, url))
+ if ret_code == 0:
+ print "(ssss) Parallel pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
+ return True
+ except S3ResponseError, ex:
+ log.error("Problem downloading key '%s' from S3 bucket '%s': %s" % (rel_path, self.bucket.name, ex))
+ return False
+
+ def _push_to_s3(self, rel_path, source_file=None, from_string=None):
+ """
+ Push the file pointed to by `rel_path` to S3 naming the key `rel_path`.
+ If `source_file` is provided, push that file instead while still using
+ `rel_path` as the key name.
+ If `from_string` is provided, set contents of the file to the value of
+ the string
+ """
+ try:
+ source_file = source_file if source_file else self._get_cache_path(rel_path)
+ if os.path.exists(source_file):
+ key = Key(self.bucket, rel_path)
+ if from_string:
+ key.set_contents_from_string(from_string, reduced_redundancy=self.use_rr)
+ log.debug("Pushed data from string '%s' to key '%s'" % (from_string, rel_path))
+ else:
+ start_time = datetime.now()
+ print "[%s] Pushing cache file '%s' to key '%s'" % (start_time, source_file, rel_path)
+ mb_size = os.path.getsize(source_file) / 1e6
+ if mb_size < 60:
+ self.transfer_progress = 0 # Reset transfer progress counter
+ key.set_contents_from_filename(source_file, reduced_redundancy=self.use_rr,
+ cb=self._transfer_cb, num_cb=10)
+ else:
+ multipart_upload(self.bucket, key.name, source_file, mb_size, use_rr=self.use_rr)
+ # self._multipart_upload(key.name, source_file, mb_size)
+ end_time = datetime.now()
+ print "Push ended at '%s'; it lasted '%s'" % (end_time, end_time-start_time)
+ log.debug("Pushed cache file '%s' to key '%s'" % (source_file, rel_path))
+ return True
+ else:
+ log.error("Tried updating key '%s' from source file '%s', but source file does not exist."
+ % (rel_path, source_file))
+ except S3ResponseError, ex:
+ log.error("Trouble pushing S3 key '%s' from file '%s': %s" % (rel_path, source_file, ex))
+ return False
+
+ def file_ready(self, dataset_id, **kwargs):
+ """ A helper method that checks if a file corresponding to a dataset
+ is ready and available to be used. Return True if so, False otherwise."""
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ # Make sure the size in cache is available in its entirety
+ if self._in_cache(rel_path) and os.path.getsize(self._get_cache_path(rel_path)) == self._get_size_in_s3(rel_path):
+ return True
+ return False
+
+ def exists(self, dataset_id, **kwargs):
+ in_cache = in_s3 = False
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ # Check cache
+ if self._in_cache(rel_path):
+ in_cache = True
+ # Check S3
+ in_s3 = self._key_exists(rel_path)
+ # log.debug("~~~~~~ File '%s' exists in cache: %s; in s3: %s" % (rel_path, in_cache, in_s3))
+ # dir_only does not get synced so shortcut the decision
+ dir_only = kwargs.get('dir_only', False)
+ if dir_only:
+ if in_cache or in_s3:
+ return True
+ else:
+ return False
+ # TODO: Sync should probably not be done here. Add this to an async upload stack?
+ if in_cache and not in_s3:
+ self._push_to_s3(rel_path, source_file=self._get_cache_path(rel_path))
+ return True
+ elif in_s3:
+ return True
+ else:
+ return False
+
+ def create(self, dataset_id, **kwargs):
+ if not self.exists(dataset_id, **kwargs):
+ print "S3 OS creating a dataset with ID %s" % dataset_id
+ # Pull out locally used fields
+ extra_dir = kwargs.get('extra_dir', None)
+ extra_dir_at_root = kwargs.get('extra_dir_at_root', False)
+ dir_only = kwargs.get('dir_only', False)
+ alt_name = kwargs.get('alt_name', None)
+ # print "---- Processing: %s; %s" % (alt_name, locals())
+ # Construct hashed path
+ rel_path = os.path.join(*directory_hash_id(dataset_id))
+ # Optionally append extra_dir
+ if extra_dir is not None:
+ if extra_dir_at_root:
+ rel_path = os.path.join(extra_dir, rel_path)
+ else:
+ rel_path = os.path.join(rel_path, extra_dir)
+ # Create given directory in cache
+ cache_dir = os.path.join(self.staging_path, rel_path)
+ if not os.path.exists(cache_dir):
+ os.makedirs(cache_dir)
+ # Although not really necessary to create S3 folders (because S3 has
+ # flat namespace), do so for consistency with the regular file system
+ # S3 folders are marked by having trailing '/' so add it now
+ # s3_dir = '%s/' % rel_path
+ # self._push_to_s3(s3_dir, from_string='')
+ # If instructed, create the dataset in cache & in S3
+ if not dir_only:
+ rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % dataset_id)
+ open(os.path.join(self.staging_path, rel_path), 'w').close()
+ self._push_to_s3(rel_path, from_string='')
+
+ def empty(self, dataset_id, **kwargs):
+ if self.exists(dataset_id, **kwargs):
+ return bool(self.size(dataset_id, **kwargs) > 0)
+ else:
+ raise ObjectNotFound()
+
+ def size(self, dataset_id, **kwargs):
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ if self._in_cache(rel_path):
+ try:
+ return os.path.getsize(self._get_cache_path(rel_path))
+ except OSError, ex:
+ log.info("Could not get size of file '%s' in local cache, will try S3. Error: %s" % (rel_path, ex))
+ elif self.exists(dataset_id, **kwargs):
+ return self._get_size_in_s3(rel_path)
+ log.warning("Did not find dataset '%s', returning 0 for size" % rel_path)
+ return 0
+
+ def delete(self, dataset_id, entire_dir=False, **kwargs):
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ extra_dir = kwargs.get('extra_dir', None)
+ try:
+ # For the case of extra_files, because we don't have a reference to
+ # individual files/keys we need to remove the entire directory structure
+ # with all the files in it. This is easy for the local file system,
+ # but requires iterating through each individual key in S3 and deleing it.
+ if entire_dir and extra_dir:
+ shutil.rmtree(self._get_cache_path(rel_path))
+ rs = self.bucket.get_all_keys(prefix=rel_path)
+ for key in rs:
+ log.debug("Deleting key %s" % key.name)
+ key.delete()
+ return True
+ else:
+ # Delete from cache first
+ os.unlink(self._get_cache_path(rel_path))
+ # Delete from S3 as well
+ if self._key_exists(rel_path):
+ key = Key(self.bucket, rel_path)
+ log.debug("Deleting key %s" % key.name)
+ key.delete()
+ return True
+ except S3ResponseError, ex:
+ log.error("Could not delete key '%s' from S3: %s" % (rel_path, ex))
+ except OSError, ex:
+ log.error('%s delete error %s' % (self._get_filename(dataset_id, **kwargs), ex))
+ return False
+
+ def get_data(self, dataset_id, start=0, count=-1, **kwargs):
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ # Check cache first and get file if not there
+ if not self._in_cache(rel_path):
+ self._pull_into_cache(rel_path)
+ else:
+ print "(cccc) Getting '%s' from cache" % self._get_cache_path(rel_path)
+ # Read the file content from cache
+ data_file = open(self._get_cache_path(rel_path), 'r')
+ data_file.seek(start)
+ content = data_file.read(count)
+ data_file.close()
+ return content
+
+ def get_filename(self, dataset_id, **kwargs):
+ print "S3 get_filename for dataset: %s" % dataset_id
+ dir_only = kwargs.get('dir_only', False)
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ cache_path = self._get_cache_path(rel_path)
+ # S3 does not recognize directories as files so cannot check if those exist.
+ # So, if checking dir only, ensure given dir exists in cache and return
+ # the expected cache path.
+ # dir_only = kwargs.get('dir_only', False)
+ # if dir_only:
+ # if not os.path.exists(cache_path):
+ # os.makedirs(cache_path)
+ # return cache_path
+ # Check if the file exists in the cache first
+ if self._in_cache(rel_path):
+ return cache_path
+ # Check if the file exists in persistent storage and, if it does, pull it into cache
+ elif self.exists(dataset_id, **kwargs):
+ if dir_only: # Directories do not get pulled into cache
+ return cache_path
+ else:
+ if self._pull_into_cache(rel_path):
+ return cache_path
+ # For the case of retrieving a directory only, return the expected path
+ # even if it does not exist.
+ # if dir_only:
+ # return cache_path
+ raise ObjectNotFound()
+ # return cache_path # Until the upload tool does not explicitly create the dataset, return expected path
+
+ def update_from_file(self, dataset_id, file_name=None, create=False, **kwargs):
+ if create:
+ self.create(dataset_id, **kwargs)
+ if self.exists(dataset_id, **kwargs):
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ # Choose whether to use the dataset file itself or an alternate file
+ if file_name:
+ source_file = os.path.abspath(file_name)
+ # Copy into cache
+ cache_file = self._get_cache_path(rel_path)
+ try:
+ # FIXME? Should this be a `move`?
+ shutil.copy2(source_file, cache_file)
+ self._fix_permissions(cache_file)
+ except OSError, ex:
+ log.error("Trouble copying source file '%s' to cache '%s': %s" % (source_file, cache_file, ex))
+ else:
+ source_file = self._get_cache_path(rel_path)
+ # Update the file on S3
+ self._push_to_s3(rel_path, source_file)
+ else:
+ raise ObjectNotFound()
+
+ def get_object_url(self, dataset_id, **kwargs):
+ if self.exists(dataset_id, **kwargs):
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ try:
+ key = Key(self.bucket, rel_path)
+ return key.generate_url(expires_in = 86400) # 24hrs
+ except S3ResponseError, ex:
+ log.warning("Trouble generating URL for dataset '%s': %s" % (rel_path, ex))
+ return None
+
+
+
+class HierarchicalObjectStore(ObjectStore):
+ """
+ ObjectStore that defers to a list of backends, for getting objects the
+ first store where the object exists is used, objects are always created
+ in the first store.
+ """
+
+ def __init__(self, backends=[]):
+ super(HierarchicalObjectStore, self).__init__()
+
+
+def build_object_store_from_config(app):
+ """ Depending on the configuration setting, invoke the appropriate object store
+ """
+ store = app.config.object_store
+ if store == 'disk':
+ return DiskObjectStore(app=app)
+ elif store == 's3':
+ os.environ['AWS_ACCESS_KEY_ID'] = app.config.aws_access_key
+ os.environ['AWS_SECRET_ACCESS_KEY'] = app.config.aws_secret_key
+ return S3ObjectStore(app=app)
+ elif store == 'hierarchical':
+ return HierarchicalObjectStore()
+
+def convert_bytes(bytes):
+ """ A helper function used for pretty printing disk usage """
+ if bytes is None:
+ bytes = 0
+ bytes = float(bytes)
+
+ if bytes >= 1099511627776:
+ terabytes = bytes / 1099511627776
+ size = '%.2fTB' % terabytes
+ elif bytes >= 1073741824:
+ gigabytes = bytes / 1073741824
+ size = '%.2fGB' % gigabytes
+ elif bytes >= 1048576:
+ megabytes = bytes / 1048576
+ size = '%.2fMB' % megabytes
+ elif bytes >= 1024:
+ kilobytes = bytes / 1024
+ size = '%.2fKB' % kilobytes
+ else:
+ size = '%.2fb' % bytes
+ return size
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/objectstore/s3_multipart_upload.py
--- /dev/null
+++ b/lib/galaxy/objectstore/s3_multipart_upload.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+"""
+Split large file into multiple pieces for upload to S3.
+This parallelizes the task over available cores using multiprocessing.
+Code mostly taken from CloudBioLinux.
+"""
+import os
+import glob
+import subprocess
+import contextlib
+import functools
+import multiprocessing
+from multiprocessing.pool import IMapIterator
+
+import boto
+
+def map_wrap(f):
+ @functools.wraps(f)
+ def wrapper(*args, **kwargs):
+ return apply(f, *args, **kwargs)
+ return wrapper
+
+def mp_from_ids(mp_id, mp_keyname, mp_bucketname):
+ """Get the multipart upload from the bucket and multipart IDs.
+
+ This allows us to reconstitute a connection to the upload
+ from within multiprocessing functions.
+ """
+ conn = boto.connect_s3()
+ bucket = conn.lookup(mp_bucketname)
+ mp = boto.s3.multipart.MultiPartUpload(bucket)
+ mp.key_name = mp_keyname
+ mp.id = mp_id
+ return mp
+
+@map_wrap
+def transfer_part(mp_id, mp_keyname, mp_bucketname, i, part):
+ """Transfer a part of a multipart upload. Designed to be run in parallel.
+ """
+ mp = mp_from_ids(mp_id, mp_keyname, mp_bucketname)
+ print " Transferring", i, part
+ with open(part) as t_handle:
+ mp.upload_part_from_file(t_handle, i+1)
+ os.remove(part)
+
+def multipart_upload(bucket, s3_key_name, tarball, mb_size, use_rr=True):
+ """Upload large files using Amazon's multipart upload functionality.
+ """
+ cores = multiprocessing.cpu_count()
+ print "Initiating multipart upload using %s cores" % cores
+ def split_file(in_file, mb_size, split_num=5):
+ prefix = os.path.join(os.path.dirname(in_file),
+ "%sS3PART" % (os.path.basename(s3_key_name)))
+ # Split chunks so they are 5MB < chunk < 250MB
+ split_size = int(max(min(mb_size / (split_num * 2.0), 250), 5))
+ if not os.path.exists("%saa" % prefix):
+ cl = ["split", "-b%sm" % split_size, in_file, prefix]
+ subprocess.check_call(cl)
+ return sorted(glob.glob("%s*" % prefix))
+
+ mp = bucket.initiate_multipart_upload(s3_key_name, reduced_redundancy=use_rr)
+ with multimap(cores) as pmap:
+ for _ in pmap(transfer_part, ((mp.id, mp.key_name, mp.bucket_name, i, part)
+ for (i, part) in
+ enumerate(split_file(tarball, mb_size, cores)))):
+ pass
+ mp.complete_upload()
+
+@contextlib.contextmanager
+def multimap(cores=None):
+ """Provide multiprocessing imap like function.
+
+ The context manager handles setting up the pool, working around interrupt issues
+ and terminating the pool on completion.
+ """
+ if cores is None:
+ cores = max(multiprocessing.cpu_count() - 1, 1)
+ def wrapper(func):
+ def wrap(self, timeout=None):
+ return func(self, timeout=timeout if timeout is not None else 1e100)
+ return wrap
+ IMapIterator.next = wrapper(IMapIterator.next)
+ pool = multiprocessing.Pool(cores)
+ yield pool.imap
+ pool.terminate()
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1701,23 +1701,38 @@
Find extra files in the job working directory and move them into
the appropriate dataset's files directory
"""
+ # print "Working in collect_associated_files"
for name, hda in output.items():
temp_file_path = os.path.join( job_working_directory, "dataset_%s_files" % ( hda.dataset.id ) )
try:
- if len( os.listdir( temp_file_path ) ) > 0:
- store_file_path = os.path.join(
- os.path.join( self.app.config.file_path, *directory_hash_id( hda.dataset.id ) ),
- "dataset_%d_files" % hda.dataset.id )
- shutil.move( temp_file_path, store_file_path )
- # Fix permissions
- for basedir, dirs, files in os.walk( store_file_path ):
- util.umask_fix_perms( basedir, self.app.config.umask, 0777, self.app.config.gid )
- for file in files:
- path = os.path.join( basedir, file )
- # Ignore symlinks
- if os.path.islink( path ):
- continue
- util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
+ a_files = os.listdir( temp_file_path )
+ if len( a_files ) > 0:
+ for f in a_files:
+ # print "------ Instructing ObjectStore to update/create file: %s from %s" \
+ # % (hda.dataset.id, os.path.join(temp_file_path, f))
+ self.app.object_store.update_from_file(hda.dataset.id,
+ extra_dir="dataset_%d_files" % hda.dataset.id,
+ alt_name = f,
+ file_name = os.path.join(temp_file_path, f),
+ create = True)
+ # Clean up after being handled by object store.
+ # FIXME: If the object (e.g., S3) becomes async, this will
+ # cause issues so add it to the object store functionality?
+ # shutil.rmtree(temp_file_path)
+
+ # store_file_path = os.path.join(
+ # os.path.join( self.app.config.file_path, *directory_hash_id( hda.dataset.id ) ),
+ # "dataset_%d_files" % hda.dataset.id )
+ # shutil.move( temp_file_path, store_file_path )
+ # # Fix permissions
+ # for basedir, dirs, files in os.walk( store_file_path ):
+ # util.umask_fix_perms( basedir, self.app.config.umask, 0777, self.app.config.gid )
+ # for file in files:
+ # path = os.path.join( basedir, file )
+ # # Ignore symlinks
+ # if os.path.islink( path ):
+ # continue
+ # util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
except:
continue
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -282,7 +282,8 @@
trans.sa_session.flush()
trans.app.security_agent.set_all_dataset_permissions( data.dataset, output_permissions )
# Create an empty file immediately
- open( data.file_name, "w" ).close()
+ # open( data.file_name, "w" ).close()
+ trans.app.object_store.create( data.id )
# Fix permissions
util.umask_fix_perms( data.file_name, trans.app.config.umask, 0666 )
# This may not be neccesary with the new parent/child associations
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py
+++ b/lib/galaxy/tools/actions/upload.py
@@ -1,4 +1,3 @@
-import os
from __init__ import ToolAction
from galaxy.tools.actions import upload_common
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -324,13 +324,17 @@
job.add_output_library_dataset( 'output%i' % i, dataset )
# Create an empty file immediately
if not dataset.dataset.external_filename:
- open( dataset.file_name, "w" ).close()
+ trans.app.object_store.create( dataset.id )
+ print "---> Upload tool created a folder(?) %s with ID %s? %s" % (dataset.file_name, dataset.id, trans.app.object_store.exists(dataset.id))
+ # open( dataset.file_name, "w" ).close()
else:
for i, dataset in enumerate( data_list ):
job.add_output_dataset( 'output%i' % i, dataset )
# Create an empty file immediately
if not dataset.dataset.external_filename:
- open( dataset.file_name, "w" ).close()
+ trans.app.object_store.create( dataset.id )
+ print "---> Upload tool created a file %s with ID %s? %s" % (dataset.file_name, dataset.id, trans.app.object_store.exists(dataset.id))
+ # open( dataset.file_name, "w" ).close()
job.state = job.states.NEW
trans.sa_session.add( job )
trans.sa_session.flush()
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -217,7 +217,7 @@
outfname = data.name[0:150]
outfname = ''.join(c in valid_chars and c or '_' for c in outfname)
if (params.do_action == None):
- params.do_action = 'zip' # default
+ params.do_action = 'zip' # default
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
if not data:
@@ -300,8 +300,7 @@
archive.wsgi_headeritems = trans.response.wsgi_headeritems()
return archive.stream
return trans.show_error_message( msg )
-
-
+
@web.expose
def get_metadata_file(self, trans, hda_id, metadata_name):
""" Allows the downloading of metadata files associated with datasets (eg. bai index for bam files) """
@@ -316,12 +315,8 @@
trans.response.headers["Content-Type"] = "application/octet-stream"
trans.response.headers["Content-Disposition"] = "attachment; filename=Galaxy%s-[%s].%s" % (data.hid, fname, file_ext)
return open(data.metadata.get(metadata_name).file_name)
-
- @web.expose
- def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd):
- """Catches the dataset id and displays file contents as directed"""
- composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
- composite_extensions.append('html') # for archiving composite datatypes
+
+ def _check_dataset(self, trans, dataset_id):
# DEPRECATION: We still support unencoded ids for backward compatibility
try:
data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
@@ -340,9 +335,36 @@
if data.state == trans.model.Dataset.states.UPLOAD:
return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to view it." )
+ return data
+
+ @web.expose
+ @web.json
+ def transfer_status(self, trans, dataset_id, filename=None):
+ """ Primarily used for the S3ObjectStore - get the status of data transfer
+ if the file is not in cache """
+ data = self._check_dataset(trans, dataset_id)
+ print "dataset.py -> transfer_status: Checking transfer status for dataset %s..." % data.id
+
+ # Pulling files in extra_files_path into cache is not handled via this
+ # method but that's primarily because those files are typically linked to
+ # through tool's output page anyhow so tying a JavaScript event that will
+ # call this method does not seem doable?
+ if trans.app.object_store.file_ready(data.id):
+ return True
+ else:
+ return False
+
+ @web.expose
+ def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd):
+ """Catches the dataset id and displays file contents as directed"""
+ composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
+ composite_extensions.append('html') # for archiving composite datatypes
+ data = self._check_dataset(trans, dataset_id)
+
if filename and filename != "index":
# For files in extra_files_path
- file_path = os.path.join( data.extra_files_path, filename )
+ file_path = os.path.join( data.extra_files_path, filename ) # remove after debugging
+ file_path = trans.app.object_store.get_filename(data.id, extra_dir='dataset_%s_files' % data.id, alt_name=filename)
if os.path.exists( file_path ):
if os.path.isdir( file_path ):
return trans.show_error_message( "Directory listing is not allowed." ) #TODO: Reconsider allowing listing of directories?
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py
+++ b/lib/galaxy/web/controllers/history.py
@@ -581,7 +581,7 @@
trans.response.set_content_type( 'application/x-gzip' )
else:
trans.response.set_content_type( 'application/x-tar' )
- return open( jeha.dataset.file_name )
+ return trans.app.object_store.get_data(jeha.dataset.id)
elif jeha.job.state in [ model.Job.states.RUNNING, model.Job.states.QUEUED, model.Job.states.WAITING ]:
return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>" \
% ( { 'n' : history.name, 's' : url_for( action="export_archive", id=id, qualified=True ) } ) )
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 templates/dataset/display.mako
--- a/templates/dataset/display.mako
+++ b/templates/dataset/display.mako
@@ -9,11 +9,11 @@
<%def name="init()"><%
- self.has_left_panel=False
- self.has_right_panel=True
- self.message_box_visible=False
- self.active_view="user"
- self.overlay_visible=False
+ self.has_left_panel=False
+ self.has_right_panel=True
+ self.message_box_visible=False
+ self.active_view="user"
+ self.overlay_visible=False
%></%def>
@@ -44,10 +44,10 @@
<%def name="center_panel()"><div class="unified-panel-header" unselectable="on">
- <div class="unified-panel-header-inner">
- ${get_class_display_name( item.__class__ )}
- | ${get_item_name( item ) | h}
- </div>
+ <div class="unified-panel-header-inner">
+ ${get_class_display_name( item.__class__ )}
+ | ${get_item_name( item ) | h}
+ </div></div><div class="unified-panel-body">
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 templates/root/history.mako
--- a/templates/root/history.mako
+++ b/templates/root/history.mako
@@ -140,6 +140,34 @@
return false;
});
});
+
+ // Check to see if the dataset data is cached or needs to be pulled in
+ // via objectstore
+ $(this).find("a.display").each( function() {
+ var history_item = $(this).parents(".historyItem")[0];
+ var history_id = history_item.id.split( "-" )[1];
+ $(this).click(function() {
+ check_transfer_status($(this), history_id);
+ });
+ });
+
+ // If dataset data is not cached, keep making ajax calls to check on the
+ // data status and update the dataset UI element accordingly
+ function check_transfer_status(link, history_id) {
+ $.getJSON("${h.url_for( controller='dataset', action='transfer_status', dataset_id='XXX' )}".replace( 'XXX', link.attr("dataset_id") ),
+ function(ready) {
+ if (ready === false) {
+ // $("<div/>").text("Data is loading from S3... please be patient").appendTo(link.parent());
+ $( '#historyItem-' + history_id).removeClass( "historyItem-ok" );
+ $( '#historyItem-' + history_id).addClass( "historyItem-running" );
+ setTimeout(function(){check_transfer_status(link, history_id)}, 1000);
+ } else {
+ $( '#historyItem-' + history_id).removeClass( "historyItem-running" );
+ $( '#historyItem-' + history_id).addClass( "historyItem-ok" );
+ }
+ }
+ );
+ }
// Undelete link
$(this).find("a.historyItemUndelete").each( function() {
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -98,7 +98,7 @@
%if data.purged:
<span class="icon-button display_disabled tooltip" title="Cannoy display datasets removed from disk"></span>
%else:
- <a class="icon-button display tooltip" title="Display data in browser" href="${display_url}"
+ <a class="icon-button display tooltip" dataset_id="${dataset_id}" title="Display data in browser" href="${display_url}"
%if for_editing:
target="galaxy_main"
%endif
https://bitbucket.org/galaxy/galaxy-central/changeset/9b03f63cd8e8/
changeset: 9b03f63cd8e8
user: afgane
date: 2011-07-21 16:44:27
summary: Added config options to universe_wsgi.ini.sample
affected #: 1 file
diff -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -414,6 +414,17 @@
# -- Beta features
+# Object store mode (valid options are: disk, s3, hierarchical)
+#object_store = s3
+#aws_access_key = <AWS access key>
+#aws_secret_key = <AWS secret key>
+#s3_bucket = <name of an existing S3 bucket>
+#use_reduced_redundancy = True
+# Size (in GB) that the cache used by object store should be limited to.
+# If the value is not specified, the cache size will be limited only by the file
+# system size.
+#object_store_cache_size = 100
+
# Enable Galaxy to communicate directly with a sequencer
#enable_sequencer_communication = False
https://bitbucket.org/galaxy/galaxy-central/changeset/f2878e4d9e0e/
changeset: f2878e4d9e0e
user: afgane
date: 2011-07-21 16:44:47
summary: Merge
affected #: 109 files
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 README.txt
--- a/README.txt
+++ b/README.txt
@@ -28,4 +28,4 @@
Not all dependencies are included for the tools provided in the sample
tool_conf.xml. A full list of external dependencies is available at:
-http://bitbucket.org/galaxy/galaxy-central/wiki/ToolDependencies
+http://wiki.g2.bx.psu.edu/Admin/Tools/Tool%20Dependencies
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 dist-eggs.ini
--- a/dist-eggs.ini
+++ b/dist-eggs.ini
@@ -3,7 +3,7 @@
; eggs.g2.bx.psu.edu) Probably only useful to Galaxy developers at
; Penn State. This file is used by scripts/dist-scramble.py
;
-; More information: http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Eggs
+; More information: http://wiki.g2.bx.psu.edu/Admin/Config/Eggs
;
[hosts]
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -3,7 +3,7 @@
;
; This file is version controlled and should not be edited by hand!
; For more information, see:
-; http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Eggs
+; http://wiki.g2.bx.psu.edu/Admin/Config/Eggs
;
[general]
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -271,7 +271,7 @@
class JobWrapper( object ):
"""
- Wraps a 'model.Job' with convience methods for running processes and
+ Wraps a 'model.Job' with convenience methods for running processes and
state management.
"""
def __init__( self, job, queue ):
@@ -284,6 +284,9 @@
self.sa_session = self.app.model.context
self.extra_filenames = []
self.command_line = None
+ # Tool versioning variables
+ self.version_string_cmd = None
+ self.version_string = ""
self.galaxy_lib_dir = None
# With job outputs in the working directory, we need the working
# directory to be set before prepare is run, or else premature deletion
@@ -311,6 +314,9 @@
param_dict = self.tool.params_from_strings( param_dict, self.app )
return param_dict
+ def get_version_string_path( self ):
+ return os.path.abspath(os.path.join(self.app.config.new_file_path, "GALAXY_VERSION_STRING_%s" % self.job_id))
+
def prepare( self ):
"""
Prepare the job to run by creating the working directory and the
@@ -388,6 +394,7 @@
extra_filenames.append( param_filename )
self.param_dict = param_dict
self.extra_filenames = extra_filenames
+ self.version_string_cmd = self.tool.version_string_cmd
return extra_filenames
def fail( self, message, exception=False ):
@@ -494,6 +501,12 @@
job.state = job.states.ERROR
else:
job.state = job.states.OK
+ if self.version_string_cmd:
+ version_filename = self.get_version_string_path()
+ if os.path.exists(version_filename):
+ self.version_string = "Tool version: %s" % open(version_filename).read()
+ os.unlink(version_filename)
+
if self.app.config.outputs_to_working_directory:
for dataset_path in self.get_output_fnames():
try:
@@ -543,7 +556,7 @@
log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, self.working_directory ) )
dataset.blurb = 'done'
dataset.peek = 'no peek'
- dataset.info = context['stdout'] + context['stderr']
+ dataset.info = context['stdout'] + context['stderr'] + self.version_string
dataset.set_size()
# Update (non-library) job output datasets through the object store
if dataset not in job.output_library_datasets:
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -1,10 +1,9 @@
import os, os.path
class BaseJobRunner( object ):
-
def build_command_line( self, job_wrapper, include_metadata=False ):
"""
- Compose the sequence of commands neccesary to execute a job. This will
+ Compose the sequence of commands necessary to execute a job. This will
currently include:
- environment settings corresponding to any requirement tags
- command line taken from job wrapper
@@ -15,9 +14,13 @@
# occur
if not commands:
return None
+ # Prepend version string
+ if job_wrapper.version_string_cmd:
+ commands = "%s &> %s; " % ( job_wrapper.version_string_cmd, job_wrapper.get_version_string_path() ) + commands
# Prepend dependency injection
if job_wrapper.dependency_shell_commands:
commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
+
# Append metadata setting commands, we don't want to overwrite metadata
# that was copied over in init_meta(), as per established behavior
if include_metadata and self.app.config.set_metadata_externally:
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -17,7 +17,7 @@
configured properly. Galaxy's "scramble" system should make this installation
simple, please follow the instructions found at:
- http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster
+ http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster
Additional errors may follow:
%s
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/jobs/runners/sge.py
--- a/lib/galaxy/jobs/runners/sge.py
+++ b/lib/galaxy/jobs/runners/sge.py
@@ -14,7 +14,7 @@
"scramble" system should make this installation simple, please follow the
instructions found at:
- http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster
+ http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster
Additional errors may follow:
%s
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py
+++ b/lib/galaxy/security/__init__.py
@@ -1005,8 +1005,7 @@
ucsc_main = ( 'hgw1.cse.ucsc.edu', 'hgw2.cse.ucsc.edu', 'hgw3.cse.ucsc.edu', 'hgw4.cse.ucsc.edu',
'hgw5.cse.ucsc.edu', 'hgw6.cse.ucsc.edu', 'hgw7.cse.ucsc.edu', 'hgw8.cse.ucsc.edu' ),
ucsc_test = ( 'hgwdev.cse.ucsc.edu', ),
- ucsc_archaea = ( 'lowepub.cse.ucsc.edu', ),
- ucsc_bhri = ('ucsc.omics.bhri.internal','galaxy.omics.bhri.internal'),
+ ucsc_archaea = ( 'lowepub.cse.ucsc.edu', )
)
def __init__( self, model, permitted_actions=None ):
self.model = model
@@ -1037,7 +1036,7 @@
# We're going to search in order, but if the remote site is load
# balancing their connections (as UCSC does), this is okay.
try:
- if socket.gethostbyname( server ) == addr or server == '127.0.0.1' or server == '172.16.0.38':
+ if socket.gethostbyname( server ) == addr:
break # remote host is in the server list
except ( socket.error, socket.gaierror ):
pass # can't resolve, try next
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -5,7 +5,7 @@
pkg_resources.require( "simplejson" )
-import logging, os, string, sys, tempfile, glob, shutil, types, urllib
+import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess
import simplejson
import binascii
from UserDict import DictMixin
@@ -395,6 +395,11 @@
self.redirect_url_params = ''
# Short description of the tool
self.description = util.xml_text(root, "description")
+ # Versioning for tools
+ self.version_string_cmd = None
+ version_cmd = root.find("version_command")
+ if version_cmd is not None:
+ self.version_string_cmd = version_cmd.text
# Parallelism for tasks, read from tool config.
parallelism = root.find("parallelism")
if parallelism is not None and parallelism.get("method"):
@@ -922,8 +927,6 @@
if not self.check_values:
return
for input in self.inputs.itervalues():
- if input.name not in value:
- value[input.name] = input.get_initial_value( None, value )
if isinstance( input, ToolParameter ):
callback( "", input, value[input.name] )
else:
@@ -1460,6 +1463,11 @@
elif isinstance( input, SelectToolParameter ):
input_values[ input.name ] = SelectToolParameterWrapper(
input, input_values[ input.name ], self.app, other_values = param_dict )
+
+ elif isinstance( input, LibraryDatasetToolParameter ):
+ input_values[ input.name ] = LibraryDatasetValueWrapper(
+ input, input_values[ input.name ], param_dict )
+
else:
input_values[ input.name ] = InputValueWrapper(
input, input_values[ input.name ], param_dict )
@@ -2025,6 +2033,31 @@
def __getattr__( self, key ):
return getattr( self.obj, key )
+class LibraryDatasetValueWrapper( object ):
+ """
+ Wraps an input so that __str__ gives the "param_dict" representation.
+ """
+ def __init__( self, input, value, other_values={} ):
+ self.input = input
+ self.value = value
+ self._other_values = other_values
+ def __str__( self ):
+ return self.value.name
+ def templates( self ):
+ if not self.value:
+ return None
+ template_data = {}
+ for temp_info in self.value.info_association:
+ template = temp_info.template
+ content = temp_info.info.content
+ tmp_dict = {}
+ for field in template.fields:
+ tmp_dict[field['label']] = content[field['name']]
+ template_data[template.name] = tmp_dict
+ return template_data
+ def __getattr__( self, key ):
+ return getattr( self.value, key )
+
class InputValueWrapper( object ):
"""
Wraps an input so that __str__ gives the "param_dict" representation.
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -748,6 +748,9 @@
# Dependency on a value that does not yet exist
if isinstance( dep_value, RuntimeValue ):
return True
+ #dataset not ready yet
+ if hasattr( self, 'ref_input' ) and isinstance( dep_value, self.tool.app.model.HistoryDatasetAssociation ) and ( dep_value.is_pending or not isinstance( dep_value.datatype, self.ref_input.formats ) ):
+ return True
# Dynamic, but all dependenceis are known and have values
return False
def get_initial_value( self, trans, context ):
@@ -878,6 +881,7 @@
self.force_select = string_as_bool( elem.get( "force_select", True ))
self.accept_default = string_as_bool( elem.get( "accept_default", False ))
self.data_ref = elem.get( "data_ref", None )
+ self.ref_input = None
self.default_value = elem.get( "default_value", None )
self.is_dynamic = True
def from_html( self, value, trans=None, context={} ):
@@ -973,7 +977,7 @@
if not dataset.metadata.columns:
# Only allow late validation if the dataset is not yet ready
# (since we have reason to expect the metadata to be ready eventually)
- if dataset.is_pending:
+ if dataset.is_pending or not isinstance( dataset.datatype, self.ref_input.formats ):
return True
# No late validation
return False
@@ -1268,7 +1272,7 @@
displayed as radio buttons and multiple selects as a set of checkboxes
TODO: The following must be fixed to test correctly for the new security_check tag in the DataToolParameter ( the last test below is broken )
- Nate's next passs at the dataset security stuff will dramatically alter this anyway.
+ Nate's next pass at the dataset security stuff will dramatically alter this anyway.
"""
def __init__( self, tool, elem ):
@@ -1353,7 +1357,7 @@
selected = ( value and ( hda in value ) )
field.add_option( "%s: %s" % ( hid, hda_name ), hda.id, selected )
else:
- target_ext, converted_dataset = hda.find_conversion_destination( self.formats, converter_safe = self.converter_safe( other_values, trans ) )
+ target_ext, converted_dataset = hda.find_conversion_destination( self.formats )
if target_ext:
if converted_dataset:
hda = converted_dataset
@@ -1402,13 +1406,22 @@
pass #no valid options
def dataset_collector( datasets ):
def is_convertable( dataset ):
- target_ext, converted_dataset = dataset.find_conversion_destination( self.formats, converter_safe = self.converter_safe( context, trans ) )
+ target_ext, converted_dataset = dataset.find_conversion_destination( self.formats )
if target_ext is not None:
return True
return False
for i, data in enumerate( datasets ):
- if data.visible and not data.deleted and data.state not in [data.states.ERROR, data.states.DISCARDED] and ( isinstance( data.datatype, self.formats) or is_convertable( data ) ):
- if self.options and self._options_filter_attribute( data ) != filter_value:
+ if data.visible and not data.deleted and data.state not in [data.states.ERROR, data.states.DISCARDED]:
+ is_valid = False
+ if isinstance( data.datatype, self.formats ):
+ is_valid = True
+ else:
+ target_ext, converted_dataset = data.find_conversion_destination( self.formats )
+ if target_ext:
+ is_valid = True
+ if converted_dataset:
+ data = converted_dataset
+ if not is_valid or ( self.options and self._options_filter_attribute( data ) != filter_value ):
continue
most_recent_dataset[0] = data
# Also collect children via association object
@@ -1509,6 +1522,38 @@
ref = ref()
return ref
+class LibraryDatasetToolParameter( ToolParameter ):
+ """
+ Parameter that lets users select a LDDA from a modal window, then use it within the wrapper.
+ """
+
+ def __init__( self, tool, elem ):
+ ToolParameter.__init__( self, tool, elem )
+
+ def get_html_field( self, trans=None, value=None, other_values={} ):
+ return form_builder.LibraryField( self.name, value=value, trans=trans )
+
+ def get_initial_value( self, trans, context ):
+ return None
+
+ def from_html( self, value, trans, other_values={} ):
+ if not value:
+ return None
+ elif isinstance( value, trans.app.model.LibraryDatasetDatasetAssociation ):
+ return value
+ else:
+ return trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( value ) )
+
+ def to_string( self, value, app ):
+ if not value:
+ return None
+ return value.id
+
+ def to_python( self, value, app ):
+ if not value:
+ return value
+ return app.model.context.query( app.model.LibraryDatasetDatasetAssociation ).get( value )
+
# class RawToolParameter( ToolParameter ):
# """
# Completely nondescript parameter, HTML representation is provided as text
@@ -1557,19 +1602,20 @@
# self.html = form_builder.HiddenField( self.name, trans.history.id ).get_html()
# return self.html
-parameter_types = dict( text = TextToolParameter,
- integer = IntegerToolParameter,
- float = FloatToolParameter,
- boolean = BooleanToolParameter,
- genomebuild = GenomeBuildParameter,
- select = SelectToolParameter,
- data_column = ColumnListParameter,
- hidden = HiddenToolParameter,
- baseurl = BaseURLToolParameter,
- file = FileToolParameter,
- ftpfile = FTPFileToolParameter,
- data = DataToolParameter,
- drill_down = DrillDownSelectToolParameter )
+parameter_types = dict( text = TextToolParameter,
+ integer = IntegerToolParameter,
+ float = FloatToolParameter,
+ boolean = BooleanToolParameter,
+ genomebuild = GenomeBuildParameter,
+ select = SelectToolParameter,
+ data_column = ColumnListParameter,
+ hidden = HiddenToolParameter,
+ baseurl = BaseURLToolParameter,
+ file = FileToolParameter,
+ ftpfile = FTPFileToolParameter,
+ data = DataToolParameter,
+ library_data = LibraryDatasetToolParameter,
+ drill_down = DrillDownSelectToolParameter )
class UnvalidatedValue( object ):
"""
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -127,7 +127,8 @@
'@' : '__at__',
'\n' : '__cn__',
'\r' : '__cr__',
- '\t' : '__tc__'
+ '\t' : '__tc__',
+ '#' : '__pd__'
}
def restore_text(text):
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -72,10 +72,11 @@
# Override.
pass
- def get_data( self, chrom, start, end, max_vals=None, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=None, **kwargs ):
"""
- Returns data in region defined by chrom, start, and end. If max_vals
- is set, returns at most max_vals.
+ Returns data in region defined by chrom, start, and end. start_val and
+ max_vals are used to denote the data to return: start_val is the first value to
+ return and max_vals indicates the number of values to return.
"""
# Override.
pass
@@ -215,7 +216,7 @@
# Cleanup.
bamfile.close()
- def get_data( self, chrom, start, end, max_vals=sys.maxint, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=sys.maxint, **kwargs ):
"""
Fetch reads in the region.
@@ -253,8 +254,10 @@
# Encode reads as list of lists.
results = []
paired_pending = {}
- for read in data:
- if len(results) > max_vals:
+ for count, read in enumerate( data ):
+ if count < start_val:
+ continue
+ if count-start_val >= max_vals:
message = ERROR_MAX_VALS % ( max_vals, "reads" )
break
qname = read.qname
@@ -319,7 +322,7 @@
f.close()
return all_dat is not None
- def get_data( self, chrom, start, end, max_vals=None, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=None, **kwargs ):
# Bigwig has the possibility of it being a standalone bigwig file, in which case we use
# original_dataset, or coming from wig->bigwig conversion in which we use converted_dataset
f, bbi = self._get_dataset()
@@ -409,7 +412,7 @@
'type': 'int',
'index': filter_col,
'tool_id': 'Filter1',
- 'tool_exp_name': 'c5' } ]
+ 'tool_exp_name': 'c6' } ]
filter_col += 1
if isinstance( self.original_dataset.datatype, Gtf ):
# Create filters based on dataset metadata.
@@ -481,11 +484,14 @@
return tabix.fetch(reference=chrom, start=start, end=end)
- def get_data( self, chrom, start, end, max_vals=None, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=None, **kwargs ):
iterator = self.get_iterator( chrom, start, end )
- return self.process_data( iterator, max_vals, **kwargs )
-
+ return self.process_data( iterator, start_val, max_vals, **kwargs )
+
class IntervalIndexDataProvider( FilterableMixin, TracksDataProvider ):
+ """
+ Interval index files used only for GFF files.
+ """
col_name_data_attr_mapping = { 4 : { 'index': 4 , 'name' : 'Score' } }
def write_data_to_file( self, chrom, start, end, filename ):
@@ -501,12 +507,11 @@
out.write(interval.raw_line + '\n')
out.close()
- def get_data( self, chrom, start, end, max_vals=sys.maxint, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=sys.maxint, **kwargs ):
start, end = int(start), int(end)
source = open( self.original_dataset.file_name )
index = Indexes( self.converted_dataset.file_name )
results = []
- count = 0
message = None
# If chrom is not found in indexes, try removing the first three
@@ -525,14 +530,15 @@
#
filter_cols = from_json_string( kwargs.get( "filter_cols", "[]" ) )
no_detail = ( "no_detail" in kwargs )
- for start, end, offset in index.find(chrom, start, end):
- if count >= max_vals:
+ for count, val in enumerate( index.find(chrom, start, end) ):
+ start, end, offset = val[0], val[1], val[2]
+ if count < start_val:
+ continue
+ if count-start_val >= max_vals:
message = ERROR_MAX_VALS % ( max_vals, "features" )
break
- count += 1
source.seek( offset )
# TODO: can we use column metadata to fill out payload?
- # TODO: use function to set payload data
# GFF dataset.
reader = GFFReaderWrapper( source, fix_strand=True )
@@ -549,7 +555,7 @@
Payload format: [ uid (offset), start, end, name, strand, thick_start, thick_end, blocks ]
"""
- def process_data( self, iterator, max_vals=sys.maxint, **kwargs ):
+ def process_data( self, iterator, start_val=0, max_vals=sys.maxint, **kwargs ):
#
# Build data to return. Payload format is:
# [ <guid/offset>, <start>, <end>, <name>, <score>, <strand>, <thick_start>,
@@ -559,14 +565,14 @@
#
filter_cols = from_json_string( kwargs.get( "filter_cols", "[]" ) )
no_detail = ( "no_detail" in kwargs )
- count = 0
rval = []
message = None
- for line in iterator:
- if count >= max_vals:
+ for count, line in enumerate( iterator ):
+ if count < start_val:
+ continue
+ if count-start_val >= max_vals:
message = ERROR_MAX_VALS % ( max_vals, "features" )
break
- count += 1
# TODO: can we use column metadata to fill out payload?
# TODO: use function to set payload data
@@ -625,16 +631,16 @@
col_name_data_attr_mapping = { 'Qual' : { 'index': 6 , 'name' : 'Qual' } }
- def process_data( self, iterator, max_vals=sys.maxint, **kwargs ):
+ def process_data( self, iterator, start_val=0, max_vals=sys.maxint, **kwargs ):
rval = []
- count = 0
message = None
- for line in iterator:
- if count >= max_vals:
+ for count, line in enumerate( iterator ):
+ if count < start_val:
+ continue
+ if count-start_val >= max_vals:
message = ERROR_MAX_VALS % ( "max_vals", "features" )
break
- count += 1
feature = line.split()
payload = [ hash(line), int(feature[1])-1, int(feature[1]),
@@ -657,22 +663,23 @@
NOTE: this data provider does not use indices, and hence will be very slow
for large datasets.
"""
- def get_data( self, chrom, start, end, max_vals=sys.maxint, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=sys.maxint, **kwargs ):
start, end = int( start ), int( end )
source = open( self.original_dataset.file_name )
results = []
- count = 0
message = None
offset = 0
- for feature in GFFReaderWrapper( source, fix_strand=True ):
+ for count, feature in enumerate( GFFReaderWrapper( source, fix_strand=True ) ):
+ if count < start_val:
+ continue
+ if count-start_val >= max_vals:
+ message = ERROR_MAX_VALS % ( max_vals, "reads" )
+ break
+
feature_start, feature_end = convert_gff_coords_to_bed( [ feature.start, feature.end ] )
if feature.chrom != chrom or feature_start < start or feature_end > end:
continue
- if count >= max_vals:
- message = ERROR_MAX_VALS % ( max_vals, "features" )
- break
- count += 1
payload = package_gff_feature( feature )
payload.insert( 0, offset )
results.append( payload )
@@ -700,6 +707,7 @@
"""
Returns data provider class by name and/or original dataset.
"""
+ data_provider = None
if name:
value = dataset_type_name_to_data_provider[ name ]
if isinstance( value, dict ):
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py
+++ b/lib/galaxy/web/controllers/history.py
@@ -56,15 +56,14 @@
grids.GridColumn( "Created", key="create_time", format=time_ago ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
# Columns that are valid for filtering but are not visible.
- grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+ grids.DeletedColumn( "Status", key="deleted", visible=False, filterable="advanced" )
]
- columns.append(
- grids.MulticolFilterColumn(
- "search history names and tags",
- cols_to_filter=[ columns[0], columns[2] ],
+ columns.append(
+ grids.MulticolFilterColumn(
+ "search history names and tags",
+ cols_to_filter=[ columns[0], columns[2] ],
key="free-text-search", visible=False, filterable="standard" )
)
-
operations = [
grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
@@ -125,11 +124,11 @@
return trans.sa_session.query( self.model_class ).join( 'users_shared_with' )
def apply_query_filter( self, trans, query, **kwargs ):
return query.filter( model.HistoryUserShareAssociation.user == trans.user )
-
+
class HistoryAllPublishedGrid( grids.Grid ):
class NameURLColumn( grids.PublicURLColumn, NameColumn ):
pass
-
+
title = "Published Histories"
model_class = model.History
default_sort_key = "update_time"
@@ -138,15 +137,15 @@
columns = [
NameURLColumn( "Name", key="name", filterable="advanced" ),
grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.HistoryAnnotationAssociation, filterable="advanced" ),
- grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
+ grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
grids.CommunityRatingColumn( "Community Rating", key="rating" ),
grids.CommunityTagsColumn( "Community Tags", key="tags", model_tag_association_class=model.HistoryTagAssociation, filterable="advanced", grid_name="PublicHistoryListGrid" ),
grids.ReverseSortColumn( "Last Updated", key="update_time", format=time_ago )
]
- columns.append(
- grids.MulticolFilterColumn(
- "Search name, annotation, owner, and tags",
- cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
+ columns.append(
+ grids.MulticolFilterColumn(
+ "Search name, annotation, owner, and tags",
+ cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
key="free-text-search", visible=False, filterable="standard" )
)
operations = []
@@ -156,7 +155,7 @@
def apply_query_filter( self, trans, query, **kwargs ):
# A public history is published, has a slug, and is not deleted.
return query.filter( self.model_class.published == True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )
-
+
class HistoryController( BaseController, Sharable, UsesAnnotations, UsesItemRatings, UsesHistory ):
@web.expose
def index( self, trans ):
@@ -166,11 +165,11 @@
"""XML history list for functional tests"""
trans.response.set_content_type( 'text/xml' )
return trans.fill_template( "/history/list_as_xml.mako" )
-
+
stored_list_grid = HistoryListGrid()
shared_list_grid = SharedHistoryListGrid()
published_list_grid = HistoryAllPublishedGrid()
-
+
@web.expose
def list_published( self, trans, **kwargs ):
grid = self.published_list_grid( trans, **kwargs )
@@ -179,7 +178,7 @@
else:
# Render grid wrapped in panels
return trans.fill_template( "history/list_published.mako", grid=grid )
-
+
@web.expose
@web.require_login( "work with multiple histories" )
def list( self, trans, **kwargs ):
@@ -200,7 +199,7 @@
refresh_history = False
# Load the histories and ensure they all belong to the current user
histories = []
- for history_id in history_ids:
+ for history_id in history_ids:
history = self.get_history( trans, history_id )
if history:
# Ensure history is owned by current user
@@ -209,18 +208,18 @@
histories.append( history )
else:
log.warn( "Invalid history id '%r' passed to list", history_id )
- if histories:
+ if histories:
if operation == "switch":
status, message = self._list_switch( trans, histories )
- # Take action to update UI to reflect history switch. If
+ # Take action to update UI to reflect history switch. If
# grid is using panels, it is standalone and hence a redirect
# to root is needed; if grid is not using panels, it is nested
- # in the main Galaxy UI and refreshing the history frame
+ # in the main Galaxy UI and refreshing the history frame
# is sufficient.
use_panels = kwargs.get('use_panels', False) == 'True'
if use_panels:
return trans.response.send_redirect( url_for( "/" ) )
- else:
+ else:
trans.template_context['refresh_frames'] = ['history']
elif operation in ( "delete", "delete and remove datasets from disk" ):
if operation == "delete and remove datasets from disk":
@@ -338,7 +337,7 @@
trans.set_history( new_history )
# No message
return None, None
-
+
@web.expose
@web.require_login( "work with shared histories" )
def list_shared( self, trans, **kwargs ):
@@ -373,7 +372,7 @@
status = 'done'
# Render the list view
return self.shared_list_grid( trans, status=status, message=message, **kwargs )
-
+
@web.expose
def display_structured( self, trans, id=None ):
"""
@@ -444,7 +443,7 @@
items.sort( key=( lambda x: x[0].create_time ), reverse=True )
#
return trans.fill_template( "history/display_structured.mako", items=items )
-
+
@web.expose
def delete_current( self, trans ):
"""Delete just the active history -- this does not require a logged in user."""
@@ -456,25 +455,22 @@
trans.sa_session.add( history )
trans.sa_session.flush()
trans.log_event( "History id %d marked as deleted" % history.id )
- # Regardless of whether it was previously deleted, we make a new history active
+ # Regardless of whether it was previously deleted, we make a new history active
trans.new_history()
- return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
-
+ return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
+
@web.expose
@web.require_login( "rate items" )
@web.json
def rate_async( self, trans, id, rating ):
""" Rate a history asynchronously and return updated community data. """
-
history = self.get_history( trans, id, check_ownership=False, check_accessible=True )
if not history:
return trans.show_error_message( "The specified history does not exist." )
-
# Rate history.
history_rating = self.rate_item( trans.sa_session, trans.get_user(), history, rating )
-
return self.get_ave_item_rating_data( trans.sa_session, history )
-
+
@web.expose
def rename_async( self, trans, id=None, new_name=None ):
history = self.get_history( trans, id )
@@ -490,7 +486,7 @@
trans.sa_session.add( history )
trans.sa_session.flush()
return history.name
-
+
@web.expose
@web.require_login( "use Galaxy histories" )
def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
@@ -503,12 +499,11 @@
return new_annotation
@web.expose
- # TODO: Remove require_login when users are warned that, if they are not
+ # TODO: Remove require_login when users are warned that, if they are not
# logged in, this will remove their current history.
@web.require_login( "use Galaxy histories" )
def import_archive( self, trans, **kwargs ):
""" Import a history from a file archive. """
-
# Set archive source and type.
archive_file = kwargs.get( 'archive_file', None )
archive_url = kwargs.get( 'archive_url', None )
@@ -519,37 +514,34 @@
elif archive_url:
archive_source = archive_url
archive_type = 'url'
-
# If no source to create archive from, show form to upload archive or specify URL.
if not archive_source:
- return trans.show_form(
+ return trans.show_form(
web.FormBuilder( web.url_for(), "Import a History from an Archive", submit_text="Submit" ) \
.add_input( "text", "Archived History URL", "archive_url", value="", error=None )
# TODO: add support for importing via a file.
- #.add_input( "file", "Archived History File", "archive_file", value=None, error=None )
+ #.add_input( "file", "Archived History File", "archive_file", value=None, error=None )
)
-
# Run job to do import.
history_imp_tool = trans.app.toolbox.tools_by_id[ '__IMPORT_HISTORY__' ]
incoming = { '__ARCHIVE_SOURCE__' : archive_source, '__ARCHIVE_TYPE__' : archive_type }
history_imp_tool.execute( trans, incoming=incoming )
return trans.show_message( "Importing history from '%s'. \
This history will be visible when the import is complete" % archive_source )
-
- @web.expose
+
+ @web.expose
def export_archive( self, trans, id=None, gzip=True, include_hidden=False, include_deleted=False ):
""" Export a history to an archive. """
-
- #
+ #
# Convert options to booleans.
#
if isinstance( gzip, basestring ):
- gzip = ( gzip in [ 'True', 'true', 'T', 't' ] )
+ gzip = ( gzip in [ 'True', 'true', 'T', 't' ] )
if isinstance( include_hidden, basestring ):
include_hidden = ( include_hidden in [ 'True', 'true', 'T', 't' ] )
if isinstance( include_deleted, basestring ):
- include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] )
-
+ include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] )
+
#
# Get history to export.
#
@@ -559,10 +551,10 @@
# Use current history.
history = trans.history
id = trans.security.encode_id( history.id )
-
+
if not history:
return trans.show_error_message( "This history does not exist or you cannot export this history." )
-
+
#
# If history has already been exported and it has not changed since export, stream it.
#
@@ -585,40 +577,38 @@
elif jeha.job.state in [ model.Job.states.RUNNING, model.Job.states.QUEUED, model.Job.states.WAITING ]:
return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>" \
% ( { 'n' : history.name, 's' : url_for( action="export_archive", id=id, qualified=True ) } ) )
-
+
# Run job to do export.
history_exp_tool = trans.app.toolbox.tools_by_id[ '__EXPORT_HISTORY__' ]
- params = {
- 'history_to_export' : history,
- 'compress' : gzip,
- 'include_hidden' : include_hidden,
+ params = {
+ 'history_to_export' : history,
+ 'compress' : gzip,
+ 'include_hidden' : include_hidden,
'include_deleted' : include_deleted }
history_exp_tool.execute( trans, incoming = params, set_output_hid = True )
return trans.show_message( "Exporting History '%(n)s'. Use this link to download \
the archive or import it to another Galaxy server: \
<a href='%(u)s'>%(u)s</a>" \
% ( { 'n' : history.name, 'u' : url_for( action="export_archive", id=id, qualified=True ) } ) )
-
+
@web.expose
@web.json
@web.require_login( "get history name and link" )
def get_name_and_link_async( self, trans, id=None ):
""" Returns history's name and link. """
history = self.get_history( trans, id, False )
-
if self.create_item_slug( trans.sa_session, history ):
trans.sa_session.flush()
- return_dict = {
- "name" : history.name,
+ return_dict = {
+ "name" : history.name,
"link" : url_for( action="display_by_username_and_slug", username=history.user.username, slug=history.slug ) }
return return_dict
-
+
@web.expose
@web.require_login( "set history's accessible flag" )
def set_accessible_async( self, trans, id=None, accessible=False ):
""" Set history's importable attribute and slug. """
history = self.get_history( trans, id, True )
-
# Only set if importable value would change; this prevents a change in the update_time unless attribute really changed.
importable = accessible in ['True', 'true', 't', 'T'];
if history and history.importable != importable:
@@ -627,7 +617,6 @@
else:
history.importable = importable
trans.sa_session.flush()
-
return
@web.expose
@@ -638,7 +627,7 @@
history.slug = new_slug
trans.sa_session.flush()
return history.slug
-
+
@web.expose
def get_item_content_async( self, trans, id ):
""" Returns item content in HTML format. """
@@ -646,7 +635,7 @@
history = self.get_history( trans, id, False, True )
if history is None:
raise web.httpexceptions.HTTPNotFound()
-
+
# Get datasets.
datasets = self.get_history_datasets( trans, history )
# Get annotations.
@@ -654,7 +643,7 @@
for dataset in datasets:
dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
return trans.stream_template_mako( "/history/item_content.mako", item = history, item_data = datasets )
-
+
@web.expose
def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
"""Return autocomplete data for history names"""
@@ -666,7 +655,7 @@
for history in trans.sa_session.query( model.History ).filter_by( user=user ).filter( func.lower( model.History.name ) .like(q.lower() + "%") ):
ac_data = ac_data + history.name + "\n"
return ac_data
-
+
@web.expose
def imp( self, trans, id=None, confirm=False, **kwd ):
"""Import another user's history via a shared URL"""
@@ -682,7 +671,7 @@
referer_message = "<a href='%s'>return to the previous page</a>" % referer
else:
referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
-
+
# Do import.
if not id:
return trans.show_error_message( "You must specify a history you want to import.<br>You can %s." % referer_message, use_panels=True )
@@ -712,7 +701,7 @@
# Set imported history to be user's current history.
trans.set_history( new_history )
return trans.show_ok_message(
- message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
+ message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
% ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
elif not user_history or not user_history.datasets or confirm:
new_history = import_history.copy()
@@ -730,13 +719,13 @@
trans.sa_session.flush()
trans.set_history( new_history )
return trans.show_ok_message(
- message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
+ message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
% ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
return trans.show_warn_message( """
Warning! If you import this history, you will lose your current
history. <br>You can <a href="%s">continue and import this history</a> or %s.
""" % ( web.url_for( id=id, confirm=True, referer=trans.request.referer ), referer_message ), use_panels=True )
-
+
@web.expose
def view( self, trans, id=None, show_deleted=False ):
"""View a history. If a history is importable, then it is viewable by any user."""
@@ -757,11 +746,11 @@
history = history_to_view,
datasets = datasets,
show_deleted = show_deleted )
-
+
@web.expose
def display_by_username_and_slug( self, trans, username, slug ):
- """ Display history based on a username and slug. """
-
+ """ Display history based on a username and slug. """
+
# Get history.
session = trans.sa_session
user = session.query( model.User ).filter_by( username=username ).first()
@@ -770,14 +759,14 @@
raise web.httpexceptions.HTTPNotFound()
# Security check raises error if user cannot access history.
self.security_check( trans.get_user(), history, False, True)
-
+
# Get datasets.
datasets = self.get_history_datasets( trans, history )
# Get annotations.
history.annotation = self.get_item_annotation_str( trans.sa_session, history.user, history )
for dataset in datasets:
dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
-
+
# Get rating data.
user_item_rating = 0
if trans.get_user():
@@ -787,9 +776,9 @@
else:
user_item_rating = 0
ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, history )
- return trans.stream_template_mako( "history/display.mako", item = history, item_data = datasets,
+ return trans.stream_template_mako( "history/display.mako", item = history, item_data = datasets,
user_item_rating = user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )
-
+
@web.expose
@web.require_login( "share Galaxy histories" )
def sharing( self, trans, id=None, histories=[], **kwargs ):
@@ -804,7 +793,7 @@
histories = [ self.get_history( trans, history_id ) for history_id in ids ]
elif not histories:
histories = [ trans.history ]
-
+
# Do operation on histories.
for history in histories:
if 'make_accessible_via_link' in kwargs:
@@ -837,17 +826,17 @@
message = "History '%s' does not seem to be shared with user '%s'" % ( history.name, user.email )
return trans.fill_template( '/sharing_base.mako', item=history,
message=message, status='error' )
-
-
+
+
# Legacy issue: histories made accessible before recent updates may not have a slug. Create slug for any histories that need them.
for history in histories:
if history.importable and not history.slug:
self._make_item_accessible( trans.sa_session, history )
-
+
session.flush()
-
+
return trans.fill_template( "/sharing_base.mako", item=history )
-
+
@web.expose
@web.require_login( "share histories with other users" )
def share( self, trans, id=None, email="", **kwd ):
@@ -890,11 +879,11 @@
send_to_err = "The histories you are sharing do not contain any datasets that can be accessed by the users with which you are sharing."
return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
if can_change or cannot_change:
- return trans.fill_template( "/history/share.mako",
- histories=histories,
- email=email,
- send_to_err=send_to_err,
- can_change=can_change,
+ return trans.fill_template( "/history/share.mako",
+ histories=histories,
+ email=email,
+ send_to_err=send_to_err,
+ can_change=can_change,
cannot_change=cannot_change,
no_change_needed=unique_no_change_needed )
if no_change_needed:
@@ -903,11 +892,11 @@
# User seems to be sharing an empty history
send_to_err = "You cannot share an empty history. "
return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
-
+
@web.expose
@web.require_login( "share restricted histories with other users" )
def share_restricted( self, trans, id=None, email="", **kwd ):
- if 'action' in kwd:
+ if 'action' in kwd:
action = kwd[ 'action' ]
else:
err_msg = "Select an action. "
@@ -938,10 +927,10 @@
# The action here is either 'public' or 'private', so we'll continue to populate the
# histories_for_sharing dictionary from the can_change dictionary.
for send_to_user, history_dict in can_change.items():
- for history in history_dict:
+ for history in history_dict:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -954,7 +943,7 @@
# The user with which we are sharing the history does not have access permission on the current dataset
if trans.app.security_agent.can_manage_dataset( user_roles, hda.dataset ) and not hda.dataset.library_associations:
# The current user has authority to change permissions on the current dataset because
- # they have permission to manage permissions on the dataset and the dataset is not associated
+ # they have permission to manage permissions on the dataset and the dataset is not associated
# with a library.
if action == "private":
trans.app.security_agent.privately_share_dataset( hda.dataset, users=[ user, send_to_user ] )
@@ -986,7 +975,7 @@
send_to_user = trans.sa_session.query( trans.app.model.User ) \
.filter( and_( trans.app.model.User.table.c.email==email_address,
trans.app.model.User.table.c.deleted==False ) ) \
- .first()
+ .first()
if send_to_user:
send_to_users.append( send_to_user )
else:
@@ -1004,7 +993,7 @@
for history in history_dict:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -1019,7 +1008,7 @@
# The user may be attempting to share histories whose datasets cannot all be accessed by other users.
# If this is the case, the user sharing the histories can:
# 1) action=='public': choose to make the datasets public if he is permitted to do so
- # 2) action=='private': automatically create a new "sharing role" allowing protected
+ # 2) action=='private': automatically create a new "sharing role" allowing protected
# datasets to be accessed only by the desired users
# This method will populate the can_change, cannot_change and no_change_needed dictionaries, which
# are used for either displaying to the user, letting them make 1 of the choices above, or sharing
@@ -1036,7 +1025,7 @@
for send_to_user in send_to_users:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -1125,7 +1114,7 @@
if send_to_err:
msg += send_to_err
return self.sharing( trans, histories=shared_histories, msg=msg )
-
+
@web.expose
@web.require_login( "rename histories" )
def rename( self, trans, id=None, name=None, **kwd ):
@@ -1164,7 +1153,7 @@
else:
change_msg = change_msg + "<p>History: "+cur_names[i]+" does not appear to belong to you.</p>"
return trans.show_message( "<p>%s" % change_msg, refresh_frames=['history'] )
-
+
@web.expose
@web.require_login( "clone shared Galaxy history" )
def clone( self, trans, id=None, **kwd ):
@@ -1207,13 +1196,11 @@
else:
msg = '%d cloned histories are now included in your previously stored histories.' % len( histories )
return trans.show_ok_message( msg )
-
+
@web.expose
@web.require_login( "switch to a history" )
def switch_to_history( self, trans, hist_id=None ):
decoded_id = trans.security.decode_id(hist_id)
hist = trans.sa_session.query( trans.app.model.History ).get( decoded_id )
-
trans.set_history( hist )
return trans.response.send_redirect( url_for( "/" ) )
-
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/controllers/mobile.py
--- a/lib/galaxy/web/controllers/mobile.py
+++ b/lib/galaxy/web/controllers/mobile.py
@@ -47,7 +47,7 @@
error = password_error = None
user = trans.sa_session.query( model.User ).filter_by( email = email ).first()
if not user:
- error = "No such user"
+ error = "No such user (please note that login is case sensitive)"
elif user.deleted:
error = "This account has been marked deleted, contact your Galaxy administrator to restore the account."
elif user.external:
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -469,7 +469,7 @@
return { "status": messages.DATA, "valid_chroms": valid_chroms }
@web.json
- def data( self, trans, hda_ldda, dataset_id, chrom, low, high, max_vals=5000, **kwargs ):
+ def data( self, trans, hda_ldda, dataset_id, chrom, low, high, start_val=0, max_vals=5000, **kwargs ):
"""
Provides a block of data from a dataset.
"""
@@ -526,7 +526,7 @@
data_provider = data_provider_class( converted_dataset=converted_dataset, original_dataset=dataset, dependencies=deps )
# Get and return data from data_provider.
- data = data_provider.get_data( chrom, low, high, max_vals, **kwargs )
+ data = data_provider.get_data( chrom, low, high, int(start_val), int(max_vals), **kwargs )
message = None
if isinstance(data, dict) and 'message' in data:
message = data['message']
@@ -730,8 +730,7 @@
if run_on_region:
for jida in original_job.input_datasets:
input_dataset = jida.dataset
- # TODO: put together more robust way to determine if a dataset can be indexed.
- if hasattr( input_dataset, 'get_track_type' ):
+ if get_data_provider( original_dataset=input_dataset ):
# Can index dataset.
track_type, data_sources = input_dataset.datatype.get_track_type()
# Convert to datasource that provides 'data' because we need to
@@ -744,7 +743,7 @@
# Return any messages generated during conversions.
return_message = _get_highest_priority_msg( messages_list )
if return_message:
- return return_message
+ return to_json_string( return_message )
#
# Set target history (the history that tool will use for inputs/outputs).
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py
+++ b/lib/galaxy/web/controllers/user.py
@@ -394,7 +394,7 @@
success = False
user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email==email ).first()
if not user:
- message = "No such user"
+ message = "No such user (please note that login is case sensitive)"
status = 'error'
elif user.deleted:
message = "This account has been marked deleted, contact your Galaxy administrator to restore the account."
@@ -914,6 +914,9 @@
@web.expose
def set_default_permissions( self, trans, cntrller, **kwd ):
"""Sets the user's default permissions for the new histories"""
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
if trans.user:
if 'update_roles_button' in kwd:
p = util.Params( kwd )
@@ -926,8 +929,11 @@
action = trans.app.security_agent.get_action( v.action ).action
permissions[ action ] = in_roles
trans.app.security_agent.user_set_default_permissions( trans.user, permissions )
- return trans.show_ok_message( 'Default new history permissions have been changed.' )
- return trans.fill_template( 'user/permissions.mako', cntrller=cntrller )
+ message = 'Default new history permissions have been changed.'
+ return trans.fill_template( 'user/permissions.mako',
+ cntrller=cntrller,
+ message=message,
+ status=status )
else:
# User not logged in, history group must be only public
return trans.show_error_message( "You must be logged in to change your default permitted actions." )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -652,6 +652,27 @@
return self.value
else:
return '-'
+
+class LibraryField( BaseField ):
+ def __init__( self, name, value=None, trans=None ):
+ self.name = name
+ self.ldda = value
+ self.trans = trans
+ def get_html( self, prefix="", disabled=False ):
+ if not self.ldda:
+ ldda = ""
+ text = "Choose a library dataset"
+ else:
+ ldda = self.trans.security.encode_id(self.ldda.id)
+ text = self.ldda.name
+ return '<a href="javascript:void(0);" class="add-librarydataset">%s</a> \
+ <input type="hidden" name="%s%s" value="%s">' % ( text, prefix, self.name, escape( str(ldda), quote=True ) )
+
+ def get_display_text(self):
+ if self.ldda:
+ return self.ldda.name
+ else:
+ return 'None'
def get_suite():
"""Get unittest suite for this module"""
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py
+++ b/lib/galaxy/web/framework/helpers/grids.py
@@ -631,7 +631,7 @@
if column_filter == "All":
pass
elif column_filter in [ "True", "False" ]:
- query = query.filter( self.model_class.deleted == column_filter )
+ query = query.filter( self.model_class.deleted == ( column_filter == "True" ) )
return query
class StateColumn( GridColumn ):
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/framework/middleware/remoteuser.py
--- a/lib/galaxy/web/framework/middleware/remoteuser.py
+++ b/lib/galaxy/web/framework/middleware/remoteuser.py
@@ -36,7 +36,6 @@
"""
UCSC_MAIN_SERVERS = (
- 'omics.bhri.internal',
'hgw1.cse.ucsc.edu',
'hgw2.cse.ucsc.edu',
'hgw3.cse.ucsc.edu',
@@ -50,7 +49,6 @@
'lowepub.cse.ucsc.edu',
)
-
class RemoteUser( object ):
def __init__( self, app, maildomain=None, ucsc_display_sites=[], admin_users=[] ):
self.app = app
@@ -58,7 +56,7 @@
self.allow_ucsc_main = False
self.allow_ucsc_archaea = False
self.admin_users = admin_users
- if 'main' in ucsc_display_sites or 'test' in ucsc_display_sites or 'bhri' in ucsc_display_sites:
+ if 'main' in ucsc_display_sites or 'test' in ucsc_display_sites:
self.allow_ucsc_main = True
if 'archaea' in ucsc_display_sites:
self.allow_ucsc_archaea = True
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/app.py
--- a/lib/galaxy/webapps/community/app.py
+++ b/lib/galaxy/webapps/community/app.py
@@ -1,4 +1,6 @@
import sys, config
+import galaxy.tools.data
+import galaxy.datatypes.registry
import galaxy.webapps.community.model
from galaxy.web import security
from galaxy.tags.tag_handler import CommunityTagHandler
@@ -11,6 +13,8 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
+ # Set up datatypes registry
+ self.datatypes_registry = galaxy.datatypes.registry.Registry( self.config.root, self.config.datatypes_config )
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
@@ -28,6 +32,8 @@
self.security = security.SecurityHelper( id_secret=self.config.id_secret )
# Tag handler
self.tag_handler = CommunityTagHandler()
+ # Tool data tables
+ self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_table_config_path )
# Load security policy
self.security_agent = self.model.security_agent
def shutdown( self ):
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -41,6 +41,9 @@
self.cookie_path = kwargs.get( "cookie_path", "/" )
self.test_conf = resolve_path( kwargs.get( "test_conf", "" ), self.root )
self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
+ self.tool_secret = kwargs.get( "tool_secret", "" )
+ self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() )
+ self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root )
self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
@@ -56,16 +59,18 @@
self.smtp_server = kwargs.get( 'smtp_server', None )
self.smtp_username = kwargs.get( 'smtp_username', None )
self.smtp_password = kwargs.get( 'smtp_password', None )
+ self.start_job_runners = kwargs.get( 'start_job_runners', None )
self.email_alerts_from = kwargs.get( 'email_alerts_from', None )
+ self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
self.log_events = False
self.cloud_controller_instance = False
- self.datatypes_config = kwargs.get( 'datatypes_config_file', 'community_datatypes_conf.xml' )
+ self.datatypes_config = kwargs.get( 'datatypes_config_file', 'datatypes_conf.xml' )
# Proxy features
self.apache_xsendfile = kwargs.get( 'apache_xsendfile', False )
self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -1,12 +1,13 @@
import os, string, socket, logging
from time import strftime
from datetime import *
+from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
from galaxy.model.orm import *
from galaxy.model.item_attrs import UsesItemRatings
-from mercurial import hg, ui
+from mercurial import hg, ui, commands
log = logging.getLogger( __name__ )
@@ -68,20 +69,223 @@
def get_repository( trans, id ):
"""Get a repository from the database via id"""
return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
+def get_repository_metadata( trans, id, changeset_revision ):
+ """Get metadata for a specified repository change set from the database"""
+ return trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
+ trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
+ .first()
+def set_repository_metadata( trans, id, change_set_revision, **kwd ):
+ """Set repository metadata"""
+ message = ''
+ status = 'done'
+ repository = get_repository( trans, id )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ change_set = get_change_set( trans, repo, change_set_revision )
+ invalid_files = []
+ flush_needed = False
+ if change_set is not None:
+ metadata_dict = {}
+ for root, dirs, files in os.walk( repo_dir ):
+ if not root.find( '.hg' ) >= 0 and not root.find( 'hgrc' ) >= 0:
+ if '.hg' in dirs:
+ # Don't visit .hg directories - should be impossible since we don't
+ # allow uploaded archives that contain .hg dirs, but just in case...
+ dirs.remove( '.hg' )
+ if 'hgrc' in files:
+ # Don't include hgrc files in commit.
+ files.remove( 'hgrc' )
+ for name in files:
+ # Find all tool configs.
+ if name.endswith( '.xml' ):
+ try:
+ full_path = os.path.abspath( os.path.join( root, name ) )
+ tool = load_tool( trans, full_path )
+ if tool is not None:
+ tool_requirements = []
+ for tr in tool.requirements:
+ requirement_dict = dict( name=tr.name,
+ type=tr.type,
+ version=tr.version )
+ tool_requirements.append( requirement_dict )
+ tool_tests = []
+ if tool.tests:
+ for ttb in tool.tests:
+ test_dict = dict( name=ttb.name,
+ required_files=ttb.required_files,
+ inputs=ttb.inputs,
+ outputs=ttb.outputs )
+ tool_tests.append( test_dict )
+ tool_dict = dict( id=tool.id,
+ name=tool.name,
+ version=tool.version,
+ description=tool.description,
+ version_string_cmd = tool.version_string_cmd,
+ tool_config=os.path.join( root, name ),
+ requirements=tool_requirements,
+ tests=tool_tests )
+ repository_metadata = get_repository_metadata( trans, id, change_set_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata and 'tools' in metadata:
+ metadata_tools = metadata[ 'tools' ]
+ found = False
+ for tool_metadata_dict in metadata_tools:
+ if 'id' in tool_metadata_dict and tool_metadata_dict[ 'id' ] == tool.id and \
+ 'version' in tool_metadata_dict and tool_metadata_dict[ 'version' ] == tool.version:
+ found = True
+ tool_metadata_dict[ 'name' ] = tool.name
+ tool_metadata_dict[ 'description' ] = tool.description
+ tool_metadata_dict[ 'version_string_cmd' ] = tool.version_string_cmd
+ tool_metadata_dict[ 'tool_config' ] = os.path.join( root, name )
+ tool_metadata_dict[ 'requirements' ] = tool_requirements
+ tool_metadata_dict[ 'tests' ] = tool_tests
+ flush_needed = True
+ if not found:
+ metadata_tools.append( tool_dict )
+ else:
+ if metadata is None:
+ repository_metadata.metadata = {}
+ repository_metadata.metadata[ 'tools' ] = [ tool_dict ]
+ trans.sa_session.add( repository_metadata )
+ if not flush_needed:
+ flush_needed = True
+ else:
+ if 'tools' in metadata_dict:
+ metadata_dict[ 'tools' ].append( tool_dict )
+ else:
+ metadata_dict[ 'tools' ] = [ tool_dict ]
+ except Exception, e:
+ invalid_files.append( ( name, str( e ) ) )
+ # Find all exported workflows
+ elif name.endswith( '.ga' ):
+ try:
+ full_path = os.path.abspath( os.path.join( root, name ) )
+ # Convert workflow data from json
+ fp = open( full_path, 'rb' )
+ workflow_text = fp.read()
+ fp.close()
+ exported_workflow_dict = from_json_string( workflow_text )
+ # We'll store everything except the workflow steps in the database.
+ workflow_dict = { 'a_galaxy_workflow' : exported_workflow_dict[ 'a_galaxy_workflow' ],
+ 'name' :exported_workflow_dict[ 'name' ],
+ 'annotation' : exported_workflow_dict[ 'annotation' ],
+ 'format-version' : exported_workflow_dict[ 'format-version' ] }
+ repository_metadata = get_repository_metadata( trans, id, change_set_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata and 'workflows' in metadata:
+ metadata_workflows = metadata[ 'workflows' ]
+ found = False
+ for workflow_metadata_dict in metadata_workflows:
+ if 'a_galaxy_workflow' in workflow_metadata_dict and util.string_as_bool( workflow_metadata_dict[ 'a_galaxy_workflow' ] ) and \
+ 'name' in workflow_metadata_dict and workflow_metadata_dict[ 'name' ] == exported_workflow_dict[ 'name' ] and \
+ 'annotation' in workflow_metadata_dict and workflow_metadata_dict[ 'annotation' ] == exported_workflow_dict[ 'annotation' ] and \
+ 'format-version' in workflow_metadata_dict and workflow_metadata_dict[ 'format-version' ] == exported_workflow_dict[ 'format-version' ]:
+ found = True
+ break
+ if not found:
+ metadata_workflows.append( workflow_dict )
+ else:
+ if metadata is None:
+ repository_metadata.metadata = {}
+ repository_metadata.metadata[ 'workflows' ] = workflow_dict
+ trans.sa_session.add( repository_metadata )
+ if not flush_needed:
+ flush_needed = True
+ else:
+ if 'workflows' in metadata_dict:
+ metadata_dict[ 'workflows' ].append( workflow_dict )
+ else:
+ metadata_dict[ 'workflows' ] = [ workflow_dict ]
+ except Exception, e:
+ invalid_files.append( ( name, str( e ) ) )
+ if metadata_dict:
+ # The metadata_dict dictionary will contain items only
+ # if the repository did not already have metadata set.
+ repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict )
+ trans.sa_session.add( repository_metadata )
+ if not flush_needed:
+ flush_needed = True
+ else:
+ message = "Repository does not include changeset revision '%s'." % str( change_set_revision )
+ status = 'error'
+ if invalid_files:
+ message = "Metadata cannot be defined for change set revision '%s'. Correct the following problems and reset metadata.<br/>" % str( change_set_revision )
+ for itc_tup in invalid_files:
+ tool_file = itc_tup[0]
+ exception_msg = itc_tup[1]
+ if exception_msg.find( 'No such file or directory' ) >= 0:
+ exception_items = exception_msg.split()
+ missing_file_items = exception_items[7].split( '/' )
+ missing_file = missing_file_items[-1].rstrip( '\'' )
+ correction_msg = "This file refers to a missing file <b>%s</b>. " % str( missing_file )
+ if exception_msg.find( '.loc' ) >= 0:
+                    # Handle the special case where a tool depends on a missing xxx.loc file by telling
+ # the user to upload xxx.loc.sample to the repository so that it can be copied to
+ # ~/tool-data/xxx.loc. In this case, exception_msg will look something like:
+ # [Errno 2] No such file or directory: '/Users/gvk/central/tool-data/blast2go.loc'
+ sample_loc_file = '%s.sample' % str( missing_file )
+ correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_loc_file
+ else:
+ correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % missing_file
+ elif exception_msg.find( 'Data table named' ) >= 0:
+ # Handle the special case where the tool requires an entry in the tool_data_table.conf file.
+ # In this case, exception_msg will look something like:
+ # Data table named 'tmap_indexes' is required by tool but not configured
+ exception_items = exception_msg.split()
+ name_attr = exception_items[3].lstrip( '\'' ).rstrip( '\'' )
+ message += "<b>%s</b> - This tool requires an entry in the tool_data_table_conf.xml file. " % tool_file
+ message += "Complete and <b>Save</b> the form below to resolve this issue.<br/>"
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='add_tool_data_table_entry',
+ name_attr=name_attr,
+ repository_id=id,
+ message=message,
+ status='error' ) )
+ else:
+ correction_msg = exception_msg
+ message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
+ status = 'error'
+ elif flush_needed:
+ # We only flush if there are no tool config errors, so change sets will only have metadata
+ # if everything in them is valid.
+ trans.sa_session.flush()
+ return message, status
def get_repository_by_name( trans, name ):
"""Get a repository from the database via name"""
return trans.sa_session.query( app.model.Repository ).filter_by( name=name ).one()
-def get_repository_tip( repository ):
- # The received repository must be a mercurial repository, not a db record.
- tip_changeset = repository.changelog.tip()
- tip_ctx = repository.changectx( tip_changeset )
- return "%s:%s" % ( str( tip_ctx.rev() ), tip_ctx.parents()[0] )
+def get_change_set( trans, repo, change_set_revision, **kwd ):
+ """Retrieve a specified change set from a repository"""
+ for changeset in repo.changelog:
+ ctx = repo.changectx( changeset )
+ if str( ctx ) == change_set_revision:
+ return ctx
+ return None
+def copy_sample_loc_file( trans, filename ):
+ """Copy xxx.loc.sample to ~/tool-data/xxx.loc"""
+ sample_loc_file = os.path.split( filename )[1]
+ loc_file = os.path.split( filename )[1].rstrip( '.sample' )
+ tool_data_path = os.path.abspath( trans.app.config.tool_data_path )
+ if not ( os.path.exists( os.path.join( tool_data_path, loc_file ) ) or os.path.exists( os.path.join( tool_data_path, sample_loc_file ) ) ):
+ shutil.copy( os.path.abspath( filename ), os.path.join( tool_data_path, sample_loc_file ) )
+ shutil.copy( os.path.abspath( filename ), os.path.join( tool_data_path, loc_file ) )
+def get_configured_ui():
+ # Configure any desired ui settings.
+ _ui = ui.ui()
+ # The following will suppress all messages. This is
+ # the same as adding the following setting to the repo
+ # hgrc file' [ui] section:
+ # quiet = True
+ _ui.setconfig( 'ui', 'quiet', True )
+ return _ui
def get_user( trans, id ):
"""Get a user from the database"""
return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
def handle_email_alerts( trans, repository ):
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
smtp_server = trans.app.config.smtp_server
if smtp_server and repository.email_alerts:
# Send email alert to users that want them.
@@ -118,22 +322,77 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def update_for_browsing( repository, current_working_dir ):
- # Make a copy of a repository's files for browsing.
+def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
+ # Make a copy of a repository's files for browsing, remove from disk all files that
+ # are not tracked, and commit all added, modified or removed files that have not yet
+ # been committed.
repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ # The following will delete the disk copy of only the files in the repository.
+ #os.system( 'hg update -r null > /dev/null 2>&1' )
+ repo.ui.pushbuffer()
+ commands.status( repo.ui, repo, all=True )
+ status_and_file_names = repo.ui.popbuffer().strip().split( "\n" )
+ # status_and_file_names looks something like:
+ # ['? README', '? tmap_tool/tmap-0.0.9.tar.gz', '? dna_filtering.py', 'C filtering.py', 'C filtering.xml']
+ # The codes used to show the status of files are:
+ # M = modified
+ # A = added
+ # R = removed
+ # C = clean
+ # ! = deleted, but still tracked
+ # ? = not tracked
+ # I = ignored
+ files_to_remove_from_disk = []
+ files_to_commit = []
+ for status_and_file_name in status_and_file_names:
+ if status_and_file_name.startswith( '?' ) or status_and_file_name.startswith( 'I' ):
+ files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
+ files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ for full_path in files_to_remove_from_disk:
+ # We'll remove all files that are not tracked or ignored.
+ if os.path.isdir( full_path ):
+ try:
+ os.rmdir( full_path )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ elif os.path.isfile( full_path ):
+ os.remove( full_path )
+ dir = os.path.split( full_path )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ if files_to_commit:
+ if not commit_message:
+ commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
os.chdir( repo_dir )
os.system( 'hg update > /dev/null 2>&1' )
os.chdir( current_working_dir )
+def load_tool( trans, config_file ):
"""
- # TODO: the following is useful if the repository files somehow include missing or
- # untracked files. If this happens, we can enhance the following to clean things up.
- # We're not currently doing any cleanup though since so far none of the repositories
- # have problematic files for browsing.
- # Get the tip change set.
- repo = hg.repository( ui.ui(), repo_dir )
- for changeset in repo.changelog:
- ctx = repo.changectx( changeset )
- ctx_parent = ctx.parents()[0]
- break
- modified, added, removed, deleted, unknown, ignored, clean = repo.status( node1=ctx_parent.node(), node2=ctx.node() )
+ Load a single tool from the file named by `config_file` and return
+ an instance of `Tool`.
"""
+ # Parse XML configuration file and get the root element
+ tree = util.parse_xml( config_file )
+ root = tree.getroot()
+ if root.tag == 'tool':
+ # Allow specifying a different tool subclass to instantiate
+ if root.find( "type" ) is not None:
+ type_elem = root.find( "type" )
+ module = type_elem.get( 'module', 'galaxy.tools' )
+ cls = type_elem.get( 'class' )
+ mod = __import__( module, globals(), locals(), [cls])
+ ToolClass = getattr( mod, cls )
+ elif root.get( 'tool_type', None ) is not None:
+ ToolClass = tool_types.get( root.get( 'tool_type' ) )
+ else:
+ ToolClass = Tool
+ return ToolClass( config_file, root, trans.app )
+ return None
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -74,10 +74,9 @@
class NameColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
return repository.name
- class VersionColumn( grids.TextColumn ):
+ class RevisionColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
- repo = hg.repository( ui.ui(), repository.repo_path )
- return get_repository_tip( repo )
+ return repository.revision
class DescriptionColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
return repository.description
@@ -124,11 +123,11 @@
key="name",
link=( lambda item: dict( operation="view_or_manage_repository", id=item.id, webapp="community" ) ),
attach_popup=False ),
- DescriptionColumn( "Description",
+ DescriptionColumn( "Synopsis",
key="description",
attach_popup=False ),
- VersionColumn( "Version",
- attach_popup=False ),
+ RevisionColumn( "Revision",
+ attach_popup=False ),
CategoryColumn( "Category",
model_class=model.Category,
key="Category.name",
@@ -215,7 +214,8 @@
if operation == "view_or_manage_repository":
repository_id = kwd.get( 'id', None )
repository = get_repository( trans, repository_id )
- if repository.user == trans.user:
+ is_admin = trans.user_is_admin()
+ if is_admin or repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
**kwd ) )
@@ -312,7 +312,7 @@
if not os.path.exists( repository_path ):
os.makedirs( repository_path )
# Create the local repository
- repo = hg.repository( ui.ui(), repository_path, create=True )
+ repo = hg.repository( get_configured_ui(), repository_path, create=True )
# Add an entry in the hgweb.config file for the local repository
# This enables calls to repository.repo_path
self.__add_hgweb_config_entry( trans, repository, repository_path )
@@ -356,11 +356,19 @@
if not( VALID_REPOSITORYNAME_RE.match( name ) ):
return "Repository names must contain only lower-case letters, numbers and underscore '_'."
return ''
+ def __make_hgweb_config_copy( self, trans, hgweb_config ):
+ # Make a backup of the hgweb.config file
+ today = date.today()
+ backup_date = today.strftime( "%Y_%m_%d" )
+ hgweb_config_copy = '%s/hgweb.config_%s_backup' % ( trans.app.config.root, backup_date )
+ shutil.copy( os.path.abspath( hgweb_config ), os.path.abspath( hgweb_config_copy ) )
def __add_hgweb_config_entry( self, trans, repository, repository_path ):
# Add an entry in the hgweb.config file for a new repository.
# An entry looks something like:
# repos/test/mira_assembler = database/community_files/000/repo_123.
hgweb_config = "%s/hgweb.config" % trans.app.config.root
+ # Make a backup of the hgweb.config file since we're going to be changing it.
+ self.__make_hgweb_config_copy( trans, hgweb_config )
entry = "repos/%s/%s = %s" % ( repository.user.username, repository.name, repository_path.lstrip( './' ) )
if os.path.exists( hgweb_config ):
output = open( hgweb_config, 'a' )
@@ -369,6 +377,25 @@
output.write( '[paths]\n' )
output.write( "%s\n" % entry )
output.close()
+ def __change_hgweb_config_entry( self, trans, repository, old_repository_name, new_repository_name ):
+ # Change an entry in the hgweb.config file for a repository. This only happens when
+ # the owner changes the name of the repository. An entry looks something like:
+ # repos/test/mira_assembler = database/community_files/000/repo_123.
+ hgweb_config = "%s/hgweb.config" % trans.app.config.root
+ # Make a backup of the hgweb.config file since we're going to be changing it.
+ self.__make_hgweb_config_copy( trans, hgweb_config )
+ repo_dir = repository.repo_path
+ old_lhs = "repos/%s/%s" % ( repository.user.username, old_repository_name )
+ old_entry = "%s = %s" % ( old_lhs, repo_dir )
+ new_entry = "repos/%s/%s = %s\n" % ( repository.user.username, new_repository_name, repo_dir )
+ tmp_fd, tmp_fname = tempfile.mkstemp()
+ new_hgweb_config = open( tmp_fname, 'wb' )
+ for i, line in enumerate( open( hgweb_config ) ):
+ if line.startswith( old_lhs ):
+ new_hgweb_config.write( new_entry )
+ else:
+ new_hgweb_config.write( line )
+ shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
def __create_hgrc_file( self, repository ):
# At this point, an entry for the repository is required to be in the hgweb.config
# file so we can call repository.repo_path.
@@ -379,7 +406,7 @@
# push_ssl = False
# Since we support both http and https, we set push_ssl to False to override
# the default (which is True) in the mercurial api.
- repo = hg.repository( ui.ui(), path=repository.repo_path )
+ repo = hg.repository( get_configured_ui(), path=repository.repo_path )
fp = repo.opener( 'hgrc', 'wb' )
fp.write( '[paths]\n' )
fp.write( 'default = .\n' )
@@ -396,9 +423,10 @@
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
current_working_dir = os.getcwd()
- update_for_browsing( repository, current_working_dir )
+ # Update repository files for browsing.
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
repository=repository,
@@ -413,31 +441,46 @@
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
repository = get_repository( trans, id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) )
if params.get( 'select_files_to_delete_button', False ):
if selected_files_to_delete:
selected_files_to_delete = selected_files_to_delete.split( ',' )
current_working_dir = os.getcwd()
# Get the current repository tip.
- tip = repo[ 'tip' ]
+ tip = repository.tip
for selected_file in selected_files_to_delete:
repo_file = os.path.abspath( selected_file )
- commands.remove( repo.ui, repo, repo_file )
+ commands.remove( repo.ui, repo, repo_file, force=True )
# Commit the change set.
if not commit_message:
commit_message = 'Deleted selected files'
- # Commit the changes.
- commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+ try:
+ commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- update_for_browsing( repository, current_working_dir )
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
- repo = hg.repository( ui.ui(), repo_dir )
- if tip != repo[ 'tip' ]:
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ if tip != repository.tip:
message = "The selected files were deleted from the repository."
else:
message = 'No changes to repository.'
+ # Set metadata on the repository tip
+ error_message, status = set_repository_metadata( trans, id, repository.tip, **kwd )
+ if error_message:
+ message = '%s<br/>%s' % ( message, error_message )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=id,
+ message=message,
+ status=status ) )
else:
message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>."
status = "error"
@@ -453,8 +496,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
- tip = get_repository_tip( repo )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
alerts = params.get( 'alerts', '' )
@@ -481,10 +523,15 @@
trans.sa_session.flush()
checked = alerts_checked or ( user and user.email in email_alerts )
alerts_check_box = CheckboxField( 'alerts', checked=checked )
+ repository_metadata = get_repository_metadata( trans, id, repository.tip )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ else:
+ metadata = None
return trans.fill_template( '/webapps/community/repository/view_repository.mako',
repo=repo,
repository=repository,
- tip=tip,
+ metadata=metadata,
avg_rating=avg_rating,
display_reviews=display_reviews,
num_ratings=num_ratings,
@@ -498,8 +545,8 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
- tip = get_repository_tip( repo )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
repo_name = util.restore_text( params.get( 'repo_name', repository.name ) )
description = util.restore_text( params.get( 'description', repository.description ) )
long_description = util.restore_text( params.get( 'long_description', repository.long_description ) )
@@ -507,6 +554,7 @@
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
alerts = params.get( 'alerts', '' )
alerts_checked = CheckboxField.is_checked( alerts )
+ category_ids = util.listify( params.get( 'category_id', '' ) )
if repository.email_alerts:
email_alerts = from_json_string( repository.email_alerts )
else:
@@ -516,6 +564,7 @@
user = trans.user
if params.get( 'edit_repository_button', False ):
flush_needed = False
+ # TODO: add a can_manage in the security agent.
if user != repository.user:
message = "You are not the owner of this repository, so you cannot manage it."
status = error
@@ -529,6 +578,7 @@
if message:
error = True
else:
+ self.__change_hgweb_config_entry( trans, repository, repository.name, repo_name )
repository.name = repo_name
flush_needed = True
if description != repository.description:
@@ -540,6 +590,21 @@
if flush_needed:
trans.sa_session.add( repository )
trans.sa_session.flush()
+ message = "The repository information has been updated."
+ elif params.get( 'manage_categories_button', False ):
+ flush_needed = False
+ # Delete all currently existing categories.
+ for rca in repository.categories:
+ trans.sa_session.delete( rca )
+ trans.sa_session.flush()
+ if category_ids:
+ # Create category associations
+ for category_id in category_ids:
+ category = trans.app.model.Category.get( trans.security.decode_id( category_id ) )
+ rca = trans.app.model.RepositoryCategoryAssociation( repository, category )
+ trans.sa_session.add( rca )
+ trans.sa_session.flush()
+ message = "The repository information has been updated."
elif params.get( 'user_access_button', False ):
if allow_push not in [ 'none' ]:
remove_auth = params.get( 'remove_auth', '' )
@@ -553,6 +618,7 @@
usernames.append( user.username )
usernames = ','.join( usernames )
repository.set_allow_push( usernames, remove_auth=remove_auth )
+ message = "The repository information has been updated."
elif params.get( 'receive_email_alerts_button', False ):
flush_needed = False
if alerts_checked:
@@ -568,6 +634,7 @@
if flush_needed:
trans.sa_session.add( repository )
trans.sa_session.flush()
+ message = "The repository information has been updated."
if error:
status = 'error'
if repository.allow_push:
@@ -577,6 +644,13 @@
allow_push_select_field = self.__build_allow_push_select_field( trans, current_allow_push_list )
checked = alerts_checked or user.email in email_alerts
alerts_check_box = CheckboxField( 'alerts', checked=checked )
+ repository_metadata = get_repository_metadata( trans, id, repository.tip )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ else:
+ metadata = None
+ categories = get_categories( trans )
+ selected_categories = [ rca.category_id for rca in repository.categories ]
return trans.fill_template( '/webapps/community/repository/manage_repository.mako',
repo_name=repo_name,
description=description,
@@ -585,7 +659,9 @@
allow_push_select_field=allow_push_select_field,
repo=repo,
repository=repository,
- tip=tip,
+ selected_categories=selected_categories,
+ categories=categories,
+ metadata=metadata,
avg_rating=avg_rating,
display_reviews=display_reviews,
num_ratings=num_ratings,
@@ -598,7 +674,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
changesets = []
for changeset in repo.changelog:
ctx = repo.changectx( changeset )
@@ -626,14 +702,9 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
- found = False
- for changeset in repo.changelog:
- ctx = repo.changectx( changeset )
- if str( ctx ) == ctx_str:
- found = True
- break
- if not found:
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
+ ctx = get_change_set( trans, repo, ctx_str )
+ if ctx is None:
message = "Repository does not include changeset revision '%s'." % str( ctx_str )
status = 'error'
return trans.response.send_redirect( web.url_for( controller='repository',
@@ -675,8 +746,7 @@
message='Select a repository to rate',
status='error' ) )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
- tip = get_repository_tip( repo )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
if repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -691,7 +761,6 @@
rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model )
return trans.fill_template( '/webapps/community/repository/rate_repository.mako',
repository=repository,
- tip=tip,
avg_rating=avg_rating,
display_reviews=display_reviews,
num_ratings=num_ratings,
@@ -736,6 +805,134 @@
action='browse_repositories',
**kwd ) )
@web.expose
+ @web.require_login( "set repository metadata" )
+ def set_metadata( self, trans, id, ctx_str, **kwd ):
+ message, status = set_repository_metadata( trans, id, ctx_str, **kwd )
+ if not message:
+ message = "Metadata for change set revision '%s' has been reset." % str( ctx_str )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=id,
+ message=message,
+ status=status ) )
+ @web.expose
+ def add_tool_data_table_entry( self, trans, name_attr, repository_id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ comment_char = util.restore_text( params.get( 'comment_char', '#' ) )
+ loc_filename = util.restore_text( params.get( 'loc_filename', '' ) )
+ repository = get_repository( trans, repository_id )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
+ column_fields = self.__get_column_fields( **kwd )
+ if params.get( 'add_field_button', False ):
+ # Add a field
+ field_index = len( column_fields ) + 1
+ field_tup = ( '%i_field_name' % field_index, '' )
+ column_fields.append( field_tup )
+ elif params.get( 'remove_button', False ):
+ # Delete a field - find the index of the field to be removed from the remove button label
+ index = int( kwd[ 'remove_button' ].split( ' ' )[2] ) - 1
+ tup_to_remove = column_fields[ index ]
+ column_fields.remove( tup_to_remove )
+ # Re-number field tups
+ new_column_fields = []
+ for field_index, old_field_tup in enumerate( column_fields ):
+ name = '%i_field_name' % ( field_index + 1 )
+ value = old_field_tup[1]
+ new_column_fields.append( ( name, value ) )
+ column_fields = new_column_fields
+ elif params.get( 'add_tool_data_table_entry_button', False ):
+ # Add an entry to the end of the tool_data_table_conf.xml file
+ tdt_config = "%s/tool_data_table_conf.xml" % trans.app.config.root
+ if os.path.exists( tdt_config ):
+ # Make a backup of the file since we're going to be changing it.
+ today = date.today()
+ backup_date = today.strftime( "%Y_%m_%d" )
+ tdt_config_copy = '%s/tool_data_table_conf.xml_%s_backup' % ( trans.app.config.root, backup_date )
+ shutil.copy( os.path.abspath( tdt_config ), os.path.abspath( tdt_config_copy ) )
+ # Generate the string of column names
+ column_names = ', '.join( [ column_tup[1] for column_tup in column_fields ] )
+ # Write each line of the tool_data_table_conf.xml file, except the last line to a temp file.
+ fh = tempfile.NamedTemporaryFile( 'wb' )
+ tmp_filename = fh.name
+ fh.close()
+ new_tdt_config = open( tmp_filename, 'wb' )
+ for i, line in enumerate( open( tdt_config, 'rb' ) ):
+ if line.startswith( '</tables>' ):
+ break
+ new_tdt_config.write( line )
+ new_tdt_config.write( ' <!-- Location of %s files -->\n' % name_attr )
+ new_tdt_config.write( ' <table name="%s" comment_char="%s">\n' % ( name_attr, comment_char ) )
+ new_tdt_config.write( ' <columns>%s</columns>\n' % column_names )
+ new_tdt_config.write( ' <file path="tool-data/%s" />\n' % loc_filename )
+ new_tdt_config.write( ' </table>\n' )
+ # Now write the last line of the file
+ new_tdt_config.write( '</tables>\n' )
+ new_tdt_config.close()
+ shutil.move( tmp_filename, os.path.abspath( tdt_config ) )
+ # Reload the tool_data_table_conf entries
+ trans.app.tool_data_tables = galaxy.tools.data.ToolDataTableManager( trans.app.config.tool_data_table_config_path )
+ message = "The new entry has been added to the tool_data_table_conf.xml file, so click the <b>Reset metadata</b> button below."
+ # TODO: what if ~/tool-data/<loc_filename> doesn't exist? We need to figure out how to
+ # force the user to upload its sample to the repository in order to generate metadata.
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=repository_id,
+ message=message,
+ status=status ) )
+ return trans.fill_template( '/webapps/community/repository/add_tool_data_table_entry.mako',
+ name_attr=name_attr,
+ repository=repository,
+ comment_char=comment_char,
+ loc_filename=loc_filename,
+ column_fields=column_fields,
+ message=message,
+ status=status )
+ def __get_column_fields( self, **kwd ):
+ '''
+ Return a dictionary of the user-entered form fields representing columns
+ in the location file.
+ '''
+ params = util.Params( kwd )
+ column_fields = []
+ index = 0
+ while True:
+ name = '%i_field_name' % ( index + 1 )
+ if kwd.has_key( name ):
+ value = util.restore_text( params.get( name, '' ) )
+ field_tup = ( name, value )
+ index += 1
+ column_fields.append( field_tup )
+ else:
+ break
+ return column_fields
+ @web.expose
+ def display_tool( self, trans, repository_id, tool_config, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ repository = get_repository( trans, repository_id )
+ tool = load_tool( trans, os.path.abspath( tool_config ) )
+ tool_state = self.__new_state( trans )
+ return trans.fill_template( "/webapps/community/repository/tool_form.mako",
+ repository=repository,
+ tool=tool,
+ tool_state=tool_state,
+ message=message,
+ status=status )
+ def __new_state( self, trans, all_pages=False ):
+ """
+ Create a new `DefaultToolState` for this tool. It will not be initialized
+ with default values for inputs.
+
+ Only inputs on the first page will be initialized unless `all_pages` is
+ True, in which case all inputs regardless of page are initialized.
+ """
+ state = DefaultToolState()
+ state.inputs = {}
+ return state
+ @web.expose
def download( self, trans, repository_id, file_type, **kwd ):
# Download an archive of the repository files compressed as zip, gz or bz2.
params = util.Params( kwd )
@@ -780,7 +977,6 @@
folder_contents.append( node )
return folder_contents
def __get_files( self, trans, repository, folder_path ):
- ok = True
def print_ticks( d ):
pass
cmd = "ls -p '%s'" % folder_path
@@ -789,17 +985,22 @@
events={ pexpect.TIMEOUT : print_ticks },
timeout=10 )
if 'No such file or directory' in output:
- status = 'error'
- message = "No folder named (%s) exists." % folder_path
- ok = False
- if ok:
- return output.split()
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='browse_repositories',
- operation="view_or_manage_repository",
- id=trans.security.encode_id( repository.id ),
- status=status,
- message=message ) )
+ if 'root' in output:
+ # The repository is empty
+ return []
+ else:
+ # Some strange error occurred, the selected file was displayed, but
+ # does not exist in the sub-directory from which it was displayed.
+ # This should not happen...
+ status = 'error'
+ message = "No folder named (%s) exists." % folder_path
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories',
+ operation="view_or_manage_repository",
+ id=trans.security.encode_id( repository.id ),
+ status=status,
+ message=message ) )
+ return output.split()
@web.json
def get_file_contents( self, trans, file_path ):
# Avoid caching
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -27,13 +27,13 @@
repository_id = params.get( 'repository_id', '' )
repository = get_repository( trans, repository_id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) )
remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
upload_point = self.__get_upload_point( repository, **kwd )
# Get the current repository tip.
- tip = repo[ 'tip' ]
+ tip = repository.tip
if params.get( 'upload_button', False ):
current_working_dir = os.getcwd()
file_data = params.get( 'file_data', '' )
@@ -45,6 +45,7 @@
uploaded_file = file_data.file
uploaded_file_name = uploaded_file.name
uploaded_file_filename = file_data.filename
+ isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
if uploaded_file:
isgzip = False
isbz2 = False
@@ -53,17 +54,21 @@
if not isgzip:
isbz2 = is_bz2( uploaded_file_name )
ok = True
- # Determine what we have - a single file or an archive
- try:
- if ( isgzip or isbz2 ) and uncompress_file:
- # Open for reading with transparent compression.
- tar = tarfile.open( uploaded_file_name, 'r:*' )
- else:
- tar = tarfile.open( uploaded_file_name )
- istar = True
- except tarfile.ReadError, e:
+ if isempty:
tar = None
istar = False
+ else:
+ # Determine what we have - a single file or an archive
+ try:
+ if ( isgzip or isbz2 ) and uncompress_file:
+ # Open for reading with transparent compression.
+ tar = tarfile.open( uploaded_file_name, 'r:*' )
+ else:
+ tar = tarfile.open( uploaded_file_name )
+ istar = True
+ except tarfile.ReadError, e:
+ tar = None
+ istar = False
if istar:
ok, message, files_to_remove = self.upload_tar( trans,
repository,
@@ -82,15 +87,24 @@
# Move the uploaded file to the upload_point within the repository hierarchy.
shutil.move( uploaded_file_name, full_path )
commands.add( repo.ui, repo, full_path )
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
+ if full_path.endswith( '.loc.sample' ):
+ # Handle the special case where a xxx.loc.sample file is
+ # being uploaded by copying it to ~/tool-data/xxx.loc.
+ copy_sample_loc_file( trans, full_path )
handle_email_alerts( trans, repository )
if ok:
- # Update the repository files for browsing, a by-product of doing this
- # is eliminating unwanted files from the repository directory.
- update_for_browsing( repository, current_working_dir )
+ # Update the repository files for browsing.
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
- repo = hg.repository( ui.ui(), repo_dir )
- if tip != repo[ 'tip' ]:
+ if tip != repository.tip:
if ( isgzip or isbz2 ) and uncompress_file:
uncompress_str = ' uncompressed and '
else:
@@ -102,12 +116,22 @@
else:
message += " %d files were removed from the repository root." % len( files_to_remove )
else:
- message = 'No changes to repository.'
+ message = 'No changes to repository.'
+ # Set metadata on the repository tip
+ error_message, status = set_repository_metadata( trans, repository_id, repository.tip, **kwd )
+ if error_message:
+ message = '%s<br/>%s' % ( message, error_message )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=repository_id,
+ message=message,
+ status=status ) )
trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repository',
+ id=repository_id,
commit_message='Deleted selected files',
message=message,
- id=trans.security.encode_id( repository.id ) ) )
+ status=status ) )
else:
status = 'error'
selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
@@ -121,7 +145,7 @@
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message ):
# Upload a tar archive of files.
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
files_to_remove = []
ok, message = self.__check_archive( tar )
if not ok:
@@ -158,11 +182,21 @@
for repo_file in files_to_remove:
# Remove files in the repository (relative to the upload point)
# that are not in the uploaded archive.
- commands.remove( repo.ui, repo, repo_file )
+ commands.remove( repo.ui, repo, repo_file, force=True )
for filename_in_archive in filenames_in_archive:
commands.add( repo.ui, repo, filename_in_archive )
- # Commit the changes.
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ if filename_in_archive.endswith( '.loc.sample' ):
+ # Handle the special case where a xxx.loc.sample file is
+ # being uploaded by copying it to ~/tool-data/xxx.loc.
+ copy_sample_loc_file( trans, filename_in_archive )
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
handle_email_alerts( trans, repository )
return True, '', files_to_remove
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -4,7 +4,7 @@
Naming: try to use class names that have a distinct plural form so that
the relationship cardinalities are obvious (e.g. prefer Dataset to Data)
"""
-import os.path, os, errno, sys, codecs, operator, tempfile, logging, tarfile, mimetypes, ConfigParser
+import os.path, os, errno, sys, codecs, operator, logging, tarfile, mimetypes, ConfigParser
from galaxy import util
from galaxy.util.bunch import Bunch
from galaxy.util.hash_util import *
@@ -114,6 +114,15 @@
return config.get( "paths", option )
raise Exception( "Entry for repository %s missing in %s/hgweb.config file." % ( lhs, os.getcwd() ) )
@property
+ def revision( self ):
+ repo = hg.repository( ui.ui(), self.repo_path )
+ tip_ctx = repo.changectx( repo.changelog.tip() )
+ return "%s:%s" % ( str( tip_ctx.rev() ), str( repo.changectx( repo.changelog.tip() ) ) )
+ @property
+ def tip( self ):
+ repo = hg.repository( ui.ui(), self.repo_path )
+ return str( repo.changectx( repo.changelog.tip() ) )
+ @property
def is_new( self ):
repo = hg.repository( ui.ui(), self.repo_path )
tip_ctx = repo.changectx( repo.changelog.tip() )
@@ -143,6 +152,12 @@
fp.write( line )
fp.close()
+class RepositoryMetadata( object ):
+ def __init__( self, repository_id=None, changeset_revision=None, metadata=None ):
+ self.repository_id = repository_id
+ self.changeset_revision = changeset_revision
+ self.metadata = metadata or dict()
+
class ItemRatingAssociation( object ):
def __init__( self, id=None, user=None, item=None, rating=0, comment='' ):
self.id = id
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/model/mapping.py
--- a/lib/galaxy/webapps/community/model/mapping.py
+++ b/lib/galaxy/webapps/community/model/mapping.py
@@ -111,6 +111,14 @@
Column( "email_alerts", JSONType, nullable=True ),
Column( "times_downloaded", Integer ) )
+RepositoryMetadata.table = Table( "repository_metadata", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+ Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+ Column( "metadata", JSONType, nullable=True ) )
+
RepositoryRatingAssociation.table = Table( "repository_rating_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
@@ -186,6 +194,9 @@
ratings=relation( RepositoryRatingAssociation, order_by=desc( RepositoryRatingAssociation.table.c.update_time ), backref="repositories" ),
user=relation( User.mapper ) ) )
+assign_mapper( context, RepositoryMetadata, RepositoryMetadata.table,
+ properties=dict( repository=relation( Repository ) ) )
+
assign_mapper( context, RepositoryRatingAssociation, RepositoryRatingAssociation.table,
properties=dict( repository=relation( Repository ), user=relation( User ) ) )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/model/migrate/versions/0008_add_repository_metadata_table.py
--- /dev/null
+++ b/lib/galaxy/webapps/community/model/migrate/versions/0008_add_repository_metadata_table.py
@@ -0,0 +1,52 @@
+"""
+Migration script to add the repository_metadata table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+RepositoryMetadata_table = Table( "repository_metadata", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+ Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+ Column( "metadata", JSONType, nullable=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ # Create repository_metadata table.
+ try:
+ RepositoryMetadata_table.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Creating repository_metadata table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+ # Drop repository_metadata table.
+ try:
+ RepositoryMetadata_table.drop()
+ except Exception, e:
+ print str(e)
+ log.debug( "Dropping repository_metadata table failed: %s" % str( e ) )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/demo_sequencer/config.py
--- a/lib/galaxy/webapps/demo_sequencer/config.py
+++ b/lib/galaxy/webapps/demo_sequencer/config.py
@@ -49,7 +49,7 @@
self.smtp_server = kwargs.get( 'smtp_server', None )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/reports/config.py
--- a/lib/galaxy/webapps/reports/config.py
+++ b/lib/galaxy/webapps/reports/config.py
@@ -33,7 +33,7 @@
self.sendmail_path = kwargs.get('sendmail_path',"/usr/sbin/sendmail")
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 scripts/check_galaxy.py
--- a/scripts/check_galaxy.py
+++ b/scripts/check_galaxy.py
@@ -292,7 +292,7 @@
self.in_span = False
def handle_data(self, data):
if self.in_span:
- if data == "No such user":
+ if data == "No such user (please note that login is case sensitive)":
self.no_user = True
elif data == "Invalid password":
self.bad_pw = True
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 scripts/cleanup_datasets/cleanup_datasets.py
--- a/scripts/cleanup_datasets/cleanup_datasets.py
+++ b/scripts/cleanup_datasets/cleanup_datasets.py
@@ -170,6 +170,7 @@
app.model.History.table.c.update_time < cutoff_time ) ) \
.options( eagerload( 'datasets' ) )
for history in histories:
+ print "### Processing history id %d (%s)" % (history.id, history.name)
for dataset_assoc in history.datasets:
_purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
if not info_only:
@@ -182,6 +183,8 @@
history.purged = True
app.sa_session.add( history )
app.sa_session.flush()
+ else:
+ print "History id %d will be purged (without 'info_only' mode)" % history.id
history_count += 1
stop = time.time()
print 'Purged %d histories.' % history_count
@@ -310,17 +313,21 @@
dataset_ids.extend( [ row.id for row in history_dataset_ids_query.execute() ] )
# Process each of the Dataset objects
for dataset_id in dataset_ids:
+ dataset = app.sa_session.query( app.model.Dataset ).get( dataset_id )
+ if dataset.id in skip:
+ continue
+ skip.append( dataset.id )
print "######### Processing dataset id:", dataset_id
- dataset = app.sa_session.query( app.model.Dataset ).get( dataset_id )
- if dataset.id not in skip and _dataset_is_deletable( dataset ):
- deleted_dataset_count += 1
- for dataset_instance in dataset.history_associations + dataset.library_associations:
- # Mark each associated HDA as deleted
- _purge_dataset_instance( dataset_instance, app, remove_from_disk, include_children=True, info_only=info_only, is_deletable=True )
- deleted_instance_count += 1
- skip.append( dataset.id )
+ if not _dataset_is_deletable( dataset ):
+ print "Dataset is not deletable (shared between multiple histories/libraries, at least one is not deleted)"
+ continue
+ deleted_dataset_count += 1
+ for dataset_instance in dataset.history_associations + dataset.library_associations:
+ # Mark each associated HDA as deleted
+ _purge_dataset_instance( dataset_instance, app, remove_from_disk, include_children=True, info_only=info_only, is_deletable=True )
+ deleted_instance_count += 1
stop = time.time()
- print "Examined %d datasets, marked %d as deleted and purged %d dataset instances" % ( len( skip ), deleted_dataset_count, deleted_instance_count )
+ print "Examined %d datasets, marked %d datasets and %d dataset instances (HDA) as deleted" % ( len( skip ), deleted_dataset_count, deleted_instance_count )
print "Total elapsed time: ", stop - start
print "##########################################"
@@ -360,15 +367,24 @@
# A dataset_instance is either a HDA or an LDDA. Purging a dataset instance marks the instance as deleted,
# and marks the associated dataset as deleted if it is not associated with another active DatasetInstance.
if not info_only:
- print "Marking as deleted: ", dataset_instance.__class__.__name__, " id ", dataset_instance.id
+ print "Marking as deleted: %s id %d (for dataset id %d)" % \
+ ( dataset_instance.__class__.__name__, dataset_instance.id, dataset_instance.dataset.id )
dataset_instance.mark_deleted( include_children = include_children )
dataset_instance.clear_associated_files()
app.sa_session.add( dataset_instance )
app.sa_session.flush()
app.sa_session.refresh( dataset_instance.dataset )
+ else:
+ print "%s id %d (for dataset id %d) will be marked as deleted (without 'info_only' mode)" % \
+ ( dataset_instance.__class__.__name__, dataset_instance.id, dataset_instance.dataset.id )
if is_deletable or _dataset_is_deletable( dataset_instance.dataset ):
# Calling methods may have already checked _dataset_is_deletable, if so, is_deletable should be True
_delete_dataset( dataset_instance.dataset, app, remove_from_disk, info_only=info_only, is_deletable=is_deletable )
+ else:
+ if info_only:
+ print "Not deleting dataset ", dataset_instance.dataset.id, " (will be possibly deleted without 'info_only' mode)"
+ else:
+ print "Not deleting dataset %d (shared between multiple histories/libraries, at least one not deleted)" % dataset_instance.dataset.id
#need to purge children here
if include_children:
for child in dataset_instance.children:
@@ -396,8 +412,13 @@
.filter( app.model.MetadataFile.table.c.lda_id==ldda.id ):
metadata_files.append( metadata_file )
for metadata_file in metadata_files:
- print "The following metadata files attached to associations of Dataset '%s' have been purged:" % dataset.id
- if not info_only:
+ op_description = "marked as deleted"
+ if remove_from_disk:
+ op_description = op_description + " and purged from disk"
+ if info_only:
+ print "The following metadata files attached to associations of Dataset '%s' will be %s (without 'info_only' mode):" % ( dataset.id, op_description )
+ else:
+ print "The following metadata files attached to associations of Dataset '%s' have been %s:" % ( dataset.id, op_description )
if remove_from_disk:
try:
print "Removing disk file ", metadata_file.file_name
@@ -411,10 +432,13 @@
app.sa_session.add( metadata_file )
app.sa_session.flush()
print "%s" % metadata_file.file_name
- print "Deleting dataset id", dataset.id
- dataset.deleted = True
- app.sa_session.add( dataset )
- app.sa_session.flush()
+ if not info_only:
+ print "Deleting dataset id", dataset.id
+ dataset.deleted = True
+ app.sa_session.add( dataset )
+ app.sa_session.flush()
+ else:
+ print "Dataset %i will be deleted (without 'info_only' mode)" % ( dataset.id )
def _purge_dataset( app, dataset, remove_from_disk, info_only = False ):
if dataset.deleted:
@@ -433,6 +457,8 @@
dataset.purged = True
app.sa_session.add( dataset )
app.sa_session.flush()
+ else:
+ print "Dataset %i will be purged (without 'info_only' mode)" % (dataset.id)
else:
print "This dataset (%i) is not purgable, the file (%s) will not be removed.\n" % ( dataset.id, dataset.file_name )
except OSError, exc:
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 scripts/cleanup_datasets/purge_libraries.sh
--- a/scripts/cleanup_datasets/purge_libraries.sh
+++ b/scripts/cleanup_datasets/purge_libraries.sh
@@ -1,5 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-#python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 2 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/images/fugue/arrow-transition-270-bw.png
Binary file static/images/fugue/arrow-transition-270-bw.png has changed
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/images/fugue/arrow-transition-bw.png
Binary file static/images/fugue/arrow-transition-bw.png has changed
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/june_2007_style/blue/trackster.css
--- a/static/june_2007_style/blue/trackster.css
+++ b/static/june_2007_style/blue/trackster.css
@@ -51,3 +51,8 @@
.bookmark{background:white;border:solid #999 1px;border-right:none;margin:0.5em;margin-right:0;padding:0.5em;}
.bookmark .position{font-weight:bold;}
.delete-icon-container{float:right;}
+.icon{display:inline-block;width:16px;height:16px;}
+.icon.more-down{background:url('../images/fugue/arrow-transition-270-bw.png') no-repeat 0px 0px;}
+.icon.more-across{background:url('../images/fugue/arrow-transition-bw.png') no-repeat 0px 0px;}
+.intro{padding:1em;}
+.intro>.action-button{background-color:#CCC;padding:1em;}
\ No newline at end of file
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/june_2007_style/trackster.css.tmpl
--- a/static/june_2007_style/trackster.css.tmpl
+++ b/static/june_2007_style/trackster.css.tmpl
@@ -293,3 +293,22 @@
float:right;
}
+.icon {
+ display:inline-block;
+ width:16px;
+ height:16px;
+}
+.icon.more-down {
+ background:url('../images/fugue/arrow-transition-270-bw.png') no-repeat 0px 0px;
+}
+.icon.more-across {
+ background: url('../images/fugue/arrow-transition-bw.png') no-repeat 0px 0px;
+}
+.intro {
+ padding: 1em;
+}
+.intro > .action-button {
+ background-color: #CCC;
+ padding: 1em;
+}
+
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/scripts/galaxy.base.js
--- a/static/scripts/galaxy.base.js
+++ b/static/scripts/galaxy.base.js
@@ -725,4 +725,5 @@
}
return anchor;
});
+
});
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/scripts/galaxy.panels.js
--- a/static/scripts/galaxy.panels.js
+++ b/static/scripts/galaxy.panels.js
@@ -214,7 +214,7 @@
init_fn();
}
};
-
+
function show_in_overlay( options ) {
var width = options.width || '600';
var height = options.height || '400';
@@ -226,9 +226,9 @@
show_modal( null, $( "<div style='margin: -5px;'><img id='close_button' style='position:absolute;right:-17px;top:-15px;src='" + image_path + "/closebox.png'><iframe style='margin: 0; padding: 0;' src='" + options.url + "' width='" + width + "' height='" + height + "' scrolling='" + scroll + "' frameborder='0'></iframe></div>" ) );
$("#close_button").bind( "click", function() { hide_modal(); } );
}
-
+
// Tab management
-
+
$(function() {
$(".tab").each( function() {
var submenu = $(this).children( ".submenu" );
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/scripts/packed/trackster.js
--- a/static/scripts/packed/trackster.js
+++ b/static/scripts/packed/trackster.js
@@ -1,1 +1,1 @@
-var class_module=function(b,a){var c=function(){var f=arguments[0];for(var e=1;e<arguments.length;e++){var d=arguments[e];for(key in d){f[key]=d[key]}}return f};a.extend=c};var BEFORE=1001,CONTAINS=1002,OVERLAP_START=1003,OVERLAP_END=1004,CONTAINED_BY=1005,AFTER=1006;var compute_overlap=function(e,b){var g=e[0],f=e[1],d=b[0],c=b[1],a;if(g<d){if(f<d){a=BEFORE}else{if(f<=c){a=OVERLAP_START}else{a=CONTAINS}}}else{if(g>c){a=AFTER}else{if(f<=c){a=CONTAINED_BY}else{a=OVERLAP_END}}}return a};var is_overlap=function(c,b){var a=compute_overlap(c,b);return(a!==BEFORE&&a!==AFTER)};var trackster_module=function(f,T){var n=f("class").extend,p=f("slotting"),I=f("painters");var Z=function(aa,ab){this.document=aa;this.default_font=ab!==undefined?ab:"9px Monaco, Lucida Console, monospace";this.dummy_canvas=this.new_canvas();this.dummy_context=this.dummy_canvas.getContext("2d");this.dummy_context.font=this.default_font;this.char_width_px=this.dummy_context.measureText("A").width;this.patterns={};this.load_pattern("right_strand","/visualization/strand_right.png");this.load_pattern("left_strand","/visualization/strand_left.png");this.load_pattern("right_strand_inv","/visualization/strand_right_inv.png");this.load_pattern("left_strand_inv","/visualization/strand_left_inv.png")};n(Z.prototype,{load_pattern:function(aa,ae){var ab=this.patterns,ac=this.dummy_context,ad=new Image();ad.src=image_path+ae;ad.onload=function(){ab[aa]=ac.createPattern(ad,"repeat")}},get_pattern:function(aa){return this.patterns[aa]},new_canvas:function(){var aa=this.document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(aa)}aa.manager=this;return aa}});var C=function(aa,ab){aa.bind("drag",{handle:ab,relative:true},function(af,ag){var ae=$(this).parent();var ad=ae.children();var 
ac;for(ac=0;ac<ad.length;ac++){if(ag.offsetY<$(ad.get(ac)).position().top){break}}if(ac===ad.length){if(this!==ad.get(ac-1)){ae.append(this)}}else{if(this!==ad.get(ac)){$(this).insertBefore(ad.get(ac))}}}).bind("dragstart",function(){$(this).css({"border-top":"1px solid blue","border-bottom":"1px solid blue"})}).bind("dragend",function(){$(this).css("border","0px")})};T.sortable=C;var D=9,A=18,O=D+2,w=100,F=12000,M=200,z=5,s=10,H=5000,t=100,m="There was an error in indexing this dataset. ",G="A converter for this dataset is not installed. Please check your datatypes_conf.xml file.",B="No data for this chrom/contig.",q="Currently indexing... please wait",v="Tool cannot be rerun: ",a="Loading data...",U="Ready for display",d=10,r=5,y=5;function u(aa){return Math.round(aa*1000)/1000}var c=function(aa){this.num_elements=aa;this.clear()};n(c.prototype,{get:function(ab){var aa=this.key_ary.indexOf(ab);if(aa!==-1){if(this.obj_cache[ab].stale){this.key_ary.splice(aa,1);delete this.obj_cache[ab]}else{this.move_key_to_end(ab,aa)}}return this.obj_cache[ab]},set:function(ab,ac){if(!this.obj_cache[ab]){if(this.key_ary.length>=this.num_elements){var aa=this.key_ary.shift();delete this.obj_cache[aa]}this.key_ary.push(ab)}this.obj_cache[ab]=ac;return ac},move_key_to_end:function(ab,aa){this.key_ary.splice(aa,1);this.key_ary.push(ab)},clear:function(){this.obj_cache={};this.key_ary=[]},size:function(){return this.key_ary.length}});var N=function(ab,aa,ac){c.call(this,ab);this.track=aa;this.subset=(ac!==undefined?ac:true)};n(N.prototype,c.prototype,{load_data:function(ai,aj,ae,ah,ab,ag){var ad={chrom:ai,low:aj,high:ae,mode:ah,resolution:ab,dataset_id:this.track.dataset_id,hda_ldda:this.track.hda_ldda};$.extend(ad,ag);if(this.track.filters_manager){var ak=[];var aa=this.track.filters_manager.filters;for(var af=0;af<aa.length;af++){ak[ak.length]=aa[af].name}ad.filter_cols=JSON.stringify(ak)}var ac=this;return 
$.getJSON(this.track.data_url,ad,function(al){ac.set_data(aj,ae,ah,al)})},get_data:function(ac,aa,af,ag,ab,ae){var ad=this.get(this.gen_key(aa,af,ag));if(ad){return ad}ad=this.load_data(ac,aa,af,ag,ab,ae);this.set_data(aa,af,ag,ad);return ad},set_data:function(ab,ac,ad,aa){return this.set(this.gen_key(ab,ac,ad),aa)},gen_key:function(aa,ac,ad){var ab=aa+"_"+ac+"_"+ad;return ab},split_key:function(aa){return aa.split("_")}});var E=function(ab,aa,ac){N.call(this,ab,aa,ac)};n(E.prototype,N.prototype,c.prototype,{load_data:function(ac,aa,ae,af,ab,ad){if(ab>1){return}return N.prototype.load_data.call(this,ac,aa,ae,af,ab,ad)}});var Y=function(aa,ad,ac,ab,ae){this.container=aa;this.chrom=null;this.vis_id=ac;this.dbkey=ab;this.title=ad;this.tracks=[];this.label_tracks=[];this.max_low=0;this.max_high=0;this.num_tracks=0;this.track_id_counter=0;this.zoom_factor=3;this.min_separation=30;this.has_changes=false;this.init(ae);this.canvas_manager=new Z(aa.get(0).ownerDocument);this.reset()};n(Y.prototype,{init:function(ad){var ac=this.container,aa=this;this.top_container=$("<div/>").addClass("top-container").appendTo(ac);this.content_div=$("<div/>").addClass("content").css("position","relative").appendTo(ac);this.bottom_container=$("<div/>").addClass("bottom-container").appendTo(ac);this.top_labeltrack=$("<div/>").addClass("top-labeltrack").appendTo(this.top_container);this.viewport_container=$("<div/>").addClass("viewport-container").addClass("viewport-container").appendTo(this.content_div);this.intro_div=$("<div/>").addClass("intro").text("Select a chrom from the dropdown 
below").hide();this.nav_labeltrack=$("<div/>").addClass("nav-labeltrack").appendTo(this.bottom_container);this.nav_container=$("<div/>").addClass("nav-container").prependTo(this.top_container);this.nav=$("<div/>").addClass("nav").appendTo(this.nav_container);this.overview=$("<div/>").addClass("overview").appendTo(this.bottom_container);this.overview_viewport=$("<div/>").addClass("overview-viewport").appendTo(this.overview);this.overview_close=$("<a href='javascript:void(0);'>Close Overview</a>").addClass("overview-close").hide().appendTo(this.overview_viewport);this.overview_highlight=$("<div/>").addClass("overview-highlight").hide().appendTo(this.overview_viewport);this.overview_box_background=$("<div/>").addClass("overview-boxback").appendTo(this.overview_viewport);this.overview_box=$("<div/>").addClass("overview-box").appendTo(this.overview_viewport);this.default_overview_height=this.overview_box.height();this.nav_controls=$("<div/>").addClass("nav-controls").appendTo(this.nav);this.chrom_select=$("<select/>").attr({name:"chrom"}).css("width","15em").addClass("no-autocomplete").append("<option value=''>Loading</option>").appendTo(this.nav_controls);var ab=function(ae){if(ae.type==="focusout"||(ae.keyCode||ae.which)===13||(ae.keyCode||ae.which)===27){if((ae.keyCode||ae.which)!==27){aa.go_to($(this).val())}$(this).hide();$(this).val("");aa.location_span.show();aa.chrom_select.show()}};this.nav_input=$("<input/>").addClass("nav-input").hide().bind("keyup focusout",ab).appendTo(this.nav_controls);this.location_span=$("<span/>").addClass("location").appendTo(this.nav_controls);this.location_span.bind("click",function(){aa.location_span.hide();aa.chrom_select.hide();aa.nav_input.val(aa.chrom+":"+aa.low+"-"+aa.high);aa.nav_input.css("display","inline-block");aa.nav_input.select();aa.nav_input.focus()});if(this.vis_id!==undefined){this.hidden_input=$("<input/>").attr("type","hidden").val(this.vis_id).appendTo(this.nav_controls)}this.zo_link=$("<a id='zoom-out' 
/>").click(function(){aa.zoom_out();aa.redraw()}).appendTo(this.nav_controls);this.zi_link=$("<a id='zoom-in' />").click(function(){aa.zoom_in();aa.redraw()}).appendTo(this.nav_controls);this.load_chroms({low:0},ad);this.chrom_select.bind("change",function(){aa.change_chrom(aa.chrom_select.val())});this.intro_div.show();this.content_div.bind("click",function(ae){$(this).find("input").trigger("blur")});this.content_div.bind("dblclick",function(ae){aa.zoom_in(ae.pageX,this.viewport_container)});this.overview_box.bind("dragstart",function(ae,af){this.current_x=af.offsetX}).bind("drag",function(ae,ag){var ah=ag.offsetX-this.current_x;this.current_x=ag.offsetX;var af=Math.round(ah/aa.viewport_container.width()*(aa.max_high-aa.max_low));aa.move_delta(-af)});this.overview_close.bind("click",function(){for(var af=0,ae=aa.tracks.length;af<ae;af++){aa.tracks[af].is_overview=false}$(this).siblings().filter("canvas").remove();$(this).parent().css("height",aa.overview_box.height());aa.overview_highlight.hide();$(this).hide()});this.viewport_container.bind("draginit",function(ae,af){if(ae.clientX>aa.viewport_container.width()-16){return false}}).bind("dragstart",function(ae,af){af.original_low=aa.low;af.current_height=ae.clientY;af.current_x=af.offsetX}).bind("drag",function(ag,ai){var ae=$(this);var aj=ai.offsetX-ai.current_x;var af=ae.scrollTop()-(ag.clientY-ai.current_height);ae.scrollTop(af);ai.current_height=ag.clientY;ai.current_x=ai.offsetX;var ah=Math.round(aj/aa.viewport_container.width()*(aa.high-aa.low));aa.move_delta(ah)}).bind("mousewheel",function(ag,ai,af,ae){if(af){var ah=Math.round(-af/aa.viewport_container.width()*(aa.high-aa.low));aa.move_delta(ah)}});this.top_labeltrack.bind("dragstart",function(ae,af){return $("<div 
/>").css({height:aa.content_div.height()+aa.top_labeltrack.height()+aa.nav_labeltrack.height()+1,top:"0px",position:"absolute","background-color":"#ccf",opacity:0.5,"z-index":1000}).appendTo($(this))}).bind("drag",function(ai,aj){$(aj.proxy).css({left:Math.min(ai.pageX,aj.startX),width:Math.abs(ai.pageX-aj.startX)});var af=Math.min(ai.pageX,aj.startX)-aa.container.offset().left,ae=Math.max(ai.pageX,aj.startX)-aa.container.offset().left,ah=(aa.high-aa.low),ag=aa.viewport_container.width();aa.update_location(Math.round(af/ag*ah)+aa.low,Math.round(ae/ag*ah)+aa.low)}).bind("dragend",function(aj,ak){var af=Math.min(aj.pageX,ak.startX),ae=Math.max(aj.pageX,ak.startX),ah=(aa.high-aa.low),ag=aa.viewport_container.width(),ai=aa.low;aa.low=Math.round(af/ag*ah)+ai;aa.high=Math.round(ae/ag*ah)+ai;$(ak.proxy).remove();aa.redraw()});this.add_label_track(new X(this,this.top_labeltrack));this.add_label_track(new X(this,this.nav_labeltrack));$(window).bind("resize",function(){aa.resize_window()});$(document).bind("redraw",function(){aa.redraw()});this.reset();$(window).trigger("resize")},update_location:function(aa,ab){this.location_span.text(commatize(aa)+" - "+commatize(ab));this.nav_input.val(this.chrom+":"+commatize(aa)+"-"+commatize(ab))},load_chroms:function(ab,ac){ab.num=t;$.extend(ab,(this.vis_id!==undefined?{vis_id:this.vis_id}:{dbkey:this.dbkey}));var aa=this;$.ajax({url:chrom_url,data:ab,dataType:"json",success:function(ae){if(ae.chrom_info.length===0){alert("Invalid chromosome: "+ab.chrom);return}if(ae.reference){aa.add_label_track(new x(aa))}aa.chrom_data=ae.chrom_info;var ah='<option value="">Select Chrom/Contig</option>';for(var ag=0,ad=aa.chrom_data.length;ag<ad;ag++){var af=aa.chrom_data[ag].chrom;ah+='<option value="'+af+'">'+af+"</option>"}if(ae.prev_chroms){ah+='<option value="previous">Previous '+t+"</option>"}if(ae.next_chroms){ah+='<option value="next">Next 
'+t+"</option>"}aa.chrom_select.html(ah);if(ac){ac()}aa.chrom_start_index=ae.start_index},error:function(){alert("Could not load chroms for this dbkey:",aa.dbkey)}})},change_chrom:function(ae,ab,ag){if(!ae||ae==="None"){return}var ad=this;if(ae==="previous"){ad.load_chroms({low:this.chrom_start_index-t});return}if(ae==="next"){ad.load_chroms({low:this.chrom_start_index+t});return}var af=$.grep(ad.chrom_data,function(ai,aj){return ai.chrom===ae})[0];if(af===undefined){ad.load_chroms({chrom:ae},function(){ad.change_chrom(ae,ab,ag)});return}else{if(ae!==ad.chrom){ad.chrom=ae;if(!ad.chrom){ad.intro_div.show()}else{ad.intro_div.hide()}ad.chrom_select.val(ad.chrom);ad.max_high=af.len-1;ad.reset();ad.redraw(true);for(var ah=0,aa=ad.tracks.length;ah<aa;ah++){var ac=ad.tracks[ah];if(ac.init){ac.init()}}}if(ab!==undefined&&ag!==undefined){ad.low=Math.max(ab,0);ad.high=Math.min(ag,ad.max_high)}ad.reset_overview();ad.redraw()}},go_to:function(ae){var ai=this,aa,ad,ab=ae.split(":"),ag=ab[0],ah=ab[1];if(ah!==undefined){try{var af=ah.split("-");aa=parseInt(af[0].replace(/,/g,""),10);ad=parseInt(af[1].replace(/,/g,""),10)}catch(ac){return false}}ai.change_chrom(ag,aa,ad)},move_fraction:function(ac){var aa=this;var ab=aa.high-aa.low;this.move_delta(ac*ab)},move_delta:function(ac){var aa=this;var ab=aa.high-aa.low;if(aa.low-ac<aa.max_low){aa.low=aa.max_low;aa.high=aa.max_low+ab}else{if(aa.high-ac>aa.max_high){aa.high=aa.max_high;aa.low=aa.max_high-ab}else{aa.high-=ac;aa.low-=ac}}aa.redraw()},add_track:function(aa){aa.view=this;aa.track_id=this.track_id_counter;this.tracks.push(aa);if(aa.init){aa.init()}aa.container_div.attr("id","track_"+aa.track_id);C(aa.container_div,".draghandle");this.track_id_counter+=1;this.num_tracks+=1},add_label_track:function(aa){aa.view=this;this.label_tracks.push(aa)},remove_track:function(aa){this.has_changes=true;aa.container_div.fadeOut("slow",function(){$(this).remove()});delete 
this.tracks[this.tracks.indexOf(aa)];this.num_tracks-=1},reset:function(){this.low=this.max_low;this.high=this.max_high;this.viewport_container.find(".yaxislabel").remove()},redraw:function(ah){var ag=this.high-this.low,af=this.low,ab=this.high;if(af<this.max_low){af=this.max_low}if(ab>this.max_high){ab=this.max_high}if(this.high!==0&&ag<this.min_separation){ab=af+this.min_separation}this.low=Math.floor(af);this.high=Math.ceil(ab);this.resolution=Math.pow(z,Math.ceil(Math.log((this.high-this.low)/M)/Math.log(z)));this.zoom_res=Math.pow(s,Math.max(0,Math.ceil(Math.log(this.resolution,s)/Math.log(s))));var aa=(this.low/(this.max_high-this.max_low)*this.overview_viewport.width())||0;var ae=((this.high-this.low)/(this.max_high-this.max_low)*this.overview_viewport.width())||0;var ai=13;this.overview_box.css({left:aa,width:Math.max(ai,ae)}).show();if(ae<ai){this.overview_box.css("left",aa-(ai-ae)/2)}if(this.overview_highlight){this.overview_highlight.css({left:aa,width:ae})}this.update_location(this.low,this.high);if(!ah){for(var ac=0,ad=this.tracks.length;ac<ad;ac++){if(this.tracks[ac]&&this.tracks[ac].enabled){this.tracks[ac].draw()}}for(ac=0,ad=this.label_tracks.length;ac<ad;ac++){this.label_tracks[ac].draw()}}},zoom_in:function(ab,ac){if(this.max_high===0||this.high-this.low<this.min_separation){return}var ad=this.high-this.low,ae=ad/2+this.low,aa=(ad/this.zoom_factor)/2;if(ab){ae=ab/this.viewport_container.width()*(this.high-this.low)+this.low}this.low=Math.round(ae-aa);this.high=Math.round(ae+aa);this.redraw()},zoom_out:function(){if(this.max_high===0){return}var 
ab=this.high-this.low,ac=ab/2+this.low,aa=(ab*this.zoom_factor)/2;this.low=Math.round(ac-aa);this.high=Math.round(ac+aa);this.redraw()},resize_window:function(){this.viewport_container.height(this.container.height()-this.top_container.height()-this.bottom_container.height());this.nav_container.width(this.container.width());this.redraw()},reset_overview:function(){this.overview_viewport.find("canvas").remove();this.overview_viewport.height(this.default_overview_height);this.overview_box.height(this.default_overview_height);this.overview_close.hide();this.overview_highlight.hide()}});var o=function(ac,ag){this.track=ac;this.name=ag.name;this.params=[];var an=ag.params;for(var ad=0;ad<an.length;ad++){var ai=an[ad],ab=ai.name,am=ai.label,ae=unescape(ai.html),ao=ai.value,ak=ai.type;if(ak==="number"){this.params[this.params.length]=new g(ab,am,ae,ao,ai.min,ai.max)}else{if(ak=="select"){this.params[this.params.length]=new K(ab,am,ae,ao)}else{console.log("WARNING: unrecognized tool parameter type:",ab,ak)}}}this.parent_div=$("<div/>").addClass("dynamic-tool").hide();this.parent_div.bind("drag",function(aq){aq.stopPropagation()}).bind("click",function(aq){aq.stopPropagation()}).bind("dblclick",function(aq){aq.stopPropagation()});var al=$("<div class='tool-name'>").appendTo(this.parent_div).text(this.name);var aj=this.params;var ah=this;$.each(this.params,function(ar,av){var au=$("<div>").addClass("param-row").appendTo(ah.parent_div);var aq=$("<div>").addClass("param-label").text(av.label).appendTo(au);var at=$("<div/>").addClass("slider").html(av.html).appendTo(au);at.find(":input").val(av.value);$("<div style='clear: both;'/>").appendTo(au)});this.parent_div.find("input").click(function(){$(this).select()});var ap=$("<div>").addClass("param-row").appendTo(this.parent_div);var af=$("<input type='submit'>").attr("value","Run on complete dataset").appendTo(ap);var aa=$("<input type='submit'>").attr("value","Run on visible region").css("margin-left","3em").appendTo(ap);var 
ah=this;aa.click(function(){ah.run_on_region()});af.click(function(){ah.run_on_dataset()})};n(o.prototype,{get_param_values_dict:function(){var aa={};this.parent_div.find(":input").each(function(){var ab=$(this).attr("name"),ac=$(this).val();aa[ab]=JSON.stringify(ac)});return aa},get_param_values:function(){var ab=[];var aa={};this.parent_div.find(":input").each(function(){var ac=$(this).attr("name"),ad=$(this).val();if(ac){ab[ab.length]=ad}});return ab},run_on_dataset:function(){var aa=this;aa.run({dataset_id:this.track.original_dataset_id,tool_id:aa.name},function(ab){show_modal(aa.name+" is Running",aa.name+" is running on the complete dataset. Tool outputs are in dataset's history.",{Close:hide_modal})})},run_on_region:function(){var aa={dataset_id:this.track.original_dataset_id,chrom:this.track.view.chrom,low:this.track.view.low,high:this.track.view.high,tool_id:this.name},ac=this.track,ab=aa.tool_id+ac.tool_region_and_parameters_str(aa.chrom,aa.low,aa.high),ad;if(ac instanceof e){ad=new Q(ab,view,ac.hda_ldda,undefined,{},{},ac);ad.change_mode(ac.mode)}this.track.add_track(ad);ad.content_div.text("Starting job.");this.run(aa,function(ae){ad.dataset_id=ae.dataset_id;ad.content_div.text("Running job.");ad.init()})},run:function(ab,ac){$.extend(ab,this.get_param_values_dict());var aa=function(){$.getJSON(rerun_tool_url,ab,function(ad){if(ad==="no converter"){new_track.container_div.addClass("error");new_track.content_div.text(G)}else{if(ad.error){new_track.container_div.addClass("error");new_track.content_div.text(v+ad.message)}else{if(ad==="pending"){new_track.container_div.addClass("pending");new_track.content_div.text("Converting input data so that it can be easily reused.");setTimeout(aa,2000)}else{ac(ad)}}}})};aa()}});var K=function(ab,aa,ac,ad){this.name=ab;this.label=aa;this.html=ac;this.value=ad};var g=function(ac,ab,ae,af,ad,aa){K.call(this,ac,ab,ae,af);this.min=ad;this.max=aa};var 
h=function(ab,aa,ac,ad){this.name=ab;this.index=aa;this.tool_id=ac;this.tool_exp_name=ad};var R=function(ab,aa,ac,ad){h.call(this,ab,aa,ac,ad);this.low=-Number.MAX_VALUE;this.high=Number.MAX_VALUE;this.min=Number.MAX_VALUE;this.max=-Number.MAX_VALUE;this.slider=null;this.slider_label=null};n(R.prototype,{applies_to:function(aa){if(aa.length>this.index){return true}return false},keep:function(aa){if(!this.applies_to(aa)){return true}var ab=parseInt(aa[this.index]);return(isNaN(ab)||(ab>=this.low&&ab<=this.high))},update_attrs:function(ab){var aa=false;if(!this.applies_to(ab)){return aa}if(ab[this.index]<this.min){this.min=Math.floor(ab[this.index]);aa=true}if(ab[this.index]>this.max){this.max=Math.ceil(ab[this.index]);aa=true}return aa},update_ui_elt:function(){var ac=function(af,ad){var ae=ad-af;return(ae<=2?0.01:1)};var ab=this.slider.slider("option","min"),aa=this.slider.slider("option","max");if(this.min<ab||this.max>aa){this.slider.slider("option","min",this.min);this.slider.slider("option","max",this.max);this.slider.slider("option","step",ac(this.min,this.max));this.slider.slider("option","values",[this.min,this.max])}}});var W=function(ac,al){this.track=ac;this.filters=[];for(var ag=0;ag<al.length;ag++){var aa=al[ag],ab=aa.name,ak=aa.type,ai=aa.index,an=aa.tool_id,ad=aa.tool_exp_name;if(ak==="int"||ak==="float"){this.filters[ag]=new R(ab,ai,an,ad)}else{console.log("ERROR: unsupported filter: ",ab,ak)}}var aj=function(ao,ap,aq){ao.click(function(){var ar=ap.text();max=parseFloat(aq.slider("option","max")),input_size=(max<=1?4:max<=1000000?max.toString().length:6),multi_value=false;if(aq.slider("option","values")){input_size=2*input_size+1;multi_value=true}ap.text("");$("<input 
type='text'/>").attr("size",input_size).attr("maxlength",input_size).attr("value",ar).appendTo(ap).focus().select().click(function(at){at.stopPropagation()}).blur(function(){$(this).remove();ap.text(ar)}).keyup(function(ax){if(ax.keyCode===27){$(this).trigger("blur")}else{if(ax.keyCode===13){var av=aq.slider("option","min"),at=aq.slider("option","max"),aw=function(ay){return(isNaN(ay)||ay>at||ay<av)},au=$(this).val();if(!multi_value){au=parseFloat(au);if(aw(au)){alert("Parameter value must be in the range ["+av+"-"+at+"]");return $(this)}}else{au=au.split("-");au=[parseFloat(au[0]),parseFloat(au[1])];if(aw(au[0])||aw(au[1])){alert("Parameter value must be in the range ["+av+"-"+at+"]");return $(this)}}aq.slider((multi_value?"values":"value"),au)}}})})};this.parent_div=$("<div/>").addClass("filters").hide();this.parent_div.bind("drag",function(ao){ao.stopPropagation()}).bind("click",function(ao){ao.stopPropagation()}).bind("dblclick",function(ao){ao.stopPropagation()}).bind("keydown",function(ao){ao.stopPropagation()});var ae=this;$.each(this.filters,function(av,ap){var ar=$("<div/>").addClass("slider-row").appendTo(ae.parent_div);var ao=$("<div/>").addClass("slider-label").appendTo(ar);var ax=$("<span/>").addClass("slider-name").text(ap.name+" ").appendTo(ao);var aq=$("<span/>");var at=$("<span/>").addClass("slider-value").appendTo(ao).append("[").append(aq).append("]");var aw=$("<div/>").addClass("slider").appendTo(ar);ap.control_element=$("<div/>").attr("id",ap.name+"-filter-control").appendTo(aw);var au=[0,0];ap.control_element.slider({range:true,min:Number.MAX_VALUE,max:-Number.MIN_VALUE,values:[0,0],slide:function(ay,az){au=az.values;aq.text(az.values[0]+"-"+az.values[1]);setTimeout(function(){if(az.values[0]==au[0]&&az.values[1]==au[1]){var 
aA=az.values;aq.text(aA[0]+"-"+aA[1]);ap.low=aA[0];ap.high=aA[1];ae.track.draw(true,true)}},50)},change:function(ay,az){ap.control_element.slider("option","slide").call(ap.control_element,ay,az)}});ap.slider=ap.control_element;ap.slider_label=aq;aj(at,aq,ap.control_element);$("<div style='clear: both;'/>").appendTo(ar)});if(this.filters.length!=0){var am=$("<div>").addClass("param-row").appendTo(this.parent_div);var ah=$("<input type='submit'>").attr("value","Run on complete dataset").appendTo(am);var af=this;ah.click(function(){af.run_on_dataset()})}};n(W.prototype,{reset_filters:function(){for(var aa=0;aa<this.filters.length;aa++){filter=this.filters[aa];filter.slider.slider("option","values",[filter.min,filter.max])}},run_on_dataset:function(){var ai=function(am,ak,al){if(!(ak in am)){am[ak]=al}return am[ak]};var ac={},aa,ab,ad;for(var ae=0;ae<this.filters.length;ae++){aa=this.filters[ae];if(aa.tool_id){if(aa.min!=aa.low){ab=ai(ac,aa.tool_id,[]);ab[ab.length]=aa.tool_exp_name+" >= "+aa.low}if(aa.max!=aa.high){ab=ai(ac,aa.tool_id,[]);ab[ab.length]=aa.tool_exp_name+" <= "+aa.high}}}var ag=[];for(var aj in ac){ag[ag.length]=[aj,ac[aj]]}var ah=ag.length;(function af(aq,an){var al=an[0],am=al[0],ap=al[1],ao="("+ap.join(") and (")+")",ak={cond:ao,input:aq,target_dataset_id:aq,tool_id:am},an=an.slice(1);$.getJSON(run_tool_url,ak,function(ar){if(ar.error){show_modal("Filter Dataset","Error running tool "+am,{Close:hide_modal})}else{if(an.length===0){show_modal("Filtering Dataset","Filter(s) are running on the complete dataset. 
Outputs are in dataset's history.",{Close:hide_modal})}else{af(ar.dataset_id,an)}}})})(this.track.dataset_id,ag)}});var V=function(aa){this.track=aa.track;this.params=aa.params;this.values={};if(aa.saved_values){this.restore_values(aa.saved_values)}this.onchange=aa.onchange};n(V.prototype,{restore_values:function(aa){var ab=this;$.each(this.params,function(ac,ad){if(aa[ad.key]!==undefined){ab.values[ad.key]=aa[ad.key]}else{ab.values[ad.key]=ad.default_value}})},build_form:function(){var ab=this;var aa=$("<div />");$.each(this.params,function(af,ad){if(!ad.hidden){var ac="param_"+af;var ak=$("<div class='form-row' />").appendTo(aa);ak.append($("<label />").attr("for",ac).text(ad.label+":"));if(ad.type==="bool"){ak.append($('<input type="checkbox" />').attr("id",ac).attr("name",ac).attr("checked",ab.values[ad.key]))}else{if(ad.type==="color"){var ah=ab.values[ad.key];var ag=$("<input />").attr("id",ac).attr("name",ac).val(ah);var ai=$("<div class='tipsy tipsy-north' style='position: absolute;' />").hide();var ae=$("<div style='background-color: black; padding: 10px;'></div>").appendTo(ai);var aj=$("<div/>").appendTo(ae).farbtastic({width:100,height:100,callback:ag,color:ah});$("<div />").append(ag).append(ai).appendTo(ak).bind("click",function(al){ai.css({left:$(this).position().left+($(ag).width()/2)-60,top:$(this).position().top+$(this.height)}).show();$(document).bind("click.color-picker",function(){ai.hide();$(document).unbind("click.color-picker")});al.stopPropagation()})}else{ak.append($("<input />").attr("id",ac).attr("name",ac).val(ab.values[ad.key]))}}}});return aa},update_from_form:function(aa){var ac=this;var ab=false;$.each(this.params,function(ad,af){if(!af.hidden){var ag="param_"+ad;var ae=aa.find("#"+ag).val();if(af.type==="float"){ae=parseFloat(ae)}else{if(af.type==="int"){ae=parseInt(ae)}else{if(af.type==="bool"){ae=aa.find("#"+ag).is(":checked")}}}if(ae!==ac.values[af.key]){ac.values[af.key]=ae;ab=true}}});if(ab){this.onchange()}}});var 
b=function(ac,ab,aa){this.index=ac;this.resolution=ab;this.canvas=$("<div class='track-tile'/>").append(aa);this.stale=false};var l=function(ac,ab,aa,ad){b.call(this,ac,ab,aa);this.max_val=ad};var L=function(ac,ab,aa,ad){b.call(this,ac,ab,aa);this.message=ad};var j=function(ab,aa,ae,ac,ad){this.name=ab;this.view=aa;this.parent_element=ae;this.data_url=(ac?ac:default_data_url);this.data_url_extra_params={};this.data_query_wait=(ad?ad:H);this.dataset_check_url=converted_datasets_state_url;this.container_div=$("<div />").addClass("track").css("position","relative");if(!this.hidden){this.header_div=$("<div class='track-header' />").appendTo(this.container_div);if(this.view.editor){this.drag_div=$("<div class='draghandle' />").appendTo(this.header_div)}this.name_div=$("<div class='menubutton popup' />").appendTo(this.header_div);this.name_div.text(this.name);this.name_div.attr("id",this.name.replace(/\s+/g,"-").replace(/[^a-zA-Z0-9\-]/g,"").toLowerCase())}this.content_div=$("<div class='track-content'>").appendTo(this.container_div);this.parent_element.append(this.container_div)};n(j.prototype,{get_type:function(){if(this instanceof X){return"LabelTrack"}else{if(this instanceof x){return"ReferenceTrack"}else{if(this instanceof k){return"LineTrack"}else{if(this instanceof S){return"ReadTrack"}else{if(this instanceof Q){return"ToolDataFeatureTrack"}else{if(this instanceof P){return"VcfTrack"}else{if(this instanceof e){return"FeatureTrack"}}}}}}}return""},init:function(){var aa=this;aa.enabled=false;aa.tile_cache.clear();aa.data_cache.clear();aa.initial_canvas=undefined;aa.content_div.css("height","auto");aa.container_div.removeClass("nodata error pending");if(!aa.dataset_id){return}$.getJSON(converted_datasets_state_url,{hda_ldda:aa.hda_ldda,dataset_id:aa.dataset_id,chrom:aa.view.chrom},function(ab){if(!ab||ab==="error"||ab.kind==="error"){aa.container_div.addClass("error");aa.content_div.text(m);if(ab.message){var ad=aa.view.tracks.indexOf(aa);var ac=$(" <a 
href='javascript:void(0);'></a>").text("View error").bind("click",function(){show_modal("Trackster Error","<pre>"+ab.message+"</pre>",{Close:hide_modal})});aa.content_div.append(ac)}}else{if(ab==="no converter"){aa.container_div.addClass("error");aa.content_div.text(G)}else{if(ab==="no data"||(ab.data!==undefined&&(ab.data===null||ab.data.length===0))){aa.container_div.addClass("nodata");aa.content_div.text(B)}else{if(ab==="pending"){aa.container_div.addClass("pending");aa.content_div.text(q);setTimeout(function(){aa.init()},aa.data_query_wait)}else{if(ab.status==="data"){if(ab.valid_chroms){aa.valid_chroms=ab.valid_chroms;aa.make_name_popup_menu()}aa.content_div.text(U);if(aa.view.chrom){aa.content_div.text("");aa.content_div.css("height",aa.height_px+"px");aa.enabled=true;$.when(aa.predraw_init()).done(function(){aa.container_div.removeClass("nodata error pending");aa.draw()})}}}}}}})},predraw_init:function(){},update_name:function(aa){this.old_name=this.name;this.name=aa;this.name_div.text(this.name)},revert_name:function(){this.name=this.old_name;this.name_div.text(this.name)}});var J=function(ah,af,ai){var ab=this,aj=ab.view;this.filters_manager=(ah!==undefined?new W(this,ah):undefined);this.filters_available=false;this.filters_visible=false;this.tool=(af!==undefined&&obj_length(af)>0?new 
o(this,af):undefined);this.parent_track=ai;this.child_tracks=[];if(ab.hidden){return}if(this.parent_track){this.header_div.find(".draghandle").removeClass("draghandle").addClass("child-track-icon").addClass("icon-button");this.parent_element.addClass("child-track");this.tool=undefined}ab.child_tracks_container=$("<div/>").addClass("child-tracks-container").hide();ab.container_div.append(ab.child_tracks_container);if(this.filters_manager){this.filters_div=this.filters_manager.parent_div;this.header_div.after(this.filters_div)}if(this.tool){this.dynamic_tool_div=this.tool.parent_div;this.header_div.after(this.dynamic_tool_div)}if(ab.display_modes!==undefined){if(ab.mode_div===undefined){ab.mode_div=$("<div class='right-float menubutton popup' />").appendTo(ab.header_div);var ac=(ab.track_config&&ab.track_config.values.mode?ab.track_config.values.mode:ab.display_modes[0]);ab.mode=ac;ab.mode_div.text(ac);var aa={};for(var ad=0,ag=ab.display_modes.length;ad<ag;ad++){var ae=ab.display_modes[ad];aa[ae]=function(ak){return function(){ab.change_mode(ak)}}(ae)}make_popupmenu(ab.mode_div,aa)}else{ab.mode_div.hide()}}this.make_name_popup_menu()};n(J.prototype,j.prototype,{change_mode:function(ab){var aa=this;aa.mode_div.text(ab);aa.mode=ab;aa.track_config.values.mode=ab;aa.tile_cache.clear();aa.draw()},make_name_popup_menu:function(){var ab=this;var aa={};aa["Edit configuration"]=function(){var ah=function(){hide_modal();$(window).unbind("keypress.check_enter_esc")},af=function(){ab.track_config.update_from_form($(".dialog-box"));hide_modal();$(window).unbind("keypress.check_enter_esc")},ag=function(ai){if((ai.keyCode||ai.which)===27){ah()}else{if((ai.keyCode||ai.which)===13){af()}}};$(window).bind("keypress.check_enter_esc",ag);show_modal("Configure Track",ab.track_config.build_form(),{Cancel:ah,OK:af})};if(ab.filters_available>0){var ae=(ab.filters_div.is(":visible")?"Hide filters":"Show 
filters");aa[ae]=function(){ab.filters_visible=(ab.filters_div.is(":visible"));if(ab.filters_visible){ab.filters_manager.reset_filters()}ab.filters_div.toggle();ab.make_name_popup_menu()}}if(ab.tool){var ae=(ab.dynamic_tool_div.is(":visible")?"Hide tool":"Show tool");aa[ae]=function(){if(!ab.dynamic_tool_div.is(":visible")){ab.update_name(ab.name+ab.tool_region_and_parameters_str())}else{menu_option_text="Show dynamic tool";ab.revert_name()}ab.dynamic_tool_div.toggle();ab.make_name_popup_menu()}}if(ab.valid_chroms){aa["List chrom/contigs with data"]=function(){show_modal("Chrom/contigs with data","<p>"+ab.valid_chroms.join("<br/>")+"</p>",{Close:function(){hide_modal()}})}}var ac=view;var ad=function(){$("#no-tracks").show()};if(this.parent_track){ac=this.parent_track;ad=function(){}}aa.Remove=function(){ac.remove_track(ab);if(ac.num_tracks===0){ad()}};make_popupmenu(ab.name_div,aa)},draw:function(aa,ac){if(!this.dataset_id){return}var au=this.view.low,ag=this.view.high,ai=ag-au,ak=this.view.container.width(),ae=ak/ai,al=this.view.resolution,ad=$("<div style='position: relative;'></div>"),am=function(aw,ax,av){return aw+"_"+ax+"_"+av};if(!ac){this.content_div.children().remove()}this.content_div.append(ad);this.max_height=0;var ao=Math.floor(au/al/M);var af=[];var ap=0;while((ao*M*al)<ag){var at=am(ak,ae,ao);var ah=this.tile_cache.get(at);var aq=ao*M*this.view.resolution;var ab=aq+M*this.view.resolution;if(!aa&&ah){af[af.length]=ah;this.show_tile(ah,ad,aq,ab,ae)}else{this.delayed_draw(aa,at,ao,al,ad,ae,af)}ao+=1;ap++}var aj=this;var ar=setInterval(function(){if(af.length===ap){clearInterval(ar);if(ac){var aA=aj.content_div.children();var aB=false;for(var az=aA.length-1,aF=0;az>=aF;az--){var ay=$(aA[az]);if(aB){ay.remove()}else{if(ay.children().length!==0){aB=true}}}}if(aj instanceof e&&aj.mode=="Histogram"){var aE=-1;for(var az=0;az<af.length;az++){var aH=af[az].max_val;if(aH>aE){aE=aH}}for(var az=0;az<af.length;az++){if(af[az].max_val!==aE){var 
aG=af[az];aG.canvas.remove();aj.delayed_draw(true,am(ak,ae,aG.index),aG.index,aG.resolution,ad,ae,[],{max:aE})}}}if(aj.filters_manager){var ax=aj.filters_manager.filters;for(var aD=0;aD<ax.length;aD++){ax[aD].update_ui_elt()}var aC=false;if(aj.example_feature){for(var aD=0;aD<ax.length;aD++){if(ax[aD].applies_to(aj.example_feature)){aC=true;break}}}if(aj.filters_available!==aC){aj.filters_available=aC;if(!aj.filters_available){aj.filters_div.hide()}aj.make_name_popup_menu()}}var av=false;for(var aw=0;aw<af.length;aw++){if(af[aw].message){av=true;break}}if(av){for(var aw=0;aw<af.length;aw++){aG=af[aw];if(!aG.message){aG.canvas.css("padding-top",A)}}}}},50);for(var an=0;an<this.child_tracks.length;an++){this.child_tracks[an].draw(aa,ac)}},delayed_draw:function(ab,ai,ac,ae,aj,am,ak,af){var ad=this,ag=ac*M*ae,al=ag+M*ae;var ah=function(av,an,ap,ao,at,au,aq){var ar=ad.draw_tile(an,ap,ao,au,aq);ad.tile_cache.set(ai,ar);if(ar===undefined){return}ad.show_tile(ar,at,ag,al,au);ak[ak.length]=ar};var aa=setTimeout(function(){if(ag<=ad.view.high&&al>=ad.view.low){var an=(ab?undefined:ad.tile_cache.get(ai));if(an){ad.show_tile(an,aj,ag,al,am);ak[ak.length]=an}else{$.when(ad.data_cache.get_data(view.chrom,ag,al,ad.mode,ae,ad.data_url_extra_params)).then(function(ao){n(ao,af);if(view.reference_track&&am>view.canvas_manager.char_width_px){$.when(view.reference_track.data_cache.get_data(view.chrom,ag,al,ad.mode,ae,view.reference_track.data_url_extra_params)).then(function(ap){ah(aa,ao,ae,ac,aj,am,ap)})}else{ah(aa,ao,ae,ac,aj,am)}})}}},50)},show_tile:function(ah,aj,ae,ak,am){var ab=this,aa=ah.canvas,af=aa;if(ah.message){var al=$("<div/>"),ai=$("<div/>").addClass("tile-message").text(ah.message).css({height:A-1,width:ah.canvas.width}).appendTo(al),ag=$("<button/>").text("Show more").css("margin-left","0.5em").appendTo(ai);al.append(aa);af=al;ag.click(function(){var 
an=ab.data_cache.get_data(ab.view.chrom,ae,ak,ab.mode,ah.resolution);an.stale=true;ah.stale=true;ab.data_cache.get_data(ab.view.chrom,ae,ak,ab.mode,ah.resolution,{max_vals:an.data.length*2});ab.draw()}).dblclick(function(an){an.stopPropagation()})}var ad=this.view.high-this.view.low,ac=(ae-this.view.low)*am;if(this.left_offset){ac-=this.left_offset}af.css({position:"absolute",top:0,left:ac,height:""});aj.append(af);ab.max_height=Math.max(ab.max_height,af.height());ab.content_div.css("height",ab.max_height+"px");aj.children().css("height",ab.max_height+"px")},set_overview:function(){var aa=this.view;if(this.initial_canvas&&this.is_overview){aa.overview_close.show();aa.overview_viewport.append(this.initial_canvas);aa.overview_highlight.show().height(this.initial_canvas.height());aa.overview_viewport.height(this.initial_canvas.height()+aa.overview_box.height())}$(window).trigger("resize")},tool_region_and_parameters_str:function(ac,aa,ad){var ab=this,ae=(ac!==undefined&&aa!==undefined&&ad!==undefined?ac+":"+aa+"-"+ad:"all");return" - region=["+ae+"], parameters=["+ab.tool.get_param_values().join(", ")+"]"},add_track:function(aa){aa.track_id=this.track_id+"_"+this.child_tracks.length;aa.container_div.attr("id","track_"+aa.track_id);this.child_tracks_container.append(aa.container_div);C(aa.container_div,".child-track-icon");if(!$(this.child_tracks_container).is(":visible")){this.child_tracks_container.show()}this.child_tracks.push(aa);this.view.has_changes=true},remove_track:function(aa){aa.container_div.fadeOut("slow",function(){$(this).remove()})}});var X=function(aa,ab){this.hidden=true;j.call(this,null,aa,ab);this.container_div.addClass("label-track")};n(X.prototype,j.prototype,{draw:function(){var ac=this.view,ad=ac.high-ac.low,ag=Math.floor(Math.pow(10,Math.floor(Math.log(ad)/Math.log(10)))),aa=Math.floor(ac.low/ag)*ag,ae=this.view.container.width(),ab=$("<div style='position: relative; height: 1.3em;'></div>");while(aa<ac.high){var 
af=(aa-ac.low)/ad*ae;ab.append($("<div class='label'>"+commatize(aa)+"</div>").css({position:"absolute",left:af-1}));aa+=ag}this.content_div.children(":first").remove();this.content_div.append(ab)}});var x=function(aa){this.hidden=true;j.call(this,null,aa,aa.top_labeltrack);J.call(this);aa.reference_track=this;this.left_offset=200;this.height_px=12;this.container_div.addClass("reference-track");this.content_div.css("background","none");this.content_div.css("min-height","0px");this.content_div.css("border","none");this.data_url=reference_url;this.data_url_extra_params={dbkey:aa.dbkey};this.data_cache=new E(y,this,false);this.tile_cache=new c(r)};n(x.prototype,J.prototype,{draw_tile:function(ai,af,ab,ak){var ae=this,ac=M*af;if(ak>this.view.canvas_manager.char_width_px){if(ai===null){ae.content_div.css("height","0px");return}var ad=this.view.canvas_manager.new_canvas();var aj=ad.getContext("2d");ad.width=Math.ceil(ac*ak+ae.left_offset);ad.height=ae.height_px;aj.font=aj.canvas.manager.default_font;aj.textAlign="center";for(var ag=0,ah=ai.length;ag<ah;ag++){var aa=Math.round(ag*ak);aj.fillText(ai[ag],aa+ae.left_offset,10)}return new b(ab,af,ad)}this.content_div.css("height","0px")}});var k=function(ae,ac,af,aa,ad){var ab=this;this.display_modes=["Histogram","Line","Filled","Intensity"];this.mode="Histogram";j.call(this,ae,ac,ac.viewport_container);J.call(this);this.min_height_px=16;this.max_height_px=400;this.height_px=80;this.hda_ldda=af;this.dataset_id=aa;this.original_dataset_id=aa;this.data_cache=new N(y,this);this.tile_cache=new c(r);this.track_config=new V({track:this,params:[{key:"color",label:"Color",type:"color",default_value:"black"},{key:"min_value",label:"Min Value",type:"float",default_value:undefined},{key:"max_value",label:"Max 
Value",type:"float",default_value:undefined},{key:"mode",type:"string",default_value:this.mode,hidden:true},{key:"height",type:"int",default_value:this.height_px,hidden:true}],saved_values:ad,onchange:function(){ab.vertical_range=ab.prefs.max_value-ab.prefs.min_value;$("#linetrack_"+ab.track_id+"_minval").text(ab.prefs.min_value);$("#linetrack_"+ab.track_id+"_maxval").text(ab.prefs.max_value);ab.tile_cache.clear();ab.draw()}});this.prefs=this.track_config.values;this.height_px=this.track_config.values.height;this.vertical_range=this.track_config.values.max_value-this.track_config.values.min_value;this.add_resize_handle()};n(k.prototype,J.prototype,{add_resize_handle:function(){var aa=this;var ad=false;var ac=false;var ab=$("<div class='track-resize'>");$(aa.container_div).hover(function(){ad=true;ab.show()},function(){ad=false;if(!ac){ab.hide()}});ab.hide().bind("dragstart",function(ae,af){ac=true;af.original_height=$(aa.content_div).height()}).bind("drag",function(af,ag){var ae=Math.min(Math.max(ag.original_height+ag.deltaY,aa.min_height_px),aa.max_height_px);$(aa.content_div).css("height",ae);aa.height_px=ae;aa.draw(true)}).bind("dragend",function(ae,af){aa.tile_cache.clear();ac=false;if(!ad){ab.hide()}aa.track_config.values.height=aa.height_px}).appendTo(aa.container_div)},predraw_init:function(){var aa=this,ab=aa.view.tracks.indexOf(aa);aa.vertical_range=undefined;return $.getJSON(aa.data_url,{stats:true,chrom:aa.view.chrom,low:null,high:null,hda_ldda:aa.hda_ldda,dataset_id:aa.dataset_id},function(ac){aa.container_div.addClass("line-track");var ae=ac.data;if(isNaN(parseFloat(aa.prefs.min_value))||isNaN(parseFloat(aa.prefs.max_value))){aa.prefs.min_value=ae.min;aa.prefs.max_value=ae.max;$("#track_"+ab+"_minval").val(aa.prefs.min_value);$("#track_"+ab+"_maxval").val(aa.prefs.max_value)}aa.vertical_range=aa.prefs.max_value-aa.prefs.min_value;aa.total_frequency=ae.total_frequency;aa.container_div.find(".yaxislabel").remove();var af=$("<div 
/>").addClass("yaxislabel").attr("id","linetrack_"+ab+"_minval").text(u(aa.prefs.min_value));var ad=$("<div />").addClass("yaxislabel").attr("id","linetrack_"+ab+"_maxval").text(u(aa.prefs.max_value));ad.css({position:"absolute",top:"24px",left:"10px"});ad.prependTo(aa.container_div);af.css({position:"absolute",bottom:"2px",left:"10px"});af.prependTo(aa.container_div)})},draw_tile:function(ak,ae,ab,aj){if(this.vertical_range===undefined){return}var af=ab*M*ae,ad=M*ae,aa=Math.ceil(ad*aj),ah=this.height_px;var ac=this.view.canvas_manager.new_canvas();ac.width=aa,ac.height=ah;var ai=ac.getContext("2d");var ag=new I.LinePainter(ak.data,af,af+ad,this.prefs,this.mode);ag.draw(ai,aa,ah);return new b(ad,ae,ac)}});var e=function(aa,af,ae,ai,ah,ac,ad,ag){var ab=this;this.display_modes=["Auto","Histogram","Dense","Squish","Pack"];this.track_config=new V({track:this,params:[{key:"block_color",label:"Block color",type:"color",default_value:"#444"},{key:"label_color",label:"Label color",type:"color",default_value:"black"},{key:"show_counts",label:"Show summary counts",type:"bool",default_value:true},{key:"mode",type:"string",default_value:this.mode,hidden:true},],saved_values:ah,onchange:function(){ab.tile_cache.clear();ab.draw()}});this.prefs=this.track_config.values;j.call(this,aa,af,af.viewport_container);J.call(this,ac,ad,ag);this.height_px=0;this.container_div.addClass("feature-track");this.hda_ldda=ae;this.dataset_id=ai;this.original_dataset_id=ai;this.show_labels_scale=0.001;this.showing_details=false;this.summary_draw_height=30;this.inc_slots={};this.start_end_dct={};this.tile_cache=new c(d);this.data_cache=new N(20,this);this.left_offset=200;this.painter=I.LinkedFeaturePainter};n(e.prototype,J.prototype,{update_auto_mode:function(aa){if(this.mode=="Auto"){if(aa=="no_detail"){aa="feature spans"}else{if(aa=="summary_tree"){aa="coverage histogram"}}this.mode_div.text("Auto ("+aa+")")}},incremental_slots:function(ae,ab,ad){var 
ac=this.view.canvas_manager.dummy_context,aa=this.inc_slots[ae];if(!aa||(aa.mode!==ad)){aa=new (p.FeatureSlotter)(ae,ad==="Pack",w,function(af){return ac.measureText(af)});aa.mode=ad;this.inc_slots[ae]=aa}return aa.slot_features(ab)},get_summary_tree_data:function(ae,ah,ac,ap){if(ap>ac-ah){ap=ac-ah}var al=Math.floor((ac-ah)/ap),ao=[],ad=0;var af=0,ag=0,ak,an=0,ai=[],am,aj;var ab=function(at,ar,au,aq){at[0]=ar+au*aq;at[1]=ar+(au+1)*aq};while(an<ap&&af!==ae.length){var aa=false;for(;an<ap&&!aa;an++){ab(ai,ah,an,al);for(ag=af;ag<ae.length;ag++){ak=ae[ag].slice(1,3);if(is_overlap(ak,ai)){aa=true;break}}if(aa){break}}data_start_index=ag;ao[ao.length]=am=[ai[0],0];for(;ag<ae.length;ag++){ak=ae[ag].slice(1,3);if(is_overlap(ak,ai)){am[1]++}else{break}}if(am[1]>ad){ad=am[1]}an++}return{max:ad,delta:al,data:ao}},draw_tile:function(an,av,az,aj,ad){var ar=this,aB=az*M*av,ab=(az+1)*M*av,ap=ab-aB,at=Math.ceil(ap*aj),aq=this.mode,aF=25,ae=this.left_offset,ao,af;if(aq==="Auto"){if(an.dataset_type==="summary_tree"){aq=an.dataset_type}else{if(an.extra_info==="no_detail"){aq="no_detail"}else{var aE=an.data;if(this.view.high-this.view.low>F){aq="Squish"}else{aq="Pack"}}}this.update_auto_mode(aq)}if(aq==="summary_tree"||aq==="Histogram"){af=this.summary_draw_height;this.container_div.find(".yaxislabel").remove();var aa=$("<div />").addClass("yaxislabel");aa.text(an.max);aa.css({position:"absolute",top:"24px",left:"10px",color:this.prefs.label_color});aa.prependTo(this.container_div);var ac=this.view.canvas_manager.new_canvas();ac.width=at+ae;ac.height=af+O;if(an.dataset_type!="summary_tree"){var ak=this.get_summary_tree_data(an.data,aB,ab,200);if(an.max){ak.max=an.max}an=ak}var aC=new I.SummaryTreePainter(an,aB,ab,this.prefs);var au=ac.getContext("2d");au.translate(ae,O);aC.draw(au,at,af);return new l(az,av,ac,an.max)}var ao,ah=1;if(aq==="no_detail"||aq==="Squish"||aq==="Pack"){ah=this.incremental_slots(aj,an.data,aq);ao=this.inc_slots[aj].slots}var ai=[];if(an.data){var 
al=this.filters_manager.filters;for(var aw=0,ay=an.data.length;aw<ay;aw++){var ag=an.data[aw];var ax=false;var am;for(var aA=0,aD=al.length;aA<aD;aA++){am=al[aA];am.update_attrs(ag);if(!am.keep(ag)){ax=true;break}}if(!ax){ai.push(ag)}}}var aC=new (this.painter)(ai,aB,ab,this.prefs,aq,ad);var af=aC.get_required_height(ah);var ac=this.view.canvas_manager.new_canvas();ac.width=at+ae;ac.height=af;var au=ac.getContext("2d");au.fillStyle=this.prefs.block_color;au.font=au.canvas.manager.default_font;au.textAlign="right";this.container_div.find(".yaxislabel").remove();if(an.data){this.example_feature=(an.data.length?an.data[0]:undefined);au.translate(ae,0);aC.draw(au,at,af,ao)}return new L(az,av,ac,an.message)}});var P=function(ad,ab,af,aa,ac,ae){e.call(this,ad,ab,af,aa,ac,ae);this.painter=I.VariantPainter};n(P.prototype,J.prototype,e.prototype);var S=function(ad,ab,af,aa,ac,ae){e.call(this,ad,ab,af,aa,ac,ae);this.track_config=new V({track:this,params:[{key:"block_color",label:"Block color",type:"color",default_value:"#444"},{key:"label_color",label:"Label color",type:"color",default_value:"black"},{key:"show_insertions",label:"Show insertions",type:"bool",default_value:false},{key:"show_differences",label:"Show differences only",type:"bool",default_value:true},{key:"show_counts",label:"Show summary counts",type:"bool",default_value:true},{key:"mode",type:"string",default_value:this.mode,hidden:true},],saved_values:ac,onchange:function(){this.track.tile_cache.clear();this.track.draw()}});this.prefs=this.track_config.values;this.painter=I.ReadPainter;this.make_name_popup_menu()};n(S.prototype,J.prototype,e.prototype);var Q=function(ae,ac,ag,aa,ad,af,ab){e.call(this,ae,ac,ag,aa,ad,af,{},ab);this.data_url=raw_data_url;this.data_query_wait=1000;this.dataset_check_url=dataset_state_url};n(Q.prototype,J.prototype,e.prototype,{predraw_init:function(){var ab=this;var 
aa=function(){if(ab.data_cache.size()===0){setTimeout(aa,300)}else{ab.data_url=default_data_url;ab.data_query_wait=H;ab.dataset_state_url=converted_datasets_state_url;$.getJSON(ab.dataset_state_url,{dataset_id:ab.dataset_id,hda_ldda:ab.hda_ldda},function(ac){})}};aa()}});T.View=Y;T.LineTrack=k;T.FeatureTrack=e;T.ReadTrack=S};var slotting_module=function(c,b){var e=c("class").extend;var d=2,a=5;b.FeatureSlotter=function(j,h,f,g){this.slots={};this.start_end_dct={};this.w_scale=j;this.include_label=h;this.max_rows=f;this.measureText=g};e(b.FeatureSlotter.prototype,{slot_features:function(m){var p=this.w_scale,s=this.slots,h=this.start_end_dct,y=[],A=[],n=0,z=this.max_rows;for(var w=0,x=m.length;w<x;w++){var l=m[w],o=l[0];if(s[o]!==undefined){n=Math.max(n,s[o]);A.push(s[o])}else{y.push(w)}}var q=function(G,H){for(var F=0;F<=z;F++){var D=false,I=h[F];if(I!==undefined){for(var C=0,E=I.length;C<E;C++){var B=I[C];if(H>B[0]&&G<B[1]){D=true;break}}}if(!D){return F}}return -1};for(var w=0,x=y.length;w<x;w++){var l=m[y[w]],o=l[0],u=l[1],f=l[2],r=l[3],g=Math.floor(u*p),k=Math.ceil(f*p),v=this.measureText(r).width,j;if(r!==undefined&&this.include_label){v+=(d+a);if(g-v>=0){g-=v;j="left"}else{k+=v;j="right"}}var t=q(g,k);if(t>=0){if(h[t]===undefined){h[t]=[]}h[t].push([g,k]);s[o]=t;n=Math.max(n,t)}else{}}return n+1}})};var painters_module=function(j,w){var t=j("class").extend;var o=function(H,z,F,y,E,C){if(C===undefined){C=4}var B=y-z;var A=E-F;var D=Math.floor(Math.sqrt(B*B+A*A)/C);var I=B/D;var G=A/D;var x;for(x=0;x<D;x++,z+=I,F+=G){if(x%2!==0){continue}H.fillRect(z,F,C,1)}};var p=function(A,z,x,D){var C=z-D/2,B=z+D/2,E=x-Math.sqrt(D*3/2);A.beginPath();A.moveTo(C,E);A.lineTo(B,E);A.lineTo(z,x);A.lineTo(C,E);A.strokeStyle=this.fillStyle;A.fill();A.stroke();A.closePath()};var m=function(z,B,x,y,A){this.data=z;this.view_start=B;this.view_end=x;this.prefs=t({},this.default_prefs,y);this.mode=A};m.prototype.default_prefs={};var 
u=function(z,B,x,y,A){m.call(this,z,B,x,y,A)};u.prototype.default_prefs={show_counts:false};u.prototype.draw=function(M,z,L){var E=this.view_start,O=this.view_end-this.view_start,N=z/O;var J=this.data.data,I=this.data.delta,G=this.data.max,B=L;delta_x_px=Math.ceil(I*N);M.save();for(var C=0,D=J.length;C<D;C++){var H=Math.floor((J[C][0]-E)*N);var F=J[C][1];if(!F){continue}var K=F/G*L;if(F!==0&&K<1){K=1}M.fillStyle=this.prefs.block_color;M.fillRect(H,B-K,delta_x_px,K);var A=4;if(this.prefs.show_counts&&(M.measureText(F).width+A)<delta_x_px){M.fillStyle=this.prefs.label_color;M.textAlign="center";M.fillText(F,H+(delta_x_px/2),10)}}M.restore()};var c=function(x,B,D,E,z){m.call(this,x,B,D,E,z);if(this.prefs.min_value===undefined){var F=Infinity;for(var y=0,A=this.data.length;y<A;y++){F=Math.min(F,this.data[y][1])}this.prefs.min_value=F}if(this.prefs.max_value===undefined){var C=-Infinity;for(var y=0,A=this.data.length;y<A;y++){C=Math.max(C,this.data[y][1])}this.prefs.max_value=C}};c.prototype.default_prefs={min_value:undefined,max_value:undefined,mode:"Histogram",color:"#000",overflow_color:"#F66"};c.prototype.draw=function(M,L,J){var E=false,G=this.prefs.min_value,C=this.prefs.max_value,I=C-G,x=J,z=this.view_start,K=this.view_end-this.view_start,A=L/K,H=this.mode,S=this.data;M.save();var T=Math.round(J+G/I*J);if(H!=="Intensity"){M.fillStyle="#aaa";M.fillRect(0,T,L,1)}M.beginPath();var Q,D,B;if(S.length>1){B=Math.ceil((S[1][0]-S[0][0])*A)}else{B=10}for(var N=0,O=S.length;N<O;N++){M.fillStyle=this.prefs.color;Q=Math.round((S[N][0]-z)*A);D=S[N][1];var 
P=false,F=false;if(D===null){if(E&&H==="Filled"){M.lineTo(Q,x)}E=false;continue}if(D<G){F=true;D=G}else{if(D>C){P=true;D=C}}if(H==="Histogram"){D=Math.round(D/I*x);M.fillRect(Q,T,B,-D)}else{if(H==="Intensity"){D=255-Math.floor((D-G)/I*255);M.fillStyle="rgb("+D+","+D+","+D+")";M.fillRect(Q,0,B,x)}else{D=Math.round(x-(D-G)/I*x);if(E){M.lineTo(Q,D)}else{E=true;if(H==="Filled"){M.moveTo(Q,x);M.lineTo(Q,D)}else{M.moveTo(Q,D)}}}}M.fillStyle=this.prefs.overflow_color;if(P||F){var R;if(H==="Histogram"||H==="Intensity"){R=B}else{Q-=2;R=4}if(P){M.fillRect(Q,0,R,3)}if(F){M.fillRect(Q,x-3,R,3)}}M.fillStyle=this.prefs.color}if(H==="Filled"){if(E){M.lineTo(Q,T);M.lineTo(0,T)}M.fill()}else{M.stroke()}M.restore()};var n=function(z,B,x,y,A){m.call(this,z,B,x,y,A)};n.prototype.default_prefs={block_color:"#FFF",connector_color:"#FFF"};t(n.prototype,{get_required_height:function(y){var x=y_scale=this.get_row_height(),z=this.mode;if(z==="no_detail"||z==="Squish"||z==="Pack"){x=y*y_scale}return x+Math.max(Math.round(y_scale/2),5)},draw:function(J,A,I,F){var D=this.data,G=this.view_start,K=this.view_end;J.save();J.fillStyle=this.prefs.block_color;J.textAlign="right";var N=this.view_end-this.view_start,M=A/N,z=this.get_row_height();for(var C=0,E=D.length;C<E;C++){var L=D[C],B=L[0],x=L[1],y=L[2],H=(F&&F[B]!==undefined?F[B]:null);if((x<K&&y>G)&&(this.mode=="Dense"||H!==null)){this.draw_element(J,this.mode,L,H,G,K,M,z,A)}}J.restore()}});var d=10,h=3,l=5,v=10,f=1,r=3,e=3,a=9,k=2,g="#ccc";var q=function(z,B,x,y,A){n.call(this,z,B,x,y,A)};t(q.prototype,n.prototype,{get_row_height:function(){var y=this.mode,x;if(y==="Dense"){x=d}else{if(y==="no_detail"){x=h}else{if(y==="Squish"){x=l}else{x=v}}}return x},draw_element:function(J,C,R,E,L,ab,af,ag,x){var 
O=R[0],ad=R[1],V=R[2],M=R[3],W=Math.floor(Math.max(0,(ad-L)*af)),K=Math.ceil(Math.min(x,Math.max(0,(V-L)*af))),U=(C==="Dense"?0:(0+E))*ag,I,Z,N=null,ah=null,A=this.prefs.block_color,Y=this.prefs.label_color;if(C=="Dense"){E=1}if(C==="no_detail"){J.fillStyle=A;J.fillRect(W,U+5,K-W,f)}else{var H=R[4],T=R[5],X=R[6],B=R[7];if(T&&X){N=Math.floor(Math.max(0,(T-L)*af));ah=Math.ceil(Math.min(x,Math.max(0,(X-L)*af)))}var ae,P;if(C==="Squish"||C==="Dense"){ae=1;P=e}else{ae=5;P=a}if(!B){if(R.strand){if(R.strand==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand_inv")}else{if(R.strand==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand_inv")}}}else{J.fillStyle=A}J.fillRect(W,U,K-W,P)}else{var G,Q;if(C==="Squish"||C==="Dense"){J.fillStyle=g;G=U+Math.floor(e/2)+1;Q=1}else{if(H){var G=U;var Q=P;if(H==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand")}else{if(H==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand")}}}else{J.fillStyle=g;G+=(e/2)+1;Q=1}}J.fillRect(W,G,K-W,Q);for(var ac=0,z=B.length;ac<z;ac++){var D=B[ac],y=Math.floor(Math.max(0,(D[0]-L)*af)),S=Math.ceil(Math.min(x,Math.max((D[1]-L)*af)));if(y>S){continue}J.fillStyle=A;J.fillRect(y,U+(P-ae)/2+1,S-y,ae);if(N!==undefined&&X>T&&!(y>ah||S<N)){var aa=Math.max(y,N),F=Math.min(S,ah);J.fillRect(aa,U+1,F-aa,P);if(B.length==1&&C=="Pack"){if(H==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand_inv")}else{if(H==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand_inv")}}if(aa+14<F){aa+=2;F-=2}J.fillRect(aa,U+1,F-aa,P)}}}}if(C==="Pack"&&ad>L){J.fillStyle=Y;if(L===0&&W-J.measureText(M).width<0){J.textAlign="left";J.fillText(M,K+k,U+8)}else{J.textAlign="right";J.fillText(M,W-k,U+8)}J.fillStyle=A}}}});var b=function(z,B,x,y,A){n.call(this,z,B,x,y,A)};t(b.prototype,n.prototype,{draw_element:function(Q,L,F,B,T,z,I,R,O){var 
F=data[i],H=F[0],P=F[1],A=F[2],K=F[3],D=Math.floor(Math.max(0,(P-T)*I)),G=Math.ceil(Math.min(O,Math.max(0,(A-T)*I))),C=(L==="Dense"?0:(0+B))*R,x,U,y=null,J=null;if(no_label){Q.fillStyle=block_color;Q.fillRect(D+left_offset,C+5,G-D,1)}else{var S=F[4],N=F[5],E=F[6];x=9;U=1;Q.fillRect(D+left_offset,C,G-D,x);if(L!=="Dense"&&K!==undefined&&P>T){Q.fillStyle=label_color;if(T===0&&D-Q.measureText(K).width<0){Q.textAlign="left";Q.fillText(K,G+2+left_offset,C+8)}else{Q.textAlign="right";Q.fillText(K,D-2+left_offset,C+8)}Q.fillStyle=block_color}var M=S+" / "+N;if(P>T&&Q.measureText(M).width<(G-D)){Q.fillStyle="white";Q.textAlign="center";Q.fillText(M,left_offset+D+(G-D)/2,C+8);Q.fillStyle=block_color}}}});var s=function(A,C,x,z,B,y){n.call(this,A,C,x,z,B);this.ref_seq=y};s.prototype.default_prefs=t({},n.prototype.default_prefs,{show_insertions:false});t(s.prototype,n.prototype,{get_row_height:function(){var x,y=this.mode;if(y==="Dense"){x=d}else{if(y==="Squish"){x=l}else{x=v;if(this.prefs.show_insertions){x*=2}}}return x},draw_read:function(T,O,K,Y,z,S,H,E,D){T.textAlign="center";var R=this,y=[Y,z],N=0,U=0,Q=0;ref_seq=this.ref_seq,char_width_px=T.canvas.manager.char_width_px;var ad=[];if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){Q=Math.round(K/2)}if(!H){H=[[0,E.length]]}for(var L=0,W=H.length;L<W;L++){var I=H[L],A="MIDNSHP=X"[I[0]],M=I[1];if(A==="H"||A==="S"){N-=M}var F=S+N,ac=Math.floor(Math.max(0,(F-Y)*K)),G=Math.floor(Math.max(0,(F+M-Y)*K));if(ac===G){G+=1}switch(A){case"H":break;case"S":case"M":case"=":if(is_overlap([F,F+M],y)){var P=E.slice(U,U+M);if(Q>0){T.fillStyle=this.prefs.block_color;T.fillRect(ac-Q,D+1,G-ac,9);T.fillStyle=g;for(var aa=0,x=P.length;aa<x;aa++){if(this.prefs.show_differences&&ref_seq){var J=ref_seq[F-Y+aa];if(!J||J.toLowerCase()===P[aa].toLowerCase()){continue}}if(F+aa>=Y&&F+aa<=z){var 
ab=Math.floor(Math.max(0,(F+aa-Y)*K));T.fillText(P[aa],ab,D+9)}}}else{T.fillStyle=this.prefs.block_color;T.fillRect(ac,D+4,G-ac,e)}}U+=M;N+=M;break;case"N":T.fillStyle=g;T.fillRect(ac-Q,D+5,G-ac,1);N+=M;break;case"D":T.fillStyle="red";T.fillRect(ac-Q,D+4,G-ac,3);N+=M;break;case"P":break;case"I":var X=ac-Q;if(is_overlap([F,F+M],y)){var P=E.slice(U,U+M);if(this.prefs.show_insertions){var C=ac-(G-ac)/2;if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){T.fillStyle="yellow";T.fillRect(C-Q,D-9,G-ac,9);ad[ad.length]={type:"triangle",data:[X,D+4,5]};T.fillStyle=g;switch(seq_tile_overlap){case (OVERLAP_START):P=P.slice(Y-F);break;case (OVERLAP_END):P=P.slice(0,F-z);break;case (CONTAINED_BY):break;case (CONTAINS):P=P.slice(Y-F,F-z);break}for(var aa=0,x=P.length;aa<x;aa++){var ab=Math.floor(Math.max(0,(F+aa-Y)*K));T.fillText(P[aa],ab-(G-ac)/2,D)}}else{T.fillStyle="yellow";T.fillRect(C,D+(this.mode!=="Dense"?2:5),G-ac,(O!=="Dense"?e:r))}}else{if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){ad[ad.length]={type:"text",data:[P.length,X,D+9]}}else{}}}U+=M;break;case"X":U+=M;break}}T.fillStyle="yellow";var Z,B,ae;for(var V=0;V<ad.length;V++){Z=ad[V];B=Z.type;ae=Z.data;if(B==="text"){T.save();T.font="bold "+T.font;T.fillText(ae[0],ae[1],ae[2]);T.restore()}else{if(B=="triangle"){p(T,ae[0],ae[1],ae[2])}}}},draw_element:function(Q,L,D,A,T,y,H,R,O){var G=D[0],P=D[1],z=D[2],I=D[3],C=Math.floor(Math.max(0,(P-T)*H)),E=Math.ceil(Math.min(O,Math.max(0,(z-T)*H))),B=(L==="Dense"?0:(0+A))*R,U=this.prefs.block_color,F=this.prefs.label_color,N=0;if((L==="Pack"||this.mode==="Auto")&&H>Q.canvas.manager.char_width_px){var N=Math.round(H/2)}Q.fillStyle=U;if(D[5] instanceof Array){var 
M=Math.floor(Math.max(0,(D[4][0]-T)*H)),K=Math.ceil(Math.min(O,Math.max(0,(D[4][1]-T)*H))),J=Math.floor(Math.max(0,(D[5][0]-T)*H)),x=Math.ceil(Math.min(O,Math.max(0,(D[5][1]-T)*H)));if(D[4][1]>=T&&D[4][0]<=y&&D[4][2]){this.draw_read(Q,L,H,T,y,D[4][0],D[4][2],D[4][3],B)}if(D[5][1]>=T&&D[5][0]<=y&&D[5][2]){this.draw_read(Q,L,H,T,y,D[5][0],D[5][2],D[5][3],B)}if(J>K){Q.fillStyle=g;o(Q,K-N,B+5,J-N,B+5)}}else{Q.fillStyle=U;this.draw_read(Q,L,H,T,y,P,D[4],D[5],B)}if(L==="Pack"&&P>T){Q.fillStyle=this.prefs.label_color;var S=1;if(S===0&&C-Q.measureText(I).width<0){Q.textAlign="left";Q.fillText(I,E+k-N,B+8)}else{Q.textAlign="right";Q.fillText(I,C-k-N,B+8)}Q.fillStyle=U}}});w.SummaryTreePainter=u;w.LinePainter=c;w.LinkedFeaturePainter=q;w.ReadPainter=s;w.VariantPainter=b};(function(d){var c={};var b=function(e){return c[e]};var a=function(f,g){var e={};g(b,e);c[f]=e};a("class",class_module);a("slotting",slotting_module);a("painters",painters_module);a("trackster",trackster_module);for(key in c.trackster){d[key]=c.trackster[key]}})(window);
\ No newline at end of file
+var class_module=function(b,a){var c=function(){var f=arguments[0];for(var e=1;e<arguments.length;e++){var d=arguments[e];for(key in d){f[key]=d[key]}}return f};a.extend=c};var BEFORE=1001,CONTAINS=1002,OVERLAP_START=1003,OVERLAP_END=1004,CONTAINED_BY=1005,AFTER=1006;var compute_overlap=function(e,b){var g=e[0],f=e[1],d=b[0],c=b[1],a;if(g<d){if(f<d){a=BEFORE}else{if(f<=c){a=OVERLAP_START}else{a=CONTAINS}}}else{if(g>c){a=AFTER}else{if(f<=c){a=CONTAINED_BY}else{a=OVERLAP_END}}}return a};var is_overlap=function(c,b){var a=compute_overlap(c,b);return(a!==BEFORE&&a!==AFTER)};var trackster_module=function(f,T){var n=f("class").extend,p=f("slotting"),I=f("painters");var Z=function(aa,ab){this.document=aa;this.default_font=ab!==undefined?ab:"9px Monaco, Lucida Console, monospace";this.dummy_canvas=this.new_canvas();this.dummy_context=this.dummy_canvas.getContext("2d");this.dummy_context.font=this.default_font;this.char_width_px=this.dummy_context.measureText("A").width;this.patterns={};this.load_pattern("right_strand","/visualization/strand_right.png");this.load_pattern("left_strand","/visualization/strand_left.png");this.load_pattern("right_strand_inv","/visualization/strand_right_inv.png");this.load_pattern("left_strand_inv","/visualization/strand_left_inv.png")};n(Z.prototype,{load_pattern:function(aa,ae){var ab=this.patterns,ac=this.dummy_context,ad=new Image();ad.src=image_path+ae;ad.onload=function(){ab[aa]=ac.createPattern(ad,"repeat")}},get_pattern:function(aa){return this.patterns[aa]},new_canvas:function(){var aa=this.document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(aa)}aa.manager=this;return aa}});var C=function(aa,ab){aa.bind("drag",{handle:ab,relative:true},function(af,ag){var ae=$(this).parent();var ad=ae.children();var 
ac;for(ac=0;ac<ad.length;ac++){if(ag.offsetY<$(ad.get(ac)).position().top){break}}if(ac===ad.length){if(this!==ad.get(ac-1)){ae.append(this)}}else{if(this!==ad.get(ac)){$(this).insertBefore(ad.get(ac))}}}).bind("dragstart",function(){$(this).css({"border-top":"1px solid blue","border-bottom":"1px solid blue"})}).bind("dragend",function(){$(this).css("border","0px")})};T.sortable=C;var D=9,A=20,O=D+2,w=100,F=12000,M=200,z=5,s=10,H=5000,t=100,m="There was an error in indexing this dataset. ",G="A converter for this dataset is not installed. Please check your datatypes_conf.xml file.",B="No data for this chrom/contig.",q="Currently indexing... please wait",v="Tool cannot be rerun: ",a="Loading data...",U="Ready for display",d=10,r=5,y=5;function u(aa){return Math.round(aa*1000)/1000}var c=function(aa){this.num_elements=aa;this.clear()};n(c.prototype,{get:function(ab){var aa=this.key_ary.indexOf(ab);if(aa!==-1){if(this.obj_cache[ab].stale){this.key_ary.splice(aa,1);delete this.obj_cache[ab]}else{this.move_key_to_end(ab,aa)}}return this.obj_cache[ab]},set:function(ab,ac){if(!this.obj_cache[ab]){if(this.key_ary.length>=this.num_elements){var aa=this.key_ary.shift();delete this.obj_cache[aa]}this.key_ary.push(ab)}this.obj_cache[ab]=ac;return ac},move_key_to_end:function(ab,aa){this.key_ary.splice(aa,1);this.key_ary.push(ab)},clear:function(){this.obj_cache={};this.key_ary=[]},size:function(){return this.key_ary.length}});var N=function(ab,aa,ac){c.call(this,ab);this.track=aa;this.subset=(ac!==undefined?ac:true)};n(N.prototype,c.prototype,{load_data:function(aj,ae,ab,ag){var ai=this.track.view.chrom,ah=this.track.mode,ad={chrom:ai,low:aj,high:ae,mode:ah,resolution:ab,dataset_id:this.track.dataset_id,hda_ldda:this.track.hda_ldda};$.extend(ad,ag);if(this.track.filters_manager){var ak=[];var aa=this.track.filters_manager.filters;for(var af=0;af<aa.length;af++){ak[ak.length]=aa[af].name}ad.filter_cols=JSON.stringify(ak)}var ac=this;return 
$.getJSON(this.track.data_url,ad,function(al){ac.set_data(aj,ae,ah,al)})},get_data:function(aa,ae,ab,ad){var af=this.track.mode,ac=this.get_data_from_cache(aa,ae,af);if(ac){return ac}ac=this.load_data(aa,ae,ab,ad);this.set_data(aa,ae,af,ac);return ac},DEEP_DATA_REQ:"deep",BROAD_DATA_REQ:"breadth",get_more_data:function(ai,ad,ac,ag,ae){var ah=this.track.mode,aj=this.get_data_from_cache(ai,ad,ah);if(!aj){console.log("ERROR: no current data for: ",this.track,ai,ad,ac,ag);return}aj.stale=true;var ab=ai;if(ae===this.DEEP_DATA_REQ){$.extend(ag,{start_val:aj.data.length+1})}else{if(ae===this.BROAD_DATA_REQ){ab=aj.data[aj.data.length-1][2]+1}}var aa=this,af=this.load_data(ab,ad,ac,ag);new_data_available=$.Deferred();this.set_data(ai,ad,ah,new_data_available);$.when(af).then(function(ak){if(ak.data){ak.data=aj.data.concat(ak.data);if(ak.message){ak.message=ak.message.replace(/[0-9]+/,ak.data.length)}}aa.set_data(ai,ad,ah,ak);new_data_available.resolve(ak)});return new_data_available},get_data_from_cache:function(aa,ab,ac){return this.get(this.gen_key(aa,ab,ac))},set_data:function(ab,ac,ad,aa){return this.set(this.gen_key(ab,ac,ad),aa)},gen_key:function(aa,ac,ad){var ab=aa+"_"+ac+"_"+ad;return ab},split_key:function(aa){return aa.split("_")}});var E=function(ab,aa,ac){N.call(this,ab,aa,ac)};n(E.prototype,N.prototype,c.prototype,{load_data:function(ac,aa,ae,af,ab,ad){if(ab>1){return}return N.prototype.load_data.call(this,ac,aa,ae,af,ab,ad)}});var Y=function(aa,ad,ac,ab,ae){this.container=aa;this.chrom=null;this.vis_id=ac;this.dbkey=ab;this.title=ad;this.tracks=[];this.label_tracks=[];this.max_low=0;this.max_high=0;this.num_tracks=0;this.track_id_counter=0;this.zoom_factor=3;this.min_separation=30;this.has_changes=false;this.init(ae);this.canvas_manager=new Z(aa.get(0).ownerDocument);this.reset()};n(Y.prototype,{init:function(ae){var 
// View.init: build the DOM scaffolding — top/content/bottom containers, label tracks, overview strip and nav controls — then wire drag/zoom/scroll event handlers.
ac=this.container,aa=this;this.top_container=$("<div/>").addClass("top-container").appendTo(ac);this.content_div=$("<div/>").addClass("content").css("position","relative").appendTo(ac);this.bottom_container=$("<div/>").addClass("bottom-container").appendTo(ac);this.top_labeltrack=$("<div/>").addClass("top-labeltrack").appendTo(this.top_container);this.viewport_container=$("<div/>").addClass("viewport-container").addClass("viewport-container").appendTo(this.content_div);
// NOTE(review): "viewport-container" class is added twice above — harmless duplicate.
this.intro_div=$("<div/>").addClass("intro").appendTo(this.viewport_container).hide();var ad=$("<div/>").text("Add Datasets to Visualization").addClass("action-button").appendTo(this.intro_div).click(function(){add_tracks()});this.nav_labeltrack=$("<div/>").addClass("nav-labeltrack").appendTo(this.bottom_container);this.nav_container=$("<div/>").addClass("nav-container").prependTo(this.top_container);this.nav=$("<div/>").addClass("nav").appendTo(this.nav_container);this.overview=$("<div/>").addClass("overview").appendTo(this.bottom_container);this.overview_viewport=$("<div/>").addClass("overview-viewport").appendTo(this.overview);this.overview_close=$("<a href='javascript:void(0);'>Close Overview</a>").addClass("overview-close").hide().appendTo(this.overview_viewport);this.overview_highlight=$("<div/>").addClass("overview-highlight").hide().appendTo(this.overview_viewport);this.overview_box_background=$("<div/>").addClass("overview-boxback").appendTo(this.overview_viewport);this.overview_box=$("<div/>").addClass("overview-box").appendTo(this.overview_viewport);this.default_overview_height=this.overview_box.height();
this.nav_controls=$("<div/>").addClass("nav-controls").appendTo(this.nav);this.chrom_select=$("<select/>").attr({name:"chrom"}).css("width","15em").addClass("no-autocomplete").append("<option value=''>Loading</option>").appendTo(this.nav_controls);var 
// Handler for the location text input: commit on Enter/focusout, cancel on Esc, then hide the input and restore the location label + chrom select.
ab=function(af){if(af.type==="focusout"||(af.keyCode||af.which)===13||(af.keyCode||af.which)===27){if((af.keyCode||af.which)!==27){aa.go_to($(this).val())}$(this).hide();$(this).val("");aa.location_span.show();aa.chrom_select.show()}};this.nav_input=$("<input/>").addClass("nav-input").hide().bind("keyup focusout",ab).appendTo(this.nav_controls);this.location_span=$("<span/>").addClass("location").appendTo(this.nav_controls);this.location_span.bind("click",function(){aa.location_span.hide();aa.chrom_select.hide();aa.nav_input.val(aa.chrom+":"+aa.low+"-"+aa.high);aa.nav_input.css("display","inline-block");aa.nav_input.select();aa.nav_input.focus()});if(this.vis_id!==undefined){this.hidden_input=$("<input/>").attr("type","hidden").val(this.vis_id).appendTo(this.nav_controls)}this.zo_link=$("<a id='zoom-out' />").click(function(){aa.zoom_out();aa.redraw()}).appendTo(this.nav_controls);this.zi_link=$("<a id='zoom-in' />").click(function(){aa.zoom_in();aa.redraw()}).appendTo(this.nav_controls);this.load_chroms({low:0},ae);this.chrom_select.bind("change",function(){aa.change_chrom(aa.chrom_select.val())});this.content_div.bind("click",function(af){$(this).find("input").trigger("blur")});this.content_div.bind("dblclick",function(af){aa.zoom_in(af.pageX,this.viewport_container)});
// Dragging the overview box pans the view proportionally to the whole chromosome width.
this.overview_box.bind("dragstart",function(af,ag){this.current_x=ag.offsetX}).bind("drag",function(af,ah){var ai=ah.offsetX-this.current_x;this.current_x=ah.offsetX;var ag=Math.round(ai/aa.viewport_container.width()*(aa.max_high-aa.max_low));aa.move_delta(-ag)});this.overview_close.bind("click",function(){for(var ag=0,af=aa.tracks.length;ag<af;ag++){aa.tracks[ag].is_overview=false}$(this).siblings().filter("canvas").remove();$(this).parent().css("height",aa.overview_box.height());aa.overview_highlight.hide();$(this).hide()});this.viewport_container.bind("draginit",function(af,ag){if(af.clientX>aa.viewport_container.width()-16){return 
false}}).bind("dragstart",function(af,ag){ag.original_low=aa.low;ag.current_height=af.clientY;ag.current_x=ag.offsetX}).bind("drag",function(ah,aj){var af=$(this);var ak=aj.offsetX-aj.current_x;var ag=af.scrollTop()-(ah.clientY-aj.current_height);af.scrollTop(ag);aj.current_height=ah.clientY;aj.current_x=aj.offsetX;var ai=Math.round(ak/aa.viewport_container.width()*(aa.high-aa.low));aa.move_delta(ai)}).bind("mousewheel",function(ah,aj,ag,af){if(ag){var ai=Math.round(-ag/aa.viewport_container.width()*(aa.high-aa.low));aa.move_delta(ai)}});
// Dragging on the top label track rubber-bands a region (translucent proxy div) and zooms to it on dragend.
this.top_labeltrack.bind("dragstart",function(af,ag){return $("<div />").css({height:aa.content_div.height()+aa.top_labeltrack.height()+aa.nav_labeltrack.height()+1,top:"0px",position:"absolute","background-color":"#ccf",opacity:0.5,"z-index":1000}).appendTo($(this))}).bind("drag",function(aj,ak){$(ak.proxy).css({left:Math.min(aj.pageX,ak.startX),width:Math.abs(aj.pageX-ak.startX)});var ag=Math.min(aj.pageX,ak.startX)-aa.container.offset().left,af=Math.max(aj.pageX,ak.startX)-aa.container.offset().left,ai=(aa.high-aa.low),ah=aa.viewport_container.width();aa.update_location(Math.round(ag/ah*ai)+aa.low,Math.round(af/ah*ai)+aa.low)}).bind("dragend",function(ak,al){var ag=Math.min(ak.pageX,al.startX),af=Math.max(ak.pageX,al.startX),ai=(aa.high-aa.low),ah=aa.viewport_container.width(),aj=aa.low;aa.low=Math.round(ag/ah*ai)+aj;aa.high=Math.round(af/ah*ai)+aj;$(al.proxy).remove();aa.redraw()});
this.add_label_track(new X(this,this.top_labeltrack));this.add_label_track(new X(this,this.nav_labeltrack));$(window).bind("resize",function(){aa.resize_window()});$(document).bind("redraw",function(){aa.redraw()});this.reset();$(window).trigger("resize");this.update_intro_div()},
// update_intro_div: show the "Add Datasets" prompt only when the view has no tracks.
update_intro_div:function(){if(this.num_tracks===0){this.intro_div.show()}else{this.intro_div.hide()}},
// update_location: reflect the current chrom:low-high range in the label and the nav input.
update_location:function(aa,ab){this.location_span.text(commatize(aa)+" - 
"+commatize(ab));this.nav_input.val(this.chrom+":"+commatize(aa)+"-"+commatize(ab))},load_chroms:function(ab,ac){ab.num=t;$.extend(ab,(this.vis_id!==undefined?{vis_id:this.vis_id}:{dbkey:this.dbkey}));var aa=this;$.ajax({url:chrom_url,data:ab,dataType:"json",success:function(ae){if(ae.chrom_info.length===0){alert("Invalid chromosome: "+ab.chrom);return}if(ae.reference){aa.add_label_track(new x(aa))}aa.chrom_data=ae.chrom_info;var ah='<option value="">Select Chrom/Contig</option>';for(var ag=0,ad=aa.chrom_data.length;ag<ad;ag++){var af=aa.chrom_data[ag].chrom;ah+='<option value="'+af+'">'+af+"</option>"}if(ae.prev_chroms){ah+='<option value="previous">Previous '+t+"</option>"}if(ae.next_chroms){ah+='<option value="next">Next '+t+"</option>"}aa.chrom_select.html(ah);if(ac){ac()}aa.chrom_start_index=ae.start_index},error:function(){alert("Could not load chroms for this dbkey:",aa.dbkey)}})},change_chrom:function(ae,ab,ag){if(!ae||ae==="None"){return}var ad=this;if(ae==="previous"){ad.load_chroms({low:this.chrom_start_index-t});return}if(ae==="next"){ad.load_chroms({low:this.chrom_start_index+t});return}var af=$.grep(ad.chrom_data,function(ai,aj){return ai.chrom===ae})[0];if(af===undefined){ad.load_chroms({chrom:ae},function(){ad.change_chrom(ae,ab,ag)});return}else{if(ae!==ad.chrom){ad.chrom=ae;ad.chrom_select.val(ad.chrom);ad.max_high=af.len-1;ad.reset();ad.redraw(true);for(var ah=0,aa=ad.tracks.length;ah<aa;ah++){var ac=ad.tracks[ah];if(ac.init){ac.init()}}}if(ab!==undefined&&ag!==undefined){ad.low=Math.max(ab,0);ad.high=Math.min(ag,ad.max_high)}ad.reset_overview();ad.redraw()}},go_to:function(ae){var ai=this,aa,ad,ab=ae.split(":"),ag=ab[0],ah=ab[1];if(ah!==undefined){try{var af=ah.split("-");aa=parseInt(af[0].replace(/,/g,""),10);ad=parseInt(af[1].replace(/,/g,""),10)}catch(ac){return false}}ai.change_chrom(ag,aa,ad)},move_fraction:function(ac){var aa=this;var ab=aa.high-aa.low;this.move_delta(ac*ab)},move_delta:function(ac){var aa=this;var 
// move_delta (tail): pan by ac base pairs, clamping the window to [max_low, max_high] while preserving its width.
ab=aa.high-aa.low;if(aa.low-ac<aa.max_low){aa.low=aa.max_low;aa.high=aa.max_low+ab}else{if(aa.high-ac>aa.max_high){aa.high=aa.max_high;aa.low=aa.max_high-ab}else{aa.high-=ac;aa.low-=ac}}aa.redraw()},
// add_track / add_label_track / remove_track: track registry. NOTE(review): remove_track uses `delete`, leaving a hole in the array — redraw() guards with a truthiness check.
add_track:function(aa){aa.view=this;aa.track_id=this.track_id_counter;this.tracks.push(aa);if(aa.init){aa.init()}aa.container_div.attr("id","track_"+aa.track_id);C(aa.container_div,".draghandle");this.track_id_counter+=1;this.num_tracks+=1;this.update_intro_div()},add_label_track:function(aa){aa.view=this;this.label_tracks.push(aa)},remove_track:function(ab){this.has_changes=true;delete this.tracks[this.tracks.indexOf(ab)];this.num_tracks-=1;var aa=this;ab.container_div.fadeOut("slow",function(){$(this).remove();aa.update_intro_div()})},reset:function(){this.low=this.max_low;this.high=this.max_high;this.viewport_container.find(".yaxislabel").remove()},
// redraw: clamp/round the window, recompute resolution, position the overview box, and (unless ah is true) redraw every enabled track and label track.
redraw:function(ah){var ag=this.high-this.low,af=this.low,ab=this.high;if(af<this.max_low){af=this.max_low}if(ab>this.max_high){ab=this.max_high}if(this.high!==0&&ag<this.min_separation){ab=af+this.min_separation}this.low=Math.floor(af);this.high=Math.ceil(ab);this.resolution=Math.pow(z,Math.ceil(Math.log((this.high-this.low)/M)/Math.log(z)));
// NOTE(review): Math.log takes one argument — the second argument s below is ignored; the /Math.log(s) divide is what does the base conversion.
this.zoom_res=Math.pow(s,Math.max(0,Math.ceil(Math.log(this.resolution,s)/Math.log(s))));var aa=(this.low/(this.max_high-this.max_low)*this.overview_viewport.width())||0;var ae=((this.high-this.low)/(this.max_high-this.max_low)*this.overview_viewport.width())||0;var ai=13;this.overview_box.css({left:aa,width:Math.max(ai,ae)}).show();if(ae<ai){this.overview_box.css("left",aa-(ai-ae)/2)}if(this.overview_highlight){this.overview_highlight.css({left:aa,width:ae})}this.update_location(this.low,this.high);if(!ah){for(var 
ac=0,ad=this.tracks.length;ac<ad;ac++){if(this.tracks[ac]&&this.tracks[ac].enabled){this.tracks[ac].draw()}}for(ac=0,ad=this.label_tracks.length;ac<ad;ac++){this.label_tracks[ac].draw()}}},
// zoom_in/zoom_out: scale the window around its center (or around pixel ab if given) by zoom_factor.
zoom_in:function(ab,ac){if(this.max_high===0||this.high-this.low<this.min_separation){return}var ad=this.high-this.low,ae=ad/2+this.low,aa=(ad/this.zoom_factor)/2;if(ab){ae=ab/this.viewport_container.width()*(this.high-this.low)+this.low}this.low=Math.round(ae-aa);this.high=Math.round(ae+aa);this.redraw()},zoom_out:function(){if(this.max_high===0){return}var ab=this.high-this.low,ac=ab/2+this.low,aa=(ab*this.zoom_factor)/2;this.low=Math.round(ac-aa);this.high=Math.round(ac+aa);this.redraw()},resize_window:function(){this.viewport_container.height(this.container.height()-this.top_container.height()-this.bottom_container.height());this.nav_container.width(this.container.width());this.redraw()},reset_overview:function(){this.overview_viewport.find("canvas").remove();this.overview_viewport.height(this.default_overview_height);this.overview_box.height(this.default_overview_height);this.overview_close.hide();this.overview_highlight.hide()}});
// o: dynamic-tool panel for a track — builds parameter rows (number sliders / selects) from the tool's param descriptors and "run" buttons.
var o=function(ac,ag){this.track=ac;this.name=ag.name;this.params=[];var an=ag.params;for(var ad=0;ad<an.length;ad++){var ai=an[ad],ab=ai.name,am=ai.label,ae=unescape(ai.html),ao=ai.value,ak=ai.type;if(ak==="number"){this.params[this.params.length]=new g(ab,am,ae,ao,ai.min,ai.max)}else{if(ak=="select"){this.params[this.params.length]=new K(ab,am,ae,ao)}else{console.log("WARNING: unrecognized tool parameter type:",ab,ak)}}}this.parent_div=$("<div/>").addClass("dynamic-tool").hide();this.parent_div.bind("drag",function(aq){aq.stopPropagation()}).bind("click",function(aq){aq.stopPropagation()}).bind("dblclick",function(aq){aq.stopPropagation()});var al=$("<div class='tool-name'>").appendTo(this.parent_div).text(this.name);var aj=this.params;var ah=this;$.each(this.params,function(ar,av){var 
au=$("<div>").addClass("param-row").appendTo(ah.parent_div);var aq=$("<div>").addClass("param-label").text(av.label).appendTo(au);var at=$("<div/>").addClass("slider").html(av.html).appendTo(au);at.find(":input").val(av.value);$("<div style='clear: both;'/>").appendTo(au)});this.parent_div.find("input").click(function(){$(this).select()});var ap=$("<div>").addClass("param-row").appendTo(this.parent_div);var af=$("<input type='submit'>").attr("value","Run on complete dataset").appendTo(ap);var aa=$("<input type='submit'>").attr("value","Run on visible region").css("margin-left","3em").appendTo(ap);var ah=this;aa.click(function(){ah.run_on_region()});af.click(function(){ah.run_on_dataset()})};
// Tool prototype: collect parameter values from the form (as a dict of JSON strings, or a plain list) and run the tool.
n(o.prototype,{get_param_values_dict:function(){var aa={};this.parent_div.find(":input").each(function(){var ab=$(this).attr("name"),ac=$(this).val();aa[ab]=JSON.stringify(ac)});return aa},get_param_values:function(){var ab=[];var aa={};this.parent_div.find(":input").each(function(){var ac=$(this).attr("name"),ad=$(this).val();if(ac){ab[ab.length]=ad}});return ab},run_on_dataset:function(){var aa=this;aa.run({dataset_id:this.track.original_dataset_id,tool_id:aa.name},function(ab){show_modal(aa.name+" is Running",aa.name+" is running on the complete dataset. 
Tool outputs are in dataset's history.",{Close:hide_modal})})},run_on_region:function(){var aa={dataset_id:this.track.original_dataset_id,chrom:this.track.view.chrom,low:this.track.view.low,high:this.track.view.high,tool_id:this.name},ac=this.track,ab=aa.tool_id+ac.tool_region_and_parameters_str(aa.chrom,aa.low,aa.high),ad;if(ac instanceof e){ad=new Q(ab,view,ac.hda_ldda,undefined,{},{},ac);ad.change_mode(ac.mode)}this.track.add_track(ad);ad.content_div.text("Starting job.");this.run(aa,function(ae){ad.dataset_id=ae.dataset_id;ad.content_div.text("Running job.");ad.init()})},run:function(ab,ac){$.extend(ab,this.get_param_values_dict());var aa=function(){$.getJSON(rerun_tool_url,ab,function(ad){if(ad==="no converter"){new_track.container_div.addClass("error");new_track.content_div.text(G)}else{if(ad.error){new_track.container_div.addClass("error");new_track.content_div.text(v+ad.message)}else{if(ad==="pending"){new_track.container_div.addClass("pending");new_track.content_div.text("Converting input data so that it can be easily reused.");setTimeout(aa,2000)}else{ac(ad)}}}})};aa()}});var K=function(ab,aa,ac,ad){this.name=ab;this.label=aa;this.html=ac;this.value=ad};var g=function(ac,ab,ae,af,ad,aa){K.call(this,ac,ab,ae,af);this.min=ad;this.max=aa};var h=function(ab,aa,ac,ad){this.name=ab;this.index=aa;this.tool_id=ac;this.tool_exp_name=ad};var R=function(ab,aa,ac,ad){h.call(this,ab,aa,ac,ad);this.low=-Number.MAX_VALUE;this.high=Number.MAX_VALUE;this.min=Number.MAX_VALUE;this.max=-Number.MAX_VALUE;this.slider=null;this.slider_label=null};n(R.prototype,{applies_to:function(aa){if(aa.length>this.index){return true}return false},keep:function(aa){if(!this.applies_to(aa)){return true}var ab=parseInt(aa[this.index]);return(isNaN(ab)||(ab>=this.low&&ab<=this.high))},update_attrs:function(ab){var aa=false;if(!this.applies_to(ab)){return 
aa}if(ab[this.index]<this.min){this.min=Math.floor(ab[this.index]);aa=true}if(ab[this.index]>this.max){this.max=Math.ceil(ab[this.index]);aa=true}return aa},update_ui_elt:function(){var ac=function(af,ad){var ae=ad-af;return(ae<=2?0.01:1)};var ab=this.slider.slider("option","min"),aa=this.slider.slider("option","max");if(this.min<ab||this.max>aa){this.slider.slider("option","min",this.min);this.slider.slider("option","max",this.max);this.slider.slider("option","step",ac(this.min,this.max));this.slider.slider("option","values",[this.min,this.max])}}});var W=function(ac,al){this.track=ac;this.filters=[];for(var ag=0;ag<al.length;ag++){var aa=al[ag],ab=aa.name,ak=aa.type,ai=aa.index,an=aa.tool_id,ad=aa.tool_exp_name;if(ak==="int"||ak==="float"){this.filters[ag]=new R(ab,ai,an,ad)}else{console.log("ERROR: unsupported filter: ",ab,ak)}}var aj=function(ao,ap,aq){ao.click(function(){var ar=ap.text();max=parseFloat(aq.slider("option","max")),input_size=(max<=1?4:max<=1000000?max.toString().length:6),multi_value=false;if(aq.slider("option","values")){input_size=2*input_size+1;multi_value=true}ap.text("");$("<input type='text'/>").attr("size",input_size).attr("maxlength",input_size).attr("value",ar).appendTo(ap).focus().select().click(function(at){at.stopPropagation()}).blur(function(){$(this).remove();ap.text(ar)}).keyup(function(ax){if(ax.keyCode===27){$(this).trigger("blur")}else{if(ax.keyCode===13){var av=aq.slider("option","min"),at=aq.slider("option","max"),aw=function(ay){return(isNaN(ay)||ay>at||ay<av)},au=$(this).val();if(!multi_value){au=parseFloat(au);if(aw(au)){alert("Parameter value must be in the range ["+av+"-"+at+"]");return $(this)}}else{au=au.split("-");au=[parseFloat(au[0]),parseFloat(au[1])];if(aw(au[0])||aw(au[1])){alert("Parameter value must be in the range ["+av+"-"+at+"]");return 
$(this)}}aq.slider((multi_value?"values":"value"),au)}}})})};this.parent_div=$("<div/>").addClass("filters").hide();this.parent_div.bind("drag",function(ao){ao.stopPropagation()}).bind("click",function(ao){ao.stopPropagation()}).bind("dblclick",function(ao){ao.stopPropagation()}).bind("keydown",function(ao){ao.stopPropagation()});var ae=this;$.each(this.filters,function(av,ap){var ar=$("<div/>").addClass("slider-row").appendTo(ae.parent_div);var ao=$("<div/>").addClass("slider-label").appendTo(ar);var ax=$("<span/>").addClass("slider-name").text(ap.name+" ").appendTo(ao);var aq=$("<span/>");var at=$("<span/>").addClass("slider-value").appendTo(ao).append("[").append(aq).append("]");var aw=$("<div/>").addClass("slider").appendTo(ar);ap.control_element=$("<div/>").attr("id",ap.name+"-filter-control").appendTo(aw);var au=[0,0];ap.control_element.slider({range:true,min:Number.MAX_VALUE,max:-Number.MIN_VALUE,values:[0,0],slide:function(ay,az){au=az.values;aq.text(az.values[0]+"-"+az.values[1]);setTimeout(function(){if(az.values[0]==au[0]&&az.values[1]==au[1]){var aA=az.values;aq.text(aA[0]+"-"+aA[1]);ap.low=aA[0];ap.high=aA[1];ae.track.draw(true,true)}},50)},change:function(ay,az){ap.control_element.slider("option","slide").call(ap.control_element,ay,az)}});ap.slider=ap.control_element;ap.slider_label=aq;aj(at,aq,ap.control_element);$("<div style='clear: both;'/>").appendTo(ar)});if(this.filters.length!=0){var am=$("<div>").addClass("param-row").appendTo(this.parent_div);var ah=$("<input type='submit'>").attr("value","Run on complete dataset").appendTo(am);var af=this;ah.click(function(){af.run_on_dataset()})}};n(W.prototype,{reset_filters:function(){for(var aa=0;aa<this.filters.length;aa++){filter=this.filters[aa];filter.slider.slider("option","values",[filter.min,filter.max])}},run_on_dataset:function(){var ai=function(am,ak,al){if(!(ak in am)){am[ak]=al}return am[ak]};var ac={},aa,ab,ad;for(var 
// run_on_dataset (tail): translate active filters into per-tool condition strings, then chain the filter tools server-side, feeding each output dataset into the next.
ae=0;ae<this.filters.length;ae++){aa=this.filters[ae];if(aa.tool_id){if(aa.min!=aa.low){ab=ai(ac,aa.tool_id,[]);ab[ab.length]=aa.tool_exp_name+" >= "+aa.low}if(aa.max!=aa.high){ab=ai(ac,aa.tool_id,[]);ab[ab.length]=aa.tool_exp_name+" <= "+aa.high}}}var ag=[];for(var aj in ac){ag[ag.length]=[aj,ac[aj]]}var ah=ag.length;(function af(aq,an){var al=an[0],am=al[0],ap=al[1],ao="("+ap.join(") and (")+")",ak={cond:ao,input:aq,target_dataset_id:aq,tool_id:am},an=an.slice(1);$.getJSON(run_tool_url,ak,function(ar){if(ar.error){show_modal("Filter Dataset","Error running tool "+am,{Close:hide_modal})}else{if(an.length===0){show_modal("Filtering Dataset","Filter(s) are running on the complete dataset. Outputs are in dataset's history.",{Close:hide_modal})}else{af(ar.dataset_id,an)}}})})(this.track.dataset_id,ag)}});
// V: track configuration — param descriptors, current values (seeded from saved_values or defaults), and an onchange hook.
var V=function(aa){this.track=aa.track;this.params=aa.params;this.values={};if(aa.saved_values){this.restore_values(aa.saved_values)}this.onchange=aa.onchange};n(V.prototype,{restore_values:function(aa){var ab=this;$.each(this.params,function(ac,ad){if(aa[ad.key]!==undefined){ab.values[ad.key]=aa[ad.key]}else{ab.values[ad.key]=ad.default_value}})},
// build_form: render a form row per non-hidden param — checkbox for bool, farbtastic picker popup for color, plain text input otherwise.
build_form:function(){var ab=this;var aa=$("<div />");$.each(this.params,function(af,ad){if(!ad.hidden){var ac="param_"+af;var ak=$("<div class='form-row' />").appendTo(aa);ak.append($("<label />").attr("for",ac).text(ad.label+":"));if(ad.type==="bool"){ak.append($('<input type="checkbox" />').attr("id",ac).attr("name",ac).attr("checked",ab.values[ad.key]))}else{if(ad.type==="color"){var ah=ab.values[ad.key];var ag=$("<input />").attr("id",ac).attr("name",ac).val(ah);var ai=$("<div class='tipsy tipsy-north' style='position: absolute;' />").hide();var ae=$("<div style='background-color: black; padding: 10px;'></div>").appendTo(ai);var aj=$("<div/>").appendTo(ae).farbtastic({width:100,height:100,callback:ag,color:ah});$("<div 
/>").append(ag).append(ai).appendTo(ak).bind("click",function(al){ai.css({left:$(this).position().left+($(ag).width()/2)-60,top:$(this).position().top+$(this.height)}).show();$(document).bind("click.color-picker",function(){ai.hide();$(document).unbind("click.color-picker")});al.stopPropagation()})}else{ak.append($("<input />").attr("id",ac).attr("name",ac).val(ab.values[ad.key]))}}}});return aa},update_from_form:function(aa){var ac=this;var ab=false;$.each(this.params,function(ad,af){if(!af.hidden){var ag="param_"+ad;var ae=aa.find("#"+ag).val();if(af.type==="float"){ae=parseFloat(ae)}else{if(af.type==="int"){ae=parseInt(ae)}else{if(af.type==="bool"){ae=aa.find("#"+ag).is(":checked")}}}if(ae!==ac.values[af.key]){ac.values[af.key]=ae;ab=true}}});if(ab){this.onchange()}}});var b=function(ac,ab,aa){this.index=ac;this.low=ac*M*ab;this.high=(ac+1)*M*ab;this.resolution=ab;this.canvas=$("<div class='track-tile'/>").append(aa);this.stale=false};var l=function(ac,ab,aa,ad){b.call(this,ac,ab,aa);this.max_val=ad};var L=function(ac,ab,aa,ad){b.call(this,ac,ab,aa);this.message=ad};var j=function(ab,aa,ae,ac,ad){this.name=ab;this.view=aa;this.parent_element=ae;this.data_url=(ac?ac:default_data_url);this.data_url_extra_params={};this.data_query_wait=(ad?ad:H);this.dataset_check_url=converted_datasets_state_url;this.container_div=$("<div />").addClass("track").css("position","relative");if(!this.hidden){this.header_div=$("<div class='track-header' />").appendTo(this.container_div);if(this.view.editor){this.drag_div=$("<div class='draghandle' />").appendTo(this.header_div)}this.name_div=$("<div class='menubutton popup' />").appendTo(this.header_div);this.name_div.text(this.name);this.name_div.attr("id",this.name.replace(/\s+/g,"-").replace(/[^a-zA-Z0-9\-]/g,"").toLowerCase())}this.content_div=$("<div class='track-content'>").appendTo(this.container_div);this.parent_element.append(this.container_div)};n(j.prototype,{get_type:function(){if(this instanceof 
X){return"LabelTrack"}else{if(this instanceof x){return"ReferenceTrack"}else{if(this instanceof k){return"LineTrack"}else{if(this instanceof S){return"ReadTrack"}else{if(this instanceof Q){return"ToolDataFeatureTrack"}else{if(this instanceof P){return"VcfTrack"}else{if(this instanceof e){return"FeatureTrack"}}}}}}}return""},
// Track.init: clear caches, then poll the converted-datasets-state endpoint and render the matching state (error / no converter / no data / pending-with-retry / ready).
init:function(){var aa=this;aa.enabled=false;aa.tile_cache.clear();aa.data_manager.clear();aa.initial_canvas=undefined;aa.content_div.css("height","auto");aa.container_div.removeClass("nodata error pending");if(!aa.dataset_id){return}$.getJSON(converted_datasets_state_url,{hda_ldda:aa.hda_ldda,dataset_id:aa.dataset_id,chrom:aa.view.chrom},function(ab){if(!ab||ab==="error"||ab.kind==="error"){aa.container_div.addClass("error");aa.content_div.text(m);if(ab.message){var ad=aa.view.tracks.indexOf(aa);var ac=$(" <a href='javascript:void(0);'></a>").text("View error").bind("click",function(){show_modal("Trackster Error","<pre>"+ab.message+"</pre>",{Close:hide_modal})});aa.content_div.append(ac)}}else{if(ab==="no converter"){aa.container_div.addClass("error");aa.content_div.text(G)}else{if(ab==="no data"||(ab.data!==undefined&&(ab.data===null||ab.data.length===0))){aa.container_div.addClass("nodata");aa.content_div.text(B)}else{if(ab==="pending"){aa.container_div.addClass("pending");aa.content_div.text(q);setTimeout(function(){aa.init()},aa.data_query_wait)}else{if(ab.status==="data"){if(ab.valid_chroms){aa.valid_chroms=ab.valid_chroms;aa.make_name_popup_menu()}aa.content_div.text(U);if(aa.view.chrom){aa.content_div.text("");aa.content_div.css("height",aa.height_px+"px");aa.enabled=true;$.when(aa.predraw_init()).done(function(){aa.container_div.removeClass("nodata error pending");aa.draw()})}}}}}}})},predraw_init:function(){},update_name:function(aa){this.old_name=this.name;this.name=aa;this.name_div.text(this.name)},revert_name:function(){this.name=this.old_name;this.name_div.text(this.name)}});
// J: TiledTrack — adds filters, an optional dynamic tool, child tracks, and a display-mode popup menu to the Track base.
var J=function(ah,af,ai){var 
ab=this,aj=ab.view;this.filters_manager=(ah!==undefined?new W(this,ah):undefined);this.filters_available=false;this.filters_visible=false;this.tool=(af!==undefined&&obj_length(af)>0?new o(this,af):undefined);this.parent_track=ai;this.child_tracks=[];if(ab.hidden){return}if(this.parent_track){this.header_div.find(".draghandle").removeClass("draghandle").addClass("child-track-icon").addClass("icon-button");this.parent_element.addClass("child-track");this.tool=undefined}ab.child_tracks_container=$("<div/>").addClass("child-tracks-container").hide();ab.container_div.append(ab.child_tracks_container);if(this.filters_manager){this.filters_div=this.filters_manager.parent_div;this.header_div.after(this.filters_div)}if(this.tool){this.dynamic_tool_div=this.tool.parent_div;this.header_div.after(this.dynamic_tool_div)}if(ab.display_modes!==undefined){if(ab.mode_div===undefined){ab.mode_div=$("<div class='right-float menubutton popup' />").appendTo(ab.header_div);var ac=(ab.track_config&&ab.track_config.values.mode?ab.track_config.values.mode:ab.display_modes[0]);ab.mode=ac;ab.mode_div.text(ac);var aa={};for(var ad=0,ag=ab.display_modes.length;ad<ag;ad++){var ae=ab.display_modes[ad];aa[ae]=function(ak){return function(){ab.change_mode(ak)}}(ae)}make_popupmenu(ab.mode_div,aa)}else{ab.mode_div.hide()}}this.make_name_popup_menu()};n(J.prototype,j.prototype,{change_mode:function(ab){var aa=this;aa.mode_div.text(ab);aa.mode=ab;aa.track_config.values.mode=ab;aa.tile_cache.clear();aa.draw()},
// make_name_popup_menu: rebuild the track-name popup — configure dialog (Enter/Esc bound), filter/tool visibility toggles, chrom list, and Remove.
make_name_popup_menu:function(){var ab=this;var aa={};aa["Edit configuration"]=function(){var ah=function(){hide_modal();$(window).unbind("keypress.check_enter_esc")},af=function(){ab.track_config.update_from_form($(".dialog-box"));hide_modal();$(window).unbind("keypress.check_enter_esc")},ag=function(ai){if((ai.keyCode||ai.which)===27){ah()}else{if((ai.keyCode||ai.which)===13){af()}}};$(window).bind("keypress.check_enter_esc",ag);show_modal("Configure 
Track",ab.track_config.build_form(),{Cancel:ah,OK:af})};if(ab.filters_available>0){var ae=(ab.filters_div.is(":visible")?"Hide filters":"Show filters");aa[ae]=function(){ab.filters_visible=(ab.filters_div.is(":visible"));if(ab.filters_visible){ab.filters_manager.reset_filters()}ab.filters_div.toggle();ab.make_name_popup_menu()}}if(ab.tool){var ae=(ab.dynamic_tool_div.is(":visible")?"Hide tool":"Show tool");aa[ae]=function(){if(!ab.dynamic_tool_div.is(":visible")){ab.update_name(ab.name+ab.tool_region_and_parameters_str())}else{
// NOTE(review): menu_option_text is assigned without var (global leak) and appears write-only here — likely vestigial; confirm nothing else reads it before removing.
menu_option_text="Show dynamic tool";ab.revert_name()}ab.dynamic_tool_div.toggle();ab.make_name_popup_menu()}}if(ab.valid_chroms){aa["List chrom/contigs with data"]=function(){show_modal("Chrom/contigs with data","<p>"+ab.valid_chroms.join("<br/>")+"</p>",{Close:function(){hide_modal()}})}}var ac=view;var ad=function(){$("#no-tracks").show()};if(this.parent_track){ac=this.parent_track;ad=function(){}}aa.Remove=function(){ac.remove_track(ab);if(ac.num_tracks===0){ad()}};make_popupmenu(ab.name_div,aa)},
// draw: tile the visible window at the current resolution, reusing cached tiles; an interval timer waits for all tiles, then trims stale rows, harmonizes Histogram max values, refreshes filter UI, and pads messaged tiles.
draw:function(aa,ac){if(!this.dataset_id){return}var au=this.view.low,ag=this.view.high,ai=ag-au,ak=this.view.container.width(),ae=ak/ai,al=this.view.resolution,ad=$("<div style='position: relative;'></div>"),am=function(aw,ax,av){return aw+"_"+ax+"_"+av};if(!ac){this.content_div.children().remove()}this.content_div.append(ad);this.max_height=0;var ao=Math.floor(au/al/M);var af=[];var ap=0;while((ao*M*al)<ag){var at=am(ak,ae,ao);var ah=this.tile_cache.get(at);var aq=ao*M*this.view.resolution;var ab=aq+M*this.view.resolution;if(!aa&&ah){af[af.length]=ah;this.show_tile(ah,ad,ae)}else{this.delayed_draw(aa,at,ao,al,ad,ae,af)}ao+=1;ap++}var aj=this;var ar=setInterval(function(){if(af.length===ap){clearInterval(ar);if(ac){var aA=aj.content_div.children();var aB=false;for(var az=aA.length-1,aF=0;az>=aF;az--){var ay=$(aA[az]);if(aB){ay.remove()}else{if(ay.children().length!==0){aB=true}}}}if(aj instanceof e&&aj.mode=="Histogram"){var aE=-1;for(var 
az=0;az<af.length;az++){var aH=af[az].max_val;if(aH>aE){aE=aH}}for(var az=0;az<af.length;az++){if(af[az].max_val!==aE){var aG=af[az];aG.canvas.remove();aj.delayed_draw(true,am(ak,ae,aG.index),aG.index,aG.resolution,ad,ae,[],{max:aE})}}}if(aj.filters_manager){var ax=aj.filters_manager.filters;for(var aD=0;aD<ax.length;aD++){ax[aD].update_ui_elt()}var aC=false;if(aj.example_feature){for(var aD=0;aD<ax.length;aD++){if(ax[aD].applies_to(aj.example_feature)){aC=true;break}}}if(aj.filters_available!==aC){aj.filters_available=aC;if(!aj.filters_available){aj.filters_div.hide()}aj.make_name_popup_menu()}}var av=false;for(var aw=0;aw<af.length;aw++){if(af[aw].message){av=true;break}}if(av){for(var aw=0;aw<af.length;aw++){aG=af[aw];if(!aG.message){aG.canvas.css("padding-top",A)}}}}},50);for(var an=0;an<this.child_tracks.length;an++){this.child_tracks[an].draw(aa,ac)}},
// delayed_draw: after 50ms (skip if the view moved away), reuse a cached tile or fetch data (plus reference-track data when zoomed in far enough) and render the tile.
delayed_draw:function(ab,ai,ac,ae,aj,am,ak,af){var ad=this,ag=ac*M*ae,al=ag+M*ae;var ah=function(av,an,ap,ao,at,au,aq){var ar=ad.draw_tile(an,ap,ao,au,aq);ad.tile_cache.set(ai,ar);if(ar===undefined){return}ad.show_tile(ar,at,au);ak[ak.length]=ar};var aa=setTimeout(function(){if(ag<=ad.view.high&&al>=ad.view.low){var an=(ab?undefined:ad.tile_cache.get(ai));if(an){ad.show_tile(an,aj,am);ak[ak.length]=an}else{$.when(ad.data_manager.get_data(ag,al,ae,ad.data_url_extra_params)).then(function(ao){n(ao,af);if(view.reference_track&&am>view.canvas_manager.char_width_px){$.when(view.reference_track.data_manager.get_data(ag,al,ae,view.reference_track.data_url_extra_params)).then(function(ap){ah(aa,ao,ae,ac,aj,am,ap)})}else{ah(aa,ao,ae,ac,aj,am)}})}}},50)},
// show_tile: position the tile canvas at its genomic offset; if the tile carries a message, wrap it with "more rows"/"more columns" links that request deeper/broader data.
show_tile:function(ah,aj,ak){var ac=this,ab=ah.canvas,ag=ab;if(ah.message){var al=$("<div/>"),ai=$("<div/>").addClass("tile-message").text(ah.message).css({height:A-1,width:ah.canvas.width}).appendTo(al),ae=$("<a href='javascript:void(0);'/>").addClass("icon more-down").appendTo(ai),aa=$("<a href='javascript:void(0);'/>").addClass("icon 
more-across").appendTo(ai);al.append(ab);ag=al;ae.click(function(){ah.stale=true;ac.data_manager.get_more_data(ah.low,ah.high,ah.resolution,{},ac.data_manager.DEEP_DATA_REQ);ac.draw()}).dblclick(function(am){am.stopPropagation()});aa.click(function(){ah.stale=true;ac.data_manager.get_more_data(ah.low,ah.high,ah.resolution,{},ac.data_manager.BROAD_DATA_REQ);ac.draw()}).dblclick(function(am){am.stopPropagation()})}var af=this.view.high-this.view.low,ad=(ah.low-this.view.low)*ak;if(this.left_offset){ad-=this.left_offset}ag.css({position:"absolute",top:0,left:ad,height:""});aj.append(ag);ac.max_height=Math.max(ac.max_height,ag.height());ac.content_div.css("height",ac.max_height+"px");aj.children().css("height",ac.max_height+"px")},
// set_overview: copy this track's initial canvas into the view's overview strip.
set_overview:function(){var aa=this.view;if(this.initial_canvas&&this.is_overview){aa.overview_close.show();aa.overview_viewport.append(this.initial_canvas);aa.overview_highlight.show().height(this.initial_canvas.height());aa.overview_viewport.height(this.initial_canvas.height()+aa.overview_box.height())}$(window).trigger("resize")},tool_region_and_parameters_str:function(ac,aa,ad){var ab=this,ae=(ac!==undefined&&aa!==undefined&&ad!==undefined?ac+":"+aa+"-"+ad:"all");return" - region=["+ae+"], parameters=["+ab.tool.get_param_values().join(", ")+"]"},add_track:function(aa){aa.track_id=this.track_id+"_"+this.child_tracks.length;aa.container_div.attr("id","track_"+aa.track_id);this.child_tracks_container.append(aa.container_div);C(aa.container_div,".child-track-icon");if(!$(this.child_tracks_container).is(":visible")){this.child_tracks_container.show()}this.child_tracks.push(aa);this.view.has_changes=true},remove_track:function(aa){aa.container_div.fadeOut("slow",function(){$(this).remove()})}});
// X: LabelTrack — renders coordinate labels at round-number intervals across the visible window.
var X=function(aa,ab){this.hidden=true;j.call(this,null,aa,ab);this.container_div.addClass("label-track")};n(X.prototype,j.prototype,{draw:function(){var 
ac=this.view,ad=ac.high-ac.low,ag=Math.floor(Math.pow(10,Math.floor(Math.log(ad)/Math.log(10)))),aa=Math.floor(ac.low/ag)*ag,ae=this.view.container.width(),ab=$("<div style='position: relative; height: 1.3em;'></div>");while(aa<ac.high){var af=(aa-ac.low)/ad*ae;ab.append($("<div class='label'>"+commatize(aa)+"</div>").css({position:"absolute",left:af-1}));aa+=ag}this.content_div.children(":first").remove();this.content_div.append(ab)}});
// x: ReferenceTrack — draws the base sequence, one centered character per base, only when zoomed in past char_width_px.
var x=function(aa){this.hidden=true;j.call(this,null,aa,aa.top_labeltrack);J.call(this);aa.reference_track=this;this.left_offset=200;this.height_px=12;this.container_div.addClass("reference-track");this.content_div.css("background","none");this.content_div.css("min-height","0px");this.content_div.css("border","none");this.data_url=reference_url;this.data_url_extra_params={dbkey:aa.dbkey};this.data_manager=new E(y,this,false);this.tile_cache=new c(r)};n(x.prototype,J.prototype,{draw_tile:function(ai,af,ab,ak){var ae=this,ac=M*af;if(ak>this.view.canvas_manager.char_width_px){if(ai===null){ae.content_div.css("height","0px");return}var ad=this.view.canvas_manager.new_canvas();var aj=ad.getContext("2d");ad.width=Math.ceil(ac*ak+ae.left_offset);ad.height=ae.height_px;aj.font=aj.canvas.manager.default_font;aj.textAlign="center";for(var ag=0,ah=ai.length;ag<ah;ag++){var aa=Math.round(ag*ak);aj.fillText(ai[ag],aa+ae.left_offset,10)}return new b(ab,af,ad)}this.content_div.css("height","0px")}});
// k: LineTrack — numeric signal track (Histogram/Line/Filled/Intensity) with configurable color, min/max, and a resize handle.
var k=function(ae,ac,af,aa,ad){var ab=this;this.display_modes=["Histogram","Line","Filled","Intensity"];this.mode="Histogram";j.call(this,ae,ac,ac.viewport_container);J.call(this);this.min_height_px=16;this.max_height_px=400;this.height_px=80;this.hda_ldda=af;this.dataset_id=aa;this.original_dataset_id=aa;this.data_manager=new N(y,this);this.tile_cache=new c(r);this.track_config=new V({track:this,params:[{key:"color",label:"Color",type:"color",default_value:"black"},{key:"min_value",label:"Min 
Value",type:"float",default_value:undefined},{key:"max_value",label:"Max Value",type:"float",default_value:undefined},{key:"mode",type:"string",default_value:this.mode,hidden:true},{key:"height",type:"int",default_value:this.height_px,hidden:true}],saved_values:ad,onchange:function(){ab.vertical_range=ab.prefs.max_value-ab.prefs.min_value;$("#linetrack_"+ab.track_id+"_minval").text(ab.prefs.min_value);$("#linetrack_"+ab.track_id+"_maxval").text(ab.prefs.max_value);ab.tile_cache.clear();ab.draw()}});this.prefs=this.track_config.values;this.height_px=this.track_config.values.height;this.vertical_range=this.track_config.values.max_value-this.track_config.values.min_value;this.add_resize_handle()};n(k.prototype,J.prototype,{
// add_resize_handle: show a drag handle on hover; dragging adjusts the content height within [min,max] and persists it to track_config.
add_resize_handle:function(){var aa=this;var ad=false;var ac=false;var ab=$("<div class='track-resize'>");$(aa.container_div).hover(function(){ad=true;ab.show()},function(){ad=false;if(!ac){ab.hide()}});ab.hide().bind("dragstart",function(ae,af){ac=true;af.original_height=$(aa.content_div).height()}).bind("drag",function(af,ag){var ae=Math.min(Math.max(ag.original_height+ag.deltaY,aa.min_height_px),aa.max_height_px);$(aa.content_div).css("height",ae);aa.height_px=ae;aa.draw(true)}).bind("dragend",function(ae,af){aa.tile_cache.clear();ac=false;if(!ad){ab.hide()}aa.track_config.values.height=aa.height_px}).appendTo(aa.container_div)},
// predraw_init: fetch whole-chrom stats to seed min/max prefs and the y-axis labels before the first draw.
predraw_init:function(){var aa=this,ab=aa.view.tracks.indexOf(aa);aa.vertical_range=undefined;return $.getJSON(aa.data_url,{stats:true,chrom:aa.view.chrom,low:null,high:null,hda_ldda:aa.hda_ldda,dataset_id:aa.dataset_id},function(ac){aa.container_div.addClass("line-track");var 
ae=ac.data;if(isNaN(parseFloat(aa.prefs.min_value))||isNaN(parseFloat(aa.prefs.max_value))){aa.prefs.min_value=ae.min;aa.prefs.max_value=ae.max;$("#track_"+ab+"_minval").val(aa.prefs.min_value);$("#track_"+ab+"_maxval").val(aa.prefs.max_value)}aa.vertical_range=aa.prefs.max_value-aa.prefs.min_value;aa.total_frequency=ae.total_frequency;aa.container_div.find(".yaxislabel").remove();var af=$("<div />").addClass("yaxislabel").attr("id","linetrack_"+ab+"_minval").text(u(aa.prefs.min_value));var ad=$("<div />").addClass("yaxislabel").attr("id","linetrack_"+ab+"_maxval").text(u(aa.prefs.max_value));ad.css({position:"absolute",top:"24px",left:"10px"});ad.prependTo(aa.container_div);af.css({position:"absolute",bottom:"2px",left:"10px"});af.prependTo(aa.container_div)})},draw_tile:function(ak,ae,ab,aj){if(this.vertical_range===undefined){return}var af=ab*M*ae,ad=M*ae,aa=Math.ceil(ad*aj),ah=this.height_px;var ac=this.view.canvas_manager.new_canvas();ac.width=aa,ac.height=ah;var ai=ac.getContext("2d");var ag=new I.LinePainter(ak.data,af,af+ad,this.prefs,this.mode);ag.draw(ai,aa,ah);return new b(ab,ae,ac)}});var e=function(aa,af,ae,ai,ah,ac,ad,ag){var ab=this;this.display_modes=["Auto","Histogram","Dense","Squish","Pack"];this.track_config=new V({track:this,params:[{key:"block_color",label:"Block color",type:"color",default_value:"#444"},{key:"label_color",label:"Label color",type:"color",default_value:"black"},{key:"show_counts",label:"Show summary 
counts",type:"bool",default_value:true},{key:"mode",type:"string",default_value:this.mode,hidden:true},],saved_values:ah,onchange:function(){ab.tile_cache.clear();ab.draw()}});this.prefs=this.track_config.values;j.call(this,aa,af,af.viewport_container);J.call(this,ac,ad,ag);this.height_px=0;this.container_div.addClass("feature-track");this.hda_ldda=ae;this.dataset_id=ai;this.original_dataset_id=ai;this.show_labels_scale=0.001;this.showing_details=false;this.summary_draw_height=30;this.inc_slots={};this.start_end_dct={};this.tile_cache=new c(d);this.data_manager=new N(20,this);this.left_offset=200;this.painter=I.LinkedFeaturePainter};n(e.prototype,J.prototype,{update_auto_mode:function(aa){if(this.mode=="Auto"){if(aa=="no_detail"){aa="feature spans"}else{if(aa=="summary_tree"){aa="coverage histogram"}}this.mode_div.text("Auto ("+aa+")")}},incremental_slots:function(ae,ab,ad){var ac=this.view.canvas_manager.dummy_context,aa=this.inc_slots[ae];if(!aa||(aa.mode!==ad)){aa=new (p.FeatureSlotter)(ae,ad==="Pack",w,function(af){return ac.measureText(af)});aa.mode=ad;this.inc_slots[ae]=aa}return aa.slot_features(ab)},get_summary_tree_data:function(ae,ah,ac,ap){if(ap>ac-ah){ap=ac-ah}var al=Math.floor((ac-ah)/ap),ao=[],ad=0;var af=0,ag=0,ak,an=0,ai=[],am,aj;var ab=function(at,ar,au,aq){at[0]=ar+au*aq;at[1]=ar+(au+1)*aq};while(an<ap&&af!==ae.length){var aa=false;for(;an<ap&&!aa;an++){ab(ai,ah,an,al);for(ag=af;ag<ae.length;ag++){ak=ae[ag].slice(1,3);if(is_overlap(ak,ai)){aa=true;break}}if(aa){break}}data_start_index=ag;ao[ao.length]=am=[ai[0],0];for(;ag<ae.length;ag++){ak=ae[ag].slice(1,3);if(is_overlap(ak,ai)){am[1]++}else{break}}if(am[1]>ad){ad=am[1]}an++}return{max:ad,delta:al,data:ao}},draw_tile:function(an,av,az,aj,ad){var ar=this,aB=az*M*av,ab=(az+1)*M*av,ap=ab-aB,at=Math.ceil(ap*aj),aq=this.mode,aF=25,ae=this.left_offset,ao,af;if(aq==="Auto"){if(an.dataset_type==="summary_tree"){aq=an.dataset_type}else{if(an.extra_info==="no_detail"){aq="no_detail"}else{var 
aE=an.data;if(this.view.high-this.view.low>F){aq="Squish"}else{aq="Pack"}}}this.update_auto_mode(aq)}if(aq==="summary_tree"||aq==="Histogram"){af=this.summary_draw_height;this.container_div.find(".yaxislabel").remove();var aa=$("<div />").addClass("yaxislabel");aa.text(an.max);aa.css({position:"absolute",top:"24px",left:"10px",color:this.prefs.label_color});aa.prependTo(this.container_div);var ac=this.view.canvas_manager.new_canvas();ac.width=at+ae;ac.height=af+O;if(an.dataset_type!="summary_tree"){var ak=this.get_summary_tree_data(an.data,aB,ab,200);if(an.max){ak.max=an.max}an=ak}var aC=new I.SummaryTreePainter(an,aB,ab,this.prefs);var au=ac.getContext("2d");au.translate(ae,O);aC.draw(au,at,af);return new l(az,av,ac,an.max)}var ao,ah=1;if(aq==="no_detail"||aq==="Squish"||aq==="Pack"){ah=this.incremental_slots(aj,an.data,aq);ao=this.inc_slots[aj].slots}var ai=[];if(an.data){var al=this.filters_manager.filters;for(var aw=0,ay=an.data.length;aw<ay;aw++){var ag=an.data[aw];var ax=false;var am;for(var aA=0,aD=al.length;aA<aD;aA++){am=al[aA];am.update_attrs(ag);if(!am.keep(ag)){ax=true;break}}if(!ax){ai.push(ag)}}}var aC=new (this.painter)(ai,aB,ab,this.prefs,aq,ad);var af=aC.get_required_height(ah);var ac=this.view.canvas_manager.new_canvas();ac.width=at+ae;ac.height=af;var au=ac.getContext("2d");au.fillStyle=this.prefs.block_color;au.font=au.canvas.manager.default_font;au.textAlign="right";this.container_div.find(".yaxislabel").remove();if(an.data){this.example_feature=(an.data.length?an.data[0]:undefined);au.translate(ae,0);aC.draw(au,at,af,ao)}return new L(az,av,ac,an.message)}});var P=function(ad,ab,af,aa,ac,ae){e.call(this,ad,ab,af,aa,ac,ae);this.painter=I.VariantPainter};n(P.prototype,J.prototype,e.prototype);var S=function(ad,ab,af,aa,ac,ae){e.call(this,ad,ab,af,aa,ac,ae);this.track_config=new V({track:this,params:[{key:"block_color",label:"Block color",type:"color",default_value:"#444"},{key:"label_color",label:"Label 
color",type:"color",default_value:"black"},{key:"show_insertions",label:"Show insertions",type:"bool",default_value:false},{key:"show_differences",label:"Show differences only",type:"bool",default_value:true},{key:"show_counts",label:"Show summary counts",type:"bool",default_value:true},{key:"mode",type:"string",default_value:this.mode,hidden:true},],saved_values:ac,onchange:function(){this.track.tile_cache.clear();this.track.draw()}});this.prefs=this.track_config.values;this.painter=I.ReadPainter;this.make_name_popup_menu()};n(S.prototype,J.prototype,e.prototype);var Q=function(ae,ac,ag,aa,ad,af,ab){e.call(this,ae,ac,ag,aa,ad,af,{},ab);this.data_url=raw_data_url;this.data_query_wait=1000;this.dataset_check_url=dataset_state_url};n(Q.prototype,J.prototype,e.prototype,{predraw_init:function(){var ab=this;var aa=function(){if(ab.data_manager.size()===0){setTimeout(aa,300)}else{ab.data_url=default_data_url;ab.data_query_wait=H;ab.dataset_state_url=converted_datasets_state_url;$.getJSON(ab.dataset_state_url,{dataset_id:ab.dataset_id,hda_ldda:ab.hda_ldda},function(ac){})}};aa()}});T.View=Y;T.LineTrack=k;T.FeatureTrack=e;T.ReadTrack=S};var slotting_module=function(c,b){var e=c("class").extend;var d=2,a=5;b.FeatureSlotter=function(j,h,f,g){this.slots={};this.start_end_dct={};this.w_scale=j;this.include_label=h;this.max_rows=f;this.measureText=g};e(b.FeatureSlotter.prototype,{slot_features:function(m){var p=this.w_scale,s=this.slots,h=this.start_end_dct,y=[],A=[],n=0,z=this.max_rows;for(var w=0,x=m.length;w<x;w++){var l=m[w],o=l[0];if(s[o]!==undefined){n=Math.max(n,s[o]);A.push(s[o])}else{y.push(w)}}var q=function(G,H){for(var F=0;F<=z;F++){var D=false,I=h[F];if(I!==undefined){for(var C=0,E=I.length;C<E;C++){var B=I[C];if(H>B[0]&&G<B[1]){D=true;break}}}if(!D){return F}}return -1};for(var w=0,x=y.length;w<x;w++){var 
l=m[y[w]],o=l[0],u=l[1],f=l[2],r=l[3],g=Math.floor(u*p),k=Math.ceil(f*p),v=this.measureText(r).width,j;if(r!==undefined&&this.include_label){v+=(d+a);if(g-v>=0){g-=v;j="left"}else{k+=v;j="right"}}var t=q(g,k);if(t>=0){if(h[t]===undefined){h[t]=[]}h[t].push([g,k]);s[o]=t;n=Math.max(n,t)}else{}}return n+1}})};var painters_module=function(j,w){var t=j("class").extend;var o=function(H,z,F,y,E,C){if(C===undefined){C=4}var B=y-z;var A=E-F;var D=Math.floor(Math.sqrt(B*B+A*A)/C);var I=B/D;var G=A/D;var x;for(x=0;x<D;x++,z+=I,F+=G){if(x%2!==0){continue}H.fillRect(z,F,C,1)}};var p=function(A,z,x,D){var C=z-D/2,B=z+D/2,E=x-Math.sqrt(D*3/2);A.beginPath();A.moveTo(C,E);A.lineTo(B,E);A.lineTo(z,x);A.lineTo(C,E);A.strokeStyle=this.fillStyle;A.fill();A.stroke();A.closePath()};var m=function(z,B,x,y,A){this.data=z;this.view_start=B;this.view_end=x;this.prefs=t({},this.default_prefs,y);this.mode=A};m.prototype.default_prefs={};var u=function(z,B,x,y,A){m.call(this,z,B,x,y,A)};u.prototype.default_prefs={show_counts:false};u.prototype.draw=function(M,z,L){var E=this.view_start,O=this.view_end-this.view_start,N=z/O;var J=this.data.data,I=this.data.delta,G=this.data.max,B=L;delta_x_px=Math.ceil(I*N);M.save();for(var C=0,D=J.length;C<D;C++){var H=Math.floor((J[C][0]-E)*N);var F=J[C][1];if(!F){continue}var K=F/G*L;if(F!==0&&K<1){K=1}M.fillStyle=this.prefs.block_color;M.fillRect(H,B-K,delta_x_px,K);var A=4;if(this.prefs.show_counts&&(M.measureText(F).width+A)<delta_x_px){M.fillStyle=this.prefs.label_color;M.textAlign="center";M.fillText(F,H+(delta_x_px/2),10)}}M.restore()};var c=function(x,B,D,E,z){m.call(this,x,B,D,E,z);if(this.prefs.min_value===undefined){var F=Infinity;for(var y=0,A=this.data.length;y<A;y++){F=Math.min(F,this.data[y][1])}this.prefs.min_value=F}if(this.prefs.max_value===undefined){var C=-Infinity;for(var 
y=0,A=this.data.length;y<A;y++){C=Math.max(C,this.data[y][1])}this.prefs.max_value=C}};c.prototype.default_prefs={min_value:undefined,max_value:undefined,mode:"Histogram",color:"#000",overflow_color:"#F66"};c.prototype.draw=function(M,L,J){var E=false,G=this.prefs.min_value,C=this.prefs.max_value,I=C-G,x=J,z=this.view_start,K=this.view_end-this.view_start,A=L/K,H=this.mode,S=this.data;M.save();var T=Math.round(J+G/I*J);if(H!=="Intensity"){M.fillStyle="#aaa";M.fillRect(0,T,L,1)}M.beginPath();var Q,D,B;if(S.length>1){B=Math.ceil((S[1][0]-S[0][0])*A)}else{B=10}for(var N=0,O=S.length;N<O;N++){M.fillStyle=this.prefs.color;Q=Math.round((S[N][0]-z)*A);D=S[N][1];var P=false,F=false;if(D===null){if(E&&H==="Filled"){M.lineTo(Q,x)}E=false;continue}if(D<G){F=true;D=G}else{if(D>C){P=true;D=C}}if(H==="Histogram"){D=Math.round(D/I*x);M.fillRect(Q,T,B,-D)}else{if(H==="Intensity"){D=255-Math.floor((D-G)/I*255);M.fillStyle="rgb("+D+","+D+","+D+")";M.fillRect(Q,0,B,x)}else{D=Math.round(x-(D-G)/I*x);if(E){M.lineTo(Q,D)}else{E=true;if(H==="Filled"){M.moveTo(Q,x);M.lineTo(Q,D)}else{M.moveTo(Q,D)}}}}M.fillStyle=this.prefs.overflow_color;if(P||F){var R;if(H==="Histogram"||H==="Intensity"){R=B}else{Q-=2;R=4}if(P){M.fillRect(Q,0,R,3)}if(F){M.fillRect(Q,x-3,R,3)}}M.fillStyle=this.prefs.color}if(H==="Filled"){if(E){M.lineTo(Q,T);M.lineTo(0,T)}M.fill()}else{M.stroke()}M.restore()};var n=function(z,B,x,y,A){m.call(this,z,B,x,y,A)};n.prototype.default_prefs={block_color:"#FFF",connector_color:"#FFF"};t(n.prototype,{get_required_height:function(y){var x=y_scale=this.get_row_height(),z=this.mode;if(z==="no_detail"||z==="Squish"||z==="Pack"){x=y*y_scale}return x+Math.max(Math.round(y_scale/2),5)},draw:function(J,A,I,F){var D=this.data,G=this.view_start,K=this.view_end;J.save();J.fillStyle=this.prefs.block_color;J.textAlign="right";var N=this.view_end-this.view_start,M=A/N,z=this.get_row_height();for(var C=0,E=D.length;C<E;C++){var 
L=D[C],B=L[0],x=L[1],y=L[2],H=(F&&F[B]!==undefined?F[B]:null);if((x<K&&y>G)&&(this.mode=="Dense"||H!==null)){this.draw_element(J,this.mode,L,H,G,K,M,z,A)}}J.restore()},draw_element:function(D,z,F,B,A,C,E,y,x){}});var d=10,h=3,l=5,v=10,f=1,r=3,e=3,a=9,k=2,g="#ccc";var q=function(z,B,x,y,A){n.call(this,z,B,x,y,A)};t(q.prototype,n.prototype,{get_row_height:function(){var y=this.mode,x;if(y==="Dense"){x=d}else{if(y==="no_detail"){x=h}else{if(y==="Squish"){x=l}else{x=v}}}return x},draw_element:function(J,C,R,E,L,ab,af,ag,x){var O=R[0],ad=R[1],V=R[2],M=R[3],W=Math.floor(Math.max(0,(ad-L)*af)),K=Math.ceil(Math.min(x,Math.max(0,(V-L)*af))),U=(C==="Dense"?0:(0+E))*ag,I,Z,N=null,ah=null,A=this.prefs.block_color,Y=this.prefs.label_color;if(C=="Dense"){E=1}if(C==="no_detail"){J.fillStyle=A;J.fillRect(W,U+5,K-W,f)}else{var H=R[4],T=R[5],X=R[6],B=R[7];if(T&&X){N=Math.floor(Math.max(0,(T-L)*af));ah=Math.ceil(Math.min(x,Math.max(0,(X-L)*af)))}var ae,P;if(C==="Squish"||C==="Dense"){ae=1;P=e}else{ae=5;P=a}if(!B){if(R.strand){if(R.strand==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand_inv")}else{if(R.strand==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand_inv")}}}else{J.fillStyle=A}J.fillRect(W,U,K-W,P)}else{var G,Q;if(C==="Squish"||C==="Dense"){J.fillStyle=g;G=U+Math.floor(e/2)+1;Q=1}else{if(H){var G=U;var Q=P;if(H==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand")}else{if(H==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand")}}}else{J.fillStyle=g;G+=(e/2)+1;Q=1}}J.fillRect(W,G,K-W,Q);for(var ac=0,z=B.length;ac<z;ac++){var D=B[ac],y=Math.floor(Math.max(0,(D[0]-L)*af)),S=Math.ceil(Math.min(x,Math.max((D[1]-L)*af)));if(y>S){continue}J.fillStyle=A;J.fillRect(y,U+(P-ae)/2+1,S-y,ae);if(N!==undefined&&X>T&&!(y>ah||S<N)){var 
aa=Math.max(y,N),F=Math.min(S,ah);J.fillRect(aa,U+1,F-aa,P);if(B.length==1&&C=="Pack"){if(H==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand_inv")}else{if(H==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand_inv")}}if(aa+14<F){aa+=2;F-=2}J.fillRect(aa,U+1,F-aa,P)}}}}if(C==="Pack"&&ad>L){J.fillStyle=Y;if(L===0&&W-J.measureText(M).width<0){J.textAlign="left";J.fillText(M,K+k,U+8)}else{J.textAlign="right";J.fillText(M,W-k,U+8)}J.fillStyle=A}}}});var b=function(z,B,x,y,A){n.call(this,z,B,x,y,A)};t(b.prototype,n.prototype,{draw_element:function(Q,L,F,B,T,z,I,R,O){var F=data[i],H=F[0],P=F[1],A=F[2],K=F[3],D=Math.floor(Math.max(0,(P-T)*I)),G=Math.ceil(Math.min(O,Math.max(0,(A-T)*I))),C=(L==="Dense"?0:(0+B))*R,x,U,y=null,J=null;if(no_label){Q.fillStyle=block_color;Q.fillRect(D+left_offset,C+5,G-D,1)}else{var S=F[4],N=F[5],E=F[6];x=9;U=1;Q.fillRect(D+left_offset,C,G-D,x);if(L!=="Dense"&&K!==undefined&&P>T){Q.fillStyle=label_color;if(T===0&&D-Q.measureText(K).width<0){Q.textAlign="left";Q.fillText(K,G+2+left_offset,C+8)}else{Q.textAlign="right";Q.fillText(K,D-2+left_offset,C+8)}Q.fillStyle=block_color}var M=S+" / "+N;if(P>T&&Q.measureText(M).width<(G-D)){Q.fillStyle="white";Q.textAlign="center";Q.fillText(M,left_offset+D+(G-D)/2,C+8);Q.fillStyle=block_color}}}});var s=function(A,C,x,z,B,y){n.call(this,A,C,x,z,B);this.ref_seq=y};s.prototype.default_prefs=t({},n.prototype.default_prefs,{show_insertions:false});t(s.prototype,n.prototype,{get_row_height:function(){var x,y=this.mode;if(y==="Dense"){x=d}else{if(y==="Squish"){x=l}else{x=v;if(this.prefs.show_insertions){x*=2}}}return x},draw_read:function(T,O,K,Y,z,S,H,E,D){T.textAlign="center";var R=this,y=[Y,z],N=0,U=0,Q=0;ref_seq=this.ref_seq,char_width_px=T.canvas.manager.char_width_px;var ad=[];if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){Q=Math.round(K/2)}if(!H){H=[[0,E.length]]}for(var L=0,W=H.length;L<W;L++){var I=H[L],A="MIDNSHP=X"[I[0]],M=I[1];if(A==="H"||A==="S"){N-=M}var 
F=S+N,ac=Math.floor(Math.max(0,(F-Y)*K)),G=Math.floor(Math.max(0,(F+M-Y)*K));if(ac===G){G+=1}switch(A){case"H":break;case"S":case"M":case"=":if(is_overlap([F,F+M],y)){var P=E.slice(U,U+M);if(Q>0){T.fillStyle=this.prefs.block_color;T.fillRect(ac-Q,D+1,G-ac,9);T.fillStyle=g;for(var aa=0,x=P.length;aa<x;aa++){if(this.prefs.show_differences&&ref_seq){var J=ref_seq[F-Y+aa];if(!J||J.toLowerCase()===P[aa].toLowerCase()){continue}}if(F+aa>=Y&&F+aa<=z){var ab=Math.floor(Math.max(0,(F+aa-Y)*K));T.fillText(P[aa],ab,D+9)}}}else{T.fillStyle=this.prefs.block_color;T.fillRect(ac,D+4,G-ac,e)}}U+=M;N+=M;break;case"N":T.fillStyle=g;T.fillRect(ac-Q,D+5,G-ac,1);N+=M;break;case"D":T.fillStyle="red";T.fillRect(ac-Q,D+4,G-ac,3);N+=M;break;case"P":break;case"I":var X=ac-Q;if(is_overlap([F,F+M],y)){var P=E.slice(U,U+M);if(this.prefs.show_insertions){var C=ac-(G-ac)/2;if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){T.fillStyle="yellow";T.fillRect(C-Q,D-9,G-ac,9);ad[ad.length]={type:"triangle",data:[X,D+4,5]};T.fillStyle=g;switch(seq_tile_overlap){case (OVERLAP_START):P=P.slice(Y-F);break;case (OVERLAP_END):P=P.slice(0,F-z);break;case (CONTAINED_BY):break;case (CONTAINS):P=P.slice(Y-F,F-z);break}for(var aa=0,x=P.length;aa<x;aa++){var ab=Math.floor(Math.max(0,(F+aa-Y)*K));T.fillText(P[aa],ab-(G-ac)/2,D)}}else{T.fillStyle="yellow";T.fillRect(C,D+(this.mode!=="Dense"?2:5),G-ac,(O!=="Dense"?e:r))}}else{if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){ad[ad.length]={type:"text",data:[P.length,X,D+9]}}else{}}}U+=M;break;case"X":U+=M;break}}T.fillStyle="yellow";var Z,B,ae;for(var V=0;V<ad.length;V++){Z=ad[V];B=Z.type;ae=Z.data;if(B==="text"){T.save();T.font="bold "+T.font;T.fillText(ae[0],ae[1],ae[2]);T.restore()}else{if(B=="triangle"){p(T,ae[0],ae[1],ae[2])}}}},draw_element:function(Q,L,D,A,T,y,H,R,O){var 
G=D[0],P=D[1],z=D[2],I=D[3],C=Math.floor(Math.max(0,(P-T)*H)),E=Math.ceil(Math.min(O,Math.max(0,(z-T)*H))),B=(L==="Dense"?0:(0+A))*R,U=this.prefs.block_color,F=this.prefs.label_color,N=0;if((L==="Pack"||this.mode==="Auto")&&H>Q.canvas.manager.char_width_px){var N=Math.round(H/2)}Q.fillStyle=U;if(D[5] instanceof Array){var M=Math.floor(Math.max(0,(D[4][0]-T)*H)),K=Math.ceil(Math.min(O,Math.max(0,(D[4][1]-T)*H))),J=Math.floor(Math.max(0,(D[5][0]-T)*H)),x=Math.ceil(Math.min(O,Math.max(0,(D[5][1]-T)*H)));if(D[4][1]>=T&&D[4][0]<=y&&D[4][2]){this.draw_read(Q,L,H,T,y,D[4][0],D[4][2],D[4][3],B)}if(D[5][1]>=T&&D[5][0]<=y&&D[5][2]){this.draw_read(Q,L,H,T,y,D[5][0],D[5][2],D[5][3],B)}if(J>K){Q.fillStyle=g;o(Q,K-N,B+5,J-N,B+5)}}else{Q.fillStyle=U;this.draw_read(Q,L,H,T,y,P,D[4],D[5],B)}if(L==="Pack"&&P>T){Q.fillStyle=this.prefs.label_color;var S=1;if(S===0&&C-Q.measureText(I).width<0){Q.textAlign="left";Q.fillText(I,E+k-N,B+8)}else{Q.textAlign="right";Q.fillText(I,C-k-N,B+8)}Q.fillStyle=U}}});w.SummaryTreePainter=u;w.LinePainter=c;w.LinkedFeaturePainter=q;w.ReadPainter=s;w.VariantPainter=b};(function(d){var c={};var b=function(e){return c[e]};var a=function(f,g){var e={};g(b,e);c[f]=e};a("class",class_module);a("slotting",slotting_module);a("painters",painters_module);a("trackster",trackster_module);for(key in c.trackster){d[key]=c.trackster[key]}})(window);
\ No newline at end of file
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -172,7 +172,7 @@
// height of individual features within tracks. Feature height, then, should always be less
// than track height.
CHAR_HEIGHT_PX = 9, // FIXME: font size may not be static
- ERROR_PADDING = 18, // Padding at the top of tracks for error messages
+ ERROR_PADDING = 20, // Padding at the top of tracks for error messages
SUMMARY_TREE_TOP_PADDING = CHAR_HEIGHT_PX + 2,
// Maximum number of rows un a slotted track
MAX_FEATURE_DEPTH = 100,
@@ -369,15 +369,15 @@
//
// Set parameters based on request type.
//
+ var query_low = low;
if (req_type === this.DEEP_DATA_REQ) {
- // HACK: for now, just up the max vals and request all data; in the future,
- // need server to recognize min_vals and max_vals to specify range of data to
- // return.
- $.extend(extra_params, {max_vals: cur_data.data.length * 2});
+ // Use same interval but set start_val to skip data that's already in cur_data.
+ $.extend(extra_params, {start_val: cur_data.data.length + 1});
}
else if (req_type === this.BROAD_DATA_REQ) {
- // Set low to be past the last feature returned.
- low = cur_data.data[cur_data.length-1][2] + 1;
+ // Set query low to be past the last feature returned so that an area of extreme feature depth
+ // is bypassed.
+ query_low = cur_data.data[cur_data.data.length - 1][2] + 1;
}
//
@@ -386,14 +386,19 @@
//
var
data_manager = this,
- new_data_request = this.load_data(low, high, resolution, extra_params)
+ new_data_request = this.load_data(query_low, high, resolution, extra_params)
new_data_available = $.Deferred();
// load_data sets cache to new_data_request, but use custom deferred object so that signal and data
// is all data, not just new data.
this.set_data(low, high, mode, new_data_available);
$.when(new_data_request).then(function(result) {
+ // Update data and message.
if (result.data) {
- //result.data.append(cur_data.data);
+ result.data = cur_data.data.concat(result.data);
+ if (result.message) {
+ // HACK: replace number in message with current data length. Works but is ugly.
+ result.message = result.message.replace(/[0-9]+/, result.data.length);
+ }
}
data_manager.set_data(low, high, mode, result);
new_data_available.resolve(result);
@@ -481,8 +486,11 @@
this.top_labeltrack = $("<div/>").addClass("top-labeltrack").appendTo(this.top_container);
// Viewport for dragging tracks in center
this.viewport_container = $("<div/>").addClass("viewport-container").addClass("viewport-container").appendTo(this.content_div);
- // Future overlay?
- this.intro_div = $("<div/>").addClass("intro").text("Select a chrom from the dropdown below").hide();
+ // Introduction div shown when there are no tracks.
+ this.intro_div = $("<div/>").addClass("intro").appendTo(this.viewport_container).hide();
+ var add_tracks_button = $("<div/>").text("Add Datasets to Visualization").addClass("action-button").appendTo(this.intro_div).click(function () {
+ add_tracks();
+ });
// Another label track at bottom
this.nav_labeltrack = $("<div/>").addClass("nav-labeltrack").appendTo(this.bottom_container);
// Navigation at top
@@ -531,7 +539,6 @@
this.chrom_select.bind("change", function() {
view.change_chrom(view.chrom_select.val());
});
- this.intro_div.show();
/*
this.content_div.bind("mousewheel", function( e, delta ) {
@@ -646,6 +653,16 @@
this.reset();
$(window).trigger("resize");
+ this.update_intro_div();
+ },
+ /** Show or hide intro div depending on view state. */
+ update_intro_div: function() {
+ if (this.num_tracks === 0) {
+ this.intro_div.show();
+ }
+ else {
+ this.intro_div.hide();
+ }
},
update_location: function(low, high) {
this.location_span.text( commatize(low) + ' - ' + commatize(high) );
@@ -730,12 +747,6 @@
// Switching to local chrom.
if (chrom !== view.chrom) {
view.chrom = chrom;
- if (!view.chrom) {
- // No chrom selected
- view.intro_div.show();
- } else {
- view.intro_div.hide();
- }
view.chrom_select.val(view.chrom);
view.max_high = found.len-1; // -1 because we're using 0-based indexing.
view.reset();
@@ -805,6 +816,7 @@
sortable( track.container_div, '.draghandle' );
this.track_id_counter += 1;
this.num_tracks += 1;
+ this.update_intro_div();
},
add_label_track: function (label_track) {
label_track.view = this;
@@ -812,9 +824,13 @@
},
remove_track: function(track) {
this.has_changes = true;
- track.container_div.fadeOut('slow', function() { $(this).remove(); });
delete this.tracks[this.tracks.indexOf(track)];
this.num_tracks -= 1;
+ var view = this;
+ track.container_div.fadeOut('slow', function() {
+ $(this).remove();
+ view.update_intro_div();
+ });
},
reset: function() {
this.low = this.max_low;
@@ -1018,6 +1034,7 @@
dataset_id: this.track.original_dataset_id,
tool_id: tool.name
},
+ null,
// Success callback.
function(track_data) {
show_modal(tool.name + " is Running",
@@ -1060,7 +1077,7 @@
new_track.content_div.text("Starting job.");
// Run tool.
- this.run(url_params,
+ this.run(url_params, new_track,
// Success callback.
function(track_data) {
new_track.dataset_id = track_data.dataset_id;
@@ -1072,7 +1089,7 @@
/**
* Run tool using a set of URL params and a success callback.
*/
- run: function(url_params, success_callback) {
+ run: function(url_params, new_track, success_callback) {
// Add tool params to URL params.
$.extend(url_params, this.get_param_values_dict());
@@ -1093,7 +1110,7 @@
else if (response === "pending") {
// Converting/indexing input datasets; show message and try again.
new_track.container_div.addClass("pending");
- new_track.content_div.text("Converting input data so that it can be easily reused.");
+ new_track.content_div.text("Converting input data so that it can be used quickly with tool.");
setTimeout(json_run_tool, 2000);
}
else {
@@ -2211,18 +2228,29 @@
message_div = $("<div/>").addClass("tile-message").text(tile.message).
// -1 to account for border.
css({'height': ERROR_PADDING-1, 'width': tile.canvas.width}).appendTo(container_div),
- show_more_data_btn = $("<div/>").text("Show more").addClass("action-button").css({'padding-top': 0, 'padding-bottom':0}).appendTo(message_div);
+ more_down_icon = $("<a href='javas