galaxy-commits
December 2011
- 1 participant
- 92 discussions
commit/galaxy-central: dan: Add __nonzero__ method to ToolParameterValueWrappers. Resolves #661.
by Bitbucket 09 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/25f3c2c08e97/
changeset: 25f3c2c08e97
user: dan
date: 2011-12-09 17:10:44
summary: Add __nonzero__ method to ToolParameterValueWrappers. Resolves #661.
affected #: 1 file
diff -r 8718607f7ae5554b6a43caff974a4949eb504950 -r 25f3c2c08e97dc2b8d5ea2885d5083b887c65fba lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2141,7 +2141,14 @@
def __init__( self, value ):
self.value = value
-class RawObjectWrapper( object ):
+class ToolParameterValueWrapper( object ):
+ """
+ Base class for object that Wraps a Tool Parameter and Value.
+ """
+ def __nonzero__( self ):
+ return bool( self.value )
+
+class RawObjectWrapper( ToolParameterValueWrapper ):
"""
Wraps an object so that __str__ returns module_name:class_name.
"""
@@ -2152,7 +2159,7 @@
def __getattr__( self, key ):
return getattr( self.obj, key )
-class LibraryDatasetValueWrapper( object ):
+class LibraryDatasetValueWrapper( ToolParameterValueWrapper ):
"""
Wraps an input so that __str__ gives the "param_dict" representation.
"""
@@ -2173,7 +2180,7 @@
def __getattr__( self, key ):
return getattr( self.value, key )
-class InputValueWrapper( object ):
+class InputValueWrapper( ToolParameterValueWrapper ):
"""
Wraps an input so that __str__ gives the "param_dict" representation.
"""
@@ -2186,7 +2193,7 @@
def __getattr__( self, key ):
return getattr( self.value, key )
-class SelectToolParameterWrapper( object ):
+class SelectToolParameterWrapper( ToolParameterValueWrapper ):
"""
Wraps a SelectTooParameter so that __str__ returns the selected value, but all other
attributes are accessible.
@@ -2218,7 +2225,7 @@
def __getattr__( self, key ):
return getattr( self.input, key )
-class DatasetFilenameWrapper( object ):
+class DatasetFilenameWrapper( ToolParameterValueWrapper ):
"""
Wraps a dataset so that __str__ returns the filename, but all other
attributes are accessible.
@@ -2278,6 +2285,9 @@
return self.false_path
else:
return getattr( self.dataset, key )
+
+ def __nonzero__( self ):
+ return bool( self.dataset )
def json_fix( val ):
if isinstance( val, list ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
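For context, a minimal Python 2 sketch of the truthiness hook this commit introduces; the class below is illustrative, not Galaxy's actual code:

class Wrapper(object):
    """Wraps a tool parameter value (simplified)."""
    def __init__(self, value):
        self.value = value
    def __nonzero__(self):
        # Python 2 truth hook: bool(wrapper) now reflects the wrapped
        # value instead of always being True for any object instance.
        return bool(self.value)

print bool(Wrapper(''))      # False -- empty value
print bool(Wrapper('chr1'))  # True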
commit/galaxy-central: dannon: Update collect_child_datasets to work with ObjectStore.
by Bitbucket 09 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8718607f7ae5/
changeset: 8718607f7ae5
user: dannon
date: 2011-12-09 16:19:20
summary: Update collect_child_datasets to work with ObjectStore.
affected #: 1 file
diff -r 187267753315c372a684a5170d053a6b561aebb5 -r 8718607f7ae5554b6a43caff974a4949eb504950 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1890,7 +1890,7 @@
sa_session=self.sa_session )
self.app.security_agent.copy_dataset_permissions( outdata.dataset, child_dataset.dataset )
# Move data from temp location to dataset location
- shutil.move( filename, child_dataset.file_name )
+ self.app.object_store.update_from_file(child_dataset.dataset.id, filename, create=True)
self.sa_session.add( child_dataset )
self.sa_session.flush()
child_dataset.set_size()
@@ -1902,7 +1902,7 @@
job = None
for assoc in outdata.creating_job_associations:
job = assoc.job
- break
+ break
if job:
assoc = self.app.model.JobToOutputDatasetAssociation( '__new_child_file_%s|%s__' % ( name, designation ), child_dataset )
assoc.job = job
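The same pattern recurs throughout the object store work: rather than moving a temp file into a dataset path with shutil.move, the code hands the file to the store. A hedged sketch of the call shape, with a toy disk-backed store standing in for Galaxy's app.object_store:

import os
import shutil

class ToyObjectStore(object):
    """Stand-in for app.object_store; paths and behavior are simplified."""
    def __init__(self, root):
        self.root = root
    def _path(self, dataset_id):
        return os.path.join(self.root, 'dataset_%s.dat' % dataset_id)
    def update_from_file(self, dataset_id, filename, create=False):
        if create and not os.path.exists(self._path(dataset_id)):
            open(self._path(dataset_id), 'w').close()
        # Copy the collected temp file into the managed location --
        # the call that replaced shutil.move above.
        shutil.copy(filename, self._path(dataset_id))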
commit/galaxy-central: dannon: Fix variable multiple outputs (collect_primary_datasets) to work with ObjectStore.
by Bitbucket 09 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/187267753315/
changeset: 187267753315
user: dannon
date: 2011-12-09 15:50:15
summary: Fix variable multiple outputs (collect_primary_datasets) to work with ObjectStore.
affected #: 1 file
diff -r 1da0c76f4000d6627d6ce66a7de55a6b80753661 -r 187267753315c372a684a5170d053a6b561aebb5 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1958,7 +1958,7 @@
self.sa_session.add( primary_data )
self.sa_session.flush()
# Move data from temp location to dataset location
- shutil.move( filename, primary_data.file_name )
+ self.app.object_store.update_from_file(primary_data.dataset.id, filename, create=True)
primary_data.set_size()
primary_data.name = "%s (%s)" % ( outdata.name, designation )
primary_data.info = outdata.info
@@ -1970,7 +1970,7 @@
job = None
for assoc in outdata.creating_job_associations:
job = assoc.job
- break
+ break
if job:
assoc = self.app.model.JobToOutputDatasetAssociation( '__new_primary_file_%s|%s__' % ( name, designation ), primary_data )
assoc.job = job
commit/galaxy-central: natefoo: Fix unit test broken by object store.
by Bitbucket 09 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1da0c76f4000/
changeset: 1da0c76f4000
user: natefoo
date: 2011-12-09 15:40:40
summary: Fix unit test broken by object store.
affected #: 2 files
diff -r 3ccece0dbc02812da682481bf1196d69bcc38d67 -r 1da0c76f4000d6627d6ce66a7de55a6b80753661 lib/galaxy/model/mapping_tests.py
--- a/lib/galaxy/model/mapping_tests.py
+++ b/lib/galaxy/model/mapping_tests.py
@@ -36,8 +36,9 @@
assert hists[0].user == users[0]
assert hists[1].user is None
assert hists[1].datasets[0].metadata.chromCol == 1
- id = hists[1].datasets[0].id
- assert hists[1].datasets[0].file_name == os.path.join( "/tmp", *directory_hash_id( id ) ) + ( "/dataset_%d.dat" % id )
+ # The filename test has moved to objecstore
+ #id = hists[1].datasets[0].id
+ #assert hists[1].datasets[0].file_name == os.path.join( "/tmp", *directory_hash_id( id ) ) + ( "/dataset_%d.dat" % id )
# Do an update and check
hists[1].name = "History 2b"
model.session.flush()
diff -r 3ccece0dbc02812da682481bf1196d69bcc38d67 -r 1da0c76f4000d6627d6ce66a7de55a6b80753661 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -200,6 +200,15 @@
"""
Standard Galaxy object store, stores objects in files under a specific
directory on disk.
+
+ >>> from galaxy.util.bunch import Bunch
+ >>> import tempfile
+ >>> file_path=tempfile.mkdtemp()
+ >>> s = DiskObjectStore(Bunch(umask=077), file_path=file_path)
+ >>> s.create(1)
+ >>> s.exists(1)
+ True
+ >>> assert s.get_filename(1) == file_path + '/000/dataset_1.dat'
"""
def __init__(self, config, file_path=None, extra_dirs=None):
super(DiskObjectStore, self).__init__()
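The docstring added here doubles as a test. One way to exercise module doctests like this one (assuming Galaxy's lib/ directory is on sys.path):

import doctest
import galaxy.objectstore

# Runs every >>> example embedded in the module's docstrings,
# including the DiskObjectStore example added in this commit.
doctest.testmod(galaxy.objectstore, verbose=True)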
commit/galaxy-central: dannon: Job.fail() failure bugfix.
by Bitbucket 09 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3ccece0dbc02/
changeset: 3ccece0dbc02
user: dannon
date: 2011-12-09 13:01:31
summary: Job.fail() failure bugfix.
affected #: 1 file
diff -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 -r 3ccece0dbc02812da682481bf1196d69bcc38d67 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -500,7 +500,7 @@
# If the job was deleted, call tool specific fail actions (used for e.g. external metadata) and clean up
if self.tool:
self.tool.job_failed( self, message, exception )
- if self.app.cleanup_job == 'always' or (self.app.config.cleanup_job == 'onsuccess' and job.state == job.states.DELETED):
+ if self.app.config.cleanup_job == 'always' or (self.app.config.cleanup_job == 'onsuccess' and job.state == job.states.DELETED):
self.cleanup()
def change_state( self, state, info = False ):
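The fix corrects an attribute lookup: cleanup_job lives on app.config, not on app. Isolated as a hedged sketch, the corrected condition reads:

def should_cleanup(cleanup_job, job_state, deleted_state='deleted'):
    # cleanup_job is a config string such as 'always' or 'onsuccess';
    # a deleted job is cleaned up even under 'onsuccess'
    # (function and state names here are illustrative).
    return cleanup_job == 'always' or (
        cleanup_job == 'onsuccess' and job_state == deleted_state)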
commit/galaxy-central: natefoo: Rename HierarchicalObjectStore to DistributedObjectStore.
by Bitbucket 08 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7611d5d306bb/
changeset: 7611d5d306bb
user: natefoo
date: 2011-12-08 23:46:59
summary: Rename HierarchicalObjectStore to DistributedObjectStore.
affected #: 5 files
diff -r c6735493b09f507e02564e6c79f9fef39d7bf7fa -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 distributed_object_store_conf.xml.sample
--- /dev/null
+++ b/distributed_object_store_conf.xml.sample
@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+<backends>
+ <backend name="files1" type="disk" weight="1">
+ <files_dir path="database/files1"/>
+ <extra_dir type="temp" path="database/tmp1"/>
+ <extra_dir type="job_work" path="database/job_working_directory1"/>
+ </backend>
+ <backend name="files2" type="disk" weight="1">
+ <files_dir path="database/files2"/>
+ <extra_dir type="temp" path="database/tmp2"/>
+ <extra_dir type="job_work" path="database/job_working_directory2"/>
+ </backend>
+</backends>
diff -r c6735493b09f507e02564e6c79f9fef39d7bf7fa -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 hierarchical_object_store_conf.xml.sample
--- a/hierarchical_object_store_conf.xml.sample
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0"?>
-<backends>
- <backend name="files1" type="disk" weight="1">
- <files_dir path="database/files1"/>
- <extra_dir type="temp" path="database/tmp1"/>
- <extra_dir type="job_work" path="database/job_working_directory1"/>
- </backend>
- <backend name="files2" type="disk" weight="1">
- <files_dir path="database/files2"/>
- <extra_dir type="temp" path="database/tmp2"/>
- <extra_dir type="job_work" path="database/job_working_directory2"/>
- </backend>
-</backends>
diff -r c6735493b09f507e02564e6c79f9fef39d7bf7fa -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -156,7 +156,7 @@
self.s3_bucket = kwargs.get( 's3_bucket', None)
self.use_reduced_redundancy = kwargs.get( 'use_reduced_redundancy', False )
self.object_store_cache_size = float(kwargs.get( 'object_store_cache_size', -1 ))
- self.hierarchical_object_store_config_file = kwargs.get( 'hierarchical_object_store_config_file', None )
+ self.distributed_object_store_config_file = kwargs.get( 'distributed_object_store_config_file', None )
# Parse global_conf and save the parser
global_conf = kwargs.get( 'global_conf', None )
global_conf_parser = ConfigParser.ConfigParser()
diff -r c6735493b09f507e02564e6c79f9fef39d7bf7fa -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -854,7 +854,7 @@
return None
-class HierarchicalObjectStore(ObjectStore):
+class DistributedObjectStore(ObjectStore):
"""
ObjectStore that defers to a list of backends, for getting objects the
first store where the object exists is used, objects are created in a
@@ -862,22 +862,22 @@
"""
def __init__(self, config):
- super(HierarchicalObjectStore, self).__init__()
- assert config is not None, "hierarchical object store ('object_store = hierarchical') " \
+ super(DistributedObjectStore, self).__init__()
+ assert config is not None, "distributed object store ('object_store = distributed') " \
"requires a config file, please set one in " \
- "'hierarchical_object_store_config_file')"
- self.hierarchical_config = config
+ "'distributed_object_store_config_file')"
+ self.distributed_config = config
self.backends = {}
self.weighted_backend_names = []
random.seed()
- self.__parse_hierarchical_config(config)
+ self.__parse_distributed_config(config)
- def __parse_hierarchical_config(self, config):
- tree = util.parse_xml(self.hierarchical_config)
+ def __parse_distributed_config(self, config):
+ tree = util.parse_xml(self.distributed_config)
root = tree.getroot()
- log.debug('Loading backends for hierarchical object store from %s' % self.hierarchical_config)
+ log.debug('Loading backends for distributed object store from %s' % self.distributed_config)
for elem in [ e for e in root if e.tag == 'backend' ]:
name = elem.get('name')
weight = int(elem.get('weight', 1))
@@ -980,6 +980,16 @@
return store
return None
+class HierarchicalObjectStore(ObjectStore):
+ """
+ ObjectStore that defers to a list of backends, for getting objects the
+ first store where the object exists is used, objects are always created
+ in the first store.
+ """
+
+ def __init__(self, backends=[]):
+ super(HierarchicalObjectStore, self).__init__()
+
def build_object_store_from_config(config):
""" Depending on the configuration setting, invoke the appropriate object store
"""
@@ -990,8 +1000,10 @@
os.environ['AWS_ACCESS_KEY_ID'] = config.aws_access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = config.aws_secret_key
return S3ObjectStore(config=config)
+ elif store == 'distributed':
+ return DistributedObjectStore(config.distributed_object_store_config_file)
elif store == 'hierarchical':
- return HierarchicalObjectStore(config.hierarchical_object_store_config_file)
+ return HierarchicalObjectStore()
def convert_bytes(bytes):
""" A helper function used for pretty printing disk usage """
diff -r c6735493b09f507e02564e6c79f9fef39d7bf7fa -r 7611d5d306bb462c803b36d236cebf874ba9e1b8 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -437,7 +437,7 @@
# -- Beta features
-# Object store mode (valid options are: disk, s3, hierarchical)
+# Object store mode (valid options are: disk, s3, distributed, hierarchical)
#object_store = s3
#aws_access_key = <AWS access key>
#aws_secret_key = <AWS secret key>
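A minimal sketch of the weight semantics in the sample XML above: repeating each backend name weight-many times makes random.choice() pick backends in proportion to their weights (the dict below is illustrative; Galaxy parses the names and weights from the config file):

import random

backend_weights = {'files1': 1, 'files2': 1}  # as in the sample config
weighted_backend_names = []
for name, weight in backend_weights.items():
    # A backend with weight 2 would appear twice, so it is chosen
    # twice as often when a new dataset is created.
    weighted_backend_names.extend([name] * weight)

print random.choice(weighted_backend_names)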
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7f783a4889f7/
changeset: 7f783a4889f7
user: natefoo
date: 2011-12-08 23:34:19
summary: Object store bug fix (direct filesystem access in the default tool action)
affected #: 1 file
diff -r 8d668e1d51520f7663915ff5535852e47a7dd235 -r 7f783a4889f74bacf61a97a8e8d39ebda1cc9b9d lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -291,14 +291,11 @@
trans.sa_session.flush()
trans.app.security_agent.set_all_dataset_permissions( data.dataset, output_permissions )
# Create an empty file immediately
- # open( data.file_name, "w" ).close()
trans.app.object_store.create( data.id, store_name=store_name )
if not store_name_set:
# Ensure all other datasets in this job are created in the same store
store_name = trans.app.object_store.store_name( data.id )
store_name_set = True
- # Fix permissions
- util.umask_fix_perms( data.file_name, trans.app.config.umask, 0666 )
# This may not be neccesary with the new parent/child associations
data.designation = name
# Copy metadata from one of the inputs if requested.
https://bitbucket.org/galaxy/galaxy-central/changeset/95529ba09179/
changeset: 95529ba09179
user: natefoo
date: 2011-12-08 23:45:48
summary: Missing part of the last commit.
affected #: 1 file
diff -r 7f783a4889f74bacf61a97a8e8d39ebda1cc9b9d -r 95529ba09179dcd08da79f1c67151a138284e8b1 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -204,6 +204,7 @@
def __init__(self, config, file_path=None, extra_dirs=None):
super(DiskObjectStore, self).__init__()
self.file_path = file_path or config.file_path
+ self.config = config
if extra_dirs is not None:
self.extra_dirs = extra_dirs
@@ -300,6 +301,7 @@
if not dir_only:
path = os.path.join(path, alt_name if alt_name else "dataset_%s.dat" % dataset_id)
open(path, 'w').close()
+ util.umask_fix_perms( path, self.config.umask, 0666 )
def empty(self, dataset_id, **kwargs):
return os.path.getsize(self.get_filename(dataset_id, **kwargs)) > 0
https://bitbucket.org/galaxy/galaxy-central/changeset/c6735493b09f/
changeset: c6735493b09f
user: natefoo
date: 2011-12-08 23:46:05
summary: Merge.
affected #: 1 file
diff -r 95529ba09179dcd08da79f1c67151a138284e8b1 -r c6735493b09f507e02564e6c79f9fef39d7bf7fa lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -224,7 +224,7 @@
int( self.value )
except:
raise ValueError( "An integer is required" )
- elif self.value is None:
+ elif self.value is None and not self.optional:
raise ValueError( "The settings for the field named '%s' require a 'value' setting and optionally a default value which must be an integer" % self.name )
self.min = elem.get( 'min' )
self.max = elem.get( 'max' )
@@ -296,7 +296,7 @@
float( self.value )
except:
raise ValueError( "A real number is required" )
- elif self.value is None:
+ elif self.value is None and not self.optional:
raise ValueError( "The settings for this field require a 'value' setting and optionally a default value which must be a real number" )
if self.min:
try:
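The second changeset moves permission fixing into the store's create(). A hedged sketch of a umask-based fix-up; the real util.umask_fix_perms also takes a gid (as the S3 store's calls further down show) and is more careful than this:

import os

def umask_fix_perms(path, umask, unmasked_perms):
    # Grant the broadest permissions that survive the configured umask,
    # e.g. 0666 & ~077 == 0600 (simplified; group handling omitted).
    os.chmod(path, unmasked_perms & ~umask)

umask_fix_perms('/tmp/dataset_1.dat', 077, 0666)  # path is illustrative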
commit/galaxy-central: dan: Allow not specifying a value attribute for an optional tool parameter. Partially resolves #661.
by Bitbucket 08 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/75c27994fc9a/
changeset: 75c27994fc9a
user: dan
date: 2011-12-08 22:57:12
summary: Allow not specifying a value attribute for an optional tool parameter. Partially resolves #661.
affected #: 1 file
diff -r 8d668e1d51520f7663915ff5535852e47a7dd235 -r 75c27994fc9aa265bdecd6ad7962f0c4f51b3682 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -224,7 +224,7 @@
int( self.value )
except:
raise ValueError( "An integer is required" )
- elif self.value is None:
+ elif self.value is None and not self.optional:
raise ValueError( "The settings for the field named '%s' require a 'value' setting and optionally a default value which must be an integer" % self.name )
self.min = elem.get( 'min' )
self.max = elem.get( 'max' )
@@ -296,7 +296,7 @@
float( self.value )
except:
raise ValueError( "A real number is required" )
- elif self.value is None:
+ elif self.value is None and not self.optional:
raise ValueError( "The settings for this field require a 'value' setting and optionally a default value which must be a real number" )
if self.min:
try:
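In effect, the validator now tolerates a missing value attribute only when the parameter is marked optional. A compact sketch of the relaxed check (constructor and attribute names are illustrative, not Galaxy's exact code):

class IntegerToolParameter(object):
    def __init__(self, name, value=None, optional=False):
        self.name, self.value, self.optional = name, value, optional
        if self.value:
            int(self.value)  # still rejects non-integer values
        elif self.value is None and not self.optional:
            # Only mandatory parameters must declare a value.
            raise ValueError("The field named '%s' requires a 'value' setting" % self.name)

p = IntegerToolParameter('max_hits', optional=True)  # no longer raises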
20 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f25342f0e100/
changeset: f25342f0e100
user: afgane
date: 2011-07-05 23:48:22
summary: A very much in-progress code implementation of the ObjectStore - most of the functionality exists and works for interaction with a local file system and S3. Setting of the metadata does not work (empty files are created but never filled with content). Not sure if rerunning jobs with dependent datasets that have been deleted from cache works - some tools at least do.
affected #: 17 files
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -7,6 +7,7 @@
import galaxy.model
import galaxy.datatypes.registry
import galaxy.security
+from galaxy.objectstore import build_object_store_from_config
from galaxy.tags.tag_handler import GalaxyTagHandler
from galaxy.tools.imp_exp import load_history_imp_exp_tools
from galaxy.sample_tracking import external_service_types
@@ -30,12 +31,15 @@
# Initialize database / check for appropriate schema version
from galaxy.model.migrate.check import create_or_verify_database
create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options )
+ # Object store manager
+ self.object_store = build_object_store_from_config(self)
# Setup the database engine and ORM
from galaxy.model import mapping
self.model = mapping.init( self.config.file_path,
db_url,
self.config.database_engine_options,
- database_query_profiling_proxy = self.config.database_query_profiling_proxy )
+ database_query_profiling_proxy = self.config.database_query_profiling_proxy,
+ object_store = self.object_store )
# Security helper
self.security = security.SecurityHelper( id_secret=self.config.id_secret )
# Tag handler
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -131,6 +131,12 @@
self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
if self.nginx_upload_store:
self.nginx_upload_store = os.path.abspath( self.nginx_upload_store )
+ self.object_store = kwargs.get( 'object_store', 'disk' )
+ self.aws_access_key = kwargs.get( 'aws_access_key', None )
+ self.aws_secret_key = kwargs.get( 'aws_secret_key', None )
+ self.s3_bucket = kwargs.get( 's3_bucket', None)
+ self.use_reduced_redundancy = kwargs.get( 'use_reduced_redundancy', False )
+ self.object_store_cache_size = float(kwargs.get( 'object_store_cache_size', -1 ))
# Parse global_conf and save the parser
global_conf = kwargs.get( 'global_conf', None )
global_conf_parser = ConfigParser.ConfigParser()
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py
+++ b/lib/galaxy/datatypes/genetics.py
@@ -636,7 +636,7 @@
def set_peek( self, dataset, **kwd ):
"""
expects a .pheno file in the extra_files_dir - ugh
- note that R is wierd and does not include the row.name in
+ note that R is weird and does not include the row.name in
the header. why?"""
if not dataset.dataset.purged:
pp = os.path.join(dataset.extra_files_path,'%s.pheno' % dataset.metadata.base_name)
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -208,7 +208,7 @@
log.error( "unknown job state '%s' for job %d" % ( job_state, job.id ) )
if not self.track_jobs_in_database:
new_waiting_jobs.append( job.id )
- except Exception, e:
+ except Exception:
log.exception( "failure running job %d" % job.id )
# Update the waiting list
self.waiting_jobs = new_waiting_jobs
@@ -332,7 +332,6 @@
out_data = dict( [ ( da.name, da.dataset ) for da in job.output_datasets ] )
inp_data.update( [ ( da.name, da.dataset ) for da in job.input_library_datasets ] )
out_data.update( [ ( da.name, da.dataset ) for da in job.output_library_datasets ] )
-
# Set up output dataset association for export history jobs. Because job
# uses a Dataset rather than an HDA or LDA, it's necessary to set up a
# fake dataset association that provides the needed attributes for
@@ -428,6 +427,10 @@
dataset.dataset.set_total_size()
if dataset.ext == 'auto':
dataset.extension = 'data'
+ # Update (non-library) job output datasets through the object store
+ if dataset not in job.output_library_datasets:
+ print "====== Handing failed job's dataset '%s' with name '%s' to object store" % (dataset.id, dataset.file_name)
+ self.app.object_store.update_from_file(dataset.id, create=True)
self.sa_session.add( dataset )
self.sa_session.flush()
job.state = model.Job.states.ERROR
@@ -538,11 +541,14 @@
else:
# Security violation.
log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, self.working_directory ) )
-
dataset.blurb = 'done'
dataset.peek = 'no peek'
dataset.info = context['stdout'] + context['stderr']
dataset.set_size()
+ # Update (non-library) job output datasets through the object store
+ if dataset not in job.output_library_datasets:
+ print "===+=== Handing dataset '%s' with name '%s' to object store" % (dataset.id, dataset.file_name)
+ self.app.object_store.update_from_file(dataset.id, create=True)
if context['stderr']:
dataset.blurb = "error"
elif dataset.has_data():
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -522,6 +522,7 @@
FAILED_METADATA = 'failed_metadata' )
permitted_actions = get_permitted_actions( filter='DATASET' )
file_path = "/tmp/"
+ object_store = None # This get initialized in mapping.py (method init) by app.py
engine = None
def __init__( self, id=None, state=None, external_filename=None, extra_files_path=None, file_size=None, purgable=True ):
self.id = id
@@ -535,17 +536,14 @@
def get_file_name( self ):
if not self.external_filename:
assert self.id is not None, "ID must be set before filename used (commit the object)"
- # First try filename directly under file_path
- filename = os.path.join( self.file_path, "dataset_%d.dat" % self.id )
- # Only use that filename if it already exists (backward compatibility),
- # otherwise construct hashed path
- if not os.path.exists( filename ):
- dir = os.path.join( self.file_path, *directory_hash_id( self.id ) )
+ assert self.object_store is not None, "Object Store has not been initialized for dataset %s" % self.id
+ print "Calling get_filename 1", self.object_store
+ filename = self.object_store.get_filename( self.id )
+ # print 'getting filename: ', filename
+ if not self.object_store.exists( self.id ):
# Create directory if it does not exist
- if not os.path.exists( dir ):
- os.makedirs( dir )
- # Return filename inside hashed directory
- return os.path.abspath( os.path.join( dir, "dataset_%d.dat" % self.id ) )
+ self.object_store.create( self.id, dir_only=True )
+ return filename
else:
filename = self.external_filename
# Make filename absolute
@@ -558,15 +556,8 @@
file_name = property( get_file_name, set_file_name )
@property
def extra_files_path( self ):
- if self._extra_files_path:
- path = self._extra_files_path
- else:
- path = os.path.join( self.file_path, "dataset_%d_files" % self.id )
- #only use path directly under self.file_path if it exists
- if not os.path.exists( path ):
- path = os.path.join( os.path.join( self.file_path, *directory_hash_id( self.id ) ), "dataset_%d_files" % self.id )
- # Make path absolute
- return os.path.abspath( path )
+ print "Calling get_filename 2", self.object_store
+ return self.object_store.get_filename( self.id, dir_only=True, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id)
def get_size( self, nice_size=False ):
"""Returns the size of the data on disk"""
if self.file_size:
@@ -575,20 +566,14 @@
else:
return self.file_size
else:
- try:
- if nice_size:
- return galaxy.datatypes.data.nice_size( os.path.getsize( self.file_name ) )
- else:
- return os.path.getsize( self.file_name )
- except OSError:
- return 0
+ if nice_size:
+ return galaxy.datatypes.data.nice_size( self.object_store.size(self.id) )
+ else:
+ return self.object_store.size(self.id)
def set_size( self ):
"""Returns the size of the data on disk"""
- try:
- if not self.file_size:
- self.file_size = os.path.getsize( self.file_name )
- except OSError:
- self.file_size = 0
+ if not self.file_size:
+ self.file_size = self.object_store.size(self.id)
def get_total_size( self ):
if self.total_size is not None:
return self.total_size
@@ -603,8 +588,9 @@
if self.file_size is None:
self.set_size()
self.total_size = self.file_size or 0
- for root, dirs, files in os.walk( self.extra_files_path ):
- self.total_size += sum( [ os.path.getsize( os.path.join( root, file ) ) for file in files ] )
+ if self.object_store.exists(self.id, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id, dir_only=True):
+ for root, dirs, files in os.walk( self.extra_files_path ):
+ self.total_size += sum( [ os.path.getsize( os.path.join( root, file ) ) for file in files ] )
def has_data( self ):
"""Detects whether there is any data"""
return self.get_size() > 0
@@ -620,10 +606,7 @@
# FIXME: sqlalchemy will replace this
def _delete(self):
"""Remove the file that corresponds to this data"""
- try:
- os.remove(self.data.file_name)
- except OSError, e:
- log.critical('%s delete error %s' % (self.__class__.__name__, e))
+ self.object_store.delete(self.id)
@property
def user_can_purge( self ):
return self.purged == False \
@@ -631,9 +614,12 @@
and len( self.history_associations ) == len( self.purged_history_associations )
def full_delete( self ):
"""Remove the file and extra files, marks deleted and purged"""
- os.unlink( self.file_name )
- if os.path.exists( self.extra_files_path ):
- shutil.rmtree( self.extra_files_path )
+ # os.unlink( self.file_name )
+ self.object_store.delete(self.id)
+ if self.object_store.exists(self.id, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id, dir_only=True):
+ self.object_store.delete(self.id, entire_dir=True, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id, dir_only=True)
+ # if os.path.exists( self.extra_files_path ):
+ # shutil.rmtree( self.extra_files_path )
# TODO: purge metadata files
self.deleted = True
self.purged = True
@@ -1595,16 +1581,32 @@
@property
def file_name( self ):
assert self.id is not None, "ID must be set before filename used (commit the object)"
- path = os.path.join( Dataset.file_path, '_metadata_files', *directory_hash_id( self.id ) )
- # Create directory if it does not exist
+ # Ensure the directory structure and the metadata file object exist
try:
- os.makedirs( path )
- except OSError, e:
- # File Exists is okay, otherwise reraise
- if e.errno != errno.EEXIST:
- raise
- # Return filename inside hashed directory
- return os.path.abspath( os.path.join( path, "metadata_%d.dat" % self.id ) )
+ # self.history_dataset
+ # print "Dataset.file_path: %s, self.id: %s, self.history_dataset.dataset.object_store: %s" \
+ # % (Dataset.file_path, self.id, self.history_dataset.dataset.object_store)
+ self.history_dataset.dataset.object_store.create( self.id, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name="metadata_%d.dat" % self.id )
+ print "Calling get_filename 3", self.object_store
+ path = self.history_dataset.dataset.object_store.get_filename( self.id, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name="metadata_%d.dat" % self.id )
+ print "Created metadata file at path: %s" % path
+ self.library_dataset
+ # raise
+ return path
+ except AttributeError:
+ # In case we're not working with the history_dataset
+ # print "Caught AttributeError"
+ path = os.path.join( Dataset.file_path, '_metadata_files', *directory_hash_id( self.id ) )
+ # Create directory if it does not exist
+ try:
+ os.makedirs( path )
+ except OSError, e:
+ # File Exists is okay, otherwise reraise
+ if e.errno != errno.EEXIST:
+ raise
+ # Return filename inside hashed directory
+ return os.path.abspath( os.path.join( path, "metadata_%d.dat" % self.id ) )
+
class FormDefinition( object, APIItem ):
# The following form_builder classes are supported by the FormDefinition class.
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1709,10 +1709,12 @@
# Let this go, it could possibly work with db's we don't support
log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False ):
+def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None ):
"""Connect mappings to the database"""
# Connect dataset to the file path
Dataset.file_path = file_path
+ # Connect dataset to object store
+ Dataset.object_store = object_store
# Load the appropriate db module
load_egg_for_url( url )
# Should we use the logging proxy?
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/objectstore/__init__.py
--- /dev/null
+++ b/lib/galaxy/objectstore/__init__.py
@@ -0,0 +1,859 @@
+"""
+objectstore package, abstraction for storing blobs of data for use in Galaxy,
+all providers ensure that data can be accessed on the filesystem for running
+tools
+"""
+
+import os
+import time
+import shutil
+import logging
+import threading
+import subprocess
+import multiprocessing
+from datetime import datetime
+
+from galaxy import util
+from galaxy.jobs import Sleeper
+from galaxy.model import directory_hash_id
+from galaxy.objectstore.s3_multipart_upload import multipart_upload
+
+from boto.s3.key import Key
+from boto.s3.connection import S3Connection
+from boto.exception import S3ResponseError
+
+log = logging.getLogger( __name__ )
+logging.getLogger('boto').setLevel(logging.INFO) # Otherwise boto is quite noisy
+
+
+class ObjectNotFound(Exception):
+ """ Accessed object was not found """
+ pass
+
+
+class ObjectStore(object):
+ """
+ ObjectStore abstract interface
+ """
+ def __init__(self):
+ self.running = True
+
+ def shutdown(self):
+ self.running = False
+
+ def exists(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ """
+ Returns True if the object identified by `dataset_id` exists in this
+ file store, False otherwise.
+
+ FIELD DESCRIPTIONS (these apply to all the methods in this class):
+ :type dataset_id: int
+ :param dataset_id: Galaxy-assigned database ID of the dataset to be checked.
+
+ :type dir_only: bool
+ :param dir_only: If True, check only the path where the file
+ identified by `dataset_id` should be located, not the
+ dataset itself. This option applies to `extra_dir`
+ argument as well.
+
+ :type extra_dir: string
+ :param extra_dir: Append `extra_dir` to the directory structure where
+ the dataset identified by `dataset_id` should be located.
+ (e.g., 000/extra_dir/dataset_id)
+
+ :type extra_dir_at_root: bool
+ :param extra_dir_at_root: Applicable only if `extra_dir` is set.
+ If True, the `extra_dir` argument is placed at
+ root of the created directory structure rather
+ than at the end (e.g., extra_dir/000/dataset_id
+ vs. 000/extra_dir/dataset_id)
+
+ :type alt_name: string
+ :param alt_name: Use this name as the alternative name for the created
+ dataset rather than the default.
+ """
+ raise NotImplementedError()
+
+ def file_ready(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ """ A helper method that checks if a file corresponding to a dataset
+ is ready and available to be used. Return True if so, False otherwise."""
+ return True
+
+ def create(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ """
+ Mark the object identified by `dataset_id` as existing in the store, but
+ with no content. This method will create a proper directory structure for
+ the file if the directory does not already exist.
+ See `exists` method for the description of the fields.
+ """
+ raise NotImplementedError()
+
+ def empty(self, dataset_id, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ """
+ Test if the object identified by `dataset_id` has content.
+ If the object does not exist raises `ObjectNotFound`.
+ See `exists` method for the description of the fields.
+ """
+ raise NotImplementedError()
+
+ def size(self, dataset_id, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ """
+ Return size of the object identified by `dataset_id`.
+ If the object does not exist, return 0.
+ See `exists` method for the description of the fields.
+ """
+ raise NotImplementedError()
+
+ def delete(self, dataset_id, entire_dir=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ """
+ Deletes the object identified by `dataset_id`.
+ See `exists` method for the description of other fields.
+ :type entire_dir: bool
+ :param entire_dir: If True, delete the entire directory pointed to by
+ extra_dir. For safety reasons, this option applies
+ only for and in conjunction with the extra_dir option.
+ """
+ raise NotImplementedError()
+
+ def get_data(self, dataset_id, start=0, count=-1, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ """
+ Fetch `count` bytes of data starting at offset `start` from the
+ object identified uniquely by `dataset_id`.
+ If the object does not exist raises `ObjectNotFound`.
+ See `exists` method for the description of other fields.
+
+ :type start: int
+ :param start: Set the position to start reading the dataset file
+
+ :type count: int
+ :param count: Read at most `count` bytes from the dataset
+ """
+ raise NotImplementedError()
+
+ def get_filename(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ """
+ Get the expected filename (including the absolute path) which can be used
+ to access the contents of the object uniquely identified by `dataset_id`.
+ See `exists` method for the description of the fields.
+ """
+ raise NotImplementedError()
+
+ def update_from_file(self, dataset_id, extra_dir=None, extra_dir_at_root=False, alt_name=None, filename=None, create=False):
+ """
+ Inform the store that the file associated with the object has been
+ updated. If `filename` is provided, update from that file instead
+ of the default.
+ If the object does not exist raises `ObjectNotFound`.
+ See `exists` method for the description of other fields.
+
+ :type filename: string
+ :param filename: Use file pointed to by `filename` as the source for
+ updating the dataset identified by `dataset_id`
+
+ :type create: bool
+ :param create: If True and the default dataset does not exist, create it first.
+ """
+ raise NotImplementedError()
+
+ def get_object_url(self, dataset_id, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ """
+ If the store supports direct URL access, return a URL. Otherwise return
+ None.
+ Note: need to be careful to to bypass dataset security with this.
+ See `exists` method for the description of the fields.
+ """
+ raise NotImplementedError()
+
+ ## def get_staging_command( id ):
+ ## """
+ ## Return a shell command that can be prepended to the job script to stage the
+ ## dataset -- runs on worker nodes.
+ ##
+ ## Note: not sure about the interface here. Should this return a filename, command
+ ## tuple? Is this even a good idea, seems very useful for S3, other object stores?
+ ## """
+
+
+class DiskObjectStore(ObjectStore):
+ """
+ Standard Galaxy object store, stores objects in files under a specific
+ directory on disk.
+ """
+ def __init__(self, app):
+ super(DiskObjectStore, self).__init__()
+ self.file_path = app.config.file_path
+
+ def _get_filename(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ """Class method that returns the absolute path for the file corresponding
+ to the `dataset_id` regardless of whether the file exists.
+ """
+ path = self._construct_path(dataset_id, dir_only=dir_only, extra_dir=extra_dir, extra_dir_at_root=extra_dir_at_root, alt_name=alt_name, old_style=True)
+ # For backward compatibility, check the old style root path first; otherwise,
+ # construct hashed path
+ if not os.path.exists(path):
+ return self._construct_path(dataset_id, dir_only=dir_only, extra_dir=extra_dir, extra_dir_at_root=extra_dir_at_root, alt_name=alt_name)
+
+ def _construct_path(self, dataset_id, old_style=False, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ """ Construct the expected absolute path for accessing the object
+ identified by `dataset_id`.
+
+ :type dir_only: bool
+ :param dir_only: If True, return only the absolute path where the file
+ identified by `dataset_id` should be located
+
+ :type extra_dir: string
+ :param extra_dir: Append the value of this parameter to the expected path
+ used to access the object identified by `dataset_id`
+ (e.g., /files/000/<extra_dir>/dataset_10.dat).
+
+ :type alt_name: string
+ :param alt_name: Use this name as the alternative name for the returned
+ dataset rather than the default.
+
+ :type old_style: bool
+ param old_style: This option is used for backward compatibility. If True
+ the composed directory structure does not include a hash id
+ (e.g., /files/dataset_10.dat (old) vs. /files/000/dataset_10.dat (new))
+ """
+ if old_style:
+ if extra_dir is not None:
+ path = os.path.join(self.file_path, extra_dir)
+ else:
+ path = self.file_path
+ else:
+ rel_path = os.path.join(*directory_hash_id(dataset_id))
+ if extra_dir is not None:
+ if extra_dir_at_root:
+ rel_path = os.path.join(extra_dir, rel_path)
+ else:
+ rel_path = os.path.join(rel_path, extra_dir)
+ path = os.path.join(self.file_path, rel_path)
+ if not dir_only:
+ path = os.path.join(path, alt_name if alt_name else "dataset_%s.dat" % dataset_id)
+ return os.path.abspath(path)
+
+ def exists(self, dataset_id, **kwargs):
+ path = self._construct_path(dataset_id, old_style=True, **kwargs)
+ # For backward compatibility, check root path first; otherwise, construct
+ # and check hashed path
+ if not os.path.exists(path):
+ path = self._construct_path(dataset_id, **kwargs)
+ return os.path.exists(path)
+
+ def create(self, dataset_id, **kwargs):
+ if not self.exists(dataset_id, **kwargs):
+ # Pull out locally used fields
+ extra_dir = kwargs.get('extra_dir', None)
+ extra_dir_at_root = kwargs.get('extra_dir_at_root', False)
+ dir_only = kwargs.get('dir_only', False)
+ alt_name = kwargs.get('alt_name', None)
+ # Construct hashed path
+ path = os.path.join(*directory_hash_id(dataset_id))
+ # Optionally append extra_dir
+ if extra_dir is not None:
+ if extra_dir_at_root:
+ path = os.path.join(extra_dir, path)
+ else:
+ path = os.path.join(path, extra_dir)
+ # Combine the constructted path with the root dir for all files
+ path = os.path.join(self.file_path, path)
+ # Create directory if it does not exist
+ if not os.path.exists(path):
+ os.makedirs(path)
+ if not dir_only:
+ path = os.path.join(path, alt_name if alt_name else "dataset_%s.dat" % dataset_id)
+ open(path, 'w').close()
+
+ def empty(self, dataset_id, **kwargs):
+ return os.path.getsize(self.get_filename(dataset_id, **kwargs)) > 0
+
+ def size(self, dataset_id, **kwargs):
+ if self.exists(dataset_id, **kwargs):
+ try:
+ return os.path.getsize(self.get_filename(dataset_id, **kwargs))
+ except OSError:
+ return 0
+ else:
+ return 0
+
+ def delete(self, dataset_id, entire_dir=False, **kwargs):
+ path = self.get_filename(dataset_id, **kwargs)
+ extra_dir = kwargs.get('extra_dir', None)
+ try:
+ if entire_dir and extra_dir:
+ shutil.rmtree(path)
+ return True
+ if self.exists(dataset_id, **kwargs):
+ os.remove(path)
+ return True
+ except OSError, ex:
+ log.critical('%s delete error %s' % (self._get_filename(dataset_id, **kwargs), ex))
+ return False
+
+ def get_data(self, dataset_id, start=0, count=-1, **kwargs):
+ data_file = open(self.get_filename(dataset_id, **kwargs), 'r')
+ data_file.seek(start)
+ content = data_file.read(count)
+ data_file.close()
+ return content
+
+ def get_filename(self, dataset_id, **kwargs):
+ path = self._construct_path(dataset_id, old_style=True, **kwargs)
+ # For backward compatibility, check root path first; otherwise, construct
+ # and check hashed path
+ if os.path.exists(path):
+ return path
+ else:
+ path = self._construct_path(dataset_id, **kwargs)
+ print "Checking it %s exists: %s" %(path, os.path.exists(path))
+ if os.path.exists(path):
+ return path
+ else:
+ raise ObjectNotFound()
+
+ def update_from_file(self, dataset_id, file_name=None, create=False, **kwargs):
+ """ `create` parameter is not used in this implementation """
+ if create:
+ self.create(dataset_id, **kwargs)
+ if file_name and self.exists(dataset_id, **kwargs):
+ try:
+ shutil.copy(file_name, self.get_filename(dataset_id, **kwargs))
+ except IOError, ex:
+ log.critical('Error copying %s to %s: %s' % (file_name,
+ self._get_filename(dataset_id, **kwargs), ex))
+
+ def get_object_url(self, dataset_id, **kwargs):
+ return None
+
+
+
+class CachingObjectStore(ObjectStore):
+ """
+ Object store that uses a directory for caching files, but defers and writes
+ back to another object store.
+ """
+
+ def __init__(self, path, backend):
+ super(CachingObjectStore, self).__init__(self, path, backend)
+
+
+
+class S3ObjectStore(ObjectStore):
+ """
+ Object store that stores objects as items in an AWS S3 bucket. A local
+ cache exists that is used as an intermediate location for files between
+ Galaxy and S3.
+ """
+ def __init__(self, app):
+ super(S3ObjectStore, self).__init__()
+ self.app = app
+ self.staging_path = self.app.config.file_path
+ self.s3_conn = S3Connection()
+ self.bucket = self._get_bucket(self.app.config.s3_bucket)
+ self.use_rr = self.app.config.use_reduced_redundancy
+ self.cache_size = self.app.config.object_store_cache_size * 1073741824 # Convert GBs to bytes
+ self.transfer_progress = 0
+ # Clean cache only if value is set in universe_wsgi.ini
+ if self.cache_size != -1:
+ # Helper for interruptable sleep
+ self.sleeper = Sleeper()
+ self.cache_monitor_thread = threading.Thread(target=self.__cache_monitor)
+ self.cache_monitor_thread.start()
+ log.info("Cache cleaner manager started")
+
+ def __cache_monitor(self):
+ time.sleep(2) # Wait for things to load before starting the monitor
+ while self.running:
+ total_size = 0
+ # Is this going to be too expensive of an operation to be done frequently?
+ file_list = []
+ for dirpath, dirnames, filenames in os.walk(self.staging_path):
+ for f in filenames:
+ fp = os.path.join(dirpath, f)
+ file_size = os.path.getsize(fp)
+ total_size += file_size
+ # Get the time given file was last accessed
+ last_access_time = time.localtime(os.stat(fp)[7])
+ # Compose a tuple of the access time and the file path
+ file_tuple = last_access_time, fp, file_size
+ file_list.append(file_tuple)
+ # Sort the file list (based on access time)
+ file_list.sort()
+ # Initiate cleaning once within 10% of the defined cache size?
+ cache_limit = self.cache_size * 0.9
+ if total_size > cache_limit:
+ log.info("Initiating cache cleaning: current cache size: %s; clean until smaller than: %s" \
+ % (convert_bytes(total_size), convert_bytes(cache_limit)))
+ # How much to delete? If simply deleting up to the cache-10% limit,
+ # is likely to be deleting frequently and may run the risk of hitting
+ # the limit - maybe delete additional #%?
+ # For now, delete enough to leave at least 10% of the total cache free
+ delete_this_much = total_size - cache_limit
+ self.__clean_cache(file_list, delete_this_much)
+ self.sleeper.sleep(30) # Test cache size every 30 seconds?
+
+ def __clean_cache(self, file_list, delete_this_much):
+ """ Keep deleting files from the file_list until the size of the deleted
+ files is greater than the value in delete_this_much parameter.
+
+ :type file_list: list
+ :param file_list: List of candidate files that can be deleted. This method
+ will start deleting files from the beginning of the list so the list
+ should be sorted accordingly. The list must contains 3-element tuples,
+ positioned as follows: position 0 holds file last accessed timestamp
+ (as time.struct_time), position 1 holds file path, and position 2 has
+ file size (e.g., (<access time>, /mnt/data/dataset_1.dat), 472394)
+
+ :type delete_this_much: int
+ :param delete_this_much: Total size of files, in bytes, that should be deleted.
+ """
+ # Keep deleting datasets from file_list until deleted_amount does not
+ # exceed delete_this_much; start deleting from the front of the file list,
+ # which assumes the oldest files come first on the list.
+ deleted_amount = 0
+ for i, f in enumerate(file_list):
+ if deleted_amount < delete_this_much:
+ deleted_amount += f[2]
+ os.remove(f[1])
+ # Debugging code for printing deleted files' stats
+ # folder, file_name = os.path.split(f[1])
+ # file_date = time.strftime("%m/%d/%y %H:%M:%S", f[0])
+ # log.debug("%s. %-25s %s, size %s (deleted %s/%s)" \
+ # % (i, file_name, convert_bytes(f[2]), file_date, \
+ # convert_bytes(deleted_amount), convert_bytes(delete_this_much)))
+ else:
+ log.debug("Cache cleaning done. Total space freed: %s" % convert_bytes(deleted_amount))
+ return
+
+ def _get_bucket(self, bucket_name):
+ """ Sometimes a handle to a bucket is not established right away so try
+ it a few times. Raise error is connection is not established. """
+ for i in range(5):
+ try:
+ bucket = self.s3_conn.get_bucket(bucket_name)
+ log.debug("Using S3 object store; got bucket '%s'" % bucket.name)
+ return bucket
+ except S3ResponseError:
+ log.debug("Could not get bucket '%s', attempt %s/5" % (bucket_name, i+1))
+ time.sleep(2)
+ # All the attempts have been exhausted and connection was not established,
+ # raise error
+ raise S3ResponseError
+
+ def _fix_permissions(self, rel_path):
+ """ Set permissions on rel_path"""
+ for basedir, dirs, files in os.walk( rel_path ):
+ util.umask_fix_perms( basedir, self.app.config.umask, 0777, self.app.config.gid )
+ for f in files:
+ path = os.path.join( basedir, f )
+ # Ignore symlinks
+ if os.path.islink( path ):
+ continue
+ util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
+
+ def _construct_path(self, dataset_id, dir_only=None, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+ rel_path = os.path.join(*directory_hash_id(dataset_id))
+ if extra_dir is not None:
+ if extra_dir_at_root:
+ rel_path = os.path.join(extra_dir, rel_path)
+ else:
+ rel_path = os.path.join(rel_path, extra_dir)
+ # S3 folders are marked by having trailing '/' so add it now
+ rel_path = '%s/' % rel_path
+ if not dir_only:
+ rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % dataset_id)
+ return rel_path
+
+ def _get_cache_path(self, rel_path):
+ return os.path.abspath(os.path.join(self.staging_path, rel_path))
+
+ def _get_transfer_progress(self):
+ return self.transfer_progress
+
+ def _get_size_in_s3(self, rel_path):
+ try:
+ key = self.bucket.get_key(rel_path)
+ if key:
+ return key.size
+ except S3ResponseError, ex:
+ log.error("Could not get size of key '%s' from S3: %s" % (rel_path, ex))
+ except Exception, ex:
+ log.error("Could not get reference to the key object '%s'; returning -1 for key size: %s" % (rel_path, ex))
+ return -1
+
+ def _key_exists(self, rel_path):
+ exists = False
+ try:
+ # A hackish way of testing if the rel_path is a folder vs a file
+ is_dir = rel_path[-1] == '/'
+ if is_dir:
+ rs = self.bucket.get_all_keys(prefix=rel_path)
+ if len(rs) > 0:
+ exists = True
+ else:
+ exists = False
+ else:
+ key = Key(self.bucket, rel_path)
+ exists = key.exists()
+ except S3ResponseError, ex:
+ log.error("Trouble checking existence of S3 key '%s': %s" % (rel_path, ex))
+ return False
+ print "Checking if '%s' exists in S3: %s" % (rel_path, exists)
+ if rel_path[0] == '/':
+ raise
+ return exists
+
+ def _in_cache(self, rel_path):
+ """ Check if the given dataset is in the local cache and return True if so. """
+ # log.debug("------ Checking cache for rel_path %s" % rel_path)
+ cache_path = self._get_cache_path(rel_path)
+ exists = os.path.exists(cache_path)
+ # print "Checking chache for %s; returning %s" % (cache_path, exists)
+ return exists
+ # EATODO: Part of checking if a file is in cache should be to ensure the
+ # size of the cached file matches that on S3. Once the upload tool explicitly
+ # creates, this check sould be implemented- in the mean time, it's not
+ # looking likely to be implementable reliably.
+ # if os.path.exists(cache_path):
+ # # print "***1 %s exists" % cache_path
+ # if self._key_exists(rel_path):
+ # # print "***2 %s exists in S3" % rel_path
+ # # Make sure the size in cache is available in its entirety
+ # # print "File '%s' cache size: %s, S3 size: %s" % (cache_path, os.path.getsize(cache_path), self._get_size_in_s3(rel_path))
+ # if os.path.getsize(cache_path) == self._get_size_in_s3(rel_path):
+ # # print "***2.1 %s exists in S3 and the size is the same as in cache (in_cache=True)" % rel_path
+ # exists = True
+ # else:
+ # # print "***2.2 %s exists but differs in size from cache (in_cache=False)" % cache_path
+ # exists = False
+ # else:
+ # # Although not perfect decision making, this most likely means
+ # # that the file is currently being uploaded
+ # # print "***3 %s found in cache but not in S3 (in_cache=True)" % cache_path
+ # exists = True
+ # else:
+ # # print "***4 %s does not exist (in_cache=False)" % cache_path
+ # exists = False
+ # # print "Checking cache for %s; returning %s" % (cache_path, exists)
+ # return exists
+ # # return False
+
+ def _pull_into_cache(self, rel_path):
+ # Ensure the cache directory structure exists (e.g., dataset_#_files/)
+ rel_path_dir = os.path.dirname(rel_path)
+ if not os.path.exists(self._get_cache_path(rel_path_dir)):
+ os.makedirs(self._get_cache_path(rel_path_dir))
+ # Now pull in the file
+ ok = self._download(rel_path)
+ self._fix_permissions(rel_path)
+ return ok
+
+ def _transfer_cb(self, complete, total):
+ self.transfer_progress += 10
+ # print "Dataset transfer progress: %s" % self.transfer_progress
+
+ def _download(self, rel_path):
+ try:
+ log.debug("Pulling key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path)))
+ key = self.bucket.get_key(rel_path)
+ # Test is cache is large enough to hold the new file
+ if key.size > self.cache_size:
+ log.critical("File %s is larger (%s) than the cache size (%s). Cannot download." \
+ % (rel_path, key.size, self.cache_size))
+ return False
+ # Test if 'axel' is available for parallel download and pull the key into cache
+ try:
+ ret_code = subprocess.call('axel')
+ except OSError:
+ ret_code = 127
+ if ret_code == 127:
+ self.transfer_progress = 0 # Reset transfer progress counter
+ key.get_contents_to_filename(self._get_cache_path(rel_path), cb=self._transfer_cb, num_cb=10)
+ print "(ssss) Pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
+ return True
+ else:
+ ncores = multiprocessing.cpu_count()
+ url = key.generate_url(7200)
+ ret_code = subprocess.call("axel -a -n %s '%s'" % (ncores, url))
+ if ret_code == 0:
+ print "(ssss) Parallel pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
+ return True
+ except S3ResponseError, ex:
+ log.error("Problem downloading key '%s' from S3 bucket '%s': %s" % (rel_path, self.bucket.name, ex))
+ return False
+
+ def _push_to_s3(self, rel_path, source_file=None, from_string=None):
+ """
+ Push the file pointed to by `rel_path` to S3 naming the key `rel_path`.
+ If `source_file` is provided, push that file instead while still using
+ `rel_path` as the key name.
+        If `from_string` is provided, set the contents of the key to the value
+        of the string.
+ """
+ try:
+ source_file = source_file if source_file else self._get_cache_path(rel_path)
+ if os.path.exists(source_file):
+ key = Key(self.bucket, rel_path)
+ if from_string:
+ key.set_contents_from_string(from_string, reduced_redundancy=self.use_rr)
+ log.debug("Pushed data from string '%s' to key '%s'" % (from_string, rel_path))
+ else:
+ start_time = datetime.now()
+ print "[%s] Pushing cache file '%s' to key '%s'" % (start_time, source_file, rel_path)
+ mb_size = os.path.getsize(source_file) / 1e6
+ if mb_size < 60:
+ self.transfer_progress = 0 # Reset transfer progress counter
+ key.set_contents_from_filename(source_file, reduced_redundancy=self.use_rr,
+ cb=self._transfer_cb, num_cb=10)
+ else:
+ multipart_upload(self.bucket, key.name, source_file, mb_size, use_rr=self.use_rr)
+ # self._multipart_upload(key.name, source_file, mb_size)
+ end_time = datetime.now()
+ print "Push ended at '%s'; it lasted '%s'" % (end_time, end_time-start_time)
+ log.debug("Pushed cache file '%s' to key '%s'" % (source_file, rel_path))
+ return True
+ else:
+ log.error("Tried updating key '%s' from source file '%s', but source file does not exist."
+ % (rel_path, source_file))
+ except S3ResponseError, ex:
+ log.error("Trouble pushing S3 key '%s' from file '%s': %s" % (rel_path, source_file, ex))
+ return False
+
+ def file_ready(self, dataset_id, **kwargs):
+ """ A helper method that checks if a file corresponding to a dataset
+ is ready and available to be used. Return True if so, False otherwise."""
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ # Make sure the size in cache is available in its entirety
+ if self._in_cache(rel_path) and os.path.getsize(self._get_cache_path(rel_path)) == self._get_size_in_s3(rel_path):
+ return True
+ return False
+
+ def exists(self, dataset_id, **kwargs):
+ in_cache = in_s3 = False
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ # Check cache
+ if self._in_cache(rel_path):
+ in_cache = True
+ # Check S3
+ in_s3 = self._key_exists(rel_path)
+ # log.debug("~~~~~~ File '%s' exists in cache: %s; in s3: %s" % (rel_path, in_cache, in_s3))
+ # dir_only does not get synced so shortcut the decision
+ dir_only = kwargs.get('dir_only', False)
+        if dir_only:
+            return in_cache or in_s3
+ # TODO: Sync should probably not be done here. Add this to an async upload stack?
+ if in_cache and not in_s3:
+ self._push_to_s3(rel_path, source_file=self._get_cache_path(rel_path))
+ return True
+ elif in_s3:
+ return True
+ else:
+ return False
+
+ def create(self, dataset_id, **kwargs):
+ if not self.exists(dataset_id, **kwargs):
+ print "S3 OS creating a dataset with ID %s" % dataset_id
+ # Pull out locally used fields
+ extra_dir = kwargs.get('extra_dir', None)
+ extra_dir_at_root = kwargs.get('extra_dir_at_root', False)
+ dir_only = kwargs.get('dir_only', False)
+ alt_name = kwargs.get('alt_name', None)
+ # print "---- Processing: %s; %s" % (alt_name, locals())
+ # Construct hashed path
+ rel_path = os.path.join(*directory_hash_id(dataset_id))
+ # Optionally append extra_dir
+ if extra_dir is not None:
+ if extra_dir_at_root:
+ rel_path = os.path.join(extra_dir, rel_path)
+ else:
+ rel_path = os.path.join(rel_path, extra_dir)
+ # Create given directory in cache
+ cache_dir = os.path.join(self.staging_path, rel_path)
+ if not os.path.exists(cache_dir):
+ os.makedirs(cache_dir)
+            # Although it is not really necessary to create S3 folders (S3 has a
+            # flat namespace), do so for consistency with the regular file system.
+            # S3 folders are marked by a trailing '/', so add it now.
+ # s3_dir = '%s/' % rel_path
+ # self._push_to_s3(s3_dir, from_string='')
+ # If instructed, create the dataset in cache & in S3
+ if not dir_only:
+ rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % dataset_id)
+ open(os.path.join(self.staging_path, rel_path), 'w').close()
+ self._push_to_s3(rel_path, from_string='')
+
+ def empty(self, dataset_id, **kwargs):
+ if self.exists(dataset_id, **kwargs):
+            return bool(self.size(dataset_id, **kwargs) == 0)
+ else:
+ raise ObjectNotFound()
+
+ def size(self, dataset_id, **kwargs):
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ if self._in_cache(rel_path):
+ try:
+ return os.path.getsize(self._get_cache_path(rel_path))
+ except OSError, ex:
+ log.info("Could not get size of file '%s' in local cache, will try S3. Error: %s" % (rel_path, ex))
+ elif self.exists(dataset_id, **kwargs):
+ return self._get_size_in_s3(rel_path)
+ log.warning("Did not find dataset '%s', returning 0 for size" % rel_path)
+ return 0
+
+ def delete(self, dataset_id, entire_dir=False, **kwargs):
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ extra_dir = kwargs.get('extra_dir', None)
+ try:
+ # For the case of extra_files, because we don't have a reference to
+ # individual files/keys we need to remove the entire directory structure
+ # with all the files in it. This is easy for the local file system,
+            # but requires iterating through each individual key in S3 and deleting it.
+ if entire_dir and extra_dir:
+ shutil.rmtree(self._get_cache_path(rel_path))
+ rs = self.bucket.get_all_keys(prefix=rel_path)
+ for key in rs:
+ log.debug("Deleting key %s" % key.name)
+ key.delete()
+ return True
+ else:
+ # Delete from cache first
+ os.unlink(self._get_cache_path(rel_path))
+ # Delete from S3 as well
+ if self._key_exists(rel_path):
+ key = Key(self.bucket, rel_path)
+ log.debug("Deleting key %s" % key.name)
+ key.delete()
+ return True
+ except S3ResponseError, ex:
+ log.error("Could not delete key '%s' from S3: %s" % (rel_path, ex))
+ except OSError, ex:
+ log.error('%s delete error %s' % (self._get_filename(dataset_id, **kwargs), ex))
+ return False
+
+ def get_data(self, dataset_id, start=0, count=-1, **kwargs):
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ # Check cache first and get file if not there
+ if not self._in_cache(rel_path):
+ self._pull_into_cache(rel_path)
+ else:
+ print "(cccc) Getting '%s' from cache" % self._get_cache_path(rel_path)
+ # Read the file content from cache
+ data_file = open(self._get_cache_path(rel_path), 'r')
+ data_file.seek(start)
+ content = data_file.read(count)
+ data_file.close()
+ return content
+
+ def get_filename(self, dataset_id, **kwargs):
+ print "S3 get_filename for dataset: %s" % dataset_id
+ dir_only = kwargs.get('dir_only', False)
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ cache_path = self._get_cache_path(rel_path)
+        # S3 does not recognize directories as files, so we cannot check whether
+        # they exist. When checking a directory only, ensure the given directory
+        # exists in the cache and return the expected cache path.
+ # dir_only = kwargs.get('dir_only', False)
+ # if dir_only:
+ # if not os.path.exists(cache_path):
+ # os.makedirs(cache_path)
+ # return cache_path
+ # Check if the file exists in the cache first
+ if self._in_cache(rel_path):
+ return cache_path
+ # Check if the file exists in persistent storage and, if it does, pull it into cache
+ elif self.exists(dataset_id, **kwargs):
+ if dir_only: # Directories do not get pulled into cache
+ return cache_path
+ else:
+ if self._pull_into_cache(rel_path):
+ return cache_path
+ # For the case of retrieving a directory only, return the expected path
+ # even if it does not exist.
+ # if dir_only:
+ # return cache_path
+ raise ObjectNotFound()
+ # return cache_path # Until the upload tool does not explicitly create the dataset, return expected path
+
+ def update_from_file(self, dataset_id, file_name=None, create=False, **kwargs):
+ if create:
+ self.create(dataset_id, **kwargs)
+ if self.exists(dataset_id, **kwargs):
+ rel_path = self._construct_path(dataset_id, **kwargs)
+            # Choose whether to use the dataset file itself or an alternate file
+ if file_name:
+ source_file = os.path.abspath(file_name)
+ # Copy into cache
+ cache_file = self._get_cache_path(rel_path)
+ try:
+ # FIXME? Should this be a `move`?
+ shutil.copy2(source_file, cache_file)
+ self._fix_permissions(cache_file)
+ except OSError, ex:
+ log.error("Trouble copying source file '%s' to cache '%s': %s" % (source_file, cache_file, ex))
+ else:
+ source_file = self._get_cache_path(rel_path)
+ # Update the file on S3
+ self._push_to_s3(rel_path, source_file)
+ else:
+ raise ObjectNotFound()
+
+ def get_object_url(self, dataset_id, **kwargs):
+ if self.exists(dataset_id, **kwargs):
+ rel_path = self._construct_path(dataset_id, **kwargs)
+ try:
+ key = Key(self.bucket, rel_path)
+ return key.generate_url(expires_in = 86400) # 24hrs
+ except S3ResponseError, ex:
+ log.warning("Trouble generating URL for dataset '%s': %s" % (rel_path, ex))
+ return None
+
+
+
+class HierarchicalObjectStore(ObjectStore):
+ """
+    ObjectStore that defers to a list of backends. When getting objects, the
+    first store in which the object exists is used; objects are always created
+    in the first store.
+ """
+
+ def __init__(self, backends=[]):
+ super(HierarchicalObjectStore, self).__init__()
+
+
+def build_object_store_from_config(app):
+ """ Depending on the configuration setting, invoke the appropriate object store
+ """
+ store = app.config.object_store
+ if store == 'disk':
+ return DiskObjectStore(app=app)
+ elif store == 's3':
+ os.environ['AWS_ACCESS_KEY_ID'] = app.config.aws_access_key
+ os.environ['AWS_SECRET_ACCESS_KEY'] = app.config.aws_secret_key
+ return S3ObjectStore(app=app)
+ elif store == 'hierarchical':
+ return HierarchicalObjectStore()
+
+def convert_bytes(bytes):
+ """ A helper function used for pretty printing disk usage """
+ if bytes is None:
+ bytes = 0
+ bytes = float(bytes)
+
+ if bytes >= 1099511627776:
+ terabytes = bytes / 1099511627776
+ size = '%.2fTB' % terabytes
+ elif bytes >= 1073741824:
+ gigabytes = bytes / 1073741824
+ size = '%.2fGB' % gigabytes
+ elif bytes >= 1048576:
+ megabytes = bytes / 1048576
+ size = '%.2fMB' % megabytes
+ elif bytes >= 1024:
+ kilobytes = bytes / 1024
+ size = '%.2fKB' % kilobytes
+ else:
+ size = '%.2fb' % bytes
+ return size
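
Taken together, the API above is exercised roughly like this; a hedged sketch where `app` stands in for a configured Galaxy application object and the dataset id is a placeholder:

    from galaxy.objectstore import build_object_store_from_config, convert_bytes

    store = build_object_store_from_config( app )  # S3ObjectStore when object_store = s3
    store.create( 42 )                             # empty dataset_42.dat in cache and S3
    if store.exists( 42 ) and store.file_ready( 42 ):
        print store.get_filename( 42 )             # cache path, pulled from S3 if needed
        print convert_bytes( store.size( 42 ) )    # '0.00b' for a freshly created dataset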
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/objectstore/s3_multipart_upload.py
--- /dev/null
+++ b/lib/galaxy/objectstore/s3_multipart_upload.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+"""
+Split large file into multiple pieces for upload to S3.
+This parallelizes the task over available cores using multiprocessing.
+Code mostly taken from CloudBioLinux.
+"""
+import os
+import glob
+import subprocess
+import contextlib
+import functools
+import multiprocessing
+from multiprocessing.pool import IMapIterator
+
+import boto
+
+def map_wrap(f):
+ @functools.wraps(f)
+ def wrapper(*args, **kwargs):
+ return apply(f, *args, **kwargs)
+ return wrapper
+
+def mp_from_ids(mp_id, mp_keyname, mp_bucketname):
+ """Get the multipart upload from the bucket and multipart IDs.
+
+ This allows us to reconstitute a connection to the upload
+ from within multiprocessing functions.
+ """
+ conn = boto.connect_s3()
+ bucket = conn.lookup(mp_bucketname)
+ mp = boto.s3.multipart.MultiPartUpload(bucket)
+ mp.key_name = mp_keyname
+ mp.id = mp_id
+ return mp
+
+@map_wrap
+def transfer_part(mp_id, mp_keyname, mp_bucketname, i, part):
+ """Transfer a part of a multipart upload. Designed to be run in parallel.
+ """
+ mp = mp_from_ids(mp_id, mp_keyname, mp_bucketname)
+ print " Transferring", i, part
+ with open(part) as t_handle:
+ mp.upload_part_from_file(t_handle, i+1)
+ os.remove(part)
+
+def multipart_upload(bucket, s3_key_name, tarball, mb_size, use_rr=True):
+ """Upload large files using Amazon's multipart upload functionality.
+ """
+ cores = multiprocessing.cpu_count()
+ print "Initiating multipart upload using %s cores" % cores
+ def split_file(in_file, mb_size, split_num=5):
+ prefix = os.path.join(os.path.dirname(in_file),
+ "%sS3PART" % (os.path.basename(s3_key_name)))
+ # Split chunks so they are 5MB < chunk < 250MB
+ split_size = int(max(min(mb_size / (split_num * 2.0), 250), 5))
+ if not os.path.exists("%saa" % prefix):
+ cl = ["split", "-b%sm" % split_size, in_file, prefix]
+ subprocess.check_call(cl)
+ return sorted(glob.glob("%s*" % prefix))
+
+ mp = bucket.initiate_multipart_upload(s3_key_name, reduced_redundancy=use_rr)
+ with multimap(cores) as pmap:
+ for _ in pmap(transfer_part, ((mp.id, mp.key_name, mp.bucket_name, i, part)
+ for (i, part) in
+ enumerate(split_file(tarball, mb_size, cores)))):
+ pass
+ mp.complete_upload()
+
+@contextlib.contextmanager
+def multimap(cores=None):
+ """Provide multiprocessing imap like function.
+
+    The context manager handles setting up the pool, working around interrupt
+    issues, and terminating the pool on completion.
+ """
+ if cores is None:
+ cores = max(multiprocessing.cpu_count() - 1, 1)
+ def wrapper(func):
+ def wrap(self, timeout=None):
+ return func(self, timeout=timeout if timeout is not None else 1e100)
+ return wrap
+ IMapIterator.next = wrapper(IMapIterator.next)
+ pool = multiprocessing.Pool(cores)
+ yield pool.imap
+ pool.terminate()
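
A rough usage sketch for the module above; the bucket name and file path are placeholders, and boto is assumed to find AWS credentials in the environment (as build_object_store_from_config arranges):

    import os
    import boto
    from galaxy.objectstore.s3_multipart_upload import multipart_upload

    conn = boto.connect_s3()                    # credentials from AWS_* env vars
    bucket = conn.lookup( 'my-galaxy-bucket' )  # placeholder bucket name
    source = '/tmp/dataset_42.dat'              # placeholder file to upload
    mb_size = os.path.getsize( source ) / 1e6
    multipart_upload( bucket, 'dataset_42.dat', source, mb_size, use_rr=True )

This is the same hand-off _push_to_s3 makes for files of 60 MB or more.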
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1701,23 +1701,38 @@
Find extra files in the job working directory and move them into
the appropriate dataset's files directory
"""
+ # print "Working in collect_associated_files"
for name, hda in output.items():
temp_file_path = os.path.join( job_working_directory, "dataset_%s_files" % ( hda.dataset.id ) )
try:
- if len( os.listdir( temp_file_path ) ) > 0:
- store_file_path = os.path.join(
- os.path.join( self.app.config.file_path, *directory_hash_id( hda.dataset.id ) ),
- "dataset_%d_files" % hda.dataset.id )
- shutil.move( temp_file_path, store_file_path )
- # Fix permissions
- for basedir, dirs, files in os.walk( store_file_path ):
- util.umask_fix_perms( basedir, self.app.config.umask, 0777, self.app.config.gid )
- for file in files:
- path = os.path.join( basedir, file )
- # Ignore symlinks
- if os.path.islink( path ):
- continue
- util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
+ a_files = os.listdir( temp_file_path )
+ if len( a_files ) > 0:
+ for f in a_files:
+ # print "------ Instructing ObjectStore to update/create file: %s from %s" \
+ # % (hda.dataset.id, os.path.join(temp_file_path, f))
+ self.app.object_store.update_from_file(hda.dataset.id,
+ extra_dir="dataset_%d_files" % hda.dataset.id,
+ alt_name = f,
+ file_name = os.path.join(temp_file_path, f),
+ create = True)
+ # Clean up after being handled by object store.
+ # FIXME: If the object (e.g., S3) becomes async, this will
+ # cause issues so add it to the object store functionality?
+ # shutil.rmtree(temp_file_path)
+
+ # store_file_path = os.path.join(
+ # os.path.join( self.app.config.file_path, *directory_hash_id( hda.dataset.id ) ),
+ # "dataset_%d_files" % hda.dataset.id )
+ # shutil.move( temp_file_path, store_file_path )
+ # # Fix permissions
+ # for basedir, dirs, files in os.walk( store_file_path ):
+ # util.umask_fix_perms( basedir, self.app.config.umask, 0777, self.app.config.gid )
+ # for file in files:
+ # path = os.path.join( basedir, file )
+ # # Ignore symlinks
+ # if os.path.islink( path ):
+ # continue
+ # util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
except:
continue
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -282,7 +282,8 @@
trans.sa_session.flush()
trans.app.security_agent.set_all_dataset_permissions( data.dataset, output_permissions )
# Create an empty file immediately
- open( data.file_name, "w" ).close()
+ # open( data.file_name, "w" ).close()
+ trans.app.object_store.create( data.id )
# Fix permissions
util.umask_fix_perms( data.file_name, trans.app.config.umask, 0666 )
# This may not be neccesary with the new parent/child associations
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py
+++ b/lib/galaxy/tools/actions/upload.py
@@ -1,4 +1,3 @@
-import os
from __init__ import ToolAction
from galaxy.tools.actions import upload_common
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -324,13 +324,17 @@
job.add_output_library_dataset( 'output%i' % i, dataset )
# Create an empty file immediately
if not dataset.dataset.external_filename:
- open( dataset.file_name, "w" ).close()
+ trans.app.object_store.create( dataset.id )
+ print "---> Upload tool created a folder(?) %s with ID %s? %s" % (dataset.file_name, dataset.id, trans.app.object_store.exists(dataset.id))
+ # open( dataset.file_name, "w" ).close()
else:
for i, dataset in enumerate( data_list ):
job.add_output_dataset( 'output%i' % i, dataset )
# Create an empty file immediately
if not dataset.dataset.external_filename:
- open( dataset.file_name, "w" ).close()
+ trans.app.object_store.create( dataset.id )
+ print "---> Upload tool created a file %s with ID %s? %s" % (dataset.file_name, dataset.id, trans.app.object_store.exists(dataset.id))
+ # open( dataset.file_name, "w" ).close()
job.state = job.states.NEW
trans.sa_session.add( job )
trans.sa_session.flush()
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -217,7 +217,7 @@
outfname = data.name[0:150]
outfname = ''.join(c in valid_chars and c or '_' for c in outfname)
if (params.do_action == None):
- params.do_action = 'zip' # default
+ params.do_action = 'zip' # default
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
if not data:
@@ -300,8 +300,7 @@
archive.wsgi_headeritems = trans.response.wsgi_headeritems()
return archive.stream
return trans.show_error_message( msg )
-
-
+
@web.expose
def get_metadata_file(self, trans, hda_id, metadata_name):
""" Allows the downloading of metadata files associated with datasets (eg. bai index for bam files) """
@@ -316,12 +315,8 @@
trans.response.headers["Content-Type"] = "application/octet-stream"
trans.response.headers["Content-Disposition"] = "attachment; filename=Galaxy%s-[%s].%s" % (data.hid, fname, file_ext)
return open(data.metadata.get(metadata_name).file_name)
-
- @web.expose
- def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd):
- """Catches the dataset id and displays file contents as directed"""
- composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
- composite_extensions.append('html') # for archiving composite datatypes
+
+ def _check_dataset(self, trans, dataset_id):
# DEPRECATION: We still support unencoded ids for backward compatibility
try:
data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
@@ -340,9 +335,36 @@
if data.state == trans.model.Dataset.states.UPLOAD:
return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to view it." )
+ return data
+
+ @web.expose
+ @web.json
+ def transfer_status(self, trans, dataset_id, filename=None):
+ """ Primarily used for the S3ObjectStore - get the status of data transfer
+ if the file is not in cache """
+ data = self._check_dataset(trans, dataset_id)
+ print "dataset.py -> transfer_status: Checking transfer status for dataset %s..." % data.id
+
+        # Pulling files in extra_files_path into cache is not handled via this
+        # method, primarily because those files are typically linked to through
+        # a tool's output page anyhow, so tying a JavaScript event that would
+        # call this method does not seem feasible.
+        return trans.app.object_store.file_ready(data.id)
+
+ @web.expose
+ def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd):
+ """Catches the dataset id and displays file contents as directed"""
+ composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
+ composite_extensions.append('html') # for archiving composite datatypes
+ data = self._check_dataset(trans, dataset_id)
+
if filename and filename != "index":
# For files in extra_files_path
- file_path = os.path.join( data.extra_files_path, filename )
+ file_path = os.path.join( data.extra_files_path, filename ) # remove after debugging
+ file_path = trans.app.object_store.get_filename(data.id, extra_dir='dataset_%s_files' % data.id, alt_name=filename)
if os.path.exists( file_path ):
if os.path.isdir( file_path ):
return trans.show_error_message( "Directory listing is not allowed." ) #TODO: Reconsider allowing listing of directories?
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py
+++ b/lib/galaxy/web/controllers/history.py
@@ -581,7 +581,7 @@
trans.response.set_content_type( 'application/x-gzip' )
else:
trans.response.set_content_type( 'application/x-tar' )
- return open( jeha.dataset.file_name )
+ return trans.app.object_store.get_data(jeha.dataset.id)
elif jeha.job.state in [ model.Job.states.RUNNING, model.Job.states.QUEUED, model.Job.states.WAITING ]:
return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>" \
% ( { 'n' : history.name, 's' : url_for( action="export_archive", id=id, qualified=True ) } ) )
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 templates/dataset/display.mako
--- a/templates/dataset/display.mako
+++ b/templates/dataset/display.mako
@@ -9,11 +9,11 @@
<%def name="init()"><%
- self.has_left_panel=False
- self.has_right_panel=True
- self.message_box_visible=False
- self.active_view="user"
- self.overlay_visible=False
+ self.has_left_panel=False
+ self.has_right_panel=True
+ self.message_box_visible=False
+ self.active_view="user"
+ self.overlay_visible=False
%></%def>
@@ -44,10 +44,10 @@
<%def name="center_panel()"><div class="unified-panel-header" unselectable="on">
- <div class="unified-panel-header-inner">
- ${get_class_display_name( item.__class__ )}
- | ${get_item_name( item ) | h}
- </div>
+ <div class="unified-panel-header-inner">
+ ${get_class_display_name( item.__class__ )}
+ | ${get_item_name( item ) | h}
+ </div></div><div class="unified-panel-body">
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 templates/root/history.mako
--- a/templates/root/history.mako
+++ b/templates/root/history.mako
@@ -140,6 +140,34 @@
return false;
});
});
+
+ // Check to see if the dataset data is cached or needs to be pulled in
+ // via objectstore
+ $(this).find("a.display").each( function() {
+ var history_item = $(this).parents(".historyItem")[0];
+ var history_id = history_item.id.split( "-" )[1];
+ $(this).click(function() {
+ check_transfer_status($(this), history_id);
+ });
+ });
+
+ // If dataset data is not cached, keep making ajax calls to check on the
+ // data status and update the dataset UI element accordingly
+ function check_transfer_status(link, history_id) {
+ $.getJSON("${h.url_for( controller='dataset', action='transfer_status', dataset_id='XXX' )}".replace( 'XXX', link.attr("dataset_id") ),
+ function(ready) {
+ if (ready === false) {
+ // $("<div/>").text("Data is loading from S3... please be patient").appendTo(link.parent());
+ $( '#historyItem-' + history_id).removeClass( "historyItem-ok" );
+ $( '#historyItem-' + history_id).addClass( "historyItem-running" );
+ setTimeout(function(){check_transfer_status(link, history_id)}, 1000);
+ } else {
+ $( '#historyItem-' + history_id).removeClass( "historyItem-running" );
+ $( '#historyItem-' + history_id).addClass( "historyItem-ok" );
+ }
+ }
+ );
+ }
// Undelete link
$(this).find("a.historyItemUndelete").each( function() {
diff -r 9a5c403bb2e57189b442615840e49cc80fe3bc60 -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -98,7 +98,7 @@
%if data.purged:
<span class="icon-button display_disabled tooltip" title="Cannoy display datasets removed from disk"></span>
%else:
- <a class="icon-button display tooltip" title="Display data in browser" href="${display_url}"
+ <a class="icon-button display tooltip" dataset_id="${dataset_id}" title="Display data in browser" href="${display_url}"
%if for_editing:
target="galaxy_main"
%endif
https://bitbucket.org/galaxy/galaxy-central/changeset/9b03f63cd8e8/
changeset: 9b03f63cd8e8
user: afgane
date: 2011-07-21 16:44:27
summary: Added config options to universe_wsgi.ini.sample
affected #: 1 file
diff -r f25342f0e100ac32312934b9ca84f8fa109ef9d4 -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -414,6 +414,17 @@
# -- Beta features
+# Object store mode (valid options are: disk, s3, hierarchical)
+#object_store = s3
+#aws_access_key = <AWS access key>
+#aws_secret_key = <AWS secret key>
+#s3_bucket = <name of an existing S3 bucket>
+#use_reduced_redundancy = True
+# Size (in GB) that the cache used by object store should be limited to.
+# If the value is not specified, the cache size will be limited only by the file
+# system size.
+#object_store_cache_size = 100
+
# Enable Galaxy to communicate directly with a sequencer
#enable_sequencer_communication = False
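
Filled in, the new section might look like the following; every value is a placeholder and the section name is the stock one from the sample file:

    [app:main]
    object_store = s3
    aws_access_key = <AWS access key>
    aws_secret_key = <AWS secret key>
    s3_bucket = my-galaxy-datasets
    use_reduced_redundancy = True
    object_store_cache_size = 100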
https://bitbucket.org/galaxy/galaxy-central/changeset/f2878e4d9e0e/
changeset: f2878e4d9e0e
user: afgane
date: 2011-07-21 16:44:47
summary: Merge
affected #: 109 files
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 README.txt
--- a/README.txt
+++ b/README.txt
@@ -28,4 +28,4 @@
Not all dependencies are included for the tools provided in the sample
tool_conf.xml. A full list of external dependencies is available at:
-http://bitbucket.org/galaxy/galaxy-central/wiki/ToolDependencies
+http://wiki.g2.bx.psu.edu/Admin/Tools/Tool%20Dependencies
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 dist-eggs.ini
--- a/dist-eggs.ini
+++ b/dist-eggs.ini
@@ -3,7 +3,7 @@
; eggs.g2.bx.psu.edu) Probably only useful to Galaxy developers at
; Penn State. This file is used by scripts/dist-scramble.py
;
-; More information: http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Eggs
+; More information: http://wiki.g2.bx.psu.edu/Admin/Config/Eggs
;
[hosts]
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -3,7 +3,7 @@
;
; This file is version controlled and should not be edited by hand!
; For more information, see:
-; http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Eggs
+; http://wiki.g2.bx.psu.edu/Admin/Config/Eggs
;
[general]
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -271,7 +271,7 @@
class JobWrapper( object ):
"""
- Wraps a 'model.Job' with convience methods for running processes and
+ Wraps a 'model.Job' with convenience methods for running processes and
state management.
"""
def __init__( self, job, queue ):
@@ -284,6 +284,9 @@
self.sa_session = self.app.model.context
self.extra_filenames = []
self.command_line = None
+ # Tool versioning variables
+ self.version_string_cmd = None
+ self.version_string = ""
self.galaxy_lib_dir = None
# With job outputs in the working directory, we need the working
# directory to be set before prepare is run, or else premature deletion
@@ -311,6 +314,9 @@
param_dict = self.tool.params_from_strings( param_dict, self.app )
return param_dict
+ def get_version_string_path( self ):
+ return os.path.abspath(os.path.join(self.app.config.new_file_path, "GALAXY_VERSION_STRING_%s" % self.job_id))
+
def prepare( self ):
"""
Prepare the job to run by creating the working directory and the
@@ -388,6 +394,7 @@
extra_filenames.append( param_filename )
self.param_dict = param_dict
self.extra_filenames = extra_filenames
+ self.version_string_cmd = self.tool.version_string_cmd
return extra_filenames
def fail( self, message, exception=False ):
@@ -494,6 +501,12 @@
job.state = job.states.ERROR
else:
job.state = job.states.OK
+ if self.version_string_cmd:
+ version_filename = self.get_version_string_path()
+ if os.path.exists(version_filename):
+ self.version_string = "Tool version: %s" % open(version_filename).read()
+ os.unlink(version_filename)
+
if self.app.config.outputs_to_working_directory:
for dataset_path in self.get_output_fnames():
try:
@@ -543,7 +556,7 @@
log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, self.working_directory ) )
dataset.blurb = 'done'
dataset.peek = 'no peek'
- dataset.info = context['stdout'] + context['stderr']
+ dataset.info = context['stdout'] + context['stderr'] + self.version_string
dataset.set_size()
# Update (non-library) job output datasets through the object store
if dataset not in job.output_library_datasets:
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -1,10 +1,9 @@
import os, os.path
class BaseJobRunner( object ):
-
def build_command_line( self, job_wrapper, include_metadata=False ):
"""
- Compose the sequence of commands neccesary to execute a job. This will
+ Compose the sequence of commands necessary to execute a job. This will
currently include:
- environment settings corresponding to any requirement tags
- command line taken from job wrapper
@@ -15,9 +14,13 @@
# occur
if not commands:
return None
+ # Prepend version string
+ if job_wrapper.version_string_cmd:
+ commands = "%s &> %s; " % ( job_wrapper.version_string_cmd, job_wrapper.get_version_string_path() ) + commands
# Prepend dependency injection
if job_wrapper.dependency_shell_commands:
commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
+
# Append metadata setting commands, we don't want to overwrite metadata
# that was copied over in init_meta(), as per established behavior
if include_metadata and self.app.config.set_metadata_externally:
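
Schematically, the prepend above gives the final command line a leading version-capture step for any tool that declares a <version_command>. A sketch with illustrative values (the tool command and paths are hypothetical):

    version_string_cmd = 'bwa 2>&1 | head -3'                    # from <version_command>
    version_path = '/galaxy/new_files/GALAXY_VERSION_STRING_17'  # get_version_string_path()
    commands = 'bwa aln ref.fa reads.fq > out.sai'               # the tool command proper

    commands = "%s &> %s; " % ( version_string_cmd, version_path ) + commands
    # 'bwa 2>&1 | head -3 &> .../GALAXY_VERSION_STRING_17; bwa aln ref.fa reads.fq > out.sai'

JobWrapper.finish later reads that file back and appends the captured version string to the dataset info.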
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -17,7 +17,7 @@
configured properly. Galaxy's "scramble" system should make this installation
simple, please follow the instructions found at:
- http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster
+ http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster
Additional errors may follow:
%s
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/jobs/runners/sge.py
--- a/lib/galaxy/jobs/runners/sge.py
+++ b/lib/galaxy/jobs/runners/sge.py
@@ -14,7 +14,7 @@
"scramble" system should make this installation simple, please follow the
instructions found at:
- http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster
+ http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster
Additional errors may follow:
%s
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py
+++ b/lib/galaxy/security/__init__.py
@@ -1005,8 +1005,7 @@
ucsc_main = ( 'hgw1.cse.ucsc.edu', 'hgw2.cse.ucsc.edu', 'hgw3.cse.ucsc.edu', 'hgw4.cse.ucsc.edu',
'hgw5.cse.ucsc.edu', 'hgw6.cse.ucsc.edu', 'hgw7.cse.ucsc.edu', 'hgw8.cse.ucsc.edu' ),
ucsc_test = ( 'hgwdev.cse.ucsc.edu', ),
- ucsc_archaea = ( 'lowepub.cse.ucsc.edu', ),
- ucsc_bhri = ('ucsc.omics.bhri.internal','galaxy.omics.bhri.internal'),
+ ucsc_archaea = ( 'lowepub.cse.ucsc.edu', )
)
def __init__( self, model, permitted_actions=None ):
self.model = model
@@ -1037,7 +1036,7 @@
# We're going to search in order, but if the remote site is load
# balancing their connections (as UCSC does), this is okay.
try:
- if socket.gethostbyname( server ) == addr or server == '127.0.0.1' or server == '172.16.0.38':
+ if socket.gethostbyname( server ) == addr:
break # remote host is in the server list
except ( socket.error, socket.gaierror ):
pass # can't resolve, try next
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -5,7 +5,7 @@
pkg_resources.require( "simplejson" )
-import logging, os, string, sys, tempfile, glob, shutil, types, urllib
+import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess
import simplejson
import binascii
from UserDict import DictMixin
@@ -395,6 +395,11 @@
self.redirect_url_params = ''
# Short description of the tool
self.description = util.xml_text(root, "description")
+ # Versioning for tools
+ self.version_string_cmd = None
+ version_cmd = root.find("version_command")
+ if version_cmd is not None:
+ self.version_string_cmd = version_cmd.text
# Parallelism for tasks, read from tool config.
parallelism = root.find("parallelism")
if parallelism is not None and parallelism.get("method"):
@@ -922,8 +927,6 @@
if not self.check_values:
return
for input in self.inputs.itervalues():
- if input.name not in value:
- value[input.name] = input.get_initial_value( None, value )
if isinstance( input, ToolParameter ):
callback( "", input, value[input.name] )
else:
@@ -1460,6 +1463,11 @@
elif isinstance( input, SelectToolParameter ):
input_values[ input.name ] = SelectToolParameterWrapper(
input, input_values[ input.name ], self.app, other_values = param_dict )
+
+ elif isinstance( input, LibraryDatasetToolParameter ):
+ input_values[ input.name ] = LibraryDatasetValueWrapper(
+ input, input_values[ input.name ], param_dict )
+
else:
input_values[ input.name ] = InputValueWrapper(
input, input_values[ input.name ], param_dict )
@@ -2025,6 +2033,31 @@
def __getattr__( self, key ):
return getattr( self.obj, key )
+class LibraryDatasetValueWrapper( object ):
+ """
+ Wraps an input so that __str__ gives the "param_dict" representation.
+ """
+ def __init__( self, input, value, other_values={} ):
+ self.input = input
+ self.value = value
+ self._other_values = other_values
+ def __str__( self ):
+ return self.value.name
+ def templates( self ):
+ if not self.value:
+ return None
+ template_data = {}
+ for temp_info in self.value.info_association:
+ template = temp_info.template
+ content = temp_info.info.content
+ tmp_dict = {}
+ for field in template.fields:
+ tmp_dict[field['label']] = content[field['name']]
+ template_data[template.name] = tmp_dict
+ return template_data
+ def __getattr__( self, key ):
+ return getattr( self.value, key )
+
class InputValueWrapper( object ):
"""
Wraps an input so that __str__ gives the "param_dict" representation.
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -748,6 +748,9 @@
# Dependency on a value that does not yet exist
if isinstance( dep_value, RuntimeValue ):
return True
+ #dataset not ready yet
+ if hasattr( self, 'ref_input' ) and isinstance( dep_value, self.tool.app.model.HistoryDatasetAssociation ) and ( dep_value.is_pending or not isinstance( dep_value.datatype, self.ref_input.formats ) ):
+ return True
# Dynamic, but all dependencies are known and have values
return False
def get_initial_value( self, trans, context ):
@@ -878,6 +881,7 @@
self.force_select = string_as_bool( elem.get( "force_select", True ))
self.accept_default = string_as_bool( elem.get( "accept_default", False ))
self.data_ref = elem.get( "data_ref", None )
+ self.ref_input = None
self.default_value = elem.get( "default_value", None )
self.is_dynamic = True
def from_html( self, value, trans=None, context={} ):
@@ -973,7 +977,7 @@
if not dataset.metadata.columns:
# Only allow late validation if the dataset is not yet ready
# (since we have reason to expect the metadata to be ready eventually)
- if dataset.is_pending:
+ if dataset.is_pending or not isinstance( dataset.datatype, self.ref_input.formats ):
return True
# No late validation
return False
@@ -1268,7 +1272,7 @@
displayed as radio buttons and multiple selects as a set of checkboxes
TODO: The following must be fixed to test correctly for the new security_check tag in the DataToolParameter ( the last test below is broken )
- Nate's next passs at the dataset security stuff will dramatically alter this anyway.
+ Nate's next pass at the dataset security stuff will dramatically alter this anyway.
"""
def __init__( self, tool, elem ):
@@ -1353,7 +1357,7 @@
selected = ( value and ( hda in value ) )
field.add_option( "%s: %s" % ( hid, hda_name ), hda.id, selected )
else:
- target_ext, converted_dataset = hda.find_conversion_destination( self.formats, converter_safe = self.converter_safe( other_values, trans ) )
+ target_ext, converted_dataset = hda.find_conversion_destination( self.formats )
if target_ext:
if converted_dataset:
hda = converted_dataset
@@ -1402,13 +1406,22 @@
pass #no valid options
def dataset_collector( datasets ):
def is_convertable( dataset ):
- target_ext, converted_dataset = dataset.find_conversion_destination( self.formats, converter_safe = self.converter_safe( context, trans ) )
+ target_ext, converted_dataset = dataset.find_conversion_destination( self.formats )
if target_ext is not None:
return True
return False
for i, data in enumerate( datasets ):
- if data.visible and not data.deleted and data.state not in [data.states.ERROR, data.states.DISCARDED] and ( isinstance( data.datatype, self.formats) or is_convertable( data ) ):
- if self.options and self._options_filter_attribute( data ) != filter_value:
+ if data.visible and not data.deleted and data.state not in [data.states.ERROR, data.states.DISCARDED]:
+ is_valid = False
+ if isinstance( data.datatype, self.formats ):
+ is_valid = True
+ else:
+ target_ext, converted_dataset = data.find_conversion_destination( self.formats )
+ if target_ext:
+ is_valid = True
+ if converted_dataset:
+ data = converted_dataset
+ if not is_valid or ( self.options and self._options_filter_attribute( data ) != filter_value ):
continue
most_recent_dataset[0] = data
# Also collect children via association object
@@ -1509,6 +1522,38 @@
ref = ref()
return ref
+class LibraryDatasetToolParameter( ToolParameter ):
+ """
+ Parameter that lets users select a LDDA from a modal window, then use it within the wrapper.
+ """
+
+ def __init__( self, tool, elem ):
+ ToolParameter.__init__( self, tool, elem )
+
+ def get_html_field( self, trans=None, value=None, other_values={} ):
+ return form_builder.LibraryField( self.name, value=value, trans=trans )
+
+ def get_initial_value( self, trans, context ):
+ return None
+
+ def from_html( self, value, trans, other_values={} ):
+ if not value:
+ return None
+ elif isinstance( value, trans.app.model.LibraryDatasetDatasetAssociation ):
+ return value
+ else:
+ return trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( value ) )
+
+ def to_string( self, value, app ):
+ if not value:
+ return None
+ return value.id
+
+ def to_python( self, value, app ):
+ if not value:
+ return value
+ return app.model.context.query( app.model.LibraryDatasetDatasetAssociation ).get( value )
+
# class RawToolParameter( ToolParameter ):
# """
# Completely nondescript parameter, HTML representation is provided as text
@@ -1557,19 +1602,20 @@
# self.html = form_builder.HiddenField( self.name, trans.history.id ).get_html()
# return self.html
-parameter_types = dict( text = TextToolParameter,
- integer = IntegerToolParameter,
- float = FloatToolParameter,
- boolean = BooleanToolParameter,
- genomebuild = GenomeBuildParameter,
- select = SelectToolParameter,
- data_column = ColumnListParameter,
- hidden = HiddenToolParameter,
- baseurl = BaseURLToolParameter,
- file = FileToolParameter,
- ftpfile = FTPFileToolParameter,
- data = DataToolParameter,
- drill_down = DrillDownSelectToolParameter )
+parameter_types = dict( text = TextToolParameter,
+ integer = IntegerToolParameter,
+ float = FloatToolParameter,
+ boolean = BooleanToolParameter,
+ genomebuild = GenomeBuildParameter,
+ select = SelectToolParameter,
+ data_column = ColumnListParameter,
+ hidden = HiddenToolParameter,
+ baseurl = BaseURLToolParameter,
+ file = FileToolParameter,
+ ftpfile = FTPFileToolParameter,
+ data = DataToolParameter,
+ library_data = LibraryDatasetToolParameter,
+ drill_down = DrillDownSelectToolParameter )
class UnvalidatedValue( object ):
"""
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -127,7 +127,8 @@
'@' : '__at__',
'\n' : '__cn__',
'\r' : '__cr__',
- '\t' : '__tc__'
+ '\t' : '__tc__',
+ '#' : '__pd__'
}
def restore_text(text):
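
With '#' added to the table, a pound sign now survives the sanitize/restore round trip like the other mapped characters; a standalone toy sketch of the mapping (illustrative helpers, not the actual galaxy.util functions):

    # Excerpt of the table above: keys are raw characters, values their encodings.
    mapped_chars = { '#' : '__pd__', '\t' : '__tc__' }

    def sanitize( text ):
        for char, code in mapped_chars.items():
            text = text.replace( char, code )
        return text

    def restore( text ):
        for char, code in mapped_chars.items():
            text = text.replace( code, char )
        return text

    assert sanitize( 'track #1' ) == 'track __pd__1'
    assert restore( 'track __pd__1' ) == 'track #1'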
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -72,10 +72,11 @@
# Override.
pass
- def get_data( self, chrom, start, end, max_vals=None, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=None, **kwargs ):
"""
- Returns data in region defined by chrom, start, and end. If max_vals
- is set, returns at most max_vals.
+ Returns data in region defined by chrom, start, and end. start_val and
+ max_vals are used to denote the data to return: start_val is the first value to
+ return and max_vals indicates the number of values to return.
"""
# Override.
pass
@@ -215,7 +216,7 @@
# Cleanup.
bamfile.close()
- def get_data( self, chrom, start, end, max_vals=sys.maxint, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=sys.maxint, **kwargs ):
"""
Fetch reads in the region.
@@ -253,8 +254,10 @@
# Encode reads as list of lists.
results = []
paired_pending = {}
- for read in data:
- if len(results) > max_vals:
+ for count, read in enumerate( data ):
+ if count < start_val:
+ continue
+ if count-start_val >= max_vals:
message = ERROR_MAX_VALS % ( max_vals, "reads" )
break
qname = read.qname
@@ -319,7 +322,7 @@
f.close()
return all_dat is not None
- def get_data( self, chrom, start, end, max_vals=None, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=None, **kwargs ):
# Bigwig has the possibility of it being a standalone bigwig file, in which case we use
# original_dataset, or coming from wig->bigwig conversion in which we use converted_dataset
f, bbi = self._get_dataset()
@@ -409,7 +412,7 @@
'type': 'int',
'index': filter_col,
'tool_id': 'Filter1',
- 'tool_exp_name': 'c5' } ]
+ 'tool_exp_name': 'c6' } ]
filter_col += 1
if isinstance( self.original_dataset.datatype, Gtf ):
# Create filters based on dataset metadata.
@@ -481,11 +484,14 @@
return tabix.fetch(reference=chrom, start=start, end=end)
- def get_data( self, chrom, start, end, max_vals=None, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=None, **kwargs ):
iterator = self.get_iterator( chrom, start, end )
- return self.process_data( iterator, max_vals, **kwargs )
-
+ return self.process_data( iterator, start_val, max_vals, **kwargs )
+
class IntervalIndexDataProvider( FilterableMixin, TracksDataProvider ):
+ """
+ Interval index files used only for GFF files.
+ """
col_name_data_attr_mapping = { 4 : { 'index': 4 , 'name' : 'Score' } }
def write_data_to_file( self, chrom, start, end, filename ):
@@ -501,12 +507,11 @@
out.write(interval.raw_line + '\n')
out.close()
- def get_data( self, chrom, start, end, max_vals=sys.maxint, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=sys.maxint, **kwargs ):
start, end = int(start), int(end)
source = open( self.original_dataset.file_name )
index = Indexes( self.converted_dataset.file_name )
results = []
- count = 0
message = None
# If chrom is not found in indexes, try removing the first three
@@ -525,14 +530,15 @@
#
filter_cols = from_json_string( kwargs.get( "filter_cols", "[]" ) )
no_detail = ( "no_detail" in kwargs )
- for start, end, offset in index.find(chrom, start, end):
- if count >= max_vals:
+ for count, val in enumerate( index.find(chrom, start, end) ):
+ start, end, offset = val[0], val[1], val[2]
+ if count < start_val:
+ continue
+ if count-start_val >= max_vals:
message = ERROR_MAX_VALS % ( max_vals, "features" )
break
- count += 1
source.seek( offset )
# TODO: can we use column metadata to fill out payload?
- # TODO: use function to set payload data
# GFF dataset.
reader = GFFReaderWrapper( source, fix_strand=True )
@@ -549,7 +555,7 @@
Payload format: [ uid (offset), start, end, name, strand, thick_start, thick_end, blocks ]
"""
- def process_data( self, iterator, max_vals=sys.maxint, **kwargs ):
+ def process_data( self, iterator, start_val=0, max_vals=sys.maxint, **kwargs ):
#
# Build data to return. Payload format is:
# [ <guid/offset>, <start>, <end>, <name>, <score>, <strand>, <thick_start>,
@@ -559,14 +565,14 @@
#
filter_cols = from_json_string( kwargs.get( "filter_cols", "[]" ) )
no_detail = ( "no_detail" in kwargs )
- count = 0
rval = []
message = None
- for line in iterator:
- if count >= max_vals:
+ for count, line in enumerate( iterator ):
+ if count < start_val:
+ continue
+ if count-start_val >= max_vals:
message = ERROR_MAX_VALS % ( max_vals, "features" )
break
- count += 1
# TODO: can we use column metadata to fill out payload?
# TODO: use function to set payload data
@@ -625,16 +631,16 @@
col_name_data_attr_mapping = { 'Qual' : { 'index': 6 , 'name' : 'Qual' } }
- def process_data( self, iterator, max_vals=sys.maxint, **kwargs ):
+ def process_data( self, iterator, start_val=0, max_vals=sys.maxint, **kwargs ):
rval = []
- count = 0
message = None
- for line in iterator:
- if count >= max_vals:
+ for count, line in enumerate( iterator ):
+ if count < start_val:
+ continue
+ if count-start_val >= max_vals:
message = ERROR_MAX_VALS % ( "max_vals", "features" )
break
- count += 1
feature = line.split()
payload = [ hash(line), int(feature[1])-1, int(feature[1]),
@@ -657,22 +663,23 @@
NOTE: this data provider does not use indices, and hence will be very slow
for large datasets.
"""
- def get_data( self, chrom, start, end, max_vals=sys.maxint, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=sys.maxint, **kwargs ):
start, end = int( start ), int( end )
source = open( self.original_dataset.file_name )
results = []
- count = 0
message = None
offset = 0
- for feature in GFFReaderWrapper( source, fix_strand=True ):
+ for count, feature in enumerate( GFFReaderWrapper( source, fix_strand=True ) ):
+ if count < start_val:
+ continue
+ if count-start_val >= max_vals:
+ message = ERROR_MAX_VALS % ( max_vals, "reads" )
+ break
+
feature_start, feature_end = convert_gff_coords_to_bed( [ feature.start, feature.end ] )
if feature.chrom != chrom or feature_start < start or feature_end > end:
continue
- if count >= max_vals:
- message = ERROR_MAX_VALS % ( max_vals, "features" )
- break
- count += 1
payload = package_gff_feature( feature )
payload.insert( 0, offset )
results.append( payload )
@@ -700,6 +707,7 @@
"""
Returns data provider class by name and/or original dataset.
"""
+ data_provider = None
if name:
value = dataset_type_name_to_data_provider[ name ]
if isinstance( value, dict ):
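
The start_val/max_vals changes throughout this file all follow one pattern: skip the first start_val items of an iterator, then take at most max_vals. In isolation (generic names, not the Galaxy API):

    # Generic form of the windowed scan used by the providers above.
    def window( iterator, start_val=0, max_vals=None ):
        results = []
        for count, item in enumerate( iterator ):
            if count < start_val:
                continue
            if max_vals is not None and count - start_val >= max_vals:
                break
            results.append( item )
        return results

    print window( xrange( 100 ), start_val=10, max_vals=5 )  # [10, 11, 12, 13, 14]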
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py
+++ b/lib/galaxy/web/controllers/history.py
@@ -56,15 +56,14 @@
grids.GridColumn( "Created", key="create_time", format=time_ago ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
# Columns that are valid for filtering but are not visible.
- grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+ grids.DeletedColumn( "Status", key="deleted", visible=False, filterable="advanced" )
]
- columns.append(
- grids.MulticolFilterColumn(
- "search history names and tags",
- cols_to_filter=[ columns[0], columns[2] ],
+ columns.append(
+ grids.MulticolFilterColumn(
+ "search history names and tags",
+ cols_to_filter=[ columns[0], columns[2] ],
key="free-text-search", visible=False, filterable="standard" )
)
-
operations = [
grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
@@ -125,11 +124,11 @@
return trans.sa_session.query( self.model_class ).join( 'users_shared_with' )
def apply_query_filter( self, trans, query, **kwargs ):
return query.filter( model.HistoryUserShareAssociation.user == trans.user )
-
+
class HistoryAllPublishedGrid( grids.Grid ):
class NameURLColumn( grids.PublicURLColumn, NameColumn ):
pass
-
+
title = "Published Histories"
model_class = model.History
default_sort_key = "update_time"
@@ -138,15 +137,15 @@
columns = [
NameURLColumn( "Name", key="name", filterable="advanced" ),
grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.HistoryAnnotationAssociation, filterable="advanced" ),
- grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
+ grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
grids.CommunityRatingColumn( "Community Rating", key="rating" ),
grids.CommunityTagsColumn( "Community Tags", key="tags", model_tag_association_class=model.HistoryTagAssociation, filterable="advanced", grid_name="PublicHistoryListGrid" ),
grids.ReverseSortColumn( "Last Updated", key="update_time", format=time_ago )
]
- columns.append(
- grids.MulticolFilterColumn(
- "Search name, annotation, owner, and tags",
- cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
+ columns.append(
+ grids.MulticolFilterColumn(
+ "Search name, annotation, owner, and tags",
+ cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
key="free-text-search", visible=False, filterable="standard" )
)
operations = []
@@ -156,7 +155,7 @@
def apply_query_filter( self, trans, query, **kwargs ):
# A public history is published, has a slug, and is not deleted.
return query.filter( self.model_class.published == True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )
-
+
class HistoryController( BaseController, Sharable, UsesAnnotations, UsesItemRatings, UsesHistory ):
@web.expose
def index( self, trans ):
@@ -166,11 +165,11 @@
"""XML history list for functional tests"""
trans.response.set_content_type( 'text/xml' )
return trans.fill_template( "/history/list_as_xml.mako" )
-
+
stored_list_grid = HistoryListGrid()
shared_list_grid = SharedHistoryListGrid()
published_list_grid = HistoryAllPublishedGrid()
-
+
@web.expose
def list_published( self, trans, **kwargs ):
grid = self.published_list_grid( trans, **kwargs )
@@ -179,7 +178,7 @@
else:
# Render grid wrapped in panels
return trans.fill_template( "history/list_published.mako", grid=grid )
-
+
@web.expose
@web.require_login( "work with multiple histories" )
def list( self, trans, **kwargs ):
@@ -200,7 +199,7 @@
refresh_history = False
# Load the histories and ensure they all belong to the current user
histories = []
- for history_id in history_ids:
+ for history_id in history_ids:
history = self.get_history( trans, history_id )
if history:
# Ensure history is owned by current user
@@ -209,18 +208,18 @@
histories.append( history )
else:
log.warn( "Invalid history id '%r' passed to list", history_id )
- if histories:
+ if histories:
if operation == "switch":
status, message = self._list_switch( trans, histories )
- # Take action to update UI to reflect history switch. If
+ # Take action to update UI to reflect history switch. If
# grid is using panels, it is standalone and hence a redirect
# to root is needed; if grid is not using panels, it is nested
- # in the main Galaxy UI and refreshing the history frame
+ # in the main Galaxy UI and refreshing the history frame
# is sufficient.
use_panels = kwargs.get('use_panels', False) == 'True'
if use_panels:
return trans.response.send_redirect( url_for( "/" ) )
- else:
+ else:
trans.template_context['refresh_frames'] = ['history']
elif operation in ( "delete", "delete and remove datasets from disk" ):
if operation == "delete and remove datasets from disk":
@@ -338,7 +337,7 @@
trans.set_history( new_history )
# No message
return None, None
-
+
@web.expose
@web.require_login( "work with shared histories" )
def list_shared( self, trans, **kwargs ):
@@ -373,7 +372,7 @@
status = 'done'
# Render the list view
return self.shared_list_grid( trans, status=status, message=message, **kwargs )
-
+
@web.expose
def display_structured( self, trans, id=None ):
"""
@@ -444,7 +443,7 @@
items.sort( key=( lambda x: x[0].create_time ), reverse=True )
#
return trans.fill_template( "history/display_structured.mako", items=items )
-
+
@web.expose
def delete_current( self, trans ):
"""Delete just the active history -- this does not require a logged in user."""
@@ -456,25 +455,22 @@
trans.sa_session.add( history )
trans.sa_session.flush()
trans.log_event( "History id %d marked as deleted" % history.id )
- # Regardless of whether it was previously deleted, we make a new history active
+ # Regardless of whether it was previously deleted, we make a new history active
trans.new_history()
- return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
-
+ return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
+
@web.expose
@web.require_login( "rate items" )
@web.json
def rate_async( self, trans, id, rating ):
""" Rate a history asynchronously and return updated community data. """
-
history = self.get_history( trans, id, check_ownership=False, check_accessible=True )
if not history:
return trans.show_error_message( "The specified history does not exist." )
-
# Rate history.
history_rating = self.rate_item( trans.sa_session, trans.get_user(), history, rating )
-
return self.get_ave_item_rating_data( trans.sa_session, history )
-
+
@web.expose
def rename_async( self, trans, id=None, new_name=None ):
history = self.get_history( trans, id )
@@ -490,7 +486,7 @@
trans.sa_session.add( history )
trans.sa_session.flush()
return history.name
-
+
@web.expose
@web.require_login( "use Galaxy histories" )
def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
@@ -503,12 +499,11 @@
return new_annotation
@web.expose
- # TODO: Remove require_login when users are warned that, if they are not
+ # TODO: Remove require_login when users are warned that, if they are not
# logged in, this will remove their current history.
@web.require_login( "use Galaxy histories" )
def import_archive( self, trans, **kwargs ):
""" Import a history from a file archive. """
-
# Set archive source and type.
archive_file = kwargs.get( 'archive_file', None )
archive_url = kwargs.get( 'archive_url', None )
@@ -519,37 +514,34 @@
elif archive_url:
archive_source = archive_url
archive_type = 'url'
-
# If no source to create archive from, show form to upload archive or specify URL.
if not archive_source:
- return trans.show_form(
+ return trans.show_form(
web.FormBuilder( web.url_for(), "Import a History from an Archive", submit_text="Submit" ) \
.add_input( "text", "Archived History URL", "archive_url", value="", error=None )
# TODO: add support for importing via a file.
- #.add_input( "file", "Archived History File", "archive_file", value=None, error=None )
+ #.add_input( "file", "Archived History File", "archive_file", value=None, error=None )
)
-
# Run job to do import.
history_imp_tool = trans.app.toolbox.tools_by_id[ '__IMPORT_HISTORY__' ]
incoming = { '__ARCHIVE_SOURCE__' : archive_source, '__ARCHIVE_TYPE__' : archive_type }
history_imp_tool.execute( trans, incoming=incoming )
return trans.show_message( "Importing history from '%s'. \
This history will be visible when the import is complete" % archive_source )
-
- @web.expose
+
+ @web.expose
def export_archive( self, trans, id=None, gzip=True, include_hidden=False, include_deleted=False ):
""" Export a history to an archive. """
-
- #
+ #
# Convert options to booleans.
#
if isinstance( gzip, basestring ):
- gzip = ( gzip in [ 'True', 'true', 'T', 't' ] )
+ gzip = ( gzip in [ 'True', 'true', 'T', 't' ] )
if isinstance( include_hidden, basestring ):
include_hidden = ( include_hidden in [ 'True', 'true', 'T', 't' ] )
if isinstance( include_deleted, basestring ):
- include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] )
-
+ include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] )
+
#
# Get history to export.
#
@@ -559,10 +551,10 @@
# Use current history.
history = trans.history
id = trans.security.encode_id( history.id )
-
+
if not history:
return trans.show_error_message( "This history does not exist or you cannot export this history." )
-
+
#
# If history has already been exported and it has not changed since export, stream it.
#
@@ -585,40 +577,38 @@
elif jeha.job.state in [ model.Job.states.RUNNING, model.Job.states.QUEUED, model.Job.states.WAITING ]:
return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>" \
% ( { 'n' : history.name, 's' : url_for( action="export_archive", id=id, qualified=True ) } ) )
-
+
# Run job to do export.
history_exp_tool = trans.app.toolbox.tools_by_id[ '__EXPORT_HISTORY__' ]
- params = {
- 'history_to_export' : history,
- 'compress' : gzip,
- 'include_hidden' : include_hidden,
+ params = {
+ 'history_to_export' : history,
+ 'compress' : gzip,
+ 'include_hidden' : include_hidden,
'include_deleted' : include_deleted }
history_exp_tool.execute( trans, incoming = params, set_output_hid = True )
return trans.show_message( "Exporting History '%(n)s'. Use this link to download \
the archive or import it to another Galaxy server: \
<a href='%(u)s'>%(u)s</a>" \
% ( { 'n' : history.name, 'u' : url_for( action="export_archive", id=id, qualified=True ) } ) )
-
+
@web.expose
@web.json
@web.require_login( "get history name and link" )
def get_name_and_link_async( self, trans, id=None ):
""" Returns history's name and link. """
history = self.get_history( trans, id, False )
-
if self.create_item_slug( trans.sa_session, history ):
trans.sa_session.flush()
- return_dict = {
- "name" : history.name,
+ return_dict = {
+ "name" : history.name,
"link" : url_for( action="display_by_username_and_slug", username=history.user.username, slug=history.slug ) }
return return_dict
-
+
@web.expose
@web.require_login( "set history's accessible flag" )
def set_accessible_async( self, trans, id=None, accessible=False ):
""" Set history's importable attribute and slug. """
history = self.get_history( trans, id, True )
-
# Only set if importable value would change; this prevents a change in the update_time unless attribute really changed.
importable = accessible in ['True', 'true', 't', 'T']
if history and history.importable != importable:
@@ -627,7 +617,6 @@
else:
history.importable = importable
trans.sa_session.flush()
-
return
@web.expose
@@ -638,7 +627,7 @@
history.slug = new_slug
trans.sa_session.flush()
return history.slug
-
+
@web.expose
def get_item_content_async( self, trans, id ):
""" Returns item content in HTML format. """
@@ -646,7 +635,7 @@
history = self.get_history( trans, id, False, True )
if history is None:
raise web.httpexceptions.HTTPNotFound()
-
+
# Get datasets.
datasets = self.get_history_datasets( trans, history )
# Get annotations.
@@ -654,7 +643,7 @@
for dataset in datasets:
dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
return trans.stream_template_mako( "/history/item_content.mako", item = history, item_data = datasets )
-
+
@web.expose
def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
"""Return autocomplete data for history names"""
@@ -666,7 +655,7 @@
for history in trans.sa_session.query( model.History ).filter_by( user=user ).filter( func.lower( model.History.name ) .like(q.lower() + "%") ):
ac_data = ac_data + history.name + "\n"
return ac_data
-
+
@web.expose
def imp( self, trans, id=None, confirm=False, **kwd ):
"""Import another user's history via a shared URL"""
@@ -682,7 +671,7 @@
referer_message = "<a href='%s'>return to the previous page</a>" % referer
else:
referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
-
+
# Do import.
if not id:
return trans.show_error_message( "You must specify a history you want to import.<br>You can %s." % referer_message, use_panels=True )
@@ -712,7 +701,7 @@
# Set imported history to be user's current history.
trans.set_history( new_history )
return trans.show_ok_message(
- message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
+ message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
% ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
elif not user_history or not user_history.datasets or confirm:
new_history = import_history.copy()
@@ -730,13 +719,13 @@
trans.sa_session.flush()
trans.set_history( new_history )
return trans.show_ok_message(
- message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
+ message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s."""
% ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True )
return trans.show_warn_message( """
Warning! If you import this history, you will lose your current
history. <br>You can <a href="%s">continue and import this history</a> or %s.
""" % ( web.url_for( id=id, confirm=True, referer=trans.request.referer ), referer_message ), use_panels=True )
-
+
@web.expose
def view( self, trans, id=None, show_deleted=False ):
"""View a history. If a history is importable, then it is viewable by any user."""
@@ -757,11 +746,11 @@
history = history_to_view,
datasets = datasets,
show_deleted = show_deleted )
-
+
@web.expose
def display_by_username_and_slug( self, trans, username, slug ):
- """ Display history based on a username and slug. """
-
+ """ Display history based on a username and slug. """
+
# Get history.
session = trans.sa_session
user = session.query( model.User ).filter_by( username=username ).first()
@@ -770,14 +759,14 @@
raise web.httpexceptions.HTTPNotFound()
# Security check raises error if user cannot access history.
self.security_check( trans.get_user(), history, False, True)
-
+
# Get datasets.
datasets = self.get_history_datasets( trans, history )
# Get annotations.
history.annotation = self.get_item_annotation_str( trans.sa_session, history.user, history )
for dataset in datasets:
dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
-
+
# Get rating data.
user_item_rating = 0
if trans.get_user():
@@ -787,9 +776,9 @@
else:
user_item_rating = 0
ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, history )
- return trans.stream_template_mako( "history/display.mako", item = history, item_data = datasets,
+ return trans.stream_template_mako( "history/display.mako", item = history, item_data = datasets,
user_item_rating = user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )
-
+
@web.expose
@web.require_login( "share Galaxy histories" )
def sharing( self, trans, id=None, histories=[], **kwargs ):
@@ -804,7 +793,7 @@
histories = [ self.get_history( trans, history_id ) for history_id in ids ]
elif not histories:
histories = [ trans.history ]
-
+
# Do operation on histories.
for history in histories:
if 'make_accessible_via_link' in kwargs:
@@ -837,17 +826,17 @@
message = "History '%s' does not seem to be shared with user '%s'" % ( history.name, user.email )
return trans.fill_template( '/sharing_base.mako', item=history,
message=message, status='error' )
-
-
+
+
# Legacy issue: histories made accessible before recent updates may not have a slug. Create a slug for any history that needs one.
for history in histories:
if history.importable and not history.slug:
self._make_item_accessible( trans.sa_session, history )
-
+
session.flush()
-
+
return trans.fill_template( "/sharing_base.mako", item=history )
-
+
@web.expose
@web.require_login( "share histories with other users" )
def share( self, trans, id=None, email="", **kwd ):
@@ -890,11 +879,11 @@
send_to_err = "The histories you are sharing do not contain any datasets that can be accessed by the users with which you are sharing."
return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
if can_change or cannot_change:
- return trans.fill_template( "/history/share.mako",
- histories=histories,
- email=email,
- send_to_err=send_to_err,
- can_change=can_change,
+ return trans.fill_template( "/history/share.mako",
+ histories=histories,
+ email=email,
+ send_to_err=send_to_err,
+ can_change=can_change,
cannot_change=cannot_change,
no_change_needed=unique_no_change_needed )
if no_change_needed:
@@ -903,11 +892,11 @@
# User seems to be sharing an empty history
send_to_err = "You cannot share an empty history. "
return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
-
+
@web.expose
@web.require_login( "share restricted histories with other users" )
def share_restricted( self, trans, id=None, email="", **kwd ):
- if 'action' in kwd:
+ if 'action' in kwd:
action = kwd[ 'action' ]
else:
err_msg = "Select an action. "
@@ -938,10 +927,10 @@
# The action here is either 'public' or 'private', so we'll continue to populate the
# histories_for_sharing dictionary from the can_change dictionary.
for send_to_user, history_dict in can_change.items():
- for history in history_dict:
+ for history in history_dict:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -954,7 +943,7 @@
# The user with which we are sharing the history does not have access permission on the current dataset
if trans.app.security_agent.can_manage_dataset( user_roles, hda.dataset ) and not hda.dataset.library_associations:
# The current user has authority to change permissions on the current dataset because
- # they have permission to manage permissions on the dataset and the dataset is not associated
+ # they have permission to manage permissions on the dataset and the dataset is not associated
# with a library.
if action == "private":
trans.app.security_agent.privately_share_dataset( hda.dataset, users=[ user, send_to_user ] )
@@ -986,7 +975,7 @@
send_to_user = trans.sa_session.query( trans.app.model.User ) \
.filter( and_( trans.app.model.User.table.c.email==email_address,
trans.app.model.User.table.c.deleted==False ) ) \
- .first()
+ .first()
if send_to_user:
send_to_users.append( send_to_user )
else:
@@ -1004,7 +993,7 @@
for history in history_dict:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -1019,7 +1008,7 @@
# The user may be attempting to share histories whose datasets cannot all be accessed by other users.
# If this is the case, the user sharing the histories can:
# 1) action=='public': choose to make the datasets public if he is permitted to do so
- # 2) action=='private': automatically create a new "sharing role" allowing protected
+ # 2) action=='private': automatically create a new "sharing role" allowing protected
# datasets to be accessed only by the desired users
# This method will populate the can_change, cannot_change and no_change_needed dictionaries, which
# are used for either displaying to the user, letting them make 1 of the choices above, or sharing
@@ -1036,7 +1025,7 @@
for send_to_user in send_to_users:
# Make sure the current history has not already been shared with the current send_to_user
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+ .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
.count() > 0:
send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
@@ -1125,7 +1114,7 @@
if send_to_err:
msg += send_to_err
return self.sharing( trans, histories=shared_histories, msg=msg )
-
+
@web.expose
@web.require_login( "rename histories" )
def rename( self, trans, id=None, name=None, **kwd ):
@@ -1164,7 +1153,7 @@
else:
change_msg = change_msg + "<p>History: "+cur_names[i]+" does not appear to belong to you.</p>"
return trans.show_message( "<p>%s" % change_msg, refresh_frames=['history'] )
-
+
@web.expose
@web.require_login( "clone shared Galaxy history" )
def clone( self, trans, id=None, **kwd ):
@@ -1207,13 +1196,11 @@
else:
msg = '%d cloned histories are now included in your previously stored histories.' % len( histories )
return trans.show_ok_message( msg )
-
+
@web.expose
@web.require_login( "switch to a history" )
def switch_to_history( self, trans, hist_id=None ):
decoded_id = trans.security.decode_id(hist_id)
hist = trans.sa_session.query( trans.app.model.History ).get( decoded_id )
-
trans.set_history( hist )
return trans.response.send_redirect( url_for( "/" ) )
-
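
The export_archive handler above coerces its string query parameters to booleans by hand. A minimal sketch of that idiom factored into a helper; the codebase's string_as_bool (used in config.py further down) has this shape, though the version here is a simplified reconstruction rather than the shipped code:

    def string_as_bool( value ):
        # Query-string parameters arrive as strings, so bool( 'False' )
        # would be True; compare against the accepted spellings instead.
        if isinstance( value, basestring ):
            return value.lower() in [ 'true', 't' ]
        return bool( value )

    # Mirrors the conversions in export_archive:
    gzip = string_as_bool( 'True' )          # True
    include_hidden = string_as_bool( 'f' )   # False
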
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/controllers/mobile.py
--- a/lib/galaxy/web/controllers/mobile.py
+++ b/lib/galaxy/web/controllers/mobile.py
@@ -47,7 +47,7 @@
error = password_error = None
user = trans.sa_session.query( model.User ).filter_by( email = email ).first()
if not user:
- error = "No such user"
+ error = "No such user (please note that login is case sensitive)"
elif user.deleted:
error = "This account has been marked deleted, contact your Galaxy administrator to restore the account."
elif user.external:
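
The reworded login error exists because the lookup above filters on the raw email string, so addresses differing only in case do not match. Purely to illustrate the alternative (this is not what the changeset does), a case-insensitive lookup would lower-case both sides with the same SQLAlchemy func helper the history controller's name_autocomplete_data already uses:

    from sqlalchemy import func

    def find_user_case_insensitive( sa_session, User, email ):
        # Hypothetical variant; the shipped code keeps the exact match
        # and warns the user that login is case sensitive.
        return sa_session.query( User ) \
                         .filter( func.lower( User.email ) == email.lower() ) \
                         .first()
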
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -469,7 +469,7 @@
return { "status": messages.DATA, "valid_chroms": valid_chroms }
@web.json
- def data( self, trans, hda_ldda, dataset_id, chrom, low, high, max_vals=5000, **kwargs ):
+ def data( self, trans, hda_ldda, dataset_id, chrom, low, high, start_val=0, max_vals=5000, **kwargs ):
"""
Provides a block of data from a dataset.
"""
@@ -526,7 +526,7 @@
data_provider = data_provider_class( converted_dataset=converted_dataset, original_dataset=dataset, dependencies=deps )
# Get and return data from data_provider.
- data = data_provider.get_data( chrom, low, high, max_vals, **kwargs )
+ data = data_provider.get_data( chrom, low, high, int(start_val), int(max_vals), **kwargs )
message = None
if isinstance(data, dict) and 'message' in data:
message = data['message']
@@ -730,8 +730,7 @@
if run_on_region:
for jida in original_job.input_datasets:
input_dataset = jida.dataset
- # TODO: put together more robust way to determine if a dataset can be indexed.
- if hasattr( input_dataset, 'get_track_type' ):
+ if get_data_provider( original_dataset=input_dataset ):
# Can index dataset.
track_type, data_sources = input_dataset.datatype.get_track_type()
# Convert to datasource that provides 'data' because we need to
@@ -744,7 +743,7 @@
# Return any messages generated during conversions.
return_message = _get_highest_priority_msg( messages_list )
if return_message:
- return return_message
+ return to_json_string( return_message )
#
# Set target history (the history that tool will use for inputs/outputs).
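
The data() change above threads a new start_val offset through to the provider alongside max_vals, letting a client page through a large region in fixed-size chunks instead of truncating after the first max_vals features. A sketch of how a provider could honor the pair; ExampleDataProvider is hypothetical, while real providers read from converted datasets:

    class ExampleDataProvider( object ):
        def __init__( self, features ):
            # features: list of dicts with 'chrom', 'start', 'end' keys.
            self.features = features
        def get_data( self, chrom, low, high, start_val=0, max_vals=5000, **kwargs ):
            # Keep features overlapping [low, high), then return the
            # requested window: skip start_val rows, cap at max_vals.
            in_region = [ f for f in self.features
                          if f[ 'chrom' ] == chrom and f[ 'start' ] < high and f[ 'end' ] > low ]
            return in_region[ int( start_val ) : int( start_val ) + int( max_vals ) ]

    provider = ExampleDataProvider( [ { 'chrom' : 'chr1', 'start' : 10, 'end' : 20 } ] )
    chunk = provider.get_data( 'chr1', 0, 100, start_val=0, max_vals=5000 )
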
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py
+++ b/lib/galaxy/web/controllers/user.py
@@ -394,7 +394,7 @@
success = False
user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email==email ).first()
if not user:
- message = "No such user"
+ message = "No such user (please note that login is case sensitive)"
status = 'error'
elif user.deleted:
message = "This account has been marked deleted, contact your Galaxy administrator to restore the account."
@@ -914,6 +914,9 @@
@web.expose
def set_default_permissions( self, trans, cntrller, **kwd ):
"""Sets the user's default permissions for the new histories"""
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
if trans.user:
if 'update_roles_button' in kwd:
p = util.Params( kwd )
@@ -926,8 +929,11 @@
action = trans.app.security_agent.get_action( v.action ).action
permissions[ action ] = in_roles
trans.app.security_agent.user_set_default_permissions( trans.user, permissions )
- return trans.show_ok_message( 'Default new history permissions have been changed.' )
- return trans.fill_template( 'user/permissions.mako', cntrller=cntrller )
+ message = 'Default new history permissions have been changed.'
+ return trans.fill_template( 'user/permissions.mako',
+ cntrller=cntrller,
+ message=message,
+ status=status )
else:
# User not logged in, history group must be only public
return trans.show_error_message( "You must be logged in to change your default permitted actions." )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -652,6 +652,27 @@
return self.value
else:
return '-'
+
+class LibraryField( BaseField ):
+ def __init__( self, name, value=None, trans=None ):
+ self.name = name
+ self.ldda = value
+ self.trans = trans
+ def get_html( self, prefix="", disabled=False ):
+ if not self.ldda:
+ ldda = ""
+ text = "Choose a library dataset"
+ else:
+ ldda = self.trans.security.encode_id(self.ldda.id)
+ text = self.ldda.name
+ return '<a href="javascript:void(0);" class="add-librarydataset">%s</a> \
+ <input type="hidden" name="%s%s" value="%s">' % ( text, prefix, self.name, escape( str(ldda), quote=True ) )
+
+ def get_display_text(self):
+ if self.ldda:
+ return self.ldda.name
+ else:
+ return 'None'
def get_suite():
"""Get unittest suite for this module"""
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py
+++ b/lib/galaxy/web/framework/helpers/grids.py
@@ -631,7 +631,7 @@
if column_filter == "All":
pass
elif column_filter in [ "True", "False" ]:
- query = query.filter( self.model_class.deleted == column_filter )
+ query = query.filter( self.model_class.deleted == ( column_filter == "True" ) )
return query
class StateColumn( GridColumn ):
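
The one-line grids fix above is easy to miss: the old code compared a Boolean column to the string "True", so the filter never matched the stored boolean values. Comparing against ( column_filter == "True" ) binds a real boolean parameter. A standalone demonstration, with an Item model invented for the demo:

    from sqlalchemy import create_engine, Column, Integer, Boolean
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class Item( Base ):
        __tablename__ = 'item'
        id = Column( Integer, primary_key=True )
        deleted = Column( Boolean )

    engine = create_engine( 'sqlite://' )
    Base.metadata.create_all( engine )
    session = sessionmaker( bind=engine )()
    session.add_all( [ Item( deleted=True ), Item( deleted=False ) ] )
    session.commit()

    column_filter = "True"
    # Binds a boolean parameter, so exactly the deleted row is returned.
    rows = session.query( Item ).filter( Item.deleted == ( column_filter == "True" ) ).all()
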
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/web/framework/middleware/remoteuser.py
--- a/lib/galaxy/web/framework/middleware/remoteuser.py
+++ b/lib/galaxy/web/framework/middleware/remoteuser.py
@@ -36,7 +36,6 @@
"""
UCSC_MAIN_SERVERS = (
- 'omics.bhri.internal',
'hgw1.cse.ucsc.edu',
'hgw2.cse.ucsc.edu',
'hgw3.cse.ucsc.edu',
@@ -50,7 +49,6 @@
'lowepub.cse.ucsc.edu',
)
-
class RemoteUser( object ):
def __init__( self, app, maildomain=None, ucsc_display_sites=[], admin_users=[] ):
self.app = app
@@ -58,7 +56,7 @@
self.allow_ucsc_main = False
self.allow_ucsc_archaea = False
self.admin_users = admin_users
- if 'main' in ucsc_display_sites or 'test' in ucsc_display_sites or 'bhri' in ucsc_display_sites:
+ if 'main' in ucsc_display_sites or 'test' in ucsc_display_sites:
self.allow_ucsc_main = True
if 'archaea' in ucsc_display_sites:
self.allow_ucsc_archaea = True
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/app.py
--- a/lib/galaxy/webapps/community/app.py
+++ b/lib/galaxy/webapps/community/app.py
@@ -1,4 +1,6 @@
import sys, config
+import galaxy.tools.data
+import galaxy.datatypes.registry
import galaxy.webapps.community.model
from galaxy.web import security
from galaxy.tags.tag_handler import CommunityTagHandler
@@ -11,6 +13,8 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
+ # Set up datatypes registry
+ self.datatypes_registry = galaxy.datatypes.registry.Registry( self.config.root, self.config.datatypes_config )
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
@@ -28,6 +32,8 @@
self.security = security.SecurityHelper( id_secret=self.config.id_secret )
# Tag handler
self.tag_handler = CommunityTagHandler()
+ # Tool data tables
+ self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_table_config_path )
# Load security policy
self.security_agent = self.model.security_agent
def shutdown( self ):
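
The community app now builds a ToolDataTableManager from tool_data_table_conf.xml, the same file that add_tool_data_table_entry appends to further down. A minimal reader showing the shape of the XML involved; this is a hypothetical stand-in, not Galaxy's manager:

    from xml.etree import ElementTree

    def read_tool_data_tables( config_path ):
        # tool_data_table_conf.xml looks like:
        #   <tables>
        #     <table name="tmap_indexes" comment_char="#">
        #       <columns>value, dbkey, name, path</columns>
        #       <file path="tool-data/tmap_index.loc" />
        #     </table>
        #   </tables>
        tree = ElementTree.parse( config_path )
        return dict( ( table.get( 'name' ), table.find( 'columns' ).text )
                     for table in tree.getroot().findall( 'table' ) )
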
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -41,6 +41,9 @@
self.cookie_path = kwargs.get( "cookie_path", "/" )
self.test_conf = resolve_path( kwargs.get( "test_conf", "" ), self.root )
self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
+ self.tool_secret = kwargs.get( "tool_secret", "" )
+ self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() )
+ self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root )
self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
@@ -56,16 +59,18 @@
self.smtp_server = kwargs.get( 'smtp_server', None )
self.smtp_username = kwargs.get( 'smtp_username', None )
self.smtp_password = kwargs.get( 'smtp_password', None )
+ self.start_job_runners = kwargs.get( 'start_job_runners', None )
self.email_alerts_from = kwargs.get( 'email_alerts_from', None )
+ self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
self.log_events = False
self.cloud_controller_instance = False
- self.datatypes_config = kwargs.get( 'datatypes_config_file', 'community_datatypes_conf.xml' )
+ self.datatypes_config = kwargs.get( 'datatypes_config_file', 'datatypes_conf.xml' )
# Proxy features
self.apache_xsendfile = kwargs.get( 'apache_xsendfile', False )
self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
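
The new config knobs follow the file's resolve_path convention: a relative path is anchored at a base directory (self.root for most settings, though tool_data_path anchors at os.getcwd()), while an absolute path passes through untouched. A sketch of that helper as used here, reconstructed for illustration:

    import os

    def resolve_path( path, root ):
        # Anchor relative paths at root; leave absolute paths alone.
        if not os.path.isabs( path ):
            path = os.path.join( root, path )
        return path

    # resolve_path( 'tool_data_table_conf.xml', '/opt/galaxy' )
    #   -> '/opt/galaxy/tool_data_table_conf.xml'
    # resolve_path( '/abs/tool-data', '/opt/galaxy' ) -> '/abs/tool-data'
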
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -1,12 +1,13 @@
import os, string, socket, logging
from time import strftime
from datetime import *
+from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
from galaxy.model.orm import *
from galaxy.model.item_attrs import UsesItemRatings
-from mercurial import hg, ui
+from mercurial import hg, ui, commands
log = logging.getLogger( __name__ )
@@ -68,20 +69,223 @@
def get_repository( trans, id ):
"""Get a repository from the database via id"""
return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
+def get_repository_metadata( trans, id, changeset_revision ):
+ """Get metadata for a specified repository change set from the database"""
+ return trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
+ trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
+ .first()
+def set_repository_metadata( trans, id, change_set_revision, **kwd ):
+ """Set repository metadata"""
+ message = ''
+ status = 'done'
+ repository = get_repository( trans, id )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ change_set = get_change_set( trans, repo, change_set_revision )
+ invalid_files = []
+ flush_needed = False
+ if change_set is not None:
+ metadata_dict = {}
+ for root, dirs, files in os.walk( repo_dir ):
+ if not root.find( '.hg' ) >= 0 and not root.find( 'hgrc' ) >= 0:
+ if '.hg' in dirs:
+ # Don't visit .hg directories - should be impossible since we don't
+ # allow uploaded archives that contain .hg dirs, but just in case...
+ dirs.remove( '.hg' )
+ if 'hgrc' in files:
+ # Don't include hgrc files in commit.
+ files.remove( 'hgrc' )
+ for name in files:
+ # Find all tool configs.
+ if name.endswith( '.xml' ):
+ try:
+ full_path = os.path.abspath( os.path.join( root, name ) )
+ tool = load_tool( trans, full_path )
+ if tool is not None:
+ tool_requirements = []
+ for tr in tool.requirements:
+ requirement_dict = dict( name=tr.name,
+ type=tr.type,
+ version=tr.version )
+ tool_requirements.append( requirement_dict )
+ tool_tests = []
+ if tool.tests:
+ for ttb in tool.tests:
+ test_dict = dict( name=ttb.name,
+ required_files=ttb.required_files,
+ inputs=ttb.inputs,
+ outputs=ttb.outputs )
+ tool_tests.append( test_dict )
+ tool_dict = dict( id=tool.id,
+ name=tool.name,
+ version=tool.version,
+ description=tool.description,
+ version_string_cmd = tool.version_string_cmd,
+ tool_config=os.path.join( root, name ),
+ requirements=tool_requirements,
+ tests=tool_tests )
+ repository_metadata = get_repository_metadata( trans, id, change_set_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata and 'tools' in metadata:
+ metadata_tools = metadata[ 'tools' ]
+ found = False
+ for tool_metadata_dict in metadata_tools:
+ if 'id' in tool_metadata_dict and tool_metadata_dict[ 'id' ] == tool.id and \
+ 'version' in tool_metadata_dict and tool_metadata_dict[ 'version' ] == tool.version:
+ found = True
+ tool_metadata_dict[ 'name' ] = tool.name
+ tool_metadata_dict[ 'description' ] = tool.description
+ tool_metadata_dict[ 'version_string_cmd' ] = tool.version_string_cmd
+ tool_metadata_dict[ 'tool_config' ] = os.path.join( root, name )
+ tool_metadata_dict[ 'requirements' ] = tool_requirements
+ tool_metadata_dict[ 'tests' ] = tool_tests
+ flush_needed = True
+ if not found:
+ metadata_tools.append( tool_dict )
+ else:
+ if metadata is None:
+ repository_metadata.metadata = {}
+ repository_metadata.metadata[ 'tools' ] = [ tool_dict ]
+ trans.sa_session.add( repository_metadata )
+ if not flush_needed:
+ flush_needed = True
+ else:
+ if 'tools' in metadata_dict:
+ metadata_dict[ 'tools' ].append( tool_dict )
+ else:
+ metadata_dict[ 'tools' ] = [ tool_dict ]
+ except Exception, e:
+ invalid_files.append( ( name, str( e ) ) )
+ # Find all exported workflows
+ elif name.endswith( '.ga' ):
+ try:
+ full_path = os.path.abspath( os.path.join( root, name ) )
+ # Convert workflow data from json
+ fp = open( full_path, 'rb' )
+ workflow_text = fp.read()
+ fp.close()
+ exported_workflow_dict = from_json_string( workflow_text )
+ # We'll store everything except the workflow steps in the database.
+ workflow_dict = { 'a_galaxy_workflow' : exported_workflow_dict[ 'a_galaxy_workflow' ],
+ 'name' : exported_workflow_dict[ 'name' ],
+ 'annotation' : exported_workflow_dict[ 'annotation' ],
+ 'format-version' : exported_workflow_dict[ 'format-version' ] }
+ repository_metadata = get_repository_metadata( trans, id, change_set_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata and 'workflows' in metadata:
+ metadata_workflows = metadata[ 'workflows' ]
+ found = False
+ for workflow_metadata_dict in metadata_workflows:
+ if 'a_galaxy_workflow' in workflow_metadata_dict and util.string_as_bool( workflow_metadata_dict[ 'a_galaxy_workflow' ] ) and \
+ 'name' in workflow_metadata_dict and workflow_metadata_dict[ 'name' ] == exported_workflow_dict[ 'name' ] and \
+ 'annotation' in workflow_metadata_dict and workflow_metadata_dict[ 'annotation' ] == exported_workflow_dict[ 'annotation' ] and \
+ 'format-version' in workflow_metadata_dict and workflow_metadata_dict[ 'format-version' ] == exported_workflow_dict[ 'format-version' ]:
+ found = True
+ break
+ if not found:
+ metadata_workflows.append( workflow_dict )
+ else:
+ if metadata is None:
+ repository_metadata.metadata = {}
+ repository_metadata.metadata[ 'workflows' ] = [ workflow_dict ]
+ trans.sa_session.add( repository_metadata )
+ if not flush_needed:
+ flush_needed = True
+ else:
+ if 'workflows' in metadata_dict:
+ metadata_dict[ 'workflows' ].append( workflow_dict )
+ else:
+ metadata_dict[ 'workflows' ] = [ workflow_dict ]
+ except Exception, e:
+ invalid_files.append( ( name, str( e ) ) )
+ if metadata_dict:
+ # The metadata_dict dictionary will contain items only
+ # if the repository did not already have metadata set.
+ repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict )
+ trans.sa_session.add( repository_metadata )
+ if not flush_needed:
+ flush_needed = True
+ else:
+ message = "Repository does not include changeset revision '%s'." % str( change_set_revision )
+ status = 'error'
+ if invalid_files:
+ message = "Metadata cannot be defined for change set revision '%s'. Correct the following problems and reset metadata.<br/>" % str( change_set_revision )
+ for itc_tup in invalid_files:
+ tool_file = itc_tup[0]
+ exception_msg = itc_tup[1]
+ if exception_msg.find( 'No such file or directory' ) >= 0:
+ exception_items = exception_msg.split()
+ missing_file_items = exception_items[7].split( '/' )
+ missing_file = missing_file_items[-1].rstrip( '\'' )
+ correction_msg = "This file refers to a missing file <b>%s</b>. " % str( missing_file )
+ if exception_msg.find( '.loc' ) >= 0:
+ # Handle the special case where a tool depends on a missing xxx.loc file by telling
+ # the user to upload xxx.loc.sample to the repository so that it can be copied to
+ # ~/tool-data/xxx.loc. In this case, exception_msg will look something like:
+ # [Errno 2] No such file or directory: '/Users/gvk/central/tool-data/blast2go.loc'
+ sample_loc_file = '%s.sample' % str( missing_file )
+ correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_loc_file
+ else:
+ correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % missing_file
+ elif exception_msg.find( 'Data table named' ) >= 0:
+ # Handle the special case where the tool requires an entry in the tool_data_table.conf file.
+ # In this case, exception_msg will look something like:
+ # Data table named 'tmap_indexes' is required by tool but not configured
+ exception_items = exception_msg.split()
+ name_attr = exception_items[3].lstrip( '\'' ).rstrip( '\'' )
+ message += "<b>%s</b> - This tool requires an entry in the tool_data_table_conf.xml file. " % tool_file
+ message += "Complete and <b>Save</b> the form below to resolve this issue.<br/>"
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='add_tool_data_table_entry',
+ name_attr=name_attr,
+ repository_id=id,
+ message=message,
+ status='error' ) )
+ else:
+ correction_msg = exception_msg
+ message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
+ status = 'error'
+ elif flush_needed:
+ # We only flush if there are no tool config errors, so change sets will only have metadata
+ # if everything in them is valid.
+ trans.sa_session.flush()
+ return message, status
def get_repository_by_name( trans, name ):
"""Get a repository from the database via name"""
return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one()
-def get_repository_tip( repository ):
- # The received repository must be a mercurial repository, not a db record.
- tip_changeset = repository.changelog.tip()
- tip_ctx = repository.changectx( tip_changeset )
- return "%s:%s" % ( str( tip_ctx.rev() ), tip_ctx.parents()[0] )
+def get_change_set( trans, repo, change_set_revision, **kwd ):
+ """Retrieve a specified change set from a repository"""
+ for changeset in repo.changelog:
+ ctx = repo.changectx( changeset )
+ if str( ctx ) == change_set_revision:
+ return ctx
+ return None
+def copy_sample_loc_file( trans, filename ):
+ """Copy xxx.loc.sample to ~/tool-data/xxx.loc"""
+ sample_loc_file = os.path.split( filename )[1]
+ loc_file = os.path.split( filename )[1].rstrip( '.sample' )
+ tool_data_path = os.path.abspath( trans.app.config.tool_data_path )
+ if not ( os.path.exists( os.path.join( tool_data_path, loc_file ) ) or os.path.exists( os.path.join( tool_data_path, sample_loc_file ) ) ):
+ shutil.copy( os.path.abspath( filename ), os.path.join( tool_data_path, sample_loc_file ) )
+ shutil.copy( os.path.abspath( filename ), os.path.join( tool_data_path, loc_file ) )
+def get_configured_ui():
+ # Configure any desired ui settings.
+ _ui = ui.ui()
+ # The following will suppress all messages. This is
+ # the same as adding the following setting to the repo
+ # hgrc file' [ui] section:
+ # quiet = True
+ _ui.setconfig( 'ui', 'quiet', True )
+ return _ui
def get_user( trans, id ):
"""Get a user from the database"""
return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
def handle_email_alerts( trans, repository ):
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
smtp_server = trans.app.config.smtp_server
if smtp_server and repository.email_alerts:
# Send email alert to users that want them.
@@ -118,22 +322,77 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def update_for_browsing( repository, current_working_dir ):
- # Make a copy of a repository's files for browsing.
+def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
+ # Make a copy of a repository's files for browsing, remove from disk all files that
+ # are not tracked, and commit all added, modified or removed files that have not yet
+ # been committed.
repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ # The following would delete from disk only the files tracked by the repository.
+ #os.system( 'hg update -r null > /dev/null 2>&1' )
+ repo.ui.pushbuffer()
+ commands.status( repo.ui, repo, all=True )
+ status_and_file_names = repo.ui.popbuffer().strip().split( "\n" )
+ # status_and_file_names looks something like:
+ # ['? README', '? tmap_tool/tmap-0.0.9.tar.gz', '? dna_filtering.py', 'C filtering.py', 'C filtering.xml']
+ # The codes used to show the status of files are:
+ # M = modified
+ # A = added
+ # R = removed
+ # C = clean
+ # ! = deleted, but still tracked
+ # ? = not tracked
+ # I = ignored
+ files_to_remove_from_disk = []
+ files_to_commit = []
+ for status_and_file_name in status_and_file_names:
+ if status_and_file_name.startswith( '?' ) or status_and_file_name.startswith( 'I' ):
+ files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
+ files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ for full_path in files_to_remove_from_disk:
+ # We'll remove all files that are not tracked or ignored.
+ if os.path.isdir( full_path ):
+ try:
+ os.rmdir( full_path )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ elif os.path.isfile( full_path ):
+ os.remove( full_path )
+ dir = os.path.split( full_path )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ if files_to_commit:
+ if not commit_message:
+ commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
os.chdir( repo_dir )
os.system( 'hg update > /dev/null 2>&1' )
os.chdir( current_working_dir )
+def load_tool( trans, config_file ):
"""
- # TODO: the following is useful if the repository files somehow include missing or
- # untracked files. If this happens, we can enhance the following to clean things up.
- # We're not currently doing any cleanup though since so far none of the repositories
- # have problematic files for browsing.
- # Get the tip change set.
- repo = hg.repository( ui.ui(), repo_dir )
- for changeset in repo.changelog:
- ctx = repo.changectx( changeset )
- ctx_parent = ctx.parents()[0]
- break
- modified, added, removed, deleted, unknown, ignored, clean = repo.status( node1=ctx_parent.node(), node2=ctx.node() )
+ Load a single tool from the file named by `config_file` and return
+ an instance of `Tool`.
"""
+ # Parse XML configuration file and get the root element
+ tree = util.parse_xml( config_file )
+ root = tree.getroot()
+ if root.tag == 'tool':
+ # Allow specifying a different tool subclass to instantiate
+ if root.find( "type" ) is not None:
+ type_elem = root.find( "type" )
+ module = type_elem.get( 'module', 'galaxy.tools' )
+ cls = type_elem.get( 'class' )
+ mod = __import__( module, globals(), locals(), [cls])
+ ToolClass = getattr( mod, cls )
+ elif root.get( 'tool_type', None ) is not None:
+ ToolClass = tool_types.get( root.get( 'tool_type' ) )
+ else:
+ ToolClass = Tool
+ return ToolClass( config_file, root, trans.app )
+ return None
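
update_for_browsing above captures mercurial's status output through a ui buffer and then branches on the one-letter status codes. The parsing step, condensed into a self-contained sketch with the same code table (the repository plumbing is elided):

    def split_status( status_lines ):
        # Codes, as documented in update_for_browsing:
        #   M modified, A added, R removed, C clean,
        #   ! deleted but still tracked, ? not tracked, I ignored
        files_to_remove, files_to_commit = [], []
        for line in status_lines:
            if not line.strip():
                continue
            code, name = line.split( None, 1 )
            if code in ( '?', 'I' ):
                files_to_remove.append( name )
            elif code in ( 'M', 'A', 'R' ):
                files_to_commit.append( name )
        return files_to_remove, files_to_commit

    # split_status( [ '? README', 'M filtering.py', 'C filtering.xml' ] )
    # -> ( [ 'README' ], [ 'filtering.py' ] )
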
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -74,10 +74,9 @@
class NameColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
return repository.name
- class VersionColumn( grids.TextColumn ):
+ class RevisionColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
- repo = hg.repository( ui.ui(), repository.repo_path )
- return get_repository_tip( repo )
+ return repository.revision
class DescriptionColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
return repository.description
@@ -124,11 +123,11 @@
key="name",
link=( lambda item: dict( operation="view_or_manage_repository", id=item.id, webapp="community" ) ),
attach_popup=False ),
- DescriptionColumn( "Description",
+ DescriptionColumn( "Synopsis",
key="description",
attach_popup=False ),
- VersionColumn( "Version",
- attach_popup=False ),
+ RevisionColumn( "Revision",
+ attach_popup=False ),
CategoryColumn( "Category",
model_class=model.Category,
key="Category.name",
@@ -215,7 +214,8 @@
if operation == "view_or_manage_repository":
repository_id = kwd.get( 'id', None )
repository = get_repository( trans, repository_id )
- if repository.user == trans.user:
+ is_admin = trans.user_is_admin()
+ if is_admin or repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
**kwd ) )
@@ -312,7 +312,7 @@
if not os.path.exists( repository_path ):
os.makedirs( repository_path )
# Create the local repository
- repo = hg.repository( ui.ui(), repository_path, create=True )
+ repo = hg.repository( get_configured_ui(), repository_path, create=True )
# Add an entry in the hgweb.config file for the local repository
# This enables calls to repository.repo_path
self.__add_hgweb_config_entry( trans, repository, repository_path )
@@ -356,11 +356,19 @@
if not( VALID_REPOSITORYNAME_RE.match( name ) ):
return "Repository names must contain only lower-case letters, numbers and underscore '_'."
return ''
+ def __make_hgweb_config_copy( self, trans, hgweb_config ):
+ # Make a backup of the hgweb.config file
+ today = date.today()
+ backup_date = today.strftime( "%Y_%m_%d" )
+ hgweb_config_copy = '%s/hgweb.config_%s_backup' % ( trans.app.config.root, backup_date )
+ shutil.copy( os.path.abspath( hgweb_config ), os.path.abspath( hgweb_config_copy ) )
def __add_hgweb_config_entry( self, trans, repository, repository_path ):
# Add an entry in the hgweb.config file for a new repository.
# An entry looks something like:
# repos/test/mira_assembler = database/community_files/000/repo_123.
hgweb_config = "%s/hgweb.config" % trans.app.config.root
+ # Make a backup of the hgweb.config file since we're going to be changing it.
+ self.__make_hgweb_config_copy( trans, hgweb_config )
entry = "repos/%s/%s = %s" % ( repository.user.username, repository.name, repository_path.lstrip( './' ) )
if os.path.exists( hgweb_config ):
output = open( hgweb_config, 'a' )
@@ -369,6 +377,25 @@
output.write( '[paths]\n' )
output.write( "%s\n" % entry )
output.close()
+ def __change_hgweb_config_entry( self, trans, repository, old_repository_name, new_repository_name ):
+ # Change an entry in the hgweb.config file for a repository. This only happens when
+ # the owner changes the name of the repository. An entry looks something like:
+ # repos/test/mira_assembler = database/community_files/000/repo_123.
+ hgweb_config = "%s/hgweb.config" % trans.app.config.root
+ # Make a backup of the hgweb.config file since we're going to be changing it.
+ self.__make_hgweb_config_copy( trans, hgweb_config )
+ repo_dir = repository.repo_path
+ old_lhs = "repos/%s/%s" % ( repository.user.username, old_repository_name )
+ old_entry = "%s = %s" % ( old_lhs, repo_dir )
+ new_entry = "repos/%s/%s = %s\n" % ( repository.user.username, new_repository_name, repo_dir )
+ tmp_fd, tmp_fname = tempfile.mkstemp()
+ new_hgweb_config = open( tmp_fname, 'wb' )
+ for i, line in enumerate( open( hgweb_config ) ):
+ if line.startswith( old_lhs ):
+ new_hgweb_config.write( new_entry )
+ else:
+ new_hgweb_config.write( line )
+ shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
def __create_hgrc_file( self, repository ):
# At this point, an entry for the repository is required to be in the hgweb.config
# file so we can call repository.repo_path.
@@ -379,7 +406,7 @@
# push_ssl = False
# Since we support both http and https, we set push_ssl to False to override
# the default (which is True) in the mercurial api.
- repo = hg.repository( ui.ui(), path=repository.repo_path )
+ repo = hg.repository( get_configured_ui(), path=repository.repo_path )
fp = repo.opener( 'hgrc', 'wb' )
fp.write( '[paths]\n' )
fp.write( 'default = .\n' )
@@ -396,9 +423,10 @@
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
current_working_dir = os.getcwd()
- update_for_browsing( repository, current_working_dir )
+ # Update repository files for browsing.
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
repository=repository,
@@ -413,31 +441,46 @@
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
repository = get_repository( trans, id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) )
if params.get( 'select_files_to_delete_button', False ):
if selected_files_to_delete:
selected_files_to_delete = selected_files_to_delete.split( ',' )
current_working_dir = os.getcwd()
# Get the current repository tip.
- tip = repo[ 'tip' ]
+ tip = repository.tip
for selected_file in selected_files_to_delete:
repo_file = os.path.abspath( selected_file )
- commands.remove( repo.ui, repo, repo_file )
+ commands.remove( repo.ui, repo, repo_file, force=True )
# Commit the change set.
if not commit_message:
commit_message = 'Deleted selected files'
- # Commit the changes.
- commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+ try:
+ commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- update_for_browsing( repository, current_working_dir )
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
- repo = hg.repository( ui.ui(), repo_dir )
- if tip != repo[ 'tip' ]:
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ if tip != repository.tip:
message = "The selected files were deleted from the repository."
else:
message = 'No changes to repository.'
+ # Set metadata on the repository tip
+ error_message, status = set_repository_metadata( trans, id, repository.tip, **kwd )
+ if error_message:
+ message = '%s<br/>%s' % ( message, error_message )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=id,
+ message=message,
+ status=status ) )
else:
message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>."
status = "error"
@@ -453,8 +496,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
- tip = get_repository_tip( repo )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
alerts = params.get( 'alerts', '' )
@@ -481,10 +523,15 @@
trans.sa_session.flush()
checked = alerts_checked or ( user and user.email in email_alerts )
alerts_check_box = CheckboxField( 'alerts', checked=checked )
+ repository_metadata = get_repository_metadata( trans, id, repository.tip )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ else:
+ metadata = None
return trans.fill_template( '/webapps/community/repository/view_repository.mako',
repo=repo,
repository=repository,
- tip=tip,
+ metadata=metadata,
avg_rating=avg_rating,
display_reviews=display_reviews,
num_ratings=num_ratings,
@@ -498,8 +545,8 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
- tip = get_repository_tip( repo )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
repo_name = util.restore_text( params.get( 'repo_name', repository.name ) )
description = util.restore_text( params.get( 'description', repository.description ) )
long_description = util.restore_text( params.get( 'long_description', repository.long_description ) )
@@ -507,6 +554,7 @@
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
alerts = params.get( 'alerts', '' )
alerts_checked = CheckboxField.is_checked( alerts )
+ category_ids = util.listify( params.get( 'category_id', '' ) )
if repository.email_alerts:
email_alerts = from_json_string( repository.email_alerts )
else:
@@ -516,6 +564,7 @@
user = trans.user
if params.get( 'edit_repository_button', False ):
flush_needed = False
+ # TODO: add a can_manage in the security agent.
if user != repository.user:
message = "You are not the owner of this repository, so you cannot manage it."
status = 'error'
@@ -529,6 +578,7 @@
if message:
error = True
else:
+ self.__change_hgweb_config_entry( trans, repository, repository.name, repo_name )
repository.name = repo_name
flush_needed = True
if description != repository.description:
@@ -540,6 +590,21 @@
if flush_needed:
trans.sa_session.add( repository )
trans.sa_session.flush()
+ message = "The repository information has been updated."
+ elif params.get( 'manage_categories_button', False ):
+ flush_needed = False
+ # Delete all currently existing categories.
+ for rca in repository.categories:
+ trans.sa_session.delete( rca )
+ trans.sa_session.flush()
+ if category_ids:
+ # Create category associations
+ for category_id in category_ids:
+ category = trans.app.model.Category.get( trans.security.decode_id( category_id ) )
+ rca = trans.app.model.RepositoryCategoryAssociation( repository, category )
+ trans.sa_session.add( rca )
+ trans.sa_session.flush()
+ message = "The repository information has been updated."
elif params.get( 'user_access_button', False ):
if allow_push not in [ 'none' ]:
remove_auth = params.get( 'remove_auth', '' )
@@ -553,6 +618,7 @@
usernames.append( user.username )
usernames = ','.join( usernames )
repository.set_allow_push( usernames, remove_auth=remove_auth )
+ message = "The repository information has been updated."
elif params.get( 'receive_email_alerts_button', False ):
flush_needed = False
if alerts_checked:
@@ -568,6 +634,7 @@
if flush_needed:
trans.sa_session.add( repository )
trans.sa_session.flush()
+ message = "The repository information has been updated."
if error:
status = 'error'
if repository.allow_push:
@@ -577,6 +644,13 @@
allow_push_select_field = self.__build_allow_push_select_field( trans, current_allow_push_list )
checked = alerts_checked or user.email in email_alerts
alerts_check_box = CheckboxField( 'alerts', checked=checked )
+ repository_metadata = get_repository_metadata( trans, id, repository.tip )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ else:
+ metadata = None
+ categories = get_categories( trans )
+ selected_categories = [ rca.category_id for rca in repository.categories ]
return trans.fill_template( '/webapps/community/repository/manage_repository.mako',
repo_name=repo_name,
description=description,
@@ -585,7 +659,9 @@
allow_push_select_field=allow_push_select_field,
repo=repo,
repository=repository,
- tip=tip,
+ selected_categories=selected_categories,
+ categories=categories,
+ metadata=metadata,
avg_rating=avg_rating,
display_reviews=display_reviews,
num_ratings=num_ratings,
@@ -598,7 +674,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
changesets = []
for changeset in repo.changelog:
ctx = repo.changectx( changeset )
@@ -626,14 +702,9 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
- found = False
- for changeset in repo.changelog:
- ctx = repo.changectx( changeset )
- if str( ctx ) == ctx_str:
- found = True
- break
- if not found:
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
+ ctx = get_change_set( trans, repo, ctx_str )
+ if ctx is None:
message = "Repository does not include changeset revision '%s'." % str( ctx_str )
status = 'error'
return trans.response.send_redirect( web.url_for( controller='repository',
@@ -675,8 +746,7 @@
message='Select a repository to rate',
status='error' ) )
repository = get_repository( trans, id )
- repo = hg.repository( ui.ui(), repository.repo_path )
- tip = get_repository_tip( repo )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
if repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -691,7 +761,6 @@
rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model )
return trans.fill_template( '/webapps/community/repository/rate_repository.mako',
repository=repository,
- tip=tip,
avg_rating=avg_rating,
display_reviews=display_reviews,
num_ratings=num_ratings,
@@ -736,6 +805,134 @@
action='browse_repositories',
**kwd ) )
@web.expose
+ @web.require_login( "set repository metadata" )
+ def set_metadata( self, trans, id, ctx_str, **kwd ):
+ message, status = set_repository_metadata( trans, id, ctx_str, **kwd )
+ if not message:
+ message = "Metadata for change set revision '%s' has been reset." % str( ctx_str )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=id,
+ message=message,
+ status=status ) )
+ @web.expose
+ def add_tool_data_table_entry( self, trans, name_attr, repository_id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ comment_char = util.restore_text( params.get( 'comment_char', '#' ) )
+ loc_filename = util.restore_text( params.get( 'loc_filename', '' ) )
+ repository = get_repository( trans, repository_id )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
+ column_fields = self.__get_column_fields( **kwd )
+ if params.get( 'add_field_button', False ):
+ # Add a field
+ field_index = len( column_fields ) + 1
+ field_tup = ( '%i_field_name' % field_index, '' )
+ column_fields.append( field_tup )
+ elif params.get( 'remove_button', False ):
+ # Delete a field - find the index of the field to be removed from the remove button label
+ index = int( kwd[ 'remove_button' ].split( ' ' )[2] ) - 1
+ tup_to_remove = column_fields[ index ]
+ column_fields.remove( tup_to_remove )
+ # Re-number field tups
+ new_column_fields = []
+ for field_index, old_field_tup in enumerate( column_fields ):
+ name = '%i_field_name' % ( field_index + 1 )
+ value = old_field_tup[1]
+ new_column_fields.append( ( name, value ) )
+ column_fields = new_column_fields
+ elif params.get( 'add_tool_data_table_entry_button', False ):
+ # Add an entry to the end of the tool_data_table_conf.xml file
+ tdt_config = "%s/tool_data_table_conf.xml" % trans.app.config.root
+ if os.path.exists( tdt_config ):
+ # Make a backup of the file since we're going to be changing it.
+ today = date.today()
+ backup_date = today.strftime( "%Y_%m_%d" )
+ tdt_config_copy = '%s/tool_data_table_conf.xml_%s_backup' % ( trans.app.config.root, backup_date )
+ shutil.copy( os.path.abspath( tdt_config ), os.path.abspath( tdt_config_copy ) )
+ # Generate the string of column names
+ column_names = ', '.join( [ column_tup[1] for column_tup in column_fields ] )
+ # Write each line of the tool_data_table_conf.xml file except the last to a temp file.
+ fh = tempfile.NamedTemporaryFile( 'wb' )
+ tmp_filename = fh.name
+ fh.close()
+ new_tdt_config = open( tmp_filename, 'wb' )
+ for i, line in enumerate( open( tdt_config, 'rb' ) ):
+ if line.startswith( '</tables>' ):
+ break
+ new_tdt_config.write( line )
+ new_tdt_config.write( ' <!-- Location of %s files -->\n' % name_attr )
+ new_tdt_config.write( ' <table name="%s" comment_char="%s">\n' % ( name_attr, comment_char ) )
+ new_tdt_config.write( ' <columns>%s</columns>\n' % column_names )
+ new_tdt_config.write( ' <file path="tool-data/%s" />\n' % loc_filename )
+ new_tdt_config.write( ' </table>\n' )
+ # Now write the last line of the file
+ new_tdt_config.write( '</tables>\n' )
+ new_tdt_config.close()
+ shutil.move( tmp_filename, os.path.abspath( tdt_config ) )
+ # Reload the tool_data_table_conf entries
+ trans.app.tool_data_tables = galaxy.tools.data.ToolDataTableManager( trans.app.config.tool_data_table_config_path )
+ message = "The new entry has been added to the tool_data_table_conf.xml file, so click the <b>Reset metadata</b> button below."
+ # TODO: what if ~/tool-data/<loc_filename> doesn't exist? We need to figure out how to
+ # force the user to upload its sample to the repository in order to generate metadata.
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=repository_id,
+ message=message,
+ status=status ) )
+ return trans.fill_template( '/webapps/community/repository/add_tool_data_table_entry.mako',
+ name_attr=name_attr,
+ repository=repository,
+ comment_char=comment_char,
+ loc_filename=loc_filename,
+ column_fields=column_fields,
+ message=message,
+ status=status )
+ def __get_column_fields( self, **kwd ):
+ '''
+ Return a list of tuples of the user-entered form fields representing
+ columns in the location file.
+ '''
+ params = util.Params( kwd )
+ column_fields = []
+ index = 0
+ while True:
+ name = '%i_field_name' % ( index + 1 )
+ if kwd.has_key( name ):
+ value = util.restore_text( params.get( name, '' ) )
+ field_tup = ( name, value )
+ index += 1
+ column_fields.append( field_tup )
+ else:
+ break
+ return column_fields
+ @web.expose
+ def display_tool( self, trans, repository_id, tool_config, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ repository = get_repository( trans, repository_id )
+ tool = load_tool( trans, os.path.abspath( tool_config ) )
+ tool_state = self.__new_state( trans )
+ return trans.fill_template( "/webapps/community/repository/tool_form.mako",
+ repository=repository,
+ tool=tool,
+ tool_state=tool_state,
+ message=message,
+ status=status )
+ def __new_state( self, trans, all_pages=False ):
+ """
+ Create a new `DefaultToolState` for this tool. It will not be initialized
+ with default values for inputs.
+
+ Only inputs on the first page will be initialized unless `all_pages` is
+ True, in which case all inputs regardless of page are initialized.
+ """
+ state = DefaultToolState()
+ state.inputs = {}
+ return state
+ @web.expose
def download( self, trans, repository_id, file_type, **kwd ):
# Download an archive of the repository files compressed as zip, gz or bz2.
params = util.Params( kwd )
@@ -780,7 +977,6 @@
folder_contents.append( node )
return folder_contents
def __get_files( self, trans, repository, folder_path ):
- ok = True
def print_ticks( d ):
pass
cmd = "ls -p '%s'" % folder_path
@@ -789,17 +985,22 @@
events={ pexpect.TIMEOUT : print_ticks },
timeout=10 )
if 'No such file or directory' in output:
- status = 'error'
- message = "No folder named (%s) exists." % folder_path
- ok = False
- if ok:
- return output.split()
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='browse_repositories',
- operation="view_or_manage_repository",
- id=trans.security.encode_id( repository.id ),
- status=status,
- message=message ) )
+ if 'root' in output:
+ # The repository is empty
+ return []
+ else:
+ # Some strange error occurred: the selected file was displayed, but it
+ # does not exist in the sub-directory from which it was displayed.
+ # This should not happen...
+ status = 'error'
+ message = "No folder named (%s) exists." % folder_path
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories',
+ operation="view_or_manage_repository",
+ id=trans.security.encode_id( repository.id ),
+ status=status,
+ message=message ) )
+ return output.split()
@web.json
def get_file_contents( self, trans, file_path ):
# Avoid caching
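
Editor's note: the add_tool_data_table_entry handler above appends a new <table> entry by copying every line of tool_data_table_conf.xml except the closing </tables> tag to a temp file, writing the new entry plus the closing tag, and then moving the temp file over the original. A minimal standalone sketch of that pattern (the function and argument names here are hypothetical, not part of the commit):

import os, shutil, tempfile

def append_table_entry( config_path, name_attr, comment_char, column_names, loc_filename ):
    fd, tmp_path = tempfile.mkstemp()
    os.close( fd )
    new_conf = open( tmp_path, 'w' )
    # Copy every line up to (but not including) the closing </tables> tag.
    for line in open( config_path ):
        if line.startswith( '</tables>' ):
            break
        new_conf.write( line )
    # Append the new <table> entry, then restore the closing tag.
    new_conf.write( '    <!-- Location of %s files -->\n' % name_attr )
    new_conf.write( '    <table name="%s" comment_char="%s">\n' % ( name_attr, comment_char ) )
    new_conf.write( '        <columns>%s</columns>\n' % ', '.join( column_names ) )
    new_conf.write( '        <file path="tool-data/%s" />\n' % loc_filename )
    new_conf.write( '    </table>\n' )
    new_conf.write( '</tables>\n' )
    new_conf.close()
    shutil.move( tmp_path, config_path )
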
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -27,13 +27,13 @@
repository_id = params.get( 'repository_id', '' )
repository = get_repository( trans, repository_id )
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) )
remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
upload_point = self.__get_upload_point( repository, **kwd )
# Get the current repository tip.
- tip = repo[ 'tip' ]
+ tip = repository.tip
if params.get( 'upload_button', False ):
current_working_dir = os.getcwd()
file_data = params.get( 'file_data', '' )
@@ -45,6 +45,7 @@
uploaded_file = file_data.file
uploaded_file_name = uploaded_file.name
uploaded_file_filename = file_data.filename
+ isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
if uploaded_file:
isgzip = False
isbz2 = False
@@ -53,17 +54,21 @@
if not isgzip:
isbz2 = is_bz2( uploaded_file_name )
ok = True
- # Determine what we have - a single file or an archive
- try:
- if ( isgzip or isbz2 ) and uncompress_file:
- # Open for reading with transparent compression.
- tar = tarfile.open( uploaded_file_name, 'r:*' )
- else:
- tar = tarfile.open( uploaded_file_name )
- istar = True
- except tarfile.ReadError, e:
+ if isempty:
tar = None
istar = False
+ else:
+ # Determine what we have - a single file or an archive
+ try:
+ if ( isgzip or isbz2 ) and uncompress_file:
+ # Open for reading with transparent compression.
+ tar = tarfile.open( uploaded_file_name, 'r:*' )
+ else:
+ tar = tarfile.open( uploaded_file_name )
+ istar = True
+ except tarfile.ReadError, e:
+ tar = None
+ istar = False
if istar:
ok, message, files_to_remove = self.upload_tar( trans,
repository,
@@ -82,15 +87,24 @@
# Move the uploaded file to the upload_point within the repository hierarchy.
shutil.move( uploaded_file_name, full_path )
commands.add( repo.ui, repo, full_path )
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
+ if full_path.endswith( '.loc.sample' ):
+ # Handle the special case where a xxx.loc.sample file is
+ # being uploaded by copying it to ~/tool-data/xxx.loc.
+ copy_sample_loc_file( trans, full_path )
handle_email_alerts( trans, repository )
if ok:
- # Update the repository files for browsing, a by-product of doing this
- # is eliminating unwanted files from the repository directory.
- update_for_browsing( repository, current_working_dir )
+ # Update the repository files for browsing.
+ update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
# Get the new repository tip.
- repo = hg.repository( ui.ui(), repo_dir )
- if tip != repo[ 'tip' ]:
+ if tip != repository.tip:
if ( isgzip or isbz2 ) and uncompress_file:
uncompress_str = ' uncompressed and '
else:
@@ -102,12 +116,22 @@
else:
message += " %d files were removed from the repository root." % len( files_to_remove )
else:
- message = 'No changes to repository.'
+ message = 'No changes to repository.'
+ # Set metadata on the repository tip
+ error_message, status = set_repository_metadata( trans, repository_id, repository.tip, **kwd )
+ if error_message:
+ message = '%s<br/>%s' % ( message, error_message )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=repository_id,
+ message=message,
+ status=status ) )
trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repository',
+ id=repository_id,
commit_message='Deleted selected files',
message=message,
- id=trans.security.encode_id( repository.id ) ) )
+ status=status ) )
else:
status = 'error'
selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
@@ -121,7 +145,7 @@
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message ):
# Upload a tar archive of files.
repo_dir = repository.repo_path
- repo = hg.repository( ui.ui(), repo_dir )
+ repo = hg.repository( get_configured_ui(), repo_dir )
files_to_remove = []
ok, message = self.__check_archive( tar )
if not ok:
@@ -158,11 +182,21 @@
for repo_file in files_to_remove:
# Remove files in the repository (relative to the upload point)
# that are not in the uploaded archive.
- commands.remove( repo.ui, repo, repo_file )
+ commands.remove( repo.ui, repo, repo_file, force=True )
for filename_in_archive in filenames_in_archive:
commands.add( repo.ui, repo, filename_in_archive )
- # Commit the changes.
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ if filename_in_archive.endswith( '.loc.sample' ):
+ # Handle the special case where a xxx.loc.sample file is
+ # being uploaded by copying it to ~/tool-data/xxx.loc.
+ copy_sample_loc_file( trans, filename_in_archive )
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
handle_email_alerts( trans, repository )
return True, '', files_to_remove
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
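
Editor's note: both commit sites above use the same workaround: if commands.commit fails, flush the dirstate and commit through the repository object directly. Isolated, the pattern looks like this (safe_commit is a hypothetical name; the mercurial calls are the ones used in the diff):

from mercurial import commands

def safe_commit( repo, full_path, username, commit_message ):
    try:
        commands.commit( repo.ui, repo, full_path, user=username, message=commit_message )
    except Exception:
        # Occasionally fails in the hosted tool shed with
        # "TypeError: array item must be char"; retry via the repo API.
        repo.dirstate.write()
        repo.commit( user=username, text=commit_message )
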
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -4,7 +4,7 @@
Naming: try to use class names that have a distinct plural form so that
the relationship cardinalities are obvious (e.g. prefer Dataset to Data)
"""
-import os.path, os, errno, sys, codecs, operator, tempfile, logging, tarfile, mimetypes, ConfigParser
+import os.path, os, errno, sys, codecs, operator, logging, tarfile, mimetypes, ConfigParser
from galaxy import util
from galaxy.util.bunch import Bunch
from galaxy.util.hash_util import *
@@ -114,6 +114,15 @@
return config.get( "paths", option )
raise Exception( "Entry for repository %s missing in %s/hgweb.config file." % ( lhs, os.getcwd() ) )
@property
+ def revision( self ):
+ repo = hg.repository( ui.ui(), self.repo_path )
+ tip_ctx = repo.changectx( repo.changelog.tip() )
+ return "%s:%s" % ( str( tip_ctx.rev() ), str( repo.changectx( repo.changelog.tip() ) ) )
+ @property
+ def tip( self ):
+ repo = hg.repository( ui.ui(), self.repo_path )
+ return str( repo.changectx( repo.changelog.tip() ) )
+ @property
def is_new( self ):
repo = hg.repository( ui.ui(), self.repo_path )
tip_ctx = repo.changectx( repo.changelog.tip() )
@@ -143,6 +152,12 @@
fp.write( line )
fp.close()
+class RepositoryMetadata( object ):
+ def __init__( self, repository_id=None, changeset_revision=None, metadata=None ):
+ self.repository_id = repository_id
+ self.changeset_revision = changeset_revision
+ self.metadata = metadata or dict()
+
class ItemRatingAssociation( object ):
def __init__( self, id=None, user=None, item=None, rating=0, comment='' ):
self.id = id
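
Editor's note: the new tip and revision properties above recompute the tip changeset from the Mercurial changelog on every access. The same calls as a standalone sketch (tip_and_revision is a hypothetical name; repo_path is any local Mercurial repository):

from mercurial import hg, ui

def tip_and_revision( repo_path ):
    repo = hg.repository( ui.ui(), repo_path )
    tip_ctx = repo.changectx( repo.changelog.tip() )
    tip = str( tip_ctx )                                # short changeset hash
    revision = '%s:%s' % ( str( tip_ctx.rev() ), tip )  # "<rev>:<hash>"
    return tip, revision
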
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/model/mapping.py
--- a/lib/galaxy/webapps/community/model/mapping.py
+++ b/lib/galaxy/webapps/community/model/mapping.py
@@ -111,6 +111,14 @@
Column( "email_alerts", JSONType, nullable=True ),
Column( "times_downloaded", Integer ) )
+RepositoryMetadata.table = Table( "repository_metadata", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+ Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+ Column( "metadata", JSONType, nullable=True ) )
+
RepositoryRatingAssociation.table = Table( "repository_rating_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
@@ -186,6 +194,9 @@
ratings=relation( RepositoryRatingAssociation, order_by=desc( RepositoryRatingAssociation.table.c.update_time ), backref="repositories" ),
user=relation( User.mapper ) ) )
+assign_mapper( context, RepositoryMetadata, RepositoryMetadata.table,
+ properties=dict( repository=relation( Repository ) ) )
+
assign_mapper( context, RepositoryRatingAssociation, RepositoryRatingAssociation.table,
properties=dict( repository=relation( Repository ), user=relation( User ) ) )
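
Editor's note: with the repository_metadata mapping above, per-changeset metadata can be looked up by the indexed (repository_id, changeset_revision) pair. A hypothetical lookup sketch, assuming the usual sa_session and model handles are in scope:

def get_repository_metadata( sa_session, model, repository_id, changeset_revision ):
    # Both columns are indexed, so this pairwise filter is cheap.
    return sa_session.query( model.RepositoryMetadata ) \
                     .filter_by( repository_id=repository_id,
                                 changeset_revision=changeset_revision ) \
                     .first()
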
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/community/model/migrate/versions/0008_add_repository_metadata_table.py
--- /dev/null
+++ b/lib/galaxy/webapps/community/model/migrate/versions/0008_add_repository_metadata_table.py
@@ -0,0 +1,52 @@
+"""
+Migration script to add the repository_metadata table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+RepositoryMetadata_table = Table( "repository_metadata", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+ Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+ Column( "metadata", JSONType, nullable=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ # Create repository_metadata table.
+ try:
+ RepositoryMetadata_table.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Creating repository_metadata table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+ # Drop repository_metadata table.
+ try:
+ RepositoryMetadata_table.drop()
+ except Exception, e:
+ print str(e)
+ log.debug( "Dropping repository_metadata table failed: %s" % str( e ) )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/demo_sequencer/config.py
--- a/lib/galaxy/webapps/demo_sequencer/config.py
+++ b/lib/galaxy/webapps/demo_sequencer/config.py
@@ -49,7 +49,7 @@
self.smtp_server = kwargs.get( 'smtp_server', None )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 lib/galaxy/webapps/reports/config.py
--- a/lib/galaxy/webapps/reports/config.py
+++ b/lib/galaxy/webapps/reports/config.py
@@ -33,7 +33,7 @@
self.sendmail_path = kwargs.get('sendmail_path',"/usr/sbin/sendmail")
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.bugs_email = kwargs.get( 'bugs_email', None )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 scripts/check_galaxy.py
--- a/scripts/check_galaxy.py
+++ b/scripts/check_galaxy.py
@@ -292,7 +292,7 @@
self.in_span = False
def handle_data(self, data):
if self.in_span:
- if data == "No such user":
+ if data == "No such user (please note that login is case sensitive)":
self.no_user = True
elif data == "Invalid password":
self.bad_pw = True
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 scripts/cleanup_datasets/cleanup_datasets.py
--- a/scripts/cleanup_datasets/cleanup_datasets.py
+++ b/scripts/cleanup_datasets/cleanup_datasets.py
@@ -170,6 +170,7 @@
app.model.History.table.c.update_time < cutoff_time ) ) \
.options( eagerload( 'datasets' ) )
for history in histories:
+ print "### Processing history id %d (%s)" % (history.id, history.name)
for dataset_assoc in history.datasets:
_purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
if not info_only:
@@ -182,6 +183,8 @@
history.purged = True
app.sa_session.add( history )
app.sa_session.flush()
+ else:
+ print "History id %d will be purged (without 'info_only' mode)" % history.id
history_count += 1
stop = time.time()
print 'Purged %d histories.' % history_count
@@ -310,17 +313,21 @@
dataset_ids.extend( [ row.id for row in history_dataset_ids_query.execute() ] )
# Process each of the Dataset objects
for dataset_id in dataset_ids:
+ dataset = app.sa_session.query( app.model.Dataset ).get( dataset_id )
+ if dataset.id in skip:
+ continue
+ skip.append( dataset.id )
print "######### Processing dataset id:", dataset_id
- dataset = app.sa_session.query( app.model.Dataset ).get( dataset_id )
- if dataset.id not in skip and _dataset_is_deletable( dataset ):
- deleted_dataset_count += 1
- for dataset_instance in dataset.history_associations + dataset.library_associations:
- # Mark each associated HDA as deleted
- _purge_dataset_instance( dataset_instance, app, remove_from_disk, include_children=True, info_only=info_only, is_deletable=True )
- deleted_instance_count += 1
- skip.append( dataset.id )
+ if not _dataset_is_deletable( dataset ):
+ print "Dataset is not deletable (shared between multiple histories/libraries, at least one is not deleted)"
+ continue
+ deleted_dataset_count += 1
+ for dataset_instance in dataset.history_associations + dataset.library_associations:
+ # Mark each associated HDA as deleted
+ _purge_dataset_instance( dataset_instance, app, remove_from_disk, include_children=True, info_only=info_only, is_deletable=True )
+ deleted_instance_count += 1
stop = time.time()
- print "Examined %d datasets, marked %d as deleted and purged %d dataset instances" % ( len( skip ), deleted_dataset_count, deleted_instance_count )
+ print "Examined %d datasets, marked %d datasets and %d dataset instances (HDA) as deleted" % ( len( skip ), deleted_dataset_count, deleted_instance_count )
print "Total elapsed time: ", stop - start
print "##########################################"
@@ -360,15 +367,24 @@
# A dataset_instance is either an HDA or an LDDA. Purging a dataset instance marks the instance as deleted,
# and marks the associated dataset as deleted if it is not associated with another active DatasetInstance.
if not info_only:
- print "Marking as deleted: ", dataset_instance.__class__.__name__, " id ", dataset_instance.id
+ print "Marking as deleted: %s id %d (for dataset id %d)" % \
+ ( dataset_instance.__class__.__name__, dataset_instance.id, dataset_instance.dataset.id )
dataset_instance.mark_deleted( include_children = include_children )
dataset_instance.clear_associated_files()
app.sa_session.add( dataset_instance )
app.sa_session.flush()
app.sa_session.refresh( dataset_instance.dataset )
+ else:
+ print "%s id %d (for dataset id %d) will be marked as deleted (without 'info_only' mode)" % \
+ ( dataset_instance.__class__.__name__, dataset_instance.id, dataset_instance.dataset.id )
if is_deletable or _dataset_is_deletable( dataset_instance.dataset ):
# Calling methods may have already checked _dataset_is_deletable; if so, is_deletable should be True
_delete_dataset( dataset_instance.dataset, app, remove_from_disk, info_only=info_only, is_deletable=is_deletable )
+ else:
+ if info_only:
+ print "Not deleting dataset ", dataset_instance.dataset.id, " (will be possibly deleted without 'info_only' mode)"
+ else:
+ print "Not deleting dataset %d (shared between multiple histories/libraries, at least one not deleted)" % dataset_instance.dataset.id
#need to purge children here
if include_children:
for child in dataset_instance.children:
@@ -396,8 +412,13 @@
.filter( app.model.MetadataFile.table.c.lda_id==ldda.id ):
metadata_files.append( metadata_file )
for metadata_file in metadata_files:
- print "The following metadata files attached to associations of Dataset '%s' have been purged:" % dataset.id
- if not info_only:
+ op_description = "marked as deleted"
+ if remove_from_disk:
+ op_description = op_description + " and purged from disk"
+ if info_only:
+ print "The following metadata files attached to associations of Dataset '%s' will be %s (without 'info_only' mode):" % ( dataset.id, op_description )
+ else:
+ print "The following metadata files attached to associations of Dataset '%s' have been %s:" % ( dataset.id, op_description )
if remove_from_disk:
try:
print "Removing disk file ", metadata_file.file_name
@@ -411,10 +432,13 @@
app.sa_session.add( metadata_file )
app.sa_session.flush()
print "%s" % metadata_file.file_name
- print "Deleting dataset id", dataset.id
- dataset.deleted = True
- app.sa_session.add( dataset )
- app.sa_session.flush()
+ if not info_only:
+ print "Deleting dataset id", dataset.id
+ dataset.deleted = True
+ app.sa_session.add( dataset )
+ app.sa_session.flush()
+ else:
+ print "Dataset %i will be deleted (without 'info_only' mode)" % ( dataset.id )
def _purge_dataset( app, dataset, remove_from_disk, info_only = False ):
if dataset.deleted:
@@ -433,6 +457,8 @@
dataset.purged = True
app.sa_session.add( dataset )
app.sa_session.flush()
+ else:
+ print "Dataset %i will be purged (without 'info_only' mode)" % (dataset.id)
else:
print "This dataset (%i) is not purgable, the file (%s) will not be removed.\n" % ( dataset.id, dataset.file_name )
except OSError, exc:
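
Editor's note: the restructured loop above hoists the de-duplication and deletability checks to the top of the loop body, so each dataset is examined once and non-deletable datasets are skipped early. The control-flow skeleton (process_datasets is a hypothetical name; the script's own helpers are assumed in scope):

def process_datasets( app, dataset_ids, remove_from_disk, info_only ):
    skip = []
    for dataset_id in dataset_ids:
        dataset = app.sa_session.query( app.model.Dataset ).get( dataset_id )
        if dataset.id in skip:
            continue   # already handled via another history/library
        skip.append( dataset.id )
        if not _dataset_is_deletable( dataset ):
            continue   # still attached to an undeleted history/library
        for instance in dataset.history_associations + dataset.library_associations:
            _purge_dataset_instance( instance, app, remove_from_disk,
                                     include_children=True, info_only=info_only,
                                     is_deletable=True )
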
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 scripts/cleanup_datasets/purge_libraries.sh
--- a/scripts/cleanup_datasets/purge_libraries.sh
+++ b/scripts/cleanup_datasets/purge_libraries.sh
@@ -1,5 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-#python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 2 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/images/fugue/arrow-transition-270-bw.png
Binary file static/images/fugue/arrow-transition-270-bw.png has changed
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/images/fugue/arrow-transition-bw.png
Binary file static/images/fugue/arrow-transition-bw.png has changed
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/june_2007_style/blue/trackster.css
--- a/static/june_2007_style/blue/trackster.css
+++ b/static/june_2007_style/blue/trackster.css
@@ -51,3 +51,8 @@
.bookmark{background:white;border:solid #999 1px;border-right:none;margin:0.5em;margin-right:0;padding:0.5em;}
.bookmark .position{font-weight:bold;}
.delete-icon-container{float:right;}
+.icon{display:inline-block;width:16px;height:16px;}
+.icon.more-down{background:url('../images/fugue/arrow-transition-270-bw.png') no-repeat 0px 0px;}
+.icon.more-across{background:url('../images/fugue/arrow-transition-bw.png') no-repeat 0px 0px;}
+.intro{padding:1em;}
+.intro>.action-button{background-color:#CCC;padding:1em;}
\ No newline at end of file
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/june_2007_style/trackster.css.tmpl
--- a/static/june_2007_style/trackster.css.tmpl
+++ b/static/june_2007_style/trackster.css.tmpl
@@ -293,3 +293,22 @@
float:right;
}
+.icon {
+ display:inline-block;
+ width:16px;
+ height:16px;
+}
+.icon.more-down {
+ background:url('../images/fugue/arrow-transition-270-bw.png') no-repeat 0px 0px;
+}
+.icon.more-across {
+ background: url('../images/fugue/arrow-transition-bw.png') no-repeat 0px 0px;
+}
+.intro {
+ padding: 1em;
+}
+.intro > .action-button {
+ background-color: #CCC;
+ padding: 1em;
+}
+
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/scripts/galaxy.base.js
--- a/static/scripts/galaxy.base.js
+++ b/static/scripts/galaxy.base.js
@@ -725,4 +725,5 @@
}
return anchor;
});
+
});
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/scripts/galaxy.panels.js
--- a/static/scripts/galaxy.panels.js
+++ b/static/scripts/galaxy.panels.js
@@ -214,7 +214,7 @@
init_fn();
}
};
-
+
function show_in_overlay( options ) {
var width = options.width || '600';
var height = options.height || '400';
@@ -226,9 +226,9 @@
show_modal( null, $( "<div style='margin: -5px;'><img id='close_button' style='position:absolute;right:-17px;top:-15px;src='" + image_path + "/closebox.png'><iframe style='margin: 0; padding: 0;' src='" + options.url + "' width='" + width + "' height='" + height + "' scrolling='" + scroll + "' frameborder='0'></iframe></div>" ) );
$("#close_button").bind( "click", function() { hide_modal(); } );
}
-
+
// Tab management
-
+
$(function() {
$(".tab").each( function() {
var submenu = $(this).children( ".submenu" );
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/scripts/packed/trackster.js
--- a/static/scripts/packed/trackster.js
+++ b/static/scripts/packed/trackster.js
@@ -1,1 +1,1 @@
-var class_module=function(b,a){var c=function(){var f=arguments[0];for(var e=1;e<arguments.length;e++){var d=arguments[e];for(key in d){f[key]=d[key]}}return f};a.extend=c};var BEFORE=1001,CONTAINS=1002,OVERLAP_START=1003,OVERLAP_END=1004,CONTAINED_BY=1005,AFTER=1006;var compute_overlap=function(e,b){var g=e[0],f=e[1],d=b[0],c=b[1],a;if(g<d){if(f<d){a=BEFORE}else{if(f<=c){a=OVERLAP_START}else{a=CONTAINS}}}else{if(g>c){a=AFTER}else{if(f<=c){a=CONTAINED_BY}else{a=OVERLAP_END}}}return a};var is_overlap=function(c,b){var a=compute_overlap(c,b);return(a!==BEFORE&&a!==AFTER)};var trackster_module=function(f,T){var n=f("class").extend,p=f("slotting"),I=f("painters");var Z=function(aa,ab){this.document=aa;this.default_font=ab!==undefined?ab:"9px Monaco, Lucida Console, monospace";this.dummy_canvas=this.new_canvas();this.dummy_context=this.dummy_canvas.getContext("2d");this.dummy_context.font=this.default_font;this.char_width_px=this.dummy_context.measureText("A").width;this.patterns={};this.load_pattern("right_strand","/visualization/strand_right.png");this.load_pattern("left_strand","/visualization/strand_left.png");this.load_pattern("right_strand_inv","/visualization/strand_right_inv.png");this.load_pattern("left_strand_inv","/visualization/strand_left_inv.png")};n(Z.prototype,{load_pattern:function(aa,ae){var ab=this.patterns,ac=this.dummy_context,ad=new Image();ad.src=image_path+ae;ad.onload=function(){ab[aa]=ac.createPattern(ad,"repeat")}},get_pattern:function(aa){return this.patterns[aa]},new_canvas:function(){var aa=this.document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(aa)}aa.manager=this;return aa}});var C=function(aa,ab){aa.bind("drag",{handle:ab,relative:true},function(af,ag){var ae=$(this).parent();var ad=ae.children();var ac;for(ac=0;ac<ad.length;ac++){if(ag.offsetY<$(ad.get(ac)).position().top){break}}if(ac===ad.length){if(this!==ad.get(ac-1)){ae.append(this)}}else{if(this!==ad.get(ac)){$(this).insertBefore(ad.get(ac))}}}).bind("dragstart",function(){$(this).css({"border-top":"1px solid blue","border-bottom":"1px solid blue"})}).bind("dragend",function(){$(this).css("border","0px")})};T.sortable=C;var D=9,A=18,O=D+2,w=100,F=12000,M=200,z=5,s=10,H=5000,t=100,m="There was an error in indexing this dataset. ",G="A converter for this dataset is not installed. Please check your datatypes_conf.xml file.",B="No data for this chrom/contig.",q="Currently indexing... 
please wait",v="Tool cannot be rerun: ",a="Loading data...",U="Ready for display",d=10,r=5,y=5;function u(aa){return Math.round(aa*1000)/1000}var c=function(aa){this.num_elements=aa;this.clear()};n(c.prototype,{get:function(ab){var aa=this.key_ary.indexOf(ab);if(aa!==-1){if(this.obj_cache[ab].stale){this.key_ary.splice(aa,1);delete this.obj_cache[ab]}else{this.move_key_to_end(ab,aa)}}return this.obj_cache[ab]},set:function(ab,ac){if(!this.obj_cache[ab]){if(this.key_ary.length>=this.num_elements){var aa=this.key_ary.shift();delete this.obj_cache[aa]}this.key_ary.push(ab)}this.obj_cache[ab]=ac;return ac},move_key_to_end:function(ab,aa){this.key_ary.splice(aa,1);this.key_ary.push(ab)},clear:function(){this.obj_cache={};this.key_ary=[]},size:function(){return this.key_ary.length}});var N=function(ab,aa,ac){c.call(this,ab);this.track=aa;this.subset=(ac!==undefined?ac:true)};n(N.prototype,c.prototype,{load_data:function(ai,aj,ae,ah,ab,ag){var ad={chrom:ai,low:aj,high:ae,mode:ah,resolution:ab,dataset_id:this.track.dataset_id,hda_ldda:this.track.hda_ldda};$.extend(ad,ag);if(this.track.filters_manager){var ak=[];var aa=this.track.filters_manager.filters;for(var af=0;af<aa.length;af++){ak[ak.length]=aa[af].name}ad.filter_cols=JSON.stringify(ak)}var ac=this;return $.getJSON(this.track.data_url,ad,function(al){ac.set_data(aj,ae,ah,al)})},get_data:function(ac,aa,af,ag,ab,ae){var ad=this.get(this.gen_key(aa,af,ag));if(ad){return ad}ad=this.load_data(ac,aa,af,ag,ab,ae);this.set_data(aa,af,ag,ad);return ad},set_data:function(ab,ac,ad,aa){return this.set(this.gen_key(ab,ac,ad),aa)},gen_key:function(aa,ac,ad){var ab=aa+"_"+ac+"_"+ad;return ab},split_key:function(aa){return aa.split("_")}});var E=function(ab,aa,ac){N.call(this,ab,aa,ac)};n(E.prototype,N.prototype,c.prototype,{load_data:function(ac,aa,ae,af,ab,ad){if(ab>1){return}return N.prototype.load_data.call(this,ac,aa,ae,af,ab,ad)}});var Y=function(aa,ad,ac,ab,ae){this.container=aa;this.chrom=null;this.vis_id=ac;this.dbkey=ab;this.title=ad;this.tracks=[];this.label_tracks=[];this.max_low=0;this.max_high=0;this.num_tracks=0;this.track_id_counter=0;this.zoom_factor=3;this.min_separation=30;this.has_changes=false;this.init(ae);this.canvas_manager=new Z(aa.get(0).ownerDocument);this.reset()};n(Y.prototype,{init:function(ad){var ac=this.container,aa=this;this.top_container=$("<div/>").addClass("top-container").appendTo(ac);this.content_div=$("<div/>").addClass("content").css("position","relative").appendTo(ac);this.bottom_container=$("<div/>").addClass("bottom-container").appendTo(ac);this.top_labeltrack=$("<div/>").addClass("top-labeltrack").appendTo(this.top_container);this.viewport_container=$("<div/>").addClass("viewport-container").addClass("viewport-container").appendTo(this.content_div);this.intro_div=$("<div/>").addClass("intro").text("Select a chrom from the dropdown below").hide();this.nav_labeltrack=$("<div/>").addClass("nav-labeltrack").appendTo(this.bottom_container);this.nav_container=$("<div/>").addClass("nav-container").prependTo(this.top_container);this.nav=$("<div/>").addClass("nav").appendTo(this.nav_container);this.overview=$("<div/>").addClass("overview").appendTo(this.bottom_container);this.overview_viewport=$("<div/>").addClass("overview-viewport").appendTo(this.overview);this.overview_close=$("<a href='javascript:void(0);'>Close 
Overview</a>").addClass("overview-close").hide().appendTo(this.overview_viewport);this.overview_highlight=$("<div/>").addClass("overview-highlight").hide().appendTo(this.overview_viewport);this.overview_box_background=$("<div/>").addClass("overview-boxback").appendTo(this.overview_viewport);this.overview_box=$("<div/>").addClass("overview-box").appendTo(this.overview_viewport);this.default_overview_height=this.overview_box.height();this.nav_controls=$("<div/>").addClass("nav-controls").appendTo(this.nav);this.chrom_select=$("<select/>").attr({name:"chrom"}).css("width","15em").addClass("no-autocomplete").append("<option value=''>Loading</option>").appendTo(this.nav_controls);var ab=function(ae){if(ae.type==="focusout"||(ae.keyCode||ae.which)===13||(ae.keyCode||ae.which)===27){if((ae.keyCode||ae.which)!==27){aa.go_to($(this).val())}$(this).hide();$(this).val("");aa.location_span.show();aa.chrom_select.show()}};this.nav_input=$("<input/>").addClass("nav-input").hide().bind("keyup focusout",ab).appendTo(this.nav_controls);this.location_span=$("<span/>").addClass("location").appendTo(this.nav_controls);this.location_span.bind("click",function(){aa.location_span.hide();aa.chrom_select.hide();aa.nav_input.val(aa.chrom+":"+aa.low+"-"+aa.high);aa.nav_input.css("display","inline-block");aa.nav_input.select();aa.nav_input.focus()});if(this.vis_id!==undefined){this.hidden_input=$("<input/>").attr("type","hidden").val(this.vis_id).appendTo(this.nav_controls)}this.zo_link=$("<a id='zoom-out' />").click(function(){aa.zoom_out();aa.redraw()}).appendTo(this.nav_controls);this.zi_link=$("<a id='zoom-in' />").click(function(){aa.zoom_in();aa.redraw()}).appendTo(this.nav_controls);this.load_chroms({low:0},ad);this.chrom_select.bind("change",function(){aa.change_chrom(aa.chrom_select.val())});this.intro_div.show();this.content_div.bind("click",function(ae){$(this).find("input").trigger("blur")});this.content_div.bind("dblclick",function(ae){aa.zoom_in(ae.pageX,this.viewport_container)});this.overview_box.bind("dragstart",function(ae,af){this.current_x=af.offsetX}).bind("drag",function(ae,ag){var ah=ag.offsetX-this.current_x;this.current_x=ag.offsetX;var af=Math.round(ah/aa.viewport_container.width()*(aa.max_high-aa.max_low));aa.move_delta(-af)});this.overview_close.bind("click",function(){for(var af=0,ae=aa.tracks.length;af<ae;af++){aa.tracks[af].is_overview=false}$(this).siblings().filter("canvas").remove();$(this).parent().css("height",aa.overview_box.height());aa.overview_highlight.hide();$(this).hide()});this.viewport_container.bind("draginit",function(ae,af){if(ae.clientX>aa.viewport_container.width()-16){return false}}).bind("dragstart",function(ae,af){af.original_low=aa.low;af.current_height=ae.clientY;af.current_x=af.offsetX}).bind("drag",function(ag,ai){var ae=$(this);var aj=ai.offsetX-ai.current_x;var af=ae.scrollTop()-(ag.clientY-ai.current_height);ae.scrollTop(af);ai.current_height=ag.clientY;ai.current_x=ai.offsetX;var ah=Math.round(aj/aa.viewport_container.width()*(aa.high-aa.low));aa.move_delta(ah)}).bind("mousewheel",function(ag,ai,af,ae){if(af){var ah=Math.round(-af/aa.viewport_container.width()*(aa.high-aa.low));aa.move_delta(ah)}});this.top_labeltrack.bind("dragstart",function(ae,af){return $("<div 
/>").css({height:aa.content_div.height()+aa.top_labeltrack.height()+aa.nav_labeltrack.height()+1,top:"0px",position:"absolute","background-color":"#ccf",opacity:0.5,"z-index":1000}).appendTo($(this))}).bind("drag",function(ai,aj){$(aj.proxy).css({left:Math.min(ai.pageX,aj.startX),width:Math.abs(ai.pageX-aj.startX)});var af=Math.min(ai.pageX,aj.startX)-aa.container.offset().left,ae=Math.max(ai.pageX,aj.startX)-aa.container.offset().left,ah=(aa.high-aa.low),ag=aa.viewport_container.width();aa.update_location(Math.round(af/ag*ah)+aa.low,Math.round(ae/ag*ah)+aa.low)}).bind("dragend",function(aj,ak){var af=Math.min(aj.pageX,ak.startX),ae=Math.max(aj.pageX,ak.startX),ah=(aa.high-aa.low),ag=aa.viewport_container.width(),ai=aa.low;aa.low=Math.round(af/ag*ah)+ai;aa.high=Math.round(ae/ag*ah)+ai;$(ak.proxy).remove();aa.redraw()});this.add_label_track(new X(this,this.top_labeltrack));this.add_label_track(new X(this,this.nav_labeltrack));$(window).bind("resize",function(){aa.resize_window()});$(document).bind("redraw",function(){aa.redraw()});this.reset();$(window).trigger("resize")},update_location:function(aa,ab){this.location_span.text(commatize(aa)+" - "+commatize(ab));this.nav_input.val(this.chrom+":"+commatize(aa)+"-"+commatize(ab))},load_chroms:function(ab,ac){ab.num=t;$.extend(ab,(this.vis_id!==undefined?{vis_id:this.vis_id}:{dbkey:this.dbkey}));var aa=this;$.ajax({url:chrom_url,data:ab,dataType:"json",success:function(ae){if(ae.chrom_info.length===0){alert("Invalid chromosome: "+ab.chrom);return}if(ae.reference){aa.add_label_track(new x(aa))}aa.chrom_data=ae.chrom_info;var ah='<option value="">Select Chrom/Contig</option>';for(var ag=0,ad=aa.chrom_data.length;ag<ad;ag++){var af=aa.chrom_data[ag].chrom;ah+='<option value="'+af+'">'+af+"</option>"}if(ae.prev_chroms){ah+='<option value="previous">Previous '+t+"</option>"}if(ae.next_chroms){ah+='<option value="next">Next '+t+"</option>"}aa.chrom_select.html(ah);if(ac){ac()}aa.chrom_start_index=ae.start_index},error:function(){alert("Could not load chroms for this dbkey:",aa.dbkey)}})},change_chrom:function(ae,ab,ag){if(!ae||ae==="None"){return}var ad=this;if(ae==="previous"){ad.load_chroms({low:this.chrom_start_index-t});return}if(ae==="next"){ad.load_chroms({low:this.chrom_start_index+t});return}var af=$.grep(ad.chrom_data,function(ai,aj){return ai.chrom===ae})[0];if(af===undefined){ad.load_chroms({chrom:ae},function(){ad.change_chrom(ae,ab,ag)});return}else{if(ae!==ad.chrom){ad.chrom=ae;if(!ad.chrom){ad.intro_div.show()}else{ad.intro_div.hide()}ad.chrom_select.val(ad.chrom);ad.max_high=af.len-1;ad.reset();ad.redraw(true);for(var ah=0,aa=ad.tracks.length;ah<aa;ah++){var ac=ad.tracks[ah];if(ac.init){ac.init()}}}if(ab!==undefined&&ag!==undefined){ad.low=Math.max(ab,0);ad.high=Math.min(ag,ad.max_high)}ad.reset_overview();ad.redraw()}},go_to:function(ae){var ai=this,aa,ad,ab=ae.split(":"),ag=ab[0],ah=ab[1];if(ah!==undefined){try{var af=ah.split("-");aa=parseInt(af[0].replace(/,/g,""),10);ad=parseInt(af[1].replace(/,/g,""),10)}catch(ac){return false}}ai.change_chrom(ag,aa,ad)},move_fraction:function(ac){var aa=this;var ab=aa.high-aa.low;this.move_delta(ac*ab)},move_delta:function(ac){var aa=this;var 
ab=aa.high-aa.low;if(aa.low-ac<aa.max_low){aa.low=aa.max_low;aa.high=aa.max_low+ab}else{if(aa.high-ac>aa.max_high){aa.high=aa.max_high;aa.low=aa.max_high-ab}else{aa.high-=ac;aa.low-=ac}}aa.redraw()},add_track:function(aa){aa.view=this;aa.track_id=this.track_id_counter;this.tracks.push(aa);if(aa.init){aa.init()}aa.container_div.attr("id","track_"+aa.track_id);C(aa.container_div,".draghandle");this.track_id_counter+=1;this.num_tracks+=1},add_label_track:function(aa){aa.view=this;this.label_tracks.push(aa)},remove_track:function(aa){this.has_changes=true;aa.container_div.fadeOut("slow",function(){$(this).remove()});delete this.tracks[this.tracks.indexOf(aa)];this.num_tracks-=1},reset:function(){this.low=this.max_low;this.high=this.max_high;this.viewport_container.find(".yaxislabel").remove()},redraw:function(ah){var ag=this.high-this.low,af=this.low,ab=this.high;if(af<this.max_low){af=this.max_low}if(ab>this.max_high){ab=this.max_high}if(this.high!==0&&ag<this.min_separation){ab=af+this.min_separation}this.low=Math.floor(af);this.high=Math.ceil(ab);this.resolution=Math.pow(z,Math.ceil(Math.log((this.high-this.low)/M)/Math.log(z)));this.zoom_res=Math.pow(s,Math.max(0,Math.ceil(Math.log(this.resolution,s)/Math.log(s))));var aa=(this.low/(this.max_high-this.max_low)*this.overview_viewport.width())||0;var ae=((this.high-this.low)/(this.max_high-this.max_low)*this.overview_viewport.width())||0;var ai=13;this.overview_box.css({left:aa,width:Math.max(ai,ae)}).show();if(ae<ai){this.overview_box.css("left",aa-(ai-ae)/2)}if(this.overview_highlight){this.overview_highlight.css({left:aa,width:ae})}this.update_location(this.low,this.high);if(!ah){for(var ac=0,ad=this.tracks.length;ac<ad;ac++){if(this.tracks[ac]&&this.tracks[ac].enabled){this.tracks[ac].draw()}}for(ac=0,ad=this.label_tracks.length;ac<ad;ac++){this.label_tracks[ac].draw()}}},zoom_in:function(ab,ac){if(this.max_high===0||this.high-this.low<this.min_separation){return}var ad=this.high-this.low,ae=ad/2+this.low,aa=(ad/this.zoom_factor)/2;if(ab){ae=ab/this.viewport_container.width()*(this.high-this.low)+this.low}this.low=Math.round(ae-aa);this.high=Math.round(ae+aa);this.redraw()},zoom_out:function(){if(this.max_high===0){return}var ab=this.high-this.low,ac=ab/2+this.low,aa=(ab*this.zoom_factor)/2;this.low=Math.round(ac-aa);this.high=Math.round(ac+aa);this.redraw()},resize_window:function(){this.viewport_container.height(this.container.height()-this.top_container.height()-this.bottom_container.height());this.nav_container.width(this.container.width());this.redraw()},reset_overview:function(){this.overview_viewport.find("canvas").remove();this.overview_viewport.height(this.default_overview_height);this.overview_box.height(this.default_overview_height);this.overview_close.hide();this.overview_highlight.hide()}});var o=function(ac,ag){this.track=ac;this.name=ag.name;this.params=[];var an=ag.params;for(var ad=0;ad<an.length;ad++){var ai=an[ad],ab=ai.name,am=ai.label,ae=unescape(ai.html),ao=ai.value,ak=ai.type;if(ak==="number"){this.params[this.params.length]=new g(ab,am,ae,ao,ai.min,ai.max)}else{if(ak=="select"){this.params[this.params.length]=new K(ab,am,ae,ao)}else{console.log("WARNING: unrecognized tool parameter type:",ab,ak)}}}this.parent_div=$("<div/>").addClass("dynamic-tool").hide();this.parent_div.bind("drag",function(aq){aq.stopPropagation()}).bind("click",function(aq){aq.stopPropagation()}).bind("dblclick",function(aq){aq.stopPropagation()});var al=$("<div class='tool-name'>").appendTo(this.parent_div).text(this.name);var 
aj=this.params;var ah=this;$.each(this.params,function(ar,av){var au=$("<div>").addClass("param-row").appendTo(ah.parent_div);var aq=$("<div>").addClass("param-label").text(av.label).appendTo(au);var at=$("<div/>").addClass("slider").html(av.html).appendTo(au);at.find(":input").val(av.value);$("<div style='clear: both;'/>").appendTo(au)});this.parent_div.find("input").click(function(){$(this).select()});var ap=$("<div>").addClass("param-row").appendTo(this.parent_div);var af=$("<input type='submit'>").attr("value","Run on complete dataset").appendTo(ap);var aa=$("<input type='submit'>").attr("value","Run on visible region").css("margin-left","3em").appendTo(ap);var ah=this;aa.click(function(){ah.run_on_region()});af.click(function(){ah.run_on_dataset()})};n(o.prototype,{get_param_values_dict:function(){var aa={};this.parent_div.find(":input").each(function(){var ab=$(this).attr("name"),ac=$(this).val();aa[ab]=JSON.stringify(ac)});return aa},get_param_values:function(){var ab=[];var aa={};this.parent_div.find(":input").each(function(){var ac=$(this).attr("name"),ad=$(this).val();if(ac){ab[ab.length]=ad}});return ab},run_on_dataset:function(){var aa=this;aa.run({dataset_id:this.track.original_dataset_id,tool_id:aa.name},function(ab){show_modal(aa.name+" is Running",aa.name+" is running on the complete dataset. Tool outputs are in dataset's history.",{Close:hide_modal})})},run_on_region:function(){var aa={dataset_id:this.track.original_dataset_id,chrom:this.track.view.chrom,low:this.track.view.low,high:this.track.view.high,tool_id:this.name},ac=this.track,ab=aa.tool_id+ac.tool_region_and_parameters_str(aa.chrom,aa.low,aa.high),ad;if(ac instanceof e){ad=new Q(ab,view,ac.hda_ldda,undefined,{},{},ac);ad.change_mode(ac.mode)}this.track.add_track(ad);ad.content_div.text("Starting job.");this.run(aa,function(ae){ad.dataset_id=ae.dataset_id;ad.content_div.text("Running job.");ad.init()})},run:function(ab,ac){$.extend(ab,this.get_param_values_dict());var aa=function(){$.getJSON(rerun_tool_url,ab,function(ad){if(ad==="no converter"){new_track.container_div.addClass("error");new_track.content_div.text(G)}else{if(ad.error){new_track.container_div.addClass("error");new_track.content_div.text(v+ad.message)}else{if(ad==="pending"){new_track.container_div.addClass("pending");new_track.content_div.text("Converting input data so that it can be easily reused.");setTimeout(aa,2000)}else{ac(ad)}}}})};aa()}});var K=function(ab,aa,ac,ad){this.name=ab;this.label=aa;this.html=ac;this.value=ad};var g=function(ac,ab,ae,af,ad,aa){K.call(this,ac,ab,ae,af);this.min=ad;this.max=aa};var h=function(ab,aa,ac,ad){this.name=ab;this.index=aa;this.tool_id=ac;this.tool_exp_name=ad};var R=function(ab,aa,ac,ad){h.call(this,ab,aa,ac,ad);this.low=-Number.MAX_VALUE;this.high=Number.MAX_VALUE;this.min=Number.MAX_VALUE;this.max=-Number.MAX_VALUE;this.slider=null;this.slider_label=null};n(R.prototype,{applies_to:function(aa){if(aa.length>this.index){return true}return false},keep:function(aa){if(!this.applies_to(aa)){return true}var ab=parseInt(aa[this.index]);return(isNaN(ab)||(ab>=this.low&&ab<=this.high))},update_attrs:function(ab){var aa=false;if(!this.applies_to(ab)){return aa}if(ab[this.index]<this.min){this.min=Math.floor(ab[this.index]);aa=true}if(ab[this.index]>this.max){this.max=Math.ceil(ab[this.index]);aa=true}return aa},update_ui_elt:function(){var ac=function(af,ad){var ae=ad-af;return(ae<=2?0.01:1)};var 
ab=this.slider.slider("option","min"),aa=this.slider.slider("option","max");if(this.min<ab||this.max>aa){this.slider.slider("option","min",this.min);this.slider.slider("option","max",this.max);this.slider.slider("option","step",ac(this.min,this.max));this.slider.slider("option","values",[this.min,this.max])}}});var W=function(ac,al){this.track=ac;this.filters=[];for(var ag=0;ag<al.length;ag++){var aa=al[ag],ab=aa.name,ak=aa.type,ai=aa.index,an=aa.tool_id,ad=aa.tool_exp_name;if(ak==="int"||ak==="float"){this.filters[ag]=new R(ab,ai,an,ad)}else{console.log("ERROR: unsupported filter: ",ab,ak)}}var aj=function(ao,ap,aq){ao.click(function(){var ar=ap.text();max=parseFloat(aq.slider("option","max")),input_size=(max<=1?4:max<=1000000?max.toString().length:6),multi_value=false;if(aq.slider("option","values")){input_size=2*input_size+1;multi_value=true}ap.text("");$("<input type='text'/>").attr("size",input_size).attr("maxlength",input_size).attr("value",ar).appendTo(ap).focus().select().click(function(at){at.stopPropagation()}).blur(function(){$(this).remove();ap.text(ar)}).keyup(function(ax){if(ax.keyCode===27){$(this).trigger("blur")}else{if(ax.keyCode===13){var av=aq.slider("option","min"),at=aq.slider("option","max"),aw=function(ay){return(isNaN(ay)||ay>at||ay<av)},au=$(this).val();if(!multi_value){au=parseFloat(au);if(aw(au)){alert("Parameter value must be in the range ["+av+"-"+at+"]");return $(this)}}else{au=au.split("-");au=[parseFloat(au[0]),parseFloat(au[1])];if(aw(au[0])||aw(au[1])){alert("Parameter value must be in the range ["+av+"-"+at+"]");return $(this)}}aq.slider((multi_value?"values":"value"),au)}}})})};this.parent_div=$("<div/>").addClass("filters").hide();this.parent_div.bind("drag",function(ao){ao.stopPropagation()}).bind("click",function(ao){ao.stopPropagation()}).bind("dblclick",function(ao){ao.stopPropagation()}).bind("keydown",function(ao){ao.stopPropagation()});var ae=this;$.each(this.filters,function(av,ap){var ar=$("<div/>").addClass("slider-row").appendTo(ae.parent_div);var ao=$("<div/>").addClass("slider-label").appendTo(ar);var ax=$("<span/>").addClass("slider-name").text(ap.name+" ").appendTo(ao);var aq=$("<span/>");var at=$("<span/>").addClass("slider-value").appendTo(ao).append("[").append(aq).append("]");var aw=$("<div/>").addClass("slider").appendTo(ar);ap.control_element=$("<div/>").attr("id",ap.name+"-filter-control").appendTo(aw);var au=[0,0];ap.control_element.slider({range:true,min:Number.MAX_VALUE,max:-Number.MIN_VALUE,values:[0,0],slide:function(ay,az){au=az.values;aq.text(az.values[0]+"-"+az.values[1]);setTimeout(function(){if(az.values[0]==au[0]&&az.values[1]==au[1]){var aA=az.values;aq.text(aA[0]+"-"+aA[1]);ap.low=aA[0];ap.high=aA[1];ae.track.draw(true,true)}},50)},change:function(ay,az){ap.control_element.slider("option","slide").call(ap.control_element,ay,az)}});ap.slider=ap.control_element;ap.slider_label=aq;aj(at,aq,ap.control_element);$("<div style='clear: both;'/>").appendTo(ar)});if(this.filters.length!=0){var am=$("<div>").addClass("param-row").appendTo(this.parent_div);var ah=$("<input type='submit'>").attr("value","Run on complete dataset").appendTo(am);var af=this;ah.click(function(){af.run_on_dataset()})}};n(W.prototype,{reset_filters:function(){for(var aa=0;aa<this.filters.length;aa++){filter=this.filters[aa];filter.slider.slider("option","values",[filter.min,filter.max])}},run_on_dataset:function(){var ai=function(am,ak,al){if(!(ak in am)){am[ak]=al}return am[ak]};var ac={},aa,ab,ad;for(var 
ae=0;ae<this.filters.length;ae++){aa=this.filters[ae];if(aa.tool_id){if(aa.min!=aa.low){ab=ai(ac,aa.tool_id,[]);ab[ab.length]=aa.tool_exp_name+" >= "+aa.low}if(aa.max!=aa.high){ab=ai(ac,aa.tool_id,[]);ab[ab.length]=aa.tool_exp_name+" <= "+aa.high}}}var ag=[];for(var aj in ac){ag[ag.length]=[aj,ac[aj]]}var ah=ag.length;(function af(aq,an){var al=an[0],am=al[0],ap=al[1],ao="("+ap.join(") and (")+")",ak={cond:ao,input:aq,target_dataset_id:aq,tool_id:am},an=an.slice(1);$.getJSON(run_tool_url,ak,function(ar){if(ar.error){show_modal("Filter Dataset","Error running tool "+am,{Close:hide_modal})}else{if(an.length===0){show_modal("Filtering Dataset","Filter(s) are running on the complete dataset. Outputs are in dataset's history.",{Close:hide_modal})}else{af(ar.dataset_id,an)}}})})(this.track.dataset_id,ag)}});var V=function(aa){this.track=aa.track;this.params=aa.params;this.values={};if(aa.saved_values){this.restore_values(aa.saved_values)}this.onchange=aa.onchange};n(V.prototype,{restore_values:function(aa){var ab=this;$.each(this.params,function(ac,ad){if(aa[ad.key]!==undefined){ab.values[ad.key]=aa[ad.key]}else{ab.values[ad.key]=ad.default_value}})},build_form:function(){var ab=this;var aa=$("<div />");$.each(this.params,function(af,ad){if(!ad.hidden){var ac="param_"+af;var ak=$("<div class='form-row' />").appendTo(aa);ak.append($("<label />").attr("for",ac).text(ad.label+":"));if(ad.type==="bool"){ak.append($('<input type="checkbox" />').attr("id",ac).attr("name",ac).attr("checked",ab.values[ad.key]))}else{if(ad.type==="color"){var ah=ab.values[ad.key];var ag=$("<input />").attr("id",ac).attr("name",ac).val(ah);var ai=$("<div class='tipsy tipsy-north' style='position: absolute;' />").hide();var ae=$("<div style='background-color: black; padding: 10px;'></div>").appendTo(ai);var aj=$("<div/>").appendTo(ae).farbtastic({width:100,height:100,callback:ag,color:ah});$("<div />").append(ag).append(ai).appendTo(ak).bind("click",function(al){ai.css({left:$(this).position().left+($(ag).width()/2)-60,top:$(this).position().top+$(this.height)}).show();$(document).bind("click.color-picker",function(){ai.hide();$(document).unbind("click.color-picker")});al.stopPropagation()})}else{ak.append($("<input />").attr("id",ac).attr("name",ac).val(ab.values[ad.key]))}}}});return aa},update_from_form:function(aa){var ac=this;var ab=false;$.each(this.params,function(ad,af){if(!af.hidden){var ag="param_"+ad;var ae=aa.find("#"+ag).val();if(af.type==="float"){ae=parseFloat(ae)}else{if(af.type==="int"){ae=parseInt(ae)}else{if(af.type==="bool"){ae=aa.find("#"+ag).is(":checked")}}}if(ae!==ac.values[af.key]){ac.values[af.key]=ae;ab=true}}});if(ab){this.onchange()}}});var b=function(ac,ab,aa){this.index=ac;this.resolution=ab;this.canvas=$("<div class='track-tile'/>").append(aa);this.stale=false};var l=function(ac,ab,aa,ad){b.call(this,ac,ab,aa);this.max_val=ad};var L=function(ac,ab,aa,ad){b.call(this,ac,ab,aa);this.message=ad};var j=function(ab,aa,ae,ac,ad){this.name=ab;this.view=aa;this.parent_element=ae;this.data_url=(ac?ac:default_data_url);this.data_url_extra_params={};this.data_query_wait=(ad?ad:H);this.dataset_check_url=converted_datasets_state_url;this.container_div=$("<div />").addClass("track").css("position","relative");if(!this.hidden){this.header_div=$("<div class='track-header' />").appendTo(this.container_div);if(this.view.editor){this.drag_div=$("<div class='draghandle' />").appendTo(this.header_div)}this.name_div=$("<div class='menubutton popup' 
/>").appendTo(this.header_div);this.name_div.text(this.name);this.name_div.attr("id",this.name.replace(/\s+/g,"-").replace(/[^a-zA-Z0-9\-]/g,"").toLowerCase())}this.content_div=$("<div class='track-content'>").appendTo(this.container_div);this.parent_element.append(this.container_div)};n(j.prototype,{get_type:function(){if(this instanceof X){return"LabelTrack"}else{if(this instanceof x){return"ReferenceTrack"}else{if(this instanceof k){return"LineTrack"}else{if(this instanceof S){return"ReadTrack"}else{if(this instanceof Q){return"ToolDataFeatureTrack"}else{if(this instanceof P){return"VcfTrack"}else{if(this instanceof e){return"FeatureTrack"}}}}}}}return""},init:function(){var aa=this;aa.enabled=false;aa.tile_cache.clear();aa.data_cache.clear();aa.initial_canvas=undefined;aa.content_div.css("height","auto");aa.container_div.removeClass("nodata error pending");if(!aa.dataset_id){return}$.getJSON(converted_datasets_state_url,{hda_ldda:aa.hda_ldda,dataset_id:aa.dataset_id,chrom:aa.view.chrom},function(ab){if(!ab||ab==="error"||ab.kind==="error"){aa.container_div.addClass("error");aa.content_div.text(m);if(ab.message){var ad=aa.view.tracks.indexOf(aa);var ac=$(" <a href='javascript:void(0);'></a>").text("View error").bind("click",function(){show_modal("Trackster Error","<pre>"+ab.message+"</pre>",{Close:hide_modal})});aa.content_div.append(ac)}}else{if(ab==="no converter"){aa.container_div.addClass("error");aa.content_div.text(G)}else{if(ab==="no data"||(ab.data!==undefined&&(ab.data===null||ab.data.length===0))){aa.container_div.addClass("nodata");aa.content_div.text(B)}else{if(ab==="pending"){aa.container_div.addClass("pending");aa.content_div.text(q);setTimeout(function(){aa.init()},aa.data_query_wait)}else{if(ab.status==="data"){if(ab.valid_chroms){aa.valid_chroms=ab.valid_chroms;aa.make_name_popup_menu()}aa.content_div.text(U);if(aa.view.chrom){aa.content_div.text("");aa.content_div.css("height",aa.height_px+"px");aa.enabled=true;$.when(aa.predraw_init()).done(function(){aa.container_div.removeClass("nodata error pending");aa.draw()})}}}}}}})},predraw_init:function(){},update_name:function(aa){this.old_name=this.name;this.name=aa;this.name_div.text(this.name)},revert_name:function(){this.name=this.old_name;this.name_div.text(this.name)}});var J=function(ah,af,ai){var ab=this,aj=ab.view;this.filters_manager=(ah!==undefined?new W(this,ah):undefined);this.filters_available=false;this.filters_visible=false;this.tool=(af!==undefined&&obj_length(af)>0?new o(this,af):undefined);this.parent_track=ai;this.child_tracks=[];if(ab.hidden){return}if(this.parent_track){this.header_div.find(".draghandle").removeClass("draghandle").addClass("child-track-icon").addClass("icon-button");this.parent_element.addClass("child-track");this.tool=undefined}ab.child_tracks_container=$("<div/>").addClass("child-tracks-container").hide();ab.container_div.append(ab.child_tracks_container);if(this.filters_manager){this.filters_div=this.filters_manager.parent_div;this.header_div.after(this.filters_div)}if(this.tool){this.dynamic_tool_div=this.tool.parent_div;this.header_div.after(this.dynamic_tool_div)}if(ab.display_modes!==undefined){if(ab.mode_div===undefined){ab.mode_div=$("<div class='right-float menubutton popup' />").appendTo(ab.header_div);var ac=(ab.track_config&&ab.track_config.values.mode?ab.track_config.values.mode:ab.display_modes[0]);ab.mode=ac;ab.mode_div.text(ac);var aa={};for(var ad=0,ag=ab.display_modes.length;ad<ag;ad++){var ae=ab.display_modes[ad];aa[ae]=function(ak){return 
function(){ab.change_mode(ak)}}(ae)}make_popupmenu(ab.mode_div,aa)}else{ab.mode_div.hide()}}this.make_name_popup_menu()};n(J.prototype,j.prototype,{change_mode:function(ab){var aa=this;aa.mode_div.text(ab);aa.mode=ab;aa.track_config.values.mode=ab;aa.tile_cache.clear();aa.draw()},make_name_popup_menu:function(){var ab=this;var aa={};aa["Edit configuration"]=function(){var ah=function(){hide_modal();$(window).unbind("keypress.check_enter_esc")},af=function(){ab.track_config.update_from_form($(".dialog-box"));hide_modal();$(window).unbind("keypress.check_enter_esc")},ag=function(ai){if((ai.keyCode||ai.which)===27){ah()}else{if((ai.keyCode||ai.which)===13){af()}}};$(window).bind("keypress.check_enter_esc",ag);show_modal("Configure Track",ab.track_config.build_form(),{Cancel:ah,OK:af})};if(ab.filters_available>0){var ae=(ab.filters_div.is(":visible")?"Hide filters":"Show filters");aa[ae]=function(){ab.filters_visible=(ab.filters_div.is(":visible"));if(ab.filters_visible){ab.filters_manager.reset_filters()}ab.filters_div.toggle();ab.make_name_popup_menu()}}if(ab.tool){var ae=(ab.dynamic_tool_div.is(":visible")?"Hide tool":"Show tool");aa[ae]=function(){if(!ab.dynamic_tool_div.is(":visible")){ab.update_name(ab.name+ab.tool_region_and_parameters_str())}else{menu_option_text="Show dynamic tool";ab.revert_name()}ab.dynamic_tool_div.toggle();ab.make_name_popup_menu()}}if(ab.valid_chroms){aa["List chrom/contigs with data"]=function(){show_modal("Chrom/contigs with data","<p>"+ab.valid_chroms.join("<br/>")+"</p>",{Close:function(){hide_modal()}})}}var ac=view;var ad=function(){$("#no-tracks").show()};if(this.parent_track){ac=this.parent_track;ad=function(){}}aa.Remove=function(){ac.remove_track(ab);if(ac.num_tracks===0){ad()}};make_popupmenu(ab.name_div,aa)},draw:function(aa,ac){if(!this.dataset_id){return}var au=this.view.low,ag=this.view.high,ai=ag-au,ak=this.view.container.width(),ae=ak/ai,al=this.view.resolution,ad=$("<div style='position: relative;'></div>"),am=function(aw,ax,av){return aw+"_"+ax+"_"+av};if(!ac){this.content_div.children().remove()}this.content_div.append(ad);this.max_height=0;var ao=Math.floor(au/al/M);var af=[];var ap=0;while((ao*M*al)<ag){var at=am(ak,ae,ao);var ah=this.tile_cache.get(at);var aq=ao*M*this.view.resolution;var ab=aq+M*this.view.resolution;if(!aa&&ah){af[af.length]=ah;this.show_tile(ah,ad,aq,ab,ae)}else{this.delayed_draw(aa,at,ao,al,ad,ae,af)}ao+=1;ap++}var aj=this;var ar=setInterval(function(){if(af.length===ap){clearInterval(ar);if(ac){var aA=aj.content_div.children();var aB=false;for(var az=aA.length-1,aF=0;az>=aF;az--){var ay=$(aA[az]);if(aB){ay.remove()}else{if(ay.children().length!==0){aB=true}}}}if(aj instanceof e&&aj.mode=="Histogram"){var aE=-1;for(var az=0;az<af.length;az++){var aH=af[az].max_val;if(aH>aE){aE=aH}}for(var az=0;az<af.length;az++){if(af[az].max_val!==aE){var aG=af[az];aG.canvas.remove();aj.delayed_draw(true,am(ak,ae,aG.index),aG.index,aG.resolution,ad,ae,[],{max:aE})}}}if(aj.filters_manager){var ax=aj.filters_manager.filters;for(var aD=0;aD<ax.length;aD++){ax[aD].update_ui_elt()}var aC=false;if(aj.example_feature){for(var aD=0;aD<ax.length;aD++){if(ax[aD].applies_to(aj.example_feature)){aC=true;break}}}if(aj.filters_available!==aC){aj.filters_available=aC;if(!aj.filters_available){aj.filters_div.hide()}aj.make_name_popup_menu()}}var av=false;for(var aw=0;aw<af.length;aw++){if(af[aw].message){av=true;break}}if(av){for(var aw=0;aw<af.length;aw++){aG=af[aw];if(!aG.message){aG.canvas.css("padding-top",A)}}}}},50);for(var 
an=0;an<this.child_tracks.length;an++){this.child_tracks[an].draw(aa,ac)}},delayed_draw:function(ab,ai,ac,ae,aj,am,ak,af){var ad=this,ag=ac*M*ae,al=ag+M*ae;var ah=function(av,an,ap,ao,at,au,aq){var ar=ad.draw_tile(an,ap,ao,au,aq);ad.tile_cache.set(ai,ar);if(ar===undefined){return}ad.show_tile(ar,at,ag,al,au);ak[ak.length]=ar};var aa=setTimeout(function(){if(ag<=ad.view.high&&al>=ad.view.low){var an=(ab?undefined:ad.tile_cache.get(ai));if(an){ad.show_tile(an,aj,ag,al,am);ak[ak.length]=an}else{$.when(ad.data_cache.get_data(view.chrom,ag,al,ad.mode,ae,ad.data_url_extra_params)).then(function(ao){n(ao,af);if(view.reference_track&&am>view.canvas_manager.char_width_px){$.when(view.reference_track.data_cache.get_data(view.chrom,ag,al,ad.mode,ae,view.reference_track.data_url_extra_params)).then(function(ap){ah(aa,ao,ae,ac,aj,am,ap)})}else{ah(aa,ao,ae,ac,aj,am)}})}}},50)},show_tile:function(ah,aj,ae,ak,am){var ab=this,aa=ah.canvas,af=aa;if(ah.message){var al=$("<div/>"),ai=$("<div/>").addClass("tile-message").text(ah.message).css({height:A-1,width:ah.canvas.width}).appendTo(al),ag=$("<button/>").text("Show more").css("margin-left","0.5em").appendTo(ai);al.append(aa);af=al;ag.click(function(){var an=ab.data_cache.get_data(ab.view.chrom,ae,ak,ab.mode,ah.resolution);an.stale=true;ah.stale=true;ab.data_cache.get_data(ab.view.chrom,ae,ak,ab.mode,ah.resolution,{max_vals:an.data.length*2});ab.draw()}).dblclick(function(an){an.stopPropagation()})}var ad=this.view.high-this.view.low,ac=(ae-this.view.low)*am;if(this.left_offset){ac-=this.left_offset}af.css({position:"absolute",top:0,left:ac,height:""});aj.append(af);ab.max_height=Math.max(ab.max_height,af.height());ab.content_div.css("height",ab.max_height+"px");aj.children().css("height",ab.max_height+"px")},set_overview:function(){var aa=this.view;if(this.initial_canvas&&this.is_overview){aa.overview_close.show();aa.overview_viewport.append(this.initial_canvas);aa.overview_highlight.show().height(this.initial_canvas.height());aa.overview_viewport.height(this.initial_canvas.height()+aa.overview_box.height())}$(window).trigger("resize")},tool_region_and_parameters_str:function(ac,aa,ad){var ab=this,ae=(ac!==undefined&&aa!==undefined&&ad!==undefined?ac+":"+aa+"-"+ad:"all");return" - region=["+ae+"], parameters=["+ab.tool.get_param_values().join(", ")+"]"},add_track:function(aa){aa.track_id=this.track_id+"_"+this.child_tracks.length;aa.container_div.attr("id","track_"+aa.track_id);this.child_tracks_container.append(aa.container_div);C(aa.container_div,".child-track-icon");if(!$(this.child_tracks_container).is(":visible")){this.child_tracks_container.show()}this.child_tracks.push(aa);this.view.has_changes=true},remove_track:function(aa){aa.container_div.fadeOut("slow",function(){$(this).remove()})}});var X=function(aa,ab){this.hidden=true;j.call(this,null,aa,ab);this.container_div.addClass("label-track")};n(X.prototype,j.prototype,{draw:function(){var ac=this.view,ad=ac.high-ac.low,ag=Math.floor(Math.pow(10,Math.floor(Math.log(ad)/Math.log(10)))),aa=Math.floor(ac.low/ag)*ag,ae=this.view.container.width(),ab=$("<div style='position: relative; height: 1.3em;'></div>");while(aa<ac.high){var af=(aa-ac.low)/ad*ae;ab.append($("<div class='label'>"+commatize(aa)+"</div>").css({position:"absolute",left:af-1}));aa+=ag}this.content_div.children(":first").remove();this.content_div.append(ab)}});var 
x=function(aa){this.hidden=true;j.call(this,null,aa,aa.top_labeltrack);J.call(this);aa.reference_track=this;this.left_offset=200;this.height_px=12;this.container_div.addClass("reference-track");this.content_div.css("background","none");this.content_div.css("min-height","0px");this.content_div.css("border","none");this.data_url=reference_url;this.data_url_extra_params={dbkey:aa.dbkey};this.data_cache=new E(y,this,false);this.tile_cache=new c(r)};n(x.prototype,J.prototype,{draw_tile:function(ai,af,ab,ak){var ae=this,ac=M*af;if(ak>this.view.canvas_manager.char_width_px){if(ai===null){ae.content_div.css("height","0px");return}var ad=this.view.canvas_manager.new_canvas();var aj=ad.getContext("2d");ad.width=Math.ceil(ac*ak+ae.left_offset);ad.height=ae.height_px;aj.font=aj.canvas.manager.default_font;aj.textAlign="center";for(var ag=0,ah=ai.length;ag<ah;ag++){var aa=Math.round(ag*ak);aj.fillText(ai[ag],aa+ae.left_offset,10)}return new b(ab,af,ad)}this.content_div.css("height","0px")}});var k=function(ae,ac,af,aa,ad){var ab=this;this.display_modes=["Histogram","Line","Filled","Intensity"];this.mode="Histogram";j.call(this,ae,ac,ac.viewport_container);J.call(this);this.min_height_px=16;this.max_height_px=400;this.height_px=80;this.hda_ldda=af;this.dataset_id=aa;this.original_dataset_id=aa;this.data_cache=new N(y,this);this.tile_cache=new c(r);this.track_config=new V({track:this,params:[{key:"color",label:"Color",type:"color",default_value:"black"},{key:"min_value",label:"Min Value",type:"float",default_value:undefined},{key:"max_value",label:"Max Value",type:"float",default_value:undefined},{key:"mode",type:"string",default_value:this.mode,hidden:true},{key:"height",type:"int",default_value:this.height_px,hidden:true}],saved_values:ad,onchange:function(){ab.vertical_range=ab.prefs.max_value-ab.prefs.min_value;$("#linetrack_"+ab.track_id+"_minval").text(ab.prefs.min_value);$("#linetrack_"+ab.track_id+"_maxval").text(ab.prefs.max_value);ab.tile_cache.clear();ab.draw()}});this.prefs=this.track_config.values;this.height_px=this.track_config.values.height;this.vertical_range=this.track_config.values.max_value-this.track_config.values.min_value;this.add_resize_handle()};n(k.prototype,J.prototype,{add_resize_handle:function(){var aa=this;var ad=false;var ac=false;var ab=$("<div class='track-resize'>");$(aa.container_div).hover(function(){ad=true;ab.show()},function(){ad=false;if(!ac){ab.hide()}});ab.hide().bind("dragstart",function(ae,af){ac=true;af.original_height=$(aa.content_div).height()}).bind("drag",function(af,ag){var ae=Math.min(Math.max(ag.original_height+ag.deltaY,aa.min_height_px),aa.max_height_px);$(aa.content_div).css("height",ae);aa.height_px=ae;aa.draw(true)}).bind("dragend",function(ae,af){aa.tile_cache.clear();ac=false;if(!ad){ab.hide()}aa.track_config.values.height=aa.height_px}).appendTo(aa.container_div)},predraw_init:function(){var aa=this,ab=aa.view.tracks.indexOf(aa);aa.vertical_range=undefined;return $.getJSON(aa.data_url,{stats:true,chrom:aa.view.chrom,low:null,high:null,hda_ldda:aa.hda_ldda,dataset_id:aa.dataset_id},function(ac){aa.container_div.addClass("line-track");var ae=ac.data;if(isNaN(parseFloat(aa.prefs.min_value))||isNaN(parseFloat(aa.prefs.max_value))){aa.prefs.min_value=ae.min;aa.prefs.max_value=ae.max;$("#track_"+ab+"_minval").val(aa.prefs.min_value);$("#track_"+ab+"_maxval").val(aa.prefs.max_value)}aa.vertical_range=aa.prefs.max_value-aa.prefs.min_value;aa.total_frequency=ae.total_frequency;aa.container_div.find(".yaxislabel").remove();var af=$("<div 
/>").addClass("yaxislabel").attr("id","linetrack_"+ab+"_minval").text(u(aa.prefs.min_value));var ad=$("<div />").addClass("yaxislabel").attr("id","linetrack_"+ab+"_maxval").text(u(aa.prefs.max_value));ad.css({position:"absolute",top:"24px",left:"10px"});ad.prependTo(aa.container_div);af.css({position:"absolute",bottom:"2px",left:"10px"});af.prependTo(aa.container_div)})},draw_tile:function(ak,ae,ab,aj){if(this.vertical_range===undefined){return}var af=ab*M*ae,ad=M*ae,aa=Math.ceil(ad*aj),ah=this.height_px;var ac=this.view.canvas_manager.new_canvas();ac.width=aa,ac.height=ah;var ai=ac.getContext("2d");var ag=new I.LinePainter(ak.data,af,af+ad,this.prefs,this.mode);ag.draw(ai,aa,ah);return new b(ad,ae,ac)}});var e=function(aa,af,ae,ai,ah,ac,ad,ag){var ab=this;this.display_modes=["Auto","Histogram","Dense","Squish","Pack"];this.track_config=new V({track:this,params:[{key:"block_color",label:"Block color",type:"color",default_value:"#444"},{key:"label_color",label:"Label color",type:"color",default_value:"black"},{key:"show_counts",label:"Show summary counts",type:"bool",default_value:true},{key:"mode",type:"string",default_value:this.mode,hidden:true},],saved_values:ah,onchange:function(){ab.tile_cache.clear();ab.draw()}});this.prefs=this.track_config.values;j.call(this,aa,af,af.viewport_container);J.call(this,ac,ad,ag);this.height_px=0;this.container_div.addClass("feature-track");this.hda_ldda=ae;this.dataset_id=ai;this.original_dataset_id=ai;this.show_labels_scale=0.001;this.showing_details=false;this.summary_draw_height=30;this.inc_slots={};this.start_end_dct={};this.tile_cache=new c(d);this.data_cache=new N(20,this);this.left_offset=200;this.painter=I.LinkedFeaturePainter};n(e.prototype,J.prototype,{update_auto_mode:function(aa){if(this.mode=="Auto"){if(aa=="no_detail"){aa="feature spans"}else{if(aa=="summary_tree"){aa="coverage histogram"}}this.mode_div.text("Auto ("+aa+")")}},incremental_slots:function(ae,ab,ad){var ac=this.view.canvas_manager.dummy_context,aa=this.inc_slots[ae];if(!aa||(aa.mode!==ad)){aa=new (p.FeatureSlotter)(ae,ad==="Pack",w,function(af){return ac.measureText(af)});aa.mode=ad;this.inc_slots[ae]=aa}return aa.slot_features(ab)},get_summary_tree_data:function(ae,ah,ac,ap){if(ap>ac-ah){ap=ac-ah}var al=Math.floor((ac-ah)/ap),ao=[],ad=0;var af=0,ag=0,ak,an=0,ai=[],am,aj;var ab=function(at,ar,au,aq){at[0]=ar+au*aq;at[1]=ar+(au+1)*aq};while(an<ap&&af!==ae.length){var aa=false;for(;an<ap&&!aa;an++){ab(ai,ah,an,al);for(ag=af;ag<ae.length;ag++){ak=ae[ag].slice(1,3);if(is_overlap(ak,ai)){aa=true;break}}if(aa){break}}data_start_index=ag;ao[ao.length]=am=[ai[0],0];for(;ag<ae.length;ag++){ak=ae[ag].slice(1,3);if(is_overlap(ak,ai)){am[1]++}else{break}}if(am[1]>ad){ad=am[1]}an++}return{max:ad,delta:al,data:ao}},draw_tile:function(an,av,az,aj,ad){var ar=this,aB=az*M*av,ab=(az+1)*M*av,ap=ab-aB,at=Math.ceil(ap*aj),aq=this.mode,aF=25,ae=this.left_offset,ao,af;if(aq==="Auto"){if(an.dataset_type==="summary_tree"){aq=an.dataset_type}else{if(an.extra_info==="no_detail"){aq="no_detail"}else{var aE=an.data;if(this.view.high-this.view.low>F){aq="Squish"}else{aq="Pack"}}}this.update_auto_mode(aq)}if(aq==="summary_tree"||aq==="Histogram"){af=this.summary_draw_height;this.container_div.find(".yaxislabel").remove();var aa=$("<div />").addClass("yaxislabel");aa.text(an.max);aa.css({position:"absolute",top:"24px",left:"10px",color:this.prefs.label_color});aa.prependTo(this.container_div);var ac=this.view.canvas_manager.new_canvas();ac.width=at+ae;ac.height=af+O;if(an.dataset_type!="summary_tree"){var 
ak=this.get_summary_tree_data(an.data,aB,ab,200);if(an.max){ak.max=an.max}an=ak}var aC=new I.SummaryTreePainter(an,aB,ab,this.prefs);var au=ac.getContext("2d");au.translate(ae,O);aC.draw(au,at,af);return new l(az,av,ac,an.max)}var ao,ah=1;if(aq==="no_detail"||aq==="Squish"||aq==="Pack"){ah=this.incremental_slots(aj,an.data,aq);ao=this.inc_slots[aj].slots}var ai=[];if(an.data){var al=this.filters_manager.filters;for(var aw=0,ay=an.data.length;aw<ay;aw++){var ag=an.data[aw];var ax=false;var am;for(var aA=0,aD=al.length;aA<aD;aA++){am=al[aA];am.update_attrs(ag);if(!am.keep(ag)){ax=true;break}}if(!ax){ai.push(ag)}}}var aC=new (this.painter)(ai,aB,ab,this.prefs,aq,ad);var af=aC.get_required_height(ah);var ac=this.view.canvas_manager.new_canvas();ac.width=at+ae;ac.height=af;var au=ac.getContext("2d");au.fillStyle=this.prefs.block_color;au.font=au.canvas.manager.default_font;au.textAlign="right";this.container_div.find(".yaxislabel").remove();if(an.data){this.example_feature=(an.data.length?an.data[0]:undefined);au.translate(ae,0);aC.draw(au,at,af,ao)}return new L(az,av,ac,an.message)}});var P=function(ad,ab,af,aa,ac,ae){e.call(this,ad,ab,af,aa,ac,ae);this.painter=I.VariantPainter};n(P.prototype,J.prototype,e.prototype);var S=function(ad,ab,af,aa,ac,ae){e.call(this,ad,ab,af,aa,ac,ae);this.track_config=new V({track:this,params:[{key:"block_color",label:"Block color",type:"color",default_value:"#444"},{key:"label_color",label:"Label color",type:"color",default_value:"black"},{key:"show_insertions",label:"Show insertions",type:"bool",default_value:false},{key:"show_differences",label:"Show differences only",type:"bool",default_value:true},{key:"show_counts",label:"Show summary counts",type:"bool",default_value:true},{key:"mode",type:"string",default_value:this.mode,hidden:true},],saved_values:ac,onchange:function(){this.track.tile_cache.clear();this.track.draw()}});this.prefs=this.track_config.values;this.painter=I.ReadPainter;this.make_name_popup_menu()};n(S.prototype,J.prototype,e.prototype);var Q=function(ae,ac,ag,aa,ad,af,ab){e.call(this,ae,ac,ag,aa,ad,af,{},ab);this.data_url=raw_data_url;this.data_query_wait=1000;this.dataset_check_url=dataset_state_url};n(Q.prototype,J.prototype,e.prototype,{predraw_init:function(){var ab=this;var aa=function(){if(ab.data_cache.size()===0){setTimeout(aa,300)}else{ab.data_url=default_data_url;ab.data_query_wait=H;ab.dataset_state_url=converted_datasets_state_url;$.getJSON(ab.dataset_state_url,{dataset_id:ab.dataset_id,hda_ldda:ab.hda_ldda},function(ac){})}};aa()}});T.View=Y;T.LineTrack=k;T.FeatureTrack=e;T.ReadTrack=S};var slotting_module=function(c,b){var e=c("class").extend;var d=2,a=5;b.FeatureSlotter=function(j,h,f,g){this.slots={};this.start_end_dct={};this.w_scale=j;this.include_label=h;this.max_rows=f;this.measureText=g};e(b.FeatureSlotter.prototype,{slot_features:function(m){var p=this.w_scale,s=this.slots,h=this.start_end_dct,y=[],A=[],n=0,z=this.max_rows;for(var w=0,x=m.length;w<x;w++){var l=m[w],o=l[0];if(s[o]!==undefined){n=Math.max(n,s[o]);A.push(s[o])}else{y.push(w)}}var q=function(G,H){for(var F=0;F<=z;F++){var D=false,I=h[F];if(I!==undefined){for(var C=0,E=I.length;C<E;C++){var B=I[C];if(H>B[0]&&G<B[1]){D=true;break}}}if(!D){return F}}return -1};for(var w=0,x=y.length;w<x;w++){var l=m[y[w]],o=l[0],u=l[1],f=l[2],r=l[3],g=Math.floor(u*p),k=Math.ceil(f*p),v=this.measureText(r).width,j;if(r!==undefined&&this.include_label){v+=(d+a);if(g-v>=0){g-=v;j="left"}else{k+=v;j="right"}}var 
t=q(g,k);if(t>=0){if(h[t]===undefined){h[t]=[]}h[t].push([g,k]);s[o]=t;n=Math.max(n,t)}else{}}return n+1}})};var painters_module=function(j,w){var t=j("class").extend;var o=function(H,z,F,y,E,C){if(C===undefined){C=4}var B=y-z;var A=E-F;var D=Math.floor(Math.sqrt(B*B+A*A)/C);var I=B/D;var G=A/D;var x;for(x=0;x<D;x++,z+=I,F+=G){if(x%2!==0){continue}H.fillRect(z,F,C,1)}};var p=function(A,z,x,D){var C=z-D/2,B=z+D/2,E=x-Math.sqrt(D*3/2);A.beginPath();A.moveTo(C,E);A.lineTo(B,E);A.lineTo(z,x);A.lineTo(C,E);A.strokeStyle=this.fillStyle;A.fill();A.stroke();A.closePath()};var m=function(z,B,x,y,A){this.data=z;this.view_start=B;this.view_end=x;this.prefs=t({},this.default_prefs,y);this.mode=A};m.prototype.default_prefs={};var u=function(z,B,x,y,A){m.call(this,z,B,x,y,A)};u.prototype.default_prefs={show_counts:false};u.prototype.draw=function(M,z,L){var E=this.view_start,O=this.view_end-this.view_start,N=z/O;var J=this.data.data,I=this.data.delta,G=this.data.max,B=L;delta_x_px=Math.ceil(I*N);M.save();for(var C=0,D=J.length;C<D;C++){var H=Math.floor((J[C][0]-E)*N);var F=J[C][1];if(!F){continue}var K=F/G*L;if(F!==0&&K<1){K=1}M.fillStyle=this.prefs.block_color;M.fillRect(H,B-K,delta_x_px,K);var A=4;if(this.prefs.show_counts&&(M.measureText(F).width+A)<delta_x_px){M.fillStyle=this.prefs.label_color;M.textAlign="center";M.fillText(F,H+(delta_x_px/2),10)}}M.restore()};var c=function(x,B,D,E,z){m.call(this,x,B,D,E,z);if(this.prefs.min_value===undefined){var F=Infinity;for(var y=0,A=this.data.length;y<A;y++){F=Math.min(F,this.data[y][1])}this.prefs.min_value=F}if(this.prefs.max_value===undefined){var C=-Infinity;for(var y=0,A=this.data.length;y<A;y++){C=Math.max(C,this.data[y][1])}this.prefs.max_value=C}};c.prototype.default_prefs={min_value:undefined,max_value:undefined,mode:"Histogram",color:"#000",overflow_color:"#F66"};c.prototype.draw=function(M,L,J){var E=false,G=this.prefs.min_value,C=this.prefs.max_value,I=C-G,x=J,z=this.view_start,K=this.view_end-this.view_start,A=L/K,H=this.mode,S=this.data;M.save();var T=Math.round(J+G/I*J);if(H!=="Intensity"){M.fillStyle="#aaa";M.fillRect(0,T,L,1)}M.beginPath();var Q,D,B;if(S.length>1){B=Math.ceil((S[1][0]-S[0][0])*A)}else{B=10}for(var N=0,O=S.length;N<O;N++){M.fillStyle=this.prefs.color;Q=Math.round((S[N][0]-z)*A);D=S[N][1];var P=false,F=false;if(D===null){if(E&&H==="Filled"){M.lineTo(Q,x)}E=false;continue}if(D<G){F=true;D=G}else{if(D>C){P=true;D=C}}if(H==="Histogram"){D=Math.round(D/I*x);M.fillRect(Q,T,B,-D)}else{if(H==="Intensity"){D=255-Math.floor((D-G)/I*255);M.fillStyle="rgb("+D+","+D+","+D+")";M.fillRect(Q,0,B,x)}else{D=Math.round(x-(D-G)/I*x);if(E){M.lineTo(Q,D)}else{E=true;if(H==="Filled"){M.moveTo(Q,x);M.lineTo(Q,D)}else{M.moveTo(Q,D)}}}}M.fillStyle=this.prefs.overflow_color;if(P||F){var R;if(H==="Histogram"||H==="Intensity"){R=B}else{Q-=2;R=4}if(P){M.fillRect(Q,0,R,3)}if(F){M.fillRect(Q,x-3,R,3)}}M.fillStyle=this.prefs.color}if(H==="Filled"){if(E){M.lineTo(Q,T);M.lineTo(0,T)}M.fill()}else{M.stroke()}M.restore()};var n=function(z,B,x,y,A){m.call(this,z,B,x,y,A)};n.prototype.default_prefs={block_color:"#FFF",connector_color:"#FFF"};t(n.prototype,{get_required_height:function(y){var x=y_scale=this.get_row_height(),z=this.mode;if(z==="no_detail"||z==="Squish"||z==="Pack"){x=y*y_scale}return x+Math.max(Math.round(y_scale/2),5)},draw:function(J,A,I,F){var D=this.data,G=this.view_start,K=this.view_end;J.save();J.fillStyle=this.prefs.block_color;J.textAlign="right";var N=this.view_end-this.view_start,M=A/N,z=this.get_row_height();for(var 
C=0,E=D.length;C<E;C++){var L=D[C],B=L[0],x=L[1],y=L[2],H=(F&&F[B]!==undefined?F[B]:null);if((x<K&&y>G)&&(this.mode=="Dense"||H!==null)){this.draw_element(J,this.mode,L,H,G,K,M,z,A)}}J.restore()}});var d=10,h=3,l=5,v=10,f=1,r=3,e=3,a=9,k=2,g="#ccc";var q=function(z,B,x,y,A){n.call(this,z,B,x,y,A)};t(q.prototype,n.prototype,{get_row_height:function(){var y=this.mode,x;if(y==="Dense"){x=d}else{if(y==="no_detail"){x=h}else{if(y==="Squish"){x=l}else{x=v}}}return x},draw_element:function(J,C,R,E,L,ab,af,ag,x){var O=R[0],ad=R[1],V=R[2],M=R[3],W=Math.floor(Math.max(0,(ad-L)*af)),K=Math.ceil(Math.min(x,Math.max(0,(V-L)*af))),U=(C==="Dense"?0:(0+E))*ag,I,Z,N=null,ah=null,A=this.prefs.block_color,Y=this.prefs.label_color;if(C=="Dense"){E=1}if(C==="no_detail"){J.fillStyle=A;J.fillRect(W,U+5,K-W,f)}else{var H=R[4],T=R[5],X=R[6],B=R[7];if(T&&X){N=Math.floor(Math.max(0,(T-L)*af));ah=Math.ceil(Math.min(x,Math.max(0,(X-L)*af)))}var ae,P;if(C==="Squish"||C==="Dense"){ae=1;P=e}else{ae=5;P=a}if(!B){if(R.strand){if(R.strand==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand_inv")}else{if(R.strand==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand_inv")}}}else{J.fillStyle=A}J.fillRect(W,U,K-W,P)}else{var G,Q;if(C==="Squish"||C==="Dense"){J.fillStyle=g;G=U+Math.floor(e/2)+1;Q=1}else{if(H){var G=U;var Q=P;if(H==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand")}else{if(H==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand")}}}else{J.fillStyle=g;G+=(e/2)+1;Q=1}}J.fillRect(W,G,K-W,Q);for(var ac=0,z=B.length;ac<z;ac++){var D=B[ac],y=Math.floor(Math.max(0,(D[0]-L)*af)),S=Math.ceil(Math.min(x,Math.max((D[1]-L)*af)));if(y>S){continue}J.fillStyle=A;J.fillRect(y,U+(P-ae)/2+1,S-y,ae);if(N!==undefined&&X>T&&!(y>ah||S<N)){var aa=Math.max(y,N),F=Math.min(S,ah);J.fillRect(aa,U+1,F-aa,P);if(B.length==1&&C=="Pack"){if(H==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand_inv")}else{if(H==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand_inv")}}if(aa+14<F){aa+=2;F-=2}J.fillRect(aa,U+1,F-aa,P)}}}}if(C==="Pack"&&ad>L){J.fillStyle=Y;if(L===0&&W-J.measureText(M).width<0){J.textAlign="left";J.fillText(M,K+k,U+8)}else{J.textAlign="right";J.fillText(M,W-k,U+8)}J.fillStyle=A}}}});var b=function(z,B,x,y,A){n.call(this,z,B,x,y,A)};t(b.prototype,n.prototype,{draw_element:function(Q,L,F,B,T,z,I,R,O){var F=data[i],H=F[0],P=F[1],A=F[2],K=F[3],D=Math.floor(Math.max(0,(P-T)*I)),G=Math.ceil(Math.min(O,Math.max(0,(A-T)*I))),C=(L==="Dense"?0:(0+B))*R,x,U,y=null,J=null;if(no_label){Q.fillStyle=block_color;Q.fillRect(D+left_offset,C+5,G-D,1)}else{var S=F[4],N=F[5],E=F[6];x=9;U=1;Q.fillRect(D+left_offset,C,G-D,x);if(L!=="Dense"&&K!==undefined&&P>T){Q.fillStyle=label_color;if(T===0&&D-Q.measureText(K).width<0){Q.textAlign="left";Q.fillText(K,G+2+left_offset,C+8)}else{Q.textAlign="right";Q.fillText(K,D-2+left_offset,C+8)}Q.fillStyle=block_color}var M=S+" / "+N;if(P>T&&Q.measureText(M).width<(G-D)){Q.fillStyle="white";Q.textAlign="center";Q.fillText(M,left_offset+D+(G-D)/2,C+8);Q.fillStyle=block_color}}}});var s=function(A,C,x,z,B,y){n.call(this,A,C,x,z,B);this.ref_seq=y};s.prototype.default_prefs=t({},n.prototype.default_prefs,{show_insertions:false});t(s.prototype,n.prototype,{get_row_height:function(){var x,y=this.mode;if(y==="Dense"){x=d}else{if(y==="Squish"){x=l}else{x=v;if(this.prefs.show_insertions){x*=2}}}return x},draw_read:function(T,O,K,Y,z,S,H,E,D){T.textAlign="center";var R=this,y=[Y,z],N=0,U=0,Q=0;ref_seq=this.ref_seq,char_width_px=T.canvas.manager.char_width_px;var 
ad=[];if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){Q=Math.round(K/2)}if(!H){H=[[0,E.length]]}for(var L=0,W=H.length;L<W;L++){var I=H[L],A="MIDNSHP=X"[I[0]],M=I[1];if(A==="H"||A==="S"){N-=M}var F=S+N,ac=Math.floor(Math.max(0,(F-Y)*K)),G=Math.floor(Math.max(0,(F+M-Y)*K));if(ac===G){G+=1}switch(A){case"H":break;case"S":case"M":case"=":if(is_overlap([F,F+M],y)){var P=E.slice(U,U+M);if(Q>0){T.fillStyle=this.prefs.block_color;T.fillRect(ac-Q,D+1,G-ac,9);T.fillStyle=g;for(var aa=0,x=P.length;aa<x;aa++){if(this.prefs.show_differences&&ref_seq){var J=ref_seq[F-Y+aa];if(!J||J.toLowerCase()===P[aa].toLowerCase()){continue}}if(F+aa>=Y&&F+aa<=z){var ab=Math.floor(Math.max(0,(F+aa-Y)*K));T.fillText(P[aa],ab,D+9)}}}else{T.fillStyle=this.prefs.block_color;T.fillRect(ac,D+4,G-ac,e)}}U+=M;N+=M;break;case"N":T.fillStyle=g;T.fillRect(ac-Q,D+5,G-ac,1);N+=M;break;case"D":T.fillStyle="red";T.fillRect(ac-Q,D+4,G-ac,3);N+=M;break;case"P":break;case"I":var X=ac-Q;if(is_overlap([F,F+M],y)){var P=E.slice(U,U+M);if(this.prefs.show_insertions){var C=ac-(G-ac)/2;if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){T.fillStyle="yellow";T.fillRect(C-Q,D-9,G-ac,9);ad[ad.length]={type:"triangle",data:[X,D+4,5]};T.fillStyle=g;switch(seq_tile_overlap){case (OVERLAP_START):P=P.slice(Y-F);break;case (OVERLAP_END):P=P.slice(0,F-z);break;case (CONTAINED_BY):break;case (CONTAINS):P=P.slice(Y-F,F-z);break}for(var aa=0,x=P.length;aa<x;aa++){var ab=Math.floor(Math.max(0,(F+aa-Y)*K));T.fillText(P[aa],ab-(G-ac)/2,D)}}else{T.fillStyle="yellow";T.fillRect(C,D+(this.mode!=="Dense"?2:5),G-ac,(O!=="Dense"?e:r))}}else{if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){ad[ad.length]={type:"text",data:[P.length,X,D+9]}}else{}}}U+=M;break;case"X":U+=M;break}}T.fillStyle="yellow";var Z,B,ae;for(var V=0;V<ad.length;V++){Z=ad[V];B=Z.type;ae=Z.data;if(B==="text"){T.save();T.font="bold "+T.font;T.fillText(ae[0],ae[1],ae[2]);T.restore()}else{if(B=="triangle"){p(T,ae[0],ae[1],ae[2])}}}},draw_element:function(Q,L,D,A,T,y,H,R,O){var G=D[0],P=D[1],z=D[2],I=D[3],C=Math.floor(Math.max(0,(P-T)*H)),E=Math.ceil(Math.min(O,Math.max(0,(z-T)*H))),B=(L==="Dense"?0:(0+A))*R,U=this.prefs.block_color,F=this.prefs.label_color,N=0;if((L==="Pack"||this.mode==="Auto")&&H>Q.canvas.manager.char_width_px){var N=Math.round(H/2)}Q.fillStyle=U;if(D[5] instanceof Array){var M=Math.floor(Math.max(0,(D[4][0]-T)*H)),K=Math.ceil(Math.min(O,Math.max(0,(D[4][1]-T)*H))),J=Math.floor(Math.max(0,(D[5][0]-T)*H)),x=Math.ceil(Math.min(O,Math.max(0,(D[5][1]-T)*H)));if(D[4][1]>=T&&D[4][0]<=y&&D[4][2]){this.draw_read(Q,L,H,T,y,D[4][0],D[4][2],D[4][3],B)}if(D[5][1]>=T&&D[5][0]<=y&&D[5][2]){this.draw_read(Q,L,H,T,y,D[5][0],D[5][2],D[5][3],B)}if(J>K){Q.fillStyle=g;o(Q,K-N,B+5,J-N,B+5)}}else{Q.fillStyle=U;this.draw_read(Q,L,H,T,y,P,D[4],D[5],B)}if(L==="Pack"&&P>T){Q.fillStyle=this.prefs.label_color;var S=1;if(S===0&&C-Q.measureText(I).width<0){Q.textAlign="left";Q.fillText(I,E+k-N,B+8)}else{Q.textAlign="right";Q.fillText(I,C-k-N,B+8)}Q.fillStyle=U}}});w.SummaryTreePainter=u;w.LinePainter=c;w.LinkedFeaturePainter=q;w.ReadPainter=s;w.VariantPainter=b};(function(d){var c={};var b=function(e){return c[e]};var a=function(f,g){var e={};g(b,e);c[f]=e};a("class",class_module);a("slotting",slotting_module);a("painters",painters_module);a("trackster",trackster_module);for(key in c.trackster){d[key]=c.trackster[key]}})(window);
\ No newline at end of file
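The hunk that follows adds the rebuilt packed file. Comparing it against the removed build above, the functional change is a refactor of per-track data handling: tile objects now record their own low/high coordinates, each track's data_cache becomes a data_manager, and a tile's single "Show more" button is replaced by two icons (more-down / more-across) that issue "deep" (DEEP_DATA_REQ) and "breadth" (BROAD_DATA_REQ) follow-up requests. Below is a readable sketch of the new get_more_data method from the added build, de-minified by hand; the method, constant, and field names (get_more_data, DEEP_DATA_REQ, BROAD_DATA_REQ, load_data, set_data, start_val) come from the source, while all local variable names are guesses, since the packed file uses single-letter identifiers:

    // Fetch additional data for a region that already has a (partial) cached result.
    get_more_data: function(low, high, resolution, extra_params, req_type) {
        var mode = this.track.mode,
            cur_data = this.get_data_from_cache(low, high, mode);
        if (!cur_data) {
            console.log("ERROR: no current data for: ", this.track, low, high, resolution);
            return;
        }
        cur_data.stale = true;
        var query_low = low;
        if (req_type === this.DEEP_DATA_REQ) {
            // "Deep": same window, but ask the server to start past the rows already held.
            $.extend(extra_params, { start_val: cur_data.data.length + 1 });
        } else if (req_type === this.BROAD_DATA_REQ) {
            // "Breadth": extend the window, resuming after the last loaded feature's end.
            query_low = cur_data.data[cur_data.data.length - 1][2] + 1;
        }
        var manager = this,
            new_data = this.load_data(query_low, high, resolution, extra_params),
            new_data_available = $.Deferred();
        // Park a Deferred in the cache so concurrent draws wait on the merged result.
        this.set_data(low, high, mode, new_data_available);
        $.when(new_data).then(function(result) {
            if (result.data) {
                result.data = cur_data.data.concat(result.data);
                if (result.message) {
                    // Update the feature count shown in the tile message.
                    result.message = result.message.replace(/[0-9]+/, result.data.length);
                }
            }
            manager.set_data(low, high, mode, result);
            new_data_available.resolve(result);
        });
        return new_data_available;
    },

In both request types the merged result replaces the stale cache entry under the original (low, high, mode) key, so the next draw picks up the concatenated data.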
+var class_module=function(b,a){var c=function(){var f=arguments[0];for(var e=1;e<arguments.length;e++){var d=arguments[e];for(key in d){f[key]=d[key]}}return f};a.extend=c};var BEFORE=1001,CONTAINS=1002,OVERLAP_START=1003,OVERLAP_END=1004,CONTAINED_BY=1005,AFTER=1006;var compute_overlap=function(e,b){var g=e[0],f=e[1],d=b[0],c=b[1],a;if(g<d){if(f<d){a=BEFORE}else{if(f<=c){a=OVERLAP_START}else{a=CONTAINS}}}else{if(g>c){a=AFTER}else{if(f<=c){a=CONTAINED_BY}else{a=OVERLAP_END}}}return a};var is_overlap=function(c,b){var a=compute_overlap(c,b);return(a!==BEFORE&&a!==AFTER)};var trackster_module=function(f,T){var n=f("class").extend,p=f("slotting"),I=f("painters");var Z=function(aa,ab){this.document=aa;this.default_font=ab!==undefined?ab:"9px Monaco, Lucida Console, monospace";this.dummy_canvas=this.new_canvas();this.dummy_context=this.dummy_canvas.getContext("2d");this.dummy_context.font=this.default_font;this.char_width_px=this.dummy_context.measureText("A").width;this.patterns={};this.load_pattern("right_strand","/visualization/strand_right.png");this.load_pattern("left_strand","/visualization/strand_left.png");this.load_pattern("right_strand_inv","/visualization/strand_right_inv.png");this.load_pattern("left_strand_inv","/visualization/strand_left_inv.png")};n(Z.prototype,{load_pattern:function(aa,ae){var ab=this.patterns,ac=this.dummy_context,ad=new Image();ad.src=image_path+ae;ad.onload=function(){ab[aa]=ac.createPattern(ad,"repeat")}},get_pattern:function(aa){return this.patterns[aa]},new_canvas:function(){var aa=this.document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(aa)}aa.manager=this;return aa}});var C=function(aa,ab){aa.bind("drag",{handle:ab,relative:true},function(af,ag){var ae=$(this).parent();var ad=ae.children();var ac;for(ac=0;ac<ad.length;ac++){if(ag.offsetY<$(ad.get(ac)).position().top){break}}if(ac===ad.length){if(this!==ad.get(ac-1)){ae.append(this)}}else{if(this!==ad.get(ac)){$(this).insertBefore(ad.get(ac))}}}).bind("dragstart",function(){$(this).css({"border-top":"1px solid blue","border-bottom":"1px solid blue"})}).bind("dragend",function(){$(this).css("border","0px")})};T.sortable=C;var D=9,A=20,O=D+2,w=100,F=12000,M=200,z=5,s=10,H=5000,t=100,m="There was an error in indexing this dataset. ",G="A converter for this dataset is not installed. Please check your datatypes_conf.xml file.",B="No data for this chrom/contig.",q="Currently indexing... 
please wait",v="Tool cannot be rerun: ",a="Loading data...",U="Ready for display",d=10,r=5,y=5;function u(aa){return Math.round(aa*1000)/1000}var c=function(aa){this.num_elements=aa;this.clear()};n(c.prototype,{get:function(ab){var aa=this.key_ary.indexOf(ab);if(aa!==-1){if(this.obj_cache[ab].stale){this.key_ary.splice(aa,1);delete this.obj_cache[ab]}else{this.move_key_to_end(ab,aa)}}return this.obj_cache[ab]},set:function(ab,ac){if(!this.obj_cache[ab]){if(this.key_ary.length>=this.num_elements){var aa=this.key_ary.shift();delete this.obj_cache[aa]}this.key_ary.push(ab)}this.obj_cache[ab]=ac;return ac},move_key_to_end:function(ab,aa){this.key_ary.splice(aa,1);this.key_ary.push(ab)},clear:function(){this.obj_cache={};this.key_ary=[]},size:function(){return this.key_ary.length}});var N=function(ab,aa,ac){c.call(this,ab);this.track=aa;this.subset=(ac!==undefined?ac:true)};n(N.prototype,c.prototype,{load_data:function(aj,ae,ab,ag){var ai=this.track.view.chrom,ah=this.track.mode,ad={chrom:ai,low:aj,high:ae,mode:ah,resolution:ab,dataset_id:this.track.dataset_id,hda_ldda:this.track.hda_ldda};$.extend(ad,ag);if(this.track.filters_manager){var ak=[];var aa=this.track.filters_manager.filters;for(var af=0;af<aa.length;af++){ak[ak.length]=aa[af].name}ad.filter_cols=JSON.stringify(ak)}var ac=this;return $.getJSON(this.track.data_url,ad,function(al){ac.set_data(aj,ae,ah,al)})},get_data:function(aa,ae,ab,ad){var af=this.track.mode,ac=this.get_data_from_cache(aa,ae,af);if(ac){return ac}ac=this.load_data(aa,ae,ab,ad);this.set_data(aa,ae,af,ac);return ac},DEEP_DATA_REQ:"deep",BROAD_DATA_REQ:"breadth",get_more_data:function(ai,ad,ac,ag,ae){var ah=this.track.mode,aj=this.get_data_from_cache(ai,ad,ah);if(!aj){console.log("ERROR: no current data for: ",this.track,ai,ad,ac,ag);return}aj.stale=true;var ab=ai;if(ae===this.DEEP_DATA_REQ){$.extend(ag,{start_val:aj.data.length+1})}else{if(ae===this.BROAD_DATA_REQ){ab=aj.data[aj.data.length-1][2]+1}}var aa=this,af=this.load_data(ab,ad,ac,ag);new_data_available=$.Deferred();this.set_data(ai,ad,ah,new_data_available);$.when(af).then(function(ak){if(ak.data){ak.data=aj.data.concat(ak.data);if(ak.message){ak.message=ak.message.replace(/[0-9]+/,ak.data.length)}}aa.set_data(ai,ad,ah,ak);new_data_available.resolve(ak)});return new_data_available},get_data_from_cache:function(aa,ab,ac){return this.get(this.gen_key(aa,ab,ac))},set_data:function(ab,ac,ad,aa){return this.set(this.gen_key(ab,ac,ad),aa)},gen_key:function(aa,ac,ad){var ab=aa+"_"+ac+"_"+ad;return ab},split_key:function(aa){return aa.split("_")}});var E=function(ab,aa,ac){N.call(this,ab,aa,ac)};n(E.prototype,N.prototype,c.prototype,{load_data:function(ac,aa,ae,af,ab,ad){if(ab>1){return}return N.prototype.load_data.call(this,ac,aa,ae,af,ab,ad)}});var Y=function(aa,ad,ac,ab,ae){this.container=aa;this.chrom=null;this.vis_id=ac;this.dbkey=ab;this.title=ad;this.tracks=[];this.label_tracks=[];this.max_low=0;this.max_high=0;this.num_tracks=0;this.track_id_counter=0;this.zoom_factor=3;this.min_separation=30;this.has_changes=false;this.init(ae);this.canvas_manager=new Z(aa.get(0).ownerDocument);this.reset()};n(Y.prototype,{init:function(ae){var 
ac=this.container,aa=this;this.top_container=$("<div/>").addClass("top-container").appendTo(ac);this.content_div=$("<div/>").addClass("content").css("position","relative").appendTo(ac);this.bottom_container=$("<div/>").addClass("bottom-container").appendTo(ac);this.top_labeltrack=$("<div/>").addClass("top-labeltrack").appendTo(this.top_container);this.viewport_container=$("<div/>").addClass("viewport-container").addClass("viewport-container").appendTo(this.content_div);this.intro_div=$("<div/>").addClass("intro").appendTo(this.viewport_container).hide();var ad=$("<div/>").text("Add Datasets to Visualization").addClass("action-button").appendTo(this.intro_div).click(function(){add_tracks()});this.nav_labeltrack=$("<div/>").addClass("nav-labeltrack").appendTo(this.bottom_container);this.nav_container=$("<div/>").addClass("nav-container").prependTo(this.top_container);this.nav=$("<div/>").addClass("nav").appendTo(this.nav_container);this.overview=$("<div/>").addClass("overview").appendTo(this.bottom_container);this.overview_viewport=$("<div/>").addClass("overview-viewport").appendTo(this.overview);this.overview_close=$("<a href='javascript:void(0);'>Close Overview</a>").addClass("overview-close").hide().appendTo(this.overview_viewport);this.overview_highlight=$("<div/>").addClass("overview-highlight").hide().appendTo(this.overview_viewport);this.overview_box_background=$("<div/>").addClass("overview-boxback").appendTo(this.overview_viewport);this.overview_box=$("<div/>").addClass("overview-box").appendTo(this.overview_viewport);this.default_overview_height=this.overview_box.height();this.nav_controls=$("<div/>").addClass("nav-controls").appendTo(this.nav);this.chrom_select=$("<select/>").attr({name:"chrom"}).css("width","15em").addClass("no-autocomplete").append("<option value=''>Loading</option>").appendTo(this.nav_controls);var ab=function(af){if(af.type==="focusout"||(af.keyCode||af.which)===13||(af.keyCode||af.which)===27){if((af.keyCode||af.which)!==27){aa.go_to($(this).val())}$(this).hide();$(this).val("");aa.location_span.show();aa.chrom_select.show()}};this.nav_input=$("<input/>").addClass("nav-input").hide().bind("keyup focusout",ab).appendTo(this.nav_controls);this.location_span=$("<span/>").addClass("location").appendTo(this.nav_controls);this.location_span.bind("click",function(){aa.location_span.hide();aa.chrom_select.hide();aa.nav_input.val(aa.chrom+":"+aa.low+"-"+aa.high);aa.nav_input.css("display","inline-block");aa.nav_input.select();aa.nav_input.focus()});if(this.vis_id!==undefined){this.hidden_input=$("<input/>").attr("type","hidden").val(this.vis_id).appendTo(this.nav_controls)}this.zo_link=$("<a id='zoom-out' />").click(function(){aa.zoom_out();aa.redraw()}).appendTo(this.nav_controls);this.zi_link=$("<a id='zoom-in' />").click(function(){aa.zoom_in();aa.redraw()}).appendTo(this.nav_controls);this.load_chroms({low:0},ae);this.chrom_select.bind("change",function(){aa.change_chrom(aa.chrom_select.val())});this.content_div.bind("click",function(af){$(this).find("input").trigger("blur")});this.content_div.bind("dblclick",function(af){aa.zoom_in(af.pageX,this.viewport_container)});this.overview_box.bind("dragstart",function(af,ag){this.current_x=ag.offsetX}).bind("drag",function(af,ah){var ai=ah.offsetX-this.current_x;this.current_x=ah.offsetX;var ag=Math.round(ai/aa.viewport_container.width()*(aa.max_high-aa.max_low));aa.move_delta(-ag)});this.overview_close.bind("click",function(){for(var 
ag=0,af=aa.tracks.length;ag<af;ag++){aa.tracks[ag].is_overview=false}$(this).siblings().filter("canvas").remove();$(this).parent().css("height",aa.overview_box.height());aa.overview_highlight.hide();$(this).hide()});this.viewport_container.bind("draginit",function(af,ag){if(af.clientX>aa.viewport_container.width()-16){return false}}).bind("dragstart",function(af,ag){ag.original_low=aa.low;ag.current_height=af.clientY;ag.current_x=ag.offsetX}).bind("drag",function(ah,aj){var af=$(this);var ak=aj.offsetX-aj.current_x;var ag=af.scrollTop()-(ah.clientY-aj.current_height);af.scrollTop(ag);aj.current_height=ah.clientY;aj.current_x=aj.offsetX;var ai=Math.round(ak/aa.viewport_container.width()*(aa.high-aa.low));aa.move_delta(ai)}).bind("mousewheel",function(ah,aj,ag,af){if(ag){var ai=Math.round(-ag/aa.viewport_container.width()*(aa.high-aa.low));aa.move_delta(ai)}});this.top_labeltrack.bind("dragstart",function(af,ag){return $("<div />").css({height:aa.content_div.height()+aa.top_labeltrack.height()+aa.nav_labeltrack.height()+1,top:"0px",position:"absolute","background-color":"#ccf",opacity:0.5,"z-index":1000}).appendTo($(this))}).bind("drag",function(aj,ak){$(ak.proxy).css({left:Math.min(aj.pageX,ak.startX),width:Math.abs(aj.pageX-ak.startX)});var ag=Math.min(aj.pageX,ak.startX)-aa.container.offset().left,af=Math.max(aj.pageX,ak.startX)-aa.container.offset().left,ai=(aa.high-aa.low),ah=aa.viewport_container.width();aa.update_location(Math.round(ag/ah*ai)+aa.low,Math.round(af/ah*ai)+aa.low)}).bind("dragend",function(ak,al){var ag=Math.min(ak.pageX,al.startX),af=Math.max(ak.pageX,al.startX),ai=(aa.high-aa.low),ah=aa.viewport_container.width(),aj=aa.low;aa.low=Math.round(ag/ah*ai)+aj;aa.high=Math.round(af/ah*ai)+aj;$(al.proxy).remove();aa.redraw()});this.add_label_track(new X(this,this.top_labeltrack));this.add_label_track(new X(this,this.nav_labeltrack));$(window).bind("resize",function(){aa.resize_window()});$(document).bind("redraw",function(){aa.redraw()});this.reset();$(window).trigger("resize");this.update_intro_div()},update_intro_div:function(){if(this.num_tracks===0){this.intro_div.show()}else{this.intro_div.hide()}},update_location:function(aa,ab){this.location_span.text(commatize(aa)+" - "+commatize(ab));this.nav_input.val(this.chrom+":"+commatize(aa)+"-"+commatize(ab))},load_chroms:function(ab,ac){ab.num=t;$.extend(ab,(this.vis_id!==undefined?{vis_id:this.vis_id}:{dbkey:this.dbkey}));var aa=this;$.ajax({url:chrom_url,data:ab,dataType:"json",success:function(ae){if(ae.chrom_info.length===0){alert("Invalid chromosome: "+ab.chrom);return}if(ae.reference){aa.add_label_track(new x(aa))}aa.chrom_data=ae.chrom_info;var ah='<option value="">Select Chrom/Contig</option>';for(var ag=0,ad=aa.chrom_data.length;ag<ad;ag++){var af=aa.chrom_data[ag].chrom;ah+='<option value="'+af+'">'+af+"</option>"}if(ae.prev_chroms){ah+='<option value="previous">Previous '+t+"</option>"}if(ae.next_chroms){ah+='<option value="next">Next '+t+"</option>"}aa.chrom_select.html(ah);if(ac){ac()}aa.chrom_start_index=ae.start_index},error:function(){alert("Could not load chroms for this dbkey:",aa.dbkey)}})},change_chrom:function(ae,ab,ag){if(!ae||ae==="None"){return}var ad=this;if(ae==="previous"){ad.load_chroms({low:this.chrom_start_index-t});return}if(ae==="next"){ad.load_chroms({low:this.chrom_start_index+t});return}var af=$.grep(ad.chrom_data,function(ai,aj){return 
ai.chrom===ae})[0];if(af===undefined){ad.load_chroms({chrom:ae},function(){ad.change_chrom(ae,ab,ag)});return}else{if(ae!==ad.chrom){ad.chrom=ae;ad.chrom_select.val(ad.chrom);ad.max_high=af.len-1;ad.reset();ad.redraw(true);for(var ah=0,aa=ad.tracks.length;ah<aa;ah++){var ac=ad.tracks[ah];if(ac.init){ac.init()}}}if(ab!==undefined&&ag!==undefined){ad.low=Math.max(ab,0);ad.high=Math.min(ag,ad.max_high)}ad.reset_overview();ad.redraw()}},go_to:function(ae){var ai=this,aa,ad,ab=ae.split(":"),ag=ab[0],ah=ab[1];if(ah!==undefined){try{var af=ah.split("-");aa=parseInt(af[0].replace(/,/g,""),10);ad=parseInt(af[1].replace(/,/g,""),10)}catch(ac){return false}}ai.change_chrom(ag,aa,ad)},move_fraction:function(ac){var aa=this;var ab=aa.high-aa.low;this.move_delta(ac*ab)},move_delta:function(ac){var aa=this;var ab=aa.high-aa.low;if(aa.low-ac<aa.max_low){aa.low=aa.max_low;aa.high=aa.max_low+ab}else{if(aa.high-ac>aa.max_high){aa.high=aa.max_high;aa.low=aa.max_high-ab}else{aa.high-=ac;aa.low-=ac}}aa.redraw()},add_track:function(aa){aa.view=this;aa.track_id=this.track_id_counter;this.tracks.push(aa);if(aa.init){aa.init()}aa.container_div.attr("id","track_"+aa.track_id);C(aa.container_div,".draghandle");this.track_id_counter+=1;this.num_tracks+=1;this.update_intro_div()},add_label_track:function(aa){aa.view=this;this.label_tracks.push(aa)},remove_track:function(ab){this.has_changes=true;delete this.tracks[this.tracks.indexOf(ab)];this.num_tracks-=1;var aa=this;ab.container_div.fadeOut("slow",function(){$(this).remove();aa.update_intro_div()})},reset:function(){this.low=this.max_low;this.high=this.max_high;this.viewport_container.find(".yaxislabel").remove()},redraw:function(ah){var ag=this.high-this.low,af=this.low,ab=this.high;if(af<this.max_low){af=this.max_low}if(ab>this.max_high){ab=this.max_high}if(this.high!==0&&ag<this.min_separation){ab=af+this.min_separation}this.low=Math.floor(af);this.high=Math.ceil(ab);this.resolution=Math.pow(z,Math.ceil(Math.log((this.high-this.low)/M)/Math.log(z)));this.zoom_res=Math.pow(s,Math.max(0,Math.ceil(Math.log(this.resolution,s)/Math.log(s))));var aa=(this.low/(this.max_high-this.max_low)*this.overview_viewport.width())||0;var ae=((this.high-this.low)/(this.max_high-this.max_low)*this.overview_viewport.width())||0;var ai=13;this.overview_box.css({left:aa,width:Math.max(ai,ae)}).show();if(ae<ai){this.overview_box.css("left",aa-(ai-ae)/2)}if(this.overview_highlight){this.overview_highlight.css({left:aa,width:ae})}this.update_location(this.low,this.high);if(!ah){for(var ac=0,ad=this.tracks.length;ac<ad;ac++){if(this.tracks[ac]&&this.tracks[ac].enabled){this.tracks[ac].draw()}}for(ac=0,ad=this.label_tracks.length;ac<ad;ac++){this.label_tracks[ac].draw()}}},zoom_in:function(ab,ac){if(this.max_high===0||this.high-this.low<this.min_separation){return}var ad=this.high-this.low,ae=ad/2+this.low,aa=(ad/this.zoom_factor)/2;if(ab){ae=ab/this.viewport_container.width()*(this.high-this.low)+this.low}this.low=Math.round(ae-aa);this.high=Math.round(ae+aa);this.redraw()},zoom_out:function(){if(this.max_high===0){return}var 
ab=this.high-this.low,ac=ab/2+this.low,aa=(ab*this.zoom_factor)/2;this.low=Math.round(ac-aa);this.high=Math.round(ac+aa);this.redraw()},resize_window:function(){this.viewport_container.height(this.container.height()-this.top_container.height()-this.bottom_container.height());this.nav_container.width(this.container.width());this.redraw()},reset_overview:function(){this.overview_viewport.find("canvas").remove();this.overview_viewport.height(this.default_overview_height);this.overview_box.height(this.default_overview_height);this.overview_close.hide();this.overview_highlight.hide()}});var o=function(ac,ag){this.track=ac;this.name=ag.name;this.params=[];var an=ag.params;for(var ad=0;ad<an.length;ad++){var ai=an[ad],ab=ai.name,am=ai.label,ae=unescape(ai.html),ao=ai.value,ak=ai.type;if(ak==="number"){this.params[this.params.length]=new g(ab,am,ae,ao,ai.min,ai.max)}else{if(ak=="select"){this.params[this.params.length]=new K(ab,am,ae,ao)}else{console.log("WARNING: unrecognized tool parameter type:",ab,ak)}}}this.parent_div=$("<div/>").addClass("dynamic-tool").hide();this.parent_div.bind("drag",function(aq){aq.stopPropagation()}).bind("click",function(aq){aq.stopPropagation()}).bind("dblclick",function(aq){aq.stopPropagation()});var al=$("<div class='tool-name'>").appendTo(this.parent_div).text(this.name);var aj=this.params;var ah=this;$.each(this.params,function(ar,av){var au=$("<div>").addClass("param-row").appendTo(ah.parent_div);var aq=$("<div>").addClass("param-label").text(av.label).appendTo(au);var at=$("<div/>").addClass("slider").html(av.html).appendTo(au);at.find(":input").val(av.value);$("<div style='clear: both;'/>").appendTo(au)});this.parent_div.find("input").click(function(){$(this).select()});var ap=$("<div>").addClass("param-row").appendTo(this.parent_div);var af=$("<input type='submit'>").attr("value","Run on complete dataset").appendTo(ap);var aa=$("<input type='submit'>").attr("value","Run on visible region").css("margin-left","3em").appendTo(ap);var ah=this;aa.click(function(){ah.run_on_region()});af.click(function(){ah.run_on_dataset()})};n(o.prototype,{get_param_values_dict:function(){var aa={};this.parent_div.find(":input").each(function(){var ab=$(this).attr("name"),ac=$(this).val();aa[ab]=JSON.stringify(ac)});return aa},get_param_values:function(){var ab=[];var aa={};this.parent_div.find(":input").each(function(){var ac=$(this).attr("name"),ad=$(this).val();if(ac){ab[ab.length]=ad}});return ab},run_on_dataset:function(){var aa=this;aa.run({dataset_id:this.track.original_dataset_id,tool_id:aa.name},function(ab){show_modal(aa.name+" is Running",aa.name+" is running on the complete dataset. 
Tool outputs are in dataset's history.",{Close:hide_modal})})},run_on_region:function(){var aa={dataset_id:this.track.original_dataset_id,chrom:this.track.view.chrom,low:this.track.view.low,high:this.track.view.high,tool_id:this.name},ac=this.track,ab=aa.tool_id+ac.tool_region_and_parameters_str(aa.chrom,aa.low,aa.high),ad;if(ac instanceof e){ad=new Q(ab,view,ac.hda_ldda,undefined,{},{},ac);ad.change_mode(ac.mode)}this.track.add_track(ad);ad.content_div.text("Starting job.");this.run(aa,function(ae){ad.dataset_id=ae.dataset_id;ad.content_div.text("Running job.");ad.init()})},run:function(ab,ac){$.extend(ab,this.get_param_values_dict());var aa=function(){$.getJSON(rerun_tool_url,ab,function(ad){if(ad==="no converter"){new_track.container_div.addClass("error");new_track.content_div.text(G)}else{if(ad.error){new_track.container_div.addClass("error");new_track.content_div.text(v+ad.message)}else{if(ad==="pending"){new_track.container_div.addClass("pending");new_track.content_div.text("Converting input data so that it can be easily reused.");setTimeout(aa,2000)}else{ac(ad)}}}})};aa()}});var K=function(ab,aa,ac,ad){this.name=ab;this.label=aa;this.html=ac;this.value=ad};var g=function(ac,ab,ae,af,ad,aa){K.call(this,ac,ab,ae,af);this.min=ad;this.max=aa};var h=function(ab,aa,ac,ad){this.name=ab;this.index=aa;this.tool_id=ac;this.tool_exp_name=ad};var R=function(ab,aa,ac,ad){h.call(this,ab,aa,ac,ad);this.low=-Number.MAX_VALUE;this.high=Number.MAX_VALUE;this.min=Number.MAX_VALUE;this.max=-Number.MAX_VALUE;this.slider=null;this.slider_label=null};n(R.prototype,{applies_to:function(aa){if(aa.length>this.index){return true}return false},keep:function(aa){if(!this.applies_to(aa)){return true}var ab=parseInt(aa[this.index]);return(isNaN(ab)||(ab>=this.low&&ab<=this.high))},update_attrs:function(ab){var aa=false;if(!this.applies_to(ab)){return aa}if(ab[this.index]<this.min){this.min=Math.floor(ab[this.index]);aa=true}if(ab[this.index]>this.max){this.max=Math.ceil(ab[this.index]);aa=true}return aa},update_ui_elt:function(){var ac=function(af,ad){var ae=ad-af;return(ae<=2?0.01:1)};var ab=this.slider.slider("option","min"),aa=this.slider.slider("option","max");if(this.min<ab||this.max>aa){this.slider.slider("option","min",this.min);this.slider.slider("option","max",this.max);this.slider.slider("option","step",ac(this.min,this.max));this.slider.slider("option","values",[this.min,this.max])}}});var W=function(ac,al){this.track=ac;this.filters=[];for(var ag=0;ag<al.length;ag++){var aa=al[ag],ab=aa.name,ak=aa.type,ai=aa.index,an=aa.tool_id,ad=aa.tool_exp_name;if(ak==="int"||ak==="float"){this.filters[ag]=new R(ab,ai,an,ad)}else{console.log("ERROR: unsupported filter: ",ab,ak)}}var aj=function(ao,ap,aq){ao.click(function(){var ar=ap.text();max=parseFloat(aq.slider("option","max")),input_size=(max<=1?4:max<=1000000?max.toString().length:6),multi_value=false;if(aq.slider("option","values")){input_size=2*input_size+1;multi_value=true}ap.text("");$("<input type='text'/>").attr("size",input_size).attr("maxlength",input_size).attr("value",ar).appendTo(ap).focus().select().click(function(at){at.stopPropagation()}).blur(function(){$(this).remove();ap.text(ar)}).keyup(function(ax){if(ax.keyCode===27){$(this).trigger("blur")}else{if(ax.keyCode===13){var av=aq.slider("option","min"),at=aq.slider("option","max"),aw=function(ay){return(isNaN(ay)||ay>at||ay<av)},au=$(this).val();if(!multi_value){au=parseFloat(au);if(aw(au)){alert("Parameter value must be in the range ["+av+"-"+at+"]");return 
$(this)}}else{au=au.split("-");au=[parseFloat(au[0]),parseFloat(au[1])];if(aw(au[0])||aw(au[1])){alert("Parameter value must be in the range ["+av+"-"+at+"]");return $(this)}}aq.slider((multi_value?"values":"value"),au)}}})})};this.parent_div=$("<div/>").addClass("filters").hide();this.parent_div.bind("drag",function(ao){ao.stopPropagation()}).bind("click",function(ao){ao.stopPropagation()}).bind("dblclick",function(ao){ao.stopPropagation()}).bind("keydown",function(ao){ao.stopPropagation()});var ae=this;$.each(this.filters,function(av,ap){var ar=$("<div/>").addClass("slider-row").appendTo(ae.parent_div);var ao=$("<div/>").addClass("slider-label").appendTo(ar);var ax=$("<span/>").addClass("slider-name").text(ap.name+" ").appendTo(ao);var aq=$("<span/>");var at=$("<span/>").addClass("slider-value").appendTo(ao).append("[").append(aq).append("]");var aw=$("<div/>").addClass("slider").appendTo(ar);ap.control_element=$("<div/>").attr("id",ap.name+"-filter-control").appendTo(aw);var au=[0,0];ap.control_element.slider({range:true,min:Number.MAX_VALUE,max:-Number.MIN_VALUE,values:[0,0],slide:function(ay,az){au=az.values;aq.text(az.values[0]+"-"+az.values[1]);setTimeout(function(){if(az.values[0]==au[0]&&az.values[1]==au[1]){var aA=az.values;aq.text(aA[0]+"-"+aA[1]);ap.low=aA[0];ap.high=aA[1];ae.track.draw(true,true)}},50)},change:function(ay,az){ap.control_element.slider("option","slide").call(ap.control_element,ay,az)}});ap.slider=ap.control_element;ap.slider_label=aq;aj(at,aq,ap.control_element);$("<div style='clear: both;'/>").appendTo(ar)});if(this.filters.length!=0){var am=$("<div>").addClass("param-row").appendTo(this.parent_div);var ah=$("<input type='submit'>").attr("value","Run on complete dataset").appendTo(am);var af=this;ah.click(function(){af.run_on_dataset()})}};n(W.prototype,{reset_filters:function(){for(var aa=0;aa<this.filters.length;aa++){filter=this.filters[aa];filter.slider.slider("option","values",[filter.min,filter.max])}},run_on_dataset:function(){var ai=function(am,ak,al){if(!(ak in am)){am[ak]=al}return am[ak]};var ac={},aa,ab,ad;for(var ae=0;ae<this.filters.length;ae++){aa=this.filters[ae];if(aa.tool_id){if(aa.min!=aa.low){ab=ai(ac,aa.tool_id,[]);ab[ab.length]=aa.tool_exp_name+" >= "+aa.low}if(aa.max!=aa.high){ab=ai(ac,aa.tool_id,[]);ab[ab.length]=aa.tool_exp_name+" <= "+aa.high}}}var ag=[];for(var aj in ac){ag[ag.length]=[aj,ac[aj]]}var ah=ag.length;(function af(aq,an){var al=an[0],am=al[0],ap=al[1],ao="("+ap.join(") and (")+")",ak={cond:ao,input:aq,target_dataset_id:aq,tool_id:am},an=an.slice(1);$.getJSON(run_tool_url,ak,function(ar){if(ar.error){show_modal("Filter Dataset","Error running tool "+am,{Close:hide_modal})}else{if(an.length===0){show_modal("Filtering Dataset","Filter(s) are running on the complete dataset. 
Outputs are in dataset's history.",{Close:hide_modal})}else{af(ar.dataset_id,an)}}})})(this.track.dataset_id,ag)}});var V=function(aa){this.track=aa.track;this.params=aa.params;this.values={};if(aa.saved_values){this.restore_values(aa.saved_values)}this.onchange=aa.onchange};n(V.prototype,{restore_values:function(aa){var ab=this;$.each(this.params,function(ac,ad){if(aa[ad.key]!==undefined){ab.values[ad.key]=aa[ad.key]}else{ab.values[ad.key]=ad.default_value}})},build_form:function(){var ab=this;var aa=$("<div />");$.each(this.params,function(af,ad){if(!ad.hidden){var ac="param_"+af;var ak=$("<div class='form-row' />").appendTo(aa);ak.append($("<label />").attr("for",ac).text(ad.label+":"));if(ad.type==="bool"){ak.append($('<input type="checkbox" />').attr("id",ac).attr("name",ac).attr("checked",ab.values[ad.key]))}else{if(ad.type==="color"){var ah=ab.values[ad.key];var ag=$("<input />").attr("id",ac).attr("name",ac).val(ah);var ai=$("<div class='tipsy tipsy-north' style='position: absolute;' />").hide();var ae=$("<div style='background-color: black; padding: 10px;'></div>").appendTo(ai);var aj=$("<div/>").appendTo(ae).farbtastic({width:100,height:100,callback:ag,color:ah});$("<div />").append(ag).append(ai).appendTo(ak).bind("click",function(al){ai.css({left:$(this).position().left+($(ag).width()/2)-60,top:$(this).position().top+$(this.height)}).show();$(document).bind("click.color-picker",function(){ai.hide();$(document).unbind("click.color-picker")});al.stopPropagation()})}else{ak.append($("<input />").attr("id",ac).attr("name",ac).val(ab.values[ad.key]))}}}});return aa},update_from_form:function(aa){var ac=this;var ab=false;$.each(this.params,function(ad,af){if(!af.hidden){var ag="param_"+ad;var ae=aa.find("#"+ag).val();if(af.type==="float"){ae=parseFloat(ae)}else{if(af.type==="int"){ae=parseInt(ae)}else{if(af.type==="bool"){ae=aa.find("#"+ag).is(":checked")}}}if(ae!==ac.values[af.key]){ac.values[af.key]=ae;ab=true}}});if(ab){this.onchange()}}});var b=function(ac,ab,aa){this.index=ac;this.low=ac*M*ab;this.high=(ac+1)*M*ab;this.resolution=ab;this.canvas=$("<div class='track-tile'/>").append(aa);this.stale=false};var l=function(ac,ab,aa,ad){b.call(this,ac,ab,aa);this.max_val=ad};var L=function(ac,ab,aa,ad){b.call(this,ac,ab,aa);this.message=ad};var j=function(ab,aa,ae,ac,ad){this.name=ab;this.view=aa;this.parent_element=ae;this.data_url=(ac?ac:default_data_url);this.data_url_extra_params={};this.data_query_wait=(ad?ad:H);this.dataset_check_url=converted_datasets_state_url;this.container_div=$("<div />").addClass("track").css("position","relative");if(!this.hidden){this.header_div=$("<div class='track-header' />").appendTo(this.container_div);if(this.view.editor){this.drag_div=$("<div class='draghandle' />").appendTo(this.header_div)}this.name_div=$("<div class='menubutton popup' />").appendTo(this.header_div);this.name_div.text(this.name);this.name_div.attr("id",this.name.replace(/\s+/g,"-").replace(/[^a-zA-Z0-9\-]/g,"").toLowerCase())}this.content_div=$("<div class='track-content'>").appendTo(this.container_div);this.parent_element.append(this.container_div)};n(j.prototype,{get_type:function(){if(this instanceof X){return"LabelTrack"}else{if(this instanceof x){return"ReferenceTrack"}else{if(this instanceof k){return"LineTrack"}else{if(this instanceof S){return"ReadTrack"}else{if(this instanceof Q){return"ToolDataFeatureTrack"}else{if(this instanceof P){return"VcfTrack"}else{if(this instanceof e){return"FeatureTrack"}}}}}}}return""},init:function(){var 
aa=this;aa.enabled=false;aa.tile_cache.clear();aa.data_manager.clear();aa.initial_canvas=undefined;aa.content_div.css("height","auto");aa.container_div.removeClass("nodata error pending");if(!aa.dataset_id){return}$.getJSON(converted_datasets_state_url,{hda_ldda:aa.hda_ldda,dataset_id:aa.dataset_id,chrom:aa.view.chrom},function(ab){if(!ab||ab==="error"||ab.kind==="error"){aa.container_div.addClass("error");aa.content_div.text(m);if(ab.message){var ad=aa.view.tracks.indexOf(aa);var ac=$(" <a href='javascript:void(0);'></a>").text("View error").bind("click",function(){show_modal("Trackster Error","<pre>"+ab.message+"</pre>",{Close:hide_modal})});aa.content_div.append(ac)}}else{if(ab==="no converter"){aa.container_div.addClass("error");aa.content_div.text(G)}else{if(ab==="no data"||(ab.data!==undefined&&(ab.data===null||ab.data.length===0))){aa.container_div.addClass("nodata");aa.content_div.text(B)}else{if(ab==="pending"){aa.container_div.addClass("pending");aa.content_div.text(q);setTimeout(function(){aa.init()},aa.data_query_wait)}else{if(ab.status==="data"){if(ab.valid_chroms){aa.valid_chroms=ab.valid_chroms;aa.make_name_popup_menu()}aa.content_div.text(U);if(aa.view.chrom){aa.content_div.text("");aa.content_div.css("height",aa.height_px+"px");aa.enabled=true;$.when(aa.predraw_init()).done(function(){aa.container_div.removeClass("nodata error pending");aa.draw()})}}}}}}})},predraw_init:function(){},update_name:function(aa){this.old_name=this.name;this.name=aa;this.name_div.text(this.name)},revert_name:function(){this.name=this.old_name;this.name_div.text(this.name)}});var J=function(ah,af,ai){var ab=this,aj=ab.view;this.filters_manager=(ah!==undefined?new W(this,ah):undefined);this.filters_available=false;this.filters_visible=false;this.tool=(af!==undefined&&obj_length(af)>0?new o(this,af):undefined);this.parent_track=ai;this.child_tracks=[];if(ab.hidden){return}if(this.parent_track){this.header_div.find(".draghandle").removeClass("draghandle").addClass("child-track-icon").addClass("icon-button");this.parent_element.addClass("child-track");this.tool=undefined}ab.child_tracks_container=$("<div/>").addClass("child-tracks-container").hide();ab.container_div.append(ab.child_tracks_container);if(this.filters_manager){this.filters_div=this.filters_manager.parent_div;this.header_div.after(this.filters_div)}if(this.tool){this.dynamic_tool_div=this.tool.parent_div;this.header_div.after(this.dynamic_tool_div)}if(ab.display_modes!==undefined){if(ab.mode_div===undefined){ab.mode_div=$("<div class='right-float menubutton popup' />").appendTo(ab.header_div);var ac=(ab.track_config&&ab.track_config.values.mode?ab.track_config.values.mode:ab.display_modes[0]);ab.mode=ac;ab.mode_div.text(ac);var aa={};for(var ad=0,ag=ab.display_modes.length;ad<ag;ad++){var ae=ab.display_modes[ad];aa[ae]=function(ak){return function(){ab.change_mode(ak)}}(ae)}make_popupmenu(ab.mode_div,aa)}else{ab.mode_div.hide()}}this.make_name_popup_menu()};n(J.prototype,j.prototype,{change_mode:function(ab){var aa=this;aa.mode_div.text(ab);aa.mode=ab;aa.track_config.values.mode=ab;aa.tile_cache.clear();aa.draw()},make_name_popup_menu:function(){var ab=this;var aa={};aa["Edit configuration"]=function(){var 
ah=function(){hide_modal();$(window).unbind("keypress.check_enter_esc")},af=function(){ab.track_config.update_from_form($(".dialog-box"));hide_modal();$(window).unbind("keypress.check_enter_esc")},ag=function(ai){if((ai.keyCode||ai.which)===27){ah()}else{if((ai.keyCode||ai.which)===13){af()}}};$(window).bind("keypress.check_enter_esc",ag);show_modal("Configure Track",ab.track_config.build_form(),{Cancel:ah,OK:af})};if(ab.filters_available>0){var ae=(ab.filters_div.is(":visible")?"Hide filters":"Show filters");aa[ae]=function(){ab.filters_visible=(ab.filters_div.is(":visible"));if(ab.filters_visible){ab.filters_manager.reset_filters()}ab.filters_div.toggle();ab.make_name_popup_menu()}}if(ab.tool){var ae=(ab.dynamic_tool_div.is(":visible")?"Hide tool":"Show tool");aa[ae]=function(){if(!ab.dynamic_tool_div.is(":visible")){ab.update_name(ab.name+ab.tool_region_and_parameters_str())}else{menu_option_text="Show dynamic tool";ab.revert_name()}ab.dynamic_tool_div.toggle();ab.make_name_popup_menu()}}if(ab.valid_chroms){aa["List chrom/contigs with data"]=function(){show_modal("Chrom/contigs with data","<p>"+ab.valid_chroms.join("<br/>")+"</p>",{Close:function(){hide_modal()}})}}var ac=view;var ad=function(){$("#no-tracks").show()};if(this.parent_track){ac=this.parent_track;ad=function(){}}aa.Remove=function(){ac.remove_track(ab);if(ac.num_tracks===0){ad()}};make_popupmenu(ab.name_div,aa)},draw:function(aa,ac){if(!this.dataset_id){return}var au=this.view.low,ag=this.view.high,ai=ag-au,ak=this.view.container.width(),ae=ak/ai,al=this.view.resolution,ad=$("<div style='position: relative;'></div>"),am=function(aw,ax,av){return aw+"_"+ax+"_"+av};if(!ac){this.content_div.children().remove()}this.content_div.append(ad);this.max_height=0;var ao=Math.floor(au/al/M);var af=[];var ap=0;while((ao*M*al)<ag){var at=am(ak,ae,ao);var ah=this.tile_cache.get(at);var aq=ao*M*this.view.resolution;var ab=aq+M*this.view.resolution;if(!aa&&ah){af[af.length]=ah;this.show_tile(ah,ad,ae)}else{this.delayed_draw(aa,at,ao,al,ad,ae,af)}ao+=1;ap++}var aj=this;var ar=setInterval(function(){if(af.length===ap){clearInterval(ar);if(ac){var aA=aj.content_div.children();var aB=false;for(var az=aA.length-1,aF=0;az>=aF;az--){var ay=$(aA[az]);if(aB){ay.remove()}else{if(ay.children().length!==0){aB=true}}}}if(aj instanceof e&&aj.mode=="Histogram"){var aE=-1;for(var az=0;az<af.length;az++){var aH=af[az].max_val;if(aH>aE){aE=aH}}for(var az=0;az<af.length;az++){if(af[az].max_val!==aE){var aG=af[az];aG.canvas.remove();aj.delayed_draw(true,am(ak,ae,aG.index),aG.index,aG.resolution,ad,ae,[],{max:aE})}}}if(aj.filters_manager){var ax=aj.filters_manager.filters;for(var aD=0;aD<ax.length;aD++){ax[aD].update_ui_elt()}var aC=false;if(aj.example_feature){for(var aD=0;aD<ax.length;aD++){if(ax[aD].applies_to(aj.example_feature)){aC=true;break}}}if(aj.filters_available!==aC){aj.filters_available=aC;if(!aj.filters_available){aj.filters_div.hide()}aj.make_name_popup_menu()}}var av=false;for(var aw=0;aw<af.length;aw++){if(af[aw].message){av=true;break}}if(av){for(var aw=0;aw<af.length;aw++){aG=af[aw];if(!aG.message){aG.canvas.css("padding-top",A)}}}}},50);for(var an=0;an<this.child_tracks.length;an++){this.child_tracks[an].draw(aa,ac)}},delayed_draw:function(ab,ai,ac,ae,aj,am,ak,af){var ad=this,ag=ac*M*ae,al=ag+M*ae;var ah=function(av,an,ap,ao,at,au,aq){var ar=ad.draw_tile(an,ap,ao,au,aq);ad.tile_cache.set(ai,ar);if(ar===undefined){return}ad.show_tile(ar,at,au);ak[ak.length]=ar};var aa=setTimeout(function(){if(ag<=ad.view.high&&al>=ad.view.low){var 
an=(ab?undefined:ad.tile_cache.get(ai));if(an){ad.show_tile(an,aj,am);ak[ak.length]=an}else{$.when(ad.data_manager.get_data(ag,al,ae,ad.data_url_extra_params)).then(function(ao){n(ao,af);if(view.reference_track&&am>view.canvas_manager.char_width_px){$.when(view.reference_track.data_manager.get_data(ag,al,ae,view.reference_track.data_url_extra_params)).then(function(ap){ah(aa,ao,ae,ac,aj,am,ap)})}else{ah(aa,ao,ae,ac,aj,am)}})}}},50)},show_tile:function(ah,aj,ak){var ac=this,ab=ah.canvas,ag=ab;if(ah.message){var al=$("<div/>"),ai=$("<div/>").addClass("tile-message").text(ah.message).css({height:A-1,width:ah.canvas.width}).appendTo(al),ae=$("<a href='javascript:void(0);'/>").addClass("icon more-down").appendTo(ai),aa=$("<a href='javascript:void(0);'/>").addClass("icon more-across").appendTo(ai);al.append(ab);ag=al;ae.click(function(){ah.stale=true;ac.data_manager.get_more_data(ah.low,ah.high,ah.resolution,{},ac.data_manager.DEEP_DATA_REQ);ac.draw()}).dblclick(function(am){am.stopPropagation()});aa.click(function(){ah.stale=true;ac.data_manager.get_more_data(ah.low,ah.high,ah.resolution,{},ac.data_manager.BROAD_DATA_REQ);ac.draw()}).dblclick(function(am){am.stopPropagation()})}var af=this.view.high-this.view.low,ad=(ah.low-this.view.low)*ak;if(this.left_offset){ad-=this.left_offset}ag.css({position:"absolute",top:0,left:ad,height:""});aj.append(ag);ac.max_height=Math.max(ac.max_height,ag.height());ac.content_div.css("height",ac.max_height+"px");aj.children().css("height",ac.max_height+"px")},set_overview:function(){var aa=this.view;if(this.initial_canvas&&this.is_overview){aa.overview_close.show();aa.overview_viewport.append(this.initial_canvas);aa.overview_highlight.show().height(this.initial_canvas.height());aa.overview_viewport.height(this.initial_canvas.height()+aa.overview_box.height())}$(window).trigger("resize")},tool_region_and_parameters_str:function(ac,aa,ad){var ab=this,ae=(ac!==undefined&&aa!==undefined&&ad!==undefined?ac+":"+aa+"-"+ad:"all");return" - region=["+ae+"], parameters=["+ab.tool.get_param_values().join(", ")+"]"},add_track:function(aa){aa.track_id=this.track_id+"_"+this.child_tracks.length;aa.container_div.attr("id","track_"+aa.track_id);this.child_tracks_container.append(aa.container_div);C(aa.container_div,".child-track-icon");if(!$(this.child_tracks_container).is(":visible")){this.child_tracks_container.show()}this.child_tracks.push(aa);this.view.has_changes=true},remove_track:function(aa){aa.container_div.fadeOut("slow",function(){$(this).remove()})}});var X=function(aa,ab){this.hidden=true;j.call(this,null,aa,ab);this.container_div.addClass("label-track")};n(X.prototype,j.prototype,{draw:function(){var ac=this.view,ad=ac.high-ac.low,ag=Math.floor(Math.pow(10,Math.floor(Math.log(ad)/Math.log(10)))),aa=Math.floor(ac.low/ag)*ag,ae=this.view.container.width(),ab=$("<div style='position: relative; height: 1.3em;'></div>");while(aa<ac.high){var af=(aa-ac.low)/ad*ae;ab.append($("<div class='label'>"+commatize(aa)+"</div>").css({position:"absolute",left:af-1}));aa+=ag}this.content_div.children(":first").remove();this.content_div.append(ab)}});var x=function(aa){this.hidden=true;j.call(this,null,aa,aa.top_labeltrack);J.call(this);aa.reference_track=this;this.left_offset=200;this.height_px=12;this.container_div.addClass("reference-track");this.content_div.css("background","none");this.content_div.css("min-height","0px");this.content_div.css("border","none");this.data_url=reference_url;this.data_url_extra_params={dbkey:aa.dbkey};this.data_manager=new 
E(y,this,false);this.tile_cache=new c(r)};n(x.prototype,J.prototype,{draw_tile:function(ai,af,ab,ak){var ae=this,ac=M*af;if(ak>this.view.canvas_manager.char_width_px){if(ai===null){ae.content_div.css("height","0px");return}var ad=this.view.canvas_manager.new_canvas();var aj=ad.getContext("2d");ad.width=Math.ceil(ac*ak+ae.left_offset);ad.height=ae.height_px;aj.font=aj.canvas.manager.default_font;aj.textAlign="center";for(var ag=0,ah=ai.length;ag<ah;ag++){var aa=Math.round(ag*ak);aj.fillText(ai[ag],aa+ae.left_offset,10)}return new b(ab,af,ad)}this.content_div.css("height","0px")}});var k=function(ae,ac,af,aa,ad){var ab=this;this.display_modes=["Histogram","Line","Filled","Intensity"];this.mode="Histogram";j.call(this,ae,ac,ac.viewport_container);J.call(this);this.min_height_px=16;this.max_height_px=400;this.height_px=80;this.hda_ldda=af;this.dataset_id=aa;this.original_dataset_id=aa;this.data_manager=new N(y,this);this.tile_cache=new c(r);this.track_config=new V({track:this,params:[{key:"color",label:"Color",type:"color",default_value:"black"},{key:"min_value",label:"Min Value",type:"float",default_value:undefined},{key:"max_value",label:"Max Value",type:"float",default_value:undefined},{key:"mode",type:"string",default_value:this.mode,hidden:true},{key:"height",type:"int",default_value:this.height_px,hidden:true}],saved_values:ad,onchange:function(){ab.vertical_range=ab.prefs.max_value-ab.prefs.min_value;$("#linetrack_"+ab.track_id+"_minval").text(ab.prefs.min_value);$("#linetrack_"+ab.track_id+"_maxval").text(ab.prefs.max_value);ab.tile_cache.clear();ab.draw()}});this.prefs=this.track_config.values;this.height_px=this.track_config.values.height;this.vertical_range=this.track_config.values.max_value-this.track_config.values.min_value;this.add_resize_handle()};n(k.prototype,J.prototype,{add_resize_handle:function(){var aa=this;var ad=false;var ac=false;var ab=$("<div class='track-resize'>");$(aa.container_div).hover(function(){ad=true;ab.show()},function(){ad=false;if(!ac){ab.hide()}});ab.hide().bind("dragstart",function(ae,af){ac=true;af.original_height=$(aa.content_div).height()}).bind("drag",function(af,ag){var ae=Math.min(Math.max(ag.original_height+ag.deltaY,aa.min_height_px),aa.max_height_px);$(aa.content_div).css("height",ae);aa.height_px=ae;aa.draw(true)}).bind("dragend",function(ae,af){aa.tile_cache.clear();ac=false;if(!ad){ab.hide()}aa.track_config.values.height=aa.height_px}).appendTo(aa.container_div)},predraw_init:function(){var aa=this,ab=aa.view.tracks.indexOf(aa);aa.vertical_range=undefined;return $.getJSON(aa.data_url,{stats:true,chrom:aa.view.chrom,low:null,high:null,hda_ldda:aa.hda_ldda,dataset_id:aa.dataset_id},function(ac){aa.container_div.addClass("line-track");var ae=ac.data;if(isNaN(parseFloat(aa.prefs.min_value))||isNaN(parseFloat(aa.prefs.max_value))){aa.prefs.min_value=ae.min;aa.prefs.max_value=ae.max;$("#track_"+ab+"_minval").val(aa.prefs.min_value);$("#track_"+ab+"_maxval").val(aa.prefs.max_value)}aa.vertical_range=aa.prefs.max_value-aa.prefs.min_value;aa.total_frequency=ae.total_frequency;aa.container_div.find(".yaxislabel").remove();var af=$("<div />").addClass("yaxislabel").attr("id","linetrack_"+ab+"_minval").text(u(aa.prefs.min_value));var ad=$("<div 
/>").addClass("yaxislabel").attr("id","linetrack_"+ab+"_maxval").text(u(aa.prefs.max_value));ad.css({position:"absolute",top:"24px",left:"10px"});ad.prependTo(aa.container_div);af.css({position:"absolute",bottom:"2px",left:"10px"});af.prependTo(aa.container_div)})},draw_tile:function(ak,ae,ab,aj){if(this.vertical_range===undefined){return}var af=ab*M*ae,ad=M*ae,aa=Math.ceil(ad*aj),ah=this.height_px;var ac=this.view.canvas_manager.new_canvas();ac.width=aa,ac.height=ah;var ai=ac.getContext("2d");var ag=new I.LinePainter(ak.data,af,af+ad,this.prefs,this.mode);ag.draw(ai,aa,ah);return new b(ab,ae,ac)}});var e=function(aa,af,ae,ai,ah,ac,ad,ag){var ab=this;this.display_modes=["Auto","Histogram","Dense","Squish","Pack"];this.track_config=new V({track:this,params:[{key:"block_color",label:"Block color",type:"color",default_value:"#444"},{key:"label_color",label:"Label color",type:"color",default_value:"black"},{key:"show_counts",label:"Show summary counts",type:"bool",default_value:true},{key:"mode",type:"string",default_value:this.mode,hidden:true},],saved_values:ah,onchange:function(){ab.tile_cache.clear();ab.draw()}});this.prefs=this.track_config.values;j.call(this,aa,af,af.viewport_container);J.call(this,ac,ad,ag);this.height_px=0;this.container_div.addClass("feature-track");this.hda_ldda=ae;this.dataset_id=ai;this.original_dataset_id=ai;this.show_labels_scale=0.001;this.showing_details=false;this.summary_draw_height=30;this.inc_slots={};this.start_end_dct={};this.tile_cache=new c(d);this.data_manager=new N(20,this);this.left_offset=200;this.painter=I.LinkedFeaturePainter};n(e.prototype,J.prototype,{update_auto_mode:function(aa){if(this.mode=="Auto"){if(aa=="no_detail"){aa="feature spans"}else{if(aa=="summary_tree"){aa="coverage histogram"}}this.mode_div.text("Auto ("+aa+")")}},incremental_slots:function(ae,ab,ad){var ac=this.view.canvas_manager.dummy_context,aa=this.inc_slots[ae];if(!aa||(aa.mode!==ad)){aa=new (p.FeatureSlotter)(ae,ad==="Pack",w,function(af){return ac.measureText(af)});aa.mode=ad;this.inc_slots[ae]=aa}return aa.slot_features(ab)},get_summary_tree_data:function(ae,ah,ac,ap){if(ap>ac-ah){ap=ac-ah}var al=Math.floor((ac-ah)/ap),ao=[],ad=0;var af=0,ag=0,ak,an=0,ai=[],am,aj;var ab=function(at,ar,au,aq){at[0]=ar+au*aq;at[1]=ar+(au+1)*aq};while(an<ap&&af!==ae.length){var aa=false;for(;an<ap&&!aa;an++){ab(ai,ah,an,al);for(ag=af;ag<ae.length;ag++){ak=ae[ag].slice(1,3);if(is_overlap(ak,ai)){aa=true;break}}if(aa){break}}data_start_index=ag;ao[ao.length]=am=[ai[0],0];for(;ag<ae.length;ag++){ak=ae[ag].slice(1,3);if(is_overlap(ak,ai)){am[1]++}else{break}}if(am[1]>ad){ad=am[1]}an++}return{max:ad,delta:al,data:ao}},draw_tile:function(an,av,az,aj,ad){var ar=this,aB=az*M*av,ab=(az+1)*M*av,ap=ab-aB,at=Math.ceil(ap*aj),aq=this.mode,aF=25,ae=this.left_offset,ao,af;if(aq==="Auto"){if(an.dataset_type==="summary_tree"){aq=an.dataset_type}else{if(an.extra_info==="no_detail"){aq="no_detail"}else{var aE=an.data;if(this.view.high-this.view.low>F){aq="Squish"}else{aq="Pack"}}}this.update_auto_mode(aq)}if(aq==="summary_tree"||aq==="Histogram"){af=this.summary_draw_height;this.container_div.find(".yaxislabel").remove();var aa=$("<div />").addClass("yaxislabel");aa.text(an.max);aa.css({position:"absolute",top:"24px",left:"10px",color:this.prefs.label_color});aa.prependTo(this.container_div);var ac=this.view.canvas_manager.new_canvas();ac.width=at+ae;ac.height=af+O;if(an.dataset_type!="summary_tree"){var ak=this.get_summary_tree_data(an.data,aB,ab,200);if(an.max){ak.max=an.max}an=ak}var aC=new 
I.SummaryTreePainter(an,aB,ab,this.prefs);var au=ac.getContext("2d");au.translate(ae,O);aC.draw(au,at,af);return new l(az,av,ac,an.max)}var ao,ah=1;if(aq==="no_detail"||aq==="Squish"||aq==="Pack"){ah=this.incremental_slots(aj,an.data,aq);ao=this.inc_slots[aj].slots}var ai=[];if(an.data){var al=this.filters_manager.filters;for(var aw=0,ay=an.data.length;aw<ay;aw++){var ag=an.data[aw];var ax=false;var am;for(var aA=0,aD=al.length;aA<aD;aA++){am=al[aA];am.update_attrs(ag);if(!am.keep(ag)){ax=true;break}}if(!ax){ai.push(ag)}}}var aC=new (this.painter)(ai,aB,ab,this.prefs,aq,ad);var af=aC.get_required_height(ah);var ac=this.view.canvas_manager.new_canvas();ac.width=at+ae;ac.height=af;var au=ac.getContext("2d");au.fillStyle=this.prefs.block_color;au.font=au.canvas.manager.default_font;au.textAlign="right";this.container_div.find(".yaxislabel").remove();if(an.data){this.example_feature=(an.data.length?an.data[0]:undefined);au.translate(ae,0);aC.draw(au,at,af,ao)}return new L(az,av,ac,an.message)}});var P=function(ad,ab,af,aa,ac,ae){e.call(this,ad,ab,af,aa,ac,ae);this.painter=I.VariantPainter};n(P.prototype,J.prototype,e.prototype);var S=function(ad,ab,af,aa,ac,ae){e.call(this,ad,ab,af,aa,ac,ae);this.track_config=new V({track:this,params:[{key:"block_color",label:"Block color",type:"color",default_value:"#444"},{key:"label_color",label:"Label color",type:"color",default_value:"black"},{key:"show_insertions",label:"Show insertions",type:"bool",default_value:false},{key:"show_differences",label:"Show differences only",type:"bool",default_value:true},{key:"show_counts",label:"Show summary counts",type:"bool",default_value:true},{key:"mode",type:"string",default_value:this.mode,hidden:true},],saved_values:ac,onchange:function(){this.track.tile_cache.clear();this.track.draw()}});this.prefs=this.track_config.values;this.painter=I.ReadPainter;this.make_name_popup_menu()};n(S.prototype,J.prototype,e.prototype);var Q=function(ae,ac,ag,aa,ad,af,ab){e.call(this,ae,ac,ag,aa,ad,af,{},ab);this.data_url=raw_data_url;this.data_query_wait=1000;this.dataset_check_url=dataset_state_url};n(Q.prototype,J.prototype,e.prototype,{predraw_init:function(){var ab=this;var aa=function(){if(ab.data_manager.size()===0){setTimeout(aa,300)}else{ab.data_url=default_data_url;ab.data_query_wait=H;ab.dataset_state_url=converted_datasets_state_url;$.getJSON(ab.dataset_state_url,{dataset_id:ab.dataset_id,hda_ldda:ab.hda_ldda},function(ac){})}};aa()}});T.View=Y;T.LineTrack=k;T.FeatureTrack=e;T.ReadTrack=S};var slotting_module=function(c,b){var e=c("class").extend;var d=2,a=5;b.FeatureSlotter=function(j,h,f,g){this.slots={};this.start_end_dct={};this.w_scale=j;this.include_label=h;this.max_rows=f;this.measureText=g};e(b.FeatureSlotter.prototype,{slot_features:function(m){var p=this.w_scale,s=this.slots,h=this.start_end_dct,y=[],A=[],n=0,z=this.max_rows;for(var w=0,x=m.length;w<x;w++){var l=m[w],o=l[0];if(s[o]!==undefined){n=Math.max(n,s[o]);A.push(s[o])}else{y.push(w)}}var q=function(G,H){for(var F=0;F<=z;F++){var D=false,I=h[F];if(I!==undefined){for(var C=0,E=I.length;C<E;C++){var B=I[C];if(H>B[0]&&G<B[1]){D=true;break}}}if(!D){return F}}return -1};for(var w=0,x=y.length;w<x;w++){var l=m[y[w]],o=l[0],u=l[1],f=l[2],r=l[3],g=Math.floor(u*p),k=Math.ceil(f*p),v=this.measureText(r).width,j;if(r!==undefined&&this.include_label){v+=(d+a);if(g-v>=0){g-=v;j="left"}else{k+=v;j="right"}}var t=q(g,k);if(t>=0){if(h[t]===undefined){h[t]=[]}h[t].push([g,k]);s[o]=t;n=Math.max(n,t)}else{}}return n+1}})};var painters_module=function(j,w){var 
t=j("class").extend;var o=function(H,z,F,y,E,C){if(C===undefined){C=4}var B=y-z;var A=E-F;var D=Math.floor(Math.sqrt(B*B+A*A)/C);var I=B/D;var G=A/D;var x;for(x=0;x<D;x++,z+=I,F+=G){if(x%2!==0){continue}H.fillRect(z,F,C,1)}};var p=function(A,z,x,D){var C=z-D/2,B=z+D/2,E=x-Math.sqrt(D*3/2);A.beginPath();A.moveTo(C,E);A.lineTo(B,E);A.lineTo(z,x);A.lineTo(C,E);A.strokeStyle=this.fillStyle;A.fill();A.stroke();A.closePath()};var m=function(z,B,x,y,A){this.data=z;this.view_start=B;this.view_end=x;this.prefs=t({},this.default_prefs,y);this.mode=A};m.prototype.default_prefs={};var u=function(z,B,x,y,A){m.call(this,z,B,x,y,A)};u.prototype.default_prefs={show_counts:false};u.prototype.draw=function(M,z,L){var E=this.view_start,O=this.view_end-this.view_start,N=z/O;var J=this.data.data,I=this.data.delta,G=this.data.max,B=L;delta_x_px=Math.ceil(I*N);M.save();for(var C=0,D=J.length;C<D;C++){var H=Math.floor((J[C][0]-E)*N);var F=J[C][1];if(!F){continue}var K=F/G*L;if(F!==0&&K<1){K=1}M.fillStyle=this.prefs.block_color;M.fillRect(H,B-K,delta_x_px,K);var A=4;if(this.prefs.show_counts&&(M.measureText(F).width+A)<delta_x_px){M.fillStyle=this.prefs.label_color;M.textAlign="center";M.fillText(F,H+(delta_x_px/2),10)}}M.restore()};var c=function(x,B,D,E,z){m.call(this,x,B,D,E,z);if(this.prefs.min_value===undefined){var F=Infinity;for(var y=0,A=this.data.length;y<A;y++){F=Math.min(F,this.data[y][1])}this.prefs.min_value=F}if(this.prefs.max_value===undefined){var C=-Infinity;for(var y=0,A=this.data.length;y<A;y++){C=Math.max(C,this.data[y][1])}this.prefs.max_value=C}};c.prototype.default_prefs={min_value:undefined,max_value:undefined,mode:"Histogram",color:"#000",overflow_color:"#F66"};c.prototype.draw=function(M,L,J){var E=false,G=this.prefs.min_value,C=this.prefs.max_value,I=C-G,x=J,z=this.view_start,K=this.view_end-this.view_start,A=L/K,H=this.mode,S=this.data;M.save();var T=Math.round(J+G/I*J);if(H!=="Intensity"){M.fillStyle="#aaa";M.fillRect(0,T,L,1)}M.beginPath();var Q,D,B;if(S.length>1){B=Math.ceil((S[1][0]-S[0][0])*A)}else{B=10}for(var N=0,O=S.length;N<O;N++){M.fillStyle=this.prefs.color;Q=Math.round((S[N][0]-z)*A);D=S[N][1];var P=false,F=false;if(D===null){if(E&&H==="Filled"){M.lineTo(Q,x)}E=false;continue}if(D<G){F=true;D=G}else{if(D>C){P=true;D=C}}if(H==="Histogram"){D=Math.round(D/I*x);M.fillRect(Q,T,B,-D)}else{if(H==="Intensity"){D=255-Math.floor((D-G)/I*255);M.fillStyle="rgb("+D+","+D+","+D+")";M.fillRect(Q,0,B,x)}else{D=Math.round(x-(D-G)/I*x);if(E){M.lineTo(Q,D)}else{E=true;if(H==="Filled"){M.moveTo(Q,x);M.lineTo(Q,D)}else{M.moveTo(Q,D)}}}}M.fillStyle=this.prefs.overflow_color;if(P||F){var R;if(H==="Histogram"||H==="Intensity"){R=B}else{Q-=2;R=4}if(P){M.fillRect(Q,0,R,3)}if(F){M.fillRect(Q,x-3,R,3)}}M.fillStyle=this.prefs.color}if(H==="Filled"){if(E){M.lineTo(Q,T);M.lineTo(0,T)}M.fill()}else{M.stroke()}M.restore()};var n=function(z,B,x,y,A){m.call(this,z,B,x,y,A)};n.prototype.default_prefs={block_color:"#FFF",connector_color:"#FFF"};t(n.prototype,{get_required_height:function(y){var x=y_scale=this.get_row_height(),z=this.mode;if(z==="no_detail"||z==="Squish"||z==="Pack"){x=y*y_scale}return x+Math.max(Math.round(y_scale/2),5)},draw:function(J,A,I,F){var D=this.data,G=this.view_start,K=this.view_end;J.save();J.fillStyle=this.prefs.block_color;J.textAlign="right";var N=this.view_end-this.view_start,M=A/N,z=this.get_row_height();for(var C=0,E=D.length;C<E;C++){var 
L=D[C],B=L[0],x=L[1],y=L[2],H=(F&&F[B]!==undefined?F[B]:null);if((x<K&&y>G)&&(this.mode=="Dense"||H!==null)){this.draw_element(J,this.mode,L,H,G,K,M,z,A)}}J.restore()},draw_element:function(D,z,F,B,A,C,E,y,x){}});var d=10,h=3,l=5,v=10,f=1,r=3,e=3,a=9,k=2,g="#ccc";var q=function(z,B,x,y,A){n.call(this,z,B,x,y,A)};t(q.prototype,n.prototype,{get_row_height:function(){var y=this.mode,x;if(y==="Dense"){x=d}else{if(y==="no_detail"){x=h}else{if(y==="Squish"){x=l}else{x=v}}}return x},draw_element:function(J,C,R,E,L,ab,af,ag,x){var O=R[0],ad=R[1],V=R[2],M=R[3],W=Math.floor(Math.max(0,(ad-L)*af)),K=Math.ceil(Math.min(x,Math.max(0,(V-L)*af))),U=(C==="Dense"?0:(0+E))*ag,I,Z,N=null,ah=null,A=this.prefs.block_color,Y=this.prefs.label_color;if(C=="Dense"){E=1}if(C==="no_detail"){J.fillStyle=A;J.fillRect(W,U+5,K-W,f)}else{var H=R[4],T=R[5],X=R[6],B=R[7];if(T&&X){N=Math.floor(Math.max(0,(T-L)*af));ah=Math.ceil(Math.min(x,Math.max(0,(X-L)*af)))}var ae,P;if(C==="Squish"||C==="Dense"){ae=1;P=e}else{ae=5;P=a}if(!B){if(R.strand){if(R.strand==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand_inv")}else{if(R.strand==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand_inv")}}}else{J.fillStyle=A}J.fillRect(W,U,K-W,P)}else{var G,Q;if(C==="Squish"||C==="Dense"){J.fillStyle=g;G=U+Math.floor(e/2)+1;Q=1}else{if(H){var G=U;var Q=P;if(H==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand")}else{if(H==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand")}}}else{J.fillStyle=g;G+=(e/2)+1;Q=1}}J.fillRect(W,G,K-W,Q);for(var ac=0,z=B.length;ac<z;ac++){var D=B[ac],y=Math.floor(Math.max(0,(D[0]-L)*af)),S=Math.ceil(Math.min(x,Math.max((D[1]-L)*af)));if(y>S){continue}J.fillStyle=A;J.fillRect(y,U+(P-ae)/2+1,S-y,ae);if(N!==undefined&&X>T&&!(y>ah||S<N)){var aa=Math.max(y,N),F=Math.min(S,ah);J.fillRect(aa,U+1,F-aa,P);if(B.length==1&&C=="Pack"){if(H==="+"){J.fillStyle=J.canvas.manager.get_pattern("right_strand_inv")}else{if(H==="-"){J.fillStyle=J.canvas.manager.get_pattern("left_strand_inv")}}if(aa+14<F){aa+=2;F-=2}J.fillRect(aa,U+1,F-aa,P)}}}}if(C==="Pack"&&ad>L){J.fillStyle=Y;if(L===0&&W-J.measureText(M).width<0){J.textAlign="left";J.fillText(M,K+k,U+8)}else{J.textAlign="right";J.fillText(M,W-k,U+8)}J.fillStyle=A}}}});var b=function(z,B,x,y,A){n.call(this,z,B,x,y,A)};t(b.prototype,n.prototype,{draw_element:function(Q,L,F,B,T,z,I,R,O){var F=data[i],H=F[0],P=F[1],A=F[2],K=F[3],D=Math.floor(Math.max(0,(P-T)*I)),G=Math.ceil(Math.min(O,Math.max(0,(A-T)*I))),C=(L==="Dense"?0:(0+B))*R,x,U,y=null,J=null;if(no_label){Q.fillStyle=block_color;Q.fillRect(D+left_offset,C+5,G-D,1)}else{var S=F[4],N=F[5],E=F[6];x=9;U=1;Q.fillRect(D+left_offset,C,G-D,x);if(L!=="Dense"&&K!==undefined&&P>T){Q.fillStyle=label_color;if(T===0&&D-Q.measureText(K).width<0){Q.textAlign="left";Q.fillText(K,G+2+left_offset,C+8)}else{Q.textAlign="right";Q.fillText(K,D-2+left_offset,C+8)}Q.fillStyle=block_color}var M=S+" / "+N;if(P>T&&Q.measureText(M).width<(G-D)){Q.fillStyle="white";Q.textAlign="center";Q.fillText(M,left_offset+D+(G-D)/2,C+8);Q.fillStyle=block_color}}}});var s=function(A,C,x,z,B,y){n.call(this,A,C,x,z,B);this.ref_seq=y};s.prototype.default_prefs=t({},n.prototype.default_prefs,{show_insertions:false});t(s.prototype,n.prototype,{get_row_height:function(){var x,y=this.mode;if(y==="Dense"){x=d}else{if(y==="Squish"){x=l}else{x=v;if(this.prefs.show_insertions){x*=2}}}return x},draw_read:function(T,O,K,Y,z,S,H,E,D){T.textAlign="center";var 
R=this,y=[Y,z],N=0,U=0,Q=0;ref_seq=this.ref_seq,char_width_px=T.canvas.manager.char_width_px;var ad=[];if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){Q=Math.round(K/2)}if(!H){H=[[0,E.length]]}for(var L=0,W=H.length;L<W;L++){var I=H[L],A="MIDNSHP=X"[I[0]],M=I[1];if(A==="H"||A==="S"){N-=M}var F=S+N,ac=Math.floor(Math.max(0,(F-Y)*K)),G=Math.floor(Math.max(0,(F+M-Y)*K));if(ac===G){G+=1}switch(A){case"H":break;case"S":case"M":case"=":if(is_overlap([F,F+M],y)){var P=E.slice(U,U+M);if(Q>0){T.fillStyle=this.prefs.block_color;T.fillRect(ac-Q,D+1,G-ac,9);T.fillStyle=g;for(var aa=0,x=P.length;aa<x;aa++){if(this.prefs.show_differences&&ref_seq){var J=ref_seq[F-Y+aa];if(!J||J.toLowerCase()===P[aa].toLowerCase()){continue}}if(F+aa>=Y&&F+aa<=z){var ab=Math.floor(Math.max(0,(F+aa-Y)*K));T.fillText(P[aa],ab,D+9)}}}else{T.fillStyle=this.prefs.block_color;T.fillRect(ac,D+4,G-ac,e)}}U+=M;N+=M;break;case"N":T.fillStyle=g;T.fillRect(ac-Q,D+5,G-ac,1);N+=M;break;case"D":T.fillStyle="red";T.fillRect(ac-Q,D+4,G-ac,3);N+=M;break;case"P":break;case"I":var X=ac-Q;if(is_overlap([F,F+M],y)){var P=E.slice(U,U+M);if(this.prefs.show_insertions){var C=ac-(G-ac)/2;if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){T.fillStyle="yellow";T.fillRect(C-Q,D-9,G-ac,9);ad[ad.length]={type:"triangle",data:[X,D+4,5]};T.fillStyle=g;switch(seq_tile_overlap){case (OVERLAP_START):P=P.slice(Y-F);break;case (OVERLAP_END):P=P.slice(0,F-z);break;case (CONTAINED_BY):break;case (CONTAINS):P=P.slice(Y-F,F-z);break}for(var aa=0,x=P.length;aa<x;aa++){var ab=Math.floor(Math.max(0,(F+aa-Y)*K));T.fillText(P[aa],ab-(G-ac)/2,D)}}else{T.fillStyle="yellow";T.fillRect(C,D+(this.mode!=="Dense"?2:5),G-ac,(O!=="Dense"?e:r))}}else{if((O==="Pack"||this.mode==="Auto")&&E!==undefined&&K>char_width_px){ad[ad.length]={type:"text",data:[P.length,X,D+9]}}else{}}}U+=M;break;case"X":U+=M;break}}T.fillStyle="yellow";var Z,B,ae;for(var V=0;V<ad.length;V++){Z=ad[V];B=Z.type;ae=Z.data;if(B==="text"){T.save();T.font="bold "+T.font;T.fillText(ae[0],ae[1],ae[2]);T.restore()}else{if(B=="triangle"){p(T,ae[0],ae[1],ae[2])}}}},draw_element:function(Q,L,D,A,T,y,H,R,O){var G=D[0],P=D[1],z=D[2],I=D[3],C=Math.floor(Math.max(0,(P-T)*H)),E=Math.ceil(Math.min(O,Math.max(0,(z-T)*H))),B=(L==="Dense"?0:(0+A))*R,U=this.prefs.block_color,F=this.prefs.label_color,N=0;if((L==="Pack"||this.mode==="Auto")&&H>Q.canvas.manager.char_width_px){var N=Math.round(H/2)}Q.fillStyle=U;if(D[5] instanceof Array){var M=Math.floor(Math.max(0,(D[4][0]-T)*H)),K=Math.ceil(Math.min(O,Math.max(0,(D[4][1]-T)*H))),J=Math.floor(Math.max(0,(D[5][0]-T)*H)),x=Math.ceil(Math.min(O,Math.max(0,(D[5][1]-T)*H)));if(D[4][1]>=T&&D[4][0]<=y&&D[4][2]){this.draw_read(Q,L,H,T,y,D[4][0],D[4][2],D[4][3],B)}if(D[5][1]>=T&&D[5][0]<=y&&D[5][2]){this.draw_read(Q,L,H,T,y,D[5][0],D[5][2],D[5][3],B)}if(J>K){Q.fillStyle=g;o(Q,K-N,B+5,J-N,B+5)}}else{Q.fillStyle=U;this.draw_read(Q,L,H,T,y,P,D[4],D[5],B)}if(L==="Pack"&&P>T){Q.fillStyle=this.prefs.label_color;var S=1;if(S===0&&C-Q.measureText(I).width<0){Q.textAlign="left";Q.fillText(I,E+k-N,B+8)}else{Q.textAlign="right";Q.fillText(I,C-k-N,B+8)}Q.fillStyle=U}}});w.SummaryTreePainter=u;w.LinePainter=c;w.LinkedFeaturePainter=q;w.ReadPainter=s;w.VariantPainter=b};(function(d){var c={};var b=function(e){return c[e]};var a=function(f,g){var e={};g(b,e);c[f]=e};a("class",class_module);a("slotting",slotting_module);a("painters",painters_module);a("trackster",trackster_module);for(key in c.trackster){d[key]=c.trackster[key]}})(window);
\ No newline at end of file
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -172,7 +172,7 @@
// height of individual features within tracks. Feature height, then, should always be less
// than track height.
CHAR_HEIGHT_PX = 9, // FIXME: font size may not be static
- ERROR_PADDING = 18, // Padding at the top of tracks for error messages
+ ERROR_PADDING = 20, // Padding at the top of tracks for error messages
SUMMARY_TREE_TOP_PADDING = CHAR_HEIGHT_PX + 2,
// Maximum number of rows in a slotted track
MAX_FEATURE_DEPTH = 100,
@@ -369,15 +369,15 @@
//
// Set parameters based on request type.
//
+ var query_low = low;
if (req_type === this.DEEP_DATA_REQ) {
- // HACK: for now, just up the max vals and request all data; in the future,
- // need server to recognize min_vals and max_vals to specify range of data to
- // return.
- $.extend(extra_params, {max_vals: cur_data.data.length * 2});
+ // Use same interval but set start_val to skip data that's already in cur_data.
+ $.extend(extra_params, {start_val: cur_data.data.length + 1});
}
else if (req_type === this.BROAD_DATA_REQ) {
- // Set low to be past the last feature returned.
- low = cur_data.data[cur_data.length-1][2] + 1;
+ // Set query low to be past the last feature returned so that an area of extreme feature depth
+ // is bypassed.
+ query_low = cur_data.data[cur_data.data.length - 1][2] + 1;
}
//
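
The two request types above differ only in how they parameterize the follow-up
query. A minimal standalone sketch of that dispatch (not code from this commit;
DEEP_DATA_REQ/BROAD_DATA_REQ are reduced to plain strings, and cur_data is
shaped like the cached payload used above, with each row's end coordinate at
index 2):

    // Derive the follow-up query for an incremental data request.
    function build_more_data_query(req_type, low, high, cur_data, extra_params) {
        var query_low = low;
        if (req_type === "DEEP_DATA_REQ") {
            // Same interval; start_val tells the server how many rows to skip.
            extra_params.start_val = cur_data.data.length + 1;
        }
        else if (req_type === "BROAD_DATA_REQ") {
            // Restart just past the end of the last feature already cached.
            query_low = cur_data.data[cur_data.data.length - 1][2] + 1;
        }
        return { low: query_low, high: high, extra_params: extra_params };
    }
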
@@ -386,14 +386,19 @@
//
var
data_manager = this,
- new_data_request = this.load_data(low, high, resolution, extra_params)
+ new_data_request = this.load_data(query_low, high, resolution, extra_params),
new_data_available = $.Deferred();
// load_data sets the cache to new_data_request, but use a custom deferred object so that the
// signal and data cover all of the data, not just the new data.
this.set_data(low, high, mode, new_data_available);
$.when(new_data_request).then(function(result) {
+ // Update data and message.
if (result.data) {
- //result.data.append(cur_data.data);
+ result.data = cur_data.data.concat(result.data);
+ if (result.message) {
+ // HACK: replace number in message with current data length. Works but is ugly.
+ result.message = result.message.replace(/[0-9]+/, result.data.length);
+ }
}
data_manager.set_data(low, high, mode, result);
new_data_available.resolve(result);
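
In isolation, the merge-and-patch step looks like the sketch below (again not
commit code; it assumes the same row-array data shape and a user-facing
"Displaying N of M ..." style message):

    // Fold a new partial result into the cached rows and rewrite the first
    // number in the message so the displayed count stays accurate.
    function merge_more_data(cur_data, result) {
        if (result.data) {
            result.data = cur_data.data.concat(result.data);
            if (result.message) {
                result.message = result.message.replace(/[0-9]+/, result.data.length);
            }
        }
        return result;
    }
    // merge_more_data({ data: [[0, 1, 5], [1, 6, 9]] },
    //                 { data: [[2, 10, 14]], message: "Displaying 1 of 30 features" })
    // => data.length === 3, message === "Displaying 3 of 30 features"
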
@@ -481,8 +486,11 @@
this.top_labeltrack = $("<div/>").addClass("top-labeltrack").appendTo(this.top_container);
// Viewport for dragging tracks in center
this.viewport_container = $("<div/>").addClass("viewport-container").appendTo(this.content_div);
- // Future overlay?
- this.intro_div = $("<div/>").addClass("intro").text("Select a chrom from the dropdown below").hide();
+ // Introduction div shown when there are no tracks.
+ this.intro_div = $("<div/>").addClass("intro").appendTo(this.viewport_container).hide();
+ var add_tracks_button = $("<div/>").text("Add Datasets to Visualization").addClass("action-button").appendTo(this.intro_div).click(function () {
+ add_tracks();
+ });
// Another label track at bottom
this.nav_labeltrack = $("<div/>").addClass("nav-labeltrack").appendTo(this.bottom_container);
// Navigation at top
@@ -531,7 +539,6 @@
this.chrom_select.bind("change", function() {
view.change_chrom(view.chrom_select.val());
});
- this.intro_div.show();
/*
this.content_div.bind("mousewheel", function( e, delta ) {
@@ -646,6 +653,16 @@
this.reset();
$(window).trigger("resize");
+ this.update_intro_div();
+ },
+ /** Show or hide intro div depending on view state. */
+ update_intro_div: function() {
+ if (this.num_tracks === 0) {
+ this.intro_div.show();
+ }
+ else {
+ this.intro_div.hide();
+ }
},
update_location: function(low, high) {
this.location_span.text( commatize(low) + ' - ' + commatize(high) );
@@ -730,12 +747,6 @@
// Switching to local chrom.
if (chrom !== view.chrom) {
view.chrom = chrom;
- if (!view.chrom) {
- // No chrom selected
- view.intro_div.show();
- } else {
- view.intro_div.hide();
- }
view.chrom_select.val(view.chrom);
view.max_high = found.len-1; // -1 because we're using 0-based indexing.
view.reset();
@@ -805,6 +816,7 @@
sortable( track.container_div, '.draghandle' );
this.track_id_counter += 1;
this.num_tracks += 1;
+ this.update_intro_div();
},
add_label_track: function (label_track) {
label_track.view = this;
@@ -812,9 +824,13 @@
},
remove_track: function(track) {
this.has_changes = true;
- track.container_div.fadeOut('slow', function() { $(this).remove(); });
delete this.tracks[this.tracks.indexOf(track)];
this.num_tracks -= 1;
+ var view = this;
+ track.container_div.fadeOut('slow', function() {
+ $(this).remove();
+ view.update_intro_div();
+ });
},
reset: function() {
this.low = this.max_low;
@@ -1018,6 +1034,7 @@
dataset_id: this.track.original_dataset_id,
tool_id: tool.name
},
+ null,
// Success callback.
function(track_data) {
show_modal(tool.name + " is Running",
@@ -1060,7 +1077,7 @@
new_track.content_div.text("Starting job.");
// Run tool.
- this.run(url_params,
+ this.run(url_params, new_track,
// Success callback.
function(track_data) {
new_track.dataset_id = track_data.dataset_id;
@@ -1072,7 +1089,7 @@
/**
* Run tool using a set of URL params and a success callback.
*/
- run: function(url_params, success_callback) {
+ run: function(url_params, new_track, success_callback) {
// Add tool params to URL params.
$.extend(url_params, this.get_param_values_dict());
@@ -1093,7 +1110,7 @@
else if (response === "pending") {
// Converting/indexing input datasets; show message and try again.
new_track.container_div.addClass("pending");
- new_track.content_div.text("Converting input data so that it can be easily reused.");
+ new_track.content_div.text("Converting input data so that it can be used quickly with the tool.");
setTimeout(json_run_tool, 2000);
}
else {
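
The retry loop used here reduces to the following shape (a hypothetical sketch
assuming jQuery; run_tool, run_url, and success_callback are illustrative names,
while the "pending" check and the 2000 ms retry come from the hunk above):

    // Poll a tool-run endpoint until input conversion/indexing completes.
    function run_tool(run_url, url_params, success_callback) {
        var json_run_tool = function() {
            $.getJSON(run_url, url_params, function(response) {
                if (response === "pending") {
                    // Inputs are still being converted; try again shortly.
                    setTimeout(json_run_tool, 2000);
                }
                else {
                    success_callback(response);
                }
            });
        };
        json_run_tool();
    }
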
@@ -2211,18 +2228,29 @@
message_div = $("<div/>").addClass("tile-message").text(tile.message).
// -1 to account for border.
css({'height': ERROR_PADDING-1, 'width': tile.canvas.width}).appendTo(container_div),
- show_more_data_btn = $("<div/>").text("Show more").addClass("action-button").css({'padding-top': 0, 'padding-bottom':0}).appendTo(message_div);
+ more_down_icon = $("<a href='javascript:void(0);'/>").addClass("icon more-down").appendTo(message_div),
+ more_across_icon = $("<a href='javascript:void(0);'/>").addClass("icon more-across").appendTo(message_div);
container_div.append(canvas);
tile_element = container_div;
- // Set up actions for button.
- show_more_data_btn.click(function() {
+ // Set up actions for icons.
+ more_down_icon.click(function() {
// Mark tile as stale, request more data, and redraw track.
tile.stale = true;
track.data_manager.get_more_data(tile.low, tile.high, tile.resolution, {}, track.data_manager.DEEP_DATA_REQ);
track.draw();
}).dblclick(function(e) {
- // Do not propogate as this would normal zoom in.
+ // Do not propagate as this would normally zoom in.
+ e.stopPropagation();
+ });
+
+ more_across_icon.click(function() {
+ // Mark tile as stale, request more data, and redraw track.
+ tile.stale = true;
+ track.data_manager.get_more_data(tile.low, tile.high, tile.resolution, {}, track.data_manager.BROAD_DATA_REQ);
+ track.draw();
+ }).dblclick(function(e) {
+ // Do not propagate as this would normally zoom in.
e.stopPropagation();
});
}
@@ -3291,6 +3319,9 @@
ctx.restore();
}
+/**
+ * Abstract object for painting feature tracks. Subclasses must implement draw_element() for painting to work.
+ */
var FeaturePainter = function( data, view_start, view_end, prefs, mode ) {
Painter.call( this, data, view_start, view_end, prefs, mode );
}
@@ -3311,7 +3342,6 @@
},
draw: function( ctx, width, height, slots ) {
-
var data = this.data, view_start = this.view_start, view_end = this.view_end;
ctx.save();
@@ -3340,7 +3370,12 @@
}
ctx.restore();
- }
+ },
+
+ /**
+ * Abstract function for drawing an individual feature. NOTE: this method must be implemented by subclasses for drawing to work.
+ */
+ draw_element: function(ctx, mode, feature, slot, tile_low, tile_high, w_scale, y_scale, width ) {}
});
// Constants specific to feature tracks moved here (HACKING, these should
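
To make the new draw_element() contract concrete, here is a minimal
hypothetical subclass (BoxPainter, the extend() helper, and the flat-box
drawing are illustrative only; FeaturePainter and the argument order come from
the hunk above, and feature rows are assumed to be [uid, start, end, ...] as
elsewhere in trackster):

    // Smallest useful FeaturePainter subclass: one filled box per feature.
    var BoxPainter = function(data, view_start, view_end, prefs, mode) {
        FeaturePainter.call(this, data, view_start, view_end, prefs, mode);
    };
    extend(BoxPainter.prototype, FeaturePainter.prototype, {
        get_row_height: function() { return 10; },
        draw_element: function(ctx, mode, feature, slot, tile_low, tile_high,
                               w_scale, y_scale, width) {
            // Clip the feature to the tile and draw it as a plain filled box.
            var x_start = Math.floor(Math.max(0, (feature[1] - tile_low) * w_scale)),
                x_end = Math.ceil(Math.min(width, (feature[2] - tile_low) * w_scale));
            ctx.fillRect(x_start, slot * y_scale + 1, x_end - x_start, y_scale - 2);
        }
    });
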
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 static/welcome.rst
--- a/static/welcome.rst
+++ b/static/welcome.rst
@@ -30,7 +30,7 @@
.. __: /static/help.html
.. __: http://www.bx.psu.edu/cgi-bin/trac.cgi
-.. __: http://bitbucket.org/galaxy/galaxy-central/wiki/GalaxyTeam
+.. __: http://wiki.g2.bx.psu.edu/Galaxy%20Team
.. __: mailto:galaxy@bx.psu.edu
Version: <b>2.1</b> Revision: <b>$Rev$</b>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/base.mako
--- a/templates/base.mako
+++ b/templates/base.mako
@@ -1,15 +1,13 @@
<% _=n_ %><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"><html>
-
-<head>
-<title>${self.title()}</title>
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-${self.metas()}
-${self.stylesheets()}
-${self.javascripts()}
-</head>
-
+ <head>
+ <title>${self.title()}</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+ ${self.metas()}
+ ${self.stylesheets()}
+ ${self.javascripts()}
+ </head>
<body>
${next.body()}
</body>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/base_panels.mako
--- a/templates/base_panels.mako
+++ b/templates/base_panels.mako
@@ -155,13 +155,13 @@
## Override
</%def>
-<%def name="overlay( title='', content='' )">
+<%def name="overlay( title='', content='', visible=False )"><%def name="title()"></%def><%def name="content()"></%def><div id="overlay"
- %if not self.overlay_visible:
- style="display: none;"
+ %if not visible:
+ style="display: none;"
%endif
>
##
@@ -169,21 +169,21 @@
## Need a table here for centering in IE6
<table class="dialog-box-container" border="0" cellpadding="0" cellspacing="0"
- %if not self.overlay_visible:
+ %if not visible:
style="display: none;"
%endif
>
<tr><td>
<div class="dialog-box-wrapper">
<div class="dialog-box">
- <div class="unified-panel-header">
- <div class="unified-panel-header-inner"><span class='title'>${title}</span></div>
- </div>
- <div class="body">${content}</div>
- <div>
- <div class="buttons" style="display: none; float: right;"></div>
- <div class="extra_buttons" style="display: none; padding: 5px;"></div>
- <div style="clear: both;"></div>
- </div>
+ <div class="unified-panel-header">
+ <div class="unified-panel-header-inner"><span class='title'>${title}</span></div>
+ </div>
+ <div class="body">${content}</div>
+ <div>
+ <div class="buttons" style="display: none; float: right;"></div>
+ <div class="extra_buttons" style="display: none; padding: 5px;"></div>
+ <div style="clear: both;"></div>
+ </div>
</div>
</td></tr></table>
@@ -198,45 +198,45 @@
<html>
${self.init()}
<head>
- <title>${self.title()}</title>
- <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
- <meta name = "viewport" content = "maximum-scale=1.0">
- ${self.stylesheets()}
- ${self.javascripts()}
+ <title>${self.title()}</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+ <meta name = "viewport" content = "maximum-scale=1.0">
+ ${self.stylesheets()}
+ ${self.javascripts()}
</head>
<body scroll="no" class="${self.body_class}">
- <div id="everything" style="position: absolute; top: 0; left: 0; width: 100%; height: 100%; min-width: 600px;">
- ## Background displays first
- <div id="background"></div>
- ## Layer iframes over backgrounds
- <div id="masthead">
- ${self.masthead()}
- </div>
- <div id="messagebox" class="panel-${self.message_box_class}-message">
- %if self.message_box_visible:
- ${self.message_box_content()}
+ <div id="everything" style="position: absolute; top: 0; left: 0; width: 100%; height: 100%; min-width: 600px;">
+ ## Background displays first
+ <div id="background"></div>
+ ## Layer iframes over backgrounds
+ <div id="masthead">
+ ${self.masthead()}
+ </div>
+ <div id="messagebox" class="panel-${self.message_box_class}-message">
+ %if self.message_box_visible:
+ ${self.message_box_content()}
+ %endif
+ </div>
+ ${self.overlay(visible=self.overlay_visible)}
+ %if self.has_left_panel:
+ <div id="left">
+ ${self.left_panel()}
+ </div>
+ <div id="left-border">
+ <div id="left-border-inner" style="display: none;"></div>
+ </div>
+ %endif
+ <div id="center">
+ ${self.center_panel()}
+ </div>
+ %if self.has_right_panel:
+ <div id="right-border"><div id="right-border-inner" style="display: none;"></div></div>
+ <div id="right">
+ ${self.right_panel()}
+ </div>
%endif
</div>
- ${self.overlay()}
- %if self.has_left_panel:
- <div id="left">
- ${self.left_panel()}
- </div>
- <div id="left-border">
- <div id="left-border-inner" style="display: none;"></div>
- </div>
- %endif
- <div id="center">
- ${self.center_panel()}
- </div>
- %if self.has_right_panel:
- <div id="right-border"><div id="right-border-inner" style="display: none;"></div></div>
- <div id="right">
- ${self.right_panel()}
- </div>
- %endif
- </div>
## Allow other body level elements
</body>
## Scripts can be loaded later since they progressively add features to
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -210,7 +210,14 @@
%if for_editing:
<a href="${h.url_for( controller='tool_runner', action='rerun', id=data.id )}" target="galaxy_main" title="Run this job again" class="icon-button arrow-circle tooltip"></a>
%if app.config.get_bool( 'enable_tracks', False ) and data.ext in app.datatypes_registry.get_available_tracks():
- <a data-url="${h.url_for( controller='tracks', action='list_tracks' )}" class="icon-button vis-chart tooltip trackster-add"
+ <%
+ if data.dbkey != '?':
+ data_url = h.url_for( controller='tracks', action='list_tracks', dbkey=data.dbkey )
+ data_url = data_url.replace( 'dbkey', 'f-dbkey' )
+ else:
+ data_url = h.url_for( controller='tracks', action='list_tracks' )
+ %>
+ <a data-url="${data_url}" class="icon-button vis-chart tooltip trackster-add"
action-url="${h.url_for( controller='tracks', action='browser', dataset_id=dataset_id)}"
new-url="${h.url_for( controller='tracks', action='index', dataset_id=dataset_id, default_dbkey=data.dbkey)}" title="Visualize in Trackster"></a>
%endif
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/root/index.mako
--- a/templates/root/index.mako
+++ b/templates/root/index.mako
@@ -174,14 +174,14 @@
<%def name="init()"><%
- if trans.app.config.cloud_controller_instance:
- self.has_left_panel=False
- self.has_right_panel=False
- self.active_view="cloud"
- else:
- self.has_left_panel=True
- self.has_right_panel=True
- self.active_view="analysis"
+ if trans.app.config.cloud_controller_instance:
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.active_view="cloud"
+ else:
+ self.has_left_panel=True
+ self.has_right_panel=True
+ self.active_view="analysis"
%>
%if trans.app.config.require_login and not trans.user:
<script type="text/javascript">
@@ -219,7 +219,7 @@
elif m_c is not None:
center_url = h.url_for( controller=m_c, action=m_a )
elif trans.app.config.cloud_controller_instance:
- center_url = h.url_for( controller='cloud', action='list' )
+ center_url = h.url_for( controller='cloud', action='list' )
else:
center_url = h.url_for( '/static/welcome.html' )
%>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -1,203 +1,230 @@
-<!-- -->
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<%inherit file="/base.mako"/>
+<%namespace file="/base_panels.mako" import="overlay" />
-<%
-from galaxy.util.expressions import ExpressionContext
-%>
+<%def name="stylesheets()">
+ ${h.css( "autocomplete_tagging", "panel_layout", "base", "library" )}
+</%def>
-<html>
+<%def name="javascripts()">
+ ${h.js( "jquery", "galaxy.panels", "galaxy.base", "jquery.autocomplete", "jstorage" )}
+ <script type="text/javascript">
+ $(function() {
+ $(window).bind("refresh_on_change", function() {
+ $(':file').each( function() {
+ var file = $(this);
+ var file_value = file.val();
+ if (file_value) {
+ // disable file input, since we don't want to upload the file on refresh
+ var file_name = $(this).attr("name");
+ file.attr( { name: 'replaced_file_input_' + file_name, disabled: true } );
+ // create a new hidden field which stores the filename and has the original name of the file input
+ var new_file_input = $(document.createElement('input'));
+ new_file_input.attr( { "type": "hidden", "value": file_value, "name": file_name } );
+ file.after(new_file_input);
+ }
+ });
+ });
-<head>
-<title>Galaxy</title>
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-${h.css( "base", "autocomplete_tagging" )}
-${h.js( "jquery", "galaxy.base", "jquery.autocomplete" )}
-<script type="text/javascript">
-$(function() {
- $(window).bind("refresh_on_change", function() {
- $(':file').each( function() {
- var file = $(this);
- var file_value = file.val();
- if (file_value) {
- // disable file input, since we don't want to upload the file on refresh
- var file_name = $(this).attr("name");
- file.attr( { name: 'replaced_file_input_' + file_name, disabled: true } );
- // create a new hidden field which stores the filename and has the original name of the file input
- var new_file_input = $(document.createElement('input'));
- new_file_input.attr( { "type": "hidden", "value": file_value, "name": file_name } );
- file.after(new_file_input);
+ // For drilldown parameters: add expand/collapse buttons and collapse initially-collapsed elements
+ $( 'li ul.toolParameterExpandableCollapsable' ).each( function() {
+ var el = $(this),
+ parent_li = el.parent('li'),
+ sub_ul = el.remove();
+
+ parent_li.find( 'span' ).wrapInner( '<a/>' ).find( 'a' ).click( function() {
+ sub_ul.toggle();
+ $(this).html( sub_ul.is(":hidden") ? '[+]' : '[-]' );
+ });
+ parent_li.append( sub_ul );
+ });
+
+ $( 'ul ul.toolParameterExpandableCollapsable' ).each( function(i) {
+ var el = $(this);
+ if (el.attr("default_state") === "collapsed") {
+ el.hide();
}
});
+
+ function checkUncheckAll( name, check ) {
+ $("input[name='" + name + "'][type='checkbox']").attr('checked', !!check);
+ }
+
+ // Inserts the Select All / Unselect All buttons for checkboxes
+ $("div.checkUncheckAllPlaceholder").each( function() {
+ var check_name = $(this).attr("checkbox_name");
+ select_link = $("<a class='action-button'></a>").text("Select All").click(function() {
+ checkUncheckAll(check_name, true);
+ });
+ unselect_link = $("<a class='action-button'></a>").text("Unselect All").click(function() {
+ checkUncheckAll(check_name, false);
+ });
+ $(this).append(select_link).append(" ").append(unselect_link);
+ });
+
+ $(".add-librarydataset").live("click", function() {
+ var link = $(this);
+ $.ajax({
+ url: "/tracks/list_libraries",
+ error: function(xhr, ajaxOptions, thrownError) { alert( "Grid failed" ); console.log(xhr, ajaxOptions, thrownError); },
+ success: function(table_html) {
+ show_modal(
+ "Select Library Dataset",
+ table_html, {
+ "Cancel": function() {
+ hide_modal();
+ },
+ "Select": function() {
+ $('input[name=ldda_ids]:checked').each(function() {
+ var name = $.trim( $(this).siblings("div").find("a").text() );
+ var id = $(this).val();
+ link.text(name);
+ link.siblings("input[type=hidden]").val(id);
+ });
+ hide_modal();
+ }
+ }
+ );
+ }
+ });
+ });
});
-
- // For drilldown parameters: add expand/collapse buttons and collapse initially-collapsed elements
- $( 'li ul.toolParameterExpandableCollapsable' ).each( function() {
- var el = $(this),
- parent_li = el.parent('li'),
- sub_ul = el.remove();
-
- parent_li.find( 'span' ).wrapInner( '<a/>' ).find( 'a' ).click( function() {
- sub_ul.toggle();
- $(this).html( sub_ul.is(":hidden") ? '[+]' : '[-]' );
- });
- parent_li.append( sub_ul );
- });
-
- $( 'ul ul.toolParameterExpandableCollapsable' ).each( function(i) {
- var el = $(this);
- if (el.attr("default_state") === "collapsed") {
- el.hide();
+
+ %if not add_frame.debug:
+ if( window.name != "galaxy_main" ) {
+ location.replace( '${h.url_for( controller='root', action='index', tool_id=tool.id )}' );
}
- });
-
- function checkUncheckAll( name, check ) {
- $("input[name='" + name + "'][type='checkbox']").attr('checked', !!check);
- }
-
- // Inserts the Select All / Unselect All buttons for checkboxes
- $("div.checkUncheckAllPlaceholder").each( function() {
- var check_name = $(this).attr("checkbox_name");
- select_link = $("<a class='action-button'></a>").text("Select All").click(function() {
- checkUncheckAll(check_name, true);
- });
- unselect_link = $("<a class='action-button'></a>").text("Unselect All").click(function() {
- checkUncheckAll(check_name, false);
- });
- $(this).append(select_link).append(" ").append(unselect_link);
- });
-});
+ %endif
-%if not add_frame.debug:
- if( window.name != "galaxy_main" ) {
- location.replace( '${h.url_for( controller='root', action='index', tool_id=tool.id )}' );
- }
-%endif
+ </script>
-</script>
-</head>
+</%def>
-<body>
- <%def name="do_inputs( inputs, tool_state, errors, prefix, other_values=None )">
- <% other_values = ExpressionContext( tool_state, other_values ) %>
- %for input_index, input in enumerate( inputs.itervalues() ):
- %if not input.visible:
- <% pass %>
- %elif input.type == "repeat":
- <div class="repeat-group">
- <div class="form-title-row"><b>${input.title_plural}</b></div>
- <% repeat_state = tool_state[input.name] %>
- %for i in range( len( repeat_state ) ):
- <div class="repeat-group-item">
- <%
- if input.name in errors:
- rep_errors = errors[input.name][i]
- else:
- rep_errors = dict()
- index = repeat_state[i]['__index__']
- %>
- <div class="form-title-row"><b>${input.title} ${i + 1}</b></div>
- ${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
- <div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
- </div>
- %if rep_errors.has_key( '__index__' ):
- <div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${rep_errors['__index__']}</span></div>
- %endif
- %endfor
- <div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
- </div>
- %elif input.type == "conditional":
- <%
- group_state = tool_state[input.name]
- group_errors = errors.get( input.name, {} )
- current_case = group_state['__current_case__']
- group_prefix = prefix + input.name + "|"
- %>
- %if input.value_ref_in_group:
- ${row_for_param( group_prefix, input.test_param, group_state, group_errors, other_values )}
- %endif
- ${do_inputs( input.cases[current_case].inputs, group_state, group_errors, group_prefix, other_values )}
- %elif input.type == "upload_dataset":
- %if input.get_datatype( trans, other_values ).composite_type is None: #have non-composite upload appear as before
+<%def name="do_inputs( inputs, tool_state, errors, prefix, other_values=None )">
+ <%
+ from galaxy.util.expressions import ExpressionContext
+ other_values = ExpressionContext( tool_state, other_values )
+ %>
+ %for input_index, input in enumerate( inputs.itervalues() ):
+ %if not input.visible:
+ <% pass %>
+ %elif input.type == "repeat":
+ <div class="repeat-group">
+ <div class="form-title-row"><strong>${input.title_plural}</strong></div>
+ <% repeat_state = tool_state[input.name] %>
+ %for i in range( len( repeat_state ) ):
+ <div class="repeat-group-item"><%
if input.name in errors:
- rep_errors = errors[input.name][0]
+ rep_errors = errors[input.name][i]
else:
rep_errors = dict()
+ index = repeat_state[i]['__index__']
%>
- ${do_inputs( input.inputs, tool_state[input.name][0], rep_errors, prefix + input.name + "_" + str( 0 ) + "|", other_values )}
- %else:
- <div class="repeat-group">
- <div class="form-title-row"><b>${input.group_title( other_values )}</b></div>
- <%
- repeat_state = tool_state[input.name]
- %>
- %for i in range( len( repeat_state ) ):
- <div class="repeat-group-item">
- <%
- if input.name in errors:
- rep_errors = errors[input.name][i]
- else:
- rep_errors = dict()
- index = repeat_state[i]['__index__']
- %>
- <div class="form-title-row"><b>File Contents for ${input.title_by_index( trans, i, other_values )}</b></div>
- ${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
- ##<div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
- </div>
- %endfor
- ##<div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
- </div>
+ <div class="form-title-row"><strong>${input.title} ${i + 1}</strong></div>
+ ${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
+ <div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+ </div>
+ %if rep_errors.has_key( '__index__' ):
+ <div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${rep_errors['__index__']}</span></div>
%endif
+ %endfor
+ <div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
+ </div>
+ %elif input.type == "conditional":
+ <%
+ group_state = tool_state[input.name]
+ group_errors = errors.get( input.name, {} )
+ current_case = group_state['__current_case__']
+ group_prefix = prefix + input.name + "|"
+ %>
+ %if input.value_ref_in_group:
+ ${row_for_param( group_prefix, input.test_param, group_state, group_errors, other_values )}
+ %endif
+ ${do_inputs( input.cases[current_case].inputs, group_state, group_errors, group_prefix, other_values )}
+ %elif input.type == "upload_dataset":
+ %if input.get_datatype( trans, other_values ).composite_type is None: #have non-composite upload appear as before
+ <%
+ if input.name in errors:
+ rep_errors = errors[input.name][0]
+ else:
+ rep_errors = dict()
+ %>
+ ${do_inputs( input.inputs, tool_state[input.name][0], rep_errors, prefix + input.name + "_" + str( 0 ) + "|", other_values )}
%else:
- ${row_for_param( prefix, input, tool_state, errors, other_values )}
- %endif
- %endfor
- </%def>
-
- <%def name="row_for_param( prefix, param, parent_state, parent_errors, other_values )">
- <%
- if parent_errors.has_key( param.name ):
- cls = "form-row form-row-error"
- else:
- cls = "form-row"
-
- label = param.get_label()
-
- field = param.get_html_field( trans, parent_state[ param.name ], other_values )
- field.refresh_on_change = param.refresh_on_change
-
- # Field may contain characters submitted by user and these characters may be unicode; handle non-ascii characters gracefully.
- field_html = field.get_html( prefix )
- if type( field_html ) is not unicode:
- field_html = unicode( field_html, 'utf-8' )
-
- if param.type == "hidden":
- return field_html
- %>
- <div class="${cls}">
- %if label:
- <label for="${param.name}">${label}:</label>
- %endif
- <div class="form-row-input">${field_html}</div>
- %if parent_errors.has_key( param.name ):
- <div class="form-row-error-message">
- <div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${parent_errors[param.name]}</span></div>
+ <div class="repeat-group">
+ <div class="form-title-row"><strong>${input.group_title( other_values )}</strong></div>
+ <%
+ repeat_state = tool_state[input.name]
+ %>
+ %for i in range( len( repeat_state ) ):
+ <div class="repeat-group-item">
+ <%
+ if input.name in errors:
+ rep_errors = errors[input.name][i]
+ else:
+ rep_errors = dict()
+ index = repeat_state[i]['__index__']
+ %>
+ <div class="form-title-row"><strong>File Contents for ${input.title_by_index( trans, i, other_values )}</strong></div>
+ ${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
+ ##<div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+ </div>
+ %endfor
+ ##<div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div></div>
%endif
-
- %if param.help:
- <div class="toolParamHelp" style="clear: both;">
- ${param.help}
- </div>
- %endif
-
- <div style="clear: both"></div>
-
- </div>
- </%def>
-
- %if add_frame.from_noframe:
- <div class="warningmessage">
+ %else:
+ ${row_for_param( prefix, input, tool_state, errors, other_values )}
+ %endif
+ %endfor
+</%def>
+
+<%def name="row_for_param( prefix, param, parent_state, parent_errors, other_values )">
+ <%
+ if parent_errors.has_key( param.name ):
+ cls = "form-row form-row-error"
+ else:
+ cls = "form-row"
+
+ label = param.get_label()
+
+ field = param.get_html_field( trans, parent_state[ param.name ], other_values )
+ field.refresh_on_change = param.refresh_on_change
+
+ # Field may contain characters submitted by user and these characters may be unicode; handle non-ascii characters gracefully.
+ field_html = field.get_html( prefix )
+ if type( field_html ) is not unicode:
+ field_html = unicode( field_html, 'utf-8' )
+
+ if param.type == "hidden":
+ return field_html
+ %>
+ <div class="${cls}">
+ %if label:
+ <label for="${param.name}">${label}:</label>
+ %endif
+ <div class="form-row-input">${field_html}</div>
+ %if parent_errors.has_key( param.name ):
+ <div class="form-row-error-message">
+ <div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${parent_errors[param.name]}</span></div>
+ </div>
+ %endif
+
+ %if param.help:
+ <div class="toolParamHelp" style="clear: both;">
+ ${param.help}
+ </div>
+ %endif
+
+ <div style="clear: both;"></div>
+
+ </div>
+</%def>
+
+<% overlay(visible=False) %>
+
+%if add_frame.from_noframe:
+ <div class="warningmessage"><strong>Welcome to Galaxy</strong><hr/>
It appears that you found this tool from a link outside of Galaxy.
@@ -205,60 +232,59 @@
<a href="${h.url_for( controller='root' )}" target="_top">welcome page</a>.
To learn more about what Galaxy is and what it can do for you, please visit
    the <a href="${add_frame.wiki_url}" target="_top">Galaxy wiki</a>.
- </div>
- <br/>
+ </div>
+ <br/>
+%endif
+
+## Handle calculating the redirect URL for the special case where we have an nginx
+## proxied upload and need to run url_for on the redirect portion of the tool action.
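+## e.g. tool.action is normally a plain path that url_for can handle directly; for a
+## proxied upload it may instead be a (prefix, path) pair such as
+## ("http://upload.example.org", "/tool_runner/index") (hypothetical prefix), in which
+## case only the path portion is routed through url_for.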
+<%
+ try:
+ tool_url = h.url_for(tool.action)
+ except AttributeError:
+ assert len(tool.action) == 2
+ tool_url = tool.action[0] + h.url_for(tool.action[1])
+%>
+<div class="toolForm" id="${tool.id}">
+ %if tool.has_multiple_pages:
+ <div class="toolFormTitle">${tool.name} (step ${tool_state.page+1} of ${tool.npages})</div>
+ %else:
+ <div class="toolFormTitle">${tool.name}</div>
%endif
-
- ## handle calculating the redict url for the special case where we have nginx proxy
- ## upload and need to do url_for on the redirect portion of the tool action
- <%
- try:
- tool_url = h.url_for(tool.action)
- except AttributeError:
- assert len(tool.action) == 2
- tool_url = tool.action[0] + h.url_for(tool.action[1])
-    %>
-    <div class="toolForm" id="${tool.id}">
- %if tool.has_multiple_pages:
- <div class="toolFormTitle">${tool.name} (step ${tool_state.page+1} of ${tool.npages})</div>
- %else:
- <div class="toolFormTitle">${tool.name}</div>
- %endif
- <div class="toolFormBody">
- <form id="tool_form" name="tool_form" action="${tool_url}" enctype="${tool.enctype}" target="${tool.target}" method="${tool.method}">
- <input type="hidden" name="tool_id" value="${tool.id}">
- <input type="hidden" name="tool_state" value="${util.object_to_string( tool_state.encode( tool, app ) )}">
- %if tool.display_by_page[tool_state.page]:
- ${trans.fill_template_string( tool.display_by_page[tool_state.page], context=tool.get_param_html_map( trans, tool_state.page, tool_state.inputs ) )}
- <input type="submit" class="primary-button" name="runtool_btn" value="Execute">
- %else:
- ${do_inputs( tool.inputs_by_page[ tool_state.page ], tool_state.inputs, errors, "" )}
- <div class="form-row">
- %if tool_state.page == tool.last_page:
- <input type="submit" class="primary-button" name="runtool_btn" value="Execute">
- %else:
- <input type="submit" class="primary-button" name="runtool_btn" value="Next step">
- %endif
- </div>
- %endif
- </form>
+ <div class="toolFormBody">
+ <form id="tool_form" name="tool_form" action="${tool_url}" enctype="${tool.enctype}" target="${tool.target}" method="${tool.method}">
+ <input type="hidden" name="tool_id" value="${tool.id}">
+ <input type="hidden" name="tool_state" value="${util.object_to_string( tool_state.encode( tool, app ) )}">
+ %if tool.display_by_page[tool_state.page]:
+ ${trans.fill_template_string( tool.display_by_page[tool_state.page], context=tool.get_param_html_map( trans, tool_state.page, tool_state.inputs ) )}
+ <input type="submit" class="primary-button" name="runtool_btn" value="Execute">
+ %else:
+ ${do_inputs( tool.inputs_by_page[ tool_state.page ], tool_state.inputs, errors, "" )}
+ <div class="form-row">
+ %if tool_state.page == tool.last_page:
+ <input type="submit" class="primary-button" name="runtool_btn" value="Execute">
+ %else:
+ <input type="submit" class="primary-button" name="runtool_btn" value="Next step">
+ %endif
+ </div>
+ %endif
+ </form>
+ </div>
+</div>
+%if tool.help:
+ <div class="toolHelp">
+ <div class="toolHelpBody">
+ <%
+ if tool.has_multiple_pages:
+ tool_help = tool.help_by_page[tool_state.page]
+ else:
+ tool_help = tool.help
+
+ # Convert to unicode to display non-ascii characters.
+ if type( tool_help ) is not unicode:
+ tool_help = unicode( tool_help, 'utf-8')
+ %>
+ ${tool_help}
        </div>
    </div>
- %if tool.help:
- <div class="toolHelp">
- <div class="toolHelpBody">
- <%
- if tool.has_multiple_pages:
- tool_help = tool.help_by_page[tool_state.page]
- else:
- tool_help = tool.help
-
- # Convert to unicode to display non-ascii characters.
- if type( tool_help ) is not unicode:
- tool_help = unicode( tool_help, 'utf-8')
- %>
- ${tool_help}
- </div>
- </div>
- %endif
-</body>
-</html>
+%endif
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/tracks/browser.mako
--- a/templates/tracks/browser.mako
+++ b/templates/tracks/browser.mako
@@ -86,6 +86,75 @@
return new_bookmark;
};
+ /**
+ * Use track data to add a track to a view.
+ * TODO: rename function?
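+     * track_data fields used below: track_type, name, hda_ldda, dataset_id,
+     * prefs, filters and tool.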
+ */
+ var add_async_success = function(track_data) {
+ var td = track_data,
+ new_track = new addable_track_types[track_data.track_type](
+ track_data.name, view, track_data.hda_ldda, track_data.dataset_id,
+ track_data.prefs, track_data.filters, track_data.tool );
+ view.add_track(new_track);
+        // Should be replaced with a live event, but couldn't get that working.
+ sortable(new_track.container_div, ".draghandle");
+ view.has_changes = true;
+ $("#no-tracks").hide();
+ };
+
+ /**
+ * Use a popup grid to add more tracks.
+ */
+ var add_tracks = function() {
+ $.ajax({
+ url: "${h.url_for( action='list_histories' )}",
+ data: { "f-dbkey": view.dbkey },
+ error: function() { alert( "Grid failed" ); },
+ success: function(table_html) {
+ show_modal(
+ "Select datasets for new tracks",
+ table_html, {
+ "Cancel": function() {
+ hide_modal();
+ },
+ "Insert": function() {
+ var requests = [];
+ $('input[name=id]:checked,input[name=ldda_ids]:checked').each(function() {
+ var data,
+ id = $(this).val();
+ if ($(this).attr("name") === "id") {
+ data = { hda_id: id };
+ } else {
+ data = { ldda_id: id};
+ }
+ requests[requests.length] = $.ajax({
+ url: "${h.url_for( action='add_track_async' )}",
+ data: data,
+                                dataType: "json"
+ });
+ });
+ // To preserve order, wait until there are definitions for all tracks and then add
+ // them sequentially.
+ $.when.apply($, requests).then(function() {
+ // jQuery always returns an Array for arguments, so need to look at first element
+ // to determine whether multiple requests were made and consequently how to
+ // map arguments to track definitions.
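+                            // e.g. a single request resolves with (data, textStatus, jqXHR),
+                            // while several resolve with one [data, textStatus, jqXHR] array
+                            // per request - hence the instanceof check below.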
+ var track_defs = (arguments[0] instanceof Array ?
+ $.map(arguments, function(arg) { return arg[0]; }) :
+ [ arguments[0] ]
+ );
+ for (var i= 0; i < track_defs.length; i++) {
+ add_async_success(track_defs[i]);
+ }
+ });
+ hide_modal();
+ }
+ }
+ );
+ }
+ });
+ };
+
$(function() {
// Hide bookmarks by default right now.
parent.force_right_panel("hide");
@@ -170,19 +239,7 @@
return "There are unsaved changes to your visualization which will be lost.";
}
};
-
- var add_async_success = function(track_data) {
- var td = track_data,
- new_track = new addable_track_types[track_data.track_type](
- track_data.name, view, track_data.hda_ldda, track_data.dataset_id,
- track_data.prefs, track_data.filters, track_data.tool );
- view.add_track(new_track);
- // Should replace with live event but can't get working
- sortable(new_track.container_div, ".draghandle");
- view.has_changes = true;
- $("#no-tracks").hide();
- };
-
+
%if add_dataset is not None:
$.ajax({
url: "${h.url_for( action='add_track_async' )}",
@@ -195,56 +252,7 @@
$("#viz-options-button").css( "position", "relative" );
make_popupmenu( $("#viz-options-button"), {
- "Add Tracks": function() {
- // Use a popup grid to add more tracks
- $.ajax({
- url: "${h.url_for( action='list_histories' )}",
- data: { "f-dbkey": view.dbkey },
- error: function() { alert( "Grid failed" ); },
- success: function(table_html) {
- show_modal(
- "Select datasets for new tracks",
- table_html, {
- "Cancel": function() {
- hide_modal();
- },
- "Insert": function() {
- var requests = [];
- $('input[name=id]:checked,input[name=ldda_ids]:checked').each(function() {
- var data,
- id = $(this).val();
- if ($(this).attr("name") === "id") {
- data = { hda_id: id };
- } else {
- data = { ldda_id: id};
- }
- requests[requests.length] = $.ajax({
- url: "${h.url_for( action='add_track_async' )}",
- data: data,
- dataType: "json",
- });
- });
- // To preserve order, wait until there are definitions for all tracks and then add
- // them sequentially.
- $.when.apply($, requests).then(function() {
- // jQuery always returns an Array for arguments, so need to look at first element
- // to determine whether multiple requests were made and consequently how to
- // map arguments to track definitions.
- var track_defs = (arguments[0] instanceof Array ?
- $.map(arguments, function(arg) { return arg[0]; }) :
- [ arguments[0] ]
- );
- for (var i= 0; i < track_defs.length; i++) {
- add_async_success(track_defs[i]);
- }
- });
- hide_modal();
- }
- }
- );
- }
- });
- },
+ "Add Tracks": add_tracks,
"Save": function() {
// Show saving dialog box
show_modal("Saving...", "<img src='${h.url_for('/static/images/yui/rel_interstitial_loading.gif')}'/>");
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/user/permissions.mako
--- a/templates/user/permissions.mako
+++ b/templates/user/permissions.mako
@@ -1,6 +1,13 @@
<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
+
<%def name="title()">Change Default Permissions on New Histories</%def>
-<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
<br/><br/>
<ul class="manage-table-actions">
    <li>
@@ -8,5 +15,5 @@
    </li>
</ul>
%if trans.user:
- ${render_permission_form( trans.user, trans.user.email, h.url_for(), trans.user.all_roles() )}
+ ${render_permission_form( trans.user, trans.user.email, h.url_for( controller='user', action='set_default_permissions', cntrller=cntrller ), trans.user.all_roles() )}
%endif
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/base_panels.mako
--- a/templates/webapps/community/base_panels.mako
+++ b/templates/webapps/community/base_panels.mako
@@ -34,9 +34,9 @@
<div class="submenu"><ul><li><a href="${app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" )}">Email comments, bug reports, or suggestions</a></li>
- <li><a target="_blank" href="${app.config.get( "wiki_url", "http://bitbucket.org/galaxy/galaxy-central/wiki" )}">Galaxy Wiki</a></li>
+ <li><a target="_blank" href="${app.config.get( "wiki_url", "http://wiki.g2.bx.psu.edu/" )}">Galaxy Wiki</a></li><li><a target="_blank" href="${app.config.get( "screencasts_url", "http://galaxycast.org" )}">Video tutorials (screencasts)</a></li>
- <li><a target="_blank" href="${app.config.get( "citation_url", "http://bitbucket.org/galaxy/galaxy-central/wiki/Citations" )}">How to Cite Galaxy</a></li>
+ <li><a target="_blank" href="${app.config.get( "citation_url", "http://wiki.g2.bx.psu.edu/Citing%20Galaxy" )}">How to Cite Galaxy</a></li></ul></div></td>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/repository/add_tool_data_table_entry.mako
--- /dev/null
+++ b/templates/webapps/community/repository/add_tool_data_table_entry.mako
@@ -0,0 +1,117 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%
+ from galaxy.web.form_builder import TextField
+ is_new = repository.is_new
+ can_push = trans.app.security_agent.can_push( trans.user, repository )
+ can_upload = can_push
+ can_browse_contents = not is_new
+ can_set_metadata = not is_new
+ can_rate = not is_new and trans.user and repository.user != trans.user
+ can_view_change_log = not is_new
+ if can_push:
+ browse_label = 'Browse or delete repository files'
+ else:
+ browse_label = 'Browse repository files'
+%>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ <script type="text/javascript">
+ $(function(){
+ $("input:text:first").focus();
+ })
+ </script>
+</%def>
+
+<br/><br/>
+<ul class="manage-table-actions">
+ %if is_new and can_upload:
+ <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ), webapp='community' )}">Upload files to repository</a>
+ %else:
+ <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
+ <div popupmenu="repository-${repository.id}-popup">
+ %if can_upload:
+ <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ), webapp='community' )}">Upload files to repository</a>
+ %endif
+ %if can_view_change_log:
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">View change log</a>
+ %endif
+ %if can_rate:
+ <a class="action-button" href="${h.url_for( controller='repository', action='rate_repository', id=trans.app.security.encode_id( repository.id ) )}">Rate repository</a>
+ %endif
+ %if can_browse_contents:
+ <a class="action-button" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${browse_label}</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='gz' )}">Download as a .tar.gz file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='bz2' )}">Download as a .tar.bz2 file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='zip' )}">Download as a zip file</a>
+ %endif
+ </div>
+ %endif
+</ul>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<%def name="render_field( index, field_tup )">
+ <h4 class="msg_head">
+ <div class="form-row">Column ${index + 1}</div>
+ </h4>
+ <div class="msg_body2">
+ <div class="repeat-group-item">
+ <div class="form-row">
+ <% column_field = TextField( field_tup[0], 40, field_tup[1] ) %>
+ ${column_field.get_html()}
+ <div class="toolParamHelp" style="clear: both;">
+                            Enter the name of the location file column (e.g., value, dbkey, name or path). See the tool_data_table_conf.xml file for examples.
+ </div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="remove_button" value="Remove field ${index + 1}"/>
+ </div>
+ </div>
+ </div>
+</%def>
+
+<div class="toolForm">
+ <div class="toolFormTitle">Add tool data table entry</div>
+ <div class="toolFormBody">
+ <form name="add_tool_data_table_entry" id="add_tool_data_table_entry" action="${h.url_for( controller='repository', action='add_tool_data_table_entry', name_attr=name_attr, repository_id=trans.security.encode_id( repository.id ) )}" method="post" >
+ <div class="form-row">
+ <label>Table name:</label>
+ ${name_attr}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Comment lines begin with:</label>
+ <input name="comment_char" type="textfield" value="${comment_char}" size="8"/>
+ <div class="toolParamHelp" style="clear: both;">
+                    Enter the character that designates comment lines in the location file (the default is #).
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Location file name:</label>
+ <input name="loc_filename" type="textfield" value="${loc_filename}" size="80"/>
+ <div class="toolParamHelp" style="clear: both;">
+ Enter the name of the location file (e.g., bwa_index.loc).
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Location file columns:</label>
+ </div>
+ %for ctr, field_tup in enumerate( column_fields ):
+ ${render_field( ctr, field_tup )}
+ %endfor
+ <div class="form-row">
+ <input type="submit" name="add_field_button" value="Add field"/>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="add_tool_data_table_entry_button" value="Save"/>
+ </div>
+ </form>
+ </div>
+</div>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/repository/browse_repository.mako
--- a/templates/webapps/community/repository/browse_repository.mako
+++ b/templates/webapps/community/repository/browse_repository.mako
@@ -5,12 +5,13 @@
<%
from galaxy.web.framework.helpers import time_ago
+ is_admin = trans.user_is_admin()
is_new = repository.is_new
can_push = trans.app.security_agent.can_push( trans.user, repository )
can_upload = can_push
can_browse_contents = not is_new
can_rate = trans.user and repository.user != trans.user
- can_manage = repository.user == trans.user
+ can_manage = is_admin or repository.user == trans.user
can_view_change_log = not is_new
%>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -44,7 +44,7 @@
}
// The following is used only in ~/templates/webapps/community/repository/upload.mako.
if (document.forms["upload_form"]) {
- document.upload_form.upload_point.value = selKeys[0];
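+            // slice(-1) keeps only the last of the selected keys as the upload point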
+ document.upload_form.upload_point.value = selKeys.slice(-1);
}
},
onActivate: function(dtnode) {
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/repository/create_repository.mako
--- a/templates/webapps/community/repository/create_repository.mako
+++ b/templates/webapps/community/repository/create_repository.mako
@@ -17,15 +17,15 @@
<div class="toolForm"><div class="toolFormTitle">Create Repository</div><div class="toolFormBody">
- <form name="create_repository_form" id="create_repository_form" action="${h.url_for( action='create_repository' )}" method="post" >
+ <form name="create_repository_form" id="create_repository_form" action="${h.url_for( controller='repository', action='create_repository' )}" method="post" ><div class="form-row"><label>Name:</label>
- <input name="name" type="textfield" value="${name}" size=40"/>
+ <input name="name" type="textfield" value="${name}" size="40"/><div style="clear: both"></div></div><div class="form-row"><label>Synopsis:</label>
- <input name="description" type="textfield" value="${description}" size=80"/>
+ <input name="description" type="textfield" value="${description}" size="80"/><div style="clear: both"></div></div><div class="form-row">
@@ -38,7 +38,7 @@
<div style="clear: both"></div></div><div class="form-row">
- <label>Category</label>
+ <label>Categories</label><div class="form-row"><select name="category_id" multiple>
%for category in categories:
@@ -50,6 +50,9 @@
%endfor
</select></div>
+ <div class="toolParamHelp" style="clear: both;">
+ Multi-select list - hold the appropriate key while clicking to select multiple categories.
+ </div><div style="clear: both"></div></div><div class="form-row">
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/repository/manage_repository.mako
--- a/templates/webapps/community/repository/manage_repository.mako
+++ b/templates/webapps/community/repository/manage_repository.mako
@@ -9,6 +9,7 @@
can_push = trans.app.security_agent.can_push( trans.user, repository )
can_upload = can_push
can_browse_contents = not is_new
+ can_set_metadata = not is_new
can_rate = not is_new and trans.user and repository.user != trans.user
can_view_change_log = not is_new
if can_push:
@@ -79,10 +80,10 @@
%endif
%if can_browse_contents:
<a class="action-button" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${browse_label}</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='gz' )}">Download as a .tar.gz file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='bz2' )}">Download as a .tar.bz2 file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='zip' )}">Download as a zip file</a>
%endif
- <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='gz' )}">Download as a .tar.gz file</a>
- <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='bz2' )}">Download as a .tar.bz2 file</a>
-            <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='zip' )}">Download as a zip file</a>
        </div>
%endif
</ul>
@@ -121,9 +122,9 @@
<div class="form-row"><label>Version:</label>
%if can_view_change_log:
- <a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${tip}</a>
+ <a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${repository.revision}</a>
%else:
- ${tip}
+ ${repository.revision}
%endif
</div><div class="form-row">
@@ -150,10 +151,133 @@
        </form>
    </div>
</div>
+<p/>
+<div class="toolForm">
+ <div class="toolFormTitle">Manage categories</div>
+ <div class="toolFormBody">
+ <form name="categories" id="categories" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+ <div class="form-row">
+ <label>Categories</label>
+ <select name="category_id" multiple>
+ %for category in categories:
+ %if category.id in selected_categories:
+ <option value="${trans.security.encode_id( category.id )}" selected>${category.name}</option>
+ %else:
+ <option value="${trans.security.encode_id( category.id )}">${category.name}</option>
+ %endif
+ %endfor
+ </select>
+ <div class="toolParamHelp" style="clear: both;">
+ Multi-select list - hold the appropriate key while clicking to select multiple categories.
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="manage_categories_button" value="Save"/>
+ </div>
+ </form>
+ </div>
+</div>
+<p/>
+%if can_set_metadata:
+ <p/>
+ <div class="toolForm">
+ <div class="toolFormTitle">Repository metadata</div>
+ <div class="toolFormBody">
+ %if metadata:
+ %if 'tools' in metadata:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><label>Tools:</label></td>
+ </tr>
+ </table>
+ </div>
+ <div class="form-row">
+ <% tool_dicts = metadata[ 'tools' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>description</b></td>
+ <td><b>version</b></td>
+ <td><b>requirements</b></td>
+ </tr>
+ %for tool_dict in tool_dicts:
+ <tr>
+ <td><a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">${tool_dict[ 'name' ]}</a></td>
+ <td>${tool_dict[ 'description' ]}</td>
+ <td>${tool_dict[ 'version' ]}</td>
+ <td>
+ <%
+ if 'requirements' in tool_dict:
+ requirements = tool_dict[ 'requirements' ]
+ else:
+ requirements = None
+ %>
+ %if requirements:
+ <%
+ requirements_str = ''
+ for requirement_dict in tool_dict[ 'requirements' ]:
+ requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
+ requirements_str = requirements_str.rstrip( ', ' )
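+                                                    # e.g. yields "samtools (package), R (package)" (hypothetical requirement names)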
+ %>
+ ${requirements_str}
+ %else:
+ none
+ %endif
+ </td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %if 'workflows' in metadata:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><label>Workflows:</label></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <% workflow_dicts = metadata[ 'workflows' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>format-version</b></td>
+ <td><b>annotation</b></td>
+ </tr>
+ %for workflow_dict in workflow_dicts:
+ <tr>
+ <td>${workflow_dict[ 'name' ]}</td>
+ <td>${workflow_dict[ 'format-version' ]}</td>
+ <td>${workflow_dict[ 'annotation' ]}</td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %endif
+ <form name="set_metadata" action="${h.url_for( controller='repository', action='set_metadata', id=trans.security.encode_id( repository.id ), ctx_str=repository.tip )}" method="post">
+ <div class="form-row">
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <input type="submit" name="set_metadata_button" value="Reset metadata"/>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Inspect the repository and reset the above attributes for the repository tip.
+ </div>
+ </div>
+ </form>
+ </div>
+ </div>
+%endif
%if trans.app.config.smtp_server:
<p/><div class="toolForm">
- <div class="toolFormTitle">${repository.name}</div>
+ <div class="toolFormTitle">Notification on update</div><div class="toolFormBody"><form name="receive_email_alerts" id="receive_email_alerts" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" ><div class="form-row">
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/repository/rate_repository.mako
--- a/templates/webapps/community/repository/rate_repository.mako
+++ b/templates/webapps/community/repository/rate_repository.mako
@@ -4,12 +4,13 @@
<%
from galaxy.web.framework.helpers import time_ago
from urllib import quote_plus
+ is_admin = trans.user_is_admin()
is_new = repository.is_new
can_push = trans.app.security_agent.can_push( trans.user, repository )
can_upload = can_push
can_browse_contents = not is_new
can_rate = repository.user != trans.user
- can_manage = repository.user == trans.user
+ can_manage = is_admin or repository.user == trans.user
can_view_change_log = not is_new
if can_push:
browse_label = 'Browse or delete repository files'
@@ -103,7 +104,7 @@
</div><div class="form-row"><label>Version:</label>
- ${tip}
+ ${repository.revision}
<div style="clear: both"></div></div><div class="form-row">
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/repository/tool_form.mako
--- /dev/null
+++ b/templates/webapps/community/repository/tool_form.mako
@@ -0,0 +1,166 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%
+ from galaxy.util.expressions import ExpressionContext
+ from galaxy import util
+ from galaxy.tools.parameters.basic import DataToolParameter, ColumnListParameter, SelectToolParameter
+ from galaxy.web.form_builder import SelectField
+
+ is_admin = trans.user_is_admin()
+ is_new = repository.is_new
+ can_push = trans.app.security_agent.can_push( trans.user, repository )
+ can_upload = can_push
+ can_browse_contents = not is_new
+ can_rate = trans.user and repository.user != trans.user
+ can_manage = is_admin or repository.user == trans.user
+ can_view_change_log = not is_new
+ if can_push:
+ browse_label = 'Browse or delete repository files'
+ else:
+ browse_label = 'Browse repository files'
+%>
+
+<html>
+ <head>
+ <title>Galaxy tool display</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+ ${h.css( "base" )}
+ </head>
+ <body>
+ <%def name="do_inputs( inputs, tool_state, prefix, other_values=None )">
+ <% other_values = ExpressionContext( tool_state, other_values ) %>
+ %for input_index, input in enumerate( inputs.itervalues() ):
+ %if not input.visible:
+ <% pass %>
+ %elif input.type == "repeat":
+ <div class="repeat-group">
+ <div class="form-title-row">
+ <b>${input.title_plural}</b>
+ </div>
+ <div class="repeat-group-item">
+ <div class="form-title-row">
+ <b>${input.title} 0</b>
+ </div>
+ </div>
+ </div>
+ %elif input.type == "conditional":
+ %if tool_state.items():
+ <%
+ group_state = tool_state[input.name][0]
+ current_case = group_state['__current_case__']
+ group_prefix = prefix + input.name + "|"
+ %>
+ %if input.value_ref_in_group:
+ ${row_for_param( group_prefix, input.test_param, group_state, other_values )}
+ %endif
+ ${do_inputs( input.cases[current_case].inputs, group_state, group_prefix, other_values )}
+ %endif
+ %elif input.type == "upload_dataset":
+ %if input.get_datatype( trans, other_values ).composite_type is None:
+                    ## Have non-composite uploads appear as before
+ ${do_inputs( input.inputs, tool_state[input.name][0], prefix + input.name + "_" + str( 0 ) + "|", other_values )}
+ %else:
+ <div class="repeat-group">
+ <div class="form-title-row">
+ <b>${input.group_title( other_values )}</b>
+ </div>
+ <div class="repeat-group-item">
+ <div class="form-title-row">
+ <b>File Contents for ${input.title_by_index( trans, 0, other_values )}</b>
+ </div>
+                        </div>
+                    </div>
+ %endif
+ %else:
+ ${row_for_param( prefix, input, tool_state, other_values )}
+ %endif
+ %endfor
+ </%def>
+
+ <%def name="row_for_param( prefix, param, parent_state, other_values )">
+ <%
+ label = param.get_label()
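+            # Data and column parameters have no history datasets to draw options from
+            # in the tool shed preview, so a placeholder select (holding just the
+            # parameter or data_ref name) is rendered instead.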
+ if isinstance( param, DataToolParameter ) or isinstance( param, ColumnListParameter ):
+ field = SelectField( param.name )
+ field.add_option( param.name, param.name )
+ field_html = field.get_html()
+ elif isinstance( param, SelectToolParameter ) and hasattr( param, 'data_ref' ):
+ field = SelectField( param.name, display=param.display )
+ field.add_option( param.data_ref, param.data_ref )
+ field_html = field.get_html( prefix )
+ else:
+ field = param.get_html_field( trans, None, other_values )
+ field_html = field.get_html( prefix )
+ %>
+ <div class="form-row">
+ %if label:
+ <label for="${param.name}">${label}:</label>
+ %endif
+ <div class="form-row-input">${field_html}</div>
+ %if param.help:
+ <div class="toolParamHelp" style="clear: both;">
+ ${param.help}
+ </div>
+ %endif
+ <div style="clear: both"></div>
+ </div>
+ </%def>
+
+ <br/><br/>
+ <ul class="manage-table-actions">
+ %if is_new:
+ <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ), webapp='community' )}">Upload files to repository</a>
+ %else:
+            <li><a class="action-button menubutton" id="repository-${repository.id}-popup">Repository Actions</a></li>
+ <div popupmenu="repository-${repository.id}-popup">
+ %if can_manage:
+ <a class="action-button" href="${h.url_for( controller='repository', action='manage_repository', id=trans.app.security.encode_id( repository.id ) )}">Manage repository</a>
+ %else:
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_repository', id=trans.app.security.encode_id( repository.id ) )}">View repository</a>
+ %endif
+ %if can_upload:
+ <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ), webapp='community' )}">Upload files to repository</a>
+ %endif
+ %if can_view_change_log:
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">View change log</a>
+ %endif
+ %if can_browse_contents:
+ <a class="action-button" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${browse_label}</a>
+ %endif
+ %if can_rate:
+ <a class="action-button" href="${h.url_for( controller='repository', action='rate_repository', id=trans.app.security.encode_id( repository.id ) )}">Rate repository</a>
+ %endif
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='gz' )}">Download as a .tar.gz file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='bz2' )}">Download as a .tar.bz2 file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='zip' )}">Download as a zip file</a>
+ </div>
+ %endif
+ </ul>
+
+ %if message:
+ ${render_msg( message, status )}
+ %endif
+
+ <div class="toolForm" id="${tool.id}">
+ <div class="toolFormTitle">${tool.name}</div>
+ <div class="toolFormBody">
+ <form id="tool_form" name="tool_form" action="" method="get">
+ <input type="hidden" name="tool_state" value="${util.object_to_string( tool_state.encode( tool, app ) )}">
+ ${do_inputs( tool.inputs_by_page[ tool_state.page ], tool_state.inputs, "" )}
+ </form>
+ </div>
+ </div>
+ %if tool.help:
+ <div class="toolHelp">
+ <div class="toolHelpBody">
+ <%
+ # Convert to unicode to display non-ascii characters.
+ if type( tool.help ) is not unicode:
+ tool.help = unicode( tool.help, 'utf-8')
+ %>
+ ${tool.help}
+ </div>
+ </div>
+ %endif
+ </body>
+</html>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/repository/upload.mako
--- a/templates/webapps/community/repository/upload.mako
+++ b/templates/webapps/community/repository/upload.mako
@@ -2,11 +2,12 @@
<%namespace file="/webapps/community/repository/common.mako" import="*" /><%
+ is_admin = trans.user_is_admin()
is_new = repository.is_new
    can_browse_contents = not is_new
can_rate = repository.user != trans.user
- can_manage = repository.user == trans.user
+ can_manage = is_admin or repository.user == trans.user
can_view_change_log = not is_new
%>
@@ -63,96 +64,96 @@
<div class="toolForm"><div class="toolFormTitle">Upload a single file or a tarball</div><div class="toolFormBody">
- ## TODO: nginx
- <form id="upload_form" name="upload_form" action="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}" enctype="multipart/form-data" method="post">
- <div class="form-row">
- <label>File:</label>
- <div class="form-row-input">
- <input type="file" name="file_data"/>
- </div>
- <div style="clear: both"></div>
- </div>
-
- <div class="form-row">
- <%
- if uncompress_file:
- yes_selected = 'selected'
- no_selected = ''
- else:
- yes_selected = ''
- no_selected = 'selected'
- %>
- <label>Uncompress files?</label>
- <div class="form-row-input">
- <select name="uncompress_file">
- <option value="true" ${yes_selected}>Yes
- <option value="false" ${no_selected}>No
- </select>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- Supported compression types are gz and bz2. If <b>Yes</b> is selected, the uploaded file will be uncompressed. However,
- if the uploaded file is an archive that contains compressed files, the contained files will not be uncompressed. For
- example, if the uploaded compressed file is some_file.tar.gz, some_file.tar will be uncompressed and extracted, but if
- some_file.tar contains some_contained_file.gz, the contained file will not be uncompressed.
- </div>
- </div>
- %if not is_new:
- <div class="form-row">
- <%
- if remove_repo_files_not_in_tar:
- yes_selected = 'selected'
- no_selected = ''
- else:
- yes_selected = ''
- no_selected = 'selected'
- %>
- <label>Remove files in the repository (relative to the root or selected upload point) that are not in the uploaded archive?</label>
- <div class="form-row-input">
- <select name="remove_repo_files_not_in_tar">
- <option value="true" ${yes_selected}>Yes
- <option value="false" ${no_selected}>No
- </select>
+ ## TODO: nginx
+ <form id="upload_form" name="upload_form" action="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}" enctype="multipart/form-data" method="post">
+ <div class="form-row">
+ <label>File:</label>
+ <div class="form-row-input">
+ <input type="file" name="file_data"/>
+ </div>
+ <div style="clear: both"></div></div>
- <div class="toolParamHelp" style="clear: both;">
- This selection pertains only to uploaded tar archives, not to single file uploads. If <b>Yes</b> is selected, files
- that exist in the repository (relative to the root or selected upload point) but that are not in the uploaded archive
- will be removed from the repository. Otherwise, all existing repository files will remain and the uploaded archive
- files will be added to the repository.
+
+ <div class="form-row">
+ <%
+ if uncompress_file:
+ yes_selected = 'selected'
+ no_selected = ''
+ else:
+ yes_selected = ''
+ no_selected = 'selected'
+ %>
+ <label>Uncompress files?</label>
+ <div class="form-row-input">
+ <select name="uncompress_file">
+ <option value="true" ${yes_selected}>Yes
+ <option value="false" ${no_selected}>No
+ </select>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Supported compression types are gz and bz2. If <b>Yes</b> is selected, the uploaded file will be uncompressed. However,
+ if the uploaded file is an archive that contains compressed files, the contained files will not be uncompressed. For
+ example, if the uploaded compressed file is some_file.tar.gz, some_file.tar will be uncompressed and extracted, but if
+ some_file.tar contains some_contained_file.gz, the contained file will not be uncompressed.
+                </div>
+            </div>
- </div>
- %endif
- <div class="form-row">
- <label>Change set commit message:</label>
- <div class="form-row-input">
- %if commit_message:
- <pre><textarea name="commit_message" rows="3" cols="35">${commit_message}</textarea></pre>
- %else:
- <textarea name="commit_message" rows="3" cols="35"></textarea>
+ %if not is_new:
+ <div class="form-row">
+ <%
+ if remove_repo_files_not_in_tar:
+ yes_selected = 'selected'
+ no_selected = ''
+ else:
+ yes_selected = ''
+ no_selected = 'selected'
+ %>
+ <label>Remove files in the repository (relative to the root or selected upload point) that are not in the uploaded archive?</label>
+ <div class="form-row-input">
+ <select name="remove_repo_files_not_in_tar">
+ <option value="true" ${yes_selected}>Yes
+ <option value="false" ${no_selected}>No
+ </select>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ This selection pertains only to uploaded tar archives, not to single file uploads. If <b>Yes</b> is selected, files
+ that exist in the repository (relative to the root or selected upload point) but that are not in the uploaded archive
+ will be removed from the repository. Otherwise, all existing repository files will remain and the uploaded archive
+ files will be added to the repository.
+ </div>
+ </div>
%endif
- </div>
- <div class="toolParamHelp" style="clear: both;">
- This is the commit message for the mercurial change set that will be created by this upload.
- </div>
- <div style="clear: both"></div>
- </div>
- %if not repository.is_new:
- <div class="form-row" >
- <label>Contents:</label>
- <div id="tree" >
- Loading...
+ <div class="form-row">
+ <label>Change set commit message:</label>
+ <div class="form-row-input">
+ %if commit_message:
+ <pre><textarea name="commit_message" rows="3" cols="35">${commit_message}</textarea></pre>
+ %else:
+ <textarea name="commit_message" rows="3" cols="35"></textarea>
+ %endif
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ This is the commit message for the mercurial change set that will be created by this upload.
+ </div>
+ <div style="clear: both"></div></div>
- <input type="hidden" id="upload_point" name="upload_point" value=""/>
- <div class="toolParamHelp" style="clear: both;">
- Select a location within the repository to upload your files by clicking a check box next to the location. The
- selected location is considered the upload point. If a location is not selected, the upload point will be the
- repository root.
+ %if not repository.is_new:
+ <div class="form-row" >
+ <label>Contents:</label>
+ <div id="tree" >
+ Loading...
+ </div>
+ <input type="hidden" id="upload_point" name="upload_point" value=""/>
+ <div class="toolParamHelp" style="clear: both;">
+ Select a location within the repository to upload your files by clicking a check box next to the location. The
+ selected location is considered the upload point. If a location is not selected, the upload point will be the
+ repository root.
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ <div class="form-row">
+                <input type="submit" class="primary-button" name="upload_button" value="Upload">
+            </div>
- <div style="clear: both"></div>
- </div>
- %endif
- <div class="form-row">
- <input type="submit" class="primary-button" name="upload_button" value="Upload">
- </div>
- </form>
+        </form>
    </div>
</div>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/repository/view_changelog.mako
--- a/templates/webapps/community/repository/view_changelog.mako
+++ b/templates/webapps/community/repository/view_changelog.mako
@@ -5,9 +5,10 @@
<%
from galaxy.web.framework.helpers import time_ago
+ is_admin = trans.user_is_admin()
is_new = repository.is_new
can_browse_contents = not is_new
- can_manage = trans.user == repository.user
+ can_manage = is_admin or trans.user == repository.user
can_push = trans.app.security_agent.can_push( trans.user, repository )
can_rate = trans.user and repository.user != trans.user
can_upload = can_push
@@ -93,7 +94,7 @@
        %>
        <% display_date = changeset[ 'display_date' ] %>
%if test_date != display_date:
- <tr colspan="2"><td bgcolor="#D8D8D8 ">${display_date}</td></tr>
+ <tr colspan="2"><td bgcolor="#D8D8D8">${display_date}</td></tr>
%endif
<tr><td>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/repository/view_changeset.mako
--- a/templates/webapps/community/repository/view_changeset.mako
+++ b/templates/webapps/community/repository/view_changeset.mako
@@ -5,10 +5,11 @@
<%
from galaxy.web.framework.helpers import time_ago
+ is_admin = trans.user_is_admin()
is_new = repository.is_new
can_browse_contents = not is_new
can_rate = trans.user and repository.user != trans.user
- can_manage = trans.user == repository.user
+ can_manage = is_admin or trans.user == repository.user
can_push = trans.app.security_agent.can_push( trans.user, repository )
can_view_change_log = not is_new
can_upload = can_push
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/community/repository/view_repository.mako
--- a/templates/webapps/community/repository/view_repository.mako
+++ b/templates/webapps/community/repository/view_repository.mako
@@ -79,10 +79,10 @@
%endif
%if can_browse_contents:
<a class="action-button" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${browse_label}</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='gz' )}">Download as a .tar.gz file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='bz2' )}">Download as a .tar.bz2 file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='zip' )}">Download as a zip file</a>
%endif
- <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='gz' )}">Download as a .tar.gz file</a>
- <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='bz2' )}">Download as a .tar.bz2 file</a>
-            <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='zip' )}">Download as a zip file</a>
        </div>
%endif
</ul>
@@ -120,9 +120,9 @@
<div class="form-row"><label>Version:</label>
%if can_view_change_log:
- <a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${tip}</a>
+ <a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${repository.revision}</a>
%else:
- ${tip}
+ ${repository.revision}
%endif
</div><div class="form-row">
@@ -145,10 +145,107 @@
%endif
    </div>
</div>
+%if repository.categories:
+ <p/>
+ <div class="toolForm">
+ <div class="toolFormTitle">Categories</div>
+ <div class="toolFormBody">
+ %for rca in repository.categories:
+ <div class="form-row">
+ ${rca.category.name}
+ </div>
+ %endfor
+ <div style="clear: both"></div>
+ </div>
+ </div>
+%endif
+%if metadata:
+ <p/>
+ <div class="toolForm">
+ <div class="toolFormTitle">Repository metadata</div>
+ <div class="toolFormBody">
+ %if 'tools' in metadata:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><label>Tools:</label></td>
+ </tr>
+ </table>
+ </div>
+ <div class="form-row">
+ <% tool_dicts = metadata[ 'tools' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>description</b></td>
+ <td><b>version</b></td>
+ <td><b>requirements</b></td>
+ </tr>
+ %for tool_dict in tool_dicts:
+ <tr>
+ <td><a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">${tool_dict[ 'name' ]}</a></td>
+ <td>${tool_dict[ 'description' ]}</td>
+ <td>version: ${tool_dict[ 'version' ]}</td>
+ <td>
+ <%
+ if 'requirements' in tool_dict:
+ requirements = tool_dict[ 'requirements' ]
+ else:
+ requirements = None
+ %>
+ %if requirements:
+ <%
+ requirements_str = ''
+ for requirement_dict in tool_dict[ 'requirements' ]:
+ requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
+ requirements_str = requirements_str.rstrip( ', ' )
+ %>
+ ${requirements_str}
+ %else:
+ none
+ %endif
+ </td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %if 'workflows' in metadata:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><label>Workflows:</label></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <% workflow_dicts = metadata[ 'workflows' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>format-version</b></td>
+ <td><b>annotation</b></td>
+ </tr>
+ %for workflow_dict in workflow_dicts:
+ <tr>
+ <td>${workflow_dict[ 'name' ]}</td>
+ <td>${workflow_dict[ 'format-version' ]}</td>
+ <td>${workflow_dict[ 'annotation' ]}</td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ </div>
+ </div>
+%endif
%if trans.user and trans.app.config.smtp_server:
<p/><div class="toolForm">
- <div class="toolFormTitle">${repository.name}</div>
+ <div class="toolFormTitle">Notification on update</div><div class="toolFormBody"><form name="receive_email_alerts" id="receive_email_alerts" action="${h.url_for( controller='repository', action='view_repository', id=trans.security.encode_id( repository.id ) )}" method="post" ><div class="form-row">
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 templates/webapps/galaxy/base_panels.mako
--- a/templates/webapps/galaxy/base_panels.mako
+++ b/templates/webapps/galaxy/base_panels.mako
@@ -107,9 +107,9 @@
<%
menu_options = [
['Email comments, bug reports, or suggestions', app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" ) ],
- ['Galaxy Wiki', app.config.get( "wiki_url", "http://bitbucket.org/galaxy/galaxy-central/wiki" ), "_blank" ],
+ ['Galaxy Wiki', app.config.get( "wiki_url", "http://wiki.g2.bx.psu.edu/" ), "_blank" ],
['Video tutorials (screencasts)', app.config.get( "screencasts_url", "http://galaxycast.org" ), "_blank" ],
- ['How to Cite Galaxy', app.config.get( "screencasts_url", "http://bitbucket.org/galaxy/galaxy-central/wiki/Citations" ), "_blank" ]
+            ['How to Cite Galaxy', app.config.get( "citation_url", "http://wiki.g2.bx.psu.edu/Citing%20Galaxy" ), "_blank" ]
]
tab( "help", "Help", None, menu_options=menu_options)
%>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 test-data/pca_out1.tabular
--- a/test-data/pca_out1.tabular
+++ b/test-data/pca_out1.tabular
@@ -1,6 +1,6 @@
#Component 1 2 3 4
-#Std. deviation 0.9598 0.1436 0.3839 1.706
-#Proportion of variance explained 0.2303 0.005152 0.03684 0.7277
+#Std. deviation 1.706 0.9598 0.3839 0.1436
+#Proportion of variance explained 0.7277 0.2303 0.03684 0.005152
#Loadings 1 2 3 4
c1 0.5224 -0.3723 0.721 0.262
c2 -0.2634 -0.9256 -0.242 -0.1241
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 test-data/pca_out3.tabular
--- a/test-data/pca_out3.tabular
+++ b/test-data/pca_out3.tabular
@@ -1,6 +1,6 @@
#Component 1 2 3 4
-#Std. deviation 0.4905 0.1534 0.2793 2.049
-#Proportion of variance explained 0.05302 0.005183 0.01719 0.9246
+#Std. deviation 2.049 0.4905 0.2793 0.1534
+#Proportion of variance explained 0.9246 0.05302 0.01719 0.005183
#Loadings 1 2 3 4
c1 0.3616 -0.6565 0.581 0.3173
c2 -0.08227 -0.7297 -0.5964 -0.3241
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 test-data/rgtestouts/rgManQQ/rgManQQtest1.html
--- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.html
+++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.html
@@ -13,8 +13,8 @@
<h1>rgManQQtest1</h1><table>
-<tr><td><a href="Allelep_manhattan.png"><img src="Allelep_manhattan.png" alt="Allelep_manhattan.png hspace="10" width="400"><br>(Click to download image Allelep_manhattan.png)</a></td></tr>
-<tr><td><a href="Allelep_qqplot.png"><img src="Allelep_qqplot.png" alt="Allelep_qqplot.png hspace="10" width="400"><br>(Click to download image Allelep_qqplot.png)</a></td></tr>
+<tr><td><a href="Allelep_manhattan.png"><img src="Allelep_manhattan.png" title="Allelep_manhattan.png hspace="10" width="400"><br>(Click to download image Allelep_manhattan.png)</a></td></tr>
+<tr><td><a href="Allelep_qqplot.png"><img src="Allelep_qqplot.png" title="Allelep_qqplot.png hspace="10" width="400"><br>(Click to download image Allelep_qqplot.png)</a></td></tr><tr><td><a href="rgManQQtest1.R">rgManQQtest1.R</a></td></tr><tr><td><a href="rgManQQtest1.R.log">rgManQQtest1.R.log</a></td></tr></table>
@@ -35,7 +35,7 @@
- round_any
+ rename, round_any
@@ -43,11 +43,11 @@
Loading required package: proto
-[1] "### 101 values read from /tmp/rgManQQtemplYC5wa read - now running plots"
+[1] "### 101 values read from /data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat read - now running plots"
[1] "## qqplot on Allelep done"
-[1] "## manhattan on Allelep starting 1 2 3"
+[1] "## manhattan on Allelep starting 2 3 8"
[1] "## manhattan plot on Allelep done"
@@ -62,7 +62,7 @@
# http://StephenTurner.us/
# http://GettingGeneticsDone.blogspot.com/
-# Last updated: Tuesday, December 22, 2009
+# Last updated: 19 July 2011 by Ross Lazarus
# R code for making manhattan plots and QQ plots from plink output files.
# With GWAS data this can take a lot of memory. Recommended for use on
# 64bit machines only, for now.
@@ -72,30 +72,30 @@
library(ggplot2)
coloursTouse = c('firebrick','darkblue','goldenrod','darkgreen')
-# not too fugly but need a colour expert please...
+# not too ugly but need a colour expert please...
-manhattan = function(chrom=NULL,offset=NULL,pvals=NULL, title=NULL, max.y="max",
- suggestiveline=0, genomewide=T, size.x.labels=9, size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
-
+DrawManhattan = function(pvals=Null,chrom=Null,offset=Null,title=NULL, max.y="max",suggestiveline=0, genomewide=T, size.x.labels=9,
+ size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!")
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- d=data.frame(CHR=chrom,BP=offset,P=pvals)
-
- #limit to only chrs 1-23?
- d=d[d$CHR %in% 1:23, ]
-
+ offset = as.integer(offset)
+ pvals = as.double(pvals)
+ chro = as.integer(chrom) # already dealt with X and friends?
+ d=data.frame(CHR=chro,BP=offset,P=pvals)
+ #limit to only chrs 1-22, x=23,y=24,Mt=25?
+ d=d[d$CHR %in% 1:25, ]
if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) {
- d=na.omit(d)
+ #d=na.omit(d)
d=d[d$P>0 & d$P<=1, ]
- d$logp = -log10(d$P)
-
+ d$logp = as.double(-log10(d$P))
d$pos=NA
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = sort(chrlist) # returns lexical ordering
nchr = length(chrlist) # may be any number?
if (nchr >= 2) {
for (x in c(1:nchr)) {
@@ -107,7 +107,11 @@
lastchr = chrlist[x-1] # previous whatever the list
lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1)
d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase
+ if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
+ cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
+ }
tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+
}
ticklim=c(min(d$pos),max(d$pos))
xlabs = chrlist
@@ -129,8 +133,6 @@
if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y
maxy = max(maxy,1.1*genomewideline)
- # if (maxy<8) maxy=8
- # only makes sense if genome wide is assumed - we could have a fine mapping region?
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ]
if (nchr >= 2) {
manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
@@ -149,9 +151,6 @@
axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
axis.ticks=theme_segment(colour=NA)
)
- #manplot = manplot + opts(panel.grid.y.minor=theme_blank(),panel.grid.y.major=theme_blank())
- #manplot = manplot + opts(panel.grid.major=theme_blank())
-
if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3))
if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red")
manplot
@@ -178,16 +177,24 @@
if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank())
qq
}
-rgqqMan = function(infile="/tmp/rgManQQtemplYC5wa",chromcolumn=1, offsetcolumn=2, pvalscolumns=c(3),
+
+rgqqMan = function(infile="/data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
title="rgManQQtest1",grey=0) {
rawd = read.table(infile,head=T,sep='\t')
dn = names(rawd)
cc = dn[chromcolumn]
oc = dn[offsetcolumn]
-nams = c(cc,oc)
+rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case
+rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh
+rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T)
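+# e.g. 'chrX' -> 'X' -> 23, so chromosome labels end up sorting numerically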
+nams = c(cc,oc) # for sorting
plen = length(rawd[,1])
-doreorder=1
print(paste('###',plen,'values read from',infile,'- now running plots',sep=' '))
+rawd = rawd[do.call(order,rawd[nams]),]
+# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
+# in case not yet ordered
if (plen > 0) {
for (pvalscolumn in pvalscolumns) {
if (pvalscolumn > 0)
@@ -199,14 +206,8 @@
ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
- if (doreorder) {
- rawd = rawd[do.call(order,rawd[nams]),]
- # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
- # in case not yet ordered
- doreorder = 0
- }
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
- mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
@@ -227,6 +228,6 @@
</pre>
-<h3><a href="http://rgenetics.org">Rgenetics</a> tool rgManQQ.py run at 07/11/2010 20:04:20</h3>
+<b><a href="http://rgenetics.org">Galaxy Rgenetics</a> tool output rgManQQ.py run at 20/07/2011 13:29:43</b><br/></div></body></html>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tool-data/annotation_profiler_options.xml.sample
--- a/tool-data/annotation_profiler_options.xml.sample
+++ b/tool-data/annotation_profiler_options.xml.sample
@@ -1,4 +1,4 @@
-<filter type="meta_key" name="dbkey" value="hg18">
+<filter type="data_meta" data_ref="input1" meta_key="dbkey" value="hg18"><options><option name="Mapping and Sequencing Tracks" value="group-map"><option name="STS Markers" value="stsMap"/>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tool-data/shared/ucsc/ucsc_build_sites.txt
--- a/tool-data/shared/ucsc/ucsc_build_sites.txt
+++ b/tool-data/shared/ucsc/ucsc_build_sites.txt
@@ -1,6 +1,5 @@
#Harvested from http://genome.ucsc.edu/cgi-bin/das/dsn
main http://genome.ucsc.edu/cgi-bin/hgTracks? priPac1,danRer4,mm9,mm8,droAna1,mm5,caeRem2,mm7,mm6,panTro1,dm3,panTro2,anoCar1,ce4,galGal3,galGal2,ce1,rn3,rn2,droMoj1,droMoj2,rn4,droYak1,droYak2,dp3,dp2,dm1,canFam1,danRer5,canFam2,danRer3,danRer2,ornAna1,ci2,ci1,tetNig1,bosTau1,bosTau3,bosTau2,equCab1,oryLat1,droAna2,droEre1,ponAbe2,rheMac2,sacCer1,droPer1,droSim1,monDom1,cb1,dm2,droSec1,strPur1,droVir2,droVir1,strPur2,sc1,xenTro1,droGri1,xenTro2,cb3,gasAcu1,caePb1,anoGam1,fr2,fr1,hg15,hg16,hg17,hg18,felCat3,apiMel2,monDom4,apiMel1,ce2
-bhri http://ucsc.omics.bhri.internal/cgi-bin/hgTracks? hg18,hg19,mm8,mm9,rn4
#Harvested from http://archaea.ucsc.edu/cgi-bin/das/dsn
archaea http://archaea.ucsc.edu/cgi-bin/hgTracks? alkaEhrl_MLHE_1,shewW318,idioLoih_L2TR,sulSol1,erwiCaro_ATROSEPTICA,symbTher_IAM14863,moorTher_ATCC39073,therFusc_YX,methHung1,bradJapo,therElon,shewPutrCN32,pediPent_ATCC25745,mariMari_MCS10,nanEqu1,baciSubt,chlaTrac,magnMagn_AMB_1,chroViol,ralsSola,acidCryp_JF_5,erytLito_HTCC2594,desuVulg_HILDENBOROUG,pyrAer1,sulfToko1,shewANA3,paraSp_UWE25,geobKaus_HTA426,rhizEtli_CFN_42,uncuMeth_RCI,candBloc_FLORIDANUS,deinRadi,yersPest_CO92,saccEryt_NRRL_2338,rhodRHA1,candCars_RUDDII,burkMall_ATCC23344,eschColi_O157H7,burk383,psycIngr_37,rhodSpha_2_4_1,wolbEndo_OF_DROSOPHIL,burkViet_G4,propAcne_KPA171202,enteFaec_V583,campJeju_81_176,acidJS42,heliPylo_26695,pseuHalo_TAC125,chroSale_DSM3043,methVann1,archFulg1,neisMeni_Z2491_1,fusoNucl,vermEise_EF01_2,anabVari_ATCC29413,tropWhip_TW08_27,heliHepa,acinSp_ADP1,anapMarg_ST_MARIES,natrPhar1,haheChej_KCTC_2396,therPetr_RKU_1,neisGono_FA1090_1,colwPsyc_34H,desuPsyc_LSV54,hyphNept_ATCC15444,vibrChol1,deinGeot_DSM11300,strePyog_M1_GAS,franCcI3,salmTyph,metaSedu,lactSali_UCC118,trepPall,neisMeni_MC58_1,syntWolf_GOETTINGEN,flavJohn_UW101,methBoon1,haemSomn_129PT,shewLoihPV4,igniHosp1,haemInfl_KW20,haloHalo_SL1,ferrAcid1,sphiAlas_RB2256,candPela_UBIQUE_HTCC1,caldSacc_DSM8903,aerPer1,lactPlan,carbHydr_Z_2901,therTher_HB8,vibrVuln_YJ016_1,rhodPalu_CGA009,acidCell_11B,siliPome_DSS_3,therVolc1,haloWals1,rubrXyla_DSM9941,shewAmaz,nocaJS61,vibrVuln_CMCP6_1,sinoMeli,ureaUrea,baciHalo,bartHens_HOUSTON_1,nitrWino_NB_255,hypeButy1,methBurt2,polaJS66,mesoLoti,methMari_C7,caulCres,neisMeni_FAM18_1,acidBact_ELLIN345,caldMaqu1,salmEnte_PARATYPI_ATC,glucOxyd_621H,cytoHutc_ATCC33406,nitrEuro,therMari,coxiBurn,woliSucc,heliPylo_HPAG1,mesoFlor_L1,pyrHor1,methAeol1,procMari_CCMP1375,pyroArse1,oenoOeni_PSU_1,alcaBork_SK2,wiggBrev,actiPleu_L20,lactLact,methJann1,paraDeni_PD1222,borrBurg,pyroIsla1,orieTsut_BORYONG,shewMR4,methKand1,methCaps_BATH,onioYell_PHYTOPLASMA,bordBron,cenaSymb1,burkCeno_HI2424,franTula_TULARENSIS,pyrFur2,mariAqua_VT8,heliPylo_J99,psycArct_273_4,vibrChol_MO10_1,vibrPara1,rickBell_RML369_C,metAce1,buchSp,ehrlRumi_WELGEVONDEN,methLabrZ_1,chlaPneu_CWL029,thioCrun_XCL_2,pyroCali1,chloTepi_TLS,stapAure_MU50,novoArom_DSM12444,magnMC1,zymoMobi_ZM4,salmTyph_TY2,chloChlo_CAD3,azoaSp_EBN1,therTher_HB27,bifiLong,picrTorr1,listInno,bdelBact,gramFors_KT0803,sulfAcid1,geobTher_NG80_2,peloCarb,ralsEutr_JMP134,mannSucc_MBEL55E,syneSp_WH8102,methTherPT1,clavMich_NCPPB_382,therAcid1,syntAcid_SB,porpGing_W83,therNeut0,leifXyli_XYLI_CTCB0,shewFrig,photProf_SS9,thioDeni_ATCC25259,methMaze1,desuRedu_MI_1,burkThai_E264,campFetu_82_40,blocFlor,jannCCS1,nitrMult_ATCC25196,streCoel,soliUsit_ELLIN6076,pastMult,saliRube_DSM13855,methTher1,nostSp,shigFlex_2A,saccDegr_2_40,oceaIhey,dehaEthe_195,rhodRubr_ATCC11170,arthFB24,shewMR7,pireSp,anaeDeha_2CP_C,haloVolc1,dichNodo_VCS1703A,tricEryt_IMS101,mycoGeni,thioDeni_ATCC33889,methSmit1,geobUran_RF4,shewDeni,halMar1,desuHafn_Y51,methStad1,granBeth_CGDNIH1,therPend1,legiPneu_PHILADELPHIA,vibrChol_O395_1,nitrOcea_ATCC19707,campJeju_RM1221,methPetr_PM1,heliAcin_SHEEBA,eschColi_APEC_O1,peloTher_SI,haloHalo1,syntFuma_MPOB,xyleFast,gloeViol,leucMese_ATCC8293,bactThet_VPI_5482,xantCamp,sodaGlos_MORSITANS,geobSulf,roseDeni_OCH_114,coryEffi_YS_314,brucMeli,mycoTube_H37RV,vibrFisc_ES114_1,pyrAby1,burkXeno_LB400,polyQLWP,stapMari1,peloLute_DSM273,burkCeno_AU_1054,shewBalt,nocaFarc_IFM10152,ente638,mculMari1,saliTrop_CNB_440,neorSenn_MIYAYAMA,aquiAeol,dechArom_RCB,myxoXant_DK_1622,burkPseu_1106A,burkCepa_AMMD,methMari_C5_1,azorCaul2,methFlag_KT,leptInte,eschColi_K12,synePCC6,baumCica_HOMALODISCA,methBark1,pseuAeru,geobMeta_GS15,eschColi_CFT073,photLumi,metMar1,hermArse,campJeju,therKoda1,aeroHydr_ATCC7966,baciAnth_AMES,shewOnei,therTeng,lawsIntr_PHE_MN1_00
#Harvested from http://genome-test.cse.ucsc.edu/cgi-bin/das/dsn
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -368,7 +368,6 @@
</section><section name="NGS: Simulation" id="ngs-simulation"><tool file="ngs_simulation/ngs_simulation.xml" />
- <tool file="rgenetics/EpiD.xml" /></section><section name="SNP/WGA: Data; Filters" id="rgdat"><label text="Data: Import and upload" id="rgimport" />
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/annotation_profiler/annotation_profiler_for_interval.py
--- a/tools/annotation_profiler/annotation_profiler_for_interval.py
+++ b/tools/annotation_profiler/annotation_profiler_for_interval.py
@@ -340,6 +340,8 @@
options, args = parser.parse_args()
+ assert os.path.isdir( options.path ), IOError( "Configuration error: Table directory is missing (%s)" % options.path )
+
#get profiler_info
profiler_info = parse_profiler_info( os.path.join( options.path, 'profiler_info.txt' ) )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/data_source/microbial_import.xml
--- a/tools/data_source/microbial_import.xml
+++ b/tools/data_source/microbial_import.xml
@@ -109,7 +109,7 @@
**Note:** Having trouble locating your organism? Click here_ for a list of available species and their location.
-.. _here: http://bitbucket.org/galaxy/galaxy-central/wiki/Microbes
+.. _here: http://wiki.g2.bx.psu.edu/Main/Data%20Libraries/Microbes
</help></tool>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/data_source/ucsc_tablebrowser.xml
--- a/tools/data_source/ucsc_tablebrowser.xml
+++ b/tools/data_source/ucsc_tablebrowser.xml
@@ -5,27 +5,27 @@
initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--><tool name="UCSC Main" id="ucsc_table_direct1" tool_type="data_source">
- <description>table browser</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://ucsc.omics.bhri.internal/cgi-bin/hgTables" check_values="false" method="get">
- <display>go to UCSC Table Browser $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
- <param name="tool_id" type="hidden" value="ucsc_table_direct1" />
- <param name="sendToGalaxy" type="hidden" value="1" />
- <param name="hgta_compressType" type="hidden" value="none" />
- <param name="hgta_outputType" type="hidden" value="bed" />
- </inputs>
- <request_param_translation>
+ <description>table browser</description>
+ <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+ <inputs action="http://genome.ucsc.edu/cgi-bin/hgTables" check_values="false" method="get">
+ <display>go to UCSC Table Browser $GALAXY_URL</display>
+ <param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
+ <param name="tool_id" type="hidden" value="ucsc_table_direct1" />
+ <param name="sendToGalaxy" type="hidden" value="1" />
+ <param name="hgta_compressType" type="hidden" value="none" />
+ <param name="hgta_outputType" type="hidden" value="bed" />
+ </inputs>
+ <request_param_translation><request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" /><request_param galaxy_name="URL" remote_name="URL" missing="" /><request_param galaxy_name="dbkey" remote_name="db" missing="?" /><request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
- <request_param galaxy_name="table" remote_name="hgta_table" missing="unknown table" />
- <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
- <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="tabular" >
+ <request_param galaxy_name="table" remote_name="hgta_table" missing="unknown table" />
+ <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
+ <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="auto" ><value_translation>
- <value galaxy_value="tabular" remote_value="primaryTable" />
- <value galaxy_value="tabular" remote_value="selectedFields" />
+ <value galaxy_value="auto" remote_value="primaryTable" />
+ <value galaxy_value="auto" remote_value="selectedFields" /><value galaxy_value="wig" remote_value="wigData" /><value galaxy_value="interval" remote_value="tab" /><value galaxy_value="html" remote_value="hyperlinks" />
@@ -33,10 +33,10 @@
<value galaxy_value="gtf" remote_value="gff" /></value_translation></request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="tabular" />
- </outputs>
- <options sanitize="False" refresh="True"/>
+ </request_param_translation>
+ <uihints minwidth="800"/>
+ <outputs>
+ <data name="output" format="tabular" label="${tool.name} on ${organism}: ${table} (#if $description == 'range' then $getVar( 'position', 'unknown position' ) else $description#)"/>
+ </outputs>
+ <options sanitize="False" refresh="True"/></tool>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/meme/meme.xml
--- a/tools/meme/meme.xml
+++ b/tools/meme/meme.xml
@@ -304,7 +304,7 @@
</when></conditional>
- <param name="non_commercial_use" label="I certify that I am not using this tool for commercial purposes." type="boolean" truevalue="NON_COMMERCIAL_USE" falsevalue="COMMERCIAL_USE" checked="True">
+ <param name="non_commercial_use" label="I certify that I am not using this tool for commercial purposes." type="boolean" truevalue="NON_COMMERCIAL_USE" falsevalue="COMMERCIAL_USE" checked="False"><validator type="expression" message="This tool is only available for non-commercial use.">value == True</validator></param>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/metag_tools/megablast_xml_parser.py
--- a/tools/metag_tools/megablast_xml_parser.py
+++ b/tools/metag_tools/megablast_xml_parser.py
@@ -2,12 +2,12 @@
import sys, os, re
-assert sys.version_info[:2] >= ( 2, 4 )
-
if sys.version_info[:2] >= ( 2, 5 ):
- import xml.etree.cElementTree as cElementTree
+ import xml.etree.cElementTree as ElementTree
else:
- import cElementTree
+ from galaxy import eggs
+ import pkg_resources; pkg_resources.require( "elementtree" )
+ from elementtree import ElementTree
def stop_err( msg ):
sys.stderr.write( "%s\n" % msg )
@@ -34,7 +34,7 @@
# get an iterable
try:
- context = cElementTree.iterparse( source, events=( "start", "end" ) )
+ context = ElementTree.iterparse( source, events=( "start", "end" ) )
except:
stop_err( "Invalid data format." )
# turn it into an iterator
@@ -46,7 +46,7 @@
stop_err( "Invalid data format." )
outfile = open( sys.argv[2], 'w' )
- try:
+ try:
for event, elem in context:
# for every <Iteration> tag
if event == "end" and elem.tag == "Iteration":
@@ -71,7 +71,7 @@
elem.clear()
except:
outfile.close()
- stop_err( "The input data contains tags that are not recognizable by the tool." )
+ stop_err( "The input data is malformed, or there is more than one dataset in the input file. Error: %s" % sys.exc_info()[1] )
outfile.close()
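
Both this parser and blastxml_to_tabular.py (below) lean on the same streaming idiom: process each <Iteration> element as its end tag arrives, then discard it, so arbitrarily large BLAST XML never has to fit in memory. A minimal sketch of that idiom, written in modern Python rather than the commit's 2.4/2.5-conditional imports, and not part of the changeset itself:

    # Editorial sketch of the iterparse streaming pattern used above.
    import xml.etree.ElementTree as ElementTree

    def iterations(xml_path):
        context = iter(ElementTree.iterparse(xml_path, events=("start", "end")))
        event, root = next(context)      # first event is the root's 'start'
        for event, elem in context:
            if event == "end" and elem.tag == "Iteration":
                yield elem
                root.clear()             # detach processed subtrees to bound memory
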
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/metag_tools/megablast_xml_parser.xml
--- a/tools/metag_tools/megablast_xml_parser.xml
+++ b/tools/metag_tools/megablast_xml_parser.xml
@@ -2,26 +2,23 @@
<description></description><command interpreter="python">megablast_xml_parser.py $input1 $output1</command><inputs>
- <param name="input1" type="data" format="blastxml" label="Megablast XML output" />
+ <param name="input1" type="data" format="blastxml" label="Megablast XML output" /></inputs><outputs>
- <data name="output1" format="tabular"/>
+ <data name="output1" format="tabular"/></outputs>
-<requirements>
- <requirement type="python-module">cElementTree</requirement>
-</requirements><tests>
- <test>
- <param name="input1" value="megablast_xml_parser_test1.gz" ftype="blastxml" />
- <output name="output1" file="megablast_xml_parser_test1_out.tabular" ftype="tabular" />
- </test>
+ <test>
+ <param name="input1" value="megablast_xml_parser_test1.gz" ftype="blastxml" />
+ <output name="output1" file="megablast_xml_parser_test1_out.tabular" ftype="tabular" />
+ </test></tests><help>
**What it does**
This tool processes the XML output of any NCBI blast tool (if you run your own blast jobs, the XML output can be generated with the **-m 7** option).
-
+
-----
**Output fields**
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/multivariate_stats/pca.py
--- a/tools/multivariate_stats/pca.py
+++ b/tools/multivariate_stats/pca.py
@@ -83,10 +83,12 @@
ncomps = len(summary['sdev'])
if type(summary['sdev']) == type({}):
- comps = summary['sdev'].keys()
+ comps_unsorted = summary['sdev'].keys()
+ comps=[]
sd = summary['sdev'].values()
for i in range(ncomps):
- sd[comps.index('Comp.%s' %(i+1))] = summary['sdev'].values()[i]
+ sd[i] = summary['sdev'].values()[comps_unsorted.index('Comp.%s' %(i+1))]
+ comps.append('Comp.%s' %(i+1))
elif type(summary['sdev']) == type([]):
comps=[]
for i in range(ncomps):
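
The pca.py hunk above fixes a subtle indexing bug: when rpy hands back summary['sdev'] as a dict, its key order is arbitrary, so each standard deviation must be looked up by component name rather than by position. A hedged sketch of the intended mapping (illustrative function name, not the wrapper's code):

    # Editorial sketch: re-index R's sdev values by component name
    # ('Comp.1', 'Comp.2', ...) instead of trusting dict ordering.
    def ordered_sdev(sdev):
        comps = ['Comp.%d' % (i + 1) for i in range(len(sdev))]
        if isinstance(sdev, dict):
            return comps, [sdev[c] for c in comps]  # name-keyed, order-safe
        return comps, list(sdev)                    # lists arrive already ordered
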
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/multivariate_stats/pca.xml
--- a/tools/multivariate_stats/pca.xml
+++ b/tools/multivariate_stats/pca.xml
@@ -1,4 +1,4 @@
-<tool id="pca1" name="Principal Component Analysis" version="1.0.1">
+<tool id="pca1" name="Principal Component Analysis" version="1.0.2"><description></description><command interpreter="python">
pca.py
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/ncbi_blast_plus/blastxml_to_tabular.py
--- a/tools/ncbi_blast_plus/blastxml_to_tabular.py
+++ b/tools/ncbi_blast_plus/blastxml_to_tabular.py
@@ -5,7 +5,7 @@
BLAST filename, output format (std for standard 12 columns, or ext for the
extended 24 columns offered in the BLAST+ wrappers).
-The 12 colums output are 'qseqid sseqid pident length mismatch gapopen qstart
+The 12 columns output are 'qseqid sseqid pident length mismatch gapopen qstart
qend sstart send evalue bitscore' or 'std' at the BLAST+ command line, which
mean:
@@ -51,22 +51,23 @@
Be aware that the sequence in the extended tabular output or XML direct from
BLAST+ may or may not use XXXX masking on regions of low complexity. This
can throw off the calculation of percentage identity and gap openings.
-[In fact, both BLAST 2.2.24+ and 2.2.25+ have a sutle bug in this regard,
+[In fact, both BLAST 2.2.24+ and 2.2.25+ have a subtle bug in this regard,
with these numbers changing depending on whether or not the low complexity
filter is used.]
-This script attempts to produce idential output to what BLAST+ would have done.
+This script attempts to produce identical output to what BLAST+ would have done.
However, check this with "diff -b ..." since BLAST+ sometimes includes an extra
space character (probably a bug).
"""
import sys
import re
-assert sys.version_info[:2] >= ( 2, 4 )
if sys.version_info[:2] >= ( 2, 5 ):
- import xml.etree.cElementTree as cElementTree
+ import xml.etree.cElementTree as ElementTree
else:
- import cElementTree
+ from galaxy import eggs
+ import pkg_resources; pkg_resources.require( "elementtree" )
+ from elementtree import ElementTree
def stop_err( msg ):
sys.stderr.write("%s\n" % msg)
@@ -90,7 +91,7 @@
# get an iterable
try:
- context = cElementTree.iterparse(in_file, events=("start", "end"))
+ context = ElementTree.iterparse(in_file, events=("start", "end"))
except:
stop_err("Invalid data format.")
# turn it into an iterator
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
@@ -1,5 +1,6 @@
-<tool id="ncbi_blastn_wrapper" name="NCBI BLAST+ blastn" version="0.0.10">
+<tool id="ncbi_blastn_wrapper" name="NCBI BLAST+ blastn" version="0.0.11"><description>Search nucleotide database with nucleotide query sequence(s)</description>
+ <version_command>blastn -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
## Lines starting hash hash are comments. Galaxy will turn newlines into spaces
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
@@ -1,5 +1,6 @@
-<tool id="ncbi_blastp_wrapper" name="NCBI BLAST+ blastp" version="0.0.10">
+<tool id="ncbi_blastp_wrapper" name="NCBI BLAST+ blastp" version="0.0.11"><description>Search protein database with protein query sequence(s)</description>
+ <version_command>blastp -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
## Lines starting hash hash are comments. Galaxy will turn newlines into spaces
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
@@ -1,5 +1,6 @@
-<tool id="ncbi_blastx_wrapper" name="NCBI BLAST+ blastx" version="0.0.10">
+<tool id="ncbi_blastx_wrapper" name="NCBI BLAST+ blastx" version="0.0.11"><description>Search protein database with translated nucleotide query sequence(s)</description>
+ <version_command>blastx -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
## Lines starting hash hash are comments. Galaxy will turn newlines into spaces
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
@@ -1,5 +1,6 @@
-<tool id="ncbi_tblastn_wrapper" name="NCBI BLAST+ tblastn" version="0.0.10">
+<tool id="ncbi_tblastn_wrapper" name="NCBI BLAST+ tblastn" version="0.0.11"><description>Search translated nucleotide database with protein query sequence(s)</description>
+ <version_command>tblastn -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
## Lines starting hash hash are comments. Galaxy will turn newlines into spaces
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
@@ -1,5 +1,6 @@
-<tool id="ncbi_tblastx_wrapper" name="NCBI BLAST+ tblastx" version="0.0.10">
+<tool id="ncbi_tblastx_wrapper" name="NCBI BLAST+ tblastx" version="0.0.11"><description>Search translated nucleotide database with translated nucleotide query sequence(s)</description>
+ <version_command>tblastx -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
## Lines starting hash hash are comments. Galaxy will turn newlines into spaces
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/new_operations/basecoverage.xml
--- a/tools/new_operations/basecoverage.xml
+++ b/tools/new_operations/basecoverage.xml
@@ -34,7 +34,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
</help>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/new_operations/cluster.xml
--- a/tools/new_operations/cluster.xml
+++ b/tools/new_operations/cluster.xml
@@ -67,7 +67,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/new_operations/complement.xml
--- a/tools/new_operations/complement.xml
+++ b/tools/new_operations/complement.xml
@@ -43,7 +43,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/new_operations/concat.xml
--- a/tools/new_operations/concat.xml
+++ b/tools/new_operations/concat.xml
@@ -41,7 +41,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/new_operations/coverage.xml
--- a/tools/new_operations/coverage.xml
+++ b/tools/new_operations/coverage.xml
@@ -44,7 +44,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/new_operations/intersect.xml
--- a/tools/new_operations/intersect.xml
+++ b/tools/new_operations/intersect.xml
@@ -117,7 +117,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/new_operations/join.xml
--- a/tools/new_operations/join.xml
+++ b/tools/new_operations/join.xml
@@ -78,7 +78,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/new_operations/merge.xml
--- a/tools/new_operations/merge.xml
+++ b/tools/new_operations/merge.xml
@@ -44,7 +44,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/new_operations/subtract.xml
--- a/tools/new_operations/subtract.xml
+++ b/tools/new_operations/subtract.xml
@@ -98,7 +98,7 @@
See Galaxy Interval Operation Screencasts_ (right click to open this link in another window).
-.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc
+.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations
-----
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/next_gen_conversion/fastq_gen_conv.xml
--- a/tools/next_gen_conversion/fastq_gen_conv.xml
+++ b/tools/next_gen_conversion/fastq_gen_conv.xml
@@ -75,7 +75,7 @@
A good description of fastq datasets can be found `here`__, while a description of Galaxy's fastq "logic" can be found `here`__. Because ranges of quality values within different types of fastq datasets overlap, it is very difficult to detect them automatically. This tool supports conversion of two commonly found types (Solexa/Illumina 1.0 and Illumina 1.3+) into fastq Sanger.
.. __: http://en.wikipedia.org/wiki/FASTQ_format
- .. __: http://bitbucket.org/galaxy/galaxy-central/wiki/NGS
+ .. __: http://wiki.g2.bx.psu.edu/Admin/NGS%20Local%20Setup
.. class:: warningmark
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/ngs_rna/cuffcompare_wrapper.xml
--- a/tools/ngs_rna/cuffcompare_wrapper.xml
+++ b/tools/ngs_rna/cuffcompare_wrapper.xml
@@ -1,4 +1,5 @@
-<tool id="cuffcompare" name="Cuffcompare" version="0.9.1">
+<tool id="cuffcompare" name="Cuffcompare" version="0.0.5">
+ <!-- Wrapper supports Cuffcompare versions v1.0.0-v1.0.3 --><description>compare assembled transcripts to a reference annotation and track Cufflinks transcripts across multiple experiments</description><requirements><requirement type="package">cufflinks</requirement>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/ngs_rna/cuffdiff_wrapper.xml
--- a/tools/ngs_rna/cuffdiff_wrapper.xml
+++ b/tools/ngs_rna/cuffdiff_wrapper.xml
@@ -1,4 +1,5 @@
-<tool id="cuffdiff" name="Cuffdiff" version="0.9.1">
+<tool id="cuffdiff" name="Cuffdiff" version="0.0.5">
+ <!-- Wrapper supports Cuffdiff versions v1.0.0-v1.0.3 --><description>find significant changes in transcript expression, splicing, and promoter use</description><requirements><requirement type="package">cufflinks</requirement>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/ngs_rna/cufflinks_wrapper.xml
--- a/tools/ngs_rna/cufflinks_wrapper.xml
+++ b/tools/ngs_rna/cufflinks_wrapper.xml
@@ -1,4 +1,5 @@
-<tool id="cufflinks" name="Cufflinks" version="0.9.1">
+<tool id="cufflinks" name="Cufflinks" version="0.0.5">
+ <!-- Wrapper supports Cufflinks versions v1.0.0-v1.0.3 --><description>transcript assembly and FPKM (RPKM) estimates for RNA-Seq data</description><requirements><requirement type="package">cufflinks</requirement>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/ngs_rna/tophat_wrapper.xml
--- a/tools/ngs_rna/tophat_wrapper.xml
+++ b/tools/ngs_rna/tophat_wrapper.xml
@@ -1,5 +1,6 @@
<tool id="tophat" name="Tophat for Illumina" version="1.5.0"><description>Find splice junctions using RNA-seq data</description>
+ <version_command>tophat --version</version_command><requirements><requirement type="package">tophat</requirement></requirements>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/peak_calling/macs_wrapper.py
--- a/tools/peak_calling/macs_wrapper.py
+++ b/tools/peak_calling/macs_wrapper.py
@@ -51,8 +51,7 @@
cmdline = "macs -t %s" % ",".join( options['input_chipseq'] )
if options['input_control']:
cmdline = "%s -c %s" % ( cmdline, ",".join( options['input_control'] ) )
- cmdline = "%s --format='%s' --name='%s' --gsize='%s' --tsize='%s' --bw='%s' --pvalue='%s' --mfold='%s' %s %s" %\
- ( cmdline, options['format'], experiment_name, options['gsize'], options['tsize'], options['bw'], options['pvalue'], options['mfold'], options['nolambda'], options['futurefdr'] )
+ cmdline = "%s --format='%s' --name='%s' --gsize='%s' --tsize='%s' --bw='%s' --pvalue='%s' --mfold='%s' %s --lambdaset='%s' %s" % ( cmdline, options['format'], experiment_name, options['gsize'], options['tsize'], options['bw'], options['pvalue'], options['mfold'], options['nolambda'], options['lambdaset'], options['futurefdr'] )
if 'wig' in options:
wigextend = int( options['wig']['wigextend'] )
if wigextend >= 0:
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/peak_calling/macs_wrapper.xml
--- a/tools/peak_calling/macs_wrapper.xml
+++ b/tools/peak_calling/macs_wrapper.xml
@@ -28,8 +28,7 @@
<param name="tsize" type="integer" label="Tag size" value="25"/><param name="bw" type="integer" label="Band width" value="300"/><param name="pvalue" type="float" label="Pvalue cutoff for peak detection" value="1e-5" help="default: 1e-5"/>
- <param name="mfold" type="text" label="Select the regions with MFOLD high-confidence enrichment ratio against background to build model" value="10,30"
- help="Range for high-confidence enrichment ratio against background for model are within this range. DEFAULT:10,30" />
+ <param name="mfold" type="integer" label="Select the regions with MFOLD high-confidence enrichment ratio against background to build model" value="32"/><param name="xls_to_interval" label="Parse xls files into into distinct interval files" type="boolean" truevalue="create" falsevalue="do_not_create" checked="False"/><conditional name="wig_type"><param name="wig_type_selector" type="select" label="Save shifted raw tag count at every bp into a wiggle file">
@@ -45,6 +44,7 @@
</when></conditional><param name="nolambda" label="Use fixed background lambda as local lambda for every peak region" type="boolean" truevalue="--nolambda" falsevalue="" checked="False" help="up to 9X more time consuming"/>
+ <param name="lambdaset" type="text" label="3 levels of regions around the peak region to calculate the maximum lambda as local lambda" value="1000,5000,10000" size="50"/><conditional name="nomodel_type"><param name="nomodel_type_selector" type="select" label="Build Model"><option value="nomodel">Do not build the shifting model</option>
@@ -95,7 +95,7 @@
<configfile name="options_file"><%
import simplejson
%>
-#set $__options = { 'experiment_name':str( $experiment_name ), 'gsize':int( float( str( $gsize ) ) ), 'tsize':str( $tsize ), 'bw':str( $bw ), 'pvalue':str( $pvalue ), 'mfold':str( $mfold ), 'nolambda':str( $nolambda ), 'futurefdr':str( $futurefdr ) }
+#set $__options = { 'experiment_name':str( $experiment_name ), 'gsize':int( float( str( $gsize ) ) ), 'tsize':str( $tsize ), 'bw':str( $bw ), 'pvalue':str( $pvalue ), 'mfold':str( $mfold ), 'nolambda':str( $nolambda ), 'lambdaset': str( $lambdaset ), 'futurefdr':str( $futurefdr ) }
#if str( $xls_to_interval ) == 'create':
#set $__options['xls_to_interval'] = { 'peaks_file': str( $output_xls_to_interval_peaks_file ), 'negative_peaks_file': str( $output_xls_to_interval_negative_peaks_file ) }
#else:
@@ -145,13 +145,14 @@
<param name="input_control_file1" value="chipseq_input.bed.gz" ftype="bed" /><param name="experiment_name" value="Galaxy Test Run" /><param name="tsize" value="36" />
- <param name="mfold" value="10,30" />
+ <param name="mfold" value="13" /><param name="gsize" value="2.7e+9" /><param name="bw" value="300" /><param name="pvalue" value="1e-5" /><param name="xls_to_interval" /><param name="wig_type_selector" value="no_wig" /><param name="nolambda"/>
+ <param name="lambdaset" value="1000,5000,10000"/><param name="nomodel_type_selector" value="create_model" /><param name="diag_type_selector" value="no_diag" /><param name="futurefdr"/>
@@ -170,13 +171,14 @@
<param name="input_control_file1" value="chipseq_input.bed.gz" ftype="bed" /><param name="experiment_name" value="Galaxy Test Run" /><param name="tsize" value="36" />
- <param name="mfold" value="10,30" />
+ <param name="mfold" value="13" /><param name="gsize" value="2.7e+9" /><param name="bw" value="300" /><param name="pvalue" value="1e-5" /><param name="xls_to_interval" value="true" /><param name="wig_type_selector" value="no_wig" /><param name="nolambda"/>
+ <param name="lambdaset" value="1000,5000,10000"/><param name="nomodel_type_selector" value="create_model" /><param name="diag_type_selector" value="no_diag" /><param name="futurefdr"/>
@@ -193,7 +195,7 @@
<param name="input_control_file1" value="chipseq_input.bed.gz" ftype="bed" /><param name="experiment_name" value="Galaxy Test Run" /><param name="tsize" value="36" />
- <param name="mfold" value="10,30" />
+ <param name="mfold" value="13" /><param name="gsize" value="2.7e+9" /><param name="bw" value="300" /><param name="pvalue" value="1e-5" />
@@ -202,6 +204,7 @@
<param name="wigextend" value="-1" /><param name="space" value="10" /><param name="nolambda"/>
+ <param name="lambdaset" value="1000,5000,10000"/><param name="nomodel_type_selector" value="create_model" /><param name="diag_type_selector" value="no_diag" /><param name="futurefdr"/>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/plotting/histogram.py
--- a/tools/plotting/histogram.py
+++ b/tools/plotting/histogram.py
@@ -73,8 +73,7 @@
if skipped_lines < i:
try:
- #a = array( matrix )
- a=matrix
+ a = r.array( matrix )
r.pdf( out_fname, 8, 8 )
histogram = r.hist( a, probability=not frequency, main=title, xlab=xlab, breaks=breaks )
if density:
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/plotting/scatterplot.py
--- a/tools/plotting/scatterplot.py
+++ b/tools/plotting/scatterplot.py
@@ -19,14 +19,8 @@
title = sys.argv[5]
xlab = sys.argv[6]
ylab = sys.argv[7]
- out_type = sys.argv[8]
- out_width = int(sys.argv[9])
- out_height = int(sys.argv[10])
- point_size = float(sys.argv[11])
-
- xvec=[]
- yvec=[]
+ matrix = []
skipped_lines = 0
first_invalid_line = 0
invalid_value = ''
@@ -34,19 +28,17 @@
i = 0
for i, line in enumerate( file( in_fname ) ):
valid = True
- vals = []
line = line.rstrip( '\r\n' )
if line and not line.startswith( '#' ):
row = []
fields = line.split( "\t" )
- for c,column in enumerate(columns):
+ for column in columns:
try:
val = fields[column]
if val.lower() == "na":
- v = float( "nan" )
+ row.append( float( "nan" ) )
else:
- v = float( fields[column] )
- vals.append(val)
+ row.append( float( fields[column] ) )
except:
valid = False
skipped_lines += 1
@@ -65,19 +57,12 @@
first_invalid_line = i+1
if valid:
- xvec.append(vals[0])
- yvec.append(vals[1])
+ matrix.append( row )
+
if skipped_lines < i:
try:
- if out_type == "jpg":
- r.jpeg(out_fname,width=out_width,height=out_height)
- elif out_type == "png":
- # type="cairo" needed to be set for headless servers
- r.png(out_fname,type="cairo",width=out_width,height=out_height)
- else:
- r.pdf(out_fname, out_width, out_height)
-
- r.plot(xvec,yvec, type="p", main=title, xlab=xlab, ylab=ylab, col="blue", pch=19,cex=point_size )
+ r.pdf( out_fname, 8, 8 )
+ r.plot( array( matrix ), type="p", main=title, xlab=xlab, ylab=ylab, col="blue", pch=19 )
r.dev_off()
except Exception, exc:
stop_err( "%s" %str( exc ) )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/plotting/scatterplot.xml
--- a/tools/plotting/scatterplot.xml
+++ b/tools/plotting/scatterplot.xml
@@ -1,6 +1,6 @@
<tool id="scatterplot_rpy" name="Scatterplot"><description>of two numeric columns</description>
- <command interpreter="python">scatterplot.py $input $out_file1 $col1 $col2 "$title" "$xlab" "$ylab" $output_format $out_width $out_height $point_size</command>
+ <command interpreter="python">scatterplot.py $input $out_file1 $col1 $col2 "$title" "$xlab" "$ylab"</command><inputs><param name="input" type="data" format="tabular" label="Dataset" help="Dataset missing? See TIP below"/><param name="col1" type="data_column" data_ref="input" numerical="True" label="Numerical column for x axis" />
@@ -8,26 +8,15 @@
<param name="title" size="30" type="text" value="Scatterplot" label="Plot title"/><param name="xlab" size="30" type="text" value="V1" label="Label for x axis"/><param name="ylab" size="30" type="text" value="V2" label="Label for y axis"/>
- <param name="output_format" type="select" label="Output format">
- <option value="pdf">pdf</option>
- <option value="png">png</option>
- <option value="jpg">jpg</option>
- </param>
- <param name="out_width" size="4" type="integer" value="8" label="Width (pdf=inches, image=pixels)"/>
- <param name="out_height" size="4" type="integer" value="8" label="Height (pdf=inches, image=pixels)"/>
- <param name="point_size" size="4" type="float" value="0.2" label="Point Size"/></inputs><outputs>
- <data format="pdf" name="out_file1">
- <change_format>
- <when input="output_format" value="png" format="png" />
- <when input="output_format" value="jpg" format="jpg" />
- </change_format>
- </data>
+ <data format="pdf" name="out_file1" /></outputs><requirements><requirement type="python-module">rpy</requirement></requirements>
+ <!-- TODO: uncomment the following test when we have tools.update_state() working for
+ multiple dependents with the same dependency.
<tests><test><param name="input" value="scatterplot_in1.tabular" ftype="tabular"/>
@@ -36,38 +25,8 @@
<param name="title" value="Scatterplot"/><param name="xlab" value="V1"/><param name="ylab" value="V2"/>
- <param name="out_width" value="8"/>
- <param name="out_height" value="8"/>
- <param name="point_size" value="0.5"/>
- <param name="output_format" value="pdf" /><output name="out_file1" file="scatterplot_out1.pdf" /></test>
- <test>
- <param name="input" value="scatterplot_in1.tabular" ftype="tabular"/>
- <param name="col1" value="2"/>
- <param name="col2" value="3"/>
- <param name="title" value="Scatterplot"/>
- <param name="xlab" value="V1"/>
- <param name="ylab" value="V2"/>
- <param name="out_width" value="800"/>
- <param name="out_height" value="600"/>
- <param name="point_size" value="0.5"/>
- <param name="output_format" value="png" />
- <output name="out_file1" file="scatterplot_out1.png" />
- </test>
- <test>
- <param name="input" value="scatterplot_in1.tabular" ftype="tabular"/>
- <param name="col1" value="2"/>
- <param name="col2" value="3"/>
- <param name="title" value="Scatterplot"/>
- <param name="xlab" value="V1"/>
- <param name="ylab" value="V2"/>
- <param name="out_width" value="800"/>
- <param name="out_height" value="600"/>
- <param name="point_size" value="0.5"/>
- <param name="output_format" value="jpg" />
- <output name="out_file1" file="scatterplot_out1.jpg" />
- </test></tests>
--><help>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/rgenetics/rgManQQ.py
--- a/tools/rgenetics/rgManQQ.py
+++ b/tools/rgenetics/rgManQQ.py
@@ -1,5 +1,9 @@
#!/usr/local/bin/python
-
+# updated july 20 to fix sort order - R unique() sorts into strict collating order
+# so need to sort after unique to revert to lexicographic order for x axis on Manhattan
+# rgmanqq updated july 19 to deal with x,y and mt
+# lots of fixes
+# ross lazarus
import sys,math,shutil,subprocess,os,time,tempfile,string
from os.path import abspath
from rgutils import timenow, RRun, galhtmlprefix, galhtmlpostfix, galhtmlattr
@@ -18,7 +22,7 @@
# http://StephenTurner.us/
# http://GettingGeneticsDone.blogspot.com/
-# Last updated: Tuesday, December 22, 2009
+# Last updated: 19 July 2011 by Ross Lazarus
# R code for making manhattan plots and QQ plots from plink output files.
# With GWAS data this can take a lot of memory. Recommended for use on
# 64bit machines only, for now.
@@ -28,30 +32,30 @@
library(ggplot2)
coloursTouse = c('firebrick','darkblue','goldenrod','darkgreen')
-# not too fugly but need a colour expert please...
+# not too ugly but need a colour expert please...
-manhattan = function(chrom=NULL,offset=NULL,pvals=NULL, title=NULL, max.y="max",
- suggestiveline=0, genomewide=T, size.x.labels=9, size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
-
+DrawManhattan = function(pvals=NULL,chrom=NULL,offset=NULL,title=NULL, max.y="max",suggestiveline=0, genomewide=T, size.x.labels=9,
+ size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!")
genomewideline=NULL # was genomewideline=-log10(5e-8)
if (genomewide) { # use bonferroni since might be only a small region?
genomewideline = -log10(0.05/length(pvals)) }
- d=data.frame(CHR=chrom,BP=offset,P=pvals)
-
- #limit to only chrs 1-23?
- d=d[d$CHR %in% 1:23, ]
-
+ offset = as.integer(offset)
+ pvals = as.double(pvals)
+ chro = as.integer(chrom) # already dealt with X and friends?
+ d=data.frame(CHR=chro,BP=offset,P=pvals)
+ #limit to only chrs 1-22, x=23,y=24,Mt=25?
+ d=d[d$CHR %in% 1:25, ]
if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) {
- d=na.omit(d)
+ #d=na.omit(d)
d=d[d$P>0 & d$P<=1, ]
- d$logp = -log10(d$P)
-
+ d$logp = as.double(-log10(d$P))
d$pos=NA
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = sort(chrlist) # returns lexical ordering
nchr = length(chrlist) # may be any number?
if (nchr >= 2) {
for (x in c(1:nchr)) {
@@ -63,7 +67,11 @@
lastchr = chrlist[x-1] # previous whatever the list
lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1)
d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase
+ if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
+ cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
+ }
tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+
}
ticklim=c(min(d$pos),max(d$pos))
xlabs = chrlist
@@ -85,8 +93,6 @@
if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y
maxy = max(maxy,1.1*genomewideline)
- # if (maxy<8) maxy=8
- # only makes sense if genome wide is assumed - we could have a fine mapping region?
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ]
if (nchr >= 2) {
manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
@@ -105,9 +111,6 @@
axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
axis.ticks=theme_segment(colour=NA)
)
- #manplot = manplot + opts(panel.grid.y.minor=theme_blank(),panel.grid.y.major=theme_blank())
- #manplot = manplot + opts(panel.grid.major=theme_blank())
-
if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3))
if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red")
manplot
@@ -134,21 +137,29 @@
if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank())
qq
}
+
"""
# we need another string to avoid confusion over string substitutions with %in%
# instantiate rcode2 string with infile,chromcol,offsetcol,pvalscols,title before saving and running
-rcode2 = """rgqqMan = function(infile="%s",chromcolumn=%s, offsetcolumn=%s, pvalscolumns=%s,
+rcode2 = """rgqqMan = function(infile="%s",chromcolumn=%d, offsetcolumn=%d, pvalscolumns=c(%s),
title="%s",grey=%d) {
rawd = read.table(infile,head=T,sep='\\t')
dn = names(rawd)
cc = dn[chromcolumn]
oc = dn[offsetcolumn]
-nams = c(cc,oc)
+rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case
+rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh
+rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T)
+nams = c(cc,oc) # for sorting
plen = length(rawd[,1])
-doreorder=1
print(paste('###',plen,'values read from',infile,'- now running plots',sep=' '))
+rawd = rawd[do.call(order,rawd[nams]),]
+# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
+# in case not yet ordered
if (plen > 0) {
for (pvalscolumn in pvalscolumns) {
if (pvalscolumn > 0)
@@ -160,14 +171,8 @@
ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
- if (doreorder) {
- rawd = rawd[do.call(order,rawd[nams]),]
- # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
- # in case not yet ordered
- doreorder = 0
- }
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
- mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
print(paste('## manhattan plot on',cname,'done'))
ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
@@ -198,50 +203,13 @@
this can be called externally, I guess...for QC eg?
"""
if debug:
- print 'doManQQ',input_fname,chrom_col,offset_col,pval_cols,title,grey,ctitle,outdir
- ffd,filtered_fname = tempfile.mkstemp(prefix='rgManQQtemp')
- f = open(filtered_fname,'w')
- inf = open(input_fname,'r')
- ohead = inf.readline().strip().split('\t') # see if we have a header
- inf.seek(0) # rewind
- newhead = ['pval%d' % (x+1) for x in pval_cols]
- newhead.insert(0,'Offset')
- newhead.insert(0,'Chrom')
- havehead = 0
- wewant = [chrom_col,offset_col]
- wewant += pval_cols
- try:
- allnums = ['%d' % x for x in ohead] # this should barf if non numerics == header row?
- f.write('\t'.join(newhead)) # for R to read
- f.write('\n')
- except:
- havehead = 1
- newhead = [ohead[chrom_col],ohead[offset_col]]
- newhead += [ohead[x] for x in pval_cols]
- f.write('\t'.join(newhead)) # use the original head
- f.write('\n')
- for i,row in enumerate(inf):
- if i == 0 and havehead:
- continue # ignore header
- sr = row.strip().split('\t')
- if len(sr) > 1:
- if sr[chrom_col].lower().find('chr') <> -1:
- sr[chrom_col] = sr[chrom_col][3:]
- newr = [sr[x] for x in wewant] # grab cols we need
- s = '\t'.join(newr)
- f.write(s)
- f.write('\n')
- f.close()
- pvc = [x+3 for x in range(len(pval_cols))] # 2 for offset and chrom, 1 for r offset start
- pvc = 'c(%s)' % (','.join(map(str,pvc)))
- rcmd = '%s%s' % (rcode,rcode2 % (filtered_fname,'1','2',pvc,title,grey))
+ print 'doManQQ',input_fname,chrom_col,offset_col,pval_cols,title,grey,ctitle,outdir
+ rcmd = '%s%s' % (rcode,rcode2 % (input_fname,chrom_col,offset_col,pval_cols,title,grey))
if debug:
- print 'running\n%s\n' % rcmd
+ print 'running\n%s\n' % rcmd
rlog,flist = RRun(rcmd=rcmd,title=ctitle,outdir=outdir)
rlog.append('## R script=')
rlog.append(rcmd)
- if beTidy:
- os.unlink(filtered_fname)
return rlog,flist
@@ -272,19 +240,20 @@
offset_col = -1
p = sys.argv[7].strip().split(',')
try:
- p = [int(x) for x in p]
+ q = [int(x) for x in p]
except:
- p = [-1]
+ p = -1
if chrom_col == -1 or offset_col == -1: # was passed as zero - do not do manhattan plots
chrom_col = -1
offset_col = -1
grey = 0
if (sys.argv[8].lower() in ['1','true']):
grey = 1
- if p == [-1]:
+ if p == -1:
print >> sys.stderr,'## Cannot run rgManQQ - missing pval column'
sys.exit(1)
- rlog,flist = doManQQ(input_fname,chrom_col,offset_col,p,title,grey,ctitle,outdir)
+ p = ['%d' % (int(x) + 1) for x in p]
+ rlog,flist = doManQQ(input_fname,chrom_col+1,offset_col+1,','.join(p),title,grey,ctitle,outdir)
flist.sort()
html = [galhtmlprefix % progname,]
html.append('<h1>%s</h1>' % title)
@@ -294,7 +263,7 @@
fname,expl = row # RRun returns pairs of filenames fiddled for the log and R script
e = os.path.splitext(fname)[-1]
if e in ['.png','.jpg']:
- s= '<tr><td><a href="%s"><img src="%s" alt="%s hspace="10" width="400"><br>(Click to download image %s)</a></td></tr>' \
+ s= '<tr><td><a href="%s"><img src="%s" title="%s hspace="10" width="400"><br>(Click to download image %s)</a></td></tr>' \
% (fname,fname,expl,expl )
html.append(s)
else:
@@ -317,3 +286,4 @@
if __name__ == "__main__":
main()
+
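
One detail worth spelling out from the main() hunks above: Galaxy passes 0-based column numbers, R indexes from 1, so every column is shifted by one before being substituted into the rcode2 template. A small sketch (not part of the commit, illustrative name):

    # Editorial sketch of the 0-based to 1-based column handoff.
    def to_r_columns(pval_cols, chrom_col, offset_col):
        pvc = ','.join('%d' % (int(x) + 1) for x in pval_cols)  # fills c(%s) in rcode2
        return chrom_col + 1, offset_col + 1, pvc

    # to_r_columns(['7'], 1, 2) -> (2, 3, '8'), matching the rgqqMan call
    # (chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8)) in the test output above.
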
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/rgenetics/rgManQQ.xml
--- a/tools/rgenetics/rgManQQ.xml
+++ b/tools/rgenetics/rgManQQ.xml
@@ -1,4 +1,4 @@
-<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.1">
+<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.2"><code file="rgManQQ_code.py"/><description>Plots for WGA P values</description>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/rgenetics/rgWebLogo3.py
--- a/tools/rgenetics/rgWebLogo3.py
+++ b/tools/rgenetics/rgWebLogo3.py
@@ -43,6 +43,11 @@
tlf = open(templog,'w')
process = subprocess.Popen(cl, shell=True, stderr=tlf, stdout=tlf)
rval = process.wait()
+ if rval <> 0:
+ print >> sys.stderr, '## rgWebLogo3.py error - executing %s returned error code %d' % cl
+ print >> sys.stderr, '## This may be a data problem or a tool dependency (%s) installation problem' % WEBLOGO
+ print >> sys.stderr, '## Please ensure %s is correctly installed and working on the command line -see http://code.google.com/p/weblogo' % WEBLOGO
+ sys.exit(1)
tlf.close()
tlogs = ''.join(open(templog,'r').readlines())
if len(tlogs) > 1:
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/rgenetics/rgWebLogo3.xml
--- a/tools/rgenetics/rgWebLogo3.xml
+++ b/tools/rgenetics/rgWebLogo3.xml
@@ -1,4 +1,4 @@
-<tool id="rgweblogo3" name="Sequence Logo" version="0.3">
+<tool id="rgweblogo3" name="Sequence Logo" version="0.4"><description>generator for fasta (eg Clustal alignments)</description><command interpreter="python">
rgWebLogo3.py -F $outformat -s $size -i $input -o $output -t "$logoname" -c "$colours" -U "$units"
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/samtools/bam_to_sam.py
--- a/tools/samtools/bam_to_sam.py
+++ b/tools/samtools/bam_to_sam.py
@@ -21,6 +21,7 @@
parser = optparse.OptionParser()
parser.add_option( '', '--input1', dest='input1', help='The input BAM dataset' )
parser.add_option( '', '--output1', dest='output1', help='The output SAM dataset' )
+ parser.add_option( '', '--header', dest='header', action='store_true', default=False, help='Write SAM Header' )
( options, args ) = parser.parse_args()
# output version # of tool
@@ -87,7 +88,11 @@
try:
# Extract all alignments from the input BAM file to SAM format ( since no region is specified, all the alignments will be extracted ).
- command = 'samtools view -o -h %s %s' % ( options.output1, tmp_sorted_aligns_file_name )
+ if options.header:
+ view_options = "-h"
+ else:
+ view_options = ""
+ command = 'samtools view %s -o %s %s' % ( view_options, options.output1, tmp_sorted_aligns_file_name )
tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
tmp_stderr = open( tmp, 'wb' )
proc = subprocess.Popen( args=command, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/samtools/sam_bitwise_flag_filter.py
--- a/tools/samtools/sam_bitwise_flag_filter.py
+++ b/tools/samtools/sam_bitwise_flag_filter.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# Refactored on 11/13/2010 by Kanwei Li
-# Added drop_header option - default is now keep headers for downstream sanity by ross lazarus
+
import sys
import optparse
@@ -14,11 +14,7 @@
options (listed below) default to 'None' if omitted
"""
parser = optparse.OptionParser(usage=usage)
- parser.add_option('--drop_header',
- action = 'store_true',
- dest='drop_header',
- help="Remove sam header - you probably NEVER want this for fussy downstream tools")
-
+
parser.add_option(
'--0x0001','--is_paired',
choices = ( '0','1' ),
@@ -133,24 +129,21 @@
opt_map = { '0': False, '1': True }
used_indices = [(index, opt_map[opt]) for index, opt in enumerate(opt_ary) if opt is not None]
flag_col = int( options.flag_col ) - 1
+
for line in infile:
line = line.rstrip( '\r\n' )
- if line:
- if line.startswith('@'):
- if not options.drop_header:
- print line # usually want these so add -h if you don't want headers
- elif not line.startswith( '#' ) :
- fields = line.split( '\t' )
- flags = int( fields[flag_col] )
+ if line and not line.startswith( '#' ) and not line.startswith( '@' ) :
+ fields = line.split( '\t' )
+ flags = int( fields[flag_col] )
- valid_line = True
- for index, opt_bool in used_indices:
- if bool(flags & 0x0001 << index) != opt_bool:
- valid_line = False
- break
+ valid_line = True
+ for index, opt_bool in used_indices:
+ if bool(flags & 0x0001 << index) != opt_bool:
+ valid_line = False
+ break
- if valid_line:
- print line
+ if valid_line:
+ print line
if __name__ == "__main__": main()
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/samtools/sam_bitwise_flag_filter.xml
--- a/tools/samtools/sam_bitwise_flag_filter.xml
+++ b/tools/samtools/sam_bitwise_flag_filter.xml
@@ -1,11 +1,8 @@
-<tool id="sam_bw_filter" name="Filter SAM" version="1.0.1">
+<tool id="sam_bw_filter" name="Filter SAM" version="1.0.0"><description>on bitwise flag values</description><parallelism method="basic"></parallelism><command interpreter="python">
sam_bitwise_flag_filter.py
- #if $drop_header == "1":
- --drop_header
- #end if
--input_sam_file=$input1
--flag_column=2
#for $bit in $bits
@@ -15,11 +12,6 @@
</command><inputs><param format="sam" name="input1" type="data" label="Select dataset to filter"/>
- <param name="drop_header" type="select" display="radio" label="DROP sam headers"
- help="Set this if you REALLY want to throw away existing sam header metadata - downstream (eg Picard) tools often demand it" >
- <option value="0" selected="true">Keep headers (default)</option>
- <option value="1">Drop headers</option>
- </param><repeat name="bits" title="Flag"><param name="flags" type="select" label="Type"><option value="--0x0001">Read is paired</option>
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 tools/samtools/sam_to_bam.py
--- a/tools/samtools/sam_to_bam.py
+++ b/tools/samtools/sam_to_bam.py
@@ -68,7 +68,7 @@
# and the equCab2.fa file will contain fasta sequences.
seq_path = check_seq_file( options.dbkey, cached_seqs_pointer_file )
tmp_dir = tempfile.mkdtemp()
- if options.ref_file == 'None':
+ if not options.ref_file or options.ref_file == 'None':
# We're using locally cached reference sequences( e.g., /galaxy/data/equCab2/sam_index/equCab2.fa ).
# The indexes for /galaxy/data/equCab2/sam_index/equCab2.fa will be contained in
# a file named /galaxy/data/equCab2/sam_index/equCab2.fa.fai
@@ -125,9 +125,7 @@
tmp_aligns_file = tempfile.NamedTemporaryFile( dir=tmp_dir )
tmp_aligns_file_name = tmp_aligns_file.name
tmp_aligns_file.close()
- # IMPORTANT NOTE: for some reason the samtools view command gzips the resulting bam file without warning,
- # and the docs do not currently state that this occurs ( very bad ).
- command = 'samtools view -h -bt %s -o %s %s' % ( fai_index_file_path, tmp_aligns_file_name, options.input1 )
+ command = 'samtools view -bt %s -o %s %s' % ( fai_index_file_path, tmp_aligns_file_name, options.input1 )
tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
tmp_stderr = open( tmp, 'wb' )
proc = subprocess.Popen( args=command, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
@@ -147,8 +145,6 @@
tmp_stderr.close()
if returncode != 0:
raise Exception, stderr
- if len( open( tmp_aligns_file_name ).read() ) == 0:
- raise Exception, 'Initial BAM file empty'
except Exception, e:
#clean up temp files
if os.path.exists( tmp_dir ):
@@ -188,11 +184,6 @@
stop_err( 'Error sorting alignments from (%s), %s' % ( tmp_aligns_file_name, str( e ) ) )
# Move tmp_aligns_file_name to our output dataset location
sorted_bam_file = '%s.bam' % tmp_sorted_aligns_file_name
- if os.path.getsize( sorted_bam_file ) == 0:
- #clean up temp files
- if os.path.exists( tmp_dir ):
- shutil.rmtree( tmp_dir )
- stop_err( 'Error creating sorted version of BAM file' )
shutil.move( sorted_bam_file, options.output1 )
#clean up temp files
if os.path.exists( tmp_dir ):
@@ -201,6 +192,6 @@
if os.path.getsize( options.output1 ) > 0:
sys.stdout.write( 'SAM file converted to BAM' )
else:
- stop_err( 'The output file is empty, there may be an error with your input file or settings.' )
+ stop_err( 'Error creating sorted version of BAM file.' )
if __name__=="__main__": __main__()
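[editor's note] The conversion above shells out to `samtools view -bt` with stderr redirected to a temporary file, so a non-zero exit can be reported with samtools' own message. A stripped-down sketch of that pattern; the wrapper name and paths are placeholders, not the tool's interface:

    import subprocess
    import tempfile

    def sam_to_bam(fai_index, input_sam, output_bam, tmp_dir):
        # Capture stderr to a file, then surface it only when samtools
        # exits non-zero (the pattern used in the diff above).
        command = 'samtools view -bt %s -o %s %s' % (fai_index, output_bam, input_sam)
        stderr_name = tempfile.NamedTemporaryFile(dir=tmp_dir).name
        tmp_stderr = open(stderr_name, 'wb')
        proc = subprocess.Popen(args=command, shell=True, cwd=tmp_dir,
                                stderr=tmp_stderr.fileno())
        returncode = proc.wait()
        tmp_stderr.close()
        if returncode != 0:
            raise Exception(open(stderr_name).read())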
diff -r 9b03f63cd8e830de9f1716aa5414e0694ca08032 -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -3,7 +3,7 @@
# environment. To tune the application for a multi-user production
# environment, see the documentation at:
#
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/ProductionServer
+# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Production%20Server
#
# Throughout this sample configuration file, except where stated otherwise,
@@ -129,7 +129,7 @@
# Directory where data used by tools is located, see the samples in that
# directory and the wiki for help:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/DataIntegration
+# http://wiki.g2.bx.psu.edu/Admin/Data%20Integration
#tool_data_path = tool-data
# Directory where chrom len files are kept, currently mainly used by trackster
@@ -208,13 +208,13 @@
#logo_url = /
# The URL linked by the "Galaxy Wiki" link in the "Help" menu.
-#wiki_url = http://bitbucket.org/galaxy/galaxy-central/wiki
+#wiki_url = http://wiki.g2.bx.psu.edu/
# The URL linked by the "Email comments..." link in the "Help" menu.
#bugs_email = mailto:galaxy-bugs@bx.psu.edu
# The URL linked by the "How to Cite..." link in the "Help" menu.
-#citation_url = http://bitbucket.org/galaxy/galaxy-central/wiki/Citations
+#citation_url = http://wiki.g2.bx.psu.edu/Citing%20Galaxy
# Serve static content, which must be enabled if you're not serving it via a
# proxy server. These options should be self explanatory and so are not
@@ -314,7 +314,7 @@
# -- Data Libraries
# These library upload options are described in much more detail in the wiki:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/DataLibraries/UploadingFiles
+# http://wiki.g2.bx.psu.edu/Admin/Data%20Libraries/Uploading%20Library%20Files
# Add an option to the library upload form which allows administrators to
# upload a directory of files.
@@ -372,7 +372,7 @@
# User authentication can be delegated to an upstream proxy server (usually
# Apache). The upstream proxy should set a REMOTE_USER header in the request.
# Enabling remote user disables regular logins. For more information, see:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/ApacheProxy
+# http://wiki.g2.bx.psu.edu/Admin/Config/Apache%20Proxy
#use_remote_user = False
# If use_remote_user is enabled and your external authentication
@@ -388,7 +388,7 @@
# users (email addresses). These users will have access to the Admin section
# of the server, and will have access to create users, groups, roles,
# libraries, and more. For more information, see:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Admin/AdminInterface
+# http://wiki.g2.bx.psu.edu/Admin/Interface
#admin_users = None
# Force everyone to log in (disable anonymous access).
@@ -465,7 +465,7 @@
# If running multiple Galaxy processes, one can be designated as the job
# runner. For more information, see:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/WebApplicationScaling
+# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Web%20Application%20Scal…
#enable_job_running = True
# Should jobs be tracked through the database, rather than in memory.
@@ -516,7 +516,7 @@
# Clustering Galaxy is not a straightforward process and requires some
# pre-configuration. See the the wiki before attempting to set any of these
# options:
-# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster
+# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster
# Comma-separated list of job runners to start. local is always started. If
# left commented, no jobs will be run on the cluster, even if a cluster URL is
https://bitbucket.org/galaxy/galaxy-central/changeset/855a749b4504/
changeset: 855a749b4504
user: afgane
date: 2011-07-25 16:50:04
summary: Another merge
affected #: 19 files
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d contrib/collect_sge_job_timings.sh
--- /dev/null
+++ b/contrib/collect_sge_job_timings.sh
@@ -0,0 +1,126 @@
+#!/bin/sh
+
+##
+## CHANGE ME to galaxy's database name
+##
+DATABASE=galaxyprod
+
+##
+## AWK script to extract the relevant fields of SGE's qacct report
+## and write them all in one line.
+AWKSCRIPT='
+$1=="jobnumber" { job_number = $2 }
+$1=="qsub_time" { qsub_time = $2 }
+$1=="start_time" { start_time = $2 }
+$1=="end_time" { end_time = $2
+ print job_number, qsub_time, start_time, end_time
+}
+'
+
+FIFO=$(mktemp -u) || exit 1
+mkfifo "$FIFO" || exit 1
+
+##
+## Write the SGE/QACCT job report into a pipe
+## (later will be loaded into a temporary table)
+qacct -j |
+ egrep "jobnumber|qsub_time|start_time|end_time" |
+ sed 's/ */\t/' |
+ awk -v FS="\t" -v OFS="\t" "$AWKSCRIPT" |
+ grep -v -- "-/-" > "$FIFO" &
+
+##
+## The SQL to generate the report
+##
+SQL="
+--
+-- Temporary table which contains the qsub/start/end times, based on SGE's qacct report.
+--
+CREATE TEMPORARY TABLE sge_times (
+ sge_job_id INTEGER PRIMARY KEY,
+ qsub_time TIMESTAMP WITHOUT TIME ZONE,
+ start_time TIMESTAMP WITHOUT TIME ZONE,
+ end_time TIMESTAMP WITHOUT TIME ZONE
+);
+
+COPY sge_times FROM '$FIFO' ;
+
+--
+-- Temporary table which contains a unified view of all galaxy jobs.
+-- for each job:
+-- the user name, total input size (bytes), and input file types, DBKEY
+-- creation time, update time, SGE job runner parameters
+-- If a job had more than one input file, then some parameters might not be accurate (e.g. DBKEY)
+-- as one will be chosen arbitrarily
+CREATE TEMPORARY TABLE job_input_sizes AS
+SELECT
+ job.job_runner_external_id as job_runner_external_id,
+ min(job.id) as job_id,
+ min(job.create_time) as job_create_time,
+ min(job.update_time) as job_update_time,
+ min(galaxy_user.email) as email,
+ min(job.tool_id) as tool_name,
+-- This hack requires a user-custom aggregate function, comment it out for now
+-- textcat_all(hda.extension || ' ') as file_types,
+ sum(dataset.file_size) as total_input_size,
+ count(dataset.file_size) as input_dataset_count,
+ min(job.job_runner_name) as job_runner_name,
+-- This hack tries to extract the DBKEY attribute from the metadata JSON string
+ min(substring(encode(metadata,'escape') from '\"dbkey\": \\\\[\"(.*?)\"\\\\]')) as dbkey
+FROM
+ job,
+ galaxy_user,
+ job_to_input_dataset,
+ history_dataset_association hda,
+ dataset
+WHERE
+ job.user_id = galaxy_user.id
+ AND
+ job.id = job_to_input_dataset.job_id
+ AND
+ hda.id = job_to_input_dataset.dataset_id
+ AND
+ dataset.id = hda.dataset_id
+ AND
+ job.job_runner_external_id is not NULL
+GROUP BY
+ job.job_runner_external_id;
+
+
+--
+-- Join the two temporary tables, create a nice report
+--
+SELECT
+ job_input_sizes.job_runner_external_id as sge_job_id,
+ job_input_sizes.job_id as galaxy_job_id,
+ job_input_sizes.email,
+ job_input_sizes.tool_name,
+-- ## SEE previous query for commented-out filetypes field
+-- job_input_sizes.file_types,
+ job_input_sizes.job_runner_name as sge_params,
+ job_input_sizes.dbkey,
+ job_input_sizes.total_input_size,
+ job_input_sizes.input_dataset_count,
+ job_input_sizes.job_update_time - job_input_sizes.job_create_time as galaxy_total_time,
+ sge_times.end_time - sge_times.qsub_time as sge_total_time,
+ sge_times.start_time - sge_times.qsub_time as sge_waiting_time,
+ sge_times.end_time - sge_times.start_time as sge_running_time,
+ job_input_sizes.job_create_time as galaxy_job_create_time
+-- ## no need to show the exact times, the deltas (above) are informative enough
+-- job_input_sizes.job_update_time as galaxy_job_update_time,
+-- sge_times.qsub_time as sge_qsub_time,
+-- sge_times.start_time as sge_start_time,
+-- sge_times.end_time as sge_end_time
+FROM
+ job_input_sizes
+LEFT OUTER JOIN
+ SGE_TIMES
+ON (job_input_sizes.job_runner_external_id = sge_times.sge_job_id)
+ORDER BY
+ galaxy_job_create_time
+
+"
+
+echo "$SQL" | psql --pset "footer=off" -F" " -A --quiet "$DATABASE"
+
+
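[editor's note] The AWK stage above folds qacct's one-field-per-line report into a single "jobnumber qsub_time start_time end_time" row per job, which `COPY sge_times FROM '$FIFO'` then loads; the `grep -v -- "-/-"` drops jobs whose times are still unset. The same fold, sketched in Python for clarity (hypothetical function name; the shell script remains the authoritative version):

    def fold_qacct(lines):
        """Yield (jobnumber, qsub_time, start_time, end_time) per job from
        'qacct -j' output; time values may themselves contain spaces."""
        current = {}
        for line in lines:
            parts = line.split(None, 1)  # key, then the rest of the line
            if len(parts) != 2:
                continue
            key, value = parts[0], parts[1].strip()
            if key in ('jobnumber', 'qsub_time', 'start_time'):
                current[key] = value
            elif key == 'end_time':  # last field of a block: emit the row
                yield (current.get('jobnumber'), current.get('qsub_time'),
                       current.get('start_time'), value)
                current = {}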
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -123,7 +123,7 @@
<datatype extension="vcf" type="galaxy.datatypes.tabular:Vcf" display_in_upload="true"><converter file="vcf_to_bgzip_converter.xml" target_datatype="bgzip"/><converter file="vcf_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
- <converter file="vcf_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ <converter file="vcf_to_summary_tree_converter.xml" target_datatype="summary_tree"/></datatype><datatype extension="wsf" type="galaxy.datatypes.wsf:SnpFile" display_in_upload="true"/><datatype extension="velvet" type="galaxy.datatypes.assembly:Velvet" display_in_upload="false"/>
@@ -274,10 +274,10 @@
</registration><sniffers><!--
- The order in which Galaxy attempts to determine data types is
- important because some formats are much more loosely defined
- than others. The following list should be the most rigidly
- defined format first, followed by next-most rigidly defined,
+ The order in which Galaxy attempts to determine data types is
+ important because some formats are much more loosely defined
+ than others. The following list should be the most rigidly
+ defined format first, followed by next-most rigidly defined,
and so on.
--><sniffer type="galaxy.datatypes.tabular:Vcf"/>
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -374,15 +374,15 @@
self.input_translator = root.find( "request_param_translation" )
if self.input_translator:
self.input_translator = ToolInputTranslator.from_element( self.input_translator )
- # Command line (template). Optional for tools that do not invoke a
- # local program
+ # Command line (template). Optional for tools that do not invoke a local program
command = root.find("command")
if command is not None and command.text is not None:
self.command = command.text.lstrip() # get rid of leading whitespace
+ # Must pre-pend this AFTER processing the cheetah command template
+ self.interpreter = command.get( "interpreter", None )
else:
self.command = ''
- # Must pre-pend this AFTER processing the cheetah command template
- self.interpreter = command.get("interpreter", None)
+ self.interpreter = None
# Parameters used to build URL for redirection to external app
redirect_url_params = root.find( "redirect_url_params" )
if redirect_url_params is not None and redirect_url_params.text is not None:
@@ -2044,6 +2044,7 @@
def __str__( self ):
return self.value.name
def templates( self ):
+ """ Returns JSON dict of templates => data """
if not self.value:
return None
template_data = {}
@@ -2054,7 +2055,7 @@
for field in template.fields:
tmp_dict[field['label']] = content[field['name']]
template_data[template.name] = tmp_dict
- return template_data
+ return simplejson.dumps( template_data )
def __getattr__( self, key ):
return getattr( self.value, key )
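[editor's note] The first hunk in this file fixes a latent crash: previously `command.get( "interpreter", None )` ran even when the tool had no <command> element at all, so `command` was None and loading failed. The attribute read now happens only inside the not-None branch. A minimal reproduction of the fixed pattern, using plain ElementTree as a stand-in (hypothetical helper name):

    from xml.etree import ElementTree

    def parse_command(root):
        # The <command> element is optional for tools that do not invoke
        # a local program, so both fields must default safely.
        command = root.find('command')
        if command is not None and command.text is not None:
            return command.text.lstrip(), command.get('interpreter', None)
        return '', None

    tool = ElementTree.fromstring(
        '<tool><command interpreter="python">t.py</command></tool>')
    assert parse_command(tool) == ('t.py', 'python')
    assert parse_command(ElementTree.fromstring('<tool/>')) == ('', None)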
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -21,7 +21,7 @@
# RE that tests for valid slug.
VALID_SLUG_RE = re.compile( "^[a-z0-9\-]+$" )
-
+
class BaseController( object ):
"""
Base class for Galaxy web application controllers.
@@ -51,7 +51,7 @@
else:
item_class = None
return item_class
-
+
Root = BaseController
class SharableItemSecurity:
@@ -72,7 +72,7 @@
#
# TODO: need to move UsesHistory, etc. mixins to better location - perhaps lib/galaxy/model/XXX ?
-#
+#
class UsesHistoryDatasetAssociation:
""" Mixin for controllers that use HistoryDatasetAssociation objects. """
@@ -119,15 +119,15 @@
dataset_data = open( dataset.file_name ).read(max_peek_size)
truncated = False
return truncated, dataset_data
-
+
class UsesVisualization( SharableItemSecurity ):
""" Mixin for controllers that use Visualization objects. """
len_files = None
-
+
def _get_dbkeys( self, trans ):
""" Returns all valid dbkeys that a user can use in a visualization. """
-
+
# Read len files.
if not self.len_files:
len_files = glob.glob( os.path.join(trans.app.config.len_file_path, "*.len") )
@@ -137,10 +137,10 @@
user = trans.get_user()
if 'dbkeys' in user.preferences:
user_keys = from_json_string( user.preferences['dbkeys'] )
-
+
dbkeys = [ (v, k) for k, v in trans.db_builds if k in self.len_files or k in user_keys ]
return dbkeys
-
+
def get_visualization( self, trans, id, check_ownership=True, check_accessible=False ):
""" Get a Visualization from the database by id, verifying ownership. """
# Load workflow from database
@@ -152,7 +152,7 @@
error( "Visualization not found" )
else:
return self.security_check( trans.get_user(), visualization, check_ownership, check_accessible )
-
+
def get_visualization_config( self, trans, visualization ):
""" Returns a visualization's configuration. Only works for trackster visualizations right now. """
@@ -172,16 +172,16 @@
dataset = self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
else:
dataset = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id(dataset_id) )
-
+
try:
prefs = t['prefs']
except KeyError:
prefs = {}
-
+
track_type, _ = dataset.datatype.get_track_type()
track_data_provider_class = get_data_provider( original_dataset=dataset )
track_data_provider = track_data_provider_class( original_dataset=dataset )
-
+
tracks.append( {
"track_type": track_type,
"name": t['name'],
@@ -192,15 +192,15 @@
"tool": get_tool_def( trans, dataset ),
"is_child": t.get('is_child', False)
} )
-
- config = { "title": visualization.title, "vis_id": trans.security.encode_id( visualization.id ),
+
+ config = { "title": visualization.title, "vis_id": trans.security.encode_id( visualization.id ),
"tracks": tracks, "bookmarks": bookmarks, "chrom": "", "dbkey": visualization.dbkey }
if 'viewport' in latest_revision.config:
config['viewport'] = latest_revision.config['viewport']
-
+
return config
-
+
class UsesStoredWorkflow( SharableItemSecurity ):
""" Mixin for controllers that use StoredWorkflow objects. """
def get_stored_workflow( self, trans, id, check_ownership=True, check_accessible=False ):
@@ -225,7 +225,7 @@
step.upgrade_messages = module.check_and_update_state()
# Any connected input needs to have value DummyDataset (these
# are not persisted so we need to do it every time)
- module.add_dummy_datasets( connections=step.input_connections )
+ module.add_dummy_datasets( connections=step.input_connections )
# Store state with the step
step.module = module
step.state = module.state
@@ -270,7 +270,7 @@
"""Mixin for controllers that use Galaxy form objects."""
def get_all_forms( self, trans, all_versions=False, filter=None, form_type='All' ):
"""
- Return all the latest forms from the form_definition_current table
+ Return all the latest forms from the form_definition_current table
if all_versions is set to True. Otherwise return all the versions
of all the forms from the form_definition table.
"""
@@ -684,7 +684,7 @@
trans.sa_session.flush()
info_association = sra.run
else:
- info_association = assoc.run
+ info_association = assoc.run
else:
info_association = None
if info_association:
@@ -912,7 +912,7 @@
else:
field_value = int( input_text_value )
elif field_type == CheckboxField.__name__:
- field_value = CheckboxField.is_checked( input_value )
+ field_value = CheckboxField.is_checked( input_value )
elif field_type == PasswordField.__name__:
field_value = kwd.get( field_name, '' )
else:
@@ -1043,7 +1043,7 @@
@web.require_login( "modify Galaxy items" )
def set_slug_async( self, trans, id, new_slug ):
""" Set item slug asynchronously. """
- pass
+ pass
@web.expose
@web.require_login( "share Galaxy items" )
def sharing( self, trans, id, **kwargs ):
@@ -1099,7 +1099,7 @@
item.slug = slug
return True
return False
-
+
"""
Deprecated: `BaseController` used to be available under the name `Root`
"""
@@ -1111,7 +1111,7 @@
user_list_grid = None
role_list_grid = None
group_list_grid = None
-
+
@web.expose
@web.require_admin
def index( self, trans, **kwd ):
@@ -1158,7 +1158,7 @@
toolbox=self.app.toolbox,
message=message,
status='done' )
-
+
# Galaxy Role Stuff
@web.expose
@web.require_admin
@@ -1342,7 +1342,7 @@
action='roles',
webapp=webapp,
message=util.sanitize_text( message ),
- status=status ) )
+ status=status ) )
in_users = []
out_users = []
in_groups = []
@@ -1934,7 +1934,7 @@
def purge_user( self, trans, **kwd ):
# This method should only be called for a User that has previously been deleted.
# We keep the User in the database ( marked as purged ), and stuff associated
- # with the user's private role in case we want the ability to unpurge the user
+ # with the user's private role in case we want the ability to unpurge the user
# some time in the future.
# Purging a deleted User deletes all of the following:
# - History where user_id = User.id
@@ -2158,7 +2158,7 @@
@web.expose
@web.require_admin
- def jobs( self, trans, stop = [], stop_msg = None, cutoff = 180, job_lock = None, **kwd ):
+ def jobs( self, trans, stop = [], stop_msg = None, cutoff = 180, job_lock = None, ajl_submit = None, **kwd ):
deleted = []
msg = None
status = None
@@ -2181,10 +2181,11 @@
msg += ' for deletion: '
msg += ', '.join( deleted )
status = 'done'
- if job_lock == 'lock':
- trans.app.job_manager.job_queue.job_lock = True
- elif job_lock == 'unlock':
- trans.app.job_manager.job_queue.job_lock = False
+ if ajl_submit:
+ if job_lock == 'on':
+ trans.app.job_manager.job_queue.job_lock = True
+ else:
+ trans.app.job_manager.job_queue.job_lock = False
cutoff_time = datetime.utcnow() - timedelta( seconds=int( cutoff ) )
jobs = trans.sa_session.query( trans.app.model.Job ) \
.filter( and_( trans.app.model.Job.table.c.update_time < cutoff_time,
@@ -2209,7 +2210,7 @@
job_lock = trans.app.job_manager.job_queue.job_lock )
## ---- Utility methods -------------------------------------------------------
-
+
def get_user( trans, id ):
"""Get a User from the database by id."""
# Load user from database
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -90,10 +90,14 @@
class DbKeyColumn( grids.GridColumn ):
""" Column for filtering by and displaying dataset dbkey. """
def filter( self, trans, user, query, dbkey ):
- """ Filter by dbkey. """
+ """ Filter by dbkey; datasets without a dbkey are returned as well. """
# use raw SQL b/c metadata is a BLOB
dbkey = dbkey.replace("'", "\\'")
- return query.filter( or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ) )
+ return query.filter( or_( \
+ or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ), \
+ or_( "metadata like '%%\"dbkey\": [\"?\"]%%'", "metadata like '%%\"dbkey\": \"?\"%%'" ) \
+ )
+ )
class HistoryColumn( grids.GridColumn ):
""" Column for filtering by history id. """
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -450,8 +450,30 @@
# Get the current repository tip.
tip = repository.tip
for selected_file in selected_files_to_delete:
- repo_file = os.path.abspath( selected_file )
- commands.remove( repo.ui, repo, repo_file, force=True )
+ try:
+ commands.remove( repo.ui, repo, repo_file, force=True )
+ except Exception, e:
+ # I never have a problem with commands.remove on a Mac, but in the test/production
+ # tool shed environment, it throws an exception whenever I delete all files from a
+ # repository. If this happens, we'll try the following.
+ relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+ repo.dirstate.remove( relative_selected_file )
+ repo.dirstate.write()
+ absolute_selected_file = os.path.abspath( selected_file )
+ if os.path.isdir( absolute_selected_file ):
+ try:
+ os.rmdir( absolute_selected_file )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ elif os.path.isfile( absolute_selected_file ):
+ os.remove( absolute_selected_file )
+ dir = os.path.split( absolute_selected_file )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty
+ pass
# Commit the change set.
if not commit_message:
commit_message = 'Deleted selected files'
@@ -913,14 +935,22 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository = get_repository( trans, repository_id )
- tool = load_tool( trans, os.path.abspath( tool_config ) )
- tool_state = self.__new_state( trans )
- return trans.fill_template( "/webapps/community/repository/tool_form.mako",
- repository=repository,
- tool=tool,
- tool_state=tool_state,
- message=message,
- status=status )
+ try:
+ tool = load_tool( trans, os.path.abspath( tool_config ) )
+ tool_state = self.__new_state( trans )
+ return trans.fill_template( "/webapps/community/repository/tool_form.mako",
+ repository=repository,
+ tool=tool,
+ tool_state=tool_state,
+ message=message,
+ status=status )
+ except Exception, e:
+ message = 'Error loading tool: %s. Click <b>Reset metadata</b> to correct this error.' % str( e )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=repository_id,
+ message=message,
+ status='error' ) )
def __new_state( self, trans, all_pages=False ):
"""
Create a new `DefaultToolState` for this tool. It will not be initialized
@@ -933,6 +963,27 @@
state.inputs = {}
return state
@web.expose
+ def view_tool_metadata( self, trans, repository_id, changeset_revision, tool_id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ repository = get_repository( trans, repository_id )
+ metadata = {}
+ tool = None
+ repository_metadata = get_repository_metadata( trans, repository_id, changeset_revision ).metadata
+ if 'tools' in repository_metadata:
+ for tool_metadata_dict in repository_metadata[ 'tools' ]:
+ if tool_metadata_dict[ 'id' ] == tool_id:
+ metadata = tool_metadata_dict
+ tool = load_tool( trans, os.path.abspath( metadata[ 'tool_config' ] ) )
+ break
+ return trans.fill_template( "/webapps/community/repository/view_tool_metadata.mako",
+ repository=repository,
+ tool=tool,
+ metadata=metadata,
+ message=message,
+ status=status )
+ @web.expose
def download( self, trans, repository_id, file_type, **kwd ):
# Download an archive of the repository files compressed as zip, gz or bz2.
params = util.Params( kwd )
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -182,7 +182,30 @@
for repo_file in files_to_remove:
# Remove files in the repository (relative to the upload point)
# that are not in the uploaded archive.
- commands.remove( repo.ui, repo, repo_file, force=True )
+ try:
+ commands.remove( repo.ui, repo, repo_file, force=True )
+ except Exception, e:
+ # I never have a problem with commands.remove on a Mac, but in the test/production
+ # tool shed environment, it throws an exception whenever I delete all files from a
+ # repository. If this happens, we'll try the following.
+ relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+ repo.dirstate.remove( relative_selected_file )
+ repo.dirstate.write()
+ absolute_selected_file = os.path.abspath( selected_file )
+ if os.path.isdir( absolute_selected_file ):
+ try:
+ os.rmdir( absolute_selected_file )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ elif os.path.isfile( absolute_selected_file ):
+ os.remove( absolute_selected_file )
+ dir = os.path.split( absolute_selected_file )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty
+ pass
for filename_in_archive in filenames_in_archive:
commands.add( repo.ui, repo, filename_in_archive )
if filename_in_archive.endswith( '.loc.sample' ):
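[editor's note] As committed, the two copies of this fallback use inconsistent names: in repository.py the `try` branch still references `repo_file` although the same hunk deletes its assignment, and the copy here in upload.py references `selected_file` where the loop variable is `repo_file`. A self-contained sketch of the evidently intended logic with one consistent name (hypothetical helper; requires Mercurial's Python API, as the controllers do):

    import os
    from mercurial import commands

    def remove_selected_file(repo, selected_file, repository_id):
        # Normal Mercurial removal first; on failure, fall back to editing
        # the dirstate and unlinking by hand (the workaround the comments
        # above describe for the test/production tool shed).
        try:
            commands.remove(repo.ui, repo, os.path.abspath(selected_file), force=True)
        except Exception:
            relative = selected_file.split('repo_%d' % repository_id)[1].lstrip('/')
            repo.dirstate.remove(relative)
            repo.dirstate.write()
            absolute = os.path.abspath(selected_file)
            if os.path.isdir(absolute):
                try:
                    os.rmdir(absolute)
                except OSError:
                    pass  # the directory is not empty
            elif os.path.isfile(absolute):
                os.remove(absolute)
                try:
                    os.rmdir(os.path.split(absolute)[0])
                except OSError:
                    pass  # the directory is not empty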
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -807,6 +807,9 @@
}
view.redraw();
},
+ /**
+ * Add a track to the view.
+ */
add_track: function(track) {
track.view = this;
track.track_id = this.track_id_counter;
@@ -822,6 +825,9 @@
label_track.view = this;
this.label_tracks.push(label_track);
},
+ /**
+ * Remove a track from the view.
+ */
remove_track: function(track) {
this.has_changes = true;
delete this.tracks[this.tracks.indexOf(track)];
@@ -1588,23 +1594,24 @@
/**
* Tiles drawn by tracks.
*/
-var Tile = function(index, resolution, canvas) {
+var Tile = function(index, resolution, canvas, data) {
this.index = index;
this.low = index * DENSITY * resolution;
this.high = (index + 1) * DENSITY * resolution;
this.resolution = resolution;
// Wrap element in div for background.
this.canvas = $("<div class='track-tile'/>").append(canvas);
+ this.data = data;
this.stale = false;
};
-var SummaryTreeTile = function(index, resolution, canvas, max_val) {
- Tile.call(this, index, resolution, canvas);
+var SummaryTreeTile = function(index, resolution, canvas, data, max_val) {
+ Tile.call(this, index, resolution, canvas, data);
this.max_val = max_val;
};
-var FeatureTrackTile = function(index, resolution, canvas, message) {
- Tile.call(this, index, resolution, canvas);
+var FeatureTrackTile = function(index, resolution, canvas, data, message) {
+ Tile.call(this, index, resolution, canvas, data);
this.message = message;
};
@@ -2096,13 +2103,17 @@
filters[f].update_ui_elt();
}
- // Determine if filters are available; this is based on the example feature.
- var filters_available = false;
- if (track.example_feature) {
- for (var f = 0; f < filters.length; f++) {
- if (filters[f].applies_to(track.example_feature)) {
- filters_available = true;
- break;
+ // Determine if filters are available; this is based on the tiles' data.
+ var filters_available = false,
+ example_feature;
+ for (var i = 0; i < drawn_tiles.length; i++) {
+ if (drawn_tiles[i].data.length) {
+ example_feature = drawn_tiles[i].data[0];
+ for (var f = 0; f < filters.length; f++) {
+ if (filters[f].applies_to(example_feature)) {
+ filters_available = true;
+ break;
+ }
}
}
}
@@ -2385,7 +2396,7 @@
var c_start = Math.round(c * w_scale);
ctx.fillText(seq[c], c_start + track.left_offset, 10);
}
- return new Tile(tile_index, resolution, canvas);
+ return new Tile(tile_index, resolution, canvas, seq);
}
this.content_div.css("height", "0px");
}
@@ -2523,7 +2534,7 @@
var painter = new painters.LinePainter(result.data, tile_low, tile_low + tile_length, this.prefs, this.mode);
painter.draw(ctx, width, height);
- return new Tile(tile_index, resolution, canvas);
+ return new Tile(tile_index, resolution, canvas, result.data);
}
});
@@ -2737,7 +2748,7 @@
// TODO: this shouldn't be done at the tile level
this.container_div.find(".yaxislabel").remove();
var max_label = $("<div />").addClass('yaxislabel');
- max_label.text( result.max );
+ max_label.text(result.max);
max_label.css({ position: "absolute", top: "24px", left: "10px", color: this.prefs.label_color });
max_label.prependTo(this.container_div);
// Create canvas
@@ -2760,7 +2771,7 @@
// Deal with left_offset by translating
ctx.translate(left_offset, SUMMARY_TREE_TOP_PADDING);
painter.draw(ctx, width, required_height);
- return new SummaryTreeTile(tile_index, resolution, canvas, result.max);
+ return new SummaryTreeTile(tile_index, resolution, canvas, result.data, result.max);
}
// Start dealing with row-by-row tracks
@@ -2811,16 +2822,12 @@
this.container_div.find(".yaxislabel").remove();
if (result.data) {
- // Set example feature. This is needed so that track can update its UI based on feature attributes.
- // TODO: use tile data rather than example feature?
- this.example_feature = (result.data.length ? result.data[0] : undefined);
-
// Draw features.
ctx.translate(left_offset, 0);
painter.draw(ctx, width, required_height, slots);
}
- return new FeatureTrackTile(tile_index, resolution, canvas, result.message);
+ return new FeatureTrackTile(tile_index, resolution, canvas, result.data, result.message);
}
});
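[editor's note] The trackster change threads the drawn data through to each Tile, so later UI decisions (here, whether any filter applies) inspect what was actually rendered instead of a single remembered `example_feature`. The pattern is language-agnostic; sketched in Python to match the rest of the codebase (all names illustrative, the JS above is authoritative):

    class Tile(object):
        # Keep the data used to draw the tile so the UI can re-derive
        # facts from it later, even after redraws.
        def __init__(self, index, resolution, canvas, data):
            self.index = index
            self.resolution = resolution
            self.canvas = canvas
            self.data = data

    def filters_available(drawn_tiles, filters):
        # Offer filters only if one applies to a drawn feature; like the
        # JS, test the first feature of each non-empty tile.
        for tile in drawn_tiles:
            if tile.data and any(f.applies_to(tile.data[0]) for f in filters):
                return True
        return False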
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d templates/admin/jobs.mako
--- a/templates/admin/jobs.mako
+++ b/templates/admin/jobs.mako
@@ -21,11 +21,11 @@
report this error".
</p>
-<form name="jobs" action="${h.url_for()}" method="POST"><p/>
%if jobs:
+<form name="jobs" action="${h.url_for()}" method="POST"><table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%"><tr class="header"><td> </td>
@@ -84,10 +84,12 @@
</div></div><p/>
+</form>
%else:
<div class="infomessage">There are no unfinished jobs to show with current cutoff time.</div><p/>
%endif
+<form name="jobs" action="${h.url_for()}" method="POST"><div class="toolForm"><div class="toolFormTitle">
Update Jobs
@@ -110,30 +112,33 @@
</div></div></div>
- <p/>
+</form>
+<form name="jobs" action="${h.url_for()}" method="POST">
+ <p/><div class="toolForm"><div class="toolFormTitle">
Administrative Job Lock
</div><div class="toolFormBody">
- %if job_lock==True:
<div class="form-row">
- <p>All job execution is currently locked. Click here to unlock.</p>
- <input type='hidden' name='job_lock' value='unlock'/>
+ <input type="hidden" name="ajl_submit" value="True"/>
+ %if job_lock==True:
+ <p>Job dispatching is currently <strong>locked</strong>.</p>
+ <label>
+ <input type='checkbox' name='job_lock' checked='checked' />
+ Prevent jobs from dispatching.
+ </label>
+ %else:
+ <p>Job dispatching is currently <strong>unlocked</strong>.</p>
+ <label>
+ <input type='checkbox' name='job_lock' />
+ Prevent jobs from dispatching.
+ </label>
+ %endif
</div><div class="form-row">
- <input type="submit" class="primary-button" name="submit" value="Unlock">
+ <input type="submit" class="primary-button" name="submit" value="Update"></div>
- %else:
- <div class="form-row">
- <p>To prevent new jobs from dispatching, you can lock the job queue here.</p>
- <input type='hidden' name='job_lock' value='lock'/>
- </div>
- <div class="form-row">
- <input type="submit" class="primary-button" name="submit" value="Lock">
- </div>
- %endif
</div></div>
-
</form>
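[editor's note] The reworked lock form pairs the checkbox with a hidden `ajl_submit` field because an unchecked HTML checkbox is simply absent from the POST: without the sentinel, the controller could not distinguish "form submitted with the box cleared" from "some other form on the page submitted". The controller side of this (from the jobs() change in lib/galaxy/web/base/controller.py earlier in this merge) reduces to a sketch like:

    def update_job_lock(job_queue, ajl_submit=None, job_lock=None):
        # Touch the lock only when the lock form itself was submitted;
        # a checked box arrives as 'on', an unchecked one not at all.
        if ajl_submit:
            job_queue.job_lock = (job_lock == 'on')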
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -3,6 +3,13 @@
<%def name="stylesheets()">
${h.css( "autocomplete_tagging", "panel_layout", "base", "library" )}
+ <style type="text/css">
+ body, html {
+ overflow: auto;
+ width: auto;
+ height: auto;
+ }
+ </style></%def><%def name="javascripts()">
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d templates/tracks/browser.mako
--- a/templates/tracks/browser.mako
+++ b/templates/tracks/browser.mako
@@ -273,7 +273,7 @@
}
// Add track.
- tracks.push( {
+ tracks.push({
"track_type": track.get_type(),
"name": track.name,
"hda_ldda": track.hda_ldda,
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d templates/visualization/display.mako
--- a/templates/visualization/display.mako
+++ b/templates/visualization/display.mako
@@ -122,6 +122,11 @@
// Keyboard navigation. Scroll ~7% of height when scrolling up/down.
//
$(document).keydown(function(e) {
+ // Do not navigate if arrow keys used in input element.
+ if ($(e.srcElement).is(':input')) {
+ return;
+ }
+
// Key codes: left == 37, up == 38, right == 39, down == 40
switch(e.which) {
case 37:
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d templates/webapps/community/repository/manage_repository.mako
--- a/templates/webapps/community/repository/manage_repository.mako
+++ b/templates/webapps/community/repository/manage_repository.mako
@@ -120,7 +120,7 @@
<div style="clear: both"></div></div><div class="form-row">
- <label>Version:</label>
+ <label>Revision:</label>
%if can_view_change_log:
<a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${repository.revision}</a>
%else:
@@ -151,34 +151,6 @@
</form></div></div>
-<p/>
-<div class="toolForm">
- <div class="toolFormTitle">Manage categories</div>
- <div class="toolFormBody">
- <form name="categories" id="categories" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
- <div class="form-row">
- <label>Categories</label>
- <select name="category_id" multiple>
- %for category in categories:
- %if category.id in selected_categories:
- <option value="${trans.security.encode_id( category.id )}" selected>${category.name}</option>
- %else:
- <option value="${trans.security.encode_id( category.id )}">${category.name}</option>
- %endif
- %endfor
- </select>
- <div class="toolParamHelp" style="clear: both;">
- Multi-select list - hold the appropriate key while clicking to select multiple categories.
- </div>
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <input type="submit" name="manage_categories_button" value="Save"/>
- </div>
- </form>
- </div>
-</div>
-<p/>
%if can_set_metadata:
<p/><div class="toolForm">
@@ -204,7 +176,16 @@
</tr>
%for tool_dict in tool_dicts:
<tr>
- <td><a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">${tool_dict[ 'name' ]}</a></td>
+ <td>
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${tool_dict[ 'id' ]}-popup">
+ <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">
+ ${tool_dict[ 'name' ]}
+ </a>
+ </div>
+ <div popupmenu="tool-${tool_dict[ 'id' ]}-popup">
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), changeset_revision=repository.tip, tool_id=tool_dict[ 'id' ] )}">View all metadata for this tool</a>
+ </div>
+ </td><td>${tool_dict[ 'description' ]}</td><td>${tool_dict[ 'version' ]}</td><td>
@@ -274,6 +255,33 @@
</div></div>
%endif
+<p/>
+<div class="toolForm">
+ <div class="toolFormTitle">Manage categories</div>
+ <div class="toolFormBody">
+ <form name="categories" id="categories" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+ <div class="form-row">
+ <label>Categories</label>
+ <select name="category_id" multiple>
+ %for category in categories:
+ %if category.id in selected_categories:
+ <option value="${trans.security.encode_id( category.id )}" selected>${category.name}</option>
+ %else:
+ <option value="${trans.security.encode_id( category.id )}">${category.name}</option>
+ %endif
+ %endfor
+ </select>
+ <div class="toolParamHelp" style="clear: both;">
+ Multi-select list - hold the appropriate key while clicking to select multiple categories.
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="manage_categories_button" value="Save"/>
+ </div>
+ </form>
+ </div>
+</div>
%if trans.app.config.smtp_server:
<p/><div class="toolForm">
@@ -330,8 +338,8 @@
</form></div></div>
-<p/>
%if repository.ratings:
+ <p/><div class="toolForm"><div class="toolFormTitle">Rating</div><div class="toolFormBody">
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d templates/webapps/community/repository/view_repository.mako
--- a/templates/webapps/community/repository/view_repository.mako
+++ b/templates/webapps/community/repository/view_repository.mako
@@ -118,7 +118,7 @@
</div>
%endif
<div class="form-row">
- <label>Version:</label>
+ <label>Revision:</label>
%if can_view_change_log:
<a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${repository.revision}</a>
%else:
@@ -145,20 +145,6 @@
%endif
</div></div>
-%if repository.categories:
- <p/>
- <div class="toolForm">
- <div class="toolFormTitle">Categories</div>
- <div class="toolFormBody">
- %for rca in repository.categories:
- <div class="form-row">
- ${rca.category.name}
- </div>
- %endfor
- <div style="clear: both"></div>
- </div>
- </div>
-%endif
%if metadata:
<p/><div class="toolForm">
@@ -183,9 +169,18 @@
</tr>
%for tool_dict in tool_dicts:
<tr>
- <td><a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">${tool_dict[ 'name' ]}</a></td>
+ <td>
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${repository.id}-popup">
+ <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">
+ ${tool_dict[ 'name' ]}
+ </a>
+ </div>
+ <div popupmenu="tool-${repository.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), changeset_revision=repository.tip, tool_id=tool_dict[ 'id' ] )}">View all metadata for this tool</a>
+ </div>
+ </td><td>${tool_dict[ 'description' ]}</td>
- <td>version: ${tool_dict[ 'version' ]}</td>
+ <td>${tool_dict[ 'version' ]}</td><td><%
if 'requirements' in tool_dict:
@@ -242,6 +237,20 @@
</div></div>
%endif
+%if repository.categories:
+ <p/>
+ <div class="toolForm">
+ <div class="toolFormTitle">Categories</div>
+ <div class="toolFormBody">
+ %for rca in repository.categories:
+ <div class="form-row">
+ ${rca.category.name}
+ </div>
+ %endfor
+ <div style="clear: both"></div>
+ </div>
+ </div>
+%endif
%if trans.user and trans.app.config.smtp_server:
<p/><div class="toolForm">
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d templates/webapps/community/repository/view_tool_metadata.mako
--- /dev/null
+++ b/templates/webapps/community/repository/view_tool_metadata.mako
@@ -0,0 +1,202 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/community/common/common.mako" import="*" />
+<%namespace file="/webapps/community/repository/common.mako" import="*" />
+
+<%
+ from galaxy.web.framework.helpers import time_ago
+ from urllib import quote_plus
+ is_admin = trans.user_is_admin()
+ is_new = repository.is_new
+ can_push = trans.app.security_agent.can_push( trans.user, repository )
+ can_upload = can_push
+ can_browse_contents = not is_new
+ can_rate = repository.user != trans.user
+ can_manage = is_admin or repository.user == trans.user
+ can_view_change_log = not is_new
+ if can_push:
+ browse_label = 'Browse or delete repository files'
+ else:
+ browse_label = 'Browse repository files'
+%>
+
+<%!
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/webapps/community/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<br/><br/>
+<ul class="manage-table-actions">
+ %if is_new:
+ <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ), webapp='community' )}">Upload files to repository</a>
+ %else:
+ <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
+ <div popupmenu="repository-${repository.id}-popup">
+ %if can_manage:
+ <a class="action-button" href="${h.url_for( controller='repository', action='manage_repository', id=trans.app.security.encode_id( repository.id ) )}">Manage repository</a>
+ %else:
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_repository', id=trans.app.security.encode_id( repository.id ) )}">View repository</a>
+ %endif
+ %if can_upload:
+ <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ), webapp='community' )}">Upload files to repository</a>
+ %endif
+ %if can_view_change_log:
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">View change log</a>
+ %endif
+ %if can_browse_contents:
+ <a class="action-button" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${browse_label}</a>
+ %endif
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='gz' )}">Download as a .tar.gz file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='bz2' )}">Download as a .tar.bz2 file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), file_type='zip' )}">Download as a zip file</a>
+ </div>
+ %endif
+</ul>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">${repository.name}</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <label>Clone this repository:</label>
+ ${render_clone_str( repository )}
+ </div>
+ </div>
+</div>
+%if metadata:
+## "{"tools":
+## [{"description": "data on any column using simple expressions",
+## "id": "Filter1",
+## "name": "Filter",
+## "requirements": [],
+## "tests": [{
+## "inputs": [["input", "1.bed", {"children": [], "value": "1.bed"}], ["cond", "c1=='chr22'", {"children": [], "value": "c1=='chr22'"}]], "name": "Test-1",
+## "outputs": [["out_file1", "filter1_test1.bed", {"compare": "diff", "delta": 10000, "extra_files": [], "lines_diff": 0, "sort": false}]],
+## "required_files": [["1.bed", {"children": [], "value": "1.bed"}]]}, {"inputs": [["input", "7.bed", {"children": [], "value": "7.bed"}], ["cond", "c1=='chr1' and c3-c2>=2000 and c6=='+'", {"children": [], "value": "c1=='chr1' and c3-c2>=2000 and c6=='+'"}]], "name": "Test-2", "outputs": [["out_file1", "filter1_test2.bed", {"compare": "diff", "delta": 10000, "extra_files": [], "lines_diff": 0, "sort": false}]], "required_files": [["7.bed", {"children": [], "value": "7.bed"}]]}], "tool_config": "database/community_files/000/repo_1/filtering.xml", "version": "1.0.1", "version_string_cmd": null}], "workflows": [{"a_galaxy_workflow": "true", "annotation": "", "format-version": "0.1", "name": "Workflow constructed from history 'Unnamed history'", "steps": {"0": {"annotation": "", "id": 0, "input_connections": {}, "inputs": [{"description": "", "name": "Input Dataset"}], "name": "Input dataset", "outputs": [], "position": {"left": 10, "top": 10}, "tool_errors": null, "tool_id": null, "tool_state": "{\\"name\\": \\"Input Dataset\\"}", "tool_version": null, "type": "data_input", "user_outputs": []}, "1": {"annotation": "", "id": 1, "input_connections": {"input": {"id": 0, "output_name": "output"}}, "inputs": [], "name": "Filter", "outputs": [{"name": "out_file1", "type": "input"}], "position": {"left": 230, "top": 10}, "post_job_actions": {}, "tool_errors": null, "tool_id": "Filter1", "tool_state": "{\\"__page__\\": 0, \\"cond\\": \\"\\\\\\"c1=='chr1'\\\\\\"\\", \\"chromInfo\\": \\"\\\\\\"/Users/gvk/workspaces_2008/central_051111/tool-data/shared/ucsc/chrom/?.len\\\\\\"\\", \\"input\\": \\"null\\"}", "tool_version": null, "type": "tool", "user_outputs": []}, "2": {"annotation": "", "id": 2, "input_connections": {"input1": {"id": 0, "output_name": "output"}, "input2": {"id": 1, "output_name": "out_file1"}}, "inputs": [], "name": "Subtract Whole Dataset", "outputs": [{"name": "output", "type": "input"}], "position": {"left": 450, "top": 10}, "post_job_actions": {}, "tool_errors": null, "tool_id": "subtract_query1", "tool_state": "{\\"input2\\": \\"null\\", \\"__page__\\": 0, \\"end_col\\": \\"{\\\\\\"__class__\\\\\\": \\\\\\"UnvalidatedValue\\\\\\", \\\\\\"value\\\\\\": \\\\\\"None\\\\\\"}\\", \\"begin_col\\": \\"{\\\\\\"__class__\\\\\\": \\\\\\"UnvalidatedValue\\\\\\", \\\\\\"value\\\\\\": \\\\\\"None\\\\\\"}\\", \\"input1\\": \\"null\\", \\"chromInfo\\": \\"\\\\\\"/Users/gvk/workspaces_2008/central_051111/tool-data/shared/ucsc/chrom/?.len\\\\\\"\\"}", "tool_version": null, "type": "tool", "user_outputs": []}}}]}"
+ <p/>
+ <div class="toolForm">
+ <div class="toolFormTitle">${metadata[ 'name' ]} tool metadata</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <label>Name:</label>
+ <a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=metadata[ 'tool_config' ] )}">${metadata[ 'name' ]}</a>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Description:</label>
+ ${metadata[ 'description' ]}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Id:</label>
+ ${metadata[ 'id' ]}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Version:</label>
+ ${metadata[ 'version' ]}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Version command string:</label>
+ ${metadata[ 'version_string_cmd' ]}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Command:</label>
+ ${tool.command}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Interpreter:</label>
+ ${tool.interpreter}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Is multi-byte:</label>
+ ${tool.is_multi_byte}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Forces a history refresh:</label>
+ ${tool.force_history_refresh}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Parallelism:</label>
+ ${tool.parallelism}
+ <div style="clear: both"></div>
+ </div>
+ <%
+ if 'requirements' in metadata:
+ requirements = metadata[ 'requirements' ]
+ else:
+ requirements = None
+ %>
+ %if requirements:
+ <%
+ requirements_str = ''
+ for requirement_dict in metadata[ 'requirements' ]:
+ requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
+ requirements_str = requirements_str.rstrip( ', ' )
+ %>
+ <div class="form-row">
+ <label>Requirements:</label>
+ ${requirements_str}
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ <%
+ if 'tests' in metadata:
+ tests = metadata[ 'tests' ]
+ else:
+ tests = None
+ %>
+ %if tests:
+ <div class="form-row">
+ <label>Functional tests:</label></td>
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>inputs</b></td>
+ <td><b>outputs</b></td>
+ <td><b>required files</b></td>
+ </tr>
+ %for test_dict in tests:
+ <%
+ inputs = test_dict[ 'inputs' ]
+ outputs = test_dict[ 'outputs' ]
+ required_files = test_dict[ 'required_files' ]
+ %>
+ <tr>
+ <td>${test_dict[ 'name' ]}</td>
+ <td>
+ %for input in inputs:
+ <b>${input[0]}:</b> ${input[1]}<br/>
+ %endfor
+ </td>
+ <td>
+ %for output in outputs:
+ <b>${output[0]}:</b> ${output[1]}<br/>
+ %endfor
+ </td>
+ <td>
+ %for required_file in required_files:
+ ${required_file[0]}<br/>
+ %endfor
+ </td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ %endif
+ </div>
+ </div>
+%endif
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d templates/workflow/editor.mako
--- a/templates/workflow/editor.mako
+++ b/templates/workflow/editor.mako
@@ -931,9 +931,9 @@
</div></%def>
-<%def name="overlay()">
+<%def name="overlay(visible=False)">
${parent.overlay( "Loading workflow editor...",
- "<img src='" + h.url_for('/static/images/yui/rel_interstitial_loading.gif') + "'/>" )}
+ "<img src='" + h.url_for('/static/images/yui/rel_interstitial_loading.gif') + "'/>", self.overlay_visible )}
</%def><%def name="left_panel()">
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -51,7 +51,7 @@
<tool file="fasta_tools/fasta_to_tabular.xml" /><tool file="filters/gff2bed.xml" /><tool file="maf/maf_to_bed.xml" />
- <tool file="maf/maf_to_interval.xml" />
+ <tool file="maf/maf_to_interval.xml" /><tool file="maf/maf_to_fasta.xml" /><tool file="fasta_tools/tabular_to_fasta.xml" /><tool file="fastq/fastq_to_fasta.xml" />
@@ -78,13 +78,13 @@
<tool file="filters/gff/extract_GFF_Features.xml" /><tool file="filters/gff/gff_filter_by_attribute.xml" /><tool file="filters/gff/gff_filter_by_feature_count.xml" />
- <tool file="filters/gff/gtf_filter_by_attribute_values_list.xml" />
+ <tool file="filters/gff/gtf_filter_by_attribute_values_list.xml" /></section><section name="Join, Subtract and Group" id="group"><tool file="filters/joiner.xml" /><tool file="filters/compare.xml"/><tool file="new_operations/subtract_query.xml"/>
- <tool file="stats/grouping.xml" />
+ <tool file="stats/grouping.xml" /><tool file="new_operations/column_join.xml"/></section><section name="Extract Features" id="features">
@@ -112,7 +112,7 @@
<tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section name="Operate on Genomic Intervals" id="bxops">
- <tool file="new_operations/intersect.xml" />
+ <tool file="new_operations/intersect.xml" /><tool file="new_operations/subtract.xml" /><tool file="new_operations/merge.xml" /><tool file="new_operations/concat.xml" />
@@ -127,7 +127,7 @@
</section><section name="Statistics" id="stats"><tool file="stats/gsummary.xml" />
- <tool file="filters/uniq.xml" />
+ <tool file="filters/uniq.xml" /><tool file="stats/cor.xml" /><tool file="stats/generate_matrix_for_pca_lda.xml" /><tool file="stats/lda_analy.xml" />
@@ -223,13 +223,13 @@
<tool file="emboss_5/emboss_chips.xml" /><tool file="emboss_5/emboss_cirdna.xml" /><tool file="emboss_5/emboss_codcmp.xml" />
- <tool file="emboss_5/emboss_coderet.xml" />
+ <tool file="emboss_5/emboss_coderet.xml" /><tool file="emboss_5/emboss_compseq.xml" />
- <tool file="emboss_5/emboss_cpgplot.xml" />
+ <tool file="emboss_5/emboss_cpgplot.xml" /><tool file="emboss_5/emboss_cpgreport.xml" /><tool file="emboss_5/emboss_cusp.xml" /><tool file="emboss_5/emboss_cutseq.xml" />
- <tool file="emboss_5/emboss_dan.xml" />
+ <tool file="emboss_5/emboss_dan.xml" /><tool file="emboss_5/emboss_degapseq.xml" /><tool file="emboss_5/emboss_descseq.xml" /><tool file="emboss_5/emboss_diffseq.xml" />
@@ -245,7 +245,7 @@
<tool file="emboss_5/emboss_etandem.xml" /><tool file="emboss_5/emboss_extractfeat.xml" /><tool file="emboss_5/emboss_extractseq.xml" />
- <tool file="emboss_5/emboss_freak.xml" />
+ <tool file="emboss_5/emboss_freak.xml" /><tool file="emboss_5/emboss_fuzznuc.xml" /><tool file="emboss_5/emboss_fuzzpro.xml" /><tool file="emboss_5/emboss_fuzztran.xml" />
@@ -266,7 +266,7 @@
<tool file="emboss_5/emboss_merger.xml" /><tool file="emboss_5/emboss_msbar.xml" /><tool file="emboss_5/emboss_needle.xml" />
- <tool file="emboss_5/emboss_newcpgreport.xml" />
+ <tool file="emboss_5/emboss_newcpgreport.xml" /><tool file="emboss_5/emboss_newcpgseek.xml" /><tool file="emboss_5/emboss_newseq.xml" /><tool file="emboss_5/emboss_noreturn.xml" />
@@ -294,7 +294,7 @@
<tool file="emboss_5/emboss_revseq.xml" /><tool file="emboss_5/emboss_seqmatchall.xml" /><tool file="emboss_5/emboss_seqret.xml" />
- <tool file="emboss_5/emboss_showfeat.xml" />
+ <tool file="emboss_5/emboss_showfeat.xml" /><tool file="emboss_5/emboss_shuffleseq.xml" /><tool file="emboss_5/emboss_sigcleave.xml" /><tool file="emboss_5/emboss_sirna.xml" />
@@ -316,7 +316,7 @@
<tool file="emboss_5/emboss_water.xml" /><tool file="emboss_5/emboss_wobble.xml" /><tool file="emboss_5/emboss_wordcount.xml" />
- <tool file="emboss_5/emboss_wordmatch.xml" />
+ <tool file="emboss_5/emboss_wordmatch.xml" /></section><label text="NGS Toolbox Beta" id="ngs" /><section name="NGS: QC and manipulation" id="cshl_library_information">
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -144,14 +144,14 @@
<tool file="regVariation/t_test_two_samples.xml" /><tool file="regVariation/compute_q_values.xml" /><label text="GFF" id="gff" />
- <tool file="stats/count_gff_features.xml" />
+ <tool file="stats/count_gff_features.xml" /></section><!--
Keep this section commented until all of the tools have functional tests
<section name="Wavelet Analysis" id="dwt"><tool file="discreteWavelet/execute_dwt_IvC_all.xml" /><tool file="discreteWavelet/execute_dwt_cor_aVa_perClass.xml" />
- <tool file="discreteWavelet/execute_dwt_cor_aVb_all.xml" />
+ <tool file="discreteWavelet/execute_dwt_cor_aVb_all.xml" /><tool file="discreteWavelet/execute_dwt_var_perClass.xml" /></section>
-->
@@ -184,8 +184,8 @@
<tool file="regVariation/compute_motif_frequencies_for_all_motifs.xml" /><tool file="regVariation/categorize_elements_satisfying_criteria.xml" />s
<tool file="regVariation/draw_stacked_barplots.xml" />
- <tool file="regVariation/multispecies_MicrosatDataGenerator_interrupted_GALAXY.xml" />
- <tool file="regVariation/microsatellite_birthdeath.xml" />
+ <tool file="regVariation/multispecies_MicrosatDataGenerator_interrupted_GALAXY.xml" />
+ <tool file="regVariation/microsatellite_birthdeath.xml" /></section><section name="Multiple regression" id="multReg"><tool file="regVariation/linear_regression.xml" />
@@ -241,7 +241,7 @@
</section><section name="NGS: QC and manipulation" id="NGS_QC"><label text="FastQC: fastq/sam/bam" id="fastqcsambam" />
- <tool file="rgenetics/rgFastQC.xml" />
+ <tool file="rgenetics/rgFastQC.xml" /><label text="Illumina fastq" id="illumina" /><tool file="fastq/fastq_groomer.xml" /><tool file="fastq/fastq_paired_end_splitter.xml" />
@@ -280,21 +280,21 @@
<tool file="fastx_toolkit/fastx_collapser.xml" /><tool file="fastx_toolkit/fastx_renamer.xml" /><tool file="fastx_toolkit/fastx_reverse_complement.xml" />
- <tool file="fastx_toolkit/fastx_trimmer.xml" />
+ <tool file="fastx_toolkit/fastx_trimmer.xml" /></section><section name="NGS: Picard (beta)" id="picard_beta"><label text="QC/Metrics for sam/bam" id="qcsambam"/><tool file="picard/picard_BamIndexStats.xml" />
- <tool file="picard/rgPicardASMetrics.xml" />
- <tool file="picard/rgPicardGCBiasMetrics.xml" />
- <tool file="picard/rgPicardLibComplexity.xml" />
+ <tool file="picard/rgPicardASMetrics.xml" />
+ <tool file="picard/rgPicardGCBiasMetrics.xml" />
+ <tool file="picard/rgPicardLibComplexity.xml" /><tool file="picard/rgPicardInsertSize.xml" /><tool file="picard/rgPicardHsMetrics.xml" /><label text="bam/sam Cleaning" id="picard-clean" /><tool file="picard/picard_AddOrReplaceReadGroups.xml" /><tool file="picard/picard_ReorderSam.xml" /><tool file="picard/picard_ReplaceSamHeader.xml" />
- <tool file="picard/rgPicardFixMate.xml" />
+ <tool file="picard/rgPicardFixMate.xml" /><tool file="picard/rgPicardMarkDups.xml" /></section><!--
diff -r f2878e4d9e0e551da8f9719b18923ebefe5d41b4 -r 855a749b450408708bfe24848a28922059b23d0d tool_list.py
--- a/tool_list.py
+++ b/tool_list.py
@@ -4,19 +4,19 @@
onoff = 1
tool_list = []
for line in open("tool_conf.xml.sample", "r"):
- if line.find("<!--") != -1:
+ if line.find("<!--") != -1:
onoff = 0
- if line.find("file") != -1 and onoff==1:
- strs = line.split('\"')
+ if line.find("file") != -1 and onoff==1:
+ strs = line.split('\"')
tool_list.append(strs[1])
- if line.find("<section") != -1 and onoff==1:
+ if line.find("<section") != -1 and onoff==1:
keys = line.strip().split('\"')
n = 0
strtmp = "section::"
- while n < len(keys) :
- if keys[n].find("id") != -1 : strtmp = strtmp + keys[n+1]
- if keys[n].find("name") != -1 : strtmp = strtmp + keys[n+1] + "-"
- n = n + 1
+ while n < len(keys) :
+ if keys[n].find("id") != -1 : strtmp = strtmp + keys[n+1]
+ if keys[n].find("name") != -1 : strtmp = strtmp + keys[n+1] + "-"
+ n = n + 1
tool_list.append(strtmp.replace(' ', '_'))
if line.find("-->") != -1:
onoff =1
@@ -26,42 +26,42 @@
id = []
desc = []
tool_infos = []
-for tool in tool_list :
- if tool.find("section")!=-1 :
+for tool in tool_list :
+ if tool.find("section")!=-1 :
tool_info = dict()
tool_info["id"] = tool
tool_infos.append(tool_info)
- if os.path.exists("tools/"+tool) :
- for line in open("tools/"+tool) :
- if line.find("<tool ") != -1 and line.find("id") != -1 :
- keys = line.strip().split('\"')
- n = 0
- tool_info = dict()
- tool_info["desc"] = ''
- while n < len(keys) :
- if keys[n].find("id") != -1 : tool_info["id"] = keys[n+1].replace(' ', '_')
- if keys[n].find("name") != -1 : tool_info["name"] = keys[n+1]
- if keys[n].find("description") != -1 : tool_info["desc"] = keys[n+1]
- n = n + 1
- tool_infos.append(tool_info)
- break
+ if os.path.exists("tools/"+tool) :
+ for line in open("tools/"+tool) :
+ if line.find("<tool ") != -1 and line.find("id") != -1 :
+ keys = line.strip().split('\"')
+ n = 0
+ tool_info = dict()
+ tool_info["desc"] = ''
+ while n < len(keys) :
+ if keys[n].find("id") != -1 : tool_info["id"] = keys[n+1].replace(' ', '_')
+ if keys[n].find("name") != -1 : tool_info["name"] = keys[n+1]
+ if keys[n].find("description") != -1 : tool_info["desc"] = keys[n+1]
+ n = n + 1
+ tool_infos.append(tool_info)
+ break
flag=0
-if len(sys.argv) == 1 :
- for tool_info in tool_infos:
- if tool_info["id"].find("section") != -1 :
+if len(sys.argv) == 1 :
+ for tool_info in tool_infos:
+ if tool_info["id"].find("section") != -1 :
print "==========================================================================================================================================="
print "%-45s\t%-40s\t%s" % ("id", "name", tool_info["id"])
print "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"
else :
print "%-45s\t%-40s" % (tool_info["id"], tool_info["name"])
-else:
- for tool_info in tool_infos:
+else:
+ for tool_info in tool_infos:
if tool_info["id"].find("section") != -1 :
flag=0
elif flag==1:
print " functional.test_toolbox:TestForTool_%s" % tool_info["id"],
- if tool_info["id"].replace('section::', '')==sys.argv[1]:
+ if tool_info["id"].replace('section::', '')==sys.argv[1]:
flag=1
#for key in tool_infos.keys():
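The cleanup above is whitespace-only; tool_list.py still scrapes tool_conf.xml.sample by splitting lines on double quotes and hand-tracking comment markers. For comparison, a minimal sketch of the same section/tool extraction done with a real XML parser (an editorial illustration, not part of this changeset; xml.etree ships with Python 2.5+ and skips commented-out blocks automatically):

import xml.etree.ElementTree as ET

tool_list = []
root = ET.parse("tool_conf.xml.sample").getroot()
for section in root.findall("section"):
    # Mirror the "section::Name-id" records the script builds.
    label = "section::%s-%s" % (section.get("name", ""), section.get("id", ""))
    tool_list.append(label.replace(' ', '_'))
    for tool in section.findall("tool"):
        tool_list.append(tool.get("file"))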
https://bitbucket.org/galaxy/galaxy-central/changeset/fd17baeca99a/
changeset: fd17baeca99a
user: afgane
date: 2011-07-25 20:19:51
summary: Fix for permission setting on files downloaded from S3 into cache
affected #: 1 file
diff -r 855a749b450408708bfe24848a28922059b23d0d -r fd17baeca99a5e7b8b7474e675fe5b984e555d88 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -442,12 +442,12 @@
def _fix_permissions(self, rel_path):
""" Set permissions on rel_path"""
- for basedir, dirs, files in os.walk( rel_path ):
- util.umask_fix_perms( basedir, self.app.config.umask, 0777, self.app.config.gid )
+ for basedir, dirs, files in os.walk(rel_path):
+ util.umask_fix_perms(basedir, self.app.config.umask, 0777, self.app.config.gid)
for f in files:
- path = os.path.join( basedir, f )
+ path = os.path.join(basedir, f)
# Ignore symlinks
- if os.path.islink( path ):
+ if os.path.islink(path):
continue
util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
@@ -545,7 +545,7 @@
os.makedirs(self._get_cache_path(rel_path_dir))
# Now pull in the file
ok = self._download(rel_path)
- self._fix_permissions(rel_path)
+ self._fix_permissions(self._get_cache_path(rel_path_dir))
return ok
def _transfer_cb(self, complete, total):
@@ -569,14 +569,14 @@
if ret_code == 127:
self.transfer_progress = 0 # Reset transfer progress counter
key.get_contents_to_filename(self._get_cache_path(rel_path), cb=self._transfer_cb, num_cb=10)
- print "(ssss) Pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
+ print "(ssss1) Pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
return True
else:
ncores = multiprocessing.cpu_count()
url = key.generate_url(7200)
ret_code = subprocess.call("axel -a -n %s '%s'" % (ncores, url))
if ret_code == 0:
- print "(ssss) Parallel pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
+ print "(ssss2) Parallel pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
return True
except S3ResponseError, ex:
log.error("Problem downloading key '%s' from S3 bucket '%s': %s" % (rel_path, self.bucket.name, ex))
https://bitbucket.org/galaxy/galaxy-central/changeset/626808a6bd50/
changeset: 626808a6bd50
user: afgane
date: 2011-08-01 23:35:26
summary: Metadata is now being handled by the ObjectStore as well
affected #: 8 files
diff -r fd17baeca99a5e7b8b7474e675fe5b984e555d88 -r 626808a6bd5044b8f0c5688c93549093d62bae56 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -32,7 +32,7 @@
from galaxy.model.migrate.check import create_or_verify_database
create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options )
# Object store manager
- self.object_store = build_object_store_from_config(self)
+ self.object_store = build_object_store_from_config(self.config)
# Setup the database engine and ORM
from galaxy.model import mapping
self.model = mapping.init( self.config.file_path,
@@ -94,5 +94,6 @@
def shutdown( self ):
self.job_manager.shutdown()
+ self.object_store.shutdown()
if self.heartbeat:
self.heartbeat.shutdown()
diff -r fd17baeca99a5e7b8b7474e675fe5b984e555d88 -r 626808a6bd5044b8f0c5688c93549093d62bae56 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -411,6 +411,7 @@
mf = galaxy.model.MetadataFile()
mf.id = value #we assume this is a valid id, since we cannot check it
return mf
+
def make_copy( self, value, target_context, source_context ):
value = self.wrap( value )
if value:
@@ -438,8 +439,11 @@
if mf is None:
mf = self.new_file( dataset = parent, **value.kwds )
shutil.move( value.file_name, mf.file_name )
+ # Ensure the metadata file gets updated with content
+ parent.dataset.object_store.update_from_file( parent.dataset.id, file_name=mf.file_name, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name=os.path.basename(mf.file_name) )
value = mf.id
return value
+
def to_external_value( self, value ):
"""
Turns a value read from a metadata into its value to be pushed directly into the external dict.
@@ -461,7 +465,7 @@
#we will be copying its contents into the MetadataFile objects filename after restoring from JSON
#we do not include 'dataset' in the kwds passed, as from_JSON_value() will handle this for us
return MetadataTempFile( **kwds )
-
+
#This class is used when a database file connection is not available
class MetadataTempFile( object ):
tmp_dir = 'database/tmp' #this should be overwritten as necessary in calling scripts
diff -r fd17baeca99a5e7b8b7474e675fe5b984e555d88 -r 626808a6bd5044b8f0c5688c93549093d62bae56 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -560,7 +560,7 @@
dataset.set_size()
# Update (non-library) job output datasets through the object store
if dataset not in job.output_library_datasets:
- print "===+=== Handing dataset '%s' with name '%s' to object store" % (dataset.id, dataset.file_name)
+ print "===++=== Job finish method handing dataset '%s' to object store" % dataset.file_name
self.app.object_store.update_from_file(dataset.id, create=True)
if context['stderr']:
dataset.blurb = "error"
diff -r fd17baeca99a5e7b8b7474e675fe5b984e555d88 -r 626808a6bd5044b8f0c5688c93549093d62bae56 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -118,7 +118,7 @@
preexec_fn = os.setpgrp )
job_wrapper.external_output_metadata.set_job_runner_external_pid( external_metadata_proc.pid, self.sa_session )
external_metadata_proc.wait()
- log.debug( 'execution of external set_meta finished for job %d' % job_wrapper.job_id )
+ log.debug( 'execution of external set_meta for job %d finished' % job_wrapper.job_id )
# Finish the job
try:
diff -r fd17baeca99a5e7b8b7474e675fe5b984e555d88 -r 626808a6bd5044b8f0c5688c93549093d62bae56 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -533,13 +533,12 @@
self.external_filename = external_filename
self._extra_files_path = extra_files_path
self.file_size = file_size
+
def get_file_name( self ):
if not self.external_filename:
assert self.id is not None, "ID must be set before filename used (commit the object)"
assert self.object_store is not None, "Object Store has not been initialized for dataset %s" % self.id
- print "Calling get_filename 1", self.object_store
filename = self.object_store.get_filename( self.id )
- # print 'getting filename: ', filename
if not self.object_store.exists( self.id ):
# Create directory if it does not exist
self.object_store.create( self.id, dir_only=True )
@@ -556,7 +555,6 @@
file_name = property( get_file_name, set_file_name )
@property
def extra_files_path( self ):
- print "Calling get_filename 2", self.object_store
return self.object_store.get_filename( self.id, dir_only=True, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id)
def get_size( self, nice_size=False ):
"""Returns the size of the data on disk"""
@@ -1583,15 +1581,8 @@
assert self.id is not None, "ID must be set before filename used (commit the object)"
# Ensure the directory structure and the metadata file object exist
try:
- # self.history_dataset
- # print "Dataset.file_path: %s, self.id: %s, self.history_dataset.dataset.object_store: %s" \
- # % (Dataset.file_path, self.id, self.history_dataset.dataset.object_store)
self.history_dataset.dataset.object_store.create( self.id, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name="metadata_%d.dat" % self.id )
- print "Calling get_filename 3", self.object_store
path = self.history_dataset.dataset.object_store.get_filename( self.id, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name="metadata_%d.dat" % self.id )
- print "Created metadata file at path: %s" % path
- self.library_dataset
- # raise
return path
except AttributeError:
# In case we're not working with the history_dataset
diff -r fd17baeca99a5e7b8b7474e675fe5b984e555d88 -r 626808a6bd5044b8f0c5688c93549093d62bae56 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -179,9 +179,9 @@
Standard Galaxy object store, stores objects in files under a specific
directory on disk.
"""
- def __init__(self, app):
+ def __init__(self, config):
super(DiskObjectStore, self).__init__()
- self.file_path = app.config.file_path
+ self.file_path = config.file_path
def _get_filename(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""Class method that returns the absolute path for the file corresponding
@@ -344,14 +344,14 @@
cache exists that is used as an intermediate location for files between
Galaxy and S3.
"""
- def __init__(self, app):
+ def __init__(self, config):
super(S3ObjectStore, self).__init__()
- self.app = app
- self.staging_path = self.app.config.file_path
+ self.config = config
+ self.staging_path = self.config.file_path
self.s3_conn = S3Connection()
- self.bucket = self._get_bucket(self.app.config.s3_bucket)
- self.use_rr = self.app.config.use_reduced_redundancy
- self.cache_size = self.app.config.object_store_cache_size * 1073741824 # Convert GBs to bytes
+ self.bucket = self._get_bucket(self.config.s3_bucket)
+ self.use_rr = self.config.use_reduced_redundancy
+ self.cache_size = self.config.object_store_cache_size * 1073741824 # Convert GBs to bytes
self.transfer_progress = 0
# Clean cache only if value is set in universe_wsgi.ini
if self.cache_size != -1:
@@ -443,13 +443,13 @@
def _fix_permissions(self, rel_path):
""" Set permissions on rel_path"""
for basedir, dirs, files in os.walk(rel_path):
- util.umask_fix_perms(basedir, self.app.config.umask, 0777, self.app.config.gid)
+ util.umask_fix_perms(basedir, self.config.umask, 0777, self.config.gid)
for f in files:
path = os.path.join(basedir, f)
# Ignore symlinks
if os.path.islink(path):
continue
- util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
+ util.umask_fix_perms( path, self.config.umask, 0666, self.config.gid )
def _construct_path(self, dataset_id, dir_only=None, extra_dir=None, extra_dir_at_root=False, alt_name=None):
rel_path = os.path.join(*directory_hash_id(dataset_id))
@@ -594,12 +594,16 @@
source_file = source_file if source_file else self._get_cache_path(rel_path)
if os.path.exists(source_file):
key = Key(self.bucket, rel_path)
+ if os.path.getsize(source_file) == 0 and key.exists():
+ log.debug("Wanted to push file '%s' to S3 key '%s' but its size is 0; skipping." % (source_file, rel_path))
+ return True
if from_string:
key.set_contents_from_string(from_string, reduced_redundancy=self.use_rr)
log.debug("Pushed data from string '%s' to key '%s'" % (from_string, rel_path))
else:
start_time = datetime.now()
- print "[%s] Pushing cache file '%s' to key '%s'" % (start_time, source_file, rel_path)
+ # print "Pushing cache file '%s' of size %s bytes to key '%s'" % (source_file, os.path.getsize(source_file), rel_path)
+ # print "+ Push started at '%s'" % start_time
mb_size = os.path.getsize(source_file) / 1e6
if mb_size < 60:
self.transfer_progress = 0 # Reset transfer progress counter
@@ -607,10 +611,9 @@
cb=self._transfer_cb, num_cb=10)
else:
multipart_upload(self.bucket, key.name, source_file, mb_size, use_rr=self.use_rr)
- # self._multipart_upload(key.name, source_file, mb_size)
end_time = datetime.now()
- print "Push ended at '%s'; it lasted '%s'" % (end_time, end_time-start_time)
- log.debug("Pushed cache file '%s' to key '%s'" % (source_file, rel_path))
+ # print "+ Push ended at '%s'; %s bytes transfered in %ssec" % (end_time, os.path.getsize(source_file), end_time-start_time)
+ log.debug("Pushed cache file '%s' to key '%s' (%s bytes transfered in %s sec)" % (source_file, rel_path, os.path.getsize(source_file), end_time-start_time))
return True
else:
log.error("Tried updating key '%s' from source file '%s', but source file does not exist."
@@ -788,8 +791,9 @@
# Copy into cache
cache_file = self._get_cache_path(rel_path)
try:
- # FIXME? Should this be a `move`?
- shutil.copy2(source_file, cache_file)
+ if source_file != cache_file:
+ # FIXME? Should this be a `move`?
+ shutil.copy2(source_file, cache_file)
self._fix_permissions(cache_file)
except OSError, ex:
log.error("Trouble copying source file '%s' to cache '%s': %s" % (source_file, cache_file, ex))
@@ -823,16 +827,16 @@
super(HierarchicalObjectStore, self).__init__()
-def build_object_store_from_config(app):
+def build_object_store_from_config(config):
""" Depending on the configuration setting, invoke the appropriate object store
"""
- store = app.config.object_store
+ store = config.object_store
if store == 'disk':
- return DiskObjectStore(app=app)
+ return DiskObjectStore(config=config)
elif store == 's3':
- os.environ['AWS_ACCESS_KEY_ID'] = app.config.aws_access_key
- os.environ['AWS_SECRET_ACCESS_KEY'] = app.config.aws_secret_key
- return S3ObjectStore(app=app)
+ os.environ['AWS_ACCESS_KEY_ID'] = config.aws_access_key
+ os.environ['AWS_SECRET_ACCESS_KEY'] = config.aws_secret_key
+ return S3ObjectStore(config=config)
elif store == 'hierarchical':
return HierarchicalObjectStore()
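Threading the bare config object through instead of the whole app is what makes the object store constructible from a standalone script, which the set_metadata.py change below relies on. A minimal sketch under the signatures in this diff (the option values are hypothetical):

from galaxy import config
from galaxy.objectstore import build_object_store_from_config

universe_config = config.Configuration(object_store='disk',
                                       file_path='database/files')
object_store = build_object_store_from_config(universe_config)
# ... use the store, then release any worker threads it started:
object_store.shutdown()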
diff -r fd17baeca99a5e7b8b7474e675fe5b984e555d88 -r 626808a6bd5044b8f0c5688c93549093d62bae56 lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -325,7 +325,6 @@
# Create an empty file immediately
if not dataset.dataset.external_filename:
trans.app.object_store.create( dataset.id )
- print "---> Upload tool created a folder(?) %s with ID %s? %s" % (dataset.file_name, dataset.id, trans.app.object_store.exists(dataset.id))
# open( dataset.file_name, "w" ).close()
else:
for i, dataset in enumerate( data_list ):
@@ -333,7 +332,6 @@
# Create an empty file immediately
if not dataset.dataset.external_filename:
trans.app.object_store.create( dataset.id )
- print "---> Upload tool created a file %s with ID %s? %s" % (dataset.file_name, dataset.id, trans.app.object_store.exists(dataset.id))
# open( dataset.file_name, "w" ).close()
job.state = job.states.NEW
trans.sa_session.add( job )
diff -r fd17baeca99a5e7b8b7474e675fe5b984e555d88 -r 626808a6bd5044b8f0c5688c93549093d62bae56 scripts/set_metadata.py
--- a/scripts/set_metadata.py
+++ b/scripts/set_metadata.py
@@ -27,6 +27,9 @@
from galaxy.util import stringify_dictionary_keys
from galaxy.util.json import from_json_string
from sqlalchemy.orm import clear_mappers
+from galaxy.objectstore import build_object_store_from_config
+from galaxy import config
+import ConfigParser
def __main__():
file_path = sys.argv.pop( 1 )
@@ -34,11 +37,32 @@
galaxy.model.Dataset.file_path = file_path
galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tmp_dir
+ # Set up reference to object store
+ # First, read in the main config file for Galaxy; this is required because
+ # the object store configuration is stored there
+ conf = ConfigParser.ConfigParser()
+ config_file_name = 'universe_wsgi.ini' # Safe assumption?
+ conf.read(config_file_name)
+ conf_dict = {}
+ for section in conf.sections():
+ for option in conf.options(section):
+ try:
+ conf_dict[option] = conf.get(section, option)
+ except ConfigParser.InterpolationMissingOptionError:
+ # Because this is not called from Paste Script, %(here)s variable
+ # is not initialized in the config file so skip those fields -
+ # just need not to use any such fields for the object store conf...
+ log.debug("Did not load option %s from %s" % (option, config_file_name))
+ # config object is required by ObjectStore class so create it now
+ universe_config = config.Configuration(**conf_dict)
+ object_store = build_object_store_from_config(universe_config)
+ galaxy.model.Dataset.object_store = object_store
+
# Set up datatypes registry
config_root = sys.argv.pop( 1 )
datatypes_config = sys.argv.pop( 1 )
galaxy.model.set_datatypes_registry( galaxy.datatypes.registry.Registry( config_root, datatypes_config ) )
-
+
job_metadata = sys.argv.pop( 1 )
ext_override = dict()
if job_metadata != "None" and os.path.exists( job_metadata ):
@@ -83,5 +107,7 @@
except Exception, e:
simplejson.dump( ( False, str( e ) ), open( filename_results_code, 'wb+' ) ) #setting metadata has failed somehow
clear_mappers()
+ # Shut down any additional threads that might have been created via the ObjectStore
+ object_store.shutdown()
-__main__()
+__main__()
\ No newline at end of file
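The InterpolationMissingOptionError handling above is needed because ConfigParser.get() tries to expand %(here)s, a variable Paste normally injects. An editorial alternative (not what the commit does) is to read the file with RawConfigParser, which never interpolates, so no option has to be skipped:

import ConfigParser

conf = ConfigParser.RawConfigParser()
conf.read('universe_wsgi.ini')
conf_dict = {}
for section in conf.sections():
    for option in conf.options(section):
        # Raw values: %(here)s stays literal instead of raising.
        conf_dict[option] = conf.get(section, option)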
https://bitbucket.org/galaxy/galaxy-central/changeset/f0a15f976f1f/
changeset: f0a15f976f1f
user: afgane
date: 2011-08-01 23:36:28
summary: Merge
affected #: 49 files
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -72,9 +72,10 @@
<!-- <display file="gbrowse/gbrowse_gff.xml" inherit="True" /> --></datatype><datatype extension="gff3" type="galaxy.datatypes.interval:Gff3" display_in_upload="true"/>
- <datatype extension="gif" type="galaxy.datatypes.images:Image" mimetype="image/gif"/>
+ <datatype extension="gif" type="galaxy.datatypes.images:Gif" mimetype="image/gif"/><datatype extension="gmaj.zip" type="galaxy.datatypes.images:Gmaj" mimetype="application/zip"/><datatype extension="gtf" type="galaxy.datatypes.interval:Gtf" display_in_upload="true"/>
+ <datatype extension="h5" type="galaxy.datatypes.data:Data" mimetype="application/octet-stream"/><datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/><datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true"><converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
@@ -90,7 +91,21 @@
<datatype extension="picard_interval_list" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/><datatype extension="gatk_interval" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/><datatype extension="gatk_dbsnp" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/>
- <datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/>
+ <datatype extension="jpg" type="galaxy.datatypes.images:Jpg" mimetype="image/jpeg"/>
+ <datatype extension="tiff" type="galaxy.datatypes.images:Tiff" mimetype="image/tiff"/>
+ <datatype extension="bmp" type="galaxy.datatypes.images:Bmp" mimetype="image/bmp"/>
+ <datatype extension="im" type="galaxy.datatypes.images:Im" mimetype="image/im"/>
+ <datatype extension="pcd" type="galaxy.datatypes.images:Pcd" mimetype="image/pcd"/>
+ <datatype extension="pcx" type="galaxy.datatypes.images:Pcx" mimetype="image/pcx"/>
+ <datatype extension="ppm" type="galaxy.datatypes.images:Ppm" mimetype="image/ppm"/>
+ <datatype extension="psd" type="galaxy.datatypes.images:Psd" mimetype="image/psd"/>
+ <datatype extension="xbm" type="galaxy.datatypes.images:Xbm" mimetype="image/xbm"/>
+ <datatype extension="xpm" type="galaxy.datatypes.images:Xpm" mimetype="image/xpm"/>
+ <datatype extension="rgb" type="galaxy.datatypes.images:Rgb" mimetype="image/rgb"/>
+ <datatype extension="pbm" type="galaxy.datatypes.images:Pbm" mimetype="image/pbm"/>
+ <datatype extension="pgm" type="galaxy.datatypes.images:Pgm" mimetype="image/pgm"/>
+ <datatype extension="eps" type="galaxy.datatypes.images:Eps" mimetype="image/eps"/>
+ <datatype extension="rast" type="galaxy.datatypes.images:Rast" mimetype="image/rast"/><datatype extension="laj" type="galaxy.datatypes.images:Laj"/><datatype extension="lav" type="galaxy.datatypes.sequence:Lav" display_in_upload="true"/><datatype extension="maf" type="galaxy.datatypes.sequence:Maf" display_in_upload="true">
@@ -102,7 +117,7 @@
</datatype><datatype extension="pdf" type="galaxy.datatypes.images:Pdf" mimetype="application/pdf"/><datatype extension="pileup" type="galaxy.datatypes.tabular:Pileup" display_in_upload="true" />
- <datatype extension="png" type="galaxy.datatypes.images:Image" mimetype="image/png"/>
+ <datatype extension="png" type="galaxy.datatypes.images:Png" mimetype="image/png"/><datatype extension="qual" type="galaxy.datatypes.qualityscore:QualityScore" /><datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/><datatype extension="qualillumina" type="galaxy.datatypes.qualityscore:QualityScoreIllumina" display_in_upload="true"/>
@@ -116,7 +131,7 @@
<datatype extension="svg" type="galaxy.datatypes.images:Image" mimetype="image/svg+xml"/><datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/><datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/>
- <datatype extension="twobit" type="galaxy.datatypes.binary:TwoBit" mimetype="application/octet-stream" display_in_upload="true"/>
+ <datatype extension="twobit" type="galaxy.datatypes.binary:TwoBit" mimetype="application/octet-stream" display_in_upload="true"/><datatype extension="txt" type="galaxy.datatypes.data:Text" display_in_upload="true"/><datatype extension="memexml" type="galaxy.datatypes.xml:MEMEXml" mimetype="application/xml" display_in_upload="true"/><datatype extension="blastxml" type="galaxy.datatypes.xml:BlastXml" mimetype="application/xml" display_in_upload="true"/>
@@ -304,6 +319,24 @@
<sniffer type="galaxy.datatypes.tabular:Pileup"/><sniffer type="galaxy.datatypes.interval:Interval"/><sniffer type="galaxy.datatypes.tabular:Sam"/>
+ <sniffer type="galaxy.datatypes.images:Jpg"/>
+ <sniffer type="galaxy.datatypes.images:Png"/>
+ <sniffer type="galaxy.datatypes.images:Tiff"/>
+ <sniffer type="galaxy.datatypes.images:Bmp"/>
+ <sniffer type="galaxy.datatypes.images:Gif"/>
+ <sniffer type="galaxy.datatypes.images:Im"/>
+ <sniffer type="galaxy.datatypes.images:Pcd"/>
+ <sniffer type="galaxy.datatypes.images:Pcx"/>
+ <sniffer type="galaxy.datatypes.images:Ppm"/>
+ <sniffer type="galaxy.datatypes.images:Psd"/>
+ <sniffer type="galaxy.datatypes.images:Xbm"/>
+ <sniffer type="galaxy.datatypes.images:Xpm"/>
+ <sniffer type="galaxy.datatypes.images:Rgb"/>
+ <sniffer type="galaxy.datatypes.images:Pbm"/>
+ <sniffer type="galaxy.datatypes.images:Pgm"/>
+ <sniffer type="galaxy.datatypes.images:Xpm"/>
+ <sniffer type="galaxy.datatypes.images:Eps"/>
+ <sniffer type="galaxy.datatypes.images:Rast"/><!--
Keep this commented until the sniff method in the assembly.py
module is fixed to not read the entire file.
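Sniffer order in datatypes_conf.xml matters: Galaxy tries each declared sniffer in turn and the first match wins, so the more specific image and binary types need to precede generic fallbacks. Roughly (a sketch of the mechanism, not Galaxy's actual sniff code):

def guess_ext(filename, sniff_order):
    # First declared datatype whose sniff() accepts the file wins.
    for datatype in sniff_order:
        if datatype.sniff(filename):
            return datatype.file_ext
    return 'txt'  # fall back to plain text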
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py
+++ b/lib/galaxy/datatypes/binary.py
@@ -18,7 +18,7 @@
log = logging.getLogger(__name__)
# Currently these supported binary data types must be manually set on upload
-unsniffable_binary_formats = [ 'ab1', 'scf' ]
+unsniffable_binary_formats = [ 'ab1', 'scf', 'h5' ]
class Binary( data.Data ):
"""Binary data"""
@@ -206,7 +206,24 @@
return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
def get_track_type( self ):
return "ReadTrack", {"data": "bai", "index": "summary_tree"}
-
+
+class H5( Binary ):
+ """Class describing an HDF5 file"""
+ file_ext = "h5"
+
+ def set_peek( self, dataset, is_multi_byte=False ):
+ if not dataset.dataset.purged:
+ dataset.peek = "Binary h5 file"
+ dataset.blurb = data.nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+ def display_peek( self, dataset ):
+ try:
+ return dataset.peek
+ except:
+ return "Binary h5 sequence file (%s)" % ( data.nice_size( dataset.get_size() ) )
+
class Scf( Binary ):
"""Class describing an scf binary sequence file"""
file_ext = "scf"
@@ -292,7 +309,6 @@
Binary.__init__( self, **kwd )
self._magic = 0x8789F2EB
self._name = "BigBed"
-
def get_track_type( self ):
return "LineTrack", {"data_standalone": "bigbed"}
@@ -309,14 +325,12 @@
return True
except IOError:
return False
-
def set_peek(self, dataset, is_multi_byte=False):
if not dataset.dataset.purged:
dataset.peek = "Binary TwoBit format nucleotide file"
dataset.blurb = data.nice_size(dataset.get_size())
else:
return super(TwoBit, self).set_peek(dataset, is_multi_byte)
-
def display_peek(self, dataset):
try:
return dataset.peek
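The new H5 type is appended to unsniffable_binary_formats, so uploads must name the format explicitly. HDF5 files do begin with a fixed 8-byte signature, so a sniffer would be possible along these lines (an editorial sketch, not part of the commit):

def is_hdf5(filename):
    # Every HDF5 file starts with this 8-byte signature.
    with open(filename, 'rb') as f:
        return f.read(8) == '\x89HDF\r\n\x1a\n'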
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/datatypes/checkers.py
--- a/lib/galaxy/datatypes/checkers.py
+++ b/lib/galaxy/datatypes/checkers.py
@@ -1,6 +1,28 @@
-import os, gzip, re, gzip, zipfile, binascii, bz2
+import os, gzip, re, gzip, zipfile, binascii, bz2, imghdr
from galaxy import util
+try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
+
+def check_image( file_path ):
+ if PIL != None:
+ try:
+ im = PIL.open( file_path )
+ except:
+ return False
+ if im:
+ return im
+ return False
+ else:
+ if imghdr.what( file_path ) != None:
+ return True
+ return False
+
def check_html( file_path, chunk=None ):
if chunk is None:
temp = open( file_path, "U" )
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/datatypes/images.py
--- a/lib/galaxy/datatypes/images.py
+++ b/lib/galaxy/datatypes/images.py
@@ -7,12 +7,31 @@
from galaxy.datatypes.metadata import MetadataElement
from galaxy.datatypes import metadata
from galaxy.datatypes.sniff import *
+from galaxy.datatypes.util.image_util import *
from urllib import urlencode, quote_plus
import zipfile
-import os, subprocess, tempfile
+import os, subprocess, tempfile, imghdr
+
+try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
log = logging.getLogger(__name__)
+# TODO: Uploading image files of various types is supported in Galaxy, but on
+# the main public instance, the display_in_upload is not set for these data
+# types in datatypes_conf.xml because we do not allow image files to be uploaded
+# there. There is currently no API feature that allows uploading files outside
+# of a data library ( where it requires either the upload_paths or upload_directory
+# option to be enabled, which is not the case on the main public instance ). Because
+# of this, we're currently safe, but when the api is enhanced to allow other uploads,
+# we need to ensure that the implementation is such that image files cannot be uploaded
+# to our main public instance.
+
class Image( data.Data ):
"""Class describing an image"""
def set_peek( self, dataset, is_multi_byte=False ):
@@ -22,11 +41,110 @@
else:
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
+ def sniff( self, filename ):
+ # First check if we can use PIL
+ if PIL is not None:
+ try:
+ im = PIL.open( filename )
+ im.close()
+ return True
+ except:
+ return False
+ else:
+ if imghdr.what( filename ) is not None:
+ return True
+ else:
+ return False
+
+class Jpg( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in jpg format."""
+ return check_image_type( filename, ['JPEG'], image )
+
+class Png( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in png format."""
+ return check_image_type( filename, ['PNG'], image )
+
+class Tiff( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in tiff format."""
+ return check_image_type( filename, ['TIFF'], image )
+
+class Bmp( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in bmp format."""
+ return check_image_type( filename, ['BMP'], image )
+
+class Gif( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in gif format."""
+ return check_image_type( filename, ['GIF'], image )
+
+class Im( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in im format."""
+ return check_image_type( filename, ['IM'], image )
+
+class Pcd( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in pcd format."""
+ return check_image_type( filename, ['PCD'], image )
+
+class Pcx( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in pcx format."""
+ return check_image_type( filename, ['PCX'], image )
+
+class Ppm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in ppm format."""
+ return check_image_type( filename, ['PPM'], image )
+
+class Psd( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in psd format."""
+ return check_image_type( filename, ['PSD'], image )
+
+class Xbm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in XBM format."""
+ return check_image_type( filename, ['XBM'], image )
+
+class Xpm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in XPM format."""
+ return check_image_type( filename, ['XPM'], image )
+
+class Rgb( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in RGB format."""
+ return check_image_type( filename, ['RGB'], image )
+
+class Pbm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in PBM format"""
+ return check_image_type( filename, ['PBM'], image )
+
+class Pgm( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in PGM format"""
+ return check_image_type( filename, ['PGM'], image )
+
+class Eps( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in eps format."""
+ return check_image_type( filename, ['EPS'], image )
+
+
+class Rast( Image ):
+ def sniff(self, filename, image=None):
+ """Determine if the file is in rast format"""
+ return check_image_type( filename, ['RAST'], image )
class Pdf( Image ):
def sniff(self, filename):
- """Determine if the file is in pdf format.
- """
+ """Determine if the file is in pdf format."""
headers = get_headers(filename, None, 1)
try:
if headers[0][0].startswith("%PDF"):
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/datatypes/util/image_util.py
--- /dev/null
+++ b/lib/galaxy/datatypes/util/image_util.py
@@ -0,0 +1,76 @@
+"""
+Provides utilities for working with image files.
+"""
+import logging, imghdr
+
+try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
+
+log = logging.getLogger(__name__)
+
+def image_type( filename, image=None ):
+ format = ''
+ if PIL is not None:
+ if image is not None:
+ format = image.format
+ else:
+ try:
+ im = PIL.open( filename )
+ format = im.format
+ im.close()
+ except:
+ return False
+ else:
+ format = imghdr.what( filename )
+ if format is not None:
+ format = format.upper()
+ else:
+ return False
+ return format
+def check_image_type( filename, types, image=None ):
+ format = image_type( filename, image )
+ # First check if we can use PIL
+ if format in types:
+ return True
+ return False
+def get_image_ext ( file_path, image ):
+ #determine ext
+ format = image_type( file_path, image )
+ if format in [ 'JPG','JPEG' ]:
+ return 'jpg'
+ if format == 'PNG':
+ return 'png'
+ if format == 'TIFF':
+ return 'tiff'
+ if format == 'BMP':
+ return 'bmp'
+ if format == 'GIF':
+ return 'gif'
+ if format == 'IM':
+ return 'im'
+ if format == 'PCD':
+ return 'pcd'
+ if format == 'PCX':
+ return 'pcx'
+ if format == 'PPM':
+ return 'ppm'
+ if format == 'PSD':
+ return 'psd'
+ if format == 'XBM':
+ return 'xbm'
+ if format == 'XPM':
+ return 'xpm'
+ if format == 'RGB':
+ return 'rgb'
+ if format == 'PBM':
+ return 'pbm'
+ if format == 'PGM':
+ return 'pgm'
+ if format == 'EPS':
+ return 'eps'
+ return None
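get_image_ext() spells the format-to-extension table out as an if-chain; the same mapping expressed as a dict lookup (an editorial sketch with unchanged behaviour, reusing image_type() from the module above):

IMAGE_EXT_BY_FORMAT = {
    'JPG': 'jpg', 'JPEG': 'jpg', 'PNG': 'png', 'TIFF': 'tiff',
    'BMP': 'bmp', 'GIF': 'gif', 'IM': 'im', 'PCD': 'pcd',
    'PCX': 'pcx', 'PPM': 'ppm', 'PSD': 'psd', 'XBM': 'xbm',
    'XPM': 'xpm', 'RGB': 'rgb', 'PBM': 'pbm', 'PGM': 'pgm',
    'EPS': 'eps',
}

def get_image_ext(file_path, image):
    # image_type() returns False on failure; dict.get then yields None.
    return IMAGE_EXT_BY_FORMAT.get(image_type(file_path, image))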
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -652,9 +652,14 @@
tool=self.tool, stdout=stdout, stderr=stderr )
job.command_line = self.command_line
+ bytes = 0
# Once datasets are collected, set the total dataset size (includes extra files)
for dataset_assoc in job.output_datasets + job.output_library_datasets:
dataset_assoc.dataset.dataset.set_total_size()
+ bytes += dataset_assoc.dataset.dataset.get_total_size()
+
+ if job.user:
+ job.user.total_disk_usage += bytes
# fix permissions
for path in [ dp.real_path for dp in self.get_output_fnames() ]:
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -4,7 +4,9 @@
Naming: try to use class names that have a distinct plural form so that
the relationship cardinalities are obvious (e.g. prefer Dataset to Data)
"""
-
+import pkg_resources
+pkg_resources.require( "simplejson" )
+import simplejson
import galaxy.datatypes
from galaxy.util.bunch import Bunch
from galaxy import util
@@ -70,6 +72,27 @@
if role not in roles:
roles.append( role )
return roles
+ def get_disk_usage( self, nice_size=False ):
+ rval = 0
+ if self.disk_usage is not None:
+ rval = self.disk_usage
+ if nice_size:
+ rval = galaxy.datatypes.data.nice_size( rval )
+ return rval
+ def set_disk_usage( self, bytes ):
+ self.disk_usage = bytes
+ total_disk_usage = property( get_disk_usage, set_disk_usage )
+ def calculate_disk_usage( self ):
+ dataset_ids = []
+ total = 0
+ # this can be a huge number and can run out of memory, so we avoid the mappers
+ db_session = object_session( self )
+ for history in db_session.query( History ).enable_eagerloads( False ).filter_by( user_id=self.id ).yield_per( 1000 ):
+ for hda in db_session.query( HistoryDatasetAssociation ).enable_eagerloads( False ).filter_by( history_id=history.id, purged=False ).yield_per( 1000 ):
+ if not hda.dataset.id in dataset_ids and not hda.dataset.purged and not hda.dataset.library_associations:
+ dataset_ids.append( hda.dataset.id )
+ total += hda.dataset.get_total_size()
+ return total
class Job( object ):
"""
@@ -349,7 +372,7 @@
self.galaxy_sessions.append( GalaxySessionToHistoryAssociation( galaxy_session, self ) )
else:
self.galaxy_sessions.append( association )
- def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid = True ):
+ def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid=True, quota=True ):
if isinstance( dataset, Dataset ):
dataset = HistoryDatasetAssociation(dataset=dataset)
object_session( self ).add( dataset )
@@ -367,6 +390,8 @@
else:
if set_hid:
dataset.hid = self._next_hid()
+ if quota and self.user:
+ self.user.total_disk_usage += dataset.quota_amount( self.user )
dataset.history = self
if genome_build not in [None, '?']:
self.genome_build = genome_build
@@ -378,6 +403,9 @@
name = self.name
if not target_user:
target_user = self.user
+ quota = True
+ if target_user == self.user:
+ quota = False
new_history = History( name=name, user=target_user )
db_session = object_session( self )
db_session.add( new_history )
@@ -393,8 +421,8 @@
hdas = self.active_datasets
for hda in hdas:
# Copy HDA.
- new_hda = hda.copy( copy_children=True, target_history=new_history )
- new_history.add_dataset( new_hda, set_hid = False )
+ new_hda = hda.copy( copy_children=True )
+ new_history.add_dataset( new_hda, set_hid = False, quota=quota )
db_session.add( new_hda )
db_session.flush()
# Copy annotation.
@@ -725,6 +753,10 @@
def set_size( self ):
"""Returns the size of the data on disk"""
return self.dataset.set_size()
+ def get_total_size( self ):
+ return self.dataset.get_total_size()
+ def set_total_size( self ):
+ return self.dataset.set_total_size()
def has_data( self ):
"""Detects whether there is any data"""
return self.dataset.has_data()
@@ -906,7 +938,7 @@
self.history = history
self.copied_from_history_dataset_association = copied_from_history_dataset_association
self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association
- def copy( self, copy_children = False, parent_id = None, target_history = None ):
+ def copy( self, copy_children = False, parent_id = None ):
hda = HistoryDatasetAssociation( hid=self.hid,
name=self.name,
info=self.info,
@@ -918,8 +950,7 @@
visible=self.visible,
deleted=self.deleted,
parent_id=parent_id,
- copied_from_history_dataset_association=self,
- history = target_history )
+ copied_from_history_dataset_association=self )
object_session( self ).add( hda )
object_session( self ).flush()
hda.set_size()
@@ -1001,6 +1032,26 @@
return hda_name
def get_access_roles( self, trans ):
return self.dataset.get_access_roles( trans )
+ def quota_amount( self, user ):
+ """
+ If the user has multiple instances of this dataset, it will not affect their disk usage statistic.
+ """
+ rval = 0
+ # Anon users are handled just by their single history size.
+ if not user:
+ return rval
+ # Gets an HDA and its children's disk usage, if the user does not already have an association of the same dataset
+ if not self.dataset.library_associations and not self.purged and not self.dataset.purged:
+ for hda in self.dataset.history_associations:
+ if hda.id == self.id:
+ continue
+ if not hda.purged and hda.history and hda.history.user and hda.history.user == user:
+ break
+ else:
+ rval += self.get_total_size()
+ for child in self.children:
+ rval += child.get_disk_usage( user )
+ return rval
class HistoryDatasetAssociationDisplayAtAuthorization( object ):
def __init__( self, hda=None, user=None, site=None ):
@@ -1350,6 +1401,22 @@
else:
return template.get_widgets( trans.user )
return []
+ def templates_dict( self ):
+ """
+ Returns a dict of template info
+ """
+ template_data = {}
+ for temp_info in self.info_association:
+ template = temp_info.template
+ content = temp_info.info.content
+ tmp_dict = {}
+ for field in template.fields:
+ tmp_dict[field['label']] = content[field['name']]
+ template_data[template.name] = tmp_dict
+ return template_data
+ def templates_json( self ):
+ return simplejson.dumps( self.templates_dict() )
+
def get_display_name( self ):
"""
LibraryDatasetDatasetAssociation name can be either a string or a unicode object.
@@ -1451,6 +1518,13 @@
self.histories.append( GalaxySessionToHistoryAssociation( self, history ) )
else:
self.histories.append( association )
+ def get_disk_usage( self ):
+ if self.disk_usage is None:
+ return 0
+ return self.disk_usage
+ def set_disk_usage( self, bytes ):
+ self.disk_usage = bytes
+ total_disk_usage = property( get_disk_usage, set_disk_usage )
class GalaxySessionToHistoryAssociation( object ):
def __init__( self, galaxy_session, history ):
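The quota_amount() logic above leans on Python's for/else: the else clause runs only when the loop over history_associations completes without hitting break, i.e. when no other unpurged copy of the dataset belongs to the same user, and only then is the size counted. The idiom in isolation (a hypothetical helper, not the committed code):

def find_other_copy(hdas, user):
    # for/else: the else branch runs only if the loop never breaks.
    for hda in hdas:
        if not hda.purged and hda.history and hda.history.user == user:
            break  # another copy owned by this user
    else:
        return None  # loop ran to completion: no other copy
    return hda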
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py
+++ b/lib/galaxy/security/__init__.py
@@ -90,6 +90,8 @@
raise "Unimplemented Method"
def get_permissions( self, library_dataset ):
raise "Unimplemented Method"
+ def get_all_roles( self, trans, cntrller ):
+ raise "Unimplemented Method"
def get_legitimate_roles( self, trans, item, cntrller ):
raise "Unimplemented Method"
def derive_roles_from_access( self, trans, item_id, cntrller, library=False, **kwd ):
@@ -120,6 +122,50 @@
def sa_session( self ):
"""Returns a SQLAlchemy session"""
return self.model.context
+ def sort_by_attr( self, seq, attr ):
+ """
+ Sort the sequence of objects by object's attribute
+ Arguments:
+ seq - the list or any sequence (including immutable one) of objects to sort.
+ attr - the name of attribute to sort by
+ """
+ # Use the "Schwartzian transform"
+ # Create the auxiliary list of tuples where every i-th tuple has form
+ # (seq[i].attr, i, seq[i]) and sort it. The second item of tuple is needed not
+ # only to provide stable sorting, but mainly to eliminate comparison of objects
+ # (which can be expensive or prohibited) in case of equal attribute values.
+ intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq )
+ intermed.sort()
+ return map( operator.getitem, intermed, ( -1, ) * len( intermed ) )
+ def get_all_roles( self, trans, cntrller ):
+ admin_controller = cntrller in [ 'library_admin' ]
+ roles = set()
+ if not trans.user:
+ return trans.sa_session.query( trans.app.model.Role ) \
+ .filter( and_( self.model.Role.table.c.deleted==False,
+ self.model.Role.table.c.type != self.model.Role.types.PRIVATE,
+ self.model.Role.table.c.type != self.model.Role.types.SHARING ) ) \
+ .order_by( self.model.Role.table.c.name )
+ if admin_controller:
+ # The library is public and the user is an admin, so all roles are legitimate
+ for role in trans.sa_session.query( trans.app.model.Role ) \
+ .filter( self.model.Role.table.c.deleted==False ) \
+ .order_by( self.model.Role.table.c.name ):
+ roles.add( role )
+ else:
+ # Add the current user's private role
+ roles.add( self.get_private_user_role( trans.user ) )
+ # Add the current user's sharing roles
+ for role in self.get_sharing_roles( trans.user ):
+ roles.add( role )
+ # Add all remaining non-private, non-sharing roles
+ for role in trans.sa_session.query( trans.app.model.Role ) \
+ .filter( and_( self.model.Role.table.c.deleted==False,
+ self.model.Role.table.c.type != self.model.Role.types.PRIVATE,
+ self.model.Role.table.c.type != self.model.Role.types.SHARING ) ) \
+ .order_by( self.model.Role.table.c.name ):
+ roles.add( role )
+ return self.sort_by_attr( [ role for role in roles ], 'name' )
def get_legitimate_roles( self, trans, item, cntrller ):
"""
Return a sorted list of legitimate roles that can be associated with a permission on
@@ -140,51 +186,10 @@
for the current user's private role, will be excluded.
"""
admin_controller = cntrller in [ 'library_admin' ]
- def sort_by_attr( seq, attr ):
- """
- Sort the sequence of objects by object's attribute
- Arguments:
- seq - the list or any sequence (including immutable one) of objects to sort.
- attr - the name of attribute to sort by
- """
- # Use the "Schwartzian transform"
- # Create the auxiliary list of tuples where every i-th tuple has form
- # (seq[i].attr, i, seq[i]) and sort it. The second item of tuple is needed not
- # only to provide stable sorting, but mainly to eliminate comparison of objects
- # (which can be expensive or prohibited) in case of equal attribute values.
- intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq )
- intermed.sort()
- return map( operator.getitem, intermed, ( -1, ) * len( intermed ) )
roles = set()
if ( isinstance( item, self.model.Library ) and self.library_is_public( item ) ) or \
( isinstance( item, self.model.Dataset ) and self.dataset_is_public( item ) ):
- if not trans.user:
- return trans.sa_session.query( trans.app.model.Role ) \
- .filter( and_( self.model.Role.table.c.deleted==False,
- self.model.Role.table.c.type != self.model.Role.types.PRIVATE,
- self.model.Role.table.c.type != self.model.Role.types.SHARING ) ) \
- .order_by( self.model.Role.table.c.name )
- if admin_controller:
- # The library is public and the user is an admin, so all roles are legitimate
- for role in trans.sa_session.query( trans.app.model.Role ) \
- .filter( self.model.Role.table.c.deleted==False ) \
- .order_by( self.model.Role.table.c.name ):
- roles.add( role )
- return sort_by_attr( [ role for role in roles ], 'name' )
- else:
- # Add the current user's private role
- roles.add( self.get_private_user_role( trans.user ) )
- # Add the current user's sharing roles
- for role in self.get_sharing_roles( trans.user ):
- roles.add( role )
- # Add all remaining non-private, non-sharing roles
- for role in trans.sa_session.query( trans.app.model.Role ) \
- .filter( and_( self.model.Role.table.c.deleted==False,
- self.model.Role.table.c.type != self.model.Role.types.PRIVATE,
- self.model.Role.table.c.type != self.model.Role.types.SHARING ) ) \
- .order_by( self.model.Role.table.c.name ):
- roles.add( role )
- return sort_by_attr( [ role for role in roles ], 'name' )
+ return self.get_all_roles( trans, cntrller )
# If item has roles associated with the access permission, we need to start with them.
access_roles = item.get_access_roles( trans )
for role in access_roles:
@@ -205,7 +210,7 @@
for ura in user.roles:
if admin_controller or self.ok_to_display( trans.user, ura.role ):
roles.add( ura.role )
- return sort_by_attr( [ role for role in roles ], 'name' )
+ return self.sort_by_attr( [ role for role in roles ], 'name' )
def ok_to_display( self, user, role ):
"""
Method for checking if:
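The sort_by_attr() method hoisted out above is the classic decorate-sort-undecorate recipe from before list.sort() grew a key argument; on any interpreter this code targets it collapses to a one-liner with the same stability and the same guarantee that the objects themselves are never compared (editorial note, not part of the commit):

import operator

def sort_by_attr(seq, attr):
    return sorted(seq, key=operator.attrgetter(attr))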
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1,7 +1,7 @@
"""
Classes encapsulating galaxy tools and tool configuration.
"""
-import pkg_resources;
+import pkg_resources
pkg_resources.require( "simplejson" )
@@ -2041,21 +2041,16 @@
self.input = input
self.value = value
self._other_values = other_values
+ self.counter = 0
def __str__( self ):
- return self.value.name
- def templates( self ):
- """ Returns JSON dict of templates => data """
- if not self.value:
- return None
- template_data = {}
- for temp_info in self.value.info_association:
- template = temp_info.template
- content = temp_info.info.content
- tmp_dict = {}
- for field in template.fields:
- tmp_dict[field['label']] = content[field['name']]
- template_data[template.name] = tmp_dict
- return simplejson.dumps( template_data )
+ return self.value
+ def __iter__( self ):
+ return self
+ def next( self ):
+ if self.counter >= len(self.value):
+ raise StopIteration
+ self.counter += 1
+ return self.value[self.counter-1]
def __getattr__( self, key ):
return getattr( self.value, key )
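The iterator protocol added above is stateful: __iter__() returns self and next() advances a counter that is never reset, so each wrapper can be consumed exactly once, and __str__() now hands back self.value unconverted, which only works when that value is already string-like. A more conventional shape (a hypothetical variant, not the committed code) delegates iteration to the wrapped value:

class ValueListWrapper(object):
    # Hypothetical: iterable any number of times, no hand-rolled counter.
    def __init__(self, value):
        self.value = value
    def __iter__(self):
        return iter(self.value)
    def __getattr__(self, key):
        return getattr(self.value, key)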
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -725,7 +725,7 @@
dynamic options, we need to check whether the other parameters which
determine what options are valid have been set. For the old style
dynamic options which do not specify dependencies, this is always true
- (must valiate at runtime).
+ (must validate at runtime).
"""
# Option list is statically defined, never need late validation
if not self.is_dynamic:
@@ -804,7 +804,7 @@
Select list that sets the last used genome build for the current history
as "selected".
- >>> # Create a mock transcation with 'hg17' as the current build
+ >>> # Create a mock transaction with 'hg17' as the current build
>>> from galaxy.util.bunch import Bunch
>>> trans = Bunch( history=Bunch( genome_build='hg17' ), db_builds=util.dbnames )
@@ -1539,20 +1539,26 @@
def from_html( self, value, trans, other_values={} ):
if not value:
return None
- elif isinstance( value, trans.app.model.LibraryDatasetDatasetAssociation ):
+ elif isinstance( value, list ):
return value
else:
- return trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( value ) )
+ decoded_lst = []
+ for encoded_id in value.split("||"):
+ decoded_lst.append( trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( encoded_id ) ) )
+ return decoded_lst
def to_string( self, value, app ):
if not value:
- return None
- return value.id
+ return value
+ return [ldda.id for ldda in value]
def to_python( self, value, app ):
if not value:
return value
- return app.model.context.query( app.model.LibraryDatasetDatasetAssociation ).get( value )
+ lddas = []
+ for ldda_id in value:
+ lddas.append( app.model.context.query( app.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) )
+ return lddas
# class RawToolParameter( ToolParameter ):
# """
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -29,6 +29,14 @@
encoded_id = trans.security.encode_id(wf.id)
item['url'] = url_for('workflow', id=encoded_id)
rval.append(item)
+ for wf_sa in trans.sa_session.query( trans.app.model.StoredWorkflowUserShareAssociation ).filter_by(
+ user=trans.user ).join( 'stored_workflow' ).filter(
+ trans.app.model.StoredWorkflow.deleted == False ).order_by(
+ desc( trans.app.model.StoredWorkflow.update_time ) ).all():
+ item = wf_sa.stored_workflow.get_api_value(value_mapper={'id':trans.security.encode_id})
+ encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
+ item['url'] = url_for('workflow', id=encoded_id)
+ rval.append(item)
return rval
@web.expose_api
def show(self, trans, id, **kwd):
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -9,6 +9,7 @@
from galaxy.util import inflector
from galaxy.model.item_attrs import *
from galaxy.model import LibraryDatasetDatasetAssociation, HistoryDatasetAssociation
+from galaxy.web.framework.helpers import to_unicode
import pkg_resources;
pkg_resources.require( "Paste" )
@@ -405,6 +406,188 @@
return trans.stream_template_mako( "/dataset/large_file.mako",
truncated_data = open( data.file_name ).read(max_peek_size),
data = data )
+
+ @web.expose
+ def edit(self, trans, dataset_id=None, filename=None, hid=None, **kwd):
+ """Allows user to modify parameters of an HDA."""
+ message = None
+ status = 'done'
+ refresh_frames = []
+ error = False
+ def __ok_to_edit_metadata( dataset_id ):
+ #prevent modifying metadata when dataset is queued or running as input/output
+ #This code could be more efficient, i.e. by using mappers, but to prevent slowing down loading a History panel, we'll leave the code here for now
+ for job_to_dataset_association in trans.sa_session.query( self.app.model.JobToInputDatasetAssociation ) \
+ .filter_by( dataset_id=dataset_id ) \
+ .all() \
+ + trans.sa_session.query( self.app.model.JobToOutputDatasetAssociation ) \
+ .filter_by( dataset_id=dataset_id ) \
+ .all():
+ if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]:
+ return False
+ return True
+ if hid is not None:
+ history = trans.get_history()
+ # TODO: hid handling
+ data = history.datasets[ int( hid ) - 1 ]
+ id = None
+ elif dataset_id is not None:
+ id = trans.app.security.decode_id( dataset_id )
+ data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ else:
+ trans.log_event( "dataset_id and hid are both None, cannot load a dataset to edit" )
+ return trans.show_error_message( "You must provide a history dataset id to edit" )
+ if data is None:
+ trans.log_event( "Problem retrieving dataset (encoded: %s, decoded: %s) with history id %s." % ( str( dataset_id ), str( id ), str( hid ) ) )
+ return trans.show_error_message( "History dataset id is invalid" )
+ if dataset_id is not None and data.history.user is not None and data.history.user != trans.user:
+ trans.log_event( "User attempted to edit an HDA they do not own (encoded: %s, decoded: %s)" % ( dataset_id, id ) )
+ # Do not reveal the dataset's existence
+ return trans.show_error_message( "History dataset id is invalid" )
+ current_user_roles = trans.get_current_user_roles()
+ if data.history.user and not data.dataset.has_manage_permissions_roles( trans ):
+ # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time,
+ # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS
+ # permission. In this case, we'll reset this permission to the hda user's private role.
+ manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
+ permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] }
+ trans.app.security_agent.set_dataset_permission( data.dataset, permissions )
+ if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
+ if data.state == trans.model.Dataset.states.UPLOAD:
+ return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." )
+ params = util.Params( kwd, sanitize=False )
+ if params.change:
+ # The user clicked the Save button on the 'Change data type' form
+ if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
+ #prevent modifying datatype when dataset is queued or running as input/output
+ if not __ok_to_edit_metadata( data.id ):
+ message = "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them."
+ error = True
+ else:
+ trans.app.datatypes_registry.change_datatype( data, params.datatype, set_meta = not trans.app.config.set_metadata_externally )
+ trans.sa_session.flush()
+ if trans.app.config.set_metadata_externally:
+ trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior
+ message = "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype )
+ refresh_frames=['history']
+ else:
+ message = "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype )
+ error = True
+ elif params.save:
+ # The user clicked the Save button on the 'Edit Attributes' form
+ data.name = params.name
+ data.info = params.info
+ message = ''
+ if __ok_to_edit_metadata( data.id ):
+ # The following for loop will save all metadata_spec items
+ for name, spec in data.datatype.metadata_spec.items():
+ if spec.get("readonly"):
+ continue
+ optional = params.get("is_"+name, None)
+ other = params.get("or_"+name, None)
+ if optional and optional == 'true':
+ # an 'optional' value of 'true' actually means the element is NOT checked (and therefore omitted)
+ setattr(data.metadata, name, None)
+ else:
+ if other:
+ setattr( data.metadata, name, other )
+ else:
+ setattr( data.metadata, name, spec.unwrap( params.get( name, None ) ) )
+ data.datatype.after_setting_metadata( data )
+ # Sanitize annotation before adding it.
+ if params.annotation:
+ annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
+ self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
+ # If setting metadata previously failed and all required elements have now been set, clear the failed state.
+ if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
+ data._state = None
+ trans.sa_session.flush()
+ message = "Attributes updated%s" % message
+ refresh_frames=['history']
+ else:
+ trans.sa_session.flush()
+ message = "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata."
+ status = "warning"
+ refresh_frames=['history']
+ elif params.detect:
+ # The user clicked the Auto-detect button on the 'Edit Attributes' form
+ #prevent modifying metadata when dataset is queued or running as input/output
+ if not __ok_to_edit_metadata( data.id ):
+ message = "This dataset is currently being used as input or output. You cannot change metadata until the jobs have completed or you have canceled them."
+ error = True
+ else:
+ for name, spec in data.metadata.spec.items():
+ # We need to be careful about the attributes we are resetting
+ if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
+ if spec.get( 'default' ):
+ setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
+ if trans.app.config.set_metadata_externally:
+ message = 'Attributes have been queued to be updated'
+ trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
+ else:
+ message = 'Attributes updated'
+ data.set_meta()
+ data.datatype.after_setting_metadata( data )
+ trans.sa_session.flush()
+ refresh_frames=['history']
+ elif params.convert_data:
+ target_type = kwd.get("target_type", None)
+ if target_type:
+ message = data.datatype.convert_dataset(trans, data, target_type)
+ refresh_frames=['history']
+ elif params.update_roles_button:
+ if not trans.user:
+ return trans.show_error_message( "You must be logged in if you want to change permissions." )
+ if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
+ access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action )
+ manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
+ # The user associated the DATASET_ACCESS permission on the dataset with 1 or more roles. We
+ # need to ensure that they did not associate roles that would cause accessibility problems.
+ permissions, in_roles, error, message = \
+ trans.app.security_agent.derive_roles_from_access( trans, data.dataset.id, 'root', **kwd )
+ if error:
+ # Keep the original role associations for the DATASET_ACCESS permission on the dataset.
+ permissions[ access_action ] = data.dataset.get_access_roles( trans )
+ status = 'error'
+ else:
+ error = trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
+ if error:
+ message += error
+ status = 'error'
+ else:
+ message = 'Your changes completed successfully.'
+ trans.sa_session.refresh( data.dataset )
+ else:
+ message = "You are not authorized to change this dataset's permissions"
+ error = True
+ else:
+ if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey:
+ # Copy dbkey into metadata, for backwards compatibility
+ # This looks like it does nothing, but getting the dbkey
+ # returns the metadata dbkey unless it is None, in which
+ # case it resorts to the old dbkey. Setting the dbkey
+ # sets it properly in the metadata
+ #### This is likely no longer required, since the dbkey exists entirely within metadata (the old_dbkey field is gone): REMOVE ME?
+ data.metadata.dbkey = data.dbkey
+ # let's not overwrite the imported datatypes module with the variable datatypes?
+ # the built-in 'id' is overwritten in lots of places as well
+ ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
+ ldatatypes.sort()
+ all_roles = trans.app.security_agent.get_legitimate_roles( trans, data.dataset, 'root' )
+ if error:
+ status = 'error'
+ return trans.fill_template( "/dataset/edit_attributes.mako",
+ data=data,
+ data_annotation=self.get_item_annotation_str( trans.sa_session, trans.user, data ),
+ datatypes=ldatatypes,
+ current_user_roles=current_user_roles,
+ all_roles=all_roles,
+ message=message,
+ status=status,
+ dataset_id=dataset_id,
+ refresh_frames=refresh_frames )
+ else:
+ return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( dataset_id ) )
@web.expose
@web.require_login( "see all available datasets" )
@@ -676,111 +859,190 @@
return trans.fill_template_mako( "dataset/display_application/display.mako", msg = msg, display_app = display_app, display_link = display_link, refresh = refresh )
return trans.show_error_message( 'You do not have permission to view this dataset at an external display application.' )
- def _undelete( self, trans, id ):
+ def _delete( self, trans, dataset_id ):
+ message = None
+ status = 'done'
+ id = None
try:
- id = int( id )
- except ValueError, e:
- return False
- history = trans.get_history()
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- if data and data.undeletable:
+ id = trans.app.security.decode_id( dataset_id )
+ history = trans.get_history()
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ assert hda, 'Invalid HDA: %s' % id
# Walk up parent datasets to find the containing history
- topmost_parent = data
+ topmost_parent = hda
+ while topmost_parent.parent:
+ topmost_parent = topmost_parent.parent
+ assert topmost_parent in trans.history.datasets, "Data does not belong to current history"
+ # Mark deleted and cleanup
+ hda.mark_deleted()
+ hda.clear_associated_files()
+ trans.log_event( "Dataset id %s marked as deleted" % str(id) )
+ if hda.parent_id is None and len( hda.creating_job_associations ) > 0:
+ # Mark associated job for deletion
+ job = hda.creating_job_associations[0].job
+ if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]:
+ # Are *all* of the job's other output datasets deleted?
+ if job.check_if_output_datasets_deleted():
+ job.mark_deleted( self.app.config.get_bool( 'enable_job_running', True ),
+ self.app.config.get_bool( 'track_jobs_in_database', False ) )
+ self.app.job_manager.job_stop_queue.put( job.id )
+ trans.sa_session.flush()
+ except Exception, e:
+ msg = 'HDA deletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
+ log.exception( msg )
+ trans.log_event( msg )
+ message = 'Dataset deletion failed'
+ status = 'error'
+ return ( message, status )
+
+ def _undelete( self, trans, dataset_id ):
+ message = None
+ status = 'done'
+ id = None
+ try:
+ id = trans.app.security.decode_id( dataset_id )
+ history = trans.get_history()
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ assert hda and hda.undeletable, 'Invalid HDA: %s' % id
+ # Walk up parent datasets to find the containing history
+ topmost_parent = hda
while topmost_parent.parent:
topmost_parent = topmost_parent.parent
assert topmost_parent in history.datasets, "Data does not belong to current history"
# Mark undeleted
- data.mark_undeleted()
+ hda.mark_undeleted()
trans.sa_session.flush()
trans.log_event( "Dataset id %s has been undeleted" % str(id) )
- return True
- return False
+ except Exception, e:
+ msg = 'HDA undeletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
+ log.exception( msg )
+ trans.log_event( msg )
+ message = 'Dataset undeletion failed'
+ status = 'error'
+ return ( message, status )
- def _unhide( self, trans, id ):
+ def _unhide( self, trans, dataset_id ):
try:
- id = int( id )
- except ValueError, e:
+ id = trans.app.security.decode_id( dataset_id )
+ except:
return False
history = trans.get_history()
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- if data:
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ if hda:
# Walk up parent datasets to find the containing history
- topmost_parent = data
+ topmost_parent = hda
while topmost_parent.parent:
topmost_parent = topmost_parent.parent
assert topmost_parent in history.datasets, "Data does not belong to current history"
# Mark undeleted
- data.mark_unhidden()
+ hda.mark_unhidden()
trans.sa_session.flush()
trans.log_event( "Dataset id %s has been unhidden" % str(id) )
return True
return False
- def _purge( self, trans, id ):
+ def _purge( self, trans, dataset_id ):
+ message = None
+ status = 'done'
try:
- id = int( id )
- except ValueError, e:
- return False
- hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- # Invalid HDA or not deleted
- if not hda or not hda.history or not hda.deleted:
- return False
- # If the user is anonymous, make sure the HDA is owned by the current session.
- if not hda.history.user and trans.galaxy_session.id not in [ s.id for s in hda.history.galaxy_sessions ]:
- return False
- # If the user is known, make sure the HDA is owned by the current user.
- if hda.history.user and hda.history.user != trans.user:
- return False
- # HDA is purgeable
- hda.purged = True
- trans.sa_session.add( hda )
- trans.log_event( "HDA id %s has been purged" % hda.id )
- # Don't delete anything if there are active HDAs or any LDDAs, even if
- # the LDDAs are deleted. Let the cleanup scripts get it in the latter
- # case.
- if hda.dataset.user_can_purge:
- try:
- hda.dataset.full_delete()
- trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) )
- trans.sa_session.add( hda.dataset )
- except:
- log.exception( 'Unable to purge dataset (%s) on purge of hda (%s):' % ( hda.dataset.id, hda.id ) )
- trans.sa_session.flush()
- return True
+ id = trans.app.security.decode_id( dataset_id )
+ history = trans.get_history()
+ user = trans.get_user()
+ hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+ # Invalid HDA
+ assert hda, 'Invalid history dataset ID'
+ # Walk up parent datasets to find the containing history
+ topmost_parent = hda
+ while topmost_parent.parent:
+ topmost_parent = topmost_parent.parent
+ assert topmost_parent in history.datasets, "Data does not belong to current history"
+ # If the user is anonymous, make sure the HDA is owned by the current session.
+ if not user:
+ assert trans.galaxy_session.id in [ s.id for s in hda.history.galaxy_sessions ], 'Invalid history dataset ID'
+ # If the user is known, make sure the HDA is owned by the current user.
+ else:
+ assert topmost_parent.history.user == trans.user, 'Invalid history dataset ID'
+ # HDA is not deleted
+ assert hda.deleted, 'History dataset is not marked as deleted'
+ # HDA is purgeable
+ # Decrease disk usage first
+ if user:
+ user.total_disk_usage -= hda.quota_amount( user )
+ # Mark purged
+ hda.purged = True
+ trans.sa_session.add( hda )
+ trans.log_event( "HDA id %s has been purged" % hda.id )
+ # Don't delete anything if there are active HDAs or any LDDAs, even if
+ # the LDDAs are deleted. Let the cleanup scripts get it in the latter
+ # case.
+ if hda.dataset.user_can_purge:
+ try:
+ hda.dataset.full_delete()
+ trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) )
+ trans.sa_session.add( hda.dataset )
+ except:
+ log.exception( 'Unable to purge dataset (%s) on purge of HDA (%s):' % ( hda.dataset.id, hda.id ) )
+ trans.sa_session.flush()
+ except Exception, e:
+ msg = 'HDA purge failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
+ log.exception( msg )
+ trans.log_event( msg )
+ message = 'Dataset removal from disk failed'
+ status = 'error'
+ return ( message, status )
@web.expose
- def undelete( self, trans, id ):
- if self._undelete( trans, id ):
- return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True ) )
- raise Exception( "Error undeleting" )
+ def delete( self, trans, dataset_id, filename, show_deleted_on_refresh = False ):
+ message, status = self._delete( trans, dataset_id )
+ return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
@web.expose
- def unhide( self, trans, id ):
- if self._unhide( trans, id ):
+ def delete_async( self, trans, dataset_id, filename ):
+ message, status = self._delete( trans, dataset_id )
+ if status == 'done':
+ return "OK"
+ else:
+ raise Exception( message )
+
+ @web.expose
+ def undelete( self, trans, dataset_id, filename ):
+ message, status = self._undelete( trans, dataset_id )
+ return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True, message=message, status=status ) )
+
+ @web.expose
+ def undelete_async( self, trans, dataset_id, filename ):
+ message, status = self._undelete( trans, dataset_id )
+ if status == 'done':
+ return "OK"
+ else:
+ raise Exception( message )
+
+ @web.expose
+ def unhide( self, trans, dataset_id, filename ):
+ if self._unhide( trans, dataset_id ):
return trans.response.send_redirect( web.url_for( controller='root', action='history', show_hidden = True ) )
raise Exception( "Error unhiding" )
@web.expose
- def undelete_async( self, trans, id ):
- if self._undelete( trans, id ):
- return "OK"
- raise Exception( "Error undeleting" )
-
- @web.expose
- def purge( self, trans, id ):
- if not trans.app.config.allow_user_dataset_purge:
- raise Exception( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." )
- if self._purge( trans, id ):
- return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True ) )
- raise Exception( "Error removing disk file" )
+ def purge( self, trans, dataset_id, filename, show_deleted_on_refresh = False ):
+ if trans.app.config.allow_user_dataset_purge:
+ message, status = self._purge( trans, dataset_id )
+ else:
+ message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator."
+ status = 'error'
+ return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
@web.expose
- def purge_async( self, trans, id ):
- if not trans.app.config.allow_user_dataset_purge:
- raise Exception( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." )
- if self._purge( trans, id ):
+ def purge_async( self, trans, dataset_id, filename ):
+ if trans.app.config.allow_user_dataset_purge:
+ message, status = self._purge( trans, dataset_id )
+ else:
+ message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator."
+ status = 'error'
+ if status == 'done':
return "OK"
- raise Exception( "Error removing disk file" )
+ else:
+ raise Exception( message )
@web.expose
def show_params( self, trans, dataset_id=None, from_noframe=None, **kwd ):
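
A recurring pattern in this changeset is passing encoded ids, rather than raw integer database ids, across the web boundary. A minimal sketch of the round trip, using the SecurityHelper calls that appear throughout the diff above:

    # Hedged sketch: encode an HDA id for use in URLs/templates, then decode it back.
    encoded = trans.app.security.encode_id( hda.id )   # e.g. 'f2db41e1fa331b3e'
    decoded = trans.app.security.decode_id( encoded )  # the original integer primary key
    assert decoded == hda.id
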
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py
+++ b/lib/galaxy/web/controllers/library_common.py
@@ -246,12 +246,14 @@
message=util.sanitize_text( message ),
status='done' ) )
roles = trans.app.security_agent.get_legitimate_roles( trans, library, cntrller )
+ all_roles = trans.app.security_agent.get_all_roles( trans, cntrller )
return trans.fill_template( '/library/common/library_permissions.mako',
cntrller=cntrller,
use_panels=use_panels,
library=library,
current_user_roles=current_user_roles,
roles=roles,
+ all_roles=all_roles,
show_deleted=show_deleted,
message=message,
status=status )
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py
+++ b/lib/galaxy/web/controllers/root.py
@@ -8,7 +8,6 @@
from galaxy.util.sanitize_html import sanitize_html
from galaxy.model.orm import *
from galaxy.model.item_attrs import UsesAnnotations
-from galaxy.web.framework.helpers import to_unicode
log = logging.getLogger( __name__ )
@@ -99,11 +98,14 @@
return trans.fill_template_mako( "/my_data.mako" )
@web.expose
- def history( self, trans, as_xml=False, show_deleted=False, show_hidden=False, hda_id=None ):
+ def history( self, trans, as_xml=False, show_deleted=False, show_hidden=False, hda_id=None, **kwd ):
"""
Display the current history, creating a new history if necessary.
NOTE: No longer accepts "id" or "template" options for security reasons.
"""
+ params = util.Params( kwd )
+ message = params.get( 'message', None )
+ status = params.get( 'status', 'done' )
if trans.app.config.require_login and not trans.user:
return trans.fill_template( '/no_access.mako', message = 'Please log in to access Galaxy histories.' )
history = trans.get_history( create=True )
@@ -123,7 +125,9 @@
datasets = datasets,
hda_id = hda_id,
show_deleted = show_deleted,
- show_hidden=show_hidden )
+ show_hidden=show_hidden,
+ message=message,
+ status=status )
@web.expose
def dataset_state ( self, trans, id=None, stamp=None ):
@@ -160,9 +164,13 @@
# Create new HTML for any that have changed
rval = {}
if ids is not None and states is not None:
- ids = map( int, ids.split( "," ) )
+ ids = ids.split( "," )
states = states.split( "," )
- for id, state in zip( ids, states ):
+ for encoded_id, state in zip( ids, states ):
+ try:
+ id = int( trans.app.security.decode_id( encoded_id ) )
+ except:
+ id = int( encoded_id )
data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
if data.state != state:
job_hda = data
@@ -175,7 +183,7 @@
force_history_refresh = tool.force_history_refresh
if not job_hda.visible:
force_history_refresh = True
- rval[id] = {
+ rval[encoded_id] = {
"state": data.state,
"html": unicode( trans.fill_template( "root/history_item.mako", data=data, hid=data.hid ), 'utf-8' ),
"force_history_refresh": force_history_refresh
@@ -288,237 +296,6 @@
else:
yield "No data with id=%d" % id
- @web.expose
- def edit(self, trans, id=None, hid=None, **kwd):
- """Allows user to modify parameters of an HDA."""
- message = ''
- error = False
- def __ok_to_edit_metadata( dataset_id ):
- #prevent modifying metadata when dataset is queued or running as input/output
- #This code could be more efficient, i.e. by using mappers, but to prevent slowing down loading a History panel, we'll leave the code here for now
- for job_to_dataset_association in trans.sa_session.query( self.app.model.JobToInputDatasetAssociation ) \
- .filter_by( dataset_id=dataset_id ) \
- .all() \
- + trans.sa_session.query( self.app.model.JobToOutputDatasetAssociation ) \
- .filter_by( dataset_id=dataset_id ) \
- .all():
- if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]:
- return False
- return True
- if hid is not None:
- history = trans.get_history()
- # TODO: hid handling
- data = history.datasets[ int( hid ) - 1 ]
- elif id is not None:
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- else:
- trans.log_event( "Problem loading dataset id %s with history id %s." % ( str( id ), str( hid ) ) )
- return trans.show_error_message( "Problem loading dataset." )
- if data is None:
- trans.log_event( "Problem retrieving dataset id %s with history id." % ( str( id ), str( hid ) ) )
- return trans.show_error_message( "Problem retrieving dataset." )
- if id is not None and data.history.user is not None and data.history.user != trans.user:
- return trans.show_error_message( "This instance of a dataset (%s) in a history does not belong to you." % ( data.id ) )
- current_user_roles = trans.get_current_user_roles()
- if data.history.user and not data.dataset.has_manage_permissions_roles( trans ):
- # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time,
- # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS
- # permission. In this case, we'll reset this permission to the hda user's private role.
- manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
- permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] }
- trans.app.security_agent.set_dataset_permission( data.dataset, permissions )
- if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
- if data.state == trans.model.Dataset.states.UPLOAD:
- return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." )
- params = util.Params( kwd, sanitize=False )
- if params.change:
- # The user clicked the Save button on the 'Change data type' form
- if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
- #prevent modifying datatype when dataset is queued or running as input/output
- if not __ok_to_edit_metadata( data.id ):
- return trans.show_error_message( "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them." )
- trans.app.datatypes_registry.change_datatype( data, params.datatype, set_meta = not trans.app.config.set_metadata_externally )
- trans.sa_session.flush()
- if trans.app.config.set_metadata_externally:
- trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior
- return trans.show_ok_message( "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype ), refresh_frames=['history'] )
- else:
- return trans.show_error_message( "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype ) )
- elif params.save:
- # The user clicked the Save button on the 'Edit Attributes' form
- data.name = params.name
- data.info = params.info
- message = ''
- if __ok_to_edit_metadata( data.id ):
- # The following for loop will save all metadata_spec items
- for name, spec in data.datatype.metadata_spec.items():
- if spec.get("readonly"):
- continue
- optional = params.get("is_"+name, None)
- other = params.get("or_"+name, None)
- if optional and optional == 'true':
- # optional element... == 'true' actually means it is NOT checked (and therefore omitted)
- setattr(data.metadata, name, None)
- else:
- if other:
- setattr( data.metadata, name, other )
- else:
- setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
- data.datatype.after_setting_metadata( data )
- # Sanitize annotation before adding it.
- if params.annotation:
- annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
- self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
- # If setting metadata previously failed and all required elements have now been set, clear the failed state.
- if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
- data._state = None
- trans.sa_session.flush()
- return trans.show_ok_message( "Attributes updated%s" % message, refresh_frames=['history'] )
- else:
- trans.sa_session.flush()
- return trans.show_warn_message( "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata.", refresh_frames=['history'] )
- elif params.detect:
- # The user clicked the Auto-detect button on the 'Edit Attributes' form
- #prevent modifying metadata when dataset is queued or running as input/output
- if not __ok_to_edit_metadata( data.id ):
- return trans.show_error_message( "This dataset is currently being used as input or output. You cannot change metadata until the jobs have completed or you have canceled them." )
- for name, spec in data.metadata.spec.items():
- # We need to be careful about the attributes we are resetting
- if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
- if spec.get( 'default' ):
- setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
- if trans.app.config.set_metadata_externally:
- message = 'Attributes have been queued to be updated'
- trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
- else:
- message = 'Attributes updated'
- data.set_meta()
- data.datatype.after_setting_metadata( data )
- trans.sa_session.flush()
- return trans.show_ok_message( message, refresh_frames=['history'] )
- elif params.convert_data:
- target_type = kwd.get("target_type", None)
- if target_type:
- message = data.datatype.convert_dataset(trans, data, target_type)
- return trans.show_ok_message( message, refresh_frames=['history'] )
- elif params.update_roles_button:
- if not trans.user:
- return trans.show_error_message( "You must be logged in if you want to change permissions." )
- if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
- access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action )
- manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
- # The user associated the DATASET_ACCESS permission on the dataset with 1 or more roles. We
- # need to ensure that they did not associate roles that would cause accessibility problems.
- permissions, in_roles, error, message = \
- trans.app.security_agent.derive_roles_from_access( trans, data.dataset.id, 'root', **kwd )
- if error:
- # Keep the original role associations for the DATASET_ACCESS permission on the dataset.
- permissions[ access_action ] = data.dataset.get_access_roles( trans )
- status = 'error'
- else:
- error = trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
- if error:
- message += error
- status = 'error'
- else:
- message = 'Your changes completed successfully.'
- trans.sa_session.refresh( data.dataset )
- else:
- return trans.show_error_message( "You are not authorized to change this dataset's permissions" )
- if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey:
- # Copy dbkey into metadata, for backwards compatability
- # This looks like it does nothing, but getting the dbkey
- # returns the metadata dbkey unless it is None, in which
- # case it resorts to the old dbkey. Setting the dbkey
- # sets it properly in the metadata
- #### This is likely no longer required, since the dbkey exists entirely within metadata (the old_dbkey field is gone): REMOVE ME?
- data.metadata.dbkey = data.dbkey
- # let's not overwrite the imported datatypes module with the variable datatypes?
- # the built-in 'id' is overwritten in lots of places as well
- ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
- ldatatypes.sort()
- all_roles = trans.app.security_agent.get_legitimate_roles( trans, data.dataset, 'root' )
- if error:
- status = 'error'
- else:
- status = 'done'
- return trans.fill_template( "/dataset/edit_attributes.mako",
- data=data,
- data_annotation=self.get_item_annotation_str( trans.sa_session, trans.user, data ),
- datatypes=ldatatypes,
- current_user_roles=current_user_roles,
- all_roles=all_roles,
- message=message,
- status=status )
- else:
- return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( id ) )
-
- def __delete_dataset( self, trans, id ):
- data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
- if data:
- # Walk up parent datasets to find the containing history
- topmost_parent = data
- while topmost_parent.parent:
- topmost_parent = topmost_parent.parent
- assert topmost_parent in trans.history.datasets, "Data does not belong to current history"
- # Mark deleted and cleanup
- data.mark_deleted()
- data.clear_associated_files()
- trans.log_event( "Dataset id %s marked as deleted" % str(id) )
- if data.parent_id is None and len( data.creating_job_associations ) > 0:
- # Mark associated job for deletion
- job = data.creating_job_associations[0].job
- if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]:
- # Are *all* of the job's other output datasets deleted?
- if job.check_if_output_datasets_deleted():
- job.mark_deleted( self.app.config.get_bool( 'enable_job_running', True ),
- self.app.config.get_bool( 'track_jobs_in_database', False ) )
- self.app.job_manager.job_stop_queue.put( job.id )
- trans.sa_session.flush()
-
- @web.expose
- def delete( self, trans, id = None, show_deleted_on_refresh = False, **kwd):
- if id:
- if isinstance( id, list ):
- dataset_ids = id
- else:
- dataset_ids = [ id ]
- history = trans.get_history()
- for id in dataset_ids:
- try:
- id = int( id )
- except:
- continue
- self.__delete_dataset( trans, id )
- return self.history( trans, show_deleted = show_deleted_on_refresh )
-
- @web.expose
- def delete_async( self, trans, id = None, **kwd):
- if id:
- try:
- id = int( id )
- except:
- return "Dataset id '%s' is invalid" %str( id )
- self.__delete_dataset( trans, id )
- return "OK"
-
- @web.expose
- def purge( self, trans, id = None, show_deleted_on_refresh = False, **kwd ):
- if not trans.app.config.allow_user_dataset_purge:
- return trans.show_error_message( "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator." )
- hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( int( id ) )
- if bool( hda.dataset.active_history_associations or hda.dataset.library_associations ):
- return trans.show_error_message( "Unable to purge: LDDA(s) or active HDA(s) exist" )
- elif hda.dataset.purged:
- return trans.show_error_message( "Unable to purge: dataset is already purged" )
- os.unlink( hda.dataset.file_name )
- if os.path.exists( hda.extra_files_path ):
- shutil.rmtree( hda.extra_files_path )
- hda.dataset.purged = True
- trans.sa_session.add( hda.dataset )
- trans.sa_session.flush()
- return self.history( trans, show_deleted = show_deleted_on_refresh )
-
## ---- History management -----------------------------------------------
@web.expose
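
The dataset_state updater above now receives encoded ids from the page but still tolerates plain integers from older clients. The fallback, condensed from the hunk:

    # Decode an encoded dataset id, falling back to a legacy plain integer id.
    try:
        id = int( trans.app.security.decode_id( encoded_id ) )
    except:
        id = int( encoded_id )
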
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -656,17 +656,17 @@
class LibraryField( BaseField ):
def __init__( self, name, value=None, trans=None ):
self.name = name
- self.ldda = value
+ self.lddas = value
self.trans = trans
def get_html( self, prefix="", disabled=False ):
- if not self.ldda:
- ldda = ""
- text = "Choose a library dataset"
+ if not self.lddas:
+ ldda_ids = ""
+ text = "Select library dataset(s)"
else:
- ldda = self.trans.security.encode_id(self.ldda.id)
- text = self.ldda.name
+ ldda_ids = "||".join( [ self.trans.security.encode_id( ldda.id ) for ldda in self.lddas ] )
+ text = "<br />".join( [ "%s. %s" % (i+1, ldda.name) for i, ldda in enumerate(self.lddas)] )
return '<a href="javascript:void(0);" class="add-librarydataset">%s</a> \
- <input type="hidden" name="%s%s" value="%s">' % ( text, prefix, self.name, escape( str(ldda), quote=True ) )
+ <input type="hidden" name="%s%s" value="%s">' % ( text, prefix, self.name, escape( str(ldda_ids), quote=True ) )
def get_display_text(self):
if self.ldda:
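
LibraryField now carries multiple selected library datasets in a single hidden input, with encoded ids joined by '||'. Only the rendering side is shown in this hunk; a hedged sketch of both directions (the parsing counterpart is an assumption about the consuming code, not part of this diff):

    # Rendering, as in get_html() above:
    value = "||".join( [ trans.security.encode_id( ldda.id ) for ldda in lddas ] )
    # Assumed parsing counterpart (hypothetical, not shown in this diff):
    encoded_ldda_ids = [ v for v in value.split( "||" ) if v ]
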
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -471,6 +471,7 @@
- associate new session with user
- if old session had a history and it was not associated with a user, associate it with the new session,
otherwise associate the current session's history with the user
+ - add the disk usage of the current session to the user's total disk usage
"""
# Set the previous session
prev_galaxy_session = self.galaxy_session
@@ -494,6 +495,10 @@
# If the previous galaxy session had a history, associate it with the new
# session, but only if it didn't belong to a different user.
history = prev_galaxy_session.current_history
+ if prev_galaxy_session.user is None:
+ # Increase the user's disk usage by the amount of the previous history's datasets if they didn't already own it.
+ for hda in history.datasets:
+ user.total_disk_usage += hda.quota_amount( user )
elif self.galaxy_session.current_history:
history = self.galaxy_session.current_history
if not history and \
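
Disk usage is tracked symmetrically in this changeset: the framework change above bills adopted datasets to the user at login, and the _purge() rewrite earlier in the diff credits the space back before marking the HDA purged. Both sides rely on the same accounting call:

    # quota_amount( user ) is the portion of the dataset's disk footprint billable to this user.
    user.total_disk_usage += hda.quota_amount( user )  # login: adopt an anonymous session's history
    user.total_disk_usage -= hda.quota_amount( user )  # purge: release the space
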
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 lib/galaxy/webapps/reports/controllers/system.py
--- a/lib/galaxy/webapps/reports/controllers/system.py
+++ b/lib/galaxy/webapps/reports/controllers/system.py
@@ -112,7 +112,7 @@
except:
pass
message = str( dataset_count ) + " datasets were deleted more than " + str( deleted_datasets_days ) + \
- " days ago, but have not yet been purged, disk space: " + str( disk_space ) + "."
+ " days ago, but have not yet been purged, disk space: " + nice_size( disk_space ) + "."
else:
message = "Enter the number of days."
return str( deleted_datasets_days ), message
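
nice_size() renders a raw byte count human-readably, which is why the report message above switches to it from str(). Assuming the usual galaxy.util helper (exact rounding may differ):

    from galaxy.util import nice_size
    print nice_size( 1457800 )  # roughly '1.4 MB' instead of '1457800'
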
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 scripts/set_user_disk_usage.py
--- /dev/null
+++ b/scripts/set_user_disk_usage.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+
+import os, sys
+from ConfigParser import ConfigParser
+from optparse import OptionParser
+
+parser = OptionParser()
+parser.add_option( '-c', '--config', dest='config', help='Path to Galaxy config file (universe_wsgi.ini)', default='universe_wsgi.ini' )
+parser.add_option( '-u', '--username', dest='username', help='Username of user to update', default='all' )
+parser.add_option( '-e', '--email', dest='email', help='Email address of user to update', default='all' )
+parser.add_option( '--dry-run', dest='dryrun', help='Dry run (show changes but do not save to database)', action='store_true', default=False )
+( options, args ) = parser.parse_args()
+
+def init():
+
+ options.config = os.path.abspath( options.config )
+ if options.username == 'all':
+ options.username = None
+ if options.email == 'all':
+ options.email = None
+
+ os.chdir( os.path.dirname( options.config ) )
+ sys.path.append( 'lib' )
+
+ from galaxy import eggs
+ import pkg_resources
+
+ config = ConfigParser( dict( file_path = 'database/files',
+ database_connection = 'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
+ config.read( os.path.basename( options.config ) )
+
+ from galaxy.model import mapping
+
+ return mapping.init( config.get( 'app:main', 'file_path' ), config.get( 'app:main', 'database_connection' ), create_tables = False )
+
+def quotacheck( sa_session, users ):
+ sa_session.refresh( user )
+ current = user.get_disk_usage()
+ print user.username, '<' + user.email + '> current usage:', str( current ) + ',',
+ new = user.calculate_disk_usage()
+ sa_session.refresh( user )
+ # usage changed while calculating, do it again
+ if user.get_disk_usage() != current:
+ print 'usage changed while calculating, trying again...'
+ return quotacheck( sa_session, user )
+ # yes, still a small race condition between here and the flush
+ if new == current:
+ print 'no change'
+ else:
+ print 'new:', new
+ if not options.dryrun:
+ user.set_disk_usage( new )
+ sa_session.add( user )
+ sa_session.flush()
+
+if __name__ == '__main__':
+ print 'Loading Galaxy model...'
+ model = init()
+ sa_session = model.context.current
+
+ if not options.username and not options.email:
+ user_count = sa_session.query( model.User ).count()
+ print 'Processing %i users...' % user_count
+ for i, user in enumerate( sa_session.query( model.User ).enable_eagerloads( False ).yield_per( 1000 ) ):
+ print '%3i%%' % int( float(i) / user_count * 100 ),
+ quotacheck( sa_session, user )
+ print '100% complete'
+ sys.exit( 0 )
+ elif options.username:
+ user = sa_session.query( model.User ).enable_eagerloads( False ).filter_by( username=options.username ).first()
+ elif options.email:
+ user = sa_session.query( model.User ).enable_eagerloads( False ).filter_by( email=options.email ).first()
+ if not user:
+ print 'User not found'
+ sys.exit( 1 )
+ quotacheck( sa_session, user )
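
Typical invocations of the new script, run from the Galaxy root so the default config path and the relative 'lib' import path resolve (options as defined above):

    python ./scripts/set_user_disk_usage.py --dry-run            # recalculate for all users, report only
    python ./scripts/set_user_disk_usage.py -e user@example.org  # recalculate and save for one user
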
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 static/june_2007_style/blue/panel_layout.css
--- a/static/june_2007_style/blue/panel_layout.css
+++ b/static/june_2007_style/blue/panel_layout.css
@@ -1,4 +1,3 @@
-body,html{overflow:hidden;margin:0;padding:0;width:100%;height:100%;}
body{font:75% "Lucida Grande",verdana,arial,helvetica,sans-serif;background:#eee;}
.unselectable{user-select:none;-moz-user-select:none;-webkit-user-select:none;}
#background{position:absolute;background:#eee;z-index:-1;top:0;left:0;margin:0;padding:0;width:100%;height:100%;}
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 static/june_2007_style/blue/trackster.css
--- a/static/june_2007_style/blue/trackster.css
+++ b/static/june_2007_style/blue/trackster.css
@@ -55,4 +55,4 @@
.icon.more-down{background:url('../images/fugue/arrow-transition-270-bw.png') no-repeat 0px 0px;}
.icon.more-across{background:url('../images/fugue/arrow-transition-bw.png') no-repeat 0px 0px;}
.intro{padding:1em;}
-.intro>.action-button{background-color:#CCC;padding:1em;}
\ No newline at end of file
+.intro > .action-button{background-color:#CCC;padding:1em;}
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 static/june_2007_style/panel_layout.css.tmpl
--- a/static/june_2007_style/panel_layout.css.tmpl
+++ b/static/june_2007_style/panel_layout.css.tmpl
@@ -1,11 +1,3 @@
-body, html {
- overflow: hidden;
- margin: 0;
- padding: 0;
- width: 100%;
- height: 100%;
-}
-
body {
font: 75% "Lucida Grande",verdana,arial,helvetica,sans-serif;
background: ${layout_bg};
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/base_panels.mako
--- a/templates/base_panels.mako
+++ b/templates/base_panels.mako
@@ -19,6 +19,13 @@
<%def name="stylesheets()">
${h.css('base','panel_layout','jquery.rating')}
<style type="text/css">
+ body, html {
+ overflow: hidden;
+ margin: 0;
+ padding: 0;
+ width: 100%;
+ height: 100%;
+ }
#center {
%if not self.has_left_panel:
left: 0;
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako
+++ b/templates/dataset/edit_attributes.mako
@@ -1,5 +1,6 @@
<%inherit file="/base.mako"/><%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/message.mako" name="message_ns" import="javascripts" /><%def name="title()">${_('Edit Dataset Attributes')}</%def>
@@ -9,6 +10,7 @@
<%def name="javascripts()">
${parent.javascripts()}
+ ${message_ns.javascripts()}
${h.js( "galaxy.base", "jquery.autocomplete", "autocomplete_tagging" )}
</%def>
@@ -31,8 +33,7 @@
<div class="toolForm"><div class="toolFormTitle">${_('Edit Attributes')}</div><div class="toolFormBody">
- <form name="edit_attributes" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="edit_attributes" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><label>
Name:
@@ -80,8 +81,7 @@
<input type="submit" name="save" value="${_('Save')}"/></div></form>
- <form name="auto_detect" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="auto_detect" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><div style="float: left; width: 250px; margin-right: 10px;"><input type="submit" name="detect" value="${_('Auto-detect')}"/>
@@ -104,8 +104,7 @@
<div class="toolForm"><div class="toolFormTitle">${_('Convert to new format')}</div><div class="toolFormBody">
- <form name="convert_data" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="convert_data" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><div style="float: left; width: 250px; margin-right: 10px;"><select name="target_type">
@@ -132,8 +131,7 @@
<div class="toolFormTitle">${_('Change data type')}</div><div class="toolFormBody">
%if data.datatype.allow_datatype_change:
- <form name="change_datatype" action="${h.url_for( controller='root', action='edit' )}" method="post">
- <input type="hidden" name="id" value="${data.id}"/>
+ <form name="change_datatype" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post"><div class="form-row"><label>
${_('New Type')}:
@@ -161,7 +159,7 @@
%if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
- ${render_permission_form( data.dataset, data.get_display_name(), h.url_for( controller='root', action='edit', id=data.id ), all_roles )}
+ ${render_permission_form( data.dataset, data.get_display_name(), h.url_for( controller='dataset', action='edit', dataset_id=dataset_id ), all_roles )}
%elif trans.user:
<div class="toolForm"><div class="toolFormTitle">View Permissions</div>
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/dataset/security_common.mako
--- a/templates/dataset/security_common.mako
+++ b/templates/dataset/security_common.mako
@@ -1,11 +1,11 @@
-<%def name="render_select( current_actions, action_key, action, all_roles )">
+<%def name="render_select( current_actions, action_key, action, roles )"><%
import sets
in_roles = sets.Set()
for a in current_actions:
if a.action == action.action:
in_roles.add( a.role )
- out_roles = filter( lambda x: x not in in_roles, all_roles )
+ out_roles = filter( lambda x: x not in in_roles, roles )
%><p><b>${action.action}:</b> ${action.description}
@@ -37,7 +37,7 @@
</%def>
## Any permission ( e.g., 'DATASET_ACCESS' ) included in the do_not_render param will not be rendered on the page.
-<%def name="render_permission_form( obj, obj_name, form_url, all_roles, do_not_render=[] )">
+<%def name="render_permission_form( obj, obj_name, form_url, roles, do_not_render=[], all_roles=[] )"><%
if isinstance( obj, trans.app.model.User ):
current_actions = obj.default_permissions
@@ -109,7 +109,14 @@
%for k, v in permitted_actions:
%if k not in do_not_render:
<div class="form-row">
- ${render_select( current_actions, k, v, all_roles )}
+ ## LIBRARY_ACCESS is a special case because we need to render all roles instead of
+ ## roles derived from the roles associated with LIBRARY_ACCESS.
+ <% render_all_roles = k == 'LIBRARY_ACCESS' %>
+ %if render_all_roles:
+ ${render_select( current_actions, k, v, all_roles )}
+ %else:
+ ${render_select( current_actions, k, v, roles )}
+ %endif
</div>
%endif
%endfor
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/library/common/library_permissions.mako
--- a/templates/library/common/library_permissions.mako
+++ b/templates/library/common/library_permissions.mako
@@ -14,5 +14,5 @@
%endif
%if ( trans.user_is_admin() and cntrller == 'library_admin' ) or trans.app.security_agent.can_manage_library_item( current_user_roles, library ):
- ${render_permission_form( library, library.name, h.url_for( controller='library_common', action='library_permissions', cntrller=cntrller, id=trans.security.encode_id( library.id ), show_deleted=show_deleted ), roles )}
+ ${render_permission_form( library, library.name, h.url_for( controller='library_common', action='library_permissions', cntrller=cntrller, id=trans.security.encode_id( library.id ), show_deleted=show_deleted ), roles, do_not_render=[], all_roles=all_roles )}
%endif
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/root/history.mako
--- a/templates/root/history.mako
+++ b/templates/root/history.mako
@@ -1,3 +1,5 @@
+<%namespace file="/message.mako" import="render_msg" />
+
<% _=n_ %><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
@@ -105,6 +107,11 @@
});
};
+// Update the message for async operations
+function render_message(message, status) {
+ $("div#message-container").html( "<div class=\"" + status + "message\">" + message + "</div><br/>" );
+}
+
$(function() {
var historywrapper = $("div.historyItemWrapper");
init_history_items(historywrapper);
@@ -115,8 +122,8 @@
$(this).click( function() {
$( '#historyItem-' + data_id + "> div.historyItemTitleBar" ).addClass( "spinner" );
$.ajax({
- url: "${h.url_for( action='delete_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Delete failed" ); },
+ url: "${h.url_for( controller='dataset', action='delete_async', dataset_id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { render_message( "Dataset deletion failed", "error" ); },
success: function(msg) {
if (msg === "OK") {
%if show_deleted:
@@ -133,7 +140,7 @@
%endif
$(".tipsy").remove();
} else {
- alert( "Delete failed" );
+ render_message( "Dataset deletion failed", "error" );
}
}
});
@@ -175,8 +182,8 @@
$(this).click( function() {
$( '#historyItem-' + data_id + " > div.historyItemTitleBar" ).addClass( "spinner" );
$.ajax({
- url: "${h.url_for( controller='dataset', action='undelete_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Undelete failed" ) },
+ url: "${h.url_for( controller='dataset', action='undelete_async', dataset_id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { render_message( "Dataset undeletion failed", "error" ); },
success: function() {
var to_update = {};
to_update[data_id] = "none";
@@ -193,8 +200,8 @@
$(this).click( function() {
$( '#historyItem-' + data_id + " > div.historyItemTitleBar" ).addClass( "spinner" );
$.ajax({
- url: "${h.url_for( controller='dataset', action='purge_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Removal from disk failed" ) },
+ url: "${h.url_for( controller='dataset', action='purge_async', dataset_id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { render_message( "Dataset removal from disk failed", "error" ) },
success: function() {
var to_update = {};
to_update[data_id] = "none";
@@ -286,7 +293,7 @@
// Updater
updater(
- ${ h.to_json_string( dict([(data.id, data.state) for data in reversed( datasets ) if data.visible and data.state not in TERMINAL_STATES]) ) }
+ ${ h.to_json_string( dict([(trans.app.security.encode_id(data.id), data.state) for data in reversed( datasets ) if data.visible and data.state not in TERMINAL_STATES]) ) }
);
// Navigate to a dataset.
@@ -339,11 +346,11 @@
if ( val.force_history_refresh ){
force_history_refresh = true;
}
- delete tracked_datasets[ parseInt(id) ];
+ delete tracked_datasets[id];
// When a dataset becomes terminal, check for changes in history disk size
check_history_size = true;
} else {
- tracked_datasets[ parseInt(id) ] = val.state;
+ tracked_datasets[id] = val.state;
}
});
if ( force_history_refresh ) {
@@ -486,6 +493,12 @@
</div>
%endif
+<div id="message-container">
+ %if message:
+ ${render_msg( message, status )}
+ %endif
+</div>
+
%if not datasets:
<div class="infomessagesmall" id="emptyHistoryMessage">
@@ -495,7 +508,7 @@
## Render requested datasets, ordered from newest to oldest
%for data in reversed( datasets ):
%if data.visible or show_hidden:
- <div class="historyItemContainer" id="historyItemContainer-${data.id}">
+ <div class="historyItemContainer" id="historyItemContainer-${trans.app.security.encode_id(data.id)}">
${render_dataset( data, data.hid, show_deleted_on_refresh = show_deleted, for_editing = True )}
</div>
%endif
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -39,9 +39,9 @@
can_edit = not ( data.deleted or data.purged )
%>
%if not trans.user_is_admin() and not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
- <div class="historyItemWrapper historyItem historyItem-${data_state} historyItem-noPermission" id="historyItem-${data.id}">
+ <div class="historyItemWrapper historyItem historyItem-${data_state} historyItem-noPermission" id="historyItem-${dataset_id}">
%else:
- <div class="historyItemWrapper historyItem historyItem-${data_state}" id="historyItem-${data.id}">
+ <div class="historyItemWrapper historyItem historyItem-${data_state}" id="historyItem-${dataset_id}">
%endif
%if data.deleted or data.purged or data.dataset.purged:
@@ -51,9 +51,9 @@
%else:
This dataset has been deleted.
%if for_editing:
- Click <a href="${h.url_for( controller='dataset', action='undelete', id=data.id )}" class="historyItemUndelete" id="historyItemUndeleter-${data.id}" target="galaxy_history">here</a> to undelete
+ Click <a href="${h.url_for( controller='dataset', action='undelete', dataset_id=dataset_id )}" class="historyItemUndelete" id="historyItemUndeleter-${dataset_id}" target="galaxy_history">here</a> to undelete
%if trans.app.config.allow_user_dataset_purge:
- or <a href="${h.url_for( controller='dataset', action='purge', id=data.id )}" class="historyItemPurge" id="historyItemPurger-${data.id}" target="galaxy_history">here</a> to immediately remove it from disk.
+ or <a href="${h.url_for( controller='dataset', action='purge', dataset_id=dataset_id )}" class="historyItemPurge" id="historyItemPurger-${dataset_id}" target="galaxy_history">here</a> to immediately remove it from disk.
%else:
it.
%endif
@@ -64,7 +64,7 @@
%if data.visible is False:
<div class="warningmessagesmall">
- <strong>This dataset has been hidden. Click <a href="${h.url_for( controller='dataset', action='unhide', id=data.id )}" class="historyItemUnhide" id="historyItemUnhider-${data.id}" target="galaxy_history">here</a> to unhide.</strong>
+ <strong>This dataset has been hidden. Click <a href="${h.url_for( controller='dataset', action='unhide', dataset_id=dataset_id )}" class="historyItemUnhide" id="historyItemUnhider-${dataset_id}" target="galaxy_history">here</a> to unhide.</strong></div>
%endif
@@ -110,13 +110,13 @@
%elif data.purged:
<span title="Cannot edit attributes of datasets removed from disk" class="icon-button edit_disabled tooltip"></span>
%else:
- <a class="icon-button edit tooltip" title="Edit attributes" href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main"></a>
+ <a class="icon-button edit tooltip" title="Edit attributes" href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main"></a>
%endif
%endif
%endif
%if for_editing:
%if can_edit:
- <a class="icon-button delete tooltip" title="Delete" href="${h.url_for( action='delete', id=data.id, show_deleted_on_refresh=show_deleted_on_refresh )}" id="historyItemDeleter-${data.id}"></a>
+ <a class="icon-button delete tooltip" title="Delete" href="${h.url_for( controller='dataset', action='delete', dataset_id=dataset_id, show_deleted_on_refresh=show_deleted_on_refresh )}" id="historyItemDeleter-${dataset_id}"></a>
%else:
<span title="Dataset is already deleted" class="icon-button delete_disabled tooltip"></span>
%endif
@@ -184,7 +184,7 @@
<div class="warningmessagesmall" style="margin: 4px 0 4px 0">
An error occurred setting the metadata for this dataset.
%if can_edit:
- You may be able to <a href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main">set it manually or retry auto-detection</a>.
+ You may be able to <a href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main">set it manually or retry auto-detection</a>.
%endif
</div>
%endif
@@ -193,7 +193,7 @@
format: <span class="${data.ext}">${data.ext}</span>,
database:
%if data.dbkey == '?' and can_edit:
- <a href="${h.url_for( controller='root', action='edit', id=data.id )}" target="galaxy_main">${_(data.dbkey)}</a>
+ <a href="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" target="galaxy_main">${_(data.dbkey)}</a>
%else:
<span class="${data.dbkey}">${_(data.dbkey)}</span>
%endif
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -2,12 +2,10 @@
<%namespace file="/base_panels.mako" import="overlay" /><%def name="stylesheets()">
- ${h.css( "autocomplete_tagging", "panel_layout", "base", "library" )}
+ ${h.css( "autocomplete_tagging", "base", "panel_layout", "library" )}
<style type="text/css">
- body, html {
- overflow: auto;
- width: auto;
- height: auto;
+ html, body {
+ background-color: #fff;
}
</style></%def>
@@ -81,12 +79,18 @@
hide_modal();
},
"Select": function() {
+ var names = [];
+ var ids = [];
+ var counter = 1;
$('input[name=ldda_ids]:checked').each(function() {
var name = $.trim( $(this).siblings("div").find("a").text() );
var id = $(this).val();
- link.text(name);
- link.siblings("input[type=hidden]").val(id);
+ names.push( counter + ". " + name );
+ counter += 1;
+ ids.push(id);
});
+ link.html( names.join("<br/>") );
+ link.siblings("input[type=hidden]").val( ids.join("||") );
hide_modal();
}
}
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/user/index.mako
--- a/templates/user/index.mako
+++ b/templates/user/index.mako
@@ -22,6 +22,7 @@
<li><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller, webapp='community' )}">${_('Manage your information')}</a></li>
%endif
</ul>
+ <p>You are currently using <strong>${trans.user.get_disk_usage( nice_size=True )}</strong> of disk space in this Galaxy instance.</p>
%else:
%if not message:
<p>${n_('You are currently not logged in.')}</p>
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -50,7 +50,7 @@
onActivate: function(dtnode) {
var cell = $("#file_contents");
var selected_value;
- if (dtnode.data.key == 'root') {
+ if (dtnode.data.key == 'root') {
selected_value = "${repository.repo_path}/";
} else {
selected_value = dtnode.data.key;
@@ -85,4 +85,4 @@
clone_str = '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
%>
hg clone <a href="${clone_str}">${clone_str}</a>
-</%def>
\ No newline at end of file
+</%def>
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/webapps/community/repository/manage_repository.mako
--- a/templates/webapps/community/repository/manage_repository.mako
+++ b/templates/webapps/community/repository/manage_repository.mako
@@ -177,12 +177,12 @@
%for tool_dict in tool_dicts:
<tr><td>
- <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${tool_dict[ 'id' ]}-popup">
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${tool_dict[ 'id' ].replace( ' ', '_' )}-popup">
<a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">
${tool_dict[ 'name' ]}
</a></div>
- <div popupmenu="tool-${tool_dict[ 'id' ]}-popup">
+ <div popupmenu="tool-${tool_dict[ 'id' ].replace( ' ', '_' )}-popup">
<a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), changeset_revision=repository.tip, tool_id=tool_dict[ 'id' ] )}">View all metadata for this tool</a>
</div>
</td>
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/webapps/community/repository/view_repository.mako
--- a/templates/webapps/community/repository/view_repository.mako
+++ b/templates/webapps/community/repository/view_repository.mako
@@ -170,12 +170,12 @@
%for tool_dict in tool_dicts:
<tr><td>
- <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${repository.id}-popup">
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="tool-${tool_dict[ 'id' ].replace( ' ', '_' )}-popup">
<a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_dict[ 'tool_config' ] )}">
${tool_dict[ 'name' ]}
</a></div>
- <div popupmenu="tool-${repository.id}-popup">
+ <div popupmenu="tool-${tool_dict[ 'id' ].replace( ' ', '_' )}-popup">
<a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), changeset_revision=repository.tip, tool_id=tool_dict[ 'id' ] )}">View all metadata for this tool</a>
</div>
</td>
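Note: both this hunk and the manage_repository.mako hunk above derive the popup's DOM id from the tool id with spaces mapped to underscores; view_repository.mako additionally stops reusing the same repository.id for every row, which produced duplicate ids. The sanitizing idea in isolation (make_dom_id is a hypothetical helper for illustration):

    def make_dom_id(tool_id):
        # Spaces are not selector-safe inside a DOM id, so map them to '_'.
        return 'tool-%s-popup' % tool_id.replace(' ', '_')

    assert make_dom_id('Lift Over 1') == 'tool-Lift_Over_1-popup'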
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/webapps/community/repository/view_tool_metadata.mako
--- a/templates/webapps/community/repository/view_tool_metadata.mako
+++ b/templates/webapps/community/repository/view_tool_metadata.mako
@@ -111,7 +111,7 @@
</div>
<div class="form-row">
<label>Command:</label>
- ${tool.command}
+ <pre>${tool.command}</pre>
<div style="clear: both"></div>
</div>
<div class="form-row">
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 templates/workflow/editor.mako
--- a/templates/workflow/editor.mako
+++ b/templates/workflow/editor.mako
@@ -18,17 +18,17 @@
</%def>

<%def name="javascripts()">
-
+
${parent.javascripts()}
-
+
<!--[if lt IE 9]><script type='text/javascript' src="${h.url_for('/static/scripts/excanvas.js')}"></script><![endif]-->
${h.js( "jquery",
"jquery.tipsy",
- "jquery.event.drag",
- "jquery.event.drop",
+ "jquery.event.drag",
+ "jquery.event.drop",
"jquery.event.hover",
"jquery.form",
"json2",
@@ -43,7 +43,7 @@
window.lt_ie_7 = true;
</script><![endif]-->
-
+
<script type='text/javascript'>
// Globals
workflow = null;
@@ -51,10 +51,10 @@
active_ajax_call = false;
var galaxy_async = new GalaxyAsync();
galaxy_async.set_func_url(galaxy_async.set_user_pref, "${h.url_for( controller='user', action='set_user_pref_async' )}");
-
+
// jQuery onReady
$( function() {
-
+
if ( window.lt_ie_7 ) {
show_modal(
"Browser not supported",
@@ -62,7 +62,7 @@
);
return;
}
-
+
// Init tool options.
%if trans.app.toolbox_search.enabled:
make_popupmenu( $("#tools-options-button"), {
@@ -71,7 +71,7 @@
show_tool_search = False
if trans.user:
show_tool_search = trans.user.preferences.get( "workflow.show_tool_search", "True" )
-
+
if show_tool_search == "True":
initial_text = "Hide Search"
else:
@@ -85,7 +85,6 @@
pref_value = "False";
menu_option_text = "Search Tools";
menu.toggle();
-
// Reset search.
reset_tool_search(true);
} else {
@@ -94,14 +93,12 @@
menu_option_text = "Hide Search";
menu.toggle();
}
-
// Update menu option.
$("#tools-options-button-menu").find("li").eq(0).text(menu_option_text);
-
galaxy_async.set_user_pref("workflow.show_tool_search", pref_value);
}
});
-
+
// Init searching.
$("#tool-search-query").click( function (){
$(this).focus();
@@ -110,7 +107,6 @@
.keyup( function () {
// Remove italics.
$(this).css("font-style", "normal");
-
// Don't update if same value as last time
if ( this.value.length < 3 ) {
reset_tool_search(false);
@@ -127,7 +123,6 @@
// Start a new ajax-request in X ms
$("#search-spinner").show();
this.timer = setTimeout(function () {
-
$.get("${h.url_for( controller='root', action='tool_search' )}", { query: q }, function (data) {
// input.removeClass(config.loadingClass);
// Show live-search if results and search-term aren't empty
@@ -139,17 +134,15 @@
if ( data.length != 0 ) {
// Map tool ids to element ids and join them.
var s = $.map( data, function( n, i ) { return "#link-" + n; } ).join( ", " );
-
// First pass to show matching tools and their parents.
$(s).each( function() {
// Add class to denote match.
$(this).parent().addClass("search_match");
$(this).parent().show().parent().parent().show().parent().show();
});
-
// Hide labels that have no visible children.
$(".toolPanelLabel").each( function() {
- var this_label = $(this);
+ var this_label = $(this);
var next = this_label.next();
var no_visible_tools = true;
// Look through tools following label and, if none are visible, hide label.
@@ -174,11 +167,11 @@
}
this.lastValue = this.value;
});
- %endif
-
+ %endif
+
// Canvas overview management
canvas_manager = new CanvasManager( $("#canvas-viewport"), $("#overview") );
-
+
// Initialize workflow state
reset();
// Load the datatype info
@@ -225,7 +218,7 @@
});
}
});
-
+
// For autosave purposes
$(document).ajaxStart( function() {
active_ajax_call = true;
@@ -233,14 +226,14 @@
active_ajax_call = false;
});
});
-
+
$(document).ajaxError( function ( e, x ) {
// console.log( e, x );
var message = x.responseText || x.statusText || "Could not connect to server";
show_modal( "Server error", message, { "Ignore error" : hide_modal } );
return false;
});
-
+
make_popupmenu( $("#workflow-options-button"), {
"Save" : save_current_workflow,
##"Create New" : create_new_workflow_dialog,
@@ -250,7 +243,7 @@
##"Load a Workflow" : load_workflow,
"Close": close_editor
});
-
+
function edit_workflow_outputs(){
workflow.clear_active_node();
$('.right-content').hide();
@@ -297,14 +290,14 @@
scroll_to_nodes();
canvas_manager.draw_overview();
}
-
+
function edit_workflow_attributes() {
workflow.clear_active_node();
$('.right-content').hide();
$('#edit-attributes').show();
}
-
+
// On load, set the size to the pref stored in local storage if it exists
overview_size = $.jStorage.get("overview-size");
if (overview_size !== undefined) {
@@ -313,14 +306,14 @@
height: overview_size
});
}
-
+
// Show viewport on load unless pref says it's off
if ($.jStorage.get("overview-off")) {
hide_overview();
} else {
show_overview();
}
-
+
// Stores the size of the overview into local storage when it's resized
$("#overview-border").bind( "dragend", function( e, d ) {
var op = $(this).offsetParent();
@@ -329,19 +322,19 @@
op.height() - ( d.offsetY - opo.top ) );
$.jStorage.set("overview-size", new_size + "px");
});
-
+
function show_overview() {
$.jStorage.set("overview-off", false);
$("#overview-border").css("right", "0px");
$("#close-viewport").css("background-position", "0px 0px");
}
-
+
function hide_overview() {
$.jStorage.set("overview-off", true);
$("#overview-border").css("right", "20000px");
$("#close-viewport").css("background-position", "12px 0px");
}
-
+
// Lets the overview be toggled visible and invisible, adjusting the arrows accordingly
$("#close-viewport").click( function() {
if ( $("#overview-border").css("right") === "0px" ) {
@@ -350,19 +343,19 @@
show_overview();
}
});
-
+
// Unload handler
window.onbeforeunload = function() {
if ( workflow && workflow.has_changes ) {
return "There are unsaved changes to your workflow which will be lost.";
}
};
-
+
// Tool menu
$( "div.toolSectionBody" ).hide();
$( "div.toolSectionTitle > span" ).wrap( "<a href='#'></a>" );
var last_expanded = null;
- $( "div.toolSectionTitle" ).each( function() {
+ $( "div.toolSectionTitle" ).each( function() {
var body = $(this).next( "div.toolSectionBody" );
$(this).click( function() {
if ( body.is( ":hidden" ) ) {
@@ -379,7 +372,7 @@
// Rename async.
async_save_text("workflow-name", "workflow-name", "${h.url_for( action='rename_async', id=trans.security.encode_id(stored.id) )}", "new_name");
-
+
// Tag async. Simply have the workflow edit element generate a click on the tag element to activate tagging.
$('#workflow-tag').click( function() {
$('.tag-area').click();
@@ -396,7 +389,7 @@
}
workflow = new Workflow( $("#canvas-container") );
}
-
+
function scroll_to_nodes() {
var cv = $("#canvas-viewport");
var cc = $("#canvas-container");
@@ -413,7 +406,7 @@
}
cc.css( { left: left, top: top } );
}
-
+
// Add a new step to the workflow by tool id
function add_node_for_tool( id, title ) {
var node = prebuild_node( 'tool', title, id );
@@ -422,7 +415,7 @@
canvas_manager.draw_overview();
workflow.activate_node( node );
$.ajax( {
- url: "${h.url_for( action='get_new_module_info' )}",
+ url: "${h.url_for( action='get_new_module_info' )}",
data: { type: "tool", tool_id: id, "_": "true" },
global: false,
dataType: "json",
@@ -438,7 +431,7 @@
}
});
}
-
+
function add_node_for_module( type, title ) {
node = prebuild_node( type, title );
workflow.add_node( node );
@@ -446,8 +439,8 @@
canvas_manager.draw_overview();
workflow.activate_node( node );
$.ajax( {
- url: "${h.url_for( action='get_new_module_info' )}",
- data: { type: type, "_": "true" },
+ url: "${h.url_for( action='get_new_module_info' )}",
+ data: { type: type, "_": "true" },
dataType: "json",
success: function( data ) {
node.init_field_data( data );
@@ -479,11 +472,11 @@
workflow.active_form_has_changes = true;
});
}
-
+
function display_pja_list(){
return "${ActionBox.get_add_list()}";
}
-
+
function display_file_list(node){
addlist = "<select id='node_data_list' name='node_data_list'>";
for (var out_terminal in node.output_terminals){
@@ -492,7 +485,7 @@
addlist += "</select>";
return addlist;
}
-
+
function new_pja(action_type, target, node){
if (node.post_job_actions === undefined){
//New tool node, set up dict.
@@ -511,7 +504,7 @@
return false;
}
}
-
+
function show_workflow_parameters(){
var parameter_re = /\$\{.+?\}/g;
var workflow_parameters = [];
@@ -532,7 +525,7 @@
if (arg_matches){
matches = matches.concat(arg_matches);
}
- });
+ });
}
});
if (matches){
@@ -541,7 +534,7 @@
workflow_parameters.push(element);
}
});
- }
+ }
}
});
if (workflow_parameters && workflow_parameters.length !== 0){
@@ -555,7 +548,7 @@
wf_parm_box.hide();
}
}
-
+
function show_form_for_tool( text, node ) {
$('.right-content').hide();
$("#right-content").show().html( text );
@@ -632,7 +625,7 @@
});
});
}
-
+
var close_editor = function() {
<% next_url = h.url_for( controller='workflow', action='index' ) %>
workflow.check_changes_in_active_form();
@@ -655,7 +648,7 @@
window.document.location = "${next_url}";
}
};
-
+
var save_current_workflow = function ( eventObj, success_callback ) {
show_modal( "Saving workflow", "progress" );
workflow.check_changes_in_active_form();
@@ -677,7 +670,7 @@
"_": "true"
},
dataType: 'json',
- success: function( data ) {
+ success: function( data ) {
var body = $("<div></div>").text( data.message );
if ( data.errors ) {
body.addClass( "warningmark" );
@@ -704,7 +697,7 @@
}
});
};
-
+
// We bind to ajaxStop because of auto-saving, since the form submission ajax
// call needs to be completed so that the new data is saved
if (active_ajax_call) {
@@ -718,7 +711,7 @@
savefn(success_callback);
}
};
-
+
</script>
</%def>
@@ -732,7 +725,7 @@
<style type="text/css">
body { margin: 0; padding: 0; overflow: hidden; }
-
+
/* Wider right panel */
#center { right: 309px; }
#right-border { right: 300px; }
@@ -744,11 +737,11 @@
## top: 2.5em;
## margin-top: 7px;
## }
-
+
#left {
background: #C1C9E5 url(${h.url_for('/static/style/menu_bg.png')}) top repeat-x;
}
-
+
div.toolMenu {
margin: 5px;
margin-left: 10px;
@@ -785,8 +778,8 @@
.right-content {
margin: 5px;
}
-
- canvas { position: absolute; z-index: 10; }
+
+ canvas { position: absolute; z-index: 10; }
canvas.dragging { position: absolute; z-index: 1000; }
.input-terminal { width: 12px; height: 12px; background: url(${h.url_for('/static/style/workflow_circle_open.png')}); position: absolute; top: 50%; margin-top: -6px; left: -6px; z-index: 1500; }
.output-terminal { width: 12px; height: 12px; background: url(${h.url_for('/static/style/workflow_circle_open.png')}); position: absolute; top: 50%; margin-top: -6px; right: -6px; z-index: 1500; }
@@ -795,12 +788,12 @@
## .input-terminal-hover { background: yellow; border: solid black 1px; }
.unselectable { -moz-user-select: none; -khtml-user-select: none; user-select: none; }
img { border: 0; }
-
+
div.buttons img {
width: 16px; height: 16px;
cursor: pointer;
}
-
+
## Extra styles for the representation of a tool on the canvas (looks like
## a tiny tool form)
div.toolFormInCanvas {
@@ -809,18 +802,18 @@
## min-width: 130px;
margin: 6px;
}
-
+
div.toolForm-active {
z-index: 1001;
border: solid #8080FF 4px;
margin: 3px;
}
-
+
div.toolFormTitle {
cursor: move;
min-height: 16px;
}
-
+
div.titleRow {
font-weight: bold;
border-bottom: dotted gray 1px;
@@ -830,7 +823,7 @@
div.form-row {
position: relative;
}
-
+
div.tool-node-error div.toolFormTitle {
background: #FFCCCC;
border-color: #AA6666;
@@ -838,14 +831,14 @@
div.tool-node-error {
border-color: #AA6666;
}
-
+
#canvas-area {
position: absolute;
top: 0; left: 305px; bottom: 0; right: 0;
border: solid red 1px;
overflow: none;
}
-
+
.form-row {
}
@@ -855,14 +848,14 @@
.form-row-clear {
clear: both;
}
-
+
div.rule {
height: 0;
border: none;
border-bottom: dotted black 1px;
margin: 0 5px;
}
-
+
.callout {
position: absolute;
z-index: 10000;
@@ -871,21 +864,21 @@
.pjaForm {
margin-bottom:10px;
}
-
+
.pjaForm .toolFormBody{
padding:10px;
}
-
+
.pjaForm .toolParamHelp{
padding:5px;
}
-
+
.panel-header-button-group {
margin-right: 5px;
padding-right: 5px;
border-right: solid gray 1px;
}
-
+
</style>
</%def>
@@ -945,7 +938,7 @@
${n_('Tools')}
</div></div>
-
+
<div class="unified-panel-body" style="overflow: auto;">
<div class="toolMenu">
## Tool search.
@@ -953,7 +946,6 @@
show_tool_search = False
if trans.user:
show_tool_search = trans.user.preferences.get( "workflow.show_tool_search", "True" )
-
if show_tool_search == "True":
display = "block"
else:
@@ -963,7 +955,6 @@
<input type="text" name="query" value="search tools" id="tool-search-query" style="width: 100%; font-style:italic; font-size: inherit" autocomplete="off"/>
<img src="${h.url_for('/static/images/loading_small_white_bg.gif')}" id="search-spinner" style="display: none; position: absolute; right: 0; top: 5px;"/>
</div>
-
<div class="toolSectionList">
%for key, val in app.toolbox.tool_panel.items():
<div class="toolSectionWrapper">
@@ -1007,10 +998,10 @@
<a href="#" onclick="add_node_for_module( 'data_input', 'Input Dataset' )">Input dataset</a></div></div>
- </div>
+ </div>
</div>
</div>
-
+
</%def>

<%def name="center_panel()">
@@ -1023,7 +1014,6 @@
Workflow Canvas | ${h.to_unicode( stored.name ) | h}
</div></div>
-
<div class="unified-panel-body">
<div id="canvas-viewport" style="width: 100%; height: 100%; position: absolute; overflow: hidden; background: #EEEEEE; background: white url(${h.url_for('/static/images/light_gray_grid.gif')}) repeat;">
<div id="canvas-container" style="position: absolute; width: 100%; height: 100%;"></div>
@@ -1079,7 +1069,7 @@
<div class="toolParamHelp">Apply tags to make it easy to search for and find items with the same tag.</div></div>
## Workflow annotation.
- ## Annotation elt.
+ ## Annotation elt.
<div id="workflow-annotation-area" class="form-row">
<label>Annotation / Notes:</label>
<div id="workflow-annotation" class="tooltip editable-text" original-title="Click to edit annotation">
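Note: most of the editor.mako hunks above are trailing-whitespace cleanup. One piece of the surrounding code worth a gloss: show_workflow_parameters() (visible as context above) collects ${...} placeholders with the non-greedy regex /\$\{.+?\}/g. The same scan in Python, shown only to make the matching behavior explicit:

    import re

    # Same non-greedy scan as the JavaScript /\$\{.+?\}/g above.
    PARAM_RE = re.compile(r'\$\{.+?\}')

    def find_workflow_parameters(text):
        # .+? stops at the first closing brace, so adjacent placeholders
        # are reported separately rather than swallowed in one match.
        return PARAM_RE.findall(text)

    assert find_workflow_parameters('x=${a} y=${b}') == ['${a}', '${b}']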
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 test-data/5_mult_liftover_mapped.bed
--- /dev/null
+++ b/test-data/5_mult_liftover_mapped.bed
@@ -0,0 +1,132 @@
+chr7 116197893 116197920 CCDS5763.1_cds_0_0_chr7_115444713_f 1 +
+chr7 116222929 116223015 CCDS5763.1_cds_1_0_chr7_115468539_f 1 +
+chr7 116237938 116238188 CCDS5763.1_cds_2_0_chr7_115483025_f 1 +
+chr7 116239076 116239412 CCDS5763.1_cds_3_0_chr7_115484166_f 1 +
+chr7 116240675 116240891 CCDS5763.1_cds_4_0_chr7_115485765_f 1 +
+chr7 116241233 116241396 CCDS5763.1_cds_5_0_chr7_115486323_f 1 +
+chr7 116246193 116246382 CCDS5763.1_cds_6_0_chr7_115491299_f 1 +
+chr7 116222929 116223015 CCDS5764.1_cds_0_0_chr7_115468539_f 1 +
+chr7 116237938 116238188 CCDS5764.1_cds_1_0_chr7_115483025_f 1 +
+chr7 116239076 116239412 CCDS5764.1_cds_2_0_chr7_115484166_f 1 +
+chr7 116240675 116240891 CCDS5764.1_cds_3_0_chr7_115485765_f 1 +
+chr7 116241233 116241396 CCDS5764.1_cds_4_0_chr7_115486323_f 1 +
+chr7 116246193 116246382 CCDS5764.1_cds_5_0_chr7_115491299_f 1 +
+chr7 116495075 116495225 CCDS5766.1_cds_0_0_chr7_115733787_f 1 +
+chr7 116495546 116495734 CCDS5766.1_cds_1_0_chr7_115734265_f 1 +
+chr7 116501260 116501411 CCDS5766.1_cds_2_0_chr7_115739976_f 1 +
+chr7 116495075 116495225 CCDS5765.1_cds_0_0_chr7_115733787_f 1 +
+chr7 116501260 116501449 CCDS5765.1_cds_1_0_chr7_115739976_f 1 +
+chr7 116522088 116522118 CCDS5767.1_cds_0_0_chr7_115759068_f 1 +
+chr7 116523550 116523715 CCDS5767.1_cds_1_0_chr7_115760530_f 1 +
+chr7 116562770 116563112 CCDS5767.1_cds_2_0_chr7_115792951_f 1 +
+chr7 116872904 116872943 CCDS5768.1_cds_0_0_chr7_116096617_f 1 +
+chr7 116899050 116899114 CCDS5768.1_cds_1_0_chr7_116122132_f 1 +
+chr7 116903933 116903985 CCDS5768.1_cds_2_0_chr7_116126999_f 1 +
+chr7 116909709 116909773 CCDS5768.1_cds_3_0_chr7_116132777_f 1 +
+chr7 116915110 116915317 CCDS5768.1_cds_4_0_chr7_116138182_f 1 +
+chr7 116917198 116917278 CCDS5768.1_cds_5_0_chr7_116140268_f 1 +
+chr7 116921242 116921321 CCDS5768.1_cds_6_0_chr7_116144238_f 1 +
+chr7 116923078 116923150 CCDS5768.1_cds_7_0_chr7_116146074_f 1 +
+chr7 116927214 116927277 CCDS5768.1_cds_8_0_chr7_116150065_f 1 +
+chr7 116928880 116929021 CCDS5768.1_cds_9_0_chr7_116151732_f 1 +
+chr7 116964784 116964932 CCDS5770.1_cds_0_0_chr7_116187546_f 1 +
+chr7 117108894 117108977 CCDS5770.1_cds_1_0_chr7_116333767_f 1 +
+chr7 117128694 117128854 CCDS5770.1_cds_2_0_chr7_116353566_f 1 +
+chr7 117138899 117138954 CCDS5770.1_cds_3_0_chr7_116363798_f 1 +
+chr7 117139597 117139713 CCDS5770.1_cds_4_0_chr7_116364496_f 1 +
+chr7 117140988 117141064 CCDS5770.1_cds_5_0_chr7_116365890_f 1 +
+chr7 117143259 117143328 CCDS5770.1_cds_6_0_chr7_116368129_f 1 +
+chr7 117145226 117145381 CCDS5770.1_cds_7_0_chr7_116370086_f 1 +
+chr7 117147574 117147672 CCDS5770.1_cds_8_0_chr7_116372440_f 1 +
+chr7_random 2679423 2679538 CCDS5770.1_cds_9_0_chr7_116404867_f 1 +
+chr7 117201671 117201751 CCDS5770.1_cds_10_0_chr7_116423326_f 1 +
+chr7 117203227 117203330 CCDS5770.1_cds_11_0_chr7_116424839_f 1 +
+chr7 117222109 117222260 CCDS5770.1_cds_12_0_chr7_116443792_f 1 +
+chr7 117231432 117231525 CCDS5770.1_cds_13_0_chr7_116453089_f 1 +
+chr7 117234203 117234343 CCDS5770.1_cds_14_0_chr7_116455928_f 1 +
+chr7 117235141 117235261 CCDS5770.1_cds_15_0_chr7_116456866_f 1 +
+chr7 116964784 116964932 CCDS5769.1_cds_0_0_chr7_116187546_f 1 +
+chr7 117108894 117108977 CCDS5769.1_cds_1_0_chr7_116333767_f 1 +
+chr7 117128694 117128854 CCDS5769.1_cds_2_0_chr7_116353566_f 1 +
+chr7 117138899 117138954 CCDS5769.1_cds_3_0_chr7_116363798_f 1 +
+chr7 117139597 117139713 CCDS5769.1_cds_4_0_chr7_116364496_f 1 +
+chr7 117140988 117141064 CCDS5769.1_cds_5_0_chr7_116365890_f 1 +
+chr7 117145226 117145381 CCDS5769.1_cds_6_0_chr7_116370086_f 1 +
+chr7 117147574 117147672 CCDS5769.1_cds_7_0_chr7_116372440_f 1 +
+chr7_random 2679423 2679538 CCDS5769.1_cds_8_0_chr7_116404867_f 1 +
+chr7 117201671 117201751 CCDS5769.1_cds_9_0_chr7_116423326_f 1 +
+chr7 117203227 117203330 CCDS5769.1_cds_10_0_chr7_116424839_f 1 +
+chr7 117222109 117222260 CCDS5769.1_cds_11_0_chr7_116443792_f 1 +
+chr7 117231432 117231525 CCDS5769.1_cds_12_0_chr7_116453089_f 1 +
+chr7 117234203 117234343 CCDS5769.1_cds_13_0_chr7_116455928_f 1 +
+chr7 117241962 117242058 CCDS5769.1_cds_14_0_chr7_116463767_f 1 +
+chr7 117291331 117291561 CCDS5771.1_cds_0_0_chr7_116512160_r 1 -
+chr7 117310742 117311007 CCDS5771.1_cds_1_0_chr7_116531617_r 1 -
+chr7 117328536 117328629 CCDS5771.1_cds_2_0_chr7_116549076_r 1 -
+chr7 117333743 117333970 CCDS5771.1_cds_3_0_chr7_116554572_r 1 -
+chr7 117336084 117336167 CCDS5771.1_cds_4_0_chr7_116556912_r 1 -
+chr7 117382797 117382950 CCDS5772.1_cds_0_0_chr7_116597601_r 1 -
+chr7 117386552 117386666 CCDS5772.1_cds_1_0_chr7_116601357_r 1 -
+chr7 117387812 117387919 CCDS5772.1_cds_2_0_chr7_116602617_r 1 -
+chr7 117397672 117397782 CCDS5772.1_cds_3_0_chr7_116613943_r 1 -
+chr7 117398745 117398802 CCDS5772.1_cds_4_0_chr7_116615016_r 1 -
+chr7 117399808 117399884 CCDS5772.1_cds_5_0_chr7_116616074_r 1 -
+chr7 117400724 117400849 CCDS5772.1_cds_6_0_chr7_116616991_r 1 -
+chr7 117402466 117402602 CCDS5772.1_cds_7_0_chr7_116618731_r 1 -
+chr7 117403442 117403554 CCDS5772.1_cds_8_0_chr7_116619703_r 1 -
+chr7 117438281 117438393 CCDS5772.1_cds_9_0_chr7_116654168_r 1 -
+chr7 117440357 117440480 CCDS5772.1_cds_10_0_chr7_116656242_r 1 -
+chr7 117444948 117445048 CCDS5772.1_cds_11_0_chr7_116660841_r 1 -
+chr7 117445468 117445573 CCDS5772.1_cds_12_0_chr7_116661361_r 1 -
+chr7 117499706 117499759 CCDS5773.1_cds_0_0_chr7_116714100_f 1 +
+chr7 117523820 117523931 CCDS5773.1_cds_1_0_chr7_116738258_f 1 +
+chr7 117528597 117528706 CCDS5773.1_cds_2_0_chr7_116743039_f 1 +
+chr7 117550464 117550680 CCDS5773.1_cds_3_0_chr7_116764904_f 1 +
+chr7 117553829 117553919 CCDS5773.1_cds_4_0_chr7_116768281_f 1 +
+chr7 117554806 117554970 CCDS5773.1_cds_5_0_chr7_116769253_f 1 +
+chr7 117556111 117556237 CCDS5773.1_cds_6_0_chr7_116770553_f 1 +
+chr7 117559659 117559905 CCDS5773.1_cds_7_0_chr7_116774105_f 1 +
+chr7 117561568 117561660 CCDS5773.1_cds_8_0_chr7_116776021_f 1 +
+chr7 117568199 117568382 CCDS5773.1_cds_9_0_chr7_116782646_f 1 +
+chr7 117579005 117579197 CCDS5773.1_cds_10_0_chr7_116793469_f 1 +
+chr7 117609945 117610041 CCDS5773.1_cds_11_0_chr7_116821744_f 1 +
+chr7 117612558 117612645 CCDS5773.1_cds_12_0_chr7_116824358_f 1 +
+chr7 117614292 117615016 CCDS5773.1_cds_13_0_chr7_116825939_f 1 +
+chr7 117617279 117617408 CCDS5773.1_cds_14_0_chr7_116828935_f 1 +
+chr7 117625173 117625211 CCDS5773.1_cds_15_0_chr7_116836831_f 1 +
+chr7 117625879 117626130 CCDS5773.1_cds_16_0_chr7_116837537_f 1 +
+chr7 117628986 117629066 CCDS5773.1_cds_17_0_chr7_116840679_f 1 +
+chr7 117632825 117632976 CCDS5773.1_cds_18_0_chr7_116844524_f 1 +
+chr7 117633887 117634115 CCDS5773.1_cds_19_0_chr7_116845586_f 1 +
+chr7 117636923 117637024 CCDS5773.1_cds_20_0_chr7_116848618_f 1 +
+chr7 117649505 117649753 CCDS5773.1_cds_21_0_chr7_116861527_f 1 +
+chr7 117664146 117664302 CCDS5773.1_cds_22_0_chr7_116876443_f 1 +
+chr7 117674548 117674638 CCDS5773.1_cds_23_0_chr7_116886847_f 1 +
+chr7 117686685 117686858 CCDS5773.1_cds_24_0_chr7_116898693_f 1 +
+chr7 117687456 117687562 CCDS5773.1_cds_25_0_chr7_116899464_f 1 +
+chr7 117688902 117689103 CCDS5773.1_cds_26_0_chr7_116900913_f 1 +
+chr7 117734744 117734996 CCDS5774.1_cds_0_0_chr7_116945542_r 1 -
+chr7 117741224 117741326 CCDS5774.1_cds_1_0_chr7_116952023_r 1 -
+chr7 117743450 117743638 CCDS5774.1_cds_4_0_chr7_116958552_r 1 -
+chr7 117743957 117744164 CCDS5774.1_cds_5_0_chr7_116959057_r 1 -
+chr7 117746996 117747175 CCDS5774.1_cds_6_0_chr7_116962094_r 1 -
+chr7 117753794 117753981 CCDS5774.1_cds_7_0_chr7_116968918_r 1 -
+chr7 117754149 117754302 CCDS5774.1_cds_8_0_chr7_116969274_r 1 -
+chr7 117764699 117764799 CCDS5774.1_cds_9_0_chr7_116979836_r 1 -
+chr7 117764881 117764968 CCDS5774.1_cds_10_0_chr7_116980018_r 1 -
+chr7 117775103 117775183 CCDS5774.1_cds_11_0_chr7_116990560_r 1 -
+chr7 117776423 117776519 CCDS5774.1_cds_12_0_chr7_116991880_r 1 -
+chr7 117779436 117779712 CCDS5774.1_cds_13_0_chr7_116994440_r 1 -
+chr7 117786062 117786180 CCDS5774.1_cds_14_0_chr7_117001064_r 1 -
+chr7 117796458 117796713 CCDS5774.1_cds_15_0_chr7_117011516_r 1 -
+chr7 117799369 117799520 CCDS5774.1_cds_16_0_chr7_117014446_r 1 -
+chr7 117801790 117801890 CCDS5774.1_cds_17_0_chr7_117016867_r 1 -
+chr7 117803186 117803390 CCDS5774.1_cds_18_0_chr7_117018256_r 1 -
+chr7 117810065 117811719 CCDS5774.1_cds_19_0_chr7_117025133_r 1 -
+chr7 117829639 117829865 CCDS5774.1_cds_20_0_chr7_117044770_r 1 -
+chr7 117880732 117880840 CCDS5774.1_cds_21_0_chr7_117095214_r 1 -
+chr7 117893163 117893244 CCDS5774.1_cds_22_0_chr7_117107340_r 1 -
+chr5 133682646 133682808 CCDS4149.1_cds_0_0_chr5_131424299_f 1 +
+chr5 133682906 133682948 CCDS4149.1_cds_1_0_chr5_131424559_f 1 +
+chr5 133684249 133684339 CCDS4149.1_cds_2_0_chr5_131425904_f 1 +
+chr5 133684463 133684505 CCDS4149.1_cds_3_0_chr5_131426118_f 1 +
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 test-data/5_mult_liftover_unmapped.bed
--- /dev/null
+++ b/test-data/5_mult_liftover_unmapped.bed
@@ -0,0 +1,4 @@
+#Deleted in new
+chr7 116953508 116953641 CCDS5774.1_cds_2_0_chr7_116953509_r 0 -
+#Deleted in new
+chr7 116955071 116955135 CCDS5774.1_cds_3_0_chr7_116955072_r 0 -
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 test-data/assembly_stats.tabular
--- /dev/null
+++ b/test-data/assembly_stats.tabular
@@ -0,0 +1,1 @@
+5 2589 524 517 518 5
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 test-data/cuffcompare_in1_liftover_mapped.bed
--- /dev/null
+++ b/test-data/cuffcompare_in1_liftover_mapped.bed
@@ -0,0 +1,86 @@
+chr1 Cufflinks transcript 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+chr1 Cufflinks exon 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+chr1 Cufflinks transcript 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053";
+chr1 Cufflinks exon 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; exon_number "1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053";
+chr1 Cufflinks transcript 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139";
+chr1 Cufflinks exon 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; exon_number "1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139";
+chr1 Cufflinks transcript 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701";
+chr1 Cufflinks exon 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; exon_number "1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701";
+chr1 Cufflinks transcript 3128657 3128728 1000 . . gene_id "CUFF.19"; transcript_id "CUFF.19.1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000";
+chr1 Cufflinks exon 3128657 3128728 1000 . . gene_id "CUFF.19"; transcript_id "CUFF.19.1"; exon_number "1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000";
+chr1 Cufflinks transcript 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000";
+chr1 Cufflinks exon 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; exon_number "1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000";
+chr1 Cufflinks transcript 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr1 Cufflinks exon 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; exon_number "1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr1 Cufflinks transcript 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070";
+chr1 Cufflinks exon 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; exon_number "1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070";
+chr1 Cufflinks transcript 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710";
+chr1 Cufflinks exon 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; exon_number "1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710";
+chr1 Cufflinks transcript 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000";
+chr1 Cufflinks exon 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; exon_number "1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000";
+chr1 Cufflinks transcript 3109111 3109241 1000 . . gene_id "CUFF.35"; transcript_id "CUFF.35.1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206";
+chr1 Cufflinks exon 3109111 3109241 1000 . . gene_id "CUFF.35"; transcript_id "CUFF.35.1"; exon_number "1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206";
+chr1 Cufflinks transcript 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302";
+chr1 Cufflinks exon 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; exon_number "1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302";
+chr1 Cufflinks transcript 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3110280 3110358 1000 . . gene_id "CUFF.43"; transcript_id "CUFF.43.1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000";
+chr1 Cufflinks exon 3110280 3110358 1000 . . gene_id "CUFF.43"; transcript_id "CUFF.43.1"; exon_number "1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000";
+chr1 Cufflinks transcript 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529";
+chr1 Cufflinks exon 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; exon_number "1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529";
+chr1 Cufflinks transcript 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3114116 3114142 1000 . . gene_id "CUFF.55"; transcript_id "CUFF.55.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3114116 3114142 1000 . . gene_id "CUFF.55"; transcript_id "CUFF.55.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3201794 3201848 1000 . . gene_id "CUFF.65"; transcript_id "CUFF.65.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks exon 3201794 3201848 1000 . . gene_id "CUFF.65"; transcript_id "CUFF.65.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks transcript 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769";
+chr1 Cufflinks exon 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; exon_number "1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769";
+chr1 Cufflinks transcript 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857";
+chr1 Cufflinks exon 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; exon_number "1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857";
+chr1 Cufflinks transcript 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020";
+chr1 Cufflinks exon 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; exon_number "1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020";
+chr1 Cufflinks transcript 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks exon 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks transcript 3220641 3220667 1000 . . gene_id "CUFF.75"; transcript_id "CUFF.75.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3220641 3220667 1000 . . gene_id "CUFF.75"; transcript_id "CUFF.75.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462";
+chr1 Cufflinks exon 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; exon_number "1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462";
+chr1 Cufflinks transcript 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr1 Cufflinks exon 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr1 Cufflinks transcript 3285858 3285953 1000 . . gene_id "CUFF.83"; transcript_id "CUFF.83.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750";
+chr1 Cufflinks exon 3285858 3285953 1000 . . gene_id "CUFF.83"; transcript_id "CUFF.83.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750";
+chr1 Cufflinks transcript 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041";
+chr1 Cufflinks exon 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; exon_number "1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041";
+chr1 Cufflinks transcript 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824";
+chr1 Cufflinks exon 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; exon_number "1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824";
+chr1 Cufflinks transcript 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3365246 3365284 1000 . . gene_id "CUFF.93"; transcript_id "CUFF.93.1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr1 Cufflinks exon 3365246 3365284 1000 . . gene_id "CUFF.93"; transcript_id "CUFF.93.1"; exon_number "1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr1 Cufflinks transcript 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr1 Cufflinks exon 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; exon_number "1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr1 Cufflinks transcript 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881";
+chr1 Cufflinks exon 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; exon_number "1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881";
+chr1 Cufflinks transcript 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
+chr1 Cufflinks exon 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 test-data/cuffcompare_in1_liftover_unmapped.bed
--- /dev/null
+++ b/test-data/cuffcompare_in1_liftover_unmapped.bed
@@ -0,0 +1,28 @@
+# Deleted in new
+chr1 Cufflinks transcript 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+# Deleted in new
+chr1 Cufflinks exon 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; exon_number "1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+# Partially deleted in new
+chr1 Cufflinks transcript 3243019 3243079 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+# Partially deleted in new
+chr1 Cufflinks exon 3243019 3243079 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; exon_number "1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+# Partially deleted in new
+chr1 Cufflinks transcript 3242634 3242923 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034";
+# Partially deleted in new
+chr1 Cufflinks exon 3242634 3242923 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034";
+# Partially deleted in new
+chr1 Cufflinks transcript 3191877 3191945 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174";
+# Partially deleted in new
+chr1 Cufflinks exon 3191877 3191945 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; exon_number "1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174";
+# Partially deleted in new
+chr1 Cufflinks transcript 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+# Partially deleted in new
+chr1 Cufflinks exon 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+# Deleted in new
+chr1 Cufflinks transcript 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857";
+# Deleted in new
+chr1 Cufflinks exon 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; exon_number "1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857";
+# Deleted in new
+chr1 Cufflinks transcript 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+# Deleted in new
+chr1 Cufflinks exon 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
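Note: the new *_unmapped.bed fixtures interleave liftOver's "#Deleted in new" / "# Partially deleted in new" comments with the features they explain. A small sketch of pairing each reason with the feature line that follows it (read_unmapped is a hypothetical helper, assuming this comment-then-feature layout):

    def read_unmapped(path):
        # Pair each '# Deleted in new' style comment with the feature line
        # that follows it; comment lines carry the reason, nothing else.
        pairs, reason = [], None
        for line in open(path):
            line = line.rstrip('\n')
            if line.startswith('#'):
                reason = line.lstrip('# ')
            elif line:
                pairs.append((reason, line))
        return pairs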
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 test-data/cuffcompare_in1_mult_liftover_mapped.bed
--- /dev/null
+++ b/test-data/cuffcompare_in1_mult_liftover_mapped.bed
@@ -0,0 +1,92 @@
+chr1 Cufflinks transcript 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+chr1 Cufflinks exon 3022555 3022596 1000 . . gene_id "CUFF.1"; transcript_id "CUFF.1.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+chr1 Cufflinks transcript 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3117334 3117360 1000 . . gene_id "CUFF.5"; transcript_id "CUFF.5.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053";
+chr1 Cufflinks exon 3117031 3117199 1000 . . gene_id "CUFF.7"; transcript_id "CUFF.7.1"; exon_number "1"; FPKM "9.9991171124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.998234"; cov "0.639053";
+chr1 Cufflinks transcript 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139";
+chr1 Cufflinks exon 3118118 3118521 1000 . . gene_id "CUFF.9"; transcript_id "CUFF.9.1"; exon_number "1"; FPKM "17.7768957078"; frac "1.000000"; conf_lo "9.153835"; conf_hi "26.399957"; cov "1.136139";
+chr1 Cufflinks transcript 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3118713 3118739 1000 . . gene_id "CUFF.11"; transcript_id "CUFF.11.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3121789 3121867 1000 . . gene_id "CUFF.13"; transcript_id "CUFF.13.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3128503 3128581 1000 . . gene_id "CUFF.15"; transcript_id "CUFF.15.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701";
+chr1 Cufflinks exon 3129386 3129482 1000 . . gene_id "CUFF.17"; transcript_id "CUFF.17.1"; exon_number "1"; FPKM "8.7105710927"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.029179"; cov "0.556701";
+chr1 Cufflinks transcript 3128657 3128728 1000 . . gene_id "CUFF.19"; transcript_id "CUFF.19.1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000";
+chr1 Cufflinks exon 3128657 3128728 1000 . . gene_id "CUFF.19"; transcript_id "CUFF.19.1"; exon_number "1"; FPKM "29.3376873610"; frac "1.000000"; conf_lo "3.097262"; conf_hi "55.578113"; cov "1.875000";
+chr1 Cufflinks transcript 3162123 3162179 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+chr1 Cufflinks exon 3162123 3162179 1000 . . gene_id "CUFF.21"; transcript_id "CUFF.21.1"; exon_number "1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+chr1 Cufflinks transcript 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000";
+chr1 Cufflinks exon 3162445 3162500 1000 . . gene_id "CUFF.23"; transcript_id "CUFF.23.1"; exon_number "1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.571145"; cov "1.500000";
+chr1 Cufflinks transcript 3161752 3162025 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034";
+chr1 Cufflinks exon 3161752 3162025 1000 . . gene_id "CUFF.25"; transcript_id "CUFF.25.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "5.354270"; conf_hi "23.781089"; cov "0.931034";
+chr1 Cufflinks transcript 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr1 Cufflinks exon 3176998 3177034 1000 . . gene_id "CUFF.27"; transcript_id "CUFF.27.1"; exon_number "1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr1 Cufflinks transcript 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070";
+chr1 Cufflinks exon 3107191 3107612 1000 . . gene_id "CUFF.29"; transcript_id "CUFF.29.1"; exon_number "1"; FPKM "107.1032192108"; frac "1.000000"; conf_lo "71.402146"; conf_hi "142.804292"; cov "6.845070";
+chr1 Cufflinks transcript 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710";
+chr1 Cufflinks exon 3107844 3107874 1000 . . gene_id "CUFF.31"; transcript_id "CUFF.31.1"; exon_number "1"; FPKM "122.6504607091"; frac "1.000000"; conf_lo "40.883487"; conf_hi "204.417435"; cov "7.838710";
+chr1 Cufflinks transcript 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000";
+chr1 Cufflinks exon 3108025 3108051 1000 . . gene_id "CUFF.33"; transcript_id "CUFF.33.1"; exon_number "1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000";
+chr1 Cufflinks transcript 3109111 3109241 1000 . . gene_id "CUFF.35"; transcript_id "CUFF.35.1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206";
+chr1 Cufflinks exon 3109111 3109241 1000 . . gene_id "CUFF.35"; transcript_id "CUFF.35.1"; exon_number "1"; FPKM "96.7471827476"; frac "1.000000"; conf_lo "61.420107"; conf_hi "132.074259"; cov "6.183206";
+chr1 Cufflinks transcript 3109449 3109512 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174";
+chr1 Cufflinks exon 3109449 3109512 1000 . . gene_id "CUFF.37"; transcript_id "CUFF.37.1"; exon_number "1"; FPKM "104.0850125502"; frac "1.000000"; conf_lo "53.596365"; conf_hi "154.573660"; cov "6.652174";
+chr1 Cufflinks transcript 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302";
+chr1 Cufflinks exon 3109989 3110041 1000 . . gene_id "CUFF.39"; transcript_id "CUFF.39.1"; exon_number "1"; FPKM "23.9129829055"; frac "1.000000"; conf_lo "0.000000"; conf_hi "51.525317"; cov "1.528302";
+chr1 Cufflinks transcript 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks exon 3110098 3110176 1000 . . gene_id "CUFF.41"; transcript_id "CUFF.41.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr1 Cufflinks transcript 3110280 3110358 1000 . . gene_id "CUFF.43"; transcript_id "CUFF.43.1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000";
+chr1 Cufflinks exon 3110280 3110358 1000 . . gene_id "CUFF.43"; transcript_id "CUFF.43.1"; exon_number "1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000";
+chr1 Cufflinks transcript 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529";
+chr1 Cufflinks exon 3110488 3110589 1000 . . gene_id "CUFF.45"; transcript_id "CUFF.45.1"; exon_number "1"; FPKM "20.7089557842"; frac "1.000000"; conf_lo "2.186303"; conf_hi "39.231609"; cov "1.323529";
+chr1 Cufflinks transcript 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3111332 3111358 1000 . . gene_id "CUFF.49"; transcript_id "CUFF.49.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3112113 3112139 1000 . . gene_id "CUFF.51"; transcript_id "CUFF.51.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3112479 3112505 1000 . . gene_id "CUFF.53"; transcript_id "CUFF.53.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3114116 3114142 1000 . . gene_id "CUFF.55"; transcript_id "CUFF.55.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3114116 3114142 1000 . . gene_id "CUFF.55"; transcript_id "CUFF.55.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3114273 3114299 1000 . . gene_id "CUFF.57"; transcript_id "CUFF.57.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks exon 3114373 3114399 1000 . . gene_id "CUFF.59"; transcript_id "CUFF.59.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr1 Cufflinks transcript 3201794 3201848 1000 . . gene_id "CUFF.65"; transcript_id "CUFF.65.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks exon 3201794 3201848 1000 . . gene_id "CUFF.65"; transcript_id "CUFF.65.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks transcript 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769";
+chr1 Cufflinks exon 3211077 3211141 1000 . . gene_id "CUFF.67"; transcript_id "CUFF.67.1"; exon_number "1"; FPKM "12.9988522461"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.382005"; cov "0.830769";
+chr1 Cufflinks transcript 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857";
+chr1 Cufflinks exon 3211528 3211611 1000 . . gene_id "CUFF.69"; transcript_id "CUFF.69.1"; exon_number "1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857";
+chr1 Cufflinks transcript 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020";
+chr1 Cufflinks exon 3211677 3211774 1000 . . gene_id "CUFF.71"; transcript_id "CUFF.71.1"; exon_number "1"; FPKM "8.6216877142"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.814595"; cov "0.551020";
+chr1 Cufflinks transcript 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks exon 3220199 3220253 1000 . . gene_id "CUFF.73"; transcript_id "CUFF.73.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr1 Cufflinks transcript 3220641 3220667 1000 . . gene_id "CUFF.75"; transcript_id "CUFF.75.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3220641 3220667 1000 . . gene_id "CUFF.75"; transcript_id "CUFF.75.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462";
+chr1 Cufflinks exon 3240464 3240515 1000 . . gene_id "CUFF.77"; transcript_id "CUFF.77.1"; exon_number "1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462";
+chr1 Cufflinks transcript 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3277601 3277627 1000 . . gene_id "CUFF.79"; transcript_id "CUFF.79.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr1 Cufflinks exon 3285318 3285381 1000 . . gene_id "CUFF.81"; transcript_id "CUFF.81.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr1 Cufflinks transcript 3285858 3285953 1000 . . gene_id "CUFF.83"; transcript_id "CUFF.83.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750";
+chr1 Cufflinks exon 3285858 3285953 1000 . . gene_id "CUFF.83"; transcript_id "CUFF.83.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750";
+chr1 Cufflinks transcript 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3289268 3289294 1000 . . gene_id "CUFF.85"; transcript_id "CUFF.85.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041";
+chr1 Cufflinks exon 3289466 3289514 1000 . . gene_id "CUFF.87"; transcript_id "CUFF.87.1"; exon_number "1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041";
+chr1 Cufflinks transcript 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824";
+chr1 Cufflinks exon 3300382 3300432 1000 . . gene_id "CUFF.89"; transcript_id "CUFF.89.1"; exon_number "1"; FPKM "16.5671646274"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.996674"; cov "1.058824";
+chr1 Cufflinks transcript 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks exon 3317446 3317472 1000 . . gene_id "CUFF.91"; transcript_id "CUFF.91.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr1 Cufflinks transcript 3365246 3365284 1000 . . gene_id "CUFF.93"; transcript_id "CUFF.93.1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr1 Cufflinks exon 3365246 3365284 1000 . . gene_id "CUFF.93"; transcript_id "CUFF.93.1"; exon_number "1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr1 Cufflinks transcript 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr1 Cufflinks exon 3377607 3377633 1000 . . gene_id "CUFF.95"; transcript_id "CUFF.95.1"; exon_number "1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr1 Cufflinks transcript 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881";
+chr1 Cufflinks exon 3381259 3381317 1000 . . gene_id "CUFF.97"; transcript_id "CUFF.97.1"; exon_number "1"; FPKM "21.4811541355"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.285454"; cov "1.372881";
+chr1 Cufflinks transcript 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
+chr1 Cufflinks exon 3381404 3381474 1000 . . gene_id "CUFF.99"; transcript_id "CUFF.99.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 test-data/cuffcompare_in1_mult_liftover_unmapped.bed
--- /dev/null
+++ b/test-data/cuffcompare_in1_mult_liftover_unmapped.bed
@@ -0,0 +1,16 @@
+# Deleted in new
+chr1 Cufflinks transcript 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+# Deleted in new
+chr1 Cufflinks exon 3111546 3111576 1000 . . gene_id "CUFF.3"; transcript_id "CUFF.3.1"; exon_number "1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+# Partially deleted in new
+chr1 Cufflinks transcript 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+# Partially deleted in new
+chr1 Cufflinks exon 3194186 3194226 1000 . . gene_id "CUFF.47"; transcript_id "CUFF.47.1"; exon_number "1"; FPKM "20.6079364877"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.751960"; cov "1.317073";
+# Deleted in new
+chr1 Cufflinks transcript 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857";
+# Deleted in new
+chr1 Cufflinks exon 3277191 3277218 1000 . . gene_id "CUFF.61"; transcript_id "CUFF.61.1"; exon_number "1"; FPKM "45.2638604998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "97.530065"; cov "2.892857";
+# Deleted in new
+chr1 Cufflinks transcript 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+# Deleted in new
+chr1 Cufflinks exon 3278237 3278263 1000 . . gene_id "CUFF.63"; transcript_id "CUFF.63.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -474,7 +474,7 @@
elem = data_list[-1]
hid = int( elem.get('hid') )
self.assertTrue( hid )
- self.visit_page( "edit?hid=%s" % hid )
+ self.visit_page( "dataset/edit?hid=%s" % hid )
for subpatt in patt.split():
tc.find(subpatt)
def delete_history_item( self, hda_id, strings_displayed=[] ):
@@ -483,7 +483,7 @@
hda_id = int( hda_id )
except:
raise AssertionError, "Invalid hda_id '%s' - must be int" % hda_id
- self.visit_url( "%s/root/delete?show_deleted_on_refresh=False&id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/delete?show_deleted_on_refresh=False" % ( self.url, self.security.encode_id( hda_id ) ) )
for check_str in strings_displayed:
self.check_page_for_string( check_str )
def undelete_history_item( self, hda_id, strings_displayed=[] ):
@@ -492,7 +492,7 @@
hda_id = int( hda_id )
except:
raise AssertionError, "Invalid hda_id '%s' - must be int" % hda_id
- self.visit_url( "%s/dataset/undelete?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/undelete" % ( self.url, self.security.encode_id( hda_id ) ) )
for check_str in strings_displayed:
self.check_page_for_string( check_str )
def display_history_item( self, hda_id, strings_displayed=[] ):
@@ -511,7 +511,7 @@
strings_displayed=[], strings_not_displayed=[] ):
"""Edit history_dataset_association attribute information"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
submit_required = False
self.check_page_for_string( 'Edit Attributes' )
if new_name:
@@ -545,9 +545,9 @@
def auto_detect_metadata( self, hda_id ):
"""Auto-detect history_dataset_association metadata"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
self.check_page_for_string( 'This will inspect the dataset and attempt' )
- tc.fv( 'auto_detect', 'id', hda_id )
+ tc.fv( 'auto_detect', 'detect', 'Auto-detect' )
tc.submit( 'detect' )
try:
self.check_page_for_string( 'Attributes have been queued to be updated' )
@@ -559,7 +559,7 @@
def convert_format( self, hda_id, target_type ):
"""Convert format of history_dataset_association"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
self.check_page_for_string( 'This will inspect the dataset and attempt' )
tc.fv( 'convert_data', 'target_type', target_type )
tc.submit( 'convert_data' )
@@ -569,7 +569,7 @@
def change_datatype( self, hda_id, datatype ):
"""Change format of history_dataset_association"""
self.home()
- self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
self.check_page_for_string( 'This will change the datatype of the existing dataset but' )
tc.fv( 'change_datatype', 'datatype', datatype )
tc.submit( 'change' )
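These twill test updates track the move from raw integer dataset ids in URLs (root/edit?id=N) to ids encoded with self.security.encode_id. For reference, the encoding idiom looks roughly like the following standalone sketch; the Blowfish cipher, the "!" padding, and the id_secret value here are assumptions, not a copy of Galaxy's actual SecurityHelper:

    # Toy version of the id-encoding helper; Galaxy's real one lives in
    # lib/galaxy/web/security and is keyed by the instance's id_secret.
    from Crypto.Cipher import Blowfish

    class ToySecurityHelper( object ):
        def __init__( self, id_secret ):
            self.cipher = Blowfish.new( id_secret )
        def encode_id( self, obj_id ):
            # pad the id out to the 8-byte Blowfish block size, then hex-encode
            s = str( obj_id )
            s = ( "!" * ( 8 - len( s ) % 8 ) ) + s
            return self.cipher.encrypt( s ).encode( 'hex' )
        def decode_id( self, encoded ):
            return int( self.cipher.decrypt( encoded.decode( 'hex' ) ).lstrip( "!" ) )

    security = ToySecurityHelper( 'changethisinproduction' )   # made-up secret
    print "/datasets/%s/edit" % security.encode_id( 42 )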
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 test/functional/test_get_data.py
--- a/test/functional/test_get_data.py
+++ b/test/functional/test_get_data.py
@@ -124,7 +124,7 @@
self.upload_file( '454Score.png' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
- self.check_history_for_string( "The uploaded file contains inappropriate content" )
+ self.check_history_for_string( "454Score.png" )
def test_0055_upload_file( self ):
"""Test uploading lped composite datatype file, manually setting the file format"""
# Logged in as admin_user
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py
+++ b/test/functional/test_history_functions.py
@@ -664,7 +664,7 @@
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
.first()
self.home()
- self.visit_url( "%s/root/delete?show_deleted_on_refresh=False&id=%s" % ( self.url, str( latest_hda.id ) ) )
+ self.delete_history_item( str( latest_hda.id ) )
self.check_history_for_string( 'Your history is empty' )
self.home()
self.visit_url( "%s/history/?show_deleted=True" % self.url )
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 tools/data_source/upload.py
--- a/tools/data_source/upload.py
+++ b/tools/data_source/upload.py
@@ -14,9 +14,18 @@
from galaxy.datatypes.images import Pdf
from galaxy.datatypes.registry import Registry
from galaxy import util
+from galaxy.datatypes.util.image_util import *
from galaxy.util.json import *
try:
+ import Image as PIL
+except ImportError:
+ try:
+ from PIL import Image as PIL
+ except:
+ PIL = None
+
+try:
import bz2
except:
bz2 = None
@@ -51,16 +60,12 @@
return d
def check_bam( file_path ):
return Bam().sniff( file_path )
-
def check_sff( file_path ):
return Sff().sniff( file_path )
-
def check_pdf( file_path ):
return Pdf().sniff( file_path )
-
def check_bigwig( file_path ):
return BigWig().sniff( file_path )
-
def check_bigbed( file_path ):
return BigBed().sniff( file_path )
def parse_outputs( args ):
@@ -102,8 +107,16 @@
dataset.is_multi_byte = util.is_multi_byte( codecs.open( dataset.path, 'r', 'utf-8' ).read( 100 ) )
except UnicodeDecodeError, e:
dataset.is_multi_byte = False
+ # Is dataset an image?
+ image = check_image( dataset.path )
+ if image:
+ if not PIL:
+ image = None
+ # get_image_ext() returns None if not a supported image type
+ ext = get_image_ext( dataset.path, image )
+ data_type = ext
# Is dataset content multi-byte?
- if dataset.is_multi_byte:
+ elif dataset.is_multi_byte:
data_type = 'multi-byte char'
ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
# Is dataset content supported sniffable binary?
@@ -122,7 +135,7 @@
elif check_bigbed( dataset.path ):
ext = 'bigbed'
data_type = 'bigbed'
- else:
+ if not data_type:
# See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
is_gzipped, is_valid = check_gzip( dataset.path )
if is_gzipped and not is_valid:
@@ -314,7 +327,6 @@
if datatype.dataset_content_needs_grooming( output_path ):
# Groom the dataset content if necessary
datatype.groom_dataset_content( output_path )
-
def add_composite_file( dataset, registry, json_file, output_path, files_path ):
if dataset.composite_files:
os.mkdir( files_path )
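The guarded import added at the top of upload.py is the usual two-step PIL fallback. In isolation the idiom is (a sketch, separate from the tool itself):

    # Prefer the classic top-level PIL module name, fall back to the
    # packaged layout, and disable image sniffing if neither imports.
    try:
        import Image as PIL
    except ImportError:
        try:
            from PIL import Image as PIL
        except ImportError:
            PIL = None

    def image_sniffing_enabled():
        return PIL is not None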
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 tools/extract/liftOver_wrapper.py
--- a/tools/extract/liftOver_wrapper.py
+++ b/tools/extract/liftOver_wrapper.py
@@ -34,15 +34,27 @@
out_handle.close()
return fname
-if len( sys.argv ) != 7:
- stop_err( "USAGE: prog input out_file1 out_file2 input_dbkey output_dbkey minMatch" )
+if len( sys.argv ) < 9:
+ stop_err( "USAGE: prog input out_file1 out_file2 input_dbkey output_dbkey infile_type minMatch multiple <minChainT><minChainQ><minSizeQ>" )
infile = sys.argv[1]
outfile1 = sys.argv[2]
outfile2 = sys.argv[3]
in_dbkey = sys.argv[4]
mapfilepath = sys.argv[5]
-minMatch = sys.argv[6]
+infile_type = sys.argv[6]
+gff_option = ""
+if infile_type == "gff":
+ gff_option = "-gff "
+minMatch = sys.argv[7]
+multiple = int(sys.argv[8])
+multiple_option = ""
+if multiple:
+ minChainT = sys.argv[9]
+ minChainQ = sys.argv[10]
+ minSizeQ = sys.argv[11]
+ multiple_option = " -multiple -minChainT=%s -minChainQ=%s -minSizeQ=%s " %(minChainT,minChainQ,minSizeQ)
+
try:
assert float(minMatch)
except:
@@ -55,7 +67,8 @@
stop_err( "%s mapping is not currently available." % ( mapfilepath.split('/')[-1].split('.')[0] ) )
safe_infile = safe_bed_file(infile)
-cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null"
+cmd_line = "liftOver " + gff_option + "-minMatch=" + str(minMatch) + multiple_option + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null"
+
try:
# have to nest try-except in try-finally to handle 2.4
try:
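With a GFF input and multiple-output mode enabled, the wrapper now assembles a liftOver command line like the one below (file names and parameter values are made up for illustration):

    # Illustrative values only; in the wrapper these come from sys.argv.
    gff_option = "-gff "
    multiple_option = " -multiple -minChainT=500 -minChainQ=500 -minSizeQ=0 "
    cmd_line = "liftOver " + gff_option + "-minMatch=0.10" + multiple_option + \
               "in.gtf hg18ToPanTro2.over.chain mapped.out unmapped.out > /dev/null"
    print cmd_line
    # liftOver -gff -minMatch=0.10 -multiple -minChainT=500 -minChainQ=500 -minSizeQ=0 in.gtf hg18ToPanTro2.over.chain mapped.out unmapped.out > /dev/null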
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 tools/extract/liftOver_wrapper.xml
--- a/tools/extract/liftOver_wrapper.xml
+++ b/tools/extract/liftOver_wrapper.xml
@@ -1,8 +1,21 @@
-<tool id="liftOver1" name="Convert genome coordinates" version="1.0.2">
+<tool id="liftOver1" name="Convert genome coordinates" version="1.0.3"><description> between assemblies and genomes</description>
- <command interpreter="python">liftOver_wrapper.py $input "$out_file1" "$out_file2" $dbkey $to_dbkey $minMatch</command>
+ <command interpreter="python">
+ liftOver_wrapper.py
+ $input
+ "$out_file1"
+ "$out_file2"
+ $dbkey
+ $to_dbkey
+ #if isinstance( $input.datatype, $__app__.datatypes_registry.get_datatype_by_extension('gff').__class__) or isinstance( $input.datatype, $__app__.datatypes_registry.get_datatype_by_extension('gtf').__class__):
+ "gff"
+ #else:
+ "interval"
+ #end if
+ $minMatch ${multiple.choice} ${multiple.minChainT} ${multiple.minChainQ} ${multiple.minSizeQ}
+ </command><inputs>
- <param format="interval" name="input" type="data" label="Convert coordinates of">
+ <param format="interval,gff,gtf" name="input" type="data" label="Convert coordinates of"><validator type="unspecified_build" /><validator type="dataset_metadata_in_file" filename="liftOver.loc" metadata_name="dbkey" metadata_column="0" message="Liftover mappings are currently not available for the specified build." /></param>
@@ -14,7 +27,23 @@
<filter type="data_meta" ref="input" key="dbkey" column="0" /></options></param>
- <param name="minMatch" size="10" type="float" value="0.95" label="Minimum ratio of bases that must remap" />
+ <param name="minMatch" size="10" type="float" value="0.95" label="Minimum ratio of bases that must remap" help="Recommended values: same species = 0.95, different species = 0.10" />
+ <conditional name="multiple">
+ <param name="choice" type="select" label="Allow multiple output regions?" help="Recommended values: same species = No, different species = Yes">
+ <option value="0" selected="true">No</option>
+ <option value="1">Yes</option>
+ </param>
+ <when value="0">
+ <param name="minSizeQ" type="hidden" value="0" />
+ <param name="minChainQ" type="hidden" value="0" />
+ <param name="minChainT" type="hidden" value="0" />
+ </when>
+ <when value="1">
+ <param name="minSizeQ" size="10" type="integer" value="0" label="Minimum matching region size in query" help="Recommended value: set to >= 300 bases for complete transcripts"/>
+ <param name="minChainQ" size="10" type="integer" value="500" label="Minimum chain size in query"/>
+ <param name="minChainT" size="10" type="integer" value="500" label="Minimum chain size in target"/>
+ </when>
+ </conditional></inputs><outputs><data format="input" name="out_file1" label="${tool.name} on ${on_string} [ MAPPED COORDINATES ]">
@@ -37,9 +66,40 @@
<param name="input" value="5.bed" dbkey="hg18" ftype="bed" /><param name="to_dbkey" value="panTro2" /><param name="minMatch" value="0.95" />
+ <param name="choice" value="0" /><output name="out_file1" file="5_liftover_mapped.bed"/><output name="out_file2" file="5_liftover_unmapped.bed"/></test>
+ <test>
+ <param name="input" value="5.bed" dbkey="hg18" ftype="bed" />
+ <param name="to_dbkey" value="panTro2" />
+ <param name="minMatch" value="0.10" />
+ <param name="choice" value="1" />
+ <param name="minSizeQ" value="0" />
+ <param name="minChainQ" value="500" />
+ <param name="minChainT" value="500" />
+ <output name="out_file1" file="5_mult_liftover_mapped.bed"/>
+ <output name="out_file2" file="5_mult_liftover_unmapped.bed"/>
+ </test>
+ <test>
+ <param name="input" value="cuffcompare_in1.gtf" dbkey="hg18" ftype="gtf" />
+ <param name="to_dbkey" value="panTro2" />
+ <param name="minMatch" value="0.95" />
+ <param name="choice" value="0" />
+ <output name="out_file1" file="cuffcompare_in1_liftover_mapped.bed"/>
+ <output name="out_file2" file="cuffcompare_in1_liftover_unmapped.bed"/>
+ </test>
+ <test>
+ <param name="input" value="cuffcompare_in1.gtf" dbkey="hg18" ftype="gtf" />
+ <param name="to_dbkey" value="panTro2" />
+ <param name="minMatch" value="0.10" />
+ <param name="choice" value="1" />
+ <param name="minSizeQ" value="0" />
+ <param name="minChainQ" value="500" />
+ <param name="minChainT" value="500" />
+ <output name="out_file1" file="cuffcompare_in1_mult_liftover_mapped.bed"/>
+ <output name="out_file2" file="cuffcompare_in1_mult_liftover_unmapped.bed"/>
+ </test></tests><help>
.. class:: warningmark
@@ -48,7 +108,7 @@
.. class:: warningmark
-This tool will only work on interval datasets with chromosome in column 1,
+This tool can work with interval, GFF, and GTF datasets. It requires the interval datasets to have chromosome in column 1,
start co-ordinate in column 2 and end co-ordinate in column 3. BED comments
and track and browser lines will be ignored, but if other non-interval lines
are present the tool will return empty output datasets.
@@ -59,7 +119,11 @@
**What it does**
-This tool converts coordinates and annotations between assemblies and genomes. It produces 2 files, one containing all the mapped coordinates and the other containing the unmapped coordinates, if any.
+This tool is based on the LiftOver utility and Chain track from `the UC Santa Cruz Genome Browser`__.
+
+It converts coordinates and annotations between assemblies and genomes. It produces 2 files, one containing all the mapped coordinates and the other containing the unmapped coordinates, if any.
+
+ .. __: http://genome.ucsc.edu/
-----
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 tools/ilmn_pacbio/assembly_stats.py
--- a/tools/ilmn_pacbio/assembly_stats.py
+++ b/tools/ilmn_pacbio/assembly_stats.py
@@ -1,107 +1,83 @@
#!/usr/bin/env python
-import sys
-import os
-import random
+#
+#Copyright (c) 2011, Pacific Biosciences of California, Inc.
+#
+#All rights reserved.
+#
+#Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+# * Neither the name of Pacific Biosciences nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+#
+#THIS SOFTWARE IS PROVIDED BY PACIFIC BIOSCIENCES AND ITS CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+#WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL PACIFIC BIOSCIENCES OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+#DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+#LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+#(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import sys, os
+from optparse import OptionParser
+from galaxy import eggs
+import pkg_resources
+pkg_resources.require( 'bx-python' )
+from bx.seq.fasta import FastaReader
-from optparse import OptionParser
-from pbpy.io.FastaIO import FastaEntry, SimpleFastaReader
-
-class FastaStats:
- def __init__(self, argv):
- self.__parseOptions( argv )
-
- def __parseOptions(self, argv):
- usage = 'Usage: %prog [--help] [options] [fastaFileList]'
- parser = OptionParser( usage=usage )
- parser.add_option("--minContigLength", help="Minimum length of contigs to analyze")
- parser.add_option("--genomeLength", help="Length of genome to calculate N50s for.")
- parser.add_option("--outputFormat", help="Format of output [wiki]")
- parser.add_option("--noHeader", action="store_true",
- help="Don't output a header line" )
- parser.set_defaults( noHeader=False,
- minContigLength=0, genomeLength=0, outputFormat="wiki")
-
- self.options, args = parser.parse_args(argv)
-
- if len(args) < 2:
- parser.error( 'Expected 1 arguments' )
-
- self.fastaFiles = args[1:]
- self.outputFormat = self.options.outputFormat
- self.genomeLength = int(self.options.genomeLength)
- self.minContigLength = int(self.options.minContigLength)
- self.statKeys = "File Num Sum Max Avg N50 99%".split(" ")
-
- def getStats(self, fastaFile):
- lengths = []
- for entry in SimpleFastaReader(fastaFile):
- if len(entry.sequence) < self.minContigLength: continue
- lengths.append( len(entry.sequence) )
-
- stats = {"File":fastaFile,
- "Num":len(lengths),
- "Sum":sum(lengths),
- "Max":max(lengths),
- # "MinLenSum": sum( filter(lambda x: x > self.minContigLength, lengths)),
- "Avg":int(sum(lengths)/float(len(lengths))),
- "N50":0,
- "99%":0}
-
- if self.genomeLength == 0: self.genomeLength = sum(lengths)
-
+def getStats( fastaFile, genomeLength, minContigLength ):
+ lengths = []
+ stats = { "Num" : 0,
+ "Sum" : 0,
+ "Max" : 0,
+ "Avg" : 0,
+ "N50" : 0,
+ "99%" : 0 }
+ fasta_reader = FastaReader( open( fastaFile, 'rb' ) )
+ while True:
+ seq = fasta_reader.next()
+ if not seq:
+ break
+ if seq.length < minContigLength:
+ continue
+ lengths.append( seq.length )
+ if lengths:
+ stats[ 'Num' ] = len( lengths )
+ stats[ 'Sum' ] = sum( lengths )
+ stats[ 'Max' ] = max( lengths )
+ stats[ 'Avg' ] = int( sum( lengths ) / float( len( lengths ) ) )
+ stats[ 'N50' ] = 0
+ stats[ '99%' ] = 0
+ if genomeLength == 0:
+ genomeLength = sum( lengths )
lengths.sort()
lengths.reverse()
lenSum = 0
- stats["99%"] = len(lengths)
- for idx, length in enumerate(lengths):
+ stats[ "99%" ] = len( lengths )
+ for idx, length in enumerate( lengths ):
lenSum += length
- if (lenSum > self.genomeLength/2):
- stats["N50"] = length
+ if ( lenSum > genomeLength / 2 ):
+ stats[ "N50" ] = length
break
lenSum = 0
- for idx, length in enumerate(lengths):
+ for idx, length in enumerate( lengths ):
lenSum += length
- if (lenSum > self.genomeLength*0.99):
- stats["99%"] = idx + 1
+ if lenSum > genomeLength * 0.99:
+ stats[ "99%" ] = idx + 1
break
+ return stats
- return stats
+def __main__():
+ #Parse Command Line
+ usage = 'Usage: %prog input output --minContigLength'
+ parser = OptionParser( usage=usage )
+ parser.add_option( "--minContigLength", dest="minContigLength", help="Minimum length of contigs to analyze" )
+ parser.add_option( "--genomeLength", dest="genomeLength", help="Length of genome for which to calculate N50s" )
+ parser.set_defaults( minContigLength=0, genomeLength=0 )
+ options, args = parser.parse_args()
+ input_fasta_file = args[ 0 ]
+ output_tabular_file = args[ 1 ]
+ statKeys = "Num Sum Max Avg N50 99%".split( " " )
+ stats = getStats( input_fasta_file, int( options.genomeLength ), int( options.minContigLength ) )
+ fout = open( output_tabular_file, "w" )
+ fout.write( "%s\n" % "\t".join( map( lambda key: str( stats[ key ] ), statKeys ) ) )
+ fout.close()
- def header(self):
- if self.outputFormat == "wiki":
- buffer = '{| width="200" cellspacing="1" cellpadding="1" border="1"\n'
- buffer += '|-\n'
- for key in self.statKeys:
- buffer += '| %s\n' % key
- return buffer
- elif self.outputFormat == "tsv":
- return "%s\n" % "\t".join(self.statKeys)
- else:
- sys.exit("Unsupported format %s" % self.outputFormat)
-
- def footer(self):
- if self.outputFormat == "wiki":
- return "|}\n"
- else:
- return ""
-
- def format(self, stats):
- if self.outputFormat == "wiki":
- buffer = "|-\n"
- for key in self.statKeys:
- buffer += "| %s\n" % stats[key]
- return buffer
- elif self.outputFormat == "tsv":
- return "%s\n" % "\t".join(map(lambda key: str(stats[key]), self.statKeys))
- else:
- sys.exit("Unsupported format %s" % self.outputFormat)
-
- def run(self):
- if not self.options.noHeader:
- print self.header(),
- for file in self.fastaFiles: print self.format(self.getStats(file)),
- print self.footer()
-
-if __name__=='__main__':
- app = FastaStats(sys.argv)
- app.run()
+if __name__=="__main__": __main__()
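The N50 computation above sorts contig lengths in descending order and reports the first length at which the running sum passes half the genome length. A self-contained check with made-up contig lengths:

    # N50 by hand: contigs summing to 250 bases, so the halfway mark is 125.
    lengths = [ 50, 80, 20, 70, 30 ]
    genome_length = sum( lengths )
    lengths.sort()
    lengths.reverse()
    len_sum = 0
    for length in lengths:
        len_sum += length
        if len_sum > genome_length / 2:
            print "N50 =", length   # 80 + 70 = 150 > 125, so N50 is 70
            break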
diff -r 626808a6bd5044b8f0c5688c93549093d62bae56 -r f0a15f976f1ffee8c144a05a413a290b34ad90e6 tools/ilmn_pacbio/assembly_stats.xml
--- a/tools/ilmn_pacbio/assembly_stats.xml
+++ b/tools/ilmn_pacbio/assembly_stats.xml
@@ -1,35 +1,33 @@
<tool id="assembly_stats" name="Assembly Statistics" version="1.0.0">
- <description>Calculate common measures of assembly quality</description>
- <command interpreter="python">
- assembly_stats.py ${wiki} --minContigLength=${minLength} $input1 > $output1
- </command>
- <inputs>
- <param name="input1" format="fasta" type="data" label="Select FASTA file containing contigs"/>
- <param name="minLength" type="integer" value="0" label="Minimum length of contigs to consider"/>
- <param name="wiki" type="boolean"
- checked="true" value="True"
- truevalue="--outputFormat=wiki"
- falsevalue="--noHeader --outputFormat=tsv"
- label="Human-readable?" />
- </inputs>
- <outputs>
- <data format="tabular" name="output1" label="Assembly statistics for ${on_string}"/>
- </outputs>
- <help>
+ <description>Calculate common measures of assembly quality</description>
+ <command interpreter="python">
+ assembly_stats.py $input1 $output1 --minContigLength=${minLength}
+ </command>
+ <inputs>
+ <param name="input1" format="fasta" type="data" label="Select FASTA file containing contigs"/>
+ <param name="minLength" type="integer" value="0" label="Minimum length of contigs to consider"/>
+ </inputs>
+ <outputs>
+ <data name="output1" format="tabular" label="Assembly statistics for ${on_string}"/>
+ </outputs>
+ <tests>
+ <test>
+ <param name="input1" value="3.fasta" ftype="fasta"/>
+ <param name="minLength" value="100"/>
+ <output name="output1" ftype="tabular" file="assembly_stats.tabular" />
+ </test>
+ </tests>
+ <help>
**What it does**
-Reports standard measures of *de novo* assembly quality such as
-number of contigs, sum of contigs, mean contig length, and N50.
+Reports standard measures of *de novo* assembly quality such as number of contigs, sum of contigs, mean contig length, and N50.
**Parameter list**
Minimum length
Only include contigs of this size or greater for calculating statistics.
-Human-readable?
- If true, output the statistics in a wiki format which can be read by humans. If false, output the metrics in a tab-delimited row.
-
**Output**
Num contigs
@@ -50,7 +48,7 @@
99%
Number of contigs accounting for 99% of the observed assembly.
- </help>
+ </help></tool>
https://bitbucket.org/galaxy/galaxy-central/changeset/41fe17936d94/
changeset: 41fe17936d94
user: natefoo
date: 2011-10-05 22:44:43
summary: merge
affected #: 467 files
Diff too large to display.
https://bitbucket.org/galaxy/galaxy-central/changeset/c4d68748fc75/
changeset: c4d68748fc75
user: natefoo
date: 2011-11-15 18:34:47
summary: Add the boto egg for S3 Object Store.
affected #: 2 files
diff -r 41fe17936d944a16faf9554f824dd223cef48357 -r c4d68748fc75bc29b13b962681b8e99227e505f6 eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -32,6 +32,7 @@
[eggs:noplatform]
amqplib = 0.6.1
Beaker = 1.4
+boto = 1.8d
decorator = 3.1.2
docutils = 0.7
drmaa = 0.4b3
diff -r 41fe17936d944a16faf9554f824dd223cef48357 -r c4d68748fc75bc29b13b962681b8e99227e505f6 lib/galaxy/objectstore/s3_multipart_upload.py
--- a/lib/galaxy/objectstore/s3_multipart_upload.py
+++ b/lib/galaxy/objectstore/s3_multipart_upload.py
@@ -12,6 +12,9 @@
import multiprocessing
from multiprocessing.pool import IMapIterator
+from galaxy import eggs
+eggs.require('boto')
+
import boto
def map_wrap(f):
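The eggs.require call has to run before the plain import so the version pinned in eggs.ini (1.8d above) is the copy that lands on sys.path. The same idiom in isolation; the connect_s3 call is only a hypothetical use, with credentials coming from the environment variables set in build_object_store_from_config:

    # Pin the vendored egg first, then import it by its normal name.
    from galaxy import eggs
    eggs.require( 'boto' )

    import boto
    conn = boto.connect_s3()   # reads AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY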
https://bitbucket.org/galaxy/galaxy-central/changeset/2e4b0a495f20/
changeset: 2e4b0a495f20
user: natefoo
date: 2011-11-15 18:35:57
summary: Merge galaxy-central
affected #: 238 files
Diff too large to display.
https://bitbucket.org/galaxy/galaxy-central/changeset/b6b9c118b24c/
changeset: b6b9c118b24c
user: natefoo
date: 2011-11-16 22:55:44
summary: Hierarchical object store with weighting.
affected #: 4 files
diff -r 2e4b0a495f20f0edaa206387726ae5352fb06831 -r b6b9c118b24ce696f853071690a3df6f7427f221 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -152,6 +152,7 @@
self.s3_bucket = kwargs.get( 's3_bucket', None)
self.use_reduced_redundancy = kwargs.get( 'use_reduced_redundancy', False )
self.object_store_cache_size = float(kwargs.get( 'object_store_cache_size', -1 ))
+ self.hierarchical_object_store_config_file = kwargs.get( 'hierarchical_object_store_config_file', None )
# Parse global_conf and save the parser
global_conf = kwargs.get( 'global_conf', None )
global_conf_parser = ConfigParser.ConfigParser()
diff -r 2e4b0a495f20f0edaa206387726ae5352fb06831 -r b6b9c118b24ce696f853071690a3df6f7427f221 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -438,9 +438,9 @@
mf = parent.metadata.get( self.spec.name, None)
if mf is None:
mf = self.new_file( dataset = parent, **value.kwds )
- shutil.move( value.file_name, mf.file_name )
# Ensure the metadata file gets updated with content
- parent.dataset.object_store.update_from_file( parent.dataset.id, file_name=mf.file_name, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name=os.path.basename(mf.file_name) )
+ parent.dataset.object_store.update_from_file( parent.dataset.id, file_name=value.file_name, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name=os.path.basename(mf.file_name) )
+ os.unlink( value.file_name )
value = mf.id
return value
diff -r 2e4b0a495f20f0edaa206387726ae5352fb06831 -r b6b9c118b24ce696f853071690a3df6f7427f221 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -58,8 +58,8 @@
job_wrapper.prepare()
command_line = self.build_command_line( job_wrapper )
except:
+ log.exception("failure running job %d" % job_wrapper.job_id)
job_wrapper.fail( "failure preparing job", exception=True )
- log.exception("failure running job %d" % job_wrapper.job_id)
return
# If we were able to get a command line, run the job
if command_line:
diff -r 2e4b0a495f20f0edaa206387726ae5352fb06831 -r b6b9c118b24ce696f853071690a3df6f7427f221 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -6,6 +6,7 @@
import os
import time
+import random
import shutil
import logging
import threading
@@ -138,16 +139,16 @@
"""
raise NotImplementedError()
- def update_from_file(self, dataset_id, extra_dir=None, extra_dir_at_root=False, alt_name=None, filename=None, create=False):
+ def update_from_file(self, dataset_id, extra_dir=None, extra_dir_at_root=False, alt_name=None, file_name=None, create=False):
"""
Inform the store that the file associated with the object has been
- updated. If `filename` is provided, update from that file instead
+ updated. If `file_name` is provided, update from that file instead
of the default.
If the object does not exist raises `ObjectNotFound`.
See `exists` method for the description of other fields.
- :type filename: string
- :param filename: Use file pointed to by `filename` as the source for
+ :type file_name: string
+ :param file_name: Use file pointed to by `file_name` as the source for
updating the dataset identified by `dataset_id`
:type create: bool
@@ -179,9 +180,9 @@
Standard Galaxy object store, stores objects in files under a specific
directory on disk.
"""
- def __init__(self, config):
+ def __init__(self, config, file_path=None):
super(DiskObjectStore, self).__init__()
- self.file_path = config.file_path
+ self.file_path = file_path or config.file_path
def _get_filename(self, dataset_id, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""Class method that returns the absolute path for the file corresponding
@@ -819,13 +820,107 @@
class HierarchicalObjectStore(ObjectStore):
"""
ObjectStore that defers to a list of backends, for getting objects the
- first store where the object exists is used, objects are always created
- in the first store.
+ first store where the object exists is used, objects are created in a
+ store selected randomly, but with weighting.
"""
- def __init__(self, backends=[]):
+ def __init__(self, config):
super(HierarchicalObjectStore, self).__init__()
-
+ assert config is not None, "hierarchical object store ('object_store = hierarchical') " \
+ "requires a config file, please set one in " \
+ "'hierarchical_object_store_config_file')"
+ self.hierarchical_config = config
+ self.backends = {}
+ self.weighted_backend_names = []
+
+ random.seed()
+
+ self.__parse_hierarchical_config(config)
+
+ def __parse_hierarchical_config(self, config):
+ tree = util.parse_xml(self.hierarchical_config)
+ root = tree.getroot()
+ log.debug('Loading backends for hierarchical object store from %s' % self.hierarchical_config)
+ for elem in [ e for e in root if e.tag == 'backend' ]:
+ name = elem.get('name')
+ weight = int(elem.get('weight', 1))
+ if elem.get('type', 'disk') == 'disk':
+ path = None
+ for sub in elem:
+ if sub.tag == 'path':
+ path = sub.get('value')
+ self.backends[name] = DiskObjectStore(config, file_path=path)
+ log.debug("Loaded disk backend '%s' with weight %s and disk path '%s'" % (name, weight, path))
+ for i in range(0, weight):
+ # The simplest way to do weighting: append each backend's name to a
+ # sequence a number of times equal to its weight, then randomly
+ # choose a backend from that sequence at creation
+ self.weighted_backend_names.append(name)
+
+ def exists(self, dataset_id, **kwargs):
+ store = self.__get_store_for(dataset_id, **kwargs)
+ return store is not None
+
+ def file_ready(self, dataset_id, **kwargs):
+ store = self.__get_store_for(dataset_id, **kwargs)
+ if store is not None:
+ return store.file_ready(dataset_id, **kwargs)
+ return False
+
+ def create(self, dataset_id, **kwargs):
+ if not self.exists(dataset_id, **kwargs):
+ store_name = random.choice(self.weighted_backend_names)
+ log.debug("Selected backend '%s' for creation of dataset %s" % (store_name, dataset_id))
+ return self.backends[store_name].create(dataset_id, **kwargs)
+
+ def empty(self, dataset_id, **kwargs):
+ store = self.__get_store_for(dataset_id, **kwargs)
+ if store is not None:
+ return store.empty(dataset_id, **kwargs)
+ return True
+
+ def size(self, dataset_id, **kwargs):
+ store = self.__get_store_for(dataset_id, **kwargs)
+ if store is not None:
+ return store.size(dataset_id, **kwargs)
+ return 0
+
+ def delete(self, dataset_id, entire_dir=False, **kwargs):
+ store = self.__get_store_for(dataset_id, **kwargs)
+ if store is not None:
+ return store.delete(dataset_id, entire_dir=entire_dir, **kwargs)
+ return False
+
+ def get_data(self, dataset_id, start=0, count=-1, **kwargs):
+ store = self.__get_store_for(dataset_id, **kwargs)
+ if store is not None:
+ return store.get_data(dataset_id, start=start, count=count, **kwargs)
+ raise ObjectNotFound()
+
+ def get_filename(self, dataset_id, **kwargs):
+ store = self.__get_store_for(dataset_id, **kwargs)
+ if store is not None:
+ return store.get_filename(dataset_id, **kwargs)
+ raise ObjectNotFound()
+
+ def update_from_file(self, dataset_id, file_name=None, create=False, **kwargs):
+ store = self.__get_store_for(dataset_id, **kwargs)
+ if store is not None:
+ return store.update_from_file(dataset_id, file_name=file_name, create=create, **kwargs)
+ raise ObjectNotFound()
+
+ def get_object_url(self, dataset_id, **kwargs):
+ # FIXME: dir_only
+ store = self.__get_store_for(dataset_id, **kwargs)
+ if store is not None:
+ return store.get_object_url(dataset_id, **kwargs)
+ return None
+
+ def __get_store_for(self, dataset_id, **kwargs):
+ for store in self.backends.values():
+ if store.exists(dataset_id, **kwargs):
+ return store
+ return None
def build_object_store_from_config(config):
""" Depending on the configuration setting, invoke the appropriate object store
@@ -838,7 +933,7 @@
os.environ['AWS_SECRET_ACCESS_KEY'] = config.aws_secret_key
return S3ObjectStore(config=config)
elif store == 'hierarchical':
- return HierarchicalObjectStore()
+ return HierarchicalObjectStore(config.hierarchical_object_store_config_file)
def convert_bytes(bytes):
""" A helper function used for pretty printing disk usage """