1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/384f3b803e64/
changeset: r5044:384f3b803e64
user: kanwei
date: 2011-02-11 06:53:49
summary: trackster: Properly return NO_DATA for LineTrack datasets, fix error when no data is available
affected #: 3 files (329 bytes)
--- a/lib/galaxy/datatypes/interval.py Thu Feb 10 17:43:39 2011 -0500
+++ b/lib/galaxy/datatypes/interval.py Fri Feb 11 00:53:49 2011 -0500
@@ -1161,7 +1161,7 @@
resolution = max( resolution, 1 )
return resolution
def get_track_type( self ):
- return "LineTrack", {"data": "bigwig"}
+ return "LineTrack", {"data": "bigwig", "index": "bigwig"}
class CustomTrack ( Tabular ):
"""UCSC CustomTrack"""
--- a/lib/galaxy/visualization/tracks/data_providers.py Thu Feb 10 17:43:39 2011 -0500
+++ b/lib/galaxy/visualization/tracks/data_providers.py Fri Feb 11 00:53:49 2011 -0500
@@ -48,13 +48,13 @@
# Override.
pass
- def has_data( self, chrom, start, end ):
+ def has_data( self, chrom, start, end, **kwargs ):
"""
Returns true if dataset has data in the specified genome window, false
otherwise.
"""
# Override.
- return False
+ pass
def get_data( self, chrom, start, end, **kwargs ):
""" Returns data in region defined by chrom, start, and end. """
@@ -140,10 +140,9 @@
else:
return results, stats[level]["max"], stats[level]["avg"], stats[level]["delta"]
- def has_data( self, chrom, start, end ):
+ def has_data( self, chrom ):
"""
- Returns true if dataset has data in the specified genome window, false
- otherwise.
+ Returns true if dataset has data for this chrom
"""
# Get summary tree.
@@ -154,8 +153,7 @@
self.CACHE[filename] = st
# Check for data.
- level = ceil( log( 100000, st.block_size ) ) - 1
- return ( st.query( chrom, int(start), int(end), level ) is not None )
+ return st.chrom_blocks.get(chrom, None) is not None
class VcfDataProvider( TracksDataProvider ):
"""
@@ -405,14 +403,24 @@
"""
BigWig data provider for the Galaxy track browser.
"""
-
- def get_data( self, chrom, start, end, **kwargs ):
- # Bigwig has the possibility of it being a standalone bigwig file, in which case we use
- # original_dataset, or coming from wig->bigwig conversion in which we use converted_dataset
+ def _get_dataset( self ):
if self.converted_dataset is not None:
f = open( self.converted_dataset.file_name )
else:
f = open( self.original_dataset.file_name )
+ return f
+
+ def has_data( self, chrom ):
+ f = self._get_dataset()
+ bw = BigWigFile(file=f)
+ all_dat = bw.query(chrom, 0, 2147483647, 1)
+ f.close()
+ return all_dat is not None
+
+ def get_data( self, chrom, start, end, **kwargs ):
+ # Bigwig has the possibility of it being a standalone bigwig file, in which case we use
+ # original_dataset, or coming from wig->bigwig conversion in which we use converted_dataset
+ f = self._get_dataset()
bw = BigWigFile(file=f)
if 'stats' in kwargs:
@@ -426,7 +434,6 @@
'min': float( all_dat['min'] ), \
'total_frequency': float( all_dat['coverage'] ) }
-
start = int(start)
end = int(end)
num_points = 2000
@@ -439,9 +446,10 @@
pos = start
step_size = (end - start) / num_points
result = []
- for dat_dict in data:
- result.append( (pos, float_nan(dat_dict['mean']) ) )
- pos += step_size
+ if data:
+ for dat_dict in data:
+ result.append( (pos, float_nan(dat_dict['mean']) ) )
+ pos += step_size
return result
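
For context, the chromosome-level probe added above relies on bx-python's BigWigFile.query() returning None when a chromosome is absent from the file. A minimal standalone version of the same check (the path and chromosome below are placeholders):

    # Sketch of the has_data probe above, using bx-python's BigWigFile as the
    # diff does. "data.bigwig" and "chr1" would be placeholder inputs.
    from bx.bbi.bigwig_file import BigWigFile

    def bigwig_has_data( path, chrom ):
        f = open( path )
        try:
            bw = BigWigFile( file=f )
            # query() returns None for an unknown chromosome; 2147483647
            # (2**31 - 1) is the same "to end of chromosome" bound used above.
            return bw.query( chrom, 0, 2147483647, 1 ) is not None
        finally:
            f.close()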
--- a/lib/galaxy/web/controllers/tracks.py Thu Feb 10 17:43:39 2011 -0500
+++ b/lib/galaxy/web/controllers/tracks.py Fri Feb 11 00:53:49 2011 -0500
@@ -458,13 +458,17 @@
return msg
# Check for data in the genome window.
- # TODO: Not all tracks have an index data source, so need to try alternative data sources to check for data.
- if chrom and low and high and data_sources.get( 'index' ):
+ if data_sources.get( 'index' ):
tracks_dataset_type = data_sources['index']['name']
indexer = get_data_provider( tracks_dataset_type )( dataset.get_converted_dataset( trans, tracks_dataset_type ), dataset )
- if not indexer.has_data( chrom, low, high ):
+ if not indexer.has_data( chrom ):
return messages.NO_DATA
-
+ else:
+ # Standalone data provider
+ standalone_provider = get_data_provider(data_sources['data_standalone']['name'])( dataset )
+ kwargs = {"stats": True}
+ if not standalone_provider.has_data( chrom ):
+ return messages.NO_DATA
return messages.DATA
@web.json
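
Taken together, the controller now prefers a track's index data source and falls back to a standalone provider only when no index is declared. A condensed, self-contained sketch of that dispatch (check_for_data and get_provider are illustrative stand-ins, not Galaxy's actual API):

    NO_DATA, DATA = "no data", "data"

    def check_for_data( data_sources, get_provider, chrom ):
        # Prefer the index data source when the track declares one.
        if data_sources.get( 'index' ):
            provider = get_provider( data_sources['index']['name'] )
        else:
            # Standalone data provider, e.g. a bare bigwig file.
            provider = get_provider( data_sources['data_standalone']['name'] )
        if not provider.has_data( chrom ):
            return NO_DATA
        return DATA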
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/6d5331f3b468/
changeset: r5041:6d5331f3b468
user: natefoo
date: 2011-02-10 21:53:06
summary: Another bugfix for egg conflicts: only add an item to the pkg_resources working set if it is not already in there.
affected #: 1 file (79 bytes)
--- a/lib/galaxy/eggs/__init__.py Thu Feb 10 15:29:08 2011 -0500
+++ b/lib/galaxy/eggs/__init__.py Thu Feb 10 15:53:06 2011 -0500
@@ -212,7 +212,8 @@
try:
dists = self.resolve()
for dist in dists:
- pkg_resources.working_set.add( dist )
+ if dist.location not in pkg_resources.working_set.entries:
+ pkg_resources.working_set.add( dist )
return dists
except:
raise
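
The guard keys on a Distribution's location already appearing in working_set.entries. The same idiom in isolation:

    import pkg_resources

    def add_if_absent( dist ):
        # working_set.entries lists locations that are already active; adding
        # the same location twice is what produced the egg conflicts.
        if dist.location not in pkg_resources.working_set.entries:
            pkg_resources.working_set.add( dist )

    # e.g., after resolving a requirement ("simplejson" is a placeholder):
    # for dist in pkg_resources.require( "simplejson" ):
    #     add_if_absent( dist )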
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/559a9ad2844d/
changeset: r5038:559a9ad2844d
user: natefoo
date: 2011-02-10 18:13:55
summary: Fix for pysam egg conditional, which would always fail, and actually check always_conditional eggs in the crate's all_eggs method. Fixes downloading of eggs for offline systems.
affected #: 1 file (82 bytes)
--- a/lib/galaxy/eggs/__init__.py Thu Feb 10 10:33:08 2011 -0500
+++ b/lib/galaxy/eggs/__init__.py Thu Feb 10 12:13:55 2011 -0500
@@ -295,8 +295,9 @@
"""
rval = []
for egg in self.eggs.values():
- if egg.name not in self.galaxy_config.always_conditional:
- rval.append( egg )
+ if egg.name in self.galaxy_config.always_conditional and not self.galaxy_config.check_conditional( egg.name ):
+ continue
+ rval.append( egg )
return rval
def __getitem__( self, name ):
"""
@@ -357,7 +358,7 @@
"python_daemon": lambda: sys.version_info[:2] >= ( 2, 5 ),
"GeneTrack": lambda: sys.version_info[:2] >= ( 2, 5 ),
"ctypes": lambda: ( "drmaa" in self.config.get( "app:main", "start_job_runners" ).split(",") ) and sys.version_info[:2] == ( 2, 4 ),
- "pysam": check_pysam()
+ "pysam": lambda: check_pysam()
}.get( egg_name, lambda: True )()
except:
return False
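
The pysam fix is about evaluation time: in the dict literal, check_pysam() ran while the dict was being built, and the later .get( egg_name, lambda: True )() then tried to call the value it returned, raising TypeError, which the surrounding except turned into False ("would always fail"). A minimal illustration, assuming check_pysam returns a boolean like the other conditions:

    def check_pysam():
        # Stand-in for the real check; assume it returns a boolean.
        return True

    # Broken: the dict literal evaluates check_pysam() immediately, storing
    # the boolean itself; calling .get( ... )() then tries to call that
    # boolean and raises TypeError.
    # { "pysam": check_pysam() }.get( "pysam", lambda: True )()

    # Fixed: store a callable so evaluation is deferred to lookup time.
    result = { "pysam": lambda: check_pysam() }.get( "pysam", lambda: True )()
    assert result is True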
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/72cbd1ed1a81/
changeset: r5036:72cbd1ed1a81
user: natefoo
date: 2011-02-09 22:49:43
summary: Fix for MySQL index lengths in migration script 62. Thanks Leandro Hermida.
affected #: 1 file (1 byte)
--- a/lib/galaxy/model/migrate/versions/0062_user_openid_table.py Wed Feb 09 16:46:30 2011 -0500
+++ b/lib/galaxy/model/migrate/versions/0062_user_openid_table.py Wed Feb 09 16:49:43 2011 -0500
@@ -40,7 +40,7 @@
ix_name = 'ix_galaxy_user_openid_openid'
if migrate_engine.name == 'mysql':
- i = "ALTER TABLE galaxy_user_openid ADD UNIQUE INDEX ( openid( 1000 ) )"
+ i = "ALTER TABLE galaxy_user_openid ADD UNIQUE INDEX ( openid( 255 ) )"
db_session.execute( i )
else:
i = Index( ix_name, UserOpenID_table.c.openid, unique=True )
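
The 1000-character prefix exceeded MySQL's index key limit; assuming the common configuration of the era (InnoDB's 767-byte key cap and 3-byte utf8 characters), 255 is the widest prefix that fits. The arithmetic, as a quick check:

    # MySQL caps index key length (767 bytes for InnoDB, 1000 for MyISAM),
    # and a utf8 character can occupy up to 3 bytes, so the widest safe
    # prefix is 767 // 3 = 255 characters; openid( 1000 ) would need 3000.
    MAX_INNODB_KEY_BYTES = 767
    BYTES_PER_UTF8_CHAR = 3
    assert MAX_INNODB_KEY_BYTES // BYTES_PER_UTF8_CHAR == 255
    assert 1000 * BYTES_PER_UTF8_CHAR > MAX_INNODB_KEY_BYTES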