2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/99fef2a32e19/
Changeset: 99fef2a32e19
Branch: release_15.03
User: dan
Date: 2015-02-24 17:02:42+00:00
Summary: Fix for installing .loc files from the toolshed to respect
shed_tool_data_path. Mention shed_tool_data_path in galaxy.ini.sample.
Affected #: 2 files
diff -r d1c3d84a7e06553cf948fcbd8c00b45bebcfb059 -r
99fef2a32e19ce5a8cf6137c1da2dbd9ddbb0536 config/galaxy.ini.sample
--- a/config/galaxy.ini.sample
+++ b/config/galaxy.ini.sample
@@ -217,6 +217,10 @@
#
https://wiki.galaxyproject.org/Admin/DataIntegration
#tool_data_path = tool-data
+# Directory where Tool Data Table related files will be placed
+# when installed from a ToolShed. Defaults to tool_data_path
+#shed_tool_data_path = tool-data
+
# File containing old-style genome builds
#builds_file_path = tool-data/shared/ucsc/builds.txt
diff -r d1c3d84a7e06553cf948fcbd8c00b45bebcfb059 -r
99fef2a32e19ce5a8cf6137c1da2dbd9ddbb0536 lib/tool_shed/tools/data_table_manager.py
--- a/lib/tool_shed/tools/data_table_manager.py
+++ b/lib/tool_shed/tools/data_table_manager.py
@@ -103,7 +103,7 @@
try:
new_table_elems, message = self.app.tool_data_tables \
.add_new_entries_from_config_file( config_filename=filename,
-
tool_data_path=self.app.config.tool_data_path,
+
tool_data_path=self.app.config.shed_tool_data_path,
shed_tool_data_table_config=self.app.config.shed_tool_data_table_config,
persist=persist )
if message:
https://bitbucket.org/galaxy/galaxy-central/commits/4a42638e8cd4/
Changeset: 4a42638e8cd4
Branch: release_15.03
User: dannon
Date: 2015-02-24 19:50:02+00:00
Summary: Apply custom_types bugfixes from dev branch to release.
Affected #: 1 file
diff -r 99fef2a32e19ce5a8cf6137c1da2dbd9ddbb0536 -r
4a42638e8cd45652db9dc1f1eed54be6aa2c20b1 lib/galaxy/model/custom_types.py
--- a/lib/galaxy/model/custom_types.py
+++ b/lib/galaxy/model/custom_types.py
@@ -1,25 +1,23 @@
-from sqlalchemy.types import *
+import binascii
+import json
+import logging
+import uuid
-import json
-import pickle
-import copy
-import uuid
-import binascii
-from galaxy.util.bunch import Bunch
+from galaxy import eggs
+eggs.require("SQLAlchemy")
+import sqlalchemy
+
from galaxy.util.aliaspickler import AliasPickleModule
+from sqlalchemy.types import CHAR, LargeBinary, String, TypeDecorator
+from sqlalchemy.ext.mutable import Mutable
-# For monkeypatching BIGINT
-import sqlalchemy.dialects.sqlite
-import sqlalchemy.dialects.postgresql
-import sqlalchemy.dialects.mysql
-
-import logging
log = logging.getLogger( __name__ )
# Default JSON encoder and decoder
json_encoder = json.JSONEncoder( sort_keys=True )
json_decoder = json.JSONDecoder( )
+
def _sniffnfix_pg9_hex(value):
"""
Sniff for and fix postgres 9 hex decoding issue
@@ -31,39 +29,32 @@
return binascii.unhexlify( value[2:] )
else:
return value
- except Exception, ex:
+ except Exception:
return value
-class JSONType( TypeDecorator ):
+
+class JSONType(sqlalchemy.types.TypeDecorator):
"""
- Defines a JSONType for SQLAlchemy. Takes a primitive as input and
- JSONifies it. This should replace PickleType throughout Galaxy.
+ Represents an immutable structure as a json-encoded string.
+
+ If default is, for example, a dict, then a NULL value in the
+ database will be exposed as an empty dict.
"""
+
+    # TODO: Figure out why this is a large binary, and provide a migration to
+ # something like sqlalchemy.String, or even better, when applicable, native
+ # sqlalchemy.dialects.postgresql.JSON
impl = LargeBinary
- def process_bind_param( self, value, dialect ):
- if value is None:
- return None
- return json_encoder.encode( value )
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value = json_encoder.encode(value)
+ return value
- def process_result_value( self, value, dialect ):
+ def process_result_value(self, value, dialect):
if value is not None:
- try:
- return json_decoder.decode( str( _sniffnfix_pg9_hex( value ) ) )
- except Exception, e:
- log.error( 'Failed to decode JSON (%s): %s', value, e )
- return None
-
- def copy_value( self, value ):
- # return json_decoder.decode( json_encoder.encode( value ) )
- return copy.deepcopy( value )
-
- def compare_values( self, x, y ):
- # return json_encoder.encode( x ) == json_encoder.encode( y )
- return ( x == y )
-
- def is_mutable( self ):
- return True
+ value = json_decoder.decode( str( _sniffnfix_pg9_hex( value ) ) )
+ return value
def load_dialect_impl(self, dialect):
if dialect.name == "mysql":
@@ -72,10 +63,153 @@
return self.impl
+class MutationObj(Mutable):
+ """
+ Mutable JSONType for SQLAlchemy from original gist:
+
https://gist.github.com/dbarnett/1730610
+
+ Using minor changes from this fork of the gist:
+
https://gist.github.com/miracle2k/52a031cced285ba9b8cd
+
+ And other minor changes to make it work for us.
+ """
+ @classmethod
+ def coerce(cls, key, value):
+ if isinstance(value, dict) and not isinstance(value, MutationDict):
+ return MutationDict.coerce(key, value)
+ if isinstance(value, list) and not isinstance(value, MutationList):
+ return MutationList.coerce(key, value)
+ return value
+
+ @classmethod
+ def _listen_on_attribute(cls, attribute, coerce, parent_cls):
+ key = attribute.key
+ if parent_cls is not attribute.class_:
+ return
+
+ # rely on "propagate" here
+ parent_cls = attribute.class_
+
+ def load(state, *args):
+ val = state.dict.get(key, None)
+ if coerce:
+ val = cls.coerce(key, val)
+ state.dict[key] = val
+ if isinstance(val, cls):
+ val._parents[state.obj()] = key
+
+ def set(target, value, oldvalue, initiator):
+ if not isinstance(value, cls):
+ value = cls.coerce(key, value)
+ if isinstance(value, cls):
+ value._parents[target.obj()] = key
+ if isinstance(oldvalue, cls):
+ oldvalue._parents.pop(target.obj(), None)
+ return value
+
+ def pickle(state, state_dict):
+ val = state.dict.get(key, None)
+ if isinstance(val, cls):
+ if 'ext.mutable.values' not in state_dict:
+ state_dict['ext.mutable.values'] = []
+ state_dict['ext.mutable.values'].append(val)
+
+ def unpickle(state, state_dict):
+ if 'ext.mutable.values' in state_dict:
+ for val in state_dict['ext.mutable.values']:
+ val._parents[state.obj()] = key
+
+ sqlalchemy.event.listen(parent_cls, 'load', load, raw=True,
propagate=True)
+ sqlalchemy.event.listen(parent_cls, 'refresh', load, raw=True,
propagate=True)
+ sqlalchemy.event.listen(attribute, 'set', set, raw=True, retval=True,
propagate=True)
+ sqlalchemy.event.listen(parent_cls, 'pickle', pickle, raw=True,
propagate=True)
+ sqlalchemy.event.listen(parent_cls, 'unpickle', unpickle, raw=True,
propagate=True)
+
+
+class MutationDict(MutationObj, dict):
+ @classmethod
+ def coerce(cls, key, value):
+ """Convert plain dictionary to MutationDict"""
+ self = MutationDict((k, MutationObj.coerce(key, v)) for (k, v) in value.items())
+ self._key = key
+ return self
+
+ def __setitem__(self, key, value):
+ if hasattr(self, '_key'):
+ value = MutationObj.coerce(self._key, value)
+ dict.__setitem__(self, key, value)
+ self.changed()
+
+ def __delitem__(self, key):
+ dict.__delitem__(self, key)
+ self.changed()
+
+ def __getstate__(self):
+ return dict(self)
+
+ def __setstate__(self, state):
+ self.update(state)
+
+
+class MutationList(MutationObj, list):
+ @classmethod
+ def coerce(cls, key, value):
+ """Convert plain list to MutationList"""
+ self = MutationList((MutationObj.coerce(key, v) for v in value))
+ self._key = key
+ return self
+
+ def __setitem__(self, idx, value):
+ list.__setitem__(self, idx, MutationObj.coerce(self._key, value))
+ self.changed()
+
+ def __setslice__(self, start, stop, values):
+ list.__setslice__(self, start, stop, (MutationObj.coerce(self._key, v) for v in
values))
+ self.changed()
+
+ def __delitem__(self, idx):
+ list.__delitem__(self, idx)
+ self.changed()
+
+ def __delslice__(self, start, stop):
+ list.__delslice__(self, start, stop)
+ self.changed()
+
+ def append(self, value):
+ list.append(self, MutationObj.coerce(self._key, value))
+ self.changed()
+
+ def insert(self, idx, value):
+ list.insert(self, idx, MutationObj.coerce(self._key, value))
+ self.changed()
+
+ def extend(self, values):
+ list.extend(self, (MutationObj.coerce(self._key, v) for v in values))
+ self.changed()
+
+ def pop(self, *args, **kw):
+ value = list.pop(self, *args, **kw)
+ self.changed()
+ return value
+
+ def remove(self, value):
+ list.remove(self, value)
+ self.changed()
+
+ def __getstate__(self):
+ return list(self)
+
+ def __setstate__(self, state):
+ self.update(state)
+
+
+MutationObj.associate_with(JSONType)
+
metadata_pickler = AliasPickleModule( {
- ( "cookbook.patterns", "Bunch" ) : (
"galaxy.util.bunch" , "Bunch" )
+ ( "cookbook.patterns", "Bunch" ): (
"galaxy.util.bunch", "Bunch" )
} )
+
class MetadataType( JSONType ):
"""
Backward compatible metadata type. Can read pickles or JSON, but always
@@ -96,6 +230,7 @@
ret = None
return ret
+
class UUIDType(TypeDecorator):
"""
Platform-independent UUID type.
@@ -129,31 +264,9 @@
class TrimmedString( TypeDecorator ):
impl = String
+
def process_bind_param( self, value, dialect ):
"""Automatically truncate string values"""
if self.impl.length and value is not None:
value = value[0:self.impl.length]
return value
-
-
-#class BigInteger( Integer ):
- #"""
- #A type for bigger ``int`` integers.
-
- #Typically generates a ``BIGINT`` in DDL, and otherwise acts like
- #a normal :class:`Integer` on the Python side.
-
- #"""
-
-#class BIGINT( BigInteger ):
- #"""The SQL BIGINT type."""
-
-#class SLBigInteger( BigInteger ):
- #def get_col_spec( self ):
- #return "BIGINT"
-
-#sqlalchemy.dialects.sqlite.SLBigInteger = SLBigInteger
-#sqlalchemy.dialects.sqlite.colspecs[BigInteger] = SLBigInteger
-#sqlalchemy.dialects.sqlite.ischema_names['BIGINT'] = SLBigInteger
-#sqlalchemy.dialects.postgres.colspecs[BigInteger] =
sqlalchemy.dialects.postgres.PGBigInteger
-#sqlalchemy.dialects.mysql.colspecs[BigInteger] = sqlalchemy.dialects.mysql.MSBigInteger
Repository URL:
https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from
bitbucket.org. You are receiving
this because you have the service enabled; this notification is addressed
to the recipient of this email.