1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/62d51750d7df/
changeset: 62d51750d7df
user: natefoo
date: 2011-08-29 15:32:26
summary: It turns out that our old version of SQLAlchemy already has the Postgres/MySQL BIGINT types; it just needed the BigInteger SQLAlchemy type and SQLite BIGINT. And fix a bug found by Jack Zhu.
affected #: 1 file (946 bytes)
--- a/lib/galaxy/model/custom_types.py Sun Aug 28 14:07:33 2011 -0400
+++ b/lib/galaxy/model/custom_types.py Mon Aug 29 09:32:26 2011 -0400
@@ -105,40 +105,12 @@
class BIGINT( BigInteger ):
"""The SQL BIGINT type."""
-class DBBigInteger( BigInteger ):
+class SLBigInteger( BigInteger ):
def get_col_spec( self ):
return "BIGINT"
-sqlalchemy.databases.postgres.PGBigInteger = DBBigInteger
-sqlalchemy.databases.postgres.colspecs[BigInteger] = DBBigInteger
-sqlalchemy.databases.sqlite.SLBigInteger = DBBigInteger
-sqlalchemy.databases.sqlite.colspecs[BigInteger] = DBBigInteger
-
-class MSBigInteger( BigInteger, sqlalchemy.databases.mysql.MSInteger ):
- """MySQL BIGINTEGER type."""
-
- def __init__(self, display_width=None, **kw):
- """Construct a BIGINTEGER.
-
- :param display_width: Optional, maximum display width for this number.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
- left-padded with zeros. Note that this does not effect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- self.display_width = display_width
- sqlalchemy.databases.mysql._NumericType.__init__(self, kw)
- BigInteger.__init__(self, **kw)
-
- def get_col_spec(self):
- if self.display_width is not None:
- return self._extend("BIGINT(%(display_width)s)" % {'display_width': self.display_width})
- else:
- return self._extend("BIGINT")
-
-sqlalchemy.databases.mysql.MSBigInteger = MSBigInteger
-sqlalchemy.databases.mysql.colspecs[BigInteger] = MSBigInteger
+sqlalchemy.databases.sqlite.SLBigInteger = SLBigInteger
+sqlalchemy.databases.sqlite.colspecs[BigInteger] = SLBigInteger
+sqlalchemy.databases.sqlite.ischema_names['BIGINT'] = SLBigInteger
+sqlalchemy.databases.postgres.colspecs[BigInteger] = sqlalchemy.databases.postgres.PGBigInteger
+sqlalchemy.databases.mysql.colspecs[BigInteger] = sqlalchemy.databases.mysql.MSBigInteger
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the commit notification service is enabled for
the recipient of this email.
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/434f99cdfd96/
changeset: 434f99cdfd96
user: jgoecks
date: 2011-08-28 20:07:33
summary: Trackster: add left_offset attribute to LineTrack so that line tracks can be set in overview.
affected #: 1 file (26 bytes)
--- a/static/scripts/trackster.js Fri Aug 26 17:45:50 2011 -0400
+++ b/static/scripts/trackster.js Sun Aug 28 14:07:33 2011 -0400
@@ -2598,6 +2598,7 @@
this.original_dataset_id = dataset_id;
this.data_manager = new DataManager(CACHED_DATA, this);
this.tile_cache = new Cache(CACHED_TILES_LINE);
+ this.left_offset = 0;
// Define track configuration
this.track_config = new TrackConfig( {
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the commit notification service is enabled for
the recipient of this email.
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/ef955e304925/
changeset: ef955e304925
user: greg
date: 2011-08-26 22:04:53
summary: Eliminate the code for the long-unused tool shed datatypes.
affected #: 3 files (247 bytes)
--- a/lib/galaxy/webapps/community/app.py Fri Aug 26 15:59:29 2011 -0400
+++ b/lib/galaxy/webapps/community/app.py Fri Aug 26 16:04:53 2011 -0400
@@ -13,8 +13,6 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
- # Set up datatypes registry
- self.datatypes_registry = galaxy.datatypes.registry.Registry( self.config.root, self.config.datatypes_config )
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
--- a/lib/galaxy/webapps/community/config.py Fri Aug 26 15:59:29 2011 -0400
+++ b/lib/galaxy/webapps/community/config.py Fri Aug 26 16:04:53 2011 -0400
@@ -70,7 +70,6 @@
self.screencasts_url = kwargs.get( 'screencasts_url', None )
self.log_events = False
self.cloud_controller_instance = False
- self.datatypes_config = kwargs.get( 'datatypes_config_file', 'datatypes_conf.xml' )
# Proxy features
self.apache_xsendfile = kwargs.get( 'apache_xsendfile', False )
self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
--- a/lib/galaxy/webapps/community/datatypes/__init__.py Fri Aug 26 15:59:29 2011 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,196 +0,0 @@
-import sys, logging, tarfile
-from galaxy.util import parse_xml
-from galaxy.util.bunch import Bunch
-
-log = logging.getLogger( __name__ )
-
-if sys.version_info[:2] == ( 2, 4 ):
- from galaxy import eggs
- eggs.require( 'ElementTree' )
- from elementtree import ElementTree
-else:
- from xml.etree import ElementTree
-
-class DatatypeVerificationError( Exception ):
- pass
-
-class Registry( object ):
- def __init__( self, root_dir=None, config=None ):
- self.datatypes_by_extension = {}
- if root_dir and config:
- # Parse datatypes_conf.xml
- tree = parse_xml( config )
- root = tree.getroot()
- # Load datatypes and converters from config
- log.debug( 'Loading datatypes from %s' % config )
- registration = root.find( 'registration' )
- for elem in registration.findall( 'datatype' ):
- try:
- extension = elem.get( 'extension', None )
- dtype = elem.get( 'type', None )
- model_object = elem.get( 'model', None )
- if extension and dtype:
- fields = dtype.split( ':' )
- datatype_module = fields[0]
- datatype_class = fields[1]
- fields = datatype_module.split( '.' )
- module = __import__( fields.pop(0) )
- for mod in fields:
- module = getattr( module, mod )
- self.datatypes_by_extension[extension] = getattr( module, datatype_class )()
- log.debug( 'Loaded datatype: %s' % dtype )
- if model_object:
- model_module, model_class = model_object.split( ':' )
- fields = model_module.split( '.' )
- module = __import__( fields.pop(0) )
- for mod in fields:
- module = getattr( module, mod )
- self.datatypes_by_extension[extension].model_object = getattr( module, model_class )
- log.debug( 'Added model class: %s to datatype: %s' % ( model_class, dtype ) )
- except Exception, e:
- log.warning( 'Error loading datatype "%s", problem: %s' % ( extension, str( e ) ) )
- def get_datatype_by_extension( self, ext ):
- return self.datatypes_by_extension.get( ext, None )
- def get_datatype_extensions( self ):
- rval = []
- for ext, datatype in self.datatypes_by_extension.items():
- rval.append( ext )
- return rval
-
-class Tool( object ):
- def __init__( self, model_object=None ):
- self.model_object = model_object
- self.label = 'Tool'
- def verify( self, f, xml_files=[], tool_tags={} ):
- # xml_files and tool_tags will only be received if we're called from the ToolSuite.verify() method.
- try:
- tar = tarfile.open( f.name )
- except tarfile.ReadError, e:
- raise DatatypeVerificationError( 'Error reading the archive, problem: %s' % str( e ) )
- if not xml_files:
- # Make sure we're not uploading a tool suite
- if filter( lambda x: x.lower().find( 'suite_config.xml' ) >= 0, tar.getnames() ):
- raise DatatypeVerificationError( 'The archive includes a suite_config.xml file, so set the upload type to "Tool Suite".' )
- xml_files = filter( lambda x: x.lower().endswith( '.xml' ), tar.getnames() )
- if not xml_files:
- raise DatatypeVerificationError( 'The archive does not contain any xml config files.' )
- for xml_file in xml_files:
- try:
- tree = ElementTree.parse( tar.extractfile( xml_file ) )
- root = tree.getroot()
- except Exception, e:
- raise DatatypeVerificationError( 'Error parsing file "%s", problem: %s' % ( str( xml_file ), str( e ) ) )
- if root.tag == 'tool':
- if 'id' not in root.keys():
- raise DatatypeVerificationError( "Tool xml file (%s) does not include the required 'id' attribute in the <tool> tag" % str( xml_file ) )
- if 'name' not in root.keys():
- raise DatatypeVerificationError( "Tool xml file (%s) does not include the required 'name' attribute in the <tool> tag" % str( xml_file ) )
- if 'version' not in root.keys():
- raise DatatypeVerificationError( "Tool xml file (%s) does not include the required 'version' attribute in the <tool> tag" % str( xml_file ) )
- if tool_tags:
- # We are verifying the tools inside a tool suite, so the current tag should have been found in the suite_config.xml
- # file parsed in the ToolSuite verify() method. The tool_tags dictionary should include a key matching the current
- # tool Id, and a tuple value matching the tool name and version.
- if root.attrib[ 'id' ] not in tool_tags:
- raise DatatypeVerificationError( 'Tool Id (%s) is not included in the suite_config.xml file.' % \
- ( str( root.attrib[ 'id' ] ) ) )
- tup = tool_tags[ root.attrib[ 'id' ] ]
- if root.attrib[ 'name' ] != tup[ 0 ]:
- raise DatatypeVerificationError( 'Tool name (%s) differs between suite_config.xml and the tool config file for tool Id (%s).' % \
- ( str( root.attrib[ 'name' ] ), str( root.attrib[ 'id' ] ) ) )
- if root.attrib[ 'version' ] != tup[ 1 ]:
- raise DatatypeVerificationError( 'Tool version (%s) differs between suite_config.xml and the tool config file for tool Id (%s).' % \
- ( str( root.attrib[ 'version' ] ), str( root.attrib[ 'id' ] ) ) )
- else:
- # We are not verifying a tool suite, so we'll create a bunch for returning to the caller.
- tool_bunch = Bunch()
- try:
- tool_bunch.id = root.attrib['id']
- tool_bunch.name = root.attrib['name']
- tool_bunch.version = root.attrib['version']
- except KeyError, e:
- raise DatatypeVerificationError( 'Tool XML file does not conform to the specification. Missing required <tool> tag attribute: %s' % str( e ) )
- tool_bunch.description = ''
- desc_tag = root.find( 'description' )
- if desc_tag is not None:
- description = desc_tag.text
- if description:
- tool_bunch.description = description.strip()
- tool_bunch.message = 'Tool: %s %s, Version: %s, Id: %s' % \
- ( str( tool_bunch.name ), str( tool_bunch.description ), str( tool_bunch.version ), str( tool_bunch.id ) )
- return tool_bunch
- else:
- # TODO: should we verify files that are not tool configs?
- log.debug( "The file named (%s) is not a tool config, so skipping verification." % str( xml_file ) )
- def create_model_object( self, datatype_bunch ):
- if self.model_object is None:
- raise Exception( 'No model object configured for %s, check the datatype configuration file' % self.__class__.__name__ )
- if datatype_bunch is None:
- # TODO: do it automatically
- raise Exception( 'Unable to create %s model object without passing in data' % self.__class__.__name__ )
- o = self.model_object()
- o.create_from_datatype( datatype_bunch )
- return o
-
-class ToolSuite( Tool ):
- def __init__( self, model_object=None ):
- self.model_object = model_object
- self.label = 'Tool Suite'
- def verify( self, f ):
- """
- A sample tool suite config:
- <suite id="onto_toolkit" name="ONTO Toolkit" version="1.0">
- <description>ONTO-Toolkit is a collection of Galaxy tools which support the manipulation of bio-ontologies.</description>
- <tool id="get_ancestor_terms" name="Get the ancestor terms of a given OBO term" version="1.0.0">
- <description>Collects the ancestor terms from a given term in the given OBO ontology</description>
- </tool>
- <tool id="get_child_terms" name="Get the child terms of a given OBO term" version="1.0.0">
- <description>Collects the child terms from a given term in the given OBO ontology</description>
- </tool>
- </suite>
- """
- try:
- tar = tarfile.open( f.name )
- except tarfile.ReadError:
- raise DatatypeVerificationError( 'The archive is not a readable tar file.' )
- suite_config = filter( lambda x: x.lower().find( 'suite_config.xml' ) >=0, tar.getnames() )
- if not suite_config:
- raise DatatypeVerificationError( 'The archive does not contain the required suite_config.xml config file. If you are uploading a single tool archive, set the upload type to "Tool".' )
- suite_config = suite_config[ 0 ]
- # Parse and verify suite_config
- archive_ok = False
- try:
- tree = ElementTree.parse( tar.extractfile( suite_config ) )
- root = tree.getroot()
- archive_ok = True
- except:
- log.exception( 'fail:' )
- if archive_ok and root.tag == 'suite':
- suite_bunch = Bunch()
- try:
- suite_bunch.id = root.attrib['id']
- suite_bunch.name = root.attrib['name']
- suite_bunch.version = root.attrib['version']
- except KeyError, e:
- raise DatatypeVerificationError( 'The file named tool-suite.xml does not conform to the specification. Missing required <suite> tag attribute: %s' % str( e ) )
- suite_bunch.description = ''
- desc_tag = root.find( 'description' )
- if desc_tag is not None:
- description = desc_tag.text
- if description:
- suite_bunch.description = description.strip()
- suite_bunch.message = 'Tool suite: %s %s, Version: %s, Id: %s' % \
- ( str( suite_bunch.name ), str( suite_bunch.description ), str( suite_bunch.version ), str( suite_bunch.id ) )
- # Create a dictionary of the tools in the suite where the keys are tool_ids and the
- # values are tuples of tool name and version
- tool_tags = {}
- for elem in root.findall( 'tool' ):
- tool_tags[ elem.attrib['id'] ] = ( elem.attrib['name'], elem.attrib['version'] )
- else:
- raise DatatypeVerificationError( "The file named %s is not a valid tool suite config." % str( suite_config ) )
- # Verify all included tool config files
- xml_files = filter( lambda x: x.lower().endswith( '.xml' ) and x.lower() != 'suite_config.xml', tar.getnames() )
- if not xml_files:
- raise DatatypeVerificationError( 'The archive does not contain any tool config (xml) files.' )
- Tool.verify( self, f, xml_files=xml_files, tool_tags=tool_tags )
- return suite_bunch
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the commit notification service is enabled for
the recipient of this email.
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/4b3fefa396cf/
changeset: 4b3fefa396cf
user: natefoo
date: 2011-08-26 21:59:29
summary: Fix uploading sorted BAMs to libraries when using the link method rather than copying.
affected #: 1 file (36 bytes)
--- a/tools/data_source/upload.py Fri Aug 26 15:53:16 2011 -0400
+++ b/tools/data_source/upload.py Fri Aug 26 15:59:29 2011 -0400
@@ -297,7 +297,7 @@
datatype = registry.get_datatype_by_extension( ext )
if dataset.type in ( 'server_dir', 'path_paste' ) and link_data_only == 'link_to_files':
# Never alter a file that will not be copied to Galaxy's local file store.
- if datatype.dataset_content_needs_grooming( output_path ):
+ if datatype.dataset_content_needs_grooming( dataset.path ):
err_msg = 'The uploaded files need grooming, so change your <b>Copy data into Galaxy?</b> selection to be ' + \
'<b>Copy files into Galaxy</b> instead of <b>Link to files without copying into Galaxy</b> so grooming can be performed.'
file_err( err_msg, dataset, json_file )
@@ -324,7 +324,7 @@
name = dataset.name,
line_count = line_count )
json_file.write( to_json_string( info ) + "\n" )
- if datatype.dataset_content_needs_grooming( output_path ):
+ if link_data_only == 'copy_files' and datatype.dataset_content_needs_grooming( output_path ):
# Groom the dataset content if necessary
datatype.groom_dataset_content( output_path )
def add_composite_file( dataset, registry, json_file, output_path, files_path ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the commit notification service is enabled for
the recipient of this email.