1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d62fd7469c62/
Changeset: d62fd7469c62
Branch: next-stable
User: dannon
Date: 2014-05-19 23:42:05
Summary: Fix the input_dbkey error (somewhat of a guess as to the intended functionality).
Affected #: 1 file
diff -r e7bcba2bec53e16c8e11bb136da9f8aad9e430ba -r d62fd7469c626064d8fe9792ea4584c57b5ed431 lib/galaxy/util/dbkeys.py
--- a/lib/galaxy/util/dbkeys.py
+++ b/lib/galaxy/util/dbkeys.py
@@ -55,9 +55,9 @@
chrom_info = db_dataset.file_name
else:
# -- Get chrom_info (len file) from either a custom or built-in build. --
- if trans.user and ( 'dbkeys' in trans.user.preferences ) and ( input_dbkey in from_json_string( trans.user.preferences[ 'dbkeys' ] ) ):
+ if trans.user and ( 'dbkeys' in trans.user.preferences ) and ( dbkey in from_json_string( trans.user.preferences[ 'dbkeys' ] ) ):
# Custom build.
- custom_build_dict = from_json_string( trans.user.preferences[ 'dbkeys' ] )[ input_dbkey ]
+ custom_build_dict = from_json_string( trans.user.preferences[ 'dbkeys' ] )[ dbkey ]
# HACK: the attempt to get chrom_info below will trigger the
# fasta-to-len converter if the dataset is not available or,
# which will in turn create a recursive loop when
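
For context, this hunk resolves chromosome length ("len" file) information for a build, and the fix simply uses the dbkey name that is in scope in this module instead of input_dbkey. A minimal standalone sketch of the custom-build lookup pattern, using the stdlib json module in place of Galaxy's from_json_string and a hypothetical user_preferences dict:

import json

def lookup_custom_build(user_preferences, dbkey):
    # Return the custom build entry stored under the user's 'dbkeys'
    # preference, or None if this dbkey is not a custom build.
    # Assumes, as the hunk above implies, that the preference value is a
    # JSON object mapping dbkey -> build metadata.
    raw = user_preferences.get('dbkeys')
    if not raw:
        return None
    return json.loads(raw).get(dbkey)

# Hypothetical usage:
prefs = {'dbkeys': json.dumps({'custom1': {'name': 'My Build', 'len': 'dataset_42'}})}
print(lookup_custom_build(prefs, 'custom1'))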
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/35afd36f9014/
Changeset: 35afd36f9014
User: dannon
Date: 2014-05-19 22:56:28
Summary: Merge from next-stable.
Affected #: 7 files
diff -r 174b6a2281e53793a8188ba2b2792d0dd220d362 -r 35afd36f9014915c4c82f65b1c52a6636f48328e lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -18,6 +18,7 @@
from galaxy.tools.data_manager.manager import DataManagers
from galaxy.jobs import metrics as job_metrics
from galaxy.web.base import pluginframework
+from galaxy.queue_worker import GalaxyQueueWorker
import logging
log = logging.getLogger( __name__ )
@@ -146,6 +147,10 @@
# Initialize the external service types
self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file, self.config.external_service_type_path, self )
self.model.engine.dispose()
+ self.control_worker = GalaxyQueueWorker(self,
+ galaxy.queues.control_queue_from_config(self.config),
+ galaxy.queue_worker.control_message_to_task)
+ self.control_worker.start()
def shutdown( self ):
self.job_manager.shutdown()
diff -r 174b6a2281e53793a8188ba2b2792d0dd220d362 -r 35afd36f9014915c4c82f65b1c52a6636f48328e lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -340,6 +340,17 @@
amqp_config = {}
for k, v in amqp_config:
self.amqp[k] = v
+ # Galaxy internal control queue configuration.
+ # If specified in universe, use it, otherwise we use whatever 'real'
+ # database is specified. Lastly, we create and use new sqlite database
+ # (to minimize locking) as a final option.
+ if 'amqp_internal_connection' in kwargs:
+ self.amqp_internal_connection = kwargs.get('amqp_internal_connection')
+ # TODO Get extra amqp args as necessary for ssl
+ elif 'database_connection' in kwargs:
+ self.amqp_internal_connection = "sqlalchemy+"+self.database_connection
+ else:
+ self.amqp_internal_connection = "sqlalchemy+sqlite:///%s?isolation_level=IMMEDIATE" % resolve_path( "database/control.sqlite", self.root )
self.biostar_url = kwargs.get( 'biostar_url', None )
self.biostar_key_name = kwargs.get( 'biostar_key_name', None )
self.biostar_key = kwargs.get( 'biostar_key', None )
diff -r 174b6a2281e53793a8188ba2b2792d0dd220d362 -r 35afd36f9014915c4c82f65b1c52a6636f48328e lib/galaxy/queue_worker.py
--- /dev/null
+++ b/lib/galaxy/queue_worker.py
@@ -0,0 +1,94 @@
+"""
+Galaxy control queue and worker. This is used to handle 'app' control like
+reloading the toolbox, etc., across multiple processes.
+"""
+
+import logging
+import threading
+import galaxy.queues
+from galaxy import eggs, util
+eggs.require('kombu')
+
+from kombu import Connection
+from kombu.mixins import ConsumerMixin
+from kombu.pools import producers
+
+
+log = logging.getLogger(__name__)
+
+
+class GalaxyQueueWorker(ConsumerMixin, threading.Thread):
+ """
+ This is a flexible worker for galaxy's queues. Each process, web or
+ handler, will have one of these used for dispatching so called 'control'
+ tasks.
+ """
+ def __init__(self, app, queue, task_mapping):
+ super(GalaxyQueueWorker, self).__init__()
+ log.info("Initalizing Galaxy Queue Worker on %s" % app.config.amqp_internal_connection)
+ self.connection = Connection(app.config.amqp_internal_connection)
+ self.app = app
+ # Eventually we may want different workers w/ their own queues and task
+ # mappings. Right now, there's only the one.
+ self.control_queue = queue
+ self.task_mapping = task_mapping
+ self.declare_queues = galaxy.queues.all_control_queues_for_declare(app.config)
+ # TODO we may want to purge the queue at the start to avoid executing
+ # stale 'reload_tool', etc messages. This can happen if, say, a web
+ # process goes down and messages get sent before it comes back up.
+ # Those messages will no longer be useful (in any current case)
+
+ def get_consumers(self, Consumer, channel):
+ return [Consumer(queues=self.control_queue,
+ callbacks=[self.process_task])]
+
+ def process_task(self, body, message):
+ if body['task'] in self.task_mapping:
+ if body.get('noop', None) != self.app.config.server_name:
+ try:
+ f = self.task_mapping[body['task']]
+ log.debug("Instance recieved '%s' task, executing now." % body['task'])
+ f(self.app, **body['kwargs'])
+ except Exception:
+ # this shouldn't ever throw an exception, but...
+ log.exception("Error running control task type: %s" % body['task'])
+ else:
+ log.warning("Recieved a malformed task message:\n%s" % body)
+ message.ack()
+
+
+def send_control_task(trans, task, noop_self=False, kwargs={}):
+ log.info("Sending %s control task." % task)
+ payload = {'task': task,
+ 'kwargs': kwargs}
+ if noop_self:
+ payload['noop'] = trans.app.config.server_name
+ c = Connection(trans.app.config.amqp_internal_connection)
+ with producers[c].acquire(block=True) as producer:
+ producer.publish(payload, exchange=galaxy.queues.galaxy_exchange,
+ declare=[galaxy.queues.galaxy_exchange] + galaxy.queues.all_control_queues_for_declare(trans.app.config),
+ routing_key='control')
+
+
+# Tasks -- to be reorganized into a separate module as appropriate. This is
+# just an example method. Ideally this gets pushed into atomic tasks, whether
+# where they're currently invoked, or elsewhere. (potentially using a dispatch
+# decorator).
+def reload_tool(app, **kwargs):
+ params = util.Params(kwargs)
+ tool_id = params.get('tool_id', None)
+ log.debug("Executing reload tool task for %s" % tool_id)
+ if tool_id:
+ app.toolbox.reload_tool_by_id( tool_id )
+ else:
+ log.error("Reload tool invoked without tool id.")
+
+
+def reload_tool_data_tables(app, **kwargs):
+ params = util.Params(kwargs)
+ log.debug("Executing tool data table reload for %s" % params.get('table_names', 'all tables'))
+ table_names = app.tool_data_tables.reload_tables( table_names=params.get('table_name', None))
+ log.debug("Finished data table reload for %s" % table_names)
+
+control_message_to_task = { 'reload_tool': reload_tool,
+ 'reload_tool_data_tables': reload_tool_data_tables}
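
As the queue_worker module above is structured, wiring in a new control task only requires a handler function plus an entry in control_message_to_task. A hypothetical sketch (the 'reload_datatypes' task name and handler body are illustrative and not part of this changeset), assuming a running Galaxy process with a trans in hand:

# Hypothetical handler in lib/galaxy/queue_worker.py, following the pattern of reload_tool:
def reload_datatypes(app, **kwargs):
    # Illustrative only -- whatever per-process state needs refreshing
    # would be reloaded here.
    log.debug("Executing datatypes reload task.")

control_message_to_task['reload_datatypes'] = reload_datatypes

# From any web or handler process, broadcast to every other process
# (noop_self=True stamps the payload so the sender's own worker skips it):
galaxy.queue_worker.send_control_task(trans, 'reload_datatypes', noop_self=True)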
diff -r 174b6a2281e53793a8188ba2b2792d0dd220d362 -r 35afd36f9014915c4c82f65b1c52a6636f48328e lib/galaxy/queues.py
--- /dev/null
+++ b/lib/galaxy/queues.py
@@ -0,0 +1,32 @@
+"""
+
+All message queues used by Galaxy
+
+"""
+
+from galaxy import eggs
+
+eggs.require("kombu")
+from kombu import Exchange, Queue
+
+ALL_CONTROL = "control.*"
+galaxy_exchange = Exchange('galaxy_core_exchange', type='topic')
+
+
+def all_control_queues_for_declare(config):
+ """
+ For in-memory routing (used by sqlalchemy-based transports), we need to be able to
+ build the entire routing table in producers.
+
+ Refactor later to actually persist this somewhere instead of building it repeatedly.
+ """
+ return [Queue('control.%s' % q, galaxy_exchange, routing_key='control') for q in config.server_names]
+
+
+def control_queue_from_config(config):
+ """
+ Returns a Queue instance with the correct name and routing key for this
+ galaxy process's config
+ """
+ return Queue("control.%s" % config.server_name, galaxy_exchange,
+ routing_key='control')
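
Concretely, each Galaxy process gets its own control.<server_name> queue bound to the shared topic exchange, and every queue uses the same 'control' routing key, so a single publish fans out to all processes. A small illustration with a hypothetical two-process configuration:

from kombu import Exchange, Queue

galaxy_exchange = Exchange('galaxy_core_exchange', type='topic')
server_names = ['web0', 'handler0']  # hypothetical config.server_names
control_queues = [Queue('control.%s' % name, galaxy_exchange, routing_key='control')
                  for name in server_names]
# -> queues named 'control.web0' and 'control.handler0'; one publish with
#    routing_key='control' on galaxy_exchange is delivered to both.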
diff -r 174b6a2281e53793a8188ba2b2792d0dd220d362 -r 35afd36f9014915c4c82f65b1c52a6636f48328e lib/galaxy/web/base/controllers/admin.py
--- a/lib/galaxy/web/base/controllers/admin.py
+++ b/lib/galaxy/web/base/controllers/admin.py
@@ -5,6 +5,7 @@
from galaxy.util import inflector
from galaxy.web.form_builder import CheckboxField
from string import punctuation as PUNCTUATION
+import galaxy.queue_worker
log = logging.getLogger( __name__ )
@@ -62,8 +63,9 @@
toolbox = self.app.toolbox
tool_id = None
if params.get( 'reload_tool_button', False ):
- tool_id = params.tool_id
- message, status = toolbox.reload_tool_by_id( tool_id )
+ tool_id = params.get('tool_id', None)
+ galaxy.queue_worker.send_control_task(trans, 'reload_tool', noop_self=True, kwargs={'tool_id': tool_id} )
+ message, status = trans.app.toolbox.reload_tool_by_id( tool_id)
return trans.fill_template( '/admin/reload_tool.mako',
tool_id=tool_id,
toolbox=toolbox,
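
The handling process still reloads the tool locally via reload_tool_by_id, while the control message tells every other process to do the same. Based on send_control_task above, and assuming this process's server_name is 'web0' (hypothetical), the published payload would be:

# {'task': 'reload_tool', 'kwargs': {'tool_id': tool_id}, 'noop': 'web0'}
# Each consumer whose config.server_name differs from 'web0' then runs
# reload_tool(app, tool_id=tool_id); the sender's own worker skips it.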
diff -r 174b6a2281e53793a8188ba2b2792d0dd220d362 -r 35afd36f9014915c4c82f65b1c52a6636f48328e lib/galaxy/webapps/galaxy/controllers/data_manager.py
--- a/lib/galaxy/webapps/galaxy/controllers/data_manager.py
+++ b/lib/galaxy/webapps/galaxy/controllers/data_manager.py
@@ -1,6 +1,7 @@
+import galaxy.queue_worker
from galaxy import web
+from galaxy.util.json import from_json_string
from galaxy.web.base.controller import BaseUIController
-from galaxy.util.json import from_json_string
import pkg_resources;
pkg_resources.require( "Paste" )
@@ -90,6 +91,9 @@
table_name = table_name.split( "," )
# Reload the tool data tables
table_names = self.app.tool_data_tables.reload_tables( table_names=table_name )
+ galaxy.queue_worker.send_control_task(trans, 'reload_tool_data_tables',
+ noop_self=True,
+ kwargs={'table_name': table_name} )
redirect_url = None
if table_names:
status = 'done'
diff -r 174b6a2281e53793a8188ba2b2792d0dd220d362 -r 35afd36f9014915c4c82f65b1c52a6636f48328e universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -821,7 +821,21 @@
#user_tool_section_filters = examples:restrict_text
#user_tool_label_filters = examples:restrict_upload_to_admins, examples:restrict_encode
-# ---- Galaxy Message Queue -------------------------------------------------
+# Galaxy Application Internal Message Queue
+
+# Galaxy uses AMQP internally TODO more documentation on what for.
+# For examples, see http://ask.github.io/kombu/userguide/connections.html
+#
+# Without specifying anything here, galaxy will first attempt to use your
+# specified database_connection above. If that's not specified either, Galaxy
+# will automatically create and use a separate sqlite database located in your
+# <galaxy>/database folder (indicated in the commented out line below).
+
+#amqp_internal_connection = "sqlite:///./database/control.sqlite?isolation_level=IMMEDIATE"
+
+
+
+# ---- Galaxy External Message Queue -------------------------------------------------
# Galaxy uses AMQ protocol to receive messages from external sources like
# bar code scanners. Galaxy has been tested against RabbitMQ AMQP implementation.
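
Per the config.py change above, the effective amqp_internal_connection resolves in this order; for illustration (all values hypothetical):

# 1. An explicit setting wins, e.g. a kombu broker URL such as
#    amqp_internal_connection = amqp://guest:guest@localhost:5672//
# 2. Otherwise database_connection is reused via kombu's sqlalchemy transport:
#    database_connection = postgresql://user:pass@localhost/galaxy
#    -> amqp_internal_connection = sqlalchemy+postgresql://user:pass@localhost/galaxy
# 3. Otherwise a dedicated sqlite database is created:
#    -> sqlalchemy+sqlite:///<galaxy>/database/control.sqlite?isolation_level=IMMEDIATE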
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c822084e6e54/
Changeset: c822084e6e54
Branch: next-stable
User: dannon
Date: 2014-05-19 22:56:08
Summary: Initial internal control message queue for IPC. It needs significant refactoring, as noted in several TODOs, but this is a minimally functional version, so it makes the next-stable cutoff. It currently only handles reloading tools and data tables, but adding further tasks should be straightforward.
Affected #: 7 files
diff -r da128dd1ab03d905da0cf8dafec70443267cff64 -r c822084e6e5407c6ac80f999fa313808049e58f7 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -18,6 +18,7 @@
from galaxy.tools.data_manager.manager import DataManagers
from galaxy.jobs import metrics as job_metrics
from galaxy.web.base import pluginframework
+from galaxy.queue_worker import GalaxyQueueWorker
import logging
log = logging.getLogger( __name__ )
@@ -146,6 +147,10 @@
# Initialize the external service types
self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file, self.config.external_service_type_path, self )
self.model.engine.dispose()
+ self.control_worker = GalaxyQueueWorker(self,
+ galaxy.queues.control_queue_from_config(self.config),
+ galaxy.queue_worker.control_message_to_task)
+ self.control_worker.start()
def shutdown( self ):
self.job_manager.shutdown()
diff -r da128dd1ab03d905da0cf8dafec70443267cff64 -r c822084e6e5407c6ac80f999fa313808049e58f7 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -340,6 +340,17 @@
amqp_config = {}
for k, v in amqp_config:
self.amqp[k] = v
+ # Galaxy internal control queue configuration.
+ # If specified in universe, use it, otherwise we use whatever 'real'
+ # database is specified. Lastly, we create and use new sqlite database
+ # (to minimize locking) as a final option.
+ if 'amqp_internal_connection' in kwargs:
+ self.amqp_internal_connection = kwargs.get('amqp_internal_connection')
+ # TODO Get extra amqp args as necessary for ssl
+ elif 'database_connection' in kwargs:
+ self.amqp_internal_connection = "sqlalchemy+"+self.database_connection
+ else:
+ self.amqp_internal_connection = "sqlalchemy+sqlite:///%s?isolation_level=IMMEDIATE" % resolve_path( "database/control.sqlite", self.root )
self.biostar_url = kwargs.get( 'biostar_url', None )
self.biostar_key_name = kwargs.get( 'biostar_key_name', None )
self.biostar_key = kwargs.get( 'biostar_key', None )
diff -r da128dd1ab03d905da0cf8dafec70443267cff64 -r c822084e6e5407c6ac80f999fa313808049e58f7 lib/galaxy/queue_worker.py
--- /dev/null
+++ b/lib/galaxy/queue_worker.py
@@ -0,0 +1,94 @@
+"""
+Galaxy control queue and worker. This is used to handle 'app' control like
+reloading the toolbox, etc., across multiple processes.
+"""
+
+import logging
+import threading
+import galaxy.queues
+from galaxy import eggs, util
+eggs.require('kombu')
+
+from kombu import Connection
+from kombu.mixins import ConsumerMixin
+from kombu.pools import producers
+
+
+log = logging.getLogger(__name__)
+
+
+class GalaxyQueueWorker(ConsumerMixin, threading.Thread):
+ """
+ This is a flexible worker for galaxy's queues. Each process, web or
+ handler, will have one of these used for dispatching so called 'control'
+ tasks.
+ """
+ def __init__(self, app, queue, task_mapping):
+ super(GalaxyQueueWorker, self).__init__()
+ log.info("Initalizing Galaxy Queue Worker on %s" % app.config.amqp_internal_connection)
+ self.connection = Connection(app.config.amqp_internal_connection)
+ self.app = app
+ # Eventually we may want different workers w/ their own queues and task
+ # mappings. Right now, there's only the one.
+ self.control_queue = queue
+ self.task_mapping = task_mapping
+ self.declare_queues = galaxy.queues.all_control_queues_for_declare(app.config)
+ # TODO we may want to purge the queue at the start to avoid executing
+ # stale 'reload_tool', etc messages. This can happen if, say, a web
+ # process goes down and messages get sent before it comes back up.
+ # Those messages will no longer be useful (in any current case)
+
+ def get_consumers(self, Consumer, channel):
+ return [Consumer(queues=self.control_queue,
+ callbacks=[self.process_task])]
+
+ def process_task(self, body, message):
+ if body['task'] in self.task_mapping:
+ if body.get('noop', None) != self.app.config.server_name:
+ try:
+ f = self.task_mapping[body['task']]
+ log.debug("Instance recieved '%s' task, executing now." % body['task'])
+ f(self.app, **body['kwargs'])
+ except Exception:
+ # this shouldn't ever throw an exception, but...
+ log.exception("Error running control task type: %s" % body['task'])
+ else:
+ log.warning("Recieved a malformed task message:\n%s" % body)
+ message.ack()
+
+
+def send_control_task(trans, task, noop_self=False, kwargs={}):
+ log.info("Sending %s control task." % task)
+ payload = {'task': task,
+ 'kwargs': kwargs}
+ if noop_self:
+ payload['noop'] = trans.app.config.server_name
+ c = Connection(trans.app.config.amqp_internal_connection)
+ with producers[c].acquire(block=True) as producer:
+ producer.publish(payload, exchange=galaxy.queues.galaxy_exchange,
+ declare=[galaxy.queues.galaxy_exchange] + galaxy.queues.all_control_queues_for_declare(trans.app.config),
+ routing_key='control')
+
+
+# Tasks -- to be reorganized into a separate module as appropriate. This is
+# just an example method. Ideally this gets pushed into atomic tasks, whether
+# where they're currently invoked, or elsewhere. (potentially using a dispatch
+# decorator).
+def reload_tool(app, **kwargs):
+ params = util.Params(kwargs)
+ tool_id = params.get('tool_id', None)
+ log.debug("Executing reload tool task for %s" % tool_id)
+ if tool_id:
+ app.toolbox.reload_tool_by_id( tool_id )
+ else:
+ log.error("Reload tool invoked without tool id.")
+
+
+def reload_tool_data_tables(app, **kwargs):
+ params = util.Params(kwargs)
+ log.debug("Executing tool data table reload for %s" % params.get('table_names', 'all tables'))
+ table_names = app.tool_data_tables.reload_tables( table_names=params.get('table_name', None))
+ log.debug("Finished data table reload for %s" % table_names)
+
+control_message_to_task = { 'reload_tool': reload_tool,
+ 'reload_tool_data_tables': reload_tool_data_tables}
diff -r da128dd1ab03d905da0cf8dafec70443267cff64 -r c822084e6e5407c6ac80f999fa313808049e58f7 lib/galaxy/queues.py
--- /dev/null
+++ b/lib/galaxy/queues.py
@@ -0,0 +1,32 @@
+"""
+
+All message queues used by Galaxy
+
+"""
+
+from galaxy import eggs
+
+eggs.require("kombu")
+from kombu import Exchange, Queue
+
+ALL_CONTROL = "control.*"
+galaxy_exchange = Exchange('galaxy_core_exchange', type='topic')
+
+
+def all_control_queues_for_declare(config):
+ """
+ For in-memory routing (used by sqlalchemy-based transports), we need to be able to
+ build the entire routing table in producers.
+
+ Refactor later to actually persist this somewhere instead of building it repeatedly.
+ """
+ return [Queue('control.%s' % q, galaxy_exchange, routing_key='control') for q in config.server_names]
+
+
+def control_queue_from_config(config):
+ """
+ Returns a Queue instance with the correct name and routing key for this
+ galaxy process's config
+ """
+ return Queue("control.%s" % config.server_name, galaxy_exchange,
+ routing_key='control')
diff -r da128dd1ab03d905da0cf8dafec70443267cff64 -r c822084e6e5407c6ac80f999fa313808049e58f7 lib/galaxy/web/base/controllers/admin.py
--- a/lib/galaxy/web/base/controllers/admin.py
+++ b/lib/galaxy/web/base/controllers/admin.py
@@ -5,6 +5,7 @@
from galaxy.util import inflector
from galaxy.web.form_builder import CheckboxField
from string import punctuation as PUNCTUATION
+import galaxy.queue_worker
log = logging.getLogger( __name__ )
@@ -62,8 +63,9 @@
toolbox = self.app.toolbox
tool_id = None
if params.get( 'reload_tool_button', False ):
- tool_id = params.tool_id
- message, status = toolbox.reload_tool_by_id( tool_id )
+ tool_id = params.get('tool_id', None)
+ galaxy.queue_worker.send_control_task(trans, 'reload_tool', noop_self=True, kwargs={'tool_id': tool_id} )
+ message, status = trans.app.toolbox.reload_tool_by_id( tool_id)
return trans.fill_template( '/admin/reload_tool.mako',
tool_id=tool_id,
toolbox=toolbox,
diff -r da128dd1ab03d905da0cf8dafec70443267cff64 -r c822084e6e5407c6ac80f999fa313808049e58f7 lib/galaxy/webapps/galaxy/controllers/data_manager.py
--- a/lib/galaxy/webapps/galaxy/controllers/data_manager.py
+++ b/lib/galaxy/webapps/galaxy/controllers/data_manager.py
@@ -1,6 +1,7 @@
+import galaxy.queue_worker
from galaxy import web
+from galaxy.util.json import from_json_string
from galaxy.web.base.controller import BaseUIController
-from galaxy.util.json import from_json_string
import pkg_resources;
pkg_resources.require( "Paste" )
@@ -90,6 +91,9 @@
table_name = table_name.split( "," )
# Reload the tool data tables
table_names = self.app.tool_data_tables.reload_tables( table_names=table_name )
+ galaxy.queue_worker.send_control_task(trans, 'reload_tool_data_tables',
+ noop_self=True,
+ kwargs={'table_name': table_name} )
redirect_url = None
if table_names:
status = 'done'
diff -r da128dd1ab03d905da0cf8dafec70443267cff64 -r c822084e6e5407c6ac80f999fa313808049e58f7 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -821,7 +821,21 @@
#user_tool_section_filters = examples:restrict_text
#user_tool_label_filters = examples:restrict_upload_to_admins, examples:restrict_encode
-# ---- Galaxy Message Queue -------------------------------------------------
+# Galaxy Application Internal Message Queue
+
+# Galaxy uses AMQP internally TODO more documentation on what for.
+# For examples, see http://ask.github.io/kombu/userguide/connections.html
+#
+# Without specifying anything here, galaxy will first attempt to use your
+# specified database_connection above. If that's not specified either, Galaxy
+# will automatically create and use a separate sqlite database located in your
+# <galaxy>/database folder (indicated in the commented out line below).
+
+#amqp_internal_connection = "sqlite:///./database/control.sqlite?isolation_level=IMMEDIATE"
+
+
+
+# ---- Galaxy External Message Queue -------------------------------------------------
# Galaxy uses AMQ protocol to receive messages from external sources like
# bar code scanners. Galaxy has been tested against RabbitMQ AMQP implementation.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9b33b543fb14/
Changeset: 9b33b543fb14
User: dan
Date: 2014-05-19 21:35:58
Summary: Fix for the Tool Shed's handling of the new GenomeBuilds manager.
Affected #: 2 files
diff -r 3faf01e5111ecf454f06b22546b530131b0550be -r 9b33b543fb1448f8f775ac975e2605ac610c8f63 lib/galaxy/webapps/tool_shed/app.py
--- a/lib/galaxy/webapps/tool_shed/app.py
+++ b/lib/galaxy/webapps/tool_shed/app.py
@@ -6,6 +6,7 @@
import galaxy.datatypes.registry
import galaxy.webapps.tool_shed.model
from galaxy.openid.providers import OpenIDProviders
+from galaxy.util.dbkeys import GenomeBuilds
from galaxy.web import security
from galaxy.tags.tag_handler import CommunityTagHandler
from tool_shed.grids.util import RepositoryGridFilterManager
@@ -49,6 +50,7 @@
# Initialize the Tool Shed tool data tables. Never pass a configuration file here
# because the Tool Shed should always have an empty dictionary!
self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_path )
+ self.genome_builds = GenomeBuilds( self )
# The Tool Shed makes no use of a Galaxy toolbox, but this attribute is still required.
self.toolbox = tools.ToolBox( [], self.config.tool_path, self )
# Initialize the Tool Shed security agent.
diff -r 3faf01e5111ecf454f06b22546b530131b0550be -r 9b33b543fb1448f8f775ac975e2605ac610c8f63 lib/galaxy/webapps/tool_shed/config.py
--- a/lib/galaxy/webapps/tool_shed/config.py
+++ b/lib/galaxy/webapps/tool_shed/config.py
@@ -53,6 +53,7 @@
self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "shed-tool-data" ), os.getcwd() )
self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root )
self.shed_tool_data_table_config = resolve_path( kwargs.get( 'shed_tool_data_table_config', 'shed_tool_data_table_conf.xml' ), self.root )
+ self.len_file_path = resolve_path( kwargs.get( "len_file_path", os.path.join( self.tool_data_path, 'shared','ucsc','chrom') ), self.root )
self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
# Install and test framework for testing tools contained in repositories.
self.num_tool_test_results_saved = kwargs.get( 'num_tool_test_results_saved', 5 )
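
This mirrors the Galaxy app: the Tool Shed now builds a GenomeBuilds manager and points len_file_path at the shared/ucsc/chrom directory under its tool data path. As an illustration of how a per-build length file would typically be located under that layout (the '<dbkey>.len' filename convention is assumed here, not shown in this diff):

import os

def len_file_for_build(len_file_path, dbkey):
    # Illustrative only: chrom length files are conventionally named
    # <dbkey>.len under len_file_path; that naming is an assumption.
    return os.path.join(len_file_path, "%s.len" % dbkey)

# e.g. len_file_for_build("shed-tool-data/shared/ucsc/chrom", "hg19")
#      -> "shed-tool-data/shared/ucsc/chrom/hg19.len"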