1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a32dcf32dffc/
Changeset: a32dcf32dffc
User: jmchilton
Date: 2013-12-13 22:00:23
Summary: Introduce mixin to reduce duplication between app.py and migrate/common.py.
This includes a fix so that tool migrations will target the galaxy install database (if different than the general galaxy database).
Affected #: 3 files
diff -r 09533cfe154682bdfb1b8786fb827a99461022a6 -r a32dcf32dffc6dd7175092f4fda7476258fe061e lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -1,16 +1,10 @@
from __future__ import absolute_import
-import sys, os, atexit
+import sys
+import os
-from galaxy import config, jobs, util, tools, web
-import galaxy.tools.search
-import galaxy.tools.data
-import tool_shed.galaxy_install
-import tool_shed.tool_shed_registry
-from galaxy.web import security
+from galaxy import config, jobs
import galaxy.model
-import galaxy.datatypes.registry
import galaxy.security
-from galaxy.objectstore import build_object_store_from_config
import galaxy.quota
from galaxy.tags.tag_handler import GalaxyTagHandler
from galaxy.visualization.genomes import Genomes
@@ -27,7 +21,8 @@
import logging
log = logging.getLogger( __name__ )
-class UniverseApplication( object ):
+
+class UniverseApplication( object, config.ConfiguresGalaxyMixin ):
"""Encapsulates the state of a Universe application"""
def __init__( self, **kwargs ):
print >> sys.stderr, "python path is: " + ", ".join( sys.path )
@@ -38,93 +33,38 @@
self.config.check()
config.configure_logging( self.config )
self.configure_fluent_log()
- # Determine the database url
- if self.config.database_connection:
- db_url = self.config.database_connection
- else:
- db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
- install_db_url = self.config.install_database_connection
- # TODO: Consider more aggressive check here that this is not the same
- # database file under the hood.
- combined_install_database = not( install_db_url and install_db_url != db_url )
- # Set up the tool sheds registry
- if os.path.isfile( self.config.tool_sheds_config ):
- self.tool_shed_registry = tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
- else:
- self.tool_shed_registry = None
- # Initialize database / check for appropriate schema version. # If this
- # is a new installation, we'll restrict the tool migration messaging.
- from galaxy.model.migrate.check import create_or_verify_database
- create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options, app=self )
- if not combined_install_database:
- from galaxy.model.tool_shed_install.migrate.check import create_or_verify_database as tsi_create_or_verify_database
- tsi_create_or_verify_database( install_db_url, self.config.install_database_engine_options, app=self )
- # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
- from tool_shed.galaxy_install.migrate.check import verify_tools
- verify_tools( self, db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options )
- # Object store manager
- self.object_store = build_object_store_from_config(self.config, fsmon=True)
+ self._configure_tool_shed_registry()
+
+ self._configure_object_store( fsmon=True )
+
# Setup the database engine and ORM
- from galaxy.model import mapping
- self.model = mapping.init( self.config.file_path,
- db_url,
- self.config.database_engine_options,
- map_install_models=combined_install_database,
- database_query_profiling_proxy = self.config.database_query_profiling_proxy,
- object_store = self.object_store,
- trace_logger=self.trace_logger,
- use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ) )
+ config_file = kwargs.get( 'global_conf', {} ).get( '__file__', None )
+ self._configure_models( check_migrate_databases=True, check_migrate_tools=True, config_file=config_file )
- if combined_install_database:
- log.info("Install database targetting Galaxy's database configuration.")
- self.install_model = self.model
- else:
- from galaxy.model.tool_shed_install import mapping as install_mapping
- install_db_url = self.config.install_database_connection
- log.info("Install database using its own connection %s" % install_db_url)
- install_db_engine_options = self.config.install_database_engine_options
- self.install_model = install_mapping.init( install_db_url,
- install_db_engine_options )
# Manage installed tool shed repositories.
from tool_shed.galaxy_install import installed_repository_manager
self.installed_repository_manager = installed_repository_manager.InstalledRepositoryManager( self )
- # Create an empty datatypes registry.
- self.datatypes_registry = galaxy.datatypes.registry.Registry()
- # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories. We
- # load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
- # generic sniffers (eg text,xml) which catch anything, so it's impossible for proprietary sniffers to be used.
- # However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
- # in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence. If there is a conflict
- # between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
- self.installed_repository_manager.load_proprietary_datatypes()
- # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
- self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
+
+ self._configure_datatypes_registry( self.installed_repository_manager )
galaxy.model.set_datatypes_registry( self.datatypes_registry )
+
# Security helper
- self.security = security.SecurityHelper( id_secret=self.config.id_secret )
+ self._configure_security()
# Tag handler
self.tag_handler = GalaxyTagHandler()
# Genomes
self.genomes = Genomes( self )
# Data providers registry.
self.data_provider_registry = DataProviderRegistry()
- # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
- self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( tool_data_path=self.config.tool_data_path,
- config_filename=self.config.tool_data_table_config_path )
- # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
- self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
- tool_data_path=self.tool_data_tables.tool_data_path,
- from_shed_config=False )
+
+ self._configure_tool_data_tables( from_shed_config=False )
+
# Initialize the job management configuration
self.job_config = jobs.JobConfiguration(self)
- # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
- tool_configs = self.config.tool_configs
- if self.config.migrated_tools_config not in tool_configs:
- tool_configs.append( self.config.migrated_tools_config )
- self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
- # Search support for tools
- self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
+
+ self._configure_toolbox()
+
# Load Data Manager
self.data_managers = DataManagers( self )
# If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories.
diff -r 09533cfe154682bdfb1b8786fb827a99461022a6 -r a32dcf32dffc6dd7175092f4fda7476258fe061e lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -473,6 +473,7 @@
rval[ key ] = value
return rval
+
def configure_logging( config ):
"""
Allow some basic logging configuration to be read from ini file.
@@ -513,3 +514,110 @@
sentry_handler.setLevel( logging.WARN )
root.addHandler( sentry_handler )
+
+class ConfiguresGalaxyMixin:
+ """ Shared code for configuring Galaxy-like app objects.
+ """
+
+ def _configure_toolbox( self ):
+ # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
+ tool_configs = self.config.tool_configs
+ if self.config.migrated_tools_config not in tool_configs:
+ tool_configs.append( self.config.migrated_tools_config )
+ from galaxy import tools
+ self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
+ # Search support for tools
+ import galaxy.tools.search
+ self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
+
+ def _configure_tool_data_tables( self, from_shed_config ):
+ from galaxy.tools.data import ToolDataTableManager
+
+ # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
+ self.tool_data_tables = ToolDataTableManager( tool_data_path=self.config.tool_data_path,
+ config_filename=self.config.tool_data_table_config_path )
+ # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
+ self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
+ tool_data_path=self.tool_data_tables.tool_data_path,
+ from_shed_config=from_shed_config )
+
+ def _configure_datatypes_registry( self, installed_repository_manager=None ):
+ from galaxy.datatypes import registry
+ # Create an empty datatypes registry.
+ self.datatypes_registry = registry.Registry()
+ if installed_repository_manager:
+ # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories. We
+ # load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
+ # generic sniffers (eg text,xml) which catch anything, so it's impossible for proprietary sniffers to be used.
+ # However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
+ # in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence. If there is a conflict
+ # between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
+ installed_repository_manager.load_proprietary_datatypes()
+ # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
+ self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
+
+ def _configure_object_store( self, **kwds ):
+ from galaxy.objectstore import build_object_store_from_config
+ self.object_store = build_object_store_from_config( self.config, **kwds )
+
+ def _configure_security( self ):
+ from galaxy.web import security
+ self.security = security.SecurityHelper( id_secret=self.config.id_secret )
+
+ def _configure_tool_shed_registry( self ):
+ import tool_shed.tool_shed_registry
+
+ # Set up the tool sheds registry
+ if os.path.isfile( self.config.tool_sheds_config ):
+ self.tool_shed_registry = tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
+ else:
+ self.tool_shed_registry = None
+
+ def _configure_models( self, check_migrate_databases=False, check_migrate_tools=False, config_file=None ):
+ """
+ Preconditions: object_store must be set on self.
+ """
+ if self.config.database_connection:
+ db_url = self.config.database_connection
+ else:
+ db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
+ install_db_url = self.config.install_database_connection
+ # TODO: Consider more aggressive check here that this is not the same
+ # database file under the hood.
+ combined_install_database = not( install_db_url and install_db_url != db_url )
+ install_db_url = install_db_url or db_url
+
+ if check_migrate_databases:
+ # Initialize database / check for appropriate schema version. # If this
+ # is a new installation, we'll restrict the tool migration messaging.
+ from galaxy.model.migrate.check import create_or_verify_database
+ create_or_verify_database( db_url, config_file, self.config.database_engine_options, app=self )
+ if not combined_install_database:
+ from galaxy.model.tool_shed_install.migrate.check import create_or_verify_database as tsi_create_or_verify_database
+ tsi_create_or_verify_database( install_db_url, self.config.install_database_engine_options, app=self )
+
+ if check_migrate_tools:
+ # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
+ from tool_shed.galaxy_install.migrate.check import verify_tools
+ verify_tools( self, install_db_url, config_file, self.config.database_engine_options )
+
+ from galaxy.model import mapping
+ self.model = mapping.init( self.config.file_path,
+ db_url,
+ self.config.database_engine_options,
+ map_install_models=combined_install_database,
+ database_query_profiling_proxy=self.config.database_query_profiling_proxy,
+ object_store=self.object_store,
+ trace_logger=getattr(self, "trace_logger", None),
+ use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ) )
+
+ if combined_install_database:
+ log.info("Install database targetting Galaxy's database configuration.")
+ self.install_model = self.model
+ else:
+ from galaxy.model.tool_shed_install import mapping as install_mapping
+ install_db_url = self.config.install_database_connection
+ log.info("Install database using its own connection %s" % install_db_url)
+ install_db_engine_options = self.config.install_database_engine_options
+ self.install_model = install_mapping.init( install_db_url,
+ install_db_engine_options )
diff -r 09533cfe154682bdfb1b8786fb827a99461022a6 -r a32dcf32dffc6dd7175092f4fda7476258fe061e lib/tool_shed/galaxy_install/migrate/common.py
--- a/lib/tool_shed/galaxy_install/migrate/common.py
+++ b/lib/tool_shed/galaxy_install/migrate/common.py
@@ -2,18 +2,10 @@
import os
import sys
import galaxy.config
-import galaxy.datatypes.registry
-from galaxy import tools
-from galaxy.tools.data import ToolDataTableManager
-from galaxy.web import security
-import galaxy.model.mapping
-import galaxy.tools.search
-from galaxy.objectstore import build_object_store_from_config
-import tool_shed.tool_shed_registry
from tool_shed.galaxy_install import install_manager
-class MigrateToolsApplication( object ):
+class MigrateToolsApplication( object, galaxy.config.ConfiguresGalaxyMixin ):
"""Encapsulates the state of a basic Galaxy Universe application in order to initiate the Install Manager"""
def __init__( self, tools_migration_config ):
@@ -33,41 +25,23 @@
for key, value in config_parser.items( "app:main" ):
galaxy_config_dict[ key ] = value
self.config = galaxy.config.Configuration( **galaxy_config_dict )
- if not self.config.database_connection:
- self.config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
+
self.config.update_integrated_tool_panel = True
- self.object_store = build_object_store_from_config( self.config )
- # Security helper
- self.security = security.SecurityHelper( id_secret=self.config.id_secret )
- # Setup the database engine and ORM
- self.model = galaxy.model.mapping.init( self.config.file_path,
- self.config.database_connection,
- engine_options={},
- create_tables=False,
- object_store=self.object_store )
- # Create an empty datatypes registry.
- self.datatypes_registry = galaxy.datatypes.registry.Registry()
- # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
- self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
- # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
- self.tool_data_tables = ToolDataTableManager( tool_data_path=self.config.tool_data_path,
- config_filename=self.config.tool_data_table_config_path )
- # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
- self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
- tool_data_path=self.tool_data_tables.tool_data_path,
- from_shed_config=True )
- # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
- tool_configs = self.config.tool_configs
- if self.config.migrated_tools_config not in tool_configs:
- tool_configs.append( self.config.migrated_tools_config )
- self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
- # Search support for tools
- self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
- # Set up the tool sheds registry.
- if os.path.isfile( self.config.tool_sheds_config ):
- self.tool_shed_registry = tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
- else:
- self.tool_shed_registry = None
+
+ self._configure_object_store()
+
+ self._configure_security()
+
+ self._configure_models()
+
+ self._configure_datatypes_registry( )
+
+ self._configure_tool_data_tables( from_shed_config=True )
+
+ self._configure_toolbox()
+
+ self._configure_tool_shed_registry()
+
# Get the latest tool migration script number to send to the Install manager.
latest_migration_script_number = int( tools_migration_config.split( '_' )[ 0 ] )
# The value of migrated_tools_config is migrated_tools_conf.xml, and is reserved for containing only those tools that have been
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e059c6607f34/
Changeset: e059c6607f34
Branch: stable
User: inithello
Date: 2013-12-13 21:49:28
Summary: Expose the show method anonymously.
Affected #: 1 file
diff -r 7dec267e7553c9279fc4fb4476232bedd80e3fa1 -r e059c6607f34b603e92e85cf0113b7b9b9635571 lib/galaxy/webapps/tool_shed/api/repository_revisions.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
+++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
@@ -133,7 +133,7 @@
trans.response.status = 500
return message
- @web.expose_api
+ @web.expose_api_anonymous
def show( self, trans, id, **kwd ):
"""
GET /api/repository_revisions/{encoded_repository_metadata_id}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/09533cfe1546/
Changeset: 09533cfe1546
User: greg
Date: 2013-12-13 21:44:39
Summary: Allow storage of multiple test runs in the prep script for install and testing tool dependencies and restrict creation of new entries in the tool test results dictionary when running the prep script for install and test repositories.
Affected #: 2 files
diff -r 6639d560b0a3333052af50773334641654d42a09 -r 09533cfe154682bdfb1b8786fb827a99461022a6 lib/tool_shed/scripts/check_repositories_for_functional_tests.py
--- a/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
+++ b/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
@@ -253,6 +253,11 @@
# a test_environment entry. If we use it we need to temporarily eliminate it from the list of tool_test_results_dicts
# since it will be re-inserted later.
tool_test_results_dict = tool_test_results_dicts.pop( 0 )
+ elif len( tool_test_results_dict ) == 2 and \
+ 'test_environment' in tool_test_results_dict and 'missing_test_components' in tool_test_results_dict:
+ # We can re-use tool_test_results_dict if its only entries are "test_environment" and "missing_test_components".
+ # In this case, some tools are missing tests components while others are not.
+ tool_test_results_dict = tool_test_results_dicts.pop( 0 )
else:
# The latest tool_test_results_dict has been populated with the results of a test run, so it cannot be used.
tool_test_results_dict = {}
diff -r 6639d560b0a3333052af50773334641654d42a09 -r 09533cfe154682bdfb1b8786fb827a99461022a6 lib/tool_shed/scripts/check_tool_dependency_definition_repositories.py
--- a/lib/tool_shed/scripts/check_tool_dependency_definition_repositories.py
+++ b/lib/tool_shed/scripts/check_tool_dependency_definition_repositories.py
@@ -41,6 +41,7 @@
class RepositoriesApplication( object ):
"""Encapsulates the state of a Universe application"""
+
def __init__( self, config ):
if config.database_connection is False:
config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % config.database
@@ -133,11 +134,12 @@
# Do not check metadata records that have an entry in the skip_tool_tests table, since they won't be tested anyway.
skip_metadata_ids = select( [ app.model.SkipToolTest.table.c.repository_metadata_id ] )
# Get the list of metadata records to check, restricting it to records that have not been flagged do_not_test.
- for repository_metadata in app.sa_session.query( app.model.RepositoryMetadata ) \
- .filter( and_( app.model.RepositoryMetadata.table.c.downloadable == True,
- app.model.RepositoryMetadata.table.c.do_not_test == False,
- app.model.RepositoryMetadata.table.c.repository_id.in_( tool_dependency_defintion_repository_ids ),
- not_( app.model.RepositoryMetadata.table.c.id.in_( skip_metadata_ids ) ) ) ):
+ for repository_metadata in \
+ app.sa_session.query( app.model.RepositoryMetadata ) \
+ .filter( and_( app.model.RepositoryMetadata.table.c.downloadable == True,
+ app.model.RepositoryMetadata.table.c.do_not_test == False,
+ app.model.RepositoryMetadata.table.c.repository_id.in_( tool_dependency_defintion_repository_ids ),
+ not_( app.model.RepositoryMetadata.table.c.id.in_( skip_metadata_ids ) ) ) ):
records_checked += 1
# Check the next repository revision.
changeset_revision = str( repository_metadata.changeset_revision )
@@ -161,9 +163,31 @@
invalid_metadata += 1
if not info_only:
# Create the tool_test_results_dict dictionary, using the dictionary from the previous test run if available.
- if repository_metadata.tool_test_results:
- tool_test_results_dict = repository_metadata.tool_test_results
+ if repository_metadata.tool_test_results is not None:
+ # We'll listify the column value in case it uses the old approach of storing the results of only a single test run.
+ tool_test_results_dicts = listify( repository_metadata.tool_test_results )
else:
+ tool_test_results_dicts = []
+ if tool_test_results_dicts:
+ # Inspect the tool_test_results_dict for the last test run in case it contains only a test_environment
+ # entry. This will occur with multiple runs of this script without running the associated
+ # install_and_test_tool_sed_repositories.sh script which will further populate the tool_test_results_dict.
+ tool_test_results_dict = tool_test_results_dicts[ 0 ]
+ if len( tool_test_results_dict ) <= 1:
+ # We can re-use the mostly empty tool_test_results_dict for this run because it is either empty or it contains only
+ # a test_environment entry. If we use it we need to temporarily eliminate it from the list of tool_test_results_dicts
+ # since it will be re-inserted later.
+ tool_test_results_dict = tool_test_results_dicts.pop( 0 )
+ elif len( tool_test_results_dict ) == 2 and \
+ 'test_environment' in tool_test_results_dict and 'missing_test_components' in tool_test_results_dict:
+ # We can re-use tool_test_results_dict if its only entries are "test_environment" and "missing_test_components".
+ # In this case, some tools are missing tests components while others are not.
+ tool_test_results_dict = tool_test_results_dicts.pop( 0 )
+ else:
+ # The latest tool_test_results_dict has been populated with the results of a test run, so it cannot be used.
+ tool_test_results_dict = {}
+ else:
+ # Create a new dictionary for the most recent test run.
tool_test_results_dict = {}
# Initialize the tool_test_results_dict dictionary with the information about the current test environment.
test_environment_dict = tool_test_results_dict.get( 'test_environment', {} )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7dec267e7553/
Changeset: 7dec267e7553
Branch: stable
User: inithello
Date: 2013-12-13 20:45:42
Summary: Fix API visibility for the repository_revisions index.
Affected #: 1 file
diff -r 77b5d4888d6d957d24caccdcd5154bb8cb7574e4 -r 7dec267e7553c9279fc4fb4476232bedd80e3fa1 lib/galaxy/webapps/tool_shed/api/repository_revisions.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
+++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
@@ -66,7 +66,7 @@
trans.response.status = 500
return message
- @web.expose_api
+ @web.expose_api_anonymous
def index( self, trans, **kwd ):
"""
GET /api/repository_revisions
https://bitbucket.org/galaxy/galaxy-central/commits/6639d560b0a3/
Changeset: 6639d560b0a3
User: inithello
Date: 2013-12-13 20:48:11
Summary: Remove the forced addition of the API key to API calls, since the issue being worked around has now been fixed in -stable.
Affected #: 1 file
diff -r 67b996a66ef9f2de2381bfb66f11bba99b3552fd -r 6639d560b0a3333052af50773334641654d42a09 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -307,12 +307,8 @@
parts.insert( 0, 'api' )
url = suc.url_join( base, *parts )
if params is not None:
- if 'key' not in params:
- params[ 'key' ] = tool_shed_api_key
query_string = urllib.urlencode( params )
url += '?%s' % query_string
- else:
- url += '?key=%s' % tool_shed_api_key
return url
def get_latest_downloadable_changeset_revision( url, name, owner ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/46dd06fbc774/
Changeset: 46dd06fbc774
User: greg
Date: 2013-12-13 20:30:34
Summary: Use the latest test results dictionary in the current install and test run if it contains only test_environment and missing_test_components entries.
Affected #: 1 file
diff -r a46ac538684e86002e5386aefb7a6c3b921eae45 -r 46dd06fbc7744f9539e6dba7233350964a272eb5 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -638,6 +638,11 @@
# a test_environment entry. If we use it we need to temporarily eliminate it from the list of tool_test_results_dicts
# since it will be re-inserted later.
tool_test_results_dict = tool_test_results_dicts.pop( 0 )
+ elif len( tool_test_results_dict ) == 2 and \
+ 'test_environment' in tool_test_results_dict and 'missing_test_components' in tool_test_results_dict:
+ # We can re-use tool_test_results_dict if its only entries are "test_environment" and "missing_test_components".
+ # In this case, some tools are missing tests components while others are not.
+ tool_test_results_dict = tool_test_results_dicts.pop( 0 )
else:
# The latest tool_test_results_dict has been populated with the results of a test run, so it cannot be used.
tool_test_results_dict = {}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a46ac538684e/
Changeset: a46ac538684e
User: dan
Date: 2013-12-13 20:28:54
Summary: Move debug logging statement for external metadata command so that it includes the dependency shell commands, if any.
Affected #: 1 file
diff -r 5fb649265c513d6efd1e9d6c93ab06e6996e2008 -r a46ac538684e86002e5386aefb7a6c3b921eae45 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -212,13 +212,13 @@
tmp_dir=job_wrapper.working_directory,
#we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior
kwds={ 'overwrite' : False } )
- log.debug( 'executing external set_meta script for job %d: %s' % ( job_wrapper.job_id, external_metadata_script ) )
if resolve_requirements:
dependency_shell_commands = self.app.datatypes_registry.set_external_metadata_tool.build_dependency_shell_commands()
if dependency_shell_commands:
if isinstance( dependency_shell_commands, list ):
dependency_shell_commands = "&&".join( dependency_shell_commands )
external_metadata_script = "%s&&%s" % ( dependency_shell_commands, external_metadata_script )
+ log.debug( 'executing external set_meta script for job %d: %s' % ( job_wrapper.job_id, external_metadata_script ) )
external_metadata_proc = subprocess.Popen( args=external_metadata_script,
shell=True,
env=os.environ,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5fb649265c51/
Changeset: 5fb649265c51
User: greg
Date: 2013-12-13 20:18:38
Summary: Eliminate the deprecated "orphan" designation when displaying tool dependencies in tool shed containers.
Affected #: 1 file
diff -r 35b1a3c6e617247de50ac3f83de46a090f9ac59c -r 5fb649265c513d6efd1e9d6c93ab06e6996e2008 lib/tool_shed/util/container_util.py
--- a/lib/tool_shed/util/container_util.py
+++ b/lib/tool_shed/util/container_util.py
@@ -5,7 +5,6 @@
from galaxy.web.framework.helpers import time_ago
from tool_shed.util import common_util
from tool_shed.util import readme_util
-from tool_shed.util.tool_dependency_util import tool_dependency_is_orphan
import tool_shed.util.shed_util_common as suc
log = logging.getLogger( __name__ )
@@ -269,7 +268,7 @@
"""Tool dependency object"""
def __init__( self, id=None, name=None, version=None, type=None, readme=None, installation_status=None, repository_id=None,
- tool_dependency_id=None, is_orphan=None ):
+ tool_dependency_id=None ):
self.id = id
self.name = name
self.version = version
@@ -278,15 +277,6 @@
self.installation_status = installation_status
self.repository_id = repository_id
self.tool_dependency_id = tool_dependency_id
- # The designation of a ToolDependency into the "orphan" category has evolved over time, and is significantly restricted since the
- # introduction of the TOOL_DEPENDENCY_DEFINITION repository type. This designation is still critical, however, in that it handles
- # the case where a repository contains both tools and a tool_dependencies.xml file, but the definition in the tool_dependencies.xml
- # file is in no way related to anything defined by any of the contained tool's requirements tag sets. This is important in that it
- # is often a result of a typo (e.g., dependency name or version) that differs between the tool dependency definition within the
- # tool_dependencies.xml file and what is defined in the tool config's <requirements> tag sets. In these cases, the user should be
- # presented with a warning message, and this warning message is is in fact displayed if the following is_orphan attribute is True.
- # This is tricky because in some cases it may be intentional, and tool dependencies that are categorized as "orphan" are in fact valid.
- self.is_orphan = is_orphan
@property
def listify( self ):
@@ -318,42 +308,6 @@
self.repository_metadata_id = repository_metadata_id
self.repository_id = repository_id
-def add_orphan_settings_to_tool_dependencies( tool_dependencies, tools ):
- """Inspect all received tool dependencies and label those that are orphans within the repository."""
- #orphan_env_dependencies = orphan_tool_dependencies.get( 'set_environment', None )
- new_tool_dependencies = {}
- for td_key, requirements_dict in tool_dependencies.items():
- if td_key in [ 'set_environment' ]:
- # "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}]
- new_set_environment_dict_list = []
- for env_requirements_dict in requirements_dict:
- try:
- name = env_requirements_dict[ 'name' ]
- type = env_requirements_dict[ 'type' ]
- if tool_dependency_is_orphan( type, name, None, tools ):
- env_requirements_dict[ 'is_orphan' ] = True
- except Exception, e:
- name = str( e )
- type = 'unknown'
- is_orphan = 'unknown'
- new_set_environment_dict_list.append( env_requirements_dict )
- new_tool_dependencies[ td_key ] = new_set_environment_dict_list
- else:
- # {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"}
- try:
- name = requirements_dict[ 'name' ]
- type = requirements_dict[ 'type' ]
- version = requirements_dict[ 'version']
- if tool_dependency_is_orphan( type, name, version, tools ):
- requirements_dict[ 'is_orphan' ] = True
- except Exception, e:
- name = str( e )
- type = 'unknown'
- version = 'unknown'
- is_orphan = 'unknown'
- new_tool_dependencies[ td_key ] = requirements_dict
- return new_tool_dependencies
-
def build_data_managers_folder( trans, folder_id, data_managers, label=None ):
"""Return a folder hierarchy containing Data Managers."""
if data_managers:
@@ -849,8 +803,6 @@
if 'tools' not in exclude:
tools = metadata.get( 'tools', [] )
tools.extend( metadata.get( 'invalid_tools', [] ) )
- if tools:
- tool_dependencies = add_orphan_settings_to_tool_dependencies( tool_dependencies, tools )
folder_id, tool_dependencies_root_folder = build_tool_dependencies_folder( trans,
folder_id,
tool_dependencies,
@@ -1055,13 +1007,6 @@
tool_dependency_id += 1
if dependency_key in [ 'set_environment' ]:
for set_environment_dict in requirements_dict:
- if trans.webapp.name == 'tool_shed':
- is_orphan = set_environment_dict.get( 'is_orphan', False )
- else:
- # This is probably not necessary to display in Galaxy.
- is_orphan = False
- if is_orphan:
- folder.description = not_used_by_local_tools_description
try:
name = set_environment_dict.get( 'name', None )
type = set_environment_dict[ 'type' ]
@@ -1086,16 +1031,9 @@
readme=None,
installation_status=installation_status,
repository_id=repository_id,
- tool_dependency_id=td_id,
- is_orphan=is_orphan )
+ tool_dependency_id=td_id )
folder.tool_dependencies.append( tool_dependency )
else:
- if trans.webapp.name == 'tool_shed':
- is_orphan = requirements_dict.get( 'is_orphan', False )
- else:
- is_orphan = False
- if is_orphan:
- folder.description = not_used_by_local_tools_description
try:
name = requirements_dict[ 'name' ]
version = requirements_dict[ 'version' ]
@@ -1122,8 +1060,7 @@
readme=None,
installation_status=installation_status,
repository_id=repository_id,
- tool_dependency_id=td_id,
- is_orphan=is_orphan )
+ tool_dependency_id=td_id )
folder.tool_dependencies.append( tool_dependency )
else:
tool_dependencies_root_folder = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.