1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/11bdc0f5c964/
Changeset: 11bdc0f5c964
User: greg
Date: 2014-01-16 20:07:28
Summary: Fixes in the tool shed api for mapping the repository user_id and for handling repository_dependency changeset_revision values that have been updated.
Affected #: 3 files
diff -r 1ffcb550e2f9464bf71a5f5154dff71ba0dd6379 -r 11bdc0f5c964a18e5a810abc283f7f1ec5e9f5ff lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -129,7 +129,7 @@
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
encoded_repository_id,
changeset_revision )
- if not repository_metadata:
+ if repository_metadata is None:
# The changeset_revision column in the repository_metadata table has been updated with a new
# value, so find the changeset_revision to which we need to update.
repo_dir = repository.repo_path( trans.app )
@@ -139,7 +139,7 @@
encoded_repository_id,
new_changeset_revision )
changeset_revision = new_changeset_revision
- if repository_metadata:
+ if repository_metadata is not None:
encoded_repository_metadata_id = trans.security.encode_id( repository_metadata.id )
repository_metadata_dict = repository_metadata.to_dict( view='collection',
value_mapper=self.__get_value_mapper( trans ) )
@@ -168,7 +168,8 @@
def __get_value_mapper( self, trans ):
value_mapper = { 'id' : trans.security.encode_id,
- 'repository_id' : trans.security.encode_id }
+ 'repository_id' : trans.security.encode_id,
+ 'user_id' : trans.security.encode_id }
return value_mapper
@web.expose_api
diff -r 1ffcb550e2f9464bf71a5f5154dff71ba0dd6379 -r 11bdc0f5c964a18e5a810abc283f7f1ec5e9f5ff lib/galaxy/webapps/tool_shed/api/repository_revisions.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
+++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
@@ -68,7 +68,8 @@
def __get_value_mapper( self, trans ):
value_mapper = { 'id' : trans.security.encode_id,
- 'repository_id' : trans.security.encode_id }
+ 'repository_id' : trans.security.encode_id,
+ 'user_id' : trans.security.encode_id }
return value_mapper
@web.expose_api_anonymous
@@ -159,15 +160,33 @@
repository_dependency_repository_metadata = \
suc.get_repository_metadata_by_changeset_revision( trans, repository_dependency_id, changeset_revision )
if repository_dependency_repository_metadata is None:
- log.debug( 'Cannot locate repository_metadata with id %s for repository dependency %s owned by %s.' % \
- ( str( repository_dependency_id ), str( name ), str( owner ) ) )
- continue
+ # The changeset_revision column in the repository_metadata table has been updated with a new
+ # value, so find the changeset_revision to which we need to update.
+ repo_dir = repository_dependency.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ new_changeset_revision = suc.get_next_downloadable_changeset_revision( repository_dependency,
+ repo,
+ changeset_revision )
+ repository_dependency_repository_metadata = \
+ suc.get_repository_metadata_by_changeset_revision( trans,
+ repository_dependency_id,
+ new_changeset_revision )
+ if repository_dependency_repository_metadata is None:
+ decoded_repository_dependency_id = trans.security.decode_id( repository_dependency_id )
+ debug_msg = 'Cannot locate repository_metadata with id %d for repository dependency %s owned by %s ' % \
+ ( decoded_repository_dependency_id, str( name ), str( owner ) )
+ debug_msg += 'using either of these changeset_revisions: %s, %s.' % \
+ ( str( changeset_revision ), str( new_changeset_revision ) )
+ log.debug( debug_msg )
+ continue
+ else:
+ changeset_revision = new_changeset_revision
repository_dependency_repository_metadata_id = trans.security.encode_id( repository_dependency_repository_metadata.id )
repository_dependency_dict = repository_dependency.to_dict( view='element',
value_mapper=self.__get_value_mapper( trans ) )
# We have to add the changeset_revision of the repository dependency.
repository_dependency_dict[ 'changeset_revision' ] = changeset_revision
- repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories',
+ repository_dependency_dict[ 'url' ] = web.url_for( controller='repository_revisions',
action='show',
id=repository_dependency_repository_metadata_id )
repository_dependencies_dicts.append( repository_dependency_dict )
diff -r 1ffcb550e2f9464bf71a5f5154dff71ba0dd6379 -r 11bdc0f5c964a18e5a810abc283f7f1ec5e9f5ff lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -724,8 +724,8 @@
def get_next_downloadable_changeset_revision( repository, repo, after_changeset_revision ):
"""
- Return the installable changeset_revision in the repository changelog after the changeset to which after_changeset_revision refers. If there
- isn't one, return None.
+ Return the installable changeset_revision in the repository changelog after the changeset to which
+ after_changeset_revision refers. If there isn't one, return None.
"""
changeset_revisions = get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True )
if len( changeset_revisions ) == 1:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1ffcb550e2f9/
Changeset: 1ffcb550e2f9
User: jmchilton
Date: 2014-01-16 20:00:30
Summary: Fix database engine options when verifying tools using separate install database.
Bug report and fix thanks to Jim Johnson.
Affected #: 1 file
diff -r dd0bf9c3d0e42246964c8d65785ece1feec6b98e -r 1ffcb550e2f9464bf71a5f5154dff71ba0dd6379 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -602,7 +602,11 @@
if check_migrate_tools:
# Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
from tool_shed.galaxy_install.migrate.check import verify_tools
- verify_tools( self, install_db_url, config_file, self.config.database_engine_options )
+ if combined_install_database:
+ install_database_options = self.config.database_engine_options
+ else:
+ install_database_options = self.config.install_database_engine_options
+ verify_tools( self, install_db_url, config_file, install_database_options )
from galaxy.model import mapping
self.model = mapping.init( self.config.file_path,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/dd0bf9c3d0e4/
Changeset: dd0bf9c3d0e4
User: martenson
Date: 2014-01-16 19:35:51
Summary: remove doubled import
Affected #: 1 file
diff -r 3149e4f0836584708c417c42e36250709026d18c -r dd0bf9c3d0e42246964c8d65785ece1feec6b98e lib/galaxy/webapps/galaxy/api/lda_datasets.py
--- a/lib/galaxy/webapps/galaxy/api/lda_datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/lda_datasets.py
@@ -2,7 +2,6 @@
API operations on the datasets from library.
"""
import glob
-import logging
import operator
import os
import os.path
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3149e4f08365/
Changeset: 3149e4f08365
User: martenson
Date: 2014-01-16 19:30:53
Summary: removed obvious bugs in api_key retrieval through API using basic authentication
Affected #: 1 file
diff -r a24014287a29cddcd4ef392a4bde59a833c3ecab -r 3149e4f0836584708c417c42e36250709026d18c lib/galaxy/webapps/galaxy/api/authenticate.py
--- a/lib/galaxy/webapps/galaxy/api/authenticate.py
+++ b/lib/galaxy/webapps/galaxy/api/authenticate.py
@@ -10,7 +10,6 @@
from galaxy.exceptions import ObjectNotFound
from paste.httpexceptions import HTTPBadRequest
-
import logging
log = logging.getLogger( __name__ )
@@ -23,7 +22,7 @@
* GET /api/authenticate/baseauth
returns an API key for authenticated user based on BaseAuth headers
"""
- email, password = _decode_baseauth( trans.environ.get( 'HTTP_AUTHORIZATION' ) )
+ email, password = self._decode_baseauth( trans.environ.get( 'HTTP_AUTHORIZATION' ) )
user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email == email ).all()
@@ -41,38 +40,38 @@
trans.response.status = 500
return "invalid password"
- return dict('api_key', api_key_row.key)
+ return dict( api_key= api_key_row.key )
-def _decode_baseauth( encoded_str ):
- """Decode an encrypted HTTP basic authentication string. Returns a tuple of
- the form (email, password), and raises a DecodeError exception if
- nothing could be decoded.
- """
- split = encoded_str.strip().split(' ')
+ def _decode_baseauth( self, encoded_str ):
+ """Decode an encrypted HTTP basic authentication string. Returns a tuple of
+ the form (email, password), and raises a HTTPBadRequest exception if
+ nothing could be decoded.
+ """
+ split = encoded_str.strip().split(' ')
- # If split is only one element, try to decode the email and password
- # directly.
- if len(split) == 1:
- try:
- email, password = b64decode(split[0]).split(':')
- except:
- raise HTTPBadRequest
+ # If split is only one element, try to decode the email and password
+ # directly.
+ if len(split) == 1:
+ try:
+ email, password = b64decode(split[0]).split(':')
+ except:
+ raise HTTPBadRequest
- # If there are only two elements, check the first and ensure it says
- # 'basic' so that we know we're about to decode the right thing. If not,
- # bail out.
- elif len(split) == 2:
- if split[0].strip().lower() == 'basic':
- try:
- email, password = b64decode(split[1]).split(':')
- except:
- raise DecodeError
+ # If there are only two elements, check the first and ensure it says
+ # 'basic' so that we know we're about to decode the right thing. If not,
+ # bail out.
+ elif len(split) == 2:
+ if split[0].strip().lower() == 'basic':
+ try:
+ email, password = b64decode(split[1]).split(':')
+ except:
+ raise HTTPBadRequest
+ else:
+ raise HTTPBadRequest
+
+ # If there are more than 2 elements, something crazy must be happening.
+ # Bail.
else:
raise HTTPBadRequest
- # If there are more than 2 elements, something crazy must be happening.
- # Bail.
- else:
- raise HTTPBadRequest
-
- return unquote(email), unquote(password)
+ return unquote(email), unquote(password)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7b889da9b563/
Changeset: 7b889da9b563
User: inithello
Date: 2014-01-16 17:30:25
Summary: Fix deletion of test methods after tests have been run.
Affected #: 3 files
diff -r 659a29d112f622db5dabb65e4105535b5aee3b30 -r 7b889da9b563bc8245977dceb5aa4b8965857aa2 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -498,6 +498,7 @@
functional.test_toolbox.toolbox = app.toolbox
# When testing data managers, do not test toolbox.
functional.test_toolbox.build_tests(
+ app=app,
testing_shed_tools=testing_shed_tools,
master_api_key=master_api_key,
user_api_key=get_user_api_key(),
diff -r 659a29d112f622db5dabb65e4105535b5aee3b30 -r 7b889da9b563bc8245977dceb5aa4b8965857aa2 test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py
+++ b/test/functional/test_toolbox.py
@@ -72,13 +72,13 @@
raise
-def build_tests( testing_shed_tools=False, master_api_key=None, user_api_key=None ):
+def build_tests( app=None, testing_shed_tools=False, master_api_key=None, user_api_key=None ):
"""
If the module level variable `toolbox` is set, generate `ToolTestCase`
classes for all of its tests and put them into this modules globals() so
they can be discovered by nose.
"""
- if toolbox is None:
+ if app is None:
return
# Push all the toolbox tests to module level
@@ -88,9 +88,8 @@
for key, val in G.items():
if key.startswith( 'TestForTool_' ):
del G[ key ]
-
- for i, tool_id in enumerate( toolbox.tools_by_id ):
- tool = toolbox.get_tool( tool_id )
+ for i, tool_id in enumerate( app.toolbox.tools_by_id ):
+ tool = app.toolbox.get_tool( tool_id )
if isinstance( tool, TOOL_TYPES_NO_TEST ):
#We do not test certain types of tools (e.g. Data Manager tools) as part of ToolTestCase
continue
diff -r 659a29d112f622db5dabb65e4105535b5aee3b30 -r 7b889da9b563bc8245977dceb5aa4b8965857aa2 test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
@@ -595,21 +595,16 @@
"""
tests_to_delete = []
tools_to_delete_by_id = []
- # Push all the toolbox tests to module level
- G = globals()
- # Eliminate all previous tests from G.
- for key, val in G.items():
- if key.startswith( 'TestForTool_' ):
- del G[ key ]
- # Find all tests previously generated by twill.
for key in test_toolbox.__dict__:
if key.startswith( 'TestForTool_' ):
- log.debug( 'Tool test %s discovered in test_toolbox.' % str( key ) )
# We can't delete this test just yet, we're still iterating over __dict__.
tests_to_delete.append( key )
tool_id = key.replace( 'TestForTool_', '' )
for app_tool_id in app.toolbox.tools_by_id:
- if app_tool_id.replace( '_', ' ' ) == tool_id.replace( '_', ' ' ):
+ # Revisit this code if at some point we notice that Twill re-runs tests that should have been deleted.
+ # Undoubtedly the following if statement will need to be enhanced to find the tool id in question. For
+ # example, the following or is required because Twill replaces some spaces with underscores in test names.
+ if app_tool_id == tool_id or app_tool_id.replace( '_', ' ' ) == tool_id.replace( '_', ' ' ):
tools_to_delete_by_id.append( tool_id )
# Delete the discovered twill-generated tests.
for key in tests_to_delete:
@@ -617,16 +612,12 @@
log.debug( 'Deleting test %s from test_toolbox.' % str( key ) )
del test_toolbox.__dict__[ key ]
for tool_id in tools_to_delete_by_id:
- if tool_id in app.toolbox.tools_by_id:
- log.debug( 'Deleting tool id %s from app.toolbox[ tools_by_id ].' % str( tool_id ) )
- del app.toolbox.tools_by_id[ tool_id ]
+ log.debug( 'Deleting tool id %s from app.toolbox[ tools_by_id ].' % str( tool_id ) )
+ del app.toolbox.tools_by_id[ tool_id ]
def test_repository_tools( app, repository, repository_dict, tool_test_results_dicts, tool_test_results_dict,
install_and_test_statistics_dict ):
"""Test tools contained in the received repository."""
- # Set the global variable 'test_toolbox', so that test.functional.test_toolbox will generate the
- # appropriate test methods.
- test_toolbox.toolbox = app.toolbox
# Get the attributes that identify the repository whose contained tools are being tested.
name = str( repository.name )
owner = str( repository.owner )
@@ -635,7 +626,8 @@
# Generate the test methods for this installed repository. We need to pass testing_shed_tools=True here
# or twill will look in $GALAXY_HOME/test-data for test data, which may result in missing or invalid test
# files.
- test_toolbox.build_tests( testing_shed_tools=True,
+ test_toolbox.build_tests( app=app,
+ testing_shed_tools=True,
master_api_key=install_and_test_base_util.default_galaxy_master_api_key )
# Set up nose to run the generated functional tests.
test_config = nose.config.Config( env=os.environ, plugins=nose.plugins.manager.DefaultPluginManager() )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ff018b067bb2/
Changeset: ff018b067bb2
User: dannon
Date: 2014-01-16 15:56:31
Summary: Explicitly require UsesAnnotations mixin for the annotations api controller *after* UsesStoredWorkflowMixin.
Affected #: 1 file
diff -r 2f7781fc4ce9dbb1768fd345621ec2a62000f2ae -r ff018b067bb21760335de776f7e97a987d86c7e6 lib/galaxy/webapps/galaxy/api/annotations.py
--- a/lib/galaxy/webapps/galaxy/api/annotations.py
+++ b/lib/galaxy/webapps/galaxy/api/annotations.py
@@ -3,13 +3,14 @@
"""
import logging
from galaxy import web
+from galaxy.model.item_attrs import UsesAnnotations
from galaxy.util.sanitize_html import sanitize_html
from galaxy.web.base.controller import BaseAPIController, HTTPNotImplemented, UsesHistoryDatasetAssociationMixin, UsesHistoryMixin, UsesStoredWorkflowMixin
log = logging.getLogger( __name__ )
-class BaseAnnotationsController( BaseAPIController, UsesHistoryMixin, UsesHistoryDatasetAssociationMixin, UsesStoredWorkflowMixin ):
+class BaseAnnotationsController( BaseAPIController, UsesHistoryMixin, UsesHistoryDatasetAssociationMixin, UsesStoredWorkflowMixin, UsesAnnotations ):
@web.expose_api
def index( self, trans, **kwd ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.