1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1d57caac677c/
Changeset: 1d57caac677c
User: greg
Date: 2014-01-05 03:25:53
Summary: Improved logging in the tool shed's install and test framework.
Affected #: 3 files
diff -r cd8ca90e93a1f896ee6ff3ac56665e64373ace00 -r 1d57caac677cbbd8465c149b6a091684b21af1f8 test/install_and_test_tool_shed_repositories/base/util.py
--- a/test/install_and_test_tool_shed_repositories/base/util.py
+++ b/test/install_and_test_tool_shed_repositories/base/util.py
@@ -547,13 +547,18 @@
tool_test_results_dict[ 'installation_errors' ][ 'repository_dependencies' ]\
.append( missing_repository_dependency_info_dict )
# Record the status of this repository in the tool shed.
- # TODO: do something useful with response_dict
+ log.debug('=============================================================' )
+ log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( str( repository.changeset_revision ), str( repository.name ), str( repository.owner ), str( tool_test_results_dict ) ) )
response_dict = register_test_result( galaxy_tool_shed_url,
tool_test_results_dicts,
tool_test_results_dict,
repository_dict,
params,
can_update_tool_shed )
+ log.debug( 'Result of inserting tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( str( repository.changeset_revision ), str( repository.name ), str( repository.owner ), str( response_dict ) ) )
+ log.debug('=============================================================' )
def initialize_install_and_test_statistics_dict( test_framework ):
# Initialize a dictionary for the summary that will be printed to stdout.
@@ -818,6 +823,7 @@
if can_update_tool_shed:
metadata_revision_id = repository_dict.get( 'id', None )
if metadata_revision_id is not None:
+ log.debug( 'Updating tool_test_results for repository_metadata id %s.' % str( metadata_revision_id ) )
tool_test_results_dicts.insert( 0, tool_test_results_dict )
params[ 'tool_test_results' ] = tool_test_results_dicts
# Set the time_last_tested entry so that the repository_metadata.time_last_tested will be set in the tool shed.
@@ -826,7 +832,8 @@
try:
return update( tool_shed_api_key, url, params, return_formatted=False )
except Exception, e:
- log.exception( 'Error attempting to register test results: %s' % str( e ) )
+ log.exception( 'Error updating tool_test_results for repository_metadata id %s:\n%s' % \
+ ( str( metadata_revision_id ), str( e ) ) )
return {}
else:
return {}
diff -r cd8ca90e93a1f896ee6ff3ac56665e64373ace00 -r 1d57caac677cbbd8465c149b6a091684b21af1f8 test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
@@ -189,18 +189,23 @@
changeset_revision,
encoded_repository_metadata_id )
if is_excluded:
+ log.debug( "Not testing revision %s of repository %s owned by %s because it is in the exclude list for this test run." % \
+ ( changeset_revision, name, owner ) )
# If this repository is being skipped, register the reason.
tool_test_results_dict[ 'not_tested' ] = dict( reason=reason )
params = dict( do_not_test=False )
- # TODO: do something useful with response_dict
+ log.debug('=============================================================' )
+ log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( tool_test_results_dict ) ) )
response_dict = install_and_test_base_util.register_test_result( install_and_test_base_util.galaxy_tool_shed_url,
tool_test_results_dicts,
tool_test_results_dict,
repository_dict,
params,
can_update_tool_shed )
- log.debug( "Not testing revision %s of repository %s owned by %s because it is in the exclude list for this test run." % \
- ( changeset_revision, name, owner ) )
+ log.debug( 'Result of inserting tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( response_dict ) ) )
+ log.debug('=============================================================' )
else:
# See if the repository was installed in a previous test.
repository = install_and_test_base_util.get_repository( name, owner, changeset_revision )
@@ -215,13 +220,18 @@
install_and_test_statistics_dict[ 'repositories_with_installation_error' ].append( repository_identifier_dict )
tool_test_results_dict[ 'installation_errors' ][ 'current_repository' ] = error_message
params = dict( test_install_error=True )
- # TODO: do something useful with response_dict
+ log.debug('=============================================================' )
+ log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( tool_test_results_dict ) ) )
response_dict = install_and_test_base_util.register_test_result( install_and_test_base_util.galaxy_tool_shed_url,
tool_test_results_dicts,
tool_test_results_dict,
repository_dict,
params,
can_update_tool_shed )
+ log.debug( 'Result of inserting tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( response_dict ) ) )
+ log.debug('=============================================================' )
else:
# The repository was successfully installed.
log.debug( 'Installation succeeded for revision %s of repository %s owned by %s.' % \
@@ -277,7 +287,9 @@
install_and_test_statistics_dict[ 'at_least_one_test_failed' ].append( repository_identifier_dict )
# Record the status of this repository in the tool shed.
params[ 'tools_functionally_correct' ] = False
- # TODO: do something useful with response_dict
+ log.debug('=============================================================' )
+ log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( tool_test_results_dict ) ) )
response_dict = \
install_and_test_base_util.register_test_result( install_and_test_base_util.galaxy_tool_shed_url,
tool_test_results_dicts,
@@ -285,6 +297,9 @@
repository_dict,
params,
can_update_tool_shed )
+ log.debug( 'Result of inserting tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( response_dict ) ) )
+ log.debug('=============================================================' )
else:
log.debug( 'Skipped attempt to install revision %s of repository %s owned by %s because ' % \
( changeset_revision, name, owner ) )
@@ -627,6 +642,8 @@
if result.wasSuccessful():
# This repository's tools passed all functional tests. Use the ReportResults nose plugin to get a list
# of tests that passed.
+ log.debug( 'Revision %s of repository %s owned by %s installed and passed functional tests.' % \
+ ( changeset_revision, name, owner ) )
for plugin in test_plugins:
if hasattr( plugin, 'getTestStatus' ):
test_identifier = '%s/%s' % ( owner, name )
@@ -647,17 +664,22 @@
# Call the register_test_result() method to execute a PUT request to the repository_revisions API
# controller with the status of the test. This also sets the do_not_test and tools_functionally
# correct flags and updates the time_last_tested field to today's date.
- # TODO: do something useful with response_dict
+ log.debug('=============================================================' )
+ log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( tool_test_results_dict ) ) )
response_dict = install_and_test_base_util.register_test_result( install_and_test_base_util.galaxy_tool_shed_url,
tool_test_results_dicts,
tool_test_results_dict,
repository_dict,
params,
can_update_tool_shed )
- log.debug( 'Revision %s of repository %s owned by %s installed and passed functional tests.' % \
- ( changeset_revision, name, owner ) )
+ log.debug( 'Result of inserting tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( response_dict ) ) )
+ log.debug('=============================================================' )
else:
# The get_failed_test_dicts() method returns a list.
+ log.debug( 'Revision %s of repository %s owned by %s installed successfully but did not pass functional tests.' % \
+ ( changeset_revision, name, owner ) )
failed_test_dicts = get_failed_test_dicts( result, from_tool_test=True )
tool_test_results_dict[ 'failed_tests' ] = failed_test_dicts
failed_repository_dict = repository_identifier_dict
@@ -668,15 +690,18 @@
params = dict( tools_functionally_correct=False,
test_install_error=False,
do_not_test=str( set_do_not_test ) )
- # TODO: do something useful with response_dict
+ log.debug('=============================================================' )
+ log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( tool_test_results_dict ) ) )
response_dict = install_and_test_base_util.register_test_result( install_and_test_base_util.galaxy_tool_shed_url,
tool_test_results_dicts,
tool_test_results_dict,
repository_dict,
params,
can_update_tool_shed )
- log.debug( 'Revision %s of repository %s owned by %s installed successfully but did not pass functional tests.' % \
- ( changeset_revision, name, owner ) )
+ log.debug( 'Result of inserting tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( response_dict ) ) )
+ log.debug('=============================================================' )
# Remove the just-executed tests so twill will not find and re-test them along with the tools
# contained in the next repository.
remove_tests( app )
diff -r cd8ca90e93a1f896ee6ff3ac56665e64373ace00 -r 1d57caac677cbbd8465c149b6a091684b21af1f8 test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
@@ -124,17 +124,22 @@
encoded_repository_metadata_id )
if is_excluded:
# If this repository is being skipped, register the reason.
+ log.debug( "Not testing revision %s of repository %s owned by %s because it is in the exclude list for this test run." % \
+ ( changeset_revision, name, owner ) )
tool_test_results_dict[ 'not_tested' ] = dict( reason=reason )
params = dict( do_not_test=False )
- # TODO: do something useful with response_dict
+ log.debug('=============================================================' )
+ log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( tool_test_results_dict ) ) )
response_dict = install_and_test_base_util.register_test_result( install_and_test_base_util.galaxy_tool_shed_url,
tool_test_results_dicts,
tool_test_results_dict,
repository_dict,
params,
can_update_tool_shed )
- log.debug( "Not testing revision %s of repository %s owned by %s because it is in the exclude list for this test run." % \
- ( changeset_revision, name, owner ) )
+ log.debug( 'Result of inserting tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( response_dict ) ) )
+ log.debug('=============================================================' )
else:
# See if the repository was installed in a previous test.
repository = install_and_test_base_util.get_repository( name, owner, changeset_revision )
@@ -149,13 +154,18 @@
install_and_test_statistics_dict[ 'repositories_with_installation_error' ].append( repository_identifier_dict )
tool_test_results_dict[ 'installation_errors' ][ 'current_repository' ] = error_message
params = dict( test_install_error=True )
- # TODO: do something useful with response_dict
+ log.debug('=============================================================' )
+ log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( tool_test_results_dict ) ) )
response_dict = install_and_test_base_util.register_test_result( install_and_test_base_util.galaxy_tool_shed_url,
tool_test_results_dicts,
tool_test_results_dict,
repository_dict,
params,
can_update_tool_shed )
+ log.debug( 'Result of inserting tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( response_dict ) ) )
+ log.debug('=============================================================' )
else:
# The repository was successfully installed.
log.debug( 'Installation succeeded for revision %s of repository %s owned by %s.' % \
@@ -166,13 +176,18 @@
repository_identifier_dict,
install_and_test_statistics_dict,
tool_test_results_dict )
- # TODO: do something useful with response_dict
+ log.debug('=============================================================' )
+ log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( tool_test_results_dict ) ) )
response_dict = install_and_test_base_util.register_test_result( install_and_test_base_util.galaxy_tool_shed_url,
tool_test_results_dicts,
tool_test_results_dict,
repository_dict,
params,
can_update_tool_shed )
+ log.debug( 'Result of inserting tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
+ ( changeset_revision, name, owner, str( response_dict ) ) )
+ log.debug('=============================================================' )
else:
log.debug( 'Skipped attempt to install revision %s of repository %s owned by %s because ' % \
( changeset_revision, name, owner ) )
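The same before/after logging pattern now brackets every register_test_result() call in all three test scripts. A minimal sketch of how that pattern could be factored into a shared helper; the helper name register_and_log_test_result is hypothetical and not part of this commit:

import logging

log = logging.getLogger( __name__ )

def register_and_log_test_result( register_test_result, galaxy_tool_shed_url, tool_test_results_dicts,
                                  tool_test_results_dict, repository_dict, params, can_update_tool_shed,
                                  changeset_revision, name, owner ):
    # Hypothetical helper wrapping the logging pattern repeated throughout this commit.
    log.debug( '=============================================================' )
    log.debug( 'Inserting the following into tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
               ( changeset_revision, name, owner, str( tool_test_results_dict ) ) )
    response_dict = register_test_result( galaxy_tool_shed_url,
                                          tool_test_results_dicts,
                                          tool_test_results_dict,
                                          repository_dict,
                                          params,
                                          can_update_tool_shed )
    log.debug( 'Result of inserting tool_test_results for revision %s of repository %s owned by %s:\n%s' % \
               ( changeset_revision, name, owner, str( response_dict ) ) )
    log.debug( '=============================================================' )
    return response_dict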
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/cd8ca90e93a1/
Changeset: cd8ca90e93a1
User: greg
Date: 2014-01-05 01:25:43
Summary: Mark a repository created from a capsule, along with its dependent repositories, as installable only if its creation resulted in no errors.
Affected #: 7 files
diff -r f11a729d42cb66753b8b1616ad625c1b2a306b3d -r cd8ca90e93a1f896ee6ff3ac56665e64373ace00 lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -219,11 +219,12 @@
import_results_tups = repository_maintenance_util.create_repository_and_import_archive( trans,
repository_status_info_dict,
import_results_tups )
+ import_util.check_status_and_reset_downloadable( trans, import_results_tups )
suc.remove_dir( file_path )
# NOTE: the order of installation is defined in import_results_tups, but order will be lost when transferred to return_dict.
return_dict = {}
for import_results_tup in import_results_tups:
- name_owner, message = import_results_tup
+ ok, name_owner, message = import_results_tup
name, owner = name_owner
key = 'Archive of repository "%s" owned by "%s"' % ( str( name ), str( owner ) )
val = message.replace( '<b>', '"' ).replace( '</b>', '"' )
diff -r f11a729d42cb66753b8b1616ad625c1b2a306b3d -r cd8ca90e93a1f896ee6ff3ac56665e64373ace00 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -1883,9 +1883,11 @@
# Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name
repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path
- import_results_tups = repository_maintenance_util.create_repository_and_import_archive( trans,
- repository_status_info_dict,
- import_results_tups )
+ import_results_tups = \
+ repository_maintenance_util.create_repository_and_import_archive( trans,
+ repository_status_info_dict,
+ import_results_tups )
+ import_util.check_status_and_reset_downloadable( trans, import_results_tups )
suc.remove_dir( file_path )
return trans.fill_template( '/webapps/tool_shed/repository/import_capsule_results.mako',
export_info_dict=export_info_dict,
diff -r f11a729d42cb66753b8b1616ad625c1b2a306b3d -r cd8ca90e93a1f896ee6ff3ac56665e64373ace00 lib/tool_shed/util/import_util.py
--- a/lib/tool_shed/util/import_util.py
+++ b/lib/tool_shed/util/import_util.py
@@ -21,6 +21,37 @@
log = logging.getLogger( __name__ )
+def check_status_and_reset_downloadable( trans, import_results_tups ):
+ """Check the status of each imported repository and set downloadable to False if errors."""
+ flush = False
+ for import_results_tup in import_results_tups:
+ ok, name_owner, message = import_results_tup
+ name, owner = name_owner
+ if not ok:
+ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ # Do not allow the repository to be automatically installed if population resulted in errors.
+ tip_changeset_revision = repository.tip( trans.app )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ tip_changeset_revision )
+ if repository_metadata:
+ if repository_metadata.downloadable:
+ repository_metadata.downloadable = False
+ trans.sa_session.add( repository_metadata )
+ if not flush:
+ flush = True
+ # Do not allow dependent repository revisions to be automatically installed if population
+ # resulted in errors.
+ dependent_downloadable_revisions = suc.get_dependent_downloadable_revisions( trans, repository_metadata )
+ for dependent_downloadable_revision in dependent_downloadable_revisions:
+ if dependent_downloadable_revision.downloadable:
+ dependent_downloadable_revision.downloadable = False
+ trans.sa_session.add( dependent_downloadable_revision )
+ if not flush:
+ flush = True
+ if flush:
+ trans.sa_session.flush()
+
def extract_capsule_files( trans, **kwd ):
"""Extract the uploaded capsule archive into a temporary location for inspection, validation and potential import."""
return_dict = {}
@@ -243,13 +274,16 @@
commit_message,
undesirable_dirs_removed,
undesirable_files_removed )
+ if error_message:
+ results_dict[ 'ok' ] = False
+ results_dict[ 'error_message' ] += error_message
try:
- metadata_util.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str )
+ metadata_util.set_repository_metadata_due_to_new_tip( trans,
+ repository,
+ content_alert_str=content_alert_str )
except Exception, e:
log.debug( "Error setting metadata on repository %s created from imported archive %s: %s" % \
( str( repository.name ), str( archive_file_name ), str( e ) ) )
- results_dict[ 'ok' ] = ok
- results_dict[ 'error_message' ] += error_message
else:
archive.close()
results_dict[ 'ok' ] = False
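With this change each entry in import_results_tups is widened from ( name_owner, message ) to ( ok, name_owner, message ), and check_status_and_reset_downloadable() uses the ok flag to withdraw installability. A minimal sketch of the consuming pattern, with illustrative tuples only:

# Illustrative data; the real tuples are built by create_repository_and_import_archive().
import_results_tups = [ ( True, ( 'repo_a', 'alice' ), 'Import succeeded.' ),
                        ( False, ( 'repo_b', 'bob' ), 'Import failed: populating the repository produced errors.' ) ]
for ok, name_owner, message in import_results_tups:
    name, owner = name_owner
    if not ok:
        # Here check_status_and_reset_downloadable() looks up the repository's tip
        # repository_metadata record and sets downloadable = False on it and on any
        # dependent downloadable revisions.
        print( 'Repository %s owned by %s is not installable: %s' % ( name, owner, message ) )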
diff -r f11a729d42cb66753b8b1616ad625c1b2a306b3d -r cd8ca90e93a1f896ee6ff3ac56665e64373ace00 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -296,14 +296,19 @@
includes_tool_dependencies = True
if 'workflows' in metadata_dict:
includes_workflows = True
- if has_repository_dependencies or has_repository_dependencies_only_if_compiling_contained_td or includes_datatypes or \
- includes_tools or includes_tool_dependencies or includes_workflows:
+ if has_repository_dependencies or \
+ has_repository_dependencies_only_if_compiling_contained_td or \
+ includes_datatypes or \
+ includes_tools or \
+ includes_tool_dependencies or \
+ includes_workflows:
downloadable = True
else:
downloadable = False
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
- # A repository metadata record already exists with the received changeset_revision, so we don't need to check the skip_tool_test table.
+ # A repository metadata record already exists with the received changeset_revision, so we don't need to
+ # check the skip_tool_test table.
check_skip_tool_test = False
repository_metadata.metadata = metadata_dict
repository_metadata.downloadable = downloadable
@@ -313,7 +318,8 @@
repository_metadata.includes_tool_dependencies = includes_tool_dependencies
repository_metadata.includes_workflows = includes_workflows
else:
- # No repository_metadata record exists for the received changeset_revision, so we may need to update the skip_tool_test table.
+ # No repository_metadata record exists for the received changeset_revision, so we may need to update the
+ # skip_tool_test table.
check_skip_tool_test = True
repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id,
changeset_revision=changeset_revision,
@@ -324,7 +330,8 @@
includes_tools=includes_tools,
includes_tool_dependencies=includes_tool_dependencies,
includes_workflows=includes_workflows )
- # Always set the default values for the following columns. When resetting all metadata on a repository, this will reset the values.
+ # Always set the default values for the following columns. When resetting all metadata on a repository
+ # this will reset the values.
repository_metadata.tools_functionally_correct = False
repository_metadata.missing_test_components = False
repository_metadata.test_install_error = False
@@ -1738,7 +1745,8 @@
# NO_METADATA - no metadata for either ancestor or current, so continue from current
# EQUAL - ancestor metadata is equivalent to current metadata, so continue from current
# SUBSET - ancestor metadata is a subset of current metadata, so continue from current
- # NOT_EQUAL_AND_NOT_SUBSET - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
+ # NOT_EQUAL_AND_NOT_SUBSET - ancestor metadata is neither equal to nor a subset of current
+ # metadata, so persist ancestor metadata.
comparison = compare_changeset_revisions( trans,
ancestor_changeset_revision,
ancestor_metadata_dict,
@@ -1750,7 +1758,11 @@
elif comparison == NOT_EQUAL_AND_NOT_SUBSET:
metadata_changeset_revision = ancestor_changeset_revision
metadata_dict = ancestor_metadata_dict
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ repository_metadata = create_or_update_repository_metadata( trans,
+ id,
+ repository,
+ metadata_changeset_revision,
+ metadata_dict )
changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = current_changeset_revision
ancestor_metadata_dict = current_metadata_dict
@@ -1762,7 +1774,11 @@
metadata_changeset_revision = current_changeset_revision
metadata_dict = current_metadata_dict
# We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ repository_metadata = create_or_update_repository_metadata( trans,
+ id,
+ repository,
+ metadata_changeset_revision,
+ metadata_dict )
changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = None
ancestor_metadata_dict = None
@@ -1770,14 +1786,20 @@
# We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
if not ctx.children():
# We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ repository_metadata = create_or_update_repository_metadata( trans,
+ id,
+ repository,
+ metadata_changeset_revision,
+ metadata_dict )
changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = None
ancestor_metadata_dict = None
suc.remove_dir( work_dir )
- # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
+ # Delete all repository_metadata records for this repository that do not have a changeset_revision
+ # value in changeset_revisions.
clean_repository_metadata( trans, id, changeset_revisions )
- # Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog.
+ # Set tool version information for all downloadable changeset revisions. Get the list of changeset
+ # revisions from the changelog.
reset_all_tool_versions( trans, id, repo )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
tool_util.reset_tool_data_tables( trans.app )
@@ -1785,8 +1807,8 @@
def reset_metadata_on_selected_repositories( trans, **kwd ):
"""
- Inspect the repository changelog to reset metadata for all appropriate changeset revisions. This method is called from both Galaxy and the
- Tool Shed.
+ Inspect the repository changelog to reset metadata for all appropriate changeset revisions.
+ This method is called from both Galaxy and the Tool Shed.
"""
repository_ids = util.listify( kwd.get( 'repository_ids', None ) )
message = ''
@@ -1879,7 +1901,11 @@
tip_only = isinstance( repository_type_class, TipOnly )
if not tip_only and new_metadata_required_for_utilities( trans, repository, metadata_dict ):
# Create a new repository_metadata table row.
- repository_metadata = create_or_update_repository_metadata( trans, encoded_id, repository, repository.tip( trans.app ), metadata_dict )
+ repository_metadata = create_or_update_repository_metadata( trans,
+ encoded_id,
+ repository,
+ repository.tip( trans.app ),
+ metadata_dict )
# If this is the first record stored for this repository, see if we need to send any email alerts.
if len( repository.downloadable_revisions ) == 1:
suc.handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
@@ -1896,8 +1922,8 @@
repository_metadata.includes_datatypes = True
else:
repository_metadata.includes_datatypes = False
- # We don't store information about the special type of repository dependency that is needed only for compiling a tool dependency
- # defined for the dependent repository.
+ # We don't store information about the special type of repository dependency that is needed only for
+ # compiling a tool dependency defined for the dependent repository.
repository_dependencies_dict = metadata_dict.get( 'repository_dependencies', {} )
repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
@@ -1924,9 +1950,14 @@
trans.sa_session.flush()
else:
# There are no metadata records associated with the repository.
- repository_metadata = create_or_update_repository_metadata( trans, encoded_id, repository, repository.tip( trans.app ), metadata_dict )
+ repository_metadata = create_or_update_repository_metadata( trans,
+ encoded_id,
+ repository,
+ repository.tip( trans.app ),
+ metadata_dict )
if 'tools' in metadata_dict and repository_metadata and status != 'error':
- # Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog.
+ # Set tool versions on the new downloadable change set. The order of the list of changesets is
+ # critical, so we use the repo's changelog.
changeset_revisions = []
for changeset in repo.changelog:
changeset_revision = str( repo.changectx( changeset ) )
@@ -1945,8 +1976,12 @@
return message, status
def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
- """Set metadata on the repository tip in the tool shed - this method is not called from Galaxy."""
- error_message, status = set_repository_metadata( trans, repository, content_alert_str=content_alert_str, **kwd )
+ """Set metadata on the repository tip in the tool shed."""
+ # This method is not called from Galaxy.
+ error_message, status = set_repository_metadata( trans,
+ repository,
+ content_alert_str=content_alert_str,
+ **kwd )
if error_message:
# FIXME: This probably should not redirect since this method is called from the upload controller as well as the repository controller.
# If there is an error, display it.
@@ -1994,8 +2029,12 @@
break
if new_dependency_name and new_dependency_type and new_dependency_version:
# Update all attributes of the tool_dependency record in the database.
- log.debug( "Updating tool dependency '%s' with type '%s' and version '%s' to have new type '%s' and version '%s'." % \
- ( str( tool_dependency.name ), str( tool_dependency.type ), str( tool_dependency.version ), str( new_dependency_type ), str( new_dependency_version ) ) )
+ log.debug( "Updating version %s of tool dependency %s %s to have new version %s and type %s." % \
+ ( str( tool_dependency.version ),
+ str( tool_dependency.type ),
+ str( tool_dependency.name ),
+ str( new_dependency_version ),
+ str( new_dependency_type ) ) )
tool_dependency.type = new_dependency_type
tool_dependency.version = new_dependency_version
tool_dependency.status = app.install_model.ToolDependency.installation_status.UNINSTALLED
@@ -2004,9 +2043,10 @@
context.flush()
new_tool_dependency = tool_dependency
else:
- # We have no new tool dependency definition based on a matching dependency name, so remove the existing tool dependency record from the database.
- log.debug( "Deleting tool dependency with name '%s', type '%s' and version '%s' from the database since it is no longer defined." % \
- ( str( tool_dependency.name ), str( tool_dependency.type ), str( tool_dependency.version ) ) )
+ # We have no new tool dependency definition based on a matching dependency name, so remove
+ # the existing tool dependency record from the database.
+ log.debug( "Deleting version %s of tool dependency %s %s from the database since it is no longer defined." % \
+ ( str( tool_dependency.version ), str( tool_dependency.type ), str( tool_dependency.name ) ) )
context.delete( tool_dependency )
context.flush()
return new_tool_dependency
diff -r f11a729d42cb66753b8b1616ad625c1b2a306b3d -r cd8ca90e93a1f896ee6ff3ac56665e64373ace00 lib/tool_shed/util/repository_maintenance_util.py
--- a/lib/tool_shed/util/repository_maintenance_util.py
+++ b/lib/tool_shed/util/repository_maintenance_util.py
@@ -102,15 +102,17 @@
def create_repository_and_import_archive( trans, repository_archive_dict, import_results_tups ):
"""
- Create a new repository in the tool shed and populate it with the contents of a gzip compressed tar archive that was exported
- as part or all of the contents of a capsule.
+ Create a new repository in the tool shed and populate it with the contents of a gzip compressed tar archive
+ that was exported as part or all of the contents of a capsule.
"""
results_message = ''
name = repository_archive_dict.get( 'name', None )
username = repository_archive_dict.get( 'owner', None )
if name is None or username is None:
- results_message += 'Import failed: required repository name <b>%s</b> or owner <b>%s</b> is missing.' % ( str( name ), str( username ))
- import_results_tups.append( ( ( str( name ), str( username ) ), results_message ) )
+ ok = False
+ results_message += 'Import failed: required repository name <b>%s</b> or owner <b>%s</b> is missing.' % \
+ ( str( name ), str( username ))
+ import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
else:
if repository_archive_dict[ 'status' ] is None:
# The repository does not yet exist in this Tool Shed and the current user is authorized to import
@@ -122,8 +124,9 @@
# the exported repository archive.
user = suc.get_user_by_username( trans.app, username )
if user is None:
+ ok = False
results_message += 'Import failed: repository owner <b>%s</b> does not have an account in this Tool Shed.' % str( username )
- import_results_tups.append( ( ( str( name ), str( username ) ), results_message ) )
+ import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
else:
user_id = user.id
# The categories entry in the repository_archive_dict is a list of category names. If a name does not
@@ -134,8 +137,8 @@
for category_name in category_names:
category = suc.get_category_by_name( trans, category_name )
if category is None:
- results_message += 'This Tool Shed does not have the category <b>%s</b> so it will not be associated with this repository.' % \
- str( category_name )
+ results_message += 'This Tool Shed does not have the category <b>%s</b> so it ' % str( category_name )
+ results_message += 'will not be associated with this repository.'
else:
category_ids.append( trans.security.encode_id( category.id ) )
# Create the repository record in the database.
@@ -150,11 +153,14 @@
results_message += create_message
# Populate the new repository with the contents of exported repository archive.
results_dict = import_util.import_repository_archive( trans, repository, repository_archive_dict )
- import_results_tups.append( ( ( str( name ), str( username ) ), results_message ) )
+ ok = results_dict.get( 'ok', False )
+ import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
else:
# The repository either already exists in this Tool Shed or the current user is not authorized to create it.
- results_message += 'Import not necessary: repository status for this Tool Shed is: %s.' % str( repository_archive_dict[ 'status' ] )
- import_results_tups.append( ( ( str( name ), str( username ) ), results_message ) )
+ ok = True
+ results_message += 'Import not necessary: repository status for this Tool Shed is: %s.' % \
+ str( repository_archive_dict[ 'status' ] )
+ import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
return import_results_tups
def validate_repository_name( app, name, user ):
diff -r f11a729d42cb66753b8b1616ad625c1b2a306b3d -r cd8ca90e93a1f896ee6ff3ac56665e64373ace00 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -559,12 +559,65 @@
return repository_metadata
return None
+def get_dependent_downloadable_revisions( trans, repository_metadata ):
+ """
+ Return all repository_metadata records that are downloadable and that depend upon the received
+ repository_metadata record.
+ """
+ # This method is called only from the tool shed.
+ rm_changeset_revision = repository_metadata.changeset_revision
+ rm_repository = repository_metadata.repository
+ rm_repository_name = str( rm_repository.name )
+ rm_repository_owner = str( rm_repository.user.username )
+ dependent_downloadable_revisions = []
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( and_( trans.model.Repository.table.c.id != rm_repository.id,
+ trans.model.Repository.table.c.deleted == False,
+ trans.model.Repository.table.c.deprecated == False ) ):
+ downloadable_revisions = repository.downloadable_revisions
+ if downloadable_revisions:
+ for downloadable_revision in downloadable_revisions:
+ if downloadable_revision.has_repository_dependencies:
+ metadata = downloadable_revision.metadata
+ if metadata:
+ repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
+ repository_dependencies_tups = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ for repository_dependencies_tup in repository_dependencies_tups:
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( repository_dependencies_tup )
+ if name == rm_repository_name and owner == rm_repository_owner:
+ # We've discovered a repository revision that depends upon the repository associated
+ # with the received repository_metadata record, but we need to make sure it depends
+ # upon the revision.
+ if changeset_revision == rm_changeset_revision:
+ dependent_downloadable_revisions.append( downloadable_revision )
+ else:
+ # Make sure the defined changeset_revision is current.
+ defined_repository_metadata = \
+ trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) \
+ .first()
+ if defined_repository_metadata is None:
+ # The defined changeset_revision is not associated with a repository_metadata
+ # record, so updates must be necessary.
+ defined_repository = get_repository_by_name_and_owner( trans.app, name, owner )
+ defined_repo_dir = defined_repository.repo_path( trans.app )
+ defined_repo = hg.repository( get_configured_ui(), defined_repo_dir )
+ updated_changeset_revision = \
+ get_next_downloadable_changeset_revision( defined_repository,
+ defined_repo,
+ changeset_revision )
+ if updated_changeset_revision == rm_changeset_revision:
+ dependent_downloadable_revisions.append( downloadable_revision )
+ return dependent_downloadable_revisions
+
def get_file_context_from_ctx( ctx, filename ):
"""Return the mercurial file context for a specified file."""
- # We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
- # within ctx if the files were moved within the change set. For example, in the following ctx.files() list, the former may have been moved to
- # the latter: ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']. Another scenario
- # is that the file has been deleted.
+ # We have to be careful in determining if we found the correct file because multiple files with
+ # the same name may be in different directories within ctx if the files were moved within the change
+ # set. For example, in the following ctx.files() list, the former may have been moved to the latter:
+ # ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample'].
+ # Another scenario is that the file has been deleted.
deleted = False
filename = strip_path( filename )
for ctx_file in ctx.files():
diff -r f11a729d42cb66753b8b1616ad625c1b2a306b3d -r cd8ca90e93a1f896ee6ff3ac56665e64373ace00 templates/webapps/tool_shed/repository/import_capsule_results.mako
--- a/templates/webapps/tool_shed/repository/import_capsule_results.mako
+++ b/templates/webapps/tool_shed/repository/import_capsule_results.mako
@@ -70,7 +70,7 @@
<table class="grid">
%for import_results_tup in import_results_tups:
<%
- name_owner_tup, results_message = import_results_tup
+ ok, name_owner_tup, results_message = import_results_tup
name, owner = name_owner_tup
%><tr><td>Archive of repository <b>${name}</b> owned by <b>${owner}</b><br/>${results_message}</td></tr>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f11a729d42cb/
Changeset: f11a729d42cb
User: martenson
Date: 2014-01-03 22:01:54
Summary: Fix for a bug, introduced by myself in 2bb7df0, where the hid counter started at 2 instead of 1.
Affected #: 1 file
diff -r 720ad6d7b18324740eaf8727e2ed41ee24c68b45 -r f11a729d42cb66753b8b1616ad625c1b2a306b3d lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1852,16 +1852,21 @@
def db_next_hid( self ):
"""
- Override __next_hid to generate from the database in a concurrency
- safe way.
+ db_next_hid( self )
+
+ Override __next_hid to generate from the database in a concurrency safe way.
+ Loads the next history ID from the DB and returns it.
+ It also saves the future next_id into the DB.
+
+ :rtype: int
+ :returns: the next history id
"""
conn = object_session( self ).connection()
table = self.table
trans = conn.begin()
try:
- current_hid = select( [table.c.hid_counter], table.c.id == self.id, for_update=True ).scalar()
- next_hid = current_hid + 1
- table.update( table.c.id == self.id ).execute( hid_counter = ( next_hid ) )
+ next_hid = select( [table.c.hid_counter], table.c.id == self.id, for_update=True ).scalar()
+ table.update( table.c.id == self.id ).execute( hid_counter = ( next_hid + 1 ) )
trans.commit()
return next_hid
except:
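The off-by-one came from returning the incremented value after reading the counter: with hid_counter initialized to 1, the first call handed out hid 2. A minimal sketch contrasting the two schemes, with plain functions standing in for the SELECT ... FOR UPDATE / UPDATE round trip:

def old_next_hid( hid_counter ):
    # Buggy scheme: read the counter, then return and store counter + 1.
    next_hid = hid_counter + 1
    return next_hid, next_hid          # ( hid handed out, new counter value )

def new_next_hid( hid_counter ):
    # Fixed scheme: hand out the current counter and store counter + 1.
    return hid_counter, hid_counter + 1

# hid_counter starts at 1 for a new history.
assert old_next_hid( 1 ) == ( 2, 2 )   # first dataset wrongly received hid 2
assert new_next_hid( 1 ) == ( 1, 2 )   # first dataset now receives hid 1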
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c4a97053aca3/
Changeset: c4a97053aca3
User: greg
Date: 2014-01-03 20:21:29
Summary: Raise an exception when attempting to install into Galaxy a repository that contains an invalid repository dependency definition (which implies a bug in the Tool Shed framework).
Affected #: 5 files
diff -r 10b7c04f919ff6d7fc1d03ba88ebb282645fcbba -r c4a97053aca397ca783079a1e05c0cd2f921fb88 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1440,16 +1440,17 @@
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
original_metadata_dict = repository.metadata
- metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- changeset_revision=repository.changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict = repository.get_shed_config_dict( trans.app ),
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=False )
+ metadata_dict, invalid_file_tups = \
+ metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ changeset_revision=repository.changeset_revision,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=False )
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
suc.update_in_shed_tool_config( trans.app, repository )
@@ -1607,7 +1608,11 @@
changeset_revision = kwd.get( 'changeset_revision', None )
latest_changeset_revision = kwd.get( 'latest_changeset_revision', None )
latest_ctx_rev = kwd.get( 'latest_ctx_rev', None )
- repository = suc.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
+ repository = suc.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app,
+ tool_shed_url,
+ name,
+ owner,
+ changeset_revision )
if changeset_revision and latest_changeset_revision and latest_ctx_rev:
if changeset_revision == latest_changeset_revision:
message = "The installed repository named '%s' is current, there are no updates available. " % name
@@ -1627,16 +1632,17 @@
if repository.includes_data_managers:
data_manager_util.remove_from_data_manager( trans.app, repository )
# Update the repository metadata.
- metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- changeset_revision=latest_changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict = repository.get_shed_config_dict( trans.app ),
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=True,
- persist=True )
+ metadata_dict, invalid_file_tups = \
+ metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ changeset_revision=latest_changeset_revision,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict=repository.get_shed_config_dict( trans.app ),
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=True,
+ persist=True )
repository.metadata = metadata_dict
# Update the repository.changeset_revision column in the database.
repository.changeset_revision = latest_changeset_revision
@@ -1674,10 +1680,15 @@
repository_tools_tups )
# Create tool_dependency records if necessary.
if 'tool_dependencies' in metadata_dict:
- tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
- message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
+ tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app,
+ repository,
+ relative_install_dir,
+ set_status=False )
+ message = "The installed repository named '%s' has been updated to change set revision '%s'. " % \
+ ( name, latest_changeset_revision )
# See if any tool dependencies can be installed.
- shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = \
+ suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if repository.missing_tool_dependencies:
message += "Click the name of one of the missing tool dependencies listed below to install tool dependencies."
else:
diff -r 10b7c04f919ff6d7fc1d03ba88ebb282645fcbba -r c4a97053aca397ca783079a1e05c0cd2f921fb88 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -330,12 +330,13 @@
tool_panel_elems.append( elem )
return tool_panel_elems
- def handle_repository_contents( self, tool_shed_repository, repository_clone_url, relative_install_dir, repository_elem, install_dependencies, is_repository_dependency=False ):
+ def handle_repository_contents( self, tool_shed_repository, repository_clone_url, relative_install_dir, repository_elem,
+ install_dependencies, is_repository_dependency=False ):
"""
- Generate the metadata for the installed tool shed repository, among other things. If the installed tool_shed_repository contains tools
- that are loaded into the Galaxy tool panel, this method will automatically eliminate all entries for each of the tools defined in the
- received repository_elem from all non-shed-related tool panel configuration files since the entries are automatically added to the reserved
- migrated_tools_conf.xml file as part of the migration process.
+ Generate the metadata for the installed tool shed repository, among other things. If the installed tool_shed_repository
+ contains tools that are loaded into the Galaxy tool panel, this method will automatically eliminate all entries for each
+ of the tools defined in the received repository_elem from all non-shed-related tool panel configuration files since the
+ entries are automatically added to the reserved migrated_tools_conf.xml file as part of the migration process.
"""
tool_configs_to_filter = []
tool_panel_dict_for_display = odict()
@@ -351,14 +352,16 @@
# See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
is_displayed, tool_sections = self.get_containing_tool_sections( tool_config )
if is_displayed:
- tool_panel_dict_for_tool_config = tool_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
+ tool_panel_dict_for_tool_config = \
+ tool_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
# The tool-panel_dict has the following structure.
- # {<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <TooSection name>}]}
+ # {<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>,
+ # name : <TooSection name>}]}
for k, v in tool_panel_dict_for_tool_config.items():
tool_panel_dict_for_display[ k ] = v
for tool_panel_dict in v:
- # Keep track of tool config file names associated with entries that have been made to the migrated_tools_conf.xml file so
- # they can be eliminated from all non-shed-related tool panel configs.
+ # Keep track of tool config file names associated with entries that have been made to the
+ # migrated_tools_conf.xml file so they can be eliminated from all non-shed-related tool panel configs.
tool_config_file = tool_panel_dict.get( 'tool_config', None )
if tool_config_file:
if tool_config_file not in tool_configs_to_filter:
@@ -375,23 +378,28 @@
log.exception( "Exception attempting to filter and persist non-shed-related tool panel configs:\n%s" % str( e ) )
finally:
lock.release()
- metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=self.app,
- repository=tool_shed_repository,
- changeset_revision=tool_shed_repository.changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict = self.shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
+ metadata_dict, invalid_file_tups = \
+ metadata_util.generate_metadata_for_changeset_revision( app=self.app,
+ repository=tool_shed_repository,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = self.shed_config_dict,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=True )
tool_shed_repository.metadata = metadata_dict
self.app.install_model.context.add( tool_shed_repository )
self.app.install_model.context.flush()
has_tool_dependencies = self.__has_tool_dependencies( metadata_dict )
if has_tool_dependencies:
- # All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed.
- tool_dependencies = tool_dependency_util.create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
+ # All tool_dependency objects must be created before the tools are processed even if no
+ # tool dependencies will be installed.
+ tool_dependencies = tool_dependency_util.create_tool_dependency_objects( self.app,
+ tool_shed_repository,
+ relative_install_dir,
+ set_status=True )
else:
tool_dependencies = None
if 'tools' in metadata_dict:
diff -r 10b7c04f919ff6d7fc1d03ba88ebb282645fcbba -r c4a97053aca397ca783079a1e05c0cd2f921fb88 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -339,23 +339,24 @@
changeset_revision_dict[ 'ctx_rev' ] = None
return changeset_revision_dict
-def handle_repository_contents( trans, tool_shed_repository, tool_path, repository_clone_url, relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None,
- reinstalling=False ):
+def handle_repository_contents( trans, tool_shed_repository, tool_path, repository_clone_url, relative_install_dir,
+ tool_shed=None, tool_section=None, shed_tool_conf=None, reinstalling=False ):
"""
- Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
- when an administrator is installing a new repository or reinstalling an uninstalled repository.
+ Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy
+ (never the tool shed) when an administrator is installing a new repository or reinstalling an uninstalled repository.
"""
shed_config_dict = trans.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
- metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
- repository=tool_shed_repository,
- changeset_revision=tool_shed_repository.changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict=shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
+ metadata_dict, invalid_file_tups = \
+ metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=tool_shed_repository,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict=shed_config_dict,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=True )
tool_shed_repository.metadata = metadata_dict
# Update the tool_shed_repository.tool_shed_status column in the database.
tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( trans.app, tool_shed_repository )
@@ -364,13 +365,20 @@
trans.install_model.context.add( tool_shed_repository )
trans.install_model.context.flush()
if 'tool_dependencies' in metadata_dict and not reinstalling:
- tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
+ tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app,
+ tool_shed_repository,
+ relative_install_dir,
+ set_status=True )
if 'sample_files' in metadata_dict:
sample_files = metadata_dict.get( 'sample_files', [] )
tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
- tool_data_table_conf_filename, tool_data_table_elems = tool_util.install_tool_data_tables( trans.app, tool_shed_repository, tool_index_sample_files )
+ tool_data_table_conf_filename, tool_data_table_elems = \
+ tool_util.install_tool_data_tables( trans.app, tool_shed_repository, tool_index_sample_files )
if tool_data_table_elems:
- trans.app.tool_data_tables.add_new_entries_from_config_file( tool_data_table_conf_filename, None, trans.app.config.shed_tool_data_table_config, persist=True )
+ trans.app.tool_data_tables.add_new_entries_from_config_file( tool_data_table_conf_filename,
+ None,
+ trans.app.config.shed_tool_data_table_config,
+ persist=True )
if 'tools' in metadata_dict:
tool_panel_dict = tool_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
sample_files = metadata_dict.get( 'sample_files', [] )
@@ -380,10 +388,19 @@
repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = tool_util.handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
+ repository_tools_tups = tool_util.handle_missing_data_table_entry( trans.app,
+ relative_install_dir,
+ tool_path,
+ repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups, sample_files_copied )
- # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
+ repository_tools_tups, sample_files_copied = \
+ tool_util.handle_missing_index_file( trans.app,
+ tool_path,
+ sample_files,
+ repository_tools_tups,
+ sample_files_copied )
+ # Copy remaining sample files included in the repository to the ~/tool-data directory of the
+ # local Galaxy instance.
tool_util.copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
tool_util.add_to_tool_panel( app=trans.app,
repository_name=tool_shed_repository.name,
@@ -413,16 +430,18 @@
files_dir = os.path.join( shed_config_dict[ 'tool_path' ], files_dir )
datatypes_config = suc.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, files_dir )
# Load data types required by tools.
- converter_path, display_path = datatype_util.alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
+ converter_path, display_path = \
+ datatype_util.alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
- repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
- name=tool_shed_repository.name,
- owner=tool_shed_repository.owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
+ repository_dict = \
+ datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
+ name=tool_shed_repository.name,
+ owner=tool_shed_repository.owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
if converter_path:
# Load proprietary datatype converters
trans.app.datatypes_registry.load_datatype_converters( trans.app.toolbox, installed_repository_dict=repository_dict )
diff -r 10b7c04f919ff6d7fc1d03ba88ebb282645fcbba -r c4a97053aca397ca783079a1e05c0cd2f921fb88 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -568,23 +568,25 @@
tmp_url = suc.clean_repository_clone_url( repository_clone_url )
return '%s/%s/%s/%s' % ( tmp_url, guid_type, obj_id, version )
-def generate_metadata_for_changeset_revision( app, repository, changeset_revision, repository_clone_url, shed_config_dict=None, relative_install_dir=None,
- repository_files_dir=None, resetting_all_metadata_on_repository=False, updating_installed_repository=False,
+def generate_metadata_for_changeset_revision( app, repository, changeset_revision, repository_clone_url,
+ shed_config_dict=None, relative_install_dir=None, repository_files_dir=None,
+ resetting_all_metadata_on_repository=False, updating_installed_repository=False,
persist=False ):
"""
- Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip,
- the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
- disk files, so the value of repository_files_dir will not always be repository.repo_path( app ) (it could be an absolute path to a temporary
- directory containing a clone). If it is an absolute path, the value of relative_install_dir must contain repository.repo_path( app ).
+ Generate metadata for a repository using its files on disk. To generate metadata for changeset revisions older than
+ the repository tip, the repository will have been cloned to a temporary location and updated to a specified changeset
+ revision to access that changeset revision's disk files, so the value of repository_files_dir will not always be
+ repository.repo_path( app ) (it could be an absolute path to a temporary directory containing a clone). If it is an
+ absolute path, the value of relative_install_dir must contain repository.repo_path( app ).
- The value of persist will be True when the installed repository contains a valid tool_data_table_conf.xml.sample file, in which case the entries
- should ultimately be persisted to the file referred to by app.config.shed_tool_data_table_config.
+ The value of persist will be True when the installed repository contains a valid tool_data_table_conf.xml.sample file,
+ in which case the entries should ultimately be persisted to the file referred to by app.config.shed_tool_data_table_config.
"""
if shed_config_dict is None:
shed_config_dict = {}
if updating_installed_repository:
- # Keep the original tool shed repository metadata if setting metadata on a repository installed into a local Galaxy instance for which
- # we have pulled updates.
+ # Keep the original tool shed repository metadata if setting metadata on a repository installed into a local Galaxy
+ # instance for which we have pulled updates.
original_repository_metadata = repository.metadata
else:
original_repository_metadata = None
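To make the two directory arguments concrete, here is a sketch of the two scenarios the docstring describes, using the signature shown in this hunk. All variables are placeholders, the temporary clone path is invented, and the return value (the metadata dict plus the accumulated invalid-file tuples) is assumed from the surrounding code rather than guaranteed:

    # A sketch, not Galaxy code: placeholder variables throughout.
    # Scenario 1: metadata for the repository tip, read from the repository's
    # own files on disk, so no repository_files_dir is needed.
    metadata_dict, invalid_file_tups = \
        generate_metadata_for_changeset_revision( app,
                                                  repository,
                                                  tip_changeset_revision,
                                                  repository_clone_url,
                                                  relative_install_dir=repository.repo_path( app ) )
    # Scenario 2: metadata for an older revision. The repository has been
    # cloned to a temporary directory and updated to that revision, so
    # repository_files_dir is an absolute path to the clone while
    # relative_install_dir still contains repository.repo_path( app ).
    metadata_dict, invalid_file_tups = \
        generate_metadata_for_changeset_revision( app,
                                                  repository,
                                                  older_changeset_revision,
                                                  repository_clone_url,
                                                  relative_install_dir=repository.repo_path( app ),
                                                  repository_files_dir='/tmp/tmpXXXXXX/repo_clone',
                                                  resetting_all_metadata_on_repository=True )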
@@ -603,11 +605,12 @@
if resetting_all_metadata_on_repository:
if not relative_install_dir:
raise Exception( "The value of repository.repo_path( app ) must be sent when resetting all metadata on a repository." )
- # Keep track of the location where the repository is temporarily cloned so that we can strip the path when setting metadata. The value of
- # repository_files_dir is the full path to the temporary directory to which the repository was cloned.
+ # Keep track of the location where the repository is temporarily cloned so that we can strip the path when setting metadata.
+ # The value of repository_files_dir is the full path to the temporary directory to which the repository was cloned.
work_dir = repository_files_dir
files_dir = repository_files_dir
- # Since we're working from a temporary directory, we can safely copy sample files included in the repository to the repository root.
+ # Since we're working from a temporary directory, we can safely copy sample files included in the repository to the repository
+ # root.
app.config.tool_data_path = repository_files_dir
app.config.tool_data_table_config_path = repository_files_dir
else:
@@ -624,10 +627,11 @@
if datatypes_config:
metadata_dict = generate_datatypes_metadata( app, repository, repository_clone_url, files_dir, datatypes_config, metadata_dict )
# Get the relative path to all sample files included in the repository for storage in the repository's metadata.
- sample_file_metadata_paths, sample_file_copy_paths = get_sample_files_from_disk( repository_files_dir=files_dir,
- tool_path=shed_config_dict.get( 'tool_path' ),
- relative_install_dir=relative_install_dir,
- resetting_all_metadata_on_repository=resetting_all_metadata_on_repository )
+ sample_file_metadata_paths, sample_file_copy_paths = \
+ get_sample_files_from_disk( repository_files_dir=files_dir,
+ tool_path=shed_config_dict.get( 'tool_path' ),
+ relative_install_dir=relative_install_dir,
+ resetting_all_metadata_on_repository=resetting_all_metadata_on_repository )
if sample_file_metadata_paths:
metadata_dict[ 'sample_files' ] = sample_file_metadata_paths
# Copy all sample files included in the repository to a single directory location so we can load tools that depend on them.
@@ -636,10 +640,11 @@
# If the list of sample files includes a tool_data_table_conf.xml.sample file, load its table elements into memory.
relative_path, filename = os.path.split( sample_file )
if filename == 'tool_data_table_conf.xml.sample':
- new_table_elems, error_message = app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
- tool_data_path=app.config.tool_data_path,
- shed_tool_data_table_config=app.config.shed_tool_data_table_config,
- persist=False )
+ new_table_elems, error_message = \
+ app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
+ tool_data_path=app.config.tool_data_path,
+ shed_tool_data_table_config=app.config.shed_tool_data_table_config,
+ persist=False )
if error_message:
invalid_file_tups.append( ( filename, error_message ) )
for root, dirs, files in os.walk( files_dir ):
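For reference, a tool_data_table_conf.xml.sample file of the kind this branch looks for typically has the following shape; the table name, columns, and .loc path below are illustrative, not taken from any particular repository:

<tables>
    <!-- Each <table> entry is loaded into memory by
         add_new_entries_from_config_file() above. -->
    <table name="example_indexes" comment_char="#">
        <columns>value, dbkey, name, path</columns>
        <file path="tool-data/example_indexes.loc" />
    </table>
</tables>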
@@ -650,7 +655,9 @@
# See if we have a repository dependencies definition.
if name == suc.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
path_to_repository_dependencies_config = os.path.join( root, name )
- metadata_dict, error_message = generate_repository_dependency_metadata( app, path_to_repository_dependencies_config, metadata_dict )
+ metadata_dict, error_message = generate_repository_dependency_metadata( app,
+ path_to_repository_dependencies_config,
+ metadata_dict )
if error_message:
invalid_file_tups.append( ( name, error_message ) )
# See if we have one or more READ_ME files.
@@ -666,8 +673,11 @@
elif name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
full_path = str( os.path.abspath( os.path.join( root, name ) ) )
if os.path.getsize( full_path ) > 0:
- if not ( checkers.check_binary( full_path ) or checkers.check_image( full_path ) or checkers.check_gzip( full_path )[ 0 ]
- or checkers.check_bz2( full_path )[ 0 ] or checkers.check_zip( full_path ) ):
+ if not ( checkers.check_binary( full_path ) or
+ checkers.check_image( full_path ) or
+ checkers.check_gzip( full_path )[ 0 ] or
+ checkers.check_bz2( full_path )[ 0 ] or
+ checkers.check_zip( full_path ) ):
# Make sure we're looking at a tool config and not a display application config or something else.
element_tree, error_message = xml_util.parse_xml( full_path )
if element_tree is None:
@@ -676,13 +686,15 @@
element_tree_root = element_tree.getroot()
is_tool = element_tree_root.tag == 'tool'
if is_tool:
- tool, valid, error_message = tool_util.load_tool_from_config( app, app.security.encode_id( repository.id ), full_path )
+ tool, valid, error_message = \
+ tool_util.load_tool_from_config( app, app.security.encode_id( repository.id ), full_path )
if tool is None:
if not valid:
invalid_tool_configs.append( name )
invalid_file_tups.append( ( name, error_message ) )
else:
- invalid_files_and_errors_tups = tool_util.check_tool_input_params( app, files_dir, name, tool, sample_file_copy_paths )
+ invalid_files_and_errors_tups = \
+ tool_util.check_tool_input_params( app, files_dir, name, tool, sample_file_copy_paths )
can_set_metadata = True
for tup in invalid_files_and_errors_tups:
if name in tup:
@@ -690,15 +702,17 @@
invalid_tool_configs.append( name )
break
if can_set_metadata:
- relative_path_to_tool_config = get_relative_path_to_repository_file( root,
- name,
- relative_install_dir,
- work_dir,
- shed_config_dict,
- resetting_all_metadata_on_repository )
-
-
- metadata_dict = generate_tool_metadata( relative_path_to_tool_config, tool, repository_clone_url, metadata_dict )
+ relative_path_to_tool_config = \
+ get_relative_path_to_repository_file( root,
+ name,
+ relative_install_dir,
+ work_dir,
+ shed_config_dict,
+ resetting_all_metadata_on_repository )
+ metadata_dict = generate_tool_metadata( relative_path_to_tool_config,
+ tool,
+ repository_clone_url,
+ metadata_dict )
else:
for tup in invalid_files_and_errors_tups:
invalid_file_tups.append( tup )
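The tool-detection logic above boils down to: parse the XML and check that the root tag is 'tool'. A minimal standalone sketch of that check, using the standard library in place of Galaxy's xml_util wrapper:

import xml.etree.ElementTree as ET

def looks_like_tool_config( full_path ):
    # Parse the candidate file; anything that is not well-formed XML
    # cannot be a tool config.
    try:
        element_tree = ET.parse( full_path )
    except ET.ParseError:
        return False
    # A tool config (as opposed to, e.g., a display application config)
    # has <tool> as its root element.
    return element_tree.getroot().tag == 'tool'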
@@ -752,8 +766,9 @@
def generate_package_dependency_metadata( app, elem, valid_tool_dependencies_dict, invalid_tool_dependencies_dict ):
"""
- Generate the metadata for a tool dependencies package defined for a repository. The value of package_name must match the value of the "package"
- type in the tool config's <requirements> tag set. This method is called from both Galaxy and the tool shed.
+ Generate the metadata for a tool dependencies package defined for a repository. The value of package_name must
+ match the value of the "package" type in the tool config's <requirements> tag set. This method is called from
+ both Galaxy and the tool shed.
"""
repository_dependency_is_valid = True
repository_dependency_tup = []
@@ -833,22 +848,23 @@
valid_repository_dependencies_dict = dict( description=root.get( 'description' ) )
valid_repository_dependency_tups = []
for repository_elem in root.findall( 'repository' ):
- repository_dependency_tup, repository_dependency_is_valid, error_message = handle_repository_elem( app,
- repository_elem,
- only_if_compiling_contained_td=False )
+ repository_dependency_tup, repository_dependency_is_valid, err_msg = \
+ handle_repository_elem( app, repository_elem, only_if_compiling_contained_td=False )
if repository_dependency_is_valid:
valid_repository_dependency_tups.append( repository_dependency_tup )
else:
# Append the error_message to the repository dependencies tuple.
- toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = repository_dependency_tup
+ toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ repository_dependency_tup
repository_dependency_tup = ( toolshed,
name,
owner,
changeset_revision,
prior_installation_required,
only_if_compiling_contained_td,
- error_message )
+ err_msg )
invalid_repository_dependency_tups.append( repository_dependency_tup )
+ error_message += err_msg
if invalid_repository_dependency_tups:
invalid_repository_dependencies_dict[ 'repository_dependencies' ] = invalid_repository_dependency_tups
metadata_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_dict
@@ -1142,50 +1158,72 @@
def handle_repository_elem( app, repository_elem, only_if_compiling_contained_td=False ):
"""
- Process the received repository_elem which is a <repository> tag either from a repository_dependencies.xml file or a tool_dependencies.xml file.
- If the former, we're generating repository dependencies metadata for a repository in the tool shed. If the latter, we're generating package
- dependency metadata within Galaxy or the tool shed.
+ Process the received repository_elem which is a <repository> tag either from a repository_dependencies.xml
+ file or a tool_dependencies.xml file. If the former, we're generating repository dependencies metadata for
+ a repository in the tool shed. If the latter, we're generating package dependency metadata within Galaxy or
+ the tool shed.
"""
sa_session = app.model.context.current
is_valid = True
error_message = ''
- toolshed = repository_elem.get( 'toolshed' )
+ toolshed = repository_elem.get( 'toolshed', None )
+ name = repository_elem.get( 'name', None )
+ owner = repository_elem.get( 'owner', None )
+ changeset_revision = repository_elem.get( 'changeset_revision', None )
+ prior_installation_required = str( repository_elem.get( 'prior_installation_required', False ) )
+ if app.name == 'galaxy':
+ # We're installing a repository into Galaxy, so make sure its contained repository dependency definition
+ # is valid.
+ if toolshed is None or name is None or owner is None or changeset_revision is None:
+ # Raise an exception here instead of returning an error_message to keep the installation from
+ # proceeding. Reaching here implies a bug in the Tool Shed framework.
+ error_message = 'Installation halted because the following repository dependency definition is invalid:\n'
+ error_message += xml_util.xml_to_string( repository_elem, use_indent=True )
+ raise Exception( error_message )
if not toolshed:
# Default to the current tool shed.
toolshed = str( url_for( '/', qualified=True ) ).rstrip( '/' )
+ repository_dependency_tup = [ toolshed,
+ name,
+ owner,
+ changeset_revision,
+ prior_installation_required,
+ str( only_if_compiling_contained_td ) ]
cleaned_toolshed = td_common_util.clean_tool_shed_url( toolshed )
- name = repository_elem.get( 'name' )
- owner = repository_elem.get( 'owner' )
- changeset_revision = repository_elem.get( 'changeset_revision' )
- prior_installation_required = str( repository_elem.get( 'prior_installation_required', False ) )
- repository_dependency_tup = [ toolshed, name, owner, changeset_revision, prior_installation_required, str( only_if_compiling_contained_td ) ]
user = None
repository = None
if app.name == 'galaxy':
- # We're in Galaxy. We reach here when we're generating the metadata for a tool dependencies package defined for a repository or when we're
- # generating metadata for an installed repository. See if we can locate the installed repository via the changeset_revision defined in the
- # repository_elem (it may be outdated). If we're successful in locating an installed repository with the attributes defined in the
- # repository_elem, we know it is valid.
+ # We're in Galaxy. We reach here when we're generating the metadata for a tool dependencies package defined
+ # for a repository or when we're generating metadata for an installed repository. See if we can locate the
+ # installed repository via the changeset_revision defined in the repository_elem (it may be outdated). If we're
+ # successful in locating an installed repository with the attributes defined in the repository_elem, we know it
+ # is valid.
repository = suc.get_repository_for_dependency_relationship( app, cleaned_toolshed, name, owner, changeset_revision )
if repository:
return repository_dependency_tup, is_valid, error_message
else:
- # Send a request to the tool shed to retrieve appropriate additional changeset revisions with which the repository may have been installed.
+ # Send a request to the tool shed to retrieve appropriate additional changeset revisions with which the repository
+ # may have been installed.
text = install_util.get_updated_changeset_revisions_from_tool_shed( app, toolshed, name, owner, changeset_revision )
if text:
updated_changeset_revisions = util.listify( text )
for updated_changeset_revision in updated_changeset_revisions:
- repository = suc.get_repository_for_dependency_relationship( app, cleaned_toolshed, name, owner, updated_changeset_revision )
+ repository = suc.get_repository_for_dependency_relationship( app,
+ cleaned_toolshed,
+ name,
+ owner,
+ updated_changeset_revision )
if repository:
return repository_dependency_tup, is_valid, error_message
- # Don't generate an error message for missing repository dependencies that are required only if compiling the dependent repository's
- # tool dependency.
+ # Don't generate an error message for missing repository dependencies that are required only if compiling the
+ # dependent repository's tool dependency.
if not only_if_compiling_contained_td:
- # We'll currently default to setting the repository dependency definition as invalid if an installed repository cannot be found.
- # This may not be ideal because the tool shed may have simply been inaccessible when metadata was being generated for the installed
- # tool shed repository.
- error_message = "Ignoring invalid repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
- ( toolshed, name, owner, changeset_revision )
+ # We'll currently default to setting the repository dependency definition as invalid if an installed repository
+ # cannot be found. This may not be ideal because the tool shed may have simply been inaccessible when metadata
+ # was being generated for the installed tool shed repository.
+ error_message = "Ignoring invalid repository dependency definition for tool shed %s, name %s, owner %s, " % \
+ ( toolshed, name, owner )
+ error_message += "changeset revision %s." % changeset_revision
log.debug( error_message )
is_valid = False
return repository_dependency_tup, is_valid, error_message
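For context, the <repository> tag handled above looks like the following in a repository_dependencies.xml file; every attribute value here is invented:

<repository toolshed="http://toolshed.g2.bx.psu.edu"
            name="package_example_1_0"
            owner="some_owner"
            changeset_revision="0123456789ab"
            prior_installation_required="True" />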
@@ -1197,9 +1235,9 @@
.filter( app.model.User.table.c.username == owner ) \
.one()
except Exception, e:
- error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
- ( toolshed, name, owner, changeset_revision )
- error_message += "because the owner is invalid. "
+ error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " % \
+ ( toolshed, name, owner )
+ error_message += "changeset revision %s because the owner is invalid. " % changeset_revision
log.debug( error_message )
is_valid = False
return repository_dependency_tup, is_valid, error_message
@@ -1209,21 +1247,26 @@
app.model.Repository.table.c.user_id == user.id ) ) \
.one()
except:
- error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
- ( toolshed, name, owner, changeset_revision )
- error_message += "because the name is invalid. "
+ error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " % \
+ ( toolshed, name, owner )
+ error_message += "changeset revision %s because the name is invalid. " % changeset_revision
log.debug( error_message )
is_valid = False
return repository_dependency_tup, is_valid, error_message
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( app ) )
- # The received changeset_revision may be None since defining it in the dependency definition is optional. If this is the case,
- # the default will be to set it's value to the repository dependency tip revision. This probably occurs only when handling
- # circular dependency definitions.
+ # The received changeset_revision may be None since defining it in the dependency definition is optional.
+ # If this is the case, the default will be to set its value to the repository dependency tip revision.
+ # This probably occurs only when handling circular dependency definitions.
tip_ctx = repo.changectx( repo.changelog.tip() )
# Make sure the repo.changelog includes at least 1 revision.
if changeset_revision is None and tip_ctx.rev() >= 0:
changeset_revision = str( tip_ctx )
- repository_dependency_tup = [ toolshed, name, owner, changeset_revision, prior_installation_required, str( only_if_compiling_contained_td ) ]
+ repository_dependency_tup = [ toolshed,
+ name,
+ owner,
+ changeset_revision,
+ prior_installation_required,
+ str( only_if_compiling_contained_td ) ]
return repository_dependency_tup, is_valid, error_message
else:
# Find the specified changeset revision in the repository's changelog to see if it's valid.
@@ -1234,16 +1277,17 @@
found = True
break
if not found:
- error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
- ( toolshed, name, owner, changeset_revision )
- error_message += "because the changeset revision is invalid. "
+ error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " % \
+ ( toolshed, name, owner )
+ error_message += "changeset revision %s because the changeset revision is invalid. " % changeset_revision
log.debug( error_message )
is_valid = False
return repository_dependency_tup, is_valid, error_message
else:
# Repository dependencies are currently supported within a single tool shed.
- error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring repository dependency definition "
- error_message += "for tool shed %s, name %s, owner %s, changeset revision %s. " % ( toolshed, name, owner, changeset_revision )
+ error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring "
+ error_message += "repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s. " % \
+ ( toolshed, name, owner, changeset_revision )
log.debug( error_message )
is_valid = False
return repository_dependency_tup, is_valid, error_message
@@ -1811,8 +1855,8 @@
def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
"""
- Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
- has problems.
+ Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the
+ repository owner that the changeset has problems.
"""
message = ''
status = 'done'
diff -r 10b7c04f919ff6d7fc1d03ba88ebb282645fcbba -r c4a97053aca397ca783079a1e05c0cd2f921fb88 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -945,7 +945,10 @@
return has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td
def get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ):
- """Return an installed tool_shed_repository database record that is defined by either the current changeset revision or the installed_changeset_revision."""
+ """
+ Return an installed tool_shed_repository database record that is defined by either the current changeset
+ revision or the installed_changeset_revision.
+ """
# This method is used only in Galaxy, not the tool shed.
if tool_shed.endswith( '/' ):
tool_shed = tool_shed.rstrip( '/' )
@@ -1330,8 +1333,8 @@
def get_updated_changeset_revisions( trans, name, owner, changeset_revision ):
"""
- Return a string of comma-separated changeset revision hashes for all available updates to the received changeset revision for the repository
- defined by the received name and owner.
+ Return a string of comma-separated changeset revision hashes for all available updates to the received changeset
+ revision for the repository defined by the received name and owner.
"""
repository = get_repository_by_name_and_owner( trans.app, name, owner )
repo_dir = repository.repo_path( trans.app )
@@ -1351,8 +1354,8 @@
def get_url_from_tool_shed( app, tool_shed ):
"""
- The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
- http://toolshed.g2.bx.psu.edu/
+ The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is
+ something like: http://toolshed.g2.bx.psu.edu/
"""
for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
if shed_url.find( tool_shed ) >= 0:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e92e13e9c103/
Changeset: e92e13e9c103
User: natefoo
Date: 2014-01-03 17:41:19
Summary: Allow changing the header for remote user.
Affected #: 6 files
diff -r 5f221e5774804c77987831bebc6941d5a91fa872 -r e92e13e9c103cc1f36dff65e1523479bf5cb17ed lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -123,6 +123,7 @@
self.retry_metadata_internally = string_as_bool( kwargs.get( "retry_metadata_internally", "True" ) )
self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
+ self.remote_user_header = kwargs.get( "remote_user_header", 'HTTP_REMOTE_USER' )
self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
diff -r 5f221e5774804c77987831bebc6941d5a91fa872 -r e92e13e9c103cc1f36dff65e1523479bf5cb17ed lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -586,9 +586,9 @@
# things now.
if self.app.config.use_remote_user:
#If this is an api request, and they've passed a key, we let this go.
- assert "HTTP_REMOTE_USER" in self.environ, \
- "use_remote_user is set but no HTTP_REMOTE_USER variable"
- remote_user_email = self.environ[ 'HTTP_REMOTE_USER' ]
+ assert self.app.config.remote_user_header in self.environ, \
+ "use_remote_user is set but %s header was not provided" % self.app.config.remote_user_header
+ remote_user_email = self.environ[ self.app.config.remote_user_header ]
if galaxy_session:
# An existing session, make sure correct association exists
if galaxy_session.user is None:
diff -r 5f221e5774804c77987831bebc6941d5a91fa872 -r e92e13e9c103cc1f36dff65e1523479bf5cb17ed lib/galaxy/web/framework/middleware/remoteuser.py
--- a/lib/galaxy/web/framework/middleware/remoteuser.py
+++ b/lib/galaxy/web/framework/middleware/remoteuser.py
@@ -36,11 +36,12 @@
"""
class RemoteUser( object ):
- def __init__( self, app, maildomain=None, display_servers=None, admin_users=None ):
+ def __init__( self, app, maildomain=None, display_servers=None, admin_users=None, remote_user_header=None ):
self.app = app
self.maildomain = maildomain
self.display_servers = display_servers or []
self.admin_users = admin_users or []
+ self.remote_user_header = remote_user_header or 'HTTP_REMOTE_USER'
def __call__( self, environ, start_response ):
# Allow display servers
if self.display_servers and environ.has_key( 'REMOTE_ADDR' ):
@@ -50,16 +51,16 @@
# in the event of a lookup failure, deny access
host = None
if host in self.display_servers:
- environ[ 'HTTP_REMOTE_USER' ] = 'remote_display_server@%s' % ( self.maildomain or 'example.org' )
+ environ[ self.remote_user_header ] = 'remote_display_server@%s' % ( self.maildomain or 'example.org' )
return self.app( environ, start_response )
# Apache sets REMOTE_USER to the string '(null)' when using the
# Rewrite* method for passing REMOTE_USER and a user is
# un-authenticated. Any other possible values need to go here as well.
path_info = environ.get('PATH_INFO', '')
- if environ.has_key( 'HTTP_REMOTE_USER' ) and environ[ 'HTTP_REMOTE_USER' ] != '(null)':
- if not environ[ 'HTTP_REMOTE_USER' ].count( '@' ):
+ if environ.has_key( self.remote_user_header ) and environ[ self.remote_user_header ] != '(null)':
+ if not environ[ self.remote_user_header ].count( '@' ):
if self.maildomain is not None:
- environ[ 'HTTP_REMOTE_USER' ] += '@' + self.maildomain
+ environ[ self.remote_user_header ] += '@' + self.maildomain
else:
title = "Access to Galaxy is denied"
message = """
@@ -73,7 +74,7 @@
before you may access Galaxy.
"""
return self.error( start_response, title, message )
- if path_info.startswith( '/user/create' ) and environ[ 'HTTP_REMOTE_USER' ] in self.admin_users:
+ if path_info.startswith( '/user/create' ) and environ[ self.remote_user_header ] in self.admin_users:
pass # admins can create users
elif path_info.startswith( '/user/api_keys' ):
pass # api keys can be managed when remote_user is in use
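Given the constructor above, wiring the middleware by hand would look roughly like this; the header name, mail domain, and admin address are placeholders:

from galaxy.web.framework.middleware.remoteuser import RemoteUser

app = RemoteUser( app,
                  maildomain='example.org',
                  display_servers=[],
                  admin_users=[ 'admin@example.org' ],
                  remote_user_header='HTTP_X_CUSTOM_USER' )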
diff -r 5f221e5774804c77987831bebc6941d5a91fa872 -r e92e13e9c103cc1f36dff65e1523479bf5cb17ed lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -350,7 +350,8 @@
from galaxy.web.framework.middleware.remoteuser import RemoteUser
app = RemoteUser( app, maildomain = conf.get( 'remote_user_maildomain', None ),
display_servers = util.listify( conf.get( 'display_servers', '' ) ),
- admin_users = conf.get( 'admin_users', '' ).split( ',' ) )
+ admin_users = conf.get( 'admin_users', '' ).split( ',' ),
+ remote_user_header = conf.get( 'remote_user_header', 'HTTP_REMOTE_USER' ) )
log.debug( "Enabling 'remote user' middleware" )
# The recursive middleware allows for including requests in other
# requests or forwarding of requests, all on the server side.
diff -r 5f221e5774804c77987831bebc6941d5a91fa872 -r e92e13e9c103cc1f36dff65e1523479bf5cb17ed lib/galaxy/webapps/tool_shed/config.py
--- a/lib/galaxy/webapps/tool_shed/config.py
+++ b/lib/galaxy/webapps/tool_shed/config.py
@@ -72,6 +72,7 @@
self.blacklist_location = kwargs.get( 'blacklist_file', None )
self.blacklist_content = None
self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
+ self.remote_user_header = kwargs.get( "remote_user_header", 'HTTP_REMOTE_USER' )
self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
diff -r 5f221e5774804c77987831bebc6941d5a91fa872 -r e92e13e9c103cc1f36dff65e1523479bf5cb17ed universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -586,6 +586,13 @@
# to usernames, to become your Galaxy usernames (email addresses).
#remote_user_maildomain = None
+# If use_remote_user is enabled, the header in which the upstream proxy provides
+# the remote username defaults to HTTP_REMOTE_USER (the 'HTTP_' prefix is added
+# by WSGI). This option allows you to change the header. Note that you still
+# need to prepend 'HTTP_' to the header name in this option, but your proxy
+# server should *not* include 'HTTP_' at the beginning of the header name.
+#remote_user_header = 'HTTP_REMOTE_USER'
+
# If use_remote_user is enabled, you can set this to a URL that will log your
# users out.
#remote_user_logout_href = None
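As a sketch of the end-to-end flow under these settings, suppose an upstream nginx proxy authenticates users and passes the result in a custom header (the header name below is invented). WSGI upcases the header name, replaces '-' with '_', and prepends 'HTTP_', so the Galaxy option must name the transformed key:

# nginx side -- no HTTP_ prefix in the header name:
proxy_set_header X-Custom-User $remote_user;

# universe_wsgi.ini side -- the WSGI-transformed name:
use_remote_user = True
remote_user_header = HTTP_X_CUSTOM_USER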
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5f221e577480/
Changeset: 5f221e577480
User: greg
Date: 2014-01-03 16:02:19
Summary: Order imports per database_contexts requirements to fix the tool shed's install and test framework.
Affected #: 2 files
diff -r f0f7c3cd2e8af64243f878c04f418c1ee054bc55 -r 5f221e5774804c77987831bebc6941d5a91fa872 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -485,9 +485,14 @@
master_api_key=master_api_key,
user_api_key=os.environ.get( "GALAXY_TEST_USER_API_KEY", default_galaxy_user_key ),
)
- else: #when testing data managers, do not test toolbox
+ else:
+ # We must make sure that functional.test_toolbox is always imported after
+ # database_contexts.galaxy_context is set (which occurs in this method above).
+ # If functional.test_toolbox is imported before database_contexts.galaxy_context
+ # is set, sa_session will be None in all methods that use it.
import functional.test_toolbox
functional.test_toolbox.toolbox = app.toolbox
+ # When testing data managers, do not test toolbox.
functional.test_toolbox.build_tests(
testing_shed_tools=testing_shed_tools,
master_api_key=master_api_key,
diff -r f0f7c3cd2e8af64243f878c04f418c1ee054bc55 -r 5f221e5774804c77987831bebc6941d5a91fa872 test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
@@ -52,7 +52,6 @@
from paste import httpserver
from functional import database_contexts
-import functional.test_toolbox as imported_test_toolbox
log = logging.getLogger( 'install_and_test_repositories_with_tools' )
@@ -136,6 +135,10 @@
return tool_id, tool_version
def install_and_test_repositories( app, galaxy_shed_tools_dict, galaxy_shed_tool_conf_file ):
+ # We must make sure that functional.test_toolbox is always imported after database_contexts.galaxy_context
+ # is set (which occurs in the main method before this method is called). If functional.test_toolbox is
+ # imported before database_contexts.galaxy_context is set, sa_session will be None in all methods that use it.
+ import functional.test_toolbox as imported_test_toolbox
global test_toolbox
test_toolbox = imported_test_toolbox
# Initialize a dictionary for the summary that will be printed to stdout.
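The import-order constraint described in this comment is plain Python semantics: a module-level binding captures whatever value the global holds at import time. A minimal illustration with made-up module names:

# contexts.py (stands in for functional.database_contexts)
galaxy_context = None

# consumer.py (stands in for functional.test_toolbox)
import contexts
# Evaluated exactly once, at import time. If contexts.galaxy_context has
# not been assigned yet, sa_session is captured as None and stays None.
sa_session = contexts.galaxy_context

# driver.py
import contexts
contexts.galaxy_context = object()   # set the context first...
import consumer                      # ...then import the consumer
assert consumer.sa_session is not None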
@@ -442,7 +445,6 @@
app = UniverseApplication( **kwargs )
database_contexts.galaxy_context = app.model.context
database_contexts.install_context = app.install_model.context
-
log.debug( "Embedded Galaxy application started..." )
# ---- Run galaxy webserver ------------------------------------------------------
server = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/